code (stringlengths 22-1.05M) | apis (listlengths 1-3.31k) | extract_api (stringlengths 75-3.25M)
---|---|---|
import SLIX
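# these tests exercise the CUDA kernels in SLIX.GPU, so a CUDA device visible
# to CuPy/Numba is assumed; toolbox.gpu_available reports whether one was found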
if SLIX.toolbox.gpu_available:
print(SLIX.toolbox.gpu_available)
from SLIX.GPU import _toolbox as ntoolbox
import cupy
from numba import cuda
threads_per_block = (1, 1)
blocks_per_grid = (1, 1)
class TestNumbaToolboxGPU:
def test_peak_cleanup(self):
test_one_peak = cupy.array([0, 1, 0, 0]).reshape((1, 1, 4))
result = cupy.zeros(test_one_peak.shape, dtype='int8')
ntoolbox._peaks[blocks_per_grid, threads_per_block](test_one_peak, result)
cuda.synchronize()
assert cupy.all(cupy.array([0, 1, 0, 0]) == result)
test_two_peak = cupy.array([0, 1, 1, 0]).reshape((1, 1, 4))
result = cupy.zeros(test_two_peak.shape, dtype='int8')
ntoolbox._peaks[blocks_per_grid, threads_per_block](test_two_peak, result)
assert cupy.all(cupy.array([0, 1, 0, 0]) == result)
test_three_peak = cupy.array([0, 1, 1, 1, 0]).reshape((1, 1, 5))
result = cupy.zeros(test_three_peak.shape, dtype='int8')
ntoolbox._peaks[blocks_per_grid, threads_per_block](test_three_peak, result)
assert cupy.all(cupy.array([0, 0, 1, 0, 0]) == result)
test_double_three_peak = cupy.array([0, 1, 1, 1, 0, 1, 1, 1, 0]).reshape((1, 1, 9))
result = cupy.zeros(test_double_three_peak.shape, dtype='int8')
ntoolbox._peaks[blocks_per_grid, threads_per_block](test_double_three_peak, result)
assert cupy.all(cupy.array([0, 0, 1, 0, 0, 0, 1, 0, 0]) == result)
def test_prominence(self):
test_array = cupy.array([0, 0.1, 0.2, 0.4, 0.8, 1, 0.5, 0.7, 0.9, 0.5, 0.3, 0.95, 0], dtype='float32')\
.reshape((1, 1, 13))
peaks = cupy.array([0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0], dtype='int8').reshape((1, 1, 13))
expected_prominence = cupy.array([0, 0, 0, 0, 0, 1, 0, 0, 0.4, 0, 0, 0.65, 0]).reshape((1, 1, 13))
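        # prominence here is the peak height above the higher of its two
        # flanking valleys: 1.0 - 0.0 = 1 at index 5, 0.9 - 0.5 = 0.4 at
        # index 8, and 0.95 - 0.3 = 0.65 at index 11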
toolbox_prominence = cupy.zeros(expected_prominence.shape, dtype='float32')
ntoolbox._prominence[blocks_per_grid, threads_per_block](test_array, peaks, toolbox_prominence)
print(toolbox_prominence)
assert cupy.all(cupy.isclose(expected_prominence, toolbox_prominence))
def test_peakwidth(self):
test_array = cupy.array([0, 0.1, 0.2, 0.5, 0.8, 1, 0.77, 0.7, 0.66, 0.5, 0.74, 0.98, 0.74], dtype='float32')\
.reshape((1, 1, 13))
peaks = cupy.array([0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0], dtype='int8').reshape((1, 1, 13))
prominence = cupy.array([0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0.48, 0], dtype='float32').reshape((1, 1, 13))
expected_width = cupy.array([0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 2, 0]).reshape((1, 1, 13))
toolbox_width = cupy.zeros(expected_width.shape, dtype='float32')
ntoolbox._peakwidth[blocks_per_grid, threads_per_block](test_array, peaks, prominence, toolbox_width, 0.5)
assert cupy.all(toolbox_width == expected_width)
def test_peakdistance(self):
test_arr = cupy.array(([False, False, True, False, False, False, False, True, False] +
[False] * 15), dtype='int8')\
.reshape((1, 1, 24))
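        # 24 positions cover 360 degrees (15 degrees per step); the peaks at
        # indices 2 and 7 lie 5 steps apart, hence an expected distance of 75
        # degrees, with the partner peak reporting 360 - 75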
expected_distance = 75
toolbox_distance = cupy.zeros(test_arr.shape, dtype='float32')
ntoolbox._peakdistance[blocks_per_grid, threads_per_block]\
(test_arr,
cupy.zeros(test_arr.shape, dtype='float32'),
cupy.array([[2]], dtype='int8'),
toolbox_distance)
assert toolbox_distance[0, 0, 2] == expected_distance
assert toolbox_distance[0, 0, 7] == 360 - expected_distance
def test_direction(self):
# Test for one peak
one_peak_arr = cupy.array([0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])\
.reshape((1, 1, 24)).astype('int8')
expected_direction = cupy.array([45, ntoolbox.BACKGROUND_COLOR, ntoolbox.BACKGROUND_COLOR])
toolbox_direction = cupy.zeros((1, 1, 3), dtype='float32')
ntoolbox._direction[blocks_per_grid, threads_per_block]\
(one_peak_arr,
cupy.zeros(one_peak_arr.shape, dtype='float32'),
cupy.array([[1]], dtype='int8'),
toolbox_direction,
0)
assert cupy.all(expected_direction == toolbox_direction)
# Test for one direction with 180°+-35° distance
two_peak_arr = cupy.array([0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0])\
.reshape((1, 1, 24)).astype('int8')
expected_direction = cupy.array([135, ntoolbox.BACKGROUND_COLOR, ntoolbox.BACKGROUND_COLOR])
ntoolbox._direction[blocks_per_grid, threads_per_block]\
(two_peak_arr,
cupy.zeros(two_peak_arr.shape, dtype='float32'),
cupy.array([[2]], dtype='int8'),
toolbox_direction,
0)
assert cupy.all(expected_direction == toolbox_direction)
# Test for (invalid) two directions with 180°+-35° distance
four_peak_arr = cupy.array([0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]) \
.reshape((1, 1, 24)).astype('int8')
expected_direction = cupy.array([ntoolbox.BACKGROUND_COLOR, ntoolbox.BACKGROUND_COLOR, ntoolbox.BACKGROUND_COLOR])
ntoolbox._direction[blocks_per_grid, threads_per_block] \
(four_peak_arr,
cupy.zeros(four_peak_arr.shape, dtype='float32'),
cupy.array([[4]], dtype='int8'),
toolbox_direction,
0)
assert cupy.all(expected_direction == toolbox_direction)
def test_centroid_correction_bases(self):
# simple test case: one distinct peak
test_array = cupy.array([0] * 9 + [1] + [0] * 14).reshape((1, 1, 24))
test_high_peaks = SLIX.toolbox.peaks(test_array)
test_reverse_peaks = SLIX.toolbox.peaks(-test_array)
left_bases = cupy.zeros(test_array.shape, dtype='uint8')
right_bases = cupy.zeros(test_array.shape, dtype='uint8')
ntoolbox._centroid_correction_bases[blocks_per_grid, threads_per_block]\
(test_array,
test_high_peaks,
test_reverse_peaks,
left_bases,
right_bases)
assert cupy.sum(left_bases) == 1
assert cupy.sum(right_bases) == 1
# simple test case: one distinct peak
test_array = cupy.array([0] * 8 + [0.95, 1, 0.5] + [0] * 13, dtype='float32').reshape((1, 1, 24))
test_high_peaks = SLIX.toolbox.peaks(test_array)
test_reverse_peaks = SLIX.toolbox.peaks(-test_array)
ntoolbox._centroid_correction_bases[blocks_per_grid, threads_per_block] \
(test_array,
test_high_peaks,
test_reverse_peaks,
left_bases,
right_bases)
assert cupy.sum(left_bases) == 2
assert cupy.sum(right_bases) == 1
# simple test case: centroid is between two measurements
test_array = cupy.array([0] * 8 + [1, 1] + [0] * 14).reshape((1, 1, 24))
test_high_peaks = SLIX.toolbox.peaks(test_array)
test_reverse_peaks = SLIX.toolbox.peaks(-test_array)
ntoolbox._centroid_correction_bases[blocks_per_grid, threads_per_block] \
(test_array,
test_high_peaks,
test_reverse_peaks,
left_bases,
right_bases)
assert cupy.sum(left_bases) == 1
assert cupy.sum(right_bases) == 2
# more complicated test case: wide peak plateau
test_array = cupy.array([0] * 8 + [1, 1, 1] + [0] * 13).reshape((1, 1, 24))
test_high_peaks = SLIX.toolbox.peaks(test_array)
test_reverse_peaks = SLIX.toolbox.peaks(-test_array)
ntoolbox._centroid_correction_bases[blocks_per_grid, threads_per_block] \
(test_array,
test_high_peaks,
test_reverse_peaks,
left_bases,
right_bases)
assert cupy.sum(left_bases) == 2
assert cupy.sum(right_bases) == 2
def test_centroid(self):
image = cupy.array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])\
.reshape((1, 1, 24))
left = cupy.array([0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])\
.reshape((1, 1, 24))
right = cupy.array([0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])\
.reshape((1, 1, 24))
peak = cupy.array([0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])\
.reshape((1, 1, 24))
result_centroid = cupy.zeros(image.shape, dtype='float32')
ntoolbox._centroid[blocks_per_grid, threads_per_block](image, peak, left, right, result_centroid)
assert cupy.sum(result_centroid) == 0
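# Hedged usage sketch (not part of the original tests): on a real image the
# launch grid scales with the image shape instead of the fixed (1, 1) grid
# used above. The kernel name and launch syntax follow the calls in this
# file; the (8, 8) block size is an arbitrary assumption.
if __name__ == '__main__' and SLIX.toolbox.gpu_available:
    image = cupy.random.rand(128, 128, 24).astype('float32')
    peaks = cupy.zeros(image.shape, dtype='int8')
    threads = (8, 8)
    blocks = ((image.shape[0] + threads[0] - 1) // threads[0],
              (image.shape[1] + threads[1] - 1) // threads[1])
    ntoolbox._peaks[blocks, threads](image, peaks)
    cuda.synchronize()
    print(int(cupy.sum(peaks)), 'candidate peak samples found')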
|
[
"cupy.all",
"cupy.zeros",
"cupy.isclose",
"cupy.array",
"cupy.sum",
"SLIX.toolbox.peaks",
"numba.cuda.synchronize"
] |
[((395, 440), 'cupy.zeros', 'cupy.zeros', (['test_one_peak.shape'], {'dtype': '"""int8"""'}), "(test_one_peak.shape, dtype='int8')\n", (405, 440), False, 'import cupy\n'), ((540, 558), 'numba.cuda.synchronize', 'cuda.synchronize', ([], {}), '()\n', (556, 558), False, 'from numba import cuda\n'), ((717, 762), 'cupy.zeros', 'cupy.zeros', (['test_two_peak.shape'], {'dtype': '"""int8"""'}), "(test_two_peak.shape, dtype='int8')\n", (727, 762), False, 'import cupy\n'), ((1013, 1060), 'cupy.zeros', 'cupy.zeros', (['test_three_peak.shape'], {'dtype': '"""int8"""'}), "(test_three_peak.shape, dtype='int8')\n", (1023, 1060), False, 'import cupy\n'), ((1335, 1389), 'cupy.zeros', 'cupy.zeros', (['test_double_three_peak.shape'], {'dtype': '"""int8"""'}), "(test_double_three_peak.shape, dtype='int8')\n", (1345, 1389), False, 'import cupy\n'), ((2018, 2072), 'cupy.zeros', 'cupy.zeros', (['expected_prominence.shape'], {'dtype': '"""float32"""'}), "(expected_prominence.shape, dtype='float32')\n", (2028, 2072), False, 'import cupy\n'), ((2862, 2911), 'cupy.zeros', 'cupy.zeros', (['expected_width.shape'], {'dtype': '"""float32"""'}), "(expected_width.shape, dtype='float32')\n", (2872, 2911), False, 'import cupy\n'), ((3050, 3091), 'cupy.all', 'cupy.all', (['(toolbox_width == expected_width)'], {}), '(toolbox_width == expected_width)\n', (3058, 3091), False, 'import cupy\n'), ((3408, 3451), 'cupy.zeros', 'cupy.zeros', (['test_arr.shape'], {'dtype': '"""float32"""'}), "(test_arr.shape, dtype='float32')\n", (3418, 3451), False, 'import cupy\n'), ((4117, 4187), 'cupy.array', 'cupy.array', (['[45, ntoolbox.BACKGROUND_COLOR, ntoolbox.BACKGROUND_COLOR]'], {}), '([45, ntoolbox.BACKGROUND_COLOR, ntoolbox.BACKGROUND_COLOR])\n', (4127, 4187), False, 'import cupy\n'), ((4220, 4258), 'cupy.zeros', 'cupy.zeros', (['(1, 1, 3)'], {'dtype': '"""float32"""'}), "((1, 1, 3), dtype='float32')\n", (4230, 4258), False, 'import cupy\n'), ((4550, 4599), 'cupy.all', 'cupy.all', (['(expected_direction == toolbox_direction)'], {}), '(expected_direction == toolbox_direction)\n', (4558, 4599), False, 'import cupy\n'), ((4876, 4947), 'cupy.array', 'cupy.array', (['[135, ntoolbox.BACKGROUND_COLOR, ntoolbox.BACKGROUND_COLOR]'], {}), '([135, ntoolbox.BACKGROUND_COLOR, ntoolbox.BACKGROUND_COLOR])\n', (4886, 4947), False, 'import cupy\n'), ((5239, 5288), 'cupy.all', 'cupy.all', (['(expected_direction == toolbox_direction)'], {}), '(expected_direction == toolbox_direction)\n', (5247, 5288), False, 'import cupy\n'), ((5562, 5660), 'cupy.array', 'cupy.array', (['[ntoolbox.BACKGROUND_COLOR, ntoolbox.BACKGROUND_COLOR, ntoolbox.\n BACKGROUND_COLOR]'], {}), '([ntoolbox.BACKGROUND_COLOR, ntoolbox.BACKGROUND_COLOR, ntoolbox.\n BACKGROUND_COLOR])\n', (5572, 5660), False, 'import cupy\n'), ((5950, 5999), 'cupy.all', 'cupy.all', (['(expected_direction == toolbox_direction)'], {}), '(expected_direction == toolbox_direction)\n', (5958, 5999), False, 'import cupy\n'), ((6213, 6243), 'SLIX.toolbox.peaks', 'SLIX.toolbox.peaks', (['test_array'], {}), '(test_array)\n', (6231, 6243), False, 'import SLIX\n'), ((6277, 6308), 'SLIX.toolbox.peaks', 'SLIX.toolbox.peaks', (['(-test_array)'], {}), '(-test_array)\n', (6295, 6308), False, 'import SLIX\n'), ((6335, 6378), 'cupy.zeros', 'cupy.zeros', (['test_array.shape'], {'dtype': '"""uint8"""'}), "(test_array.shape, dtype='uint8')\n", (6345, 6378), False, 'import cupy\n'), ((6405, 6448), 'cupy.zeros', 'cupy.zeros', (['test_array.shape'], {'dtype': '"""uint8"""'}), "(test_array.shape, dtype='uint8')\n", (6415, 6448), 
False, 'import cupy\n'), ((6975, 7005), 'SLIX.toolbox.peaks', 'SLIX.toolbox.peaks', (['test_array'], {}), '(test_array)\n', (6993, 7005), False, 'import SLIX\n'), ((7039, 7070), 'SLIX.toolbox.peaks', 'SLIX.toolbox.peaks', (['(-test_array)'], {}), '(-test_array)\n', (7057, 7070), False, 'import SLIX\n'), ((7593, 7623), 'SLIX.toolbox.peaks', 'SLIX.toolbox.peaks', (['test_array'], {}), '(test_array)\n', (7611, 7623), False, 'import SLIX\n'), ((7657, 7688), 'SLIX.toolbox.peaks', 'SLIX.toolbox.peaks', (['(-test_array)'], {}), '(-test_array)\n', (7675, 7688), False, 'import SLIX\n'), ((8205, 8235), 'SLIX.toolbox.peaks', 'SLIX.toolbox.peaks', (['test_array'], {}), '(test_array)\n', (8223, 8235), False, 'import SLIX\n'), ((8269, 8300), 'SLIX.toolbox.peaks', 'SLIX.toolbox.peaks', (['(-test_array)'], {}), '(-test_array)\n', (8287, 8300), False, 'import SLIX\n'), ((9308, 9348), 'cupy.zeros', 'cupy.zeros', (['image.shape'], {'dtype': '"""float32"""'}), "(image.shape, dtype='float32')\n", (9318, 9348), False, 'import cupy\n'), ((2247, 2300), 'cupy.isclose', 'cupy.isclose', (['expected_prominence', 'toolbox_prominence'], {}), '(expected_prominence, toolbox_prominence)\n', (2259, 2300), False, 'import cupy\n'), ((3568, 3611), 'cupy.zeros', 'cupy.zeros', (['test_arr.shape'], {'dtype': '"""float32"""'}), "(test_arr.shape, dtype='float32')\n", (3578, 3611), False, 'import cupy\n'), ((3630, 3661), 'cupy.array', 'cupy.array', (['[[2]]'], {'dtype': '"""int8"""'}), "([[2]], dtype='int8')\n", (3640, 3661), False, 'import cupy\n'), ((4376, 4423), 'cupy.zeros', 'cupy.zeros', (['one_peak_arr.shape'], {'dtype': '"""float32"""'}), "(one_peak_arr.shape, dtype='float32')\n", (4386, 4423), False, 'import cupy\n'), ((4442, 4473), 'cupy.array', 'cupy.array', (['[[1]]'], {'dtype': '"""int8"""'}), "([[1]], dtype='int8')\n", (4452, 4473), False, 'import cupy\n'), ((5065, 5112), 'cupy.zeros', 'cupy.zeros', (['two_peak_arr.shape'], {'dtype': '"""float32"""'}), "(two_peak_arr.shape, dtype='float32')\n", (5075, 5112), False, 'import cupy\n'), ((5131, 5162), 'cupy.array', 'cupy.array', (['[[2]]'], {'dtype': '"""int8"""'}), "([[2]], dtype='int8')\n", (5141, 5162), False, 'import cupy\n'), ((5775, 5823), 'cupy.zeros', 'cupy.zeros', (['four_peak_arr.shape'], {'dtype': '"""float32"""'}), "(four_peak_arr.shape, dtype='float32')\n", (5785, 5823), False, 'import cupy\n'), ((5842, 5873), 'cupy.array', 'cupy.array', (['[[4]]'], {'dtype': '"""int8"""'}), "([[4]], dtype='int8')\n", (5852, 5873), False, 'import cupy\n'), ((6712, 6732), 'cupy.sum', 'cupy.sum', (['left_bases'], {}), '(left_bases)\n', (6720, 6732), False, 'import cupy\n'), ((6757, 6778), 'cupy.sum', 'cupy.sum', (['right_bases'], {}), '(right_bases)\n', (6765, 6778), False, 'import cupy\n'), ((7336, 7356), 'cupy.sum', 'cupy.sum', (['left_bases'], {}), '(left_bases)\n', (7344, 7356), False, 'import cupy\n'), ((7381, 7402), 'cupy.sum', 'cupy.sum', (['right_bases'], {}), '(right_bases)\n', (7389, 7402), False, 'import cupy\n'), ((7954, 7974), 'cupy.sum', 'cupy.sum', (['left_bases'], {}), '(left_bases)\n', (7962, 7974), False, 'import cupy\n'), ((7999, 8020), 'cupy.sum', 'cupy.sum', (['right_bases'], {}), '(right_bases)\n', (8007, 8020), False, 'import cupy\n'), ((8567, 8587), 'cupy.sum', 'cupy.sum', (['left_bases'], {}), '(left_bases)\n', (8575, 8587), False, 'import cupy\n'), ((8612, 8633), 'cupy.sum', 'cupy.sum', (['right_bases'], {}), '(right_bases)\n', (8620, 8633), False, 'import cupy\n'), ((9479, 9504), 'cupy.sum', 'cupy.sum', (['result_centroid'], {}), '(result_centroid)\n', 
(9487, 9504), False, 'import cupy\n'), ((329, 353), 'cupy.array', 'cupy.array', (['[0, 1, 0, 0]'], {}), '([0, 1, 0, 0])\n', (339, 353), False, 'import cupy\n'), ((587, 611), 'cupy.array', 'cupy.array', (['[0, 1, 0, 0]'], {}), '([0, 1, 0, 0])\n', (597, 611), False, 'import cupy\n'), ((652, 676), 'cupy.array', 'cupy.array', (['[0, 1, 1, 0]'], {}), '([0, 1, 1, 0])\n', (662, 676), False, 'import cupy\n'), ((878, 902), 'cupy.array', 'cupy.array', (['[0, 1, 0, 0]'], {}), '([0, 1, 0, 0])\n', (888, 902), False, 'import cupy\n'), ((945, 972), 'cupy.array', 'cupy.array', (['[0, 1, 1, 1, 0]'], {}), '([0, 1, 1, 1, 0])\n', (955, 972), False, 'import cupy\n'), ((1178, 1205), 'cupy.array', 'cupy.array', (['[0, 0, 1, 0, 0]'], {}), '([0, 0, 1, 0, 0])\n', (1188, 1205), False, 'import cupy\n'), ((1255, 1294), 'cupy.array', 'cupy.array', (['[0, 1, 1, 1, 0, 1, 1, 1, 0]'], {}), '([0, 1, 1, 1, 0, 1, 1, 1, 0])\n', (1265, 1294), False, 'import cupy\n'), ((1514, 1553), 'cupy.array', 'cupy.array', (['[0, 0, 1, 0, 0, 0, 1, 0, 0]'], {}), '([0, 0, 1, 0, 0, 0, 1, 0, 0])\n', (1524, 1553), False, 'import cupy\n'), ((1626, 1719), 'cupy.array', 'cupy.array', (['[0, 0.1, 0.2, 0.4, 0.8, 1, 0.5, 0.7, 0.9, 0.5, 0.3, 0.95, 0]'], {'dtype': '"""float32"""'}), "([0, 0.1, 0.2, 0.4, 0.8, 1, 0.5, 0.7, 0.9, 0.5, 0.3, 0.95, 0],\n dtype='float32')\n", (1636, 1719), False, 'import cupy\n'), ((1787, 1852), 'cupy.array', 'cupy.array', (['[0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0]'], {'dtype': '"""int8"""'}), "([0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0], dtype='int8')\n", (1797, 1852), False, 'import cupy\n'), ((1908, 1964), 'cupy.array', 'cupy.array', (['[0, 0, 0, 0, 0, 1, 0, 0, 0.4, 0, 0, 0.65, 0]'], {}), '([0, 0, 0, 0, 0, 1, 0, 0, 0.4, 0, 0, 0.65, 0])\n', (1918, 1964), False, 'import cupy\n'), ((2362, 2462), 'cupy.array', 'cupy.array', (['[0, 0.1, 0.2, 0.5, 0.8, 1, 0.77, 0.7, 0.66, 0.5, 0.74, 0.98, 0.74]'], {'dtype': '"""float32"""'}), "([0, 0.1, 0.2, 0.5, 0.8, 1, 0.77, 0.7, 0.66, 0.5, 0.74, 0.98, \n 0.74], dtype='float32')\n", (2372, 2462), False, 'import cupy\n'), ((2529, 2594), 'cupy.array', 'cupy.array', (['[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0]'], {'dtype': '"""int8"""'}), "([0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0], dtype='int8')\n", (2539, 2594), False, 'import cupy\n'), ((2640, 2711), 'cupy.array', 'cupy.array', (['[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0.48, 0]'], {'dtype': '"""float32"""'}), "([0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0.48, 0], dtype='float32')\n", (2650, 2711), False, 'import cupy\n'), ((2761, 2812), 'cupy.array', 'cupy.array', (['[0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 2, 0]'], {}), '([0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 2, 0])\n', (2771, 2812), False, 'import cupy\n'), ((3153, 3260), 'cupy.array', 'cupy.array', (['([False, False, True, False, False, False, False, True, False] + [False] * 15)'], {'dtype': '"""int8"""'}), "([False, False, True, False, False, False, False, True, False] + \n [False] * 15, dtype='int8')\n", (3163, 3260), False, 'import cupy\n'), ((6126, 6162), 'cupy.array', 'cupy.array', (['([0] * 9 + [1] + [0] * 14)'], {}), '([0] * 9 + [1] + [0] * 14)\n', (6136, 6162), False, 'import cupy\n'), ((6860, 6924), 'cupy.array', 'cupy.array', (['([0] * 8 + [0.95, 1, 0.5] + [0] * 13)'], {'dtype': '"""float32"""'}), "([0] * 8 + [0.95, 1, 0.5] + [0] * 13, dtype='float32')\n", (6870, 6924), False, 'import cupy\n'), ((7503, 7542), 'cupy.array', 'cupy.array', (['([0] * 8 + [1, 1] + [0] * 14)'], {}), '([0] * 8 + [1, 1] + [0] * 14)\n', (7513, 7542), False, 'import cupy\n'), ((8112, 8154), 'cupy.array', 'cupy.array', (['([0] * 8 + [1, 1, 1] 
+ [0] * 13)'], {}), '([0] * 8 + [1, 1, 1] + [0] * 13)\n', (8122, 8154), False, 'import cupy\n'), ((8693, 8782), 'cupy.array', 'cupy.array', (['[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]'], {}), '([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0])\n', (8703, 8782), False, 'import cupy\n'), ((8844, 8933), 'cupy.array', 'cupy.array', (['[0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]'], {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0])\n', (8854, 8933), False, 'import cupy\n'), ((8996, 9085), 'cupy.array', 'cupy.array', (['[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]'], {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0])\n', (9006, 9085), False, 'import cupy\n'), ((9147, 9236), 'cupy.array', 'cupy.array', (['[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]'], {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0])\n', (9157, 9236), False, 'import cupy\n'), ((3930, 4019), 'cupy.array', 'cupy.array', (['[0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]'], {}), '([0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0])\n', (3940, 4019), False, 'import cupy\n'), ((4689, 4778), 'cupy.array', 'cupy.array', (['[0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]'], {}), '([0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, \n 0, 0, 0])\n', (4699, 4778), False, 'import cupy\n'), ((5390, 5479), 'cupy.array', 'cupy.array', (['[0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]'], {}), '([0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, \n 0, 0, 0])\n', (5400, 5479), False, 'import cupy\n')]
|
import os, csv, json
from py2neo import Graph
with open("./AWSNEoConfig.json") as c:
conf = json.load(c)
graph = Graph(conf[0]["NeoParametes"][0]["Url"], auth=(conf[0]["NeoParametes"][0]["Username"], conf[0]["NeoParametes"][0]["Password"]))
regions ="US East (Ohio),us-east-2\n\
US East (N. Virginia),us-east-1\n\
US West (N. California),us-west-1\n\
US West (Oregon),us-west-2\n\
Asia Pacific (Hong Kong),ap-east-1\n\
Asia Pacific (Mumbai),ap-south-1\n\
Asia Pacific (Osaka-Local),ap-northeast-3\n\
Asia Pacific (Seoul),ap-northeast-2\n\
Asia Pacific (Singapore),ap-southeast-1\n\
Asia Pacific (Sydney),ap-southeast-2\n\
Asia Pacific (Tokyo),ap-northeast-1\n\
Canada (Central),ca-central-1\n\
Europe (Frankfurt),eu-central-1\n\
Europe (Ireland),eu-west-1\n\
Europe (London),eu-west-2\n\
Europe (Paris),eu-west-3\n\
Europe (Stockholm),eu-north-1\n\
Middle East (Bahrain),me-south-1\n\
South America (São Paulo),sa-east-1"
def NeoInsertRegions(Name, region_id):
    query = 'MERGE (:AWSRegion { Name: "' + Name + '", Id: "' + region_id + '" })'
print(query)
result = graph.run(query).to_table()
print(result)
#print(regions)
reader = csv.reader(regions.split('\n'), delimiter=',')
for row in reader:
NeoInsertRegions(row[0],row[1])
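# For reference, the nested config shape the lookups above imply (the key
# spelling "NeoParametes" must match the JSON file exactly; the URL shown is
# only a placeholder):
#   [{"NeoParametes": [{"Url": "bolt://localhost:7687",
#                       "Username": "neo4j", "Password": "..."}]}]
def NeoInsertRegionsParam(Name, region_id):
    # Hedged alternative sketch (defined but not called): py2neo's run()
    # accepts Cypher parameters, which avoids hand-quoting region names.
    return graph.run('MERGE (:AWSRegion { Name: $name, Id: $id })',
                     name=Name, id=region_id).to_table()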
|
[
"py2neo.Graph",
"json.load"
] |
[((127, 259), 'py2neo.Graph', 'Graph', (["conf[0]['NeoParametes'][0]['Url']"], {'auth': "(conf[0]['NeoParametes'][0]['Username'], conf[0]['NeoParametes'][0]['Password']\n )"}), "(conf[0]['NeoParametes'][0]['Url'], auth=(conf[0]['NeoParametes'][0][\n 'Username'], conf[0]['NeoParametes'][0]['Password']))\n", (132, 259), False, 'from py2neo import Graph\n'), ((97, 109), 'json.load', 'json.load', (['c'], {}), '(c)\n', (106, 109), False, 'import os, csv, json\n')]
|
#!/usr/bin/env python3
"""Test the hashutils module."""
import hashlib
import os
from shutil import copyfile, rmtree
import pytest
from bbarchivist import hashutils as bh
try:
import unittest.mock as mock
except ImportError:
import mock
__author__ = "Thurask"
__license__ = "WTFPL v2"
__copyright__ = "2015-2019 Thurask"
def setup_module(module):
"""
Create necessary files.
"""
if not os.path.exists("temp_hashutils"):
os.mkdir("temp_hashutils")
os.chdir("temp_hashutils")
with open("tempfile.txt", "w") as targetfile:
targetfile.write("Jackdaws love my big sphinx of quartz")
os.mkdir("skipme")
def teardown_module(module):
"""
Delete necessary files.
"""
os.chdir("..")
rmtree("temp_hashutils", ignore_errors=True)
class TestClassHashutils:
"""
Test hash utilities.
"""
def test_crc32(self):
"""
Test CRC32 checksum.
"""
assert bh.zlib_hash("tempfile.txt", "crc32") == "ed5d3f26"
def test_adler32(self):
"""
Test Adler32 checksum.
"""
assert bh.zlib_hash("tempfile.txt", "adler32") == "02470dcd"
def test_sha0(self):
"""
Test SHA-0 hash.
"""
if "sha" not in hashlib.algorithms_available:
pass
else:
assert bh.ssl_hash("tempfile.txt", "sha") == "d26b25f6170daf49e31e68bf57f6164815c368d8"
def test_sha0_unavail(self, capsys):
"""
Test SHA-0 hash, if not available.
"""
with mock.patch("hashlib.new", mock.MagicMock(side_effect=ValueError)):
bh.ssl_hash("tempfile.txt", "sha")
assert "SHA HASH FAILED" in capsys.readouterr()[0]
def test_sha1(self):
"""
Test SHA-1 hash.
"""
assert bh.hashlib_hash("tempfile.txt", hashlib.sha1()) == "71dc7ce8f27c11b792be3f169ecf985865e276d0"
def test_sha224(self):
"""
Test SHA-224 hash.
"""
assert bh.hashlib_hash("tempfile.txt", hashlib.sha224()) == "7bcd7b77f63633bf0f7db181106f08eb630a58c521b109be1cc4a404"
def test_sha256(self):
"""
Test SHA-256 hash.
"""
assert bh.hashlib_hash("tempfile.txt", hashlib.sha256()) == "f118871c45171d5fe4e9049980959e033eeeabcfa12046c243fda310580e8a0b"
def test_sha384(self):
"""
Test SHA-384 hash.
"""
assert bh.hashlib_hash("tempfile.txt", hashlib.sha384()) == "76620873c0d27873c137b082425c6e87e3d601c4b19241a1f2222f7f700a2fe8d3c648b26f62325a411cb020bff527be"
def test_sha512(self):
"""
Test SHA-512 hash.
"""
assert bh.hashlib_hash("tempfile.txt", hashlib.sha512()) == "b66a5e8aa9b9705748c2ee585b0e1a3a41288d2dafc3be2db12fa89d2f2a3e14f9dec11de4ba865bb51eaa6c2cfeb294139455e34da7d827a19504b0906c01c1"
def test_md4(self):
"""
Test MD4 hash.
"""
if "md4" not in hashlib.algorithms_available:
pass
else:
assert bh.ssl_hash("tempfile.txt", "md4") == "df26ada1a895f94e1f1257fad984e809"
def test_md4_unavail(self, capsys):
"""
Test MD4 hash, if not available.
"""
with mock.patch("hashlib.new", mock.MagicMock(side_effect=ValueError)):
bh.ssl_hash("tempfile.txt", "md4")
assert "MD4 HASH FAILED" in capsys.readouterr()[0]
def test_md5(self):
"""
Test MD5 hash.
"""
assert bh.hashlib_hash("tempfile.txt", hashlib.md5()) == "822e1187fde7c8d55aff8cc688701650"
def test_ripemd160(self):
"""
Test RIPEMD160 hash.
"""
if "ripemd160" not in hashlib.algorithms_available:
pass
else:
assert bh.ssl_hash("tempfile.txt", "ripemd160") == "f3e191024c33768e2589e2efca53d55f4e4945ee"
def test_ripemd160_unavail(self, capsys):
"""
Test RIPEMD160 hash, if not available.
"""
with mock.patch("hashlib.new", mock.MagicMock(side_effect=ValueError)):
bh.ssl_hash("tempfile.txt", "ripemd160")
assert "RIPEMD160 HASH FAILED" in capsys.readouterr()[0]
def test_sha3224(self):
"""
Test SHA3-224 hash.
"""
if "sha3_224" not in hashlib.algorithms_available:
pass
else:
assert bh.hashlib_hash("tempfile.txt", hashlib.sha3_224()) == "93cc89107b9bd807dead1ae95ce8c4b0f9b8acb2a3eef704e2fad109"
def test_sha3256(self):
"""
Test SHA3-256 hash.
"""
if "sha3_256" not in hashlib.algorithms_available:
pass
else:
assert bh.hashlib_hash("tempfile.txt", hashlib.sha3_256()) == "a9797b62d8b3573c9134406f42e601219e086150e6c2f32c90c5cee0149b6877"
def test_sha3384(self):
"""
Test SHA3-384 hash.
"""
if "sha3_384" not in hashlib.algorithms_available:
pass
else:
assert bh.hashlib_hash("tempfile.txt", hashlib.sha3_384()) == "1ae83352968f601e16eff076f5967dd356edce4c4c5629e3939123b7507efbaafd1dabc1e459f8e47f7a05df718e5927"
def test_sha3512(self):
"""
Test SHA3-512 hash.
"""
if "sha3_512" not in hashlib.algorithms_available:
pass
else:
assert bh.hashlib_hash("tempfile.txt", hashlib.sha3_512()) == "2ca12b585486d0f775f9fd438a73525b37b1214bc36a8b0ae611d0f1261e8d32b47b923b406c46cc80cc178598d41d42abee3eae5b1c23164b817342e22580e2"
def test_whirlpool(self):
"""
Test Whirlpool hash.
"""
if "whirlpool" not in hashlib.algorithms_available:
pass
else:
assert bh.ssl_hash("tempfile.txt", "whirlpool") == "9835d12f3cb3ea3934635e4a7cc918e489379ed69d894ebc2c09bbf99fe72567bfd26c919ad666e170752abfc4b8c37b376f5102f9e5de59af2b65efc2e01293"
def test_whirlpool_unavail(self, capsys):
"""
Test Whirlpool hash, if not available.
"""
with mock.patch("hashlib.new", mock.MagicMock(side_effect=ValueError)):
bh.ssl_hash("tempfile.txt", "whirlpool")
assert "WHIRLPOOL HASH FAILED" in capsys.readouterr()[0]
def test_escreens(self):
"""
Test EScreens code generation.
"""
pin = "acdcacdc"
app = "10.3.2.500"
uptime = "69696969"
assert bh.calculate_escreens(pin, app, uptime, duration=2) == "E4A25067"
def test_verifier(self):
"""
Test batch hashing.
"""
confload = {}
confload['adler32'] = True
confload['crc32'] = True
        confload['md4'] = "md4" in hashlib.algorithms_available
        confload['md5'] = True
        confload['sha0'] = "sha" in hashlib.algorithms_available
        confload['sha1'] = True
        confload['sha224'] = True
        confload['sha256'] = True
        confload['sha384'] = True
        confload['sha512'] = True
        confload['ripemd160'] = "ripemd160" in hashlib.algorithms_available
        confload['whirlpool'] = "whirlpool" in hashlib.algorithms_available
        confload['sha3224'] = "sha3_224" in hashlib.algorithms_available
        confload['sha3256'] = "sha3_256" in hashlib.algorithms_available
        confload['sha3384'] = "sha3_384" in hashlib.algorithms_available
        confload['sha3512'] = "sha3_512" in hashlib.algorithms_available
confload['blocksize'] = "16777216"
print(confload)
with mock.patch('bbarchivist.hashutils.verifier_config_loader', mock.MagicMock(return_value=confload)):
bh.verifier(os.getcwd())
stocklines = [
b"ADLER32",
b"02470DCD tempfile.txt",
b"CRC32",
b"ED5D3F26 tempfile.txt"]
if confload["md4"]:
stocklines.extend([
b"MD4",
b"DF26ADA1A895F94E1F1257FAD984E809 tempfile.txt"])
stocklines.extend([
b"MD5",
b"822E1187FDE7C8D55AFF8CC688701650 tempfile.txt"])
if confload["sha0"]:
stocklines.extend([
b"SHA0",
b"D26B25F6170DAF49E31E68BF57F6164815C368D8 tempfile.txt"])
stocklines.extend([
b"SHA1",
b"71DC7CE8F27C11B792BE3F169ECF985865E276D0 tempfile.txt",
b"SHA224",
b"7BCD7B77F63633BF0F7DB181106F08EB630A58C521B109BE1CC4A404 tempfile.txt",
b"SHA256",
b"F118871C45171D5FE4E9049980959E033EEEABCFA12046C243FDA310580E8A0B tempfile.txt",
b"SHA384",
b"76620873C0D27873C137B082425C6E87E3D601C4B19241A1F2222F7F700A2FE8D3C648B26F62325A411CB020BFF527BE tempfile.txt",
b"SHA512",
b"B66A5E8AA9B9705748C2EE585B0E1A3A41288D2DAFC3BE2DB12FA89D2F2A3E14F9DEC11DE4BA865BB51EAA6C2CFEB294139455E34DA7D827A19504B0906C01C1 tempfile.txt"])
if confload["ripemd160"]:
stocklines.extend([
b"RIPEMD160",
b"F3E191024C33768E2589E2EFCA53D55F4E4945EE tempfile.txt"])
if confload["whirlpool"]:
stocklines.extend([
b"WHIRLPOOL",
b"9835D12F3CB3EA3934635E4A7CC918E489379ED69D894EBC2C09BBF99FE72567BFD26C919AD666E170752ABFC4B8C37B376F5102F9E5DE59AF2B65EFC2E01293 tempfile.txt"])
if confload["sha3224"]:
stocklines.extend([
b"SHA3224",
b"93CC89107B9BD807DEAD1AE95CE8C4B0F9B8ACB2A3EEF704E2FAD109 tempfile.txt"])
if confload["sha3256"]:
stocklines.extend([
b"SHA3256",
b"A9797B62D8B3573C9134406F42E601219E086150E6C2F32C90C5CEE0149B6877 tempfile.txt"])
if confload["sha3384"]:
stocklines.extend([
b"SHA3384",
b"1AE83352968F601E16EFF076F5967DD356EDCE4C4C5629E3939123B7507EFBAAFD1DABC1E459F8E47F7A05DF718E5927 tempfile.txt"])
if confload["sha3512"]:
stocklines.extend([
b"SHA3512",
b"2CA12B585486D0F775F9FD438A73525B37B1214BC36A8B0AE611D0F1261E8D32B47B923B406C46CC80CC178598D41D42ABEE3EAE5B1C23164B817342E22580E2 tempfile.txt"])
stocklines2 = []
for item in stocklines:
item2 = item.strip()
item2 = item2.replace(b'\r\n', b'')
item2 = item2.replace(b'\n', b'')
item2 = item2.replace(b'\r', b'')
stocklines2.append(item2)
filename = "tempfile.txt.cksum"
with open(filename, "rb") as checksumfile:
content = checksumfile.read().splitlines()
content2 = []
for item in content:
item2 = item.strip()
item2 = item2.replace(b'\r\n', b'')
item2 = item2.replace(b'\n', b'')
item2 = item2.replace(b'\r', b'')
content2.append(item2)
for idx, value in enumerate(content2):
assert stocklines2[idx] == value
def test_verifier_multiple(self):
"""
Test batch hashing, but with multiple files.
"""
for i in range(17):
copyfile("tempfile.txt", "tempfile{0}.txt".format(i))
self.test_verifier()
def test_verifier_fail(self, capsys):
"""
Test batch hashing failure.
"""
with mock.patch("concurrent.futures.ThreadPoolExecutor.submit", mock.MagicMock(side_effect=Exception)):
with pytest.raises(SystemExit):
bh.verifier(os.getcwd())
assert "SOMETHING WENT WRONG" in capsys.readouterr()[0]
class TestClassHashutilsConfig:
"""
Test reading/writing configs with ConfigParser.
"""
@classmethod
def setup_class(cls):
"""
Create dictionaries for self.
"""
cls.hashdict = {}
cls.hashdict['crc32'] = False
cls.hashdict['adler32'] = False
cls.hashdict['sha1'] = True
cls.hashdict['sha224'] = False
cls.hashdict['sha256'] = True
cls.hashdict['sha384'] = False
cls.hashdict['sha512'] = False
cls.hashdict['sha3224'] = False
cls.hashdict['sha3256'] = False
cls.hashdict['sha3384'] = False
cls.hashdict['sha3512'] = False
cls.hashdict['md5'] = True
cls.hashdict['md4'] = False
cls.hashdict['ripemd160'] = False
cls.hashdict['whirlpool'] = False
cls.hashdict['sha0'] = False
cls.hashdict['blocksize'] = 16777216
def test_hash_loader(self):
"""
Test reading hash settings.
"""
try:
os.remove("bbarchivist.ini")
except (OSError, IOError):
pass
with mock.patch('bbarchivist.iniconfig.config_homepath', mock.MagicMock(return_value=os.getcwd())):
assert bh.verifier_config_loader() == self.hashdict
def test_hash_writer(self):
"""
Test writing hash settings.
"""
hash2 = self.hashdict
hash2['sha512'] = True
try:
os.remove("bbarchivist.ini")
except (OSError, IOError):
pass
with mock.patch('bbarchivist.iniconfig.config_homepath', mock.MagicMock(return_value=os.getcwd())):
with mock.patch('bbarchivist.hashutils.verifier_config_loader', mock.MagicMock(return_value=hash2)):
bh.verifier_config_writer()
assert bh.verifier_config_loader() == hash2
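# Hedged cross-check (not part of the original suite; assumes the working
# directory still holds the tempfile.txt written by setup_module): the
# expected digests above imply bh.hashlib_hash reads files in binary mode
# and returns lowercase hex, which hashlib alone reproduces.
if __name__ == '__main__':
    with open("tempfile.txt", "rb") as f:
        digest = hashlib.sha256(f.read()).hexdigest()
    assert digest == "f118871c45171d5fe4e9049980959e033eeeabcfa12046c243fda310580e8a0b"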
|
[
"os.mkdir",
"os.remove",
"hashlib.sha512",
"shutil.rmtree",
"hashlib.sha3_256",
"hashlib.sha3_384",
"os.chdir",
"hashlib.sha1",
"os.path.exists",
"hashlib.sha384",
"hashlib.sha256",
"pytest.raises",
"hashlib.sha224",
"bbarchivist.hashutils.verifier_config_loader",
"hashlib.md5",
"bbarchivist.hashutils.ssl_hash",
"hashlib.sha3_224",
"hashlib.sha3_512",
"bbarchivist.hashutils.verifier_config_writer",
"os.getcwd",
"bbarchivist.hashutils.zlib_hash",
"bbarchivist.hashutils.calculate_escreens",
"mock.MagicMock"
] |
[((489, 515), 'os.chdir', 'os.chdir', (['"""temp_hashutils"""'], {}), "('temp_hashutils')\n", (497, 515), False, 'import os\n'), ((636, 654), 'os.mkdir', 'os.mkdir', (['"""skipme"""'], {}), "('skipme')\n", (644, 654), False, 'import os\n'), ((734, 748), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (742, 748), False, 'import os\n'), ((753, 797), 'shutil.rmtree', 'rmtree', (['"""temp_hashutils"""'], {'ignore_errors': '(True)'}), "('temp_hashutils', ignore_errors=True)\n", (759, 797), False, 'from shutil import copyfile, rmtree\n'), ((416, 448), 'os.path.exists', 'os.path.exists', (['"""temp_hashutils"""'], {}), "('temp_hashutils')\n", (430, 448), False, 'import os\n'), ((458, 484), 'os.mkdir', 'os.mkdir', (['"""temp_hashutils"""'], {}), "('temp_hashutils')\n", (466, 484), False, 'import os\n'), ((962, 999), 'bbarchivist.hashutils.zlib_hash', 'bh.zlib_hash', (['"""tempfile.txt"""', '"""crc32"""'], {}), "('tempfile.txt', 'crc32')\n", (974, 999), True, 'from bbarchivist import hashutils as bh\n'), ((1113, 1152), 'bbarchivist.hashutils.zlib_hash', 'bh.zlib_hash', (['"""tempfile.txt"""', '"""adler32"""'], {}), "('tempfile.txt', 'adler32')\n", (1125, 1152), True, 'from bbarchivist import hashutils as bh\n'), ((1628, 1662), 'bbarchivist.hashutils.ssl_hash', 'bh.ssl_hash', (['"""tempfile.txt"""', '"""sha"""'], {}), "('tempfile.txt', 'sha')\n", (1639, 1662), True, 'from bbarchivist import hashutils as bh\n'), ((3301, 3335), 'bbarchivist.hashutils.ssl_hash', 'bh.ssl_hash', (['"""tempfile.txt"""', '"""md4"""'], {}), "('tempfile.txt', 'md4')\n", (3312, 3335), True, 'from bbarchivist import hashutils as bh\n'), ((4062, 4102), 'bbarchivist.hashutils.ssl_hash', 'bh.ssl_hash', (['"""tempfile.txt"""', '"""ripemd160"""'], {}), "('tempfile.txt', 'ripemd160')\n", (4073, 4102), True, 'from bbarchivist import hashutils as bh\n'), ((6087, 6127), 'bbarchivist.hashutils.ssl_hash', 'bh.ssl_hash', (['"""tempfile.txt"""', '"""whirlpool"""'], {}), "('tempfile.txt', 'whirlpool')\n", (6098, 6127), True, 'from bbarchivist import hashutils as bh\n'), ((6385, 6436), 'bbarchivist.hashutils.calculate_escreens', 'bh.calculate_escreens', (['pin', 'app', 'uptime'], {'duration': '(2)'}), '(pin, app, uptime, duration=2)\n', (6406, 6436), True, 'from bbarchivist import hashutils as bh\n'), ((12682, 12710), 'os.remove', 'os.remove', (['"""bbarchivist.ini"""'], {}), "('bbarchivist.ini')\n", (12691, 12710), False, 'import os\n'), ((13114, 13142), 'os.remove', 'os.remove', (['"""bbarchivist.ini"""'], {}), "('bbarchivist.ini')\n", (13123, 13142), False, 'import os\n'), ((1346, 1380), 'bbarchivist.hashutils.ssl_hash', 'bh.ssl_hash', (['"""tempfile.txt"""', '"""sha"""'], {}), "('tempfile.txt', 'sha')\n", (1357, 1380), True, 'from bbarchivist import hashutils as bh\n'), ((1575, 1613), 'mock.MagicMock', 'mock.MagicMock', ([], {'side_effect': 'ValueError'}), '(side_effect=ValueError)\n', (1589, 1613), False, 'import mock\n'), ((1848, 1862), 'hashlib.sha1', 'hashlib.sha1', ([], {}), '()\n', (1860, 1862), False, 'import hashlib\n'), ((2036, 2052), 'hashlib.sha224', 'hashlib.sha224', ([], {}), '()\n', (2050, 2052), False, 'import hashlib\n'), ((2242, 2258), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (2256, 2258), False, 'import hashlib\n'), ((2456, 2472), 'hashlib.sha384', 'hashlib.sha384', ([], {}), '()\n', (2470, 2472), False, 'import hashlib\n'), ((2702, 2718), 'hashlib.sha512', 'hashlib.sha512', ([], {}), '()\n', (2716, 2718), False, 'import hashlib\n'), ((3030, 3064), 'bbarchivist.hashutils.ssl_hash', 'bh.ssl_hash', 
(['"""tempfile.txt"""', '"""md4"""'], {}), "('tempfile.txt', 'md4')\n", (3041, 3064), True, 'from bbarchivist import hashutils as bh\n'), ((3248, 3286), 'mock.MagicMock', 'mock.MagicMock', ([], {'side_effect': 'ValueError'}), '(side_effect=ValueError)\n', (3262, 3286), False, 'import mock\n'), ((3518, 3531), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (3529, 3531), False, 'import hashlib\n'), ((3765, 3805), 'bbarchivist.hashutils.ssl_hash', 'bh.ssl_hash', (['"""tempfile.txt"""', '"""ripemd160"""'], {}), "('tempfile.txt', 'ripemd160')\n", (3776, 3805), True, 'from bbarchivist import hashutils as bh\n'), ((4009, 4047), 'mock.MagicMock', 'mock.MagicMock', ([], {'side_effect': 'ValueError'}), '(side_effect=ValueError)\n', (4023, 4047), False, 'import mock\n'), ((5702, 5742), 'bbarchivist.hashutils.ssl_hash', 'bh.ssl_hash', (['"""tempfile.txt"""', '"""whirlpool"""'], {}), "('tempfile.txt', 'whirlpool')\n", (5713, 5742), True, 'from bbarchivist import hashutils as bh\n'), ((6034, 6072), 'mock.MagicMock', 'mock.MagicMock', ([], {'side_effect': 'ValueError'}), '(side_effect=ValueError)\n', (6048, 6072), False, 'import mock\n'), ((7686, 7723), 'mock.MagicMock', 'mock.MagicMock', ([], {'return_value': 'confload'}), '(return_value=confload)\n', (7700, 7723), False, 'import mock\n'), ((7750, 7761), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (7759, 7761), False, 'import os\n'), ((11468, 11505), 'mock.MagicMock', 'mock.MagicMock', ([], {'side_effect': 'Exception'}), '(side_effect=Exception)\n', (11482, 11505), False, 'import mock\n'), ((11525, 11550), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (11538, 11550), False, 'import pytest\n'), ((12890, 12917), 'bbarchivist.hashutils.verifier_config_loader', 'bh.verifier_config_loader', ([], {}), '()\n', (12915, 12917), True, 'from bbarchivist import hashutils as bh\n'), ((13432, 13459), 'bbarchivist.hashutils.verifier_config_writer', 'bh.verifier_config_writer', ([], {}), '()\n', (13457, 13459), True, 'from bbarchivist import hashutils as bh\n'), ((13479, 13506), 'bbarchivist.hashutils.verifier_config_loader', 'bh.verifier_config_loader', ([], {}), '()\n', (13504, 13506), True, 'from bbarchivist import hashutils as bh\n'), ((4394, 4412), 'hashlib.sha3_224', 'hashlib.sha3_224', ([], {}), '()\n', (4410, 4412), False, 'import hashlib\n'), ((4698, 4716), 'hashlib.sha3_256', 'hashlib.sha3_256', ([], {}), '()\n', (4714, 4716), False, 'import hashlib\n'), ((5010, 5028), 'hashlib.sha3_384', 'hashlib.sha3_384', ([], {}), '()\n', (5026, 5028), False, 'import hashlib\n'), ((5354, 5372), 'hashlib.sha3_512', 'hashlib.sha3_512', ([], {}), '()\n', (5370, 5372), False, 'import hashlib\n'), ((11580, 11591), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (11589, 11591), False, 'import os\n'), ((13379, 13413), 'mock.MagicMock', 'mock.MagicMock', ([], {'return_value': 'hash2'}), '(return_value=hash2)\n', (13393, 13413), False, 'import mock\n'), ((12856, 12867), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (12865, 12867), False, 'import os\n'), ((13288, 13299), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (13297, 13299), False, 'import os\n')]
|
import os
import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
class Dictionary:
"""audio related parts"""
def __init__(self, word, files_path='~/.sayit'):
self.word = word
self.user_agent = UserAgent().random # Random user_agent for http calls
self.files_path = os.path.expanduser(files_path)
        if not os.path.isdir(self.files_path):
            os.makedirs(self.files_path, exist_ok=True)
def show(self):
""" who the definition part of the word """
file_path = self._get_file(self.word)
if not os.path.exists(file_path):
raise Exception("File cannot be found for {}.".format(self.word))
with open(file_path, 'r') as html:
soup = BeautifulSoup(html, 'html.parser')
# Phonetics
phonetics = soup.find('div', class_="phons_br")
if phonetics:
print(phonetics.get_text())
# Origin
origin = soup.find('span', unbox="wordorigin")
if origin:
print(origin.get_text(" "))
# Definitions
senses = soup.find('ol', class_='senses_multiple')
if senses:
self._print_definitions(senses)
# Idioms
idioms = soup.find_all('span', class_='idm-g')
if idioms:
self._print_idioms(idioms)
def _get_file(self, word):
""" download if not already downloaded """
word_audio = word + "__gb" # to save the file in audio sharded format
word_path = "{}/oxford/uk/{}/{}/{}".format(self.files_path,
word_audio[0],
word_audio[0: 3],
word_audio[0: 5])
file_path = "{}/{}.html".format(word_path, word)
if not os.path.exists(file_path):
os.makedirs(word_path, exist_ok=True)
self._save_word(word, word_path)
return file_path
def _save_word(self, word, path):
""" download and save the binary file to the given path """
uri = self._create_uri(word)
headers = {'User-Agent': self.user_agent}
request = requests.get(uri, headers=headers)
if request.status_code != 200:
raise Exception("{} cannot be found on the server.".format(word))
with open('{}/{}.html'.format(path, word), 'wb') as f:
for chunk in request:
f.write(chunk)
return uri
def _create_uri(self, word):
"""create oxford learner dictionary mp3 uri"""
base = "https://www.oxfordlearnersdictionaries.com/definition/english/"
return base + word
def _print_definitions(self, html):
"""prints definitions"""
for i, sense in enumerate(html):
if str(sense) != " ":
meaning = sense.find('span', class_='def')
if meaning:
meaning = meaning.text
title = sense.find('span', class_='cf')
label = sense.find('span', class_='dtxt')
labels = sense.find('span', class_='labels')
if label:
meaning = "({}) {}".format(label.text, meaning)
if labels:
meaning = "{} {}".format(labels.text, meaning)
if title:
meaning = "{}: {}".format(title.text, meaning)
print("{}. {}".format(i+1, meaning))
examples = [item.text for item in sense.find_all('li', class_="") if item]
for example in examples:
if example:
print(" • {}".format(example))
def _print_idioms(self, html):
"""prints idioms"""
print("\nIdioms:")
for idiom in html:
if idiom:
print(" ⦾ {}".format(idiom.find('div').get_text(" ", strip=True)))
label = idiom.find('span', class_='labels')
description = idiom.find('span', class_='def').get_text(" ", strip=True)
if label:
description = "{} {}".format(label.text, description)
print(" {}".format(description))
for example in idiom.find_all('span', class_='x'):
print(" . {}".format(example.text))
|
[
"os.makedirs",
"os.path.isdir",
"fake_useragent.UserAgent",
"os.path.exists",
"requests.get",
"bs4.BeautifulSoup",
"os.path.expanduser"
] |
[((329, 359), 'os.path.expanduser', 'os.path.expanduser', (['files_path'], {}), '(files_path)\n', (347, 359), False, 'import os\n'), ((2261, 2295), 'requests.get', 'requests.get', (['uri'], {'headers': 'headers'}), '(uri, headers=headers)\n', (2273, 2295), False, 'import requests\n'), ((248, 259), 'fake_useragent.UserAgent', 'UserAgent', ([], {}), '()\n', (257, 259), False, 'from fake_useragent import UserAgent\n'), ((375, 400), 'os.path.isdir', 'os.path.isdir', (['files_path'], {}), '(files_path)\n', (388, 400), False, 'import os\n'), ((414, 452), 'os.makedirs', 'os.makedirs', (['files_path'], {'exist_ok': '(True)'}), '(files_path, exist_ok=True)\n', (425, 452), False, 'import os\n'), ((587, 612), 'os.path.exists', 'os.path.exists', (['file_path'], {}), '(file_path)\n', (601, 612), False, 'import os\n'), ((754, 788), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""html.parser"""'], {}), "(html, 'html.parser')\n", (767, 788), False, 'from bs4 import BeautifulSoup\n'), ((1902, 1927), 'os.path.exists', 'os.path.exists', (['file_path'], {}), '(file_path)\n', (1916, 1927), False, 'import os\n'), ((1941, 1978), 'os.makedirs', 'os.makedirs', (['word_path'], {'exist_ok': '(True)'}), '(word_path, exist_ok=True)\n', (1952, 1978), False, 'import os\n')]
|
"""
Control global computation context
"""
from __future__ import absolute_import, division, print_function
from collections import defaultdict
_globals = defaultdict(lambda: None)
_globals['callbacks'] = set()
class set_options(object):
""" Set global state within controled context
This lets you specify various global settings in a tightly controlled
``with`` block.
Valid keyword arguments currently include the following::
get - the scheduler to use
pool - a thread or process pool
cache - Cache to use for intermediate results
func_loads/func_dumps - loads/dumps functions for serialization of data
likely to contain functions. Defaults to
cloudpickle.loads/cloudpickle.dumps
optimizations - List of additional optimizations to run
Examples
--------
>>> with set_options(get=dask.get): # doctest: +SKIP
... x = np.array(x) # uses dask.get internally
"""
def __init__(self, **kwargs):
self.old = _globals.copy()
_globals.update(kwargs)
def __enter__(self):
return
def __exit__(self, type, value, traceback):
_globals.clear()
_globals.update(self.old)
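# Illustrative restore semantics (the 'cache' object below is hypothetical;
# any keyword is accepted since set_options simply merges kwargs into
# _globals and restores the saved copy on exit):
if __name__ == '__main__':
    my_cache = {}
    with set_options(cache=my_cache):
        assert _globals['cache'] is my_cache
    assert _globals['cache'] is None  # prior state restored by __exit__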
|
[
"collections.defaultdict"
] |
[((157, 183), 'collections.defaultdict', 'defaultdict', (['(lambda : None)'], {}), '(lambda : None)\n', (168, 183), False, 'from collections import defaultdict\n')]
|
"""
Copyright 2019 Satellite Applications Catapult
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from urllib.error import HTTPError
from sedas_pyapi.sedas_api import SeDASAPI
class TestSeDASAPI(unittest.TestCase):
def test_login_bad_creds(self):
sedas = SeDASAPI("bogus", "<PASSWORD> real password")
self.assertRaises(
HTTPError,
sedas.login
)
def test_blank_username(self):
sedas = SeDASAPI("", "is not a real password")
self.assertRaises(
ValueError,
sedas.login
)
def test_blank_password(self):
sedas = SeDASAPI("is not a real username", "")
self.assertRaises(
ValueError,
sedas.login
)
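    def test_blank_password_context_manager(self):
        """
        Hedged duplicate of the blank-password check, using the
        context-manager form of assertRaises (standard unittest API).
        """
        sedas = SeDASAPI("is not a real username", "")
        with self.assertRaises(ValueError):
            sedas.login()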
|
[
"sedas_pyapi.sedas_api.SeDASAPI"
] |
[((820, 865), 'sedas_pyapi.sedas_api.SeDASAPI', 'SeDASAPI', (['"""bogus"""', '"""<PASSWORD> real password"""'], {}), "('bogus', '<PASSWORD> real password')\n", (828, 865), False, 'from sedas_pyapi.sedas_api import SeDASAPI\n'), ((1009, 1047), 'sedas_pyapi.sedas_api.SeDASAPI', 'SeDASAPI', (['""""""', '"""is not a real password"""'], {}), "('', 'is not a real password')\n", (1017, 1047), False, 'from sedas_pyapi.sedas_api import SeDASAPI\n'), ((1192, 1230), 'sedas_pyapi.sedas_api.SeDASAPI', 'SeDASAPI', (['"""is not a real username"""', '""""""'], {}), "('is not a real username', '')\n", (1200, 1230), False, 'from sedas_pyapi.sedas_api import SeDASAPI\n')]
|
#coding=utf-8
'''
Created on 2015-10-23
@author: Devuser
'''
from doraemon.project.models import Project,ProjectMember,Product,ProjectModule,Version
from gatesidelib.common.simplelogger import SimpleLogger
from django.contrib.admin.models import DELETION,CHANGE,ADDITION
from business.project.version_service import VersionService
from business.auth_user.user_service import UserService
import random
class ProjectService(object):
    '''
    Query and CRUD helpers for projects, their members, and versions.
    '''
@staticmethod
def get_latest_projects_include_me(request):
result = list()
latest_projects = ProjectService.get_latest_projects(request)
my_projects = ProjectService.get_projects_include_me(request)
my_project_ids = [project.id for project in my_projects]
for project in latest_projects:
if project and project.id in my_project_ids:
result.append(project)
return result[0:6]
@staticmethod
def get_latest_projects(request):
result=list()
latest_project_ids=VersionService.get_latests_project_ids(request)
for project_id in latest_project_ids:
temp_project=Project.objects.get(project_id)
result.append(temp_project)
return result
@staticmethod
def get_projects_include_me(request,product_id='all'):
        if product_id is None:
product_id="0"
if UserService.is_admin(request.user.id):
return Project.objects.all()
member_list= ProjectMember.objects.all().filter(PMMember=request.user.id)
project_ids=[member.PMProjectID for member in member_list]
if product_id.upper()=="ALL":
result=Project.objects.all().filter(id__in=project_ids)
else:
result=Project.objects.all().filter(id__in=project_ids).filter(Product=int(product_id))
return result
@staticmethod
def get_products_include_me(request):
my_projects=ProjectService.get_projects_include_me(request)
prodcut_ids=list()
for project in my_projects:
if project.Product not in prodcut_ids:
prodcut_ids.append(project.Product)
return Product.objects.all().filter(id__in=prodcut_ids)
@staticmethod
def get_project_modules(project_id):
return ProjectModule.objects.project_modules(int(project_id))
@staticmethod
def create_project(request):
try:
project=Project()
project=ProjectService.init_project(request.data, project)
project.PBCreator=request.user.id
project.save()
if str(request.user.id)!=str(project.PBLead):
ProjectService.add_member(request.user.id,project.id,5)
ProjectService.add_member(project.PBLead,project.id,4)
else:
ProjectService.add_member(request.user.id,project.id,4)
ProjectService.create_version(project,request.user)
ProjectService.log_create_activity(request.user, project)
except Exception as ex:
SimpleLogger.error(ex)
@staticmethod
def create_version(project,user):
version=Version()
version.VProjectID=project.id
version.VVersion='1.0.0'
version.CFTCommitor=user.id
version.save()
VersionService.log_create_activity(user, version)
@staticmethod
def edit_project(request,projectid):
temp_project=Project.objects.get(projectid)
project=ProjectService.init_project(request.POST, temp_project)
project.save()
ProjectService.log_change_activity(request.user, project)
@staticmethod
def delete_project(request,projectid):
print(projectid)
project=Project.objects.get(projectid)
project.IsActive=0
project.save()
ProjectService.log_delete_activity(request.user, project)
@staticmethod
def init_project(validate_data,project):
tmp_project=project
tmp_project.PBTitle=validate_data.get('PBTitle')
tmp_project.PBDescription=validate_data.get('PBDescription')
tmp_project.PBKey=validate_data.get('PBKey')
tmp_project.PBPlatform=validate_data.get('PBPlatform')
tmp_project.PBVisiableLevel=validate_data.get('PBVisiableLevel')
tmp_project.PBLead=validate_data.get('PBLead')
tmp_project.Product=0
tmp_project.PBHttpUrl=validate_data.get('PBHttpUrl')
tmp_project.PBAvatar="/static/global/images/project-icon/scenery-"+str(random.randint(1, 24))+".png"
return tmp_project
@staticmethod
def add_member(user,projectid,Role):
project_member=ProjectMember()
project_member.PMProjectID=projectid
project_member.PMMember=user
project_member.PMRoleID=Role
project_member.PMRoleType=1
project_member.save()
@staticmethod
    def log_create_activity(user, project):
        Project.objects.log_action(user.id, project.id, project.PBTitle, ADDITION, "创建了项目", project.id)  # "created the project"
    @staticmethod
    def log_delete_activity(user, project):
        Project.objects.log_action(user.id, project.id, project.PBTitle, DELETION, "删除了项目", project.id)  # "deleted the project"
    @staticmethod
    def log_change_activity(user, project):
        Project.objects.log_action(user.id, project.id, project.PBTitle, CHANGE, "修改了项目", project.id)  # "modified the project"
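# Hedged usage sketch (hypothetical Django view; assumes the doraemon app is
# installed and request.user is authenticated):
#   def my_projects(request):
#       projects = ProjectService.get_projects_include_me(request, product_id="all")
#       return [p.PBTitle for p in projects]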
|
[
"business.project.version_service.VersionService.log_create_activity",
"doraemon.project.models.Project.objects.log_action",
"doraemon.project.models.Project.objects.all",
"doraemon.project.models.ProjectMember.objects.all",
"doraemon.project.models.ProjectMember",
"random.randint",
"doraemon.project.models.Version",
"gatesidelib.common.simplelogger.SimpleLogger.error",
"doraemon.project.models.Project.objects.get",
"business.project.version_service.VersionService.get_latests_project_ids",
"doraemon.project.models.Product.objects.all",
"doraemon.project.models.Project",
"business.auth_user.user_service.UserService.is_admin"
] |
[((1035, 1082), 'business.project.version_service.VersionService.get_latests_project_ids', 'VersionService.get_latests_project_ids', (['request'], {}), '(request)\n', (1073, 1082), False, 'from business.project.version_service import VersionService\n'), ((1418, 1455), 'business.auth_user.user_service.UserService.is_admin', 'UserService.is_admin', (['request.user.id'], {}), '(request.user.id)\n', (1438, 1455), False, 'from business.auth_user.user_service import UserService\n'), ((3194, 3203), 'doraemon.project.models.Version', 'Version', ([], {}), '()\n', (3201, 3203), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n'), ((3342, 3391), 'business.project.version_service.VersionService.log_create_activity', 'VersionService.log_create_activity', (['user', 'version'], {}), '(user, version)\n', (3376, 3391), False, 'from business.project.version_service import VersionService\n'), ((3476, 3506), 'doraemon.project.models.Project.objects.get', 'Project.objects.get', (['projectid'], {}), '(projectid)\n', (3495, 3506), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n'), ((3780, 3810), 'doraemon.project.models.Project.objects.get', 'Project.objects.get', (['projectid'], {}), '(projectid)\n', (3799, 3810), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n'), ((4716, 4731), 'doraemon.project.models.ProjectMember', 'ProjectMember', ([], {}), '()\n', (4729, 4731), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n'), ((4991, 5090), 'doraemon.project.models.Project.objects.log_action', 'Project.objects.log_action', (['user.id', 'project.id', 'project.PBTitle', 'ADDITION', '"""创建了项目"""', 'project.id'], {}), "(user.id, project.id, project.PBTitle, ADDITION,\n '创建了项目', project.id)\n", (5017, 5090), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n'), ((5156, 5255), 'doraemon.project.models.Project.objects.log_action', 'Project.objects.log_action', (['user.id', 'project.id', 'project.PBTitle', 'DELETION', '"""删除了项目"""', 'project.id'], {}), "(user.id, project.id, project.PBTitle, DELETION,\n '删除了项目', project.id)\n", (5182, 5255), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n'), ((5321, 5418), 'doraemon.project.models.Project.objects.log_action', 'Project.objects.log_action', (['user.id', 'project.id', 'project.PBTitle', 'CHANGE', '"""修改了项目"""', 'project.id'], {}), "(user.id, project.id, project.PBTitle, CHANGE,\n '修改了项目', project.id)\n", (5347, 5418), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n'), ((1154, 1185), 'doraemon.project.models.Project.objects.get', 'Project.objects.get', (['project_id'], {}), '(project_id)\n', (1173, 1185), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n'), ((1476, 1497), 'doraemon.project.models.Project.objects.all', 'Project.objects.all', ([], {}), '()\n', (1495, 1497), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n'), ((2475, 2484), 'doraemon.project.models.Project', 'Project', ([], {}), '()\n', (2482, 2484), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n'), ((1519, 1546), 'doraemon.project.models.ProjectMember.objects.all', 'ProjectMember.objects.all', 
([], {}), '()\n', (1544, 1546), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n'), ((2203, 2224), 'doraemon.project.models.Product.objects.all', 'Product.objects.all', ([], {}), '()\n', (2222, 2224), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n'), ((3098, 3120), 'gatesidelib.common.simplelogger.SimpleLogger.error', 'SimpleLogger.error', (['ex'], {}), '(ex)\n', (3116, 3120), False, 'from gatesidelib.common.simplelogger import SimpleLogger\n'), ((1704, 1725), 'doraemon.project.models.Project.objects.all', 'Project.objects.all', ([], {}), '()\n', (1723, 1725), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n'), ((4572, 4593), 'random.randint', 'random.randint', (['(1)', '(24)'], {}), '(1, 24)\n', (4586, 4593), False, 'import random\n'), ((1786, 1807), 'doraemon.project.models.Project.objects.all', 'Project.objects.all', ([], {}), '()\n', (1805, 1807), False, 'from doraemon.project.models import Project, ProjectMember, Product, ProjectModule, Version\n')]
|
from jinja2 import Environment, PackageLoader, select_autoescape
from IPython.display import HTML
# MODULE INTRODUCTION
"""This module contains dislay functions to render the different data layers
using Jinja2 templates and IPython rendering methods for Jupyter Notebook."""
# GLOBAL VARIABLE DECLARATIONS
ENV = Environment(
loader=PackageLoader('ga-capstone', 'templates'),
autoescape=select_autoescape(['html', 'xml'])
)
def div(format, data): # HTML formatting utility function
"""Wraps 'data' inside a div of class 'format' for HTML printing."""
d = '<div class="{}">{}</div>' # Basic div template
return d.format(format, data)
def char(i, char, map_char, state):
"""Formats character information for html display. state can be
[blank/bold/plain/tag]."""
    top = div(map_char, char) # Create div for top part with the character
    bottom = div(state, i) # Create div for bottom part with the index
    filling = top + bottom # Stick the two together to fill the charbox
return div('charbox', filling)
def line(raw, char_map, states, pad=True):
"""Formats line information for html display. 'raw' should come from
data.raw, and 'char_map' should come from styler(data.map). pad=True will
add black squares to fill the row up to 80 characters."""
filling = ''
    for i, (c, m, s) in enumerate(zip(raw, char_map, states)):
filling += char(i, c, m, s)
if pad: # Check line length and initialize pad_length if creating a pad
pad_length = 80 - len(raw)
for x in range(pad_length):
filling += char(len(raw) + x, '', 'e', 'blank')
return div('stringbox', filling)
def show(content, style):
"""Takes a string representing HTML content and packages it for display in
IPython/Jupyter Notebook"""
window = div('window', content)
return div('viewer', window)
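# --- Illustrative usage: a minimal, hedged sketch of how the helpers above
# compose. The sample inputs (raw text, per-character style classes, states)
# are hypothetical and not part of the original module's API.
def _demo():
    raw = 'abc'
    char_map = ['m1', 'm2', 'm1']      # one style class per character
    states = ['plain', 'bold', 'tag']  # one display state per character
    content = line(raw, char_map, states, pad=False)
    # Wrap the returned HTML string so Jupyter renders it as rich output.
    return HTML(show(content, style=None))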
|
[
"jinja2.PackageLoader",
"jinja2.select_autoescape"
] |
[((338, 379), 'jinja2.PackageLoader', 'PackageLoader', (['"""ga-capstone"""', '"""templates"""'], {}), "('ga-capstone', 'templates')\n", (351, 379), False, 'from jinja2 import Environment, PackageLoader, select_autoescape\n'), ((396, 430), 'jinja2.select_autoescape', 'select_autoescape', (["['html', 'xml']"], {}), "(['html', 'xml'])\n", (413, 430), False, 'from jinja2 import Environment, PackageLoader, select_autoescape\n')]
|
from abc import ABCMeta
import numpy as np
from six.moves import xrange
import warnings
import collections
import cleverhans.utils as utils
from cleverhans.model import Model, CallableModelWrapper
class Attack(object):
"""
Abstract base class for all attack classes.
"""
__metaclass__ = ABCMeta
def __init__(self, model, back='tf', sess=None):
"""
:param model: An instance of the Model class.
:param back: The backend to use. Either 'tf' (default) or 'th'.
:param sess: The tf session to run graphs in (use None for Theano)
"""
if not(back == 'tf' or back == 'th'):
raise ValueError("Backend argument must either be 'tf' or 'th'.")
if back == 'th' and sess is not None:
raise Exception("A session should not be provided when using th.")
if not isinstance(model, Model):
if hasattr(model, '__call__'):
pass
else:
raise ValueError("The model argument should be an instance of"
" the Model class.")
if back == 'th':
warnings.warn("CleverHans support for Theano is deprecated and "
"will be dropped on 2017-11-08.")
# Prepare attributes
self.model = model
self.back = back
self.sess = sess
# We are going to keep track of old graphs and cache them.
self.graphs = {}
# When calling generate_np, arguments in the following set should be
# fed into the graph, as they are not structural items that require
# generating a new graph.
# This dict should map names of arguments to the types they should
# have.
# (Usually, the target class will be a feedable keyword argument.)
self.feedable_kwargs = {}
# When calling generate_np, arguments in the following set should NOT
# be fed into the graph, as they ARE structural items that require
# generating a new graph.
# This list should contain the names of the structural arguments.
self.structural_kwargs = []
def generate(self, x, **kwargs):
"""
Generate the attack's symbolic graph for adversarial examples. This
        method should be overridden in any child class that implements an
        attack that is expressible symbolically. Otherwise, it will wrap the
numerical implementation as a symbolic operator.
:param x: The model's symbolic inputs.
:param **kwargs: optional parameters used by child classes.
:return: A symbolic representation of the adversarial examples.
"""
if self.back == 'th':
raise NotImplementedError('Theano version not implemented.')
error = "Sub-classes must implement generate."
raise NotImplementedError(error)
def construct_graph(self, fixed, feedable, x_val, hash_key):
# try our very best to create a TF placeholder for each of the
# feedable keyword arguments, and check the types are one of
# the allowed types
import tensorflow as tf
new_kwargs = dict(x for x in fixed.items())
for name, value in feedable.items():
given_type = self.feedable_kwargs[name]
if isinstance(value, np.ndarray):
new_shape = [None] + list(value.shape[1:])
new_kwargs[name] = tf.placeholder(given_type, new_shape)
elif isinstance(value, utils.known_number_types):
new_kwargs[name] = tf.placeholder(given_type, shape=[])
else:
raise ValueError("Could not identify type of argument " +
name + ": " + str(value))
# x is a special placeholder we always want to have
x_shape = [None] + list(x_val.shape)[1:]
x = tf.placeholder(tf.float32, shape=x_shape)
# now we generate the graph that we want
x_adv = self.generate(x, **new_kwargs)
self.graphs[hash_key] = (x, new_kwargs, x_adv)
if len(self.graphs) >= 10:
warnings.warn("Calling generate_np() with multiple different "
"structural paramaters is inefficient and should"
" be avoided. Calling generate() is preferred.")
def generate_np(self, x_val, **kwargs):
"""
Generate adversarial examples and return them as a Numpy array.
Sub-classes *should not* implement this method unless they must
perform special handling of arguments.
:param x_val: A Numpy array with the original inputs.
:param **kwargs: optional parameters used by child classes.
:return: A Numpy array holding the adversarial examples.
"""
if self.back == 'th':
raise NotImplementedError('Theano version not implemented.')
import tensorflow as tf
if self.sess is None:
raise ValueError("Cannot use `generate_np` when no `sess` was"
" provided")
# the set of arguments that are structural properties of the attack
# if these arguments are different, we must construct a new graph
fixed = dict((k, v) for k, v in kwargs.items()
if k in self.structural_kwargs)
# the set of arguments that are passed as placeholders to the graph
# on each call, and can change without constructing a new graph
feedable = dict((k, v) for k, v in kwargs.items()
if k in self.feedable_kwargs)
if len(fixed) + len(feedable) < len(kwargs):
warnings.warn("Supplied extra keyword arguments that are not "
"used in the graph computation. They have been "
"ignored.")
if not all(isinstance(value, collections.Hashable)
for value in fixed.values()):
# we have received a fixed value that isn't hashable
# this means we can't cache this graph for later use,
# and it will have to be discarded later
hash_key = None
else:
            # create a unique key for this set of fixed parameters
hash_key = tuple(sorted(fixed.items()))
if hash_key not in self.graphs:
self.construct_graph(fixed, feedable, x_val, hash_key)
x, new_kwargs, x_adv = self.graphs[hash_key]
feed_dict = {x: x_val}
for name in feedable:
feed_dict[new_kwargs[name]] = feedable[name]
return self.sess.run(x_adv, feed_dict)
def parse_params(self, params=None):
"""
Take in a dictionary of parameters and applies attack-specific checks
before saving them as attributes.
:param params: a dictionary of attack-specific parameters
:return: True when parsing was successful
"""
return True
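# Illustrative note (editor's hedged sketch): how the fixed/feedable split in
# generate_np above behaves for a hypothetical subclass with
# structural_kwargs=['ord'] and feedable_kwargs={'eps': np.float32}:
#   attack.generate_np(x_val, ord=np.inf, eps=0.1)  # builds and caches a graph
#   attack.generate_np(x_val, ord=np.inf, eps=0.3)  # same structure: reuses it
#   attack.generate_np(x_val, ord=2, eps=0.1)       # new structure: new graph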
class MultipleModelAttack(object):
"""
    Abstract base class for attacks that run against multiple models.
"""
__metaclass__ = ABCMeta
def __init__(self, models, back='tf', sess=None):
"""
        :param models: A sequence of three Model instances.
:param back: The backend to use. Either 'tf' (default) or 'th'.
:param sess: The tf session to run graphs in (use None for Theano)
"""
if not(back == 'tf' or back == 'th'):
raise ValueError("Backend argument must either be 'tf' or 'th'.")
if back == 'th' and sess is not None:
raise Exception("A session should not be provided when using th.")
for model in models:
if not isinstance(model, Model):
if hasattr(model, '__call__'):
warnings.warn("CleverHans support for supplying a callable"
" instead of an instance of the Model class is"
" deprecated and will be dropped on 2018-01-11.")
else:
raise ValueError("The model argument should be an instance of"
" the Model class.")
if back == 'th':
warnings.warn("CleverHans support for Theano is deprecated and "
"will be dropped on 2017-11-08.")
# Prepare attributes
self.model1 = models[0]
self.model2 = models[1]
self.model3 = models[2]
self.back = back
self.sess = sess
# We are going to keep track of old graphs and cache them.
self.graphs = {}
# When calling generate_np, arguments in the following set should be
# fed into the graph, as they are not structural items that require
# generating a new graph.
# This dict should map names of arguments to the types they should
# have.
# (Usually, the target class will be a feedable keyword argument.)
self.feedable_kwargs = {}
# When calling generate_np, arguments in the following set should NOT
# be fed into the graph, as they ARE structural items that require
# generating a new graph.
# This list should contain the names of the structural arguments.
self.structural_kwargs = []
def generate(self, x, **kwargs):
"""
Generate the attack's symbolic graph for adversarial examples. This
        method should be overridden in any child class that implements an
        attack that is expressible symbolically. Otherwise, it will wrap the
numerical implementation as a symbolic operator.
:param x: The model's symbolic inputs.
:param **kwargs: optional parameters used by child classes.
:return: A symbolic representation of the adversarial examples.
"""
if self.back == 'th':
raise NotImplementedError('Theano version not implemented.')
error = "Sub-classes must implement generate."
raise NotImplementedError(error)
def construct_graph(self, fixed, feedable, x_val, hash_key):
# try our very best to create a TF placeholder for each of the
# feedable keyword arguments, and check the types are one of
# the allowed types
import tensorflow as tf
new_kwargs = dict(x for x in fixed.items())
for name, value in feedable.items():
given_type = self.feedable_kwargs[name]
if isinstance(value, np.ndarray):
new_shape = [None] + list(value.shape[1:])
new_kwargs[name] = tf.placeholder(given_type, new_shape)
elif isinstance(value, utils.known_number_types):
new_kwargs[name] = tf.placeholder(given_type, shape=[])
else:
raise ValueError("Could not identify type of argument " +
name + ": " + str(value))
# x is a special placeholder we always want to have
x_shape = [None] + list(x_val.shape)[1:]
x = tf.placeholder(tf.float32, shape=x_shape)
# now we generate the graph that we want
x_adv = self.generate(x, **new_kwargs)
self.graphs[hash_key] = (x, new_kwargs, x_adv)
if len(self.graphs) >= 10:
warnings.warn("Calling generate_np() with multiple different "
"structural paramaters is inefficient and should"
" be avoided. Calling generate() is preferred.")
def generate_np(self, x_val, **kwargs):
"""
Generate adversarial examples and return them as a Numpy array.
Sub-classes *should not* implement this method unless they must
perform special handling of arguments.
:param x_val: A Numpy array with the original inputs.
:param **kwargs: optional parameters used by child classes.
:return: A Numpy array holding the adversarial examples.
"""
if self.back == 'th':
raise NotImplementedError('Theano version not implemented.')
import tensorflow as tf
if self.sess is None:
raise ValueError("Cannot use `generate_np` when no `sess` was"
" provided")
# the set of arguments that are structural properties of the attack
# if these arguments are different, we must construct a new graph
fixed = dict((k, v) for k, v in kwargs.items()
if k in self.structural_kwargs)
# the set of arguments that are passed as placeholders to the graph
# on each call, and can change without constructing a new graph
feedable = dict((k, v) for k, v in kwargs.items()
if k in self.feedable_kwargs)
if len(fixed) + len(feedable) < len(kwargs):
warnings.warn("Supplied extra keyword arguments that are not "
"used in the graph computation. They have been "
"ignored.")
if not all(isinstance(value, collections.Hashable)
for value in fixed.values()):
# we have received a fixed value that isn't hashable
# this means we can't cache this graph for later use,
# and it will have to be discarded later
hash_key = None
else:
            # create a unique key for this set of fixed parameters
hash_key = tuple(sorted(fixed.items()))
if hash_key not in self.graphs:
self.construct_graph(fixed, feedable, x_val, hash_key)
x, new_kwargs, x_adv = self.graphs[hash_key]
feed_dict = {x: x_val}
for name in feedable:
feed_dict[new_kwargs[name]] = feedable[name]
return self.sess.run(x_adv, feed_dict)
def parse_params(self, params=None):
"""
Take in a dictionary of parameters and applies attack-specific checks
before saving them as attributes.
:param params: a dictionary of attack-specific parameters
:return: True when parsing was successful
"""
return True
class FastGradientMethod(Attack):
"""
This attack was originally implemented by Goodfellow et al. (2015) with the
infinity norm (and is known as the "Fast Gradient Sign Method"). This
implementation extends the attack to other norms, and is therefore called
the Fast Gradient Method.
Paper link: https://arxiv.org/abs/1412.6572
"""
def __init__(self, model, back='tf', sess=None):
"""
Create a FastGradientMethod instance.
"""
super(FastGradientMethod, self).__init__(model, back, sess)
self.feedable_kwargs = {'eps': np.float32,
'y': np.float32,
'clip_min': np.float32,
'clip_max': np.float32}
self.structural_kwargs = ['ord']
if not isinstance(self.model, Model):
self.model = CallableModelWrapper(self.model, 'probs')
def generate(self, x, **kwargs):
"""
Generate symbolic graph for adversarial examples and return.
:param x: The model's symbolic inputs.
:param eps: (optional float) attack step size (input variation)
:param ord: (optional) Order of the norm (mimics Numpy).
Possible values: np.inf, 1 or 2.
:param y: (optional) A tensor with the model labels. Only provide
this parameter if you'd like to use true labels when crafting
adversarial samples. Otherwise, model predictions are used as
labels to avoid the "label leaking" effect (explained in this
paper: https://arxiv.org/abs/1611.01236). Default is None.
Labels should be one-hot-encoded.
:param clip_min: (optional float) Minimum input component value
:param clip_max: (optional float) Maximum input component value
"""
# Parse and save attack-specific parameters
assert self.parse_params(**kwargs)
if self.back == 'tf':
from .attacks_tf import fgm
else:
from .attacks_th import fgm
return fgm(x, self.model.get_probs(x), y=self.y, eps=self.eps,
ord=self.ord, clip_min=self.clip_min,
clip_max=self.clip_max)
def parse_params(self, eps=0.3, ord=np.inf, y=None, clip_min=None,
clip_max=None, **kwargs):
"""
Take in a dictionary of parameters and applies attack-specific checks
before saving them as attributes.
Attack-specific parameters:
:param eps: (optional float) attack step size (input variation)
:param ord: (optional) Order of the norm (mimics Numpy).
Possible values: np.inf, 1 or 2.
:param y: (optional) A tensor with the model labels. Only provide
this parameter if you'd like to use true labels when crafting
adversarial samples. Otherwise, model predictions are used as
labels to avoid the "label leaking" effect (explained in this
paper: https://arxiv.org/abs/1611.01236). Default is None.
Labels should be one-hot-encoded.
:param clip_min: (optional float) Minimum input component value
:param clip_max: (optional float) Maximum input component value
"""
# Save attack-specific parameters
self.eps = eps
self.ord = ord
self.y = y
self.clip_min = clip_min
self.clip_max = clip_max
# Check if order of the norm is acceptable given current implementation
if self.ord not in [np.inf, int(1), int(2)]:
raise ValueError("Norm order must be either np.inf, 1, or 2.")
if self.back == 'th' and self.ord != np.inf:
raise NotImplementedError("The only FastGradientMethod norm "
"implemented for Theano is np.inf.")
return True
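# Illustrative usage (editor's hedged sketch): running FastGradientMethod end
# to end, assuming `model` is a cleverhans Model (or a callable returning
# class probabilities), `sess` is an active TensorFlow session, and `x_test`
# is a hypothetical batch of inputs scaled to [0, 1]:
#   fgsm = FastGradientMethod(model, back='tf', sess=sess)
#   x_adv = fgsm.generate_np(x_test, eps=0.3, ord=np.inf,
#                            clip_min=0., clip_max=1.)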
class MultiModelIterativeMethod(MultipleModelAttack):
"""
The Basic Iterative Method (Kurakin et al. 2016). The original paper used
hard labels for this attack; no label smoothing.
"""
def __init__(self, models, back='tf', sess=None):
"""
Create a BasicIterativeMethod instance.
"""
super(MultiModelIterativeMethod, self).__init__(models, back, sess)
self.feedable_kwargs = {'eps': np.float32,
'eps_iter': np.float32,
'y': np.float32,
'clip_min': np.float32,
'clip_max': np.float32}
self.structural_kwargs = ['ord', 'nb_iter']
if not isinstance(self.model1, Model):
self.model1 = CallableModelWrapper(self.model1, 'probs')
if not isinstance(self.model2, Model):
self.model2 = CallableModelWrapper(self.model2, 'probs')
if not isinstance(self.model3, Model):
self.model3 = CallableModelWrapper(self.model3, 'probs')
def generate(self, x, **kwargs):
"""
Generate symbolic graph for adversarial examples and return.
:param x: The model's symbolic inputs.
:param eps: (required float) maximum distortion of adversarial example
compared to original input
:param eps_iter: (required float) step size for each attack iteration
:param nb_iter: (required int) Number of attack iterations.
:param y: (required) A tensor with the model labels.
:param ord: (optional) Order of the norm (mimics Numpy).
Possible values: np.inf, 1 or 2.
:param clip_min: (optional float) Minimum input component value
:param clip_max: (optional float) Maximum input component value
"""
import tensorflow as tf
# Parse and save attack-specific parameters
assert self.parse_params(**kwargs)
# Initialize loop variables
eta = 0
        # Fix labels to the third model's predictions for loss computation
# model_preds1 = self.model1.get_probs(x)
# model_preds2 = self.model2.get_probs(x)
model_preds3 = self.model3.get_probs(x)
model_preds = model_preds3
preds_max = tf.reduce_max(model_preds, 1, keep_dims=True)
y = tf.to_float(tf.equal(model_preds, preds_max))
fgsm_params = {'eps': self.eps_iter, 'y': y, 'ord': self.ord}
for i in range(self.nb_iter):
FGSM1 = FastGradientMethod(self.model1, back=self.back, sess=self.sess)
FGSM2 = FastGradientMethod(self.model2, back=self.back, sess=self.sess)
FGSM3 = FastGradientMethod(self.model3, back=self.back, sess=self.sess)
# Compute this step's perturbation
eta1 = FGSM1.generate(x + eta, **fgsm_params) - x
eta2 = FGSM2.generate(x + eta, **fgsm_params) - x
eta3 = FGSM3.generate(x + eta, **fgsm_params) - x
            eta = (eta1 + eta2 + eta3) / 3.  # average the three perturbations
# Clipping perturbation eta to self.ord norm ball
if self.ord == np.inf:
eta = tf.clip_by_value(eta, -self.eps, self.eps)
elif self.ord in [1, 2]:
reduc_ind = list(xrange(1, len(eta.get_shape())))
if self.ord == 1:
norm = tf.reduce_sum(tf.abs(eta),
reduction_indices=reduc_ind,
keep_dims=True)
elif self.ord == 2:
norm = tf.sqrt(tf.reduce_sum(tf.square(eta),
reduction_indices=reduc_ind,
keep_dims=True))
eta = eta * self.eps / norm
# Define adversarial example (and clip if necessary)
adv_x = x + eta
if self.clip_min is not None and self.clip_max is not None:
adv_x = tf.clip_by_value(adv_x, self.clip_min, self.clip_max)
return adv_x
def parse_params(self, eps=0.3, eps_iter=0.05, nb_iter=10, y=None,
ord=np.inf, clip_min=None, clip_max=None, **kwargs):
"""
Take in a dictionary of parameters and applies attack-specific checks
before saving them as attributes.
Attack-specific parameters:
:param eps: (required float) maximum distortion of adversarial example
compared to original input
:param eps_iter: (required float) step size for each attack iteration
:param nb_iter: (required int) Number of attack iterations.
:param y: (required) A tensor with the model labels.
:param ord: (optional) Order of the norm (mimics Numpy).
Possible values: np.inf, 1 or 2.
:param clip_min: (optional float) Minimum input component value
:param clip_max: (optional float) Maximum input component value
"""
# Save attack-specific parameters
self.eps = eps
self.eps_iter = eps_iter
self.nb_iter = nb_iter
self.y = y
self.ord = ord
self.clip_min = clip_min
self.clip_max = clip_max
# Check if order of the norm is acceptable given current implementation
if self.ord not in [np.inf, 1, 2]:
raise ValueError("Norm order must be either np.inf, 1, or 2.")
if self.back == 'th':
error_string = "BasicIterativeMethod is not implemented in Theano"
raise NotImplementedError(error_string)
return True
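# Editor's sketch: the core loop of MultiModelIterativeMethod.generate above,
# reduced to pseudocode (symbolic tensors elided) to make the averaging and
# projection steps explicit:
#   eta = 0
#   for _ in range(nb_iter):
#       eta = mean(FGM_k(x + eta) - x over the three models)
#       eta = clip(eta, -eps, eps)            # project onto the norm ball
#   adv_x = clip(x + eta, clip_min, clip_max)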
|
[
"tensorflow.abs",
"tensorflow.clip_by_value",
"tensorflow.placeholder",
"tensorflow.square",
"cleverhans.model.CallableModelWrapper",
"warnings.warn",
"tensorflow.reduce_max",
"tensorflow.equal"
] |
[((3880, 3921), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': 'x_shape'}), '(tf.float32, shape=x_shape)\n', (3894, 3921), True, 'import tensorflow as tf\n'), ((10986, 11027), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': 'x_shape'}), '(tf.float32, shape=x_shape)\n', (11000, 11027), True, 'import tensorflow as tf\n'), ((20310, 20355), 'tensorflow.reduce_max', 'tf.reduce_max', (['model_preds', '(1)'], {'keep_dims': '(True)'}), '(model_preds, 1, keep_dims=True)\n', (20323, 20355), True, 'import tensorflow as tf\n'), ((1136, 1241), 'warnings.warn', 'warnings.warn', (['"""CleverHans support for Theano is deprecated and will be dropped on 2017-11-08."""'], {}), "(\n 'CleverHans support for Theano is deprecated and will be dropped on 2017-11-08.'\n )\n", (1149, 1241), False, 'import warnings\n'), ((4123, 4288), 'warnings.warn', 'warnings.warn', (['"""Calling generate_np() with multiple different structural paramaters is inefficient and should be avoided. Calling generate() is preferred."""'], {}), "(\n 'Calling generate_np() with multiple different structural paramaters is inefficient and should be avoided. Calling generate() is preferred.'\n )\n", (4136, 4288), False, 'import warnings\n'), ((5662, 5789), 'warnings.warn', 'warnings.warn', (['"""Supplied extra keyword arguments that are not used in the graph computation. They have been ignored."""'], {}), "(\n 'Supplied extra keyword arguments that are not used in the graph computation. They have been ignored.'\n )\n", (5675, 5789), False, 'import warnings\n'), ((8173, 8278), 'warnings.warn', 'warnings.warn', (['"""CleverHans support for Theano is deprecated and will be dropped on 2017-11-08."""'], {}), "(\n 'CleverHans support for Theano is deprecated and will be dropped on 2017-11-08.'\n )\n", (8186, 8278), False, 'import warnings\n'), ((11229, 11394), 'warnings.warn', 'warnings.warn', (['"""Calling generate_np() with multiple different structural paramaters is inefficient and should be avoided. Calling generate() is preferred."""'], {}), "(\n 'Calling generate_np() with multiple different structural paramaters is inefficient and should be avoided. Calling generate() is preferred.'\n )\n", (11242, 11394), False, 'import warnings\n'), ((12768, 12895), 'warnings.warn', 'warnings.warn', (['"""Supplied extra keyword arguments that are not used in the graph computation. They have been ignored."""'], {}), "(\n 'Supplied extra keyword arguments that are not used in the graph computation. They have been ignored.'\n )\n", (12781, 12895), False, 'import warnings\n'), ((14930, 14971), 'cleverhans.model.CallableModelWrapper', 'CallableModelWrapper', (['self.model', '"""probs"""'], {}), "(self.model, 'probs')\n", (14950, 14971), False, 'from cleverhans.model import Model, CallableModelWrapper\n'), ((18801, 18843), 'cleverhans.model.CallableModelWrapper', 'CallableModelWrapper', (['self.model1', '"""probs"""'], {}), "(self.model1, 'probs')\n", (18821, 18843), False, 'from cleverhans.model import Model, CallableModelWrapper\n'), ((18918, 18960), 'cleverhans.model.CallableModelWrapper', 'CallableModelWrapper', (['self.model2', '"""probs"""'], {}), "(self.model2, 'probs')\n", (18938, 18960), False, 'from cleverhans.model import Model, CallableModelWrapper\n'), ((19035, 19077), 'cleverhans.model.CallableModelWrapper', 'CallableModelWrapper', (['self.model3', '"""probs"""'], {}), "(self.model3, 'probs')\n", (19055, 19077), False, 'from cleverhans.model import Model, CallableModelWrapper\n'), ((20380, 20412), 'tensorflow.equal', 'tf.equal', (['model_preds', 'preds_max'], {}), '(model_preds, preds_max)\n', (20388, 20412), True, 'import tensorflow as tf\n'), ((22015, 22068), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['adv_x', 'self.clip_min', 'self.clip_max'], {}), '(adv_x, self.clip_min, self.clip_max)\n', (22031, 22068), True, 'import tensorflow as tf\n'), ((3435, 3472), 'tensorflow.placeholder', 'tf.placeholder', (['given_type', 'new_shape'], {}), '(given_type, new_shape)\n', (3449, 3472), True, 'import tensorflow as tf\n'), ((10541, 10578), 'tensorflow.placeholder', 'tf.placeholder', (['given_type', 'new_shape'], {}), '(given_type, new_shape)\n', (10555, 10578), True, 'import tensorflow as tf\n'), ((21191, 21233), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['eta', '(-self.eps)', 'self.eps'], {}), '(eta, -self.eps, self.eps)\n', (21207, 21233), True, 'import tensorflow as tf\n'), ((3570, 3606), 'tensorflow.placeholder', 'tf.placeholder', (['given_type'], {'shape': '[]'}), '(given_type, shape=[])\n', (3584, 3606), True, 'import tensorflow as tf\n'), ((7746, 7907), 'warnings.warn', 'warnings.warn', (['"""CleverHans support for supplying a callable instead of an instance of the Model class is deprecated and will be dropped on 2018-01-11."""'], {}), "(\n 'CleverHans support for supplying a callable instead of an instance of the Model class is deprecated and will be dropped on 2018-01-11.'\n )\n", (7759, 7907), False, 'import warnings\n'), ((10676, 10712), 'tensorflow.placeholder', 'tf.placeholder', (['given_type'], {'shape': '[]'}), '(given_type, shape=[])\n', (10690, 10712), True, 'import tensorflow as tf\n'), ((21412, 21423), 'tensorflow.abs', 'tf.abs', (['eta'], {}), '(eta)\n', (21418, 21423), True, 'import tensorflow as tf\n'), ((21637, 21651), 'tensorflow.square', 'tf.square', (['eta'], {}), '(eta)\n', (21646, 21651), True, 'import tensorflow as tf\n')]
|
import pytest
from medicare_appeals.appeals import models
from medicare_appeals.tests import factories
@pytest.fixture(scope='function')
def build_an_appeal():
"""
Build a single appeal
"""
    appeal = factories.AppealFactory()
    return appeal
@pytest.fixture(scope='function')
def build_two_appeals():
"""
Build two appeals with the description 'test{n}'
"""
    appeal1 = factories.AppealFactory(description='test0')
    appeal2 = factories.AppealFactory(description='test1')
    return appeal1, appeal2
@pytest.mark.django_db
def test_appeal(build_an_appeal):
"""
An appeal should be created
"""
assert models.Appeal.objects.count() == 1
@pytest.mark.django_db
def test_two_appeals(build_two_appeals):
"""
Two appeals should be created with description 'test{n}'
"""
appeals = models.Appeal.objects.all()
assert appeals.count() == 2
for idx, appeal in enumerate(appeals):
assert appeal.description == 'test{0}'.format(idx)
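# Editor's note (hedged): the fixture-plus-test pattern above could also be
# expressed with parametrization; a hypothetical equivalent using the same
# factory, not part of the original suite:
#   @pytest.mark.django_db
#   @pytest.mark.parametrize('idx', [0, 1])
#   def test_appeal_description(idx):
#       factories.AppealFactory(description='test{0}'.format(idx))
#       assert models.Appeal.objects.get().description == 'test{0}'.format(idx)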
|
[
"medicare_appeals.appeals.models.Appeal.objects.count",
"medicare_appeals.appeals.models.Appeal.objects.all",
"pytest.fixture",
"medicare_appeals.tests.factories.AppealFactory"
] |
[((106, 138), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (120, 138), False, 'import pytest\n'), ((246, 278), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (260, 278), False, 'import pytest\n'), ((217, 242), 'medicare_appeals.tests.factories.AppealFactory', 'factories.AppealFactory', ([], {}), '()\n', (240, 242), False, 'from medicare_appeals.tests import factories\n'), ((387, 431), 'medicare_appeals.tests.factories.AppealFactory', 'factories.AppealFactory', ([], {'description': '"""test0"""'}), "(description='test0')\n", (410, 431), False, 'from medicare_appeals.tests import factories\n'), ((446, 490), 'medicare_appeals.tests.factories.AppealFactory', 'factories.AppealFactory', ([], {'description': '"""test1"""'}), "(description='test1')\n", (469, 490), False, 'from medicare_appeals.tests import factories\n'), ((801, 828), 'medicare_appeals.appeals.models.Appeal.objects.all', 'models.Appeal.objects.all', ([], {}), '()\n', (826, 828), False, 'from medicare_appeals.appeals import models\n'), ((609, 638), 'medicare_appeals.appeals.models.Appeal.objects.count', 'models.Appeal.objects.count', ([], {}), '()\n', (636, 638), False, 'from medicare_appeals.appeals import models\n')]
|
from __future__ import absolute_import
from theanompi.lib.base import MPI_GPU_Process
from mpi4py import MPI
server_alpha = 0.5
class EASGD_Server(MPI_GPU_Process):
'''
An implementation of the server process in the Elastic Averaging SGD rule
https://arxiv.org/abs/1412.6651
implementation idea from platoon:
https://github.com/mila-udem/platoon/tree/master/platoon/channel
'''
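    # Editor's note (hedged sketch): the elastic-averaging exchange this
    # server takes part in. With server (center) parameters p_s, a worker
    # copy p_w, and elastic strength alpha (server_alpha above):
    #     p_w <- p_w - alpha * (p_w - p_s)   # worker pulled toward the center
    #     p_s <- p_s + alpha * (p_w - p_s)   # center pulled toward the worker
    # The actual GPU-to-GPU update is implemented by
    # theanompi.lib.exchanger.EASGD_Exchanger (see build() below).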
def __init__(self, device):
MPI_GPU_Process.__init__(self, device) # setup ctx, comm
self.worker_gpucomm = {} # gpucomm to be setup through worker registration
self.worker_id = {}
self.first_worker_id = None
self.valid = {}
self.uidx = {}
self.adj_lr = {}
self.last = None
self.last_uidx = 0
self.start_time = None
self.uepoch = 0
self.last_uepoch = 0
def process_request(self, model, worker_id, worker_rank, message):
reply = None
import time
# Connection related request
if message in ['sync_register']:
            if self.first_worker_id is None:
                self.first_worker_id = worker_id
                print('[Server] first worker is %s' % worker_id)
reply = 'first'
# rank -> id -> gpucomm
self.worker_id[str(worker_rank)] = int(worker_id)
print('[Server] registered worker %d' % worker_id)
return reply
try:
valid = self.valid['%s' % worker_id]
amount = self.uidx['%s' % worker_id]
adj_lr = self.adj_lr['%s' % worker_id]
except KeyError:
self.valid['%s' % worker_id] = False
self.adj_lr['%s' % worker_id] = False
self.uidx['%s' % worker_id] = 0
# when a new worker joins
self.adj_lr = self.adj_lr.fromkeys(self.adj_lr, True)
# Training related requests
if message == 'next':
if self.start_time is None:
self.start_time = time.time()
# stop when finish all epochs
if sum(self.uidx.values()) >= self.validFreq*model.n_epochs:
print("[Server] Total training time %.2fh" % \
((time.time() - self.start_time)/3600.0))
reply = 'stop'
elif self.valid['%s' % worker_id]:
self.valid['%s' % worker_id] = False
reply = 'val'
elif self.adj_lr['%s' % worker_id]:
self.adj_lr['%s' % worker_id] = False
reply = 'adjust_hyperp'
else:
reply = 'train'
elif 'done' in message:
self.uidx['%s' % worker_id] += message['done']
#print '[Server] uidx %d' % sum(self.uidx.values())
elif message == 'uepoch':
reply = [self.uepoch, len(self.worker_gpucomm)]
if message in ['next', 'uepoch'] or 'done' in message:
now_uidx = sum(self.uidx.values())
self.uepoch = int(now_uidx/self.validFreq)
if self.last_uepoch != self.uepoch:
#print "[Server] now global epoch %d" % self.uepoch
self.last_uepoch = self.uepoch
                # when an epoch is finished
self.adj_lr = self.adj_lr.fromkeys(self.adj_lr, True)
#self.valid = self.valid.fromkeys(self.valid, True)
# only the first worker validates
self.valid["%s" % self.first_worker_id] = True
            if self.last is None:
self.last = float(time.time())
if now_uidx - self.last_uidx >= 40:
now = float(time.time())
print('[Server] %d time per 40 batches: %.2f s' % \
(self.uepoch, (now - self.last)))
self.last_uidx = now_uidx
self.last = now
return reply
def action_after(self, model, worker_id, worker_rank, message):
if message == 'disconnect':
self.worker_gpucomm.pop(str(worker_id))
print('[Server] disconnected with worker %d' % worker_id)
elif message == 'stop':
print('[Server] stopped by %d' % worker_id)
import sys
sys.exit(0)
if message == 'sync_register':
gpucomm = self.get_intranode_pair_comm(pair=(0,worker_rank))
self.worker_gpucomm[str(worker_id)]= gpucomm
elif message == 'exchange':
self.exchanger.gpucomm = self.worker_gpucomm[str(worker_id)]
# self.exchanger.dest = worker_rank
self.exchanger.exchange()
elif message == 'copy_to_local':
self.exchanger.gpucomm = self.worker_gpucomm[str(worker_id)]
# self.exchanger.dest = worker_rank
self.exchanger.copy_to_local()
def build(self, model):
from theanompi.lib.helper_funcs import check_model
# check model has necessary attributes
check_model(model)
# choose the type of exchanger
from theanompi.lib.exchanger import EASGD_Exchanger
self.exchanger = EASGD_Exchanger(alpha=server_alpha,
param_list=model.params,
etype='server')
self.validFreq = model.data.n_batch_train
def run(self, model):
        if self.comm is None:
print('Server communicator not initialized')
return
print('server started')
while True:
# Wait for next request from client
request = self.comm.recv(source=MPI.ANY_SOURCE, tag=199)
# Do some process work and formulate a reply
reply = self.process_request(model, request['id'],
request['rank'],request['message'])
# Send reply back to client
self.comm.send(reply, dest=request['rank'], tag=200)
# Do some action work after reply
self.action_after(model, request['id'],
request['rank'], request['message'])
if __name__ == '__main__':
import sys
device = sys.argv[1]
modelfile = sys.argv[2]
modelclass = sys.argv[3]
try:
cpulist = sys.argv[4]
    except IndexError:
        pass
else: # optional binding cores using hwloc
from theanompi.lib.hwloc_utils import bind_to_socket_mem,detect_socket_num
bind_to_socket_mem(cpulist, label='train')
detect_socket_num(debug=True, label='train')
server = EASGD_Server(device)
config={}
config['verbose'] = False #(server.rank==0)
config['rank'] = 0
config['size'] = 1
config['no_paraload'] = True
import importlib
mod = importlib.import_module(modelfile)
modcls = getattr(mod, modelclass)
model = modcls(config)
server.build(model)
server.run(model)
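# Editor's note: invocation sketch matching the sys.argv parsing above
# (device, model module, model class, optional CPU list for hwloc binding).
# The script, module, and class names are placeholders, not part of the repo:
#     python easgd_server.py cuda0 some_model_module SomeModelClass [cpulist]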
|
[
"theanompi.lib.exchanger.EASGD_Exchanger",
"theanompi.lib.helper_funcs.check_model",
"importlib.import_module",
"theanompi.lib.hwloc_utils.detect_socket_num",
"time.time",
"theanompi.lib.base.MPI_GPU_Process.__init__",
"sys.exit",
"theanompi.lib.hwloc_utils.bind_to_socket_mem"
] |
[((7467, 7501), 'importlib.import_module', 'importlib.import_module', (['modelfile'], {}), '(modelfile)\n', (7490, 7501), False, 'import importlib\n'), ((461, 499), 'theanompi.lib.base.MPI_GPU_Process.__init__', 'MPI_GPU_Process.__init__', (['self', 'device'], {}), '(self, device)\n', (485, 499), False, 'from theanompi.lib.base import MPI_GPU_Process\n'), ((5433, 5451), 'theanompi.lib.helper_funcs.check_model', 'check_model', (['model'], {}), '(model)\n', (5444, 5451), False, 'from theanompi.lib.helper_funcs import check_model\n'), ((5585, 5661), 'theanompi.lib.exchanger.EASGD_Exchanger', 'EASGD_Exchanger', ([], {'alpha': 'server_alpha', 'param_list': 'model.params', 'etype': '"""server"""'}), "(alpha=server_alpha, param_list=model.params, etype='server')\n", (5600, 5661), False, 'from theanompi.lib.exchanger import EASGD_Exchanger\n'), ((7154, 7196), 'theanompi.lib.hwloc_utils.bind_to_socket_mem', 'bind_to_socket_mem', (['cpulist'], {'label': '"""train"""'}), "(cpulist, label='train')\n", (7172, 7196), False, 'from theanompi.lib.hwloc_utils import bind_to_socket_mem, detect_socket_num\n'), ((7205, 7249), 'theanompi.lib.hwloc_utils.detect_socket_num', 'detect_socket_num', ([], {'debug': '(True)', 'label': '"""train"""'}), "(debug=True, label='train')\n", (7222, 7249), False, 'from theanompi.lib.hwloc_utils import bind_to_socket_mem, detect_socket_num\n'), ((2158, 2169), 'time.time', 'time.time', ([], {}), '()\n', (2167, 2169), False, 'import time\n'), ((4588, 4599), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4596, 4599), False, 'import sys\n'), ((3827, 3838), 'time.time', 'time.time', ([], {}), '()\n', (3836, 3838), False, 'import time\n'), ((3934, 3945), 'time.time', 'time.time', ([], {}), '()\n', (3943, 3945), False, 'import time\n'), ((2375, 2386), 'time.time', 'time.time', ([], {}), '()\n', (2384, 2386), False, 'import time\n')]
|
from tgbot import plugintest
from sample_plugin import TestPlugin
class TestPluginTest(plugintest.PluginTestCase):
def setUp(self):
self.plugin = TestPlugin()
self.bot = self.fake_bot(
'',
plugins=[self.plugin],
)
def test_print_commands(self):
from cStringIO import StringIO
out = StringIO()
self.bot.print_commands(out=out)
self.assertEqual(out.getvalue(), '''\
echo - right back at ya
echo2 - right back at ya
save - save a note
read - read a note
savegroup - save a group note
readgroup - read a group note
''')
def test_reply(self):
self.receive_message('/echo test')
self.assertReplied('test')
self.receive_message('/echo sound 1 2 3')
self.assertReplied('sound 1 2 3')
def test_need_reply_user(self):
self.receive_message('test')
self.assertNoReplies()
self.receive_message('/echo')
self.assertReplied('echo what?')
self.receive_message('test')
self.assertReplied('test')
self.receive_message('sound')
self.assertNoReplies()
def test_need_reply_by_message_id(self):
self.receive_message('/echo')
self.assertReplied('echo what?')
self.clear_queues()
# wrong reply id, should be ignored
self.receive_message('test', reply_to_message=3)
self.assertNoReplies()
# correct reply id
self.receive_message('test', reply_to_message=2)
self.assertReplied('test')
def test_need_reply_group(self):
chat = {
'id': 1,
'title': 'test group',
'type': 'group',
}
self.receive_message('/echo', chat=chat)
self.assertReplied('echo what?')
# non-selective need_reply, should accept from any user
self.receive_message(
'test',
chat=chat,
sender={
'id': 2,
'first_name': 'Jane',
'last_name': 'Doe',
}
)
self.assertReplied('test')
def test_need_reply_selective_group(self):
chat = {
'id': 1,
'title': 'test group',
'type': 'group',
}
self.receive_message('/echo2', chat=chat)
self.assertReplied('echo what?')
# selective need_reply, should ignore other user
self.receive_message(
'test',
chat=chat,
sender={
'id': 2,
'first_name': 'Jane',
'last_name': 'Doe',
}
)
self.assertNoReplies()
self.receive_message(
'test',
chat=chat,
)
self.assertReplied('test')
def test_plugin_data_single(self):
self.receive_message('/save test 123')
self.assertReplied('saved')
self.receive_message('/read', sender={
'id': 2,
'first_name': 'Jane',
'last_name': 'Doe',
})
self.assertReplied('no note saved')
self.receive_message('/read')
self.assertReplied('your note: test 123')
self.receive_message('/save test 321')
self.assertReplied('saved')
self.receive_message('/read')
self.assertReplied('your note: test 321')
def test_plugin_data_group(self):
chat = {
'id': 99,
'title': 'test group',
}
self.receive_message('/savegroup test 123', chat=chat)
self.assertReplied('saved')
self.receive_message('/readgroup')
self.assertReplied('no note saved')
self.receive_message('/readgroup', chat=chat)
self.assertReplied('this group note: test 123')
self.receive_message('/readgroup', chat=chat, sender={
'id': 2,
'first_name': 'Jane',
'last_name': 'Doe',
})
self.assertReplied('this group note: test 123')
def test_prefix_cmd(self):
self.receive_message('/prefixcmd1')
self.assertReplied('1')
self.receive_message('/prefixcmd12@test_bot')
self.assertReplied('12')
self.receive_message('/prefixcmd@test_bot 123')
self.assertReplied('123')
def test_list_keys(self):
sender2 = {
'id': 2,
'first_name': 'Jane',
'last_name': 'Doe',
}
chat1 = {
'id': 3,
'title': 'test chat',
}
self.receive_message('/save note1') # 1 1
self.receive_message('/save note2', sender=sender2) # 2 2
self.receive_message('/save note3', chat=chat1) # 3 1
self.receive_message('/save note4', sender=sender2, chat=chat1) # 3 2
self.assertEqual(
list(self.plugin.iter_data_keys()),
['1', '2', '3'],
)
self.assertEqual(
list(self.plugin.iter_data_key_keys()),
[],
)
self.assertEqual(
list(self.plugin.iter_data_key_keys('1')),
['1'],
)
self.assertEqual(
list(self.plugin.iter_data_key_keys('2')),
['2'],
)
self.assertEqual(
list(self.plugin.iter_data_key_keys('3')),
['1', '2'],
)
self.plugin.save_data(3, key2=2)
self.assertEqual(
list(self.plugin.iter_data_key_keys('3')),
['1'],
)
|
[
"cStringIO.StringIO",
"sample_plugin.TestPlugin"
] |
[((160, 172), 'sample_plugin.TestPlugin', 'TestPlugin', ([], {}), '()\n', (170, 172), False, 'from sample_plugin import TestPlugin\n'), ((357, 367), 'cStringIO.StringIO', 'StringIO', ([], {}), '()\n', (365, 367), False, 'from cStringIO import StringIO\n')]
|
import cv2
import numpy as np
import sys, getopt
import time
import dlib
import math
i=False
class Controller():
def __init__(self):
self.detector = dlib.get_frontal_face_detector()
self.predictor = dlib.shape_predictor("shape_predictor_68_face_landmarks.dat")
self.frame = None
self.ret = None
self.faces = None
self.cap = cv2.VideoCapture(0)
#open phone camera API
self.address = None#"https://192.168.43.1:8080/video"
self.threshold = 35
self.grayFrame = None
self.cutEyes = None
self.img = cv2.imread("anime.jpg")
self.cutEyesGray = None
self.contours = None
self.capThreshold = None
self.left_eye = None
self.maskEyes = None
self.landmarks = None
self.min_x = None
self.max_x = None
self.min_y = None
self.max_y = None
self.otps = None
self.args = None
self.cameraIs = False
self.thresholdIs = False
self.rgbIs = False
self.eyeLinesIs = False
self.fx = 1
self.fy = 1
self.check = True
self.calibrationIsOkey = False
self.testImage = None
self.background = None
self.eyeCenter = None
self.maxArray = np.array([[0,0]])
self.maxMean = None
self.minArray = np.array([[0,0]])
self.minMean = None
self.key = None
self.time = 0
self.optIfBlock = 2
self.startTimeIfBlock = True
self.CalibFinishBlock = False
self.finalScreen = False
self.screenSizeX=1920
self.screenSizeY=1080
def getOtps(self):
try:
self.otps, self.args = getopt.getopt(sys.argv[1:],"h:c:t:r:a:e:",["help","cameradress","threshold","rgb","eyeline","halfcut","quartercut","calib"])
except getopt.GetoptError as err:
print(err)
sys.exit()
#self.otps = []
def nothing(self,x):
pass
def main(self):
self.getOtps()
for otp, arg in self.otps:
if otp == '-a':
self.address = str(arg)
self.cap.open(self.address)
elif otp == '--threshold':
self.thresholdIs = True
for ot , ar in self.otps:
if ot == '-t':
self.threshold = int(ar)
elif (otp == '-r' and arg == 'True'):
self.rgbIs = True
elif otp == '-e' and arg == 'True':
self.eyeLinesIs = True
elif otp == '-c' and arg == 'True':
self.cameraIs = True
elif otp == '--halfcut':
self.fx = 0.5
self.fy = 0.5
elif otp == '--quartercut':
self.fx = 0.25
self.fy = 0.25
elif otp == '--calib':
self.calibrationIsOkey = True
#TODO
#print(self.otps, self.args)
#self.optimizationEyesLooking()
array1 = [[0,0]]
openImage = False
o = False
while True:
            # step 1: open the camera and read a frame
try:
self.readSelf()
self.frame = cv2.resize(self.frame,None,fx=self.fx,fy=self.fy)
#TODO
#self.frame = cv2.rotate(self.frame,cv2.ROTATE_90_COUNTERCLOCKWISE)
self.grayFrame = cv2.cvtColor(self.frame, cv2.COLOR_BGR2GRAY)
except:
print("error")
self.faces = self.detector(self.grayFrame)
self.lanmarkingLeftEyes()
if self.eyeLinesIs == True:
self.eyeLines()
if self.cameraIs == True and self.ret:
cv2.imshow("Frame", self.frame)
            # step 2: optional debug windows (threshold etc.) for tuning the eye detection settings
if self.thresholdIs == True:
cv2.imshow("2",self.capThreshold)
if self.cutEyesGray is not None:
cv2.imshow("3", self.cutEyesGray)
self.key = cv2.waitKey(1)
            # step 3: calibration
            # key 'o'
if self.key == 79 or self.key == 111:
o = True
self.cameraIs = False
self.thresholdIs = False
cv2.destroyAllWindows()
#key 'space'
if self.key == 32:
cv2.destroyAllWindows()
o = False
openImage = True
if self.calibrationIsOkey == True and o == True:
self.optimizationEyesLooking()
if openImage == True:
self.lookingPointDrawCircle()
            # step 4: final screen
if self.finalScreen:
self.final_screen()
if self.key == 27:
break
self.cap.release()
cv2.destroyAllWindows()
def showImage(self):
self.testImage = cv2.imread('anime.jpg')
        imageH, imageW, imageChannels = self.testImage.shape
        # cv2.circle was missing its radius/colour arguments and referenced
        # self.rightMean/self.bottomMean, which are never set; scale by the
        # calibrated maxMean instead and draw a filled marker.
        cv2.circle(self.testImage,
                   (int(self.eyeCenter[0] * imageW / self.maxMean[0]),
                    int(self.eyeCenter[1] * imageH / self.maxMean[1])),
                   5, (0, 0, 255), -1)
def lookingPointDrawCircle(self):
self.thresholdIs = False
cv2.imshow("",self.img)
def readSelf(self):
self.ret, self.frame=self.cap.read()
def lanmarkingLeftEyes(self):
for face in self.faces:
#x = face.left()
#y = face.top()
#x1 = face.right()
#y1 = face.bottom()
self.landmarks = self.predictor(self.grayFrame, face)
self.left_eye = np.array([(self.landmarks.part(36).x, self.landmarks.part(36).y),
(self.landmarks.part(37).x, self.landmarks.part(37).y),
(self.landmarks.part(38).x, self.landmarks.part(38).y),
(self.landmarks.part(39).x, self.landmarks.part(39).y),
(self.landmarks.part(40).x, self.landmarks.part(40).y),
(self.landmarks.part(41).x, self.landmarks.part(41).y)], np.int32)
h, w, _ = self.frame.shape
mask = np.zeros((h, w), np.uint8)
cv2.polylines(mask, [self.left_eye], True, 255, 2)
cv2.fillPoly(mask, [self.left_eye], 255)
self.maskEyes = cv2.bitwise_and(self.grayFrame, self.grayFrame, mask=mask)
self.maskEyes = np.where(self.maskEyes==0, 255,self.maskEyes)
self.min_x = np.min(self.left_eye[:,0])
self.max_x = np.max(self.left_eye[:,0])
self.min_y = np.min(self.left_eye[:,1])
self.max_y = np.max(self.left_eye[:,1])
self.cutEyes = self.maskEyes[self.min_y : self.max_y, self.min_x : self.max_x]
self.cutEyes = cv2.resize(self.cutEyes, None, fx=5, fy=5)
self.capThreshold = cv2.GaussianBlur(self.cutEyes, (5,5), 0)
_, self.capThreshold = cv2.threshold(self.capThreshold, self.threshold, 255, cv2.THRESH_BINARY_INV)
self.contours, _ = cv2.findContours(self.capThreshold, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
for cnt in self.contours:
(x, y, w, h) = cv2.boundingRect(cnt)
cv2.rectangle(self.capThreshold, (x, y), (x + w, y + h), (255, 0, 0), 1)
#middle point x = x + int(w/2) y = y + int(h/2)
cv2.circle(self.cutEyes, (x+int(w/2),y+int(h/2)) ,5, (255,0,0),-1)
self.eyeCenter = [x+int(w/2),y+int(h/2)]
break
if self.rgbIs == True:
cv2.imshow("c", self.cutEyes)
def final_screen(self):
x,y=self.pC()
cv2.namedWindow("dd", cv2.WND_PROP_FULLSCREEN)
cv2.moveWindow("dd", screen.x - 1, screen.y - 1)
cv2.setWindowProperty("dd", cv2.WND_PROP_FULLSCREEN,cv2.WINDOW_FULLSCREEN)
        # round x and y to the nearest integer pixel (halves round up)
        x = math.floor(x + 0.5)
        y = math.floor(y + 0.5)
cv2.circle(self.img, (int(x),int(y)), 5, (0,0,0), -1)
#print("x:",x," y:",y, " eyecenter:",self.eyeCenter)
cv2.imshow("dd",self.img)
def pC(self):
#print(self.minMean)
return (self.screenSizeX*(self.eyeCenter[0]-self.minMean[0]))/(self.maxMean[0]-self.minMean[0]),(self.screenSizeY*(self.eyeCenter[1]-self.minMean[1]))/(self.maxMean[1]-self.minMean[1])
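    # Editor's note: pC() is a linear remap of the pupil position from the
    # calibrated eye-movement range [minMean, maxMean] onto screen pixels:
    #     screen_x = screenSizeX * (eye_x - min_x) / (max_x - min_x)
    # and likewise for y; e.g. a pupil halfway between the calibration
    # extremes lands at the centre of the screen.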
def eyeLines(self):
horizontalLineLeft = (self.landmarks.part(36).x, self.landmarks.part(36).y)
horizontalLineRight = (self.landmarks.part(39).x, self.landmarks.part(39).y)
verticalLineTop = (self.landmarks.part(38).x, self.landmarks.part(38).y)
verticalLineBottom = (self.landmarks.part(40).x, self.landmarks.part(40).y)
cv2.line(self.frame, horizontalLineLeft, horizontalLineRight,(0,255,0),1)
cv2.line(self.frame, verticalLineTop, verticalLineBottom,(0,255,0),1)
def getCutEyeShape(self,x,y,x1,y1):
return self.frame[y:y1, x:x1]
def optimizationEyesLooking(self):
background = np.zeros((screen.height,screen.width),np.uint8)
cv2.namedWindow("aa", cv2.WND_PROP_FULLSCREEN)
cv2.moveWindow("aa", screen.x - 1, screen.y - 1)
cv2.setWindowProperty("aa", cv2.WND_PROP_FULLSCREEN,cv2.WINDOW_FULLSCREEN)
if self.optIfBlock==1:
self.startTime(time.perf_counter())
if time.perf_counter()-self.time < 3:
if self.eyeCenter != None:
self.minArray = np.append(self.minArray, [self.eyeCenter],axis=0)
cv2.circle(background, (10,10), 5, (255,255,255), -1)
(text_width, text_height) = cv2.getTextSize("Follow point", cv2.FONT_HERSHEY_SIMPLEX, 0.8, 1)[0]
cv2.putText(background, "Follow point", ((screen.width//2)-(text_width//2),(screen.height//2)-30), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255), 1 , cv2.LINE_AA)
elif time.perf_counter()-self.time < 6 and time.perf_counter()-self.time > 3:
if self.eyeCenter != None:
self.maxArray = np.append(self.maxArray, [self.eyeCenter],axis=0)
cv2.circle(background, (screen.width-10,screen.height-10), 5, (255,255,255), -1)
(text_width, text_height) = cv2.getTextSize("Follow point", cv2.FONT_HERSHEY_SIMPLEX, 0.8, 1)[0]
cv2.putText(background, "Follow point", ((screen.width//2)-(text_width//2),(screen.height//2)-30), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255), 1 , cv2.LINE_AA)
elif time.perf_counter()-self.time == 6:
cv2.destroyAllWindows()
else:
self.CalibFinishBlock = True
self.calibrationIsOkey = True
self.check = True
self.optIfBlock=3
elif self.optIfBlock==2:
(text_width, text_height) = cv2.getTextSize("Kalibrasyonu ayarlamak için 's' tuşuna basın", cv2.FONT_HERSHEY_SIMPLEX, 0.8, 1)[0]
cv2.putText(background, "Kalibrasyonu ayarlamak için 's' tuşuna basın", ((screen.width//2)-(text_width//2),(screen.height//2)-30), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255), 1 , cv2.LINE_AA)
elif self.optIfBlock==3:
self.optFinish(background)
#key 's'
if self.key == 83 or self.key == 115:
self.optIfBlock = 1
#key 'i'
if self.key == 73 or self.key == 105:
self.minArray = self.minArray[1:]
self.maxArray = self.maxArray[1:]
#self.minMean = self.minArray.mean(0)
self.minMean = self.minArray.min(0)
#self.maxMean = self.maxArray.mean(0)
self.maxMean = self.maxArray.max(0)
self.calibrationIsOkey=False
self.finalScreen=True
cv2.destroyWindow("aa")
else:
cv2.imshow("aa",background)
def optFinish(self, stage):
(text_width, text_height) = cv2.getTextSize("Go to do image 'i'", cv2.FONT_HERSHEY_SIMPLEX, 0.8, 1)[0]
cv2.putText(stage, "Go to do image 'i'", ((screen.width//2)-(text_width//2),(screen.height//2)-30), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255), 1 , cv2.LINE_AA)
cv2.imshow("aa",stage)
def startTime(self, time):
if self.startTimeIfBlock:
self.time = time
self.startTimeIfBlock = False
def getCameraShape(self):
for i in range(3):
print(self.frame.shape[i])
        return self.frame.shape[1], self.frame.shape[0]  # width, height
if __name__ == "__main__":
import screeninfo
screen = screeninfo.get_monitors()[0]
ct = Controller()
ct.main()
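# Editor's note: command-line sketch matching the getopt spec above
# ("h:c:t:r:a:e:" plus the long flags); the script name and values are
# illustrative only:
#     python eye_controller.py -c True -t 40 --threshold --calib
#     python eye_controller.py -a https://192.168.43.1:8080/video --halfcut
# Run from a directory containing shape_predictor_68_face_landmarks.dat
# and anime.jpg.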
|
[
"cv2.GaussianBlur",
"getopt.getopt",
"cv2.bitwise_and",
"cv2.fillPoly",
"cv2.rectangle",
"screeninfo.get_monitors",
"cv2.imshow",
"dlib.shape_predictor",
"cv2.line",
"cv2.cvtColor",
"numpy.append",
"numpy.max",
"cv2.destroyAllWindows",
"cv2.boundingRect",
"cv2.resize",
"cv2.circle",
"cv2.waitKey",
"time.perf_counter",
"numpy.min",
"dlib.get_frontal_face_detector",
"sys.exit",
"cv2.findContours",
"cv2.putText",
"cv2.polylines",
"cv2.threshold",
"cv2.getTextSize",
"cv2.setWindowProperty",
"numpy.zeros",
"math.floor",
"cv2.VideoCapture",
"cv2.imread",
"numpy.where",
"numpy.array",
"cv2.destroyWindow",
"cv2.moveWindow",
"cv2.namedWindow"
] |
[((161, 193), 'dlib.get_frontal_face_detector', 'dlib.get_frontal_face_detector', ([], {}), '()\n', (191, 193), False, 'import dlib\n'), ((219, 280), 'dlib.shape_predictor', 'dlib.shape_predictor', (['"""shape_predictor_68_face_landmarks.dat"""'], {}), "('shape_predictor_68_face_landmarks.dat')\n", (239, 280), False, 'import dlib\n'), ((376, 395), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (392, 395), False, 'import cv2\n'), ((594, 617), 'cv2.imread', 'cv2.imread', (['"""anime.jpg"""'], {}), "('anime.jpg')\n", (604, 617), False, 'import cv2\n'), ((1296, 1314), 'numpy.array', 'np.array', (['[[0, 0]]'], {}), '([[0, 0]])\n', (1304, 1314), True, 'import numpy as np\n'), ((1366, 1384), 'numpy.array', 'np.array', (['[[0, 0]]'], {}), '([[0, 0]])\n', (1374, 1384), True, 'import numpy as np\n'), ((4946, 4969), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (4967, 4969), False, 'import cv2\n'), ((5020, 5043), 'cv2.imread', 'cv2.imread', (['"""anime.jpg"""'], {}), "('anime.jpg')\n", (5030, 5043), False, 'import cv2\n'), ((5112, 5241), 'cv2.circle', 'cv2.circle', (['self.testImage', '(self.eyeCenter[0] * imageW / self.rightMean[0], self.eyeCenter[1] * imageH /\n self.bottomMean[1])'], {}), '(self.testImage, (self.eyeCenter[0] * imageW / self.rightMean[0],\n self.eyeCenter[1] * imageH / self.bottomMean[1]))\n', (5122, 5241), False, 'import cv2\n'), ((5322, 5346), 'cv2.imshow', 'cv2.imshow', (['""""""', 'self.img'], {}), "('', self.img)\n", (5332, 5346), False, 'import cv2\n'), ((7799, 7845), 'cv2.namedWindow', 'cv2.namedWindow', (['"""dd"""', 'cv2.WND_PROP_FULLSCREEN'], {}), "('dd', cv2.WND_PROP_FULLSCREEN)\n", (7814, 7845), False, 'import cv2\n'), ((7854, 7902), 'cv2.moveWindow', 'cv2.moveWindow', (['"""dd"""', '(screen.x - 1)', '(screen.y - 1)'], {}), "('dd', screen.x - 1, screen.y - 1)\n", (7868, 7902), False, 'import cv2\n'), ((7911, 7986), 'cv2.setWindowProperty', 'cv2.setWindowProperty', (['"""dd"""', 'cv2.WND_PROP_FULLSCREEN', 'cv2.WINDOW_FULLSCREEN'], {}), "('dd', cv2.WND_PROP_FULLSCREEN, cv2.WINDOW_FULLSCREEN)\n", (7932, 7986), False, 'import cv2\n'), ((8359, 8385), 'cv2.imshow', 'cv2.imshow', (['"""dd"""', 'self.img'], {}), "('dd', self.img)\n", (8369, 8385), False, 'import cv2\n'), ((8997, 9074), 'cv2.line', 'cv2.line', (['self.frame', 'horizontalLineLeft', 'horizontalLineRight', '(0, 255, 0)', '(1)'], {}), '(self.frame, horizontalLineLeft, horizontalLineRight, (0, 255, 0), 1)\n', (9005, 9074), False, 'import cv2\n'), ((9079, 9152), 'cv2.line', 'cv2.line', (['self.frame', 'verticalLineTop', 'verticalLineBottom', '(0, 255, 0)', '(1)'], {}), '(self.frame, verticalLineTop, verticalLineBottom, (0, 255, 0), 1)\n', (9087, 9152), False, 'import cv2\n'), ((9304, 9353), 'numpy.zeros', 'np.zeros', (['(screen.height, screen.width)', 'np.uint8'], {}), '((screen.height, screen.width), np.uint8)\n', (9312, 9353), True, 'import numpy as np\n'), ((9360, 9406), 'cv2.namedWindow', 'cv2.namedWindow', (['"""aa"""', 'cv2.WND_PROP_FULLSCREEN'], {}), "('aa', cv2.WND_PROP_FULLSCREEN)\n", (9375, 9406), False, 'import cv2\n'), ((9415, 9463), 'cv2.moveWindow', 'cv2.moveWindow', (['"""aa"""', '(screen.x - 1)', '(screen.y - 1)'], {}), "('aa', screen.x - 1, screen.y - 1)\n", (9429, 9463), False, 'import cv2\n'), ((9472, 9547), 'cv2.setWindowProperty', 'cv2.setWindowProperty', (['"""aa"""', 'cv2.WND_PROP_FULLSCREEN', 'cv2.WINDOW_FULLSCREEN'], {}), "('aa', cv2.WND_PROP_FULLSCREEN, cv2.WINDOW_FULLSCREEN)\n", (9493, 9547), False, 'import cv2\n'), ((12299, 12476), 'cv2.putText', 'cv2.putText', (['stage', '"""Go to do image \'i\'"""', '(screen.width // 2 - text_width // 2, screen.height // 2 - 30)', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.8)', '(255, 255, 255)', '(1)', 'cv2.LINE_AA'], {}), '(stage, "Go to do image \'i\'", (screen.width // 2 - text_width //\n 2, screen.height // 2 - 30), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255, 255, \n 255), 1, cv2.LINE_AA)\n', (12310, 12476), False, 'import cv2\n'), ((12470, 12493), 'cv2.imshow', 'cv2.imshow', (['"""aa"""', 'stage'], {}), "('aa', stage)\n", (12480, 12493), False, 'import cv2\n'), ((12837, 12862), 'screeninfo.get_monitors', 'screeninfo.get_monitors', ([], {}), '()\n', (12860, 12862), False, 'import screeninfo\n'), ((1725, 1862), 'getopt.getopt', 'getopt.getopt', (['sys.argv[1:]', '"""h:c:t:r:a:e:"""', "['help', 'cameradress', 'threshold', 'rgb', 'eyeline', 'halfcut',\n 'quartercut', 'calib']"], {}), "(sys.argv[1:], 'h:c:t:r:a:e:', ['help', 'cameradress',\n 'threshold', 'rgb', 'eyeline', 'halfcut', 'quartercut', 'calib'])\n", (1738, 1862), False, 'import sys, getopt\n'), ((4116, 4130), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (4127, 4130), False, 'import cv2\n'), ((6283, 6309), 'numpy.zeros', 'np.zeros', (['(h, w)', 'np.uint8'], {}), '((h, w), np.uint8)\n', (6291, 6309), True, 'import numpy as np\n'), ((6322, 6372), 'cv2.polylines', 'cv2.polylines', (['mask', '[self.left_eye]', '(True)', '(255)', '(2)'], {}), '(mask, [self.left_eye], True, 255, 2)\n', (6335, 6372), False, 'import cv2\n'), ((6385, 6425), 'cv2.fillPoly', 'cv2.fillPoly', (['mask', '[self.left_eye]', '(255)'], {}), '(mask, [self.left_eye], 255)\n', (6397, 6425), False, 'import cv2\n'), ((6454, 6512), 'cv2.bitwise_and', 'cv2.bitwise_and', (['self.grayFrame', 'self.grayFrame'], {'mask': 'mask'}), '(self.grayFrame, self.grayFrame, mask=mask)\n', (6469, 6512), False, 'import cv2\n'), ((6541, 6589), 'numpy.where', 'np.where', (['(self.maskEyes == 0)', '(255)', 'self.maskEyes'], {}), '(self.maskEyes == 0, 255, self.maskEyes)\n', (6549, 6589), True, 'import numpy as np\n'), ((6613, 6640), 'numpy.min', 'np.min', (['self.left_eye[:, 0]'], {}), '(self.left_eye[:, 0])\n', (6619, 6640), True, 'import numpy as np\n'), ((6665, 6692), 'numpy.max', 'np.max', (['self.left_eye[:, 0]'], {}), '(self.left_eye[:, 0])\n', (6671, 6692), True, 'import numpy as np\n'), ((6717, 6744), 'numpy.min', 'np.min', (['self.left_eye[:, 1]'], {}), '(self.left_eye[:, 1])\n', (6723, 6744), True, 'import numpy as np\n'), ((6769, 6796), 'numpy.max', 'np.max', (['self.left_eye[:, 1]'], {}), '(self.left_eye[:, 1])\n', (6775, 6796), True, 'import numpy as np\n'), ((6915, 6957), 'cv2.resize', 'cv2.resize', (['self.cutEyes', 'None'], {'fx': '(5)', 'fy': '(5)'}), '(self.cutEyes, None, fx=5, fy=5)\n', (6925, 6957), False, 'import cv2\n'), ((6991, 7032), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['self.cutEyes', '(5, 5)', '(0)'], {}), '(self.cutEyes, (5, 5), 0)\n', (7007, 7032), False, 'import cv2\n'), ((7067, 7143), 'cv2.threshold', 'cv2.threshold', (['self.capThreshold', 'self.threshold', '(255)', 'cv2.THRESH_BINARY_INV'], {}), '(self.capThreshold, self.threshold, 255, cv2.THRESH_BINARY_INV)\n', (7080, 7143), False, 'import cv2\n'), ((7175, 7250), 'cv2.findContours', 'cv2.findContours', (['self.capThreshold', 'cv2.RETR_TREE', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(self.capThreshold, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n', (7191, 7250), False, 'import cv2\n'), ((8027, 8040), 'math.floor', 'math.floor', (['x'], {}), '(x)\n', (8037, 8040), False, 'import math\n'), ((8093, 8106), 'math.floor', 'math.floor', (['x'], {}), '(x)\n', (8103, 8106), False, 'import math\n'), ((8148, 8161), 'math.floor', 'math.floor', (['y'], {}), '(y)\n', (8158, 8161), False, 'import math\n'), ((8214, 8227), 'math.floor', 'math.floor', (['y'], {}), '(y)\n', (8224, 8227), False, 'import math\n'), ((12068, 12091), 'cv2.destroyWindow', 'cv2.destroyWindow', (['"""aa"""'], {}), "('aa')\n", (12085, 12091), False, 'import cv2\n'), ((12118, 12146), 'cv2.imshow', 'cv2.imshow', (['"""aa"""', 'background'], {}), "('aa', background)\n", (12128, 12146), False, 'import cv2\n'), ((12216, 12287), 'cv2.getTextSize', 'cv2.getTextSize', (['"""Go to do image \'i\'"""', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.8)', '(1)'], {}), '("Go to do image \'i\'", cv2.FONT_HERSHEY_SIMPLEX, 0.8, 1)\n', (12231, 12287), False, 'import cv2\n'), ((1927, 1937), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1935, 1937), False, 'import sys, getopt\n'), ((3238, 3290), 'cv2.resize', 'cv2.resize', (['self.frame', 'None'], {'fx': 'self.fx', 'fy': 'self.fy'}), '(self.frame, None, fx=self.fx, fy=self.fy)\n', (3248, 3290), False, 'import cv2\n'), ((3427, 3471), 'cv2.cvtColor', 'cv2.cvtColor', (['self.frame', 'cv2.COLOR_BGR2GRAY'], {}), '(self.frame, cv2.COLOR_BGR2GRAY)\n', (3439, 3471), False, 'import cv2\n'), ((3768, 3799), 'cv2.imshow', 'cv2.imshow', (['"""Frame"""', 'self.frame'], {}), "('Frame', self.frame)\n", (3778, 3799), False, 'import cv2\n'), ((3962, 3996), 'cv2.imshow', 'cv2.imshow', (['"""2"""', 'self.capThreshold'], {}), "('2', self.capThreshold)\n", (3972, 3996), False, 'import cv2\n'), ((4057, 4090), 'cv2.imshow', 'cv2.imshow', (['"""3"""', 'self.cutEyesGray'], {}), "('3', self.cutEyesGray)\n", (4067, 4090), False, 'import cv2\n'), ((4355, 4378), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (4376, 4378), False, 'import cv2\n'), ((4451, 4474), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (4472, 4474), False, 'import cv2\n'), ((7321, 7342), 'cv2.boundingRect', 'cv2.boundingRect', (['cnt'], {}), '(cnt)\n', (7337, 7342), False, 'import cv2\n'), ((7359, 7431), 'cv2.rectangle', 'cv2.rectangle', (['self.capThreshold', '(x, y)', '(x + w, y + h)', '(255, 0, 0)', '(1)'], {}), '(self.capThreshold, (x, y), (x + w, y + h), (255, 0, 0), 1)\n', (7372, 7431), False, 'import cv2\n'), ((7711, 7740), 'cv2.imshow', 'cv2.imshow', (['"""c"""', 'self.cutEyes'], {}), "('c', self.cutEyes)\n", (7721, 7740), False, 'import cv2\n'), ((9615, 9634), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (9632, 9634), False, 'import time\n'), ((9831, 9887), 'cv2.circle', 'cv2.circle', (['background', '(10, 10)', '(5)', '(255, 255, 255)', '(-1)'], {}), '(background, (10, 10), 5, (255, 255, 255), -1)\n', (9841, 9887), False, 'import cv2\n'), ((10014, 10191), 'cv2.putText', 'cv2.putText', (['background', '"""Follow point"""', '(screen.width // 2 - text_width // 2, screen.height // 2 - 30)', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.8)', '(255, 255, 255)', '(1)', 'cv2.LINE_AA'], {}), "(background, 'Follow point', (screen.width // 2 - text_width // \n 2, screen.height // 2 - 30), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255, 255, \n 255), 1, cv2.LINE_AA)\n", (10025, 10191), False, 'import cv2\n'), ((11253, 11462), 'cv2.putText', 'cv2.putText', (['background', '"""Kalibrasyonu ayarlamak için \'s\' tuşuna basın"""', '(screen.width // 2 - text_width // 2, screen.height // 2 - 30)', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.8)', '(255, 255, 255)', '(1)', 'cv2.LINE_AA'], {}), '(background, "Kalibrasyonu ayarlamak için \'s\' tuşuna basın", (\n screen.width // 2 - text_width // 2, screen.height // 2 - 30), cv2.\n FONT_HERSHEY_SIMPLEX, 0.8, (255, 255, 255), 1, cv2.LINE_AA)\n', (11264, 11462), False, 'import cv2\n'), ((9651, 9670), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (9668, 9670), False, 'import time\n'), ((9765, 9815), 'numpy.append', 'np.append', (['self.minArray', '[self.eyeCenter]'], {'axis': '(0)'}), '(self.minArray, [self.eyeCenter], axis=0)\n', (9774, 9815), True, 'import numpy as np\n'), ((9929, 9994), 'cv2.getTextSize', 'cv2.getTextSize', (['"""Follow point"""', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.8)', '(1)'], {}), "('Follow point', cv2.FONT_HERSHEY_SIMPLEX, 0.8, 1)\n", (9944, 9994), False, 'import cv2\n'), ((10412, 10504), 'cv2.circle', 'cv2.circle', (['background', '(screen.width - 10, screen.height - 10)', '(5)', '(255, 255, 255)', '(-1)'], {}), '(background, (screen.width - 10, screen.height - 10), 5, (255, \n 255, 255), -1)\n', (10422, 10504), False, 'import cv2\n'), ((10622, 10799), 'cv2.putText', 'cv2.putText', (['background', '"""Follow point"""', '(screen.width // 2 - text_width // 2, screen.height // 2 - 30)', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.8)', '(255, 255, 255)', '(1)', 'cv2.LINE_AA'], {}), "(background, 'Follow point', (screen.width // 2 - text_width // \n 2, screen.height // 2 - 30), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255, 255, \n 255), 1, cv2.LINE_AA)\n", (10633, 10799), False, 'import cv2\n'), ((11140, 11242), 'cv2.getTextSize', 'cv2.getTextSize', (['"""Kalibrasyonu ayarlamak için \'s\' tuşuna basın"""', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.8)', '(1)'], {}), '("Kalibrasyonu ayarlamak için \'s\' tuşuna basın", cv2.\n FONT_HERSHEY_SIMPLEX, 0.8, 1)\n', (11155, 11242), False, 'import cv2\n'), ((10346, 10396), 'numpy.append', 'np.append', (['self.maxArray', '[self.eyeCenter]'], {'axis': '(0)'}), '(self.maxArray, [self.eyeCenter], axis=0)\n', (10355, 10396), True, 'import numpy as np\n'), ((10537, 10602), 'cv2.getTextSize', 'cv2.getTextSize', (['"""Follow point"""', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.8)', '(1)'], {}), "('Follow point', cv2.FONT_HERSHEY_SIMPLEX, 0.8, 1)\n", (10552, 10602), False, 'import cv2\n'), ((10853, 10876), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (10874, 10876), False, 'import cv2\n'), ((10194, 10213), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (10211, 10213), False, 'import time\n'), ((10232, 10251), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (10249, 10251), False, 'import time\n'), ((10801, 10820), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (10818, 10820), False, 'import time\n')]
|
# Copyright (c) The Diem Core Contributors
# SPDX-License-Identifier: Apache-2.0
"""Provides utilities for working with Diem Testnet.
```python
from diem import testnet
from diem.testing import LocalAccount
# create client connects to testnet
client = testnet.create_client()
# create faucet for minting coins for your testing account
faucet = testnet.Faucet(client)
# create a local account and mint some coins for it
account: LocalAccount = faucet.gen_account()
```
"""
import requests
import typing
from . import diem_types, jsonrpc, utils, chain_ids, bcs, identifier, stdlib
from .testing import LocalAccount, DD_ADDRESS
JSON_RPC_URL: str = "https://testnet.diem.com/v1"
FAUCET_URL: str = "https://testnet.diem.com/mint"
CHAIN_ID: diem_types.ChainId = chain_ids.TESTNET
DESIGNATED_DEALER_ADDRESS: diem_types.AccountAddress = utils.account_address(DD_ADDRESS)
TEST_CURRENCY_CODE: str = "XUS"
HRP: str = identifier.TDM
def create_client() -> jsonrpc.Client:
"""create a jsonrpc.Client connects to Testnet public full node cluster"""
return jsonrpc.Client(JSON_RPC_URL)
def gen_vasp_account(client: jsonrpc.Client, base_url: str) -> LocalAccount:
raise Exception("deprecated: use `gen_account` instead")
def gen_account(
client: jsonrpc.Client, dd_account: bool = False, base_url: typing.Optional[str] = None
) -> LocalAccount:
"""generates a Testnet onchain account"""
account = Faucet(client).gen_account(dd_account=dd_account)
if base_url:
payload = stdlib.encode_rotate_dual_attestation_info_script_function(
new_url=base_url.encode("utf-8"), new_key=account.compliance_public_key_bytes
)
apply_txn(client, account, payload)
return account
def gen_child_vasp(
client: jsonrpc.Client,
parent_vasp: LocalAccount,
initial_balance: int = 10_000_000_000,
currency: str = TEST_CURRENCY_CODE,
) -> LocalAccount:
child, payload = parent_vasp.new_child_vasp(initial_balance, currency)
apply_txn(client, parent_vasp, payload)
return child
def apply_txn(
client: jsonrpc.Client, vasp: LocalAccount, payload: diem_types.TransactionPayload
) -> jsonrpc.Transaction:
seq = client.get_account_sequence(vasp.account_address)
txn = vasp.create_signed_txn(seq, payload)
client.submit(txn)
return client.wait_for_transaction(txn)
class Faucet:
"""Faucet service is a proxy server to mint coins for your test account on Testnet
See https://github.com/diem/diem/blob/master/json-rpc/docs/service_testnet_faucet.md for more details
"""
def __init__(
self,
client: jsonrpc.Client,
url: typing.Union[str, None] = None,
retry: typing.Union[jsonrpc.Retry, None] = None,
) -> None:
self._client: jsonrpc.Client = client
self._url: str = url or FAUCET_URL
self._retry: jsonrpc.Retry = retry or jsonrpc.Retry(5, 0.2, Exception)
self._session: requests.Session = requests.Session()
def gen_account(self, currency_code: str = TEST_CURRENCY_CODE, dd_account: bool = False) -> LocalAccount:
account = LocalAccount.generate()
self.mint(account.auth_key.hex(), 100_000_000_000, currency_code, dd_account)
return account
def mint(
self,
authkey: str,
amount: int,
currency_code: str,
dd_account: bool = False,
vasp_domain: typing.Optional[str] = None,
is_remove_domain: bool = False,
) -> None:
self._retry.execute(
lambda: self._mint_without_retry(authkey, amount, currency_code, dd_account, vasp_domain, is_remove_domain)
)
def _mint_without_retry(
self,
authkey: str,
amount: int,
currency_code: str,
dd_account: bool = False,
vasp_domain: typing.Optional[str] = None,
is_remove_domain: bool = False,
) -> None:
response = self._session.post(
self._url,
params={
"amount": amount,
"auth_key": authkey,
"currency_code": currency_code,
"return_txns": "true",
"is_designated_dealer": "true" if dd_account else "false",
"vasp_domain": vasp_domain,
"is_remove_domain": "true" if is_remove_domain else "false",
},
)
response.raise_for_status()
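        # The faucet responds with a hex-encoded BCS list of the signed
        # transactions it submitted; deserialize them and wait for each
        # to land on-chain.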
de = bcs.BcsDeserializer(bytes.fromhex(response.text))
length = de.deserialize_len()
for i in range(length):
txn = de.deserialize_any(diem_types.SignedTransaction)
try:
self._client.wait_for_transaction(txn)
except jsonrpc.TransactionExecutionFailed as e:
if e.txn.vm_status.explanation.reason == "EDOMAIN_ALREADY_EXISTS":
continue
raise e
|
[
"requests.Session"
] |
[((2967, 2985), 'requests.Session', 'requests.Session', ([], {}), '()\n', (2983, 2985), False, 'import requests\n')]
|
# coding: utf-8
"""
Cloudsmith API
The API to the Cloudsmith Service
OpenAPI spec version: v1
Contact: <EMAIL>
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class WebhooksApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def webhooks_create(self, owner, repo, **kwargs):
"""
Create a specific webhook in a repository.
Create a specific webhook in a repository.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.webhooks_create(owner, repo, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str owner: (required)
:param str repo: (required)
:param WebhooksCreate data:
:return: RepositoryWebhook
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.webhooks_create_with_http_info(owner, repo, **kwargs)
else:
(data) = self.webhooks_create_with_http_info(owner, repo, **kwargs)
return data
def webhooks_create_with_http_info(self, owner, repo, **kwargs):
"""
Create a specific webhook in a repository.
Create a specific webhook in a repository.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.webhooks_create_with_http_info(owner, repo, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str owner: (required)
:param str repo: (required)
:param WebhooksCreate data:
:return: RepositoryWebhook
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner', 'repo', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method webhooks_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner' is set
if ('owner' not in params) or (params['owner'] is None):
raise ValueError("Missing the required parameter `owner` when calling `webhooks_create`")
# verify the required parameter 'repo' is set
if ('repo' not in params) or (params['repo'] is None):
raise ValueError("Missing the required parameter `repo` when calling `webhooks_create`")
collection_formats = {}
path_params = {}
if 'owner' in params:
path_params['owner'] = params['owner']
if 'repo' in params:
path_params['repo'] = params['repo']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['apikey']
return self.api_client.call_api('/webhooks/{owner}/{repo}/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RepositoryWebhook',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def webhooks_delete(self, owner, repo, identifier, **kwargs):
"""
Delete a specific webhook in a repository.
Delete a specific webhook in a repository.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.webhooks_delete(owner, repo, identifier, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str owner: (required)
:param str repo: (required)
:param str identifier: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.webhooks_delete_with_http_info(owner, repo, identifier, **kwargs)
else:
(data) = self.webhooks_delete_with_http_info(owner, repo, identifier, **kwargs)
return data
def webhooks_delete_with_http_info(self, owner, repo, identifier, **kwargs):
"""
Delete a specific webhook in a repository.
Delete a specific webhook in a repository.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.webhooks_delete_with_http_info(owner, repo, identifier, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str owner: (required)
:param str repo: (required)
:param str identifier: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner', 'repo', 'identifier']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method webhooks_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner' is set
if ('owner' not in params) or (params['owner'] is None):
raise ValueError("Missing the required parameter `owner` when calling `webhooks_delete`")
# verify the required parameter 'repo' is set
if ('repo' not in params) or (params['repo'] is None):
raise ValueError("Missing the required parameter `repo` when calling `webhooks_delete`")
# verify the required parameter 'identifier' is set
if ('identifier' not in params) or (params['identifier'] is None):
raise ValueError("Missing the required parameter `identifier` when calling `webhooks_delete`")
collection_formats = {}
path_params = {}
if 'owner' in params:
path_params['owner'] = params['owner']
if 'repo' in params:
path_params['repo'] = params['repo']
if 'identifier' in params:
path_params['identifier'] = params['identifier']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['apikey']
return self.api_client.call_api('/webhooks/{owner}/{repo}/{identifier}/', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def webhooks_list(self, owner, repo, **kwargs):
"""
Get a list of all webhooks in a repository.
Get a list of all webhooks in a repository.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.webhooks_list(owner, repo, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str owner: (required)
:param str repo: (required)
:param int page: A page number within the paginated result set.
:param int page_size: Number of results to return per page.
:return: list[RepositoryWebhook]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.webhooks_list_with_http_info(owner, repo, **kwargs)
else:
(data) = self.webhooks_list_with_http_info(owner, repo, **kwargs)
return data
def webhooks_list_with_http_info(self, owner, repo, **kwargs):
"""
Get a list of all webhooks in a repository.
Get a list of all webhooks in a repository.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.webhooks_list_with_http_info(owner, repo, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str owner: (required)
:param str repo: (required)
:param int page: A page number within the paginated result set.
:param int page_size: Number of results to return per page.
:return: list[RepositoryWebhook]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner', 'repo', 'page', 'page_size']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method webhooks_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner' is set
if ('owner' not in params) or (params['owner'] is None):
raise ValueError("Missing the required parameter `owner` when calling `webhooks_list`")
# verify the required parameter 'repo' is set
if ('repo' not in params) or (params['repo'] is None):
raise ValueError("Missing the required parameter `repo` when calling `webhooks_list`")
collection_formats = {}
path_params = {}
if 'owner' in params:
path_params['owner'] = params['owner']
if 'repo' in params:
path_params['repo'] = params['repo']
query_params = []
if 'page' in params:
query_params.append(('page', params['page']))
if 'page_size' in params:
query_params.append(('page_size', params['page_size']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['apikey']
return self.api_client.call_api('/webhooks/{owner}/{repo}/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[RepositoryWebhook]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def webhooks_partial_update(self, owner, repo, identifier, **kwargs):
"""
Update a specific webhook in a repository.
Update a specific webhook in a repository.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.webhooks_partial_update(owner, repo, identifier, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str owner: (required)
:param str repo: (required)
:param str identifier: (required)
:param WebhooksPartialUpdate data:
:return: RepositoryWebhook
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.webhooks_partial_update_with_http_info(owner, repo, identifier, **kwargs)
else:
(data) = self.webhooks_partial_update_with_http_info(owner, repo, identifier, **kwargs)
return data
def webhooks_partial_update_with_http_info(self, owner, repo, identifier, **kwargs):
"""
Update a specific webhook in a repository.
Update a specific webhook in a repository.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.webhooks_partial_update_with_http_info(owner, repo, identifier, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str owner: (required)
:param str repo: (required)
:param str identifier: (required)
:param WebhooksPartialUpdate data:
:return: RepositoryWebhook
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner', 'repo', 'identifier', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method webhooks_partial_update" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner' is set
if ('owner' not in params) or (params['owner'] is None):
raise ValueError("Missing the required parameter `owner` when calling `webhooks_partial_update`")
# verify the required parameter 'repo' is set
if ('repo' not in params) or (params['repo'] is None):
raise ValueError("Missing the required parameter `repo` when calling `webhooks_partial_update`")
# verify the required parameter 'identifier' is set
if ('identifier' not in params) or (params['identifier'] is None):
raise ValueError("Missing the required parameter `identifier` when calling `webhooks_partial_update`")
collection_formats = {}
path_params = {}
if 'owner' in params:
path_params['owner'] = params['owner']
if 'repo' in params:
path_params['repo'] = params['repo']
if 'identifier' in params:
path_params['identifier'] = params['identifier']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['apikey']
return self.api_client.call_api('/webhooks/{owner}/{repo}/{identifier}/', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RepositoryWebhook',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def webhooks_read(self, owner, repo, identifier, **kwargs):
"""
Views for working with repository webhooks.
Views for working with repository webhooks.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.webhooks_read(owner, repo, identifier, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str owner: (required)
:param str repo: (required)
:param str identifier: (required)
:return: RepositoryWebhook
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.webhooks_read_with_http_info(owner, repo, identifier, **kwargs)
else:
(data) = self.webhooks_read_with_http_info(owner, repo, identifier, **kwargs)
return data
def webhooks_read_with_http_info(self, owner, repo, identifier, **kwargs):
"""
Views for working with repository webhooks.
Views for working with repository webhooks.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.webhooks_read_with_http_info(owner, repo, identifier, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str owner: (required)
:param str repo: (required)
:param str identifier: (required)
:return: RepositoryWebhook
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner', 'repo', 'identifier']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method webhooks_read" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner' is set
if ('owner' not in params) or (params['owner'] is None):
raise ValueError("Missing the required parameter `owner` when calling `webhooks_read`")
# verify the required parameter 'repo' is set
if ('repo' not in params) or (params['repo'] is None):
raise ValueError("Missing the required parameter `repo` when calling `webhooks_read`")
# verify the required parameter 'identifier' is set
if ('identifier' not in params) or (params['identifier'] is None):
raise ValueError("Missing the required parameter `identifier` when calling `webhooks_read`")
collection_formats = {}
path_params = {}
if 'owner' in params:
path_params['owner'] = params['owner']
if 'repo' in params:
path_params['repo'] = params['repo']
if 'identifier' in params:
path_params['identifier'] = params['identifier']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['apikey']
return self.api_client.call_api('/webhooks/{owner}/{repo}/{identifier}/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RepositoryWebhook',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
|
[
"six.iteritems"
] |
[((3329, 3356), 'six.iteritems', 'iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (3338, 3356), False, 'from six import iteritems\n'), ((8188, 8215), 'six.iteritems', 'iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (8197, 8215), False, 'from six import iteritems\n'), ((13317, 13344), 'six.iteritems', 'iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (13326, 13344), False, 'from six import iteritems\n'), ((18305, 18332), 'six.iteritems', 'iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (18314, 18332), False, 'from six import iteritems\n'), ((23566, 23593), 'six.iteritems', 'iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (23575, 23593), False, 'from six import iteritems\n')]
|
"""
Books Author model factory.
"""
import random
from factory import DjangoModelFactory, LazyAttribute
from books.models import Author
from .factory_faker import Faker
__all__ = (
'AuthorFactory',
'LimitedAuthorFactory',
'SingleAuthorFactory',
)
class BaseAuthorFactory(DjangoModelFactory):
"""Base author factory."""
salutation = Faker('text', max_nb_chars=10)
name = Faker('name')
email = Faker('email')
birth_date = Faker('date')
biography = Faker('text')
phone_number = Faker('phone_number')
website = Faker('url')
company = Faker('company')
company_phone_number = Faker('phone_number')
company_email = Faker('email')
company_website = Faker('url')
class Meta(object):
"""Meta class."""
model = Author
abstract = True
class AuthorFactory(BaseAuthorFactory):
"""Author factory."""
class LimitedAuthorFactory(BaseAuthorFactory):
"""Author factory, but limited to 20 authors."""
id = LazyAttribute(
lambda __x: random.randint(1, 20)
)
class Meta(object):
"""Meta class."""
django_get_or_create = ('id',)
class SingleAuthorFactory(BaseAuthorFactory):
"""Author factory, limited to a single author."""
id = 999999
name = "<NAME>"
email = "<EMAIL>"
class Meta(object):
"""Meta class."""
django_get_or_create = ('id',)
|
[
"random.randint"
] |
[((1036, 1057), 'random.randint', 'random.randint', (['(1)', '(20)'], {}), '(1, 20)\n', (1050, 1057), False, 'import random\n')]
|
import typing as T
from pathlib import Path
import re
class Globals:
cwd = Path(".").absolute()
def cd(p: str):
    # Update the tracked working directory (the computed path must be stored).
    Globals.cwd = Globals.cwd.joinpath(p).absolute()
def pwd() -> Path:
return Globals.cwd
def ls(p: str = ".") -> T.List[Path]:
path = Globals.cwd.joinpath(p)
    return list(path.iterdir())
def batch_rename(regex, name_template):
r = re.compile(regex)
for p in ls():
match = r.match(p.name)
if match is None:
continue
groups = match.groups()
new_name = name_template.format(*groups)
p.rename(p.parent.joinpath(new_name)) # TODO: name conflict
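# Illustrative usage (hypothetical file names): the regex capture groups fill
# the positional slots of name_template, e.g. rename IMG_0042.jpg to
# photo_0042.jpg with:
#   batch_rename(r"IMG_(\d+)\.jpg", "photo_{0}.jpg")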
|
[
"pathlib.Path",
"re.compile"
] |
[((362, 379), 're.compile', 're.compile', (['regex'], {}), '(regex)\n', (372, 379), False, 'import re\n'), ((80, 89), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (84, 89), False, 'from pathlib import Path\n')]
|
from input_functions import safe_input, numbered_choice
from display_funcs import decorate
class Location():
    def __init__(self, name, short_desc, long_desc, contains=None, level=0, starting_location=False, input_func=safe_input, output_func=print):
        self.name = name
        self.short_desc = short_desc
        self.long_desc = long_desc
        # Avoid a shared mutable default argument: give each Location its own list.
        self.contains = contains if contains is not None else []
self.starting_location = starting_location
self.level = level
self.input_func = input_func
self.output_func = output_func
self.first_visit = None
for l in self.contains:
if type(l) == Location:
l.contains.append(self)
def enter(self):
numbered_choice("What/Who/Where would you like to interact with?", [s.name for s in self.contains], self.contains, self.input_func, self.output_func).interact()
def interact(self):
self.output_func(self.name + " : " + self.short_desc)
def get_fancy(self):
return decorate(self)
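# Illustrative sketch (hypothetical names): nesting one Location inside another
# also gives the inner Location a back-reference to its parent:
#   hall = Location("Hall", "a hall", "A long, echoing hall")
#   house = Location("House", "a house", "A big house", contains=[hall])
#   house.enter()  # offers Hall as an interaction target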
|
[
"input_functions.numbered_choice",
"display_funcs.decorate"
] |
[((1002, 1016), 'display_funcs.decorate', 'decorate', (['self'], {}), '(self)\n', (1010, 1016), False, 'from display_funcs import decorate\n'), ((709, 862), 'input_functions.numbered_choice', 'numbered_choice', (['"""What/Who/Where would you like to interact with?"""', '[s.name for s in self.contains]', 'self.contains', 'self.input_func', 'self.output_func'], {}), "('What/Who/Where would you like to interact with?', [s.name for\n s in self.contains], self.contains, self.input_func, self.output_func)\n", (724, 862), False, 'from input_functions import safe_input, numbered_choice\n')]
|
from flask import Blueprint
from flask_login import current_user
from app.models import Category
category_routes = Blueprint('category', __name__)
@category_routes.route("")
def get_categories():
user_categories = Category.query.filter_by(user_id=current_user.id)
default_categories = Category.query.filter_by(user_id=None)
return {
"defaultCategories": [default_category.to_dict()
for default_category
in default_categories],
"userCategories": [user_category.to_dict()
for user_category
in user_categories]
}
|
[
"app.models.Category.query.filter_by",
"flask.Blueprint"
] |
[((115, 146), 'flask.Blueprint', 'Blueprint', (['"""category"""', '__name__'], {}), "('category', __name__)\n", (124, 146), False, 'from flask import Blueprint\n'), ((220, 269), 'app.models.Category.query.filter_by', 'Category.query.filter_by', ([], {'user_id': 'current_user.id'}), '(user_id=current_user.id)\n', (244, 269), False, 'from app.models import Category\n'), ((295, 333), 'app.models.Category.query.filter_by', 'Category.query.filter_by', ([], {'user_id': 'None'}), '(user_id=None)\n', (319, 333), False, 'from app.models import Category\n')]
|
import time
from mobyle.common.connection import connection
from mobyle.common import users
def groupFinder(userid, request):
    # Try to find the user in the database.
    user = connection.User.find_one({"email": userid})
    if user is None:
        # Unknown userid: returning None tells the auth policy to reject it.
        return None
    groups = user['groups']
    if user['admin']:
        groups.append('group:admin')
    return groups
|
[
"mobyle.common.connection.connection.User.find_one"
] |
[((176, 219), 'mobyle.common.connection.connection.User.find_one', 'connection.User.find_one', (["{'email': userid}"], {}), "({'email': userid})\n", (200, 219), False, 'from mobyle.common.connection import connection\n')]
|
import sys
import os
import json
import json_lines
output_file = 'output.jl'
if not os.path.exists(output_file):
print('Did not find expected output file!')
sys.exit(1)
change_id_to_change_number = {}
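# Merge the per-record ChangeIdToChangeNumber mappings from the crawl output
# into a single dictionary.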
with open(output_file, 'rb') as fp:
for item in json_lines.reader(fp):
if 'ChangeIdToChangeNumber' in item:
change_id_to_change_number.update(item['ChangeIdToChangeNumber'])
with open(os.path.join('mirror', 'ChangeIdToChangeNumber.json'), 'w') as fp:
json.dump(change_id_to_change_number, fp)
|
[
"json.dump",
"json_lines.reader",
"os.path.exists",
"os.path.join",
"sys.exit"
] |
[((86, 113), 'os.path.exists', 'os.path.exists', (['output_file'], {}), '(output_file)\n', (100, 113), False, 'import os\n'), ((167, 178), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (175, 178), False, 'import sys\n'), ((264, 285), 'json_lines.reader', 'json_lines.reader', (['fp'], {}), '(fp)\n', (281, 285), False, 'import json_lines\n'), ((493, 534), 'json.dump', 'json.dump', (['change_id_to_change_number', 'fp'], {}), '(change_id_to_change_number, fp)\n', (502, 534), False, 'import json\n'), ((422, 475), 'os.path.join', 'os.path.join', (['"""mirror"""', '"""ChangeIdToChangeNumber.json"""'], {}), "('mirror', 'ChangeIdToChangeNumber.json')\n", (434, 475), False, 'import os\n')]
|
# -*- coding: utf8 -*-
import kfp
def flip_coin():
return kfp.dsl.ContainerOp(
name='Flip a coin',
image='python:alpine3.6',
command=['python', '-c', """
import random
res = "heads" if random.randint(0, 1) == 0 else "tails"
with open('/output', 'w') as f:
f.write(res)
"""],
file_outputs={'output': '/output'}
)
def end():
return kfp.dsl.ContainerOp(name='End', image="alpine:3.6", command=["sh", "-c", 'echo "Flip coin ends"'])
def heads():
return kfp.dsl.ContainerOp(name='Heads', image="alpine:3.6", command=["sh", "-c", 'echo "it was heads"'])
def tails():
return kfp.dsl.ContainerOp(name='Tails', image="alpine:3.6", command=["sh", "-c", 'echo "it was tails"'])
@kfp.dsl.pipeline(name='Coin-flip', description='Flip a coin')
def coin_flip_pipeline():
flip_op = flip_coin()
result_op = None
with kfp.dsl.Condition(flip_op.output == 'heads'):
result_op = heads()
with kfp.dsl.Condition(flip_op.output == 'tails'):
result_op = tails()
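    # Note: both `with` blocks run at pipeline-compile time, so result_op ends
    # up referencing the op built in the last block; which branch actually
    # executes at runtime is still decided by each dsl.Condition.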
end_op = end()
end_op.after(result_op)
def main():
kfp.compiler.Compiler().compile(coin_flip_pipeline, __file__ + ".yaml")
if __name__ == '__main__':
main()
|
[
"kfp.dsl.ContainerOp",
"kfp.dsl.pipeline",
"kfp.compiler.Compiler",
"kfp.dsl.Condition"
] |
[((743, 804), 'kfp.dsl.pipeline', 'kfp.dsl.pipeline', ([], {'name': '"""Coin-flip"""', 'description': '"""Flip a coin"""'}), "(name='Coin-flip', description='Flip a coin')\n", (759, 804), False, 'import kfp\n'), ((65, 342), 'kfp.dsl.ContainerOp', 'kfp.dsl.ContainerOp', ([], {'name': '"""Flip a coin"""', 'image': '"""python:alpine3.6"""', 'command': '[\'python\', \'-c\',\n """\nimport random\nres = "heads" if random.randint(0, 1) == 0 else "tails"\nwith open(\'/output\', \'w\') as f:\n f.write(res)\n """\n ]', 'file_outputs': "{'output': '/output'}"}), '(name=\'Flip a coin\', image=\'python:alpine3.6\', command=[\n \'python\', \'-c\',\n """\nimport random\nres = "heads" if random.randint(0, 1) == 0 else "tails"\nwith open(\'/output\', \'w\') as f:\n f.write(res)\n """\n ], file_outputs={\'output\': \'/output\'})\n', (84, 342), False, 'import kfp\n'), ((391, 493), 'kfp.dsl.ContainerOp', 'kfp.dsl.ContainerOp', ([], {'name': '"""End"""', 'image': '"""alpine:3.6"""', 'command': '[\'sh\', \'-c\', \'echo "Flip coin ends"\']'}), '(name=\'End\', image=\'alpine:3.6\', command=[\'sh\', \'-c\',\n \'echo "Flip coin ends"\'])\n', (410, 493), False, 'import kfp\n'), ((516, 618), 'kfp.dsl.ContainerOp', 'kfp.dsl.ContainerOp', ([], {'name': '"""Heads"""', 'image': '"""alpine:3.6"""', 'command': '[\'sh\', \'-c\', \'echo "it was heads"\']'}), '(name=\'Heads\', image=\'alpine:3.6\', command=[\'sh\', \'-c\',\n \'echo "it was heads"\'])\n', (535, 618), False, 'import kfp\n'), ((641, 743), 'kfp.dsl.ContainerOp', 'kfp.dsl.ContainerOp', ([], {'name': '"""Tails"""', 'image': '"""alpine:3.6"""', 'command': '[\'sh\', \'-c\', \'echo "it was tails"\']'}), '(name=\'Tails\', image=\'alpine:3.6\', command=[\'sh\', \'-c\',\n \'echo "it was tails"\'])\n', (660, 743), False, 'import kfp\n'), ((887, 931), 'kfp.dsl.Condition', 'kfp.dsl.Condition', (["(flip_op.output == 'heads')"], {}), "(flip_op.output == 'heads')\n", (904, 931), False, 'import kfp\n'), ((970, 1014), 'kfp.dsl.Condition', 'kfp.dsl.Condition', (["(flip_op.output == 'tails')"], {}), "(flip_op.output == 'tails')\n", (987, 1014), False, 'import kfp\n'), ((1109, 1132), 'kfp.compiler.Compiler', 'kfp.compiler.Compiler', ([], {}), '()\n', (1130, 1132), False, 'import kfp\n')]
|
import pandas as pd
from dateutil.parser import parse
import numpy as np
from pandas_datareader.base import _BaseReader
import json
import re
class GoogleQuotesReader(_BaseReader):
"""Get current google quote"""
@property
def url(self):
return 'http://www.google.com/finance/info'
@property
def params(self):
if isinstance(self.symbols, pd.compat.string_types):
sym_list = self.symbols
else:
sym_list = ','.join(self.symbols)
params = {'q': sym_list}
return params
def _read_lines(self, out):
buffer = out.read()
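        # Google's endpoint prefixes its JSON payload with "// ", so find
        # that marker and parse only the bytes after it.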
m = re.search('// ', buffer)
result = json.loads(buffer[m.start() + len('// '):])
return pd.DataFrame([[float(x['cp']), float(x['l'].replace(',', '')),
np.datetime64(parse(x['lt']).isoformat())]
for x in result], columns=['change_pct',
'last', 'time'],
index=[x['t'] for x in result])
|
[
"dateutil.parser.parse",
"re.search"
] |
[((628, 652), 're.search', 're.search', (['"""// """', 'buffer'], {}), "('// ', buffer)\n", (637, 652), False, 'import re\n'), ((836, 850), 'dateutil.parser.parse', 'parse', (["x['lt']"], {}), "(x['lt'])\n", (841, 850), False, 'from dateutil.parser import parse\n')]
|
import matplotlib.pyplot as plt
import pandas as pd
def main():
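    # Flip to `if 1:` to re-scrape the language table from the CSDMS wiki;
    # otherwise the cached languages.csv is used.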
if 0:
data = pd.read_html("https://csdms.colorado.edu/wiki/CSDMS_models_by_numbers")[
2
]
languages = pd.DataFrame(
{"Count": data["Count"].values}, index=data["Program language"]
)
languages.to_csv("languages.csv")
else:
languages = pd.read_csv("languages.csv", index_col=0, header=0)
other = languages[languages["Count"] < 10]
languages.loc["Other", "Count"] += other["Count"].sum()
languages = languages[languages["Count"] >= 10]
languages.sort_index(inplace=True)
# explode = [0.1, 0.1, 0.1, 0.1, 0.0, 0.0, 0.1]
explode = [0.1] * len(languages)
plt.pie(
languages["Count"],
autopct="%1.1f%%",
labels=languages.index,
explode=explode,
shadow=True,
)
plt.show()
if __name__ == "__main__":
main()
|
[
"pandas.read_html",
"pandas.DataFrame",
"matplotlib.pyplot.show",
"pandas.read_csv",
"matplotlib.pyplot.pie"
] |
[((728, 832), 'matplotlib.pyplot.pie', 'plt.pie', (["languages['Count']"], {'autopct': '"""%1.1f%%"""', 'labels': 'languages.index', 'explode': 'explode', 'shadow': '(True)'}), "(languages['Count'], autopct='%1.1f%%', labels=languages.index,\n explode=explode, shadow=True)\n", (735, 832), True, 'import matplotlib.pyplot as plt\n'), ((880, 890), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (888, 890), True, 'import matplotlib.pyplot as plt\n'), ((209, 286), 'pandas.DataFrame', 'pd.DataFrame', (["{'Count': data['Count'].values}"], {'index': "data['Program language']"}), "({'Count': data['Count'].values}, index=data['Program language'])\n", (221, 286), True, 'import pandas as pd\n'), ((381, 432), 'pandas.read_csv', 'pd.read_csv', (['"""languages.csv"""'], {'index_col': '(0)', 'header': '(0)'}), "('languages.csv', index_col=0, header=0)\n", (392, 432), True, 'import pandas as pd\n'), ((91, 162), 'pandas.read_html', 'pd.read_html', (['"""https://csdms.colorado.edu/wiki/CSDMS_models_by_numbers"""'], {}), "('https://csdms.colorado.edu/wiki/CSDMS_models_by_numbers')\n", (103, 162), True, 'import pandas as pd\n')]
|
import os
import shutil
path = r'C:\Users\<NAME>\Desktop\Work'
destination = 'F:\\HERE'
allfile = os.listdir(path)
for f in allfile:
    if f.endswith('.txt'):
        #print(f)
        source = os.path.join(path, f)
        dest = os.path.join(destination, f)
        print(source)
        print(dest)
        shutil.copy2(source, dest)
print('----')
|
[
"shutil.copy2",
"os.path.join",
"os.listdir"
] |
[((104, 120), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (114, 120), False, 'import os\n'), ((206, 227), 'os.path.join', 'os.path.join', (['path', 'f'], {}), '(path, f)\n', (218, 227), False, 'import os\n'), ((243, 271), 'os.path.join', 'os.path.join', (['destination', 'f'], {}), '(destination, f)\n', (255, 271), False, 'import os\n'), ((324, 350), 'shutil.copy2', 'shutil.copy2', (['source', 'dest'], {}), '(source, dest)\n', (336, 350), False, 'import shutil\n')]
|
from rubenesque.codecs.sec import encode, decode
from rubenesque.signatures import ecdsa
import rubenesque.curves
from hashlib import sha256
from ..formatters import encode_sig, decode_sig
from ...convert import int_to_hex_str
from .common import point_to_hex_str, split_str_to_halves
from .errors import (
UnknownPointFormatError, UnknownSignatureFormatError,
UnknownPublicKeyFormatError
)
backend_name = 'rubenesque'
class CurveByAttrName:
@property
def P256(self):
return rubenesque.curves.find('secp256r1')
curve = CurveByAttrName()
def gen_private_key(curve=curve.P256, hashfunc=sha256):
priv_key = curve.private_key()
return int_to_hex_str(priv_key)
def get_public_key(priv_key, curve=curve.P256, hashfunc=sha256, fmt='RAW'):
if fmt in ['RAW', '04']:
priv_key = int(priv_key, 16)
pub_key_obj = curve.generator() * priv_key
return point_to_hex_str(pub_key_obj, fmt=fmt)
else:
raise UnknownPublicKeyFormatError("fmt: '%s'" % fmt)
def gen_keypair(curve=curve.P256, hashfunc=sha256, pub_key_fmt='RAW'):
priv_key = gen_private_key(curve=curve, hashfunc=hashfunc)
pub_key = get_public_key(priv_key, curve=curve, hashfunc=hashfunc,
fmt=pub_key_fmt)
return priv_key, pub_key
def sign(priv_key, data, hashfunc=sha256, curve=curve.P256, sign_fmt='DER',
sign_size=32):
h = hashfunc(data.encode('utf-8')).digest()
r, s = ecdsa.sign(curve, int(priv_key, 16), h)
if sign_fmt in ['DER', 'RAW']:
return encode_sig(r, s, fmt=sign_fmt, size=sign_size).hex()
else:
raise UnknownSignatureFormatError("fmt: '%s'" % sign_fmt)
def verify(pub_key, data, signature, hashfunc=sha256, curve=curve.P256,
sign_fmt='DER', sign_size=32, pub_key_fmt='RAW'):
if pub_key_fmt == 'RAW':
pub_key_encoded = pub_key
elif pub_key_fmt == '04':
pub_key_encoded = pub_key[2:]
else:
raise UnknownPublicKeyFormatError("fmt: '%s'" % pub_key_fmt)
x, y = split_str_to_halves(pub_key_encoded)
x, y = int(x, 16), int(y, 16)
pub_key_point = curve(x, y)
if sign_fmt in ['DER', 'RAW']:
r, s = decode_sig(bytes.fromhex(signature), fmt=sign_fmt)
else:
raise UnknownSignatureFormatError("fmt: '%s'" % sign_fmt)
data_bytes = data.encode()
h = hashfunc(data_bytes).digest()
return ecdsa.verify(pub_key_point, h, r, s)
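# Minimal round-trip sketch (illustrative; uses the defaults defined above):
#   priv_key, pub_key = gen_keypair()
#   signature = sign(priv_key, "message")
#   assert verify(pub_key, "message", signature)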
|
[
"rubenesque.signatures.ecdsa.verify"
] |
[((2390, 2426), 'rubenesque.signatures.ecdsa.verify', 'ecdsa.verify', (['pub_key_point', 'h', 'r', 's'], {}), '(pub_key_point, h, r, s)\n', (2402, 2426), False, 'from rubenesque.signatures import ecdsa\n')]
|
from datetime import date
from io import BytesIO
from django.core.mail import send_mail
from django.http import HttpResponse
from django.template import loader
from django.template.loader import get_template
from xhtml2pdf import pisa
from .models import *
def match_skill(job_list, skill_list, preferred_job_list):
recommendation_list = []
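    # Naive keyword matching: split each skill title (and each preferred-job
    # description) into words and recommend any job whose requirements or
    # title contains one of them.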
for skill in skill_list:
for job in job_list:
split_text = skill.skill_title.lower().split(' ')
for s in split_text:
index = job.requirements.lower().find(s)
index1 = job.job_title.lower().find(s)
if (index >= 0 or index1 >= 0) and job not in recommendation_list:
recommendation_list.append(job)
for preferred_job in preferred_job_list:
for job in job_list:
split_text = preferred_job.details.lower().split(' ')
for s in split_text:
index = job.requirements.lower().find(s)
index1 = job.job_title.lower().find(s)
if (index >= 0 or index1 >= 0) and job not in recommendation_list:
recommendation_list.append(job)
return recommendation_list
def rateAvg(rates, length):
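    # Return the average rating: a float when the mean is fractional, an int
    # (via floor division) when it divides evenly; 0 for an empty rating list.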
summation = 0
if length == 0:
return 0
else:
for r in rates:
summation = summation + r.rate
if (summation / length) % 1 > 0:
return summation / length
else:
return summation // length
def increaseViewCount(user_one, user_two):
if user_one.is_company:
applicant = ApplicantProfileModel.objects.get(user=user_two)
if applicant.totalViewCount is None:
applicant.totalViewCount = 0
applicant.totalViewCount = applicant.totalViewCount + 1
applicant.save()
elif user_one.is_applicant:
company = CompanyProfileModel.objects.get(user=user_two)
if company.totalViewCount is None:
company.totalViewCount = 0
company.totalViewCount = company.totalViewCount + 1
company.save()
ProfileViewDetails.objects.create(
viewedBy=user_one,
viewed=user_two,
)
return
def extractFirstName(name):
first_name = name.split(' ')
return first_name[0]
def calculate_age(born):
today = date.today()
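    # The tuple comparison evaluates to True (1) when the birthday has not yet
    # occurred this year, so one year is subtracted in that case.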
return today.year - born.year - ((today.month, today.day) < (born.month, born.day))
def add_to_employee_keyword(user, keyword):
if keyword is not None:
EmployeeSearchKeywordModel.objects.create(
searched_by=user,
searched_for=keyword,
)
return
def add_to_job_keyword(user, keyword):
if keyword is not None:
JobSearchKeywordModel.objects.create(
searched_by=user,
searched_for=keyword,
)
return
def deactivate_user(user):
user.is_active = False
user.save()
return
def render_to_pdf(template_src, context=None):
    template = get_template(template_src)
    html = template.render(context or {})
result = BytesIO()
pdf = pisa.pisaDocument(BytesIO(html.encode("ISO-8859-1")), result)
if not pdf.err:
return HttpResponse(result.getvalue(), content_type='application/pdf')
return None
def send_registration_mail(user):
subject = "Welcome to JobLand"
from_email = '<EMAIL>'
home_link = "http://127.0.0.1:8000"
about_link = "http://127.0.0.1:8000"
contact_link = "http://127.0.0.1:8000"
context = {
"user": user,
"home_link": home_link,
"about_link": about_link,
"contact_link": contact_link,
}
msg_plain = loader.render_to_string('email/registration.txt', context)
msg_html = loader.render_to_string('email/registration.html', context)
send_mail(subject, msg_plain, from_email, [user.email], fail_silently=True, html_message=msg_html)
return
|
[
"io.BytesIO",
"django.core.mail.send_mail",
"django.template.loader.render_to_string",
"datetime.date.today",
"django.template.loader.get_template"
] |
[((2297, 2309), 'datetime.date.today', 'date.today', ([], {}), '()\n', (2307, 2309), False, 'from datetime import date\n'), ((2953, 2979), 'django.template.loader.get_template', 'get_template', (['template_src'], {}), '(template_src)\n', (2965, 2979), False, 'from django.template.loader import get_template\n'), ((3029, 3038), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (3036, 3038), False, 'from io import BytesIO\n'), ((3613, 3671), 'django.template.loader.render_to_string', 'loader.render_to_string', (['"""email/registration.txt"""', 'context'], {}), "('email/registration.txt', context)\n", (3636, 3671), False, 'from django.template import loader\n'), ((3687, 3746), 'django.template.loader.render_to_string', 'loader.render_to_string', (['"""email/registration.html"""', 'context'], {}), "('email/registration.html', context)\n", (3710, 3746), False, 'from django.template import loader\n'), ((3752, 3854), 'django.core.mail.send_mail', 'send_mail', (['subject', 'msg_plain', 'from_email', '[user.email]'], {'fail_silently': '(True)', 'html_message': 'msg_html'}), '(subject, msg_plain, from_email, [user.email], fail_silently=True,\n html_message=msg_html)\n', (3761, 3854), False, 'from django.core.mail import send_mail\n')]
|
#!/usr/bin/env python
# Program: $Id: $
# Author: <NAME> <<EMAIL>>
# Description: Example use of sc_warts_writer library.
#
import sys
import time
from sc_warts_writer import WartsWriter, WartsTrace
if __name__ == "__main__":
assert len(sys.argv) == 2
now = time.time()
w = WartsWriter(sys.argv[1])
w.write_list(1, 1, 'sc_sample_writer demo')
w.write_cycle(1, 1, 1, now)
tr = WartsTrace()
tr.add({'listid' : 1, 'srcport' : 1234, 'dstport' : 80,
'srcaddr' : '1.2.3.4', 'dstaddr' : '5.6.7.8',
'attempts' : 2, 'tracetyp' : 2, 'probehop' : 7,
'probesent' : 5, 'firsttl' : 4, 'timeval' : now + 1})
# hopflags (SCAMPER_TRACE_HOP_FLAG_REPLY_TTL)
reply = {'addr' : '4.4.4.4', 'rtt' : 23456,
'ipid' : 1234, 'probesize' : 60,
'replysize' : 54, 'probettl' : 4,
'replyttl' : 60, 'tos' : 0,
'icmp' : 4, 'hopflags' : 0x10}
tr.add_reply(reply)
reply = {'addr' : '6.6.6.6', 'rtt' : 834567,
'ipid' : 1234, 'probesize' : 60,
'replysize' : 54, 'probettl' : 6,
'replyttl' : 58, 'tos' : 0,
'icmp' : 4, 'hopflags' : 0x10}
tr.add_reply(reply)
w.write_object(tr)
# finish
w.write_cycle_stop(1, now+10)
|
[
"sc_warts_writer.WartsWriter",
"sc_warts_writer.WartsTrace",
"time.time"
] |
[((281, 292), 'time.time', 'time.time', ([], {}), '()\n', (290, 292), False, 'import time\n'), ((299, 323), 'sc_warts_writer.WartsWriter', 'WartsWriter', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (310, 323), False, 'from sc_warts_writer import WartsWriter, WartsTrace\n'), ((407, 419), 'sc_warts_writer.WartsTrace', 'WartsTrace', ([], {}), '()\n', (417, 419), False, 'from sc_warts_writer import WartsWriter, WartsTrace\n')]
|
from portfolio_admin.models import Project
from rest_framework import serializers
class ProjectSerializer(serializers.ModelSerializer):
skills = serializers.StringRelatedField(many=True)
class Meta:
model = Project
exclude = ['id', 'created_at', 'updated_at', 'portfolio', 'private']
|
[
"rest_framework.serializers.StringRelatedField"
] |
[((151, 192), 'rest_framework.serializers.StringRelatedField', 'serializers.StringRelatedField', ([], {'many': '(True)'}), '(many=True)\n', (181, 192), False, 'from rest_framework import serializers\n')]
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import sys
import tempfile
from observations.r.friendship import friendship
def test_friendship():
"""Test module friendship.py by downloading
friendship.csv and testing shape of
extracted data has 0 rows and 7 columns
"""
test_path = tempfile.mkdtemp()
x_train, metadata = friendship(test_path)
try:
assert x_train.shape == (0, 7)
except:
shutil.rmtree(test_path)
    raise
|
[
"shutil.rmtree",
"tempfile.mkdtemp",
"observations.r.friendship.friendship"
] |
[((374, 392), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (390, 392), False, 'import tempfile\n'), ((415, 436), 'observations.r.friendship.friendship', 'friendship', (['test_path'], {}), '(test_path)\n', (425, 436), False, 'from observations.r.friendship import friendship\n'), ((493, 517), 'shutil.rmtree', 'shutil.rmtree', (['test_path'], {}), '(test_path)\n', (506, 517), False, 'import shutil\n')]
|
# ============================================================================
# ~/cerebstats/cerebstats/stat_scores/chi2GOFScore.py
#
# This py-file contains custom score functions initiated by
#
# from cerebstats import scoreScores
# from cerebstats.scoreScores import ABCScore
# ============================================================================
import numpy as np
from scipy.stats import chisquare
import sciunit
# ==============================Chi2GOFScore==================================
class Chi2GOFScore(sciunit.Score):
"""
    Compute the chi-squared statistic for a goodness-of-fit test of proportions.
One may think of this as a **one-way contingency table.**
+--------------+-------------------------------------------------------------+
| sample size | :math:`k` categories of a categorial variable of interest |
+ +--------------+--------------+----------------+--------------+
| :math:`n` | :math:`x_1` | :math:`x_2` | :math:`\\ldots` | :math:`x_k` |
+==============+==============+==============+================+==============+
| observations | :math:`O_1` | :math:`O_2` | :math:`\\ldots` | :math:`O_k` |
+--------------+--------------+--------------+----------------+--------------+
| probabilities| :math:`p_1` | :math:`p_2` | :math:`\\ldots` | :math:`p_k` |
+--------------+--------------+--------------+----------------+--------------+
| expected | :math:`np_1` | :math:`np_2` | :math:`\\ldots` | :math:`np_k` |
+--------------+--------------+--------------+----------------+--------------+
Notice that for probabilities of *k* categories :math:`\\sum_{\\forall i} p_i = 1`. The expected counts for each category can be derived from it (or already given) such that :math:`\\sum_{\\forall i} np_i = n`.
    .. table:: Quantities used in the chi-squared goodness-of-fit test
==================== ==============================================================================
Definitions Interpretation
==================== ==============================================================================
:math:`n` sample size; total number of experiments done
:math:`k` number of categorical variables
:math:`O_i` observed count (frequency) for :math:`i^{th}` variable
:math:`p_i` probability for :math:`i^{th}` category such that
:math:`\\sum_{\\forall i} p_i = 1`
:math:`E_i` expected count for :math:`i^{th}` category such that
:math:`E_i = n p_i`
test-statistic :math:`\\chi^2 = \\sum_{\\forall i} \\frac{(O_i - E_i)^2}{E_i}`
:math:`df` degrees of freedom, :math:`df = k-1`
==================== ==============================================================================
*Note* the modification made when compared with a two-way :math:`\\chi^2` test is
- the calculation of expected counts :math:`E_i = n p_i`
- the degree of freedom :math:`df = k-1`
This class uses `scipy.stats.chisquare <https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.chisquare.html>`_.
**Use Case:**
::
x = Chi2GOFScore.compute( observation, prediction )
score = Chi2GOFScore(x)
*Note*: As part of the `SciUnit <http://scidash.github.io/sciunit.html>`_ framework this custom :py:class:`.Chi2GOFScore` should have the following methods,
* :py:meth:`.compute` (class method)
* :py:meth:`.sort_key` (property)
* :py:meth:`.__str__`
"""
#_allowed_types = (float,)
_description = ( "ZScoreForSignTest gives the z-statistic applied to medians. "
+ "The experimental data (observation) is taken as the sample. "
+ "The sample statistic is 'median' or computed median form 'raw_data'. "
+ "The null-value is the 'some' specified value whic is taken to be the predicted value generated from running the model. " )
@classmethod
def compute(cls, observation, prediction):
"""
+---------------------+-----------------------------------------------------------------------+
| Argument | Value type |
+=====================+=======================================================================+
| first argument |dictionary; observation/experimental data must have keys "sample_size" |
| |with a number as its value and "observed_freq" whose value is an array |
+---------------------+-----------------------------------------------------------------------+
| second argument |dictionary; model prediction must have either "probabilities" or |
| |"expected" whose value is an array (same length as "observed_freq") |
+---------------------+-----------------------------------------------------------------------+
*Note:*
* chi squared tests (for goodness-of-fit or contingency table) are by nature two-sided, so there is no option for one-sidedness.
"""
name = "chi2_goodness_of_fit_test_for_proportions"
if "probabilities" in prediction:
probabilities = np.array( prediction["probabilities"] )
expected_counts = observation["sample_size"] * probabilities
elif "expected" in prediction:
expected_counts = np.array( prediction["expected"] )
probabilities = expected_counts / observation["sample_size"]
#
k_categories = expected_counts.size
score, pvalue = chisquare( observation["observed_freq"], f_exp = expected_counts )
#return self.score # chi2_statistic
return {"name": name, "sample_statistic": probabilities, "expected_values": expected_counts,
"test_statistic": score, "df": k_categories-1, "p_value": pvalue}
@property
def sort_key(self):
return self.score
def __str__(self):
return "ChiSqGOFScore is " + str(self.score)
# ============================================================================
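# Hedged usage sketch of the classmethod above (values illustrative): a
# two-category goodness-of-fit check with n=100 and observed counts 44 and 56.
if __name__ == '__main__':
    observation = {"sample_size": 100, "observed_freq": [44, 56]}
    prediction = {"probabilities": [0.5, 0.5]}
    result = Chi2GOFScore.compute(observation, prediction)
    print(result["test_statistic"], result["df"], result["p_value"])  # df = k - 1 = 1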
|
[
"numpy.array",
"scipy.stats.chisquare"
] |
[((5745, 5807), 'scipy.stats.chisquare', 'chisquare', (["observation['observed_freq']"], {'f_exp': 'expected_counts'}), "(observation['observed_freq'], f_exp=expected_counts)\n", (5754, 5807), False, 'from scipy.stats import chisquare\n'), ((5377, 5414), 'numpy.array', 'np.array', (["prediction['probabilities']"], {}), "(prediction['probabilities'])\n", (5385, 5414), True, 'import numpy as np\n'), ((5559, 5591), 'numpy.array', 'np.array', (["prediction['expected']"], {}), "(prediction['expected'])\n", (5567, 5591), True, 'import numpy as np\n')]
|
"""
A simple script for generating sample data for learning to give personalised offers.
"""
import json
import pandas as pd
import numpy as np
import gzip
import random
import logging
GENERATE_INBALANCED_DATA = False
NUM_INTERACTIONS_PER_USER = 3
FIRST_TIMESTAMP = 1591803782 # 2020-06-10, 18:43:02
LAST_TIMESTAMP = 1599579782 # 2020-09-08, 18:43:02
RANDOM_SEED = 1
IN_PRODUCTS_FILENAME = "src/products/src/products-service/data/products.yaml"
IN_USERS_FILENAME = "src/users/src/users-service/data/users.json.gz"
IN_OFFERS_FILENAME = "src/offers/src/offers-service/data/offers.json"
# Where to put the generated data so that it is picked up by stage.sh
GENERATED_DATA_ROOT = "src/aws-lambda/personalize-pre-create-resources/data"
def generate_data(interactions_filename, users_df, offers_df):
"""Script for writing to a file simulated user-offer interactions"""
random.seed(RANDOM_SEED)
np.random.seed(RANDOM_SEED)
num_users = users_df.shape[0]
num_interactions = NUM_INTERACTIONS_PER_USER * num_users
if GENERATE_INBALANCED_DATA:
# We may wish to assume probability is proportional to ID to show off how we can add
# business logic around Personalize
offer_probs = offers_df.id.values.astype(float)
else:
# Or we can work around imbalance at the data munging stage
offer_probs = np.ones(len(offers_df.id.values), dtype=float)
# Normalise so that we have probabilities
offer_probs = offer_probs / offer_probs.sum()
# generate timestamps
time_between_events = (LAST_TIMESTAMP - FIRST_TIMESTAMP) / num_interactions
timestamps = np.arange(FIRST_TIMESTAMP, LAST_TIMESTAMP, time_between_events).astype(int)
# pre-shuffle them as we will be using them as a randomising key when we sort by timestamp
np.random.shuffle(timestamps)
# generate all users Ids
sample_user_ids = np.tile(users_df['id'].values.astype(int), NUM_INTERACTIONS_PER_USER)
# only one event type
event_type = ['OfferConverted'] * num_interactions
# we sort it to ensure there is a correlation between user ID and offer ID.
# This correlation is what the personalisation will learn.
sampled_offers = sorted(np.random.choice(offers_df.id.values, num_interactions, p=offer_probs))
interactions_df = pd.DataFrame({'ITEM_ID': sampled_offers,
'USER_ID': sample_user_ids,
'EVENT_TYPE': event_type,
'TIMESTAMP': timestamps})
# by sorting by timestamp, other elements get shuffled
interactions_df = interactions_df.sort_values('TIMESTAMP')
with open(interactions_filename, 'w') as outfile:
interactions_df.to_csv(outfile, index=False)
globals().update(locals())  # Allows inspecting variables in a console after the script has run (e.g. under ipython).
print('Generation script finished - created offers dataset')
if __name__ == '__main__':
# User info is stored in the repository - it was automatically generated
with gzip.open(IN_USERS_FILENAME, 'r') as f:
users = json.load(f)
users_df = pd.DataFrame(users)
# Offers info is stored in repository
with open(IN_OFFERS_FILENAME, 'r') as f:
offers = json.load(f)
offers_df = pd.DataFrame(offers)
logging.basicConfig(level=logging.INFO)
generate_data(GENERATED_DATA_ROOT + '/offer_interactions.csv', users_df, offers_df)
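# Hedged smoke test of generate_data with tiny in-memory frames (paths illustrative):
#   users_df = pd.DataFrame({'id': [1, 2, 3]})
#   offers_df = pd.DataFrame({'id': [10, 20]})
#   generate_data('/tmp/offer_interactions.csv', users_df, offers_df)
# Each user then contributes NUM_INTERACTIONS_PER_USER rows, sorted by TIMESTAMP.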
|
[
"pandas.DataFrame",
"json.load",
"numpy.random.seed",
"gzip.open",
"logging.basicConfig",
"random.seed",
"numpy.arange",
"numpy.random.choice",
"numpy.random.shuffle"
] |
[((880, 904), 'random.seed', 'random.seed', (['RANDOM_SEED'], {}), '(RANDOM_SEED)\n', (891, 904), False, 'import random\n'), ((909, 936), 'numpy.random.seed', 'np.random.seed', (['RANDOM_SEED'], {}), '(RANDOM_SEED)\n', (923, 936), True, 'import numpy as np\n'), ((1803, 1832), 'numpy.random.shuffle', 'np.random.shuffle', (['timestamps'], {}), '(timestamps)\n', (1820, 1832), True, 'import numpy as np\n'), ((2303, 2427), 'pandas.DataFrame', 'pd.DataFrame', (["{'ITEM_ID': sampled_offers, 'USER_ID': sample_user_ids, 'EVENT_TYPE':\n event_type, 'TIMESTAMP': timestamps}"], {}), "({'ITEM_ID': sampled_offers, 'USER_ID': sample_user_ids,\n 'EVENT_TYPE': event_type, 'TIMESTAMP': timestamps})\n", (2315, 2427), True, 'import pandas as pd\n'), ((3148, 3167), 'pandas.DataFrame', 'pd.DataFrame', (['users'], {}), '(users)\n', (3160, 3167), True, 'import pandas as pd\n'), ((3303, 3323), 'pandas.DataFrame', 'pd.DataFrame', (['offers'], {}), '(offers)\n', (3315, 3323), True, 'import pandas as pd\n'), ((3329, 3368), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (3348, 3368), False, 'import logging\n'), ((2208, 2278), 'numpy.random.choice', 'np.random.choice', (['offers_df.id.values', 'num_interactions'], {'p': 'offer_probs'}), '(offers_df.id.values, num_interactions, p=offer_probs)\n', (2224, 2278), True, 'import numpy as np\n'), ((3063, 3096), 'gzip.open', 'gzip.open', (['IN_USERS_FILENAME', '"""r"""'], {}), "(IN_USERS_FILENAME, 'r')\n", (3072, 3096), False, 'import gzip\n'), ((3119, 3131), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3128, 3131), False, 'import json\n'), ((3273, 3285), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3282, 3285), False, 'import json\n'), ((1628, 1691), 'numpy.arange', 'np.arange', (['FIRST_TIMESTAMP', 'LAST_TIMESTAMP', 'time_between_events'], {}), '(FIRST_TIMESTAMP, LAST_TIMESTAMP, time_between_events)\n', (1637, 1691), True, 'import numpy as np\n')]
|
import mock
import unittest
from mock import patch, Mock, MagicMock
import boto3
from botocore.stub import Stubber
import sys
sys.path.append("..")
import awslambda
class TestHandler(unittest.TestCase):
def test_handler(self):
"""
Test the handler operates as expected.
"""
pass
# test_event = MagicMock()
# test_context = MagicMock()
# aws_account_id.return_value = '1234567890'
# index.handler(test_event, test_context)
def main():
unittest.main()
if __name__ == '__main__':
main()
|
[
"sys.path.append",
"unittest.main"
] |
[((127, 148), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (142, 148), False, 'import sys\n'), ((512, 527), 'unittest.main', 'unittest.main', ([], {}), '()\n', (525, 527), False, 'import unittest\n')]
|
from kmodes.util.dissim import num_TZ_dissim, cat_TZ_dissim
from sklearn.decomposition import PCA
import numpy
centroid = [
[1,2,3],
[5,6,6]
]
Xnum = [
[54,2,44],
[89,6,4],
[1.5,0,-5],
[5346,874,212]
]
centroid = numpy.array(centroid)
Xnum = numpy.array(Xnum)
x = numpy.array([[1,2,3],[2,3,3],[12938,9999,666],[54,11,21354]])
pca = PCA(n_components=1)
newx = pca.fit_transform(x)
print(newx)
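# Fraction of the total variance kept by the single retained component;
# explained_variance_ratio_ is the standard scikit-learn PCA attribute.
print(pca.explained_variance_ratio_)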
|
[
"numpy.array",
"sklearn.decomposition.PCA"
] |
[((236, 257), 'numpy.array', 'numpy.array', (['centroid'], {}), '(centroid)\n', (247, 257), False, 'import numpy\n'), ((265, 282), 'numpy.array', 'numpy.array', (['Xnum'], {}), '(Xnum)\n', (276, 282), False, 'import numpy\n'), ((288, 360), 'numpy.array', 'numpy.array', (['[[1, 2, 3], [2, 3, 3], [12938, 9999, 666], [54, 11, 21354]]'], {}), '([[1, 2, 3], [2, 3, 3], [12938, 9999, 666], [54, 11, 21354]])\n', (299, 360), False, 'import numpy\n'), ((356, 375), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': '(1)'}), '(n_components=1)\n', (359, 375), False, 'from sklearn.decomposition import PCA\n')]
|
"""
The Sims 4 Community Library is licensed under the Creative Commons Attribution 4.0 International public license (CC BY 4.0).
https://creativecommons.org/licenses/by/4.0/
https://creativecommons.org/licenses/by/4.0/legalcode
Copyright (c) COLONOLNUTTY
"""
from typing import Union, List, Tuple, Iterator
from buffs.buff import Buff
from distributor.shared_messages import IconInfoData
from protocolbuffers.Localization_pb2 import LocalizedString
from server_commands.argument_helpers import TunableInstanceParam, OptionalTargetParam
from sims.sim_info import SimInfo
from sims4.commands import Command, CommandType, CheatOutput
from sims4.resources import Types
from sims4communitylib.enums.buffs_enum import CommonBuffId
from sims4communitylib.enums.strings_enum import CommonStringId
from sims4communitylib.enums.types.component_types import CommonComponentType
from sims4communitylib.exceptions.common_exceptions_handler import CommonExceptionHandler
from sims4communitylib.logging.has_class_log import HasClassLog
from sims4communitylib.mod_support.mod_identity import CommonModIdentity
from sims4communitylib.modinfo import ModInfo
from sims4communitylib.notifications.common_basic_notification import CommonBasicNotification
from sims4communitylib.utils.common_component_utils import CommonComponentUtils
from sims4communitylib.utils.localization.common_localization_utils import CommonLocalizationUtils
from sims4communitylib.utils.sims.common_sim_name_utils import CommonSimNameUtils
from sims4communitylib.utils.sims.common_sim_utils import CommonSimUtils
class CommonBuffUtils(HasClassLog):
"""Utilities for manipulating Buffs on Sims.
"""
# noinspection PyMissingOrEmptyDocstring
@classmethod
def get_mod_identity(cls) -> CommonModIdentity:
return ModInfo.get_identity()
# noinspection PyMissingOrEmptyDocstring
@classmethod
def get_log_identifier(cls) -> str:
return 'common_buff_utils'
@staticmethod
def has_fertility_boosting_buff(sim_info: SimInfo) -> bool:
"""has_fertility_boosting_buff(sim_info)
Determine if any fertility boosting buffs are currently active on a sim.
.. note::
Fertility Boosting Buffs:
- Fertility Potion
- Fertility Potion Masterwork
- Fertility Potion Normal
- Fertility Potion Outstanding
- Massage Table Fertility Boost
- Massage Table Fertility Boost Incense
:param sim_info: The Sim to check.
:type sim_info: SimInfo
:return: True, if they have any fertility boosting buffs. False, if not.
:rtype: bool
"""
buff_ids = (
CommonBuffId.OBJECT_HERBALIST_POTION_FERTILITY_POTION,
CommonBuffId.OBJECT_HERBALIST_POTION_FERTILITY_POTION_MASTERWORK,
CommonBuffId.OBJECT_HERBALIST_POTION_FERTILITY_POTION_NORMAL,
CommonBuffId.OBJECT_HERBALIST_POTION_FERTILITY_POTION_OUTSTANDING,
CommonBuffId.OBJECT_MASSAGE_TABLE_FERTILITY_BOOST,
CommonBuffId.OBJECT_MASSAGE_TABLE_FERTILITY_BOOST_INCENSE
)
return CommonBuffUtils.has_buff(sim_info, *buff_ids)
@staticmethod
def has_morning_person_buff(sim_info: SimInfo) -> bool:
"""has_morning_person_buff(sim_info)
Determine if any Morning Person Trait buffs are currently active on a Sim.
:param sim_info: The Sim to check.
:type sim_info: SimInfo
:return: True, if they have any morning person buffs. False, if not.
:rtype: bool
"""
buff_ids = (
CommonBuffId.TRAIT_MORNING_PERSON,
CommonBuffId.TRAIT_MORNING_PERSON_ACTIVE,
CommonBuffId.TRAIT_MORNING_PERSON_CHECK_ACTIVE
)
return CommonBuffUtils.has_buff(sim_info, *buff_ids)
@staticmethod
def has_night_owl_buff(sim_info: SimInfo) -> bool:
"""has_night_owl_buff(sim_info)
Determine if any Night Owl Trait buffs are currently active on a sim.
:param sim_info: The Sim to check.
:type sim_info: SimInfo
:return: True, if they have any night owl buffs. False, if not.
:rtype: bool
"""
buff_ids = (
CommonBuffId.TRAIT_NIGHT_OWL,
CommonBuffId.TRAIT_NIGHT_OWL_ACTIVE,
CommonBuffId.TRAIT_NIGHT_OWL_CHECK_ACTIVE
)
return CommonBuffUtils.has_buff(sim_info, *buff_ids)
@staticmethod
def has_buff(sim_info: SimInfo, *buffs: Union[int, CommonBuffId, Buff]) -> bool:
"""has_buff(sim_info, *buffs)
Determine if any of the specified buffs are currently active on a sim.
:param sim_info: The sim being checked.
:type sim_info: SimInfo
:param buffs: The identifiers of Buffs.
:type buffs: Union[int, CommonBuffId, Buff]
:return: True, if the sim has any of the specified buffs.
:rtype: bool
"""
if sim_info is None:
raise AssertionError('Argument sim_info was None')
if not CommonComponentUtils.has_component(sim_info, CommonComponentType.BUFF):
return False
if not buffs:
return False
buff_ids = [CommonBuffUtils.get_buff_id(buff) for buff in buffs]
sim_buff_ids = CommonBuffUtils.get_buff_ids(sim_info)
for sim_buff_id in sim_buff_ids:
if sim_buff_id in buff_ids:
return True
return False
@staticmethod
def get_buffs(sim_info: SimInfo) -> List[Buff]:
"""get_buffs(sim_info)
Retrieve all buffs currently active on a Sim.
:param sim_info: The Sim to retrieve the buffs of.
:type sim_info: SimInfo
:return: A collection of currently active buffs on the Sim.
:rtype: Tuple[Buff]
"""
if sim_info is None:
raise AssertionError('Argument sim_info was None')
if not CommonComponentUtils.has_component(sim_info, CommonComponentType.BUFF):
return list()
from objects.components.buff_component import BuffComponent
buff_component: BuffComponent = CommonComponentUtils.get_component(sim_info, CommonComponentType.BUFF)
buffs = list()
for buff in buff_component:
if buff is None or not isinstance(buff, Buff):
continue
buffs.append(buff)
return buffs
@staticmethod
def get_buff_ids(sim_info: SimInfo) -> List[int]:
"""get_buff_ids(sim_info)
Retrieve decimal identifiers for all Buffs of a sim.
:param sim_info: The Sim to check.
:type sim_info: SimInfo
:return: A collection of Buff identifiers on a Sim.
:rtype: List[int]
"""
if sim_info is None:
raise AssertionError('Argument sim_info was None')
if not CommonComponentUtils.has_component(sim_info, CommonComponentType.BUFF):
return list()
buff_ids = list()
sim_buffs = CommonBuffUtils.get_buffs(sim_info)
for buff in sim_buffs:
buff_id = CommonBuffUtils.get_buff_id(buff)
if buff_id is None:
continue
buff_ids.append(buff_id)
return buff_ids
@classmethod
def add_buff(cls, sim_info: SimInfo, *buffs: Union[int, CommonBuffId], buff_reason: Union[int, str, LocalizedString, CommonStringId]=None) -> bool:
"""add_buff(sim_info, *buffs, buff_reason=None)
Add the specified buffs to a sim.
:param sim_info: The sim to add the specified buffs to.
:type sim_info: SimInfo
:param buffs: An iterable of identifiers of buffs being added.
:type buffs: Union[int, CommonBuffId, Buff]
:param buff_reason: The text that will display when the player hovers over the buffs. What caused the buffs to be added.
:type buff_reason: Union[int, str, LocalizedString, CommonStringId], optional
:return: True, if all of the specified buffs were successfully added. False, if not.
:rtype: bool
"""
if sim_info is None:
raise AssertionError('Argument sim_info was None')
if not CommonComponentUtils.has_component(sim_info, CommonComponentType.BUFF):
cls.get_log().format_with_message('Failed to add Buff to Sim. They did not have a Buff component!', buffs=buffs, sim=sim_info, buff_reason=buff_reason)
return False
localized_buff_reason = None
if buff_reason is not None:
localized_buff_reason = CommonLocalizationUtils.create_localized_string(buff_reason)
has_any = False
success = True
for buff_id in buffs:
buff = CommonBuffUtils.load_buff_by_id(buff_id)
if buff is None:
cls.get_log().format_with_message('No buff found using identifier.', buffs=buffs, sim=sim_info, buff_reason=buff_reason, buff_id=buff_id)
continue
if not sim_info.add_buff_from_op(buff, buff_reason=localized_buff_reason):
cls.get_log().format_with_message('Failed to add buff for unknown reasons.', buff=buff, sim=sim_info, buff_reason=buff_reason)
success = False
else:
cls.get_log().format_with_message('Successfully added buff.', buff=buff, sim=sim_info, buff_reason=buff_reason)
has_any = True
cls.get_log().format_with_message('Finished adding buffs to Sim.', buffs=buffs, sim=sim_info, buff_reason=buff_reason, success=success, has_any=has_any)
return success and has_any
@staticmethod
def remove_buff(sim_info: SimInfo, *buffs: Union[int, CommonBuffId, Buff]) -> bool:
"""remove_buff(sim_info, *buffs)
Remove the specified buffs from a sim.
:param sim_info: The sim to remove the specified buffs from.
:type sim_info: SimInfo
:param buffs: An iterable of identifiers of buffs being removed.
:type buffs: Union[int, CommonBuffId, Buff]
:return: True, if all of the specified buffs were successfully removed. False, if not.
:rtype: bool
"""
if sim_info is None:
raise AssertionError('Argument sim_info was None')
if not CommonComponentUtils.has_component(sim_info, CommonComponentType.BUFF):
return False
has_any = False
success = True
for buff in buffs:
buff = CommonBuffUtils.load_buff_by_id(buff)
if buff is None:
continue
sim_info.remove_buff_by_type(buff)
has_any = True
if CommonBuffUtils.has_buff(sim_info, buff):
success = False
return success and has_any
@staticmethod
def get_buff_id(buff_identifier: Union[int, Buff]) -> Union[int, None]:
"""get_buff_id(buff_identifier)
Retrieve the decimal identifier of a Buff.
:param buff_identifier: The identifier or instance of a Buff.
:type buff_identifier: Union[int, Buff]
:return: The decimal identifier of the Buff or None if the Buff does not have an id.
:rtype: Union[int, None]
"""
if isinstance(buff_identifier, int):
return buff_identifier
return getattr(buff_identifier, 'guid64', None)
@staticmethod
def get_buff_name(buff: Buff) -> Union[str, None]:
"""get_buff_name(buff)
Retrieve the Name of a Buff.
:param buff: An instance of a Buff.
:type buff: Buff
:return: The name of a Buff or None if a problem occurs.
:rtype: Union[str, None]
"""
if buff is None:
return None
# noinspection PyBroadException
try:
return buff.__class__.__name__ or ''
except:
return ''
@staticmethod
def get_buff_names(buffs: Iterator[Buff]) -> Tuple[str]:
"""get_buff_names(buffs)
Retrieve the Names of a collection of Buffs.
:param buffs: A collection of Buff instances.
:type buffs: Iterator[Buff]
:return: A collection of names for all specified Buffs.
:rtype: Tuple[str]
"""
if buffs is None or not buffs:
return tuple()
names: List[str] = []
for buff in buffs:
# noinspection PyBroadException
try:
name = CommonBuffUtils.get_buff_name(buff)
if not name:
continue
except:
continue
names.append(name)
return tuple(names)
@staticmethod
def load_buff_by_id(buff: Union[int, CommonBuffId, Buff]) -> Union[Buff, None]:
"""load_buff_by_id(buff)
Load an instance of a Buff by its identifier.
:param buff: The identifier of a Buff.
:type buff: Union[int, CommonBuffId, Buff]
:return: An instance of a Buff matching the decimal identifier or None if not found.
:rtype: Union[Buff, None]
"""
if isinstance(buff, Buff):
return buff
# noinspection PyBroadException
try:
buff: int = int(buff)
except:
buff: Buff = buff
return buff
from sims4.resources import Types
from sims4communitylib.utils.common_resource_utils import CommonResourceUtils
return CommonResourceUtils.load_instance(Types.BUFF, buff)
@Command('s4clib.add_buff', command_type=CommandType.Live)
def _common_add_buff(buff: TunableInstanceParam(Types.BUFF), opt_sim: OptionalTargetParam=None, buff_reason: str=None, _connection: int=None):
from server_commands.argument_helpers import get_optional_target
output = CheatOutput(_connection)
if buff is None:
output('Failed, Buff not specified or Buff did not exist! s4clib.add_buff <buff_name_or_id> [opt_sim=None]')
return
sim_info = CommonSimUtils.get_sim_info(get_optional_target(opt_sim, _connection))
if sim_info is None:
output('Failed, no Sim was specified or the specified Sim was not found!')
return
sim_name = CommonSimNameUtils.get_full_name(sim_info)
output('Adding buff {} to Sim {}'.format(str(buff), sim_name))
try:
if CommonBuffUtils.add_buff(sim_info, buff, buff_reason=buff_reason):
output('Successfully added buff.')
else:
output('Failed to add buff.')
except Exception as ex:
CommonExceptionHandler.log_exception(ModInfo.get_identity(), 'Failed to add buff {} to Sim {}.'.format(str(buff), sim_name), exception=ex)
output('Failed to add buff {} to Sim {}. {}'.format(str(buff), sim_name, str(ex)))
@Command('s4clib.remove_buff', command_type=CommandType.Live)
def _common_remove_buff(buff: TunableInstanceParam(Types.BUFF), opt_sim: OptionalTargetParam=None, _connection: int=None):
from server_commands.argument_helpers import get_optional_target
output = CheatOutput(_connection)
if buff is None:
output('Failed, Buff not specified or Buff did not exist! s4clib.remove_buff <buff_name_or_id> [opt_sim=None]')
return
sim_info = CommonSimUtils.get_sim_info(get_optional_target(opt_sim, _connection))
if sim_info is None:
output('Failed, no Sim was specified or the specified Sim was not found!')
return
sim_name = CommonSimNameUtils.get_full_name(sim_info)
output('Removing buff {} from Sim {}'.format(str(buff), sim_name))
try:
if CommonBuffUtils.remove_buff(sim_info, buff):
output('Successfully removed buff.')
else:
output('Failed to remove buff.')
except Exception as ex:
CommonExceptionHandler.log_exception(ModInfo.get_identity(), 'Failed to remove buff {} from Sim {}.'.format(str(buff), sim_name), exception=ex)
output('Failed to remove buff {} from Sim {}. {}'.format(str(buff), sim_name, str(ex)))
@Command('s4clib.show_active_buffs', command_type=CommandType.Live)
def _common_show_active_buffs(opt_sim: OptionalTargetParam=None, _connection: int=None):
from server_commands.argument_helpers import get_optional_target
output = CheatOutput(_connection)
sim = get_optional_target(opt_sim, _connection)
sim_info = CommonSimUtils.get_sim_info(sim)
if sim_info is None:
output('Failed, no Sim was specified or the specified Sim was not found!')
return
sim_name = CommonSimNameUtils.get_full_name(sim_info)
output('Showing active buffs of Sim {}'.format(sim_name))
try:
sim_buff_strings: List[str] = list()
for buff in CommonBuffUtils.get_buffs(sim_info):
buff_name = CommonBuffUtils.get_buff_name(buff)
buff_id = CommonBuffUtils.get_buff_id(buff)
sim_buff_strings.append('{} ({})'.format(buff_name, buff_id))
sim_buff_strings = sorted(sim_buff_strings, key=lambda x: x)
sim_buffs = ', '.join(sim_buff_strings)
text = ''
text += 'Active Buffs:\n{}\n\n'.format(sim_buffs)
CommonBasicNotification(
CommonLocalizationUtils.create_localized_string('{} Active Buffs ({})'.format(sim_name, CommonSimUtils.get_sim_id(sim_info))),
CommonLocalizationUtils.create_localized_string(text)
).show(
icon=IconInfoData(obj_instance=CommonSimUtils.get_sim_instance(sim_info))
)
except Exception as ex:
CommonExceptionHandler.log_exception(ModInfo.get_identity(), 'Failed to show active buffs of Sim {}.'.format(sim_name), exception=ex)
output('Failed to show active buffs of Sim {}. {}'.format(sim_name, str(ex)))
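# Hedged usage sketch (only meaningful inside a live Sims 4 session; the cheat
# console commands above are the primary entry points). get_active_sim_info is
# an assumed S4CL helper:
#   sim_info = CommonSimUtils.get_active_sim_info()
#   if CommonBuffUtils.has_night_owl_buff(sim_info):
#       CommonBuffUtils.remove_buff(sim_info, CommonBuffId.TRAIT_NIGHT_OWL)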
|
[
"sims4communitylib.utils.sims.common_sim_utils.CommonSimUtils.get_sim_id",
"sims4communitylib.utils.common_component_utils.CommonComponentUtils.get_component",
"server_commands.argument_helpers.get_optional_target",
"sims4communitylib.utils.sims.common_sim_name_utils.CommonSimNameUtils.get_full_name",
"sims4.commands.Command",
"server_commands.argument_helpers.TunableInstanceParam",
"sims4communitylib.utils.common_component_utils.CommonComponentUtils.has_component",
"sims4communitylib.utils.localization.common_localization_utils.CommonLocalizationUtils.create_localized_string",
"sims4communitylib.utils.common_resource_utils.CommonResourceUtils.load_instance",
"sims4communitylib.utils.sims.common_sim_utils.CommonSimUtils.get_sim_info",
"sims4communitylib.modinfo.ModInfo.get_identity",
"sims4.commands.CheatOutput",
"sims4communitylib.utils.sims.common_sim_utils.CommonSimUtils.get_sim_instance"
] |
[((13429, 13486), 'sims4.commands.Command', 'Command', (['"""s4clib.add_buff"""'], {'command_type': 'CommandType.Live'}), "('s4clib.add_buff', command_type=CommandType.Live)\n", (13436, 13486), False, 'from sims4.commands import Command, CommandType, CheatOutput\n'), ((14683, 14743), 'sims4.commands.Command', 'Command', (['"""s4clib.remove_buff"""'], {'command_type': 'CommandType.Live'}), "('s4clib.remove_buff', command_type=CommandType.Live)\n", (14690, 14743), False, 'from sims4.commands import Command, CommandType, CheatOutput\n'), ((15920, 15986), 'sims4.commands.Command', 'Command', (['"""s4clib.show_active_buffs"""'], {'command_type': 'CommandType.Live'}), "('s4clib.show_active_buffs', command_type=CommandType.Live)\n", (15927, 15986), False, 'from sims4.commands import Command, CommandType, CheatOutput\n'), ((13712, 13736), 'sims4.commands.CheatOutput', 'CheatOutput', (['_connection'], {}), '(_connection)\n', (13723, 13736), False, 'from sims4.commands import Command, CommandType, CheatOutput\n'), ((14114, 14156), 'sims4communitylib.utils.sims.common_sim_name_utils.CommonSimNameUtils.get_full_name', 'CommonSimNameUtils.get_full_name', (['sim_info'], {}), '(sim_info)\n', (14146, 14156), False, 'from sims4communitylib.utils.sims.common_sim_name_utils import CommonSimNameUtils\n'), ((14949, 14973), 'sims4.commands.CheatOutput', 'CheatOutput', (['_connection'], {}), '(_connection)\n', (14960, 14973), False, 'from sims4.commands import Command, CommandType, CheatOutput\n'), ((15354, 15396), 'sims4communitylib.utils.sims.common_sim_name_utils.CommonSimNameUtils.get_full_name', 'CommonSimNameUtils.get_full_name', (['sim_info'], {}), '(sim_info)\n', (15386, 15396), False, 'from sims4communitylib.utils.sims.common_sim_name_utils import CommonSimNameUtils\n'), ((16158, 16182), 'sims4.commands.CheatOutput', 'CheatOutput', (['_connection'], {}), '(_connection)\n', (16169, 16182), False, 'from sims4.commands import Command, CommandType, CheatOutput\n'), ((16193, 16234), 'server_commands.argument_helpers.get_optional_target', 'get_optional_target', (['opt_sim', '_connection'], {}), '(opt_sim, _connection)\n', (16212, 16234), False, 'from server_commands.argument_helpers import get_optional_target\n'), ((16250, 16282), 'sims4communitylib.utils.sims.common_sim_utils.CommonSimUtils.get_sim_info', 'CommonSimUtils.get_sim_info', (['sim'], {}), '(sim)\n', (16277, 16282), False, 'from sims4communitylib.utils.sims.common_sim_utils import CommonSimUtils\n'), ((16421, 16463), 'sims4communitylib.utils.sims.common_sim_name_utils.CommonSimNameUtils.get_full_name', 'CommonSimNameUtils.get_full_name', (['sim_info'], {}), '(sim_info)\n', (16453, 16463), False, 'from sims4communitylib.utils.sims.common_sim_name_utils import CommonSimNameUtils\n'), ((1797, 1819), 'sims4communitylib.modinfo.ModInfo.get_identity', 'ModInfo.get_identity', ([], {}), '()\n', (1817, 1819), False, 'from sims4communitylib.modinfo import ModInfo\n'), ((6138, 6208), 'sims4communitylib.utils.common_component_utils.CommonComponentUtils.get_component', 'CommonComponentUtils.get_component', (['sim_info', 'CommonComponentType.BUFF'], {}), '(sim_info, CommonComponentType.BUFF)\n', (6172, 6208), False, 'from sims4communitylib.utils.common_component_utils import CommonComponentUtils\n'), ((13374, 13425), 'sims4communitylib.utils.common_resource_utils.CommonResourceUtils.load_instance', 'CommonResourceUtils.load_instance', (['Types.BUFF', 'buff'], {}), '(Types.BUFF, buff)\n', (13407, 13425), False, 'from sims4communitylib.utils.common_resource_utils import CommonResourceUtils\n'), ((13514, 13546), 'server_commands.argument_helpers.TunableInstanceParam', 'TunableInstanceParam', (['Types.BUFF'], {}), '(Types.BUFF)\n', (13534, 13546), False, 'from server_commands.argument_helpers import TunableInstanceParam, OptionalTargetParam\n'), ((13933, 13974), 'server_commands.argument_helpers.get_optional_target', 'get_optional_target', (['opt_sim', '_connection'], {}), '(opt_sim, _connection)\n', (13952, 13974), False, 'from server_commands.argument_helpers import get_optional_target\n'), ((14774, 14806), 'server_commands.argument_helpers.TunableInstanceParam', 'TunableInstanceParam', (['Types.BUFF'], {}), '(Types.BUFF)\n', (14794, 14806), False, 'from server_commands.argument_helpers import TunableInstanceParam, OptionalTargetParam\n'), ((15173, 15214), 'server_commands.argument_helpers.get_optional_target', 'get_optional_target', (['opt_sim', '_connection'], {}), '(opt_sim, _connection)\n', (15192, 15214), False, 'from server_commands.argument_helpers import get_optional_target\n'), ((5059, 5129), 'sims4communitylib.utils.common_component_utils.CommonComponentUtils.has_component', 'CommonComponentUtils.has_component', (['sim_info', 'CommonComponentType.BUFF'], {}), '(sim_info, CommonComponentType.BUFF)\n', (5093, 5129), False, 'from sims4communitylib.utils.common_component_utils import CommonComponentUtils\n'), ((5932, 6002), 'sims4communitylib.utils.common_component_utils.CommonComponentUtils.has_component', 'CommonComponentUtils.has_component', (['sim_info', 'CommonComponentType.BUFF'], {}), '(sim_info, CommonComponentType.BUFF)\n', (5966, 6002), False, 'from sims4communitylib.utils.common_component_utils import CommonComponentUtils\n'), ((6856, 6926), 'sims4communitylib.utils.common_component_utils.CommonComponentUtils.has_component', 'CommonComponentUtils.has_component', (['sim_info', 'CommonComponentType.BUFF'], {}), '(sim_info, CommonComponentType.BUFF)\n', (6890, 6926), False, 'from sims4communitylib.utils.common_component_utils import CommonComponentUtils\n'), ((8178, 8248), 'sims4communitylib.utils.common_component_utils.CommonComponentUtils.has_component', 'CommonComponentUtils.has_component', (['sim_info', 'CommonComponentType.BUFF'], {}), '(sim_info, CommonComponentType.BUFF)\n', (8212, 8248), False, 'from sims4communitylib.utils.common_component_utils import CommonComponentUtils\n'), ((8548, 8608), 'sims4communitylib.utils.localization.common_localization_utils.CommonLocalizationUtils.create_localized_string', 'CommonLocalizationUtils.create_localized_string', (['buff_reason'], {}), '(buff_reason)\n', (8595, 8608), False, 'from sims4communitylib.utils.localization.common_localization_utils import CommonLocalizationUtils\n'), ((10247, 10317), 'sims4communitylib.utils.common_component_utils.CommonComponentUtils.has_component', 'CommonComponentUtils.has_component', (['sim_info', 'CommonComponentType.BUFF'], {}), '(sim_info, CommonComponentType.BUFF)\n', (10281, 10317), False, 'from sims4communitylib.utils.common_component_utils import CommonComponentUtils\n'), ((14487, 14509), 'sims4communitylib.modinfo.ModInfo.get_identity', 'ModInfo.get_identity', ([], {}), '()\n', (14507, 14509), False, 'from sims4communitylib.modinfo import ModInfo\n'), ((15714, 15736), 'sims4communitylib.modinfo.ModInfo.get_identity', 'ModInfo.get_identity', ([], {}), '()\n', (15734, 15736), False, 'from sims4communitylib.modinfo import ModInfo\n'), ((17443, 17465), 'sims4communitylib.modinfo.ModInfo.get_identity', 'ModInfo.get_identity', ([], {}), '()\n', (17463, 17465), False, 'from sims4communitylib.modinfo import ModInfo\n'), ((17204, 17257), 'sims4communitylib.utils.localization.common_localization_utils.CommonLocalizationUtils.create_localized_string', 'CommonLocalizationUtils.create_localized_string', (['text'], {}), '(text)\n', (17251, 17257), False, 'from sims4communitylib.utils.localization.common_localization_utils import CommonLocalizationUtils\n'), ((17317, 17358), 'sims4communitylib.utils.sims.common_sim_utils.CommonSimUtils.get_sim_instance', 'CommonSimUtils.get_sim_instance', (['sim_info'], {}), '(sim_info)\n', (17348, 17358), False, 'from sims4communitylib.utils.sims.common_sim_utils import CommonSimUtils\n'), ((17153, 17188), 'sims4communitylib.utils.sims.common_sim_utils.CommonSimUtils.get_sim_id', 'CommonSimUtils.get_sim_id', (['sim_info'], {}), '(sim_info)\n', (17178, 17188), False, 'from sims4communitylib.utils.sims.common_sim_utils import CommonSimUtils\n')]
|
# Copyright 2008-2010 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module that adds directories needed by Robot to sys.path when imported."""
import sys
import os
import fnmatch
def add_path(path, to_beginning=False, force=False):
if _should_be_added(path, force):
if to_beginning:
sys.path.insert(0, path)
else:
sys.path.append(path)
def remove_path(path):
path = _normpath(path)
sys.path = [p for p in sys.path if _normpath(p) != path]
def _should_be_added(path, force):
if (not path) or _find_in_syspath_normalized(path):
return False
return force or os.path.exists(path)
def _find_in_syspath_normalized(path):
path = _normpath(path)
for element in sys.path:
if _normpath(element) == path:
return element
return None
def _normpath(path):
return os.path.normcase(os.path.normpath(path))
ROBOTDIR = os.path.dirname(os.path.abspath(__file__))
PARENTDIR = os.path.dirname(ROBOTDIR)
add_path(os.path.join(ROBOTDIR, 'libraries'), to_beginning=True,
force=True)
add_path(PARENTDIR, to_beginning=True)
# Handles egg installations
if fnmatch.fnmatchcase(os.path.basename(PARENTDIR), 'robotframework-*.egg'):
add_path(os.path.dirname(PARENTDIR), to_beginning=True)
# Remove ROBOTDIR dir to disallow importing robot internal modules directly
remove_path(ROBOTDIR)
# Elements from PYTHONPATH. By default it is not processed in Jython and in
# Python valid non-absolute paths may be ignored.
PYPATH = os.environ.get('PYTHONPATH')
if PYPATH:
for path in PYPATH.split(os.pathsep):
add_path(path)
del path
# Current dir (it seems to be in Jython by default so let's be consistent)
add_path('.')
del _find_in_syspath_normalized, _normpath, add_path, remove_path, ROBOTDIR, PARENTDIR, PYPATH
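# Hedged illustration of the normalization this module compares paths with (the
# helpers themselves are deleted above, so this inlines the same expression):
#   import os.path
#   os.path.normcase(os.path.normpath('/tmp/./robot//libraries')) \
#       == os.path.normcase(os.path.normpath('/tmp/robot/libraries'))  # True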
|
[
"sys.path.append",
"os.path.abspath",
"os.path.basename",
"os.path.dirname",
"os.path.exists",
"sys.path.insert",
"os.environ.get",
"os.path.normpath",
"os.path.join"
] |
[((1511, 1536), 'os.path.dirname', 'os.path.dirname', (['ROBOTDIR'], {}), '(ROBOTDIR)\n', (1526, 1536), False, 'import os\n'), ((2062, 2090), 'os.environ.get', 'os.environ.get', (['"""PYTHONPATH"""'], {}), "('PYTHONPATH')\n", (2076, 2090), False, 'import os\n'), ((1472, 1497), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1487, 1497), False, 'import os\n'), ((1547, 1582), 'os.path.join', 'os.path.join', (['ROBOTDIR', '"""libraries"""'], {}), "(ROBOTDIR, 'libraries')\n", (1559, 1582), False, 'import os\n'), ((1713, 1740), 'os.path.basename', 'os.path.basename', (['PARENTDIR'], {}), '(PARENTDIR)\n', (1729, 1740), False, 'import os\n'), ((1170, 1190), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (1184, 1190), False, 'import os\n'), ((1419, 1441), 'os.path.normpath', 'os.path.normpath', (['path'], {}), '(path)\n', (1435, 1441), False, 'import os\n'), ((1780, 1806), 'os.path.dirname', 'os.path.dirname', (['PARENTDIR'], {}), '(PARENTDIR)\n', (1795, 1806), False, 'import os\n'), ((852, 876), 'sys.path.insert', 'sys.path.insert', (['(0)', 'path'], {}), '(0, path)\n', (867, 876), False, 'import sys\n'), ((903, 924), 'sys.path.append', 'sys.path.append', (['path'], {}), '(path)\n', (918, 924), False, 'import sys\n')]
|
"""Tests for log_linear_exp function."""
import chex
import jax
import jax.numpy as jnp
import numpy as np
from vmcnet.utils.log_linear_exp import log_linear_exp
import vmcnet.utils.slog_helpers as slog_helpers
def test_log_linear_exp_shape():
"""Test output shape of log linear exp."""
signs = jnp.ones((5, 2, 4, 3))
vals = jnp.zeros((5, 2, 4, 3))
weights = jnp.ones((2, 7))
out = log_linear_exp(signs, vals, weights, axis=-3)
out_no_weights = log_linear_exp(signs, vals, axis=-3)
desired_shape = (5, 7, 4, 3)
desired_shape_no_weights = (5, 1, 4, 3)
chex.assert_shape(out, desired_shape)
chex.assert_shape(out_no_weights, desired_shape_no_weights)
def test_log_linear_exp_no_overflow():
"""Test that the log-linear-exp trick avoids overflow when any vals are big."""
signs = jnp.array([-1.0, -1.0, 1.0, 1.0])
vals = jnp.array([300.0, 100.0, 3000.0, 1.5])
weights = jnp.reshape(jnp.array([-1.0, 2.0, 0.5, 0.6]), (4, 1))
sign_out, log_out = log_linear_exp(signs, vals, weights, axis=0)
# the output should be sign_out=1.0, log_out=log|0.5 * exp(3000) + tinier stuff|
assert jnp.isfinite(log_out)
np.testing.assert_allclose(sign_out, 1.0)
np.testing.assert_allclose(log_out, jnp.log(0.5) + 3000.0)
def test_log_linear_exp_no_underflow():
"""Test that the log-linear-exp trick avoids underflow when all vals are small."""
signs = jnp.array([-1.0, -1.0, 1.0, 1.0])
vals = jnp.array([-4000.0, -5500.0, -3000.0, -1234.5])
sign_out, log_out = log_linear_exp(signs, vals, axis=0)
# the output should be sign_out=1.0, log_out=log|exp(-1234.5) + tinier stuff|
np.testing.assert_allclose(sign_out, 1.0)
np.testing.assert_allclose(log_out, -1234.5)
def test_log_linear_equals_log_linear_exp_log():
"""Test that log-linear-exp of sign(x), log|x| is just log-linear."""
key = jax.random.PRNGKey(0)
key, subkey = jax.random.split(key)
x = jax.random.normal(subkey, (9, 5))
sign_x, log_x = slog_helpers.array_to_slog(x)
key, subkey = jax.random.split(key)
kernel = jax.random.normal(subkey, (5, 7))
sign_linear_out, log_linear_out = slog_helpers.array_to_slog(jnp.dot(x, kernel))
sign_linear_exp_log_out, log_linear_exp_log_out = log_linear_exp(
sign_x, log_x, kernel, axis=-1
)
np.testing.assert_allclose(sign_linear_exp_log_out, sign_linear_out)
np.testing.assert_allclose(log_linear_exp_log_out, log_linear_out, rtol=1e-5)
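# A hedged, NumPy-only reference of the stabilization trick the tests above
# exercise (not the library implementation; axis-0, 1-D inputs only):
def test_numpy_reference_sketch():
    """Shift by the max so exp() only sees non-positive exponents."""
    def log_linear_exp_np(signs, vals, weights):
        max_val = np.max(vals)
        linear = (signs * np.exp(vals - max_val)) @ weights  # weighted sum in linear space
        return np.sign(linear), np.log(np.abs(linear)) + max_val
    signs = np.array([-1.0, -1.0, 1.0, 1.0])
    vals = np.array([300.0, 100.0, 3000.0, 1.5])
    weights = np.array([-1.0, 2.0, 0.5, 0.6]).reshape(4, 1)
    sign_out, log_out = log_linear_exp_np(signs, vals, weights)
    np.testing.assert_allclose(sign_out, 1.0)
    np.testing.assert_allclose(log_out, np.log(0.5) + 3000.0)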
|
[
"jax.numpy.array",
"vmcnet.utils.slog_helpers.array_to_slog",
"jax.numpy.isfinite",
"jax.numpy.log",
"chex.assert_shape",
"jax.random.normal",
"jax.numpy.dot",
"numpy.testing.assert_allclose",
"jax.random.PRNGKey",
"jax.numpy.ones",
"jax.numpy.zeros",
"vmcnet.utils.log_linear_exp.log_linear_exp",
"jax.random.split"
] |
[((307, 329), 'jax.numpy.ones', 'jnp.ones', (['(5, 2, 4, 3)'], {}), '((5, 2, 4, 3))\n', (315, 329), True, 'import jax.numpy as jnp\n'), ((341, 364), 'jax.numpy.zeros', 'jnp.zeros', (['(5, 2, 4, 3)'], {}), '((5, 2, 4, 3))\n', (350, 364), True, 'import jax.numpy as jnp\n'), ((379, 395), 'jax.numpy.ones', 'jnp.ones', (['(2, 7)'], {}), '((2, 7))\n', (387, 395), True, 'import jax.numpy as jnp\n'), ((407, 452), 'vmcnet.utils.log_linear_exp.log_linear_exp', 'log_linear_exp', (['signs', 'vals', 'weights'], {'axis': '(-3)'}), '(signs, vals, weights, axis=-3)\n', (421, 452), False, 'from vmcnet.utils.log_linear_exp import log_linear_exp\n'), ((474, 510), 'vmcnet.utils.log_linear_exp.log_linear_exp', 'log_linear_exp', (['signs', 'vals'], {'axis': '(-3)'}), '(signs, vals, axis=-3)\n', (488, 510), False, 'from vmcnet.utils.log_linear_exp import log_linear_exp\n'), ((593, 630), 'chex.assert_shape', 'chex.assert_shape', (['out', 'desired_shape'], {}), '(out, desired_shape)\n', (610, 630), False, 'import chex\n'), ((635, 694), 'chex.assert_shape', 'chex.assert_shape', (['out_no_weights', 'desired_shape_no_weights'], {}), '(out_no_weights, desired_shape_no_weights)\n', (652, 694), False, 'import chex\n'), ((832, 865), 'jax.numpy.array', 'jnp.array', (['[-1.0, -1.0, 1.0, 1.0]'], {}), '([-1.0, -1.0, 1.0, 1.0])\n', (841, 865), True, 'import jax.numpy as jnp\n'), ((877, 915), 'jax.numpy.array', 'jnp.array', (['[300.0, 100.0, 3000.0, 1.5]'], {}), '([300.0, 100.0, 3000.0, 1.5])\n', (886, 915), True, 'import jax.numpy as jnp\n'), ((1009, 1053), 'vmcnet.utils.log_linear_exp.log_linear_exp', 'log_linear_exp', (['signs', 'vals', 'weights'], {'axis': '(0)'}), '(signs, vals, weights, axis=0)\n', (1023, 1053), False, 'from vmcnet.utils.log_linear_exp import log_linear_exp\n'), ((1151, 1172), 'jax.numpy.isfinite', 'jnp.isfinite', (['log_out'], {}), '(log_out)\n', (1163, 1172), True, 'import jax.numpy as jnp\n'), ((1177, 1218), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['sign_out', '(1.0)'], {}), '(sign_out, 1.0)\n', (1203, 1218), True, 'import numpy as np\n'), ((1423, 1456), 'jax.numpy.array', 'jnp.array', (['[-1.0, -1.0, 1.0, 1.0]'], {}), '([-1.0, -1.0, 1.0, 1.0])\n', (1432, 1456), True, 'import jax.numpy as jnp\n'), ((1468, 1515), 'jax.numpy.array', 'jnp.array', (['[-4000.0, -5500.0, -3000.0, -1234.5]'], {}), '([-4000.0, -5500.0, -3000.0, -1234.5])\n', (1477, 1515), True, 'import jax.numpy as jnp\n'), ((1541, 1576), 'vmcnet.utils.log_linear_exp.log_linear_exp', 'log_linear_exp', (['signs', 'vals'], {'axis': '(0)'}), '(signs, vals, axis=0)\n', (1555, 1576), False, 'from vmcnet.utils.log_linear_exp import log_linear_exp\n'), ((1664, 1705), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['sign_out', '(1.0)'], {}), '(sign_out, 1.0)\n', (1690, 1705), True, 'import numpy as np\n'), ((1710, 1754), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['log_out', '(-1234.5)'], {}), '(log_out, -1234.5)\n', (1736, 1754), True, 'import numpy as np\n'), ((1890, 1911), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['(0)'], {}), '(0)\n', (1908, 1911), False, 'import jax\n'), ((1930, 1951), 'jax.random.split', 'jax.random.split', (['key'], {}), '(key)\n', (1946, 1951), False, 'import jax\n'), ((1960, 1993), 'jax.random.normal', 'jax.random.normal', (['subkey', '(9, 5)'], {}), '(subkey, (9, 5))\n', (1977, 1993), False, 'import jax\n'), ((2014, 2043), 'vmcnet.utils.slog_helpers.array_to_slog', 'slog_helpers.array_to_slog', (['x'], {}), '(x)\n', (2040, 2043), True, 'import vmcnet.utils.slog_helpers as slog_helpers\n'), ((2063, 2084), 'jax.random.split', 'jax.random.split', (['key'], {}), '(key)\n', (2079, 2084), False, 'import jax\n'), ((2098, 2131), 'jax.random.normal', 'jax.random.normal', (['subkey', '(5, 7)'], {}), '(subkey, (5, 7))\n', (2115, 2131), False, 'import jax\n'), ((2272, 2318), 'vmcnet.utils.log_linear_exp.log_linear_exp', 'log_linear_exp', (['sign_x', 'log_x', 'kernel'], {'axis': '(-1)'}), '(sign_x, log_x, kernel, axis=-1)\n', (2286, 2318), False, 'from vmcnet.utils.log_linear_exp import log_linear_exp\n'), ((2338, 2406), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['sign_linear_exp_log_out', 'sign_linear_out'], {}), '(sign_linear_exp_log_out, sign_linear_out)\n', (2364, 2406), True, 'import numpy as np\n'), ((2411, 2489), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['log_linear_exp_log_out', 'log_linear_out'], {'rtol': '(1e-05)'}), '(log_linear_exp_log_out, log_linear_out, rtol=1e-05)\n', (2437, 2489), True, 'import numpy as np\n'), ((942, 974), 'jax.numpy.array', 'jnp.array', (['[-1.0, 2.0, 0.5, 0.6]'], {}), '([-1.0, 2.0, 0.5, 0.6])\n', (951, 974), True, 'import jax.numpy as jnp\n'), ((2198, 2216), 'jax.numpy.dot', 'jnp.dot', (['x', 'kernel'], {}), '(x, kernel)\n', (2205, 2216), True, 'import jax.numpy as jnp\n'), ((1259, 1271), 'jax.numpy.log', 'jnp.log', (['(0.5)'], {}), '(0.5)\n', (1266, 1271), True, 'import jax.numpy as jnp\n')]
|
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic_python_agent import hardware
from proliantutils.hpssa import manager as hpssa_manager
from proliantutils.hpsum import hpsum_controller
class ProliantHardwareManager(hardware.GenericHardwareManager):
HARDWARE_MANAGER_VERSION = "3"
def get_clean_steps(self, node, ports):
"""Return the clean steps supported by this hardware manager.
This method returns the clean steps that are supported by
proliant hardware manager. This method is invoked on every
hardware manager by Ironic Python Agent to give this information
back to Ironic.
:param node: A dictionary of the node object
:param ports: A list of dictionaries containing information of ports
for the node
:returns: A list of dictionaries, each item containing the step name,
interface and priority for the clean step.
"""
return [{'step': 'create_configuration',
'interface': 'raid',
'priority': 0},
{'step': 'delete_configuration',
'interface': 'raid',
'priority': 0},
{'step': 'erase_devices',
'interface': 'deploy',
'priority': 0},
{'step': 'update_firmware',
'interface': 'management',
'priority': 0}]
def evaluate_hardware_support(cls):
return hardware.HardwareSupport.SERVICE_PROVIDER
def create_configuration(self, node, ports):
"""Create RAID configuration on the bare metal.
This method creates the desired RAID configuration as read from
node['target_raid_config'].
:param node: A dictionary of the node object
:param ports: A list of dictionaries containing information of ports
for the node
:returns: The current RAID configuration of the below format.
raid_config = {
'logical_disks': [{
'size_gb': 100,
'raid_level': 1,
'physical_disks': [
'5I:0:1',
'5I:0:2'],
'controller': 'Smart array controller'
},
]
}
"""
target_raid_config = node.get('target_raid_config', {}).copy()
return hpssa_manager.create_configuration(
raid_config=target_raid_config)
def delete_configuration(self, node, ports):
"""Deletes RAID configuration on the bare metal.
This method deletes all the RAID disks on the bare metal.
:param node: A dictionary of the node object
:param ports: A list of dictionaries containing information of ports
for the node
"""
return hpssa_manager.delete_configuration()
def erase_devices(self, node, port):
"""Erase the drives on the bare metal.
This method erases all the drives which support sanitize, plus the drives
which are not part of any logical volume on the bare metal. It calls the
generic erase method after a successful sanitize disk erase.
:param node: A dictionary of the node object.
:param port: A list of dictionaries containing information of ports
for the node.
:raises exception.HPSSAOperationError, if there is a failure on the
erase operation on the controllers.
:returns: The dictionary of controllers with the drives and erase
status for each drive.
"""
result = {}
result['Disk Erase Status'] = hpssa_manager.erase_devices()
result.update(super(ProliantHardwareManager,
self).erase_devices(node, port))
return result
def update_firmware(self, node, port):
"""Performs HPSUM based firmware update on the bare metal node.
This method performs firmware update on all or some of the firmware
components on the bare metal node.
:returns: A string with return code and the statistics of
updated/failed components.
:raises: HpsumOperationError, when the hpsum firmware update operation
on the node fails.
"""
return hpsum_controller.update_firmware(node)
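# Hedged sketch of the node dictionary create_configuration() consumes, mirroring
# the raid_config format documented in its docstring (values illustrative):
EXAMPLE_NODE = {
    'target_raid_config': {
        'logical_disks': [{
            'size_gb': 100,
            'raid_level': 1,
            'physical_disks': ['5I:0:1', '5I:0:2'],
            'controller': 'Smart array controller',
        }],
    },
}
# ProliantHardwareManager().create_configuration(EXAMPLE_NODE, ports=[])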
|
[
"proliantutils.hpssa.manager.delete_configuration",
"proliantutils.hpssa.manager.erase_devices",
"proliantutils.hpsum.hpsum_controller.update_firmware",
"proliantutils.hpssa.manager.create_configuration"
] |
[((2978, 3044), 'proliantutils.hpssa.manager.create_configuration', 'hpssa_manager.create_configuration', ([], {'raid_config': 'target_raid_config'}), '(raid_config=target_raid_config)\n', (3012, 3044), True, 'from proliantutils.hpssa import manager as hpssa_manager\n'), ((3414, 3450), 'proliantutils.hpssa.manager.delete_configuration', 'hpssa_manager.delete_configuration', ([], {}), '()\n', (3448, 3450), True, 'from proliantutils.hpssa import manager as hpssa_manager\n'), ((4228, 4257), 'proliantutils.hpssa.manager.erase_devices', 'hpssa_manager.erase_devices', ([], {}), '()\n', (4255, 4257), True, 'from proliantutils.hpssa import manager as hpssa_manager\n'), ((4870, 4908), 'proliantutils.hpsum.hpsum_controller.update_firmware', 'hpsum_controller.update_firmware', (['node'], {}), '(node)\n', (4902, 4908), False, 'from proliantutils.hpsum import hpsum_controller\n')]
|
import pyopencl as cl
import pyopencl.array as cl_array
import numpy
import numpy.linalg as la
a = numpy.random.rand(50000).astype(numpy.float32)
b = numpy.random.rand(50000).astype(numpy.float32)
ctx = cl.create_some_context()
queue = cl.CommandQueue(ctx)
a_dev = cl_array.to_device(queue, a)
b_dev = cl_array.to_device(queue, b)
dest_dev = cl_array.empty_like(a_dev)
prg = cl.Program(ctx, """
__kernel void sum(__global const float *a,
__global const float *b, __global float *c)
{
int gid = get_global_id(0);
c[gid] = a[gid] + b[gid];
}
""").build()
prg.sum(queue, a.shape, None, a_dev.data, b_dev.data, dest_dev.data)
print(la.norm((dest_dev - (a_dev+b_dev)).get()))
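# The printed norm should be ~0.0; an equivalent explicit check (hedged addition):
assert numpy.allclose(dest_dev.get(), a + b)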
|
[
"pyopencl.array.empty_like",
"pyopencl.create_some_context",
"pyopencl.array.to_device",
"pyopencl.CommandQueue",
"pyopencl.Program",
"numpy.random.rand"
] |
[((205, 229), 'pyopencl.create_some_context', 'cl.create_some_context', ([], {}), '()\n', (227, 229), True, 'import pyopencl as cl\n'), ((238, 258), 'pyopencl.CommandQueue', 'cl.CommandQueue', (['ctx'], {}), '(ctx)\n', (253, 258), True, 'import pyopencl as cl\n'), ((268, 296), 'pyopencl.array.to_device', 'cl_array.to_device', (['queue', 'a'], {}), '(queue, a)\n', (286, 296), True, 'import pyopencl.array as cl_array\n'), ((305, 333), 'pyopencl.array.to_device', 'cl_array.to_device', (['queue', 'b'], {}), '(queue, b)\n', (323, 333), True, 'import pyopencl.array as cl_array\n'), ((345, 371), 'pyopencl.array.empty_like', 'cl_array.empty_like', (['a_dev'], {}), '(a_dev)\n', (364, 371), True, 'import pyopencl.array as cl_array\n'), ((100, 124), 'numpy.random.rand', 'numpy.random.rand', (['(50000)'], {}), '(50000)\n', (117, 124), False, 'import numpy\n'), ((151, 175), 'numpy.random.rand', 'numpy.random.rand', (['(50000)'], {}), '(50000)\n', (168, 175), False, 'import numpy\n'), ((379, 589), 'pyopencl.Program', 'cl.Program', (['ctx', '"""\n __kernel void sum(__global const float *a,\n __global const float *b, __global float *c)\n {\n int gid = get_global_id(0);\n c[gid] = a[gid] + b[gid];\n }\n """'], {}), '(ctx,\n """\n __kernel void sum(__global const float *a,\n __global const float *b, __global float *c)\n {\n int gid = get_global_id(0);\n c[gid] = a[gid] + b[gid];\n }\n """\n )\n', (389, 589), True, 'import pyopencl as cl\n')]
|
import tensorflow as tf
class Scalar_LR(tf.keras.callbacks.Callback):
def __init__(self, name, TENSORBOARD_DIR):
super().__init__()
self.name = name
# self.previous_loss = None
self.file_writer = tf.summary.create_file_writer(TENSORBOARD_DIR)
self.file_writer.set_as_default()
# def on_epoch_begin(self, epoch, logs=None):
# logs['learning rate'] = self.model.optimizer.lr
# tf.summary.scalar("lr", logs['learning rate'], step=epoch)
def on_epoch_end(self, epoch, logs=None):
logs['learning rate'] = self.model.optimizer.lr
# with self.file_writer.as_default():
# # img = self.model.predict(dummy_data)
# # y_pred = self.model.predict(self.validation_data[0])
# tf.summary.image("Training data", img, step=0)
tf.summary.scalar("end_lr", logs['learning rate'], step=epoch)
#
#
# #self.previous_loss = logs['loss']
#
# def on_train_batch_begin(self, batch, logs=None):
# logs['learning rate'] = self.model.optimizer.lr
# # tf.summary.scalar("my_metric", logs['learning rate'], step=batch)
# #
# def on_train_batch_end(self, batch, logs=None):
# print('test')
#
# # tensor = self.model.get_layer('block3b_add').output
# # tensor = self.model.layers[0].output
# # tensor = tensor[0,:,:,:]
# # print(tensor)
# # plt.imshow(tensor)
# # plt.show()
#
# # intermediate_layer_model = tf.keras.Model(inputs=self.model.input,
# # outputs=self.model.get_layer('block3b_add').output)
# # intermediate_output = intermediate_layer_model.predict(self.validation_data[0])
# # print(intermediate_output)
#
# # output_images = tf.cast(self.model.call(self.data['image']),dtype=tf.float32)
# # output_images *= 255
# # print(output_images)
#
# # tf.summary.image('test', tensor, step=batch, max_outputs=1)
#
#
|
[
"tensorflow.summary.create_file_writer",
"tensorflow.summary.scalar"
] |
[((233, 279), 'tensorflow.summary.create_file_writer', 'tf.summary.create_file_writer', (['TENSORBOARD_DIR'], {}), '(TENSORBOARD_DIR)\n', (262, 279), True, 'import tensorflow as tf\n'), ((840, 902), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""end_lr"""', "logs['learning rate']"], {'step': 'epoch'}), "('end_lr', logs['learning rate'], step=epoch)\n", (857, 902), True, 'import tensorflow as tf\n')]
|
#!/home/gontz/miniconda3/envs/ih/bin/python3
from itertools import product
import click
from conf import RACES
from src import dbops, procyclingstats
@click.command()
@click.argument("items")
@click.option("-v", "--verbose", is_flag=True)
def scrape(items, verbose):
"""Scrape ITEMS from procyclingstats.com."""
items = items.lower()
# Invalid argument
if items not in ["riders", "stages"]:
raise click.UsageError("ITEMS must be STAGES or RIDERS")
# Scrape stages
if items == "stages":
for race, params in RACES.items():
(start_year, stop_year), no_races = params
iter_years = range(start_year, stop_year)
iter_number = range(1, no_races + 1)
for year, number in product(iter_years, iter_number):
stage = [race, year, number]
if dbops.check_exists(stage):
if verbose:
click.echo(f"{stage} already in database.")
continue
stage_data = procyclingstats.get_stage(race, year, number)
# HTTP error
if isinstance(stage_data, int):
if verbose:
click.echo(f"{stage} could not be retrieved. Status code: {stage_data}")
continue
# Is TTT
if not stage_data:
if verbose:
click.echo(f"{stage} is a team time trial. Skipping...")
continue
inserted_id = dbops.insert_stage(stage_data)
if verbose:
click.echo(f"{stage} inserted with ID: {inserted_id}")
# Scrape riders
else:
stages = dbops.fetch_stages(project="result")
riders = [rider for stage in stages for rider in stage["result"]]
for rider in riders:
if dbops.check_exists(rider):
if verbose:
click.echo(f"{rider} already in database.")
continue
rider_data = procyclingstats.get_rider(rider)
# HTTP error
if isinstance(rider_data, int):
if verbose:
click.echo(f"{rider} could not be retrieved. Status code: {rider_data}")
continue
inserted_id = dbops.insert_rider(rider_data)
if verbose:
click.echo(f"{rider} inserted with ID: {inserted_id}")
if __name__ == "__main__":
scrape()
|
[
"src.dbops.check_exists",
"src.procyclingstats.get_rider",
"src.procyclingstats.get_stage",
"click.argument",
"src.dbops.insert_rider",
"src.dbops.insert_stage",
"conf.RACES.items",
"click.option",
"click.UsageError",
"click.command",
"click.echo",
"src.dbops.fetch_stages",
"itertools.product"
] |
[((154, 169), 'click.command', 'click.command', ([], {}), '()\n', (167, 169), False, 'import click\n'), ((171, 194), 'click.argument', 'click.argument', (['"""items"""'], {}), "('items')\n", (185, 194), False, 'import click\n'), ((196, 241), 'click.option', 'click.option', (['"""-v"""', '"""--verbose"""'], {'is_flag': '(True)'}), "('-v', '--verbose', is_flag=True)\n", (208, 241), False, 'import click\n'), ((425, 475), 'click.UsageError', 'click.UsageError', (['"""ITEMS must be STAGES or RIDERS"""'], {}), "('ITEMS must be STAGES or RIDERS')\n", (441, 475), False, 'import click\n'), ((551, 564), 'conf.RACES.items', 'RACES.items', ([], {}), '()\n', (562, 564), False, 'from conf import RACES\n'), ((1738, 1774), 'src.dbops.fetch_stages', 'dbops.fetch_stages', ([], {'project': '"""result"""'}), "(project='result')\n", (1756, 1774), False, 'from src import dbops, procyclingstats\n'), ((757, 789), 'itertools.product', 'product', (['iter_years', 'iter_number'], {}), '(iter_years, iter_number)\n', (764, 789), False, 'from itertools import product\n'), ((1894, 1919), 'src.dbops.check_exists', 'dbops.check_exists', (['rider'], {}), '(rider)\n', (1912, 1919), False, 'from src import dbops, procyclingstats\n'), ((2064, 2096), 'src.procyclingstats.get_rider', 'procyclingstats.get_rider', (['rider'], {}), '(rider)\n', (2089, 2096), False, 'from src import dbops, procyclingstats\n'), ((2340, 2370), 'src.dbops.insert_rider', 'dbops.insert_rider', (['rider_data'], {}), '(rider_data)\n', (2358, 2370), False, 'from src import dbops, procyclingstats\n'), ((855, 880), 'src.dbops.check_exists', 'dbops.check_exists', (['stage'], {}), '(stage)\n', (873, 880), False, 'from src import dbops, procyclingstats\n'), ((1041, 1086), 'src.procyclingstats.get_stage', 'procyclingstats.get_stage', (['race', 'year', 'number'], {}), '(race, year, number)\n', (1066, 1086), False, 'from src import dbops, procyclingstats\n'), ((1557, 1587), 'src.dbops.insert_stage', 'dbops.insert_stage', (['stage_data'], {}), '(stage_data)\n', (1575, 1587), False, 'from src import dbops, procyclingstats\n'), ((2411, 2465), 'click.echo', 'click.echo', (['f"""{rider} inserted with ID: {inserted_id}"""'], {}), "(f'{rider} inserted with ID: {inserted_id}')\n", (2421, 2465), False, 'import click\n'), ((1636, 1690), 'click.echo', 'click.echo', (['f"""{stage} inserted with ID: {inserted_id}"""'], {}), "(f'{stage} inserted with ID: {inserted_id}')\n", (1646, 1690), False, 'import click\n'), ((1969, 2012), 'click.echo', 'click.echo', (['f"""{rider} already in database."""'], {}), "(f'{rider} already in database.')\n", (1979, 2012), False, 'import click\n'), ((2215, 2287), 'click.echo', 'click.echo', (['f"""{rider} could not be retrieved. Status code: {rider_data}"""'], {}), "(f'{rider} could not be retrieved. Status code: {rider_data}')\n", (2225, 2287), False, 'import click\n'), ((938, 981), 'click.echo', 'click.echo', (['f"""{stage} already in database."""'], {}), "(f'{stage} already in database.')\n", (948, 981), False, 'import click\n'), ((1221, 1293), 'click.echo', 'click.echo', (['f"""{stage} could not be retrieved. Status code: {stage_data}"""'], {}), "(f'{stage} could not be retrieved. Status code: {stage_data}')\n", (1231, 1293), False, 'import click\n'), ((1440, 1496), 'click.echo', 'click.echo', (['f"""{stage} is a team time trial. Skipping..."""'], {}), "(f'{stage} is a team time trial. Skipping...')\n", (1450, 1496), False, 'import click\n')]
|
from __future__ import annotations
from dataclasses import dataclass
from dataclasses import field
from ..ec2common.ec2exceptions import *
@dataclass
class NetworkInterfaceAttributes:
Association: object = None
Attachment: object = None
Description: str = None
Groups: object = field(default_factory=list)
Ipv6Addresses: object = field(default_factory=list)
MacAddress: str = None
NetworkInterfaceId: str = None
OwnerId: str = None
PrivateDnsName: str = None
PrivateIpAddress: str = None
PrivateIpAddresses: object = field(default_factory=list)
SourceDestCheck: bool = None
Status: str = None
SubnetId: str = None
VpcId: str = None
InterfaceType: str = None
Ipv4Prefixes: object = field(default_factory=list)
Ipv6Prefixes: object = field(default_factory=list)
class NetworkInterfaceManager:
def __init__(self):
pass
# def dict_to_networkinterface(self, dict):
# new_networkinterface = NetworkInterface()
# new_networkinterface.attributes = NetworkInterfaceAttributes(**dict)
# return new_networkinterface
# def dict_list_to_networkinterface_list(self, dict_list):
# networkinterface_list = []
# for dict_item in dict_list:
# networkinterface_list.append(self.dict_to_networkinterface(dict_item))
# return networkinterface_list
class NetworkInterface:
def __init__(self):
self.attributes = NetworkInterfaceAttributes()
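# Minimal usage sketch (keyword arguments mirror the EC2
# DescribeNetworkInterfaces response shape; the values below are illustrative
# only, cf. the commented-out dict_to_networkinterface helper above):
#   nic = NetworkInterface()
#   nic.attributes = NetworkInterfaceAttributes(NetworkInterfaceId="eni-0abc", Status="in-use")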
|
[
"dataclasses.field"
] |
[((298, 325), 'dataclasses.field', 'field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (303, 325), False, 'from dataclasses import field\n'), ((354, 381), 'dataclasses.field', 'field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (359, 381), False, 'from dataclasses import field\n'), ((565, 592), 'dataclasses.field', 'field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (570, 592), False, 'from dataclasses import field\n'), ((753, 780), 'dataclasses.field', 'field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (758, 780), False, 'from dataclasses import field\n'), ((808, 835), 'dataclasses.field', 'field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (813, 835), False, 'from dataclasses import field\n')]
|
from bs4 import BeautifulSoup
import requests
import time
import pymysql.cursors
import unittest
from validate_email import validate_email
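# Note: validate_email with check_mx=True performs live DNS/SMTP lookups, so
# this "unit" test requires network access and can be slow or flaky offline.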
class UnitTestsDataMinerYellowPagesUsa(unittest.TestCase):
def test_web_scraper_email_usa(self):
activites = [
{'id': '1',
'url': 'https://www.yellowpages.com/search?search_terms=Temporary+Employment+Agencies&geo_location_terms='},
{'id': '2', 'url': 'https://www.yellowpages.com/search?search_terms=real+estate&geo_location_terms='},
{'id': '3', 'url': 'https://www.yellowpages.com/search?search_terms=Recruiter&geo_location_terms='},
{'id': '4', 'url': 'https://www.yellowpages.com/search?search_terms=software&geo_location_terms='},
{'id': '5', 'url': 'https://www.yellowpages.com/search?search_terms=hotel&geo_location_terms='},
{'id': '6',
'url': 'https://www.yellowpages.com/search?search_terms=social+landlord&geo_location_terms='},
{'id': '7', 'url': 'https://www.yellowpages.com/search?search_terms=cleaning&geo_location_terms='},
{'id': '8', 'url': 'https://www.yellowpages.com/search?search_terms=Charities&geo_location_terms='},
{'id': '9', 'url': 'https://www.yellowpages.com/search?search_terms=financial&geo_location_terms='},
{'id': '10', 'url': 'https://www.yellowpages.com/search?search_terms=restaurant&geo_location_terms='},
{'id': '11', 'url': 'https://www.yellowpages.com/search?search_terms=building&geo_location_terms='},
{'id': '12', 'url': 'https://www.yellowpages.com/search?search_terms=hairdresser&geo_location_terms='},
{'id': '13', 'url': 'https://www.yellowpages.com/search?search_terms=florist&geo_location_terms='},
{'id': '14', 'url': 'https://www.yellowpages.com/search?search_terms=locksmith&geo_location_terms='},
{'id': '15', 'url': 'https://www.yellowpages.com/search?search_terms=bakery&geo_location_terms='},
{'id': '16', 'url': 'https://www.yellowpages.com/search?search_terms=insurance&geo_location_terms='},
{'id': '17', 'url': 'https://www.yellowpages.com/search?search_terms=Pharmacies&geo_location_terms='},
{'id': '18', 'url': 'https://www.yellowpages.com/search?search_terms=movers&geo_location_terms='},
{'id': '19', 'url': 'https://www.yellowpages.com/search?search_terms=electricity&geo_location_terms='},
{'id': '20', 'url': 'https://www.yellowpages.com/search?search_terms=plumbing&geo_location_terms='},
{'id': '21', 'url': 'https://www.yellowpages.com/search?search_terms=security&geo_location_terms='},
{'id': '22', 'url': 'https://www.yellowpages.com/search?search_terms=attorney&geo_location_terms='},
{'id': '23', 'url': 'https://www.yellowpages.com/search?search_terms=bank&geo_location_terms='},
{'id': '24', 'url': 'https://www.yellowpages.com/search?search_terms=mechanic&geo_location_terms='},
{'id': '25', 'url': 'https://www.yellowpages.com/search?search_terms=dentist&geo_location_terms='},
{'id': '26', 'url': 'https://www.yellowpages.com/search?search_terms=doctor&geo_location_terms='},
{'id': '27', 'url': 'https://www.yellowpages.com/search?search_terms=accountant&geo_location_terms='},
{'id': '28',
'url': 'https://www.yellowpages.com/search?search_terms=Grocery+Stores&geo_location_terms='},
{'id': '29', 'url': 'https://www.yellowpages.com/search?search_terms=notary&geo_location_terms='},
{'id': '30', 'url': 'https://www.yellowpages.com/search?search_terms=jewellery&geo_location_terms='},
{'id': '31', 'url': 'https://www.yellowpages.com/search?search_terms=tailors&geo_location_terms='},
{'id': '32', 'url': 'https://www.yellowpages.com/search?search_terms=butcher&geo_location_terms='},
{'id': '33', 'url': 'https://www.yellowpages.com/search?search_terms=library&geo_location_terms='},
{'id': '34', 'url': 'https://www.yellowpages.com/search?search_terms=Architects&geo_location_terms='}
]
capitales_du_monde = [
{'id': '2', 'nom': 'New+York%2C+NY'},
# {'id': '4', 'nom': 'Chicago%2C+IL'},
# {'id': '5', 'nom': 'Atlanta%2C+GA'},
# {'id': '6', 'nom': 'Houston%2C+TX'},
# {'id': '7', 'nom': 'Los+Angeles%2C+CA'},
# {'id': '9', 'nom': 'Albany%2C+NY'},
# {'id': '36', 'nom': 'Montgomery%2C+AL'},
# {'id': '37', 'nom': 'Birmingham%2C+AL'},
# {'id': '38', 'nom': 'Juneau%2C+AK'},
# {'id': '39', 'nom': 'Anchorage%2C+AK'},
# {'id': '40', 'nom': 'Phoenix%2C+AZ'},
# {'id': '41', 'nom': 'Little+Rock%2C+AR'},
# {'id': '42', 'nom': 'Sacramento%2C+CA'},
# {'id': '43', 'nom': 'Denver%2C+CO'},
# {'id': '44', 'nom': 'Hartford%2C+CT'},
# {'id': '45', 'nom': 'Bridgeport%2C+CT'},
# {'id': '46', 'nom': 'Dover%2C+DE'},
# {'id': '47', 'nom': 'Wilmington%2C+DE'},
# {'id': '48', 'nom': 'Tallahassee%2C+FL'},
# {'id': '49', 'nom': 'Jacksonville%2C+FL'},
# {'id': '50', 'nom': 'Honolulu%2C+HI'},
# {'id': '51', 'nom': 'Boise%2C+ID'},
# {'id': '52', 'nom': 'Springfield%2C+IL'},
# {'id': '53', 'nom': 'Indianapolis%2C+IN'},
# {'id': '54', 'nom': 'Des+Moines%2C+IA'},
# {'id': '55', 'nom': 'Topeka%2C+KS'},
# {'id': '56', 'nom': 'Wichita%2C+KS'},
# {'id': '57', 'nom': 'Frankfort%2C+KY'},
# {'id': '58', 'nom': 'Louisville%2C+KY'},
# {'id': '59', 'nom': 'Baton+Rouge%2C+LA'},
# {'id': '60', 'nom': 'New+Orleans%2C+LA'},
# {'id': '61', 'nom': 'Augusta%2C+ME'},
# {'id': '62', 'nom': 'Portland%2C+ME'},
# {'id': '63', 'nom': 'Annapolis%2C+MD'},
# {'id': '64', 'nom': 'Baltimore%2C+MD'},
# {'id': '65', 'nom': 'Boston%2C+MA'},
# {'id': '66', 'nom': 'Lansing%2C+MI'},
# {'id': '67', 'nom': 'Detroit%2C+MI'},
# {'id': '68', 'nom': 'Saint+Paul%2C+MN'},
# {'id': '69', 'nom': 'Minneapolis%2C+MN'},
# {'id': '70', 'nom': 'Jackson%2C+MS'},
# {'id': '71', 'nom': 'Jefferson+City%2C+MO'},
# {'id': '72', 'nom': 'Kansas+City%2C+MO'},
# {'id': '73', 'nom': 'Helena%2C+MT'},
# {'id': '74', 'nom': 'Billings%2C+MT'},
# {'id': '75', 'nom': 'Lincoln%2C+NE'},
# {'id': '76', 'nom': 'Omaha%2C+NE'},
# {'id': '77', 'nom': 'Carson+City%2C+NV'},
# {'id': '78', 'nom': 'Las+Vegas%2C+NV'},
# {'id': '79', 'nom': 'Concord%2C+NH'},
# {'id': '80', 'nom': 'Manchester%2C+NH'}
# {'id': '81', 'nom': 'Trenton%2C+NJ'},
# {'id': '82', 'nom': 'Newark%2C+NJ'},
# {'id': '83', 'nom': 'Santa+Fe%2C+NM'},
# {'id': '84', 'nom': 'Albuquerque%2C+NM'},
# {'id': '85', 'nom': 'Raleigh%2C+NC'},
# {'id': '86', 'nom': 'Charlotte%2C+NC'},
# {'id': '87', 'nom': 'Bismarck%2C+ND'},
# {'id': '88', 'nom': 'Columbus%2C+OH'},
# {'id': '89', 'nom': 'Oklahoma+City%2C+OK'},
# {'id': '90', 'nom': 'Salem%2C+OR'},
# {'id': '91', 'nom': 'Portland%2C+OR'},
# {'id': '92', 'nom': 'Harrisburg%2C+PA'},
# {'id': '93', 'nom': 'Philadelphia%2C+PA'},
# {'id': '94', 'nom': 'Providence%2C+RI'},
# {'id': '95', 'nom': 'Columbia%2C+SC'},
# {'id': '96', 'nom': 'Pierre%2C+SD'},
# {'id': '97', 'nom': 'Sioux+Falls%2C+SD'},
# {'id': '98', 'nom': 'Nashville%2C+TN'},
# {'id': '99', 'nom': 'Memphis%2C+TN'},
# {'id': '100', 'nom': 'Austin%2C+TX'},
# {'id': '101', 'nom': 'Salt+Lake+City%2C+UT'},
# {'id': '102', 'nom': 'Montpelier%2C+VT'},
# {'id': '103', 'nom': 'Burlington%2C+VT'},
# {'id': '104', 'nom': 'Richmond%2C+VA'},
# {'id': '105', 'nom': 'Olympia%2C+WA'},
# {'id': '106', 'nom': 'Seattle%2C+WA'},
# {'id': '107', 'nom': 'Charleston%2C+WV'},
# {'id': '108', 'nom': 'Madison%2C+WI'},
# {'id': '109', 'nom': 'Milwaukee%2C+WI'},
# {'id': '110', 'nom': 'Cheyenne%2C+WY'}
]
try:
for capitale_du_monde in capitales_du_monde:
for activite in activites:
i_1 = 0
i = 1
var = 1
while var == 1 and i < 102:
try:
url = activite.get('url') + capitale_du_monde.get('nom') + "&page=" + str(i)
# Request the content of a page from the url
html = requests.get(url)
time.sleep(3)
# Parse the content of html_doc
soup = BeautifulSoup(html.content, 'html.parser')
print(url)
if soup.find("a", {"class", "business-name"}) is None:
print('sorry there is nothing')
break
else:
try:
for link in soup.find_all("a", {"class": "business-name"}):
i_1 += 1
# Request the content of a page from the url
url_page = "https://www.yellowpages.com" + link.get('href')
html_doc = requests.get(url_page)
time.sleep(3)
# Parse the content of html_doc
soup_link = BeautifulSoup(html_doc.content, 'html.parser')
if soup_link.find("a", {"class": "email-business"}) is not None:
email_business = soup_link.select(".email-business")[0].get('href')[7:]
suffixes = [
"info@"
]
for suffix in suffixes:
email = str(suffix + email_business.split("@")[1])
try:
is_valid = validate_email(
email_address=email,
check_regex=True,
check_mx=True,
from_address='',
helo_host='',
smtp_timeout=10,
dns_timeout=10,
use_blacklist=True
)
if is_valid:
try:
# Connect to the database
connection = pymysql.connect(
host='localhost',
port=3306,
user='',
password='',
db='contacts_professionnels',
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor
)
with connection.cursor() as cursor:
try:
sql = "INSERT INTO `emails` (" \
"`id_activite`, " \
"`id_capitale_du_monde`, " \
"`email`) VALUE (%s, %s, %s)"
cursor.execute(sql, (
activite.get('id'),
capitale_du_monde.get('id'),
email))
connection.commit()
print(str(i_1) + " The record is stored : "
+ str(email))
connection.close()
except Exception as e:
print(str(i_1) + " The record already exists : "
+ str(email) + " " + str(e))
connection.close()
except Exception as e:
print("Problem connection MySQL : " + str(e))
else:
print(
str(i_1) + " The email : " + email + " doesn't exist.")
except Exception as e:
print(str(
i_1) + " An error with the email : " + email + " " + str(e))
else:
print(str(i_1) + " no email business")
except Exception as e:
print("There is an error connection at url_page : " + str(e))
except Exception as e:
print("There is an error connection at url : " + str(e))
i += 1
finally:
print('done')
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"validate_email.validate_email",
"time.sleep",
"requests.get",
"bs4.BeautifulSoup"
] |
[((15377, 15392), 'unittest.main', 'unittest.main', ([], {}), '()\n', (15390, 15392), False, 'import unittest\n'), ((9068, 9085), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (9080, 9085), False, 'import requests\n'), ((9115, 9128), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (9125, 9128), False, 'import time\n'), ((9225, 9267), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html.content', '"""html.parser"""'], {}), "(html.content, 'html.parser')\n", (9238, 9267), False, 'from bs4 import BeautifulSoup\n'), ((9948, 9970), 'requests.get', 'requests.get', (['url_page'], {}), '(url_page)\n', (9960, 9970), False, 'import requests\n'), ((10012, 10025), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (10022, 10025), False, 'import time\n'), ((10151, 10197), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html_doc.content', '"""html.parser"""'], {}), "(html_doc.content, 'html.parser')\n", (10164, 10197), False, 'from bs4 import BeautifulSoup\n'), ((10865, 11025), 'validate_email.validate_email', 'validate_email', ([], {'email_address': 'email', 'check_regex': '(True)', 'check_mx': '(True)', 'from_address': '""""""', 'helo_host': '""""""', 'smtp_timeout': '(10)', 'dns_timeout': '(10)', 'use_blacklist': '(True)'}), "(email_address=email, check_regex=True, check_mx=True,\n from_address='', helo_host='', smtp_timeout=10, dns_timeout=10,\n use_blacklist=True)\n", (10879, 11025), False, 'from validate_email import validate_email\n')]
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import threading
from grpc_testing import _common
from grpc_testing._channel import _rpc_state
class TimeoutException(Exception):
pass
class State(_common.ChannelHandler):
def __init__(self):
self._condition = threading.Condition()
self._rpc_states = collections.defaultdict(list)
def invoke_rpc(self, method_full_rpc_name, invocation_metadata, requests,
requests_closed, timeout):
rpc_state = _rpc_state.State(invocation_metadata, requests,
requests_closed)
with self._condition:
self._rpc_states[method_full_rpc_name].append(rpc_state)
self._condition.notify_all()
return rpc_state
def take_rpc_state(self, method_descriptor, timeout):
method_full_rpc_name = '/{}/{}'.format(
method_descriptor.containing_service.full_name,
method_descriptor.name)
with self._condition:
while True:
method_rpc_states = self._rpc_states[method_full_rpc_name]
if method_rpc_states:
return method_rpc_states.pop(0)
else:
if not self._condition.wait(timeout=timeout):
raise TimeoutException("Timeout while waiting for rpc.")
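# Intended pairing (sketch, names illustrative): a test thread calls
# state.invoke_rpc(...) to enqueue an RPC, while the code under test calls
# state.take_rpc_state(method_descriptor, timeout=1.0), which blocks on the
# condition variable until a matching RPC arrives or the timeout elapses.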
|
[
"collections.defaultdict",
"threading.Condition",
"grpc_testing._channel._rpc_state.State"
] |
[((829, 850), 'threading.Condition', 'threading.Condition', ([], {}), '()\n', (848, 850), False, 'import threading\n'), ((878, 907), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (901, 907), False, 'import collections\n'), ((1053, 1117), 'grpc_testing._channel._rpc_state.State', '_rpc_state.State', (['invocation_metadata', 'requests', 'requests_closed'], {}), '(invocation_metadata, requests, requests_closed)\n', (1069, 1117), False, 'from grpc_testing._channel import _rpc_state\n')]
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: nxos_gir
version_added: "2.2"
short_description: Trigger a graceful removal or insertion (GIR) of the switch.
description:
- Trigger a graceful removal or insertion (GIR) of the switch.
extends_documentation_fragment: nxos
author:
- <NAME> (@GGabriele)
notes:
- C(state) has effect only in combination with
C(system_mode_maintenance_timeout) or
C(system_mode_maintenance_on_reload_reset_reason).
- Using C(system_mode_maintenance) and
C(system_mode_maintenance_dont_generate_profile) would make the module
fail, but the system mode will be triggered anyway.
options:
system_mode_maintenance:
description:
- When C(system_mode_maintenance=true) it puts all enabled
protocols in maintenance mode (using the isolate command).
When C(system_mode_maintenance=false) it puts all enabled
protocols in normal mode (using the no isolate command).
required: false
default: null
choices: ['true','false']
system_mode_maintenance_dont_generate_profile:
description:
- When C(system_mode_maintenance_dont_generate_profile=true) it
prevents the dynamic searching of enabled protocols and executes
commands configured in a maintenance-mode profile.
Use this option if you want the system to use a maintenance-mode
profile that you have created.
When C(system_mode_maintenance_dont_generate_profile=false) it
prevents the dynamic searching of enabled protocols and executes
commands configured in a normal-mode profile. Use this option if
you want the system to use a normal-mode profile that
you have created.
required: false
default: null
choices: ['true','false']
system_mode_maintenance_timeout:
description:
- Keeps the switch in maintenance mode for a specified
number of minutes. Range is 5-65535.
required: false
default: null
system_mode_maintenance_shutdown:
description:
- Shuts down all protocols, vPC domains, and interfaces except
the management interface (using the shutdown command).
This option is disruptive while C(system_mode_maintenance)
(which uses the isolate command) is not.
required: false
default: null
choices: ['true','false']
system_mode_maintenance_on_reload_reset_reason:
description:
- Boots the switch into maintenance mode automatically in the
event of a specified system crash.
required: false
default: null
choices: ['hw_error','svc_failure','kern_failure','wdog_timeout',
'fatal_error','lc_failure','match_any','manual_reload']
state:
description:
- Specify desired state of the resource.
required: true
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
# Trigger system maintenance mode
- nxos_gir:
system_mode_maintenance: true
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ <PASSWORD> }}"
# Trigger system normal mode
- nxos_gir:
system_mode_maintenance: false
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ <PASSWORD> }}"
# Configure on-reload reset-reason for maintenance mode
- nxos_gir:
system_mode_maintenance_on_reload_reset_reason: manual_reload
state: present
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ <PASSWORD> }}"
# Add on-reload reset-reason for maintenance mode
- nxos_gir:
system_mode_maintenance_on_reload_reset_reason: hw_error
state: present
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ <PASSWORD> }}"
# Remove on-reload reset-reason for maintenance mode
- nxos_gir:
system_mode_maintenance_on_reload_reset_reason: manual_reload
state: absent
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ <PASSWORD> }}"
# Set timeout for maintenance mode
- nxos_gir:
system_mode_maintenance_timeout: 30
state: present
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ <PASSWORD> }}"
# Remove timeout for maintenance mode
- nxos_gir:
system_mode_maintenance_timeout: 30
state: absent
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ <PASSWORD> }}"
'''
RETURN = '''
final_system_mode:
description: describe the last system mode
returned: verbose mode
type: string
sample: normal
updates:
description: commands sent to the device
returned: verbose mode
type: list
sample: ["terminal dont-ask", "system mode maintenance timeout 10"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
# COMMON CODE FOR MIGRATION
import re
from ansible.module_utils.basic import get_exception
from ansible.module_utils.netcfg import NetworkConfig, ConfigLine
from ansible.module_utils.shell import ShellError
try:
from ansible.module_utils.nxos import get_module
except ImportError:
from ansible.module_utils.nxos import NetworkModule, NetworkError
def to_list(val):
if isinstance(val, (list, tuple)):
return list(val)
elif val is not None:
return [val]
else:
return list()
class CustomNetworkConfig(NetworkConfig):
def expand_section(self, configobj, S=None):
if S is None:
S = list()
S.append(configobj)
for child in configobj.children:
if child in S:
continue
self.expand_section(child, S)
return S
def get_object(self, path):
for item in self.items:
if item.text == path[-1]:
parents = [p.text for p in item.parents]
if parents == path[:-1]:
return item
def to_block(self, section):
return '\n'.join([item.raw for item in section])
def get_section(self, path):
try:
section = self.get_section_objects(path)
return self.to_block(section)
except ValueError:
return list()
def get_section_objects(self, path):
if not isinstance(path, list):
path = [path]
obj = self.get_object(path)
if not obj:
raise ValueError('path does not exist in config')
return self.expand_section(obj)
def add(self, lines, parents=None):
"""Adds one or lines of configuration
"""
ancestors = list()
offset = 0
obj = None
## global config command
if not parents:
for line in to_list(lines):
item = ConfigLine(line)
item.raw = line
if item not in self.items:
self.items.append(item)
else:
for index, p in enumerate(parents):
try:
i = index + 1
obj = self.get_section_objects(parents[:i])[0]
ancestors.append(obj)
except ValueError:
# add parent to config
offset = index * self.indent
obj = ConfigLine(p)
obj.raw = p.rjust(len(p) + offset)
if ancestors:
obj.parents = list(ancestors)
ancestors[-1].children.append(obj)
self.items.append(obj)
ancestors.append(obj)
# add child objects
for line in to_list(lines):
# check if child already exists
for child in ancestors[-1].children:
if child.text == line:
break
else:
offset = len(parents) * self.indent
item = ConfigLine(line)
item.raw = line.rjust(len(line) + offset)
item.parents = ancestors
ancestors[-1].children.append(item)
self.items.append(item)
def get_network_module(**kwargs):
try:
return get_module(**kwargs)
except NameError:
return NetworkModule(**kwargs)
def get_config(module, include_defaults=False):
config = module.params['config']
if not config:
try:
config = module.get_config()
except AttributeError:
defaults = module.params['include_defaults']
config = module.config.get_config(include_defaults=defaults)
return CustomNetworkConfig(indent=2, contents=config)
def load_config(module, candidate):
config = get_config(module)
commands = candidate.difference(config)
commands = [str(c).strip() for c in commands]
save_config = module.params['save']
result = dict(changed=False)
if commands:
if not module.check_mode:
try:
module.configure(commands)
except AttributeError:
module.config(commands)
if save_config:
try:
module.config.save_config()
except AttributeError:
module.execute(['copy running-config startup-config'])
result['changed'] = True
result['updates'] = commands
return result
# END OF COMMON CODE
def execute_show(cmds, module, command_type=None):
command_type_map = {
'cli_show': 'json',
'cli_show_ascii': 'text'
}
try:
if command_type:
response = module.execute(cmds, command_type=command_type)
else:
response = module.execute(cmds)
except ShellError:
clie = get_exception()
module.fail_json(msg='Error sending {0}'.format(cmds),
error=str(clie))
except AttributeError:
try:
if command_type:
command_type = command_type_map.get(command_type)
module.cli.add_commands(cmds, output=command_type)
response = module.cli.run_commands()
else:
module.cli.add_commands(cmds, output=command_type)
response = module.cli.run_commands()
except NetworkError:
clie = get_exception()
module.fail_json(msg='Error sending {0}'.format(cmds),
error=str(clie))
return response
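# The try/except AttributeError above falls back from the legacy module.execute
# interface to the newer module.cli API, so the same code runs on both.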
def execute_show_command(command, module, command_type='cli_show_ascii'):
cmds = [command]
if module.params['transport'] == 'cli':
body = execute_show(cmds, module)
elif module.params['transport'] == 'nxapi':
body = execute_show(cmds, module, command_type=command_type)
return body
def execute_config_command(commands, module):
try:
module.configure(commands)
except ShellError:
clie = get_exception()
module.fail_json(msg='Error sending CLI commands',
error=str(clie), commands=commands)
except AttributeError:
try:
module.config.load_config(commands)
except NetworkError:
clie = get_exception()
module.fail_json(msg='Error sending CLI commands',
error=str(clie), commands=commands)
def get_system_mode(module):
command = 'show system mode'
body = execute_show_command(command, module)[0]
if 'normal' in body.lower():
mode = 'normal'
else:
mode = 'maintenance'
return mode
def get_maintenance_timeout(module):
command = 'show maintenance timeout'
body = execute_show_command(command, module)[0]
timeout = body.split()[4]
return timeout
def get_reset_reasons(module):
command = 'show maintenance on-reload reset-reasons'
body = execute_show_command(command, module)[0]
return body
def get_commands(module, state, mode):
commands = list()
system_mode = ''
if module.params['system_mode_maintenance'] is True and mode == 'normal':
commands.append('system mode maintenance')
elif (module.params['system_mode_maintenance'] is False and
mode == 'maintenance'):
commands.append('no system mode maintenance')
elif (module.params[
'system_mode_maintenance_dont_generate_profile'] is True and
mode == 'normal'):
commands.append('system mode maintenance dont-generate-profile')
elif (module.params[
'system_mode_maintenance_dont_generate_profile'] is False and
mode == 'maintenance'):
commands.append('no system mode maintenance dont-generate-profile')
elif module.params['system_mode_maintenance_timeout']:
timeout = get_maintenance_timeout(module)
if (state == 'present' and
timeout != module.params['system_mode_maintenance_timeout']):
commands.append('system mode maintenance timeout {0}'.format(
module.params['system_mode_maintenance_timeout']))
elif (state == 'absent' and
timeout == module.params['system_mode_maintenance_timeout']):
commands.append('no system mode maintenance timeout {0}'.format(
module.params['system_mode_maintenance_timeout']))
elif module.params['system_mode_maintenance_shutdown'] is True:
commands.append('system mode maintenance shutdown')
elif module.params['system_mode_maintenance_on_reload_reset_reason']:
reset_reasons = get_reset_reasons(module)
if (state == 'present' and
module.params[
'system_mode_maintenance_on_reload_reset_reason'].lower() not
in reset_reasons.lower()):
commands.append('system mode maintenance on-reload '
'reset-reason {0}'.format(
module.params[
'system_mode_maintenance_on_reload_reset_reason']))
elif (state == 'absent' and
module.params[
'system_mode_maintenance_on_reload_reset_reason'].lower() in
reset_reasons.lower()):
commands.append('no system mode maintenance on-reload '
'reset-reason {0}'.format(
module.params[
'system_mode_maintenance_on_reload_reset_reason']))
if commands:
commands.insert(0, 'terminal dont-ask')
return commands
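# Note: the if/elif chain above acts on at most one maintenance option per run,
# mirroring the mutually_exclusive constraint declared in main() below.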
def main():
argument_spec = dict(
system_mode_maintenance=dict(required=False, type='bool'),
system_mode_maintenance_dont_generate_profile=dict(required=False,
type='bool'),
system_mode_maintenance_timeout=dict(required=False, type='str'),
system_mode_maintenance_shutdown=dict(required=False, type='bool'),
system_mode_maintenance_on_reload_reset_reason=dict(required=False,
choices=['hw_error','svc_failure','kern_failure',
'wdog_timeout','fatal_error','lc_failure',
'match_any','manual_reload']),
state=dict(choices=['absent', 'present', 'default'],
default='present', required=False)
)
module = get_network_module(argument_spec=argument_spec,
mutually_exclusive=[[
'system_mode_maintenance',
'system_mode_maintenance_dont_generate_profile',
'system_mode_maintenance_timeout',
'system_mode_maintenance_shutdown',
'system_mode_maintenance_on_reload_reset_reason'
]],
required_one_of=[[
'system_mode_maintenance',
'system_mode_maintenance_dont_generate_profile',
'system_mode_maintenance_timeout',
'system_mode_maintenance_shutdown',
'system_mode_maintenance_on_reload_reset_reason'
]],
supports_check_mode=True)
state = module.params['state']
mode = get_system_mode(module)
commands = get_commands(module, state, mode)
changed = False
if commands:
if module.check_mode:
module.exit_json(changed=True, commands=commands)
else:
execute_config_command(commands, module)
changed = True
result = {}
result['connected'] = module.connected
result['changed'] = changed
if module._verbosity > 0:
final_system_mode = get_system_mode(module)
result['final_system_mode'] = final_system_mode
result['updates'] = commands
module.exit_json(**result)
if __name__ == '__main__':
main()
|
[
"ansible.module_utils.nxos.get_module",
"ansible.module_utils.nxos.NetworkModule",
"ansible.module_utils.netcfg.ConfigLine",
"ansible.module_utils.basic.get_exception"
] |
[((9075, 9095), 'ansible.module_utils.nxos.get_module', 'get_module', ([], {}), '(**kwargs)\n', (9085, 9095), False, 'from ansible.module_utils.nxos import get_module\n'), ((9133, 9156), 'ansible.module_utils.nxos.NetworkModule', 'NetworkModule', ([], {}), '(**kwargs)\n', (9146, 9156), False, 'from ansible.module_utils.nxos import NetworkModule, NetworkError\n'), ((10631, 10646), 'ansible.module_utils.basic.get_exception', 'get_exception', ([], {}), '()\n', (10644, 10646), False, 'from ansible.module_utils.basic import get_exception\n'), ((11789, 11804), 'ansible.module_utils.basic.get_exception', 'get_exception', ([], {}), '()\n', (11802, 11804), False, 'from ansible.module_utils.basic import get_exception\n'), ((7621, 7637), 'ansible.module_utils.netcfg.ConfigLine', 'ConfigLine', (['line'], {}), '(line)\n', (7631, 7637), False, 'from ansible.module_utils.netcfg import NetworkConfig, ConfigLine\n'), ((8791, 8807), 'ansible.module_utils.netcfg.ConfigLine', 'ConfigLine', (['line'], {}), '(line)\n', (8801, 8807), False, 'from ansible.module_utils.netcfg import NetworkConfig, ConfigLine\n'), ((11193, 11208), 'ansible.module_utils.basic.get_exception', 'get_exception', ([], {}), '()\n', (11206, 11208), False, 'from ansible.module_utils.basic import get_exception\n'), ((12061, 12076), 'ansible.module_utils.basic.get_exception', 'get_exception', ([], {}), '()\n', (12074, 12076), False, 'from ansible.module_utils.basic import get_exception\n'), ((8138, 8151), 'ansible.module_utils.netcfg.ConfigLine', 'ConfigLine', (['p'], {}), '(p)\n', (8148, 8151), False, 'from ansible.module_utils.netcfg import NetworkConfig, ConfigLine\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import template
from .likert_star_tools import render_stars
from django.utils.safestring import mark_safe
register = template.Library()
# Font-awesome stars ver 3
star_set_3 = {
'star': "<i class='icon-star likert-star'></i>",
'unlit': "<i class='icon-star-empty likert-star'></i>",
'noanswer': "<i class='icon-ban-circle likert-star'></i>"
}
# Font-awesome stars ver 4
star_set_4 = {
'star': "<i class='fa fa-star likert-star'></i>",
'unlit': "<i class='fa fa-star-o likert-star'></i>",
'noanswer': "<i class='fa fa-ban likert-star'></i>"
}
def fa_stars3(num, max_stars=5):
"""
Stars for Font Awesome 3
If num is not None, the returned string will contain num solid stars
followed by max_stars - num empty stars
"""
return mark_safe(render_stars(num, max_stars, star_set_3))
register.filter('fa_stars3', fa_stars3)
def fa_stars4(num, max_stars=5):
"""
Stars for Font Awesome 4
If num is not None, the returned string will contain num solid stars
followed by max_stars - num empty stars
"""
return mark_safe(render_stars(num, max_stars, star_set_4))
register.filter('fa_stars4', fa_stars4)
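# Template usage sketch (assuming this module is registered as a template tag
# library named likert_stars):
#   {% load likert_stars %}
#   {{ rating|fa_stars4 }}   or   {{ rating|fa_stars3 }} for Font Awesome 3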
|
[
"django.template.Library"
] |
[((197, 215), 'django.template.Library', 'template.Library', ([], {}), '()\n', (213, 215), False, 'from django import template\n')]
|
import sys
sys.path.append("PerceptualSimilarity\\")
import os
import utils
import torch
import numpy as np
from torch import nn
import torchgeometry
from kornia import color
import torch.nn.functional as F
from torchvision import transforms
class Dense(nn.Module):
def __init__(self, in_features, out_features, activation='relu', kernel_initializer='he_normal'):
super(Dense, self).__init__()
self.in_features = in_features
self.out_features = out_features
self.activation = activation
self.kernel_initializer = kernel_initializer
self.linear = nn.Linear(in_features, out_features)
# initialization
if kernel_initializer == 'he_normal':
nn.init.kaiming_normal_(self.linear.weight)
else:
raise NotImplementedError
def forward(self, inputs):
outputs = self.linear(inputs)
if self.activation is not None:
if self.activation == 'relu':
outputs = nn.ReLU(inplace=True)(outputs)
return outputs
class Conv2D(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size=3, activation='relu', strides=1):
super(Conv2D, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.kernel_size = kernel_size
self.activation = activation
self.strides = strides
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, strides, int((kernel_size - 1) / 2))
# default: using he_normal as the kernel initializer
nn.init.kaiming_normal_(self.conv.weight)
def forward(self, inputs):
outputs = self.conv(inputs)
if self.activation is not None:
if self.activation == 'relu':
outputs = nn.ReLU(inplace=True)(outputs)
else:
raise NotImplementedError
return outputs
class Flatten(nn.Module):
def __init__(self):
super(Flatten, self).__init__()
def forward(self, input):
return input.view(input.size(0), -1)
class StegaStampEncoder(nn.Module):
def __init__(self):
super(StegaStampEncoder, self).__init__()
self.secret_dense = Dense(100, 7500, activation='relu', kernel_initializer='he_normal')
self.conv1 = Conv2D(6, 32, 3, activation='relu')
self.conv2 = Conv2D(32, 32, 3, activation='relu', strides=2)
self.conv3 = Conv2D(32, 64, 3, activation='relu', strides=2)
self.conv4 = Conv2D(64, 128, 3, activation='relu', strides=2)
self.conv5 = Conv2D(128, 256, 3, activation='relu', strides=2)
self.up6 = Conv2D(256, 128, 3, activation='relu')
self.conv6 = Conv2D(256, 128, 3, activation='relu')
self.up7 = Conv2D(128, 64, 3, activation='relu')
self.conv7 = Conv2D(128, 64, 3, activation='relu')
self.up8 = Conv2D(64, 32, 3, activation='relu')
self.conv8 = Conv2D(64, 32, 3, activation='relu')
self.up9 = Conv2D(32, 32, 3, activation='relu')
self.conv9 = Conv2D(70, 32, 3, activation='relu')
self.residual = Conv2D(32, 3, 1, activation=None)
def forward(self, inputs):
secrect, image = inputs
secrect = secrect - .5
image = image - .5
secrect = self.secret_dense(secrect)
secrect = secrect.reshape(-1, 3, 50, 50)
secrect_enlarged = nn.Upsample(scale_factor=(8, 8))(secrect)
inputs = torch.cat([secrect_enlarged, image], dim=1)
conv1 = self.conv1(inputs)
conv2 = self.conv2(conv1)
conv3 = self.conv3(conv2)
conv4 = self.conv4(conv3)
conv5 = self.conv5(conv4)
up6 = self.up6(nn.Upsample(scale_factor=(2, 2))(conv5))
merge6 = torch.cat([conv4, up6], dim=1)
conv6 = self.conv6(merge6)
up7 = self.up7(nn.Upsample(scale_factor=(2, 2))(conv6))
merge7 = torch.cat([conv3, up7], dim=1)
conv7 = self.conv7(merge7)
up8 = self.up8(nn.Upsample(scale_factor=(2, 2))(conv7))
merge8 = torch.cat([conv2, up8], dim=1)
conv8 = self.conv8(merge8)
up9 = self.up9(nn.Upsample(scale_factor=(2, 2))(conv8))
merge9 = torch.cat([conv1, up9, inputs], dim=1)
conv9 = self.conv9(merge9)
residual = self.residual(conv9)
return residual
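# The encoder is U-Net-style: conv1 through conv5 downsample, up6 through up9
# upsample, with torch.cat skip connections back to the matching encoder
# features; the network outputs a residual that is added to the cover image.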
class SpatialTransformerNetwork(nn.Module):
def __init__(self):
super(SpatialTransformerNetwork, self).__init__()
self.localization = nn.Sequential(
Conv2D(3, 32, 3, strides=2, activation='relu'),
Conv2D(32, 64, 3, strides=2, activation='relu'),
Conv2D(64, 128, 3, strides=2, activation='relu'),
Flatten(),
Dense(320000, 128, activation='relu'),
nn.Linear(128, 6)
)
self.localization[-1].weight.data.fill_(0)
self.localization[-1].bias.data = torch.FloatTensor([1, 0, 0, 0, 1, 0])
def forward(self, image):
theta = self.localization(image)
theta = theta.view(-1, 2, 3)
grid = F.affine_grid(theta, image.size(), align_corners=False)
transformed_image = F.grid_sample(image, grid, align_corners=False)
return transformed_image
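# The STN regresses a 2x3 affine matrix (bias initialized above to the identity
# transform [1, 0, 0, 0, 1, 0]) and resamples the image, compensating for mild
# geometric distortion before decoding.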
class StegaStampDecoder(nn.Module):
def __init__(self, secret_size=100):
super(StegaStampDecoder, self).__init__()
self.secret_size = secret_size
self.stn = SpatialTransformerNetwork()
self.decoder = nn.Sequential(
Conv2D(3, 32, 3, strides=2, activation='relu'),
Conv2D(32, 32, 3, activation='relu'),
Conv2D(32, 64, 3, strides=2, activation='relu'),
Conv2D(64, 64, 3, activation='relu'),
Conv2D(64, 64, 3, strides=2, activation='relu'),
Conv2D(64, 128, 3, strides=2, activation='relu'),
Conv2D(128, 128, 3, strides=2, activation='relu'),
Flatten(),
Dense(21632, 512, activation='relu'),
Dense(512, secret_size, activation=None))
def forward(self, image):
image = image - .5
transformed_image = self.stn(image)
return torch.sigmoid(self.decoder(transformed_image))
class Discriminator(nn.Module):
def __init__(self):
super(Discriminator, self).__init__()
self.model = nn.Sequential(
Conv2D(3, 8, 3, strides=2, activation='relu'),
Conv2D(8, 16, 3, strides=2, activation='relu'),
Conv2D(16, 32, 3, strides=2, activation='relu'),
Conv2D(32, 64, 3, strides=2, activation='relu'),
Conv2D(64, 1, 3, activation=None))
def forward(self, image):
x = image - .5
x = self.model(x)
output = torch.mean(x)
return output, x
def transform_net(encoded_image, args, global_step):
sh = encoded_image.size()
ramp_fn = lambda ramp: np.min([global_step / ramp, 1.])
rnd_bri = ramp_fn(args.rnd_bri_ramp) * args.rnd_bri
rnd_hue = ramp_fn(args.rnd_hue_ramp) * args.rnd_hue
rnd_brightness = utils.get_rnd_brightness_torch(rnd_bri, rnd_hue, args.batch_size) # [batch_size, 3, 1, 1]
jpeg_quality = 100. - torch.rand(1)[0] * ramp_fn(args.jpeg_quality_ramp) * (100. - args.jpeg_quality)
rnd_noise = torch.rand(1)[0] * ramp_fn(args.rnd_noise_ramp) * args.rnd_noise
contrast_low = 1. - (1. - args.contrast_low) * ramp_fn(args.contrast_ramp)
contrast_high = 1. + (args.contrast_high - 1.) * ramp_fn(args.contrast_ramp)
contrast_params = [contrast_low, contrast_high]
rnd_sat = torch.rand(1)[0] * ramp_fn(args.rnd_sat_ramp) * args.rnd_sat
# blur
N_blur = 7
f = utils.random_blur_kernel(probs=[.25, .25], N_blur=N_blur, sigrange_gauss=[1., 3.], sigrange_line=[.25, 1.],
wmin_line=3)
if args.cuda:
f = f.cuda()
encoded_image = F.conv2d(encoded_image, f, bias=None, padding=int((N_blur - 1) / 2))
# noise
noise = torch.normal(mean=0, std=rnd_noise, size=encoded_image.size(), dtype=torch.float32)
if args.cuda:
noise = noise.cuda()
encoded_image = encoded_image + noise
encoded_image = torch.clamp(encoded_image, 0, 1)
# contrast & brightness
contrast_scale = torch.Tensor(encoded_image.size()[0]).uniform_(contrast_params[0], contrast_params[1])
contrast_scale = contrast_scale.reshape(encoded_image.size()[0], 1, 1, 1)
if args.cuda:
contrast_scale = contrast_scale.cuda()
rnd_brightness = rnd_brightness.cuda()
encoded_image = encoded_image * contrast_scale
encoded_image = encoded_image + rnd_brightness
encoded_image = torch.clamp(encoded_image, 0, 1)
# saturation
sat_weight = torch.FloatTensor([.3, .6, .1]).reshape(1, 3, 1, 1)
if args.cuda:
sat_weight = sat_weight.cuda()
encoded_image_lum = torch.mean(encoded_image * sat_weight, dim=1).unsqueeze_(1)
encoded_image = (1 - rnd_sat) * encoded_image + rnd_sat * encoded_image_lum
# jpeg
encoded_image = encoded_image.reshape([-1, 3, 400, 400])
if not args.no_jpeg:
encoded_image = utils.jpeg_compress_decompress(encoded_image, rounding=utils.round_only_at_0,
quality=jpeg_quality)
return encoded_image
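# transform_net applies a differentiable corruption pipeline (blur, noise,
# contrast/brightness, desaturation, optional JPEG) whose strength ramps up
# with global_step, so the encoder/decoder pair learns robustness gradually.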
def get_secret_acc(secret_true, secret_pred):
if 'cuda' in str(secret_pred.device):
secret_pred = secret_pred.cpu()
secret_true = secret_true.cpu()
secret_pred = torch.round(secret_pred)
correct_pred = torch.sum((secret_pred - secret_true) == 0, dim=1)
str_acc = 1.0 - torch.sum((correct_pred - secret_pred.size()[1]) != 0).numpy() / correct_pred.size()[0]
bit_acc = torch.sum(correct_pred).numpy() / secret_pred.numel()
return bit_acc, str_acc
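# bit_acc is the fraction of correctly recovered secret bits across the batch;
# str_acc is the fraction of samples decoded with zero bit errors.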
def build_model(encoder, decoder, discriminator, lpips_fn, secret_input, image_input, l2_edge_gain,
borders, secret_size, M, loss_scales, yuv_scales, args, global_step, writer):
test_transform = transform_net(image_input, args, global_step)
input_warped = torchgeometry.warp_perspective(image_input, M[:, 1, :, :], dsize=(400, 400), flags='bilinear')
mask_warped = torchgeometry.warp_perspective(torch.ones_like(input_warped), M[:, 1, :, :], dsize=(400, 400),
flags='bilinear')
input_warped += (1 - mask_warped) * image_input
residual_warped = encoder((secret_input, input_warped))
encoded_warped = residual_warped + input_warped
residual = torchgeometry.warp_perspective(residual_warped, M[:, 0, :, :], dsize=(400, 400), flags='bilinear')
if borders == 'no_edge':
encoded_image = image_input + residual
elif borders == 'black':
encoded_image = residual_warped + input_warped
encoded_image = torchgeometry.warp_perspective(encoded_image, M[:, 0, :, :], dsize=(400, 400), flags='bilinear')
input_unwarped = torchgeometry.warp_perspective(image_input, M[:, 0, :, :], dsize=(400, 400), flags='bilinear')
elif borders.startswith('random'):
mask = torchgeometry.warp_perspective(torch.ones_like(residual), M[:, 0, :, :], dsize=(400, 400),
flags='bilinear')
        # compute the unwarped input first so it is defined when used below
        input_unwarped = torchgeometry.warp_perspective(input_warped, M[:, 0, :, :], dsize=(400, 400), flags='bilinear')
        encoded_image = residual_warped + input_unwarped
        encoded_image = torchgeometry.warp_perspective(encoded_image, M[:, 0, :, :], dsize=(400, 400), flags='bilinear')
ch = 3 if borders.endswith('rgb') else 1
encoded_image += (1 - mask) * torch.ones_like(residual) * torch.rand([ch])
elif borders == 'white':
mask = torchgeometry.warp_perspective(torch.ones_like(residual), M[:, 0, :, :], dsize=(400, 400),
flags='bilinear')
encoded_image = residual_warped + input_warped
encoded_image = torchgeometry.warp_perspective(encoded_image, M[:, 0, :, :], dsize=(400, 400), flags='bilinear')
input_unwarped = torchgeometry.warp_perspective(input_warped, M[:, 0, :, :], dsize=(400, 400), flags='bilinear')
encoded_image += (1 - mask) * torch.ones_like(residual)
elif borders == 'image':
mask = torchgeometry.warp_perspective(torch.ones_like(residual), M[:, 0, :, :], dsize=(400, 400),
flags='bilinear')
encoded_image = residual_warped + input_warped
encoded_image = torchgeometry.warp_perspective(encoded_image, M[:, 0, :, :], dsize=(400, 400), flags='bilinear')
encoded_image += (1 - mask) * torch.roll(image_input, 1, 0)
if borders == 'no_edge':
D_output_real, _ = discriminator(image_input)
D_output_fake, D_heatmap = discriminator(encoded_image)
else:
D_output_real, _ = discriminator(input_warped)
D_output_fake, D_heatmap = discriminator(encoded_warped)
transformed_image = transform_net(encoded_image, args, global_step)
decoded_secret = decoder(transformed_image)
bit_acc, str_acc = get_secret_acc(secret_input, decoded_secret)
normalized_input = image_input * 2 - 1
normalized_encoded = encoded_image * 2 - 1
lpips_loss = torch.mean(lpips_fn(normalized_input, normalized_encoded))
cross_entropy = nn.BCELoss()
if args.cuda:
cross_entropy = cross_entropy.cuda()
secret_loss = cross_entropy(decoded_secret, secret_input)
size = (int(image_input.shape[2]), int(image_input.shape[3]))
gain = 10
falloff_speed = 4
falloff_im = np.ones(size)
for i in range(int(falloff_im.shape[0] / falloff_speed)): # for i in range 100
falloff_im[-i, :] *= (np.cos(4 * np.pi * i / size[0] + np.pi) + 1) / 2 # [cos[(4*pi*i/400)+pi] + 1]/2
falloff_im[i, :] *= (np.cos(4 * np.pi * i / size[0] + np.pi) + 1) / 2 # [cos[(4*pi*i/400)+pi] + 1]/2
for j in range(int(falloff_im.shape[1] / falloff_speed)):
falloff_im[:, -j] *= (np.cos(4 * np.pi * j / size[0] + np.pi) + 1) / 2
falloff_im[:, j] *= (np.cos(4 * np.pi * j / size[0] + np.pi) + 1) / 2
falloff_im = 1 - falloff_im
falloff_im = torch.from_numpy(falloff_im).float()
if args.cuda:
falloff_im = falloff_im.cuda()
falloff_im *= l2_edge_gain
encoded_image_yuv = color.rgb_to_yuv(encoded_image)
image_input_yuv = color.rgb_to_yuv(image_input)
im_diff = encoded_image_yuv - image_input_yuv
im_diff += im_diff * falloff_im.unsqueeze_(0)
yuv_loss = torch.mean((im_diff) ** 2, axis=[0, 2, 3])
yuv_scales = torch.Tensor(yuv_scales)
if args.cuda:
yuv_scales = yuv_scales.cuda()
image_loss = torch.dot(yuv_loss, yuv_scales)
D_loss = D_output_real - D_output_fake
G_loss = D_output_fake
loss = loss_scales[0] * image_loss + loss_scales[1] * lpips_loss + loss_scales[2] * secret_loss
if not args.no_gan:
loss += loss_scales[3] * G_loss
writer.add_scalar('loss/image_loss', image_loss, global_step)
writer.add_scalar('loss/lpips_loss', lpips_loss, global_step)
writer.add_scalar('loss/secret_loss', secret_loss, global_step)
writer.add_scalar('loss/G_loss', G_loss, global_step)
writer.add_scalar('loss/loss', loss, global_step)
writer.add_scalar('metric/bit_acc', bit_acc, global_step)
writer.add_scalar('metric/str_acc', str_acc, global_step)
if global_step % 20 == 0:
writer.add_image('input/image_input', image_input[0], global_step)
writer.add_image('input/image_warped', input_warped[0], global_step)
writer.add_image('encoded/encoded_warped', encoded_warped[0], global_step)
writer.add_image('encoded/residual_warped', residual_warped[0] + 0.5, global_step)
writer.add_image('encoded/encoded_image', encoded_image[0], global_step)
writer.add_image('transformed/transformed_image', transformed_image[0], global_step)
writer.add_image('transformed/test', test_transform[0], global_step)
return loss, secret_loss, D_loss, bit_acc, str_acc
|
[
"utils.get_rnd_brightness_torch",
"torch.dot",
"numpy.ones",
"torch.cat",
"torch.roll",
"utils.random_blur_kernel",
"sys.path.append",
"torch.nn.BCELoss",
"torch.nn.init.kaiming_normal_",
"torch.nn.functional.grid_sample",
"torch.FloatTensor",
"torch.nn.Upsample",
"kornia.color.rgb_to_yuv",
"torch.Tensor",
"utils.jpeg_compress_decompress",
"torch.nn.Linear",
"torch.mean",
"numpy.min",
"torch.clamp",
"numpy.cos",
"torch.rand",
"torch.sum",
"torch.from_numpy",
"torch.ones_like",
"torch.nn.ReLU",
"torchgeometry.warp_perspective",
"torch.round"
] |
[((12, 53), 'sys.path.append', 'sys.path.append', (['"""PerceptualSimilarity\\\\"""'], {}), "('PerceptualSimilarity\\\\')\n", (27, 53), False, 'import sys\n'), ((7002, 7067), 'utils.get_rnd_brightness_torch', 'utils.get_rnd_brightness_torch', (['rnd_bri', 'rnd_hue', 'args.batch_size'], {}), '(rnd_bri, rnd_hue, args.batch_size)\n', (7032, 7067), False, 'import utils\n'), ((7604, 7735), 'utils.random_blur_kernel', 'utils.random_blur_kernel', ([], {'probs': '[0.25, 0.25]', 'N_blur': 'N_blur', 'sigrange_gauss': '[1.0, 3.0]', 'sigrange_line': '[0.25, 1.0]', 'wmin_line': '(3)'}), '(probs=[0.25, 0.25], N_blur=N_blur, sigrange_gauss=\n [1.0, 3.0], sigrange_line=[0.25, 1.0], wmin_line=3)\n', (7628, 7735), False, 'import utils\n'), ((8104, 8136), 'torch.clamp', 'torch.clamp', (['encoded_image', '(0)', '(1)'], {}), '(encoded_image, 0, 1)\n', (8115, 8136), False, 'import torch\n'), ((8586, 8618), 'torch.clamp', 'torch.clamp', (['encoded_image', '(0)', '(1)'], {}), '(encoded_image, 0, 1)\n', (8597, 8618), False, 'import torch\n'), ((9418, 9442), 'torch.round', 'torch.round', (['secret_pred'], {}), '(secret_pred)\n', (9429, 9442), False, 'import torch\n'), ((9462, 9510), 'torch.sum', 'torch.sum', (['(secret_pred - secret_true == 0)'], {'dim': '(1)'}), '(secret_pred - secret_true == 0, dim=1)\n', (9471, 9510), False, 'import torch\n'), ((10000, 10098), 'torchgeometry.warp_perspective', 'torchgeometry.warp_perspective', (['image_input', 'M[:, 1, :, :]'], {'dsize': '(400, 400)', 'flags': '"""bilinear"""'}), "(image_input, M[:, 1, :, :], dsize=(400, 400),\n flags='bilinear')\n", (10030, 10098), False, 'import torchgeometry\n'), ((10456, 10559), 'torchgeometry.warp_perspective', 'torchgeometry.warp_perspective', (['residual_warped', 'M[:, 0, :, :]'], {'dsize': '(400, 400)', 'flags': '"""bilinear"""'}), "(residual_warped, M[:, 0, :, :], dsize=(400, \n 400), flags='bilinear')\n", (10486, 10559), False, 'import torchgeometry\n'), ((13255, 13267), 'torch.nn.BCELoss', 'nn.BCELoss', ([], {}), '()\n', (13265, 13267), False, 'from torch import nn\n'), ((13513, 13526), 'numpy.ones', 'np.ones', (['size'], {}), '(size)\n', (13520, 13526), True, 'import numpy as np\n'), ((14250, 14281), 'kornia.color.rgb_to_yuv', 'color.rgb_to_yuv', (['encoded_image'], {}), '(encoded_image)\n', (14266, 14281), False, 'from kornia import color\n'), ((14304, 14333), 'kornia.color.rgb_to_yuv', 'color.rgb_to_yuv', (['image_input'], {}), '(image_input)\n', (14320, 14333), False, 'from kornia import color\n'), ((14449, 14489), 'torch.mean', 'torch.mean', (['(im_diff ** 2)'], {'axis': '[0, 2, 3]'}), '(im_diff ** 2, axis=[0, 2, 3])\n', (14459, 14489), False, 'import torch\n'), ((14509, 14533), 'torch.Tensor', 'torch.Tensor', (['yuv_scales'], {}), '(yuv_scales)\n', (14521, 14533), False, 'import torch\n'), ((14608, 14639), 'torch.dot', 'torch.dot', (['yuv_loss', 'yuv_scales'], {}), '(yuv_loss, yuv_scales)\n', (14617, 14639), False, 'import torch\n'), ((602, 638), 'torch.nn.Linear', 'nn.Linear', (['in_features', 'out_features'], {}), '(in_features, out_features)\n', (611, 638), False, 'from torch import nn\n'), ((1576, 1617), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['self.conv.weight'], {}), '(self.conv.weight)\n', (1599, 1617), False, 'from torch import nn\n'), ((3445, 3488), 'torch.cat', 'torch.cat', (['[secrect_enlarged, image]'], {'dim': '(1)'}), '([secrect_enlarged, image], dim=1)\n', (3454, 3488), False, 'import torch\n'), ((3741, 3771), 'torch.cat', 'torch.cat', (['[conv4, up6]'], {'dim': '(1)'}), '([conv4, up6], dim=1)\n', (3750, 3771), False, 'import torch\n'), ((3888, 3918), 'torch.cat', 'torch.cat', (['[conv3, up7]'], {'dim': '(1)'}), '([conv3, up7], dim=1)\n', (3897, 3918), False, 'import torch\n'), ((4035, 4065), 'torch.cat', 'torch.cat', (['[conv2, up8]'], {'dim': '(1)'}), '([conv2, up8], dim=1)\n', (4044, 4065), False, 'import torch\n'), ((4182, 4220), 'torch.cat', 'torch.cat', (['[conv1, up9, inputs]'], {'dim': '(1)'}), '([conv1, up9, inputs], dim=1)\n', (4191, 4220), False, 'import torch\n'), ((4881, 4918), 'torch.FloatTensor', 'torch.FloatTensor', (['[1, 0, 0, 0, 1, 0]'], {}), '([1, 0, 0, 0, 1, 0])\n', (4898, 4918), False, 'import torch\n'), ((5127, 5174), 'torch.nn.functional.grid_sample', 'F.grid_sample', (['image', 'grid'], {'align_corners': '(False)'}), '(image, grid, align_corners=False)\n', (5140, 5174), True, 'import torch.nn.functional as F\n'), ((6684, 6697), 'torch.mean', 'torch.mean', (['x'], {}), '(x)\n', (6694, 6697), False, 'import torch\n'), ((6835, 6868), 'numpy.min', 'np.min', (['[global_step / ramp, 1.0]'], {}), '([global_step / ramp, 1.0])\n', (6841, 6868), True, 'import numpy as np\n'), ((9049, 9153), 'utils.jpeg_compress_decompress', 'utils.jpeg_compress_decompress', (['encoded_image'], {'rounding': 'utils.\n round_only_at_0', 'quality': 'jpeg_quality'}), '(encoded_image, rounding=utils.\n round_only_at_0, quality=jpeg_quality)\n', (9079, 9153), False, 'import utils\n'), ((10144, 10173), 'torch.ones_like', 'torch.ones_like', (['input_warped'], {}), '(input_warped)\n', (10159, 10173), False, 'import torch\n'), ((722, 765), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['self.linear.weight'], {}), '(self.linear.weight)\n', (745, 765), False, 'from torch import nn\n'), ((3385, 3417), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(8, 8)'}), '(scale_factor=(8, 8))\n', (3396, 3417), False, 'from torch import nn\n'), ((4760, 4777), 'torch.nn.Linear', 'nn.Linear', (['(128)', '(6)'], {}), '(128, 6)\n', (4769, 4777), False, 'from torch import nn\n'), ((8654, 8688), 'torch.FloatTensor', 'torch.FloatTensor', (['[0.3, 0.6, 0.1]'], {}), '([0.3, 0.6, 0.1])\n', (8671, 8688), False, 'import torch\n'), ((8787, 8832), 'torch.mean', 'torch.mean', (['(encoded_image * sat_weight)'], {'dim': '(1)'}), '(encoded_image * sat_weight, dim=1)\n', (8797, 8832), False, 'import torch\n'), ((10740, 10841), 'torchgeometry.warp_perspective', 'torchgeometry.warp_perspective', (['encoded_image', 'M[:, 0, :, :]'], {'dsize': '(400, 400)', 'flags': '"""bilinear"""'}), "(encoded_image, M[:, 0, :, :], dsize=(400, \n 400), flags='bilinear')\n", (10770, 10841), False, 'import torchgeometry\n'), ((10862, 10960), 'torchgeometry.warp_perspective', 'torchgeometry.warp_perspective', (['image_input', 'M[:, 0, :, :]'], {'dsize': '(400, 400)', 'flags': '"""bilinear"""'}), "(image_input, M[:, 0, :, :], dsize=(400, 400),\n flags='bilinear')\n", (10892, 10960), False, 'import torchgeometry\n'), ((14100, 14128), 'torch.from_numpy', 'torch.from_numpy', (['falloff_im'], {}), '(falloff_im)\n', (14116, 14128), False, 'import torch\n'), ((3683, 3715), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2, 2)'}), '(scale_factor=(2, 2))\n', (3694, 3715), False, 'from torch import nn\n'), ((3830, 3862), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2, 2)'}), '(scale_factor=(2, 2))\n', (3841, 3862), False, 'from torch import nn\n'), ((3977, 4009), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2, 2)'}), '(scale_factor=(2, 2))\n', (3988, 4009), False, 'from torch import nn\n'), ((4124, 4156), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2, 2)'}), '(scale_factor=(2, 2))\n', (4135, 4156), False, 'from torch import nn\n'), ((7215, 7228), 'torch.rand', 'torch.rand', (['(1)'], {}), '(1)\n', (7225, 7228), False, 'import torch\n'), ((7508, 7521), 'torch.rand', 'torch.rand', (['(1)'], {}), '(1)\n', (7518, 7521), False, 'import torch\n'), ((9635, 9658), 'torch.sum', 'torch.sum', (['correct_pred'], {}), '(correct_pred)\n', (9644, 9658), False, 'import torch\n'), ((11247, 11348), 'torchgeometry.warp_perspective', 'torchgeometry.warp_perspective', (['encoded_image', 'M[:, 0, :, :]'], {'dsize': '(400, 400)', 'flags': '"""bilinear"""'}), "(encoded_image, M[:, 0, :, :], dsize=(400, \n 400), flags='bilinear')\n", (11277, 11348), False, 'import torchgeometry\n'), ((11369, 11469), 'torchgeometry.warp_perspective', 'torchgeometry.warp_perspective', (['input_warped', 'M[:, 0, :, :]'], {'dsize': '(400, 400)', 'flags': '"""bilinear"""'}), "(input_warped, M[:, 0, :, :], dsize=(400, 400\n ), flags='bilinear')\n", (11399, 11469), False, 'import torchgeometry\n'), ((13641, 13680), 'numpy.cos', 'np.cos', (['(4 * np.pi * i / size[0] + np.pi)'], {}), '(4 * np.pi * i / size[0] + np.pi)\n', (13647, 13680), True, 'import numpy as np\n'), ((13751, 13790), 'numpy.cos', 'np.cos', (['(4 * np.pi * i / size[0] + np.pi)'], {}), '(4 * np.pi * i / size[0] + np.pi)\n', (13757, 13790), True, 'import numpy as np\n'), ((13924, 13963), 'numpy.cos', 'np.cos', (['(4 * np.pi * j / size[0] + np.pi)'], {}), '(4 * np.pi * j / size[0] + np.pi)\n', (13930, 13963), True, 'import numpy as np\n'), ((14002, 14041), 'numpy.cos', 'np.cos', (['(4 * np.pi * j / size[0] + np.pi)'], {}), '(4 * np.pi * j / size[0] + np.pi)\n', (14008, 14041), True, 'import numpy as np\n'), ((996, 1017), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1003, 1017), False, 'from torch import nn\n'), ((1794, 1815), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1801, 1815), False, 'from torch import nn\n'), ((7119, 7132), 'torch.rand', 'torch.rand', (['(1)'], {}), '(1)\n', (7129, 7132), False, 'import torch\n'), ((11042, 11067), 'torch.ones_like', 'torch.ones_like', (['residual'], {}), '(residual)\n', (11057, 11067), False, 'import torch\n'), ((11580, 11596), 'torch.rand', 'torch.rand', (['[ch]'], {}), '([ch])\n', (11590, 11596), False, 'import torch\n'), ((11875, 11976), 'torchgeometry.warp_perspective', 'torchgeometry.warp_perspective', (['encoded_image', 'M[:, 0, :, :]'], {'dsize': '(400, 400)', 'flags': '"""bilinear"""'}), "(encoded_image, M[:, 0, :, :], dsize=(400, \n 400), flags='bilinear')\n", (11905, 11976), False, 'import torchgeometry\n'), ((11997, 12097), 'torchgeometry.warp_perspective', 'torchgeometry.warp_perspective', (['input_warped', 'M[:, 0, :, :]'], {'dsize': '(400, 400)', 'flags': '"""bilinear"""'}), "(input_warped, M[:, 0, :, :], dsize=(400, 400\n ), flags='bilinear')\n", (12027, 12097), False, 'import torchgeometry\n'), ((11552, 11577), 'torch.ones_like', 'torch.ones_like', (['residual'], {}), '(residual)\n', (11567, 11577), False, 'import torch\n'), ((11672, 11697), 'torch.ones_like', 'torch.ones_like', (['residual'], {}), '(residual)\n', (11687, 11697), False, 'import torch\n'), ((12131, 12156), 'torch.ones_like', 'torch.ones_like', (['residual'], {}), '(residual)\n', (12146, 12156), False, 'import torch\n'), ((12435, 12536), 'torchgeometry.warp_perspective', 'torchgeometry.warp_perspective', (['encoded_image', 'M[:, 0, :, :]'], {'dsize': '(400, 400)', 'flags': '"""bilinear"""'}), "(encoded_image, M[:, 0, :, :], dsize=(400, \n 400), flags='bilinear')\n", (12465, 12536), False, 'import torchgeometry\n'), ((12232, 12257), 'torch.ones_like', 'torch.ones_like', (['residual'], {}), '(residual)\n', (12247, 12257), False, 'import torch\n'), ((12570, 12599), 'torch.roll', 'torch.roll', (['image_input', '(1)', '(0)'], {}), '(image_input, 1, 0)\n', (12580, 12599), False, 'import torch\n')]
|
"""
(C) Copyright 2021 IBM Corp.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Created on June 30, 2021
"""
import unittest
import random
from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed
class TestParamSampler(unittest.TestCase):
"""
Test ParamSampleBase sub classes
"""
def test_uniform(self):
Seed.set_seed(0)
min = random.random() * 1000
max = random.random() * 1000 + min
uniform = Uniform(min, max)
value = uniform.sample()
# test range
self.assertGreaterEqual(value, min)
self.assertLessEqual(uniform.sample(), max)
# test generate more than a single number
self.assertNotEqual(value, uniform.sample())
# test fixed per seed
Seed.set_seed(1234)
value0 = uniform.sample()
Seed.set_seed(1234)
value1 = uniform.sample()
self.assertEqual(value0, value1)
def test_randint(self):
Seed.set_seed(0)
min = random.randint(0, 1000)
max = random.randint(0, 1000) + min
randint = RandInt(min, max)
value = randint.sample()
# test range
self.assertGreaterEqual(value, min)
self.assertLessEqual(randint.sample(), max)
# test generate more than a single number
self.assertNotEqual(value, randint.sample())
# test fixed per seed
Seed.set_seed(1234)
value0 = randint.sample()
Seed.set_seed(1234)
value1 = randint.sample()
self.assertEqual(value0, value1)
def test_randbool(self):
Seed.set_seed(0)
randbool = RandBool(0.5)
value = randbool.sample()
# test range
self.assertIn(value, [True, False])
# test generate more than a single number
Seed.set_seed(0)
values = [randbool.sample() for _ in range(4)]
self.assertIn(True, values)
self.assertIn(False, values)
# test fixed per seed
Seed.set_seed(1234)
value0 = randbool.sample()
Seed.set_seed(1234)
value1 = randbool.sample()
self.assertEqual(value0, value1)
# test probs
Seed.set_seed(0)
randbool = RandBool(0.99)
count = 0
for _ in range(1000):
if randbool.sample() == True:
count += 1
self.assertGreaterEqual(count, 980)
def test_choice(self):
Seed.set_seed(0)
lst = list(range(1000))
choice = Choice(lst)
value = choice.sample()
# test range
self.assertIn(value, lst)
# test generate more than a single number
self.assertNotEqual(value, choice.sample())
# test fixed per seed
Seed.set_seed(1234)
value0 = choice.sample()
Seed.set_seed(1234)
value1 = choice.sample()
self.assertEqual(value0, value1)
# test probs
Seed.set_seed(0)
probs = [0.01 / 999] * 1000
probs[5] = 0.99
choice = Choice(lst, probs)
count = 0
for _ in range(1000):
if choice.sample() == 5:
count += 1
self.assertGreaterEqual(count, 980)
def test_draw_samples_recursively(self):
Seed.set_seed(0)
a = {"a": 5, "b": [3, RandInt(1, 5), 9], "c": {"d": 3, "f": [1, 2, RandBool(0.5), {"h": RandInt(10, 15)}]}, "e": {"g": Choice([6, 7, 8])}}
        b = draw_samples_recursively(a)
        self.assertEqual(b["a"], a["a"])
self.assertEqual(b["b"][0], a["b"][0])
self.assertEqual(b["b"][2], a["b"][2])
self.assertEqual(b["c"]["d"], a["c"]["d"])
self.assertEqual(b["c"]["f"][1], a["c"]["f"][1])
self.assertIn(b["b"][1], [1, 2, 3, 4, 5])
self.assertIn(b["c"]["f"][2], [True, False])
self.assertIn(b["c"]["f"][3]["h"], [10, 11, 12, 13, 14, 15])
self.assertIn(b["e"]["g"], [6, 7, 8])
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"fuse.utils.Seed.set_seed",
"random.randint",
"fuse.utils.Uniform",
"fuse.utils.RandBool",
"random.random",
"fuse.utils.draw_samples_recursively",
"fuse.utils.RandInt",
"fuse.utils.Choice"
] |
[((4522, 4537), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4535, 4537), False, 'import unittest\n'), ((847, 863), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(0)'], {}), '(0)\n', (860, 863), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((962, 979), 'fuse.utils.Uniform', 'Uniform', (['min', 'max'], {}), '(min, max)\n', (969, 979), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((1290, 1309), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(1234)'], {}), '(1234)\n', (1303, 1309), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((1352, 1371), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(1234)'], {}), '(1234)\n', (1365, 1371), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((1484, 1500), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(0)'], {}), '(0)\n', (1497, 1500), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((1515, 1538), 'random.randint', 'random.randint', (['(0)', '(1000)'], {}), '(0, 1000)\n', (1529, 1538), False, 'import random\n'), ((1601, 1618), 'fuse.utils.RandInt', 'RandInt', (['min', 'max'], {}), '(min, max)\n', (1608, 1618), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((1929, 1948), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(1234)'], {}), '(1234)\n', (1942, 1948), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((1991, 2010), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(1234)'], {}), '(1234)\n', (2004, 2010), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((2125, 2141), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(0)'], {}), '(0)\n', (2138, 2141), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((2161, 2174), 'fuse.utils.RandBool', 'RandBool', (['(0.5)'], {}), '(0.5)\n', (2169, 2174), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((2350, 2366), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(0)'], {}), '(0)\n', (2363, 2366), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((2534, 2553), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(1234)'], {}), '(1234)\n', (2547, 2553), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((2597, 2616), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(1234)'], {}), '(1234)\n', (2610, 2616), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((2724, 2740), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(0)'], {}), '(0)\n', (2737, 2740), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((2760, 2774), 'fuse.utils.RandBool', 'RandBool', (['(0.99)'], {}), '(0.99)\n', (2768, 2774), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((2973, 2989), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(0)'], {}), '(0)\n', (2986, 2989), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((3039, 3050), 'fuse.utils.Choice', 'Choice', (['lst'], {}), '(lst)\n', (3045, 3050), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((3297, 3316), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(1234)'], {}), '(1234)\n', (3310, 3316), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((3358, 3377), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(1234)'], {}), '(1234)\n', (3371, 3377), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((3482, 3498), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(0)'], {}), '(0)\n', (3495, 3498), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((3577, 3595), 'fuse.utils.Choice', 'Choice', (['lst', 'probs'], {}), '(lst, probs)\n', (3583, 3595), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((3806, 3822), 'fuse.utils.Seed.set_seed', 'Seed.set_seed', (['(0)'], {}), '(0)\n', (3819, 3822), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((3982, 4009), 'fuse.utils.draw_samples_recursively', 'draw_samples_recursively', (['a'], {}), '(a)\n', (4006, 4009), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((878, 893), 'random.random', 'random.random', ([], {}), '()\n', (891, 893), False, 'import random\n'), ((1553, 1576), 'random.randint', 'random.randint', (['(0)', '(1000)'], {}), '(0, 1000)\n', (1567, 1576), False, 'import random\n'), ((915, 930), 'random.random', 'random.random', ([], {}), '()\n', (928, 930), False, 'import random\n'), ((3853, 3866), 'fuse.utils.RandInt', 'RandInt', (['(1)', '(5)'], {}), '(1, 5)\n', (3860, 3866), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((3950, 3967), 'fuse.utils.Choice', 'Choice', (['[6, 7, 8]'], {}), '([6, 7, 8])\n', (3956, 3967), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((3898, 3911), 'fuse.utils.RandBool', 'RandBool', (['(0.5)'], {}), '(0.5)\n', (3906, 3911), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n'), ((3919, 3934), 'fuse.utils.RandInt', 'RandInt', (['(10)', '(15)'], {}), '(10, 15)\n', (3926, 3934), False, 'from fuse.utils import Uniform, Choice, RandInt, RandBool, draw_samples_recursively, Seed\n')]
|
#!/usr/bin/python
import os, sys
import string
def usage():
print >> sys.stderr, "usage: " + sys.argv[0] + " orthomcl.out base.tree"
sys.exit(1)
def plausi():
if len(sys.argv) != 3: usage()
inOrtho, inTree = sys.argv[1:3]
return inOrtho, inTree
class OrthoCluster():
def __init__(self, line):
descr, genedefs = line.split("\t")
genedefs = genedefs.split()
self.name = descr[:descr.index('(')].lower()
self.geneHash = {}
self.speciesHash = {}
for genedef in genedefs:
geneid = genedef[:genedef.index('(')]
species = genedef[genedef.index('(')+1:-1] + "1"
self.geneHash[geneid] = species
if self.speciesHash.has_key(species): self.speciesHash[species].append(geneid)
else: self.speciesHash[species] = [geneid]
def get_name(self): return self.name
def get_count(self): return len(self.geneHash)
def get_gene_hash(self): return self.geneHash
def get_species_hash(self): return self.speciesHash
def get_species_from_first_line(inFile):
fo = open(inFile)
line = fo.readline()
o = OrthoCluster(line.rstrip())
fo.close()
species = o.get_species_hash().keys()
species.sort()
return species
def parse_orthocml_out(inFile, tree):
fo = open(inFile)
for line in fo:
o = OrthoCluster(line.rstrip())
speciesHash = o.get_species_hash()
name = o.get_name()
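		# find the (assumed unique) species with more than one gene in this cluster;
		# two trees are then written, each marking one of its two gene copies with '#1'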
for species, genelist in speciesHash.iteritems():
if len(genelist) > 1: break
replacement = '(' + species[:-1] + '1 #1,' + species[:-1] + '2)'
tree_repl_1 = tree.replace(species, replacement)
replacement = '(' + species[:-1] + '1,' + species[:-1] + '2 #1)'
tree_repl_2 = tree.replace(species, replacement)
fw = open(name + ".tree.1", "w")
fw.write(tree_repl_1)
fw.close()
fw = open(name + ".tree.2", "w")
fw.write(tree_repl_2)
fw.close()
fo.close()
def read_tree_from_file(file):
fo = open(file)
tree = ""
for line in fo:
tree += line.strip()
fo.close()
return tree
def main():
inOrtho, inTree = plausi()
tree = read_tree_from_file(inTree)
parse_orthocml_out(inOrtho, tree)
main()
|
[
"sys.exit"
] |
[((140, 151), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (148, 151), False, 'import os, sys\n')]
|
from ...database import Base
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.dialects.postgresql import ARRAY
class ApiKey(Base):
api_key_id = Column(Integer, primary_key=True, autoincrement=False)
key = Column(String)
vcode = Column(String)
mask = Column(Integer)
character_1_id = Column(Integer, ForeignKey('character.character_id'))
character_2_id = Column(Integer, ForeignKey('character.character_id'))
character_3_id = Column(Integer, ForeignKey('character.character_id'))
corporation_id = Column(Integer, ForeignKey('corporation.corporation_id'))
|
[
"sqlalchemy.ForeignKey",
"sqlalchemy.Column"
] |
[((294, 348), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)', 'autoincrement': '(False)'}), '(Integer, primary_key=True, autoincrement=False)\n', (300, 348), False, 'from sqlalchemy import Column, Integer, String, ForeignKey\n'), ((359, 373), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (365, 373), False, 'from sqlalchemy import Column, Integer, String, ForeignKey\n'), ((386, 400), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (392, 400), False, 'from sqlalchemy import Column, Integer, String, ForeignKey\n'), ((413, 428), 'sqlalchemy.Column', 'Column', (['Integer'], {}), '(Integer)\n', (419, 428), False, 'from sqlalchemy import Column, Integer, String, ForeignKey\n'), ((467, 503), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""character.character_id"""'], {}), "('character.character_id')\n", (477, 503), False, 'from sqlalchemy import Column, Integer, String, ForeignKey\n'), ((542, 578), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""character.character_id"""'], {}), "('character.character_id')\n", (552, 578), False, 'from sqlalchemy import Column, Integer, String, ForeignKey\n'), ((617, 653), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""character.character_id"""'], {}), "('character.character_id')\n", (627, 653), False, 'from sqlalchemy import Column, Integer, String, ForeignKey\n'), ((693, 733), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""corporation.corporation_id"""'], {}), "('corporation.corporation_id')\n", (703, 733), False, 'from sqlalchemy import Column, Integer, String, ForeignKey\n')]
|
import sys
sys.path.append('..')
from tighthash import pmap
from testutils import testing_map
testing_map("TightHashMap", pmap)
|
[
"sys.path.append",
"testutils.testing_map"
] |
[((12, 33), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (27, 33), False, 'import sys\n'), ((97, 130), 'testutils.testing_map', 'testing_map', (['"""TightHashMap"""', 'pmap'], {}), "('TightHashMap', pmap)\n", (108, 130), False, 'from testutils import testing_map\n')]
|
import json
import random
import numpy as np
from Source.Utility.Pathfinding.Graph import Graph
def get_distance_to_players(game_state):
own_player = game_state["players"][str(game_state["you"])]
distances = [0, 0, 0, 0, 0, 0]
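    # one slot per player ID 1..6; a slot stays 0 for ourselves and for inactive players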
current_position = (own_player["x"], own_player["y"])
if game_state["players"][str(game_state["you"])]["active"]:
for i in range(6):
if i + 1 == game_state["you"]:
distances[i] = 0
else:
try:
if game_state["players"][str(i + 1)]["active"]:
enemy_position = (game_state["players"][str(i + 1)]["x"], game_state["players"][str(i + 1)]["y"])
distance = np.sqrt(np.power(current_position[0] - enemy_position[0], 2) + np.power(
current_position[1] - enemy_position[1], 2))
distances[i] = distance
else:
distances[i] = 0
except KeyError:
distances[i] = 0
max_distance = np.sqrt(np.power(game_state["width"], 2) + np.power(game_state["height"], 2))
for i in range(len(distances)):
distances[i] = distances[i] / max_distance
return distances
def get_average_distance(distances):
sum = counter = 0.0
for i in range(len(distances)):
if distances[i] == 0:
pass
else:
sum += distances[i]
counter += 1
if counter == 0:
return 0
else:
return sum / counter
def get_free_spaces(new_position, game_state):
own_player = game_state["players"][str(game_state["you"])]
speed = own_player["speed"]
number_of_free_spaces = 0
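    # count empty cells in the 5x5 window centred on new_position (25 cells in total)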
for i in range(-2, 3):
for j in range(-2, 3):
try:
if game_state["cells"][new_position[1] + i][new_position[0] + j] == 0:
number_of_free_spaces += 1
except IndexError:
pass
normalised_num = (number_of_free_spaces - speed) / 25.0
return normalised_num
def get_avg_speed(game_state):
sum = 0.0
counter = 0.0
avg = 0.0
if game_state["players"][str(game_state["you"])]["active"]:
for i in range(6):
if i + 1 == game_state["you"]:
pass
else:
try:
if game_state["players"][str(i + 1)]["active"]:
sum += game_state["players"][str(i + 1)]["speed"]
counter += 1
except KeyError:
pass
if counter > 0:
avg = sum / counter
norm_avg = avg / 10
return norm_avg
def get_num_living_players(game_state):
num = 0
    for i in range(6):
if game_state["players"][str(i+1)]["active"]:
num += 1
return num
def get_player_data(game_state, id):
x = game_state["players"][str(id + 1)]["x"]
y = game_state["players"][str(id + 1)]["y"]
speed = game_state["players"][str(id + 1)]["speed"]
return x, y, speed
def get_distances_to_borders(game_state, id):
board_height = game_state["height"]
board_width = game_state["width"]
position = game_state["players"][str(id + 1)]["x"], game_state["players"][str(id + 1)]["y"]
top_distance = position[1] - 1
bottom_distance = (board_height - 1) - (position[1] - 1)
right_distance = (board_width - 1) - (position[0] - 1)
left_distance = position[0] - 1
return top_distance, bottom_distance, right_distance, left_distance
def get_own_speed(game_state):
own_player = game_state["players"][str(game_state["you"])]
speed = own_player["speed"]
return speed
def get_connected_fields_for_new_position( x, y, new_direction, game_state, field_size):
game_state = json.loads(game_state)
graph = Graph(game_state["cells"],x,y, game_state["width"], game_state["height"], new_direction, field_size)
return len(graph.get_connected_components())
|
[
"numpy.power",
"Source.Utility.Pathfinding.Graph.Graph",
"json.loads"
] |
[((3807, 3829), 'json.loads', 'json.loads', (['game_state'], {}), '(game_state)\n', (3817, 3829), False, 'import json\n'), ((3842, 3948), 'Source.Utility.Pathfinding.Graph.Graph', 'Graph', (["game_state['cells']", 'x', 'y', "game_state['width']", "game_state['height']", 'new_direction', 'field_size'], {}), "(game_state['cells'], x, y, game_state['width'], game_state['height'],\n new_direction, field_size)\n", (3847, 3948), False, 'from Source.Utility.Pathfinding.Graph import Graph\n'), ((1083, 1115), 'numpy.power', 'np.power', (["game_state['width']", '(2)'], {}), "(game_state['width'], 2)\n", (1091, 1115), True, 'import numpy as np\n'), ((1118, 1151), 'numpy.power', 'np.power', (["game_state['height']", '(2)'], {}), "(game_state['height'], 2)\n", (1126, 1151), True, 'import numpy as np\n'), ((733, 785), 'numpy.power', 'np.power', (['(current_position[0] - enemy_position[0])', '(2)'], {}), '(current_position[0] - enemy_position[0], 2)\n', (741, 785), True, 'import numpy as np\n'), ((788, 840), 'numpy.power', 'np.power', (['(current_position[1] - enemy_position[1])', '(2)'], {}), '(current_position[1] - enemy_position[1], 2)\n', (796, 840), True, 'import numpy as np\n')]
|
from django.shortcuts import render, redirect
from django.contrib import messages
from django.http import HttpResponseForbidden
from django.contrib.auth.decorators import login_required
from .forms import *
from .models import *
from urllib.request import urlopen, Request
import json
import random
# Create your views here.
def landing(request):
return render(request, 'start.html')
def signup(request):
if request.user.is_authenticated:
return redirect('home')
if request.method == "POST":
form = UserRegisterForm(request.POST)
if form.is_valid():
user = form.save()
prof = Profile(user=user)
prof.save()
messages.success(
request, f"Your account has been created! You are now able to log in"
)
return redirect("login")
else:
form = UserRegisterForm()
return render(request, 'signup.html', {'form': form})
def home(request):
if request.method == "POST":
form = CarForm(request.POST)
if form.is_valid():
vin = form.cleaned_data['vin']
thing = Request(f'https://vpic.nhtsa.dot.gov/api/vehicles/decodevin/{vin}?format=json')
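            # keep only informative decoder fields: skip empty, zero and 'Not Applicable' values as well as the error text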
            response_body = {d['Variable']: d['Value'].upper()
                             for d in json.loads(urlopen(thing).read())['Results']
                             if d['Value'] != '0' and d['Value'] != 0 and d['Value'] is not None
                             and d['Value'] != '' and d['Value'] != 'Not Applicable'
                             and d['Variable'] != 'Error Text'}
return render(request, 'results.html', {'info': response_body})
messages.error(request, 'This VIN is invalid. It must be 17 characters long.')
vins = ['3GNEC12078G276688', '5GRGN23U83H132227', '1C3CDFBB5FD165342', '1FMZU62E12ZC12617', '2FAFP71W93X138259', '1FTPW12V17FA12080', '1FT8W3BT4FEA96950', 'WDXPD944555842209', 'WVGBV7AX6CW559712', 'SCFAC23302B500083', 'JH4KA7630NC013822', 'JH4DA175XGS009825', '1GCEK14K8RE106083', '4F2CU08102KM50866', 'JH4DA1850HS006058', '5N1AN0NU6BC506916', '1FVACYDC37HW59012']
return render(request, 'home.html', {'form': CarForm(), 'vin': random.choice(vins)})
|
[
"urllib.request.Request",
"django.shortcuts.redirect",
"django.contrib.messages.error",
"random.choice",
"urllib.request.urlopen",
"django.shortcuts.render",
"django.contrib.messages.success"
] |
[((360, 389), 'django.shortcuts.render', 'render', (['request', '"""start.html"""'], {}), "(request, 'start.html')\n", (366, 389), False, 'from django.shortcuts import render, redirect\n'), ((908, 954), 'django.shortcuts.render', 'render', (['request', '"""signup.html"""', "{'form': form}"], {}), "(request, 'signup.html', {'form': form})\n", (914, 954), False, 'from django.shortcuts import render, redirect\n'), ((466, 482), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (474, 482), False, 'from django.shortcuts import render, redirect\n'), ((1579, 1657), 'django.contrib.messages.error', 'messages.error', (['request', '"""This VIN is invalid. It must be 17 characters long."""'], {}), "(request, 'This VIN is invalid. It must be 17 characters long.')\n", (1593, 1657), False, 'from django.contrib import messages\n'), ((697, 788), 'django.contrib.messages.success', 'messages.success', (['request', 'f"""Your account has been created! You are now able to log in"""'], {}), "(request,\n f'Your account has been created! You are now able to log in')\n", (713, 788), False, 'from django.contrib import messages\n'), ((834, 851), 'django.shortcuts.redirect', 'redirect', (['"""login"""'], {}), "('login')\n", (842, 851), False, 'from django.shortcuts import render, redirect\n'), ((1138, 1217), 'urllib.request.Request', 'Request', (['f"""https://vpic.nhtsa.dot.gov/api/vehicles/decodevin/{vin}?format=json"""'], {}), "(f'https://vpic.nhtsa.dot.gov/api/vehicles/decodevin/{vin}?format=json')\n", (1145, 1217), False, 'from urllib.request import urlopen, Request\n'), ((1514, 1570), 'django.shortcuts.render', 'render', (['request', '"""results.html"""', "{'info': response_body}"], {}), "(request, 'results.html', {'info': response_body})\n", (1520, 1570), False, 'from django.shortcuts import render, redirect\n'), ((2095, 2114), 'random.choice', 'random.choice', (['vins'], {}), '(vins)\n', (2108, 2114), False, 'import random\n'), ((1302, 1316), 'urllib.request.urlopen', 'urlopen', (['thing'], {}), '(thing)\n', (1309, 1316), False, 'from urllib.request import urlopen, Request\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Functions for estimating the precision matrix based on the covariance of
either the residuals (temporal based precision matrix) or of the measurements
(instance based precision matrix)
"""
from collections.abc import Iterable
import numpy as np
from rsatoolbox.data import average_dataset_by
from rsatoolbox.util.data_utils import get_unique_inverse
def _check_demean(matrix):
"""
checks that an input has 2 or 3 dimensions and subtracts the mean.
returns a 2D matrix for covariance/precision computation and the
degrees of freedom
Args:
matrix (np.ndarray):
n_conditions x n_channels
Returns:
numpy.ndarray:
demeaned matrix
"""
assert isinstance(matrix, np.ndarray), "input must be ndarray"
if matrix.ndim in [1, 2]:
matrix = matrix - np.mean(matrix, axis=0, keepdims=True)
dof = matrix.shape[0] - 1
elif matrix.ndim == 3:
matrix -= np.mean(matrix, axis=2, keepdims=True)
dof = (matrix.shape[0] - 1) * matrix.shape[2]
matrix = matrix.transpose(0, 2, 1).reshape(
matrix.shape[0] * matrix.shape[2], matrix.shape[1])
else:
raise ValueError('Matrix for covariance estimation has wrong # of dimensions!')
return matrix, dof
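# Shape bookkeeping for the 3D branch above (a sketch): an (n_cond, n_chan, n_meas)
# array is demeaned per measurement, then stacked into (n_cond * n_meas, n_chan)
# with dof = (n_cond - 1) * n_meas, so all slices feed a single covariance estimate.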
def _estimate_covariance(matrix, dof, method):
""" calls the right covariance estimation function based on the ""method" argument
Args:
matrix (np.ndarray):
n_conditions x n_channels
dof (int):
degrees of freedom
method (string):
which estimator to use
Returns:
numpy.ndarray, numpy.ndarray:
cov_mat: n_channels x n_channels sample covariance matrix
"""
matrix, dof_nat = _check_demean(matrix)
if dof is None:
dof = dof_nat
# calculate sample covariance matrix s
if method == 'shrinkage_eye':
cov_mat = _covariance_eye(matrix, dof)
elif method == 'shrinkage_diag':
cov_mat = _covariance_diag(matrix, dof)
elif method == 'diag':
cov_mat = _variance(matrix, dof)
elif method == 'full':
cov_mat = _covariance_full(matrix, dof)
return cov_mat
def _variance(matrix, dof):
"""
returns the vector of variances per measurement channel.
The formula used here implies that the mean was already removed.
Args:
matrix (np.ndarray):
n_conditions x n_channels
Returns:
numpy.ndarray:
variance vector
"""
return np.diag(np.einsum('ij, ij-> j', matrix, matrix) / dof)
def _covariance_full(matrix, dof):
"""
computes the sample covariance matrix from a 2d-array.
matrix should be demeaned before!
Args:
matrix (np.ndarray):
n_conditions x n_channels
Returns:
numpy.ndarray, numpy.ndarray:
s_mean: n_channels x n_channels sample covariance matrix
"""
return np.einsum('ij, ik-> jk', matrix, matrix) / dof
def _covariance_eye(matrix, dof):
"""
computes the sample covariance matrix from a 2d-array.
matrix should be demeaned before!
Computes an optimal shrinkage estimate of a sample covariance matrix
as described by the following publication:
<NAME> Wolfe (2004): "A well-conditioned
estimator for large-dimensional covariance matrices"
Args:
matrix (np.ndarray):
n_conditions x n_channels
Returns:
numpy.ndarray, numpy.ndarray:
s_mean: n_channels x n_channels sample covariance matrix
xt_x:
Einstein summation form of the matrix product
of the 2d-array with itself
"""
s_sum = np.zeros((matrix.shape[1], matrix.shape[1]))
s2_sum = np.zeros((matrix.shape[1], matrix.shape[1]))
for m_line in matrix:
xt_x = np.outer(m_line, m_line)
s_sum += xt_x
s2_sum += xt_x ** 2
s = s_sum / matrix.shape[0]
b2 = np.sum(s2_sum / matrix.shape[0] - s * s) / matrix.shape[0]
# calculate the scalar estimators to find the optimal shrinkage:
# m, d^2, b^2 as in Ledoit & Wolfe paper
m = np.sum(np.diag(s)) / s.shape[0]
d2 = np.sum((s - m * np.eye(s.shape[0])) ** 2)
b2 = min(d2, b2)
# shrink covariance matrix
s_shrink = b2 / d2 * m * np.eye(s.shape[0]) \
+ (d2-b2) / d2 * s
# correction for degrees of freedom
s_shrink = s_shrink * matrix.shape[0] / dof
return s_shrink
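# Minimal sketch of why the shrinkage matters (assumption: a plain numpy session):
# rng = np.random.default_rng(0)
# x = rng.standard_normal((10, 50))        # fewer observations than channels
# x = x - x.mean(axis=0, keepdims=True)
# _covariance_full(x, 9)                   # rank-deficient, hence singular
# _covariance_eye(x, 9)                    # shrunk towards m * identity, invertible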
def _covariance_diag(matrix, dof, mem_threshold=(10**9)/8):
"""
computes the sample covariance matrix from a 2d-array.
matrix should be demeaned before!
Computes an optimal shrinkage estimate of a sample covariance matrix
as described by the following publication:
<NAME>., & <NAME>. (2005). "A Shrinkage Approach to Large-Scale
Covariance Matrix Estimation and Implications for Functional Genomics.""
Args:
matrix (np.ndarray):
n_conditions x n_channels
Returns:
numpy.ndarray, numpy.ndarray:
s_mean: n_channels x n_channels sample covariance matrix
xt_x:
Einstein summation form of the matrix product
of the 2d-array with itself
"""
s_sum = np.zeros((matrix.shape[1], matrix.shape[1]))
s2_sum = np.zeros((matrix.shape[1], matrix.shape[1]))
for m_line in matrix:
xt_x = np.outer(m_line, m_line)
s_sum += xt_x
s2_sum += xt_x ** 2
s = s_sum / dof
var = np.diag(s)
std = np.sqrt(var)
s_mean = s_sum / np.expand_dims(std, 0) / np.expand_dims(std, 1) / (matrix.shape[0] - 1)
s2_mean = s2_sum / np.expand_dims(var, 0) / np.expand_dims(var, 1) / (matrix.shape[0] - 1)
var_hat = matrix.shape[0] / dof ** 2 \
* (s2_mean - s_mean ** 2)
    mask = ~np.eye(s.shape[0], dtype=bool)
lamb = np.sum(var_hat[mask]) / np.sum(s_mean[mask] ** 2)
lamb = max(min(lamb, 1), 0)
scaling = np.eye(s.shape[0]) + (1-lamb) * mask
s_shrink = s * scaling
return s_shrink
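# Unlike _covariance_eye above, the shrinkage target here keeps each channel's own
# variance and only scales the off-diagonal entries (the 'mask' part) by 1 - lamb.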
def sample_covariance_3d(tensor):
"""
computes the sample covariance matrix from a tensor by estimating the
sample covariance for each slice along the third dimension and averaging
the estimated covariance matrices.
Args:
tensor (numpy.ndarray):
n_conditions x n_channels x n_measurements
Returns:
        numpy.ndarray, numpy.ndarray:
            s_mean: n_channels x n_channels expected sample covariance matrix
            xt_x: n_conditions x n_channels x n_channels stack of outer products
"""
xt_x = np.einsum('ij, ik-> ijk', tensor, tensor)
s = np.mean(xt_x, axis=0)
return s, xt_x
def cov_from_residuals(residuals, dof=None, method='shrinkage_diag'):
"""
Estimates a covariance matrix from measurements. Allows for shrinkage estimates.
Use 'method' to choose which estimation method is used.
Args:
residuals(numpy.ndarray or list of these): n_residuals x n_channels
matrix of residuals
dof(int or list of int): degrees of freedom for covariance estimation
defaults to n_res - 1, should be corrected for the number
of regressors in a GLM if applicable.
method(str): which estimate to use:
'diag': provides a diagonal matrix, i.e. univariate noise normalizer
'full': computes the sample covariance without shrinkage
'shrinkage_eye': shrinks the data covariance towards a multiple of the identity.
'shrinkage_diag': shrinks the covariance matrix towards the diagonal covariance matrix.
Returns:
numpy.ndarray (or list): sigma_p: covariance matrix over channels
"""
if not isinstance(residuals, np.ndarray) or len(residuals.shape) > 2:
cov_mat = []
for i, residual in enumerate(residuals):
if dof is None:
cov_mat.append(cov_from_residuals(
residual, method=method))
elif isinstance(dof, Iterable):
cov_mat.append(cov_from_residuals(
                    residual, method=method, dof=dof[i]))
else:
cov_mat.append(cov_from_residuals(
residual, method=method, dof=dof))
else:
cov_mat = _estimate_covariance(residuals, dof, method)
return cov_mat
def prec_from_residuals(residuals, dof=None, method='shrinkage_diag'):
"""
Estimates the covariance matrix from residuals and finds its multiplicative
inverse (= the precision matrix)
Use 'method' to choose which estimation method is used.
Args:
residuals(numpy.ndarray or list of these): n_residuals x n_channels
matrix of residuals
dof(int or list of int): degrees of freedom for covariance estimation
defaults to n_res - 1, should be corrected for the number
of regressors in a GLM if applicable.
method(str): which estimate to use:
'diag': provides a diagonal matrix, i.e. univariate noise normalizer
'full': computes the sample covariance without shrinkage
'shrinkage_eye': shrinks the data covariance towards a multiple of the identity.
'shrinkage_diag': shrinks the covariance matrix towards the diagonal covariance matrix.
Returns:
numpy.ndarray (or list): sigma_p: precision matrix over channels
"""
cov = cov_from_residuals(residuals=residuals, dof=dof, method=method)
if not isinstance(cov, np.ndarray):
prec = [None] * len(cov)
for i, cov_i in enumerate(cov):
prec[i] = np.linalg.inv(cov_i)
elif len(cov.shape) > 2:
prec = np.zeros(cov.shape)
for i, cov_i in enumerate(cov):
prec[i] = np.linalg.inv(cov_i)
else:
prec = np.linalg.inv(cov)
return prec
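# Minimal usage sketch (assumption: residuals from some fitted GLM, plain numpy):
# residuals = np.random.default_rng(0).standard_normal((200, 30))
# prec = prec_from_residuals(residuals, method='shrinkage_diag')
# prec.shape                               # (30, 30) channel-by-channel precision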
def cov_from_measurements(dataset, obs_desc, dof=None, method='shrinkage_diag'):
"""
Estimates a covariance matrix from measurements. Allows for shrinkage estimates.
Use 'method' to choose which estimation method is used.
Args:
dataset(data.Dataset):
rsatoolbox Dataset object
dof(int or list of int): degrees of freedom for covariance estimation
defaults to n_res - 1, should be corrected for the number
of regressors in a GLM if applicable.
method(str): which estimate to use:
'diag': provides a diagonal matrix, i.e. univariate noise normalizer
'full': computes the sample covariance without shrinkage
'shrinkage_eye': shrinks the data covariance towards a multiple of the identity.
'shrinkage_diag': shrinks the covariance matrix towards the diagonal covariance matrix.
Returns:
numpy.ndarray (or list): sigma_p: covariance matrix over channels
"""
if isinstance(dataset, Iterable):
cov_mat = []
for i, dat in enumerate(dataset):
if dof is None:
cov_mat.append(cov_from_unbalanced(
dat, obs_desc=obs_desc, method=method))
elif isinstance(dof, Iterable):
cov_mat.append(cov_from_unbalanced(
dat, obs_desc=obs_desc, method=method, dof=dof[i]))
else:
cov_mat.append(cov_from_unbalanced(
dat, obs_desc=obs_desc, method=method, dof=dof))
else:
assert "Dataset" in str(type(dataset)), "Provided object is not a dataset"
assert obs_desc in dataset.obs_descriptors.keys(), \
"obs_desc not contained in the dataset's obs_descriptors"
tensor, _ = dataset.get_measurements_tensor(obs_desc)
# calculate sample covariance matrix s
cov_mat = _estimate_covariance(tensor, dof, method)
return cov_mat
def prec_from_measurements(dataset, obs_desc, dof=None, method='shrinkage_diag'):
"""
Estimates the covariance matrix from measurements and finds its multiplicative
inverse (= the precision matrix)
Use 'method' to choose which estimation method is used.
Args:
residuals(numpy.ndarray or list of these): n_residuals x n_channels
matrix of residuals
dof(int or list of int): degrees of freedom for covariance estimation
defaults to n_res - 1, should be corrected for the number
of regressors in a GLM if applicable.
method(str): which estimate to use:
'diag': provides a diagonal matrix, i.e. univariate noise normalizer
'full': computes the sample covariance without shrinkage
'shrinkage_eye': shrinks the data covariance towards a multiple of the identity.
'shrinkage_diag': shrinks the covariance matrix towards the diagonal covariance matrix.
Returns:
numpy.ndarray (or list): sigma_p: precision matrix over channels
"""
cov = cov_from_measurements(dataset, obs_desc, dof=dof, method=method)
if not isinstance(cov, np.ndarray):
prec = [None] * len(cov)
for i, cov_i in enumerate(cov):
prec[i] = np.linalg.inv(cov_i)
elif len(cov.shape) > 2:
prec = np.zeros(cov.shape)
for i, cov_i in enumerate(cov):
prec[i] = np.linalg.inv(cov_i)
else:
prec = np.linalg.inv(cov)
return prec
def cov_from_unbalanced(dataset, obs_desc, dof=None, method='shrinkage_diag'):
"""
Estimates a covariance matrix from an unbalanced dataset, i.e. from a
dataset that contains different numbers of samples for different
stimuli.
Args:
dataset(data.Dataset):
rsatoolbox Dataset object
dof(int or list of int): degrees of freedom for covariance estimation
defaults to n_measurements - n_stimuli, should be corrected
if this is not the case
method(str): which estimate to use:
'diag': provides a diagonal matrix, i.e. univariate noise normalizer
'full': computes the sample covariance without shrinkage
'shrinkage_eye': shrinks the data covariance towards a multiple of the identity.
'shrinkage_diag': shrinks the covariance matrix towards the diagonal covariance matrix.
Returns:
numpy.ndarray (or list): sigma_p: covariance matrix over channels
"""
if isinstance(dataset, Iterable):
cov_mat = []
for i, dat in enumerate(dataset):
if dof is None:
cov_mat.append(cov_from_unbalanced(
dat, obs_desc=obs_desc, method=method))
elif isinstance(dof, Iterable):
cov_mat.append(cov_from_unbalanced(
dat, obs_desc=obs_desc, method=method, dof=dof[i]))
else:
cov_mat.append(cov_from_unbalanced(
dat, obs_desc=obs_desc, method=method, dof=dof))
else:
assert "Dataset" in str(type(dataset)), "Provided object is not a dataset"
assert obs_desc in dataset.obs_descriptors.keys(), \
"obs_desc not contained in the dataset's obs_descriptors"
matrix = dataset.measurements
means, values, _ = average_dataset_by(dataset, obs_desc)
values, inverse = get_unique_inverse(dataset.obs_descriptors[obs_desc])
matrix -= means[inverse]
# calculate sample covariance matrix s
if dof is None:
dof = matrix.shape[0] - len(values)
cov_mat = _estimate_covariance(matrix, dof, method)
return cov_mat
def prec_from_unbalanced(dataset, obs_desc, dof=None, method='shrinkage_diag'):
"""
Estimates the covariance matrix from measurements and finds its multiplicative
inverse (= the precision matrix)
Use 'method' to choose which estimation method is used.
Args:
residuals(numpy.ndarray or list of these): n_residuals x n_channels
matrix of residuals
dof(int or list of int): degrees of freedom for covariance estimation
defaults to n_res - 1, should be corrected for the number
of regressors in a GLM if applicable.
method(str): which estimate to use:
'diag': provides a diagonal matrix, i.e. univariate noise normalizer
'full': computes the sample covariance without shrinkage
'shrinkage_eye': shrinks the data covariance towards a multiple of the identity.
'shrinkage_diag': shrinks the covariance matrix towards the diagonal covariance matrix.
Returns:
numpy.ndarray (or list): sigma_p: precision matrix over channels
"""
cov = cov_from_unbalanced(dataset, obs_desc, dof=dof, method=method)
if not isinstance(cov, np.ndarray):
prec = [None] * len(cov)
for i, cov_i in enumerate(cov):
prec[i] = np.linalg.inv(cov_i)
elif len(cov.shape) > 2:
prec = np.zeros(cov.shape)
for i, cov_i in enumerate(cov):
prec[i] = np.linalg.inv(cov_i)
else:
prec = np.linalg.inv(cov)
return prec
|
[
"numpy.outer",
"numpy.sum",
"numpy.eye",
"rsatoolbox.util.data_utils.get_unique_inverse",
"rsatoolbox.data.average_dataset_by",
"numpy.zeros",
"numpy.einsum",
"numpy.expand_dims",
"numpy.mean",
"numpy.linalg.inv",
"numpy.diag",
"numpy.sqrt"
] |
[((3735, 3779), 'numpy.zeros', 'np.zeros', (['(matrix.shape[1], matrix.shape[1])'], {}), '((matrix.shape[1], matrix.shape[1]))\n', (3743, 3779), True, 'import numpy as np\n'), ((3793, 3837), 'numpy.zeros', 'np.zeros', (['(matrix.shape[1], matrix.shape[1])'], {}), '((matrix.shape[1], matrix.shape[1]))\n', (3801, 3837), True, 'import numpy as np\n'), ((5267, 5311), 'numpy.zeros', 'np.zeros', (['(matrix.shape[1], matrix.shape[1])'], {}), '((matrix.shape[1], matrix.shape[1]))\n', (5275, 5311), True, 'import numpy as np\n'), ((5325, 5369), 'numpy.zeros', 'np.zeros', (['(matrix.shape[1], matrix.shape[1])'], {}), '((matrix.shape[1], matrix.shape[1]))\n', (5333, 5369), True, 'import numpy as np\n'), ((5516, 5526), 'numpy.diag', 'np.diag', (['s'], {}), '(s)\n', (5523, 5526), True, 'import numpy as np\n'), ((5537, 5549), 'numpy.sqrt', 'np.sqrt', (['var'], {}), '(var)\n', (5544, 5549), True, 'import numpy as np\n'), ((6520, 6561), 'numpy.einsum', 'np.einsum', (['"""ij, ik-> ijk"""', 'tensor', 'tensor'], {}), "('ij, ik-> ijk', tensor, tensor)\n", (6529, 6561), True, 'import numpy as np\n'), ((6570, 6591), 'numpy.mean', 'np.mean', (['xt_x'], {'axis': '(0)'}), '(xt_x, axis=0)\n', (6577, 6591), True, 'import numpy as np\n'), ((2986, 3026), 'numpy.einsum', 'np.einsum', (['"""ij, ik-> jk"""', 'matrix', 'matrix'], {}), "('ij, ik-> jk', matrix, matrix)\n", (2995, 3026), True, 'import numpy as np\n'), ((3879, 3903), 'numpy.outer', 'np.outer', (['m_line', 'm_line'], {}), '(m_line, m_line)\n', (3887, 3903), True, 'import numpy as np\n'), ((3995, 4035), 'numpy.sum', 'np.sum', (['(s2_sum / matrix.shape[0] - s * s)'], {}), '(s2_sum / matrix.shape[0] - s * s)\n', (4001, 4035), True, 'import numpy as np\n'), ((5411, 5435), 'numpy.outer', 'np.outer', (['m_line', 'm_line'], {}), '(m_line, m_line)\n', (5419, 5435), True, 'import numpy as np\n'), ((5827, 5860), 'numpy.eye', 'np.eye', (['s.shape[0]'], {'dtype': 'np.bool'}), '(s.shape[0], dtype=np.bool)\n', (5833, 5860), True, 'import numpy as np\n'), ((5872, 5893), 'numpy.sum', 'np.sum', (['var_hat[mask]'], {}), '(var_hat[mask])\n', (5878, 5893), True, 'import numpy as np\n'), ((5896, 5921), 'numpy.sum', 'np.sum', (['(s_mean[mask] ** 2)'], {}), '(s_mean[mask] ** 2)\n', (5902, 5921), True, 'import numpy as np\n'), ((5968, 5986), 'numpy.eye', 'np.eye', (['s.shape[0]'], {}), '(s.shape[0])\n', (5974, 5986), True, 'import numpy as np\n'), ((15071, 15108), 'rsatoolbox.data.average_dataset_by', 'average_dataset_by', (['dataset', 'obs_desc'], {}), '(dataset, obs_desc)\n', (15089, 15108), False, 'from rsatoolbox.data import average_dataset_by\n'), ((15135, 15188), 'rsatoolbox.util.data_utils.get_unique_inverse', 'get_unique_inverse', (['dataset.obs_descriptors[obs_desc]'], {}), '(dataset.obs_descriptors[obs_desc])\n', (15153, 15188), False, 'from rsatoolbox.util.data_utils import get_unique_inverse\n'), ((877, 915), 'numpy.mean', 'np.mean', (['matrix'], {'axis': '(0)', 'keepdims': '(True)'}), '(matrix, axis=0, keepdims=True)\n', (884, 915), True, 'import numpy as np\n'), ((995, 1033), 'numpy.mean', 'np.mean', (['matrix'], {'axis': '(2)', 'keepdims': '(True)'}), '(matrix, axis=2, keepdims=True)\n', (1002, 1033), True, 'import numpy as np\n'), ((2578, 2617), 'numpy.einsum', 'np.einsum', (['"""ij, ij-> j"""', 'matrix', 'matrix'], {}), "('ij, ij-> j', matrix, matrix)\n", (2587, 2617), True, 'import numpy as np\n'), ((4183, 4193), 'numpy.diag', 'np.diag', (['s'], {}), '(s)\n', (4190, 4193), True, 'import numpy as np\n'), ((4340, 4358), 'numpy.eye', 'np.eye', (['s.shape[0]'], {}), '(s.shape[0])\n', (4346, 4358), True, 'import numpy as np\n'), ((5596, 5618), 'numpy.expand_dims', 'np.expand_dims', (['std', '(1)'], {}), '(std, 1)\n', (5610, 5618), True, 'import numpy as np\n'), ((5691, 5713), 'numpy.expand_dims', 'np.expand_dims', (['var', '(1)'], {}), '(var, 1)\n', (5705, 5713), True, 'import numpy as np\n'), ((9543, 9563), 'numpy.linalg.inv', 'np.linalg.inv', (['cov_i'], {}), '(cov_i)\n', (9556, 9563), True, 'import numpy as np\n'), ((9608, 9627), 'numpy.zeros', 'np.zeros', (['cov.shape'], {}), '(cov.shape)\n', (9616, 9627), True, 'import numpy as np\n'), ((9736, 9754), 'numpy.linalg.inv', 'np.linalg.inv', (['cov'], {}), '(cov)\n', (9749, 9754), True, 'import numpy as np\n'), ((13011, 13031), 'numpy.linalg.inv', 'np.linalg.inv', (['cov_i'], {}), '(cov_i)\n', (13024, 13031), True, 'import numpy as np\n'), ((13076, 13095), 'numpy.zeros', 'np.zeros', (['cov.shape'], {}), '(cov.shape)\n', (13084, 13095), True, 'import numpy as np\n'), ((13204, 13222), 'numpy.linalg.inv', 'np.linalg.inv', (['cov'], {}), '(cov)\n', (13217, 13222), True, 'import numpy as np\n'), ((16698, 16718), 'numpy.linalg.inv', 'np.linalg.inv', (['cov_i'], {}), '(cov_i)\n', (16711, 16718), True, 'import numpy as np\n'), ((16763, 16782), 'numpy.zeros', 'np.zeros', (['cov.shape'], {}), '(cov.shape)\n', (16771, 16782), True, 'import numpy as np\n'), ((16891, 16909), 'numpy.linalg.inv', 'np.linalg.inv', (['cov'], {}), '(cov)\n', (16904, 16909), True, 'import numpy as np\n'), ((5571, 5593), 'numpy.expand_dims', 'np.expand_dims', (['std', '(0)'], {}), '(std, 0)\n', (5585, 5593), True, 'import numpy as np\n'), ((5666, 5688), 'numpy.expand_dims', 'np.expand_dims', (['var', '(0)'], {}), '(var, 0)\n', (5680, 5688), True, 'import numpy as np\n'), ((9690, 9710), 'numpy.linalg.inv', 'np.linalg.inv', (['cov_i'], {}), '(cov_i)\n', (9703, 9710), True, 'import numpy as np\n'), ((13158, 13178), 'numpy.linalg.inv', 'np.linalg.inv', (['cov_i'], {}), '(cov_i)\n', (13171, 13178), True, 'import numpy as np\n'), ((16845, 16865), 'numpy.linalg.inv', 'np.linalg.inv', (['cov_i'], {}), '(cov_i)\n', (16858, 16865), True, 'import numpy as np\n'), ((4233, 4251), 'numpy.eye', 'np.eye', (['s.shape[0]'], {}), '(s.shape[0])\n', (4239, 4251), True, 'import numpy as np\n')]
|
import sys
import h5py
import numpy as np
import torch
from torch.autograd import Variable
def print_args(args):
print("===== Experiment Configuration =====")
options = vars(args)
for key, value in options.items():
print(f'{key}: {value}')
print("====================================")
def rand_float(lo, hi):
return np.random.rand() * (hi - lo) + lo
def rand_int(lo, hi):
return np.random.randint(lo, hi)
def calc_dis(a, b):
return np.sqrt((a[0] - b[0]) ** 2 + (a[1] - b[1]) ** 2)
def norm(x, p=2):
return np.power(np.sum(x ** p), 1. / p)
def store_data(data_names, data, path):
hf = h5py.File(path, 'w')
for i in range(len(data_names)):
hf.create_dataset(data_names[i], data=data[i])
hf.close()
def load_data(data_names, path):
hf = h5py.File(path, 'r')
data = []
for i in range(len(data_names)):
d = np.array(hf.get(data_names[i]))
data.append(d)
hf.close()
return data
def combine_stat(stat_0, stat_1):
mean_0, std_0, n_0 = stat_0[:, 0], stat_0[:, 1], stat_0[:, 2]
mean_1, std_1, n_1 = stat_1[:, 0], stat_1[:, 1], stat_1[:, 2]
mean = (mean_0 * n_0 + mean_1 * n_1) / (n_0 + n_1)
std = np.sqrt(
(std_0 ** 2 * n_0 + std_1 ** 2 * n_1 + (mean_0 - mean) ** 2 * n_0 + (mean_1 - mean) ** 2 * n_1) / (n_0 + n_1))
n = n_0 + n_1
return np.stack([mean, std, n], axis=-1)
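# Sanity sketch for the pooled update above (plain numpy, std with ddof=0 assumed):
# x0, x1 = np.random.rand(100, 2), np.random.rand(50, 2)
# s0 = np.stack([x0.mean(0), x0.std(0), np.full(2, 100.0)], axis=-1)
# s1 = np.stack([x1.mean(0), x1.std(0), np.full(2, 50.0)], axis=-1)
# combine_stat(s0, s1)                     # matches the stats of np.concatenate([x0, x1])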
def init_stat(dim):
# mean, std, count
return np.zeros((dim, 3))
def var_norm(x):
return torch.sqrt((x ** 2).sum()).item()
def count_parameters(model):
return sum(p.numel() for p in model.parameters() if p.requires_grad)
def get_flat(x, keep_dim=False):
if keep_dim:
return x.reshape(torch.Size([1, x.size(0) * x.size(1)]) + x.size()[2:])
return x.reshape(torch.Size([x.size(0) * x.size(1)]) + x.size()[2:])
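# e.g. x of shape (B, T, C, H, W) -> (B * T, C, H, W); keep_dim=True gives (1, B * T, C, H, W)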
def to_var(tensor, use_gpu, requires_grad=False):
if use_gpu:
return Variable(torch.FloatTensor(tensor).cuda(), requires_grad=requires_grad)
else:
return Variable(torch.FloatTensor(tensor), requires_grad=requires_grad)
def to_np(x):
return x.detach().cpu().numpy()
def mix_iters(iters):
table = []
for i, iter in enumerate(iters):
table += [i] * len(iter)
np.random.shuffle(table)
for i in table:
        yield next(iters[i])
class Tee(object):
def __init__(self, name, mode):
self.file = open(name, mode)
self.stdout = sys.stdout
sys.stdout = self
def __del__(self):
sys.stdout = self.stdout
self.file.close()
def write(self, data):
self.file.write(data)
self.stdout.write(data)
def flush(self):
self.file.flush()
def close(self):
self.__del__()
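# Usage sketch: constructing Tee('run.log', 'w') redirects sys.stdout so that every
# subsequent print goes both to the console and to run.log until the object is deleted.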
class AverageMeter(object):
def __init__(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
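# Typical training-loop usage (a sketch; 'loader', 'model' and 'criterion' are hypothetical):
# meter = AverageMeter()
# for x, y in loader:
#     loss = criterion(model(x), y)
#     meter.update(loss.item(), n=x.size(0))   # meter.avg holds the sample-weighted mean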
|
[
"numpy.stack",
"h5py.File",
"numpy.sum",
"numpy.zeros",
"torch.FloatTensor",
"numpy.random.randint",
"numpy.random.rand",
"numpy.random.shuffle",
"numpy.sqrt"
] |
[((417, 442), 'numpy.random.randint', 'np.random.randint', (['lo', 'hi'], {}), '(lo, hi)\n', (434, 442), True, 'import numpy as np\n'), ((476, 524), 'numpy.sqrt', 'np.sqrt', (['((a[0] - b[0]) ** 2 + (a[1] - b[1]) ** 2)'], {}), '((a[0] - b[0]) ** 2 + (a[1] - b[1]) ** 2)\n', (483, 524), True, 'import numpy as np\n'), ((640, 660), 'h5py.File', 'h5py.File', (['path', '"""w"""'], {}), "(path, 'w')\n", (649, 660), False, 'import h5py\n'), ((812, 832), 'h5py.File', 'h5py.File', (['path', '"""r"""'], {}), "(path, 'r')\n", (821, 832), False, 'import h5py\n'), ((1216, 1338), 'numpy.sqrt', 'np.sqrt', (['((std_0 ** 2 * n_0 + std_1 ** 2 * n_1 + (mean_0 - mean) ** 2 * n_0 + (\n mean_1 - mean) ** 2 * n_1) / (n_0 + n_1))'], {}), '((std_0 ** 2 * n_0 + std_1 ** 2 * n_1 + (mean_0 - mean) ** 2 * n_0 +\n (mean_1 - mean) ** 2 * n_1) / (n_0 + n_1))\n', (1223, 1338), True, 'import numpy as np\n'), ((1374, 1407), 'numpy.stack', 'np.stack', (['[mean, std, n]'], {'axis': '(-1)'}), '([mean, std, n], axis=-1)\n', (1382, 1407), True, 'import numpy as np\n'), ((1464, 1482), 'numpy.zeros', 'np.zeros', (['(dim, 3)'], {}), '((dim, 3))\n', (1472, 1482), True, 'import numpy as np\n'), ((2266, 2290), 'numpy.random.shuffle', 'np.random.shuffle', (['table'], {}), '(table)\n', (2283, 2290), True, 'import numpy as np\n'), ((565, 579), 'numpy.sum', 'np.sum', (['(x ** p)'], {}), '(x ** p)\n', (571, 579), True, 'import numpy as np\n'), ((348, 364), 'numpy.random.rand', 'np.random.rand', ([], {}), '()\n', (362, 364), True, 'import numpy as np\n'), ((2045, 2070), 'torch.FloatTensor', 'torch.FloatTensor', (['tensor'], {}), '(tensor)\n', (2062, 2070), False, 'import torch\n'), ((1948, 1973), 'torch.FloatTensor', 'torch.FloatTensor', (['tensor'], {}), '(tensor)\n', (1965, 1973), False, 'import torch\n')]
|
#!/usr/bin/env python
__description__ = 'Decode VBE script'
__author__ = '<NAME>'
__version__ = '0.0.2'
__date__ = '2016/03/29'
"""
Source code put in public domain by Didier Stevens, no Copyright
https://DidierStevens.com
Use at your own risk
History:
2016/03/28: start
2016/03/29: 0.0.2 added support for ZIP files and literal arguments with File2StringHash
Todo:
Reference:
https://gallery.technet.microsoft.com/Encode-and-Decode-a-VB-a480d74c
"""
import optparse
import sys
import os
import signal
import textwrap
import re
import zipfile
import binascii
MALWARE_PASSWORD = 'infected'
def PrintManual():
manual = '''
Manual:
This program reads from the given file or standard input, and converts the encoded VBE script to VBS.
The provided file can be a password protected ZIP file (with password infected) containing the VBE script.
The content of the VBE script can also be passed as a literal argument. This is similar to a Here Document in Unix.
Start the argument (the "filename") with character # to pass a literal argument.
Example: decode-vbe.py "##@~^DgAAAA==\ko$K6,JCV^GJqAQAAA==^#~@"
Result: MsgBox "Hello"
It's also possible to use hexadecimal (prefix #h#) or base64 (prefix #b#) to pass a literal argument.
Example: decode-vbe.py #h#23407E5E4467414141413D3D5C6B6F244B362C4A437F565E474A7141514141413D3D5E237E40
Result: MsgBox "Hello"
Example: decode-vbe.py #b#I<KEY>=
Result: MsgBox "Hello"
'''
for line in manual.split('\n'):
print(textwrap.fill(line))
#Convert 2 Bytes If Python 3
def C2BIP3(string):
if sys.version_info[0] > 2:
return bytes([ord(x) for x in string])
else:
return string
def File2String(filename):
try:
f = open(filename, 'rb')
except:
return None
try:
return f.read()
except:
return None
finally:
f.close()
def File2StringHash(filename):
decoded = None
if filename.startswith('#h#'):
try:
decoded = binascii.a2b_hex(filename[3:])
finally:
return decoded
elif filename.startswith('#b#'):
try:
decoded = binascii.a2b_base64(filename[3:])
finally:
return decoded
elif filename.startswith('#'):
return filename[1:]
elif filename.lower().endswith('.zip'):
oZipfile = zipfile.ZipFile(filename, 'r')
if len(oZipfile.infolist()) == 1:
oZipContent = oZipfile.open(oZipfile.infolist()[0], 'r', C2BIP3(MALWARE_PASSWORD))
data = oZipContent.read()
oZipContent.close()
else:
data = File2String(filename)
oZipfile.close()
return data
else:
return File2String(filename)
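# e.g. File2StringHash('#h#4142') -> b'AB' and File2StringHash('#b#QUI=') -> b'AB';
# a plain '#...' argument is returned as-is (minus the leading '#').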
def FixPipe():
try:
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
except:
pass
#Fix for http://bugs.python.org/issue11395
def StdoutWriteChunked(data):
while data != '':
sys.stdout.write(data[0:10000])
sys.stdout.flush()
data = data[10000:]
def Decode(data):
dDecode = {}
dDecode[9] = '\x57\x6E\x7B'
dDecode[10] = '\x4A\x4C\x41'
dDecode[11] = '\x0B\x0B\x0B'
dDecode[12] = '\x0C\x0C\x0C'
dDecode[13] = '\x4A\x4C\x41'
dDecode[14] = '\x0E\x0E\x0E'
dDecode[15] = '\x0F\x0F\x0F'
dDecode[16] = '\x10\x10\x10'
dDecode[17] = '\x11\x11\x11'
dDecode[18] = '\x12\x12\x12'
dDecode[19] = '\x13\x13\x13'
dDecode[20] = '\x14\x14\x14'
dDecode[21] = '\x15\x15\x15'
dDecode[22] = '\x16\x16\x16'
dDecode[23] = '\x17\x17\x17'
dDecode[24] = '\x18\x18\x18'
dDecode[25] = '\x19\x19\x19'
dDecode[26] = '\x1A\x1A\x1A'
dDecode[27] = '\x1B\x1B\x1B'
dDecode[28] = '\x1C\x1C\x1C'
dDecode[29] = '\x1D\x1D\x1D'
dDecode[30] = '\x1E\x1E\x1E'
dDecode[31] = '\x1F\x1F\x1F'
dDecode[32] = '\x2E\x2D\x32'
dDecode[33] = '\x47\x75\x30'
dDecode[34] = '\x7A\x52\x21'
dDecode[35] = '\x56\x60\x29'
dDecode[36] = '\x42\x71\x5B'
dDecode[37] = '\x6A\x5E\x38'
dDecode[38] = '\x2F\x49\x33'
dDecode[39] = '\x26\x5C\x3D'
dDecode[40] = '\x49\x62\x58'
dDecode[41] = '\x41\x7D\x3A'
dDecode[42] = '\x34\x29\x35'
dDecode[43] = '\x32\x36\x65'
dDecode[44] = '\x5B\x20\x39'
dDecode[45] = '\x76\x7C\x5C'
dDecode[46] = '\x72\x7A\x56'
dDecode[47] = '\x43\x7F\x73'
dDecode[48] = '\x38\x6B\x66'
dDecode[49] = '\x39\x63\x4E'
dDecode[50] = '\x70\x33\x45'
dDecode[51] = '\x45\x2B\x6B'
dDecode[52] = '\x68\x68\x62'
dDecode[53] = '\x71\x51\x59'
dDecode[54] = '\x4F\x66\x78'
dDecode[55] = '\x09\x76\x5E'
dDecode[56] = '\x62\x31\x7D'
dDecode[57] = '\x44\x64\x4A'
dDecode[58] = '\x23\x54\x6D'
dDecode[59] = '\x75\x43\x71'
dDecode[60] = '\x4A\x4C\x41'
dDecode[61] = '\x7E\x3A\x60'
dDecode[62] = '\x4A\x4C\x41'
dDecode[63] = '\x5E\x7E\x53'
dDecode[64] = '\x40\x4C\x40'
dDecode[65] = '\x77\x45\x42'
dDecode[66] = '\x4A\x2C\x27'
dDecode[67] = '\x61\x2A\x48'
dDecode[68] = '\x5D\x74\x72'
dDecode[69] = '\x22\x27\x75'
dDecode[70] = '\x4B\x37\x31'
dDecode[71] = '\x6F\x44\x37'
dDecode[72] = '\x4E\x79\x4D'
dDecode[73] = '\x3B\x59\x52'
dDecode[74] = '\x4C\x2F\x22'
dDecode[75] = '\x50\x6F\x54'
dDecode[76] = '\x67\x26\x6A'
dDecode[77] = '\x2A\x72\x47'
dDecode[78] = '\x7D\x6A\x64'
dDecode[79] = '\x74\x39\x2D'
dDecode[80] = '\x54\x7B\x20'
dDecode[81] = '\x2B\x3F\x7F'
dDecode[82] = '\x2D\x38\x2E'
dDecode[83] = '\x2C\x77\x4C'
dDecode[84] = '\x30\x67\x5D'
dDecode[85] = '\x6E\x53\x7E'
dDecode[86] = '\x6B\x47\x6C'
dDecode[87] = '\x66\x34\x6F'
dDecode[88] = '\x35\x78\x79'
dDecode[89] = '\x25\x5D\x74'
dDecode[90] = '\x21\x30\x43'
dDecode[91] = '\x64\x23\x26'
dDecode[92] = '\x4D\x5A\x76'
dDecode[93] = '\x52\x5B\x25'
dDecode[94] = '\x63\x6C\x24'
dDecode[95] = '\x3F\x48\x2B'
dDecode[96] = '\x7B\x55\x28'
dDecode[97] = '\x78\x70\x23'
dDecode[98] = '\x29\x69\x41'
dDecode[99] = '\x28\x2E\x34'
dDecode[100] = '\x73\x4C\x09'
dDecode[101] = '\x59\x21\x2A'
dDecode[102] = '\x33\x24\x44'
dDecode[103] = '\x7F\x4E\x3F'
dDecode[104] = '\x6D\x50\x77'
dDecode[105] = '\x55\x09\x3B'
dDecode[106] = '\x53\x56\x55'
dDecode[107] = '\x7C\x73\x69'
dDecode[108] = '\x3A\x35\x61'
dDecode[109] = '\x5F\x61\x63'
dDecode[110] = '\x65\x4B\x50'
dDecode[111] = '\x46\x58\x67'
dDecode[112] = '\x58\x3B\x51'
dDecode[113] = '\x31\x57\x49'
dDecode[114] = '\x69\x22\x4F'
dDecode[115] = '\x6C\x6D\x46'
dDecode[116] = '\x5A\x4D\x68'
dDecode[117] = '\x48\x25\x7C'
dDecode[118] = '\x27\x28\x36'
dDecode[119] = '\x5C\x46\x70'
dDecode[120] = '\x3D\x4A\x6E'
dDecode[121] = '\x24\x32\x7A'
dDecode[122] = '\x79\x41\x2F'
dDecode[123] = '\x37\x3D\x5F'
dDecode[124] = '\x60\x5F\x4B'
dDecode[125] = '\x51\x4F\x5A'
dDecode[126] = '\x20\x42\x2C'
dDecode[127] = '\x36\x65\x57'
dCombination = {}
dCombination[0] = 0
dCombination[1] = 1
dCombination[2] = 2
dCombination[3] = 0
dCombination[4] = 1
dCombination[5] = 2
dCombination[6] = 1
dCombination[7] = 2
dCombination[8] = 2
dCombination[9] = 1
dCombination[10] = 2
dCombination[11] = 1
dCombination[12] = 0
dCombination[13] = 2
dCombination[14] = 1
dCombination[15] = 2
dCombination[16] = 0
dCombination[17] = 2
dCombination[18] = 1
dCombination[19] = 2
dCombination[20] = 0
dCombination[21] = 0
dCombination[22] = 1
dCombination[23] = 2
dCombination[24] = 2
dCombination[25] = 1
dCombination[26] = 0
dCombination[27] = 2
dCombination[28] = 1
dCombination[29] = 2
dCombination[30] = 2
dCombination[31] = 1
dCombination[32] = 0
dCombination[33] = 0
dCombination[34] = 2
dCombination[35] = 1
dCombination[36] = 2
dCombination[37] = 1
dCombination[38] = 2
dCombination[39] = 0
dCombination[40] = 2
dCombination[41] = 0
dCombination[42] = 0
dCombination[43] = 1
dCombination[44] = 2
dCombination[45] = 0
dCombination[46] = 2
dCombination[47] = 1
dCombination[48] = 0
dCombination[49] = 2
dCombination[50] = 1
dCombination[51] = 2
dCombination[52] = 0
dCombination[53] = 0
dCombination[54] = 1
dCombination[55] = 2
dCombination[56] = 2
dCombination[57] = 0
dCombination[58] = 0
dCombination[59] = 1
dCombination[60] = 2
dCombination[61] = 0
dCombination[62] = 2
dCombination[63] = 1
result = ''
index = -1
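# Undo the VBE escape sequences (@& -> LF, @# -> CR, @* -> >, @! -> <,
# @$ -> @) before applying the substitution table.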
for char in data.replace('@&', chr(10)).replace('@#', chr(13)).replace('@*', '>').replace('@!', '<').replace('@$', '@'):
byte = ord(char)
if byte < 128:
index = index + 1
if (byte == 9 or byte > 31 and byte < 128) and byte != 60 and byte != 62 and byte != 64:
char = dDecode[byte][dCombination[index % 64]]
result += char
return result
def DecodeVBE(filename, options):
FixPipe()
if sys.platform == 'win32':
import msvcrt
msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
if filename == '':
content = sys.stdin.read()
else:
content = File2StringHash(filename)
oMatch = re.search(r'#@~\^......==(.+)......==\^#~@', content)
if oMatch is None:
print('No encoded script found!')
else:
StdoutWriteChunked(Decode(oMatch.groups()[0]))
def Main():
oParser = optparse.OptionParser(usage='usage: %prog [options] [file]\n' + __description__, version='%prog ' + __version__)
oParser.add_option('-m', '--man', action='store_true', default=False, help='Print manual')
(options, args) = oParser.parse_args()
if options.man:
oParser.print_help()
PrintManual()
return
if len(args) > 1:
oParser.print_help()
print('')
print(' Source code put in the public domain by <NAME>, no Copyright')
print(' Use at your own risk')
print(' https://DidierStevens.com')
return
elif len(args) == 0:
DecodeVBE('', options)
else:
DecodeVBE(args[0], options)
if __name__ == '__main__':
Main()
|
[
"sys.stdout.write",
"sys.stdin.read",
"textwrap.fill",
"optparse.OptionParser",
"zipfile.ZipFile",
"sys.stdout.fileno",
"binascii.a2b_hex",
"sys.stdout.flush",
"signal.signal",
"re.search",
"binascii.a2b_base64"
] |
[((9354, 9408), 're.search', 're.search', (['"""#@~\\\\^......==(.+)......==\\\\^#~@"""', 'content'], {}), "('#@~\\\\^......==(.+)......==\\\\^#~@', content)\n", (9363, 9408), False, 'import re\n'), ((9565, 9681), 'optparse.OptionParser', 'optparse.OptionParser', ([], {'usage': "('usage: %prog [options] [file]\\n' + __description__)", 'version': "('%prog ' + __version__)"}), "(usage='usage: %prog [options] [file]\\n' +\n __description__, version='%prog ' + __version__)\n", (9586, 9681), False, 'import optparse\n'), ((2758, 2803), 'signal.signal', 'signal.signal', (['signal.SIGPIPE', 'signal.SIG_DFL'], {}), '(signal.SIGPIPE, signal.SIG_DFL)\n', (2771, 2803), False, 'import signal\n'), ((2933, 2964), 'sys.stdout.write', 'sys.stdout.write', (['data[0:10000]'], {}), '(data[0:10000])\n', (2949, 2964), False, 'import sys\n'), ((2973, 2991), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2989, 2991), False, 'import sys\n'), ((9270, 9286), 'sys.stdin.read', 'sys.stdin.read', ([], {}), '()\n', (9284, 9286), False, 'import sys\n'), ((1488, 1507), 'textwrap.fill', 'textwrap.fill', (['line'], {}), '(line)\n', (1501, 1507), False, 'import textwrap\n'), ((1989, 2019), 'binascii.a2b_hex', 'binascii.a2b_hex', (['filename[3:]'], {}), '(filename[3:])\n', (2005, 2019), False, 'import binascii\n'), ((9195, 9214), 'sys.stdout.fileno', 'sys.stdout.fileno', ([], {}), '()\n', (9212, 9214), False, 'import sys\n'), ((2136, 2169), 'binascii.a2b_base64', 'binascii.a2b_base64', (['filename[3:]'], {}), '(filename[3:])\n', (2155, 2169), False, 'import binascii\n'), ((2340, 2370), 'zipfile.ZipFile', 'zipfile.ZipFile', (['filename', '"""r"""'], {}), "(filename, 'r')\n", (2355, 2370), False, 'import zipfile\n')]
|
import os
import time
import numpy as np
import pandas as pd
from nltk import word_tokenize
from nltk.util import ngrams
import tensorflow as tf
from transformers import TFBertModel
from transformers import BertTokenizer
from tensorflow.keras.layers import Dense, Flatten
bert_model_name = 'bert-base-uncased'
tokenizer = BertTokenizer.from_pretrained(bert_model_name, do_lower_case=True)
print('...BERT tokenizer loading complete')
cols_to_use = [
'Note',
'LGBTQ',
'ADULT_CONTENT',
'HEALTH',
'DRUGS_ALCOHOL_GAMBLING',
'RACE',
'VIOLENCE_CRIME',
'POLITICS',
'RELATION',
'LOCATION'
]
label_cols = cols_to_use[1:]  # exclude Note (the input column)
class BertClassifier(tf.keras.Model):
def __init__(self, bert: TFBertModel, num_classes: int):
super().__init__()
self.bert = bert
self.classifier = Dense(num_classes, activation='sigmoid')
@tf.function
def call(self, input_ids, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None):
outputs = self.bert(input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
head_mask=head_mask)
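# outputs[1] is the pooled [CLS] representation returned by TFBertModel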
cls_output = outputs[1]
cls_output = self.classifier(cls_output)
return cls_output
def read_df(path):
df = pd.read_csv(path)
df = df[cols_to_use]
print('Number of all sentences: {}'.format(len(df)))
df['Note'] = df.Note.replace('NA',np.nan)
df = df.dropna().sample(frac=1).reset_index(drop=True)
print('Number of non-empty sentences: {}'.format(len(df)))
return df
def get_ids_masks(sentences, MAX_LEN):
ids = []
masks = []
for sent in sentences:
encoded_dict = tokenizer.encode_plus(
sent, # Sentence to encode.
add_special_tokens = True, # Add '[CLS]' and '[SEP]'
truncation = 'longest_first',
max_length = MAX_LEN, # Pad & truncate all sentences.
padding = 'max_length',
return_attention_mask = True, # Construct attn. masks.
)
ids.append(encoded_dict['input_ids'])
masks.append(encoded_dict['attention_mask'])
return ids, masks
def create_dataset(data_tuple, epochs=1, batch_size=32, buffer_size=100, train=True):
dataset = tf.data.Dataset.from_tensor_slices(data_tuple)
if train:
dataset = dataset.shuffle(buffer_size=buffer_size)
dataset = dataset.batch(batch_size)
if train:
dataset = dataset.prefetch(1)
return dataset
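# Hypothetical usage sketch (file path, MAX_LEN and batch size are assumptions):
#   df = read_df('./data/notes.csv')
#   ids, masks = get_ids_masks(df['Note'].tolist(), MAX_LEN=128)
#   train_ds = create_dataset((ids, masks, df[label_cols].values), batch_size=32)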
def get_keywords():
# retrieve keywords if exist
if os.path.exists("./data/keyword_list.txt"):
print("...Keyword list loading complete")
with open("./data/keyword_list.txt", 'r') as keyword_file:
keywords = set()
for word in keyword_file.readlines():
keywords.add(word.strip())
return keywords
# construct keywords if not exist
keywords_path = "./data/Lexicon/"
filenames = [os.path.join(keywords_path, f) for f in os.listdir(keywords_path) if os.path.isfile(os.path.join(keywords_path, f))]
keywords = set()
for fn in filenames:
with open(fn, 'r') as keyword_file:
for line in keyword_file.readlines():
word = line.strip()
if word:
keywords.add(word.lower())
with open("./data/keyword_list.txt", 'w') as keyword_file:
for word in keywords:
keyword_file.write("{}\n".format(word))
print("...Keyword list building complete")
return keywords
def not_in_keywords(note, keywords):
unigrams = word_tokenize(note)
bigrams = ngrams(unigrams, 2)
bigrams = [' '.join(bg) for bg in bigrams]
trigrams = ngrams(unigrams, 3)
trigrams = [' '.join(tg) for tg in trigrams]
tokens = unigrams + bigrams + trigrams
for t in tokens:
if t in keywords:
return False
return True
|
[
"os.listdir",
"nltk.util.ngrams",
"tensorflow.keras.layers.Dense",
"pandas.read_csv",
"os.path.exists",
"tensorflow.data.Dataset.from_tensor_slices",
"transformers.BertTokenizer.from_pretrained",
"os.path.join",
"nltk.word_tokenize"
] |
[((360, 426), 'transformers.BertTokenizer.from_pretrained', 'BertTokenizer.from_pretrained', (['bert_model_name'], {'do_lower_case': '(True)'}), '(bert_model_name, do_lower_case=True)\n', (389, 426), False, 'from transformers import BertTokenizer\n'), ((1281, 1298), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (1292, 1298), True, 'import pandas as pd\n'), ((2141, 2187), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['data_tuple'], {}), '(data_tuple)\n', (2175, 2187), True, 'import tensorflow as tf\n'), ((2403, 2444), 'os.path.exists', 'os.path.exists', (['"""./data/keyword_list.txt"""'], {}), "('./data/keyword_list.txt')\n", (2417, 2444), False, 'import os\n'), ((3289, 3308), 'nltk.word_tokenize', 'word_tokenize', (['note'], {}), '(note)\n', (3302, 3308), False, 'from nltk import word_tokenize\n'), ((3320, 3339), 'nltk.util.ngrams', 'ngrams', (['unigrams', '(2)'], {}), '(unigrams, 2)\n', (3326, 3339), False, 'from nltk.util import ngrams\n'), ((3396, 3415), 'nltk.util.ngrams', 'ngrams', (['unigrams', '(3)'], {}), '(unigrams, 3)\n', (3402, 3415), False, 'from nltk.util import ngrams\n'), ((833, 873), 'tensorflow.keras.layers.Dense', 'Dense', (['num_classes'], {'activation': '"""sigmoid"""'}), "(num_classes, activation='sigmoid')\n", (838, 873), False, 'from tensorflow.keras.layers import Dense, Flatten\n'), ((2746, 2776), 'os.path.join', 'os.path.join', (['keywords_path', 'f'], {}), '(keywords_path, f)\n', (2758, 2776), False, 'import os\n'), ((2786, 2811), 'os.listdir', 'os.listdir', (['keywords_path'], {}), '(keywords_path)\n', (2796, 2811), False, 'import os\n'), ((2830, 2860), 'os.path.join', 'os.path.join', (['keywords_path', 'f'], {}), '(keywords_path, f)\n', (2842, 2860), False, 'import os\n')]
|
#
# Copyright 2021 Open Raven Inc. and the Mockingbird project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import final
from docx import Document
from mockingbird.__base import __BaseDocument
from .__base import __BaseUnstructuredDataType
class DOCXDocument(__BaseDocument):
def __init__(self):
super().__init__(extension="docx")
# Create a list of docx formats we're going to export.
self._docx_styles = []
active_styles = self._configurable_dict["unstructured_data"]["docx_document"]["active_styles"]
if active_styles["paragraph_style"]:
self._docx_styles.append(_DocxParagraphStyle)
if active_styles["footer_style"]:
self._docx_styles.append(_DocxFooterStyle)
if active_styles["bullet_point_style"]:
self._docx_styles.append(_DocxBulletPointStyle)
if active_styles["chat_style"]:
self._docx_styles.append(_DocxChatStyle)
@final
def save(self, save_path: str) -> None:
for style in self._docx_styles:
instantiated_style = style()
instantiated_style.clone_sensitive_data(other=self)
instantiated_style.save(save_path=save_path)
self._meta_data_object.add_other_meta_data(instantiated_style._meta_data_object)
class _DocxParagraphStyle(__BaseUnstructuredDataType):
"""
Writes a simple paragraph containing sensitive-soup.
"""
def __init__(self):
super().__init__(extension="docx")
@final
def save(self, save_path: str) -> None:
"""
"""
save_file = self.setup_save_file(save_path=save_path, extension=self.extension)
document = Document()
document.add_heading('Paragraph Styled Document', 0)
sensitive_soup = self._get_sensitive_soup()
document.add_paragraph(sensitive_soup)
document.save(save_file)
self._log_save(save_file)
class _DocxFooterStyle(__BaseUnstructuredDataType):
"""
Writes a simple document with sensitive-soup in the footer.
"""
def __init__(self):
super().__init__(extension="docx")
@final
def save(self, save_path: str) -> None:
"""
"""
save_file = self.setup_save_file(save_path=save_path, extension=self.extension)
sensitive_soup = self._get_sensitive_soup()
document = Document()
document.add_heading('Sensitive-Data in Footer Styled Document', 0)
section = document.sections[0]
footer = section.footer
footer.paragraphs[0].text = sensitive_soup
document.save(save_file)
self._log_save(save_file)
class _DocxBulletPointStyle(__BaseUnstructuredDataType):
"""
Writes a simple document with sensitive data stored in bullet points.
"""
def __init__(self):
super().__init__(extension="docx")
@final
def save(self, save_path: str) -> None:
"""
"""
save_file = self.setup_save_file(save_path=save_path, extension=self.extension)
enumerated_groups = self._get_enumerated_style()
document = Document()
document.add_heading('Sensitive Data Stored in Bullet Points', 0)
for group in enumerated_groups:
key, enumerated_items = group
document.add_heading(key, level=1)
for item in enumerated_items:
document.add_paragraph(item, style="List Bullet")
document.save(save_file)
self._log_save(save_file)
class _DocxChatStyle(__BaseUnstructuredDataType):
"""
Writes a simple document styled as a chat between two people.
"""
def __init__(self):
super().__init__(extension="docx")
@final
def save(self, save_path: str) -> None:
"""
"""
save_file = self.setup_save_file(save_path=save_path, extension=self.extension)
chat_log = self._get_chat_log()
document = Document()
document.add_heading('A chat between two people', 0)
for line in chat_log:
document.add_paragraph(line)
document.save(save_file)
self._log_save(save_file)
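# Usage sketch (hypothetical output path; the active styles come from the
# mockingbird configuration):
#   doc = DOCXDocument()
#   doc.save(save_path='/tmp/mock_docs')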
|
[
"docx.Document"
] |
[((2215, 2225), 'docx.Document', 'Document', ([], {}), '()\n', (2223, 2225), False, 'from docx import Document\n'), ((2897, 2907), 'docx.Document', 'Document', ([], {}), '()\n', (2905, 2907), False, 'from docx import Document\n'), ((3628, 3638), 'docx.Document', 'Document', ([], {}), '()\n', (3636, 3638), False, 'from docx import Document\n'), ((4450, 4460), 'docx.Document', 'Document', ([], {}), '()\n', (4458, 4460), False, 'from docx import Document\n')]
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Ce fichier définit le contexte-éditeur 'SelectionTags'."""
from primaires.interpreteur.editeur.selection import Selection
from primaires.format.fonctions import supprimer_accents
class SelectionTags(Selection):
"""Contexte-éditeur pour la sélection de tags."""
nom = "editeur:tags:selection"
def __init__(self, pere, objet=None, attribut=None, liste=None,
tagge=None):
Selection.__init__(self, pere, objet, attribut, liste)
self.tagge = tagge
@staticmethod
def afficher_apercu(apercu, objet, valeur, liste=None, tagge=None):
"""Affichage de l'aperçu."""
return Selection.afficher_apercu(apercu, objet, valeur, liste)
def interpreter(self, msg):
"""Interprétation du contexte"""
nom = msg
msg_sa = supprimer_accents(msg).lower()
liste = getattr(self.objet, self.attribut)
cles = list(self.liste)
cles_sa = [supprimer_accents(c).lower() for c in cles]
if msg_sa in cles_sa:
cle = cles[cles_sa.index(msg_sa)]
if cle in liste:
while cle in liste:
liste.remove(cle)
else:
liste.append(cle)
# Add the tag's events to the tagged object
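# note: 'importeur' appears to be this codebase's global importer object,
# defined elsewhere in the project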
tag = importeur.tags.tags[cle]
script = tag.script
for evenement in script.evenements.values():
evt = self.tagge.script[evenement.nom]
evt.copier_depuis(evenement)
self.pere << "Copie de l'évènement {}.".format(
evenement.nom)
liste[:] = [e for e in liste if e]
self.actualiser()
else:
self.pere << "|err|La clé {} est introuvable.|ff|".format(
repr(msg))
|
[
"primaires.format.fonctions.supprimer_accents",
"primaires.interpreteur.editeur.selection.Selection.afficher_apercu",
"primaires.interpreteur.editeur.selection.Selection.__init__"
] |
[((1967, 2021), 'primaires.interpreteur.editeur.selection.Selection.__init__', 'Selection.__init__', (['self', 'pere', 'objet', 'attribut', 'liste'], {}), '(self, pere, objet, attribut, liste)\n', (1985, 2021), False, 'from primaires.interpreteur.editeur.selection import Selection\n'), ((2192, 2247), 'primaires.interpreteur.editeur.selection.Selection.afficher_apercu', 'Selection.afficher_apercu', (['apercu', 'objet', 'valeur', 'liste'], {}), '(apercu, objet, valeur, liste)\n', (2217, 2247), False, 'from primaires.interpreteur.editeur.selection import Selection\n'), ((2357, 2379), 'primaires.format.fonctions.supprimer_accents', 'supprimer_accents', (['msg'], {}), '(msg)\n', (2374, 2379), False, 'from primaires.format.fonctions import supprimer_accents\n'), ((2490, 2510), 'primaires.format.fonctions.supprimer_accents', 'supprimer_accents', (['c'], {}), '(c)\n', (2507, 2510), False, 'from primaires.format.fonctions import supprimer_accents\n')]
|
"""Models instantiated by the vessels api."""
from dataclasses import dataclass
from datetime import datetime
from typing import Optional
@dataclass(frozen=True)
class VesselClass:
"""Vessel class characteristics.
Detailed characteristics of each vessel class, including its defining
measurement and the range that corresponds to this vessel class.
Attributes:
id: The vessel class id e.g. 81 (refers to Panamax), 86 (Aframax), 85
(Suezmax).
vessel_type_id: Numeric ID corresponding to the different values of the
VesselType field. 1 -> Tanker, 3 -> Dry, 4 -> Containers, 5 -> LNG
(Liquified Natural gas), 6 -> LPG (Liquified Petroleum Gas).
from_size: The minimum value that corresponds to this vessel class
(Deadweight/TEU/CubicSize).
to_size: The maximum value that corresponds to this vessel class
(Deadweight/TEU/CubicSize).
name: The vessel class e.g. Panamax, Aframax, Suezmax.
vessel_type: Description of the type of the vessel, based on the
carried cargo. Main categories are Tankers, Dry (bulk carriers),
Containers, LNG and LPG.
defining_size: The attribute(DeadWeight, TEU, CubicSize) that defines
the size of the vesselClass.
size: The units of the DefiningSize attribute. DeadWeight->
kt(kilotons), TEU-> TEU, CubicSize-> cbm(cubic meters).
"""
id: int
vessel_type_id: int
from_size: int
to_size: int
name: Optional[str] = None
vessel_type: Optional[str] = None
defining_size: Optional[str] = None
size: Optional[str] = None
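# Illustrative construction (field values are invented for the example):
#   panamax = VesselClass(id=81, vessel_type_id=1, from_size=60000,
#                         to_size=82000, name='Panamax', vessel_type='Tanker')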
@dataclass(frozen=True)
class VesselType:
"""A vessel type.
Attributes:
id: The vessel type id, e.g. 1 -> Tanker, 3 -> Dry, 4 -> Containers,
5 -> LNG (Liquified Natural gas),
6-> LPG (Liquified Petroleum Gas).
name: The vessel type name, e.g. Tanker, Dry, Containers,
LNG (Liquified Natural gas), LPG (Liquified Petroleum Gas).
"""
id: int
name: str
@dataclass(frozen=True)
class Vessel:
"""Contains all details of a vessel.
Attributes:
imo: A seven-digits number that uniquely identifies a ship and does not
change when the ship's owner, country of registry or name of the
vessel changes.
vessel_type_id: Numeric ID corresponding to the different values of the
VesselType field. 1 -> Tanker, 3 -> Dry, 4 -> Containers, 5 ->
LNG(Liquified Natural gas), 6 -> LPG(Liquified Petroleum Gas).
built_for_trade_id: Numeric ID corresponding to the different values of
the BuiltForTrade field. 1 -> Crude, 2 -> Product, 3 -> Chemical.
trade_id: Numeric ID that takes the same values as the BuiltForTradeID
field. 1 -> Crude, 2 -> Product, 3 -> Chemical.
vessel_class_id: Numeric ID corresponding to the different vessel
classes of a certain vessel type.
commercial_operator_id: Numeric ID corresponding to the maritime
company that manages the vessel commercially.
deadweight: Numeric, measured in tonnes [t], often shortened as DWT,
denotes the total carrying capacity of the vessel including cargo,
ballast water, stores, provisions, crew and so on.
breadth_extreme: Numeric, measured in meters [m], denotes the width of
a ship over the outside of all planking or plating at the widest
frame.
gross_rated_tonnage: Numeric, measured in register tons, often
shortened as GRT, denotes the sum of all the closed and/or closable
spaces.
reduced_gross_tonnage: Numeric, measured in register tons, often
shortened as RGT, denotes a measure applicable for open-top
container ships and tankers with a double hull (ships equipped with
segregated ballast tanks).This quantity can be used to compute
various tonnage-based fees.
net_rated_tonnage: Numeric, measured in register tons, often shortened
as NRT, denotes the difference between the GRT and the sum of all
spaces which are not used for the purpose for which the ship is
built.
draught: Numeric, measured in meters [m], denotes the distance between
the ship’s keel and the waterline of the vessel. As the
instantaneous draught of a vessel is a function of the vessel's
loading status, this vessel characteristics refers to the maximum
draught of the vessel.
length_overall: Numeric, measured in meters [m], denotes the vessel's
maximum length between the extremes points, forward and aft.
moulded_depth: Numeric, measured in meters [m], denotes the vertical
distance between the moulded base line and the top of the beams of
the uppermost continuous deck.
year_built: Numeric, year format, the year the vessel was built.
geared: Boolean, denotes whether the vessel has cranes installed for
handling its cargo or not.
clean_dirty_willing: Boolean, indicates whether a tanker vessel is
‘willing’ to compete in the market complementary to the one shown
in Trade. For example an LR willing dirty will have Trade=Product
and CleanDirtyWilling=true.
main_engine_manufacturer_id: Numeric ID corresponding to the different
values of the MainEngine field. 1-> MAN B&W, 2-> Wartsila, 3->
Mitsubishi.
classification_register_id: The id of the classification register.
Default value: -2.
updated_date: Date, format YYYY-MM-DD HH:MM:SS, corresponding to the
latest update.
vessel_name: The current vessel name corresponding to that IMO.
call_sign: Alphanumeric code that uniquely identifies a vessel and is
used for radio communication with land based operators or stations
and between the vessels.
vessel_type: Description of the type of the vessel, based on the
carried cargo. Main categories are Tankers, Dry (bulk carriers),
Containers, LNG and LPG.
built_for_trade: Additional attribute to specify a Tanker vessel with
finer granularity. This classification is derived by the vessel
characteristics only. It indicates the initial cargo the vessel was
designed for, here called "trade". For example, an LR2 is a vessel
of VesselClass Aframax and BuiltForTrade Clean.
trade: Time-dependent version of the attribute BuiltForTrade. It is
specified by the last cargo carried by the vessel at the time of
query. For example, an LR2 with fueloil as last cargo has
BuiltForTrade = Crude and Trade = Product.
vessel_class: Name of the vessel class the vessel belongs to.
Assignment of a vessel to a certain VesselClass is based on the
VesselType and the value of its Deadweight (if Tanker or Dry), its
LiquidCap (if LNG/LPG) or its TEU (if Containers). For example, an
Aframax is a Tanker vessel with Deadweight within the range 82kt -
125kt, while a Capesize is a Dry vessel with Deadweight within the
range 120kt-220kt. LR2 are defined as Aframax, as only Deadweight
is used to define vessel classes.
flag_code: ISO 3166-1 alpha-2 code representing the vessel's country of
registration.
flag: The country where the vessel has been registered and whose law is
subject to.
commercial_operator: Name of the maritime company that manages the
vessel commercially.
built_country_code: Two letters code representing the country where the
vessel was built.
built_country_name: String, the name of the country where the vessel
was built.
scrapped_date: Date, with format YYYY-MM-DD, indicates when the vessel
was scrapped. If the vessel is active, ScrappedDate is null.
shipyard_built_id: Numeric ID corresponding to the geo location where
the vessel was built, for example the specific shipyard.
shipyard_built_name: String, the name of the shipyard where the vessel
was built, e.g. Hyundai Heavy Industries Co.
ice_class: Alphanumeric code that denotes the vessel's additional level
of strengthening as well as other arrangements that make navigation
through frozen seas possible. For example 1A, 1D, etc.
cranes_ton_capacity: Numeric, measured in tonnes [t], denotes the
capacity of the vessel's cranes whenever applicable.
teu: Numeric, measured in TEU (Twenty-Foot Equivalent Unit), denotes a
volumetric measure of a container's cargo carrying capacity. Used
for Containers, that is vessels with VesselType=4.
te_u14: Numeric, denotes the capacity of the vessel measured in twenty-
foot equivalent units (TEU) loaded at 14 tons.
reefers: Numeric, denotes the capacity of the vessel measured in
refrigerated twenty-foot equivalent units (TEU), i.e., the maximum
number of refrigerated containers that could be carried.
panama_canal_net_tonnage: Numeric, measured in register tons,
volumetric measure derived by the NRT (NetRatedTonnage) and
modified for Panama Canal purposes. Often used to compute tonnage-
based fees.
cubic_size: Numeric, measured in cubic meters [cbm] denotes the
carrying capacity of Gas vessels (LNG, LPG). For tankers it is the
volume of cargo tanks.
scrubbers_date: Date, format YYYY-MM-DD HH:MM:SS, best estimate of the
scrubbers installation date.
summer_tpc: Numeric, measured in [metric tonnes/cm], acronym of Summer
Tonnes Per Centimeter, denotes the cargo in metric tonnes (10^3 kg)
needed to further increase the vessel's salt water draught by one
centimeter.
lightship_tonnes: The weight of the vessels without any cargo or
bunkers. It is an important parameter to estimate the scrap value
of the vessel as it represents the amount of steel that can be
recycled.
main_engine_manufacturer: String denoting the brand of the vessel's
main engine.
delivery_date: Date, with format YYYY-MM-DD, indicates when the vessel
was delivered to the owner and commenced its first voyage.
classification_register: The name of the organization that issued the
vessel's classification certificate. Default value: Not set.
number_of_holds: Numeric, the number of separate enclosed spaces within
a ship designed for storing cargo.
grain_capacity: This is the space available for a liquid-type cargo,
like bulk grain, which can flow into every corner.
bale_capacity: This is the space available for solid cargo. Bale space
is usually about 7–10% less than grain space.
"""
imo: int
vessel_type_id: int
built_for_trade_id: int
trade_id: int
vessel_class_id: int
commercial_operator_id: int
deadweight: int
breadth_extreme: int
gross_rated_tonnage: int
reduced_gross_tonnage: int
net_rated_tonnage: int
draught: float
length_overall: float
moulded_depth: float
year_built: int
geared: bool
clean_dirty_willing: bool
main_engine_manufacturer_id: int
classification_register_id: int
updated_date: datetime
vessel_name: Optional[str] = None
call_sign: Optional[str] = None
vessel_type: Optional[str] = None
built_for_trade: Optional[str] = None
trade: Optional[str] = None
vessel_class: Optional[str] = None
flag_code: Optional[str] = None
flag: Optional[str] = None
commercial_operator: Optional[str] = None
built_country_code: Optional[str] = None
built_country_name: Optional[str] = None
scrapped_date: Optional[datetime] = None
shipyard_built_id: Optional[int] = None
shipyard_built_name: Optional[str] = None
ice_class: Optional[str] = None
cranes_ton_capacity: Optional[int] = None
teu: Optional[int] = None
te_u14: Optional[int] = None
reefers: Optional[int] = None
panama_canal_net_tonnage: Optional[int] = None
cubic_size: Optional[int] = None
scrubbers_date: Optional[datetime] = None
summer_tpc: Optional[float] = None
lightship_tonnes: Optional[int] = None
main_engine_manufacturer: Optional[str] = None
delivery_date: Optional[datetime] = None
classification_register: Optional[str] = None
number_of_holds: Optional[int] = None
grain_capacity: Optional[int] = None
bale_capacity: Optional[int] = None
|
[
"dataclasses.dataclass"
] |
[((141, 163), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (150, 163), False, 'from dataclasses import dataclass\n'), ((1668, 1690), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (1677, 1690), False, 'from dataclasses import dataclass\n'), ((2094, 2116), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (2103, 2116), False, 'from dataclasses import dataclass\n')]
|
from PySide2 import QtWidgets, QtGui, QtCore
class RulePartWidget(QtWidgets.QWidget):
def __init__(self, parent=None, feature_ranges={'feature X': [0, 100]}, rule_number=-1):
QtWidgets.QWidget.__init__(self, parent)
self.layout = QtWidgets.QHBoxLayout(self)
# combobox
self.combo_box = QtWidgets.QComboBox()
self.feature_ranges = feature_ranges
self.features = list(self.feature_ranges.keys())
print(self.features)
for feature in self.features:
self.combo_box.addItem(feature)
# create widgets
feat_range = self.feature_ranges[self.features[0]]
self.min_box = QtWidgets.QDoubleSpinBox()
self.min_box.valueChanged.connect(self._min_box_changed)
self.max_box = QtWidgets.QDoubleSpinBox()
self.max_box.valueChanged.connect(self._max_box_changed)
self.min_box.setRange(*feat_range)
self.max_box.setRange(*feat_range)
self.min_box.setValue(feat_range[0])
self.max_box.setValue(feat_range[1])
self.rule_id = f'Rule Part {rule_number if rule_number != -1 else "X"}'
# add to layout
self.layout.addWidget(QtWidgets.QLabel(self.rule_id, self))
self.layout.addWidget(self.combo_box)
self.layout.addWidget(QtWidgets.QLabel('between', self))
self.layout.addWidget(self.min_box)
self.layout.addWidget(QtWidgets.QLabel('and', self))
self.layout.addWidget(self.max_box)
self.combo_box.activated.connect(self.feature_change)
def feature_change(self):
print('Current Feature:', self.combo_box.currentText())
selected_feature = self.combo_box.currentText()
feat_range = self.feature_ranges[selected_feature]
self.min_box.setRange(*feat_range)
self.max_box.setRange(*feat_range)
self.min_box.setValue(feat_range[0])
self.max_box.setValue(feat_range[1])
def _min_box_changed(self, val):
selected_feature = self.combo_box.currentText()
feat_range = self.feature_ranges[selected_feature]
# limit by chosen minimum
self.max_box.setRange(val, feat_range[1])
def _max_box_changed(self, val):
selected_feature = self.combo_box.currentText()
feat_range = self.feature_ranges[selected_feature]
# limit by chosen maximum
self.min_box.setRange(feat_range[0], val)
def get_rule(self):
selected_feature = self.combo_box.currentText()
min_val = self.min_box.value()
max_val = self.max_box.value()
return {'rule_id': self.rule_id, 'feature': selected_feature, 'range': [min_val, max_val]}
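# Minimal usage sketch (requires a Qt application event loop):
#   app = QtWidgets.QApplication([])
#   widget = RulePartWidget(feature_ranges={'age': [0, 120]}, rule_number=1)
#   widget.show()
#   app.exec_()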
|
[
"PySide2.QtWidgets.QDoubleSpinBox",
"PySide2.QtWidgets.QWidget.__init__",
"PySide2.QtWidgets.QLabel",
"PySide2.QtWidgets.QComboBox",
"PySide2.QtWidgets.QHBoxLayout"
] |
[((189, 229), 'PySide2.QtWidgets.QWidget.__init__', 'QtWidgets.QWidget.__init__', (['self', 'parent'], {}), '(self, parent)\n', (215, 229), False, 'from PySide2 import QtWidgets, QtGui, QtCore\n'), ((253, 280), 'PySide2.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self'], {}), '(self)\n', (274, 280), False, 'from PySide2 import QtWidgets, QtGui, QtCore\n'), ((326, 347), 'PySide2.QtWidgets.QComboBox', 'QtWidgets.QComboBox', ([], {}), '()\n', (345, 347), False, 'from PySide2 import QtWidgets, QtGui, QtCore\n'), ((668, 694), 'PySide2.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', ([], {}), '()\n', (692, 694), False, 'from PySide2 import QtWidgets, QtGui, QtCore\n'), ((783, 809), 'PySide2.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', ([], {}), '()\n', (807, 809), False, 'from PySide2 import QtWidgets, QtGui, QtCore\n'), ((1193, 1229), 'PySide2.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.rule_id', 'self'], {}), '(self.rule_id, self)\n', (1209, 1229), False, 'from PySide2 import QtWidgets, QtGui, QtCore\n'), ((1307, 1340), 'PySide2.QtWidgets.QLabel', 'QtWidgets.QLabel', (['"""between"""', 'self'], {}), "('between', self)\n", (1323, 1340), False, 'from PySide2 import QtWidgets, QtGui, QtCore\n'), ((1416, 1445), 'PySide2.QtWidgets.QLabel', 'QtWidgets.QLabel', (['"""and"""', 'self'], {}), "('and', self)\n", (1432, 1445), False, 'from PySide2 import QtWidgets, QtGui, QtCore\n')]
|
import numpy as np
class LinearConstraints():
def __init__(self, A, b, mode='Intersection'):
"""
Defines linear functions f(x) = Ax + b.
The integration domain is the set where all of these functions are positive
when mode='Intersection', or where at least one of them is positive when mode='Union'.
:param A: matrix A with shape (M, D) where M is the number of constraints and D the dimension
:param b: offset, shape (M, 1)
"""
self.A = A
self.b = b
self.N_constraints = b.shape[0]
self.N_dim = A.shape[1]
self.mode = mode
def evaluate(self, x):
"""
Evaluate linear functions at N locations x
:param x: location, shape (D, N)
:return: Ax + b
"""
return np.dot(self.A, x) + self.b
def integration_domain(self, x):
"""
is 1 if x is in the integration domain, else 0
:param x: location, shape (D, N)
:return: either self.indicator_union or self.indicator_intersection, depending on setting of self.mode
"""
if self.mode == 'Union':
return self.indicator_union(x)
elif self.mode == 'Intersection':
return self.indicator_intersection(x)
else:
raise NotImplementedError
def indicator_intersection(self, x):
"""
Intersection of indicator functions taken to be 1 when the linear function is >= 0
:param x: location, shape (D, N)
:return: 1 if all linear functions are >= 0, else 0.
"""
return np.where(self.evaluate(x) >= 0, 1, 0).prod(axis=0)
def indicator_union(self, x):
"""
Union of indicator functions taken to be 1 when the linear function is >= 0
:param x: location, shape (D, N)
:return: 1 if any of the linear functions is >= 0, else 0.
"""
return 1 - (np.where(self.evaluate(x) >= 0, 0, 1)).prod(axis=0)
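# Example: the 2-D unit box as an intersection {x : Ax + b >= 0} (illustrative):
#   A = np.vstack([np.eye(2), -np.eye(2)])
#   b = np.array([[0.0], [0.0], [1.0], [1.0]])
#   box = LinearConstraints(A, b)
#   box.integration_domain(np.array([[0.5], [0.5]]))  # -> array([1])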
class ShiftedLinearConstraints(LinearConstraints):
def __init__(self, A, b, shift):
"""
Class for shifted linear constraints that appear in multilevel splitting method
:param A: matrix A with shape (M, D) where M is the number of constraints and D the dimension
:param b: offset, shape (M, 1)
:param shift: (positive) scalar value denoting the shift
"""
self.shift = shift
super(ShiftedLinearConstraints, self).__init__(A, b + shift)
|
[
"numpy.dot"
] |
[((832, 849), 'numpy.dot', 'np.dot', (['self.A', 'x'], {}), '(self.A, x)\n', (838, 849), True, 'import numpy as np\n')]
|
from time import sleep
from tensorboardX import SummaryWriter
with SummaryWriter(logdir='runs/purge') as w:
for i in range(100):
w.add_scalar('purgetest', i, i)
sleep(1.0)
with SummaryWriter(logdir='runs/purge', purge_step=42) as w:
# events 42-99 (inclusive) are removed
for i in range(42, 100):
w.add_scalar('purgetest', 42, i)
|
[
"tensorboardX.SummaryWriter",
"time.sleep"
] |
[((175, 185), 'time.sleep', 'sleep', (['(1.0)'], {}), '(1.0)\n', (180, 185), False, 'from time import sleep\n'), ((68, 102), 'tensorboardX.SummaryWriter', 'SummaryWriter', ([], {'logdir': '"""runs/purge"""'}), "(logdir='runs/purge')\n", (81, 102), False, 'from tensorboardX import SummaryWriter\n'), ((192, 241), 'tensorboardX.SummaryWriter', 'SummaryWriter', ([], {'logdir': '"""runs/purge"""', 'purge_step': '(42)'}), "(logdir='runs/purge', purge_step=42)\n", (205, 241), False, 'from tensorboardX import SummaryWriter\n')]
|
# Imports
import math
import numpy as np
import matplotlib.pyplot as plt
class ParkingTrajectoryGenerator:
# Class Variables
# Vehicle Parameters
__l = 0.356 # length between front and rear axle in m
__b = 0.37 # width of car in m
__l_1 = 0.12 # length between front axle and bumper in m
__l_2 = 0.108 # length between rear axle and bumper in m
__alpha_max = math.radians(45) # maximum steering angle in rad
# alpha_c = alpha_max # constant steering angle in rad
__rho_min = 1/math.tan(__alpha_max) # radius of the turning circle of the car in m
# Driving lane and parking spot parameters
__h_cd = 0.974-2*0.03 # width of driving lane in m
__h_pd = (0.96-3*0.02)/2 # width of parking space in m
__h_pw = 0.85 # depth of parking space in m
__h_ps = (__h_pd - __b)/2 # = h_pr = h_pl = h_ps -> for symmetrical parking -> space between car and parking space boundaries in m
# Parameters for calculation of the Trajectory Points
__num_points_per_segment = 100
__pull_out_left_straight_offset = 0.2
__r_B2 = math.sqrt((__l + __l_1)**2 + (__rho_min + __b/2)**2)
__s_m = -math.sqrt((__rho_min - __b/2)**2 - (__rho_min - __h_pd/2)**2)
__s_max = __h_cd - __r_B2
__s = max(abs(__s_m), abs(__s_max))
# Points of Parking Trajectory
__parkingTrajectoryPoints_x_rear_axle = np.zeros(2*__num_points_per_segment)
__parkingTrajectoryPoints_y_rear_axle = np.zeros(2*__num_points_per_segment)
#__parkingTrajectoryPoints_x_front_axle = np.zeros(2*__num_points_per_segment)
#__parkingTrajectoryPoints_y_front_axle = np.zeros(2*__num_points_per_segment)
__pullOutLeftTrajectoryPoints_x_rear_axle = np.zeros(2*__num_points_per_segment)
__pullOutLeftTrajectoryPoints_y_rear_axle = np.zeros(2*__num_points_per_segment)
#__pullOutLeftTrajectoryPoints_x_front_axle = np.zeros(2*__num_points_per_segment)
#__pullOutLeftTrajectoryPoints_y_front_axle = np.zeros(2*__num_points_per_segment)
# Heading of Parking Trajectory
__parkingTrajectoryHeading_rear_axle = np.zeros(2*__num_points_per_segment)
# Parameter for Representing Circle Arc as Polynomial (Bezier)
__c = 0.55191502449
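# (0.55191502449 is the optimised control-point offset for approximating a
# quarter circle with a cubic Bezier curve)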
# Parameters of Steering Angle Controller (Saturated Control) from Paper
__K_t = 8
__K = 5.85
__a_0 = 0.17
__u = np.tan(__alpha_max)/__l
# Vehicle Heading for test purposes (idealised)
__theta = np.zeros(2*__num_points_per_segment)
# Constructor
def __init__(self, targetParkingSpot_x = 0, targetParkingSpot_y = 0):
self.__targetPoint_x_rear_axle = targetParkingSpot_x + self.__h_pw - self.__l_2 - self.__h_ps
self.__targetPoint_y_rear_axle = targetParkingSpot_y
self.__targetPoint_x_front_axle = targetParkingSpot_x + self.__h_pw - self.__l_2 - self.__h_ps - self.__l
self.__targetPoint_y_front_axle = targetParkingSpot_y
self.calcParkingTrajectory()
self.calcPullOutLeftTrajectory()
# Setter
def setTargetParkingSpot(self, targetParkingSpot_x = 0, targetParkingSpot_y = 0):
self.__targetPoint_x_rear_axle = targetParkingSpot_x + self.__h_pw - self.__l_2 - self.__h_ps
self.__targetPoint_y_rear_axle = targetParkingSpot_y
self.__targetPoint_x_front_axle = targetParkingSpot_x + self.__h_pw - self.__l_2 - self.__h_ps - self.__l
self.__targetPoint_y_front_axle = targetParkingSpot_y
self.calcParkingTrajectory()
self.calcPullOutLeftTrajectory()
# Getter
def getParkingStartPoint(self):
return self.__parkingTrajectoryPoints_x_rear_axle[-1], self.__parkingTrajectoryPoints_y_rear_axle[-1]
def getParkingEndPoint(self):
return self.__targetPoint_x_rear_axle, self.__targetPoint_y_rear_axle
def getParkingTrajectoryPolynomials(self):
return self.__parkingTrajectory_polynomial_coefficients_circle_arc_x, self.__parkingTrajectory_polynomial_coefficients_circle_arc_y, self.__parkingTrajectory_polynomial_coefficients_straight_x, self.__parkingTrajectory_polynomial_coefficients_straight_y
def getPullOutLeftTrajectoryPolynomials(self):
return self.__pullOutLeftTrajectory_polynomial_coefficients_circle_arc_x, self.__pullOutLeftTrajectory_polynomial_coefficients_circle_arc_y, self.__pullOutLeftTrajectory_polynomial_coefficients_straight_x, self.__pullOutLeftTrajectory_polynomial_coefficients_straight_y
# Functions
def calcParkingTrajectory(self):
# = Pull Out Right Trajectory
# Target Point Rear End of the Parking Spot (Rear end of the axle)
S_x_rear_axle = self.__targetPoint_x_rear_axle - self.__h_pw + self.__l_2 + self.__h_ps + self.__s
S_y_rear_axle = self.__targetPoint_y_rear_axle
#S_x_front_axle = self.targetPoint_x_front_axle - self.h_pw + self.l_2 + self.h_ps + self.s + self.l
#S_y_front_axle = self.targetPoint_y_front_axle
O_x_rear_axle = S_x_rear_axle
O_y_rear_axle = S_y_rear_axle + self.__rho_min
#O_x_front_axle = S_x_front_axle
#O_y_front_axle = S_y_front_axle + self.rho_min
# Points on Unit circle with Origin O
P_0_circle_arc_x = O_x_rear_axle
P_0_circle_arc_y = O_y_rear_axle - 1
P_1_circle_arc_x = O_x_rear_axle - self.__c
P_1_circle_arc_y = O_y_rear_axle - 1
P_2_circle_arc_x = O_x_rear_axle - 1
P_2_circle_arc_y = O_y_rear_axle - self.__c
P_3_circle_arc_x = O_x_rear_axle - 1
P_3_circle_arc_y = O_y_rear_axle
# Polynomial of the circle arc
self.__parkingTrajectory_polynomial_coefficients_circle_arc_x = np.poly1d([self.__rho_min*(P_3_circle_arc_x + 3.*P_1_circle_arc_x - 3.*P_2_circle_arc_x - P_0_circle_arc_x), self.__rho_min*3*(P_2_circle_arc_x - 2*P_1_circle_arc_x + P_0_circle_arc_x), self.__rho_min*3*(P_1_circle_arc_x - P_0_circle_arc_x), self.__rho_min*P_0_circle_arc_x])
self.__parkingTrajectory_polynomial_coefficients_circle_arc_y = np.poly1d([self.__rho_min*(P_3_circle_arc_y + 3.*P_1_circle_arc_y - 3.*P_2_circle_arc_y - P_0_circle_arc_y), self.__rho_min*3*(P_2_circle_arc_y - 2*P_1_circle_arc_y + P_0_circle_arc_y), self.__rho_min*3*(P_1_circle_arc_y - P_0_circle_arc_y), self.__rho_min*P_0_circle_arc_y])
# Polynomial of the straight
self.__parkingTrajectory_polynomial_coefficients_straight_x = np.poly1d([0, 0, S_x_rear_axle - self.__targetPoint_x_rear_axle, self.__targetPoint_x_rear_axle])
self.__parkingTrajectory_polynomial_coefficients_straight_y = np.poly1d([0, 0, S_y_rear_axle - self.__targetPoint_y_rear_axle, self.__targetPoint_y_rear_axle])
self.__parkingTrajectoryPoints_x_rear_axle[ : self.__num_points_per_segment] = np.linspace(self.__targetPoint_x_rear_axle, S_x_rear_axle, self.__num_points_per_segment)
self.__parkingTrajectoryPoints_y_rear_axle[ : self.__num_points_per_segment] = np.ones(self.__num_points_per_segment)*self.__targetPoint_y_rear_axle
#self.__parkingTrajectoryHeading_rear_axle[ : self.__num_points_per_segment] = np.ones(self.__num_points_per_segment)*math.pi
#self.parkingTrajectoryPoints_x_front_axle[0 : self.num_points_per_segment] = np.linspace(self.targetPoint_x_front_axle, S_x_front_axle, self.num_points_per_segment)
#self.parkingTrajectoryPoints_y_front_axle[0 : self.num_points_per_segment] = np.ones(self.num_points_per_segment)*self.targetPoint_y_front_axle
circle_arc_angle = np.linspace(math.pi, math.pi*(3/2), self.__num_points_per_segment)
#heading_angle = np.linspace(math.pi, math.pi/2, self.__num_points_per_segment)
# Vehicle Heading for test
self.__theta[ : self.__num_points_per_segment] = math.pi
self.__theta[self.__num_points_per_segment : ] = np.linspace(math.pi, math.pi/2, self.__num_points_per_segment)
#i = self.__num_points_per_segment
#for angle in circle_arc_angle :
self.__parkingTrajectoryPoints_x_rear_axle[self.__num_points_per_segment : ] = self.__rho_min*np.cos(circle_arc_angle) + O_x_rear_axle
self.__parkingTrajectoryPoints_y_rear_axle[self.__num_points_per_segment : ] = self.__rho_min*np.sin(circle_arc_angle) + O_y_rear_axle
#self.__parkingTrajectoryPoints_x_front_axle[ : self.__num_points_per_segment] = self.__parkingTrajectoryPoints_x_rear_axle[ : self.__num_points_per_segment] - self.__l
#self.__parkingTrajectoryPoints_y_front_axle[ : self.__num_points_per_segment] = self.__parkingTrajectoryPoints_y_rear_axle[ : self.__num_points_per_segment]
#self.__parkingTrajectoryPoints_x_front_axle[self.__num_points_per_segment : ] = self.__parkingTrajectoryPoints_x_rear_axle[self.__num_points_per_segment : ] + np.cos(self.__theta[self.__num_points_per_segment : ])*self.__l
#self.__parkingTrajectoryPoints_y_front_axle[self.__num_points_per_segment : ] = self.__parkingTrajectoryPoints_y_rear_axle[self.__num_points_per_segment : ] + np.sin(self.__theta[self.__num_points_per_segment : ])*self.__l
#self.__parkingTrajectoryHeading_rear_axle[self.__num_points_per_segment : ] = heading_angle
#self.parkingTrajectoryPoints_x_front_axle[i] = self.rho_min*math.cos(angle) + O_x_front_axle
#self.parkingTrajectoryPoints_y_front_axle[i] = self.rho_min*math.sin(angle) + O_y_front_axle
# i += 1
# Printing
#t = np.linspace(0, 1, 100)
#poly_circle_arc_x = self.__parkingTrajectory_polynomial_coefficients_circle_arc_x(t)
#poly_circle_arc_y = self.__parkingTrajectory_polynomial_coefficients_circle_arc_y(t)
#poly_straight_x = self.__parkingTrajectory_polynomial_coefficients_straight_x(t)
#poly_straight_y = self.__parkingTrajectory_polynomial_coefficients_straight_y(t)
#plt.plot(self.__parkingTrajectoryPoints_x_rear_axle, self.__parkingTrajectoryPoints_y_rear_axle, 'b.')
#plt.plot(poly_circle_arc_x, poly_circle_arc_y, 'r.')
#plt.plot(poly_straight_x, poly_straight_y, 'r.')
#plt.show()
#plt.stem(self.__parkingTrajectoryHeading_rear_axle)
#plt.show()
return self.__parkingTrajectory_polynomial_coefficients_circle_arc_x, self.__parkingTrajectory_polynomial_coefficients_circle_arc_y, self.__parkingTrajectory_polynomial_coefficients_straight_x, self.__parkingTrajectory_polynomial_coefficients_straight_y
def calcPullOutLeftTrajectory(self):
# Target Point Rear End of the Parking Spot (Rear end of the axle)
S_x_rear_axle = self.__targetPoint_x_rear_axle - self.__h_pw + self.__l_2 + self.__h_ps + self.__s - self.__pull_out_left_straight_offset
S_y_rear_axle = self.__targetPoint_y_rear_axle
#S_x_front_axle = self.targetPoint_x_front_axle - self.h_pw + self.l_2 + self.h_ps + self.s + self.l
#S_y_front_axle = self.targetPoint_y_front_axle
O_x_rear_axle = S_x_rear_axle
O_y_rear_axle = S_y_rear_axle - self.__rho_min
#O_x_front_axle = S_x_front_axle
#O_y_front_axle = S_y_front_axle + self.rho_min
# Points on Unit circle with Origin O
P_0_circle_arc_x = O_x_rear_axle - 1
P_0_circle_arc_y = O_y_rear_axle
P_1_circle_arc_x = O_x_rear_axle - 1
P_1_circle_arc_y = O_y_rear_axle + self.__c
P_2_circle_arc_x = O_x_rear_axle - self.__c
P_2_circle_arc_y = O_y_rear_axle + 1
P_3_circle_arc_x = O_x_rear_axle
P_3_circle_arc_y = O_y_rear_axle + 1
# Polynomial of the circle arc
self.__pullOutLeftTrajectory_polynomial_coefficients_circle_arc_x = np.poly1d([self.__rho_min*(P_3_circle_arc_x + 3.*P_1_circle_arc_x - 3.*P_2_circle_arc_x - P_0_circle_arc_x), self.__rho_min*3*(P_2_circle_arc_x - 2*P_1_circle_arc_x + P_0_circle_arc_x), self.__rho_min*3*(P_1_circle_arc_x - P_0_circle_arc_x), self.__rho_min*P_0_circle_arc_x])
self.__pullOutLeftTrajectory_polynomial_coefficients_circle_arc_y = np.poly1d([self.__rho_min*(P_3_circle_arc_y + 3.*P_1_circle_arc_y - 3.*P_2_circle_arc_y - P_0_circle_arc_y), self.__rho_min*3*(P_2_circle_arc_y - 2*P_1_circle_arc_y + P_0_circle_arc_y), self.__rho_min*3*(P_1_circle_arc_y - P_0_circle_arc_y), self.__rho_min*P_0_circle_arc_y])
# Polynomial of the straight
self.__pullOutLeftTrajectory_polynomial_coefficients_straight_x = np.poly1d([0, 0, S_x_rear_axle - self.__targetPoint_x_rear_axle, self.__targetPoint_x_rear_axle])
self.__pullOutLeftTrajectory_polynomial_coefficients_straight_y = np.poly1d([0, 0, S_y_rear_axle - self.__targetPoint_y_rear_axle, self.__targetPoint_y_rear_axle])
self.__pullOutLeftTrajectoryPoints_x_rear_axle[0 : self.__num_points_per_segment] = np.linspace(self.__targetPoint_x_rear_axle, S_x_rear_axle, self.__num_points_per_segment)
self.__pullOutLeftTrajectoryPoints_y_rear_axle[0 : self.__num_points_per_segment] = np.ones(self.__num_points_per_segment)*self.__targetPoint_y_rear_axle
#self.parkingTrajectoryPoints_x_front_axle[0 : self.num_points_per_segment] = np.linspace(self.targetPoint_x_front_axle, S_x_front_axle, self.num_points_per_segment)
#self.parkingTrajectoryPoints_y_front_axle[0 : self.num_points_per_segment] = np.ones(self.num_points_per_segment)*self.targetPoint_y_front_axle
circle_arc_angle = np.linspace(math.pi, math.pi/2, self.__num_points_per_segment)
i = self.__num_points_per_segment
for angle in circle_arc_angle :
self.__pullOutLeftTrajectoryPoints_x_rear_axle[i] = self.__rho_min*np.cos(angle) + O_x_rear_axle
self.__pullOutLeftTrajectoryPoints_y_rear_axle[i] = self.__rho_min*np.sin(angle) + O_y_rear_axle
#self.parkingTrajectoryPoints_x_front_axle[i] = self.rho_min*math.cos(angle) + O_x_front_axle
#self.parkingTrajectoryPoints_y_front_axle[i] = self.rho_min*math.sin(angle) + O_y_front_axle
i += 1
# Printing
#t = np.linspace(0, 1, 100)
#poly_circle_arc_x = self.__pullOutLeftTrajectory_polynomial_coefficients_circle_arc_x(t)
#poly_circle_arc_y = self.__pullOutLeftTrajectory_polynomial_coefficients_circle_arc_y(t)
#poly_straight_x = self.__pullOutLeftTrajectory_polynomial_coefficients_straight_x(t)
#poly_straight_y = self.__pullOutLeftTrajectory_polynomial_coefficients_straight_y(t)
#plt.plot(self.__parkingTrajectoryPoints_x_rear_axle, self.__parkingTrajectoryPoints_y_rear_axle, 'b.')
#plt.plot(self.__pullOutLeftTrajectoryPoints_x_rear_axle, self.__pullOutLeftTrajectoryPoints_y_rear_axle, 'b.')
#plt.plot(poly_circle_arc_x, poly_circle_arc_y, 'r.')
#plt.plot(poly_straight_x, poly_straight_y, 'r.')
#plt.show()
return self.__pullOutLeftTrajectory_polynomial_coefficients_circle_arc_x, self.__pullOutLeftTrajectory_polynomial_coefficients_circle_arc_y, self.__pullOutLeftTrajectory_polynomial_coefficients_straight_x, self.__pullOutLeftTrajectory_polynomial_coefficients_straight_y
def getSteeringAngle(self, actualPoint_y, vehicle_heading):
theta = vehicle_heading - math.pi
print(theta)
v = self.__K*(theta - self.__a_0*actualPoint_y)
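# arctan(l * u * tanh(K_t * v)) bounds the commanded angle to +/- alpha_max,
# since u = tan(alpha_max) / l (saturated steering control law)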
alpha = np.arctan(self.__l*self.__u*np.tanh(self.__K_t*v))
return alpha
ParkingTrajectoryGenerator1 = ParkingTrajectoryGenerator()
[a, b, c, d] = ParkingTrajectoryGenerator1.getParkingTrajectoryPolynomials()
print(a)
print(b)
print(c)
print(d)
#plt.plot(ParkingTrajectoryGenerator1._ParkingTrajectoryGenerator__parkingTrajectoryPoints_x_front_axle, ParkingTrajectoryGenerator1._ParkingTrajectoryGenerator__parkingTrajectoryPoints_y_front_axle, 'b.', ParkingTrajectoryGenerator1._ParkingTrajectoryGenerator__parkingTrajectoryPoints_x_rear_axle, ParkingTrajectoryGenerator1._ParkingTrajectoryGenerator__parkingTrajectoryPoints_y_rear_axle, 'r.')
#plt.show()
steering_angle = np.zeros(ParkingTrajectoryGenerator1._ParkingTrajectoryGenerator__parkingTrajectoryPoints_y_rear_axle.size)
i = 0
for elem in ParkingTrajectoryGenerator1._ParkingTrajectoryGenerator__parkingTrajectoryPoints_y_rear_axle :
steering_angle[i] = ParkingTrajectoryGenerator1.getSteeringAngle(ParkingTrajectoryGenerator1._ParkingTrajectoryGenerator__parkingTrajectoryPoints_y_rear_axle[i], ParkingTrajectoryGenerator1._ParkingTrajectoryGenerator__theta[i])
i += 1
plt.stem(ParkingTrajectoryGenerator1._ParkingTrajectoryGenerator__theta)
plt.show()
plt.stem(np.degrees(steering_angle))
plt.show()
#ParkingTrajectoryGenerator1.calcPullOutLeftTrajectory()
|
[
"numpy.poly1d",
"matplotlib.pyplot.show",
"numpy.tanh",
"math.sqrt",
"numpy.degrees",
"math.radians",
"math.tan",
"numpy.zeros",
"matplotlib.pyplot.stem",
"numpy.ones",
"numpy.tan",
"numpy.sin",
"numpy.linspace",
"numpy.cos"
] |
[((15864, 15976), 'numpy.zeros', 'np.zeros', (['ParkingTrajectoryGenerator1._ParkingTrajectoryGenerator__parkingTrajectoryPoints_y_rear_axle.size'], {}), '(ParkingTrajectoryGenerator1.\n _ParkingTrajectoryGenerator__parkingTrajectoryPoints_y_rear_axle.size)\n', (15872, 15976), True, 'import numpy as np\n'), ((16330, 16402), 'matplotlib.pyplot.stem', 'plt.stem', (['ParkingTrajectoryGenerator1._ParkingTrajectoryGenerator__theta'], {}), '(ParkingTrajectoryGenerator1._ParkingTrajectoryGenerator__theta)\n', (16338, 16402), True, 'import matplotlib.pyplot as plt\n'), ((16403, 16413), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (16411, 16413), True, 'import matplotlib.pyplot as plt\n'), ((16452, 16462), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (16460, 16462), True, 'import matplotlib.pyplot as plt\n'), ((394, 410), 'math.radians', 'math.radians', (['(45)'], {}), '(45)\n', (406, 410), False, 'import math\n'), ((1082, 1140), 'math.sqrt', 'math.sqrt', (['((__l + __l_1) ** 2 + (__rho_min + __b / 2) ** 2)'], {}), '((__l + __l_1) ** 2 + (__rho_min + __b / 2) ** 2)\n', (1091, 1140), False, 'import math\n'), ((1360, 1398), 'numpy.zeros', 'np.zeros', (['(2 * __num_points_per_segment)'], {}), '(2 * __num_points_per_segment)\n', (1368, 1398), True, 'import numpy as np\n'), ((1441, 1479), 'numpy.zeros', 'np.zeros', (['(2 * __num_points_per_segment)'], {}), '(2 * __num_points_per_segment)\n', (1449, 1479), True, 'import numpy as np\n'), ((1692, 1730), 'numpy.zeros', 'np.zeros', (['(2 * __num_points_per_segment)'], {}), '(2 * __num_points_per_segment)\n', (1700, 1730), True, 'import numpy as np\n'), ((1777, 1815), 'numpy.zeros', 'np.zeros', (['(2 * __num_points_per_segment)'], {}), '(2 * __num_points_per_segment)\n', (1785, 1815), True, 'import numpy as np\n'), ((2068, 2106), 'numpy.zeros', 'np.zeros', (['(2 * __num_points_per_segment)'], {}), '(2 * __num_points_per_segment)\n', (2076, 2106), True, 'import numpy as np\n'), ((2422, 2460), 'numpy.zeros', 'np.zeros', (['(2 * __num_points_per_segment)'], {}), '(2 * __num_points_per_segment)\n', (2430, 2460), True, 'import numpy as np\n'), ((16424, 16450), 'numpy.degrees', 'np.degrees', (['steering_angle'], {}), '(steering_angle)\n', (16434, 16450), True, 'import numpy as np\n'), ((520, 541), 'math.tan', 'math.tan', (['__alpha_max'], {}), '(__alpha_max)\n', (528, 541), False, 'import math\n'), ((1148, 1217), 'math.sqrt', 'math.sqrt', (['((__rho_min - __b / 2) ** 2 - (__rho_min - __h_pd / 2) ** 2)'], {}), '((__rho_min - __b / 2) ** 2 - (__rho_min - __h_pd / 2) ** 2)\n', (1157, 1217), False, 'import math\n'), ((2331, 2350), 'numpy.tan', 'np.tan', (['__alpha_max'], {}), '(__alpha_max)\n', (2337, 2350), True, 'import numpy as np\n'), ((5624, 5938), 'numpy.poly1d', 'np.poly1d', (['[self.__rho_min * (P_3_circle_arc_x + 3.0 * P_1_circle_arc_x - 3.0 *\n P_2_circle_arc_x - P_0_circle_arc_x), self.__rho_min * 3 * (\n P_2_circle_arc_x - 2 * P_1_circle_arc_x + P_0_circle_arc_x), self.\n __rho_min * 3 * (P_1_circle_arc_x - P_0_circle_arc_x), self.__rho_min *\n P_0_circle_arc_x]'], {}), '([self.__rho_min * (P_3_circle_arc_x + 3.0 * P_1_circle_arc_x - \n 3.0 * P_2_circle_arc_x - P_0_circle_arc_x), self.__rho_min * 3 * (\n P_2_circle_arc_x - 2 * P_1_circle_arc_x + P_0_circle_arc_x), self.\n __rho_min * 3 * (P_1_circle_arc_x - P_0_circle_arc_x), self.__rho_min *\n P_0_circle_arc_x])\n', (5633, 5938), True, 'import numpy as np\n'), ((5972, 6286), 'numpy.poly1d', 'np.poly1d', (['[self.__rho_min * (P_3_circle_arc_y + 3.0 * P_1_circle_arc_y - 3.0 *\n P_2_circle_arc_y - P_0_circle_arc_y), self.__rho_min * 3 * (\n P_2_circle_arc_y - 2 * P_1_circle_arc_y + P_0_circle_arc_y), self.\n __rho_min * 3 * (P_1_circle_arc_y - P_0_circle_arc_y), self.__rho_min *\n P_0_circle_arc_y]'], {}), '([self.__rho_min * (P_3_circle_arc_y + 3.0 * P_1_circle_arc_y - \n 3.0 * P_2_circle_arc_y - P_0_circle_arc_y), self.__rho_min * 3 * (\n P_2_circle_arc_y - 2 * P_1_circle_arc_y + P_0_circle_arc_y), self.\n __rho_min * 3 * (P_1_circle_arc_y - P_0_circle_arc_y), self.__rho_min *\n P_0_circle_arc_y])\n', (5981, 6286), True, 'import numpy as np\n'), ((6364, 6466), 'numpy.poly1d', 'np.poly1d', (['[0, 0, S_x_rear_axle - self.__targetPoint_x_rear_axle, self.\n __targetPoint_x_rear_axle]'], {}), '([0, 0, S_x_rear_axle - self.__targetPoint_x_rear_axle, self.\n __targetPoint_x_rear_axle])\n', (6373, 6466), True, 'import numpy as np\n'), ((6532, 6634), 'numpy.poly1d', 'np.poly1d', (['[0, 0, S_y_rear_axle - self.__targetPoint_y_rear_axle, self.\n __targetPoint_y_rear_axle]'], {}), '([0, 0, S_y_rear_axle - self.__targetPoint_y_rear_axle, self.\n __targetPoint_y_rear_axle])\n', (6541, 6634), True, 'import numpy as np\n'), ((6718, 6812), 'numpy.linspace', 'np.linspace', (['self.__targetPoint_x_rear_axle', 'S_x_rear_axle', 'self.__num_points_per_segment'], {}), '(self.__targetPoint_x_rear_axle, S_x_rear_axle, self.\n __num_points_per_segment)\n', (6729, 6812), True, 'import numpy as np\n'), ((7453, 7523), 'numpy.linspace', 'np.linspace', (['math.pi', '(math.pi * (3 / 2))', 'self.__num_points_per_segment'], {}), '(math.pi, math.pi * (3 / 2), self.__num_points_per_segment)\n', (7464, 7523), True, 'import numpy as np\n'), ((7767, 7831), 'numpy.linspace', 'np.linspace', (['math.pi', '(math.pi / 2)', 'self.__num_points_per_segment'], {}), '(math.pi, math.pi / 2, self.__num_points_per_segment)\n', (7778, 7831), True, 'import numpy as np\n'), ((11578, 11892), 'numpy.poly1d', 'np.poly1d', (['[self.__rho_min * (P_3_circle_arc_x + 3.0 * P_1_circle_arc_x - 3.0 *\n P_2_circle_arc_x - P_0_circle_arc_x), self.__rho_min * 3 * (\n P_2_circle_arc_x - 2 * P_1_circle_arc_x + P_0_circle_arc_x), self.\n __rho_min * 3 * (P_1_circle_arc_x - P_0_circle_arc_x), self.__rho_min *\n P_0_circle_arc_x]'], {}), '([self.__rho_min * (P_3_circle_arc_x + 3.0 * P_1_circle_arc_x - \n 3.0 * P_2_circle_arc_x - P_0_circle_arc_x), self.__rho_min * 3 * (\n P_2_circle_arc_x - 2 * P_1_circle_arc_x + P_0_circle_arc_x), self.\n __rho_min * 3 * (P_1_circle_arc_x - P_0_circle_arc_x), self.__rho_min *\n P_0_circle_arc_x])\n', (11587, 11892), True, 'import numpy as np\n'), ((11930, 12244), 'numpy.poly1d', 'np.poly1d', (['[self.__rho_min * (P_3_circle_arc_y + 3.0 * P_1_circle_arc_y - 3.0 *\n P_2_circle_arc_y - P_0_circle_arc_y), self.__rho_min * 3 * (\n P_2_circle_arc_y - 2 * P_1_circle_arc_y + P_0_circle_arc_y), self.\n __rho_min * 3 * (P_1_circle_arc_y - P_0_circle_arc_y), self.__rho_min *\n P_0_circle_arc_y]'], {}), '([self.__rho_min * (P_3_circle_arc_y + 3.0 * P_1_circle_arc_y - \n 3.0 * P_2_circle_arc_y - P_0_circle_arc_y), self.__rho_min * 3 * (\n P_2_circle_arc_y - 2 * P_1_circle_arc_y + P_0_circle_arc_y), self.\n __rho_min * 3 * (P_1_circle_arc_y - P_0_circle_arc_y), self.__rho_min *\n P_0_circle_arc_y])\n', (11939, 12244), True, 'import numpy as np\n'), ((12326, 12428), 'numpy.poly1d', 'np.poly1d', (['[0, 0, S_x_rear_axle - self.__targetPoint_x_rear_axle, self.\n __targetPoint_x_rear_axle]'], {}), '([0, 0, S_x_rear_axle - self.__targetPoint_x_rear_axle, self.\n __targetPoint_x_rear_axle])\n', (12335, 12428), True, 'import numpy as np\n'), ((12498, 12600), 'numpy.poly1d', 'np.poly1d', (['[0, 0, S_y_rear_axle - self.__targetPoint_y_rear_axle, self.\n __targetPoint_y_rear_axle]'], {}), '([0, 0, S_y_rear_axle - self.__targetPoint_y_rear_axle, self.\n __targetPoint_y_rear_axle])\n', (12507, 12600), True, 'import numpy as np\n'), ((12689, 12783), 'numpy.linspace', 'np.linspace', (['self.__targetPoint_x_rear_axle', 'S_x_rear_axle', 'self.__num_points_per_segment'], {}), '(self.__targetPoint_x_rear_axle, S_x_rear_axle, self.\n __num_points_per_segment)\n', (12700, 12783), True, 'import numpy as np\n'), ((13295, 13359), 'numpy.linspace', 'np.linspace', (['math.pi', '(math.pi / 2)', 'self.__num_points_per_segment'], {}), '(math.pi, math.pi / 2, self.__num_points_per_segment)\n', (13306, 13359), True, 'import numpy as np\n'), ((6895, 6933), 'numpy.ones', 'np.ones', (['self.__num_points_per_segment'], {}), '(self.__num_points_per_segment)\n', (6902, 6933), True, 'import numpy as np\n'), ((12871, 12909), 'numpy.ones', 'np.ones', (['self.__num_points_per_segment'], {}), '(self.__num_points_per_segment)\n', (12878, 12909), True, 'import numpy as np\n'), ((8017, 8041), 'numpy.cos', 'np.cos', (['circle_arc_angle'], {}), '(circle_arc_angle)\n', (8023, 8041), True, 'import numpy as np\n'), ((8160, 8184), 'numpy.sin', 'np.sin', (['circle_arc_angle'], {}), '(circle_arc_angle)\n', (8166, 8184), True, 'import numpy as np\n'), ((15216, 15239), 'numpy.tanh', 'np.tanh', (['(self.__K_t * v)'], {}), '(self.__K_t * v)\n', (15223, 15239), True, 'import numpy as np\n'), ((13520, 13533), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (13526, 13533), True, 'import numpy as np\n'), ((13629, 13642), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (13635, 13642), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 12 13:07:35 2018
@author: Sunny
"""
import numpy as np
import cv2
print(cv2.__version__)
TOTAL_CAMERAS=1
HEIGHT = 240
WIDTH = 320
RECORD_WIDTH = WIDTH*3
RECORD_HEIGHT = HEIGHT
FPS = 90
cam = []
frame = []
ret = []
rgb = []
i = 0
rgb_current=0
cam = cv2.VideoCapture(0)
#cam1 = cv2.VideoCapture(1)
#cam2 = cv2.VideoCapture(2)
cam.set(3,WIDTH)
cam.set(4,HEIGHT)
cam.set(cv2.CAP_PROP_FPS,FPS)
print(cam.get(3))
print(cam.get(4))
print(cam.get(5))
print(cam.get(cv2.CAP_PROP_FPS))
fourcc = cv2.VideoWriter_fourcc(*'DIVX')
out = cv2.VideoWriter('C:\\Users\\Sunny\\Desktop\\saveOutput.avi',fourcc, FPS, (RECORD_WIDTH,RECORD_HEIGHT))
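# NOTE: the writer's frame size (RECORD_WIDTH x RECORD_HEIGHT) must match the frames
# written to it below; RECORD_WIDTH is WIDTH*3 because three WIDTHxHEIGHT frames are
# stacked side by side before writing.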
x=0
rgb_previous = 0
cv2.namedWindow("Live Feed")
flag_record=True
while True:
final_frame=0
j = 0
ret_current, frame_current = cam.read()
# Our operations on the frame come here
#gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    rgb_current = cv2.cvtColor(frame_current, cv2.COLOR_RGBA2RGB)
    rgb_current = cv2.resize(rgb_current, (WIDTH, HEIGHT), interpolation=cv2.INTER_CUBIC)
    horizontal_img = cv2.flip(rgb_current, 1)
# Display the resulting frame
numpy_horizontal = np.hstack((horizontal_img, rgb_current, horizontal_img))
numpy_vertical = np.vstack((numpy_horizontal,numpy_horizontal))
    if flag_record:
        out.write(numpy_horizontal)
cv2.imshow("Live Feed",numpy_horizontal)
    key = cv2.waitKey(1) & 0xFF  # poll once per frame; two waitKey() calls per loop can drop keypresses
    if key == ord('q'):
        break
    elif key == ord('r'):
        flag_record = not flag_record  # toggle recording
cam.release()
out.release()  # release unconditionally so the output file is finalized
cv2.destroyAllWindows()
|
[
"cv2.resize",
"cv2.VideoWriter_fourcc",
"cv2.cvtColor",
"cv2.waitKey",
"cv2.imshow",
"numpy.hstack",
"cv2.VideoCapture",
"numpy.vstack",
"cv2.VideoWriter",
"cv2.flip",
"cv2.destroyAllWindows",
"cv2.namedWindow"
] |
[((302, 321), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (318, 321), False, 'import cv2\n'), ((543, 574), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'DIVX'"], {}), "(*'DIVX')\n", (565, 574), False, 'import cv2\n'), ((581, 690), 'cv2.VideoWriter', 'cv2.VideoWriter', (['"""C:\\\\Users\\\\Sunny\\\\Desktop\\\\saveOutput.avi"""', 'fourcc', 'FPS', '(RECORD_WIDTH, RECORD_HEIGHT)'], {}), "('C:\\\\Users\\\\Sunny\\\\Desktop\\\\saveOutput.avi', fourcc, FPS, (\n RECORD_WIDTH, RECORD_HEIGHT))\n", (596, 690), False, 'import cv2\n'), ((705, 733), 'cv2.namedWindow', 'cv2.namedWindow', (['"""Live Feed"""'], {}), "('Live Feed')\n", (720, 733), False, 'import cv2\n'), ((1722, 1745), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1743, 1745), False, 'import cv2\n'), ((961, 1008), 'cv2.cvtColor', 'cv2.cvtColor', (['frame_current', 'cv2.COLOR_RGBA2RGB'], {}), '(frame_current, cv2.COLOR_RGBA2RGB)\n', (973, 1008), False, 'import cv2\n'), ((1035, 1106), 'cv2.resize', 'cv2.resize', (['rgb_current', '(WIDTH, HEIGHT)'], {'interpolation': 'cv2.INTER_CUBIC'}), '(rgb_current, (WIDTH, HEIGHT), interpolation=cv2.INTER_CUBIC)\n', (1045, 1106), False, 'import cv2\n'), ((1126, 1150), 'cv2.flip', 'cv2.flip', (['rgb_current', '(1)'], {}), '(rgb_current, 1)\n', (1134, 1150), False, 'import cv2\n'), ((1210, 1266), 'numpy.hstack', 'np.hstack', (['(horizontal_img, rgb_current, horizontal_img)'], {}), '((horizontal_img, rgb_current, horizontal_img))\n', (1219, 1266), True, 'import numpy as np\n'), ((1288, 1335), 'numpy.vstack', 'np.vstack', (['(numpy_horizontal, numpy_horizontal)'], {}), '((numpy_horizontal, numpy_horizontal))\n', (1297, 1335), True, 'import numpy as np\n'), ((1415, 1456), 'cv2.imshow', 'cv2.imshow', (['"""Live Feed"""', 'numpy_horizontal'], {}), "('Live Feed', numpy_horizontal)\n", (1425, 1456), False, 'import cv2\n'), ((1464, 1478), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1475, 1478), False, 'import cv2\n'), ((1522, 1536), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1533, 1536), False, 'import cv2\n')]
|
# Generated by Django 2.2.3 on 2019-09-20 05:05
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contract', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='UserSettings',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('usernamename', models.CharField(max_length=20)),
('realname', models.CharField(max_length=20)),
('passwordword', models.CharField(max_length=20)),
('created', models.DateTimeField(auto_now_add=True, db_index=True)),
('telphone_num', models.CharField(max_length=20)),
('comment', models.TextField(null=True)),
('user', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='settings', to=settings.AUTH_USER_MODEL)),
('vendor', models.ManyToManyField(related_name='user_settings', to='contract.Vendor')),
],
options={
'verbose_name_plural': 'User Settings',
},
),
]
|
[
"django.db.models.TextField",
"django.db.models.OneToOneField",
"django.db.migrations.swappable_dependency",
"django.db.models.ManyToManyField",
"django.db.models.CharField",
"django.db.models.AutoField",
"django.db.models.DateTimeField"
] |
[((247, 304), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (278, 304), False, 'from django.db import migrations, models\n'), ((479, 572), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (495, 572), False, 'from django.db import migrations, models\n'), ((604, 635), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (620, 635), False, 'from django.db import migrations, models\n'), ((667, 698), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (683, 698), False, 'from django.db import migrations, models\n'), ((734, 765), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (750, 765), False, 'from django.db import migrations, models\n'), ((796, 850), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'db_index': '(True)'}), '(auto_now_add=True, db_index=True)\n', (816, 850), False, 'from django.db import migrations, models\n'), ((886, 917), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (902, 917), False, 'from django.db import migrations, models\n'), ((948, 975), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (964, 975), False, 'from django.db import migrations, models\n'), ((1003, 1137), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""settings"""', 'to': 'settings.AUTH_USER_MODEL'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n related_name='settings', to=settings.AUTH_USER_MODEL)\n", (1023, 1137), False, 'from django.db import migrations, models\n'), ((1163, 1237), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""user_settings"""', 'to': '"""contract.Vendor"""'}), "(related_name='user_settings', to='contract.Vendor')\n", (1185, 1237), False, 'from django.db import migrations, models\n')]
|
import discord
from discord import Embed
from discord.errors import NotFound
from discord.ext import commands
import requests
from lib.bot import bot
class Filter(commands.Cog):
def __init__(self, client: bot):
self.client = client
@commands.Cog.listener()
async def on_message(self, message):
if not message.author.bot:
            check = requests.get(
                f"https://www.purgomalum.com/service/containsprofanity?text={message.content}")
check = check.text
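            # NOTE: requests is synchronous, so this HTTP call blocks the event loop
            # for the duration of the request; an aiohttp session would avoid that.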
if str(message.guild.id) in self.client.guilds_:
guild = self.client.guilds_[str(message.guild.id)]
filter = guild["filter"]
words = guild["filterWords"]
delete = guild["filterDelete"]
else:
return
if filter:
if check == 'true':
                    if delete:
                        try:
                            await message.delete()
                        except NotFound:
                            return
                    else:
                        response = requests.get(
                            "https://insult.mattbas.org/api/insult")
                        embed = Embed(
                            colour=0xFF0000,
                            description=response.text
                        )
                        await message.channel.send(embed=embed)
elif message.content in words:
                    if delete:
                        try:
                            await message.delete()
                        except NotFound:
                            return
                    else:
                        response = requests.get(
                            "https://insult.mattbas.org/api/insult")
                        embed = Embed(
                            colour=0xFF0000,
                            description=response.text
                        )
                        await message.channel.send(embed=embed)
|
[
"discord.Embed",
"requests.get",
"discord.ext.commands.Cog.listener"
] |
[((269, 292), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (290, 292), False, 'from discord.ext import commands\n'), ((389, 492), 'requests.get', 'requests.get', (['f"""https://www.purgomalum.com/service/containsprofanity?text=${message.content}"""'], {}), "(\n f'https://www.purgomalum.com/service/containsprofanity?text=${message.content}'\n )\n", (401, 492), False, 'import requests\n'), ((1168, 1221), 'requests.get', 'requests.get', (['"""https://insult.mattbas.org/api/insult"""'], {}), "('https://insult.mattbas.org/api/insult')\n", (1180, 1221), False, 'import requests\n'), ((1284, 1333), 'discord.Embed', 'Embed', ([], {'colour': '(16711680)', 'description': 'response.text'}), '(colour=16711680, description=response.text)\n', (1289, 1333), False, 'from discord import Embed\n'), ((1804, 1857), 'requests.get', 'requests.get', (['"""https://insult.mattbas.org/api/insult"""'], {}), "('https://insult.mattbas.org/api/insult')\n", (1816, 1857), False, 'import requests\n'), ((1920, 1969), 'discord.Embed', 'Embed', ([], {'colour': '(16711680)', 'description': 'response.text'}), '(colour=16711680, description=response.text)\n', (1925, 1969), False, 'from discord import Embed\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Master module for dlotter.arguments
Called from dlotter.__main__
"""
import sys
import argparse
from argparse import ArgumentDefaultsHelpFormatter
class MyParser(argparse.ArgumentParser):
def error(self, message):
sys.stderr.write('error: %s\n' % message)
self.print_help()
sys.exit(2)
class arguments:
def __init__(self) -> None:
return
def get_args(self, sysargs):
parent_parser = MyParser(
description='Plot data quick and dirty from NWP output',
formatter_class=ArgumentDefaultsHelpFormatter)
subparser = parent_parser.add_subparsers(dest="cmd")
# Parser for NWP Deterministic
parser_plot = subparser.add_parser('plot', help='Plot NWP output')
parser_plot.add_argument('-p',
'--parameters',
metavar='PARAMETERS',
type=str,
help='Parameters to plot. Seperate with ":", eg: "t2m:w10m:precip:slp:td2m:tcc:lmhc".',
required=True)
parser_plot.add_argument('-f',
'--filetype',
metavar='FILETYPE',
type=str,
help='What filetype are we using? (Options are: grib2)',
default='grib2',
required=False)
parser_plot.add_argument('-d',
'--directory',
type=str,
help='directory to read data from',
default='.')
parser_plot.add_argument('--prefix',
type=str,
help='Set to prefix of files if any',
default='',
required=False)
parser_plot.add_argument('--postfix',
type=str,
help='Set to postfix of files if any',
default='',
required=False)
parser_plot.add_argument('-o',
'--output-dir',
metavar='OUTDIR',
type=str,
help='Directory to place output into',
default='.',
required=False)
parser_plot.add_argument('-l',
'--limit-files',
metavar='LIMIT',
type=int,
                                 help='Only use the first LIMIT files. If set to 0, no limit is used. If LIMIT > 0, files will be sorted by name first',
default=0,
required=False)
parser_plot.add_argument('-a',
'--area',
metavar='AREA',
type=str,
help='Over which area to plot (Options are: dk, neu, sjalland, disko)',
default="dk",
required=False)
parser_plot.add_argument('--verbose',
action='store_true',
help='Verbose output',
default=False)
if len(sysargs)==1:
parent_parser.print_help()
sys.exit(2)
        args = parent_parser.parse_args(sysargs[1:])
return args
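
# Example (sketch, not from the original module): parsing a typical invocation.
# get_args() expects the full argv including the program name and skips it.
#   args = arguments().get_args(['dlotter', 'plot', '-p', 't2m:precip', '-d', '/data'])
#   assert args.cmd == 'plot' and args.parameters == 't2m:precip'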
|
[
"sys.stderr.write",
"sys.exit"
] |
[((277, 318), 'sys.stderr.write', 'sys.stderr.write', (["('error: %s\\n' % message)"], {}), "('error: %s\\n' % message)\n", (293, 318), False, 'import sys\n'), ((353, 364), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (361, 364), False, 'import sys\n'), ((3387, 3398), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (3395, 3398), False, 'import sys\n')]
|
import numpy as np
from instResp.polezero import polezero
from instResp.plotResp import plotResponse
import os
import logging
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)
'''
This module contains a collection of non-bulletproof routines
for creating/manipulating instrument response stages,
particularly the first stage: the analog pole-zero stage.
'''
def evalResp(pz, f):
s = 0.000 + 1.000j
numerator = 1.000 + 0.000j
denominator = 1.000 + 0.000j
if pz.type == 'A':
s *= 2.*np.pi*f
elif pz.type == 'B':
s *= f
else:
logger.warn("Unknown pz response type=[%s]" % pz.type)
for j in range(pz.nzeros):
numerator *= (s - pz.zeros[j])
for j in range(pz.npoles):
denominator *= (s - pz.poles[j])
Gf = numerator * pz.a0 # Make sure this is complex
Gf /= denominator
    return Gf
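
# Example (sketch, not from the original module): a single real pole at -2*pi rad/s
# behaves as a one-pole low-pass with its -3 dB corner near 1 Hz. The polezero
# constructor keywords are assumed from read_sacpz_file() below.
#   pz = polezero(name='demo', type='A', unitsIn='M/S', unitsOut='COUNTS',
#                 a0=1.0, sensitivity=1.0, sensitivity_f=1.0,
#                 poles=np.array([-2*np.pi + 0j]),
#                 zeros=np.array([], dtype=np.complex128))
#   abs(evalResp(pz, 1.0))  # ~0.707 of the low-frequency value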
def getResponse(pz, freqs, removeZero=False, useSensitivity=True):
'''
    We expect a standard IRIS polezero file for displacement;
    if removeZero=True, try to shed one zero at the origin (displacement -> velocity).
'''
if removeZero:
success = pz.remove_zero()
#zeros = np.zeros((pz.zeros.size-1,), dtype=np.complex128)
#success = remove_zero(pz.zeros, zeros)
if success:
logger.debug("Zero successfully removed from origin")
#pz.zeros = zeros
#pz.nzeros = zeros.size
else:
logger.warn("Problem removing zero from origin!")
resp = np.zeros((len(freqs),), dtype=np.complex128)
for i, f in enumerate(freqs):
resp[i] = evalResp(pz, f)
if useSensitivity:
resp[i] *= pz.sensitivity
return resp
def read_sacpz_file(filename):
"""
* **********************************
* NETWORK (KNETWK): AU
* STATION (KSTNM): WR1
* LOCATION (KHOLE):
* CHANNEL (KCMPNM): BHZ
* CREATED : 2017-02-02T01:23:27
* START : 2005-01-31T00:00:00
* END : 2599-12-31T23:59:59
* DESCRIPTION : Warramunga Array, Australia
* LATITUDE : -19.942600
* LONGITUDE : 134.339500
* ELEVATION : 389.0
* DEPTH : 0.0
* DIP : 0.0
* AZIMUTH : 0.0
* SAMPLE RATE : 40.0
* INPUT UNIT : M
* OUTPUT UNIT : COUNTS
* INSTTYPE : Guralp CMG3ESP_30sec_ims/Guralp DM24-MK3 Datalogge
* INSTGAIN : 4.000290e+03 (M/S)
* COMMENT : V3180 A3242
* SENSITIVITY : 2.797400e+09 (M/S)
* A0 : 8.883050e-02
* **********************************
ZEROS 5
+0.000000e+00 +0.000000e+00
+0.000000e+00 +0.000000e+00
+0.000000e+00 +0.000000e+00
+8.670000e+02 +9.050000e+02
+8.670000e+02 -9.050000e+02
POLES 4
-1.486000e-01 +1.486000e-01
-1.486000e-01 -1.486000e-01
-3.140000e+02 +2.023000e+02
-3.140000e+02 -2.023000e+02
CONSTANT 2.484944e+08
"""
fname = 'read_sacpz_file'
with open(filename, 'r') as f:
lines = f.readlines()
zeros = None
poles = None
sensitivity = None
a0 = None
unitsIn = None
unitsOut = None
knet = ""
ksta = ""
kloc = ""
kchan = ""
for i in range(len(lines)):
line = lines[i]
#print "i=[%d] line=[%s]" % (i, line)
if line[0] == '*':
if line[2] != '*':
split_list = line.split(':')
field = split_list[0][1:]
val = split_list[1]
# could have val = "" or val = 2.79E9 (M/S)
val_list = val.split()
nsplit=len(val_list)
#print "field=", field, " val=", val
if 'SENSITIVITY' in field:
sensitivity = float(val_list[0])
elif 'A0' in field:
a0 = float(val_list[0])
elif 'INPUT UNIT' in field:
unitsIn = val.strip()
elif 'OUTPUT UNIT' in field:
unitsOut = val.strip()
elif 'NETWORK' in field:
knet = val.strip()
elif 'STATION' in field:
ksta = val.strip()
elif 'LOCATION' in field:
kloc = val.strip()
elif 'CHANNEL' in field:
kchan = val.strip()
elif line[0:5] == 'ZEROS':
try:
nzeros = int(line[6:len(line)])
except:
logger.error("%s.%s Error: can't read nzeros from line=[%s]" % (__name__, fname, line))
exit(1)
#zeros = np.zeros((nzeros,), dtype=np.complex128)
zeros = np.zeros(nzeros, dtype=np.complex128)
for j in range(nzeros):
i += 1
line = lines[i]
(z_re, z_im) = line.split()
zeros[j] = complex( float(z_re), float(z_im) )
elif line[0:5] == 'POLES':
try:
npoles = int(line[6:len(line)])
except:
logger.error("%s.%s Error: can't read npoles from line=[%s]" % (__name__, fname, line))
exit(1)
poles = np.zeros(npoles, dtype=np.complex128)
for j in range(npoles):
i += 1
line = lines[i]
(p_re, p_im) = line.split()
poles[j] = complex( float(p_re), float(p_im) )
#print "knet=%s ksta=%s kloc=%s kchan=%s" % (knet, ksta, kloc, kchan)
name = "%s.%s %s.%s" % (knet, ksta, kloc, kchan)
pz_ = polezero(name = name,
type = 'A', #type = 'A[Laplace Transform (Rad/sec)]',
unitsIn = unitsIn,
unitsOut = unitsOut,
a0 = a0,
sensitivity = sensitivity,
sensitivity_f = 1.0,
poles = poles,
zeros = zeros)
return pz_
def get_corner_freq_from_pole(pole):
'''
    Return the corner frequency in Hz: |pole| is the pole's distance to the
    origin in rad/s, and dividing by 2*pi converts rad/s to Hz.
'''
return np.sqrt(pole.real**2 + pole.imag**2) / (2.*np.pi)
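
# Worked example: for the CMG3ESP pole at -1.486e-01 + 1.486e-01j in the sample
# SACPZ above, |p| = sqrt(2) * 0.1486 ~= 0.2101 rad/s, so f ~= 0.0334 Hz, i.e. a
# corner period of roughly 30 s, matching the "30sec" in the sample's INSTTYPE.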
def test_RC():
from instResp.libNom import RC
R = 4.
C = 1.25/(2.*np.pi)
pzs = RC(tau=R*C)
freqs = np.logspace(-5, 4., num=1000)
resp = getResponse(pzs, freqs, removeZero=False)
title = 'RC filter: R=4 ohms, C=1.25F/2pi'
plotResponse(resp, freqs, title=title, xmin=.001, xmax=100., ymin=0.01, ymax=1.2)
logger.info("Corner freq:%f" % get_corner_freq_from_pole(pzs.poles[0]))
return
def test_WA(damp=.18, gain=1., f0=14, fnorm=100.):
from instResp.libNom import WA, Accelerometer
pzs = WA(per=1/f0, damp=damp, gain=gain, normalize=True, normalize_freq=fnorm)
logger.info(pzs)
freqs = np.logspace(-5, 4., num=500)
resp = getResponse(pzs, freqs, removeZero=False)
#print(np.max(np.abs(resp)))
title='WA for f0=%.2f Hz damp=%.3f gain=%.0f' % (f0,damp, gain)
logger.info("Corner freq:%.2f" % get_corner_freq_from_pole(pzs.poles[0]))
plotResponse(resp, freqs, title=title, xmin=1, xmax=5000., ymin=.01, ymax=1.2)
return
def plot_pz_resp(pzfile=None):
pzs = read_sacpz_file(pzfile)
logger.info(pzs)
freqs = np.logspace(-5, 3., num=500)
resp = getResponse(pzs, freqs, removeZero=True, useSensitivity=False)
title=pzfile
plotResponse(resp, freqs, title=title, xmin=.001, xmax=100., ymin=.01, ymax=1e3)
return
def main():
#test_RC()
test_WA(damp=0.6)
exit()
pz_dir = '/Users/mth/mth/Data/IRIS_Request/pz/'
pz_fil = 'SACPZ.II.AAK.10.BHZ'
plot_pz_resp(pzfile=os.path.join(pz_dir, pz_fil))
exit()
if __name__=="__main__":
main()
|
[
"instResp.libNom.RC",
"logging.basicConfig",
"numpy.logspace",
"instResp.polezero.polezero",
"numpy.zeros",
"instResp.libNom.WA",
"instResp.plotResp.plotResponse",
"os.path.join",
"logging.getLogger",
"numpy.sqrt"
] |
[((129, 204), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(levelname)s:%(message)s"""', 'level': 'logging.INFO'}), "(format='%(levelname)s:%(message)s', level=logging.INFO)\n", (148, 204), False, 'import logging\n'), ((214, 241), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (231, 241), False, 'import logging\n'), ((5619, 5765), 'instResp.polezero.polezero', 'polezero', ([], {'name': 'name', 'type': '"""A"""', 'unitsIn': 'unitsIn', 'unitsOut': 'unitsOut', 'a0': 'a0', 'sensitivity': 'sensitivity', 'sensitivity_f': '(1.0)', 'poles': 'poles', 'zeros': 'zeros'}), "(name=name, type='A', unitsIn=unitsIn, unitsOut=unitsOut, a0=a0,\n sensitivity=sensitivity, sensitivity_f=1.0, poles=poles, zeros=zeros)\n", (5627, 5765), False, 'from instResp.polezero import polezero\n'), ((6345, 6358), 'instResp.libNom.RC', 'RC', ([], {'tau': '(R * C)'}), '(tau=R * C)\n', (6347, 6358), False, 'from instResp.libNom import RC\n'), ((6369, 6399), 'numpy.logspace', 'np.logspace', (['(-5)', '(4.0)'], {'num': '(1000)'}), '(-5, 4.0, num=1000)\n', (6380, 6399), True, 'import numpy as np\n'), ((6504, 6591), 'instResp.plotResp.plotResponse', 'plotResponse', (['resp', 'freqs'], {'title': 'title', 'xmin': '(0.001)', 'xmax': '(100.0)', 'ymin': '(0.01)', 'ymax': '(1.2)'}), '(resp, freqs, title=title, xmin=0.001, xmax=100.0, ymin=0.01,\n ymax=1.2)\n', (6516, 6591), False, 'from instResp.plotResp import plotResponse\n'), ((6787, 6861), 'instResp.libNom.WA', 'WA', ([], {'per': '(1 / f0)', 'damp': 'damp', 'gain': 'gain', 'normalize': '(True)', 'normalize_freq': 'fnorm'}), '(per=1 / f0, damp=damp, gain=gain, normalize=True, normalize_freq=fnorm)\n', (6789, 6861), False, 'from instResp.libNom import WA, Accelerometer\n'), ((6894, 6923), 'numpy.logspace', 'np.logspace', (['(-5)', '(4.0)'], {'num': '(500)'}), '(-5, 4.0, num=500)\n', (6905, 6923), True, 'import numpy as np\n'), ((7160, 7245), 'instResp.plotResp.plotResponse', 'plotResponse', (['resp', 'freqs'], {'title': 'title', 'xmin': '(1)', 'xmax': '(5000.0)', 'ymin': '(0.01)', 'ymax': '(1.2)'}), '(resp, freqs, title=title, xmin=1, xmax=5000.0, ymin=0.01, ymax=1.2\n )\n', (7172, 7245), False, 'from instResp.plotResp import plotResponse\n'), ((7351, 7380), 'numpy.logspace', 'np.logspace', (['(-5)', '(3.0)'], {'num': '(500)'}), '(-5, 3.0, num=500)\n', (7362, 7380), True, 'import numpy as np\n'), ((7476, 7566), 'instResp.plotResp.plotResponse', 'plotResponse', (['resp', 'freqs'], {'title': 'title', 'xmin': '(0.001)', 'xmax': '(100.0)', 'ymin': '(0.01)', 'ymax': '(1000.0)'}), '(resp, freqs, title=title, xmin=0.001, xmax=100.0, ymin=0.01,\n ymax=1000.0)\n', (7488, 7566), False, 'from instResp.plotResp import plotResponse\n'), ((6197, 6237), 'numpy.sqrt', 'np.sqrt', (['(pole.real ** 2 + pole.imag ** 2)'], {}), '(pole.real ** 2 + pole.imag ** 2)\n', (6204, 6237), True, 'import numpy as np\n'), ((7743, 7771), 'os.path.join', 'os.path.join', (['pz_dir', 'pz_fil'], {}), '(pz_dir, pz_fil)\n', (7755, 7771), False, 'import os\n'), ((4740, 4777), 'numpy.zeros', 'np.zeros', (['nzeros'], {'dtype': 'np.complex128'}), '(nzeros, dtype=np.complex128)\n', (4748, 4777), True, 'import numpy as np\n'), ((5244, 5281), 'numpy.zeros', 'np.zeros', (['npoles'], {'dtype': 'np.complex128'}), '(npoles, dtype=np.complex128)\n', (5252, 5281), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
# pylint: disable=R0903, C0115
"""
log_it.extensions.marshmallow.log
---------------------------------
Marshmallow Log Models
:copyright: (c) 2021 by <NAME>
:license: BSD, see LICENSE for more details
"""
from datetime import datetime
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema, auto_field
from marshmallow_sqlalchemy.fields import Nested
from log_it.log.model import (
TLog,
TField,
TLogField,
TMessage,
TMessageType,
TTag,
TTagMessage,
TUserPermission,
TRolePermission,
)
from . import FixtureSchema
from .user import UserFixture, RoleFixture, ActionFixture
class LogSchema(SQLAlchemyAutoSchema):
class Meta:
model = TLog
class FieldSchema(SQLAlchemyAutoSchema):
class Meta:
model = TField
class LogFieldSchema(SQLAlchemyAutoSchema):
class Meta:
model = TLogField
class MessageSchema(SQLAlchemyAutoSchema):
class Meta:
model = TMessage
class MessageTypeSchema(SQLAlchemyAutoSchema):
class Meta:
model = TMessageType
class TagSchema(SQLAlchemyAutoSchema):
class Meta:
model = TTag
class TagMessageSchema(SQLAlchemyAutoSchema):
class Meta:
model = TTagMessage
class UserPermissionSchema(SQLAlchemyAutoSchema):
class Meta:
model = TUserPermission
class RolePermissionSchema(SQLAlchemyAutoSchema):
class Meta:
model = TRolePermission
# FixtureSchema
class LogFixture(FixtureSchema):
"""Barebones Log Fixture for stubs"""
class Meta(FixtureSchema.Meta):
model = TLog
filter_attrs = ["sLog"]
sLog = auto_field()
class FieldFixture(FixtureSchema):
class Meta(FixtureSchema.Meta):
model = TField
filter_attrs = ["sField"]
sField = auto_field()
class LogFieldFixture(FixtureSchema):
class Meta(FixtureSchema.Meta):
model = TLogField
filter_attrs = [
"log.ixLog",
"field.ixField",
]
log = Nested(LogFixture, many=False)
field = Nested(FieldFixture, many=False)
sValue = auto_field()
iOrder = auto_field(missing=None)
class MessageTypeFixture(FixtureSchema):
class Meta(FixtureSchema.Meta):
model = TMessageType
filter_attrs = ["sMessageType"]
class TagFixture(FixtureSchema):
class Meta(FixtureSchema.Meta):
model = TTag
filter_attrs = ["sTag"]
sTag = auto_field()
class TagMessageFixture(FixtureSchema):
class Meta(FixtureSchema.Meta):
model = TTagMessage
class MessageFixture(FixtureSchema):
class Meta(FixtureSchema.Meta):
model = TMessage
# message fixtures are always inserted, never looked up
filter_attrs = None
log = Nested(LogFixture, many=False)
message_type = Nested(MessageTypeFixture, many=False)
user = Nested(UserFixture, many=False)
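    # `missing=datetime.utcnow` passes the callable itself, so marshmallow stamps
    # each load at load time rather than once at import time.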
utcMessage = auto_field(missing=datetime.utcnow)
sMessage = auto_field()
tags = Nested(TagFixture, many=True)
class UserPermissionFixture(FixtureSchema):
class Meta(FixtureSchema.Meta):
model = TUserPermission
log = Nested(LogFixture, many=False)
user = Nested(UserFixture, many=False)
action = Nested(ActionFixture, many=False)
class RolePermissionFixture(FixtureSchema):
class Meta(FixtureSchema.Meta):
model = TRolePermission
log = Nested(LogFixture, many=False)
role = Nested(RoleFixture, many=False)
action = Nested(ActionFixture, many=False)
class LogFullFixture(FixtureSchema):
class Meta(FixtureSchema.Meta):
model = TLog
filter_attrs = ["sLog"]
sLog = auto_field()
user = Nested(UserFixture, many=False)
fields = Nested(FieldFixture, many=True)
user_permissions = Nested(UserPermissionFixture)
role_permissions = Nested(RolePermissionFixture)
|
[
"marshmallow_sqlalchemy.auto_field",
"marshmallow_sqlalchemy.fields.Nested"
] |
[((1652, 1664), 'marshmallow_sqlalchemy.auto_field', 'auto_field', ([], {}), '()\n', (1662, 1664), False, 'from marshmallow_sqlalchemy import SQLAlchemyAutoSchema, auto_field\n'), ((1809, 1821), 'marshmallow_sqlalchemy.auto_field', 'auto_field', ([], {}), '()\n', (1819, 1821), False, 'from marshmallow_sqlalchemy import SQLAlchemyAutoSchema, auto_field\n'), ((2024, 2054), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['LogFixture'], {'many': '(False)'}), '(LogFixture, many=False)\n', (2030, 2054), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((2067, 2099), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['FieldFixture'], {'many': '(False)'}), '(FieldFixture, many=False)\n', (2073, 2099), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((2113, 2125), 'marshmallow_sqlalchemy.auto_field', 'auto_field', ([], {}), '()\n', (2123, 2125), False, 'from marshmallow_sqlalchemy import SQLAlchemyAutoSchema, auto_field\n'), ((2139, 2163), 'marshmallow_sqlalchemy.auto_field', 'auto_field', ([], {'missing': 'None'}), '(missing=None)\n', (2149, 2163), False, 'from marshmallow_sqlalchemy import SQLAlchemyAutoSchema, auto_field\n'), ((2448, 2460), 'marshmallow_sqlalchemy.auto_field', 'auto_field', ([], {}), '()\n', (2458, 2460), False, 'from marshmallow_sqlalchemy import SQLAlchemyAutoSchema, auto_field\n'), ((2770, 2800), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['LogFixture'], {'many': '(False)'}), '(LogFixture, many=False)\n', (2776, 2800), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((2820, 2858), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['MessageTypeFixture'], {'many': '(False)'}), '(MessageTypeFixture, many=False)\n', (2826, 2858), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((2870, 2901), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['UserFixture'], {'many': '(False)'}), '(UserFixture, many=False)\n', (2876, 2901), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((2919, 2954), 'marshmallow_sqlalchemy.auto_field', 'auto_field', ([], {'missing': 'datetime.utcnow'}), '(missing=datetime.utcnow)\n', (2929, 2954), False, 'from marshmallow_sqlalchemy import SQLAlchemyAutoSchema, auto_field\n'), ((2970, 2982), 'marshmallow_sqlalchemy.auto_field', 'auto_field', ([], {}), '()\n', (2980, 2982), False, 'from marshmallow_sqlalchemy import SQLAlchemyAutoSchema, auto_field\n'), ((2994, 3023), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['TagFixture'], {'many': '(True)'}), '(TagFixture, many=True)\n', (3000, 3023), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((3149, 3179), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['LogFixture'], {'many': '(False)'}), '(LogFixture, many=False)\n', (3155, 3179), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((3191, 3222), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['UserFixture'], {'many': '(False)'}), '(UserFixture, many=False)\n', (3197, 3222), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((3236, 3269), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['ActionFixture'], {'many': '(False)'}), '(ActionFixture, many=False)\n', (3242, 3269), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((3395, 3425), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['LogFixture'], {'many': '(False)'}), '(LogFixture, many=False)\n', (3401, 3425), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((3437, 3468), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['RoleFixture'], {'many': 
'(False)'}), '(RoleFixture, many=False)\n', (3443, 3468), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((3482, 3515), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['ActionFixture'], {'many': '(False)'}), '(ActionFixture, many=False)\n', (3488, 3515), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((3656, 3668), 'marshmallow_sqlalchemy.auto_field', 'auto_field', ([], {}), '()\n', (3666, 3668), False, 'from marshmallow_sqlalchemy import SQLAlchemyAutoSchema, auto_field\n'), ((3680, 3711), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['UserFixture'], {'many': '(False)'}), '(UserFixture, many=False)\n', (3686, 3711), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((3725, 3756), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['FieldFixture'], {'many': '(True)'}), '(FieldFixture, many=True)\n', (3731, 3756), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((3780, 3809), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['UserPermissionFixture'], {}), '(UserPermissionFixture)\n', (3786, 3809), False, 'from marshmallow_sqlalchemy.fields import Nested\n'), ((3833, 3862), 'marshmallow_sqlalchemy.fields.Nested', 'Nested', (['RolePermissionFixture'], {}), '(RolePermissionFixture)\n', (3839, 3862), False, 'from marshmallow_sqlalchemy.fields import Nested\n')]
|
# -*- coding: utf-8 -*-
import graphene
from graphene import relay
from graphene_gae import NdbObjectType, NdbConnectionField
from messaging.models.accounts import (
Account as AccountModel,
create,
update,
delete,
generate_api_key,
)
from messaging.models.services import Service as ServiceModel
from messaging.models.messages import Message as MessageModel
from messaging.schema.services import Service as ServiceType
from messaging.schema.messages import Message as MessageType
from messaging.utils import pick
from messaging.helpers import get_key
from messaging.exceptions import ExecutionUnauthorized
class Account(NdbObjectType):
class Meta:
model = AccountModel
exclude_fields = AccountModel._excluded_keys
interfaces = (relay.Node,)
services = NdbConnectionField(ServiceType)
def resolve_services(self, info, **args):
return ServiceModel.query(ancestor=self.key)
messages = NdbConnectionField(MessageType)
def resolve_messages(self, info, **args):
return MessageModel.query(ancestor=self.key)
@classmethod
def accounts_resolver(cls, root, info):
return AccountModel.query(ancestor=info.context.user_key)
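
# The resolvers above all scope their queries with ancestor=..., restricting
# results to one entity group; in Cloud Datastore/NDB, ancestor queries are
# also strongly consistent.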
class CreateAccount(relay.ClientIDMutation):
class Input:
site = graphene.String(required=True)
name = graphene.String(required=True)
account = graphene.Field(Account)
@classmethod
def mutate_and_get_payload(cls, root, info, **input):
account = create(
fields=cls.Input._meta.fields.keys(),
user=info.context.user_key,
body=input,
as_obj=True,
)
return CreateAccount(account=account)
class UpdateAccount(relay.ClientIDMutation):
class Input:
id = graphene.ID(required=True)
site = graphene.String()
name = graphene.String()
account = graphene.Field(Account)
@classmethod
def mutate_and_get_payload(cls, root, info, **input):
account_key = get_key(input.get("id"))
if account_key.parent() != info.context.user_key:
raise ExecutionUnauthorized
account = update(
fields=filter(lambda x: x != "id", cls.Input._meta.fields.keys()),
account=account_key,
body=pick(["site", "name"], input),
as_obj=True,
)
return UpdateAccount(account=account)
class DeleteAccount(relay.ClientIDMutation):
class Input:
id = graphene.ID(required=True)
@classmethod
def mutate_and_get_payload(cls, root, info, id):
account_key = get_key(id)
if account_key.parent() != info.context.user_key:
raise ExecutionUnauthorized
delete(account_key)
return DeleteAccount()
class CreateAccountKey(relay.ClientIDMutation):
class Input:
id = graphene.ID(required=True)
key = graphene.String()
@classmethod
def mutate_and_get_payload(cls, root, info, **input):
account_key = get_key(input.get("id"))
if account_key.parent() != info.context.user_key:
raise ExecutionUnauthorized
key = generate_api_key(account_key)
return CreateAccountKey(key=key)
|
[
"graphene.String",
"messaging.models.accounts.Account.query",
"graphene.ID",
"messaging.models.accounts.generate_api_key",
"messaging.utils.pick",
"graphene_gae.NdbConnectionField",
"messaging.models.accounts.delete",
"messaging.models.messages.Message.query",
"messaging.models.services.Service.query",
"graphene.Field",
"messaging.helpers.get_key"
] |
[((810, 841), 'graphene_gae.NdbConnectionField', 'NdbConnectionField', (['ServiceType'], {}), '(ServiceType)\n', (828, 841), False, 'from graphene_gae import NdbObjectType, NdbConnectionField\n'), ((958, 989), 'graphene_gae.NdbConnectionField', 'NdbConnectionField', (['MessageType'], {}), '(MessageType)\n', (976, 989), False, 'from graphene_gae import NdbObjectType, NdbConnectionField\n'), ((1389, 1412), 'graphene.Field', 'graphene.Field', (['Account'], {}), '(Account)\n', (1403, 1412), False, 'import graphene\n'), ((1895, 1918), 'graphene.Field', 'graphene.Field', (['Account'], {}), '(Account)\n', (1909, 1918), False, 'import graphene\n'), ((2891, 2908), 'graphene.String', 'graphene.String', ([], {}), '()\n', (2906, 2908), False, 'import graphene\n'), ((904, 941), 'messaging.models.services.Service.query', 'ServiceModel.query', ([], {'ancestor': 'self.key'}), '(ancestor=self.key)\n', (922, 941), True, 'from messaging.models.services import Service as ServiceModel\n'), ((1052, 1089), 'messaging.models.messages.Message.query', 'MessageModel.query', ([], {'ancestor': 'self.key'}), '(ancestor=self.key)\n', (1070, 1089), True, 'from messaging.models.messages import Message as MessageModel\n'), ((1167, 1217), 'messaging.models.accounts.Account.query', 'AccountModel.query', ([], {'ancestor': 'info.context.user_key'}), '(ancestor=info.context.user_key)\n', (1185, 1217), True, 'from messaging.models.accounts import Account as AccountModel, create, update, delete, generate_api_key\n'), ((1297, 1327), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (1312, 1327), False, 'import graphene\n'), ((1343, 1373), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (1358, 1373), False, 'import graphene\n'), ((1787, 1813), 'graphene.ID', 'graphene.ID', ([], {'required': '(True)'}), '(required=True)\n', (1798, 1813), False, 'import graphene\n'), ((1829, 1846), 'graphene.String', 'graphene.String', ([], {}), '()\n', (1844, 1846), False, 'import graphene\n'), ((1862, 1879), 'graphene.String', 'graphene.String', ([], {}), '()\n', (1877, 1879), False, 'import graphene\n'), ((2484, 2510), 'graphene.ID', 'graphene.ID', ([], {'required': '(True)'}), '(required=True)\n', (2495, 2510), False, 'import graphene\n'), ((2604, 2615), 'messaging.helpers.get_key', 'get_key', (['id'], {}), '(id)\n', (2611, 2615), False, 'from messaging.helpers import get_key\n'), ((2722, 2741), 'messaging.models.accounts.delete', 'delete', (['account_key'], {}), '(account_key)\n', (2728, 2741), False, 'from messaging.models.accounts import Account as AccountModel, create, update, delete, generate_api_key\n'), ((2853, 2879), 'graphene.ID', 'graphene.ID', ([], {'required': '(True)'}), '(required=True)\n', (2864, 2879), False, 'import graphene\n'), ((3144, 3173), 'messaging.models.accounts.generate_api_key', 'generate_api_key', (['account_key'], {}), '(account_key)\n', (3160, 3173), False, 'from messaging.models.accounts import Account as AccountModel, create, update, delete, generate_api_key\n'), ((2295, 2324), 'messaging.utils.pick', 'pick', (["['site', 'name']", 'input'], {}), "(['site', 'name'], input)\n", (2299, 2324), False, 'from messaging.utils import pick\n')]
|
from datetime import datetime
import argparse
from config.settings import DEFAULT_TIME_FORMAT
def valid_date(date):
try:
return datetime.strptime(date,DEFAULT_TIME_FORMAT)
except ValueError:
msg = "Not a valid date: '{0}'.".format(date)
raise argparse.ArgumentTypeError(msg)
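
# Example (sketch, not from the original module): wiring valid_date into a parser;
# DEFAULT_TIME_FORMAT is assumed to be a strptime pattern from config.settings.
#   parser = argparse.ArgumentParser()
#   parser.add_argument('--start', type=valid_date,
#                       help='timestamp in DEFAULT_TIME_FORMAT')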
|
[
"datetime.datetime.strptime",
"argparse.ArgumentTypeError"
] |
[((142, 186), 'datetime.datetime.strptime', 'datetime.strptime', (['date', 'DEFAULT_TIME_FORMAT'], {}), '(date, DEFAULT_TIME_FORMAT)\n', (159, 186), False, 'from datetime import datetime\n'), ((282, 313), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['msg'], {}), '(msg)\n', (308, 313), False, 'import argparse\n')]
|
"""Crypto API Views"""
from typing import Optional
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_pem_private_key
from cryptography.x509 import load_pem_x509_certificate
from django.http.response import HttpResponse
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from django_filters import FilterSet
from django_filters.filters import BooleanFilter
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
from rest_framework.decorators import action
from rest_framework.fields import CharField, DateTimeField, IntegerField, SerializerMethodField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer, ValidationError
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
from authentik.api.decorators import permission_required
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
from authentik.crypto.builder import CertificateBuilder
from authentik.crypto.managed import MANAGED_KEY
from authentik.crypto.models import CertificateKeyPair
from authentik.events.models import Event, EventAction
LOGGER = get_logger()
class CertificateKeyPairSerializer(ModelSerializer):
"""CertificateKeyPair Serializer"""
cert_expiry = DateTimeField(source="certificate.not_valid_after", read_only=True)
cert_subject = SerializerMethodField()
private_key_available = SerializerMethodField()
private_key_type = SerializerMethodField()
certificate_download_url = SerializerMethodField()
private_key_download_url = SerializerMethodField()
def get_cert_subject(self, instance: CertificateKeyPair) -> str:
"""Get certificate subject as full rfc4514"""
return instance.certificate.subject.rfc4514_string()
def get_private_key_available(self, instance: CertificateKeyPair) -> bool:
"""Show if this keypair has a private key configured or not"""
return instance.key_data != "" and instance.key_data is not None
def get_private_key_type(self, instance: CertificateKeyPair) -> Optional[str]:
"""Get the private key's type, if set"""
key = instance.private_key
if key:
return key.__class__.__name__.replace("_", "").lower().replace("privatekey", "")
return None
def get_certificate_download_url(self, instance: CertificateKeyPair) -> str:
"""Get URL to download certificate"""
return (
reverse(
"authentik_api:certificatekeypair-view-certificate",
kwargs={"pk": instance.pk},
)
+ "?download"
)
def get_private_key_download_url(self, instance: CertificateKeyPair) -> str:
"""Get URL to download private key"""
return (
reverse(
"authentik_api:certificatekeypair-view-private-key",
kwargs={"pk": instance.pk},
)
+ "?download"
)
def validate_certificate_data(self, value: str) -> str:
"""Verify that input is a valid PEM x509 Certificate"""
try:
# Cast to string to fully load and parse certificate
# Prevents issues like https://github.com/goauthentik/authentik/issues/2082
str(load_pem_x509_certificate(value.encode("utf-8"), default_backend()))
except ValueError as exc:
LOGGER.warning("Failed to load certificate", exc=exc)
raise ValidationError("Unable to load certificate.")
return value
def validate_key_data(self, value: str) -> str:
"""Verify that input is a valid PEM Key"""
# Since this field is optional, data can be empty.
if value != "":
try:
# Cast to string to fully load and parse certificate
# Prevents issues like https://github.com/goauthentik/authentik/issues/2082
str(
load_pem_private_key(
str.encode("\n".join([x.strip() for x in value.split("\n")])),
password=None,
backend=default_backend(),
)
)
except (ValueError, TypeError) as exc:
LOGGER.warning("Failed to load private key", exc=exc)
raise ValidationError("Unable to load private key (possibly encrypted?).")
return value
class Meta:
model = CertificateKeyPair
fields = [
"pk",
"name",
"fingerprint_sha256",
"fingerprint_sha1",
"certificate_data",
"key_data",
"cert_expiry",
"cert_subject",
"private_key_available",
"private_key_type",
"certificate_download_url",
"private_key_download_url",
"managed",
]
extra_kwargs = {
"key_data": {"write_only": True},
"certificate_data": {"write_only": True},
}
class CertificateDataSerializer(PassiveSerializer):
"""Get CertificateKeyPair's data"""
data = CharField(read_only=True)
class CertificateGenerationSerializer(PassiveSerializer):
"""Certificate generation parameters"""
common_name = CharField()
subject_alt_name = CharField(required=False, allow_blank=True, label=_("Subject-alt name"))
validity_days = IntegerField(initial=365)
class CertificateKeyPairFilter(FilterSet):
"""Filter for certificates"""
has_key = BooleanFilter(
label="Only return certificate-key pairs with keys", method="filter_has_key"
)
# pylint: disable=unused-argument
def filter_has_key(self, queryset, name, value): # pragma: no cover
"""Only return certificate-key pairs with keys"""
return queryset.exclude(key_data__exact="")
class Meta:
model = CertificateKeyPair
fields = ["name", "managed"]
class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
"""CertificateKeyPair Viewset"""
queryset = CertificateKeyPair.objects.exclude(managed=MANAGED_KEY)
serializer_class = CertificateKeyPairSerializer
filterset_class = CertificateKeyPairFilter
ordering = ["name"]
search_fields = ["name"]
@permission_required(None, ["authentik_crypto.add_certificatekeypair"])
@extend_schema(
request=CertificateGenerationSerializer(),
responses={
200: CertificateKeyPairSerializer,
400: OpenApiResponse(description="Bad request"),
},
)
@action(detail=False, methods=["POST"])
def generate(self, request: Request) -> Response:
"""Generate a new, self-signed certificate-key pair"""
data = CertificateGenerationSerializer(data=request.data)
if not data.is_valid():
return Response(data.errors, status=400)
builder = CertificateBuilder()
builder.common_name = data.validated_data["common_name"]
builder.build(
subject_alt_names=data.validated_data.get("subject_alt_name", "").split(","),
validity_days=int(data.validated_data["validity_days"]),
)
instance = builder.save()
serializer = self.get_serializer(instance)
return Response(serializer.data)
@extend_schema(
parameters=[
OpenApiParameter(
name="download",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.BOOL,
)
],
responses={200: CertificateDataSerializer(many=False)},
)
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name, unused-argument
def view_certificate(self, request: Request, pk: str) -> Response:
"""Return certificate-key pairs certificate and log access"""
certificate: CertificateKeyPair = self.get_object()
Event.new( # noqa # nosec
EventAction.SECRET_VIEW,
secret=certificate,
type="certificate",
).from_http(request)
if "download" in request.query_params:
# Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
response = HttpResponse(
certificate.certificate_data, content_type="application/x-pem-file"
)
response[
"Content-Disposition"
] = f'attachment; filename="{certificate.name}_certificate.pem"'
return response
return Response(CertificateDataSerializer({"data": certificate.certificate_data}).data)
@extend_schema(
parameters=[
OpenApiParameter(
name="download",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.BOOL,
)
],
responses={200: CertificateDataSerializer(many=False)},
)
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name, unused-argument
def view_private_key(self, request: Request, pk: str) -> Response:
"""Return certificate-key pairs private key and log access"""
certificate: CertificateKeyPair = self.get_object()
Event.new( # noqa # nosec
EventAction.SECRET_VIEW,
secret=certificate,
type="private_key",
).from_http(request)
if "download" in request.query_params:
# Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
response = HttpResponse(certificate.key_data, content_type="application/x-pem-file")
response[
"Content-Disposition"
] = f'attachment; filename="{certificate.name}_private_key.pem"'
return response
return Response(CertificateDataSerializer({"data": certificate.key_data}).data)
|
[
"rest_framework.fields.IntegerField",
"authentik.api.decorators.permission_required",
"rest_framework.response.Response",
"structlog.stdlib.get_logger",
"drf_spectacular.utils.OpenApiResponse",
"authentik.crypto.builder.CertificateBuilder",
"rest_framework.fields.SerializerMethodField",
"rest_framework.fields.CharField",
"authentik.crypto.models.CertificateKeyPair.objects.exclude",
"django_filters.filters.BooleanFilter",
"django.utils.translation.gettext_lazy",
"django.urls.reverse",
"drf_spectacular.utils.OpenApiParameter",
"authentik.events.models.Event.new",
"cryptography.hazmat.backends.default_backend",
"rest_framework.fields.DateTimeField",
"rest_framework.decorators.action",
"django.http.response.HttpResponse",
"rest_framework.serializers.ValidationError"
] |
[((1371, 1383), 'structlog.stdlib.get_logger', 'get_logger', ([], {}), '()\n', (1381, 1383), False, 'from structlog.stdlib import get_logger\n'), ((1498, 1565), 'rest_framework.fields.DateTimeField', 'DateTimeField', ([], {'source': '"""certificate.not_valid_after"""', 'read_only': '(True)'}), "(source='certificate.not_valid_after', read_only=True)\n", (1511, 1565), False, 'from rest_framework.fields import CharField, DateTimeField, IntegerField, SerializerMethodField\n'), ((1585, 1608), 'rest_framework.fields.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (1606, 1608), False, 'from rest_framework.fields import CharField, DateTimeField, IntegerField, SerializerMethodField\n'), ((1637, 1660), 'rest_framework.fields.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (1658, 1660), False, 'from rest_framework.fields import CharField, DateTimeField, IntegerField, SerializerMethodField\n'), ((1684, 1707), 'rest_framework.fields.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (1705, 1707), False, 'from rest_framework.fields import CharField, DateTimeField, IntegerField, SerializerMethodField\n'), ((1740, 1763), 'rest_framework.fields.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (1761, 1763), False, 'from rest_framework.fields import CharField, DateTimeField, IntegerField, SerializerMethodField\n'), ((1795, 1818), 'rest_framework.fields.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (1816, 1818), False, 'from rest_framework.fields import CharField, DateTimeField, IntegerField, SerializerMethodField\n'), ((5333, 5358), 'rest_framework.fields.CharField', 'CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (5342, 5358), False, 'from rest_framework.fields import CharField, DateTimeField, IntegerField, SerializerMethodField\n'), ((5482, 5493), 'rest_framework.fields.CharField', 'CharField', ([], {}), '()\n', (5491, 5493), False, 'from rest_framework.fields import CharField, DateTimeField, IntegerField, SerializerMethodField\n'), ((5610, 5635), 'rest_framework.fields.IntegerField', 'IntegerField', ([], {'initial': '(365)'}), '(initial=365)\n', (5622, 5635), False, 'from rest_framework.fields import CharField, DateTimeField, IntegerField, SerializerMethodField\n'), ((5730, 5826), 'django_filters.filters.BooleanFilter', 'BooleanFilter', ([], {'label': '"""Only return certificate-key pairs with keys"""', 'method': '"""filter_has_key"""'}), "(label='Only return certificate-key pairs with keys', method=\n 'filter_has_key')\n", (5743, 5826), False, 'from django_filters.filters import BooleanFilter\n'), ((6262, 6317), 'authentik.crypto.models.CertificateKeyPair.objects.exclude', 'CertificateKeyPair.objects.exclude', ([], {'managed': 'MANAGED_KEY'}), '(managed=MANAGED_KEY)\n', (6296, 6317), False, 'from authentik.crypto.models import CertificateKeyPair\n'), ((6476, 6546), 'authentik.api.decorators.permission_required', 'permission_required', (['None', "['authentik_crypto.add_certificatekeypair']"], {}), "(None, ['authentik_crypto.add_certificatekeypair'])\n", (6495, 6546), False, 'from authentik.api.decorators import permission_required\n'), ((6768, 6806), 'rest_framework.decorators.action', 'action', ([], {'detail': '(False)', 'methods': "['POST']"}), "(detail=False, methods=['POST'])\n", (6774, 6806), False, 'from rest_framework.decorators import action\n'), ((7791, 7853), 'rest_framework.decorators.action', 'action', ([], {'detail': '(True)', 'pagination_class': 'None', 'filter_backends': '[]'}), '(detail=True, pagination_class=None, filter_backends=[])\n', (7797, 7853), False, 'from rest_framework.decorators import action\n'), ((9094, 9156), 'rest_framework.decorators.action', 'action', ([], {'detail': '(True)', 'pagination_class': 'None', 'filter_backends': '[]'}), '(detail=True, pagination_class=None, filter_backends=[])\n', (9100, 9156), False, 'from rest_framework.decorators import action\n'), ((7093, 7113), 'authentik.crypto.builder.CertificateBuilder', 'CertificateBuilder', ([], {}), '()\n', (7111, 7113), False, 'from authentik.crypto.builder import CertificateBuilder\n'), ((7471, 7496), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (7479, 7496), False, 'from rest_framework.response import Response\n'), ((2682, 2774), 'django.urls.reverse', 'reverse', (['"""authentik_api:certificatekeypair-view-certificate"""'], {'kwargs': "{'pk': instance.pk}"}), "('authentik_api:certificatekeypair-view-certificate', kwargs={'pk':\n instance.pk})\n", (2689, 2774), False, 'from django.urls import reverse\n'), ((3011, 3103), 'django.urls.reverse', 'reverse', (['"""authentik_api:certificatekeypair-view-private-key"""'], {'kwargs': "{'pk': instance.pk}"}), "('authentik_api:certificatekeypair-view-private-key', kwargs={'pk':\n instance.pk})\n", (3018, 3103), False, 'from django.urls import reverse\n'), ((5567, 5588), 'django.utils.translation.gettext_lazy', '_', (['"""Subject-alt name"""'], {}), "('Subject-alt name')\n", (5568, 5588), True, 'from django.utils.translation import gettext_lazy as _\n'), ((7041, 7074), 'rest_framework.response.Response', 'Response', (['data.errors'], {'status': '(400)'}), '(data.errors, status=400)\n', (7049, 7074), False, 'from rest_framework.response import Response\n'), ((8427, 8513), 'django.http.response.HttpResponse', 'HttpResponse', (['certificate.certificate_data'], {'content_type': '"""application/x-pem-file"""'}), "(certificate.certificate_data, content_type=\n 'application/x-pem-file')\n", (8439, 8513), False, 'from django.http.response import HttpResponse\n'), ((9730, 9803), 'django.http.response.HttpResponse', 'HttpResponse', (['certificate.key_data'], {'content_type': '"""application/x-pem-file"""'}), "(certificate.key_data, content_type='application/x-pem-file')\n", (9742, 9803), False, 'from django.http.response import HttpResponse\n'), ((3677, 3723), 'rest_framework.serializers.ValidationError', 'ValidationError', (['"""Unable to load certificate."""'], {}), "('Unable to load certificate.')\n", (3692, 3723), False, 'from rest_framework.serializers import ModelSerializer, ValidationError\n'), ((6702, 6744), 'drf_spectacular.utils.OpenApiResponse', 'OpenApiResponse', ([], {'description': '"""Bad request"""'}), "(description='Bad request')\n", (6717, 6744), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema\n'), ((8115, 8189), 'authentik.events.models.Event.new', 'Event.new', (['EventAction.SECRET_VIEW'], {'secret': 'certificate', 'type': '"""certificate"""'}), "(EventAction.SECRET_VIEW, secret=certificate, type='certificate')\n", (8124, 8189), False, 'from authentik.events.models import Event, EventAction\n'), ((7551, 7646), 'drf_spectacular.utils.OpenApiParameter', 'OpenApiParameter', ([], {'name': '"""download"""', 'location': 'OpenApiParameter.QUERY', 'type': 'OpenApiTypes.BOOL'}), "(name='download', location=OpenApiParameter.QUERY, type=\n OpenApiTypes.BOOL)\n", (7567, 7646), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema\n'), ((9418, 9492), 'authentik.events.models.Event.new', 'Event.new', (['EventAction.SECRET_VIEW'], {'secret': 'certificate', 'type': '"""private_key"""'}), "(EventAction.SECRET_VIEW, secret=certificate, type='private_key')\n", (9427, 9492), False, 'from authentik.events.models import Event, EventAction\n'), ((8854, 8949), 'drf_spectacular.utils.OpenApiParameter', 'OpenApiParameter', ([], {'name': '"""download"""', 'location': 'OpenApiParameter.QUERY', 'type': 'OpenApiTypes.BOOL'}), "(name='download', location=OpenApiParameter.QUERY, type=\n OpenApiTypes.BOOL)\n", (8870, 8949), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema\n'), ((3539, 3556), 'cryptography.hazmat.backends.default_backend', 'default_backend', ([], {}), '()\n', (3554, 3556), False, 'from cryptography.hazmat.backends import default_backend\n'), ((4533, 4601), 'rest_framework.serializers.ValidationError', 'ValidationError', (['"""Unable to load private key (possibly encrypted?)."""'], {}), "('Unable to load private key (possibly encrypted?).')\n", (4548, 4601), False, 'from rest_framework.serializers import ModelSerializer, ValidationError\n'), ((4331, 4348), 'cryptography.hazmat.backends.default_backend', 'default_backend', ([], {}), '()\n', (4346, 4348), False, 'from cryptography.hazmat.backends import default_backend\n')]
|
from django.test import TestCase
from django.urls import reverse
from rest_framework.test import APIClient
from rest_framework import status
from core.models import Item
from order.serializers import ItemSerializer
from core.tests.user_test_utils import create_user
from core.tests.order_test_utils import sample_order, sample_item
ITEMS_URL = reverse('order:item-list')
class PublicItemAPITest(TestCase):
def setUp(self):
self.client = APIClient()
def test_authentication_required(self):
response = self.client.get(ITEMS_URL)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateItemAPITest(TestCase):
def setUp(self):
self.client = APIClient()
self.user_data = {
'name': 'Name',
'email': '<EMAIL>',
'password': 'password'
}
self.user = create_user(**self.user_data)
self.client.force_authenticate(user=self.user)
def test_get_items_success(self):
Item.objects.create(user=self.user, name='Item 1')
Item.objects.create(user=self.user, name='Item 2')
items = Item.objects.all().order_by('-name')
serializer = ItemSerializer(items, many=True)
response = self.client.get(ITEMS_URL)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(serializer.data, response.data)
def test_get_items_user_specific(self):
other_user_data = self.user_data.copy()
other_user_data['email'] = '<EMAIL>'
other_user = create_user(**other_user_data)
item = Item.objects.create(user=self.user, name='Item 1')
Item.objects.create(user=other_user, name='Item 2')
response = self.client.get(ITEMS_URL)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
self.assertEqual(response.data[0]['name'], item.name)
def test_post_item_success(self):
payload = {'name': 'Item'}
response = self.client.post(ITEMS_URL, payload)
exists = Item.objects.filter(
user=self.user,
name=payload['name']
).exists()
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertTrue(exists)
def test_post_item_fail(self):
payload = {}
response = self.client.post(ITEMS_URL, payload)
exists = Item.objects.filter(
user=self.user,
name=None
).exists()
self.assertEqual(
response.status_code, status.HTTP_400_BAD_REQUEST
)
self.assertFalse(exists)
def test_filter_items_by_assignment(self):
order = sample_order(user=self.user)
item_1 = sample_item(user=self.user, name='Item 1')
item_2 = sample_item(user=self.user, name='Item 2')
order.items.add(item_1)
response_1 = self.client.get(ITEMS_URL, {'assigned': 1})
response_2 = self.client.get(ITEMS_URL)
self.assertEqual(response_1.status_code, status.HTTP_200_OK)
self.assertEqual(response_2.status_code, status.HTTP_200_OK)
self.assertEqual(len(response_1.data), 1)
self.assertEqual(len(response_2.data), 2)
self.assertEqual(response_1.data[0]['name'], item_1.name)
item_1_name_match = \
response_2.data[0]['name'] == item_1.name or \
response_2.data[1]['name'] == item_1.name
item_2_name_match = \
response_2.data[0]['name'] == item_2.name or \
response_2.data[1]['name'] == item_2.name
self.assertTrue(item_1_name_match)
self.assertTrue(item_2_name_match)
def test_filter_items_by_assignment_unique(self):
order_1 = sample_order(user=self.user)
order_2 = sample_order(user=self.user)
item = sample_item(user=self.user, name='Item 1')
sample_item(user=self.user, name='Item 2')
order_1.items.add(item)
order_2.items.add(item)
response = self.client.get(ITEMS_URL, {'assigned': 1})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
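# A minimal sketch of the queryset behaviour the tests above assume; the
# viewset name, the ``order`` reverse relation and the ``assigned`` flag
# handling are assumptions inferred from the assertions, not the project's
# actual view code.
from rest_framework import viewsets
class SketchItemViewSet(viewsets.ReadOnlyModelViewSet):
    serializer_class = ItemSerializer
    def get_queryset(self):
        # only the authenticated user's items, sorted by name descending
        queryset = Item.objects.filter(user=self.request.user).order_by('-name')
        if int(self.request.query_params.get('assigned', 0)):
            # keep only items attached to at least one order, deduplicated
            queryset = queryset.filter(order__isnull=False).distinct()
        return queryset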
|
[
"core.models.Item.objects.create",
"core.models.Item.objects.filter",
"core.tests.order_test_utils.sample_order",
"order.serializers.ItemSerializer",
"django.urls.reverse",
"core.models.Item.objects.all",
"rest_framework.test.APIClient",
"core.tests.user_test_utils.create_user",
"core.tests.order_test_utils.sample_item"
] |
[((347, 373), 'django.urls.reverse', 'reverse', (['"""order:item-list"""'], {}), "('order:item-list')\n", (354, 373), False, 'from django.urls import reverse\n'), ((454, 465), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (463, 465), False, 'from rest_framework.test import APIClient\n'), ((715, 726), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (724, 726), False, 'from rest_framework.test import APIClient\n'), ((879, 908), 'core.tests.user_test_utils.create_user', 'create_user', ([], {}), '(**self.user_data)\n', (890, 908), False, 'from core.tests.user_test_utils import create_user\n'), ((1011, 1061), 'core.models.Item.objects.create', 'Item.objects.create', ([], {'user': 'self.user', 'name': '"""Item 1"""'}), "(user=self.user, name='Item 1')\n", (1030, 1061), False, 'from core.models import Item\n'), ((1070, 1120), 'core.models.Item.objects.create', 'Item.objects.create', ([], {'user': 'self.user', 'name': '"""Item 2"""'}), "(user=self.user, name='Item 2')\n", (1089, 1120), False, 'from core.models import Item\n'), ((1196, 1228), 'order.serializers.ItemSerializer', 'ItemSerializer', (['items'], {'many': '(True)'}), '(items, many=True)\n', (1210, 1228), False, 'from order.serializers import ItemSerializer\n'), ((1560, 1590), 'core.tests.user_test_utils.create_user', 'create_user', ([], {}), '(**other_user_data)\n', (1571, 1590), False, 'from core.tests.user_test_utils import create_user\n'), ((1607, 1657), 'core.models.Item.objects.create', 'Item.objects.create', ([], {'user': 'self.user', 'name': '"""Item 1"""'}), "(user=self.user, name='Item 1')\n", (1626, 1657), False, 'from core.models import Item\n'), ((1666, 1717), 'core.models.Item.objects.create', 'Item.objects.create', ([], {'user': 'other_user', 'name': '"""Item 2"""'}), "(user=other_user, name='Item 2')\n", (1685, 1717), False, 'from core.models import Item\n'), ((2716, 2744), 'core.tests.order_test_utils.sample_order', 'sample_order', ([], {'user': 'self.user'}), '(user=self.user)\n', (2728, 2744), False, 'from core.tests.order_test_utils import sample_order, sample_item\n'), ((2763, 2805), 'core.tests.order_test_utils.sample_item', 'sample_item', ([], {'user': 'self.user', 'name': '"""Item 1"""'}), "(user=self.user, name='Item 1')\n", (2774, 2805), False, 'from core.tests.order_test_utils import sample_order, sample_item\n'), ((2823, 2865), 'core.tests.order_test_utils.sample_item', 'sample_item', ([], {'user': 'self.user', 'name': '"""Item 2"""'}), "(user=self.user, name='Item 2')\n", (2834, 2865), False, 'from core.tests.order_test_utils import sample_order, sample_item\n'), ((3767, 3795), 'core.tests.order_test_utils.sample_order', 'sample_order', ([], {'user': 'self.user'}), '(user=self.user)\n', (3779, 3795), False, 'from core.tests.order_test_utils import sample_order, sample_item\n'), ((3814, 3842), 'core.tests.order_test_utils.sample_order', 'sample_order', ([], {'user': 'self.user'}), '(user=self.user)\n', (3826, 3842), False, 'from core.tests.order_test_utils import sample_order, sample_item\n'), ((3859, 3901), 'core.tests.order_test_utils.sample_item', 'sample_item', ([], {'user': 'self.user', 'name': '"""Item 1"""'}), "(user=self.user, name='Item 1')\n", (3870, 3901), False, 'from core.tests.order_test_utils import sample_order, sample_item\n'), ((3910, 3952), 'core.tests.order_test_utils.sample_item', 'sample_item', ([], {'user': 'self.user', 'name': '"""Item 2"""'}), "(user=self.user, name='Item 2')\n", (3921, 3952), False, 'from core.tests.order_test_utils import sample_order, sample_item\n'), ((1138, 1156), 'core.models.Item.objects.all', 'Item.objects.all', ([], {}), '()\n', (1154, 1156), False, 'from core.models import Item\n'), ((2092, 2149), 'core.models.Item.objects.filter', 'Item.objects.filter', ([], {'user': 'self.user', 'name': "payload['name']"}), "(user=self.user, name=payload['name'])\n", (2111, 2149), False, 'from core.models import Item\n'), ((2430, 2476), 'core.models.Item.objects.filter', 'Item.objects.filter', ([], {'user': 'self.user', 'name': 'None'}), '(user=self.user, name=None)\n', (2449, 2476), False, 'from core.models import Item\n')]
|
from unittest import TestCase
from databuild import settings
from databuild.adapters.locmem.models import LocMemBook
from databuild.functions import data
settings.LANGUAGES['noop'] = 'databuild.environments.noop.NoopEnvironment'
class FunctionsTestCase(TestCase):
def test_cross(self):
a_data = [
{'id': 1, 'x': 2, 'y': 3},
{'id': 2, 'x': 2, 'y': 3.5},
{'id': 3, 'x': 1, 'y': 3.5},
]
b_data = [
{'id': 3, 'z': 3},
{'id': 1, 'z': 4},
{'id': 2, 'z': 4.5},
]
book = LocMemBook('project1')
env = book.operator.languages['noop']
a = book.add_sheet('a', ['id', 'x', 'y'])
b = book.add_sheet('b', ['id', 'z'])
a.extend(a_data)
b.extend(b_data)
result = [data.cross(env, book, row, 'b', 'z', 'id') for row in a.all()]
assert result == [4, 4.5, 3]
def test_column(self):
a_data = [
{'id': 1, 'x': 2, 'y': 3},
{'id': 2, 'x': 2, 'y': 3.5},
{'id': 3, 'x': 1, 'y': 3.5},
]
b_data = [
{'id': 3, 'z': 3},
{'id': 1, 'z': 4},
{'id': 2, 'z': 4.5},
]
book = LocMemBook('project1')
env = book.operator.languages['noop']
a = book.add_sheet('a', ['id', 'x', 'y'])
b = book.add_sheet('b', ['id', 'z'])
a.extend(a_data)
b.extend(b_data)
result = data.column(env, book, 'a', 'b', 'z', 'id')
assert result == [4, 4.5, 3]
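# For reference, a plain-Python sketch of the semantics the assertions above
# rely on: ``cross`` resolves one row's value from another sheet via a shared
# key and ``column`` does that for every row. This mirrors the observed
# behaviour; it is not the real databuild implementation.
def cross_sketch(row, other_rows, column, key):
    # return ``column`` from the row in the other sheet whose ``key`` matches
    return next(other[column] for other in other_rows if other[key] == row[key])
def column_sketch(rows, other_rows, column, key):
    return [cross_sketch(row, other_rows, column, key) for row in rows]
assert column_sketch(
    [{'id': 1}, {'id': 2}, {'id': 3}],
    [{'id': 3, 'z': 3}, {'id': 1, 'z': 4}, {'id': 2, 'z': 4.5}],
    'z', 'id',
) == [4, 4.5, 3]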
|
[
"databuild.functions.data.cross",
"databuild.adapters.locmem.models.LocMemBook",
"databuild.functions.data.column"
] |
[((584, 606), 'databuild.adapters.locmem.models.LocMemBook', 'LocMemBook', (['"""project1"""'], {}), "('project1')\n", (594, 606), False, 'from databuild.adapters.locmem.models import LocMemBook\n'), ((1237, 1259), 'databuild.adapters.locmem.models.LocMemBook', 'LocMemBook', (['"""project1"""'], {}), "('project1')\n", (1247, 1259), False, 'from databuild.adapters.locmem.models import LocMemBook\n'), ((1471, 1514), 'databuild.functions.data.column', 'data.column', (['env', 'book', '"""a"""', '"""b"""', '"""z"""', '"""id"""'], {}), "(env, book, 'a', 'b', 'z', 'id')\n", (1482, 1514), False, 'from databuild.functions import data\n'), ((819, 861), 'databuild.functions.data.cross', 'data.cross', (['env', 'book', 'row', '"""b"""', '"""z"""', '"""id"""'], {}), "(env, book, row, 'b', 'z', 'id')\n", (829, 861), False, 'from databuild.functions import data\n')]
|
from starcluster.clustersetup import ClusterSetup
from starcluster.logger import log
class PicardInstaller(ClusterSetup):
def run(self, nodes, master, user, user_shell, volumes):
for node in nodes:
log.info("Installing Picard tools 1.121 on %s" % (node.alias))
node.ssh.execute('wget -c -P /opt/software/picard https://github.com/broadinstitute/picard/releases/download/1.121/picard-tools-1.121.zip')
node.ssh.execute('unzip -d /opt/software/picard /opt/software/picard/picard-tools-1.121.zip')
node.ssh.execute('find /opt/software/picard/picard-tools-1.121/*.jar -exec chmod 755 {} +')
node.ssh.execute('mkdir -p /usr/local/Modules/applications/picard/;touch /usr/local/Modules/applications/picard/1.121')
node.ssh.execute('echo "#%Module" >> /usr/local/Modules/applications/picard/1.121')
node.ssh.execute('echo "set root /opt/software/picard/picard-tools-1.121" >> /usr/local/Modules/applications/picard/1.121')
node.ssh.execute('echo -e "prepend-path\tPATH\t\$root" >> /usr/local/Modules/applications/picard/1.121')
|
[
"starcluster.logger.log.info"
] |
[((205, 265), 'starcluster.logger.log.info', 'log.info', (["('Installing Picard tools 1.121 on %s' % node.alias)"], {}), "('Installing Picard tools 1.121 on %s' % node.alias)\n", (213, 265), False, 'from starcluster.logger import log\n')]
|
#!/usr/bin/env python3
import argparse
import sys
from fennec import __version__ as VERSION, __citation__ as CITATION
if __name__ == "__main__":
parser = argparse.ArgumentParser(
prog="fennec",
description="Fennec",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument("-v", "--version", action="version", version=VERSION)
parser.add_argument("--citation", action="version", version=CITATION)
subparsers = parser.add_subparsers()
# - "model" subparser
m_parser = subparsers.add_parser(
"model",
help="Extract features from sequences",
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
m_req = m_parser.add_argument_group("mandatory arguments")
m_req.add_argument(
"--input",
default=argparse.SUPPRESS,
help="Input file",
required=True,
metavar="FASTA",
)
m_req.add_argument("--_PROG", default="model", help=argparse.SUPPRESS)
m_opt = m_parser.add_argument_group("optionnal arguments")
m_opt.add_argument(
"--min_length", type=int, default=1000, help="Minimum sequence length to consider"
)
m_opt.add_argument("--chunk_size", type=int, default=10000, help="Chunk size")
m_opt.add_argument(
"--overlap",
type=str,
default="auto",
help="Overlap between chunks. Must be 'auto' or 0+",
)
m_opt.add_argument("--outfile", default="<input.h5>", help="Output file")
m_opt.add_argument(
"--verbosity", type=int, default=3, choices=[0, 1, 2, 3, 4], help="Verbosity level"
)
m_opt.add_argument("--n_jobs", type=int, default=1, help="Number of CPU to use")
m_opt.add_argument(
"-h", "--help", action="help", help="show this help message and exit"
)
# - "describe" subparser
d_parser = subparsers.add_parser(
"describe",
help="Describe modelled sequences",
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
d_req = d_parser.add_argument_group("mandatory arguments")
d_req.add_argument(
"--input", required=True, default=argparse.SUPPRESS, help="Input HDF5 file"
)
d_req.add_argument("--_PROG", default="describe", help=argparse.SUPPRESS)
    d_opt = d_parser.add_argument_group("optional arguments")
d_opt.add_argument(
"--db_size",
action="store_true",
help="Print number of sequence fragements in the database",
default=argparse.SUPPRESS,
)
d_opt.add_argument(
"--list_models",
action="store_true",
help="List available models in the database",
default=argparse.SUPPRESS,
)
d_opt.add_argument(
"--repack",
action="store_true",
help=argparse.SUPPRESS,
# help="Repack the HDF5 file",
default=argparse.SUPPRESS,
)
d_opt.add_argument(
"-h", "--help", action="help", help="show this help message and exit"
)
# - "extract" subparser
e_parser = subparsers.add_parser(
"extract",
help="Extract bins from modelled sequences",
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
e_req = e_parser.add_argument_group("mandatory arguments")
e_req.add_argument(
"--input", required=True, default=argparse.SUPPRESS, help="Input HDF5 file"
)
e_req.add_argument(
"--models",
required=True,
default=["kmers4", "kmers110011", "contig2vec4", "cov_gattaca31"],
nargs="+",
help="List of models to use",
metavar="MODEL",
)
e_req.add_argument("--_PROG", default="extract", help=argparse.SUPPRESS)
    e_opt = e_parser.add_argument_group("optional arguments")
e_opt.add_argument("--label", type=str, default="fennec", help="Label")
e_opt.add_argument(
"--max_iter", type=int, default=25, help="Maximum number of iteration"
)
e_opt.add_argument(
"--max_cluster", type=int, default=600, help="Maximum number of cluster"
)
e_opt.add_argument(
"--kpca_inertia",
type=float,
default=0.85,
help="Inertia to keep after kernel PCA",
metavar="[0.0-1.0]",
)
e_opt.add_argument(
"--kpca_t",
type=float,
default=0.33,
help="Proportion of data to use to fit kernel PCA",
metavar="[0.0-1.0]",
)
e_opt.add_argument(
"--ppmode",
type=str,
default="reassigntiny",
choices=["nopostprocessing", "reassigntiny"],
help="Postprocessing mode",
)
e_opt.add_argument(
"--verbosity", type=int, default=3, choices=[0, 1, 2, 3, 4], help="Verbosity level"
)
e_opt.add_argument(
"--min_cluster_size",
type=int,
default=50,
help=argparse.SUPPRESS,
# help="Minimum number of sequence per cluster",
)
e_opt.add_argument("--n_jobs", type=int, default=1, help="Number of CPU to use")
e_opt.add_argument(
"-h", "--help", action="help", help="show this help message and exit"
)
args = parser.parse_args()
print(args)
if not args.__dict__: # print usage if there is no args
parser.error("No argument given")
elif args._PROG == "model":
print("== model")
sys.exit(0)
elif args._PROG == "describe":
print("== describe")
sys.exit(0)
elif args._PROG == "extract":
print("== extract")
sys.exit(0)
else:
print("== ERROR ==")
sys.exit(1)
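# Example invocations this parser accepts (a sketch; the file names are
# hypothetical):
#   fennec model --input contigs.fasta --min_length 1000 --n_jobs 4
#   fennec describe --input contigs.h5 --list_models
#   fennec extract --input contigs.h5 --models kmers4 contig2vec4 --label run1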
|
[
"argparse.ArgumentParser",
"sys.exit"
] |
[((162, 282), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""fennec"""', 'description': '"""Fennec"""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(prog='fennec', description='Fennec',\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n", (185, 282), False, 'import argparse\n'), ((5381, 5392), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (5389, 5392), False, 'import sys\n'), ((5465, 5476), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (5473, 5476), False, 'import sys\n'), ((5547, 5558), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (5555, 5558), False, 'import sys\n'), ((5606, 5617), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5614, 5617), False, 'import sys\n')]
|
# Generated by Django 2.0.2 on 2018-03-03 02:54
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('Contacts', '0002_auto_20180303_0253'),
]
operations = [
migrations.RenameModel(
old_name='Contacts',
new_name='Contact',
),
]
|
[
"django.db.migrations.RenameModel"
] |
[((228, 291), 'django.db.migrations.RenameModel', 'migrations.RenameModel', ([], {'old_name': '"""Contacts"""', 'new_name': '"""Contact"""'}), "(old_name='Contacts', new_name='Contact')\n", (250, 291), False, 'from django.db import migrations\n')]
|
# coding: utf-8
'''
<NAME>ard corpus magnitude check.
'''
import io
import os
def main():
count = 0
for filename in os.listdir('./data/bc_hansard'):
        with io.open('./data/bc_hansard/%s' % filename, encoding='utf-8') as text_file:
count += len(text_file.read().split())
print(count)
if __name__ == '__main__':
main()
|
[
"os.listdir",
"io.open"
] |
[((119, 150), 'os.listdir', 'os.listdir', (['"""./data/bc_hansard"""'], {}), "('./data/bc_hansard')\n", (129, 150), False, 'import os\n'), ((159, 219), 'io.open', 'io.open', (["('./data/bc_hansard/%s' % filename)"], {'encoding': '"""utf-8"""'}), "('./data/bc_hansard/%s' % filename, encoding='utf-8')\n", (166, 219), False, 'import io\n')]
|
from PIL import Image, ImageDraw, ImageFont
class _Image:
@staticmethod
def draw_picture_with_text(image_file, text, size, x, y):
image = Image.open(image_file)
draw = ImageDraw.Draw(image)
width_image, height_image = image.size
font = ImageFont.truetype("arial.ttf", size=size)
draw.text((x, y), text, font=font, fill='white')
image.save(f'{image_file}')
@staticmethod
def draw_cross_on_picture(image_file, color, width):
with Image.open(image_file) as im:
draw = ImageDraw.Draw(im)
draw.line((0, 0) + im.size, fill=color, width=width)
draw.line((0, im.size[1], im.size[0], 0), fill=color, width=width)
            # write the result back to the source file
im.save(image_file)
@staticmethod
def draw_rect_on_picture(image_file, x0, y0, x1, y1, color, width):
with Image.open(image_file) as im:
draw = ImageDraw.Draw(im)
            draw.rectangle((x0, y0, x1, y1), outline=color, width=width)
            # write the result back to the source file
            im.save(image_file)
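# Example usage (file name hypothetical; "arial.ttf" must be resolvable by
# PIL, which outside Windows usually means passing a full font path instead):
#   _Image.draw_picture_with_text('photo.jpg', 'hello', size=24, x=10, y=10)
#   _Image.draw_cross_on_picture('photo.jpg', color='red', width=3)
#   _Image.draw_rect_on_picture('photo.jpg', 10, 10, 200, 150, color='red', width=3)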
|
[
"PIL.ImageDraw.Draw",
"PIL.ImageFont.truetype",
"PIL.Image.open"
] |
[((156, 178), 'PIL.Image.open', 'Image.open', (['image_file'], {}), '(image_file)\n', (166, 178), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((194, 215), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['image'], {}), '(image)\n', (208, 215), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((278, 320), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['"""arial.ttf"""'], {'size': 'size'}), "('arial.ttf', size=size)\n", (296, 320), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((502, 524), 'PIL.Image.open', 'Image.open', (['image_file'], {}), '(image_file)\n', (512, 524), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((551, 569), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['im'], {}), '(im)\n', (565, 569), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((880, 902), 'PIL.Image.open', 'Image.open', (['image_file'], {}), '(image_file)\n', (890, 902), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((929, 947), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['im'], {}), '(im)\n', (943, 947), False, 'from PIL import Image, ImageDraw, ImageFont\n')]
|
# Module file for conductance measurements with the
# SR830. Implementing the good ideas of <NAME>
from typing import Union, Optional
from time import sleep
import numpy as np
import qcodes as qc
from qcodes.instrument.parameter import Parameter
from qdev_wrappers.sweep_functions import _do_measurement
from qcodes.instrument_drivers.QDev.QDac_channels import QDac as QDacch
from qdev_wrappers.T3.customised_instruments import SR830_T3
def do2Dconductance(outer_param: Parameter,
outer_start: Union[float, int],
outer_stop: Union[float, int],
outer_npts: int,
inner_param: Parameter,
inner_start: Union[float, int],
inner_stop: Union[float, int],
inner_npts: int,
lockin: SR830_T3,
delay: Optional[float]=None):
"""
Function to perform a sped-up 2D conductance measurement
Args:
outer_param: The outer loop voltage parameter
outer_start: The outer loop start voltage
outer_stop: The outer loop stop voltage
outer_npts: The number of points in the outer loop
inner_param: The inner loop voltage parameter
inner_start: The inner loop start voltage
inner_stop: The inner loop stop voltage
inner_npts: The number of points in the inner loop
lockin: The lock-in amplifier to use
delay: Delay to wait after setting inner parameter before triggering lockin.
If None will use default delay, otherwise used the supplied.
"""
station = qc.Station.default
sr = lockin
# Validate the instruments
if sr.name not in station.components:
raise KeyError('Unknown lock-in! Refusing to proceed until the '
'lock-in has been added to the station.')
if (outer_param._instrument.name not in station.components and
outer_param._instrument._parent.name not in station.components):
raise KeyError('Unknown instrument for outer parameter. '
'Please add that instrument to the station.')
if (inner_param._instrument.name not in station.components and
inner_param._instrument._parent.name not in station.components):
raise KeyError('Unknown instrument for inner parameter. '
'Please add that instrument to the station.')
tau = sr.time_constant()
min_delay = 0.002 # what's the physics behind this number?
if delay is None:
delay = tau + min_delay
# Prepare for the first iteration
# Some of these things have to be repeated during the loop
sr.buffer_reset()
sr.buffer_start()
#sr.buffer_trig_mode('ON')
sr.buffer_SR('Trigger')
sr.conductance.shape = (inner_npts,)
sr.conductance.setpoint_names = (inner_param.name,)
sr.conductance.setpoint_labels = (inner_param.label,)
sr.conductance.setpoint_units = ('V',)
sr.conductance.setpoints = (tuple(np.linspace(inner_start,
inner_stop,
inner_npts)),)
def trigger():
sleep(delay)
sr.send_trigger()
def prepare_buffer():
# here it should be okay to call ch1_databuffer... I think...
sr.ch1_databuffer.prepare_buffer_readout()
# For the dataset/plotting, put in the correct setpoints
sr.conductance.setpoint_names = (inner_param.name,)
sr.conductance.setpoint_labels = (inner_param.label,)
sr.conductance.setpoint_units = ('V',)
sr.conductance.setpoints = (tuple(np.linspace(inner_start,
inner_stop,
inner_npts)),)
def start_buffer():
sr.buffer_start()
sr.conductance.shape = (inner_npts,) # This is something
def reset_buffer():
sr.buffer_reset()
trig_task = qc.Task(trigger)
reset_task = qc.Task(reset_buffer)
start_task = qc.Task(start_buffer)
inner_loop = qc.Loop(inner_param.sweep(inner_start,
inner_stop,
num=inner_npts)).each(trig_task)
outer_loop = qc.Loop(outer_param.sweep(outer_start,
outer_stop,
num=outer_npts)).each(start_task,
inner_loop,
sr.conductance,
reset_task)
set_params = ((inner_param, inner_start, inner_stop),
(outer_param, outer_start, outer_stop))
meas_params = (sr.conductance,)
prepare_buffer()
qdac = None
# ensure that any waveform generator is unbound from the qdac channels that we step if
# we are stepping the qdac
    if isinstance(inner_param._instrument, QDacch):
        qdac = inner_param._instrument
        qdac.slope('Inf')
    if isinstance(outer_param._instrument, QDacch):
        qdac = outer_param._instrument
        qdac.slope('Inf')
if qdac:
qdac.fast_voltage_set(True) # now that we have unbound the function generators
# we don't need to do it in the loop
        qdac.voltage_set_dont_wait(False) # this is unsafe and highly experimental
plot, data = _do_measurement(outer_loop, set_params, meas_params, do_plots=True)
return plot, data
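# A call sketch (the instrument handles and gate parameters are assumptions;
# any two settable voltage parameters plus the wrapped SR830 will do):
#   plot, data = do2Dconductance(qdac.ch01_v, -1.0, 1.0, 51,
#                                qdac.ch02_v, -0.5, 0.5, 201,
#                                lockin=sr830)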
|
[
"qcodes.Task",
"qdev_wrappers.sweep_functions._do_measurement",
"numpy.linspace",
"time.sleep"
] |
[((3995, 4011), 'qcodes.Task', 'qc.Task', (['trigger'], {}), '(trigger)\n', (4002, 4011), True, 'import qcodes as qc\n'), ((4029, 4050), 'qcodes.Task', 'qc.Task', (['reset_buffer'], {}), '(reset_buffer)\n', (4036, 4050), True, 'import qcodes as qc\n'), ((4068, 4089), 'qcodes.Task', 'qc.Task', (['start_buffer'], {}), '(start_buffer)\n', (4075, 4089), True, 'import qcodes as qc\n'), ((5531, 5598), 'qdev_wrappers.sweep_functions._do_measurement', '_do_measurement', (['outer_loop', 'set_params', 'meas_params'], {'do_plots': '(True)'}), '(outer_loop, set_params, meas_params, do_plots=True)\n', (5546, 5598), False, 'from qdev_wrappers.sweep_functions import _do_measurement\n'), ((3187, 3199), 'time.sleep', 'sleep', (['delay'], {}), '(delay)\n', (3192, 3199), False, 'from time import sleep\n'), ((3007, 3055), 'numpy.linspace', 'np.linspace', (['inner_start', 'inner_stop', 'inner_npts'], {}), '(inner_start, inner_stop, inner_npts)\n', (3018, 3055), True, 'import numpy as np\n'), ((3650, 3698), 'numpy.linspace', 'np.linspace', (['inner_start', 'inner_stop', 'inner_npts'], {}), '(inner_start, inner_stop, inner_npts)\n', (3661, 3698), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Role'
db.create_table('pta_role', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('description', self.gf('django.db.models.fields.CharField')(max_length=500)),
))
db.send_create_signal('pta', ['Role'])
# Adding model 'Staff'
db.create_table('pta_staff', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('mob_no', self.gf('django.db.models.fields.CharField')(max_length=20)),
('adress', self.gf('django.db.models.fields.CharField')(max_length=100)),
('role', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['pta.Role'])),
('user', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['auth.User'], unique=True)),
))
db.send_create_signal('pta', ['Staff'])
# Adding model 'Language'
db.create_table('pta_language', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('language', self.gf('django.db.models.fields.CharField')(max_length=100)),
('language_code', self.gf('django.db.models.fields.CharField')(max_length=20)),
))
db.send_create_signal('pta', ['Language'])
# Adding model 'Patient'
db.create_table('pta_patient', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('first_name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('last_name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('initials', self.gf('django.db.models.fields.CharField')(max_length=3)),
('original_address', self.gf('django.db.models.fields.CharField')(max_length=400)),
('bed_no', self.gf('django.db.models.fields.IntegerField')()),
('ward_no', self.gf('django.db.models.fields.IntegerField')()),
('pta_cleared', self.gf('django.db.models.fields.BooleanField')(default=False)),
('language', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['pta.Language'])),
))
db.send_create_signal('pta', ['Patient'])
# Adding model 'Question'
db.create_table('pta_question', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=500)),
('question_type', self.gf('django.db.models.fields.CharField')(max_length=50)),
))
db.send_create_signal('pta', ['Question'])
# Adding model 'Answer'
db.create_table('pta_answer', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('question', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['pta.Question'])),
('text', self.gf('django.db.models.fields.CharField')(max_length=200)),
('isAnswerRight', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('pta', ['Answer'])
# Adding model 'PatientResponses'
db.create_table('pta_patientresponses', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('patient', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['pta.Patient'])),
('date', self.gf('django.db.models.fields.DateTimeField')()),
('answer', self.gf('django.db.models.fields.CharField')(max_length=200)),
('answerStatus', self.gf('django.db.models.fields.BooleanField')(default=False)),
('question', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['pta.Question'])),
))
db.send_create_signal('pta', ['PatientResponses'])
# Adding model 'PTAQuestionaire'
db.create_table('pta_ptaquestionaire', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('patient', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['pta.Patient'])),
('date', self.gf('django.db.models.fields.DateTimeField')()),
('correctAnswers', self.gf('django.db.models.fields.IntegerField')()),
('totalQuestions', self.gf('django.db.models.fields.IntegerField')()),
('staff', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
))
db.send_create_signal('pta', ['PTAQuestionaire'])
def backwards(self, orm):
# Deleting model 'Role'
db.delete_table('pta_role')
# Deleting model 'Staff'
db.delete_table('pta_staff')
# Deleting model 'Language'
db.delete_table('pta_language')
# Deleting model 'Patient'
db.delete_table('pta_patient')
# Deleting model 'Question'
db.delete_table('pta_question')
# Deleting model 'Answer'
db.delete_table('pta_answer')
# Deleting model 'PatientResponses'
db.delete_table('pta_patientresponses')
# Deleting model 'PTAQuestionaire'
db.delete_table('pta_ptaquestionaire')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('<PASSWORD>.fields.CharField', [], {'max_length': '<PASSWORD>'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'pta.answer': {
'Meta': {'object_name': 'Answer'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isAnswerRight': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['pta.Question']"}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'pta.language': {
'Meta': {'object_name': 'Language'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
'pta.patient': {
'Meta': {'object_name': 'Patient'},
'bed_no': ('django.db.models.fields.IntegerField', [], {}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initials': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['pta.Language']"}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'original_address': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'pta_cleared': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ward_no': ('django.db.models.fields.IntegerField', [], {})
},
'pta.patientresponses': {
'Meta': {'object_name': 'PatientResponses'},
'answer': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'answerStatus': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'patient': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['pta.Patient']"}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['pta.Question']"})
},
'pta.ptaquestionaire': {
'Meta': {'object_name': 'PTAQuestionaire'},
'correctAnswers': ('django.db.models.fields.IntegerField', [], {}),
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'patient': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['pta.Patient']"}),
'staff': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'totalQuestions': ('django.db.models.fields.IntegerField', [], {})
},
'pta.question': {
'Meta': {'object_name': 'Question'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'question_type': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'pta.role': {
'Meta': {'object_name': 'Role'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'pta.staff': {
'Meta': {'object_name': 'Staff'},
'adress': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mob_no': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['pta.Role']"}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['pta']
|
[
"south.db.db.delete_table",
"south.db.db.send_create_signal"
] |
[((577, 615), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""pta"""', "['Role']"], {}), "('pta', ['Role'])\n", (598, 615), False, 'from south.db import db\n'), ((1172, 1211), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""pta"""', "['Staff']"], {}), "('pta', ['Staff'])\n", (1193, 1211), False, 'from south.db import db\n'), ((1572, 1614), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""pta"""', "['Language']"], {}), "('pta', ['Language'])\n", (1593, 1614), False, 'from south.db import db\n'), ((2503, 2544), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""pta"""', "['Patient']"], {}), "('pta', ['Patient'])\n", (2524, 2544), False, 'from south.db import db\n'), ((2901, 2943), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""pta"""', "['Question']"], {}), "('pta', ['Question'])\n", (2922, 2943), False, 'from south.db import db\n'), ((3404, 3444), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""pta"""', "['Answer']"], {}), "('pta', ['Answer'])\n", (3425, 3444), False, 'from south.db import db\n'), ((4103, 4153), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""pta"""', "['PatientResponses']"], {}), "('pta', ['PatientResponses'])\n", (4124, 4153), False, 'from south.db import db\n'), ((4790, 4839), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""pta"""', "['PTAQuestionaire']"], {}), "('pta', ['PTAQuestionaire'])\n", (4811, 4839), False, 'from south.db import db\n'), ((4912, 4939), 'south.db.db.delete_table', 'db.delete_table', (['"""pta_role"""'], {}), "('pta_role')\n", (4927, 4939), False, 'from south.db import db\n'), ((4982, 5010), 'south.db.db.delete_table', 'db.delete_table', (['"""pta_staff"""'], {}), "('pta_staff')\n", (4997, 5010), False, 'from south.db import db\n'), ((5056, 5087), 'south.db.db.delete_table', 'db.delete_table', (['"""pta_language"""'], {}), "('pta_language')\n", (5071, 5087), False, 'from south.db import db\n'), ((5132, 5162), 'south.db.db.delete_table', 'db.delete_table', (['"""pta_patient"""'], {}), "('pta_patient')\n", (5147, 5162), False, 'from south.db import db\n'), ((5208, 5239), 'south.db.db.delete_table', 'db.delete_table', (['"""pta_question"""'], {}), "('pta_question')\n", (5223, 5239), False, 'from south.db import db\n'), ((5283, 5312), 'south.db.db.delete_table', 'db.delete_table', (['"""pta_answer"""'], {}), "('pta_answer')\n", (5298, 5312), False, 'from south.db import db\n'), ((5366, 5405), 'south.db.db.delete_table', 'db.delete_table', (['"""pta_patientresponses"""'], {}), "('pta_patientresponses')\n", (5381, 5405), False, 'from south.db import db\n'), ((5458, 5496), 'south.db.db.delete_table', 'db.delete_table', (['"""pta_ptaquestionaire"""'], {}), "('pta_ptaquestionaire')\n", (5473, 5496), False, 'from south.db import db\n')]
|
import re
from locustio.common_utils import init_logger, confluence_measure
logger = init_logger(app_type='confluence')
@confluence_measure
def app_specific_action(locust):
r = locust.client.get('/plugin/report') # navigate to page
content = r.content.decode('utf-8') # parse page content
token_pattern_example = '"token":"(.+?)"'
id_pattern_example = '"id":"(.+?)"'
    token = re.findall(token_pattern_example, content)  # parse variables from response using regexp
    obj_id = re.findall(id_pattern_example, content)  # avoid shadowing the built-in id()
    logger.locust_info(f'token: {token}, id: {obj_id}')  # logger for debug when verbose is true in confluence.yml file
if 'assertion string' not in content:
logger.error(f"'assertion string' was not found in {content}")
assert 'assertion string' in content # assertion after GET request
body = {"id": id, "token": token} # include parsed variables to POST body
headers = {'content-type': 'application/json'}
r = locust.client.post('/plugin/post/endpoint', body, headers) # send some POST request
content = r.content.decode('utf-8')
if 'assertion string after successful post request' not in content:
logger.error(f"'assertion string after successful post request' was not found in {content}")
assert 'assertion string after successful post request' in content # assertion after POST request
|
[
"locustio.common_utils.init_logger",
"re.findall"
] |
[((86, 120), 'locustio.common_utils.init_logger', 'init_logger', ([], {'app_type': '"""confluence"""'}), "(app_type='confluence')\n", (97, 120), False, 'from locustio.common_utils import init_logger, confluence_measure\n'), ((402, 444), 're.findall', 're.findall', (['token_pattern_example', 'content'], {}), '(token_pattern_example, content)\n', (412, 444), False, 'import re\n'), ((500, 539), 're.findall', 're.findall', (['id_pattern_example', 'content'], {}), '(id_pattern_example, content)\n', (510, 539), False, 'import re\n')]
|
# imports - module imports
from ezros.exception import (
EzrosError
)
# imports - test imports
import pytest
def test_ezros_error():
with pytest.raises(EzrosError):
raise EzrosError
|
[
"pytest.raises"
] |
[((150, 175), 'pytest.raises', 'pytest.raises', (['EzrosError'], {}), '(EzrosError)\n', (163, 175), False, 'import pytest\n')]
|
import MySQLdb
'''
A single deployed database instance is enough for now; this script then keeps
its data in sync. It inserts test data: whenever a new module is finished,
run this script to add the corresponding data dynamically.
'''
# Rebuild mode is "create", appending data is "append", drop-and-recreate is "refresh"
DB_OP_MODE = "append"
# database connection user name
MYSQL_CONN_NAME = "mysqlname"
# remote database host
MYSQL_CONN_ADDR = "mysqllinkpath"
# database login password
MYSQL_CONN_PASSWORD = "<PASSWORD>"
# default connection charset
MYSQL_CONN_CHARSET = "utf8"
# default database name
CREATE_DB_NAME = "you_money"
def check_db_exists(db_name, db):
    '''
    Check whether the given database already exists.
    :param db_name:
    :return:
    '''
    cursor = db.cursor()
    cursor.execute("SHOW DATABASES")
    rows = cursor.fetchall()
    for row in rows:
        # each row is a 1-tuple holding a database name
        if row[0] == db_name:
            return True
    return False
# TODO: the database-creation part of this script is unfinished
def drop_db(db):
    '''
    Drop and recreate the database.
    :return:
    '''
    cursor = db.cursor()
    cursor.execute("DROP DATABASE IF EXISTS " + CREATE_DB_NAME)
    cursor.execute("CREATE DATABASE IF NOT EXISTS " + CREATE_DB_NAME)
def create_table(tab_name, engine, charset):
    '''
    Create a table.
    :param tab_name:
    :param engine:
    :param charset:
    :return:
    '''
    pass
def append_data(sql_query):
    '''
    Append data.
    :param sql_query:
    :return:
    '''
    pass
def clean_table(tab_name):
    '''
    Clear a table's data.
    :param tab_name:
    :return:
    '''
    pass
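# A hedged sketch of how the unfinished helpers above could look; the table
# layout is a placeholder and ``db`` is assumed to be an open connection:
#   def create_table(db, tab_name, engine='InnoDB', charset='utf8'):
#       cursor = db.cursor()
#       cursor.execute(
#           "CREATE TABLE IF NOT EXISTS %s (id INT PRIMARY KEY AUTO_INCREMENT) "
#           "ENGINE=%s DEFAULT CHARSET=%s" % (tab_name, engine, charset))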
if __name__ == '__main__':
    # connect()'s positional order is (host, user, passwd, db), so pass everything by keyword
    db = MySQLdb.connect(host=MYSQL_CONN_ADDR,
                         user=MYSQL_CONN_NAME,
                         passwd=MYSQL_CONN_PASSWORD,
                         charset=MYSQL_CONN_CHARSET)
    if check_db_exists(CREATE_DB_NAME, db):
pass
db.close()
|
[
"MySQLdb.connect"
] |
[((1332, 1426), 'MySQLdb.connect', 'MySQLdb.connect', (['MYSQL_CONN_ADDR', 'MYSQL_CONN_NAME', 'MYSQL_CONN_PASSWORD', 'MYSQL_CONN_CHARSET'], {}), '(MYSQL_CONN_ADDR, MYSQL_CONN_NAME, MYSQL_CONN_PASSWORD,\n MYSQL_CONN_CHARSET)\n', (1347, 1426), False, 'import MySQLdb\n')]
|
from django.conf.urls import url
from alipay.create_direct_pay_by_user.dpn import views
urlpatterns = [
url(r'^$', views.dpn, {'item_check_callable': None}, name='alipay-dpn'),
]
|
[
"django.conf.urls.url"
] |
[((110, 180), 'django.conf.urls.url', 'url', (['"""^$"""', 'views.dpn', "{'item_check_callable': None}"], {'name': '"""alipay-dpn"""'}), "('^$', views.dpn, {'item_check_callable': None}, name='alipay-dpn')\n", (113, 180), False, 'from django.conf.urls import url\n')]
|
from django.shortcuts import render
# Create your views here.
from rest_framework.viewsets import ReadOnlyModelViewSet
from rest_framework_extensions.cache.mixins import CacheResponseMixin
from areas.models import Area
from areas.serializers import AreaSerializer, SubAreaSerializer
class AreasViewSet(CacheResponseMixin, ReadOnlyModelViewSet):
    """
    Administrative-division (area) information.
    # GET /areas/(?P<pk>\d+)/
    request: pk(int)
    response: id(int) name(str) subs(list)
    The queryset and serializer class would normally be declared as attributes;
    the stock get_queryset and get_serializer_class simply read those
    attributes, so here we override the two methods directly where per-action
    behaviour is needed.
    """
    pagination_class = None  # area data is not paginated
    def get_queryset(self):
        """
        Provide the queryset.
        """
        if self.action == 'list':
            return Area.objects.filter(parent=None)
        else:
            return Area.objects.all()
    def get_serializer_class(self):
        """
        Provide the serializer class.
        """
        if self.action == 'list':
            return AreaSerializer
        else:
            return SubAreaSerializer
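# A sketch of the two serializers this viewset pairs with, inferred from the
# docstring's response fields (the field wiring is an assumption, not the
# project's actual serializers):
#   class AreaSerializer(serializers.ModelSerializer):
#       class Meta:
#           model = Area
#           fields = ('id', 'name')
#   class SubAreaSerializer(serializers.ModelSerializer):
#       subs = AreaSerializer(many=True, read_only=True)
#       class Meta:
#           model = Area
#           fields = ('id', 'name', 'subs')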
|
[
"areas.models.Area.objects.all",
"areas.models.Area.objects.filter"
] |
[((704, 736), 'areas.models.Area.objects.filter', 'Area.objects.filter', ([], {'parent': 'None'}), '(parent=None)\n', (723, 736), False, 'from areas.models import Area\n'), ((770, 788), 'areas.models.Area.objects.all', 'Area.objects.all', ([], {}), '()\n', (786, 788), False, 'from areas.models import Area\n')]
|
""" This module is a wrapper for lxu.command.BasicCommand.
It improves and simplifies command implementations including popups,
sPresetText fields, and Form Command Lists.
This is based on Adam O'Hern's Commander code but is vastly enhanced.
https://github.com/adamohern/commander
"""
import time
import traceback
import operator
import lx, lxu, lxifc
import modo
from xfrm import TransformUtils
from item import ItemUtils
from message import Message
from setup import SetupMode
from run import run
def bless(commandClass, commandName):
""" Custom bless function.
"""
commandClass.NAME = commandName
try:
lx.bless(commandClass, commandName)
except TypeError:
lx.out('Blessing failed: %s, %s' % (str(commandClass), str(commandName)))
class Argument(object):
""" Argument represents single command argument.
Arguments should be added as this class instances to the command.
"""
# These datatypes will be treated as Float values
sTYPE_FLOATs = [
'acceleration',
'angle',
'axis',
'color1',
'distance',
'float',
'force',
'light',
'mass',
'percent',
'speed',
'time',
'uvcoord'
]
# Treated as Str values
sTYPE_STRINGs = [
'date',
'datetime',
'filepath',
'string',
'vertmapname',
'&item'
]
# Treated as Str values in the MODO UI,
# but parsed into [Float, Float, Float] for use in the execute()
sTYPE_STRING_vectors = [
'angle3',
'color',
'float3',
'percent3'
]
# Treated as Int values
sTYPE_INTEGERs = [
'integer'
]
# Treated as Bool values
sTYPE_BOOLEANs = [
'boolean'
]
DATATYPES = sTYPE_FLOATs + sTYPE_STRINGs + sTYPE_STRING_vectors + sTYPE_INTEGERs + sTYPE_BOOLEANs
def __init__(self, name="", datatype=None):
self.name = name
self.label = None
self.defaultValue = None
self.datatype = None
if datatype is not None:
self.datatype = datatype.lower()
self.valuesList = None
self.valuesListUIType = None
self.flags = None
self.index = -1
self.hints = None
    def __str__(self):
""" Represent argument as its name and string datatype.
"""
reprString = "Command argument: " + self.name
if isinstance(self.datatype, str):
reprString += " type: "
reprString += self.datatype
return reprString
def __eq__(self, other):
if isinstance(other, str):
return self.name == other
elif isinstance(other, Argument):
return self.name == other.name
elif isinstance(other, int):
return self.index == other
else:
return False
class ArgumentPopupContent(object):
""" Use this class for filling contents of a popup.
"""
def __init__(self):
self._entries = []
self.iconWidth = None
self.iconHeight = None
def __len__(self):
return len(self._entries)
def __getitem__(self, key):
if not isinstance(key, int):
raise TypeError
if key >= len(self._entries):
raise KeyError
return self._entries[key]
def __iter__(self):
return iter(self._entries)
def addEntry(self, entry):
self._entries.append(entry)
def getEntry(self, index):
return self._entries[index]
@property
def entriesCount(self):
return len(self._entries)
class ArgumentPopupEntry(object):
def __init__(self, internalName="", userName=""):
self.internalName = internalName
self.userName = userName
self.iconImage = None
self.iconResource = None
class ArgumentItemsContent(object):
""" Use this class to define values for the item popup argument.
"""
def __init__(self):
self.noneOption = False
self.testOnRawItems = False # use lx.object.Item rather then modo.Item.
self.itemTestFunction = False
class ArgumentValuesListType(object):
""" When argument represents a list of values these can show up
in UI as Popup, sPresetText or Form Command List.
A popup with item list is also supported.
"""
POPUP = 1
S_PRESET_TEXT = 2
FORM_COMMAND_LIST = 3
ITEM_POPUP = 4
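# A minimal usage sketch for this wrapper (the command name and argument are
# made up; a real plug-in would put this at module level so bless() runs on
# load):
#   class CmdHello(Command):
#       def arguments(self):
#           name = Argument('name', 'string')
#           name.defaultValue = 'world'
#           return [name]
#       def execute(self, msg, flags):
#           lx.out('Hello %s!' % self.getArgumentValue('name'))
#   bless(CmdHello, 'hello.world')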
class Command(lxu.command.BasicCommand):
"""Wrapper for lxu.command.BasicCommand.
    Based on Adam O'Hern's commander code.
https://github.com/adamohern/commander
"""
# NAME is only used for debugging purposes.
NAME = ''
@property
def name(self):
return self.NAME
# --- Public methods, to be overriden by user.
def init(self):
""" Performs any extra initialisation steps that the command requires.
        This is called from the command's __init__() method.
"""
pass
def interact(self):
""" Perform interaction with user before command is actually executed.
Typically this means opening file dialogs, confirm messages, etc.
Interact() happens before command posts its dialog with arguments.
Returns
-------
bool
False if command should not be executed, True if it should go ahead.
"""
return True
def enable(self, msg):
""" Decides if the command should be enabled or disabled.
Parameters
----------
msg : modox.Message
Wrapper around lx.object.Message, use it to set disable/enable message.
Returns
-------
bool
True for enabled command, False otherwise.
"""
return True
def flags(self):
""" Command flags.
"""
return lx.symbol.fCMD_UNDO
def arguments(self):
""" Gets a list of arguments for a command.
Returns
-------
list of Argument or single Argument
Return either single or a list of Argument objects, one for each argument.
"""
return []
def getArgument(self, ident):
""" Gets argument by index or name.
Parameters
----------
ident : str or int
Either argument name or its index.
Returns
-------
Argument
Raises
------
        LookupError
            KeyError for an unknown name, IndexError for an out-of-range index.
"""
        if isinstance(ident, str):
ident = self._argumentsByName[ident]
return self._argumentsList[ident]
def isArgumentSet(self, ident):
""" Returns whether given argument is set in a command or not.
Parameters
----------
ident : str or int
Either argument name or its index.
Returns
-------
bool
"""
arg = self.getArgument(ident)
return self.dyna_IsSet(arg.index)
def getArgumentValue(self, ident):
"""Return a command argument value by index.
If no argument value exists, returns the default parameter.
NOTE: The commander_args() method is simpler to use than this method.
You should probably use that one unless you have a reason to find a specific
argument by index.
:param index: (int) index of argument to retrieve
:param default: value to return if argument is not set
:returns: argument value (str, int, float, or boolean as appropriate)
"""
arg = self.getArgument(ident)
# If no value is set, return the default.
if not self.dyna_IsSet(arg.index):
return self._resolveDefaultValue(arg.defaultValue)
# TODO: I think it's about variable argument value?
#if 'variable' in self.commander_arguments()[index].get(FLAGS, []):
#datatype = self.basic_ArgType(index)
#else:
#datatype = self.commander_arguments()[index][DATATYPE].lower()
# If it's a string, use dyna_String to grab it.
if arg.datatype in Argument.sTYPE_STRINGs:
return self.dyna_String(arg.index)
# If the value is a vector, use dyna_String to grab it, then parse it
# into a list of float vlues.
elif arg.datatype in Argument.sTYPE_STRING_vectors:
return [float(i) for i in self.dyna_String(arg.index).split(" ")]
# If the value is an integer, use dyna_Int to grab it.
elif arg.datatype in Argument.sTYPE_INTEGERs:
return self.dyna_Int(arg.index)
# If the value is a float, use dyna_Float to grab it.
elif arg.datatype in Argument.sTYPE_FLOATs:
return self.dyna_Float(arg.index)
# If the value is a boolean, use dyna_Bool to grab it.
elif arg.datatype in Argument.sTYPE_BOOLEANs:
return self.dyna_Bool(arg.index)
elif arg.datatype == '&item':
return self.dyna_String(arg.index)
# If something bonkers is going on, use the default.
return self._resolveDefaultValue(arg.defaultValue)
def uiHints(self, argument, hints):
""" Set UI hints for a given argument by calling methods
on the given hints object.
"""
pass
def icon(self):
""" Returns string with icon name for command button.
"""
return None
def notifiers(self):
""" Returns a list of notifiers for a command.
Should return a list of tuples, for example:
[('notifier.editAction',''), ("select.event", "item +ldt"), ("tagger.notifier", "")]
"""
return []
def setupMode(self):
""" Sets setup mode for the command.
This will be set at the beginning of execute.
Returns
-------
bool or None
True/False to switch Setup Mode to a given state.
None to not affect setup mode (this is default).
"""
return None
def restoreSetupMode(self):
"""
Restores setup mode to its previous value once command is executed.
Returns
-------
bool
Return True to restore setup mode to its state prior to command execution.
"""
return False
def preExecute(self):
""" Called after interact() but before execute block is called.
Use this if you want to verify the command is ok to run after dialog
with command arguments was closed by user.
Returns
-------
bool
False if command should not be executed, True if it should go ahead.
"""
return True
def executeStart(self):
""" Called from within basic_Execute at the very beginning of execution code.
Use this function to perform actions from within the actual execute block
but right before execute() is called.
"""
pass
def execute(self, msg, flags):
""" This is the place for main command execution code.
"""
pass
def executeEnd(self):
""" Called from basic_Execute, after execute() was called.
Typically used for clean up/restore operations.
"""
pass
def query(self, argument):
""" Returns a value based on and argument being queried.
This method can return string, boolean, integer or float."""
return None
def enableTimersOn(self):
""" Enable/disable log output that says how long enable() takes.
This can help with optimising performance of enable().
This method should be as fast as possible so it doesn't slow down UI.
Returns
-------
bool
True to enable timers log output.
"""
return False
def queryTimersOn(self):
""" Enable/disable log output that says how long query() method takes.
This can help with optimising performance of query().
This method should be as fast as possible so it doesn't slow down UI.
Returns
-------
bool
True to enable log output.
"""
return False
def restoreItemSelection(self):
""" Restores item selection after command is executed.
Returns
-------
bool
True if item selection should be restored to a state prior to firing the command.
"""
return False
def autoFocusItemListWhenDone(self):
""" Automatically focuses item list on selected items when command execution is over.
"""
return False
def applyEditActionPre(self):
""" Applies edit action before the command is executed so there are no 'hanging' edits.
Returns
-------
bool
True if edit action should be applied.
Default is False.
"""
return False
def applyEditActionPost(self):
""" Applies edit action after the command is executed so there are no 'hanging' edits left.
Returns
-------
bool
True if edit action should be applied.
Default is False.
"""
return False
def dropToolPre(self):
"""
Drops any active tool before command execution starts.
Returns
-------
bool
True to drop a tool (if any is active).
"""
return False
# --- Private methods, do not touch.
def cmd_Flags(self):
""" Command is scene altering, undoable by default.
"""
return self.flags()
def cmd_Interact(self):
result = self.interact()
if not result:
msg = lx.object.Message(self.cmd_Message())
msg.SetCode(lx.symbol.e_ABORT)
def cmd_PreExecute(self):
result = self.preExecute()
if not result:
msg = lx.object.Message(self.cmd_Message())
msg.SetCode(lx.symbol.e_ABORT)
def cmd_Icon(self):
return self.icon()
def basic_Enable(self, msg):
if self.enableTimersOn():
timeStart = time.clock()
msgWrap = Message(msg)
enabled = self.enable(msgWrap)
if self.enableTimersOn():
timeEnd = time.clock()
lx.out("ENABLE (%s) : %f s." % (self.NAME, (timeEnd - timeStart)))
return enabled
def basic_ArgType(self, index):
pass
def cmd_DialogInit(self):
""" Sets default values for arguments in command dialogs.
        Once this method is implemented, MODO's default mechanism for storing
argument values is not used.
This method is called right before command's dialog pops up.
Note that this method uses command argument's .defaultValue property.
        This property can be a function (or any other callable, in fact).
        If you set a function as the default value it will always be called to
        retrieve the actual default value, which is used instead of the stored
        value in the dialog.
        Sadly, due to the way MODO seems to work (possibly a bug), using a
        function as the default makes it impossible to set the argument in a
        command string; it will always be overridden by what the default
        function returns.
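        An illustrative (hypothetical) argument definition with a callable
        default, using only the Argument attributes read elsewhere in this
        class:
            arg = Argument(name='mode', datatype='string')
            arg.defaultValue = lambda: 'current'
        Here the lambda is called each time the dialog is about to open,
        instead of a cached value being used.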
"""
arguments = self.arguments()
for n, argument in enumerate(arguments):
datatype = argument.datatype
defaultValue = arguments[n].defaultValue
# Default value can be a function.
# If it's a function we always want to call this function
            # to get the default value. This is because MODO sometimes seems
            # to report dyna_IsSet() as true for an argument even when it is
            # not set and should be pulled from the default value.
            # In that case we do not want to miss retrieving the value from
            # the function.
if hasattr(defaultValue, '__call__'):
storedValue = defaultValue()
else:
# If we already have a value, use it.
# This is especially important when a command is run with args
# via command line or form button.
if self.dyna_IsSet(n):
continue
storedValue = self._argumentValuesCache[n]
# If there's no stored value, we're done.
if not storedValue:
continue
# The correct attr_Set... method depends on datatype.
if datatype in Argument.sTYPE_STRINGs + Argument.sTYPE_STRING_vectors:
self.attr_SetString(n, str(storedValue))
elif datatype in Argument.sTYPE_INTEGERs + Argument.sTYPE_BOOLEANs:
self.attr_SetInt(n, int(storedValue))
elif datatype in Argument.sTYPE_FLOATs:
self.attr_SetFlt(n, float(storedValue))
def basic_Execute(self, msg, flags):
"""Stores recent command values for next run and wraps commander_execute
in a try/except statement with traceback.
Do NOT override this method. Use commander_execute() instead.
You should never need to touch this.
CRUCIAL: When turning off listening never just turn it back on!
Set it to whatever the state was prior to executing this command.
        Otherwise, firing an rs command from within another rs command is
        going to mess things up. Listening will be back to True as soon as
        the first sub command is done.
Returns
-------
bool, None
Return False to exit command with ABORT message code.
"""
scene = modo.Scene()
self.executeStart()
if self.dropToolPre():
run('!tool.drop')
if self.restoreItemSelection():
selection = scene.selected
setupMode = SetupMode()
if self.restoreSetupMode():
setupMode.store()
if self.setupMode() is not None and setupMode.state != self.setupMode():
setupMode.state = self.setupMode()
if self.applyEditActionPre():
TransformUtils.applyEdit()
msgWrap = Message(msg)
try:
cmdResult = self.execute(msgWrap, flags)
except:
cmdResult = False
lx.out(traceback.format_exc())
if self.applyEditActionPost():
TransformUtils.applyEdit()
if self.restoreItemSelection():
scene.select(selection, add=False)
if self.restoreSetupMode():
setupMode.restore()
self.executeEnd()
if not cmdResult and cmdResult is not None:
msgWrap.setCode(Message.Code.ABORT)
return
# This is executed only when command did not abort
if self.autoFocusItemListWhenDone():
ItemUtils.autoFocusItemListOnSelection()
def cmd_Query(self, index, vaQuery):
if self.queryTimersOn():
timeStart = time.clock()
# Create the ValueArray object
va = lx.object.ValueArray()
va.set(vaQuery)
# To keep things simpler for commander users, let them return
# a value using only an index (no ValueArray nonsense)
commander_query_result = self.query(self._argumentsList[index])
# Need to add the proper datatype based on result from commander_query
if isinstance(commander_query_result, basestring):
va.AddString(commander_query_result)
elif isinstance(commander_query_result, int):
va.AddInt(commander_query_result)
elif isinstance(commander_query_result, float):
va.AddFloat(commander_query_result)
elif isinstance(commander_query_result, (modo.Item, lx.object.Item, lxu.object.Item)):
valRef = lx.object.ValueReference(va.AddEmptyValue())
valRef.SetObject(commander_query_result)
if self.queryTimersOn():
timeEnd = time.clock()
lx.out("QUERY (%s) : %f s." % (self.NAME, (timeEnd - timeStart)))
return lx.result.OK
def arg_UIHints(self, index, hints):
"""Adds pretty labels to arguments in command dialogs. If no label parameter
is explicitly included, we create a pseudo-label by capitalizing the
argument name and replacing underscores with spaces.
Labels can either be literal strings or method/function objects. In the
latter case, the method or function will be called when needed.
If any popup fields of type sPresetText are present,
adds the appropriate hint.
You should never need to touch this."""
try:
arg = self._argumentsList[index]
except IndexError:
return
# If an explicit label is provided, use it.
if arg.label is not None:
label = ""
if isinstance(arg.label, str):
label = arg.label
elif type(arg.label) == bool and arg.label:
label = arg.name.replace("_", " ").title()
# Labels can be functions. If so, run the function to get the string.
elif hasattr(arg.label, '__call__'):
                label = arg.label()
# Apply the label.
if (label):
hints.Label(label)
# If the popup type is sPresetText, apply the appropriate class.
if arg.valuesListUIType == ArgumentValuesListType.S_PRESET_TEXT:
hints.Class("sPresetText")
# Allow command implementation to do its custom work.
self.uiHints(arg, hints)
def arg_UIValueHints(self, index):
"""Popups and sPresetText arguments fire this method whenever
they update. Note that the 'hints' parameter can be a literal list
or tuple, but can also be a method or function.
For dynamic lists, be sure to pass in the generator method or function object itself,
not its result. (i.e. pass in 'myGreatFunction', NOT 'myGreatFunction()')
You should never need to touch this."""
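        # Illustrative note: for a dynamic popup the argument's valuesList
        # holds the generator function itself (e.g. myGreatFunction, not
        # myGreatFunction()); it is then invoked below via arg_data() to
        # produce the values.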
try:
arg = self._argumentsList[index]
except IndexError:
return
arg_data = None
# Try to grab the values_list for the argument.
if arg.valuesList is not None:
arg_data = arg.valuesList
# If our values_list is empty, don't bother.
if not arg_data:
return
# If the values_list is a list/tuple, use it as-is.
if isinstance(arg_data, (list, tuple)):
values = arg_data
        # This is rather hacky for the time being: it checks whether
        # arg_data is the items popup content object.
elif isinstance(arg_data, ArgumentItemsContent):
values = arg_data
# If the values_list is a method/function, fire it and use the result.
elif hasattr(arg_data, '__call__'):
values = arg_data()
# In some rare cases you may want to manually instantiate your own
# popup class as a subclass of UIValueHints. In those cases, we
# ignore the below and just use yours.
        # isinstance(arg_data, type) tests whether arg_data is a class
# TODO: Think whether this logic has the best flow.
# the return statement here doesn't fit and breaks the flow.
if isinstance(arg_data, type) and issubclass(arg_data, lxifc.UIValueHints):
return arg_data()
# If values is None or "" or someother nonsense, return an empty list.
if not values:
values = []
# Argument can be a normal popup, an sPresetText popup, or a
# Form Command List. We'll need to return a different class
# depending on the 'values_list_type'.
if arg.valuesListUIType == ArgumentValuesListType.POPUP:
return PopupClass(values)
elif arg.valuesListUIType == ArgumentValuesListType.S_PRESET_TEXT:
return PopupClass(values)
elif arg.valuesListUIType == ArgumentValuesListType.FORM_COMMAND_LIST:
return FormCommandListClass(values)
elif arg.valuesListUIType == ArgumentValuesListType.ITEM_POPUP:
return ItemPopupClass(arg_data)
def cmd_NotifyAddClient(self, argument, object):
"""Add notifier clients as needed.
You should never need to touch this."""
for i, tup in enumerate(self._notifier_tuples):
if self._notifiers[i] is None:
                self._notifiers[i] = self.not_svc.Spawn(self._notifier_tuples[i][0], self._notifier_tuples[i][1])
self._notifiers[i].AddClient(object)
def cmd_NotifyRemoveClient(self, object):
"""Remove notifier clients as needed.
You should never need to touch this."""
for i, tup in enumerate(self._notifier_tuples):
if self._notifiers[i] is not None:
self._notifiers[i].RemoveClient(object)
# -------- Private methods
def _resolveDefaultValue(self, defaultValue):
""" Resolves default value in case default value is a function.
"""
if hasattr(defaultValue, '__call__'):
return defaultValue()
return defaultValue
def _setupNotifiers(self):
        # A command class can implement the notifiers() method to update
# FormCommandLists and Popups. If implemented, add the notifiers.
self.not_svc = lx.service.NotifySys()
self._notifiers = []
        self._notifier_tuples = tuple(self.notifiers())
for i in self._notifier_tuples:
self._notifiers.append(None)
@classmethod
def _setupArgumentValuesCache(cls):
""" We manually cache all argument values between command executions during single session.
"""
try:
cls._argumentValuesCache
except AttributeError:
cls._argumentValuesCache = []
@classmethod
def _cacheArgumentDefaultValue(cls, value):
"""Add an argument to the class variable _commander_stored_values.
You should never need to touch this.
"""
cls._argumentValuesCache.append(value)
def _setupArguments(self):
""" Setup command arguments based on arguments() method.
Parse the list of Argument objects that the arguments method returns.
"""
arguments = self.arguments()
# The command does not have arguments
if not arguments:
return True
result = True
if not isinstance(arguments, list):
arguments = [arguments]
for argument in arguments:
if not isinstance(argument, Argument):
continue
if not self._addArgument(argument):
result = False
return result
def _addArgument(self, argument):
if argument.datatype is None or argument.name is None:
return False
datatype = self._resolveArgumentDatatype(argument.datatype)
if not datatype:
return False
argument.index = len(self._argumentsList)
self.dyna_Add(argument.name, datatype)
# This is setting up default value for this argument.
# If this is the first time running the command, the class variable
# _argumentValuesCache will be empty. In that case, populate it.
# This should really go on the argument level, not command class level.
if argument.index >= len(self._argumentValuesCache):
# The default value can be a function. If it's a function
# it will be called each time the command dialog is about to be opened.
# In such case do not cache the default value, just make it a None.
if hasattr(argument.defaultValue, '__call__'):
self._cacheArgumentDefaultValue(None)
else:
self._cacheArgumentDefaultValue(argument.defaultValue)
flags = self._resolveArgumentFlagsList(argument.flags)
if flags:
self.basic_SetFlags(argument.index, reduce(operator.ior, flags))
if argument.hints is not None:
self.dyna_SetHint(argument.index, argument.hints)
self._argumentsList.append(argument)
self._argumentsByName[argument.name] = argument.index
return True
def _resolveArgumentDatatype(self, datatype):
""" Resolve argument datatype into proper string that can be used by raw API.
Args:
datatype: (str) one of command argument type constants or
one of lx.symbol.sTYPE_ raw API constants.
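        Example: 'string' resolves to lx.symbol.sTYPE_STRING, while a value
        that is already a raw sTYPE_ constant is returned unchanged via the
        AttributeError fallback below.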
"""
try:
resolvedDatatype = getattr(lx.symbol, 'sTYPE_' + datatype.upper())
except AttributeError:
resolvedDatatype = datatype
return resolvedDatatype
def _resolveArgumentFlagsList(self, flagsList):
if not isinstance(flagsList, list):
flagsList = [flagsList]
flags = []
for flag in flagsList:
if flag is None:
continue
try:
flags.append(getattr(lx.symbol, 'fCMDARG_' + flag.upper()))
except AttributeError:
flags.append(flag)
return flags
def __init__(self):
lxu.command.BasicCommand.__init__(self)
self._name = ""
self._argumentsList = []
self._argumentsByName = {}
self._setupArgumentValuesCache()
self._setupArguments()
self._setupNotifiers()
self.init()
class FormCommandListClass(lxifc.UIValueHints):
"""Special class for creating Form Command Lists. This is instantiated
    by CommanderClass objects if an FCL argument is provided.
Expects a list of valid MODO commands to be provided to init.
NOTE: Any invalid command will crash MODO.
You should never need to touch this."""
def __init__(self, items):
self._items = items
def uiv_Flags(self):
return lx.symbol.fVALHINT_FORM_COMMAND_LIST
def uiv_FormCommandListCount(self):
return len(self._items)
def uiv_FormCommandListByIndex(self,index):
return self._items[index]
class PopupClass(lxifc.UIValueHints):
"""Special class for creating popups and sPresetText fields. Accepts
either a simple list of values, or a list of (internal, user facing) tuples:
[1, 2, 3]
or
[(1, "The Number One"), (2, "The Number Two"), (3, "The Number Three")]
You should never need to touch this."""
def __init__(self, items):
self._content = ArgumentPopupContent()
if isinstance(items, (list, tuple)):
for item in items:
# If the list item is a list or tuple, assume the format (ugly, pretty)
if isinstance(item, (list, tuple)):
entry = ArgumentPopupEntry(str(item[0]), str(item[1]))
self._content.addEntry(entry)
# Otherwise just use the value for both Ugly and Pretty
else:
entry = ArgumentPopupEntry(str(item), str(item))
self._content.addEntry(entry)
elif isinstance(items, ArgumentPopupContent):
self._content = items
def uiv_Flags(self):
return lx.symbol.fVALHINT_POPUPS
def uiv_PopCount(self):
return len(self._content)
def uiv_PopUserName(self, index):
return self._content[index].userName
def uiv_PopInternalName(self,index):
return self._content[index].internalName
def uiv_PopIconSize(self):
if self._content.iconWidth is not None and self._content.iconHeight is not None:
            return (1, self._content.iconWidth, self._content.iconHeight)
lx.notimpl()
def uiv_PopIconImage(self, index):
iconImage = self._content[index].iconImage
if iconImage is not None:
return iconImage
lx.notimpl()
def uiv_PopIconResource(self, index):
iconResource = self._content[index].iconResource
if iconResource is not None:
return iconResource
lx.notimpl()
class ItemPopupClass(lxu.command.BasicHints):
"""Special class for creating popup with item list.
"""
def __init__(self, itemContent):
self._itemContent = itemContent
def uiv_Flags(self):
flags = lx.symbol.fVALHINT_ITEMS
if self._itemContent.noneOption:
flags |= lx.symbol.fVALHINT_ITEMS_NONE
return flags
def uiv_ItemTest(self, item):
# item comes here as lx.object.Unknown.
# Cast it to lx.object.Item by default.
item = lx.object.Item(item)
if not self._itemContent.testOnRawItems:
item = modo.Item(item)
return self._itemContent.itemTestFunction(item)
|
[
"lx.out",
"lx.object.Item",
"run.run",
"time.clock",
"item.ItemUtils.autoFocusItemListOnSelection",
"lx.bless",
"message.Message",
"lx.service.NotifySys",
"setup.SetupMode",
"modo.Scene",
"lxu.command.BasicCommand.__init__",
"lx.notimpl",
"modo.Item",
"traceback.format_exc",
"xfrm.TransformUtils.applyEdit",
"lx.object.ValueArray"
] |
[((650, 685), 'lx.bless', 'lx.bless', (['commandClass', 'commandName'], {}), '(commandClass, commandName)\n', (658, 685), False, 'import lx, lxu, lxifc\n'), ((14311, 14323), 'message.Message', 'Message', (['msg'], {}), '(msg)\n', (14318, 14323), False, 'from message import Message\n'), ((17836, 17848), 'modo.Scene', 'modo.Scene', ([], {}), '()\n', (17846, 17848), False, 'import modo\n'), ((18041, 18052), 'setup.SetupMode', 'SetupMode', ([], {}), '()\n', (18050, 18052), False, 'from setup import SetupMode\n'), ((18345, 18357), 'message.Message', 'Message', (['msg'], {}), '(msg)\n', (18352, 18357), False, 'from message import Message\n'), ((19236, 19258), 'lx.object.ValueArray', 'lx.object.ValueArray', ([], {}), '()\n', (19256, 19258), False, 'import lx, lxu, lxifc\n'), ((25658, 25680), 'lx.service.NotifySys', 'lx.service.NotifySys', ([], {}), '()\n', (25678, 25680), False, 'import lx, lxu, lxifc\n'), ((29561, 29600), 'lxu.command.BasicCommand.__init__', 'lxu.command.BasicCommand.__init__', (['self'], {}), '(self)\n', (29594, 29600), False, 'import lx, lxu, lxifc\n'), ((32025, 32037), 'lx.notimpl', 'lx.notimpl', ([], {}), '()\n', (32035, 32037), False, 'import lx, lxu, lxifc\n'), ((32200, 32212), 'lx.notimpl', 'lx.notimpl', ([], {}), '()\n', (32210, 32212), False, 'import lx, lxu, lxifc\n'), ((32390, 32402), 'lx.notimpl', 'lx.notimpl', ([], {}), '()\n', (32400, 32402), False, 'import lx, lxu, lxifc\n'), ((32919, 32939), 'lx.object.Item', 'lx.object.Item', (['item'], {}), '(item)\n', (32933, 32939), False, 'import lx, lxu, lxifc\n'), ((14280, 14292), 'time.clock', 'time.clock', ([], {}), '()\n', (14290, 14292), False, 'import time\n'), ((14419, 14431), 'time.clock', 'time.clock', ([], {}), '()\n', (14429, 14431), False, 'import time\n'), ((14444, 14508), 'lx.out', 'lx.out', (["('ENABLE (%s) : %f s.' % (self.NAME, timeEnd - timeStart))"], {}), "('ENABLE (%s) : %f s.' % (self.NAME, timeEnd - timeStart))\n", (14450, 14508), False, 'import lx, lxu, lxifc\n'), ((17922, 17939), 'run.run', 'run', (['"""!tool.drop"""'], {}), "('!tool.drop')\n", (17925, 17939), False, 'from run import run\n'), ((18299, 18325), 'xfrm.TransformUtils.applyEdit', 'TransformUtils.applyEdit', ([], {}), '()\n', (18323, 18325), False, 'from xfrm import TransformUtils\n'), ((18566, 18592), 'xfrm.TransformUtils.applyEdit', 'TransformUtils.applyEdit', ([], {}), '()\n', (18590, 18592), False, 'from xfrm import TransformUtils\n'), ((19030, 19070), 'item.ItemUtils.autoFocusItemListOnSelection', 'ItemUtils.autoFocusItemListOnSelection', ([], {}), '()\n', (19068, 19070), False, 'from item import ItemUtils\n'), ((19170, 19182), 'time.clock', 'time.clock', ([], {}), '()\n', (19180, 19182), False, 'import time\n'), ((20175, 20187), 'time.clock', 'time.clock', ([], {}), '()\n', (20185, 20187), False, 'import time\n'), ((20200, 20263), 'lx.out', 'lx.out', (["('QUERY (%s) : %f s.' % (self.NAME, timeEnd - timeStart))"], {}), "('QUERY (%s) : %f s.' % (self.NAME, timeEnd - timeStart))\n", (20206, 20263), False, 'import lx, lxu, lxifc\n'), ((33008, 33023), 'modo.Item', 'modo.Item', (['item'], {}), '(item)\n', (33017, 33023), False, 'import modo\n'), ((18490, 18512), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (18510, 18512), False, 'import traceback\n')]
|
# Copyright 2017 MDSLAB - University of Messina
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
from iotronic.common import rpc
from iotronic.common import states
from iotronic.conductor import rpcapi
from iotronic import objects
from iotronic.wamp import wampmessage as wm
from oslo_config import cfg
from oslo_log import log
LOG = log.getLogger(__name__)
CONF = cfg.CONF
CONF(project='iotronic')
rpc.init(CONF)
topic = 'iotronic.conductor_manager'
c = rpcapi.ConductorAPI(topic)
class cont(object):
def to_dict(self):
return {}
ctxt = cont()
def echo(data):
LOG.info("ECHO: %s" % data)
return data
def wamp_alive(board_uuid, board_name):
LOG.debug("Alive board: %s (%s)", board_uuid, board_name)
return "Iotronic alive @ " + datetime.now().strftime(
'%Y-%m-%dT%H:%M:%S.%f')
# to be removed
def alive():
LOG.debug("Alive")
return "Iotronic alive @ " + datetime.now().strftime(
'%Y-%m-%dT%H:%M:%S.%f')
def update_sessions(session_list, agent):
session_list = set(session_list)
list_from_db = objects.SessionWP.valid_list(ctxt, agent)
list_db = set([int(elem.session_id) for elem in list_from_db])
LOG.debug('Wamp session list: %s', session_list)
LOG.debug('DB session list: %s', list_db)
if session_list == list_db:
LOG.debug('Sessions on the database are updated.')
return
    # list of boards no longer connected
old_connected = list_db.difference(session_list)
LOG.debug('no more valid session list: %s', old_connected)
for elem in old_connected:
old_session = objects.SessionWP.get(ctxt, elem)
if old_session.valid:
old_session.valid = False
old_session.save()
board = objects.Board.get_by_uuid(ctxt, old_session.board_uuid)
board.status = states.OFFLINE
board.save()
LOG.debug('Session updated. Board %s is now %s', board.uuid,
states.OFFLINE)
if old_connected:
LOG.warning('Some boards have been updated: status offline')
    # list of boards still connected
keep_connected = list_db.intersection(session_list)
LOG.debug('still valid session list: %s', keep_connected)
for elem in keep_connected:
for x in list_from_db:
if x.session_id == str(elem):
                LOG.debug('%s needs to be restored.', x.board_uuid)
break
if keep_connected:
LOG.warning('Some boards need to be restored.')
def board_on_leave(session_id):
    LOG.debug('A board with %s disconnected', session_id)
try:
old_session = objects.SessionWP.get(ctxt, session_id)
if old_session.valid:
old_session.valid = False
old_session.save()
board = objects.Board.get_by_uuid(ctxt, old_session.board_uuid)
board.status = states.OFFLINE
board.save()
LOG.debug('Session updated. Board %s is now %s', board.uuid,
states.OFFLINE)
return
LOG.debug('Session %s already set to not valid', session_id)
except Exception:
LOG.debug('session %s not found', session_id)
def connection(uuid, session, info=None):
LOG.debug('Received registration from %s with session %s',
uuid, session)
try:
board = objects.Board.get_by_uuid(ctxt, uuid)
except Exception as exc:
msg = exc.message % {'board': uuid}
LOG.error(msg)
return wm.WampError(msg).serialize()
try:
old_ses = objects.SessionWP(ctxt)
old_ses = old_ses.get_session_by_board_uuid(ctxt, board.uuid,
valid=True)
old_ses.valid = False
old_ses.save()
LOG.debug('old session for %s found: %s', board.uuid,
old_ses.session_id)
except Exception:
LOG.debug('valid session for %s not found', board.uuid)
session_data = {'board_id': board.id,
'board_uuid': board.uuid,
'session_id': session}
session = objects.SessionWP(ctxt, **session_data)
session.create()
LOG.debug('new session for %s saved %s', board.uuid,
session.session_id)
board.status = states.ONLINE
if info:
LOG.debug('board infos %s', info)
if 'lr_version' in info:
if board.lr_version != info['lr_version']:
board.lr_version = info['lr_version']
if 'connectivity' in info:
board.connectivity = info['connectivity']
if 'mac_addr' in info:
board.connectivity = {"mac_addr": info['mac_addr']}
board.save()
LOG.info('Board %s (%s) is now %s', board.uuid,
board.name, states.ONLINE)
return wm.WampSuccess('').serialize()
def registration(code, session):
return c.registration(ctxt, code, session)
def board_on_join(session_id):
LOG.debug('A board with %s joined', session_id['session'])
def notify_result(board_uuid, wampmessage):
wmsg = wm.deserialize(wampmessage)
    LOG.info('Board %s completed its request %s with result: %s',
board_uuid, wmsg.req_id, wmsg.result)
res = objects.Result.get(ctxt, board_uuid, wmsg.req_id)
res.result = wmsg.result
res.message = wmsg.message
res.save()
filter = {"result": objects.result.RUNNING,
"request_uuid": wmsg.req_id}
list_result = objects.Result.get_results_list(ctxt,
filter)
if len(list_result) == 0:
req = objects.Request.get_by_uuid(ctxt, wmsg.req_id)
req.status = objects.request.COMPLETED
req.save()
if req.main_request_uuid:
mreq = objects.Request.get_by_uuid(ctxt, req.main_request_uuid)
mreq.pending_requests = mreq.pending_requests - 1
if mreq.pending_requests == 0:
mreq.status = objects.request.COMPLETED
mreq.save()
return wm.WampSuccess('notification_received').serialize()
|
[
"oslo_log.log.getLogger",
"iotronic.wamp.wampmessage.deserialize",
"iotronic.conductor.rpcapi.ConductorAPI",
"iotronic.objects.SessionWP",
"iotronic.objects.SessionWP.get",
"iotronic.objects.Board.get_by_uuid",
"iotronic.objects.Request.get_by_uuid",
"iotronic.wamp.wampmessage.WampError",
"iotronic.common.rpc.init",
"datetime.datetime.now",
"iotronic.objects.Result.get_results_list",
"iotronic.wamp.wampmessage.WampSuccess",
"iotronic.objects.Result.get",
"iotronic.objects.SessionWP.valid_list"
] |
[((914, 937), 'oslo_log.log.getLogger', 'log.getLogger', (['__name__'], {}), '(__name__)\n', (927, 937), False, 'from oslo_log import log\n'), ((981, 995), 'iotronic.common.rpc.init', 'rpc.init', (['CONF'], {}), '(CONF)\n', (989, 995), False, 'from iotronic.common import rpc\n'), ((1038, 1064), 'iotronic.conductor.rpcapi.ConductorAPI', 'rpcapi.ConductorAPI', (['topic'], {}), '(topic)\n', (1057, 1064), False, 'from iotronic.conductor import rpcapi\n'), ((1648, 1689), 'iotronic.objects.SessionWP.valid_list', 'objects.SessionWP.valid_list', (['ctxt', 'agent'], {}), '(ctxt, agent)\n', (1676, 1689), False, 'from iotronic import objects\n'), ((4679, 4718), 'iotronic.objects.SessionWP', 'objects.SessionWP', (['ctxt'], {}), '(ctxt, **session_data)\n', (4696, 4718), False, 'from iotronic import objects\n'), ((5635, 5662), 'iotronic.wamp.wampmessage.deserialize', 'wm.deserialize', (['wampmessage'], {}), '(wampmessage)\n', (5649, 5662), True, 'from iotronic.wamp import wampmessage as wm\n'), ((5795, 5844), 'iotronic.objects.Result.get', 'objects.Result.get', (['ctxt', 'board_uuid', 'wmsg.req_id'], {}), '(ctxt, board_uuid, wmsg.req_id)\n', (5813, 5844), False, 'from iotronic import objects\n'), ((6031, 6076), 'iotronic.objects.Result.get_results_list', 'objects.Result.get_results_list', (['ctxt', 'filter'], {}), '(ctxt, filter)\n', (6062, 6076), False, 'from iotronic import objects\n'), ((2177, 2210), 'iotronic.objects.SessionWP.get', 'objects.SessionWP.get', (['ctxt', 'elem'], {}), '(ctxt, elem)\n', (2198, 2210), False, 'from iotronic import objects\n'), ((3208, 3247), 'iotronic.objects.SessionWP.get', 'objects.SessionWP.get', (['ctxt', 'session_id'], {}), '(ctxt, session_id)\n', (3229, 3247), False, 'from iotronic import objects\n'), ((3929, 3966), 'iotronic.objects.Board.get_by_uuid', 'objects.Board.get_by_uuid', (['ctxt', 'uuid'], {}), '(ctxt, uuid)\n', (3954, 3966), False, 'from iotronic import objects\n'), ((4135, 4158), 'iotronic.objects.SessionWP', 'objects.SessionWP', (['ctxt'], {}), '(ctxt)\n', (4152, 4158), False, 'from iotronic import objects\n'), ((6171, 6217), 'iotronic.objects.Request.get_by_uuid', 'objects.Request.get_by_uuid', (['ctxt', 'wmsg.req_id'], {}), '(ctxt, wmsg.req_id)\n', (6198, 6217), False, 'from iotronic import objects\n'), ((2330, 2385), 'iotronic.objects.Board.get_by_uuid', 'objects.Board.get_by_uuid', (['ctxt', 'old_session.board_uuid'], {}), '(ctxt, old_session.board_uuid)\n', (2355, 2385), False, 'from iotronic import objects\n'), ((3368, 3423), 'iotronic.objects.Board.get_by_uuid', 'objects.Board.get_by_uuid', (['ctxt', 'old_session.board_uuid'], {}), '(ctxt, old_session.board_uuid)\n', (3393, 3423), False, 'from iotronic import objects\n'), ((5369, 5387), 'iotronic.wamp.wampmessage.WampSuccess', 'wm.WampSuccess', (['""""""'], {}), "('')\n", (5383, 5387), True, 'from iotronic.wamp import wampmessage as wm\n'), ((6337, 6393), 'iotronic.objects.Request.get_by_uuid', 'objects.Request.get_by_uuid', (['ctxt', 'req.main_request_uuid'], {}), '(ctxt, req.main_request_uuid)\n', (6364, 6393), False, 'from iotronic import objects\n'), ((6591, 6630), 'iotronic.wamp.wampmessage.WampSuccess', 'wm.WampSuccess', (['"""notification_received"""'], {}), "('notification_received')\n", (6605, 6630), True, 'from iotronic.wamp import wampmessage as wm\n'), ((1347, 1361), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1359, 1361), False, 'from datetime import datetime\n'), ((1491, 1505), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1503, 1505), False, 'from datetime import datetime\n'), ((4078, 4095), 'iotronic.wamp.wampmessage.WampError', 'wm.WampError', (['msg'], {}), '(msg)\n', (4090, 4095), True, 'from iotronic.wamp import wampmessage as wm\n')]
|
r"""
.. _ref_ex_composite:
Creating a Composite Section
----------------------------
Create a section of mixed materials.
The following example demonstrates how to create a composite cross-section by assigning
different material properties to various regions of the mesh. A steel 310UB40.4 is modelled
with a 50Dx600W timber panel placed on its top flange.
The geometry and mesh are plotted, and the mesh information printed to the terminal
before the analysis is carried out. All types of cross-section analyses are carried
out, with an axial force, bending moment and shear force applied during the stress
analysis. Once the analysis is complete, the cross-section properties are printed
to the terminal and a plot of the centroids and cross-section stresses generated.
"""
# sphinx_gallery_thumbnail_number = 2
import sectionproperties.pre.library.primitive_sections as sections
import sectionproperties.pre.library.steel_sections as steel_sections
from sectionproperties.pre.geometry import CompoundGeometry
from sectionproperties.pre.pre import Material
from sectionproperties.analysis.section import Section
# %%
# Create material properties
steel = Material(
name="Steel",
elastic_modulus=200e3,
poissons_ratio=0.3,
yield_strength=500,
density=8.05e-6,
color="grey",
)
timber = Material(
name="Timber",
elastic_modulus=8e3,
poissons_ratio=0.35,
yield_strength=20,
density=0.78e-6,
color="burlywood",
)
# %%
# Create 310UB40.4
ub = steel_sections.i_section(
d=304, b=165, t_f=10.2, t_w=6.1, r=11.4, n_r=8, material=steel
)
# %%
# Create timber panel on top of the UB
panel = sections.rectangular_section(d=50, b=600, material=timber)
panel = panel.align_center(ub).align_to(ub, on="top")
# Create intermediate nodes in panel to match nodes in ub
panel = (panel - ub) | panel
# %%
# Merge the two sections into one geometry object
section_geometry = CompoundGeometry([ub, panel])
# %%
# Create a mesh and a Section object. For the mesh use a mesh size of 5 for
# the UB, 20 for the panel
section_geometry.create_mesh(mesh_sizes=[5, 20])
comp_section = Section(section_geometry, time_info=True)
comp_section.display_mesh_info() # display the mesh information
# %%
# Plot the mesh with coloured materials and a line transparency of 0.6
comp_section.plot_mesh(materials=True, alpha=0.6)
# %%
# Perform a geometric, warping and plastic analysis
comp_section.calculate_geometric_properties()
comp_section.calculate_warping_properties()
comp_section.calculate_plastic_properties(verbose=True)
# %%
# Perform a stress analysis with N = 100 kN, Mxx = 120 kN.m and Vy = 75 kN
stress_post = comp_section.calculate_stress(N=-100e3, Mxx=-120e6, Vy=-75e3)
# %%
# Print the results to the terminal
comp_section.display_results()
# %%
# Plot the centroids
comp_section.plot_centroids()
# %%
# Plot the axial stress
stress_post.plot_stress_n_zz(pause=False)
# %%
# Plot the bending stress
stress_post.plot_stress_m_zz(pause=False)
# %%
# Plot the shear stress
stress_post.plot_stress_v_zxy()
|
[
"sectionproperties.pre.pre.Material",
"sectionproperties.pre.library.steel_sections.i_section",
"sectionproperties.pre.geometry.CompoundGeometry",
"sectionproperties.pre.library.primitive_sections.rectangular_section",
"sectionproperties.analysis.section.Section"
] |
[((1163, 1287), 'sectionproperties.pre.pre.Material', 'Material', ([], {'name': '"""Steel"""', 'elastic_modulus': '(200000.0)', 'poissons_ratio': '(0.3)', 'yield_strength': '(500)', 'density': '(8.05e-06)', 'color': '"""grey"""'}), "(name='Steel', elastic_modulus=200000.0, poissons_ratio=0.3,\n yield_strength=500, density=8.05e-06, color='grey')\n", (1171, 1287), False, 'from sectionproperties.pre.pre import Material\n'), ((1316, 1443), 'sectionproperties.pre.pre.Material', 'Material', ([], {'name': '"""Timber"""', 'elastic_modulus': '(8000.0)', 'poissons_ratio': '(0.35)', 'yield_strength': '(20)', 'density': '(7.8e-07)', 'color': '"""burlywood"""'}), "(name='Timber', elastic_modulus=8000.0, poissons_ratio=0.35,\n yield_strength=20, density=7.8e-07, color='burlywood')\n", (1324, 1443), False, 'from sectionproperties.pre.pre import Material\n'), ((1494, 1586), 'sectionproperties.pre.library.steel_sections.i_section', 'steel_sections.i_section', ([], {'d': '(304)', 'b': '(165)', 't_f': '(10.2)', 't_w': '(6.1)', 'r': '(11.4)', 'n_r': '(8)', 'material': 'steel'}), '(d=304, b=165, t_f=10.2, t_w=6.1, r=11.4, n_r=8,\n material=steel)\n', (1518, 1586), True, 'import sectionproperties.pre.library.steel_sections as steel_sections\n'), ((1642, 1700), 'sectionproperties.pre.library.primitive_sections.rectangular_section', 'sections.rectangular_section', ([], {'d': '(50)', 'b': '(600)', 'material': 'timber'}), '(d=50, b=600, material=timber)\n', (1670, 1700), True, 'import sectionproperties.pre.library.primitive_sections as sections\n'), ((1917, 1946), 'sectionproperties.pre.geometry.CompoundGeometry', 'CompoundGeometry', (['[ub, panel]'], {}), '([ub, panel])\n', (1933, 1946), False, 'from sectionproperties.pre.geometry import CompoundGeometry\n'), ((2120, 2161), 'sectionproperties.analysis.section.Section', 'Section', (['section_geometry'], {'time_info': '(True)'}), '(section_geometry, time_info=True)\n', (2127, 2161), False, 'from sectionproperties.analysis.section import Section\n')]
|
# Copyright (c) 2015 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pylxd import api
from pylxd import exceptions as lxd_exceptions
def upload_image(image):
alias = '{}/{}/{}/{}'.format(image['os'],
image['release'],
image['arch'],
image['variant'])
lxd = api.API()
imgs = api.API(host='images.linuxcontainers.org')
d = imgs.alias_show(alias)
meta = d[1]['metadata']
tgt = meta['target']
try:
lxd.alias_update(meta)
except lxd_exceptions.APIError as ex:
if ex.status_code == 404:
lxd.alias_create(meta)
return tgt
def delete_image(image):
lxd = api.API()
lxd.image_delete(image)
|
[
"pylxd.api.API"
] |
[((912, 921), 'pylxd.api.API', 'api.API', ([], {}), '()\n', (919, 921), False, 'from pylxd import api\n'), ((933, 975), 'pylxd.api.API', 'api.API', ([], {'host': '"""images.linuxcontainers.org"""'}), "(host='images.linuxcontainers.org')\n", (940, 975), False, 'from pylxd import api\n'), ((1266, 1275), 'pylxd.api.API', 'api.API', ([], {}), '()\n', (1273, 1275), False, 'from pylxd import api\n')]
|
"""create_words_and_results_tables
Revision ID: 71d46639309e
Revises:
Create Date: 2022-03-29 15:45:02.382574
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "71d46639309e"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"words",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=True),
sa.Column("word", sa.String(length=5, collation="NOCASE"), nullable=False),
sa.Column("enabled", sa.Boolean(), nullable=True),
sa.Column("nsfw", sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint("id", name=op.f("pk_words")),
sa.UniqueConstraint("word", name=op.f("uq_words_word")),
)
op.create_table(
"results",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=True),
sa.Column("word_id", sa.Integer(), nullable=True),
sa.Column("guessed_in_run", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["word_id"],
["words.id"],
name=op.f("fk_results_word_id_words"),
ondelete="cascade",
),
sa.PrimaryKeyConstraint("id", name=op.f("pk_results")),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("results")
op.drop_table("words")
# ### end Alembic commands ###
|
[
"alembic.op.drop_table",
"sqlalchemy.DateTime",
"alembic.op.f",
"sqlalchemy.Boolean",
"sqlalchemy.String",
"sqlalchemy.Integer"
] |
[((1517, 1541), 'alembic.op.drop_table', 'op.drop_table', (['"""results"""'], {}), "('results')\n", (1530, 1541), False, 'from alembic import op\n'), ((1546, 1568), 'alembic.op.drop_table', 'op.drop_table', (['"""words"""'], {}), "('words')\n", (1559, 1568), False, 'from alembic import op\n'), ((436, 448), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (446, 448), True, 'import sqlalchemy as sa\n'), ((499, 512), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (510, 512), True, 'import sqlalchemy as sa\n'), ((556, 595), 'sqlalchemy.String', 'sa.String', ([], {'length': '(5)', 'collation': '"""NOCASE"""'}), "(length=5, collation='NOCASE')\n", (565, 595), True, 'import sqlalchemy as sa\n'), ((643, 655), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (653, 655), True, 'import sqlalchemy as sa\n'), ((699, 711), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (709, 711), True, 'import sqlalchemy as sa\n'), ((926, 938), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (936, 938), True, 'import sqlalchemy as sa\n'), ((989, 1002), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1000, 1002), True, 'import sqlalchemy as sa\n'), ((1049, 1061), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1059, 1061), True, 'import sqlalchemy as sa\n'), ((1115, 1127), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1125, 1127), True, 'import sqlalchemy as sa\n'), ((772, 788), 'alembic.op.f', 'op.f', (['"""pk_words"""'], {}), "('pk_words')\n", (776, 788), False, 'from alembic import op\n'), ((832, 853), 'alembic.op.f', 'op.f', (['"""uq_words_word"""'], {}), "('uq_words_word')\n", (836, 853), False, 'from alembic import op\n'), ((1246, 1278), 'alembic.op.f', 'op.f', (['"""fk_results_word_id_words"""'], {}), "('fk_results_word_id_words')\n", (1250, 1278), False, 'from alembic import op\n'), ((1366, 1384), 'alembic.op.f', 'op.f', (['"""pk_results"""'], {}), "('pk_results')\n", (1370, 1384), False, 'from alembic import op\n')]
|
# -*- coding: utf-8 -*-
'''
Code based on the python-oletools package by <NAME> 2012-10-18
http://www.decalage.info/python/oletools
'''
import os
import tempfile
from viper.common.abstracts import Module
from viper.core.session import __sessions__
try:
from oletools.rtfobj import RtfObjParser
from oletools import oleobj
HAVE_RTF = True
except ImportError:
HAVE_RTF = False
class Rtf(Module):
cmd = 'rtf'
description = 'RTF Parser'
authors = ['xorhex']
categories = ["document"]
def __init__(self):
super(Rtf, self).__init__()
self.parser.add_argument('-l', "--list", action='store_true', help='List of ')
self.parser.add_argument('-s', "--save", metavar='item_index', help='Save object')
def parse_rtf(self, filename, data):
'''
        The bulk of this function is taken from python-oletools: https://github.com/decalage2/oletools/blob/master/oletools/rtfobj.py
See link for license
'''
self.log('success', 'File: {name} - size: {size} bytes'.format(name=filename, size=hex(len(data))))
table = []
h = ['id', 'index', 'OLE Object']
rtfp = RtfObjParser(data)
rtfp.parse()
for rtfobj in rtfp.objects:
row = []
obj_col = []
if rtfobj.is_ole:
obj_col.append('format_id: {id} '.format(id=rtfobj.format_id))
if rtfobj.format_id == oleobj.OleObject.TYPE_EMBEDDED:
obj_col.append('(Embedded)')
elif rtfobj.format_id == oleobj.OleObject.TYPE_LINKED:
obj_col.append('(Linked)')
else:
obj_col.append('(Unknown)')
obj_col.append('class name: {cls}'.format(cls=rtfobj.class_name))
# if the object is linked and not embedded, data_size=None:
if rtfobj.oledata_size is None:
obj_col.append('data size: N/A')
else:
obj_col.append('data size: %d' % rtfobj.oledata_size)
if rtfobj.is_package:
obj_col.append('OLE Package object:')
obj_col.append('Filename: {name}'.format(name=rtfobj.filename))
obj_col.append('Source path: {path}'.format(path=rtfobj.src_path))
obj_col.append('Temp path = {path}'.format(path=rtfobj.temp_path))
obj_col.append('MD5 = {md5}'.format(md5=rtfobj.olepkgdata_md5))
# check if the file extension is executable:
_, temp_ext = os.path.splitext(rtfobj.temp_path)
self.log('debug', 'Temp path extension: {ext}'.format(ext=temp_ext))
_, file_ext = os.path.splitext(rtfobj.filename)
self.log('debug', 'File extension: %r' % file_ext)
if temp_ext != file_ext:
obj_col.append("MODIFIED FILE EXTENSION")
else:
obj_col.append('MD5 = {md5}'.format(md5=rtfobj.oledata_md5))
if rtfobj.clsid is not None:
obj_col.append('CLSID: {clsid}'.format(clsid=rtfobj.clsid))
obj_col.append(rtfobj.clsid_desc)
# Detect OLE2Link exploit
# http://www.kb.cert.org/vuls/id/921560
if rtfobj.class_name == b'OLE2Link':
obj_col.append('Possibly an exploit for the OLE2Link vulnerability (VU#921560, CVE-2017-0199)')
# Detect Equation Editor exploit
# https://www.kb.cert.org/vuls/id/421280/
elif rtfobj.class_name.lower() == b'equation.3':
obj_col.append('Possibly an exploit for the Equation Editor vulnerability (VU#421280, CVE-2017-11882)')
else:
obj_col.append('Not a well-formed OLE object')
row.append(rtfp.objects.index(rtfobj))
row.append('%08Xh' % rtfobj.start)
row.append('\n'.join(obj_col))
table.append(row)
self.log('table', dict(rows=table, header=h))
def list(self):
self.parse_rtf(__sessions__.current.file.name, __sessions__.current.file.data)
def save_ole_objects(self, data, save_object, filename):
'''
        The bulk of this function is taken from python-oletools: https://github.com/decalage2/oletools/blob/master/oletools/rtfobj.py
See link for license
'''
rtfp = RtfObjParser(data)
rtfp.parse()
try:
i = int(save_object)
objects = [rtfp.objects[i]]
except Exception as ex:
self.log('error', 'The -s option must be followed by an object index, such as "-s 2"\n{ex}'.format(ex=ex))
return
for rtfobj in objects:
i = objects.index(rtfobj)
tmp = tempfile.NamedTemporaryFile(delete=False)
if rtfobj.is_package:
self.log('info', 'Saving file from OLE Package in object #%d:' % i)
self.log('info', ' Filename = %r' % rtfobj.filename)
self.log('info', ' Source path = %r' % rtfobj.src_path)
self.log('info', ' Temp path = %r' % rtfobj.temp_path)
self.log('info', ' saving to file %s' % tmp.name)
self.log('info', ' md5 %s' % rtfobj.olepkgdata_md5)
tmp.write(rtfobj.olepkgdata)
tmp.close()
# When format_id=TYPE_LINKED, oledata_size=None
elif rtfobj.is_ole and rtfobj.oledata_size is not None:
self.log('info', 'Saving file embedded in OLE object #%d:' % i)
self.log('info', ' format_id = %d' % rtfobj.format_id)
self.log('info', ' class name = %r' % rtfobj.class_name)
self.log('info', ' data size = %d' % rtfobj.oledata_size)
# set a file extension according to the class name:
self.log('info', ' saving to file %s' % tmp.name)
self.log('info', ' md5 %s' % rtfobj.oledata_md5)
tmp.write(rtfobj.oledata)
tmp.close()
else:
self.log('info', 'Saving raw data in object #%d:' % i)
self.log('info', ' saving object to file %s' % tmp.name)
self.log('info', ' md5 %s' % rtfobj.rawdata_md5)
tmp.write(rtfobj.rawdata)
tmp.close()
if not save_object == 'all':
__sessions__.new(tmp.name)
def save(self, idx):
self.save_ole_objects(__sessions__.current.file.data, idx, __sessions__.current.file.name)
# Main starts here
def run(self):
super(Rtf, self).run()
if self.args is None:
return
if not __sessions__.is_set():
self.log('error', 'No open session. This command expects a file to be open.')
return
if not HAVE_RTF:
            self.log('error', 'Missing dependency. Install oletools (pip install oletools)')
return
if self.args.list:
self.list()
elif self.args.save:
self.save(self.args.save)
else:
self.parser.print_usage()
|
[
"tempfile.NamedTemporaryFile",
"viper.core.session.__sessions__.new",
"viper.core.session.__sessions__.is_set",
"os.path.splitext",
"oletools.rtfobj.RtfObjParser"
] |
[((1175, 1193), 'oletools.rtfobj.RtfObjParser', 'RtfObjParser', (['data'], {}), '(data)\n', (1187, 1193), False, 'from oletools.rtfobj import RtfObjParser\n'), ((4512, 4530), 'oletools.rtfobj.RtfObjParser', 'RtfObjParser', (['data'], {}), '(data)\n', (4524, 4530), False, 'from oletools.rtfobj import RtfObjParser\n'), ((4896, 4937), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (4923, 4937), False, 'import tempfile\n'), ((6531, 6557), 'viper.core.session.__sessions__.new', '__sessions__.new', (['tmp.name'], {}), '(tmp.name)\n', (6547, 6557), False, 'from viper.core.session import __sessions__\n'), ((6821, 6842), 'viper.core.session.__sessions__.is_set', '__sessions__.is_set', ([], {}), '()\n', (6840, 6842), False, 'from viper.core.session import __sessions__\n'), ((2607, 2641), 'os.path.splitext', 'os.path.splitext', (['rtfobj.temp_path'], {}), '(rtfobj.temp_path)\n', (2623, 2641), False, 'import os\n'), ((2765, 2798), 'os.path.splitext', 'os.path.splitext', (['rtfobj.filename'], {}), '(rtfobj.filename)\n', (2781, 2798), False, 'import os\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Process utilities.
"""
import os
def append_to_path(*args, **kwargs):
"""
Appends one or more paths to the system path of an environment.
The environment will be that of the current process unless another is passed using the
'env' keyword argument.
:param args: paths to append
:param kwargs: 'env' may be used to pass a custom environment to use
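    Illustrative example (hypothetical path, custom env dict):
        env = {}
        append_to_path('/opt/tools/bin', env=env)
        # env['PATH'] == '/opt/tools/bin' + os.pathsep + ''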
"""
_append_to_path('PATH', *args, **kwargs)
def append_to_pythonpath(*args, **kwargs):
"""
Appends one or more paths to the python path of an environment.
The environment will be that of the current process unless another is passed using the
'env' keyword argument.
:param args: paths to append
:param kwargs: 'env' may be used to pass a custom environment to use
"""
_append_to_path('PYTHONPATH', *args, **kwargs)
def _append_to_path(path, *args, **kwargs):
env = kwargs.get('env') or os.environ
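    # Note: the format string below puts the new paths *before* any existing
    # value, joined by os.pathsep, so "append" is effectively a prepend.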
env[path] = '{0}{1}{2}'.format(
os.pathsep.join(args),
os.pathsep,
env.get(path, '')
)
|
[
"os.pathsep.join"
] |
[((1750, 1771), 'os.pathsep.join', 'os.pathsep.join', (['args'], {}), '(args)\n', (1765, 1771), False, 'import os\n')]
|
import sys
PRINT_BEEJ = 1
HALT = 2
PRINT_NUM = 3
SAVE = 4
PRINT_REGISTER = 5
ADD = 6
'''
SAVE takes 2 arguments:
it saves the value [ARG1] to register [ARG2]
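An illustrative program in the input format read by load_memory below
(one number per line, '#' starts a comment):
    4   # SAVE
    42  # the value 42...
    2   # ...into register 2
    5   # PRINT_REGISTER
    2   # print register 2
    2   # HALT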
'''
register = [0] * 8
memory = [0] * 128 # 128 bytes of RAM
def load_memory(filename):
try:
address = 0
with open(filename) as f:
for line in f:
# Split before and after any comment symbols
comment_split = line.split("#")
num = comment_split[0].strip()
# Ignore blanks
if num == "":
continue
value = int(num)
memory[address] = value
address += 1
except FileNotFoundError:
print(f"{sys.argv[0]}: {sys.argv[1]} not found")
sys.exit(2)
if len(sys.argv) != 2:
print("usage: simple.py <filename>", file=sys.stderr)
sys.exit(1)
filepath = sys.argv[1]
load_memory(filepath)
pc = 0
running = True
while running:
command = memory[pc]
if command == PRINT_BEEJ:
print("Beej!")
pc += 1
elif command == PRINT_NUM:
num = memory[pc + 1]
print(num)
pc += 2
elif command == SAVE:
num = memory[pc + 1]
reg = memory[pc + 2]
register[reg] = num
pc += 3
elif command == PRINT_REGISTER:
reg = memory[pc + 1]
print(register[reg])
pc += 2
elif command == ADD:
reg_a = memory[pc + 1]
reg_b = memory[pc + 2]
register[reg_a] += register[reg_b]
pc += 3
elif command == HALT:
running = False
pc += 1
else:
print(f"Unknown instruction: {command}")
sys.exit(1)
|
[
"sys.exit"
] |
[((926, 937), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (934, 937), False, 'import sys\n'), ((827, 838), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (835, 838), False, 'import sys\n'), ((1641, 1652), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1649, 1652), False, 'import sys\n')]
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{
"label":_("Juggling Management"),
"items": [
{
"type": "doctype",
"name": "Jugglingtrick_juggling_management",
"label": _("Jugglingtrick"),
"description": _("Description of Jugglingtricks"),
},
{
"type": "doctype",
"name": "Routine_juggling_management",
"label": _("Routine"),
"description": _("Description of Lists"),
},
{
"type": "doctype",
"name": "Category_juggling_management",
"label": _("Category"),
"description": _("Description of Categories"),
},
{
"type": "doctype",
"name": "Prop_juggling_management",
"label": _("Prop"),
"description": _("Description of Props"),
}
]
}
]
|
[
"frappe._"
] |
[((115, 139), 'frappe._', '_', (['"""Juggling Management"""'], {}), "('Juggling Management')\n", (116, 139), False, 'from frappe import _\n'), ((312, 330), 'frappe._', '_', (['"""Jugglingtrick"""'], {}), "('Jugglingtrick')\n", (313, 330), False, 'from frappe import _\n'), ((361, 395), 'frappe._', '_', (['"""Description of Jugglingtricks"""'], {}), "('Description of Jugglingtricks')\n", (362, 395), False, 'from frappe import _\n'), ((543, 555), 'frappe._', '_', (['"""Routine"""'], {}), "('Routine')\n", (544, 555), False, 'from frappe import _\n'), ((586, 611), 'frappe._', '_', (['"""Description of Lists"""'], {}), "('Description of Lists')\n", (587, 611), False, 'from frappe import _\n'), ((775, 788), 'frappe._', '_', (['"""Category"""'], {}), "('Category')\n", (776, 788), False, 'from frappe import _\n'), ((819, 849), 'frappe._', '_', (['"""Description of Categories"""'], {}), "('Description of Categories')\n", (820, 849), False, 'from frappe import _\n'), ((1009, 1018), 'frappe._', '_', (['"""Prop"""'], {}), "('Prop')\n", (1010, 1018), False, 'from frappe import _\n'), ((1049, 1074), 'frappe._', '_', (['"""Description of Props"""'], {}), "('Description of Props')\n", (1050, 1074), False, 'from frappe import _\n')]
|
import pytest
from protean import Domain
from protean.adapters.event_store.message_db import MessageDBStore
from protean.exceptions import ConfigurationError
@pytest.mark.message_db
class TestMessageDBEventStore:
def test_retrieving_message_store_from_domain(self, test_domain):
assert test_domain.event_store is not None
assert test_domain.event_store.store is not None
assert isinstance(test_domain.event_store.store, MessageDBStore)
def test_error_on_message_db_initialization(self):
domain = Domain()
domain.config["EVENT_STORE"][
"PROVIDER"
] = "protean.adapters.event_store.message_db.MessageDBStore"
domain.config["EVENT_STORE"][
"DATABASE_URI"
] = "postgresql://message_store@localhost:5433/dummy"
with pytest.raises(ConfigurationError) as exc:
domain.event_store.store._write(
"testStream-123", "Event1", {"foo": "bar"}, {"kind": "EVENT"}
)
assert 'FATAL: database "dummy" does not exist' in str(exc.value)
# Reset config value. # FIXME Config should be an argument to the domain
domain.config["EVENT_STORE"][
"PROVIDER"
] = "protean.adapters.event_store.memory.MemoryEventStore"
domain.config["EVENT_STORE"].pop("DATABASE_URI")
def test_write_to_event_store(self, test_domain):
position = test_domain.event_store.store._write(
"testStream-123", "Event1", {"foo": "bar"}
)
assert position == 0
def test_multiple_writes_to_event_store(self, test_domain):
for i in range(5):
position = test_domain.event_store.store._write(
"testStream-123", "Event1", {"foo": f"bar{i}"}
)
position = test_domain.event_store.store._write(
"testStream-123", "Event1", {"foo": "bar"}
)
assert position == 5
def test_reading_stream_message(self, test_domain):
test_domain.event_store.store._write("testStream-123", "Event1", {"foo": "bar"})
messages = test_domain.event_store.store._read("testStream-123")
assert len(messages) == 1
assert messages[0]["position"] == 0
assert messages[0]["data"] == {"foo": "bar"}
def test_reading_multiple_stream_messages(self, test_domain):
for i in range(5):
test_domain.event_store.store._write(
"testStream-123", "Event1", {"foo": f"bar{i}"}
)
messages = test_domain.event_store.store._read("testStream-123")
assert len(messages) == 5
assert messages[4]["data"] == {"foo": "bar4"}
def test_reading_category_message(self, test_domain):
test_domain.event_store.store._write("testStream-123", "Event1", {"foo": "bar"})
messages = test_domain.event_store.store._read("testStream")
assert len(messages) == 1
assert messages[0]["position"] == 0
assert messages[0]["data"] == {"foo": "bar"}
def test_reading_multiple_category_messages(self, test_domain):
for i in range(5):
test_domain.event_store.store._write(
"testStream-123", "Event1", {"foo": f"bar{i}"}
)
messages = test_domain.event_store.store._read("testStream")
assert len(messages) == 5
assert messages[4]["data"] == {"foo": "bar4"}
def test_reading_targeted_stream_messages(self, test_domain):
for i in range(5):
test_domain.event_store.store._write(
"testStream-123", "Event1", {"foo": f"bar{i}"}
)
for i in range(5):
test_domain.event_store.store._write(
"testStream-456", "Event1", {"foo": f"baz{i}"}
)
messages = test_domain.event_store.store._read("testStream-456")
assert len(messages) == 5
assert messages[4]["data"] == {"foo": "baz4"}
def test_read_last_message(self, test_domain):
for i in range(5):
test_domain.event_store.store._write(
"testStream-123", "Event1", {"foo": f"bar{i}"}
)
message = test_domain.event_store.store._read_last_message("testStream-123")
assert message["position"] == 4
def test_read_last_message_when_there_are_no_messages(self, test_domain):
message = test_domain.event_store.store._read_last_message("foo-bar")
assert message is None
|
[
"pytest.raises",
"protean.Domain"
] |
[((540, 548), 'protean.Domain', 'Domain', ([], {}), '()\n', (546, 548), False, 'from protean import Domain\n'), ((820, 853), 'pytest.raises', 'pytest.raises', (['ConfigurationError'], {}), '(ConfigurationError)\n', (833, 853), False, 'import pytest\n')]
|
# -*- test-case-name: pymeta.test.test_grammar -*-
"""
The definition of PyMeta's language is itself a PyMeta grammar, but something
has to be able to read that. Most of the code in this module is generated from
that grammar (in future versions, it will hopefully all be generated).
"""
import string
from pymeta.runtime import OMetaBase, ParseError, EOFError, expected
class BootOMetaGrammar(OMetaBase):
"""
The bootstrap grammar, generated from L{pymeta.grammar.OMetaGrammar} via
L{pymeta.builder.PythonBuilder}.
"""
globals = globals()
def __init__(self, input):
OMetaBase.__init__(self, input)
self._ruleNames = []
def parseGrammar(self, name, builder, *args):
"""
Entry point for converting a grammar to code (of some variety).
@param name: The name for this grammar.
@param builder: A class that implements the grammar-building interface
(interface to be explicitly defined later)
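        Illustrative usage, with grammarSource and builder as placeholders
        (a builder in the style of pymeta.builder.PythonBuilder):
            g = BootOMetaGrammar(grammarSource)
            result = g.parseGrammar('MyGrammar', builder)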
"""
self.builder = builder(name, self, *args)
res, err = self.apply("grammar")
try:
x = self.input.head()
except EOFError:
pass
else:
raise err
return res
def applicationArgs(self):
args = []
while True:
try:
(arg, endchar), err = self.pythonExpr(" >")
if not arg:
break
args.append(self.builder.expr(arg))
if endchar == '>':
break
except ParseError:
break
if args:
return args
else:
raise ParseError(self.input.position, expected("python expression"))
def ruleValueExpr(self):
(expr, endchar), err = self.pythonExpr(endChars="\r\n)]")
if str(endchar) in ")]":
self.input = self.input.prev()
return self.builder.expr(expr)
def semanticActionExpr(self):
return self.builder.action(self.pythonExpr(')')[0][0])
def semanticPredicateExpr(self):
expr = self.builder.expr(self.pythonExpr(')')[0][0])
return self.builder.pred(expr)
def eatWhitespace(self):
"""
Consume input until a non-whitespace character is reached.
"""
consumingComment = False
e = None
while True:
try:
c, e = self.input.head()
except EOFError:
break
t = self.input.tail()
if c.isspace() or consumingComment:
self.input = t
if c == '\n':
consumingComment = False
elif c == '#':
consumingComment = True
else:
break
return True, e
rule_spaces = eatWhitespace
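    # Editorial note: binding eatWhitespace to rule_spaces lets the generated
    # rule methods below consume whitespace through the generic machinery,
    # i.e. self._apply(self.rule_spaces, "spaces", []).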
def rule_number(self):
_locals = {'self': self}
self.locals['number'] = _locals
_G_apply_1, lastError = self._apply(self.rule_spaces, "spaces", [])
self.considerError(lastError)
def _G_or_2():
_G_exactly_1, lastError = self.exactly('-')
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_barenumber, "barenumber", [])
self.considerError(lastError)
_locals['x'] = _G_apply_2
_G_python_3, lastError = eval('self.builder.exactly(-x)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_3, self.currentError)
def _G_or_3():
_G_apply_1, lastError = self._apply(self.rule_barenumber, "barenumber", [])
self.considerError(lastError)
_locals['x'] = _G_apply_1
_G_python_2, lastError = eval('self.builder.exactly(x)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_2, self.currentError)
_G_or_4, lastError = self._or([_G_or_2, _G_or_3])
self.considerError(lastError)
return (_G_or_4, self.currentError)
def rule_barenumber(self):
_locals = {'self': self}
self.locals['barenumber'] = _locals
def _G_or_1():
_G_exactly_1, lastError = self.exactly('0')
self.considerError(lastError)
def _G_or_2():
def _G_or_1():
_G_exactly_1, lastError = self.exactly('x')
self.considerError(lastError)
return (_G_exactly_1, self.currentError)
def _G_or_2():
_G_exactly_1, lastError = self.exactly('X')
self.considerError(lastError)
return (_G_exactly_1, self.currentError)
_G_or_3, lastError = self._or([_G_or_1, _G_or_2])
self.considerError(lastError)
def _G_many_4():
_G_apply_1, lastError = self._apply(self.rule_hexdigit, "hexdigit", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
_G_many_5, lastError = self.many(_G_many_4)
self.considerError(lastError)
_locals['hs'] = _G_many_5
_G_python_6, lastError = eval("int(''.join(hs), 16)", self.globals, _locals), None
self.considerError(lastError)
return (_G_python_6, self.currentError)
def _G_or_3():
def _G_many_1():
_G_apply_1, lastError = self._apply(self.rule_octaldigit, "octaldigit", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
_G_many_2, lastError = self.many(_G_many_1)
self.considerError(lastError)
_locals['ds'] = _G_many_2
_G_python_3, lastError = eval("int('0'+''.join(ds), 8)", self.globals, _locals), None
self.considerError(lastError)
return (_G_python_3, self.currentError)
_G_or_4, lastError = self._or([_G_or_2, _G_or_3])
self.considerError(lastError)
return (_G_or_4, self.currentError)
def _G_or_2():
def _G_many1_1():
_G_apply_1, lastError = self._apply(self.rule_digit, "digit", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
_G_many1_2, lastError = self.many(_G_many1_1, _G_many1_1())
self.considerError(lastError)
_locals['ds'] = _G_many1_2
_G_python_3, lastError = eval("int(''.join(ds))", self.globals, _locals), None
self.considerError(lastError)
return (_G_python_3, self.currentError)
_G_or_3, lastError = self._or([_G_or_1, _G_or_2])
self.considerError(lastError)
return (_G_or_3, self.currentError)
def rule_octaldigit(self):
_locals = {'self': self}
self.locals['octaldigit'] = _locals
_G_apply_1, lastError = self._apply(self.rule_anything, "anything", [])
self.considerError(lastError)
_locals['x'] = _G_apply_1
def _G_pred_2():
_G_python_1, lastError = eval('x in string.octdigits', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_1, self.currentError)
_G_pred_3, lastError = self.pred(_G_pred_2)
self.considerError(lastError)
_G_python_4, lastError = eval('x', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_4, self.currentError)
def rule_hexdigit(self):
_locals = {'self': self}
self.locals['hexdigit'] = _locals
_G_apply_1, lastError = self._apply(self.rule_anything, "anything", [])
self.considerError(lastError)
_locals['x'] = _G_apply_1
def _G_pred_2():
_G_python_1, lastError = eval('x in string.hexdigits', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_1, self.currentError)
_G_pred_3, lastError = self.pred(_G_pred_2)
self.considerError(lastError)
_G_python_4, lastError = eval('x', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_4, self.currentError)
def rule_escapedChar(self):
_locals = {'self': self}
self.locals['escapedChar'] = _locals
_G_exactly_1, lastError = self.exactly('\\')
self.considerError(lastError)
def _G_or_2():
_G_exactly_1, lastError = self.exactly('n')
self.considerError(lastError)
_G_python_2, lastError = eval('"\\n"', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_2, self.currentError)
def _G_or_3():
_G_exactly_1, lastError = self.exactly('r')
self.considerError(lastError)
_G_python_2, lastError = eval('"\\r"', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_2, self.currentError)
def _G_or_4():
_G_exactly_1, lastError = self.exactly('t')
self.considerError(lastError)
_G_python_2, lastError = eval('"\\t"', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_2, self.currentError)
def _G_or_5():
_G_exactly_1, lastError = self.exactly('b')
self.considerError(lastError)
_G_python_2, lastError = eval('"\\b"', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_2, self.currentError)
def _G_or_6():
_G_exactly_1, lastError = self.exactly('f')
self.considerError(lastError)
_G_python_2, lastError = eval('"\\f"', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_2, self.currentError)
def _G_or_7():
_G_exactly_1, lastError = self.exactly('"')
self.considerError(lastError)
_G_python_2, lastError = eval('\'"\'', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_2, self.currentError)
def _G_or_8():
_G_exactly_1, lastError = self.exactly("'")
self.considerError(lastError)
_G_python_2, lastError = eval('"\'"', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_2, self.currentError)
def _G_or_9():
_G_exactly_1, lastError = self.exactly('\\')
self.considerError(lastError)
_G_python_2, lastError = eval('"\\\\"', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_2, self.currentError)
_G_or_10, lastError = self._or([_G_or_2, _G_or_3, _G_or_4, _G_or_5, _G_or_6, _G_or_7, _G_or_8, _G_or_9])
self.considerError(lastError)
return (_G_or_10, self.currentError)
def rule_character(self):
_locals = {'self': self}
self.locals['character'] = _locals
_G_python_1, lastError = eval('"\'"', self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
def _G_or_3():
_G_apply_1, lastError = self._apply(self.rule_escapedChar, "escapedChar", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
def _G_or_4():
_G_apply_1, lastError = self._apply(self.rule_anything, "anything", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
_G_or_5, lastError = self._or([_G_or_3, _G_or_4])
self.considerError(lastError)
_locals['c'] = _G_or_5
_G_python_6, lastError = eval('"\'"', self.globals, _locals), None
self.considerError(lastError)
_G_apply_7, lastError = self._apply(self.rule_token, "token", [_G_python_6])
self.considerError(lastError)
_G_python_8, lastError = eval('self.builder.exactly(c)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_8, self.currentError)
def rule_string(self):
_locals = {'self': self}
self.locals['string'] = _locals
_G_python_1, lastError = eval('\'"\'', self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
def _G_many_3():
def _G_or_1():
_G_apply_1, lastError = self._apply(self.rule_escapedChar, "escapedChar", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
def _G_or_2():
def _G_not_1():
_G_exactly_1, lastError = self.exactly('"')
self.considerError(lastError)
return (_G_exactly_1, self.currentError)
_G_not_2, lastError = self._not(_G_not_1)
self.considerError(lastError)
_G_apply_3, lastError = self._apply(self.rule_anything, "anything", [])
self.considerError(lastError)
return (_G_apply_3, self.currentError)
_G_or_3, lastError = self._or([_G_or_1, _G_or_2])
self.considerError(lastError)
return (_G_or_3, self.currentError)
_G_many_4, lastError = self.many(_G_many_3)
self.considerError(lastError)
_locals['c'] = _G_many_4
_G_python_5, lastError = eval('\'"\'', self.globals, _locals), None
self.considerError(lastError)
_G_apply_6, lastError = self._apply(self.rule_token, "token", [_G_python_5])
self.considerError(lastError)
_G_python_7, lastError = eval("self.builder.exactly(''.join(c))", self.globals, _locals), None
self.considerError(lastError)
return (_G_python_7, self.currentError)
def rule_name(self):
_locals = {'self': self}
self.locals['name'] = _locals
_G_apply_1, lastError = self._apply(self.rule_letter, "letter", [])
self.considerError(lastError)
_locals['x'] = _G_apply_1
def _G_many_2():
_G_apply_1, lastError = self._apply(self.rule_letterOrDigit, "letterOrDigit", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
_G_many_3, lastError = self.many(_G_many_2)
self.considerError(lastError)
_locals['xs'] = _G_many_3
_G_python_4, lastError = eval('xs.insert(0, x)', self.globals, _locals), None
self.considerError(lastError)
_G_python_5, lastError = eval("''.join(xs)", self.globals, _locals), None
self.considerError(lastError)
return (_G_python_5, self.currentError)
def rule_application(self):
_locals = {'self': self}
self.locals['application'] = _locals
_G_python_1, lastError = eval("'<'", self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
_G_apply_3, lastError = self._apply(self.rule_spaces, "spaces", [])
self.considerError(lastError)
_G_apply_4, lastError = self._apply(self.rule_name, "name", [])
self.considerError(lastError)
_locals['name'] = _G_apply_4
def _G_or_5():
_G_exactly_1, lastError = self.exactly(' ')
self.considerError(lastError)
_G_python_2, lastError = eval('self.applicationArgs()', self.globals, _locals), None
self.considerError(lastError)
_locals['args'] = _G_python_2
_G_python_3, lastError = eval('self.builder.apply(name, self.name, *args)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_3, self.currentError)
def _G_or_6():
_G_python_1, lastError = eval("'>'", self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
_G_python_3, lastError = eval('self.builder.apply(name, self.name)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_3, self.currentError)
_G_or_7, lastError = self._or([_G_or_5, _G_or_6])
self.considerError(lastError)
return (_G_or_7, self.currentError)
def rule_expr1(self):
_locals = {'self': self}
self.locals['expr1'] = _locals
def _G_or_1():
_G_apply_1, lastError = self._apply(self.rule_application, "application", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
def _G_or_2():
_G_apply_1, lastError = self._apply(self.rule_ruleValue, "ruleValue", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
def _G_or_3():
_G_apply_1, lastError = self._apply(self.rule_semanticPredicate, "semanticPredicate", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
def _G_or_4():
_G_apply_1, lastError = self._apply(self.rule_semanticAction, "semanticAction", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
def _G_or_5():
_G_apply_1, lastError = self._apply(self.rule_number, "number", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
def _G_or_6():
_G_apply_1, lastError = self._apply(self.rule_character, "character", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
def _G_or_7():
_G_apply_1, lastError = self._apply(self.rule_string, "string", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
def _G_or_8():
_G_python_1, lastError = eval("'('", self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
_G_apply_3, lastError = self._apply(self.rule_expr, "expr", [])
self.considerError(lastError)
_locals['e'] = _G_apply_3
_G_python_4, lastError = eval("')'", self.globals, _locals), None
self.considerError(lastError)
_G_apply_5, lastError = self._apply(self.rule_token, "token", [_G_python_4])
self.considerError(lastError)
_G_python_6, lastError = eval('e', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_6, self.currentError)
def _G_or_9():
_G_python_1, lastError = eval("'['", self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
_G_apply_3, lastError = self._apply(self.rule_expr, "expr", [])
self.considerError(lastError)
_locals['e'] = _G_apply_3
_G_python_4, lastError = eval("']'", self.globals, _locals), None
self.considerError(lastError)
_G_apply_5, lastError = self._apply(self.rule_token, "token", [_G_python_4])
self.considerError(lastError)
_G_python_6, lastError = eval('self.builder.listpattern(e)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_6, self.currentError)
_G_or_10, lastError = self._or([_G_or_1, _G_or_2, _G_or_3, _G_or_4, _G_or_5, _G_or_6, _G_or_7, _G_or_8, _G_or_9])
self.considerError(lastError)
return (_G_or_10, self.currentError)
def rule_expr2(self):
_locals = {'self': self}
self.locals['expr2'] = _locals
def _G_or_1():
_G_python_1, lastError = eval("'~'", self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
def _G_or_3():
_G_python_1, lastError = eval("'~'", self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
_G_apply_3, lastError = self._apply(self.rule_expr2, "expr2", [])
self.considerError(lastError)
_locals['e'] = _G_apply_3
_G_python_4, lastError = eval('self.builder.lookahead(e)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_4, self.currentError)
def _G_or_4():
_G_apply_1, lastError = self._apply(self.rule_expr2, "expr2", [])
self.considerError(lastError)
_locals['e'] = _G_apply_1
_G_python_2, lastError = eval('self.builder._not(e)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_2, self.currentError)
_G_or_5, lastError = self._or([_G_or_3, _G_or_4])
self.considerError(lastError)
return (_G_or_5, self.currentError)
def _G_or_2():
_G_apply_1, lastError = self._apply(self.rule_expr1, "expr1", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
_G_or_3, lastError = self._or([_G_or_1, _G_or_2])
self.considerError(lastError)
return (_G_or_3, self.currentError)
def rule_expr3(self):
_locals = {'self': self}
self.locals['expr3'] = _locals
def _G_or_1():
_G_apply_1, lastError = self._apply(self.rule_expr2, "expr2", [])
self.considerError(lastError)
_locals['e'] = _G_apply_1
def _G_or_2():
_G_exactly_1, lastError = self.exactly('*')
self.considerError(lastError)
_G_python_2, lastError = eval('self.builder.many(e)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_2, self.currentError)
def _G_or_3():
_G_exactly_1, lastError = self.exactly('+')
self.considerError(lastError)
_G_python_2, lastError = eval('self.builder.many1(e)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_2, self.currentError)
def _G_or_4():
_G_exactly_1, lastError = self.exactly('?')
self.considerError(lastError)
_G_python_2, lastError = eval('self.builder.optional(e)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_2, self.currentError)
def _G_or_5():
_G_python_1, lastError = eval('e', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_1, self.currentError)
_G_or_6, lastError = self._or([_G_or_2, _G_or_3, _G_or_4, _G_or_5])
self.considerError(lastError)
_locals['r'] = _G_or_6
def _G_or_7():
_G_exactly_1, lastError = self.exactly(':')
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_name, "name", [])
self.considerError(lastError)
_locals['n'] = _G_apply_2
_G_python_3, lastError = eval('self.builder.bind(r, n)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_3, self.currentError)
def _G_or_8():
_G_python_1, lastError = eval('r', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_1, self.currentError)
_G_or_9, lastError = self._or([_G_or_7, _G_or_8])
self.considerError(lastError)
return (_G_or_9, self.currentError)
def _G_or_2():
_G_python_1, lastError = eval("':'", self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
_G_apply_3, lastError = self._apply(self.rule_name, "name", [])
self.considerError(lastError)
_locals['n'] = _G_apply_3
_G_python_4, lastError = eval('self.builder.bind(self.builder.apply("anything", self.name), n)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_4, self.currentError)
_G_or_3, lastError = self._or([_G_or_1, _G_or_2])
self.considerError(lastError)
return (_G_or_3, self.currentError)
def rule_expr4(self):
_locals = {'self': self}
self.locals['expr4'] = _locals
def _G_many_1():
_G_apply_1, lastError = self._apply(self.rule_expr3, "expr3", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
_G_many_2, lastError = self.many(_G_many_1)
self.considerError(lastError)
_locals['es'] = _G_many_2
_G_python_3, lastError = eval('self.builder.sequence(es)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_3, self.currentError)
def rule_expr(self):
_locals = {'self': self}
self.locals['expr'] = _locals
_G_apply_1, lastError = self._apply(self.rule_expr4, "expr4", [])
self.considerError(lastError)
_locals['e'] = _G_apply_1
def _G_many_2():
_G_python_1, lastError = eval("'|'", self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
_G_apply_3, lastError = self._apply(self.rule_expr4, "expr4", [])
self.considerError(lastError)
return (_G_apply_3, self.currentError)
_G_many_3, lastError = self.many(_G_many_2)
self.considerError(lastError)
_locals['es'] = _G_many_3
_G_python_4, lastError = eval('es.insert(0, e)', self.globals, _locals), None
self.considerError(lastError)
_G_python_5, lastError = eval('self.builder._or(es)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_5, self.currentError)
def rule_ruleValue(self):
_locals = {'self': self}
self.locals['ruleValue'] = _locals
_G_python_1, lastError = eval('"=>"', self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
_G_python_3, lastError = eval('self.ruleValueExpr()', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_3, self.currentError)
def rule_semanticPredicate(self):
_locals = {'self': self}
self.locals['semanticPredicate'] = _locals
_G_python_1, lastError = eval('"?("', self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
_G_python_3, lastError = eval('self.semanticPredicateExpr()', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_3, self.currentError)
def rule_semanticAction(self):
_locals = {'self': self}
self.locals['semanticAction'] = _locals
_G_python_1, lastError = eval('"!("', self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
_G_python_3, lastError = eval('self.semanticActionExpr()', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_3, self.currentError)
def rule_rulePart(self):
_locals = {'self': self}
self.locals['rulePart'] = _locals
_G_apply_1, lastError = self._apply(self.rule_anything, "anything", [])
self.considerError(lastError)
_locals['requiredName'] = _G_apply_1
_G_apply_2, lastError = self._apply(self.rule_spaces, "spaces", [])
self.considerError(lastError)
_G_apply_3, lastError = self._apply(self.rule_name, "name", [])
self.considerError(lastError)
_locals['n'] = _G_apply_3
def _G_pred_4():
_G_python_1, lastError = eval('n == requiredName', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_1, self.currentError)
_G_pred_5, lastError = self.pred(_G_pred_4)
self.considerError(lastError)
_G_python_6, lastError = eval('setattr(self, "name", n)', self.globals, _locals), None
self.considerError(lastError)
_G_apply_7, lastError = self._apply(self.rule_expr4, "expr4", [])
self.considerError(lastError)
_locals['args'] = _G_apply_7
def _G_or_8():
_G_python_1, lastError = eval('"::="', self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_token, "token", [_G_python_1])
self.considerError(lastError)
_G_apply_3, lastError = self._apply(self.rule_expr, "expr", [])
self.considerError(lastError)
_locals['e'] = _G_apply_3
_G_python_4, lastError = eval('self.builder.sequence([args, e])', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_4, self.currentError)
def _G_or_9():
_G_python_1, lastError = eval('args', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_1, self.currentError)
_G_or_10, lastError = self._or([_G_or_8, _G_or_9])
self.considerError(lastError)
return (_G_or_10, self.currentError)
def rule_rule(self):
_locals = {'self': self}
self.locals['rule'] = _locals
_G_apply_1, lastError = self._apply(self.rule_spaces, "spaces", [])
self.considerError(lastError)
def _G_lookahead_2():
_G_apply_1, lastError = self._apply(self.rule_name, "name", [])
self.considerError(lastError)
_locals['n'] = _G_apply_1
return (_locals['n'], self.currentError)
_G_lookahead_3, lastError = self.lookahead(_G_lookahead_2)
self.considerError(lastError)
_G_python_4, lastError = eval('n', self.globals, _locals), None
self.considerError(lastError)
_G_apply_5, lastError = self._apply(self.rule_rulePart, "rulePart", [_G_python_4])
self.considerError(lastError)
_locals['r'] = _G_apply_5
def _G_or_6():
def _G_many1_1():
_G_python_1, lastError = eval('n', self.globals, _locals), None
self.considerError(lastError)
_G_apply_2, lastError = self._apply(self.rule_rulePart, "rulePart", [_G_python_1])
self.considerError(lastError)
return (_G_apply_2, self.currentError)
_G_many1_2, lastError = self.many(_G_many1_1, _G_many1_1())
self.considerError(lastError)
_locals['rs'] = _G_many1_2
_G_python_3, lastError = eval('self.builder.rule(n, self.builder._or([r] + rs))', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_3, self.currentError)
def _G_or_7():
_G_python_1, lastError = eval('self.builder.rule(n, r)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_1, self.currentError)
_G_or_8, lastError = self._or([_G_or_6, _G_or_7])
self.considerError(lastError)
return (_G_or_8, self.currentError)
def rule_grammar(self):
_locals = {'self': self}
self.locals['grammar'] = _locals
def _G_many_1():
_G_apply_1, lastError = self._apply(self.rule_rule, "rule", [])
self.considerError(lastError)
return (_G_apply_1, self.currentError)
_G_many_2, lastError = self.many(_G_many_1)
self.considerError(lastError)
_locals['rs'] = _G_many_2
_G_apply_3, lastError = self._apply(self.rule_spaces, "spaces", [])
self.considerError(lastError)
_G_python_4, lastError = eval('self.builder.makeGrammar(rs)', self.globals, _locals), None
self.considerError(lastError)
return (_G_python_4, self.currentError)
|
[
"pymeta.runtime.expected",
"pymeta.runtime.OMetaBase.__init__"
] |
[((601, 632), 'pymeta.runtime.OMetaBase.__init__', 'OMetaBase.__init__', (['self', 'input'], {}), '(self, input)\n', (619, 632), False, 'from pymeta.runtime import OMetaBase, ParseError, EOFError, expected\n'), ((1698, 1727), 'pymeta.runtime.expected', 'expected', (['"""python expression"""'], {}), "('python expression')\n", (1706, 1727), False, 'from pymeta.runtime import OMetaBase, ParseError, EOFError, expected\n')]
|
import frappe
def after_submit(self, method):
    """Doc-event hook: record this document as the sales invoice on its linked Nanak Pick List."""
if self.picklist_reference:
frappe.db.set_value("Nanak Pick List", self.picklist_reference, "sales_invoice", self.name)
frappe.db.set_value("Nanak Pick List", self.picklist_reference, "sales_invoice_status", "Created")
|
[
"frappe.db.set_value"
] |
[((90, 185), 'frappe.db.set_value', 'frappe.db.set_value', (['"""Nanak Pick List"""', 'self.picklist_reference', '"""sales_invoice"""', 'self.name'], {}), "('Nanak Pick List', self.picklist_reference,\n 'sales_invoice', self.name)\n", (109, 185), False, 'import frappe\n'), ((191, 293), 'frappe.db.set_value', 'frappe.db.set_value', (['"""Nanak Pick List"""', 'self.picklist_reference', '"""sales_invoice_status"""', '"""Created"""'], {}), "('Nanak Pick List', self.picklist_reference,\n 'sales_invoice_status', 'Created')\n", (210, 293), False, 'import frappe\n')]
|
import json
from pathlib import Path
from typing import Dict
from countdata import CountData
from drawingdata import DrawingDataList
class TransectData:
"""
Manages transect save data in a primitive
data state, such that it can be easily
serialized.
"""
def __init__(self, transectData: Dict[str, Dict[str, list]], fp: Path):
"""
{
'ImageName.jpg':
{
"drawings":
[
DrawingData1.toDict(),
DrawingData2.toDict(),
...
]
}
}
Include `fp` for traceability.
"""
self._transectData: Dict[str, Dict[str, list]] = transectData
self.fp = fp
@staticmethod
def load(fp):
"""
        Loads a serialized file. If the data cannot be decoded,
        the save data is initialized with a blank dict.
"""
try:
with open(fp, "r") as f:
data = json.load(f)
except json.decoder.JSONDecodeError:
print(
f"Badly formed JSON file. Data will be overwritten when file is saved: {fp}"
)
data = {}
return TransectData(data, fp)
def dump(self, fp):
"""
Serialize save data and save to specified path.
Writes this data on top of already existing data.
"""
with open(fp, "w") as f:
json.dump(self._transectData, f, indent=4)
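    # Editorial round-trip sketch (not from the original source), with a
    # hypothetical file path; load() falls back to an empty dict when the
    # JSON on disk is malformed:
    #
    #   data = TransectData.load(Path("transect.json"))
    #   data.addImage("ImageName.jpg")
    #   data.dump(data.fp)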
def addImage(self, imageName):
"""
Ensure that an image with the name
imageName is in this save data.
"""
if imageName not in self._transectData.keys():
self._transectData[imageName] = {}
def addDrawings(self, imageName, drawings: DrawingDataList):
"""
Add drawings (for a specific image) to the
save data. This will replace any drawings currently
in this save data instance associated with this
image.
"""
# Ensure image name is present
self.addImage(imageName)
# Add these drawings the image dict
self._transectData[imageName]["drawings"] = drawings.toDict()
def removeDrawings(self, imageName: str):
"""
Remove the drawings associated with an image.
"""
if imageName in self._transectData.keys():
try:
self._transectData[imageName].pop("drawings")
            # This data might not have been saved yet
except KeyError:
pass
def imageHasDrawings(self, imageName: str, otherDrawings: DrawingDataList):
"""
Compares the drawings associated with `imageName`,
and returns `True` if those drawings match `otherDrawings`.
"""
# Check if image has no drawings or data
if imageName not in self._transectData.keys():
return False
# Check if image has no drawings
if "drawings" not in self._transectData[imageName].keys():
return False
# Check if image drawings are the same as the input
        # also lol TODO stop this madness
drawings = DrawingDataList.loads(
json.dumps(self._transectData[imageName]["drawings"])
)
return drawings == otherDrawings
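    # Editorial note: drawings() below parses the same stored dict with
    # DrawingDataList.load() directly, so the json.dumps/loads round-trip
    # above is presumably what the TODO wants to eliminate.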
def drawings(self):
"""
Generator yielding a tuple of images
with corresponding drawings.
(imageName:str, drawings:DrawingDataList)
"""
for imageName, imageData in self._transectData.items():
if "drawings" in imageData.keys():
yield imageName, DrawingDataList.load(imageData["drawings"])
def imageCounts(self):
"""
Generator yielding tuple of images
and their counts.
(imageName:str, counts:CountData)
"""
for imageName, imageData in self._transectData.items():
if "drawings" in imageData.keys():
drawings = imageData["drawings"]
for drawing in drawings:
countData = CountData.fromDict(drawing["CountData"])
if not countData.isEmpty():
yield imageName, countData
def uniqueSpecies(self):
"""
Returns a list of all the different species in this save file
"""
species = []
for _, countData in self.imageCounts():
if countData.species not in species:
species.append(countData.species)
return species
def uniqueAnimals(self):
"""
Returns a list of the animals in this data set, excluding those
marked as "duplicates". The length of this list is the total number of animals counted
in this data set.
"""
animals = []
for _, countData in self.imageCounts():
if not countData.isDuplicate:
animals.extend([countData.species] * countData.number)
return animals
def uniqueImages(self):
"""
Returns a list of unique images in this data set.
"""
imageNames = []
for imageName, _ in self.imageCounts():
if imageName not in imageNames:
imageNames.append(imageName)
return imageNames
def __repr__(self):
return f"TransectData({super().__repr__()})"
def sorted(self):
"""Sort by key values (image names)"""
return TransectData(
dict(sorted(self._transectData.items(), key=lambda t: t[0])), self.fp
)
|
[
"json.dump",
"json.load",
"countdata.CountData.fromDict",
"json.dumps",
"drawingdata.DrawingDataList.load"
] |
[((1467, 1509), 'json.dump', 'json.dump', (['self._transectData', 'f'], {'indent': '(4)'}), '(self._transectData, f, indent=4)\n', (1476, 1509), False, 'import json\n'), ((3235, 3288), 'json.dumps', 'json.dumps', (["self._transectData[imageName]['drawings']"], {}), "(self._transectData[imageName]['drawings'])\n", (3245, 3288), False, 'import json\n'), ((1014, 1026), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1023, 1026), False, 'import json\n'), ((4105, 4145), 'countdata.CountData.fromDict', 'CountData.fromDict', (["drawing['CountData']"], {}), "(drawing['CountData'])\n", (4123, 4145), False, 'from countdata import CountData\n'), ((3665, 3708), 'drawingdata.DrawingDataList.load', 'DrawingDataList.load', (["imageData['drawings']"], {}), "(imageData['drawings'])\n", (3685, 3708), False, 'from drawingdata import DrawingDataList\n')]
|