Dataset schema (five string columns; min/max lengths as reported by the extractor):

    repo_name         stringlengths   7 .. 94
    repo_path         stringlengths   4 .. 237
    repo_head_hexsha  stringlengths   40 .. 40
    content           stringlengths   10 .. 680k
    apis              stringlengths   2 .. 680k
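Each row below repeats these five fields in order. The apis field holds the repr of a Python list of call records whose character offsets index into the content field. A minimal sketch of how a row might be consumed follows; the row dict, the record layout, and the helper name are assumptions inferred from the sample rows, not a documented schema:

import ast

def iter_api_calls(row):
    """Yield (qualified_name, call_source) for one dataset row (a dict).

    Assumed record layout, inferred from the sample rows below:
    (call_span, qualified_name, display_name, (args, kwargs),
     call_args_text, name_span, uses_alias, import_statement)
    """
    for record in ast.literal_eval(row["apis"]):  # 'apis' is a list literal
        start, end = record[0]                    # offsets into row["content"]
        yield record[1], row["content"][start:end]

For the first row below, content[171:198] recovers the call text "logging.getLogger(__name__)", matching the first record's span.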
by-liu/SegLossBia
tools/test_net.py
9cc639c04084cda9d5fb20ea34699db7e0beaf5c
import sys
import logging

from seglossbias.utils import mkdir, setup_logging
from seglossbias.engine import default_argument_parser, load_config, DefaultTester

logger = logging.getLogger(__name__)


def setup(args):
    cfg = load_config(args)
    mkdir(cfg.OUTPUT_DIR)
    setup_logging(output_dir=cfg.OUTPUT_DIR)
    return cfg


def main():
    args = default_argument_parser().parse_args()
    cfg = setup(args)
    logger.info("Launch command : ")
    logger.info(" ".join(sys.argv))
    tester = DefaultTester(cfg)
    tester.test()


if __name__ == "__main__":
    main()
[((171, 198), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (188, 198), False, 'import logging\n'), ((228, 245), 'seglossbias.engine.load_config', 'load_config', (['args'], {}), '(args)\n', (239, 245), False, 'from seglossbias.engine import default_argument_parser, load_config, DefaultTester\n'), ((250, 271), 'seglossbias.utils.mkdir', 'mkdir', (['cfg.OUTPUT_DIR'], {}), '(cfg.OUTPUT_DIR)\n', (255, 271), False, 'from seglossbias.utils import mkdir, setup_logging\n'), ((276, 316), 'seglossbias.utils.setup_logging', 'setup_logging', ([], {'output_dir': 'cfg.OUTPUT_DIR'}), '(output_dir=cfg.OUTPUT_DIR)\n', (289, 316), False, 'from seglossbias.utils import mkdir, setup_logging\n'), ((504, 522), 'seglossbias.engine.DefaultTester', 'DefaultTester', (['cfg'], {}), '(cfg)\n', (517, 522), False, 'from seglossbias.engine import default_argument_parser, load_config, DefaultTester\n'), ((357, 382), 'seglossbias.engine.default_argument_parser', 'default_argument_parser', ([], {}), '()\n', (380, 382), False, 'from seglossbias.engine import default_argument_parser, load_config, DefaultTester\n')]
anielsen001/sarpy
sarpy/io/nitf/tres/unclass/ENGRDA.py
07bf157f54a5304185fc0e1c34010053fd6ae9d9
# -*- coding: utf-8 -*-
from ..tre_elements import TREExtension, TREElement

__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"


class RECORD(TREElement):
    def __init__(self, value):
        super(RECORD, self).__init__()
        self.add_field('ENGLN', 'd', 2, value)
        self.add_field('ENGLBL', 's', self.ENGLN, value)
        self.add_field('ENGMTXC', 'd', 4, value)
        self.add_field('ENGMTXR', 'd', 4, value)
        self.add_field('ENGTYP', 's', 1, value)
        self.add_field('ENGDTS', 'd', 1, value)
        self.add_field('ENGDTU', 's', 2, value)
        self.add_field('ENGDATC', 'd', 8, value)
        self.add_field('ENGDATA', 'b', self.ENGDATC, value)


class ENGRDAType(TREElement):
    def __init__(self, value):
        super(ENGRDAType, self).__init__()
        self.add_field('RESRC', 's', 20, value)
        self.add_field('RECNT', 'd', 3, value)
        self.add_loop('RECORDSs', self.RECNT, RECORD, value)


class ENGRDA(TREExtension):
    _tag_value = 'ENGRDA'
    _data_type = ENGRDAType
[]
lingruiluo/jp_doodle
jp_doodle/quantity_forest.py
b3935208821898f22ab504c2b26dd4d37f08f0e4
from jp_doodle import doodle_files
qf_js = doodle_files.vendor_path("js/quantity_forest.js")
from jp_doodle import dual_canvas
import jp_proxy_widget
import os
from subprocess import check_output
import pprint

if bytes != str:
    unicode = str


def directory_usage(directory, epsilon=0.02):
    if not os.path.isdir(directory):
        return None
    ls = os.listdir(directory)
    result = {}
    total = 0.0
    for fn in ls:
        path = os.path.join(directory, fn)
        try:
            usage = check_output(["du", "-s", path])
        except Exception:
            pass
        else:
            usage = unicode(usage, "utf8")  # py 3
            [snum, sname] = usage.strip().split("\t")
            num = float(snum)
            total += num
            result[fn] = (path, num)
    final = {}
    other = 0
    for fn in result:
        (path, num) = result[fn]
        portion = num / total
        if portion < epsilon:
            other += num
        else:
            final[fn] = {"name": fn, "file_size": num, "percent": portion * 100, "id": path}
    if other > epsilon:
        final["*other"] = {"name": "*other", "file_size": other,
                           "percent": other * 100 / total, "id": "*" + directory}
    return final


RIGHT = {"x": 1, "y": 0}
UP = {"x": 0, "y": 1}


class FileSystemExplorer:

    color_counter = 333
    opacity = 0.5

    def __init__(self, canvas_widget, path, width=600,
                 enable_deletions=False,
                 horizontal=False,
                 x_vector=None,
                 y_vector=None,
                 dy=50,
                 dh=20,
                 epsilon=0.02,
                 degrees=15,
                 font="normal 10px Arial",
                 background="rgba(244,230,255,0.8)",
                 opacity=0.7,
                 clearHeight=300,
                 ):
        self.opacity = opacity
        if y_vector is None:
            y_vector = UP
            if horizontal:
                y_vector = RIGHT
        if x_vector is None:
            x_vector = RIGHT
            if horizontal:
                x_vector = UP
        self.epsilon = epsilon
        self.enable_deletions = enable_deletions
        path = os.path.expanduser(path)
        path = os.path.abspath(path)
        self.color_cache = {}
        self.usage_cache = {}
        self.id_to_data = {}
        self.expanded = {}
        self.widget = canvas_widget
        self.path = path
        members = self.directory_members(path)
        self.widget = canvas_widget
        canvas_widget.load_js_files([qf_js])
        canvas_widget.js_init("""
            var forest_config = {
                top_label: top_label,
                roots: members,
                width: width,
                dy: dy,
                dh: dh,
                id_click: id_click,
                degrees: degrees,
                background: background,
                x_vector: x_vector,
                y_vector: y_vector,
                font: font,
                clearHeight: clearHeight,
            }
            element.quantity_forest(forest_config);
            element.detail = $("<div>Initialized</div>").appendTo(element);
            element.show_detail = function(identity, info) {
                var d = element.detail
                d.html("<div/>");
                for (key in info) {
                    $("<div>" + key + " : " + info[key] + "<div>").appendTo(d);
                }
                if (!identity.startsWith("*")) {
                    var deleter = $("<a>delete " + identity + "</a>").appendTo(d);
                    deleter.on("click", function() {
                        delete_id(identity);
                    });
                }
            };
        """,
                              width=width, members=members, dy=dy, dh=dh,
                              id_click=self.id_click, top_label=path,
                              delete_id=self.delete_id, degrees=degrees,
                              x_vector=x_vector, y_vector=y_vector,
                              font=font, background=background, clearHeight=clearHeight,
                              )
        if enable_deletions:
            self.widget.element.detail.html("<div>DELETIONS ARE ENABLED!</div>");

    def directory_usage(self, directory):
        cache = self.usage_cache
        if directory in cache:
            return cache[directory]
        usage = directory_usage(directory, self.epsilon)
        cache[directory] = usage
        if not usage:
            return usage
        for u in usage.values():
            u["parent"] = directory
            self.id_to_data[u["id"]] = u
        return usage

    def get_color(self, identity):
        cache = self.color_cache
        if identity in cache:
            return cache[identity]
        result = cache[identity] = self.pick_color()
        return result

    def pick_color(self):
        self.color_counter += 1
        counter = self.color_counter
        rgb = [0, 0, 0]
        for i in range(8):
            for j in range(3):
                rgb[j] = (rgb[j] << 1) | (counter & 1)
                counter = (counter >> 1)
        # darken
        for i in range(3):
            rgb[i] = (rgb[i] * 200) // 255
        return "rgba(%s,%s,%s,%s)" % (tuple(rgb) + (self.opacity,))

    def delete_id(self, identity):
        try:
            self.widget.element.css("cursor", "wait")
            self.widget.element.detail.html("<div>attempting delete...</div>")
            self.delete_id1(identity)
        finally:
            self.widget.element.css("cursor", "default")

    def delete_id1(self, identity):
        if self.enable_deletions:
            # for simplicity for now just clear the usage cache
            self.usage_cache = {}
            cmd = ["rm", "-rf", identity]
            self.widget.element["print"](repr(cmd))
            #w.element.css("cursor", "wait")
            try:
                #try:
                checked = check_output(cmd)
                #finally:
                #w.element.css("cursor", "default")
            except Exception as e:
                self.widget.element.detail.html("<div>delete " + repr((identity, e)) + " failed</div>");
            else:
                roots = self.directory_members(self.path)
                #pprint.pprint(roots)
                self.widget.element.reset_roots(roots)
                self.widget.element.detail.html("<div>" + repr(identity) + " deleted</div>");
        else:
            self.widget.element.detail.html("<div>delete " + repr(identity) + " disabled</div>");

    def id_click(self, identity):
        try:
            self.widget.element.css("cursor", "wait")
            self.widget.element.detail.html("<div>click...</div>")
            self.expanded[identity] = not self.expanded.get(identity, False)
            roots = self.directory_members(self.path)
            #pprint.pprint(roots)
            self.widget.element.reset_roots(roots)
            #self.widget.element.detail.html("<div>expand " + repr(identity) + "</div>");
            self.widget.element.show_detail(identity, self.id_to_data[identity])
        finally:
            self.widget.element.css("cursor", "default")

    def directory_members(self, directory):
        self.expanded[directory] = True
        usage = self.directory_usage(directory)
        if not usage:
            return []
        result = []
        sorter = [(u["percent"], u["name"]) for u in usage.values()]
        for (pct, filename) in reversed(sorted(sorter)):
            u = usage[filename]
            identity = u["id"]
            expanded = self.expanded.get(identity, False)
            children = None
            if expanded:
                children = self.directory_members(identity)
            r = {
                "id": identity,
                "label": u["name"],
                "size": u["file_size"],
                "children": children,
                "expanded": expanded,
                "color": self.get_color(identity),
            }
            result.append(r)
        return result
[((43, 92), 'jp_doodle.doodle_files.vendor_path', 'doodle_files.vendor_path', (['"""js/quantity_forest.js"""'], {}), "('js/quantity_forest.js')\n", (67, 92), False, 'from jp_doodle import doodle_files\n'), ((359, 380), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (369, 380), False, 'import os\n'), ((304, 328), 'os.path.isdir', 'os.path.isdir', (['directory'], {}), '(directory)\n', (317, 328), False, 'import os\n'), ((446, 473), 'os.path.join', 'os.path.join', (['directory', 'fn'], {}), '(directory, fn)\n', (458, 473), False, 'import os\n'), ((2017, 2041), 'os.path.expanduser', 'os.path.expanduser', (['path'], {}), '(path)\n', (2035, 2041), False, 'import os\n'), ((2057, 2078), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (2072, 2078), False, 'import os\n'), ((507, 539), 'subprocess.check_output', 'check_output', (["['du', '-s', path]"], {}), "(['du', '-s', path])\n", (519, 539), False, 'from subprocess import check_output\n'), ((5686, 5703), 'subprocess.check_output', 'check_output', (['cmd'], {}), '(cmd)\n', (5698, 5703), False, 'from subprocess import check_output\n')]
YuweiYin/Algorithm_YuweiYin
LeetCode-All-Solution/Python3/LC-0035-Search-Insert-Position.py
28648fac59c5a4e3c907978cbd1b3e662ba18fd5
#!/usr/bin/env python
# -*- coding:utf-8 -*-

"""=================================================================
@Project : Algorithm_YuweiYin/LeetCode-All-Solution/Python3
@File    : LC-0035-Search-Insert-Position.py
@Author  : [YuweiYin](https://github.com/YuweiYin)
@Date    : 2022-01-01
=================================================================="""

import sys
import time
from typing import List

"""
LeetCode - 0035 - (Easy) - Search Insert Position
https://leetcode.com/problems/search-insert-position/

Description:
    Given a sorted array of distinct integers and a target value,
    return the index if the target is found.
    If not, return the index where it would be if it were inserted in order.

Requirement:
    You must write an algorithm with O(log n) runtime complexity.

Example 1:
    Input: nums = [1,3,5,6], target = 5
    Output: 2
Example 2:
    Input: nums = [1,3,5,6], target = 2
    Output: 1
Example 3:
    Input: nums = [1,3,5,6], target = 7
    Output: 4

Constraints:
    1 <= nums.length <= 10^4
    -10^4 <= nums[i] <= 10^4
    nums contains distinct values sorted in ascending order.
    -10^4 <= target <= 10^4
"""


class Solution:
    def searchInsert(self, nums: List[int], target: int) -> int:
        # exception case
        if not isinstance(nums, list) or len(nums) == 0:
            return 0
        # main method: (loop) binary search of sorted list
        return self._searchInsert(nums, target)

    def _searchInsert(self, nums: List[int], target: int) -> int:
        start_index, end_index = 0, len(nums) - 1
        insert_index = 0
        while start_index <= end_index:
            cur_index = (end_index + start_index) >> 1  # current cursor
            cur_num = nums[cur_index]  # cache variable
            if start_index == end_index:
                # border case: must decide the insert position now
                return start_index if (target <= cur_num) else (start_index + 1)
            if cur_num == target:  # 1. hit the target
                return cur_index
            elif cur_num < target:  # 2. go right
                start_index = cur_index + 1  # change interval
                insert_index = start_index  # adjust the possible insert index
            else:  # 3. go left
                end_index = cur_index - 1  # change interval
                insert_index = cur_index  # adjust the possible insert index
        return insert_index


def main():
    # Example 1: Output: 2
    # nums = [1, 3, 5, 6]
    # target = 5

    # Example 2: Output: 1
    # nums = [1, 3, 5, 6]
    # target = 2

    # Example 3: Output: 4
    # nums = [1,3,5,6]
    # target = 7

    # Example 4: Output: 0
    # nums = [1, 3, 5, 6]
    # target = 0

    # Example 5: Output: 0
    nums = [1, 3]
    target = 0

    # init instance
    solution = Solution()

    # run & time
    start = time.process_time()
    ans = solution.searchInsert(nums, target)
    end = time.process_time()

    # show answer
    print('\nAnswer:')
    print(ans)

    # show time consumption
    print('Running Time: %.5f ms' % ((end - start) * 1000))


if __name__ == "__main__":
    sys.exit(main())
[((2847, 2866), 'time.process_time', 'time.process_time', ([], {}), '()\n', (2864, 2866), False, 'import time\n'), ((2923, 2942), 'time.process_time', 'time.process_time', ([], {}), '()\n', (2940, 2942), False, 'import time\n')]
firodj/ciscodevnet-coding-skills-sample-code
coding202-parsing-json/get-ap-json-1.py
4fca975e450cf0c913001fe1b36582f7a094b1e7
import requests

url = 'https://64.103.26.61/api/contextaware/v1/maps/info/DevNetCampus/DevNetBuilding/DevNetZone'
headers = {'Authorization': 'Basic bGVhcm5pbmc6bGVhcm5pbmc=='}

response = requests.get(url, headers=headers, verify=False)
responseString = response.text
print(responseString)
[((188, 236), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'verify': '(False)'}), '(url, headers=headers, verify=False)\n', (200, 236), False, 'import requests\n')]
LaudateCorpus1/llvm-project
lldb/test/API/lang/swift/optimized_code/bound_generic_enum/TestSwiftOptimizedBoundGenericEnum.py
ff2e0f0c1112558b3f30d8afec7c9882c33c79e3
import lldb
from lldbsuite.test.decorators import *
import lldbsuite.test.lldbtest as lldbtest
import lldbsuite.test.lldbutil as lldbutil
import os
import unittest2


class TestSwiftOptimizedBoundGenericEnum(lldbtest.TestBase):

    mydir = lldbtest.TestBase.compute_mydir(__file__)

    @swiftTest
    def test(self):
        """Test the bound generic enum types in "optimized" code."""
        self.build()
        target, process, thread, bkpt = lldbutil.run_to_source_breakpoint(
            self, 'break one', lldb.SBFileSpec('main.swift'))
        bkpt_two = target.BreakpointCreateBySourceRegex(
            'break two', lldb.SBFileSpec('main.swift'))
        self.assertGreater(bkpt_two.GetNumLocations(), 0)

        var_self = self.frame().FindVariable("self")
        # FIXME, this fails with a data extractor error.
        lldbutil.check_variable(self, var_self, False, value=None)

        lldbutil.continue_to_breakpoint(process, bkpt_two)
        var_self = self.frame().FindVariable("self")
        lldbutil.check_variable(self, var_self, True, value="success")
[((241, 282), 'lldbsuite.test.lldbtest.TestBase.compute_mydir', 'lldbtest.TestBase.compute_mydir', (['__file__'], {}), '(__file__)\n', (272, 282), True, 'import lldbsuite.test.lldbtest as lldbtest\n'), ((836, 894), 'lldbsuite.test.lldbutil.check_variable', 'lldbutil.check_variable', (['self', 'var_self', '(False)'], {'value': 'None'}), '(self, var_self, False, value=None)\n', (859, 894), True, 'import lldbsuite.test.lldbutil as lldbutil\n'), ((903, 953), 'lldbsuite.test.lldbutil.continue_to_breakpoint', 'lldbutil.continue_to_breakpoint', (['process', 'bkpt_two'], {}), '(process, bkpt_two)\n', (934, 953), True, 'import lldbsuite.test.lldbutil as lldbutil\n'), ((1015, 1077), 'lldbsuite.test.lldbutil.check_variable', 'lldbutil.check_variable', (['self', 'var_self', '(True)'], {'value': '"""success"""'}), "(self, var_self, True, value='success')\n", (1038, 1077), True, 'import lldbsuite.test.lldbutil as lldbutil\n'), ((514, 543), 'lldb.SBFileSpec', 'lldb.SBFileSpec', (['"""main.swift"""'], {}), "('main.swift')\n", (529, 543), False, 'import lldb\n'), ((627, 656), 'lldb.SBFileSpec', 'lldb.SBFileSpec', (['"""main.swift"""'], {}), "('main.swift')\n", (642, 656), False, 'import lldb\n')]
wentzlau/kervi
kervi/kervi/plugin/routing/kervi_io/__init__.py
d35a422a6bca6b0ef50a4f9e5c382dece855abdc
#Copyright 2017 Tim Wentlau.
#Distributed under the MIT License. See LICENSE in root of project.

def init_plugin(config, manager):
    from kervi.plugin.routing.kervi_io.mq_router import KerviIORouterPlugin
    return KerviIORouterPlugin(config, manager)

def plugin_type():
    return "routing"
[((219, 255), 'kervi.plugin.routing.kervi_io.mq_router.KerviIORouterPlugin', 'KerviIORouterPlugin', (['config', 'manager'], {}), '(config, manager)\n', (238, 255), False, 'from kervi.plugin.routing.kervi_io.mq_router import KerviIORouterPlugin\n')]
ghostFaceKillah/bc-gym-planning-env
bc_gym_planning_env/envs/base/action.py
3cc0eb03adb752d304c3f007675cfff86691d007
""" Code for wrapping the motion primitive action in an object. """ from __future__ import division from __future__ import absolute_import import attr import numpy as np from bc_gym_planning_env.utilities.serialize import Serializable @attr.s(cmp=False) class Action(Serializable): """ Object representing an 'action' - a motion primitive to execute in the environment """ VERSION = 1 command = attr.ib(type=np.ndarray) @classmethod def from_cmds(cls, wanted_linear_velocity_of_baselink, wanted_front_wheel_angle): return cls(command=np.array([wanted_linear_velocity_of_baselink, wanted_front_wheel_angle])) def __eq__(self, other): if not isinstance(other, Action): return False if (self.command != other.command).any(): return False return True def __ne__(self, other): return not self.__eq__(other)
[((240, 257), 'attr.s', 'attr.s', ([], {'cmp': '(False)'}), '(cmp=False)\n', (246, 257), False, 'import attr\n'), ((411, 435), 'attr.ib', 'attr.ib', ([], {'type': 'np.ndarray'}), '(type=np.ndarray)\n', (418, 435), False, 'import attr\n'), ((567, 639), 'numpy.array', 'np.array', (['[wanted_linear_velocity_of_baselink, wanted_front_wheel_angle]'], {}), '([wanted_linear_velocity_of_baselink, wanted_front_wheel_angle])\n', (575, 639), True, 'import numpy as np\n')]
ccfelius/queueing
histogram.py
c38bd2fe230e52d6166a94449cec28f82e245ec2
import matplotlib.pyplot as plt
import pandas as pd
import math
import numpy as np
from scipy import stats
import seaborn as sns

data = pd.read_csv("data/500-4.txt", sep="\t")
# example1 = data[data["SIM_TIME"] == 500]

simulations = 500
simtimes = [5, 50, 150, 500, 1000]

# for i in [1, 2, 4]:
#     data = pd.read_csv(f"data/500-{i}.txt", sep="\t")
#     example = data[data["SIM_TIME"] == simtime]

rhos = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.975]

print("DONE")
print("\n START MEAN, STDEV, CONF INT")

data = pd.read_csv(f"data/500-2.txt", sep="\t")
example = data[data["SIM_TIME"] == 150]
example1 = data[data["SIM_TIME"] == 500]

ex = example[example['RHO'] == 0.1]['AVG_WAIT']
ex2 = example1[example1['RHO'] == 0.1]['AVG_WAIT']

ex_9 = example[example['RHO'] == 0.9]['AVG_WAIT']
ex2_9 = example1[example1['RHO'] == 0.9]['AVG_WAIT']

print("\nMEAN 150, 500, rho 0.1, rho 0.9")
print(ex.mean(), ex2.mean())
print(ex_9.mean(), ex2_9.mean())

print("\nSTDEV 150, 500, rho 0.1, rho 0.9")
print(ex.std(), ex2.std())
print(ex_9.std(), ex2_9.std())

fig = plt.figure(facecolor='w')
ax = fig.add_subplot(111, facecolor='whitesmoke', axisbelow=True)
ax.hist(ex_9, bins = 100, alpha=0.8, color = 'cornflowerblue', label="Simtime=150")
ax.hist(ex2_9, bins = 100, alpha = 0.5, color='springgreen', label="Simtime=500")
# sns.displot(ex_9,)
# sns.displot(ex2_9)
ax.set_xlabel('Mean waiting time / time unit', fontsize=12)
ax.set_ylabel('Density', fontsize=12)
ax.set_title('Distribution mean waiting time', fontsize = 14)
ax.yaxis.set_tick_params(length=0)
ax.xaxis.set_tick_params(length=0)
ax.grid(b=True, which='major', c='w', lw=2, ls='-')
legend = ax.legend()
legend.get_frame().set_alpha(0.5)
for spine in ('top', 'right', 'bottom', 'left'):
    ax.spines[spine].set_visible(False)
plt.savefig("plots/histogram-150-500-01.png", dpi=300)
plt.show()
[((137, 176), 'pandas.read_csv', 'pd.read_csv', (['"""data/500-4.txt"""'], {'sep': '"""\t"""'}), "('data/500-4.txt', sep='\\t')\n", (148, 176), True, 'import pandas as pd\n'), ((531, 571), 'pandas.read_csv', 'pd.read_csv', (['f"""data/500-2.txt"""'], {'sep': '"""\t"""'}), "(f'data/500-2.txt', sep='\\t')\n", (542, 571), True, 'import pandas as pd\n'), ((1070, 1095), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'facecolor': '"""w"""'}), "(facecolor='w')\n", (1080, 1095), True, 'import matplotlib.pyplot as plt\n'), ((1797, 1851), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""plots/histogram-150-500-01.png"""'], {'dpi': '(300)'}), "('plots/histogram-150-500-01.png', dpi=300)\n", (1808, 1851), True, 'import matplotlib.pyplot as plt\n'), ((1852, 1862), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1860, 1862), True, 'import matplotlib.pyplot as plt\n')]
zheedong/BaekJoon
python/1931.py
7f9e00085276a337d18ee3bb90c98126f7af4d3a
n = int(input())
conf_set = []
for _ in range(n):
    conf_set.append(tuple(map(int, input().split())))

conf_set.sort(key=lambda x: (x[1], x[0]))
# Sort by end time.
# To handle the case where one meeting's start equals another's end,
# start times must also be sorted in ascending order.

solution_list = [conf_set[0]]

# Greedy Algorithm
for conf in conf_set[1:]:
    last_conf = solution_list[-1]
    _, last_end_time = last_conf
    new_start_time, _ = conf
    # Compare the start time of the next meeting in the sorted list
    # with the end time of the last meeting in the answer list.
    if new_start_time >= last_end_time:
        solution_list.append(conf)

print(len(solution_list))
[]
python-social-auth/social-app-webpy
social_webpy/app.py
edcfd8dd95c66a3524961e5212e13c9c2e8515a3
import web

from social_core.actions import do_auth, do_complete, do_disconnect

from .utils import psa, load_strategy


urls = (
    r'/login/(?P<backend>[^/]+)/?', 'auth',
    r'/complete/(?P<backend>[^/]+)/?', 'complete',
    r'/disconnect/(?P<backend>[^/]+)/?', 'disconnect',
    r'/disconnect/(?P<backend>[^/]+)/(?P<association_id>\d+)/?', 'disconnect',
)


class BaseViewClass(object):
    def __init__(self, *args, **kwargs):
        self.session = web.web_session
        method = web.ctx.method == 'POST' and 'post' or 'get'
        self.strategy = load_strategy()
        self.data = web.input(_method=method)
        self.backend = None
        self._user = None
        super(BaseViewClass, self).__init__(*args, **kwargs)

    def get_current_user(self):
        if not hasattr(self, '_user'):
            if self.session.get('logged_in'):
                self._user = self.strategy.get_user(
                    self.session.get('user_id')
                )
            else:
                self._user = None
        return self._user

    def login_user(self, user):
        self.session['logged_in'] = True
        self.session['user_id'] = user.id


class auth(BaseViewClass):
    def GET(self, backend):
        return self._auth(backend)

    def POST(self, backend):
        return self._auth(backend)

    @psa('/complete/%(backend)s/')
    def _auth(self, backend):
        return do_auth(self.backend)


class complete(BaseViewClass):
    def GET(self, backend, *args, **kwargs):
        return self._complete(backend, *args, **kwargs)

    def POST(self, backend, *args, **kwargs):
        return self._complete(backend, *args, **kwargs)

    @psa('/complete/%(backend)s/')
    def _complete(self, backend, *args, **kwargs):
        return do_complete(
            self.backend,
            login=lambda backend, user, social_user: self.login_user(user),
            user=self.get_current_user(),
            *args, **kwargs
        )


class disconnect(BaseViewClass):
    @psa()
    def POST(self, backend, association_id=None):
        return do_disconnect(self.backend, self.get_current_user(),
                             association_id)


app_social = web.application(urls, locals())
[((609, 634), 'web.input', 'web.input', ([], {'_method': 'method'}), '(_method=method)\n', (618, 634), False, 'import web\n'), ((1419, 1440), 'social_core.actions.do_auth', 'do_auth', (['self.backend'], {}), '(self.backend)\n', (1426, 1440), False, 'from social_core.actions import do_auth, do_complete, do_disconnect\n')]
gomyar/stellar
stellar/config.py
b2dfbe136f1540f0ca6ac5779ebaeae996a3b747
import os
import logging

import yaml
from schema import Use, Schema, SchemaError, Optional


class InvalidConfig(Exception):
    pass


class MissingConfig(Exception):
    pass


default_config = {
    'logging': 30,
    'migrate_from_0_3_2': True
}

schema = Schema({
    'stellar_url': Use(str),
    'url': Use(str),
    'project_name': Use(str),
    'tracked_databases': [Use(str)],
    Optional('logging'): int,
    Optional('migrate_from_0_3_2'): bool
})


def get_config_path():
    current_directory = os.getcwd()
    while True:
        try:
            with open(
                os.path.join(current_directory, 'stellar.yaml'),
                'rb'
            ) as fp:
                return os.path.join(current_directory, 'stellar.yaml')
        except IOError:
            pass
        current_directory = os.path.abspath(
            os.path.join(current_directory, '..')
        )
        if current_directory == '/':
            return None


def load_config():
    config = {}
    stellar_config_env = os.getenv('STELLAR_CONFIG')
    if stellar_config_env:
        if os.path.exists(stellar_config_env):
            config = yaml.safe_load(open(stellar_config_env))
    else:
        current_directory = os.getcwd()
        while True:
            try:
                with open(
                    os.path.join(current_directory, 'stellar.yaml'),
                    'rb'
                ) as fp:
                    config = yaml.safe_load(fp)
                    break
            except IOError:
                pass
            if current_directory == '/':
                break
            current_directory = os.path.abspath(
                os.path.join(current_directory, '..')
            )

    if not config:
        raise MissingConfig()

    for k, v in default_config.items():
        if k not in config:
            config[k] = v

    try:
        return schema.validate(config)
    except SchemaError as e:
        raise InvalidConfig(e)


def save_config(config):
    logging.getLogger(__name__).debug('save_config()')
    with open(get_config_path(), "w") as fp:
        yaml.dump(config, fp)
[((508, 519), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (517, 519), False, 'import os\n'), ((1020, 1047), 'os.getenv', 'os.getenv', (['"""STELLAR_CONFIG"""'], {}), "('STELLAR_CONFIG')\n", (1029, 1047), False, 'import os\n'), ((389, 408), 'schema.Optional', 'Optional', (['"""logging"""'], {}), "('logging')\n", (397, 408), False, 'from schema import Use, Schema, SchemaError, Optional\n'), ((419, 449), 'schema.Optional', 'Optional', (['"""migrate_from_0_3_2"""'], {}), "('migrate_from_0_3_2')\n", (427, 449), False, 'from schema import Use, Schema, SchemaError, Optional\n'), ((287, 295), 'schema.Use', 'Use', (['str'], {}), '(str)\n', (290, 295), False, 'from schema import Use, Schema, SchemaError, Optional\n'), ((308, 316), 'schema.Use', 'Use', (['str'], {}), '(str)\n', (311, 316), False, 'from schema import Use, Schema, SchemaError, Optional\n'), ((338, 346), 'schema.Use', 'Use', (['str'], {}), '(str)\n', (341, 346), False, 'from schema import Use, Schema, SchemaError, Optional\n'), ((1086, 1120), 'os.path.exists', 'os.path.exists', (['stellar_config_env'], {}), '(stellar_config_env)\n', (1100, 1120), False, 'import os\n'), ((1222, 1233), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1231, 1233), False, 'import os\n'), ((2111, 2132), 'yaml.dump', 'yaml.dump', (['config', 'fp'], {}), '(config, fp)\n', (2120, 2132), False, 'import yaml\n'), ((374, 382), 'schema.Use', 'Use', (['str'], {}), '(str)\n', (377, 382), False, 'from schema import Use, Schema, SchemaError, Optional\n'), ((849, 886), 'os.path.join', 'os.path.join', (['current_directory', '""".."""'], {}), "(current_directory, '..')\n", (861, 886), False, 'import os\n'), ((2007, 2034), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (2024, 2034), False, 'import logging\n'), ((702, 749), 'os.path.join', 'os.path.join', (['current_directory', '"""stellar.yaml"""'], {}), "(current_directory, 'stellar.yaml')\n", (714, 749), False, 'import os\n'), ((1670, 1707), 'os.path.join', 'os.path.join', (['current_directory', '""".."""'], {}), "(current_directory, '..')\n", (1682, 1707), False, 'import os\n'), ((588, 635), 'os.path.join', 'os.path.join', (['current_directory', '"""stellar.yaml"""'], {}), "(current_directory, 'stellar.yaml')\n", (600, 635), False, 'import os\n'), ((1446, 1464), 'yaml.safe_load', 'yaml.safe_load', (['fp'], {}), '(fp)\n', (1460, 1464), False, 'import yaml\n'), ((1318, 1365), 'os.path.join', 'os.path.join', (['current_directory', '"""stellar.yaml"""'], {}), "(current_directory, 'stellar.yaml')\n", (1330, 1365), False, 'import os\n')]
texpomru13/espnet
egs2/mr_openslr64/asr1/local/data_prep.py
7ef005e832e2fb033f356c16f54e0f08762fb4b0
#!/usr/bin/env python3

# Copyright 2021 Carnegie Mellon University (Peter Wu)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)

import argparse
import os
import random

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-d", help="downloads directory", type=str, default="downloads")
    args = parser.parse_args()

    tsv_path = "%s/line_index.tsv" % args.d
    with open(tsv_path, "r") as inf:
        tsv_lines = inf.readlines()
    tsv_lines = [line.strip() for line in tsv_lines]

    spk2utt = {}
    utt2text = {}
    for line in tsv_lines:
        l_list = line.split("\t")
        fid = l_list[0]
        spk = l_list[0].split("_")[1]
        text = l_list[1]
        path = "%s/%s.wav" % (args.d, fid)
        if os.path.exists(path):
            utt2text[fid] = text
            if spk in spk2utt:
                spk2utt[spk].append(fid)
            else:
                spk2utt[spk] = [fid]

    spks = sorted(list(spk2utt.keys()))
    num_fids = 0
    num_test_spks = 0
    for spk in spks:
        num_test_spks += 1
        fids = sorted(list(set(spk2utt[spk])))
        num_fids += len(fids)
        if num_fids >= 2000:
            break
    num_test_spks = 2
    test_spks = spks[:num_test_spks]
    train_dev_spks = spks[num_test_spks:]
    random.Random(0).shuffle(train_dev_spks)
    num_train = int(len(train_dev_spks) * 0.9)
    train_spks = train_dev_spks[:num_train]
    dev_spks = train_dev_spks[num_train:]

    spks_by_phase = {"train": train_spks, "dev": dev_spks, "test": test_spks}
    flac_dir = "%s" % args.d
    sr = 16000
    for phase in spks_by_phase:
        spks = spks_by_phase[phase]
        text_strs = []
        wav_scp_strs = []
        spk2utt_strs = []
        num_fids = 0
        for spk in spks:
            fids = sorted(list(set(spk2utt[spk])))
            num_fids += len(fids)
            if phase == "test" and num_fids > 2000:
                curr_num_fids = num_fids - 2000
                random.Random(1).shuffle(fids)
                fids = fids[:curr_num_fids]
            utts = [spk + "-" + f for f in fids]
            utts_str = " ".join(utts)
            spk2utt_strs.append("%s %s" % (spk, utts_str))
            for fid, utt in zip(fids, utts):
                cmd = "ffmpeg -i %s/%s.wav -f wav -ar %d -ab 16 -ac 1 - |" % (
                    flac_dir,
                    fid,
                    sr,
                )
                text_strs.append("%s %s" % (utt, utt2text[fid]))
                wav_scp_strs.append("%s %s" % (utt, cmd))
        phase_dir = "data/marathi_%s" % phase
        if not os.path.exists(phase_dir):
            os.makedirs(phase_dir)
        text_strs = sorted(text_strs)
        wav_scp_strs = sorted(wav_scp_strs)
        spk2utt_strs = sorted(spk2utt_strs)
        with open(os.path.join(phase_dir, "text"), "w+") as ouf:
            for s in text_strs:
                ouf.write("%s\n" % s)
        with open(os.path.join(phase_dir, "wav.scp"), "w+") as ouf:
            for s in wav_scp_strs:
                ouf.write("%s\n" % s)
        with open(os.path.join(phase_dir, "spk2utt"), "w+") as ouf:
            for s in spk2utt_strs:
                ouf.write("%s\n" % s)
[((223, 248), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (246, 248), False, 'import argparse\n'), ((779, 799), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (793, 799), False, 'import os\n'), ((1319, 1335), 'random.Random', 'random.Random', (['(0)'], {}), '(0)\n', (1332, 1335), False, 'import random\n'), ((2632, 2657), 'os.path.exists', 'os.path.exists', (['phase_dir'], {}), '(phase_dir)\n', (2646, 2657), False, 'import os\n'), ((2671, 2693), 'os.makedirs', 'os.makedirs', (['phase_dir'], {}), '(phase_dir)\n', (2682, 2693), False, 'import os\n'), ((2840, 2871), 'os.path.join', 'os.path.join', (['phase_dir', '"""text"""'], {}), "(phase_dir, 'text')\n", (2852, 2871), False, 'import os\n'), ((2975, 3009), 'os.path.join', 'os.path.join', (['phase_dir', '"""wav.scp"""'], {}), "(phase_dir, 'wav.scp')\n", (2987, 3009), False, 'import os\n'), ((3116, 3150), 'os.path.join', 'os.path.join', (['phase_dir', '"""spk2utt"""'], {}), "(phase_dir, 'spk2utt')\n", (3128, 3150), False, 'import os\n'), ((2006, 2022), 'random.Random', 'random.Random', (['(1)'], {}), '(1)\n', (2019, 2022), False, 'import random\n')]
eulerkaku/movement_validation
movement_validation/features/feature_processing_options.py
af939a42a97c1de889cf13bad0c22a2824d60947
# -*- coding: utf-8 -*-
"""
This module will hold a class that will be referenced when processing
features.

I'd like to move things from "config" into here ...
- @JimHokanson
"""

from __future__ import division

from .. import utils

#Can't do this, would be circular
#from .worm_features import WormFeatures


class FeatureProcessingOptions(object):

    def __init__(self, fps):
        #The idea with this attribute is that functions will check if they are
        #in this list. If they are then they can display some sort of popup that
        #clarifies how they are working.
        #
        #No functions actually use this yet. It is just a place holder.
        #
        #An example of this might be:
        #   'morphology.length'
        self.functions_to_explain = []

        #This indicates that, where possible, code should attempt to replicate
        #the errors and inconsistencies present in the way that the Schafer lab
        #computed features. This can be useful for ensuring that we are able to
        #compute features in the same way that they did.
        #
        #NOTE: There are a few instances where this is not supported such that
        #the behavior will not match even if this value is set to True.
        self.mimic_old_behaviour = True

        self.locomotion = LocomotionOptions(fps)
        self.posture = PostureOptions(fps)

        #TODO: Implement this
        #This is not yet implemented. The idea is to support not
        #computing certain features. We might also allow disabling certain
        #groups of features.
        self.features_to_ignore = []

    def should_compute_feature(self, feature_name, worm_features):
        """
        """
        #TODO: Implement this ...
        return True

    def disable_contour_features(self):
        """
        Contour features:
        """
        #see self.features_to_ignore
        contour_dependent_features = [
            'morphology.width',
            'morphology.area',
            'morphology.area_per_length',
            'morphology.width_per_length',
            'posture.eccentricity']

        self.features_to_ignore = list(set(self.features_to_ignore +
                                            contour_dependent_features))

    def disable_feature_sections(self, section_names):
        """
        This can be used to disable processing of features by section
        (see the options available below)

        Modifies 'features_to_ignore'

        Parameters
        ----------
        section_names : list[str]
            Options are:
            - morphology
            - locomotion
            - posture
            - path

        Examples
        --------
        fpo.disable_feature_sections(['morphology'])

        fpo.disable_feature_sections(['morphology', 'locomotion'])
        """
        new_ignores = []
        f = IgnorableFeatures()
        for section in section_names:
            new_ignores.extend(getattr(f, section))

        self.features_to_ignore = list(set(self.features_to_ignore + new_ignores))

    def __repr__(self):
        return utils.print_object(self)


class PostureOptions(object):

    def __init__(self, fps):
        # Grid size for estimating eccentricity: this is the max # of points
        # that will fill the wide dimension.
        # (scalar) The # of points to place in the long dimension. More points
        # gives a more accurate estimate of the ellipse but increases
        # the calculation time.
        #
        # Used by: posture_features.get_eccentricity_and_orientation
        self.n_eccentricity_grid_points = 50

        # This is the # of frames that an epoch must exceed in order for it
        # to be truly considered a coiling event.
        # Current value translation: 1/5 of a second
        #
        # Used by: posture_features.get_worm_coils
        self.coiling_frame_threshold = round(1/5 * fps)

        # The maximum # of available values is 7; although technically there
        # are generally 48 eigenvectors available, we've just only precomputed
        # 7 to use for the projections
        self.n_eigenworms_use = 6

        # This is the fraction of the worm length that a bend must span in
        # order to be counted. The # of worm points
        # (this_value * worm_length_in_samples) is rounded to an integer
        # value. The threshold value is inclusive.
        #
        # Used by: posture_features.get_worm_kinks
        self.kink_length_threshold_pct = 1/12

        self.wavelength = PostureWavelengthOptions()


class PostureWavelengthOptions(object):
    """
    These options are all used in:
    get_amplitude_and_wavelength
    """

    def __init__(self):
        self.n_points_fft = 512

        # This value is in samples, not a spatial frequency. The spatial
        # frequency sampling also varies by the worm length, so this
        # resolution varies on a frame by frame basis.
        self.min_dist_peaks = 5

        self.pct_max_cutoff = 0.5
        self.pct_cutoff = 2


class LocomotionOptions(object):

    def __init__(self, fps):
        #locomotion_features.LocomotionVelocity
        #-------------------------------------
        #Units: seconds
        #NOTE: We could get the defaults from the class ...
        self.velocity_tip_diff = 0.25
        self.velocity_body_diff = 0.5

        #locomotion_features.MotionEvents
        #--------------------------------------
        # Interpolate only this length of NaN run; anything longer is
        # probably an omega turn.
        # If set to "None", interpolate all lengths (i.e. infinity)
        #TODO - Inf would be a better specification
        self.motion_codes_longest_nan_run_to_interpolate = None
        # These are a percentage of the worm's length
        self.motion_codes_speed_threshold_pct = 0.05
        self.motion_codes_distance_threshold_pct = 0.05
        self.motion_codes_pause_threshold_pct = 0.025

        # These are times (s)
        self.motion_codes_min_frames_threshold = 0.5
        self.motion_codes_max_interframes_threshold = 0.25

        #locomotion_bends.LocomotionCrawlingBends
        self.crawling_bends = LocomotionCrawlingBends(fps)
        self.foraging_bends = LocomotionForagingBends(fps)
        self.locomotion_turns = LocomotionTurns(fps)

    def __repr__(self):
        return utils.print_object(self)


class LocomotionTurns(object):

    def __init__(self, fps):
        self.max_interpolation_gap_allowed = 9  # frames
        self.min_omega_event_length = round(fps / 4)
        #TODO: There is still a lot to put into here


class LocomotionForagingBends(object):

    def __init__(self, fps):
        #NOTE: The nose & neck can also be thought of as the head tip
        #and head neck
        self.min_nose_window_samples = round(0.1 * fps)
        self.max_samples_interp_nose = 2 * self.min_nose_window_samples - 1


class LocomotionCrawlingBends(object):

    def __init__(self, fps):
        self.fft_n_samples = 2 ** 14

        self.bends_partitions = \
            {'head': (5, 10),
             'midbody': (22, 27),
             'tail': (39, 44)}

        self.peak_energy_threshold = 0.5

        # max_amplitude_pct_bandwidth - when determining the bandwidth,
        # the minimums that are found can't exceed this percentage of the
        # maximum. Doing so invalidates the result.
        self.max_amplitude_pct_bandwidth = 0.5

        self.min_time_for_bend = 0.5
        self.max_time_for_bend = 15

        #TODO: What are the units on these things ????
        #This is a spatial frequency
        self.min_frequency = 0.25 * self.max_time_for_bend

        #What is the technical max???? 0.5 fps????
        self.max_frequency = 0.25 * fps

        #This is a processing optimization.
        #How far into the maximum peaks should we look ...
        #If this value is low, an expensive computation could go faster. If it
        #is too low, then we end up rerunning the calculation on the whole
        #dataset and we end up losing time
        self.initial_max_I_pct = 0.5

    def __repr__(self):
        return utils.print_object(self)


class IgnorableFeatures:
    """
    I'm not thrilled with where this is placed, but placing it in
    WormFeatures creates a circular dependency
    """

    def __init__(self):
        temp = ['length', 'width', 'area', 'area_per_length', 'width_per_length']
        self.morphology = ['morphology.' + s for s in temp]
        #None of these are implemented ...

        temp = ['velocity', 'motion_events', 'motion_mode',
                'crawling_bends', 'foraging_bends', 'turns']
        self.locomotion = ['locomotion.' + s for s in temp]
        #locomotion
        #crawling_bends: Done
        #turns: Done

        temp = ['bends', 'eccentricity', 'amplitude_and_wavelength',
                'kinks', 'coils', 'directions', 'eigen_projection']
        self.posture = ['posture.' + s for s in temp]
        #None of these are implemented ...
[]
wathsalav/xos
xos/ec2_observer/steps/__init__.py
f6bcaa37a948ee41729236afe7fce0802e002404
#from .sync_external_routes import SyncExternalRoutes
#from .sync_network_slivers import SyncNetworkSlivers
#from .sync_networks import SyncNetworks
#from .sync_network_deployments import SyncNetworkDeployments
#from .sync_site_privileges import SyncSitePrivilege
#from .sync_slice_memberships import SyncSliceMemberships
#from .sync_slices import SyncSlices
#from .sync_sliver_ips import SyncSliverIps
#from .sync_slivers import SyncSlivers
#from .sync_users import SyncUsers
#from .sync_roles import SyncRoles
#from .sync_nodes import SyncNodes
#from .sync_images import SyncImages
#from .garbage_collector import GarbageCollector
[]
NHPatterson/napari-geojson
src/napari_geojson/_tests/test_writer.py
8e7925dc7600608673d489e24e8760c4669eaa0b
import geojson
import pytest
from napari_geojson import write_shapes

ellipse = [[[0, 0], [0, 5], [5, 5], [5, 0]], "ellipse", "Polygon"]
line = [[[0, 0], [5, 5]], "line", "LineString"]
polygon = [[[0, 0], [5, 5], [0, 10]], "polygon", "Polygon"]
polyline = [[[0, 0], [5, 5], [0, 10]], "path", "LineString"]
rectangle = [[[0, 0], [0, 5], [5, 5], [5, 0]], "rectangle", "Polygon"]

sample_shapes = [ellipse, line, polygon, polyline, rectangle]
sample_shapes_ids = ["ellipse", "line", "polygon", "polyline", "rectangle"]


@pytest.mark.parametrize(
    "coords,shape_type,expected", sample_shapes, ids=sample_shapes_ids
)
def test_write_each_shape(
    make_napari_viewer, tmp_path, coords, shape_type, expected
):  # noqa E501
    """Writer writes a shapes layer as GeoJSON."""
    fname = str(tmp_path / "sample.geojson")
    viewer = make_napari_viewer()
    shapes_layer = viewer.add_shapes(coords, shape_type=shape_type)
    # shape was written
    assert len(shapes_layer.data) == 1

    data, meta, _ = shapes_layer.as_layer_data_tuple()
    write_shapes(fname, data, meta)

    # read back
    with open(fname) as fp:
        collection = geojson.load(fp)

    geom = collection["geometries"][0]
    assert geom.type == expected
[((520, 616), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""coords,shape_type,expected"""', 'sample_shapes'], {'ids': 'sample_shapes_ids'}), "('coords,shape_type,expected', sample_shapes, ids=\n sample_shapes_ids)\n", (543, 616), False, 'import pytest\n'), ((1045, 1076), 'napari_geojson.write_shapes', 'write_shapes', (['fname', 'data', 'meta'], {}), '(fname, data, meta)\n', (1057, 1076), False, 'from napari_geojson import write_shapes\n'), ((1143, 1159), 'geojson.load', 'geojson.load', (['fp'], {}), '(fp)\n', (1155, 1159), False, 'import geojson\n')]
woozway/py3-leetcode
algorithms/521. Longest Uncommon Subsequence I.py
e51a9ce7a6bb3e35c0fcb8c8f4f6cd5763708dbf
class Solution:
    def findLUSlength(self, a: str, b: str) -> int:
        return -1 if a == b else max(len(a), len(b))
[]
echoyi/pgmpy
pgmpy/models/ClusterGraph.py
c37dda4401f23ec73fc5d17d957867cd62e588d3
#!/usr/bin/env python3

from collections import defaultdict

import numpy as np

from pgmpy.base import UndirectedGraph
from pgmpy.factors import factor_product


class ClusterGraph(UndirectedGraph):
    r"""
    Base class for representing Cluster Graph.

    Cluster graph is an undirected graph which is associated with a subset of
    variables. The graph contains undirected edges that connects clusters
    whose scopes have a non-empty intersection.

    Formally, a cluster graph is :math:`\mathcal{U}` for a set of factors
    :math:`\Phi` over :math:`\mathcal{X}` is an undirected graph, each of
    whose nodes :math:`i` is associated with a subset
    :math:`C_i \subseteq X`. A cluster graph must be family-preserving -
    each factor :math:`\phi \in \Phi` must be associated with a cluster C,
    denoted :math:`\alpha(\phi)`, such that
    :math:`Scope[\phi] \subseteq C_i`. Each edge between a pair of clusters
    :math:`C_i` and :math:`C_j` is associated with a sepset
    :math:`S_{i,j} \subseteq C_i \cap C_j`.

    Parameters
    ----------
    data: input graph
        Data to initialize graph. If data=None (default) an empty graph is
        created. The data is an edge list.

    Examples
    --------
    Create an empty ClusterGraph with no nodes and no edges

    >>> from pgmpy.models import ClusterGraph
    >>> G = ClusterGraph()

    G can be grown by adding clique nodes.

    **Nodes:**

    Add a tuple (or list or set) of nodes as single clique node.

    >>> G.add_node(('a', 'b', 'c'))
    >>> G.add_nodes_from([('a', 'b'), ('a', 'b', 'c')])

    **Edges:**

    G can also be grown by adding edges.

    >>> G.add_edge(('a', 'b', 'c'), ('a', 'b'))

    or a list of edges

    >>> G.add_edges_from([(('a', 'b', 'c'), ('a', 'b')),
    ...                   (('a', 'b', 'c'), ('a', 'c'))])
    """

    def __init__(self, ebunch=None):
        super(ClusterGraph, self).__init__()
        if ebunch:
            self.add_edges_from(ebunch)
        self.factors = []

    def add_node(self, node, **kwargs):
        """
        Add a single node to the cluster graph.

        Parameters
        ----------
        node: node
            A node should be a collection of nodes forming a clique. It can be
            a list, set or tuple of nodes

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> G = ClusterGraph()
        >>> G.add_node(('a', 'b', 'c'))
        """
        if not isinstance(node, (list, set, tuple)):
            raise TypeError(
                "Node can only be a list, set or tuple of nodes forming a clique"
            )
        node = tuple(node)
        super(ClusterGraph, self).add_node(node, **kwargs)

    def add_nodes_from(self, nodes, **kwargs):
        """
        Add multiple nodes to the cluster graph.

        Parameters
        ----------
        nodes: iterable container
            A container of nodes (list, dict, set, etc.).

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> G = ClusterGraph()
        >>> G.add_nodes_from([('a', 'b'), ('a', 'b', 'c')])
        """
        for node in nodes:
            self.add_node(node, **kwargs)

    def add_edge(self, u, v, **kwargs):
        """
        Add an edge between two clique nodes.

        Parameters
        ----------
        u, v: nodes
            Nodes can be any list or set or tuple of nodes forming a clique.

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> G = ClusterGraph()
        >>> G.add_nodes_from([('a', 'b', 'c'), ('a', 'b'), ('a', 'c')])
        >>> G.add_edges_from([(('a', 'b', 'c'), ('a', 'b')),
        ...                   (('a', 'b', 'c'), ('a', 'c'))])
        """
        set_u = set(u)
        set_v = set(v)
        if set_u.isdisjoint(set_v):
            raise ValueError("No sepset found between these two edges.")

        super(ClusterGraph, self).add_edge(u, v)

    def add_factors(self, *factors):
        """
        Associate a factor to the graph.
        See factors class for the order of potential values.

        Parameters
        ----------
        *factor: pgmpy.factors.factors object
            A factor object on any subset of the variables of the model which
            is to be associated with the model.

        Returns
        -------
        None

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> from pgmpy.factors.discrete import DiscreteFactor
        >>> student = ClusterGraph()
        >>> student.add_node(('Alice', 'Bob'))
        >>> factor = DiscreteFactor(['Alice', 'Bob'], cardinality=[3, 2],
        ...                         values=np.random.rand(6))
        >>> student.add_factors(factor)
        """
        for factor in factors:
            factor_scope = set(factor.scope())
            nodes = [set(node) for node in self.nodes()]
            if factor_scope not in nodes:
                raise ValueError(
                    "Factors defined on clusters of variable not" "present in model"
                )
            self.factors.append(factor)

    def get_factors(self, node=None):
        """
        Return the factors that have been added till now to the graph.

        If node is not None, it would return the factor corresponding to the
        given node.

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> from pgmpy.factors.discrete import DiscreteFactor
        >>> G = ClusterGraph()
        >>> G.add_nodes_from([('a', 'b', 'c'), ('a', 'b'), ('a', 'c')])
        >>> G.add_edges_from([(('a', 'b', 'c'), ('a', 'b')),
        ...                   (('a', 'b', 'c'), ('a', 'c'))])
        >>> phi1 = DiscreteFactor(['a', 'b', 'c'], [2, 2, 2], np.random.rand(8))
        >>> phi2 = DiscreteFactor(['a', 'b'], [2, 2], np.random.rand(4))
        >>> phi3 = DiscreteFactor(['a', 'c'], [2, 2], np.random.rand(4))
        >>> G.add_factors(phi1, phi2, phi3)
        >>> G.get_factors()
        >>> G.get_factors(node=('a', 'b', 'c'))
        """
        if node is None:
            return self.factors
        else:
            nodes = [set(n) for n in self.nodes()]
            if set(node) not in nodes:
                raise ValueError("Node not present in Cluster Graph")
            factors = filter(lambda x: set(x.scope()) == set(node), self.factors)
            return next(factors)

    def remove_factors(self, *factors):
        """
        Removes the given factors from the added factors.

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> from pgmpy.factors.discrete import DiscreteFactor
        >>> student = ClusterGraph()
        >>> factor = DiscreteFactor(['Alice', 'Bob'], cardinality=[2, 2],
        ...                         value=np.random.rand(4))
        >>> student.add_factors(factor)
        >>> student.remove_factors(factor)
        """
        for factor in factors:
            self.factors.remove(factor)

    def get_cardinality(self, node=None):
        """
        Returns the cardinality of the node

        Parameters
        ----------
        node: any hashable python object (optional)
            The node whose cardinality we want. If node is not specified
            returns a dictionary with the given variable as keys and their
            respective cardinality as values.

        Returns
        -------
        int or dict : If node is specified returns the cardinality of the node.
            If node is not specified returns a dictionary with the given
            variable as keys and their respective cardinality as values.

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> from pgmpy.factors.discrete import DiscreteFactor
        >>> student = ClusterGraph()
        >>> factor = DiscreteFactor(['Alice', 'Bob'], cardinality=[2, 2],
        ...                         values=np.random.rand(4))
        >>> student.add_node(('Alice', 'Bob'))
        >>> student.add_factors(factor)
        >>> student.get_cardinality()
        defaultdict(<class 'int'>, {'Bob': 2, 'Alice': 2})
        >>> student.get_cardinality(node='Alice')
        2
        """
        if node:
            for factor in self.factors:
                for variable, cardinality in zip(factor.scope(), factor.cardinality):
                    if node == variable:
                        return cardinality
        else:
            cardinalities = defaultdict(int)
            for factor in self.factors:
                for variable, cardinality in zip(factor.scope(), factor.cardinality):
                    cardinalities[variable] = cardinality
            return cardinalities

    def get_partition_function(self):
        r"""
        Returns the partition function for a given undirected graph.

        A partition function is defined as

        .. math:: \sum_{X}(\prod_{i=1}^{m} \phi_i)

        where m is the number of factors present in the graph and X are all
        the random variables present.

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> from pgmpy.factors.discrete import DiscreteFactor
        >>> G = ClusterGraph()
        >>> G.add_nodes_from([('a', 'b', 'c'), ('a', 'b'), ('a', 'c')])
        >>> G.add_edges_from([(('a', 'b', 'c'), ('a', 'b')),
        ...                   (('a', 'b', 'c'), ('a', 'c'))])
        >>> phi1 = DiscreteFactor(['a', 'b', 'c'], [2, 2, 2], np.random.rand(8))
        >>> phi2 = DiscreteFactor(['a', 'b'], [2, 2], np.random.rand(4))
        >>> phi3 = DiscreteFactor(['a', 'c'], [2, 2], np.random.rand(4))
        >>> G.add_factors(phi1, phi2, phi3)
        >>> G.get_partition_function()
        """
        if self.check_model():
            factor = self.factors[0]
            factor = factor_product(
                factor, *[self.factors[i] for i in range(1, len(self.factors))]
            )
            return np.sum(factor.values)

    def check_model(self):
        """
        Check the model for various errors. This method checks for the
        following errors.

        * Checks if factors are defined for all the cliques or not.
        * Check for running intersection property is not done explicitly over
          here as it done in the add_edges method.
        * Checks if cardinality information for all the variables is available
          or not. If not it raises an error.
        * Check if cardinality of random variable remains same across all the
          factors.

        Returns
        -------
        check: boolean
            True if all the checks are passed
        """
        for clique in self.nodes():
            factors = filter(lambda x: set(x.scope()) == set(clique), self.factors)
            if not any(factors):
                raise ValueError("Factors for all the cliques or clusters not defined.")

        cardinalities = self.get_cardinality()
        if len(set((x for clique in self.nodes() for x in clique))) != len(
            cardinalities
        ):
            raise ValueError("Factors for all the variables not defined.")

        for factor in self.factors:
            for variable, cardinality in zip(factor.scope(), factor.cardinality):
                if cardinalities[variable] != cardinality:
                    raise ValueError(
                        "Cardinality of variable {var} not matching among factors".format(
                            var=variable
                        )
                    )
        return True

    def copy(self):
        """
        Returns a copy of ClusterGraph.

        Returns
        -------
        ClusterGraph: copy of ClusterGraph

        Examples
        --------
        >>> from pgmpy.factors.discrete import DiscreteFactor
        >>> G = ClusterGraph()
        >>> G.add_nodes_from([('a', 'b'), ('b', 'c')])
        >>> G.add_edge(('a', 'b'), ('b', 'c'))
        >>> phi1 = DiscreteFactor(['a', 'b'], [2, 2], np.random.rand(4))
        >>> phi2 = DiscreteFactor(['b', 'c'], [2, 2], np.random.rand(4))
        >>> G.add_factors(phi1, phi2)
        >>> graph_copy = G.copy()
        >>> graph_copy.factors
        [<DiscreteFactor representing phi(a:2, b:2) at 0xb71b19cc>,
         <DiscreteFactor representing phi(b:2, c:2) at 0xb4eaf3ac>]
        >>> graph_copy.edges()
        [(('a', 'b'), ('b', 'c'))]
        >>> graph_copy.nodes()
        [('a', 'b'), ('b', 'c')]
        """
        copy = ClusterGraph(self.edges())
        if self.factors:
            factors_copy = [factor.copy() for factor in self.factors]
            copy.add_factors(*factors_copy)
        return copy
[((8516, 8532), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (8527, 8532), False, 'from collections import defaultdict\n'), ((9995, 10016), 'numpy.sum', 'np.sum', (['factor.values'], {}), '(factor.values)\n', (10001, 10016), True, 'import numpy as np\n')]
stonewell/eim
plugins/commands_window/plugin.py
50fc4bb6e265ed8a5eb84577fd203e83934d55a7
from PySide6.QtWidgets import QListWidgetItem

from yapsy.IPlugin import IPlugin


class Plugin(IPlugin):

    def __init__(self):
        IPlugin.__init__(self)

    def activate(self):
        IPlugin.activate(self)
        return

    def deactivate(self):
        IPlugin.deactivate(self)

    def set_current_window(self, editor):
        self.editor_ = editor
        self.ctx.register_command('commands_list', self.show_commands_window, None, False)
        self.ctx.bind_key('Alt+X', 'commands_list')

    def show_commands_window(self, ctx):
        self.commands_ = ctx.get_commands()
        self.content_window_ = cw = ctx.create_list_content_window()

        self.list_widget_ = l = cw.list_widget_
        self.text_edit_ = t = cw.text_edit_
        self.list_items_ = []

        f_c = self.ctx.get_theme_def_color('default', 'foreground')
        b_c = self.ctx.get_theme_def_color('default', 'background')

        for cmd in self.commands_:
            item = QListWidgetItem(cmd, l)
            item.setForeground(f_c)
            item.setBackground(b_c)
            self.list_items_.append(item)

        t.returnPressed.connect(self.execute_command)
        l.itemDoubleClicked[QListWidgetItem].connect(self.execute_command)

        self.content_window_.select_first_visible_item()
        cw.show()

    def execute_command(self):
        self.item_double_clicked(self.list_widget_.currentItem())

    def item_double_clicked(self, item):
        self.ctx.run_command(item.text())
[((141, 163), 'yapsy.IPlugin.IPlugin.__init__', 'IPlugin.__init__', (['self'], {}), '(self)\n', (157, 163), False, 'from yapsy.IPlugin import IPlugin\n'), ((194, 216), 'yapsy.IPlugin.IPlugin.activate', 'IPlugin.activate', (['self'], {}), '(self)\n', (210, 216), False, 'from yapsy.IPlugin import IPlugin\n'), ((261, 285), 'yapsy.IPlugin.IPlugin.deactivate', 'IPlugin.deactivate', (['self'], {}), '(self)\n', (279, 285), False, 'from yapsy.IPlugin import IPlugin\n'), ((974, 997), 'PySide6.QtWidgets.QListWidgetItem', 'QListWidgetItem', (['cmd', 'l'], {}), '(cmd, l)\n', (989, 997), False, 'from PySide6.QtWidgets import QListWidgetItem\n')]
zroger49/broadlink_custom_component
custom_components/helpers.py
c7b0f9648f1dbaad64e573561e852b689be5a755
"""Helpers for the Broadlink remote.""" from base64 import b64decode from homeassistant.helpers import config_validation as cv def decode_packet(value): """Decode a data packet given for a Broadlink remote.""" value = cv.string(value) extra = len(value) % 4 if extra > 0: value = value + ("=" * (4 - extra)) return b64decode(value) def format_mac(mac): """Format a MAC address.""" return ":".join([format(octet, "02x") for octet in mac])
[((229, 245), 'homeassistant.helpers.config_validation.string', 'cv.string', (['value'], {}), '(value)\n', (238, 245), True, 'from homeassistant.helpers import config_validation as cv\n'), ((346, 362), 'base64.b64decode', 'b64decode', (['value'], {}), '(value)\n', (355, 362), False, 'from base64 import b64decode\n')]
subramp-prep/leetcode
problems/139.Word_Break/AC_dp_n2.py
d125201d9021ab9b1eea5e5393c2db4edd84e740
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: illuz <iilluzen[at]gmail.com>
# File: AC_dp_n2.py
# Create Date: 2015-04-21 10:21:18
# Usage: AC_dp_n2.py
# Descripton:


class Solution:
    # @param s, a string
    # @param dict, a set of string
    # @return a boolean
    def wordBreak(self, s, dict):
        n = len(s)
        dp = [False] * (n + 1)
        dp[0] = True
        for i in range(n):
            if dp[i]:
                for word in dict:
                    j = len(word)
                    if i + j <= n and s[i: i + j] == word:
                        dp[i + j] = True
        return dp[n]


# debug
s = Solution()
print s.wordBreak('a', ['a'])
[]
motionfigures/bullet3
examples/pybullet/gym/pybullet_envs/bullet/kukaCamGymEnv.py
4a66d6c80b38a87ecbdf2fd5c4d281f0b5913d22
import os, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
os.sys.path.insert(0, parentdir)

import math
import gym
from gym import spaces
from gym.utils import seeding
import numpy as np
import time
import pybullet as p
from . import kuka
import random
import pybullet_data
from pkg_resources import parse_version

maxSteps = 1000

RENDER_HEIGHT = 720
RENDER_WIDTH = 960


class KukaCamGymEnv(gym.Env):
    metadata = {'render.modes': ['human', 'rgb_array'], 'video.frames_per_second': 50}

    def __init__(self,
                 urdfRoot=pybullet_data.getDataPath(),
                 actionRepeat=1,
                 isEnableSelfCollision=True,
                 renders=False,
                 isDiscrete=False):
        self._timeStep = 1. / 240.
        self._urdfRoot = urdfRoot
        self._actionRepeat = actionRepeat
        self._isEnableSelfCollision = isEnableSelfCollision
        self._observation = []
        self._envStepCounter = 0
        self._renders = renders
        self._width = 341
        self._height = 256
        self._isDiscrete = isDiscrete
        self.terminated = 0
        self._p = p
        # Camera parameters for rgb_array rendering, matching the debug visualizer
        # camera configured below (the original file never defined these attributes).
        self._cam_dist = 1.3
        self._cam_yaw = 180
        self._cam_pitch = -41
        if self._renders:
            cid = p.connect(p.SHARED_MEMORY)
            if (cid < 0):
                p.connect(p.GUI)
            p.resetDebugVisualizerCamera(1.3, 180, -41, [0.52, -0.2, -0.33])
        else:
            p.connect(p.DIRECT)
        # timinglog = p.startStateLogging(p.STATE_LOGGING_PROFILE_TIMINGS, "kukaTimings.json")
        self._seed()
        self.reset()
        observationDim = len(self.getExtendedObservation())
        # print("observationDim")
        # print(observationDim)
        observation_high = np.array([np.finfo(np.float32).max] * observationDim)
        if (self._isDiscrete):
            self.action_space = spaces.Discrete(7)
        else:
            action_dim = 3
            self._action_bound = 1
            action_high = np.array([self._action_bound] * action_dim)
            self.action_space = spaces.Box(-action_high, action_high)
        self.observation_space = spaces.Box(low=0, high=255, shape=(self._height, self._width, 4))
        self.viewer = None

    def _reset(self):
        self.terminated = 0
        p.resetSimulation()
        p.setPhysicsEngineParameter(numSolverIterations=150)
        p.setTimeStep(self._timeStep)
        p.loadURDF(os.path.join(self._urdfRoot, "plane.urdf"), [0, 0, -1])
        p.loadURDF(os.path.join(self._urdfRoot, "table/table.urdf"),
                   0.5000000, 0.00000, -.820000, 0.000000, 0.000000, 0.0, 1.0)

        # Place the block at a random reachable pose on the table.
        xpos = 0.5 + 0.2 * random.random()
        ypos = 0 + 0.25 * random.random()
        ang = 3.1415925438 * random.random()
        orn = p.getQuaternionFromEuler([0, 0, ang])
        self.blockUid = p.loadURDF(os.path.join(self._urdfRoot, "block.urdf"),
                                    xpos, ypos, -0.1, orn[0], orn[1], orn[2], orn[3])

        p.setGravity(0, 0, -10)
        self._kuka = kuka.Kuka(urdfRootPath=self._urdfRoot, timeStep=self._timeStep)
        self._envStepCounter = 0
        p.stepSimulation()
        self._observation = self.getExtendedObservation()
        return np.array(self._observation)

    def __del__(self):
        p.disconnect()

    def _seed(self, seed=None):
        self.np_random, seed = seeding.np_random(seed)
        return [seed]

    def getExtendedObservation(self):
        # camEyePos = [0.03,0.236,0.54]
        # distance = 1.06
        # pitch = -56
        # yaw = 258
        # roll = 0
        # upAxisIndex = 2
        # camInfo = p.getDebugVisualizerCamera()
        # print("width,height")
        # print(camInfo[0])
        # print(camInfo[1])
        # print("viewMatrix")
        # print(camInfo[2])
        # print("projectionMatrix")
        # print(camInfo[3])
        # viewMat = camInfo[2]
        # viewMat = p.computeViewMatrixFromYawPitchRoll(camEyePos, distance, yaw, pitch, roll, upAxisIndex)
        viewMat = [-0.5120397806167603, 0.7171027660369873, -0.47284144163131714, 0.0,
                   -0.8589617609977722, -0.42747554183006287, 0.28186774253845215, 0.0,
                   0.0, 0.5504802465438843, 0.8348482847213745, 0.0,
                   0.1925382763147354, -0.24935829639434814, -0.4401884973049164, 1.0]
        # projMatrix = camInfo[3]
        projMatrix = [0.75, 0.0, 0.0, 0.0,
                      0.0, 1.0, 0.0, 0.0,
                      0.0, 0.0, -1.0000200271606445, -1.0,
                      0.0, 0.0, -0.02000020071864128, 0.0]
        img_arr = p.getCameraImage(width=self._width, height=self._height,
                                viewMatrix=viewMat, projectionMatrix=projMatrix)
        rgb = img_arr[2]
        np_img_arr = np.reshape(rgb, (self._height, self._width, 4))
        self._observation = np_img_arr
        return self._observation

    def _step(self, action):
        if (self._isDiscrete):
            dv = 0.01
            dx = [0, -dv, dv, 0, 0, 0, 0][action]
            dy = [0, 0, 0, -dv, dv, 0, 0][action]
            da = [0, 0, 0, 0, 0, -0.1, 0.1][action]
            f = 0.3
            realAction = [dx, dy, -0.002, da, f]
        else:
            dv = 0.01
            dx = action[0] * dv
            dy = action[1] * dv
            da = action[2] * 0.1
            f = 0.3
            realAction = [dx, dy, -0.002, da, f]
        return self.step2(realAction)

    def step2(self, action):
        for i in range(self._actionRepeat):
            self._kuka.applyAction(action)
            p.stepSimulation()
            if self._termination():
                break
            # self._observation = self.getExtendedObservation()
            self._envStepCounter += 1
        self._observation = self.getExtendedObservation()
        if self._renders:
            time.sleep(self._timeStep)
        # print("self._envStepCounter")
        # print(self._envStepCounter)
        done = self._termination()
        reward = self._reward()
        # print("len=%r" % len(self._observation))
        return np.array(self._observation), reward, done, {}

    def _render(self, mode='human', close=False):
        if mode != "rgb_array":
            return np.array([])
        # Track the Kuka arm with the render camera.
        base_pos, orn = self._p.getBasePositionAndOrientation(self._kuka.kukaUid)
        view_matrix = self._p.computeViewMatrixFromYawPitchRoll(cameraTargetPosition=base_pos,
                                                                distance=self._cam_dist,
                                                                yaw=self._cam_yaw,
                                                                pitch=self._cam_pitch,
                                                                roll=0,
                                                                upAxisIndex=2)
        proj_matrix = self._p.computeProjectionMatrixFOV(fov=60,
                                                         aspect=float(RENDER_WIDTH) / RENDER_HEIGHT,
                                                         nearVal=0.1,
                                                         farVal=100.0)
        (_, _, px, _, _) = self._p.getCameraImage(width=RENDER_WIDTH,
                                                  height=RENDER_HEIGHT,
                                                  viewMatrix=view_matrix,
                                                  projectionMatrix=proj_matrix,
                                                  renderer=p.ER_BULLET_HARDWARE_OPENGL)
        rgb_array = np.array(px)
        rgb_array = rgb_array[:, :, :3]
        return rgb_array

    def _termination(self):
        # print(self._kuka.endEffectorPos[2])
        state = p.getLinkState(self._kuka.kukaUid, self._kuka.kukaEndEffectorIndex)
        actualEndEffectorPos = state[0]
        # print("self._envStepCounter")
        # print(self._envStepCounter)
        if (self.terminated or self._envStepCounter > maxSteps):
            self._observation = self.getExtendedObservation()
            return True

        maxDist = 0.005
        closestPoints = p.getClosestPoints(self._kuka.trayUid, self._kuka.kukaUid, maxDist)
        if (len(closestPoints)):  # (actualEndEffectorPos[2] <= -0.43):
            self.terminated = 1
            # print("closing gripper, attempting grasp")
            # start grasp and terminate
            fingerAngle = 0.3
            for i in range(100):
                graspAction = [0, 0, 0.0001, 0, fingerAngle]
                self._kuka.applyAction(graspAction)
                p.stepSimulation()
                fingerAngle = fingerAngle - (0.3 / 100.)
                if (fingerAngle < 0):
                    fingerAngle = 0
            for i in range(1000):
                graspAction = [0, 0, 0.001, 0, fingerAngle]
                self._kuka.applyAction(graspAction)
                p.stepSimulation()
                blockPos, blockOrn = p.getBasePositionAndOrientation(self.blockUid)
                if (blockPos[2] > 0.23):
                    # print("BLOCKPOS!")
                    # print(blockPos[2])
                    break
                state = p.getLinkState(self._kuka.kukaUid, self._kuka.kukaEndEffectorIndex)
                actualEndEffectorPos = state[0]
                if (actualEndEffectorPos[2] > 0.5):
                    break
            self._observation = self.getExtendedObservation()
            return True
        return False

    def _reward(self):
        # rewards is height of target object
        blockPos, blockOrn = p.getBasePositionAndOrientation(self.blockUid)
        closestPoints = p.getClosestPoints(self.blockUid, self._kuka.kukaUid, 1000, -1,
                                          self._kuka.kukaEndEffectorIndex)
        reward = -1000
        numPt = len(closestPoints)
        # print(numPt)
        if (numPt > 0):
            # print("reward:")
            reward = -closestPoints[0][8] * 10
        if (blockPos[2] > 0.2):
            # print("grasped a block!!!")
            # print("self._envStepCounter")
            # print(self._envStepCounter)
            reward = reward + 1000
        # print("reward")
        # print(reward)
        return reward

    if parse_version(gym.__version__) >= parse_version('0.9.6'):
        render = _render
        reset = _reset
        seed = _seed
        step = _step
[((164, 196), 'os.sys.path.insert', 'os.sys.path.insert', (['(0)', 'parentdir'], {}), '(0, parentdir)\n', (182, 196), False, 'import os, inspect\n'), ((135, 162), 'os.path.dirname', 'os.path.dirname', (['currentdir'], {}), '(currentdir)\n', (150, 162), False, 'import os, inspect\n'), ((656, 683), 'pybullet_data.getDataPath', 'pybullet_data.getDataPath', ([], {}), '()\n', (681, 683), False, 'import pybullet_data\n'), ((1987, 2052), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(0)', 'high': '(255)', 'shape': '(self._height, self._width, 4)'}), '(low=0, high=255, shape=(self._height, self._width, 4))\n', (1997, 2052), False, 'from gym import spaces\n'), ((2125, 2144), 'pybullet.resetSimulation', 'p.resetSimulation', ([], {}), '()\n', (2142, 2144), True, 'import pybullet as p\n'), ((2149, 2201), 'pybullet.setPhysicsEngineParameter', 'p.setPhysicsEngineParameter', ([], {'numSolverIterations': '(150)'}), '(numSolverIterations=150)\n', (2176, 2201), True, 'import pybullet as p\n'), ((2206, 2235), 'pybullet.setTimeStep', 'p.setTimeStep', (['self._timeStep'], {}), '(self._timeStep)\n', (2219, 2235), True, 'import pybullet as p\n'), ((2543, 2580), 'pybullet.getQuaternionFromEuler', 'p.getQuaternionFromEuler', (['[0, 0, ang]'], {}), '([0, 0, ang])\n', (2567, 2580), True, 'import pybullet as p\n'), ((2701, 2724), 'pybullet.setGravity', 'p.setGravity', (['(0)', '(0)', '(-10)'], {}), '(0, 0, -10)\n', (2713, 2724), True, 'import pybullet as p\n'), ((2837, 2855), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (2853, 2855), True, 'import pybullet as p\n'), ((2921, 2948), 'numpy.array', 'np.array', (['self._observation'], {}), '(self._observation)\n', (2929, 2948), True, 'import numpy as np\n'), ((2975, 2989), 'pybullet.disconnect', 'p.disconnect', ([], {}), '()\n', (2987, 2989), True, 'import pybullet as p\n'), ((3048, 3071), 'gym.utils.seeding.np_random', 'seeding.np_random', (['seed'], {}), '(seed)\n', (3065, 3071), False, 'from gym.utils import seeding\n'), ((4188, 4297), 'pybullet.getCameraImage', 'p.getCameraImage', ([], {'width': 'self._width', 'height': 'self._height', 'viewMatrix': 'viewMat', 'projectionMatrix': 'projMatrix'}), '(width=self._width, height=self._height, viewMatrix=viewMat,\n projectionMatrix=projMatrix)\n', (4204, 4297), True, 'import pybullet as p\n'), ((4329, 4376), 'numpy.reshape', 'np.reshape', (['rgb', '(self._height, self._width, 4)'], {}), '(rgb, (self._height, self._width, 4))\n', (4339, 4376), True, 'import numpy as np\n'), ((6273, 6285), 'numpy.array', 'np.array', (['px'], {}), '(px)\n', (6281, 6285), True, 'import numpy as np\n'), ((6424, 6491), 'pybullet.getLinkState', 'p.getLinkState', (['self._kuka.kukaUid', 'self._kuka.kukaEndEffectorIndex'], {}), '(self._kuka.kukaUid, self._kuka.kukaEndEffectorIndex)\n', (6438, 6491), True, 'import pybullet as p\n'), ((6769, 6836), 'pybullet.getClosestPoints', 'p.getClosestPoints', (['self._kuka.trayUid', 'self._kuka.kukaUid', 'maxDist'], {}), '(self._kuka.trayUid, self._kuka.kukaUid, maxDist)\n', (6787, 6836), True, 'import pybullet as p\n'), ((7971, 8017), 'pybullet.getBasePositionAndOrientation', 'p.getBasePositionAndOrientation', (['self.blockUid'], {}), '(self.blockUid)\n', (8002, 8017), True, 'import pybullet as p\n'), ((8038, 8139), 'pybullet.getClosestPoints', 'p.getClosestPoints', (['self.blockUid', 'self._kuka.kukaUid', '(1000)', '(-1)', 'self._kuka.kukaEndEffectorIndex'], {}), '(self.blockUid, self._kuka.kukaUid, 1000, -1, self._kuka.\n kukaEndEffectorIndex)\n', (8056, 8139), True, 'import pybullet as 
p\n'), ((8509, 8539), 'pkg_resources.parse_version', 'parse_version', (['gym.__version__'], {}), '(gym.__version__)\n', (8522, 8539), False, 'from pkg_resources import parse_version\n'), ((8541, 8563), 'pkg_resources.parse_version', 'parse_version', (['"""0.9.6"""'], {}), "('0.9.6')\n", (8554, 8563), False, 'from pkg_resources import parse_version\n'), ((81, 103), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (101, 103), False, 'import os, inspect\n'), ((1211, 1237), 'pybullet.connect', 'p.connect', (['p.SHARED_MEMORY'], {}), '(p.SHARED_MEMORY)\n', (1220, 1237), True, 'import pybullet as p\n'), ((1288, 1352), 'pybullet.resetDebugVisualizerCamera', 'p.resetDebugVisualizerCamera', (['(1.3)', '(180)', '(-41)', '[0.52, -0.2, -0.33]'], {}), '(1.3, 180, -41, [0.52, -0.2, -0.33])\n', (1316, 1352), True, 'import pybullet as p\n'), ((1364, 1383), 'pybullet.connect', 'p.connect', (['p.DIRECT'], {}), '(p.DIRECT)\n', (1373, 1383), True, 'import pybullet as p\n'), ((1751, 1769), 'gym.spaces.Discrete', 'spaces.Discrete', (['(7)'], {}), '(7)\n', (1766, 1769), False, 'from gym import spaces\n'), ((1850, 1893), 'numpy.array', 'np.array', (['([self._action_bound] * action_dim)'], {}), '([self._action_bound] * action_dim)\n', (1858, 1893), True, 'import numpy as np\n'), ((1920, 1957), 'gym.spaces.Box', 'spaces.Box', (['(-action_high)', 'action_high'], {}), '(-action_high, action_high)\n', (1930, 1957), False, 'from gym import spaces\n'), ((2251, 2293), 'os.path.join', 'os.path.join', (['self._urdfRoot', '"""plane.urdf"""'], {}), "(self._urdfRoot, 'plane.urdf')\n", (2263, 2293), False, 'import os, inspect\n'), ((2319, 2367), 'os.path.join', 'os.path.join', (['self._urdfRoot', '"""table/table.urdf"""'], {}), "(self._urdfRoot, 'table/table.urdf')\n", (2331, 2367), False, 'import os, inspect\n'), ((2517, 2532), 'random.random', 'random.random', ([], {}), '()\n', (2530, 2532), False, 'import random\n'), ((2609, 2651), 'os.path.join', 'os.path.join', (['self._urdfRoot', '"""block.urdf"""'], {}), "(self._urdfRoot, 'block.urdf')\n", (2621, 2651), False, 'import os, inspect\n'), ((4988, 5006), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (5004, 5006), True, 'import pybullet as p\n'), ((5225, 5251), 'time.sleep', 'time.sleep', (['self._timeStep'], {}), '(self._timeStep)\n', (5235, 5251), False, 'import time\n'), ((5439, 5466), 'numpy.array', 'np.array', (['self._observation'], {}), '(self._observation)\n', (5447, 5466), True, 'import numpy as np\n'), ((5575, 5587), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (5583, 5587), True, 'import numpy as np\n'), ((1265, 1281), 'pybullet.connect', 'p.connect', (['p.GUI'], {}), '(p.GUI)\n', (1274, 1281), True, 'import pybullet as p\n'), ((2443, 2458), 'random.random', 'random.random', ([], {}), '()\n', (2456, 2458), False, 'import random\n'), ((2478, 2493), 'random.random', 'random.random', ([], {}), '()\n', (2491, 2493), False, 'import random\n'), ((7165, 7183), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (7181, 7183), True, 'import pybullet as p\n'), ((7411, 7429), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (7427, 7429), True, 'import pybullet as p\n'), ((7456, 7502), 'pybullet.getBasePositionAndOrientation', 'p.getBasePositionAndOrientation', (['self.blockUid'], {}), '(self.blockUid)\n', (7487, 7502), True, 'import pybullet as p\n'), ((7628, 7695), 'pybullet.getLinkState', 'p.getLinkState', (['self._kuka.kukaUid', 'self._kuka.kukaEndEffectorIndex'], {}), '(self._kuka.kukaUid, 
self._kuka.kukaEndEffectorIndex)\n', (7642, 7695), True, 'import pybullet as p\n'), ((1654, 1674), 'numpy.finfo', 'np.finfo', (['np.float32'], {}), '(np.float32)\n', (1662, 1674), True, 'import numpy as np\n')]
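A minimal driver loop for the environment defined above — a sketch rather than part of the original file. The module path in the import is an assumption (the file's own name is not recorded here), and the random policy is just a placeholder:

# Hypothetical usage sketch for KukaCamGymEnv; the import path is assumed.
from kukaCamGymEnv import KukaCamGymEnv

env = KukaCamGymEnv(renders=False, isDiscrete=True)
obs = env.reset()          # obs is an RGBA image array of shape (256, 341, 4)
done = False
while not done:
    action = env.action_space.sample()  # placeholder random policy
    obs, reward, done, info = env.step(action)
print("episode finished, last reward:", reward)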
kumar-pratik/hi-ml
hi-ml-histopathology/src/histopathology/preprocessing/tiling.py
a108cf4ea244a76127adedc0ca60f0a5afdfb3e8
# ------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# ------------------------------------------------------------------------------------------

# These tiling implementations are adapted from PANDA Kaggle solutions, for example:
# https://github.com/kentaroy47/Kaggle-PANDA-1st-place-solution/blob/master/src/data_process/a00_save_tiles.py

from typing import Any, Optional, Tuple

import numpy as np


def get_1d_padding(length: int, tile_size: int) -> Tuple[int, int]:
    """Computes symmetric padding for `length` to be divisible by `tile_size`."""
    pad = (tile_size - length % tile_size) % tile_size
    return (pad // 2, pad - pad // 2)


def pad_for_tiling_2d(array: np.ndarray, tile_size: int, channels_first: Optional[bool] = True,
                      **pad_kwargs: Any) -> Tuple[np.ndarray, np.ndarray]:
    """Symmetrically pads a 2D `array` such that both dimensions are divisible by `tile_size`.

    :param array: 2D image array.
    :param tile_size: Width/height of each tile in pixels.
    :param channels_first: Whether `array` is in CHW (`True`, default) or HWC (`False`) layout.
    :param pad_kwargs: Keyword arguments to be passed to `np.pad()` (e.g. `constant_values=0`).
    :return: A tuple containing:
        - `padded_array`: Resulting array, in the same CHW/HWC layout as the input.
        - `offset`: XY offset introduced by the padding. Add this to coordinates relative to the
          original array to obtain indices for the padded array.
    """
    height, width = array.shape[1:] if channels_first else array.shape[:-1]
    padding_h = get_1d_padding(height, tile_size)
    padding_w = get_1d_padding(width, tile_size)
    padding = [padding_h, padding_w]
    channels_axis = 0 if channels_first else 2
    padding.insert(channels_axis, (0, 0))  # zero padding on channels axis
    padded_array = np.pad(array, padding, **pad_kwargs)
    offset = (padding_w[0], padding_h[0])
    return padded_array, np.array(offset)


def tile_array_2d(array: np.ndarray, tile_size: int, channels_first: Optional[bool] = True,
                  **pad_kwargs: Any) -> Tuple[np.ndarray, np.ndarray]:
    """Split an image array into square non-overlapping tiles.

    The array will be padded symmetrically if its dimensions are not exact multiples of `tile_size`.

    :param array: Image array.
    :param tile_size: Width/height of each tile in pixels.
    :param pad_kwargs: Keyword arguments to be passed to `np.pad()` (e.g. `constant_values=0`).
    :param channels_first: Whether `array` is in CHW (`True`, default) or HWC (`False`) layout.
    :return: A tuple containing:
        - `tiles`: A batch of tiles in NCHW (or NHWC) layout.
        - `coords`: XY coordinates of each tile, in the same order.
    """
    padded_array, (offset_w, offset_h) = pad_for_tiling_2d(array, tile_size, channels_first, **pad_kwargs)
    if channels_first:
        channels, height, width = padded_array.shape
    else:
        height, width, channels = padded_array.shape
    n_tiles_h = height // tile_size
    n_tiles_w = width // tile_size

    if channels_first:
        intermediate_shape = (channels, n_tiles_h, tile_size, n_tiles_w, tile_size)
        axis_order = (1, 3, 0, 2, 4)  # (n_tiles_h, n_tiles_w, channels, tile_size, tile_size)
        output_shape = (n_tiles_h * n_tiles_w, channels, tile_size, tile_size)
    else:
        intermediate_shape = (n_tiles_h, tile_size, n_tiles_w, tile_size, channels)
        axis_order = (0, 2, 1, 3, 4)  # (n_tiles_h, n_tiles_w, tile_size, tile_size, channels)
        output_shape = (n_tiles_h * n_tiles_w, tile_size, tile_size, channels)

    tiles = padded_array.reshape(intermediate_shape)  # Split width and height axes
    tiles = tiles.transpose(axis_order)
    tiles = tiles.reshape(output_shape)  # Flatten tile batch dimension

    # Compute top-left coordinates of every tile, relative to the original array's origin
    coords_h = tile_size * np.arange(n_tiles_h) - offset_h
    coords_w = tile_size * np.arange(n_tiles_w) - offset_w
    # Shape: (n_tiles_h * n_tiles_w, 2)
    coords = np.stack(np.meshgrid(coords_w, coords_h), axis=-1).reshape(-1, 2)

    return tiles, coords


def assemble_tiles_2d(tiles: np.ndarray, coords: np.ndarray, fill_value: Optional[float] = np.nan,
                      channels_first: Optional[bool] = True) -> Tuple[np.ndarray, np.ndarray]:
    """Assembles a 2D array from sequences of tiles and coordinates.

    :param tiles: Stack of tiles with batch dimension first.
    :param coords: XY tile coordinates, assumed to be spaced by multiples of the tile size (shape: [N, 2]).
    :param fill_value: Value to assign to empty elements (default: `NaN`).
    :param channels_first: Whether each tile is in CHW (`True`, default) or HWC (`False`) layout.
    :return: A tuple containing:
        - `array`: The reassembled 2D array with the smallest dimensions to contain all given tiles.
        - `offset`: XY offset introduced by the assembly. Add this to tile coordinates to obtain
          indices for the assembled array.
    """
    if coords.shape[0] != tiles.shape[0]:
        raise ValueError(f"Tile coordinates and values must have the same length, "
                         f"got {coords.shape[0]} and {tiles.shape[0]}")

    if channels_first:
        n_tiles, channels, tile_size, _ = tiles.shape
    else:
        n_tiles, tile_size, _, channels = tiles.shape
    tile_xs, tile_ys = coords.T

    x_min, x_max = min(tile_xs), max(tile_xs + tile_size)
    y_min, y_max = min(tile_ys), max(tile_ys + tile_size)
    width = x_max - x_min
    height = y_max - y_min
    output_shape = (channels, height, width) if channels_first else (height, width, channels)
    array = np.full(output_shape, fill_value)

    offset = np.array([-x_min, -y_min])
    for idx in range(n_tiles):
        row = coords[idx, 1] + offset[1]
        col = coords[idx, 0] + offset[0]
        if channels_first:
            array[:, row:row + tile_size, col:col + tile_size] = tiles[idx]
        else:
            array[row:row + tile_size, col:col + tile_size, :] = tiles[idx]

    return array, offset
[((2038, 2074), 'numpy.pad', 'np.pad', (['array', 'padding'], {}), '(array, padding, **pad_kwargs)\n', (2044, 2074), True, 'import numpy as np\n'), ((5985, 6018), 'numpy.full', 'np.full', (['output_shape', 'fill_value'], {}), '(output_shape, fill_value)\n', (5992, 6018), True, 'import numpy as np\n'), ((6033, 6059), 'numpy.array', 'np.array', (['[-x_min, -y_min]'], {}), '([-x_min, -y_min])\n', (6041, 6059), True, 'import numpy as np\n'), ((2142, 2158), 'numpy.array', 'np.array', (['offset'], {}), '(offset)\n', (2150, 2158), True, 'import numpy as np\n'), ((4115, 4135), 'numpy.arange', 'np.arange', (['n_tiles_h'], {}), '(n_tiles_h)\n', (4124, 4135), True, 'import numpy as np\n'), ((4174, 4194), 'numpy.arange', 'np.arange', (['n_tiles_w'], {}), '(n_tiles_w)\n', (4183, 4194), True, 'import numpy as np\n'), ((4268, 4299), 'numpy.meshgrid', 'np.meshgrid', (['coords_w', 'coords_h'], {}), '(coords_w, coords_h)\n', (4279, 4299), True, 'import numpy as np\n')]
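A quick round-trip check of the helpers above — a sketch that assumes only NumPy and the functions exactly as defined (the `tiling` import path is hypothetical):

import numpy as np
from tiling import tile_array_2d, assemble_tiles_2d  # hypothetical import path

image = np.random.rand(3, 100, 90)    # CHW image; 100 and 90 are not multiples of 32
tiles, coords = tile_array_2d(image, tile_size=32, constant_values=0)
print(tiles.shape)                     # (12, 3, 32, 32): a 4 x 3 grid of padded tiles
reassembled, offset = assemble_tiles_2d(tiles, coords)
print(reassembled.shape)               # (3, 128, 96): the padded extent
# offset maps original-image coordinates into the reassembled array's indices.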
savcardamone/tyche-
miniapp/miniapp/hartreefock/hf.py
ea89edea89a607291e4fe0ba738d75522f54dc1a
from math import pi

from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace
from numpy.linalg import eig, norm


class HartreeFock():

    zeta = array([38.474970, 5.782948, 1.242567, 0.298073])
    num_aos = len(zeta)
    num_mos = 0
    energy_tolerance = 0.0001
    density_tolerance = 0.001
    prev_energy = 0
    prev_density = []

    def __init__(self, num_elec):
        # Make sure we can pair electrons
        if num_elec % 2 != 0:
            raise Exception("Can't do a RHF with", num_elec, "electrons.")
        else:
            print("Restricted Hartree-Fock with", num_elec, "electron(s).")

        # We're RHF, so pair up spins in each molecular orbital
        self.num_mos = int(num_elec / 2)
        if self.num_mos > self.num_aos:
            raise Exception("Can't create", self.num_mos, "molecular orbital(s) from",
                            self.num_aos, "atomic orbital(s).")
        else:
            print(self.num_aos, "atomic orbital(s) and", self.num_mos, "molecular orbital(s).")
            print("Zeta: ", self.zeta)

        self.prev_density = ndarray(shape=(self.num_aos, self.num_aos), dtype=float, order='C')

    def one_electron_integrals(self):

        def overlap_kernel(zeta_i, zeta_j):
            return pow(pi / (zeta_i + zeta_j), 1.5)

        def kinetic_kernel(zeta_i, zeta_j):
            return 3 * pow(pi, 1.5) * (zeta_i * zeta_j) / pow(zeta_i + zeta_j, 2.5)

        def nucattr_kernel(zeta_i, zeta_j):
            return (-4 * pi) / (zeta_i + zeta_j)

        # Initialise our matrices
        overlap = ndarray(shape=(self.num_aos, self.num_aos), dtype=float, order='C')
        kinetic = ndarray(shape=(self.num_aos, self.num_aos), dtype=float, order='C')
        nucattr = ndarray(shape=(self.num_aos, self.num_aos), dtype=float, order='C')
        for i_ao in range(self.num_aos):
            for j_ao in range(self.num_aos):
                overlap[i_ao, j_ao] = overlap_kernel(self.zeta[i_ao], self.zeta[j_ao])
                kinetic[i_ao, j_ao] = kinetic_kernel(self.zeta[i_ao], self.zeta[j_ao])
                nucattr[i_ao, j_ao] = nucattr_kernel(self.zeta[i_ao], self.zeta[j_ao])
        return overlap, kinetic, nucattr

    def two_electron_integrals(self):

        def tei_kernel(zeta_i, zeta_j, zeta_k, zeta_l):
            temp_1 = (zeta_i + zeta_j) * (zeta_k + zeta_l)
            temp_2 = sqrt(zeta_i + zeta_j + zeta_k + zeta_l)
            return 2 * pow(pi, 2.5) / (temp_1 * temp_2)

        teis = ndarray(shape=(self.num_aos, self.num_aos, self.num_aos, self.num_aos),
                       dtype=float, order='C')
        for i_ao in range(self.num_aos):
            for j_ao in range(self.num_aos):
                for k_ao in range(self.num_aos):
                    for l_ao in range(self.num_aos):
                        teis[i_ao, j_ao, k_ao, l_ao] = tei_kernel(self.zeta[i_ao], self.zeta[j_ao],
                                                                  self.zeta[k_ao], self.zeta[l_ao])
        return teis

    def basis_transformation_matrix(self, overlap):
        # Get the eigenvalues and eigenvectors of the overlap matrix
        overlap_evals, overlap_evecs = eig(overlap)
        # Create diagonal matrix with entries given by inverse of eigenvalues of
        # overlap matrix
        try:
            inv_sqrt_evals = diag(divide(1., sqrt(overlap_evals)))
        except:
            raise Exception("Overlap matrix is not positive definite.")
        # Construct the basis transformation matrix and return it
        return overlap_evecs @ inv_sqrt_evals @ overlap_evecs.T

    def fock_matrix(self, core_hamiltonian, teis, density):
        fock = ndarray(shape=density.shape, dtype=float, order='C')
        for i_ao in range(self.num_aos):
            for j_ao in range(self.num_aos):
                fock[i_ao, j_ao] = core_hamiltonian[i_ao, j_ao]
                for k_ao in range(self.num_aos):
                    for l_ao in range(self.num_aos):
                        coulomb = teis[i_ao, k_ao, j_ao, l_ao]
                        exchange = teis[i_ao, k_ao, l_ao, j_ao]
                        fock[i_ao, j_ao] += density[k_ao, l_ao] * (coulomb - 0.5 * exchange)
        return fock

    def density_matrix(self, overlap, basis_transform, fock):

        def ordered_eigensystem(matrix):
            # Generate the eigenvalues and eigenvectors of the matrix
            evals, evecs = eig(matrix)
            # Sort the eigenvalues in ascending order and keep a track of what index they
            # were originally assigned
            ordered_indices = argsort(evals)
            ordered_evals = sort(evals)
            # Order the eigenvectors in ascending order of their corresponding eigenvalues
            ordered_evecs = ndarray(shape=evecs.shape, dtype=float, order='C')
            ordered_transform = ndarray(shape=evecs.shape, dtype=float, order='C')
            for i_evec in range(len(ordered_evals)):
                ordered_evecs[:, i_evec] = evecs[:, ordered_indices[i_evec]]
                ordered_transform[i_evec, :] = basis_transform[ordered_indices[i_evec], :]
            # Return the ordered eigenvalues and corresponding eigenvectors
            return ordered_evals, ordered_evecs, ordered_transform

        # Transform Fock matrix to orthogonal basis
        fock = basis_transform.T @ fock @ basis_transform
        # Get the eigenvalues and eigenvectors of the input Fock matrix
        fock_evals, fock_evecs, new_transform = ordered_eigensystem(fock)
        # Transform the eigenvectors of the Fock matrix back to the original basis
        fock_evecs = new_transform @ fock_evecs

        # First of all we make sure the eigenvectors of the Fock matrix are normalised by the
        # overlap matrix (these are molecular orbitals, afterall)
        for i_mo in range(self.num_aos):
            ao_coeffs = fock_evecs[:, i_mo]
            norm = ao_coeffs.T @ overlap @ ao_coeffs
            fock_evecs[:, i_mo] /= sqrt(norm)

        # Initialise the density matrix
        density = ndarray(shape=overlap.shape, dtype=float, order='C')
        # Loop over all elements in the density matrix and accumulate
        for i_ao in range(self.num_aos):
            for j_ao in range(self.num_aos):
                density[i_ao, j_ao] = 0.0
                # We accumulate only over occupied molecular orbitals! Note that we also have
                # access to the virtual orbitals at this point, but they're effectively discarded
                for i_mo in range(self.num_mos):
                    density[i_ao, j_ao] += 2 * fock_evecs[i_ao, i_mo] * fock_evecs[j_ao, i_mo]

        return fock_evecs, density

    def scf_energy(self, density, core_hamiltonian, fock):
        energy = 0.0
        for i_ao in range(self.num_aos):
            for j_ao in range(self.num_aos):
                energy += 0.5 * density[i_ao, j_ao] * (core_hamiltonian[i_ao, j_ao] + fock[i_ao, j_ao])
        return energy

    def check_convergence(self, energy, density):
        if abs(energy - self.prev_energy) < self.energy_tolerance:
            energy_converged = True
        else:
            energy_converged = False
        self.prev_energy = energy

        if norm(density - self.prev_density) < self.density_tolerance:
            density_converged = True
        else:
            density_converged = False
        self.prev_density = density

        return energy_converged, density_converged

    def mulliken(self, overlap, density):
        return trace(density @ overlap)

    def run(self, num_cycles):
        print("Hartree-Fock will run for a maximum of", num_cycles, "SCF iteration(s).")

        overlap, kinetic, nucattr = self.one_electron_integrals()
        core_hamiltonian = kinetic + nucattr
        teis = self.two_electron_integrals()

        basis_transform = self.basis_transformation_matrix(overlap)

        _, density = self.density_matrix(overlap, basis_transform, core_hamiltonian)
        energy = self.scf_energy(density, core_hamiltonian, core_hamiltonian)

        for i in range(num_cycles):
            fock = self.fock_matrix(core_hamiltonian, teis, density)
            fock_evecs, density = self.density_matrix(overlap, basis_transform, fock)
            energy = self.scf_energy(density, core_hamiltonian, fock)
            print("Iteration", i, "SCF Energy:", energy)

            energy_converged, density_converged = self.check_convergence(energy, density)
            if energy_converged and density_converged:
                print("SCF has converged!")
                for i_mo in range(self.num_mos):
                    print("Molecular Orbital", i_mo, "Coefficients :", fock_evecs[:, i_mo])
                print("Mulliken charge:", self.mulliken(overlap, density))
                break

            if i == num_cycles - 1:
                print("SCF failed to converge.")
                print("Energy Convergence Check:", energy_converged)
                print("Density Convergence Check:", density_converged)

        fock_mo_basis = ndarray(shape=(self.num_mos, self.num_mos), dtype=float, order='C')
        for i_mo in range(self.num_mos):
            for j_mo in range(self.num_mos):
                fock_mo_basis[i_mo, j_mo] = 0.0
                for i_ao in range(self.num_aos):
                    for j_ao in range(self.num_aos):
                        fock_mo_basis[i_mo, j_mo] += fock_evecs[i_ao, j_mo] * fock_evecs[j_ao, i_mo] * fock[i_ao, j_ao]
        print(fock_mo_basis)


if __name__ == "__main__":
    hf = HartreeFock(4)
    hf.run(2000)
[((170, 217), 'numpy.array', 'array', (['[38.47497, 5.782948, 1.242567, 0.298073]'], {}), '([38.47497, 5.782948, 1.242567, 0.298073])\n', (175, 217), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((1103, 1170), 'numpy.ndarray', 'ndarray', ([], {'shape': '(self.num_aos, self.num_aos)', 'dtype': 'float', 'order': '"""C"""'}), "(shape=(self.num_aos, self.num_aos), dtype=float, order='C')\n", (1110, 1170), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((1595, 1662), 'numpy.ndarray', 'ndarray', ([], {'shape': '(self.num_aos, self.num_aos)', 'dtype': 'float', 'order': '"""C"""'}), "(shape=(self.num_aos, self.num_aos), dtype=float, order='C')\n", (1602, 1662), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((1681, 1748), 'numpy.ndarray', 'ndarray', ([], {'shape': '(self.num_aos, self.num_aos)', 'dtype': 'float', 'order': '"""C"""'}), "(shape=(self.num_aos, self.num_aos), dtype=float, order='C')\n", (1688, 1748), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((1767, 1834), 'numpy.ndarray', 'ndarray', ([], {'shape': '(self.num_aos, self.num_aos)', 'dtype': 'float', 'order': '"""C"""'}), "(shape=(self.num_aos, self.num_aos), dtype=float, order='C')\n", (1774, 1834), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((2526, 2625), 'numpy.ndarray', 'ndarray', ([], {'shape': '(self.num_aos, self.num_aos, self.num_aos, self.num_aos)', 'dtype': 'float', 'order': '"""C"""'}), "(shape=(self.num_aos, self.num_aos, self.num_aos, self.num_aos),\n dtype=float, order='C')\n", (2533, 2625), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((3135, 3147), 'numpy.linalg.eig', 'eig', (['overlap'], {}), '(overlap)\n', (3138, 3147), False, 'from numpy.linalg import eig, norm\n'), ((3645, 3697), 'numpy.ndarray', 'ndarray', ([], {'shape': 'density.shape', 'dtype': 'float', 'order': '"""C"""'}), "(shape=density.shape, dtype=float, order='C')\n", (3652, 3697), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((6077, 6129), 'numpy.ndarray', 'ndarray', ([], {'shape': 'overlap.shape', 'dtype': 'float', 'order': '"""C"""'}), "(shape=overlap.shape, dtype=float, order='C')\n", (6084, 6129), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((7583, 7607), 'numpy.trace', 'trace', (['(density @ overlap)'], {}), '(density @ overlap)\n', (7588, 7607), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((9159, 9226), 'numpy.ndarray', 'ndarray', ([], {'shape': '(self.num_mos, self.num_mos)', 'dtype': 'float', 'order': '"""C"""'}), "(shape=(self.num_mos, self.num_mos), dtype=float, order='C')\n", (9166, 9226), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((2411, 2450), 'numpy.sqrt', 'sqrt', (['(zeta_i + zeta_j + zeta_k + zeta_l)'], {}), '(zeta_i + zeta_j + zeta_k + zeta_l)\n', (2415, 2450), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((4409, 4420), 'numpy.linalg.eig', 'eig', (['matrix'], {}), '(matrix)\n', (4412, 4420), False, 'from numpy.linalg import eig, norm\n'), ((4585, 4599), 'numpy.argsort', 'argsort', (['evals'], {}), '(evals)\n', (4592, 4599), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((4629, 4640), 'numpy.sort', 'sort', (['evals'], {}), 
'(evals)\n', (4633, 4640), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((4763, 4813), 'numpy.ndarray', 'ndarray', ([], {'shape': 'evecs.shape', 'dtype': 'float', 'order': '"""C"""'}), "(shape=evecs.shape, dtype=float, order='C')\n", (4770, 4813), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((4847, 4897), 'numpy.ndarray', 'ndarray', ([], {'shape': 'evecs.shape', 'dtype': 'float', 'order': '"""C"""'}), "(shape=evecs.shape, dtype=float, order='C')\n", (4854, 4897), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((6004, 6014), 'numpy.sqrt', 'sqrt', (['norm'], {}), '(norm)\n', (6008, 6014), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((7277, 7310), 'numpy.linalg.norm', 'norm', (['(density - self.prev_density)'], {}), '(density - self.prev_density)\n', (7281, 7310), False, 'from numpy.linalg import eig, norm\n'), ((3318, 3337), 'numpy.sqrt', 'sqrt', (['overlap_evals'], {}), '(overlap_evals)\n', (3322, 3337), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n')]
shengqiangzhang/examples-of-web-crawlers
6.爬取豆瓣排行榜电影数据(含GUI界面版)/main.py
89eb6c169b8824a6a9bc78e7a32e064d33560aa7
# -*- coding:utf-8 -*-
from uiObject import uiObject

# main entry point
if __name__ == '__main__':
    ui = uiObject()
    ui.ui_process()
[((102, 112), 'uiObject.uiObject', 'uiObject', ([], {}), '()\n', (110, 112), False, 'from uiObject import uiObject\n')]
eude313/vault
photos/models.py
d3e24cf01d15de94244b7d2e80316355a0827f74
from django.db import models
from cloudinary.models import CloudinaryField

# Create your models here.
class Category(models.Model):
    name = models.CharField(
        max_length=200,
        null=False,
        blank=False
    )

    def __str__(self):
        return self.name


class Photo(models.Model):
    category = models.ForeignKey(
        Category, on_delete=models.SET_NULL, null=True, blank=True
    )
    image = CloudinaryField('image', default='')
    description = models.TextField()

    def __str__(self):
        return self.description
[((145, 202), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(False)', 'blank': '(False)'}), '(max_length=200, null=False, blank=False)\n', (161, 202), False, 'from django.db import models\n'), ((302, 379), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Category'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'blank': '(True)'}), '(Category, on_delete=models.SET_NULL, null=True, blank=True)\n', (319, 379), False, 'from django.db import models\n'), ((394, 430), 'cloudinary.models.CloudinaryField', 'CloudinaryField', (['"""image"""'], {'default': '""""""'}), "('image', default='')\n", (409, 430), False, 'from cloudinary.models import CloudinaryField\n'), ((449, 467), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (465, 467), False, 'from django.db import models\n')]
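A short usage sketch for the models above — hedged: it assumes the app is registered as `photos`, migrations have been applied, and Cloudinary is configured (a CloudinaryField can typically be assigned a public-id string):

from photos.models import Category, Photo  # assumed app label

landscape = Category.objects.create(name="Landscape")
photo = Photo.objects.create(
    category=landscape,
    image="samples/sunrise",              # hypothetical Cloudinary public id
    description="Sunrise over the harbour",
)
print(photo)   # __str__ returns the description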
christiansencq/ibm_capstone
server/djangoapp/restapis.py
d445fd40c0267be0948a5d85e9d43828b908641c
import requests
import json
# import related models here
from .models import CarDealer, DealerReview
from requests.auth import HTTPBasicAuth
import logging

logger = logging.getLogger(__name__)


# Create a `get_request` to make HTTP GET requests
# e.g., response = requests.get(url, params=params, headers={'Content-Type': 'application/json'},
#                                auth=HTTPBasicAuth('apikey', api_key))
def get_request(url, api_key, **kwargs):
    print("GET from {}".format(url))
    print(kwargs)
    try:
        if api_key is not None:
            response = requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs,
                                    auth=HTTPBasicAuth('apikey', api_key))
        else:
            response = requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs)
    except:
        print("Network Error")
        # Bail out here: `response` is unbound on a failed request.
        return None, None
    status_code = response.status_code
    print("With status code {}".format(status_code))
    json_data = json.loads(response.text)
    return json_data, status_code


# Create a `post_request` to make HTTP POST requests
# e.g., response = requests.post(url, params=kwargs, json=payload)
def post_request(url, json_payload, **kwargs):
    print("Post to url: {} ".format(url))
    print(kwargs)
    print(json_payload)
    response = requests.post(url, headers={'Content-Type': 'application/json'}, params=kwargs,
                             json=json_payload)
    status_code = response.status_code
    print("With status code {}".format(status_code))
    json_data = json.loads(response.text)
    return json_data, status_code


# Create a get_dealers_from_cf method to get dealers from a cloud function
def get_dealers_from_cf(url, **kwargs):
    info = []
    result = "ok"
    # - Call get_request() with specified arguments
    logger.info("Get Dealers from CF Called!")
    json_result, status_code = get_request(url, None)
    if status_code == 200 and json_result:
        dealers = json_result['rows']
        logger.info(len(dealers))
        for dealer in dealers:
            dlr_data = dealer['doc']
            # print('ADDRESS', dlr_data["address"])
            if dlr_data.get('address'):
                # Create a CarDealer object with values in `doc` object
                dealer_obj = CarDealer(address=dlr_data.get("address"), city=dlr_data.get("city"),
                                      full_name=dlr_data.get("full_name"), id=dlr_data.get("id"),
                                      lat=dlr_data.get("lat"), long=dlr_data.get("long"),
                                      short_name=dlr_data.get("short_name"), state=dlr_data.get("state"),
                                      st=dlr_data.get("st"), zip=dlr_data.get("zip"))
                # dealer_obj = CarDealer(address=dealer["doc"]["address"], city=dealer["doc"]["city"],
                #                        full_name=dealer["doc"]["full_name"], id=dealer["doc"]["id"],
                #                        lat=dealer["doc"]["lat"], long=dealer["doc"]["long"],
                #                        short_name=dealer["doc"]["short_name"], st=dealer["doc"]["st"],
                #                        state=dealer["doc"]["state"], zip=dealer["doc"]["zip"])
                info.append(dealer_obj)
    elif json_result:
        result = json_result["message"]
    else:
        result = "Unknown error"
    return info, result


def get_dealer_by_id(url, dealerId):
    # Call get_request with a URL parameter
    info = None
    result = "ok"
    json_result, status_code = get_request(url, None, dealerId=dealerId)
    if status_code == 200 and json_result:
        # Get the row list in JSON as dealers
        dealers = json_result["rows"]
        for dealer in dealers:
            # Create a CarDealer object with values in `doc` object
            info = CarDealer(address=dealer.get("address"), city=dealer.get("city"),
                             full_name=dealer.get("full_name"), id=dealer.get("id"),
                             lat=dealer.get("lat"), long=dealer.get("long"),
                             short_name=dealer.get("short_name"), st=dealer.get("st"),
                             state=dealer.get("state"), zip=dealer.get("zip"))
            # info = CarDealer(address=dealer["address"], city=dealer["city"], full_name=dealer["full_name"],
            #                  id=dealer["id"], lat=dealer["lat"], long=dealer["long"],
            #                  short_name=dealer["short_name"], state=dealer["state"],
            #                  st=dealer["st"], zip=dealer["zip"])
    elif json_result:
        result = json_result["message"]
    else:
        result = "Unknown error"
    return info, result


def get_dealers_by_state(url, state):
    info = []
    result = "ok"
    # Call get_request with a URL parameter
    json_result, status_code = get_request(url, None, state=state)
    if status_code == 200 and json_result:
        # Get the row list in JSON as dealers
        dealers = json_result["rows"]
        # For each dealer object
        for dealer in dealers:
            # dlr_data = dealer["doc"]
            # Create a CarDealer object with values in `doc` object
            dealer_obj = CarDealer(address=dealer.get("address"), city=dealer.get("city"),
                                  full_name=dealer.get("full_name"), id=dealer.get("id"),
                                  lat=dealer.get("lat"), long=dealer.get("long"),
                                  short_name=dealer.get("short_name"), state=dealer.get("state"),
                                  st=dealer.get("st"), zip=dealer.get("zip"))
            # dealer_obj = CarDealer(address=dlr_data.get("address"), city=dlr_data.get("city"),
            #                        full_name=dlr_data.get("full_name"), id=dlr_data.get("id"),
            #                        lat=dlr_data.get("lat"), long=dlr_data.get("long"),
            #                        short_name=dlr_data.get("short_name"), state=dlr_data.get("state"),
            #                        st=dlr_data.get("st"), zip=dlr_data.get("zip"))
            info.append(dealer_obj)
    elif json_result:
        result = json_result["message"]
    else:
        result = "Unknown error"
    return info, result


def get_dealer_reviews_from_cf(url, dealerId):
    info = []
    result = "ok"
    # Call get_request with a URL parameter
    json_result, status_code = get_request(url, None, dealerId=dealerId)
    if status_code == 200 and json_result:
        # Get the row list in JSON as reviews
        reviews = json_result["body"]["data"]
        # For each review object
        for review in reviews:
            if (dealerId == review.get("dealership")):
                # Create a DealerReview object with values in object
                # sentiment = analyze_review_sentiments(review["review"])
                review_obj = DealerReview(
                    id=review.get("id"),
                    name=review.get("name"),
                    review=review.get("review"),
                    purchase=review.get("purchase"),
                    car_make=review.get("car_make", None),
                    car_model=review.get("car_model", None),
                    car_year=review.get("car_year", None),
                    purchase_date=review.get("purchase_date", None))
                info.append(review_obj)
    elif json_result:
        result = json_result["message"]
    else:
        result = "Unknown error"
    return info, result


# Create an `analyze_review_sentiments` method to call Watson NLU and analyze text
# def analyze_review_sentiments(text):
# - Call get_request() with specified arguments
# - Get the returned sentiment label such as Positive or Negative
[((166, 193), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (183, 193), False, 'import logging\n'), ((966, 991), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (976, 991), False, 'import json\n'), ((1293, 1396), 'requests.post', 'requests.post', (['url'], {'headers': "{'Content-Type': 'application/json'}", 'params': 'kwargs', 'json': 'json_payload'}), "(url, headers={'Content-Type': 'application/json'}, params=\n kwargs, json=json_payload)\n", (1306, 1396), False, 'import requests\n'), ((1500, 1525), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (1510, 1525), False, 'import json\n'), ((736, 814), 'requests.get', 'requests.get', (['url'], {'headers': "{'Content-Type': 'application/json'}", 'params': 'kwargs'}), "(url, headers={'Content-Type': 'application/json'}, params=kwargs)\n", (748, 814), False, 'import requests\n'), ((665, 697), 'requests.auth.HTTPBasicAuth', 'HTTPBasicAuth', (['"""apikey"""', 'api_key'], {}), "('apikey', api_key)\n", (678, 697), False, 'from requests.auth import HTTPBasicAuth\n')]
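An illustration of how the helpers above chain together — the URL and dealer id below are placeholders, not values from the source:

# Hypothetical Cloud Functions endpoints; the real URLs are supplied elsewhere in the app.
DEALERSHIP_URL = "https://example-cloud-functions/api/dealership"
REVIEW_URL = "https://example-cloud-functions/api/review"

dealers, result = get_dealers_from_cf(DEALERSHIP_URL)
if result == "ok":
    for dealer in dealers:
        print(dealer.full_name, "-", dealer.city, dealer.st)

reviews, result = get_dealer_reviews_from_cf(REVIEW_URL, 15)  # 15 is a made-up dealer id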
oslokommune/okdata-data-uploader
examples/python/upload.py
fc006ae90440b267613260bba90235799bf0cf6e
import logging
from configparser import ConfigParser

from sdk.data_uploader import DataUploader

logging.basicConfig(level=logging.INFO)
log = logging.getLogger()

config = ConfigParser()
config.read("config.ini")

#####
# Datasets to be added to metadata API
datasetData = {
    "title": "Test",
    "description": "Test data",
    "keywords": ["test"],
    "accessRights": "non-public",
    "objective": "Formålsbeskrivelse",
    "contactPoint": {
        "name": "Tim",
        "email": "[email protected]",
        "phone": "12345678",
    },
    "publisher": "Tim",
}
datasetVersionData = {"version": "6", "schema": {}, "transformation": {}}
datasetVersionEditionData = {
    "edition": "2019-05-28T15:37:00+02:00",
    "description": "Data for one hour",
    "startTime": "2018-12-21T08:00:00+01:00",
    "endTime": "2018-12-21T09:00:00+01:00",
}

######
# The dataset* variables are optional, if these are set in config.ini this script will
# not run the relevant DataUploader function
datasetId = config.get("dataUploader", "datasetId", fallback=None)
datasetVersion = config.get("dataUploader", "datasetVersion", fallback=None)
datasetVersionEdition = config.get(
    "dataUploader", "datasetVersionEdition", fallback=None
)

upload = DataUploader(config)
try:
    log.info("Uploading a file to S3")
    upload.login()
    if datasetId is None:
        upload.createDataset(datasetData)
    if datasetVersion is None:
        upload.createVersion(datasetVersionData)
    if datasetVersionEdition is None:
        upload.createEdition(datasetVersionEditionData)

    log.info(f"Dataset: {upload.datasetId}")
    log.info(f"Version: {upload.datasetVersion}")
    log.info(f"Edition: {upload.datasetVersionEdition}")

    if upload.upload("README.md"):
        log.info("Done... go brew some coffee")
    else:
        log.error("Could not upload file....")
except Exception as e:
    log.exception(f">> Something went horrible wrong:\n{e}")

# To upload with curl: cmd = upload.curl("tmp3.zip")
# Max upload size for now is 5GB
[((97, 136), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (116, 136), False, 'import logging\n'), ((143, 162), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (160, 162), False, 'import logging\n'), ((173, 187), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (185, 187), False, 'from configparser import ConfigParser\n'), ((1242, 1262), 'sdk.data_uploader.DataUploader', 'DataUploader', (['config'], {}), '(config)\n', (1254, 1262), False, 'from sdk.data_uploader import DataUploader\n')]
Enerccio/mahjong
doc/examples.py
903505a7886c31845dfa6b3f54c936a4feb29e6e
from mahjong.hand_calculating.hand import HandCalculator
from mahjong.meld import Meld
from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules
from mahjong.shanten import Shanten
from mahjong.tile import TilesConverter

calculator = HandCalculator()


# useful helper
def print_hand_result(hand_result):
    print(hand_result.han, hand_result.fu)
    print(hand_result.cost['main'])
    print(hand_result.yaku)
    for fu_item in hand_result.fu_details:
        print(fu_item)
    print('')


####################################################################
# Tanyao hand by ron                                               #
####################################################################

# we had to use all 14 tiles in that array
tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444')
win_tile = TilesConverter.string_to_136_array(sou='4')[0]

result = calculator.estimate_hand_value(tiles, win_tile)
print_hand_result(result)


####################################################################
# Tanyao hand by tsumo                                             #
####################################################################

result = calculator.estimate_hand_value(tiles, win_tile, config=HandConfig(is_tsumo=True))
print_hand_result(result)


####################################################################
# Add open set to hand                                             #
####################################################################

melds = [Meld(meld_type=Meld.PON, tiles=TilesConverter.string_to_136_array(man='444'))]

result = calculator.estimate_hand_value(tiles, win_tile, melds=melds,
                                        config=HandConfig(options=OptionalRules(has_open_tanyao=True)))
print_hand_result(result)


####################################################################
# Shanten calculation                                              #
####################################################################

shanten = Shanten()
tiles = TilesConverter.string_to_34_array(man='13569', pin='123459', sou='443')
result = shanten.calculate_shanten(tiles)
print(result)


####################################################################
# Kazoe as a sanbaiman                                             #
####################################################################

tiles = TilesConverter.string_to_136_array(man='22244466677788')
win_tile = TilesConverter.string_to_136_array(man='7')[0]

melds = [
    Meld(Meld.KAN, TilesConverter.string_to_136_array(man='2222'), False)
]

dora_indicators = [
    TilesConverter.string_to_136_array(man='1')[0],
    TilesConverter.string_to_136_array(man='1')[0],
    TilesConverter.string_to_136_array(man='1')[0],
    TilesConverter.string_to_136_array(man='1')[0],
]

config = HandConfig(is_riichi=True, options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN))

result = calculator.estimate_hand_value(tiles, win_tile, melds, dora_indicators, config)
print_hand_result(result)


####################################################################
# Change the cost of yaku                                          #
####################################################################

config = HandConfig(is_renhou=True)
# renhou as an yakuman - old style
config.yaku.renhou.han_closed = 13

tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444')
win_tile = TilesConverter.string_to_136_array(sou='4')[0]

result = calculator.estimate_hand_value(tiles, win_tile, config=config)
print_hand_result(result)
[((252, 268), 'mahjong.hand_calculating.hand.HandCalculator', 'HandCalculator', ([], {}), '()\n', (266, 268), False, 'from mahjong.hand_calculating.hand import HandCalculator\n'), ((772, 844), 'mahjong.tile.TilesConverter.string_to_136_array', 'TilesConverter.string_to_136_array', ([], {'man': '"""22444"""', 'pin': '"""333567"""', 'sou': '"""444"""'}), "(man='22444', pin='333567', sou='444')\n", (806, 844), False, 'from mahjong.tile import TilesConverter\n'), ((1996, 2005), 'mahjong.shanten.Shanten', 'Shanten', ([], {}), '()\n', (2003, 2005), False, 'from mahjong.shanten import Shanten\n'), ((2014, 2085), 'mahjong.tile.TilesConverter.string_to_34_array', 'TilesConverter.string_to_34_array', ([], {'man': '"""13569"""', 'pin': '"""123459"""', 'sou': '"""443"""'}), "(man='13569', pin='123459', sou='443')\n", (2047, 2085), False, 'from mahjong.tile import TilesConverter\n'), ((2361, 2417), 'mahjong.tile.TilesConverter.string_to_136_array', 'TilesConverter.string_to_136_array', ([], {'man': '"""22244466677788"""'}), "(man='22244466677788')\n", (2395, 2417), False, 'from mahjong.tile import TilesConverter\n'), ((3222, 3248), 'mahjong.hand_calculating.hand_config.HandConfig', 'HandConfig', ([], {'is_renhou': '(True)'}), '(is_renhou=True)\n', (3232, 3248), False, 'from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules\n'), ((3328, 3400), 'mahjong.tile.TilesConverter.string_to_136_array', 'TilesConverter.string_to_136_array', ([], {'man': '"""22444"""', 'pin': '"""333567"""', 'sou': '"""444"""'}), "(man='22444', pin='333567', sou='444')\n", (3362, 3400), False, 'from mahjong.tile import TilesConverter\n'), ((856, 899), 'mahjong.tile.TilesConverter.string_to_136_array', 'TilesConverter.string_to_136_array', ([], {'sou': '"""4"""'}), "(sou='4')\n", (890, 899), False, 'from mahjong.tile import TilesConverter\n'), ((2429, 2472), 'mahjong.tile.TilesConverter.string_to_136_array', 'TilesConverter.string_to_136_array', ([], {'man': '"""7"""'}), "(man='7')\n", (2463, 2472), False, 'from mahjong.tile import TilesConverter\n'), ((3412, 3455), 'mahjong.tile.TilesConverter.string_to_136_array', 'TilesConverter.string_to_136_array', ([], {'sou': '"""4"""'}), "(sou='4')\n", (3446, 3455), False, 'from mahjong.tile import TilesConverter\n'), ((1262, 1287), 'mahjong.hand_calculating.hand_config.HandConfig', 'HandConfig', ([], {'is_tsumo': '(True)'}), '(is_tsumo=True)\n', (1272, 1287), False, 'from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules\n'), ((2505, 2551), 'mahjong.tile.TilesConverter.string_to_136_array', 'TilesConverter.string_to_136_array', ([], {'man': '"""2222"""'}), "(man='2222')\n", (2539, 2551), False, 'from mahjong.tile import TilesConverter\n'), ((2587, 2630), 'mahjong.tile.TilesConverter.string_to_136_array', 'TilesConverter.string_to_136_array', ([], {'man': '"""1"""'}), "(man='1')\n", (2621, 2630), False, 'from mahjong.tile import TilesConverter\n'), ((2639, 2682), 'mahjong.tile.TilesConverter.string_to_136_array', 'TilesConverter.string_to_136_array', ([], {'man': '"""1"""'}), "(man='1')\n", (2673, 2682), False, 'from mahjong.tile import TilesConverter\n'), ((2691, 2734), 'mahjong.tile.TilesConverter.string_to_136_array', 'TilesConverter.string_to_136_array', ([], {'man': '"""1"""'}), "(man='1')\n", (2725, 2734), False, 'from mahjong.tile import TilesConverter\n'), ((2743, 2786), 'mahjong.tile.TilesConverter.string_to_136_array', 'TilesConverter.string_to_136_array', ([], {'man': '"""1"""'}), "(man='1')\n", (2777, 2786), False, 'from mahjong.tile 
import TilesConverter\n'), ((2838, 2885), 'mahjong.hand_calculating.hand_config.OptionalRules', 'OptionalRules', ([], {'kazoe': 'HandConfig.KAZOE_SANBAIMAN'}), '(kazoe=HandConfig.KAZOE_SANBAIMAN)\n', (2851, 2885), False, 'from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules\n'), ((1566, 1611), 'mahjong.tile.TilesConverter.string_to_136_array', 'TilesConverter.string_to_136_array', ([], {'man': '"""444"""'}), "(man='444')\n", (1600, 1611), False, 'from mahjong.tile import TilesConverter\n'), ((1711, 1746), 'mahjong.hand_calculating.hand_config.OptionalRules', 'OptionalRules', ([], {'has_open_tanyao': '(True)'}), '(has_open_tanyao=True)\n', (1724, 1746), False, 'from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules\n')]
Infinidat/mount-utils
src/infi/mount_utils/solaris/mounter.py
e4e08e21c278a2391494a7b9d9d7efdaeb393a9c
from ..base.mounter import MounterMixin, execute_mount


class SolarisMounterMixin(MounterMixin):
    def _get_fstab_path(self):
        return "/etc/fstab"

    def _get_entry_format(self, entry):
        return entry.get_format_solaris()

    def mount_entry(self, entry):
        args = ["-F", entry.get_typename(), entry.get_fsname(), entry.get_dirname()]
        args.extend(self._format_options(entry))
        execute_mount(args)
[]
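The mixin above supplies only the Solaris-specific hooks (the fstab path, entry formatting, and the `mount -F` argument order); the shared machinery lives in the unseen `..base.mounter` module. A hedged sketch of the duck-typed "entry" interface `mount_entry` expects — the real entry type is defined elsewhere in the package, so `FakeEntry` is purely illustrative:

# Hypothetical sketch: a minimal object satisfying the methods mount_entry calls.
class FakeEntry(object):
    def get_typename(self):
        return "nfs"

    def get_fsname(self):
        return "server:/export/home"

    def get_dirname(self):
        return "/mnt/home"

# SolarisMounterMixin().mount_entry(FakeEntry())
# would invoke roughly: mount -F nfs server:/export/home /mnt/home <options>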
EladGabay/pulumi-oci
sdk/python/pulumi_oci/database/get_external_non_container_database.py
6841e27d4a1a7e15c672306b769912efbfd3ba99
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs

__all__ = [
    'GetExternalNonContainerDatabaseResult',
    'AwaitableGetExternalNonContainerDatabaseResult',
    'get_external_non_container_database',
]

@pulumi.output_type
class GetExternalNonContainerDatabaseResult:
    """
    A collection of values returned by getExternalNonContainerDatabase.
    """
    def __init__(__self__, character_set=None, compartment_id=None, database_configuration=None,
                 database_edition=None, database_management_config=None, database_version=None,
                 db_id=None, db_packs=None, db_unique_name=None, defined_tags=None,
                 display_name=None, external_non_container_database_id=None, freeform_tags=None,
                 id=None, lifecycle_details=None, ncharacter_set=None,
                 operations_insights_config=None, state=None, time_created=None, time_zone=None):
        if character_set and not isinstance(character_set, str):
            raise TypeError("Expected argument 'character_set' to be a str")
        pulumi.set(__self__, "character_set", character_set)
        if compartment_id and not isinstance(compartment_id, str):
            raise TypeError("Expected argument 'compartment_id' to be a str")
        pulumi.set(__self__, "compartment_id", compartment_id)
        if database_configuration and not isinstance(database_configuration, str):
            raise TypeError("Expected argument 'database_configuration' to be a str")
        pulumi.set(__self__, "database_configuration", database_configuration)
        if database_edition and not isinstance(database_edition, str):
            raise TypeError("Expected argument 'database_edition' to be a str")
        pulumi.set(__self__, "database_edition", database_edition)
        if database_management_config and not isinstance(database_management_config, dict):
            raise TypeError("Expected argument 'database_management_config' to be a dict")
        pulumi.set(__self__, "database_management_config", database_management_config)
        if database_version and not isinstance(database_version, str):
            raise TypeError("Expected argument 'database_version' to be a str")
        pulumi.set(__self__, "database_version", database_version)
        if db_id and not isinstance(db_id, str):
            raise TypeError("Expected argument 'db_id' to be a str")
        pulumi.set(__self__, "db_id", db_id)
        if db_packs and not isinstance(db_packs, str):
            raise TypeError("Expected argument 'db_packs' to be a str")
        pulumi.set(__self__, "db_packs", db_packs)
        if db_unique_name and not isinstance(db_unique_name, str):
            raise TypeError("Expected argument 'db_unique_name' to be a str")
        pulumi.set(__self__, "db_unique_name", db_unique_name)
        if defined_tags and not isinstance(defined_tags, dict):
            raise TypeError("Expected argument 'defined_tags' to be a dict")
        pulumi.set(__self__, "defined_tags", defined_tags)
        if display_name and not isinstance(display_name, str):
            raise TypeError("Expected argument 'display_name' to be a str")
        pulumi.set(__self__, "display_name", display_name)
        if external_non_container_database_id and not isinstance(external_non_container_database_id, str):
            raise TypeError("Expected argument 'external_non_container_database_id' to be a str")
        pulumi.set(__self__, "external_non_container_database_id", external_non_container_database_id)
        if freeform_tags and not isinstance(freeform_tags, dict):
            raise TypeError("Expected argument 'freeform_tags' to be a dict")
        pulumi.set(__self__, "freeform_tags", freeform_tags)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if lifecycle_details and not isinstance(lifecycle_details, str):
            raise TypeError("Expected argument 'lifecycle_details' to be a str")
        pulumi.set(__self__, "lifecycle_details", lifecycle_details)
        if ncharacter_set and not isinstance(ncharacter_set, str):
            raise TypeError("Expected argument 'ncharacter_set' to be a str")
        pulumi.set(__self__, "ncharacter_set", ncharacter_set)
        if operations_insights_config and not isinstance(operations_insights_config, dict):
            raise TypeError("Expected argument 'operations_insights_config' to be a dict")
        pulumi.set(__self__, "operations_insights_config", operations_insights_config)
        if state and not isinstance(state, str):
            raise TypeError("Expected argument 'state' to be a str")
        pulumi.set(__self__, "state", state)
        if time_created and not isinstance(time_created, str):
            raise TypeError("Expected argument 'time_created' to be a str")
        pulumi.set(__self__, "time_created", time_created)
        if time_zone and not isinstance(time_zone, str):
            raise TypeError("Expected argument 'time_zone' to be a str")
        pulumi.set(__self__, "time_zone", time_zone)

    @property
    @pulumi.getter(name="characterSet")
    def character_set(self) -> str:
        """
        The character set of the external database.
        """
        return pulumi.get(self, "character_set")

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment.
        """
        return pulumi.get(self, "compartment_id")

    @property
    @pulumi.getter(name="databaseConfiguration")
    def database_configuration(self) -> str:
        """
        The Oracle Database configuration
        """
        return pulumi.get(self, "database_configuration")

    @property
    @pulumi.getter(name="databaseEdition")
    def database_edition(self) -> str:
        """
        The Oracle Database edition.
        """
        return pulumi.get(self, "database_edition")

    @property
    @pulumi.getter(name="databaseManagementConfig")
    def database_management_config(self) -> 'outputs.GetExternalNonContainerDatabaseDatabaseManagementConfigResult':
        """
        The configuration of the Database Management service.
        """
        return pulumi.get(self, "database_management_config")

    @property
    @pulumi.getter(name="databaseVersion")
    def database_version(self) -> str:
        """
        The Oracle Database version.
        """
        return pulumi.get(self, "database_version")

    @property
    @pulumi.getter(name="dbId")
    def db_id(self) -> str:
        """
        The Oracle Database ID, which identifies an Oracle Database located outside of Oracle Cloud.
        """
        return pulumi.get(self, "db_id")

    @property
    @pulumi.getter(name="dbPacks")
    def db_packs(self) -> str:
        """
        The database packs licensed for the external Oracle Database.
        """
        return pulumi.get(self, "db_packs")

    @property
    @pulumi.getter(name="dbUniqueName")
    def db_unique_name(self) -> str:
        """
        The `DB_UNIQUE_NAME` of the external database.
        """
        return pulumi.get(self, "db_unique_name")

    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Mapping[str, Any]:
        """
        Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
        """
        return pulumi.get(self, "defined_tags")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> str:
        """
        The user-friendly name for the external database. The name does not have to be unique.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="externalNonContainerDatabaseId")
    def external_non_container_database_id(self) -> str:
        return pulumi.get(self, "external_non_container_database_id")

    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Mapping[str, Any]:
        """
        Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        """
        return pulumi.get(self, "freeform_tags")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure external database resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="lifecycleDetails")
    def lifecycle_details(self) -> str:
        """
        Additional information about the current lifecycle state.
        """
        return pulumi.get(self, "lifecycle_details")

    @property
    @pulumi.getter(name="ncharacterSet")
    def ncharacter_set(self) -> str:
        """
        The national character of the external database.
        """
        return pulumi.get(self, "ncharacter_set")

    @property
    @pulumi.getter(name="operationsInsightsConfig")
    def operations_insights_config(self) -> 'outputs.GetExternalNonContainerDatabaseOperationsInsightsConfigResult':
        """
        The configuration of Operations Insights for the external database
        """
        return pulumi.get(self, "operations_insights_config")

    @property
    @pulumi.getter
    def state(self) -> str:
        """
        The current state of the Oracle Cloud Infrastructure external database resource.
        """
        return pulumi.get(self, "state")

    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> str:
        """
        The date and time the database was created.
        """
        return pulumi.get(self, "time_created")

    @property
    @pulumi.getter(name="timeZone")
    def time_zone(self) -> str:
        """
        The time zone of the external database. It is a time zone offset (a character type in the format '[+|-]TZH:TZM') or a time zone region name, depending on how the time zone value was specified when the database was created / last altered.
        """
        return pulumi.get(self, "time_zone")


class AwaitableGetExternalNonContainerDatabaseResult(GetExternalNonContainerDatabaseResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetExternalNonContainerDatabaseResult(
            character_set=self.character_set,
            compartment_id=self.compartment_id,
            database_configuration=self.database_configuration,
            database_edition=self.database_edition,
            database_management_config=self.database_management_config,
            database_version=self.database_version,
            db_id=self.db_id,
            db_packs=self.db_packs,
            db_unique_name=self.db_unique_name,
            defined_tags=self.defined_tags,
            display_name=self.display_name,
            external_non_container_database_id=self.external_non_container_database_id,
            freeform_tags=self.freeform_tags,
            id=self.id,
            lifecycle_details=self.lifecycle_details,
            ncharacter_set=self.ncharacter_set,
            operations_insights_config=self.operations_insights_config,
            state=self.state,
            time_created=self.time_created,
            time_zone=self.time_zone)


def get_external_non_container_database(external_non_container_database_id: Optional[str] = None,
                                        opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetExternalNonContainerDatabaseResult:
    """
    This data source provides details about a specific External Non Container Database resource in Oracle Cloud Infrastructure Database service.

    Gets information about a specific external non-container database.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_oci as oci

    test_external_non_container_database = oci.database.get_external_non_container_database(external_non_container_database_id=oci_database_external_non_container_database["test_external_non_container_database"]["id"])
    ```


    :param str external_non_container_database_id: The external non-container database [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm).
    """
    __args__ = dict()
    __args__['externalNonContainerDatabaseId'] = external_non_container_database_id
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase', __args__, opts=opts, typ=GetExternalNonContainerDatabaseResult).value

    return AwaitableGetExternalNonContainerDatabaseResult(
        character_set=__ret__.character_set,
        compartment_id=__ret__.compartment_id,
        database_configuration=__ret__.database_configuration,
        database_edition=__ret__.database_edition,
        database_management_config=__ret__.database_management_config,
        database_version=__ret__.database_version,
        db_id=__ret__.db_id,
        db_packs=__ret__.db_packs,
        db_unique_name=__ret__.db_unique_name,
        defined_tags=__ret__.defined_tags,
        display_name=__ret__.display_name,
        external_non_container_database_id=__ret__.external_non_container_database_id,
        freeform_tags=__ret__.freeform_tags,
        id=__ret__.id,
        lifecycle_details=__ret__.lifecycle_details,
        ncharacter_set=__ret__.ncharacter_set,
        operations_insights_config=__ret__.operations_insights_config,
        state=__ret__.state,
        time_created=__ret__.time_created,
        time_zone=__ret__.time_zone)
[((5361, 5395), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""characterSet"""'}), "(name='characterSet')\n", (5374, 5395), False, 'import pulumi\n'), ((5577, 5612), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""compartmentId"""'}), "(name='compartmentId')\n", (5590, 5612), False, 'import pulumi\n'), ((5860, 5903), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""databaseConfiguration"""'}), "(name='databaseConfiguration')\n", (5873, 5903), False, 'import pulumi\n'), ((6093, 6130), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""databaseEdition"""'}), "(name='databaseEdition')\n", (6106, 6130), False, 'import pulumi\n'), ((6303, 6349), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""databaseManagementConfig"""'}), "(name='databaseManagementConfig')\n", (6316, 6349), False, 'import pulumi\n'), ((6635, 6672), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""databaseVersion"""'}), "(name='databaseVersion')\n", (6648, 6672), False, 'import pulumi\n'), ((6845, 6871), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dbId"""'}), "(name='dbId')\n", (6858, 6871), False, 'import pulumi\n'), ((7086, 7115), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dbPacks"""'}), "(name='dbPacks')\n", (7099, 7115), False, 'import pulumi\n'), ((7305, 7339), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dbUniqueName"""'}), "(name='dbUniqueName')\n", (7318, 7339), False, 'import pulumi\n'), ((7526, 7559), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""definedTags"""'}), "(name='definedTags')\n", (7539, 7559), False, 'import pulumi\n'), ((7912, 7945), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""displayName"""'}), "(name='displayName')\n", (7925, 7945), False, 'import pulumi\n'), ((8168, 8220), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""externalNonContainerDatabaseId"""'}), "(name='externalNonContainerDatabaseId')\n", (8181, 8220), False, 'import pulumi\n'), ((8368, 8402), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""freeformTags"""'}), "(name='freeformTags')\n", (8381, 8402), False, 'import pulumi\n'), ((9108, 9146), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""lifecycleDetails"""'}), "(name='lifecycleDetails')\n", (9121, 9146), False, 'import pulumi\n'), ((9350, 9385), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""ncharacterSet"""'}), "(name='ncharacterSet')\n", (9363, 9385), False, 'import pulumi\n'), ((9574, 9620), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""operationsInsightsConfig"""'}), "(name='operationsInsightsConfig')\n", (9587, 9620), False, 'import pulumi\n'), ((10135, 10168), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""timeCreated"""'}), "(name='timeCreated')\n", (10148, 10168), False, 'import pulumi\n'), ((10348, 10378), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""timeZone"""'}), "(name='timeZone')\n", (10361, 10378), False, 'import pulumi\n'), ((1273, 1325), 'pulumi.set', 'pulumi.set', (['__self__', '"""character_set"""', 'character_set'], {}), "(__self__, 'character_set', character_set)\n", (1283, 1325), False, 'import pulumi\n'), ((1479, 1533), 'pulumi.set', 'pulumi.set', (['__self__', '"""compartment_id"""', 'compartment_id'], {}), "(__self__, 'compartment_id', compartment_id)\n", (1489, 1533), False, 'import pulumi\n'), ((1711, 1781), 'pulumi.set', 'pulumi.set', (['__self__', '"""database_configuration"""', 'database_configuration'], {}), "(__self__, 'database_configuration', database_configuration)\n", (1721, 1781), False, 'import pulumi\n'), ((1941, 1999), 'pulumi.set', 'pulumi.set', 
(['__self__', '"""database_edition"""', 'database_edition'], {}), "(__self__, 'database_edition', database_edition)\n", (1951, 1999), False, 'import pulumi\n'), ((2191, 2269), 'pulumi.set', 'pulumi.set', (['__self__', '"""database_management_config"""', 'database_management_config'], {}), "(__self__, 'database_management_config', database_management_config)\n", (2201, 2269), False, 'import pulumi\n'), ((2429, 2487), 'pulumi.set', 'pulumi.set', (['__self__', '"""database_version"""', 'database_version'], {}), "(__self__, 'database_version', database_version)\n", (2439, 2487), False, 'import pulumi\n'), ((2614, 2650), 'pulumi.set', 'pulumi.set', (['__self__', '"""db_id"""', 'db_id'], {}), "(__self__, 'db_id', db_id)\n", (2624, 2650), False, 'import pulumi\n'), ((2786, 2828), 'pulumi.set', 'pulumi.set', (['__self__', '"""db_packs"""', 'db_packs'], {}), "(__self__, 'db_packs', db_packs)\n", (2796, 2828), False, 'import pulumi\n'), ((2982, 3036), 'pulumi.set', 'pulumi.set', (['__self__', '"""db_unique_name"""', 'db_unique_name'], {}), "(__self__, 'db_unique_name', db_unique_name)\n", (2992, 3036), False, 'import pulumi\n'), ((3186, 3236), 'pulumi.set', 'pulumi.set', (['__self__', '"""defined_tags"""', 'defined_tags'], {}), "(__self__, 'defined_tags', defined_tags)\n", (3196, 3236), False, 'import pulumi\n'), ((3384, 3434), 'pulumi.set', 'pulumi.set', (['__self__', '"""display_name"""', 'display_name'], {}), "(__self__, 'display_name', display_name)\n", (3394, 3434), False, 'import pulumi\n'), ((3648, 3746), 'pulumi.set', 'pulumi.set', (['__self__', '"""external_non_container_database_id"""', 'external_non_container_database_id'], {}), "(__self__, 'external_non_container_database_id',\n external_non_container_database_id)\n", (3658, 3746), False, 'import pulumi\n'), ((3895, 3947), 'pulumi.set', 'pulumi.set', (['__self__', '"""freeform_tags"""', 'freeform_tags'], {}), "(__self__, 'freeform_tags', freeform_tags)\n", (3905, 3947), False, 'import pulumi\n'), ((4065, 4095), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (4075, 4095), False, 'import pulumi\n'), ((4258, 4318), 'pulumi.set', 'pulumi.set', (['__self__', '"""lifecycle_details"""', 'lifecycle_details'], {}), "(__self__, 'lifecycle_details', lifecycle_details)\n", (4268, 4318), False, 'import pulumi\n'), ((4472, 4526), 'pulumi.set', 'pulumi.set', (['__self__', '"""ncharacter_set"""', 'ncharacter_set'], {}), "(__self__, 'ncharacter_set', ncharacter_set)\n", (4482, 4526), False, 'import pulumi\n'), ((4718, 4796), 'pulumi.set', 'pulumi.set', (['__self__', '"""operations_insights_config"""', 'operations_insights_config'], {}), "(__self__, 'operations_insights_config', operations_insights_config)\n", (4728, 4796), False, 'import pulumi\n'), ((4923, 4959), 'pulumi.set', 'pulumi.set', (['__self__', '"""state"""', 'state'], {}), "(__self__, 'state', state)\n", (4933, 4959), False, 'import pulumi\n'), ((5107, 5157), 'pulumi.set', 'pulumi.set', (['__self__', '"""time_created"""', 'time_created'], {}), "(__self__, 'time_created', time_created)\n", (5117, 5157), False, 'import pulumi\n'), ((5296, 5340), 'pulumi.set', 'pulumi.set', (['__self__', '"""time_zone"""', 'time_zone'], {}), "(__self__, 'time_zone', time_zone)\n", (5306, 5340), False, 'import pulumi\n'), ((5523, 5556), 'pulumi.get', 'pulumi.get', (['self', '"""character_set"""'], {}), "(self, 'character_set')\n", (5533, 5556), False, 'import pulumi\n'), ((5805, 5839), 'pulumi.get', 'pulumi.get', (['self', '"""compartment_id"""'], {}), "(self, 
'compartment_id')\n", (5815, 5839), False, 'import pulumi\n'), ((6030, 6072), 'pulumi.get', 'pulumi.get', (['self', '"""database_configuration"""'], {}), "(self, 'database_configuration')\n", (6040, 6072), False, 'import pulumi\n'), ((6246, 6282), 'pulumi.get', 'pulumi.get', (['self', '"""database_edition"""'], {}), "(self, 'database_edition')\n", (6256, 6282), False, 'import pulumi\n'), ((6568, 6614), 'pulumi.get', 'pulumi.get', (['self', '"""database_management_config"""'], {}), "(self, 'database_management_config')\n", (6578, 6614), False, 'import pulumi\n'), ((6788, 6824), 'pulumi.get', 'pulumi.get', (['self', '"""database_version"""'], {}), "(self, 'database_version')\n", (6798, 6824), False, 'import pulumi\n'), ((7040, 7065), 'pulumi.get', 'pulumi.get', (['self', '"""db_id"""'], {}), "(self, 'db_id')\n", (7050, 7065), False, 'import pulumi\n'), ((7256, 7284), 'pulumi.get', 'pulumi.get', (['self', '"""db_packs"""'], {}), "(self, 'db_packs')\n", (7266, 7284), False, 'import pulumi\n'), ((7471, 7505), 'pulumi.get', 'pulumi.get', (['self', '"""db_unique_name"""'], {}), "(self, 'db_unique_name')\n", (7481, 7505), False, 'import pulumi\n'), ((7859, 7891), 'pulumi.get', 'pulumi.get', (['self', '"""defined_tags"""'], {}), "(self, 'defined_tags')\n", (7869, 7891), False, 'import pulumi\n'), ((8115, 8147), 'pulumi.get', 'pulumi.get', (['self', '"""display_name"""'], {}), "(self, 'display_name')\n", (8125, 8147), False, 'import pulumi\n'), ((8293, 8347), 'pulumi.get', 'pulumi.get', (['self', '"""external_non_container_database_id"""'], {}), "(self, 'external_non_container_database_id')\n", (8303, 8347), False, 'import pulumi\n'), ((8774, 8807), 'pulumi.get', 'pulumi.get', (['self', '"""freeform_tags"""'], {}), "(self, 'freeform_tags')\n", (8784, 8807), False, 'import pulumi\n'), ((9065, 9087), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (9075, 9087), False, 'import pulumi\n'), ((9292, 9329), 'pulumi.get', 'pulumi.get', (['self', '"""lifecycle_details"""'], {}), "(self, 'lifecycle_details')\n", (9302, 9329), False, 'import pulumi\n'), ((9519, 9553), 'pulumi.get', 'pulumi.get', (['self', '"""ncharacter_set"""'], {}), "(self, 'ncharacter_set')\n", (9529, 9553), False, 'import pulumi\n'), ((9852, 9898), 'pulumi.get', 'pulumi.get', (['self', '"""operations_insights_config"""'], {}), "(self, 'operations_insights_config')\n", (9862, 9898), False, 'import pulumi\n'), ((10089, 10114), 'pulumi.get', 'pulumi.get', (['self', '"""state"""'], {}), "(self, 'state')\n", (10099, 10114), False, 'import pulumi\n'), ((10295, 10327), 'pulumi.get', 'pulumi.get', (['self', '"""time_created"""'], {}), "(self, 'time_created')\n", (10305, 10327), False, 'import pulumi\n'), ((10696, 10725), 'pulumi.get', 'pulumi.get', (['self', '"""time_zone"""'], {}), "(self, 'time_zone')\n", (10706, 10725), False, 'import pulumi\n'), ((13060, 13082), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\n', (13080, 13082), False, 'import pulumi\n'), ((13174, 13349), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase"""', '__args__'], {'opts': 'opts', 'typ': 'GetExternalNonContainerDatabaseResult'}), "(\n 'oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase'\n , __args__, opts=opts, typ=GetExternalNonContainerDatabaseResult)\n", (13195, 13349), False, 'import pulumi\n')]
xmedius/xmedius-mailrelayserver
setup.py
44bb55c4b543e96bb23a45499d281c1bbab18abf
from setuptools import setup from setuptools.command.install import install class PostInstallCommand(install): user_options = install.user_options + [ ('noservice', None, None), ] def initialize_options(self): install.initialize_options(self) self.noservice = None def finalize_options(self): install.finalize_options(self) def run(self): install.run(self) if not self.noservice: from xmediusmailrelayserver import console console.install_service(['--startup', 'auto', 'install']) setup( name='xmediusmailrelayserver', version='1.0.0', description='The Python module to be used to relay mail to different servers depending on patterns', long_description='See https://github.com/xmedius/xmedius-mailrelayserver for more information', url='https://github.com/xmedius/xmedius-mailrelayserver/', author='XMedius R&D', license='MIT', classifiers=[ 'Programming Language :: Python :: 3.6', 'Environment :: Win32 (MS Windows)', 'Operating System :: Microsoft :: Windows' ], cmdclass={ 'install': PostInstallCommand }, packages=['xmediusmailrelayserver'], package_data={'xmediusmailrelayserver': ['config.yml']}, install_requires=['pyyaml', 'aiosmtpd'], dependency_links=[] )
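# Usage sketch (added for illustration; not part of the original file). The
# custom install command above adds a --noservice flag that skips Windows
# service registration:
#
#   python setup.py install              # install and register the service
#   python setup.py install --noservice  # install without touching the service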
[((578, 1309), 'setuptools.setup', 'setup', ([], {'name': '"""xmediusmailrelayserver"""', 'version': '"""1.0.0"""', 'description': '"""The Python module to be used to relay mail to different servers depending on patterns"""', 'long_description': '"""See https://github.com/xmedius/xmedius-mailrelayserver for more information"""', 'url': '"""https://github.com/xmedius/xmedius-mailrelayserver/"""', 'author': '"""XMedius R&D"""', 'license': '"""MIT"""', 'classifiers': "['Programming Language :: Python :: 3.6',\n 'Environment :: Win32 (MS Windows)',\n 'Operating System :: Microsoft :: Windows']", 'cmdclass': "{'install': PostInstallCommand}", 'packages': "['xmediusmailrelayserver']", 'package_data': "{'xmediusmailrelayserver': ['config.yml']}", 'install_requires': "['pyyaml', 'aiosmtpd']", 'dependency_links': '[]'}), "(name='xmediusmailrelayserver', version='1.0.0', description=\n 'The Python module to be used to relay mail to different servers depending on patterns'\n , long_description=\n 'See https://github.com/xmedius/xmedius-mailrelayserver for more information'\n , url='https://github.com/xmedius/xmedius-mailrelayserver/', author=\n 'XMedius R&D', license='MIT', classifiers=[\n 'Programming Language :: Python :: 3.6',\n 'Environment :: Win32 (MS Windows)',\n 'Operating System :: Microsoft :: Windows'], cmdclass={'install':\n PostInstallCommand}, packages=['xmediusmailrelayserver'], package_data=\n {'xmediusmailrelayserver': ['config.yml']}, install_requires=['pyyaml',\n 'aiosmtpd'], dependency_links=[])\n", (583, 1309), False, 'from setuptools import setup\n'), ((240, 272), 'setuptools.command.install.install.initialize_options', 'install.initialize_options', (['self'], {}), '(self)\n', (266, 272), False, 'from setuptools.command.install import install\n'), ((344, 374), 'setuptools.command.install.install.finalize_options', 'install.finalize_options', (['self'], {}), '(self)\n', (368, 374), False, 'from setuptools.command.install import install\n'), ((403, 420), 'setuptools.command.install.install.run', 'install.run', (['self'], {}), '(self)\n', (414, 420), False, 'from setuptools.command.install import install\n'), ((519, 576), 'xmediusmailrelayserver.console.install_service', 'console.install_service', (["['--startup', 'auto', 'install']"], {}), "(['--startup', 'auto', 'install'])\n", (542, 576), False, 'from xmediusmailrelayserver import console\n')]
tsbxmw/leetcode
143.py
e751311b8b5f2769874351717a22c35c19b48a36
# 143. Reorder List

# Given a singly linked list L: L0→L1→…→Ln-1→Ln,
# rearrange it to: L0→Ln→L1→Ln-1→L2→Ln-2→…
# You may not simply change the values inside the nodes;
# you must actually rearrange the nodes themselves.

# Example 1:
# Given the list 1->2->3->4, reorder it to 1->4->2->3.

# Example 2:
# Given the list 1->2->3->4->5, reorder it to 1->5->2->4->3.


# Definition for a singly-linked list (normally supplied by the LeetCode
# judge; defined here so the file runs standalone).
class ListNode:
    def __init__(self, x):
        self.val = x
        self.next = None


## The approach is a recursive swap: recurse down to the last node, then,
## while unwinding, splice the current tail in right behind the front
## pointer. Walkthrough on 1 -> 2 -> 3 -> 4 -> 5:
##      temp = 1.next == 2
##      1.next = 4.next == 5
##      4.next = None
##      1.next.next == 5.next = 2
##      now = 2
##      last = 3.next

class Solution:
    def reorderList(self, head: ListNode) -> None:
        """
        Do not return anything, modify head in-place instead.
        """
        if not head:
            return
        self.pre = head   # front pointer: tail nodes are spliced in after it
        self.flag = True  # set to False once the two pointers meet

        def test(node):
            if not node.next:  # node is the last node: nothing left to swap
                return
            test(node.next)
            if not self.flag:
                return
            if not self.pre.next:
                self.flag = False
                return
            if self.pre == node:
                self.flag = False
                return
            temp = self.pre.next
            self.pre.next = node.next  # move the current tail behind the front pointer
            self.pre.next.next = temp
            self.pre = temp
            node.next = None           # node becomes the new tail

        test(self.pre)
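# Quick self-check (a minimal sketch added for illustration; not part of the
# original LeetCode submission):
if __name__ == "__main__":
    # build 1 -> 2 -> 3 -> 4 -> 5
    head = ListNode(1)
    tail = head
    for v in range(2, 6):
        tail.next = ListNode(v)
        tail = tail.next

    Solution().reorderList(head)

    out = []
    while head:
        out.append(head.val)
        head = head.next
    print(out)  # expected: [1, 5, 2, 4, 3]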
[]
Toranktto/CraftProtocol
CraftProtocol/NBT/NBTTagList.py
a6f4a67756c3868820ab76df5e148d76b020d990
#!/usr/bin/env python

from CraftProtocol.NBT.NBTBase import NBTBase
from CraftProtocol.NBT.NBTProvider import NBTProvider
from CraftProtocol.StreamIO import StreamIO


class NBTTagList(NBTBase):
    TYPE_ID = 0x09

    def __init__(self, tag_type, values=None):
        NBTBase.__init__(self)

        if values is None:
            values = []

        self._tag_type = tag_type
        self._values = list(values)

    def get(self):
        return self._values

    def get_tag_type(self):
        return self._tag_type

    def __getitem__(self, i):
        return self._values.__getitem__(i)

    def __setitem__(self, i, o):
        assert isinstance(o, self._tag_type), "value must be " + self._tag_type.__name__

        self._values.__setitem__(i, o)

    def __delitem__(self, i):
        self._values.__delitem__(i)

    def __iter__(self):
        return self._values.__iter__()

    def __contains__(self, o):
        return self._values.__contains__(o)

    def __len__(self):
        return self._values.__len__()

    def append(self, x):
        assert isinstance(x, self._tag_type), "arg must be " + self._tag_type.__name__

        self._values.append(x)

    def remove(self, x):
        assert isinstance(x, self._tag_type), "arg must be " + self._tag_type.__name__

        self._values.remove(x)

    @staticmethod
    def write(stream, tag):
        # Write the element type id, the element count, then each element payload.
        StreamIO.write_ubyte(stream, tag.get_tag_type().TYPE_ID)
        StreamIO.write_int(stream, len(tag))

        for i in tag:
            tag.get_tag_type().write(stream, i)

    @staticmethod
    def read(stream):
        tag_type_id = StreamIO.read_ubyte(stream)
        tag_type = NBTProvider.get_tag_class(tag_type_id)
        values = []
        length = StreamIO.read_int(stream)  # renamed from "len" to avoid shadowing the builtin

        for _ in range(length):  # "range" instead of the Python 2-only "xrange"
            values.append(tag_type.read(stream))

        return NBTTagList(tag_type, values)
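# Minimal usage sketch (added for illustration; not part of the original
# module). The element class below is a local stand-in written only for this
# demo: real element tags such as an NBT int tag live elsewhere in
# CraftProtocol and may differ, and the TYPE_ID value is an assumption.
if __name__ == "__main__":
    import io

    class _DemoIntTag(object):
        TYPE_ID = 0x03  # assumed id for an int tag, for illustration only

        def __init__(self, value):
            self.value = value

        @staticmethod
        def write(stream, tag):
            StreamIO.write_int(stream, tag.value)

    demo = NBTTagList(_DemoIntTag, [_DemoIntTag(1), _DemoIntTag(2)])
    demo.append(_DemoIntTag(3))
    assert len(demo) == 3 and demo[0].value == 1

    buf = io.BytesIO()  # assumes StreamIO writes to any binary file-like object
    NBTTagList.write(buf, demo)  # element type id, count, then each payload
    print("%d bytes written" % len(buf.getvalue()))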
[((271, 293), 'CraftProtocol.NBT.NBTBase.NBTBase.__init__', 'NBTBase.__init__', (['self'], {}), '(self)\n', (287, 293), False, 'from CraftProtocol.NBT.NBTBase import NBTBase\n'), ((1611, 1638), 'CraftProtocol.StreamIO.StreamIO.read_ubyte', 'StreamIO.read_ubyte', (['stream'], {}), '(stream)\n', (1630, 1638), False, 'from CraftProtocol.StreamIO import StreamIO\n'), ((1658, 1696), 'CraftProtocol.NBT.NBTProvider.NBTProvider.get_tag_class', 'NBTProvider.get_tag_class', (['tag_type_id'], {}), '(tag_type_id)\n', (1683, 1696), False, 'from CraftProtocol.NBT.NBTProvider import NBTProvider\n'), ((1731, 1756), 'CraftProtocol.StreamIO.StreamIO.read_int', 'StreamIO.read_int', (['stream'], {}), '(stream)\n', (1748, 1756), False, 'from CraftProtocol.StreamIO import StreamIO\n')]
lapaniku/GAS
examples/0b02b172-ad67-449b-b4a2-ff645b28c508.py
e49ce302689af683da744cd172e0359c0ba0af86
# This program was generated by "Generative Art Synthesizer" # Generation date: 2021-11-28 09:21:40 UTC # GAS change date: 2021-11-28 09:20:21 UTC # GAS md5 hash: ad55481e87ca5a7e9a8e92cd336d1cad # Python version: 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)] # For more information visit: https://github.com/volotat/GAS #import python libraries import os #OS version: default import numpy as np #Numpy version: 1.19.5 from PIL import Image #PIL version: 8.1.2 #set initial params SIZE = 768 GRID_CHANNELS = 16 def test_values(arr): if np.isnan(arr).any(): raise Exception('Array has None elements!') if np.amin(arr) < -1 or np.amax(arr) > 1: raise Exception('Values went to far! [ %.2f : %.2f ]'%(np.amin(arr), np.amax(arr)) ) return arr #define grid transformation methods def transit(x, t_indx, s_indx, alphas): res = x.copy() res[:,:,t_indx] = np.sum(x[:,:,s_indx] * alphas, axis = -1) return test_values(res.clip(-1,1)) def sin(x, t_indx, s_indx, scale = 1, shift = 0): res = x.copy() res[:,:,t_indx] = np.sin(x[:,:,s_indx] * 0.5 * np.pi * scale + shift) return test_values(res) def magnitude(x, t_indx, s_indx, ord = 2): res = x.copy() res[:,:,t_indx] = np.linalg.norm(x[:,:,s_indx], axis = -1, ord = ord) / np.sqrt(len(s_indx)) return test_values(res) def shift(x, t_indx, s_indx, shift): res = x.copy() if shift > 0: res[:,:,t_indx] = (-np.abs(((x[:,:,s_indx] + 1) / 2) ** (1 + shift) - 1) ** (1 / (1 + shift)) + 1) * 2 - 1 if shift < 0: res[:,:,t_indx] = np.abs((1 - (x[:,:,s_indx] + 1) / 2) ** (1 - shift) - 1) ** (1 / (1 - shift)) * 2 - 1 return test_values(res) def inverse(x, t_indx, s_indx): res = x.copy() res[:,:,t_indx] = -x[:,:,s_indx] return test_values(res) def smooth_max(x, t_indx, s1_indx, s2_indx, p = 10): res = x.copy() res[:,:,t_indx] = np.log((np.exp(x[:,:,s1_indx] * p) + np.exp(x[:,:,s2_indx] * p)) ** (1/p)) / 1.07 return test_values(res) def smooth_min(x, t_indx, s1_indx, s2_indx, p = 10): res = x.copy() res[:,:,t_indx] = -np.log((np.exp(-x[:,:,s1_indx] * p) + np.exp(-x[:,:,s2_indx] * p)) ** (1/p)) / 1.07 return test_values(res) def prod(x, t_indx, s_indx): res = x.copy() res[:,:,t_indx] = np.prod(x[:,:,s_indx], -1) return test_values(res) def power(x, t_indx, s_indx, p = 1): res = x.copy() res[:,:,t_indx] = np.sign(x[:,:,s_indx]) * np.abs(x[:,:,s_indx]) ** p return test_values(res) #set initial grid grid = np.zeros((SIZE, SIZE, GRID_CHANNELS)) x = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((1, SIZE)).repeat(SIZE, 0) y = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((SIZE, 1)).repeat(SIZE, 1) grid[:,:,0] = (x * 0.9386329219527516 + y * -0.45147169454413794) / 2 grid[:,:,1] = (x * 0.8090860808441245 + y * 0.2914526739617249) / 2 grid[:,:,2] = (x * 0.9804797761207309 + y * -0.5063344373124843) / 2 grid[:,:,3] = (x * -0.8484277738516293 + y * -0.5155435342135386) / 2 grid[:,:,4] = (x * -0.6644350461377522 + y * 0.1739322518414499) / 2 grid[:,:,5] = (x * -0.5986715486203882 + y * 0.9515468928881716) / 2 grid[:,:,6] = (x * 0.2265055481768512 + y * 0.4365452266748293) / 2 grid[:,:,7] = (x * 0.5049774961793401 + y * 0.05113255120007798) / 2 grid[:,:,8] = (x * -0.3391983246964396 + y * -0.5135707069423852) / 2 grid[:,:,9] = (x * -0.4075423366723827 + y * 0.5388833863473126) / 2 grid[:,:,10] = (x * -0.4262457935185371 + y * -0.6817079327248272) / 2 grid[:,:,11] = (x * 0.8435706697714382 + y * 0.7746597063144072) / 2 grid[:,:,12] = (x * -0.5303146721156469 + y * -0.41048419195488317) / 2 grid[:,:,13] = (x * -0.5864100240508576 + y * -0.9425245660964123) / 2 
grid[:,:,14] = (x * -0.7665883618456049 + y * -0.3867357840809138) / 2 grid[:,:,15] = (x * 0.49037959172682255 + y * -0.7671554143072785) / 2 #apply transformations to the grid grid = transit(grid, 4, [7, 6, 12, 8, 9, 0, 1], [0.05863158300898051, 0.3467981515651057, 0.262107802795733, 0.038001653167336905, 0.2112967596903696, 0.002128256606899112, 0.08103579316557531]) grid = shift(grid, 3, 3, 2.4622222565241207) grid = sin(grid, 10, 0, 0.5112825397666086, 37.95950546335726) grid = sin(grid, 12, 13, 3.6938747278005737, 76.37702042567852) grid = magnitude(grid, 15, [5, 3, 8, 0, 15], 2) grid = prod(grid, 2, [3, 11, 1]) grid = smooth_min(grid, 3, 2, 7) grid = smooth_max(grid, 8, 10, 6) grid = prod(grid, 3, [2, 6, 10, 7, 4]) grid = smooth_min(grid, 7, 12, 0) grid = transit(grid, 2, [1, 2], [0.9078557995211777, 0.09214420047882232]) grid = smooth_max(grid, 1, 0, 1) grid = sin(grid, 9, 4, 3.0281102269529683, 11.185401112275173) grid = sin(grid, 10, 4, 1.2844464834351186, -45.836492724169695) grid = sin(grid, 1, 2, -1.5301674594368837, -60.29431568717391) grid = transit(grid, 2, [13, 11, 5], [0.421270391024163, 0.5054038923567993, 0.07332571661903758]) grid = transit(grid, 11, [1, 15, 5, 0, 6, 12, 2, 7, 4], [0.03047869593495055, 0.024092687676923453, 0.02665655056773558, 0.17667886361751853, 0.15211061797378253, 0.016462544099609754, 0.0072484377164178625, 0.4477791048998878, 0.11849249751317383]) grid = transit(grid, 10, [5, 11, 15, 8, 2, 13, 12, 3, 6], [0.1020239434902293, 0.05405846145210329, 0.11525379082942891, 0.11556721863292163, 0.12372657123165616, 0.1356897031789931, 0.20047556686480725, 0.09921434949484752, 0.05399039482501285]) grid = transit(grid, 9, [5], [1.0]) grid = transit(grid, 15, [12, 0, 1, 11], [0.01847979792505241, 0.33442336387003857, 0.15192425697494277, 0.4951725812299663]) grid = sin(grid, 4, 8, 3.386521226555936, 60.95572898751007) grid = shift(grid, 14, 2, 2.55681173849493) grid = sin(grid, 10, 14, 0.8649185298731181, 3.1973516320924773) grid = sin(grid, 9, 7, -2.4657577404884132, 72.95418196004374) grid = transit(grid, 12, [7, 4, 10, 5], [0.5076634403621766, 0.003404332378773421, 0.04142944289977586, 0.4475027843592742]) grid = inverse(grid, 4, 5) grid = transit(grid, 1, [4, 14, 0, 13], [0.2785496566747933, 0.004915230889640017, 0.30146401859790545, 0.4150710938376613]) grid = sin(grid, 3, 11, -6.496603906160505, -73.75617586359363) grid = transit(grid, 6, [6, 14], [0.7201753385758813, 0.2798246614241187]) grid = prod(grid, 4, [10, 0, 2, 4, 8, 5, 6, 7]) grid = transit(grid, 8, [3], [1.0]) grid = inverse(grid, 8, 5) grid = smooth_max(grid, 10, 5, 13) grid = sin(grid, 9, 10, -1.8565532127479274, -54.75186223635349) grid = transit(grid, 10, [14], [1.0]) grid = transit(grid, 15, [11, 4, 10], [0.6926745567135898, 0.1831142410590532, 0.12421120222735695]) grid = magnitude(grid, 7, [6, 12, 7, 13, 8], 2) grid = transit(grid, 8, [3, 15, 9, 6, 11], [0.036102265915692405, 0.1224495166624379, 0.2384660328868578, 0.3357862916746864, 0.2671958928603256]) grid = smooth_min(grid, 1, 1, 11) grid = transit(grid, 5, [11, 4, 2, 1, 13, 12, 0, 8], [0.08486049729383285, 0.15069099224942706, 0.024923245737924458, 0.07191051851248272, 0.25942601829807205, 0.16834508849259286, 0.14540219911263502, 0.094441440303033]) grid = transit(grid, 11, [12], [1.0]) grid = power(grid, 3, 5, 0.10200689258338674) grid = transit(grid, 2, [10, 11, 4, 15, 0, 6], [0.24973877983541862, 0.3378766591098989, 0.15974656746239488, 0.027776085211312595, 0.02330072841260748, 0.20156117996836745]) grid = 
smooth_min(grid, 0, 5, 1) grid = magnitude(grid, 0, [5, 0], 2) grid = transit(grid, 6, [15, 8], [0.5303803951305812, 0.4696196048694189]) grid = inverse(grid, 0, 0) grid = magnitude(grid, 13, [8], 2) grid = transit(grid, 13, [15, 5, 9, 4, 6, 12], [0.18067242214638962, 0.12939497982917472, 0.08164480089591167, 0.24583958083442445, 0.2244518823086713, 0.13799633398542827]) grid = transit(grid, 11, [0], [1.0]) grid = magnitude(grid, 0, [4, 13], 2) grid = transit(grid, 8, [5, 4, 15, 6, 14, 0, 3, 11], [0.13835365002720226, 0.008781149737259792, 0.24627334258742545, 0.04870190081124998, 0.049950480577274, 0.15123046752435387, 0.31255198044446264, 0.04415702829077187]) grid = transit(grid, 1, [3], [1.0]) grid = magnitude(grid, 14, [4], 2) grid = sin(grid, 1, 5, 8.18216846853571, -6.729427492311089) grid = magnitude(grid, 11, [8, 2], 2) grid = transit(grid, 7, [12, 11, 13, 4], [0.1713900685471786, 0.14082681623065177, 0.19859698568682838, 0.4891861295353413]) grid = transit(grid, 13, [12, 15, 9, 2, 0, 1, 5], [0.18796556626817826, 0.19260744772691155, 0.11226112831146452, 0.08161640805634696, 0.08706050582840198, 0.2243337708440404, 0.11415517296465624]) grid = sin(grid, 11, 13, -6.909579361872105, 70.84834564082374) grid = transit(grid, 2, [11, 7, 13], [0.3629247592109436, 0.10073172896374764, 0.5363435118253088]) grid = sin(grid, 1, 5, 0.6814927249849106, 30.75954926767548) grid = inverse(grid, 8, 7) grid = prod(grid, 10, [5, 2]) grid = transit(grid, 15, [0, 3], [0.29345909580747953, 0.7065409041925205]) grid = sin(grid, 12, 4, -1.6398586072056767, 84.51374680259704) grid = sin(grid, 1, 1, -0.183401440709518, -88.40242580975152) grid = transit(grid, 12, [3, 13, 2, 9, 0], [0.24803411847529433, 0.2425397323068922, 0.0904752958055755, 0.11683555248582808, 0.30211530092641004]) grid = sin(grid, 5, 2, -2.2972705471452146, -12.522748365129786) grid = smooth_min(grid, 12, 9, 11) grid = sin(grid, 4, 15, -1.9527829039221054, 20.537776250912316) grid = transit(grid, 7, [11, 9, 2], [0.5001532946669459, 0.42070604285213226, 0.07914066248092186]) grid = inverse(grid, 5, 12) grid = sin(grid, 10, 2, 0.9155140652310594, -34.1653400637653) grid = transit(grid, 8, [14], [1.0]) grid = transit(grid, 4, [1, 12, 15, 13, 3], [0.32356965941479515, 0.022696478437764827, 0.2132573540073865, 0.11957266769813353, 0.3209038404419199]) grid = transit(grid, 6, [1, 7, 0, 2, 9, 4, 8], [0.06904450551777742, 0.12680650314665426, 0.1756104206123629, 0.013987480750913602, 0.1337935702206657, 0.39097327478734406, 0.08978424496428203]) grid = smooth_min(grid, 9, 9, 10) grid = shift(grid, 8, 1, -0.2952350240798842) grid = sin(grid, 11, 6, 1.576100090732909, -21.508000199215132) grid = shift(grid, 11, 5, 1.0526879494498724) grid = transit(grid, 1, [14], [1.0]) grid = transit(grid, 8, [9, 10, 2, 15, 13], [0.3265190472987195, 0.21568397721657098, 0.06226802479442838, 0.0028158122366541832, 0.39271313845362693]) grid = magnitude(grid, 11, [13, 10, 12, 2, 11, 14], 2) grid = transit(grid, 12, [8, 11, 3], [0.2717231795161624, 0.38648847983305307, 0.3417883406507845]) grid = transit(grid, 15, [7, 3], [0.9172074355564371, 0.08279256444356292]) grid = transit(grid, 13, [1, 2, 7, 5, 8, 9, 15], [0.085742434722219, 0.4119764535375412, 0.08377067725345017, 0.13045782410775286, 0.02917564277599849, 0.12489006625007311, 0.13398690135296518]) grid = transit(grid, 2, [2, 0, 11, 10, 5, 4, 15, 13], [0.1869735689344564, 0.06343641920215143, 0.038951322931441136, 0.04613309733662021, 0.19750663742298355, 0.16072124228620793, 0.15869932715876592, 
0.14757838472737334]) grid = transit(grid, 2, [1, 7], [0.18247956114317448, 0.8175204388568255]) grid = transit(grid, 8, [11, 15, 0], [0.08195235243098883, 0.6796005904358621, 0.23844705713314918]) grid = power(grid, 14, 0, 0.10854801586669052) grid = shift(grid, 8, 9, 2.766857264282361) grid = transit(grid, 3, [6, 14, 0, 3, 15, 4, 2, 11, 13], [0.03597236183123865, 0.04938629068404894, 0.08457069101219464, 0.014801187461296406, 0.3649334871683411, 0.28062233683539095, 0.08637063851194285, 0.06076815802338077, 0.022574848472165728]) grid = transit(grid, 4, [11, 4, 15, 10, 8, 5, 2, 3], [0.23701292672659616, 0.08316792464084911, 0.017867439461611043, 0.36417402420248035, 0.02841485585755143, 0.19916101840344472, 0.03422984110049058, 0.03597196960697647]) grid = magnitude(grid, 13, [11, 7], 2) grid = sin(grid, 4, 8, 4.28026157040775, -75.14180284322572) grid = prod(grid, 3, [14, 15]) grid = inverse(grid, 5, 5) grid = transit(grid, 4, [8, 4, 15, 9, 10], [0.10267794314653868, 0.019022820046952493, 0.061606568183823145, 0.4832751235896067, 0.33341754503307897]) grid = transit(grid, 13, [10, 8, 9, 12, 2], [0.031587088727564654, 0.024264739611302585, 0.0306940545567164, 0.19611241111174804, 0.7173417059926683]) grid = transit(grid, 0, [7, 1, 11, 0, 15], [0.036901331671075975, 0.5054281720479712, 0.13288430351514774, 0.10820806749406277, 0.21657812527174225]) grid = transit(grid, 3, [7, 3, 12, 9], [0.13643904772292245, 0.38438336340747, 0.15936221296996333, 0.31981537589964426]) grid = sin(grid, 10, 3, -2.5681840787633137, -30.256455817944243) grid = sin(grid, 8, 2, 3.501615294498545, -75.50049353340206) grid = prod(grid, 9, [1, 4, 0, 6]) grid = transit(grid, 8, [9, 3], [0.30088974760959275, 0.6991102523904072]) grid = transit(grid, 8, [2, 11, 15, 4, 1, 0, 14], [0.29712982335534416, 0.2526657169525107, 0.08415696601637544, 0.18541009701166816, 0.011062110917544764, 0.017334502896306194, 0.1522407828502505]) grid = prod(grid, 2, [8, 7, 11, 10, 15, 0, 5]) grid = transit(grid, 11, [7, 2, 3, 9, 5], [0.24039798004748805, 0.2886075990223525, 0.18742374307846998, 0.11615833154358073, 0.16741234630810867]) grid = prod(grid, 0, [0, 1, 2, 14]) grid = prod(grid, 9, [10, 11, 8, 15, 0, 12, 3]) grid = transit(grid, 13, [5, 15, 10], [0.13237609957996088, 0.22944646977966682, 0.6381774306403722]) grid = transit(grid, 6, [15], [1.0]) grid = sin(grid, 15, 0, -0.033265790773207085, 51.94880270063618) grid = smooth_min(grid, 13, 10, 15) grid = transit(grid, 1, [12, 8, 10, 4, 2], [0.43102537693091664, 0.25433300797798253, 0.21618454566402304, 0.046743011673522995, 0.05171405775355483]) grid = sin(grid, 10, 10, 0.9558311639914843, -47.618914508652054) grid = shift(grid, 9, 8, -1.1449289879251126) grid = transit(grid, 7, [4, 10, 1, 13, 5, 0, 7, 8, 9, 12, 6, 11, 14], [0.10006330804326793, 0.03891760159161208, 0.005474465860804227, 0.12962618248625338, 0.03090992138168193, 0.016043163973997736, 0.13259375374543056, 0.09920705802758992, 0.1415090600653345, 0.09597789664069131, 0.06106766497801195, 0.14032187015082653, 0.008288053054498123]) grid = prod(grid, 15, [12, 15]) grid = prod(grid, 8, [11, 7, 4, 12]) grid = transit(grid, 7, [15, 6, 2, 7], [0.45073658968521574, 0.16060948991238613, 0.12949271785123345, 0.2591612025511646]) grid = transit(grid, 10, [11, 4, 2, 8, 14], [0.3705316303566195, 0.1755951969700656, 0.043989590834687294, 0.22866693087969006, 0.1812166509589377]) grid = sin(grid, 4, 2, -3.329894296119046, -76.41676919069447) grid = smooth_min(grid, 11, 8, 12) grid = transit(grid, 1, [1, 14, 8], 
[0.38986786543390084, 0.40057743619803005, 0.20955469836806906]) grid = transit(grid, 9, [5], [1.0]) grid = shift(grid, 9, 13, -5.367438086043798) grid = magnitude(grid, 13, [2, 0], 2) grid = transit(grid, 13, [6, 2, 3, 15, 5, 7], [0.06492287400539203, 0.21223490901058306, 0.36311130408652753, 0.09994467226348329, 0.12833432959710458, 0.1314519110369097]) grid = transit(grid, 8, [6, 2], [0.6857167761482571, 0.31428322385174284]) grid = shift(grid, 6, 15, 4.115946851379848) grid = transit(grid, 15, [13, 3], [0.5897775709748927, 0.41022242902510725]) grid = sin(grid, 12, 14, 1.097917736937588, 58.87772371184383) grid = transit(grid, 11, [9, 11], [0.37033495928182997, 0.6296650407181701]) grid = smooth_min(grid, 4, 1, 8) grid = sin(grid, 4, 4, 3.47544933993972, -37.11795195118333) grid = sin(grid, 11, 7, -0.3409112713023047, 75.93313567333723) grid = transit(grid, 11, [5, 10, 7], [0.22694849313985146, 0.5162695719847235, 0.25678193487542517]) grid = sin(grid, 9, 9, -4.261918262131112, 18.680580924548693) grid = smooth_max(grid, 2, 2, 11) grid = sin(grid, 13, 13, 7.718114740496995, 55.242200715207815) grid = sin(grid, 12, 10, -3.1151555334821888, 17.571856948335267) grid = prod(grid, 6, [2, 4, 13]) grid = transit(grid, 5, [1, 9, 3, 10, 4], [0.24075568684771534, 0.02527375632067568, 0.4828116495090197, 0.09546712897709621, 0.15569177834549294]) grid = sin(grid, 6, 3, -0.1377650382373763, -96.34412250071645) grid = sin(grid, 7, 3, 1.6405444007982959, -37.09230830685477) grid = transit(grid, 9, [8], [1.0]) grid = sin(grid, 5, 10, -1.5052434957207308, 24.900059771988836) grid = sin(grid, 8, 10, 2.5947698108630664, -90.74050288622541) grid = sin(grid, 9, 8, -0.8743741598911887, 15.92872484723533) grid = transit(grid, 4, [3, 13, 9, 8, 5, 2, 12], [0.05731677054419865, 0.08527765171582982, 0.33929504571762287, 0.1932983536368378, 0.0036374435750729187, 0.12289545051895708, 0.19827928429148084]) grid = transit(grid, 8, [13, 9, 5, 7, 14], [0.05801706264076675, 0.341923243761946, 0.0494872820880747, 0.29583940098242745, 0.2547330105267852]) grid = inverse(grid, 11, 5) grid = magnitude(grid, 14, [4, 6, 1, 0], 2) grid = transit(grid, 13, [11, 0], [0.6569516962992897, 0.3430483037007103]) grid = sin(grid, 14, 5, 0.053526366336325744, 4.147364704932215) grid = transit(grid, 4, [3], [1.0]) grid = sin(grid, 3, 12, -4.078686662791614, 24.459526349523884) grid = inverse(grid, 15, 10) grid = shift(grid, 6, 1, -1.115193397983063) grid = smooth_max(grid, 13, 3, 8) grid = transit(grid, 13, [13, 0, 5, 14], [0.09662806703796267, 0.1621478194912538, 0.21548762580464817, 0.5257364876661353]) grid = inverse(grid, 1, 0) grid = smooth_max(grid, 1, 15, 12) grid = prod(grid, 11, [3]) grid = smooth_max(grid, 8, 11, 15) grid = sin(grid, 12, 6, -3.621533174445339, 24.02414911462421) grid = sin(grid, 1, 11, 0.5071121900678415, 10.950101187785563) grid = shift(grid, 13, 3, 5.677279514103952) grid = transit(grid, 3, [15, 11, 2, 8, 0], [0.28772794692354614, 0.1935939805514465, 0.06024872230823076, 0.13457223936247906, 0.32385711085429764]) grid = transit(grid, 1, [7, 2, 6, 1, 4, 0], [0.2070905138265326, 0.06562120796792839, 0.17355051228662716, 0.05514926535269553, 0.0829726599151083, 0.41561584065110807]) grid = transit(grid, 2, [0, 4, 2], [0.010597803396528332, 0.7371576932264431, 0.25224450337702853]) grid = sin(grid, 11, 8, 4.303514875116891, -67.11152580467314) grid = prod(grid, 5, [3, 9, 2]) grid = sin(grid, 5, 1, 2.0751861425380627, 63.37681521624819) grid = smooth_min(grid, 11, 10, 9) grid = sin(grid, 13, 2, 
4.295107938126156, 57.378601701270014) grid = sin(grid, 10, 2, -0.010214061334835559, 20.43114218394348) grid = transit(grid, 8, [1], [1.0]) grid = sin(grid, 4, 9, 0.2366252211469413, -40.63773874328931) grid = sin(grid, 9, 15, -2.507870105026106, -89.43842740853354) grid = transit(grid, 0, [12, 6, 4, 9, 1, 0, 14], [0.36336761526831185, 0.17372789204937897, 0.08036453739500136, 0.09747098994785518, 0.040818441056887325, 0.16796111771248814, 0.07628940657007711]) grid = transit(grid, 3, [11, 1, 12, 9, 0, 8, 15, 2, 10, 14], [0.20381942291270427, 0.07753380798970702, 0.11445683149439734, 0.08475226158626031, 0.1416941580568898, 0.020968563089492034, 0.0847896752697893, 0.0921589665387646, 0.008240731277180186, 0.17158558178481512]) grid = transit(grid, 5, [11, 10], [0.9817011300708863, 0.018298869929113594]) grid = sin(grid, 14, 8, -0.4693746108213766, -98.17810769380118) grid = sin(grid, 12, 10, 3.6427863324838423, 99.297524709649) grid = sin(grid, 5, 14, -1.45141083652418, -99.85812912291547) grid = transit(grid, 0, [4, 3, 8], [0.23275058190778222, 0.49901982570530873, 0.2682295923869092]) grid = magnitude(grid, 8, [10, 9, 12, 4, 7, 15], 2) grid = sin(grid, 12, 7, 1.439019575760617, 13.126437741104823) grid = transit(grid, 10, [15, 8, 13, 2], [0.32464063956303774, 0.20922781529873477, 0.16179927966914437, 0.30433226546908315]) grid = magnitude(grid, 6, [14, 5, 13, 11, 2, 9], 2) grid = sin(grid, 9, 5, -5.606152225672729, -35.928477282758536) grid = transit(grid, 0, [7, 11, 15, 8, 12, 0, 4, 14, 3, 5], [0.11084510086381213, 0.003439701966452383, 0.10819642722960272, 0.15371289739415475, 0.25812192912399506, 0.005727171643985687, 0.14633649245899077, 0.033890406689391105, 0.05550396325806974, 0.1242259093715456]) grid = smooth_max(grid, 10, 15, 10) grid = transit(grid, 11, [9, 0, 11, 7, 3, 8], [0.03500911832175082, 0.03265868671024263, 0.3248025339288217, 0.4234363710484886, 0.13338109758306646, 0.050712192407629864]) grid = transit(grid, 7, [14, 2, 13, 1, 11, 3, 8, 7], [0.207462236904601, 0.11516125867317799, 0.12240760599022518, 0.05066197369764289, 0.13869178538077429, 0.09948828746526778, 0.16686217850764798, 0.09926467338066268]) grid = transit(grid, 6, [6, 13, 7], [0.16813621041531998, 0.42150135317124293, 0.410362436413437]) grid = inverse(grid, 6, 6) grid = sin(grid, 7, 15, -4.9164570678736865, 86.15931416043557) grid = sin(grid, 1, 7, 1.6265187305620117, -97.13150019385894) grid = transit(grid, 11, [0, 9], [0.1290607634325389, 0.8709392365674611]) grid = transit(grid, 14, [14, 13, 15], [0.530662002197574, 0.1082014600047566, 0.36113653779766947]) grid = transit(grid, 14, [10, 14, 4, 9, 13, 6], [0.3199750359220948, 0.07376266150860299, 0.03622483092076182, 0.09070212266434277, 0.4030414045204916, 0.07629394446370606]) grid = magnitude(grid, 13, [7, 4, 15], 2) grid = transit(grid, 13, [6, 15, 11, 9, 12], [0.21908823570589997, 0.1636179110868493, 0.03797238284324163, 0.29532957711092916, 0.2839918932530799]) grid = sin(grid, 4, 3, 2.634465399239887, 62.07538440217337) grid = sin(grid, 7, 2, 3.41043792019894, 65.36615977552518) grid = transit(grid, 0, [14, 3, 11, 10, 7], [0.5203714128788618, 0.068511863728177, 0.10141059844877331, 0.2728285912351676, 0.036877533709020166]) grid = transit(grid, 7, [11], [1.0]) grid = transit(grid, 5, [9, 13, 3, 14], [0.28064413535886806, 0.5181512474389621, 0.1504742947642479, 0.050730322437922]) grid = prod(grid, 1, [12, 13]) grid = sin(grid, 6, 14, -1.927951619591129, -65.3028706482776) grid = prod(grid, 14, [13]) grid = sin(grid, 1, 12, 
-0.5111321725063378, 18.261359970959475) grid = power(grid, 6, 5, 0.9223892145169746) grid = transit(grid, 2, [9, 11, 10], [0.2662646690994658, 0.2460545507972383, 0.4876807801032959]) grid = transit(grid, 2, [7], [1.0]) grid = sin(grid, 10, 9, 6.219381309190064, -71.03631884776823) grid = sin(grid, 9, 6, 1.6821417847846682, -64.12547446801875) grid = sin(grid, 13, 3, -0.15800274281797377, 90.63950889076133) grid = sin(grid, 14, 14, -1.842523240371888, 74.23947694195837) grid = inverse(grid, 7, 8) grid = smooth_max(grid, 10, 3, 15) grid = magnitude(grid, 9, [15, 7], 2) grid = transit(grid, 4, [4, 12, 14, 15, 7, 1], [0.20378471182464508, 0.038241020379710625, 0.16903312106740406, 0.3387613981701764, 0.11303295854369695, 0.13714679001436697]) grid = transit(grid, 4, [14, 11, 12, 13, 4, 7], [0.23221079251346607, 0.3307147367708056, 0.26199556841553734, 0.018127231672754242, 0.13788777275073352, 0.01906389787670339]) grid = sin(grid, 4, 7, 3.7705302330112063, 56.91558505626969) grid = sin(grid, 3, 9, 1.4275963527158242, -76.78247379244436) grid = sin(grid, 2, 5, -5.225820110717917, 57.71107021356826) grid = transit(grid, 2, [12], [1.0]) grid = prod(grid, 14, [11, 10]) grid = transit(grid, 2, [0, 15, 10], [0.005204838856346087, 0.5116602651328436, 0.48313489601081044]) grid = transit(grid, 10, [10], [1.0]) grid = transit(grid, 1, [8, 10, 15, 14, 9], [0.33493798319460544, 0.14040206011900094, 0.3010385316537353, 0.07412413198773361, 0.14949729304492473]) grid = magnitude(grid, 10, [11, 0, 5], 2) grid = magnitude(grid, 9, [15, 3, 11, 0, 14], 2) grid = sin(grid, 4, 5, -1.8457292172108153, -53.43885199947502) grid = sin(grid, 10, 0, 7.741409383532979, -12.082110529508299) grid = prod(grid, 11, [9]) grid = sin(grid, 4, 3, 0.10154488887533689, 12.479110491961137) grid = magnitude(grid, 1, [7], 2) grid = smooth_min(grid, 7, 4, 13) grid = magnitude(grid, 5, [7], 2) grid = transit(grid, 6, [9, 11, 2, 13], [0.381505247910628, 0.12073241493361198, 0.3454992433435407, 0.15226309381221942]) grid = magnitude(grid, 10, [7, 15, 5], 2) grid = magnitude(grid, 9, [12, 14, 4], 2) grid = shift(grid, 3, 9, 3.0393348894939773) grid = shift(grid, 2, 4, 2.1961962516242517) grid = prod(grid, 15, [3, 5, 0, 1]) grid = sin(grid, 6, 11, -0.7697482296056479, 23.55348445076298) grid = sin(grid, 7, 7, 0.5492744322205282, 35.873568370773654) grid = transit(grid, 7, [13], [1.0]) grid = sin(grid, 3, 12, 6.470760426148978, -53.62090724330151) grid = sin(grid, 10, 10, 0.7827958631857042, -90.82177259964699) grid = transit(grid, 6, [8, 6, 5, 7, 4, 2], [0.39579476392315127, 0.3200094081197146, 0.06439062651950353, 0.03284446726347166, 0.04732779189481446, 0.13963294227934445]) grid = smooth_min(grid, 0, 13, 15) grid = smooth_max(grid, 5, 8, 4) grid = transit(grid, 10, [1], [1.0]) grid = transit(grid, 15, [15], [1.0]) grid = prod(grid, 13, [6, 3, 7]) grid = sin(grid, 0, 3, -3.561651028660104, 11.539889679902203) grid = power(grid, 10, 5, 0.12539493928522222) grid = power(grid, 0, 12, 2.5526439221510495) grid = sin(grid, 4, 10, -3.680544885171134, 30.633332441673872) grid = transit(grid, 11, [12, 6, 9], [0.1597221050818672, 0.523275926379751, 0.31700196853838186]) grid = sin(grid, 14, 7, 5.409920766787869, -58.09956716630187) grid = sin(grid, 2, 15, -2.5319898824657017, -45.01904701883333) grid = shift(grid, 5, 5, 3.1584260780059252) grid = transit(grid, 10, [9, 8], [0.7777441717493406, 0.22225582825065934]) grid = transit(grid, 3, [9], [1.0]) grid = transit(grid, 11, [2], [1.0]) #create color space def shift_colors(x, shift): res = 
x.copy() for i in range(x.shape[-1]): if shift[i] > 0: res[:,:,i] = (-np.abs(((x[:,:,i] + 1) / 2) ** (1 + shift[i]) - 1) ** (1 / (1 + shift[i])) + 1) * 2 - 1 if shift[i] < 0: res[:,:,i] = np.abs((1 - (x [:,:,i]+ 1) / 2) ** (1 - shift[i]) - 1) ** (1 / (1 - shift[i])) * 2 - 1 return test_values(res) res = np.zeros((SIZE, SIZE, 3)) res += shift_colors(grid[:,:,0:1].repeat(3, -1), [1.9355805467383669, 1.4677093499726706, 1.2451388311186942]) res = res / 1 res = ((res + 1) / 2 * 255).clip(0,255) #save results im = Image.fromarray(np.uint8(res)) im.save(os.path.basename(__file__) + '.png') #save layers img = np.zeros((SIZE * 4, SIZE * 4)) for j in range(GRID_CHANNELS): x = j % 4 y = j // 4 img[x*SIZE:(x + 1)*SIZE, y*SIZE:(y+1)*SIZE] = grid[:,:,j] img = (img + 1) * 127.5 im = Image.fromarray(np.uint8(img)) im.save(os.path.basename(__file__) + '_layers.png')
[((2606, 2643), 'numpy.zeros', 'np.zeros', (['(SIZE, SIZE, GRID_CHANNELS)'], {}), '((SIZE, SIZE, GRID_CHANNELS))\n', (2614, 2643), True, 'import numpy as np\n'), ((25379, 25404), 'numpy.zeros', 'np.zeros', (['(SIZE, SIZE, 3)'], {}), '((SIZE, SIZE, 3))\n', (25387, 25404), True, 'import numpy as np\n'), ((25692, 25722), 'numpy.zeros', 'np.zeros', (['(SIZE * 4, SIZE * 4)'], {}), '((SIZE * 4, SIZE * 4))\n', (25700, 25722), True, 'import numpy as np\n'), ((959, 1002), 'numpy.sum', 'np.sum', (['(x[:, :, (s_indx)] * alphas)'], {'axis': '(-1)'}), '(x[:, :, (s_indx)] * alphas, axis=-1)\n', (965, 1002), True, 'import numpy as np\n'), ((1134, 1189), 'numpy.sin', 'np.sin', (['(x[:, :, (s_indx)] * 0.5 * np.pi * scale + shift)'], {}), '(x[:, :, (s_indx)] * 0.5 * np.pi * scale + shift)\n', (1140, 1189), True, 'import numpy as np\n'), ((2360, 2390), 'numpy.prod', 'np.prod', (['x[:, :, (s_indx)]', '(-1)'], {}), '(x[:, :, (s_indx)], -1)\n', (2367, 2390), True, 'import numpy as np\n'), ((25612, 25625), 'numpy.uint8', 'np.uint8', (['res'], {}), '(res)\n', (25620, 25625), True, 'import numpy as np\n'), ((25892, 25905), 'numpy.uint8', 'np.uint8', (['img'], {}), '(img)\n', (25900, 25905), True, 'import numpy as np\n'), ((1305, 1356), 'numpy.linalg.norm', 'np.linalg.norm', (['x[:, :, (s_indx)]'], {'axis': '(-1)', 'ord': 'ord'}), '(x[:, :, (s_indx)], axis=-1, ord=ord)\n', (1319, 1356), True, 'import numpy as np\n'), ((2496, 2522), 'numpy.sign', 'np.sign', (['x[:, :, (s_indx)]'], {}), '(x[:, :, (s_indx)])\n', (2503, 2522), True, 'import numpy as np\n'), ((25635, 25661), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (25651, 25661), False, 'import os\n'), ((25915, 25941), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (25931, 25941), False, 'import os\n'), ((590, 603), 'numpy.isnan', 'np.isnan', (['arr'], {}), '(arr)\n', (598, 603), True, 'import numpy as np\n'), ((680, 692), 'numpy.amin', 'np.amin', (['arr'], {}), '(arr)\n', (687, 692), True, 'import numpy as np\n'), ((701, 713), 'numpy.amax', 'np.amax', (['arr'], {}), '(arr)\n', (708, 713), True, 'import numpy as np\n'), ((2521, 2546), 'numpy.abs', 'np.abs', (['x[:, :, (s_indx)]'], {}), '(x[:, :, (s_indx)])\n', (2527, 2546), True, 'import numpy as np\n'), ((782, 794), 'numpy.amin', 'np.amin', (['arr'], {}), '(arr)\n', (789, 794), True, 'import numpy as np\n'), ((796, 808), 'numpy.amax', 'np.amax', (['arr'], {}), '(arr)\n', (803, 808), True, 'import numpy as np\n'), ((1629, 1689), 'numpy.abs', 'np.abs', (['((1 - (x[:, :, (s_indx)] + 1) / 2) ** (1 - shift) - 1)'], {}), '((1 - (x[:, :, (s_indx)] + 1) / 2) ** (1 - shift) - 1)\n', (1635, 1689), True, 'import numpy as np\n'), ((1972, 2002), 'numpy.exp', 'np.exp', (['(x[:, :, (s1_indx)] * p)'], {}), '(x[:, :, (s1_indx)] * p)\n', (1978, 2002), True, 'import numpy as np\n'), ((2001, 2031), 'numpy.exp', 'np.exp', (['(x[:, :, (s2_indx)] * p)'], {}), '(x[:, :, (s2_indx)] * p)\n', (2007, 2031), True, 'import numpy as np\n'), ((2182, 2213), 'numpy.exp', 'np.exp', (['(-x[:, :, (s1_indx)] * p)'], {}), '(-x[:, :, (s1_indx)] * p)\n', (2188, 2213), True, 'import numpy as np\n'), ((2212, 2243), 'numpy.exp', 'np.exp', (['(-x[:, :, (s2_indx)] * p)'], {}), '(-x[:, :, (s2_indx)] * p)\n', (2218, 2243), True, 'import numpy as np\n'), ((25254, 25312), 'numpy.abs', 'np.abs', (['((1 - (x[:, :, (i)] + 1) / 2) ** (1 - shift[i]) - 1)'], {}), '((1 - (x[:, :, (i)] + 1) / 2) ** (1 - shift[i]) - 1)\n', (25260, 25312), True, 'import numpy as np\n'), ((1506, 1562), 'numpy.abs', 'np.abs', 
(['(((x[:, :, (s_indx)] + 1) / 2) ** (1 + shift) - 1)'], {}), '(((x[:, :, (s_indx)] + 1) / 2) ** (1 + shift) - 1)\n', (1512, 1562), True, 'import numpy as np\n'), ((2651, 2666), 'numpy.arange', 'np.arange', (['SIZE'], {}), '(SIZE)\n', (2660, 2666), True, 'import numpy as np\n'), ((2729, 2744), 'numpy.arange', 'np.arange', (['SIZE'], {}), '(SIZE)\n', (2738, 2744), True, 'import numpy as np\n'), ((25128, 25182), 'numpy.abs', 'np.abs', (['(((x[:, :, (i)] + 1) / 2) ** (1 + shift[i]) - 1)'], {}), '(((x[:, :, (i)] + 1) / 2) ** (1 + shift[i]) - 1)\n', (25134, 25182), True, 'import numpy as np\n')]
comydream/OpenNMT-py
onmt/bin/build_vocab.py
2f3c810069ca03b752d9886782648e576b39a06d
#!/usr/bin/env python
"""Get vocabulary counts from transformed corpora samples."""
from onmt.utils.logging import init_logger
from onmt.utils.misc import set_random_seed, check_path
from onmt.utils.parse import ArgumentParser
from onmt.opts import dynamic_prepare_opts
from onmt.inputters.corpus import build_vocab
from onmt.transforms import make_transforms, get_transforms_cls


def build_vocab_main(opts):
    """Apply transforms to samples of specified data and build vocab from it.

    Transforms that need vocab will be disabled in this.
    Built vocab is saved in plain text format as following and can be passed
    as `-src_vocab` (and `-tgt_vocab`) when training:
    ```
    <tok_0>\t<count_0>
    <tok_1>\t<count_1>
    ```
    """

    ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True)
    assert opts.n_sample == -1 or opts.n_sample > 1, \
        f"Illegal argument n_sample={opts.n_sample}."

    logger = init_logger()
    set_random_seed(opts.seed, False)
    transforms_cls = get_transforms_cls(opts._all_transform)
    fields = None

    transforms = make_transforms(opts, transforms_cls, fields)

    logger.info(f"Counter vocab from {opts.n_sample} samples.")
    src_counter, tgt_counter, src_feats_counter = build_vocab(
        opts, transforms, n_sample=opts.n_sample)

    logger.info(f"Counters src:{len(src_counter)}")
    logger.info(f"Counters tgt:{len(tgt_counter)}")
    for feat_name, feat_counter in src_feats_counter.items():
        logger.info(f"Counters {feat_name}:{len(feat_counter)}")

    def save_counter(counter, save_path):
        check_path(save_path, exist_ok=opts.overwrite, log=logger.warning)
        with open(save_path, "w", encoding="utf8") as fo:
            for tok, count in counter.most_common():
                fo.write(tok + "\t" + str(count) + "\n")

    if opts.share_vocab:
        src_counter += tgt_counter
        tgt_counter = src_counter
        logger.info(f"Counters after share:{len(src_counter)}")
        save_counter(src_counter, opts.src_vocab)
    else:
        save_counter(src_counter, opts.src_vocab)
        save_counter(tgt_counter, opts.tgt_vocab)

    for k, v in src_feats_counter.items():
        save_counter(v, opts.src_feats_vocab[k])


def _get_parser():
    parser = ArgumentParser(description='build_vocab.py')
    dynamic_prepare_opts(parser, build_vocab_only=True)
    return parser


def main():
    parser = _get_parser()
    opts, unknown = parser.parse_known_args()
    build_vocab_main(opts)


if __name__ == '__main__':
    main()
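# Usage sketch (added for illustration; not part of the original file). This
# entry point is normally driven by a YAML config; -n_sample and the vocab
# paths mirror the options referenced above, but the full option set lives in
# onmt.opts.dynamic_prepare_opts, so treat the exact flags as assumptions:
#
#   python build_vocab.py -config data.yaml -n_sample 10000 \
#       -src_vocab vocab.src -tgt_vocab vocab.tgt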
[((752, 817), 'onmt.utils.parse.ArgumentParser.validate_prepare_opts', 'ArgumentParser.validate_prepare_opts', (['opts'], {'build_vocab_only': '(True)'}), '(opts, build_vocab_only=True)\n', (788, 817), False, 'from onmt.utils.parse import ArgumentParser\n'), ((941, 954), 'onmt.utils.logging.init_logger', 'init_logger', ([], {}), '()\n', (952, 954), False, 'from onmt.utils.logging import init_logger\n'), ((959, 992), 'onmt.utils.misc.set_random_seed', 'set_random_seed', (['opts.seed', '(False)'], {}), '(opts.seed, False)\n', (974, 992), False, 'from onmt.utils.misc import set_random_seed, check_path\n'), ((1014, 1053), 'onmt.transforms.get_transforms_cls', 'get_transforms_cls', (['opts._all_transform'], {}), '(opts._all_transform)\n', (1032, 1053), False, 'from onmt.transforms import make_transforms, get_transforms_cls\n'), ((1090, 1135), 'onmt.transforms.make_transforms', 'make_transforms', (['opts', 'transforms_cls', 'fields'], {}), '(opts, transforms_cls, fields)\n', (1105, 1135), False, 'from onmt.transforms import make_transforms, get_transforms_cls\n'), ((1251, 1304), 'onmt.inputters.corpus.build_vocab', 'build_vocab', (['opts', 'transforms'], {'n_sample': 'opts.n_sample'}), '(opts, transforms, n_sample=opts.n_sample)\n', (1262, 1304), False, 'from onmt.inputters.corpus import build_vocab\n'), ((2282, 2326), 'onmt.utils.parse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""build_vocab.py"""'}), "(description='build_vocab.py')\n", (2296, 2326), False, 'from onmt.utils.parse import ArgumentParser\n'), ((2331, 2382), 'onmt.opts.dynamic_prepare_opts', 'dynamic_prepare_opts', (['parser'], {'build_vocab_only': '(True)'}), '(parser, build_vocab_only=True)\n', (2351, 2382), False, 'from onmt.opts import dynamic_prepare_opts\n'), ((1597, 1663), 'onmt.utils.misc.check_path', 'check_path', (['save_path'], {'exist_ok': 'opts.overwrite', 'log': 'logger.warning'}), '(save_path, exist_ok=opts.overwrite, log=logger.warning)\n', (1607, 1663), False, 'from onmt.utils.misc import set_random_seed, check_path\n')]
dssg/mlpolicylab_fall20_schools3_public
schools3/ml/experiments/feat_pruning_experiment.py
f8eff4c56e9bada1eb81ddaca03686d7ef53c2c4
import numpy as np
import pandas as pd
from schools3.ml.experiments.models_experiment import ModelsExperiment
from schools3.data.base.cohort import Cohort
from schools3.config import main_config
from schools3.config import global_config
from schools3.data.datasets.dataset import Dataset
from schools3.ml.experiments.feat_importances_experiment import FeatureImportancesExperiment
from schools3.ml.experiments.single_dataset_experiment import SingleDatasetExperiment
from schools3.ml.models.tfkeras_model import TFKerasModel
from schools3.ml.models.sklearn_model import SklearnModel
import schools3.config.ml.experiments.feat_pruning_experiment_config as config
from schools3.config.data.datasets import dataset_config


# an experiment that trains models with subsets of the features according to their permutation importance rank
# like SingleDatasetExperiment, this works on a specific grade
class FeaturePruningExperiment(ModelsExperiment):
    def __init__(
        self,
        name='ignore',
        features_list=main_config.features,
        labels=main_config.labels,
        models=main_config.models,
        metrics=main_config.metrics,
        use_cache=main_config.use_cache
    ):
        super(FeaturePruningExperiment, self).__init__(
            name, features_list, labels, models, metrics, use_cache=use_cache
        )

    def perform(
        self,
        grade=main_config.single_grade,
        train_years=main_config.train_years,
        test_years=main_config.test_years,
        compute_train_metrics=False,
        **kwargs
    ):
        train_cohort = Cohort(grade, train_years)
        df = pd.DataFrame()
        for model in self.models:
            if not (isinstance(model, SklearnModel) or isinstance(model, TFKerasModel)):
                continue
            train_data = Dataset(train_cohort, self.features_list, model.get_feature_processor(), self.labels)
            model.train(train_data)

            feats_exp = FeatureImportancesExperiment('ignore', self.features_list, self.labels, [model], self.metrics)
            feature_names, _, sorted_idxs = feats_exp.get_feature_importances(model, train_data)
            feats = np.flip(feature_names[sorted_idxs])

            for i in config.num_feats:
                dataset_config.feat_whitelist.clear()
                for feat in feats[:i]:
                    dataset_config.feat_whitelist.append(feat)

                exp = SingleDatasetExperiment('ignore', self.features_list, self.labels, [model], self.metrics)
                cur_df = exp.perform(grade, train_years, test_years, compute_train_metrics=compute_train_metrics, **kwargs)
                cur_df['num_feats'] = i
                df = pd.concat([df, cur_df], ignore_index=True)

        return df
[((1561, 1587), 'schools3.data.base.cohort.Cohort', 'Cohort', (['grade', 'train_years'], {}), '(grade, train_years)\n', (1567, 1587), False, 'from schools3.data.base.cohort import Cohort\n'), ((1602, 1616), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (1614, 1616), True, 'import pandas as pd\n'), ((1938, 2037), 'schools3.ml.experiments.feat_importances_experiment.FeatureImportancesExperiment', 'FeatureImportancesExperiment', (['"""ignore"""', 'self.features_list', 'self.labels', '[model]', 'self.metrics'], {}), "('ignore', self.features_list, self.labels, [\n model], self.metrics)\n", (1966, 2037), False, 'from schools3.ml.experiments.feat_importances_experiment import FeatureImportancesExperiment\n'), ((2163, 2198), 'numpy.flip', 'np.flip', (['feature_names[sorted_idxs]'], {}), '(feature_names[sorted_idxs])\n', (2170, 2198), True, 'import numpy as np\n'), ((2255, 2292), 'schools3.config.data.datasets.dataset_config.feat_whitelist.clear', 'dataset_config.feat_whitelist.clear', ([], {}), '()\n', (2290, 2292), False, 'from schools3.config.data.datasets import dataset_config\n'), ((2418, 2511), 'schools3.ml.experiments.single_dataset_experiment.SingleDatasetExperiment', 'SingleDatasetExperiment', (['"""ignore"""', 'self.features_list', 'self.labels', '[model]', 'self.metrics'], {}), "('ignore', self.features_list, self.labels, [model],\n self.metrics)\n", (2441, 2511), False, 'from schools3.ml.experiments.single_dataset_experiment import SingleDatasetExperiment\n'), ((2694, 2736), 'pandas.concat', 'pd.concat', (['[df, cur_df]'], {'ignore_index': '(True)'}), '([df, cur_df], ignore_index=True)\n', (2703, 2736), True, 'import pandas as pd\n'), ((2352, 2394), 'schools3.config.data.datasets.dataset_config.feat_whitelist.append', 'dataset_config.feat_whitelist.append', (['feat'], {}), '(feat)\n', (2388, 2394), False, 'from schools3.config.data.datasets import dataset_config\n')]
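FeaturePruningExperiment retrains a model on the top-i features ranked by permutation importance. A self-contained sketch of the same loop using only scikit-learn; the dataset, model, and subset sizes below are stand-ins, not part of the schools3 codebase:

import numpy as np
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
from sklearn.inspection import permutation_importance
from sklearn.model_selection import train_test_split

X, y = make_classification(n_samples=500, n_features=20, random_state=0)
X_tr, X_te, y_tr, y_te = train_test_split(X, y, random_state=0)
model = RandomForestClassifier(random_state=0).fit(X_tr, y_tr)

# Rank features from most to least important by permutation importance.
imp = permutation_importance(model, X_te, y_te, n_repeats=5, random_state=0)
ranked = np.argsort(imp.importances_mean)[::-1]

# Retrain on the top-i feature subsets and record test accuracy per subset size.
for i in (5, 10, 20):
    cols = ranked[:i]
    sub_model = RandomForestClassifier(random_state=0).fit(X_tr[:, cols], y_tr)
    print(i, sub_model.score(X_te[:, cols], y_te))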
imsb-uke/podometric_u_net
network/dataset/image_loading.py
a33afcc186d618889df73c7ab2941dfbb63574ac
import os

import numpy as np
from skimage.io import imread


def get_file_count(paths, image_format='.tif'):
    total_count = 0
    for path in paths:
        try:
            path_list = [_ for _ in os.listdir(path) if _.endswith(image_format)]
            total_count += len(path_list)
        except OSError:
            print("Directory does not exist. Returned file count for this path will be 0")
    return total_count


# Function to load image
def load_image(img_path):
    img = imread(img_path)
    if img.shape[2] == 4:
        img = img[:, :, :-1]
    # img = np.roll(img, shift=1, axis=2)  # CHECK IMAGE FORMAT
    return img


# Function to load mask
def load_mask(mask_path):
    mask = imread(mask_path)
    return mask


def load_mask_from_img(cfg, img_path, img_name, suffixes):
    a_mask = imread(os.path.join(img_path, img_name + suffixes[0]))
    msk = np.zeros((a_mask.shape[0], a_mask.shape[1], len(suffixes) * cfg.NUMBER_MSK_CHANNELS))
    i = 0
    for suffix in suffixes:
        msk_channel = imread(os.path.join(img_path, img_name + suffix))
        if len(msk_channel.shape) == 2:
            msk_channel = np.expand_dims(msk_channel, axis=-1)
        if len(msk_channel.shape) != 3:
            raise ValueError("Mask must be 3-dim here. Does your mask have 1 or more than 3 dimensions? "
                             "Check the masks.")
        msk[:, :, i:i + cfg.NUMBER_MSK_CHANNELS] = msk_channel
        i += cfg.NUMBER_MSK_CHANNELS
    # print(msk, msk.shape)
    return msk


def load_weights(cfg, img_path, img_name, weight_suffixes):
    a_weights = np.load(os.path.join(img_path, img_name + weight_suffixes[0]))
    weights = np.zeros((a_weights.shape[0], a_weights.shape[1], len(weight_suffixes) * cfg.NUMBER_MSK_CHANNELS))
    i = 0
    for suffix in weight_suffixes:
        weights_channel = np.load(os.path.join(img_path, img_name + suffix))
        if len(weights_channel.shape) == 2:
            weights_channel = np.expand_dims(weights_channel, axis=-1)
        if len(weights_channel.shape) != 3:
            raise ValueError("Weights must be 3-dim here. Do your weights have 1 or more than 3 dimensions? Check the weights.")
        weights[:, :, i:i + cfg.NUMBER_MSK_CHANNELS] = weights_channel
        i += cfg.NUMBER_MSK_CHANNELS
    return weights
[((491, 507), 'skimage.io.imread', 'imread', (['img_path'], {}), '(img_path)\n', (497, 507), False, 'from skimage.io import imread\n'), ((705, 722), 'skimage.io.imread', 'imread', (['mask_path'], {}), '(mask_path)\n', (711, 722), False, 'from skimage.io import imread\n'), ((820, 866), 'os.path.join', 'os.path.join', (['img_path', '(img_name + suffixes[0])'], {}), '(img_path, img_name + suffixes[0])\n', (832, 866), False, 'import os\n'), ((1599, 1652), 'os.path.join', 'os.path.join', (['img_path', '(img_name + weight_suffixes[0])'], {}), '(img_path, img_name + weight_suffixes[0])\n', (1611, 1652), False, 'import os\n'), ((1031, 1072), 'os.path.join', 'os.path.join', (['img_path', '(img_name + suffix)'], {}), '(img_path, img_name + suffix)\n', (1043, 1072), False, 'import os\n'), ((1140, 1176), 'numpy.expand_dims', 'np.expand_dims', (['msk_channel'], {'axis': '(-1)'}), '(msk_channel, axis=-1)\n', (1154, 1176), True, 'import numpy as np\n'), ((1846, 1887), 'os.path.join', 'os.path.join', (['img_path', '(img_name + suffix)'], {}), '(img_path, img_name + suffix)\n', (1858, 1887), False, 'import os\n'), ((1963, 2003), 'numpy.expand_dims', 'np.expand_dims', (['weights_channel'], {'axis': '(-1)'}), '(weights_channel, axis=-1)\n', (1977, 2003), True, 'import numpy as np\n'), ((202, 218), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (212, 218), False, 'import os\n')]
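load_mask_from_img and load_weights both stack single-channel arrays into one (H, W, C) array by expanding each channel to (H, W, 1) and writing it into its slice. A minimal numpy illustration of that stacking, with made-up shapes:

import numpy as np

h, w, n_channels = 4, 4, 3
masks = [np.random.randint(0, 2, (h, w)) for _ in range(n_channels)]

stacked = np.zeros((h, w, n_channels))
for i, m in enumerate(masks):
    m = np.expand_dims(m, axis=-1)   # (H, W) -> (H, W, 1)
    stacked[:, :, i:i + 1] = m       # write one channel slice
print(stacked.shape)                 # (4, 4, 3)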
kefir/snakee
series/simple/numeric_series.py
a17734d4b2d7dfd3e6c7b195baa128fbc84d197b
from typing import Optional, Callable

try:  # Assume we're a sub-module in a package.
    from series import series_classes as sc
    from utils import numeric as nm
except ImportError:
    # Apparently no higher-level package has been imported, fall back to a local import.
    from .. import series_classes as sc
    from ...utils import numeric as nm

Native = sc.AnySeries

DEFAULT_NUMERIC = True
WINDOW_DEFAULT = (-1, 0, 1)
WINDOW_WO_CENTER = (-2, -1, 0, 1, 2)
WINDOW_NEIGHBORS = (-1, 0)


class NumericSeries(sc.AnySeries):
    def __init__(
            self,
            values=[],
            validate=False,
            name=None,
    ):
        super().__init__(
            values=values,
            validate=validate,
            name=name,
        )

    @staticmethod
    def get_distance_func():
        return nm.diff

    def get_errors(self):
        yield from super().get_errors()
        if not self.has_valid_items():
            yield 'Values of {} must be numeric'.format(self.get_class_name())

    def has_valid_items(self):
        for v in self.get_values():
            if not isinstance(v, (int, float)):
                return False
        return True

    def is_numeric(self, check=False):
        if check:
            return self.has_valid_items()
        else:
            return DEFAULT_NUMERIC

    def get_sum(self):
        return sum(
            self.filter_values_defined().get_values(),
        )

    def get_mean(self):
        values_defined = self.filter_values_defined().get_values()
        if values_defined:
            return sum(values_defined) / len(values_defined)

    def norm(self, rate=None, default=None):
        if rate is None:
            rate = self.get_mean()
        return self.map_values(lambda v: v / rate if rate else default)

    def divide(self, series, default=None, extend=False):
        return self.map_optionally_extend_zip_values(
            lambda x, y: x / y if y else default,
            extend, series,
        )

    def subtract(self, series, default=None, extend=False):
        return self.map_optionally_extend_zip_values(
            lambda x, y: x - y if x is not None and y is not None else default,
            extend, series,
        )

    def derivative(self, extend=False, default=0):
        if extend:
            return self.preface(None).subtract(
                self,
                extend=True,
                default=default,
            ).crop(0, 1)
        else:
            return self.slice(0, -1).subtract(
                self.shift(-1)
            )

    def get_sliding_window(self, window=WINDOW_DEFAULT, extend=True, default=None, as_series=True):
        if extend:
            n_min = 0
            n_max = self.get_count()
        else:
            n_min = - min(window)
            n_max = self.get_count() - max(window)
        for center in range(n_min, n_max):
            sliding_window = [center + n for n in window]
            if as_series:
                yield self.value_series().items_no(sliding_window, extend=extend, default=default)
            else:
                yield self.value_series().get_items_no(sliding_window, extend=extend, default=default)

    def apply_window_func(
            self,
            function: Callable,
            window=WINDOW_DEFAULT,
            extend=True,
            default=None,
            as_series=False,
            inplace: bool = False,
    ) -> Optional[Native]:
        values = map(function, self.get_sliding_window(window, extend=extend, default=default, as_series=as_series))
        return self.set_values(values, inplace=inplace)

    def mark_local_extremums(self, local_min=True, local_max=True):
        return self.apply_window_func(
            lambda a: nm.is_local_extremum(*a, local_min=local_min, local_max=local_max),
            window=WINDOW_DEFAULT,
            extend=True,
            default=False,
        )

    def mark_local_max(self):
        return self.mark_local_extremums(local_min=False, local_max=True)

    def mark_local_min(self):
        return self.mark_local_extremums(local_min=True, local_max=False)

    def deviation_from_neighbors(self, window=WINDOW_NEIGHBORS, rel=False):
        smoothed_series = self.smooth(window=window)
        deviation = self.subtract(smoothed_series)
        if rel:
            deviation = deviation.divide(smoothed_series, default=0)
        return deviation

    # @deprecated
    def smooth_simple_linear(self, window_len=3, exclude_center=False):
        center = int((window_len - 1) / 2)
        count = self.get_count()
        result = self.new()
        for n in self.get_range_numbers():
            is_edge = n < center or n >= count - center
            if is_edge:
                result.append(self.get_item_no(n), inplace=True)
            else:
                sub_series = self.slice(n - center, n + center + 1)
                if exclude_center:
                    sub_series = sub_series.drop_item_no(center)
                result.append(sub_series.get_mean(), inplace=True)
        return result

    def smooth(self, how='linear', *args, **kwargs):
        method_name = 'smooth_{}'.format(how)
        smooth_method = self.__getattribute__(method_name)
        return smooth_method(*args, **kwargs)

    def smooth_multiple(self, list_kwargs=[]):
        series = self
        for kwargs in list_kwargs:
            series = series.smooth(**kwargs)
        return series

    def smooth_linear(self, window=WINDOW_DEFAULT):
        return self.apply_window_func(
            lambda s: s.get_mean(),
            window=window,
            extend=True,
            default=None,
            as_series=True,
        )

    def smooth_spikes(self, threshold, window=WINDOW_WO_CENTER, local_min=False, local_max=True, whitelist=None):
        spikes = self.mark_spikes(threshold, local_min=local_min, local_max=local_max)
        if whitelist:
            spikes = spikes.map_zip_values(
                lambda a, b: a and not b,
                whitelist,
            )
        return self.map_zip_values(
            lambda v, t, s: s if t else v,
            spikes,
            self.smooth(window=window),
        )

    def mark_spikes(self, threshold, window=WINDOW_NEIGHBORS, local_min=False, local_max=True):
        deviation = self.deviation_from_neighbors(window=window, rel=True)
        if local_min or local_max:
            deviation = deviation.map_zip_values(
                lambda x, m: x if m else None,
                self.mark_local_extremums(local_min=local_min, local_max=local_max),
            )
        spikes = deviation.map_values(
            lambda x: abs(x or 0) > threshold,
        )
        return spikes

    def plot(self, fmt='-'):
        nm.plot(self.get_range_numbers(), self.get_values(), fmt=fmt)
[((3727, 3793), 'utils.numeric.is_local_extremum', 'nm.is_local_extremum', (['*a'], {'local_min': 'local_min', 'local_max': 'local_max'}), '(*a, local_min=local_min, local_max=local_max)\n', (3747, 3793), True, 'from utils import numeric as nm\n')]
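smooth_linear above averages each value with its neighbors over a window of index offsets, tolerating missing neighbors at the edges. A standalone sketch of that centered sliding-window mean on a plain list (a simplification: it clips the window at the boundaries rather than padding with None):

def sliding_mean(values, window=(-1, 0, 1)):
    # For each position, average the window members that fall inside the list.
    smoothed = []
    for center in range(len(values)):
        members = [values[center + off] for off in window
                   if 0 <= center + off < len(values)]
        smoothed.append(sum(members) / len(members))
    return smoothed

print(sliding_mean([0, 10, 0, 10, 0]))  # approximately [5.0, 3.33, 6.67, 3.33, 5.0]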
kuropengin/SHINtube-video-api
app/internal/module/video/database.py
8a4b068fb95a9a2736b3dba3782dbbbf73815290
import glob
import pathlib

from .filemanager import filemanager_class


class database_class(filemanager_class):
    def __init__(self):
        filemanager_class.__init__(self)

    async def update_info(self, year, cid, vid, title, explanation):
        # load the existing json
        json_file = "/".join([self.video_dir, str(year), cid, vid, "info.json"])
        _dict = await self.read_json(json_file)
        if not _dict:
            return False
        # update the json
        _dict["title"] = title
        _dict["explanation"] = explanation
        # write the json
        if self.write_json(json_file, _dict):
            return True
        return False

    async def encode_result(self, folderpath, resolution, result=True):
        # load the existing json
        json_file = "/".join([folderpath, "info.json"])
        _dict = await self.read_json(json_file)
        if not _dict:
            return False
        if result:
            # add the finished resolution
            _dict["resolution"].append(f"{resolution}p")
            _dict["encode_tasks"].remove(f"{resolution}p")
        else:
            _dict["encode_error"].append(f"{resolution}p")
            _dict["encode_tasks"].remove(f"{resolution}p")
        # write the json
        self.write_json(json_file, _dict)
        # write to the playlist
        playlist = "/".join([folderpath, "playlist.m3u8"])
        await self.write_playlist(playlist, resolution)

    async def encode_task(self, folderpath, resolution):
        # load the existing json
        json_file = "/".join([folderpath, "info.json"])
        _dict = await self.read_json(json_file)
        if not _dict:
            return False
        if f"{resolution}p" in _dict["resolution"]:
            return True
        # add the encode task
        _dict["encode_tasks"].append(f"{resolution}p")
        # write the json
        if self.write_json(json_file, _dict):
            return True
        return False

    async def encode_error(self, folderpath, message):
        # load the existing json
        json_file = "/".join([folderpath, "info.json"])
        _dict = await self.read_json(json_file)
        if not _dict:
            return False
        # add the error message
        _dict["encode_error"].append(f"{message}")
        # write the json
        if self.write_json(json_file, _dict):
            return True
        return False

    async def get_all_info(self):
        json_files_path = await self.async_wrap(glob.glob)(
            f"./{self.video_dir}/**/info.json",
            recursive=True)
        result = []
        for json_file in json_files_path:
            temp = await self.read_json(json_file)
            directory = "/".join(json_file.split("/")[:-1])
            temp["video_directory"] = directory
            try:
                temp["video_file_name"] = glob.glob(
                    f"{directory}/1.*")[0].split("/")[-1]
            except IndexError:
                temp["video_file_name"] = None
            result.append(temp)
        return result

    async def get_encode_tasks(self):
        video_info = await self.get_all_info()
        result = []
        for info in video_info:
            if len(info["encode_tasks"]) > 0:
                result.append(info)
        return result

    async def list_video_id(self, year, cid):
        _video_dir = "/".join([self.video_dir, str(year), cid])
        temp = await self.async_wrap(glob.glob)(f"{_video_dir}/*")
        return [video_id.split("/")[-1] for video_id in temp]

    async def list_link(self, year, cid):
        _video_dir = "/".join([self.video_dir, str(year), cid])
        temp = await self.async_wrap(glob.glob)(f"{_video_dir}/*")
        result = {}
        for link_path in temp:
            json_file = link_path + "/info.json"
            _dict = await self.read_json(json_file)
            if not _dict:
                pass
            else:
                result[link_path.split("/")[-1]] = _dict
        return result


database = database_class()
[((2763, 2792), 'glob.glob', 'glob.glob', (['f"""{directory}/1.*"""'], {}), "(f'{directory}/1.*')\n", (2772, 2792), False, 'import glob\n'), ((4384, 4413), 'glob.glob', 'glob.glob', (['f"""{directory}/1.*"""'], {}), "(f'{directory}/1.*')\n", (4393, 4413), False, 'import glob\n')]
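Each method above edits one info.json and writes it back; encode_result in particular moves a finished resolution from "encode_tasks" into "resolution" (or into "encode_error" on failure). A pure-Python sketch of that state transition, detached from the async file I/O:

def apply_encode_result(info, resolution, result=True):
    # Move "<resolution>p" out of the pending queue into the matching list.
    key = "resolution" if result else "encode_error"
    info[key].append(f"{resolution}p")
    info["encode_tasks"].remove(f"{resolution}p")
    return info

info = {"resolution": [], "encode_tasks": ["720p"], "encode_error": []}
print(apply_encode_result(info, 720))
# {'resolution': ['720p'], 'encode_tasks': [], 'encode_error': []}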
scoofy/open-geotiling
python/OpenGeoTile.py
0b1305d4482d6df46104135662ffe4565f92f9f0
from openlocationcode import openlocationcode as olc from enum import Enum import math, re class TileSize(Enum): ''' An area of 20° x 20°. The side length of this tile varies with its location on the globe, but can be up to approximately 2200km. Tile addresses will be 2 characters long.''' GLOBAL = (2, 20.0) ''' An area of 1° x 1°. The side length of this tile varies with its location on the globe, but can be up to approximately 110km. Tile addresses will be 4 characters long.''' REGION = (4, 1.0) ''' An area of 0.05° x 0.05°. The side length of this tile varies with its location on the globe, but can be up to approximately 5.5km. Tile addresses will be 6 characters long.''' DISTRICT = (6, 0.05) ''' An area of 0.0025° x 0.0025°. The side length of this tile varies with its location on the globe, but can be up to approximately 275m. Tile addresses will be 8 characters long.''' NEIGHBORHOOD = (8, 0.0025) ''' An area of 0.000125° x 0.000125°. The side length of this tile varies with its location on the globe, but can be up to approximately 14m. Tile addresses will be 10 characters long.''' PINPOINT = (10, 0.000125) def __init__(self, code_length, coordinate_increment): self.code_length = code_length self.coordinate_increment = coordinate_increment def getCodeLength(self): '''get 0th value''' return self.code_length def getCoordinateIncrement(self): '''get 1th value''' return self.coordinate_increment # Copy from OpenLocationCode.java # A separator used to break the code into two parts to aid memorability. SEPARATOR = '+' # Copy from OpenLocationCode.java # The character used to pad codes. PADDING_CHARACTER = '0' PADDING_2 = "00" PADDING_4 = "0000" PADDING_6 = "000000" CODE_ALPHABET = olc.CODE_ALPHABET_ BASE_20_SET = {x+y for x in CODE_ALPHABET for y in CODE_ALPHABET} BASE_20_BORDER_SET = {x for x in BASE_20_SET if x[0] in ['2', 'X'] or x[1] in ['2', 'X']} NORTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0] == 'X'} EAST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1] == 'X'} SOUTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0] == '2'} WEST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1] == '2'} memoized_digit_dict = { "N1": NORTH_DIGITS, "E1": EAST_DIGITS, "S1": SOUTH_DIGITS, "W1": WEST_DIGITS, } def is_padded(plus_code): return plus_code.find(PADDING_CHARACTER) != -1 def is_tile_address(plus_code): return plus_code.find(SEPARATOR) == -1 def return_code_of_tile_size(too_precise_plus_code, desired_tile_size): code = too_precise_plus_code if not is_tile_address(code): code = code.replace(SEPARATOR, '') if is_padded(code): if code.find(PADDING_CHARACTER) < desired_tile_size.getCodeLength(): raise Exception("OLC padding larger than allowed by desired_tile_size") code_address = code[:desired_tile_size.getCodeLength()] full_length = TileSize.PINPOINT.getCodeLength() code = code_address + ("0" * (full_length - len(code_address))) if desired_tile_size == TileSize.PINPOINT: code = code[:-2] + SEPARATOR + code[-2:] else: code = code[:-2] + SEPARATOR return code def return_set_of_subaddresses(set_of_addresses): for address in set_of_addresses: if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already minimum possible size ''' return None return {address+base for address in set_of_addresses for base in BASE_20_SET} class OpenGeoTile(): ''' /** * A wrapper around an {@code OpenLocationCode} object, focusing on the area identified by a prefix * of the given OpenLocationCode. 
* * Using this wrapper class allows to determine whether two locations are in the same or adjacent * "tiles", to determine all neighboring tiles of a given one, to calculate a distance in tiles etc. * * Open Location Code is a technology developed by Google and licensed under the Apache License 2.0. * For more information, see https://github.com/google/open-location-code * * @author Andreas Bartels * @version 0.1.0 */ Ported by scoofy on 08.31.21 ''' def __init__(self, code=None, tile_size=None, lat=None, long=None, ): if not (code or (code and tile_size) or (lat and long)): raise Exception("Invalid OpenGeoTile constructor arguments") if lat and long: self.constructTileFromLatLong(lat, long, tile_size) elif code and tile_size: self.constructTileFromCodeAndSize(code, tile_size) elif code: if is_tile_address(code): self.constructTileFromTileAddress(code) else: self.constructTileFromCode(code) self.tile_address = self.code.replace(SEPARATOR, "")[0: self.tile_size.getCodeLength()] def constructTileFromCode(self, plus_code): '''/** * Creates a new OpenGeoTile from an existing * {@link com.google.openlocationcode.OpenLocationCode}. * @param olc OpenLocationCode for the current location. This can be a padded code, in which * case the resulting OpenGeoTile will have a larger TileSize. * @throws IllegalArgumentException if olc is not a full code */''' if not olc.isFull(plus_code): raise Exception("Only full OLC supported. Use olc.recoverNearest().") self.code = plus_code.upper() if is_padded(plus_code): code_length = plus_code.find(PADDING_CHARACTER) else: code_length = min(len(plus_code)-1, 10) if code_length == TileSize.GLOBAL.getCodeLength(): self.tile_size = TileSize.GLOBAL elif code_length == TileSize.REGION.getCodeLength(): self.tile_size = TileSize.REGION elif code_length == TileSize.DISTRICT.getCodeLength(): self.tile_size = TileSize.DISTRICT elif code_length == TileSize.NEIGHBORHOOD.getCodeLength(): self.tile_size = TileSize.NEIGHBORHOOD elif code_length == TileSize.PINPOINT.getCodeLength(): self.tile_size = TileSize.PINPOINT else: raise Exception("Too precise, sort this later") def constructTileFromCodeAndSize(self, plus_code, tile_size): ''' Creates a new OpenGeoTile from an existing {@link com.google.openlocationcode.OpenLocationCode}. @param olc OpenLocationCode for the current location @param tile_size tile size to use for this OpenGeoTile @throws IllegalArgumentException when trying to pass a short (non-full) OLC, or if OLC has too much padding for given tile_size ''' if not olc.isFull(plus_code): raise Exception("Only full OLC supported. Use recover().") modified_plus_code = return_code_of_tile_size(plus_code, tile_size) self.code = modified_plus_code.upper() self.tile_size = tile_size def constructTileFromLatLong(self, lat: float, long: float, tile_size=None): '''/** * Creates a new OpenGeoTile from lat/long coordinates. * @param latitude latitude of the location * @param longitude longitude of the location * @param tile_size tile size to use for this OpenGeoTile * @throws IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(double, double, int)} */''' if not tile_size: tile_size = TileSize.PINPOINT self.code = olc.encode(lat, long, tile_size.getCodeLength()).upper() self.tile_size = tile_size def constructTileFromTileAddress(self, tileAddress): '''/** * Creates a new OpenGeoTile from a tile address. 
* @param tileAddress a tile address is a [2/4/6/8/10]-character string that corresponds to a * valid {@link com.google.openlocationcode.OpenLocationCode} after removing * '+' and an additional number of trailing characters; tile size is * determined by the length of this address * @throws IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(String)} or thrown if tileAddress is of * invalid length */''' detectedTileSize = None olcBuilder = "" if len(tileAddress) == TileSize.GLOBAL.getCodeLength(): detectedTileSize = TileSize.GLOBAL olcBuilder += tileAddress + PADDING_6 + SEPARATOR if len(tileAddress) == TileSize.REGION.getCodeLength(): detectedTileSize = TileSize.REGION olcBuilder += tileAddress + PADDING_4 + SEPARATOR if len(tileAddress) == TileSize.DISTRICT.getCodeLength(): detectedTileSize = TileSize.DISTRICT olcBuilder += tileAddress + PADDING_2 + SEPARATOR if len(tileAddress) == TileSize.NEIGHBORHOOD.getCodeLength(): detectedTileSize = TileSize.NEIGHBORHOOD olcBuilder += tileAddress + SEPARATOR if len(tileAddress) == TileSize.PINPOINT.getCodeLength(): detectedTileSize = TileSize.PINPOINT olcBuilder += tileAddress[0:8] + SEPARATOR + tileAddress[8:10] if detectedTileSize == None: print(tileAddress) raise Exception("Invalid tile address") self.tile_size = detectedTileSize self.code = olcBuilder.upper() def getWrappedOpenLocationCode(self): # this code is effectively redundant as python has no wrapping '''/** * The exact {@link com.google.openlocationcode.OpenLocationCode} wrapped by this OpenGeoTile. * For the plus code of the whole tile, see {@link #getTileOpenLocationCode()}. * @return the exact plus code wrapped by this OpenGeoTile */''' return self.code def returnCode(self): return self.code def getTileSize(self): '''/** * Get the {@link TileSize} of this OpenGeoTile. * @return the {@link TileSize} of this OpenGeoTile */''' return self.tile_size def getTileAddress(self): '''/** * A tile address is a string of length 2, 4, 6, 8, or 10, which corresponds to a valid * {@link com.google.openlocationcode.OpenLocationCode} after padding with an appropriate * number of '0' and '+' characters. Example: Address "CVXW" corresponds to OLC "CVXW0000+" * @return the tile address of this OpenGeoTile; */''' return self.tile_address def getTileAddressPrefix(self): '''/** * The prefix of a tile address is the address of the next biggest tile at this location. * @return this tile's address with the final two characters removed. In case of a GLOBAL tile, * returns the empty string. */''' if self.tile_size == TileSize.GLOBAL: return "" else: return self.getTileAddress()[0: self.tile_size.getCodeLength()-2] def getParentTileAddress(self): return self.getTileAddressPrefix() def getTileOpenLocationCode(self): # this code is redundant '''/** * The full {@link com.google.openlocationcode.OpenLocationCode} for this tile. Other than * {@link #getWrappedOpenLocationCode()}, this will return a full plus code for the whole tile. * @return a plus code for the whole tile, probably padded with '0' characters */''' return self.getWrappedOpenLocationCode() def getNeighbors(self, eight_point_direction=None): '''/** * Get an array of the typically 8 neighboring tiles of the same size. * @return an array of the typically 8 neighboring tiles of the same size; * may return less than 8 neighbors for tiles near the poles. 
*/''' # deltas = [20.0, 1.0, 0.05, 0.0025, 0.000125] delta = self.getTileSize().getCoordinateIncrement() code_area = olc.decode(self.code) latitude = code_area.latitudeCenter longitude = code_area.longitudeCenter '''directions_list included to keep ordered data''' directions_list = ["NW", "N", "NE", "E", "SE", "S", "SW", "W"] direction_dict = { "NW": [+1, -1], "N": [+1, 0], "NE": [+1, +1], "W": [ 0, -1], "E": [ 0, +1], "SW": [-1, -1], "S": [-1, 0], "SE": [-1, +1], } #lat_diff = [+1, +1, +1, 0, -1, -1, -1, 0] #long_diff = [-1, 0, +1, +1, +1, 0, -1, -1] if not type(eight_point_direction) in [type(None), list, str]: raise Exception("eight_point_direction must be of type list or str") if eight_point_direction is None: directions = directions_list elif isinstance(eight_point_direction, str): directions = [] if eight_point_direction.upper() in directions_list: directions.append(eight_point_direction.upper()) else: ''' this list construction keeps directions in the order above ''' uppercase_input_directions = [d.upper() for d in eight_point_direction] directions = [direction for direction in directions_list if direction in uppercase_input_directions] neighbors = set() for direction in directions: lat_diff, long_diff = direction_dict.get(direction) ''' //OLC constructor clips and normalizes, //so we don't have to deal with invalid lat/long values directly''' neighborLatitude = latitude + (delta * lat_diff) neighborLongitude = longitude + (delta * long_diff) new_OpenGeoTile = OpenGeoTile(lat=neighborLatitude, long=neighborLongitude, tile_size=self.getTileSize()) if not self.isSameTile(new_OpenGeoTile): '''//don't add tiles that are the same as this one due to clipping near the poles''' neighbors.add(new_OpenGeoTile) return neighbors def isSameTile(self, potentialSameTile): '''/** * Check if a tile describes the same area as this one. * @param potentialSameTile the OpenGeoTile to check * @return true if tile sizes and addresses are the same; false if not */''' if potentialSameTile.getTileSize() != self.getTileSize(): return False return potentialSameTile.getTileAddress() == self.getTileAddress() def isNeighbor(self, potentialNeighbor): '''/** * Check if a tile is neighboring this one. * @param potentialNeighbor the OpenGeoTile to check * @return true if this and potentialNeighbor are adjacent (8-neighborhood); * false if not */''' if potentialNeighbor.getTileSize() == self.getTileSize(): '''//avoid iterating over neighbors for same tile''' if self.isSameTile(potentialNeighbor): return False neighbors = self.getNeighbors() for neighbor in neighbors: if potentialNeighbor.isSameTile(neighbor): return True return False else: '''//tiles of different size are adjacent if at least one neighbor of the smaller tile, //but not the smaller tile itself, is contained within the bigger tile''' if potentialNeighbor.getTileSize().getCodeLength() > self.tile_size.getCodeLength(): smallerTile = potentialNeighbor biggerTile = self else: smallerTile = self biggerTile = potentialNeighbor if biggerTile.contains(smallerTile): return False neighbors = smallerTile.getNeighbors() for neighbor in neighbors: if biggerTile.contains(neighbor): return True return False def contains(self, potentialMember): '''/** * Check if this tile contains another one. 
* @param potentialMember the OpenGeoTile to check * @return true if the area potentialMember falls within the area of this tile, including cases * where both are the same; false if not */''' # //if A contains B, then B's address has A's address as a prefix return potentialMember.getTileAddress().startswith(self.getTileAddress()) def getManhattanTileDistanceTo(self, otherTile): '''/** * Calculates the Manhattan (city block) distance between this and another tile of the same size. * @param otherTile another tile of the same size as this one * @return an integer value corresponding to the number of tiles of the given size that need to * be traversed getting from one to the other tile * @throws IllegalArgumentException thrown if otherTile has different {@link TileSize} */''' if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") return self.getLatitudinalTileDistance(otherTile, True) + self.getLongitudinalTileDistance(otherTile, True) def getChebyshevTileDistanceTo(self, otherTile): '''/** * Calculates the Chebyshev (chessboard) distance between this and another tile of the same size. * @param otherTile another tile of the same size as this one * @return an integer value corresponding to the number of tiles of the given size that need to * be traversed getting from one to the other tile * @throws IllegalArgumentException thrown if otherTile has different {@link TileSize} */''' if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") return max(self.getLatitudinalTileDistance(otherTile, True), self.getLongitudinalTileDistance(otherTile, True)) def getDirection(self, otherTile): '''/** * Returns the approximate direction of the other tile relative to this. The return value can * have a large margin of error, especially for big or far away tiles, so this should only be * interpreted as a very rough approximation and used as such. 
* @param otherTile another tile of the same size as this one * @return an angle in radians, 0 being an eastward direction, +/- PI being westward direction * @throws IllegalArgumentException thrown if otherTile has different {@link TileSize} */''' if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") xDiff = int(self.getLongitudinalTileDistance(otherTile, False)) yDiff = int(self.getLatitudinalTileDistance(otherTile, False)) return math.atan2(yDiff, xDiff) def getEightPointDirectionOfNeighbor(self, neighborTile): ''' returns neighbor's direction, to assist in expanding tile areas ''' if not self.isNeighbor(neighborTile): raise Exception("neighborTile must be neighbor") if neighborTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") self_tile_x = self.getTileAddress()[-2] self_tile_y = self.getTileAddress()[-1] other_tile_x = neighborTile.getTileAddress()[-2] other_tile_y = neighborTile.getTileAddress()[-1] direction = "" north_south = None if self_tile_x != other_tile_x: ''' one tile is above the other ''' if CODE_ALPHABET.find(self_tile_x) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_x) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_x) == 0: ''' other tile is above -> neighborTile is north ''' direction = direction + 'N' else: direction = direction + 'S' else: if CODE_ALPHABET.find(self_tile_x) < CODE_ALPHABET.find(other_tile_x): ''' other tile is above -> neighborTile is north ''' direction = direction + 'N' else: ''' other tile is below -> neighborTile is south ''' direction = direction + 'S' if self_tile_y != other_tile_y: ''' one tile is above the other ''' if CODE_ALPHABET.find(self_tile_y) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_y) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_y) == 0: ''' other tile is right -> neighborTile is east ''' direction = direction + 'E' else: ''' other tile is left -> neighborTile is west ''' direction = direction + 'W' else: if CODE_ALPHABET.find(self_tile_y) < CODE_ALPHABET.find(other_tile_y): ''' other tile is right -> neighborTile is east ''' direction = direction + 'E' else: ''' other tile is left -> neighborTile is west ''' direction = direction + 'W' return direction def getCharacterIndex(self, c): '''//following definitions copied from OpenLocationCode.java''' index = "23456789CFGHJMPQRVWX".find(c.upper()) if index == -1: raise Exception("Character does not exist in alphabet") return index def characterDistance(self, c1, c2): return self.getCharacterIndex(c1) - self.getCharacterIndex(c2) def getLatitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") numIterations = self.tile_size.getCodeLength()/2 #1..5 tileDistance = 0 for i in range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2] c2 = otherTile.getTileAddress()[i*2] tileDistance += self.characterDistance(c1,c2) if absolute_value_bool: return abs(tileDistance) return tileDistance def getLongitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") numIterations = self.tile_size.getCodeLength()/2 #; //1..5 tileDistance = 0 for i in range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2 + 1] c2 = otherTile.getTileAddress()[i*2 + 1] if i == 0: '''//for the first 
longitudinal value, we need to take care of wrapping - basically, //if it's shorter to go the other way around, do so''' firstDiff = self.characterDistance(c1, c2) NUM_CHARACTERS_USED = 18 #; //360°/20° = 18 if abs(firstDiff) > NUM_CHARACTERS_USED/2: if firstDiff > 0: firstDiff -= NUM_CHARACTERS_USED else: firstDiff += NUM_CHARACTERS_USED tileDistance += firstDiff else: tileDistance += self.characterDistance(c1, c2) if absolute_value_bool: return abs(tileDistance) return tileDistance def returnSetOfSubtiles(self, desired_tile_size=TileSize.PINPOINT): if self.tile_size.getCodeLength() == desired_tile_size.getCodeLength(): ''' tile is desired size ''' return self elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too big' raise Exception("OLC padding larger than allowed by desired_tile_size") iterations_needed = desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2 address_set = set([self.getTileAddress()]) for i in range(int(iterations_needed)): address_set = return_set_of_subaddresses(address_set) tile_set = {OpenGeoTile(address) for address in address_set} return tile_set def returnSetOfBorderSubtiles(self, desired_tile_size=TileSize.PINPOINT, eight_point_direction=None): address = self.getTileAddress() if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already minimum possible size ''' return None elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too big' raise Exception("OLC padding larger than allowed by desired_tile_size") iterations_needed = int(desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2) north_set = set() east_set = set() south_set = set() west_set = set() if isinstance(eight_point_direction, str): eight_point_direction = eight_point_direction.upper() set_of_border_subaddresses = set() if eight_point_direction is None: ''' all borders ''' ''' traveling salesman problem ''' ''' let's do it once, and try to reduce by swaping digits ''' all_border_set = memoized_digit_dict.get(f"A{iterations_needed}") if not all_border_set: north_base_set = memoized_digit_dict.get(f"N{iterations_needed}") if not north_base_set: self.memoizeDigitDict("N", iterations_needed) north_set = memoized_digit_dict.get(f"N{iterations_needed}") east_set = memoized_digit_dict.get(f"E{iterations_needed}", set()) south_set = memoized_digit_dict.get(f"S{iterations_needed}", set()) west_set = memoized_digit_dict.get(f"W{iterations_needed}", set()) east_exists = east_set != set() south_exists = south_set != set() west_exists = west_set != set() for base in north_set: east_base = "" south_base = "" west_base = "" base_tuple_list = re.findall('..', base) ''' north will be Xd east dX south 2d west d2''' for n_tuple in base_tuple_list: relevant_digit = n_tuple[1] if not east_exists: east_base += relevant_digit + "X" if not south_exists: south_base += "2" + relevant_digit if not west_exists: west_base += relevant_digit + "2" if not east_exists: east_set.add(east_base) if not south_exists: south_set.add(south_base) if not west_exists: west_set.add(west_base) memoized_digit_dict[f"E{iterations_needed}"] = east_set memoized_digit_dict[f"S{iterations_needed}"] = south_set memoized_digit_dict[f"W{iterations_needed}"] = west_set all_border_set = north_set | east_set | south_set | west_set memoized_digit_dict[f"A{iterations_needed}"] = all_border_set return {OpenGeoTile(address+base) for base in all_border_set} elif len(eight_point_direction) == 1: ''' North, South, East, or West ''' 
base_set = memoized_digit_dict.get(f"{eight_point_direction}{iterations_needed}") if not base_set: self.memoizeDigitDict(eight_point_direction, iterations_needed) base_set = memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}') return {OpenGeoTile(address + base) for base in base_set} elif len(eight_point_direction) == 2: ''' NW, NE, SW, SE... should return only one tile''' ordinal_digit_dict = { 'NW': 'X2', 'NE': 'XX', 'SE': '2X', 'SW': '22' } base = '' for i in range(iterations_needed): base += ordinal_digit_dict.get(eight_point_direction) return {OpenGeoTile(address + base)} def memoizeDigitDict(self, eight_point_direction, iterations_needed): base_set = memoized_digit_dict.get(f"{eight_point_direction}{iterations_needed}") if not base_set: quickest_i = 0 for i in reversed(range(iterations_needed)): if memoized_digit_dict.get(f"{eight_point_direction}{i + 1}"): quickest_i = i break for i in range(quickest_i, iterations_needed): existing_bases = memoized_digit_dict.get(f"{eight_point_direction}{i + 1}") next_set = {existing_base + base for existing_base in existing_bases for base in memoized_digit_dict.get(f"{eight_point_direction}1")} memoized_digit_dict[f"{eight_point_direction}{i + 2}"] = next_set
[((12271, 12292), 'openlocationcode.openlocationcode.decode', 'olc.decode', (['self.code'], {}), '(self.code)\n', (12281, 12292), True, 'from openlocationcode import openlocationcode as olc\n'), ((19098, 19122), 'math.atan2', 'math.atan2', (['yDiff', 'xDiff'], {}), '(yDiff, xDiff)\n', (19108, 19122), False, 'import math, re\n'), ((5509, 5530), 'openlocationcode.openlocationcode.isFull', 'olc.isFull', (['plus_code'], {}), '(plus_code)\n', (5519, 5530), True, 'from openlocationcode import openlocationcode as olc\n'), ((6931, 6952), 'openlocationcode.openlocationcode.isFull', 'olc.isFull', (['plus_code'], {}), '(plus_code)\n', (6941, 6952), True, 'from openlocationcode import openlocationcode as olc\n'), ((26660, 26682), 're.findall', 're.findall', (['""".."""', 'base'], {}), "('..', base)\n", (26670, 26682), False, 'import math, re\n')]
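A usage sketch for the class above, assuming the file is importable as a module named OpenGeoTile and that the openlocationcode dependency is installed; the coordinates are arbitrary:

from OpenGeoTile import OpenGeoTile, TileSize

a = OpenGeoTile(lat=52.52, long=13.405, tile_size=TileSize.DISTRICT)
b = OpenGeoTile(code=a.returnCode())   # same tile, rebuilt from its plus code
print(a.getTileAddress())              # 6-character DISTRICT address
print(a.isSameTile(b))                 # True
print(len(a.getNeighbors()))           # typically 8 adjacent tiles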
ShujaKhalid/deep-rl
deep-rl/lib/python2.7/site-packages/OpenGL/arrays/arraydatatype.py
99c6ba6c3095d1bfdab81bd01395ced96bddd611
"""Array data-type implementations (abstraction points for GL array types""" import ctypes import OpenGL from OpenGL.raw.GL import _types from OpenGL import plugins from OpenGL.arrays import formathandler, _arrayconstants as GL_1_1 from OpenGL import logs _log = logs.getLog( 'OpenGL.arrays.arraydatatype' ) from OpenGL import acceleratesupport ADT = None if acceleratesupport.ACCELERATE_AVAILABLE: try: from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT except ImportError as err: _log.warn( "Unable to load ArrayDatatype accelerator from OpenGL_accelerate" ) if ADT is None: # Python-coded version class HandlerRegistry( dict ): GENERIC_OUTPUT_PREFERENCES = ['numpy','ctypesarrays'] def __init__( self, plugin_match ): self.match = plugin_match self.output_handler = None self.preferredOutput = None self.all_output_handlers = [] def __call__( self, value ): """Lookup of handler for given value""" try: typ = value.__class__ except AttributeError as err: typ = type(value) handler = self.get( typ ) if not handler: if hasattr( typ, '__mro__' ): for base in typ.__mro__: handler = self.get( base ) if not handler: handler = self.match( base ) if handler: handler = handler.load() if handler: handler = handler() if handler: self[ typ ] = handler if hasattr( handler, 'registerEquivalent' ): handler.registerEquivalent( typ, base ) return handler raise TypeError( """No array-type handler for type %s.%s (value: %s) registered"""%( typ.__module__, type.__name__, repr(value)[:50] ) ) return handler def handler_by_plugin_name( self, name ): plugin = plugins.FormatHandler.by_name( name ) if plugin: try: return plugin.load() except ImportError as err: return None else: raise RuntimeError( 'No handler of name %s found'%(name,)) def get_output_handler( self ): """Fast-path lookup for output handler object""" if self.output_handler is None: if self.preferredOutput is not None: self.output_handler = self.handler_by_plugin_name( self.preferredOutput ) if not self.output_handler: for preferred in self.GENERIC_OUTPUT_PREFERENCES: self.output_handler = self.handler_by_plugin_name( preferred ) if self.output_handler: break if not self.output_handler: raise RuntimeError( """Unable to find any output handler at all (not even ctypes/numpy ones!)""" ) return self.output_handler def register( self, handler, types=None ): """Register this class as handler for given set of types""" if not isinstance( types, (list,tuple)): types = [ types ] for type in types: self[ type ] = handler if handler.isOutput: self.all_output_handlers.append( handler ) def registerReturn( self, handler ): """Register this handler as the default return-type handler""" if isinstance( handler, (str,unicode)): self.preferredOutput = handler self.output_handler = None else: self.preferredOutput = None self.output_handler = handler GLOBAL_REGISTRY = HandlerRegistry( plugins.FormatHandler.match) formathandler.FormatHandler.TYPE_REGISTRY = GLOBAL_REGISTRY class ArrayDatatype( object ): """Mix-in for array datatype classes The ArrayDatatype marker essentially is used to mark a particular argument as having an "array" type, which means that it is eligible for handling via the arrays sub-package and its registered handlers. 
""" typeConstant = None handler = GLOBAL_REGISTRY getHandler = GLOBAL_REGISTRY.__call__ returnHandler = GLOBAL_REGISTRY.get_output_handler isAccelerated = False @classmethod def getRegistry( cls ): """Get our handler registry""" return cls.handler def from_param( cls, value, typeConstant=None ): """Given a value in a known data-pointer type, convert to a ctypes pointer""" return cls.getHandler(value).from_param( value, cls.typeConstant ) from_param = classmethod( logs.logOnFail( from_param, _log ) ) def dataPointer( cls, value ): """Given a value in a known data-pointer type, return long for pointer""" try: return cls.getHandler(value).dataPointer( value ) except Exception as err: _log.warn( """Failure in dataPointer for %s instance %s""", type(value), value, ) raise dataPointer = classmethod( logs.logOnFail( dataPointer, _log ) ) def voidDataPointer( cls, value ): """Given value in a known data-pointer type, return void_p for pointer""" pointer = cls.dataPointer( value ) try: return ctypes.c_void_p(pointer) except TypeError as err: return pointer voidDataPointer = classmethod( logs.logOnFail( voidDataPointer, _log ) ) def typedPointer( cls, value ): """Return a pointer-to-base-type pointer for given value""" return ctypes.cast( cls.dataPointer(value), ctypes.POINTER( cls.baseType )) typedPointer = classmethod( typedPointer ) def asArray( cls, value, typeCode=None ): """Given a value, convert to preferred array representation""" return cls.getHandler(value).asArray( value, typeCode or cls.typeConstant ) asArray = classmethod( logs.logOnFail( asArray, _log ) ) def arrayToGLType( cls, value ): """Given a data-value, guess the OpenGL type of the corresponding pointer Note: this is not currently used in PyOpenGL and may be removed eventually. """ return cls.getHandler(value).arrayToGLType( value ) arrayToGLType = classmethod( logs.logOnFail( arrayToGLType, _log ) ) def arraySize( cls, value, typeCode = None ): """Given a data-value, calculate dimensions for the array (number-of-units)""" return cls.getHandler(value).arraySize( value, typeCode or cls.typeConstant ) arraySize = classmethod( logs.logOnFail( arraySize, _log ) ) def unitSize( cls, value, typeCode=None ): """Determine unit size of an array (if possible) Uses our local type if defined, otherwise asks the handler to guess... """ return cls.getHandler(value).unitSize( value, typeCode or cls.typeConstant ) unitSize = classmethod( logs.logOnFail( unitSize, _log ) ) def zeros( cls, dims, typeCode=None ): """Allocate a return array of the given dimensions filled with zeros""" return cls.returnHandler().zeros( dims, typeCode or cls.typeConstant ) zeros = classmethod( logs.logOnFail( zeros, _log ) ) def dimensions( cls, value ): """Given a data-value, get the dimensions (assumes full structure info)""" return cls.getHandler(value).dimensions( value ) dimensions = classmethod( logs.logOnFail( dimensions, _log ) ) def arrayByteCount( cls, value ): """Given a data-value, try to determine number of bytes it's final form occupies For most data-types this is arraySize() * atomic-unit-size """ return cls.getHandler(value).arrayByteCount( value ) arrayByteCount = classmethod( logs.logOnFail( arrayByteCount, _log ) ) # the final array data-type classes... 
class GLclampdArray( ArrayDatatype, ctypes.POINTER(_types.GLclampd )): """Array datatype for GLclampd types""" baseType = _types.GLclampd typeConstant = _types.GL_DOUBLE class GLclampfArray( ArrayDatatype, ctypes.POINTER(_types.GLclampf )): """Array datatype for GLclampf types""" baseType = _types.GLclampf typeConstant = _types.GL_FLOAT class GLfloatArray( ArrayDatatype, ctypes.POINTER(_types.GLfloat )): """Array datatype for GLfloat types""" baseType = _types.GLfloat typeConstant = _types.GL_FLOAT class GLdoubleArray( ArrayDatatype, ctypes.POINTER(_types.GLdouble )): """Array datatype for GLdouble types""" baseType = _types.GLdouble typeConstant = _types.GL_DOUBLE class GLbyteArray( ArrayDatatype, ctypes.POINTER(_types.GLbyte )): """Array datatype for GLbyte types""" baseType = _types.GLbyte typeConstant = _types.GL_BYTE class GLcharArray( ArrayDatatype, ctypes.c_char_p): """Array datatype for ARB extension pointers-to-arrays""" baseType = _types.GLchar typeConstant = _types.GL_BYTE GLcharARBArray = GLcharArray class GLshortArray( ArrayDatatype, ctypes.POINTER(_types.GLshort )): """Array datatype for GLshort types""" baseType = _types.GLshort typeConstant = _types.GL_SHORT class GLintArray( ArrayDatatype, ctypes.POINTER(_types.GLint )): """Array datatype for GLint types""" baseType = _types.GLint typeConstant = _types.GL_INT class GLubyteArray( ArrayDatatype, ctypes.POINTER(_types.GLubyte )): """Array datatype for GLubyte types""" baseType = _types.GLubyte typeConstant = _types.GL_UNSIGNED_BYTE GLbooleanArray = GLubyteArray class GLushortArray( ArrayDatatype, ctypes.POINTER(_types.GLushort )): """Array datatype for GLushort types""" baseType = _types.GLushort typeConstant = _types.GL_UNSIGNED_SHORT class GLuintArray( ArrayDatatype, ctypes.POINTER(_types.GLuint )): """Array datatype for GLuint types""" baseType = _types.GLuint typeConstant = _types.GL_UNSIGNED_INT class GLint64Array( ArrayDatatype, ctypes.POINTER(_types.GLint64 )): """Array datatype for GLuint types""" baseType = _types.GLint64 typeConstant = None # TODO: find out what this should be! 
class GLuint64Array( ArrayDatatype, ctypes.POINTER(_types.GLuint64 )): """Array datatype for GLuint types""" baseType = _types.GLuint64 typeConstant = _types.GL_UNSIGNED_INT64 class GLenumArray( ArrayDatatype, ctypes.POINTER(_types.GLenum )): """Array datatype for GLenum types""" baseType = _types.GLenum typeConstant = _types.GL_UNSIGNED_INT class GLsizeiArray( ArrayDatatype, ctypes.POINTER(_types.GLsizei )): """Array datatype for GLsizei types""" baseType = _types.GLsizei typeConstant = _types.GL_INT class GLvoidpArray( ArrayDatatype, ctypes.POINTER(_types.GLvoid )): """Array datatype for GLenum types""" baseType = _types.GLvoidp typeConstant = _types.GL_VOID_P else: # Cython-coded array handler _log.info( 'Using accelerated ArrayDatatype' ) ArrayDatatype = ADT( None, None ) GLclampdArray = ADT( GL_1_1.GL_DOUBLE, _types.GLclampd ) GLclampfArray = ADT( GL_1_1.GL_FLOAT, _types.GLclampf ) GLdoubleArray = ADT( GL_1_1.GL_DOUBLE, _types.GLdouble ) GLfloatArray = ADT( GL_1_1.GL_FLOAT, _types.GLfloat ) GLbyteArray = ADT( GL_1_1.GL_BYTE, _types.GLbyte ) GLcharArray = GLcharARBArray = ADT( GL_1_1.GL_BYTE, _types.GLchar ) GLshortArray = ADT( GL_1_1.GL_SHORT, _types.GLshort ) GLintArray = ADT( GL_1_1.GL_INT, _types.GLint ) GLubyteArray = GLbooleanArray = ADT( GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte ) GLushortArray = ADT( GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort ) GLuintArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLuint ) GLint64Array = ADT( None, _types.GLint64 ) GLuint64Array = ADT( GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64 ) GLenumArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLenum ) GLsizeiArray = ADT( GL_1_1.GL_INT, _types.GLsizei ) GLvoidpArray = ADT( _types.GL_VOID_P, _types.GLvoidp ) GL_CONSTANT_TO_ARRAY_TYPE = { GL_1_1.GL_DOUBLE : GLclampdArray, GL_1_1.GL_FLOAT : GLclampfArray, GL_1_1.GL_FLOAT : GLfloatArray, GL_1_1.GL_DOUBLE : GLdoubleArray, GL_1_1.GL_BYTE : GLbyteArray, GL_1_1.GL_SHORT : GLshortArray, GL_1_1.GL_INT : GLintArray, GL_1_1.GL_UNSIGNED_BYTE : GLubyteArray, GL_1_1.GL_UNSIGNED_SHORT : GLushortArray, GL_1_1.GL_UNSIGNED_INT : GLuintArray, #GL_1_1.GL_UNSIGNED_INT : GLenumArray, }
[((263, 305), 'OpenGL.logs.getLog', 'logs.getLog', (['"""OpenGL.arrays.arraydatatype"""'], {}), "('OpenGL.arrays.arraydatatype')\n", (274, 305), False, 'from OpenGL import logs\n'), ((8774, 8805), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLclampd'], {}), '(_types.GLclampd)\n', (8788, 8805), False, 'import ctypes\n'), ((8973, 9004), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLclampf'], {}), '(_types.GLclampf)\n', (8987, 9004), False, 'import ctypes\n'), ((9170, 9200), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLfloat'], {}), '(_types.GLfloat)\n', (9184, 9200), False, 'import ctypes\n'), ((9365, 9396), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLdouble'], {}), '(_types.GLdouble)\n', (9379, 9396), False, 'import ctypes\n'), ((9562, 9591), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLbyte'], {}), '(_types.GLbyte)\n', (9576, 9591), False, 'import ctypes\n'), ((9979, 10009), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLshort'], {}), '(_types.GLshort)\n', (9993, 10009), False, 'import ctypes\n'), ((10171, 10199), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLint'], {}), '(_types.GLint)\n', (10185, 10199), False, 'import ctypes\n'), ((10357, 10387), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLubyte'], {}), '(_types.GLubyte)\n', (10371, 10387), False, 'import ctypes\n'), ((10594, 10625), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLushort'], {}), '(_types.GLushort)\n', (10608, 10625), False, 'import ctypes\n'), ((10799, 10828), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLuint'], {}), '(_types.GLuint)\n', (10813, 10828), False, 'import ctypes\n'), ((11001, 11031), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLint64'], {}), '(_types.GLint64)\n', (11015, 11031), False, 'import ctypes\n'), ((11226, 11257), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLuint64'], {}), '(_types.GLuint64)\n', (11240, 11257), False, 'import ctypes\n'), ((11429, 11458), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLenum'], {}), '(_types.GLenum)\n', (11443, 11458), False, 'import ctypes\n'), ((11626, 11656), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLsizei'], {}), '(_types.GLsizei)\n', (11640, 11656), False, 'import ctypes\n'), ((11817, 11846), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLvoid'], {}), '(_types.GLvoid)\n', (11831, 11846), False, 'import ctypes\n'), ((12080, 12095), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['None', 'None'], {}), '(None, None)\n', (12083, 12095), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12118, 12156), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_DOUBLE', '_types.GLclampd'], {}), '(GL_1_1.GL_DOUBLE, _types.GLclampd)\n', (12121, 12156), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12179, 12216), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_FLOAT', '_types.GLclampf'], {}), '(GL_1_1.GL_FLOAT, _types.GLclampf)\n', (12182, 12216), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12239, 12277), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_DOUBLE', '_types.GLdouble'], {}), '(GL_1_1.GL_DOUBLE, _types.GLdouble)\n', (12242, 12277), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12299, 12335), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_FLOAT', '_types.GLfloat'], {}), '(GL_1_1.GL_FLOAT, _types.GLfloat)\n', (12302, 12335), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12356, 12390), 
'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_BYTE', '_types.GLbyte'], {}), '(GL_1_1.GL_BYTE, _types.GLbyte)\n', (12359, 12390), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12428, 12462), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_BYTE', '_types.GLchar'], {}), '(GL_1_1.GL_BYTE, _types.GLchar)\n', (12431, 12462), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12484, 12520), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_SHORT', '_types.GLshort'], {}), '(GL_1_1.GL_SHORT, _types.GLshort)\n', (12487, 12520), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12540, 12572), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_INT', '_types.GLint'], {}), '(GL_1_1.GL_INT, _types.GLint)\n', (12543, 12572), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12611, 12655), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_UNSIGNED_BYTE', '_types.GLubyte'], {}), '(GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte)\n', (12614, 12655), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12678, 12724), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_UNSIGNED_SHORT', '_types.GLushort'], {}), '(GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort)\n', (12681, 12724), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12745, 12787), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_UNSIGNED_INT', '_types.GLuint'], {}), '(GL_1_1.GL_UNSIGNED_INT, _types.GLuint)\n', (12748, 12787), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12809, 12834), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['None', '_types.GLint64'], {}), '(None, _types.GLint64)\n', (12812, 12834), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12857, 12903), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_UNSIGNED_INT64', '_types.GLuint64'], {}), '(GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64)\n', (12860, 12903), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12924, 12966), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_UNSIGNED_INT', '_types.GLenum'], {}), '(GL_1_1.GL_UNSIGNED_INT, _types.GLenum)\n', (12927, 12966), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12988, 13022), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_INT', '_types.GLsizei'], {}), '(GL_1_1.GL_INT, _types.GLsizei)\n', (12991, 13022), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((13044, 13081), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['_types.GL_VOID_P', '_types.GLvoidp'], {}), '(_types.GL_VOID_P, _types.GLvoidp)\n', (13047, 13081), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((2301, 2336), 'OpenGL.plugins.FormatHandler.by_name', 'plugins.FormatHandler.by_name', (['name'], {}), '(name)\n', (2330, 2336), False, 'from OpenGL import plugins\n'), ((5245, 5277), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['from_param', '_log'], {}), '(from_param, _log)\n', (5259, 5277), False, 'from OpenGL import logs\n'), ((5718, 5751), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['dataPointer', '_log'], {}), '(dataPointer, _log)\n', (5732, 5751), False, 'from OpenGL import logs\n'), ((6104, 6141), 
'OpenGL.logs.logOnFail', 'logs.logOnFail', (['voidDataPointer', '_log'], {}), '(voidDataPointer, _log)\n', (6118, 6141), False, 'from OpenGL import logs\n'), ((6641, 6670), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['asArray', '_log'], {}), '(asArray, _log)\n', (6655, 6670), False, 'from OpenGL import logs\n'), ((7033, 7068), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['arrayToGLType', '_log'], {}), '(arrayToGLType, _log)\n', (7047, 7068), False, 'from OpenGL import logs\n'), ((7341, 7372), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['arraySize', '_log'], {}), '(arraySize, _log)\n', (7355, 7372), False, 'from OpenGL import logs\n'), ((7722, 7752), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['unitSize', '_log'], {}), '(unitSize, _log)\n', (7736, 7752), False, 'from OpenGL import logs\n'), ((8000, 8027), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['zeros', '_log'], {}), '(zeros, _log)\n', (8014, 8027), False, 'from OpenGL import logs\n'), ((8252, 8284), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['dimensions', '_log'], {}), '(dimensions, _log)\n', (8266, 8284), False, 'from OpenGL import logs\n'), ((8636, 8672), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['arrayByteCount', '_log'], {}), '(arrayByteCount, _log)\n', (8650, 8672), False, 'from OpenGL import logs\n'), ((5972, 5996), 'ctypes.c_void_p', 'ctypes.c_void_p', (['pointer'], {}), '(pointer)\n', (5987, 5996), False, 'import ctypes\n'), ((6314, 6342), 'ctypes.POINTER', 'ctypes.POINTER', (['cls.baseType'], {}), '(cls.baseType)\n', (6328, 6342), False, 'import ctypes\n')]
jbergmanster/probability
tensorflow_probability/python/build_defs.bzl
e15b307066e7485b8fe9faf3d289c739ab8d3806
# Copyright 2019 The TensorFlow Probability Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """Build defs for TF/NumPy/JAX-variadic libraries & tests.""" # [internal] load python3.bzl NO_REWRITE_NEEDED = [ "internal:all_util", "internal:docstring_util", "internal:reparameterization", "layers", "platform_google", ] REWRITER_TARGET = "//tensorflow_probability/substrates/meta:rewrite" RUNFILES_ROOT = "tensorflow_probability/" def _substrate_src(src, substrate): """Rewrite a single src filename for the given substrate.""" return "_{}/_generated_{}".format(substrate, src) def _substrate_srcs(srcs, substrate): """Rewrite src filenames for the given substrate.""" return [_substrate_src(src, substrate) for src in srcs] def _substrate_dep(dep, substrate): """Convert a single dep to one appropriate for the given substrate.""" dep_to_check = dep if dep.startswith(":"): dep_to_check = "{}{}".format(native.package_name(), dep) for no_rewrite in NO_REWRITE_NEEDED: if no_rewrite in dep_to_check: return dep if "tensorflow_probability/" in dep or dep.startswith(":"): if "internal/backend" in dep: return dep if ":" in dep: return "{}.{}".format(dep, substrate) return "{}:{}.{}".format(dep, dep.split("/")[-1], substrate) return dep def _substrate_deps(deps, substrate): """Convert deps to those appropriate for the given substrate.""" new_deps = [_substrate_dep(dep, substrate) for dep in deps] backend_dep = "//tensorflow_probability/python/internal/backend/{}".format(substrate) if backend_dep not in new_deps: new_deps.append(backend_dep) return new_deps # This is needed for the transitional period during which we have the internal # py2and3_test and py_test comingling in BUILD files. Otherwise the OSS export # rewrite process becomes irreversible. def py3_test(*args, **kwargs): """Internal/external reversibility, denotes py3-only vs py2+3 tests. Args: *args: Passed to underlying py_test. **kwargs: Passed to underlying py_test. srcs_version and python_version are added (with value `"PY3"`) if not specified. """ kwargs = dict(kwargs) if "srcs_version" not in kwargs: kwargs["srcs_version"] = "PY3" if "python_version" not in kwargs: kwargs["python_version"] = "PY3" native.py_test(*args, **kwargs) def _resolve_omit_dep(dep): """Resolves a `substrates_omit_deps` item to full target.""" if ":" not in dep: dep = "{}:{}".format(dep, dep.split("/")[-1]) if dep.startswith(":"): dep = "{}{}".format(native.package_name(), dep) return dep def _substrate_runfiles_symlinks_impl(ctx): """A custom BUILD rule to generate python runfiles symlinks. A custom build rule which adds runfiles symlinks for files matching a substrate genrule file pattern, i.e. `'_jax/_generated_normal.py'`. This rule will aggregate and pass along deps while adding the given symlinks to the runfiles structure. Build rule attributes: - substrate: One of 'jax' or 'numpy'; which substrate this applies to. - deps: A list of py_library labels. These are passed along. 
Args: ctx: Rule analysis context. Returns: Info objects to propagate deps and add runfiles symlinks. """ # Aggregate the depset inputs to resolve transitive dependencies. transitive_sources = [] uses_shared_libraries = [] imports = [] has_py2_only_sources = [] has_py3_only_sources = [] cc_infos = [] for dep in ctx.attr.deps: if PyInfo in dep: transitive_sources.append(dep[PyInfo].transitive_sources) uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries) imports.append(dep[PyInfo].imports) has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources) has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources) # if PyCcLinkParamsProvider in dep: # DisableOnExport # cc_infos.append(dep[PyCcLinkParamsProvider].cc_info) # DisableOnExport if CcInfo in dep: cc_infos.append(dep[CcInfo]) # Determine the set of symlinks to generate. transitive_sources = depset(transitive = transitive_sources) runfiles_dict = {} substrate = ctx.attr.substrate file_substr = "_{}/_generated_".format(substrate) for f in transitive_sources.to_list(): if "tensorflow_probability" in f.dirname and file_substr in f.short_path: pre, post = f.short_path.split("/python/") out_path = "{}/substrates/{}/{}".format( pre, substrate, post.replace(file_substr, ""), ) runfiles_dict[RUNFILES_ROOT + out_path] = f # Construct the output structures to pass along Python srcs/deps/etc. py_info = PyInfo( transitive_sources = transitive_sources, uses_shared_libraries = any(uses_shared_libraries), imports = depset(transitive = imports), has_py2_only_sources = any(has_py2_only_sources), has_py3_only_sources = any(has_py3_only_sources), ) py_cc_link_info = cc_common.merge_cc_infos(cc_infos = cc_infos) py_runfiles = depset( transitive = [depset(transitive = [ dep[DefaultInfo].data_runfiles.files, dep[DefaultInfo].default_runfiles.files, ]) for dep in ctx.attr.deps], ) runfiles = DefaultInfo(runfiles = ctx.runfiles( transitive_files = py_runfiles, root_symlinks = runfiles_dict, )) return py_info, py_cc_link_info, runfiles # See documentation at: # https://docs.bazel.build/versions/3.4.0/skylark/rules.html substrate_runfiles_symlinks = rule( implementation = _substrate_runfiles_symlinks_impl, attrs = { "substrate": attr.string(), "deps": attr.label_list(), }, ) def multi_substrate_py_library( name, srcs = [], deps = [], substrates_omit_deps = [], jax_omit_deps = [], numpy_omit_deps = [], testonly = 0, srcs_version = "PY2AND3"): """A TFP `py_library` for each of TF, NumPy, and JAX. Args: name: The TF `py_library` name. NumPy and JAX libraries have '.numpy' and '.jax' appended. srcs: As with `py_library`. A `genrule` is used to rewrite srcs for NumPy and JAX substrates. deps: As with `py_library`. The list is rewritten to depend on substrate-specific libraries for substrate variants. substrates_omit_deps: List of deps to omit if those libraries are not rewritten for the substrates. jax_omit_deps: List of deps to omit for the JAX substrate. numpy_omit_deps: List of deps to omit for the NumPy substrate. testonly: As with `py_library`. srcs_version: As with `py_library`. 
""" native.py_library( name = name, srcs = srcs, deps = deps, srcs_version = srcs_version, testonly = testonly, ) remove_deps = [ "//third_party/py/tensorflow", "//third_party/py/tensorflow:tensorflow", ] trimmed_deps = [dep for dep in deps if (dep not in substrates_omit_deps and dep not in remove_deps)] resolved_omit_deps_numpy = [ _resolve_omit_dep(dep) for dep in substrates_omit_deps + numpy_omit_deps ] for src in srcs: native.genrule( name = "rewrite_{}_numpy".format(src.replace(".", "_")), srcs = [src], outs = [_substrate_src(src, "numpy")], cmd = "$(location {}) $(SRCS) --omit_deps={} > $@".format( REWRITER_TARGET, ",".join(resolved_omit_deps_numpy), ), tools = [REWRITER_TARGET], ) native.py_library( name = "{}.numpy.raw".format(name), srcs = _substrate_srcs(srcs, "numpy"), deps = _substrate_deps(trimmed_deps, "numpy"), srcs_version = srcs_version, testonly = testonly, ) # Add symlinks under tfp/substrates/numpy. substrate_runfiles_symlinks( name = "{}.numpy".format(name), substrate = "numpy", deps = [":{}.numpy.raw".format(name)], testonly = testonly, ) resolved_omit_deps_jax = [ _resolve_omit_dep(dep) for dep in substrates_omit_deps + jax_omit_deps ] jax_srcs = _substrate_srcs(srcs, "jax") for src in srcs: native.genrule( name = "rewrite_{}_jax".format(src.replace(".", "_")), srcs = [src], outs = [_substrate_src(src, "jax")], cmd = "$(location {}) $(SRCS) --omit_deps={} --numpy_to_jax > $@".format( REWRITER_TARGET, ",".join(resolved_omit_deps_jax), ), tools = [REWRITER_TARGET], ) native.py_library( name = "{}.jax.raw".format(name), srcs = jax_srcs, deps = _substrate_deps(trimmed_deps, "jax"), srcs_version = srcs_version, testonly = testonly, ) # Add symlinks under tfp/substrates/jax. substrate_runfiles_symlinks( name = "{}.jax".format(name), substrate = "jax", deps = [":{}.jax.raw".format(name)], testonly = testonly, ) def multi_substrate_py_test( name, size = "small", jax_size = None, numpy_size = None, srcs = [], deps = [], tags = [], numpy_tags = [], jax_tags = [], disabled_substrates = [], srcs_version = "PY2AND3", timeout = None, shard_count = None): """A TFP `py2and3_test` for each of TF, NumPy, and JAX. Args: name: Name of the `test_suite` which covers TF, NumPy and JAX variants of the test. Each substrate will have a dedicated `py2and3_test` suffixed with '.tf', '.numpy', or '.jax' as appropriate. size: As with `py_test`. jax_size: A size override for the JAX target. numpy_size: A size override for the numpy target. srcs: As with `py_test`. These will have a `genrule` emitted to rewrite NumPy and JAX variants, writing the test file into a subdirectory. deps: As with `py_test`. The list is rewritten to depend on substrate-specific libraries for substrate variants. tags: Tags global to this test target. NumPy also gets a `'tfp_numpy'` tag, and JAX gets a `'tfp_jax'` tag. A `f'_{name}'` tag is used to produce the `test_suite`. numpy_tags: Tags specific to the NumPy test. (e.g. `"notap"`). jax_tags: Tags specific to the JAX test. (e.g. `"notap"`). disabled_substrates: Iterable of substrates to disable, items from ["numpy", "jax"]. srcs_version: As with `py_test`. timeout: As with `py_test`. shard_count: As with `py_test`. 
""" name_tag = "_{}".format(name) tags = [t for t in tags] tags.append(name_tag) tags.append("multi_substrate") native.py_test( name = "{}.tf".format(name), size = size, srcs = srcs, main = "{}.py".format(name), deps = deps, tags = tags, srcs_version = srcs_version, timeout = timeout, shard_count = shard_count, ) if "numpy" not in disabled_substrates: numpy_srcs = _substrate_srcs(srcs, "numpy") native.genrule( name = "rewrite_{}_numpy".format(name), srcs = srcs, outs = numpy_srcs, cmd = "$(location {}) $(SRCS) > $@".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) py3_test( name = "{}.numpy".format(name), size = numpy_size or size, srcs = numpy_srcs, main = _substrate_src("{}.py".format(name), "numpy"), deps = _substrate_deps(deps, "numpy"), tags = tags + ["tfp_numpy"] + numpy_tags, srcs_version = srcs_version, python_version = "PY3", timeout = timeout, shard_count = shard_count, ) if "jax" not in disabled_substrates: jax_srcs = _substrate_srcs(srcs, "jax") native.genrule( name = "rewrite_{}_jax".format(name), srcs = srcs, outs = jax_srcs, cmd = "$(location {}) $(SRCS) --numpy_to_jax > $@".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) jax_deps = _substrate_deps(deps, "jax") # [internal] Add JAX build dep py3_test( name = "{}.jax".format(name), size = jax_size or size, srcs = jax_srcs, main = _substrate_src("{}.py".format(name), "jax"), deps = jax_deps, tags = tags + ["tfp_jax"] + jax_tags, srcs_version = srcs_version, python_version = "PY3", timeout = timeout, shard_count = shard_count, ) native.test_suite( name = name, tags = [name_tag], )
[]
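The dependency-rewriting logic in `_substrate_dep` above is the heart of this build file. The following standalone Python sketch re-implements that logic so its behaviour can be checked outside Bazel; the hardcoded package name is an assumption (inside Bazel it would come from `native.package_name()`), and `NO_REWRITE_NEEDED` is trimmed for brevity.

# Standalone sketch of the _substrate_dep rewriting above; runs under plain
# Python. The package argument is assumed, since native.package_name() only
# exists inside Bazel.
NO_REWRITE_NEEDED = ["internal:all_util", "internal:docstring_util"]

def substrate_dep(dep, substrate, package="tensorflow_probability/python/distributions"):
    dep_to_check = "{}{}".format(package, dep) if dep.startswith(":") else dep
    for no_rewrite in NO_REWRITE_NEEDED:
        if no_rewrite in dep_to_check:
            return dep
    if "tensorflow_probability/" in dep or dep.startswith(":"):
        if "internal/backend" in dep:
            return dep
        if ":" in dep:
            return "{}.{}".format(dep, substrate)
        return "{}:{}.{}".format(dep, dep.split("/")[-1], substrate)
    return dep

print(substrate_dep(":normal", "jax"))                                 # :normal.jax
print(substrate_dep("//tensorflow_probability/python/math", "numpy"))  # //tensorflow_probability/python/math:math.numpy
print(substrate_dep("//third_party/py/numpy", "jax"))                  # unchanged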
lschmelzeisen/wikidata-history-analyzer
src/wikidated/wikidata/wikidata_dump.py
8673639b61839d2dca271fbbaf2feb8563b75f2d
# # Copyright 2021 Lukas Schmelzeisen # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from __future__ import annotations import json from datetime import date, datetime from logging import getLogger from pathlib import Path from typing import Mapping, MutableSequence, Sequence, Type, TypeVar import requests from pydantic import BaseModel as PydanticModel from pydantic import validator from tqdm import tqdm # type: ignore from typing_extensions import Final from wikidated._utils import RangeMap from wikidated.wikidata.wikidata_dump_file import WikidataDumpFile from wikidated.wikidata.wikidata_dump_pages_meta_history import ( WikidataDumpPagesMetaHistory, ) from wikidated.wikidata.wikidata_dump_sites_table import WikidataDumpSitesTable _LOGGER = getLogger(__name__) _T_WikidataDumpFile = TypeVar("_T_WikidataDumpFile", bound=WikidataDumpFile) class WikidataDump: def __init__( self, dump_dir: Path, version: date, *, mirror: str = "https://dumps.wikimedia.org", ) -> None: self._dump_dir = dump_dir self.version: Final = version self.mirror: Final = mirror self._dump_status = _WikidataDumpStatus.load( self._dump_dir, self.version, self.mirror ) self.sites_table: Final = self._construct_dumps( WikidataDumpSitesTable, "sitestable" )[0] self.pages_meta_history: Final = RangeMap[WikidataDumpPagesMetaHistory]() for dump_file in self._construct_dumps( WikidataDumpPagesMetaHistory, "metahistory7zdump" ): self.pages_meta_history[dump_file.page_ids] = dump_file def download( self, *, sites_table: bool = True, pages_meta_history: bool = True ) -> None: _LOGGER.info( f"Downloading Wikidata dump {self.version:%4Y%2m%2d} from '{self.mirror}'." 
) dump_files: MutableSequence[WikidataDumpFile] = [] if sites_table: dump_files.append(self.sites_table) if pages_meta_history: dump_files.extend(self.pages_meta_history.values()) with tqdm( desc=f"Wikidata dump {self.version:%4Y%2m%2d} files", total=len(dump_files), dynamic_ncols=True, position=1, ) as progress_bar_files, tqdm( desc=f"Wikidata dump {self.version:%4Y%2m%2d} bytes", total=sum(dump_file.size for dump_file in dump_files), dynamic_ncols=True, position=2, unit="B", unit_scale=True, unit_divisor=1024, ) as progress_bar_size: for dump_file in dump_files: dump_file.download() progress_bar_files.update(1) progress_bar_size.update(dump_file.size) _LOGGER.info(f"Done downloading Wikidata dump {self.version:%4Y%2m%2d}.") def _construct_dumps( self, dump_type: Type[_T_WikidataDumpFile], dump_type_id: str ) -> Sequence[_T_WikidataDumpFile]: return [ dump_type( path=self._dump_dir / path, url=self.mirror + dump_status_file.url, sha1=dump_status_file.sha1, size=dump_status_file.size, ) for path, dump_status_file in self._dump_status.jobs[ dump_type_id ].files.items() ] class _WikidataDumpStatusFile(PydanticModel): size: int url: str md5: str sha1: str class _WikidataDumpStatusJob(PydanticModel): status: str updated: datetime files: Mapping[str, _WikidataDumpStatusFile] @validator("updated", pre=True) def _parse_datetime(cls, value: str) -> datetime: # noqa: N805 return datetime.strptime(value, "%Y-%m-%d %H:%M:%S") class _WikidataDumpStatus(PydanticModel): jobs: Mapping[str, _WikidataDumpStatusJob] version: str @classmethod def load(cls, dump_dir: Path, version: date, mirror: str) -> _WikidataDumpStatus: path = dump_dir / f"wikidatawiki-{version:%4Y%2m%2d}-dumpstatus.json" if not path.exists(): url = f"{mirror}/wikidatawiki/{version:%4Y%2m%2d}/dumpstatus.json" _LOGGER.debug(f"Downloading Wikidata dump status from '{url}'.") response = requests.get(url) response.raise_for_status() path.parent.mkdir(exist_ok=True, parents=True) with path.open("w", encoding="UTF-8") as fd: fd.write(json.dumps(response.json(), indent=2) + "\n") _LOGGER.debug("Done downloading Wikidata dump status.") dump_status = _WikidataDumpStatus.parse_file(path) for job_name, job in dump_status.jobs.items(): if job.status != "done": path.unlink() raise Exception(f"Job '{job_name}' is not 'done', but '{job.status}'.") return dump_status
[((1264, 1283), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (1273, 1283), False, 'from logging import getLogger\n'), ((1307, 1361), 'typing.TypeVar', 'TypeVar', (['"""_T_WikidataDumpFile"""'], {'bound': 'WikidataDumpFile'}), "('_T_WikidataDumpFile', bound=WikidataDumpFile)\n", (1314, 1361), False, 'from typing import Mapping, MutableSequence, Sequence, Type, TypeVar\n'), ((4153, 4183), 'pydantic.validator', 'validator', (['"""updated"""'], {'pre': '(True)'}), "('updated', pre=True)\n", (4162, 4183), False, 'from pydantic import validator\n'), ((4267, 4312), 'datetime.datetime.strptime', 'datetime.strptime', (['value', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(value, '%Y-%m-%d %H:%M:%S')\n", (4284, 4312), False, 'from datetime import date, datetime\n'), ((4813, 4830), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (4825, 4830), False, 'import requests\n')]
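A minimal usage sketch for the `WikidataDump` class above: construct it with a dump directory and version, then download. The dump directory and version date here are arbitrary placeholders, not values taken from the project's documentation.

from datetime import date
from pathlib import Path

from wikidated.wikidata.wikidata_dump import WikidataDump

# Arbitrary example values; any dump version published on the mirror works.
dump = WikidataDump(dump_dir=Path("dumps"), version=date(2021, 6, 1))
dump.download(sites_table=True, pages_meta_history=False)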
Ahrvo-Trading-Systems/tcapy
tcapygen/layoutgen.py
df8439aa5c754fc9a7fde463c44c489b27112f76
from __future__ import division, print_function

__author__ = 'saeedamen'  # Saeed Amen / [email protected]

#
# Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
#
# See the License for the specific language governing permissions and limitations under the License.
#

## Web server components
import dash_core_components as dcc
import dash_html_components as html

import base64

import os

## Date/time components
import pandas as pd
import datetime
from datetime import timedelta

from collections import OrderedDict

from pandas.tseries.offsets import *

from tcapy.vis.layoutdash import LayoutDash

########################################################################################################################

class LayoutDashImplGen(LayoutDash):
    """This implements the LayoutDash abstract class, to create the web based GUI for the tcapy application. It creates two
    web pages

    - detailed_page - for doing detailed tcapy analysis for a specific currency pair
    - aggregated_page - for more aggregated style analysis across multiple currency pairs and over multiple time periods
    """

    def __init__(self, app=None, constants=None, url_prefix=''):
        super(LayoutDashImplGen, self).__init__(app=app, constants=constants, url_prefix=url_prefix)

        available_dates = pd.date_range(
            datetime.datetime.today().date() - timedelta(days=self._constants.gui_lookback_window),
            datetime.datetime.today().date(), freq=BDay())

        times = pd.date_range("0:00", "23:59", freq="15min")

        ### create the possible values for drop down boxes on both pages

        # Reverse date list (for both detailed and aggregated pages)
        self.available_dates = [x.date() for x in available_dates[::-1]]

        # For detailed page only
        self.available_times = [t.strftime("%H:%M") for t in times]

        self.available_tickers = self._constants.available_tickers_dictionary['All']
        self.available_venues = self._constants.available_venues_dictionary['All']
        self.available_brokers = self._constants.available_brokers_dictionary['All']
        self.available_algos = self._constants.available_algos_dictionary['All']
        self.available_market_data = self._constants.available_market_data

        self.available_order_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'arrival', 'twap', 'vwap',
                                           'buy trade', 'sell trade']
        self.available_execution_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'buy trade', 'sell trade']

        self.available_slippage_bounds = ['0.25', '0.5', '1.0', '1.25', '1.5', '2.0', 'bid/ask']

        # For aggregated page only
        self.available_grouped_tickers = self._flatten_dictionary(self._constants.available_tickers_dictionary)
        self.available_grouped_venues = self._flatten_dictionary(self._constants.available_venues_dictionary)
        self.available_grouped_brokers = self._flatten_dictionary(self._constants.available_brokers_dictionary)
        self.available_grouped_algos = self._flatten_dictionary(self._constants.available_algos_dictionary)

        self.available_event_types = self._constants.available_event_types
        self.available_metrics = self._constants.available_metrics

        self.available_reload = ['no', 'yes']
        self.available_visualization = ['yes', 'no']

        self.construct_layout()

    def _flatten_dictionary(self, dictionary):
        available = dictionary['All']
        available_groups = self._util_func.dict_key_list(dictionary.keys())

        return self.flatten_list_of_strings([available_groups, available])

    def construct_layout(self):
        self.page_content = html.Div([
            dcc.Location(id='url', refresh=False),
            html.Div(id='page-content')
        ])

        link_bar_dict = {'Detailed' : 'detailed',
                         'Aggregated' : 'aggregated',
                         'Compliance' : 'compliance'}
trade_outliers_cols = ['Date', 'ticker', 'side', 'notional cur', 'benchmark', 'exec not', 'exec not in rep cur', 'slippage'] broker_cols = ['Date', 'by broker notional (rep cur)'] # Main page for detailed analysing of (eg. over the course of a few days) self.pages['detailed'] = html.Div([ self._sc.header_bar('FX: Detailed - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B("Status: ok", id='detailed-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id={'start-date-val' : self.available_dates, 'start-time-val' : self.available_times}, prefix_id='detailed'), self._sc.drop_down(caption='Finish Date', id=OrderedDict([('finish-date-val', self.available_dates), ('finish-time-val', self.available_times)]), prefix_id='detailed'), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='detailed', drop_down_values=self.available_tickers), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='detailed', drop_down_values=self.available_grouped_brokers), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='detailed', drop_down_values=self.available_grouped_algos), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='detailed', drop_down_values=self.available_grouped_venues), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='detailed', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='detailed', drop_down_values=self.available_metrics) ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='detailed'), # self.button(caption = 'Print PDF', id = 'detailed-print-pdf-button', className = 'no-print'), # Orders self._sc.horizontal_bar(), self._sc.plot(caption='Orders: Timeline', id='order-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot', self.available_order_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='order-candle-timeline-download-link', download_file='download_order_candle_timeline', height=500), self._sc.plot(caption='Orders: Markout', id='order-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Orders: Histogram vs PDF fit', id='order-dist-plot', prefix_id='detailed', height=500), # Execution trades self._sc.horizontal_bar(), self._sc.plot(caption='Executions: Timeline', id='execution-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot', self.available_execution_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='execution-candle-timeline-download-link', download_file='download_execution_candle_timeline.csv', height=500), self._sc.plot(caption='Executions: Markout', id='execution-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Executions: Histogram vs PDF fit', id='execution-dist-plot', prefix_id='detailed', height=500), # Detailed tcapy markout table for executions html.Div([ html.H3('Executions: Markout Table'), html.Div(id='detailed-execution-table') ], style={'width': '1000px', 'display': 'inline-block', 'marginBottom': 5, 'marginTop': 5, 'marginLeft': 5, 'marginRight': 5}), ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################ # Secondary 
page for analysing aggregated statistics over long periods of time, eg. who is the best broker? self.pages['aggregated'] = html.Div([ self._sc.header_bar('FX: Aggregated - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B("Status: ok", id='aggregated-status'), margin_left=5), self._sc.horizontal_bar(), # dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='aggregated', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='aggregated', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='aggregated', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='aggregated', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Event Type', id='event-type-val', prefix_id='aggregated', drop_down_values=self.available_event_types), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='aggregated', drop_down_values=self.available_metrics), ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='aggregated'), # , msg_id='aggregated-status'), self._sc.horizontal_bar(), # self.date_picker_range(caption='Start/Finish Dates', id='aggregated-date-val', offset=[-7,-1]), self._sc.plot(caption='Aggregated Trader: Summary', id=['execution-by-ticker-bar-plot', 'execution-by-venue-bar-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: Timeline', id='execution-by-ticker-timeline-plot', prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: PDF fit (' + self._constants.reporting_currency + ' notional)', id=['execution-by-ticker-dist-plot', 'execution-by-venue-dist-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################ self.pages['compliance'] = html.Div([ self._sc.header_bar('FX: Compliance Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B("Status: ok", id='compliance-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='compliance', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='compliance', 
drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='compliance', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='compliance', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='compliance', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Filter by Time', id='filter-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Start Time of Day', id='start-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Finish Time of Day', id='finish-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Slippage to Mid (bp)', id='slippage-bounds-val', prefix_id='compliance', drop_down_values=self.available_slippage_bounds), self._sc.drop_down(caption='Visualization', id='visualization-val', prefix_id='compliance', drop_down_values=self.available_visualization) ]), self._sc.horizontal_bar(), html.Div([ self._sc.button(caption='Calculate', id='calculation-button', prefix_id='compliance'), # self.date_picker(caption='Start Date', id='start-date-dtpicker', prefix_id='compliance'), # self.date_picker(caption='Finish Date', id='finish-date-dtpicker', prefix_id='compliance'), ]), self._sc.horizontal_bar(), self._sc.table(caption='Compliance: Trade Outliers', id='execution-by-anomalous-table', prefix_id='compliance', columns=trade_outliers_cols, downloadplot_caption='Trade outliers CSV', downloadplot_tag='execution-by-anomalous-download-link', download_file='download_execution_by_anomalous.csv'), self._sc.table(caption='Compliance: Totals by Broker', id='summary-by-broker-table', prefix_id='compliance', columns=broker_cols, downloadplot_caption='Download broker CSV', downloadplot_tag='summary-by-broker-download-link', download_file='download_broker.csv' ), self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) # ID flags self.id_flags = { # Detailed trader page # 'timeline_trade_orders' : {'client-orders': 'order', 'executions': 'trade'}, # 'markout_trade_orders' : {'client-orders': 'order_df', 'executions': 'trade_df'}, 'detailed_candle_timeline_trade_order': {'execution': 'sparse_market_trade_df', 'order': 'sparse_market_order_df'}, 'detailed_markout_trade_order': {'execution': 'trade_df', 'order': 'order_df'}, 'detailed_table_trade_order': {'execution': 'table_trade_df_markout_by_all'}, 'detailed_dist_trade_order': {'execution': 'dist_trade_df_by/pdf/side', 'order': 'dist_order_df_by/pdf/side'}, 'detailed_download_link_trade_order': {'execution-candle-timeline': 'sparse_market_trade_df', 'order-candle-timeline': 'sparse_market_order_df'}, # Aggregated trader page 'aggregated_bar_trade_order': {'execution-by-ticker': 'bar_trade_df_by/mean/ticker', 'execution-by-venue': 'bar_trade_df_by/mean/venue'}, 'aggregated_timeline_trade_order': {'execution-by-ticker': 'timeline_trade_df_by/mean_date/ticker', 'execution-by-venue': 'timeline_trade_df_by/mean_date/venue'}, 'aggregated_dist_trade_order': {'execution-by-ticker': 'dist_trade_df_by/pdf/ticker', 'execution-by-venue': 'dist_trade_df_by/pdf/venue'}, # Compliance page 
'compliance_metric_table_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'}, 'compliance_download_link_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'}, }
[((1519, 1563), 'pandas.date_range', 'pd.date_range', (['"""0:00"""', '"""23:59"""'], {'freq': '"""15min"""'}), "('0:00', '23:59', freq='15min')\n", (1532, 1563), True, 'import pandas as pd\n'), ((1390, 1441), 'datetime.timedelta', 'timedelta', ([], {'days': 'self._constants.gui_lookback_window'}), '(days=self._constants.gui_lookback_window)\n', (1399, 1441), False, 'from datetime import timedelta\n'), ((3755, 3792), 'dash_core_components.Location', 'dcc.Location', ([], {'id': '"""url"""', 'refresh': '(False)'}), "(id='url', refresh=False)\n", (3767, 3792), True, 'import dash_core_components as dcc\n'), ((3806, 3833), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""page-content"""'}), "(id='page-content')\n", (3814, 3833), True, 'import dash_html_components as html\n'), ((1455, 1480), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (1478, 1480), False, 'import datetime\n'), ((4526, 4568), 'dash_html_components.B', 'html.B', (['"""Status: ok"""'], {'id': '"""detailed-status"""'}), "('Status: ok', id='detailed-status')\n", (4532, 4568), True, 'import dash_html_components as html\n'), ((9115, 9159), 'dash_html_components.B', 'html.B', (['"""Status: ok"""'], {'id': '"""aggregated-status"""'}), "('Status: ok', id='aggregated-status')\n", (9121, 9159), True, 'import dash_html_components as html\n'), ((12638, 12682), 'dash_html_components.B', 'html.B', (['"""Status: ok"""'], {'id': '"""compliance-status"""'}), "('Status: ok', id='compliance-status')\n", (12644, 12682), True, 'import dash_html_components as html\n'), ((1355, 1380), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (1378, 1380), False, 'import datetime\n'), ((8294, 8330), 'dash_html_components.H3', 'html.H3', (['"""Executions: Markout Table"""'], {}), "('Executions: Markout Table')\n", (8301, 8330), True, 'import dash_html_components as html\n'), ((8348, 8387), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""detailed-execution-table"""'}), "(id='detailed-execution-table')\n", (8356, 8387), True, 'import dash_html_components as html\n'), ((5004, 5107), 'collections.OrderedDict', 'OrderedDict', (["[('finish-date-val', self.available_dates), ('finish-time-val', self.\n available_times)]"], {}), "([('finish-date-val', self.available_dates), ('finish-time-val',\n self.available_times)])\n", (5015, 5107), False, 'from collections import OrderedDict\n')]
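The `_flatten_dictionary` helper above prepends the group names to the flat 'All' list, so the drop-downs can offer both whole groups and individual items. A simplified standalone equivalent (with an invented sample dictionary, and plain list operations in place of the `_util_func` helpers) illustrates the output:

# Simplified stand-in for _flatten_dictionary above; the real method goes
# through self._util_func.dict_key_list and self.flatten_list_of_strings.
available_tickers_dictionary = {  # invented sample data
    'All': ['EURUSD', 'GBPUSD', 'USDJPY'],
    'G10': ['EURUSD', 'USDJPY'],
}

def flatten_dictionary(dictionary):
    available = dictionary['All']
    available_groups = list(dictionary.keys())
    return available_groups + available

print(flatten_dictionary(available_tickers_dictionary))
# ['All', 'G10', 'EURUSD', 'GBPUSD', 'USDJPY']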
andrewtarzia/stk
tests/molecular/molecules/molecule/fixtures/cof/periodic_kagome.py
1ac2ecbb5c9940fe49ce04cbf5603fd7538c475a
import pytest import stk from ...case_data import CaseData @pytest.fixture( scope='session', params=( lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()], ), stk.BuildingBlock( smiles=( 'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2, 1), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21' ')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2][' 'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N' '=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)' '[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]' '5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()], ), stk.BuildingBlock( smiles=( 'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2, 1), optimizer=stk.PeriodicCollapser(), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21' ')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2][' 'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N' '=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)' '[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]' '5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), ), ) def cof_periodic_kagome(request) -> CaseData: return request.param( f'{request.fixturename}{request.param_index}', )
[((2901, 2924), 'stk.PeriodicCollapser', 'stk.PeriodicCollapser', ([], {}), '()\n', (2922, 2924), False, 'import stk\n'), ((429, 447), 'stk.BromoFactory', 'stk.BromoFactory', ([], {}), '()\n', (445, 447), False, 'import stk\n'), ((751, 769), 'stk.BromoFactory', 'stk.BromoFactory', ([], {}), '()\n', (767, 769), False, 'import stk\n'), ((2434, 2452), 'stk.BromoFactory', 'stk.BromoFactory', ([], {}), '()\n', (2450, 2452), False, 'import stk\n'), ((2756, 2774), 'stk.BromoFactory', 'stk.BromoFactory', ([], {}), '()\n', (2772, 2774), False, 'import stk\n')]
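Outside the pytest fixture machinery, the same periodic kagome COF can be built directly; this sketch simply restates the building blocks and topology graph used in the fixture above.

import stk

# Same building blocks as in the fixture above.
bb1 = stk.BuildingBlock(
    smiles='BrC1=C(Br)[C+]=N1',
    functional_groups=[stk.BromoFactory()],
)
bb2 = stk.BuildingBlock(
    smiles='Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](Br)[C+2]1',
    functional_groups=[stk.BromoFactory()],
)
cof = stk.ConstructedMolecule(
    topology_graph=stk.cof.PeriodicKagome(
        building_blocks=(bb1, bb2),
        lattice_size=(2, 2, 1),
    ),
)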
Oneflow-Inc/libai
projects/MAE/utils/weight_convert.py
e473bd3962f07b1e37232d2be39c8257df0ec0f3
# coding=utf-8
# Copyright 2021 The OneFlow Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging

import oneflow as flow
import torch

logger = logging.getLogger(__name__)


def convert_qkv_weight(cfg, value):
    """
    Convert qkv.weight to be compatible with LiBai transformer layer

    Args:
        cfg: config file
        value: qkv.weight in the loaded checkpoint
    """
    num_heads = cfg.model.num_heads
    hidden_size = cfg.model.embed_dim
    head_size = int(hidden_size / num_heads)
    qkv_weight = (
        value.view([3, num_heads, head_size, hidden_size])
        .permute(1, 0, 2, 3)
        .contiguous()
        .view(hidden_size * 3, hidden_size)
    )
    return qkv_weight


def convert_qkv_bias(cfg, value):
    """
    Convert qkv.bias to be compatible with LiBai transformer layer

    Args:
        cfg: config file
        value: qkv.bias in the loaded checkpoint
    """
    num_heads = cfg.model.num_heads
    hidden_size = cfg.model.embed_dim
    head_size = int(hidden_size / num_heads)
    qkv_bias = (
        value.view(3, num_heads, head_size).permute(1, 0, 2).contiguous().view(hidden_size * 3)
    )
    return qkv_bias


def filter_keys(key, value, cfg):
    """
    Filter the state_dict keys and values to match LiBai's MAE model
    """
    if "norm1" in key:
        key = key.replace("norm1", "input_layernorm")
    elif "attn.qkv" in key:
        key = key.replace("attn.qkv", "self_attention.query_key_value")
        if "weight" in key:
            value = convert_qkv_weight(cfg, value)
        if "bias" in key:
            value = convert_qkv_bias(cfg, value)
    elif "attn.proj" in key:
        key = key.replace("attn.proj", "self_attention.dense")
    elif "norm2" in key:
        key = key.replace("norm2", "post_attention_layernorm")
    elif "mlp.fc1" in key:
        key = key.replace("mlp.fc1", "mlp.dense_h_to_4h")
    elif "mlp.fc2" in key:
        key = key.replace("mlp.fc2", "mlp.dense_4h_to_h")
    elif "fc_norm" in key:
        key = key.replace("fc_norm", "norm")
    return key, value


def load_torch_checkpoint(model, cfg, path="./mae_finetuned_vit_base.pth", strict=False):
    """
    Load checkpoint from the given torch weights.
    Torch weight can be downloaded from the original repo:
    https://github.com/facebookresearch/mae
    """
    torch_dict = torch.load(path, map_location="cpu")["model"]
    parameters = torch_dict
    new_parameters = dict()
    for key, value in parameters.items():
        if "num_batches_tracked" not in key:
            # to global tensor
            key, val = filter_keys(key, value, cfg)
            val = val.detach().cpu().numpy()
            val = flow.tensor(val).to_global(
                sbp=flow.sbp.broadcast, placement=flow.placement("cuda", ranks=[0])
            )
            new_parameters[key] = val
    model.load_state_dict(new_parameters, strict=strict)
    print("Successfully loaded torch MAE checkpoint.")
    return model
[((683, 710), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (700, 710), False, 'import logging\n'), ((2885, 2921), 'torch.load', 'torch.load', (['path'], {'map_location': '"""cpu"""'}), "(path, map_location='cpu')\n", (2895, 2921), False, 'import torch\n'), ((3220, 3236), 'oneflow.tensor', 'flow.tensor', (['val'], {}), '(val)\n', (3231, 3236), True, 'import oneflow as flow\n'), ((3298, 3331), 'oneflow.placement', 'flow.placement', (['"""cuda"""'], {'ranks': '[0]'}), "('cuda', ranks=[0])\n", (3312, 3331), True, 'import oneflow as flow\n')]
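The head/qkv reshuffling in `convert_qkv_weight` above is easiest to see at the shape level. This self-contained snippet applies the same view/permute/view chain to a random tensor; the dimensions are arbitrary small values, not the real ViT sizes (a ViT-Base checkpoint would have num_heads=12 and embed_dim=768).

import torch

# Arbitrary small dimensions for illustration only.
num_heads, hidden_size = 4, 8
head_size = hidden_size // num_heads
qkv_weight = torch.randn(3 * hidden_size, hidden_size)  # torch layout: (3*H, H)

libai_qkv = (
    qkv_weight.view(3, num_heads, head_size, hidden_size)
    .permute(1, 0, 2, 3)  # group by head first, then q/k/v within each head
    .contiguous()
    .view(hidden_size * 3, hidden_size)
)
print(libai_qkv.shape)  # torch.Size([24, 8])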
taskmaker1/dthm4kaiako
dthm4kaiako/events/__init__.py
681babc10b3223b5ae7fdf19b98c53d2bef4ea1a
"""Module for events application."""
[]
K0gata/SGLI_Python_output_tool
spot/level1.py
1368e0408edd737a5109d0523db6c147faa80b97
import numpy as np
import logging
from decimal import Decimal, ROUND_HALF_UP
from abc import ABC, abstractmethod, abstractproperty

from spot.utility import bilin_2d
from spot.config import PROJ_TYPE


# =============================
# Level-1 template class
# =============================
class L1Interface(ABC):
    @property
    @abstractmethod
    def PROJECTION_TYPE(self):
        raise NotImplementedError()

    @property
    @abstractmethod
    def ALLOW_PROJECTION_TYPE(self):
        raise NotImplementedError()

    def __init__(self, h5_file, product_id):
        self.h5_file = h5_file
        self.product_id = product_id

        geo_data_grp_attrs = self.h5_file['Geometry_data'].attrs
        self.geo_n_pix = geo_data_grp_attrs['Number_of_pixels'][0]
        self.geo_n_lin = geo_data_grp_attrs['Number_of_lines'][0]

        img_data_grp_attrs = self.h5_file['Image_data'].attrs
        self.img_n_pix = img_data_grp_attrs['Number_of_pixels'][0]
        self.img_n_lin = img_data_grp_attrs['Number_of_lines'][0]

    def get_product_data(self, prod_name: str):
        dset = self.h5_file['Image_data/' + prod_name]

        # Return uint16 type data if the product is QA_flag or Line_tai93
        if 'QA_flag' == prod_name or 'Line_tai93' == prod_name:
            return dset[:]

        # Validate
        data = dset[:].astype(np.float32)
        if 'Error_DN' in dset.attrs:
            data[data == dset.attrs['Error_DN'][0]] = np.NaN
        with np.warnings.catch_warnings():
            np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)')
            if 'Maximum_valid_DN' in dset.attrs:
                data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN
            if 'Minimum_valid_DN' in dset.attrs:
                data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN

        # Convert DN to physical value
        data = data * dset.attrs['Slope'][0] + dset.attrs['Offset'][0]

        return data

    @abstractmethod
    def get_geometry_data(self, data_name: str, **kwargs):
        raise NotImplementedError()

    @abstractmethod
    def get_geometry_data_list(self):
        raise NotImplementedError()

    def get_product_data_list(self):
        return list(self.h5_file['/Image_data'].keys())

    def get_unit(self, prod_name: str):
        if 'Rt_' in prod_name:
            return 'NA'

        # Get attrs set
        unit_name = 'Unit'
        attrs = self.h5_file['/Image_data/' + prod_name].attrs

        # Get unit
        if unit_name not in attrs:
            return 'NA'
        return attrs[unit_name][0].decode('UTF-8')


# =============================
# Level-1 map-projection class
# =============================
class Scene(L1Interface):
    PROJECTION_TYPE = PROJ_TYPE.SCENE.name
    ALLOW_PROJECTION_TYPE = [PROJECTION_TYPE, PROJ_TYPE.EQR.name]

    def __init__(self, h5_file, product_id):
        super().__init__(h5_file, product_id)
        self.scene_number = h5_file['/Global_attributes'].attrs['Scene_number'][0]
        self.path_number = h5_file['/Global_attributes'].attrs['RSP_path_number'][0]

        img_data_grp_attrs = self.h5_file['Image_data'].attrs
        self.img_spatial_reso = img_data_grp_attrs['Grid_interval'][0]

    def get_geometry_data(self, data_name: str, **kwargs):
        interval = kwargs['interval']

        dset = self.h5_file['Geometry_data/' + data_name]
        data = dset[:]
        # Latitude and Longitude are already stored as physical values, so
        # they need no slope/offset scaling.
        if 'Latitude' != data_name and 'Longitude' != data_name:
            data = data.astype(np.float32) * dset.attrs['Slope'][0] + dset.attrs['Offset'][0]

        # Finish if interval is none
        if interval is None or interval == 'none':
            return data

        # Interpolate raw data
        if interval == 'auto':
            interp_interval = dset.attrs['Resampling_interval'][0]
        else:
            interp_interval = interval

        lon_mode = False
        if 'Longitude' == data_name:
            lon_mode = True
        if interp_interval > 1:
            data = bilin_2d(data, interp_interval, lon_mode)

        # Trim away the excess pixel/line
        (data_size_lin, data_size_pxl) = data.shape
        if (kwargs['fit_img_size'] is True) and (self.img_n_lin <=
data_size_lin) and (self.img_n_pix <= data_size_pxl): data = data[:self.img_n_lin, :self.img_n_pix] return data def get_geometry_data_list(self): return list(self.h5_file['/Geometry_data'].keys()) def get_allow_projection_type(self): return self.ALLOW_PROJECTION_TYPE # ============================= # Level-1 sub-processing level class # ============================= class L1B(Scene): # ----------------------------- # Public # ----------------------------- def get_product_data(self, prod_name:str): if 'Land_water_flag' in prod_name: return self._get_land_water_flag() if 'Lt_' in prod_name: return self._get_Lt(prod_name) if 'Rt_' in prod_name: return self._get_Rt(prod_name) if 'Stray_light_correction_flag_' in prod_name: return self._get_stray_light_correction_flag(prod_name) return super().get_product_data(prod_name) # ----------------------------- # Private # ----------------------------- def _get_land_water_flag(self): dset = self.h5_file['Image_data/Land_water_flag'] data = dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data == dset.attrs['Error_value'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_value'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_value'][0]] = np.NaN return data def _get_Lt(self, prod_name): dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] mask = dset.attrs['Mask'][0] data = np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope'] + dset.attrs['Offset'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN return data def _get_Rt(self, prod_name): prod_name = prod_name.replace('Rt_', 'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] mask = dset.attrs['Mask'][0] data = np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope_reflectance'] + dset.attrs['Offset_reflectance'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN cos_theta_0 = np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith', interval='auto', fit_img_size=True))) data = data / cos_theta_0 return data def _get_stray_light_correction_flag(self, prod_name): prod_name = prod_name.replace('Stray_light_correction_flag_', 'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] data = np.bitwise_and(dn_data, 0x8000) data[dn_data == dset.attrs['Error_DN']] = 0 return data > 0 class VNRL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for prod in prod_list: if 'Lt_' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list) return prod_list class IRSL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for prod in prod_list: if 'Lt_SW' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list) return prod_list # EOF
[((7586, 7616), 'numpy.bitwise_and', 'np.bitwise_and', (['dn_data', '(32768)'], {}), '(dn_data, 32768)\n', (7600, 7616), True, 'import numpy as np\n'), ((1477, 1505), 'numpy.warnings.catch_warnings', 'np.warnings.catch_warnings', ([], {}), '()\n', (1503, 1505), True, 'import numpy as np\n'), ((1519, 1606), 'numpy.warnings.filterwarnings', 'np.warnings.filterwarnings', (['"""ignore"""', '"""invalid value encountered in (greater|less)"""'], {}), "('ignore',\n 'invalid value encountered in (greater|less)')\n", (1545, 1606), True, 'import numpy as np\n'), ((4035, 4076), 'spot.utility.bilin_2d', 'bilin_2d', (['data', 'interp_interval', 'lon_mode'], {}), '(data, interp_interval, lon_mode)\n', (4043, 4076), False, 'from spot.utility import bilin_2d\n'), ((5563, 5591), 'numpy.warnings.catch_warnings', 'np.warnings.catch_warnings', ([], {}), '()\n', (5589, 5591), True, 'import numpy as np\n'), ((5605, 5692), 'numpy.warnings.filterwarnings', 'np.warnings.filterwarnings', (['"""ignore"""', '"""invalid value encountered in (greater|less)"""'], {}), "('ignore',\n 'invalid value encountered in (greater|less)')\n", (5631, 5692), True, 'import numpy as np\n'), ((6205, 6233), 'numpy.warnings.catch_warnings', 'np.warnings.catch_warnings', ([], {}), '()\n', (6231, 6233), True, 'import numpy as np\n'), ((6247, 6334), 'numpy.warnings.filterwarnings', 'np.warnings.filterwarnings', (['"""ignore"""', '"""invalid value encountered in (greater|less)"""'], {}), "('ignore',\n 'invalid value encountered in (greater|less)')\n", (6273, 6334), True, 'import numpy as np\n'), ((6917, 6945), 'numpy.warnings.catch_warnings', 'np.warnings.catch_warnings', ([], {}), '()\n', (6943, 6945), True, 'import numpy as np\n'), ((6959, 7046), 'numpy.warnings.filterwarnings', 'np.warnings.filterwarnings', (['"""ignore"""', '"""invalid value encountered in (greater|less)"""'], {}), "('ignore',\n 'invalid value encountered in (greater|less)')\n", (6985, 7046), True, 'import numpy as np\n'), ((6021, 6050), 'numpy.bitwise_and', 'np.bitwise_and', (['dn_data', 'mask'], {}), '(dn_data, mask)\n', (6035, 6050), True, 'import numpy as np\n'), ((6709, 6738), 'numpy.bitwise_and', 'np.bitwise_and', (['dn_data', 'mask'], {}), '(dn_data, mask)\n', (6723, 6738), True, 'import numpy as np\n')]
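A hypothetical end-to-end sketch of the classes above: open a GCOM-C/SGLI Level-1B granule with h5py and read geometry and product data through `VNRL1B`. The file name, the product_id string, and the band name are invented placeholders, not real product identifiers.

import h5py

from spot.level1 import VNRL1B

# 'sample_sgli_vnr_l1b.h5' and the product_id value are placeholders.
with h5py.File('sample_sgli_vnr_l1b.h5', 'r') as h5:
    l1b = VNRL1B(h5, product_id='VNRDQ1B')
    lat = l1b.get_geometry_data('Latitude', interval='auto', fit_img_size=True)
    rt = l1b.get_product_data('Rt_VN08')  # reflectance derived from Lt_VN08
    print(l1b.get_unit('Lt_VN08'), rt.shape, lat.shape)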
Alvin1994/leetcode-python3-
168. Excel Sheet Column Title.py
ba2bde873c925554cc39f2bd13be81967713477d
class Solution:
    def convertToTitle(self, n: int) -> str:
        capitals = [chr(x) for x in range(ord('A'), ord('Z')+1)]
        result = []
        while n > 0:
            # Bijective base-26: offset by 1 so that 26 -> 'Z' and 27 -> 'AA',
            # and advance to the next digit with integer division.
            result.insert(0, capitals[(n-1) % len(capitals)])
            n = (n-1) // len(capitals)
        return ''.join(result)
[]
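A quick sanity check of the conversion above; 26 and 701 are the classic boundary cases (values verified by hand).

s = Solution()
for n in (1, 26, 27, 28, 701):
    print(n, s.convertToTitle(n))
# 1 A, 26 Z, 27 AA, 28 AB, 701 ZY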
Martijnve23/catapult
devil/devil/utils/cmd_helper.py
5c63b19d221af6a12889e8727acc85d93892cab7
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """A wrapper for subprocess to make calling shell commands easier.""" import codecs import logging import os import pipes import select import signal import string import subprocess import sys import time CATAPULT_ROOT_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), '..', '..', '..')) SIX_PATH = os.path.join(CATAPULT_ROOT_PATH, 'third_party', 'six') if SIX_PATH not in sys.path: sys.path.append(SIX_PATH) import six from devil import base_error logger = logging.getLogger(__name__) _SafeShellChars = frozenset(string.ascii_letters + string.digits + '@%_-+=:,./') # Cache the string-escape codec to ensure subprocess can find it # later. Return value doesn't matter. if six.PY2: codecs.lookup('string-escape') def SingleQuote(s): """Return an shell-escaped version of the string using single quotes. Reliably quote a string which may contain unsafe characters (e.g. space, quote, or other special characters such as '$'). The returned value can be used in a shell command line as one token that gets to be interpreted literally. Args: s: The string to quote. Return: The string quoted using single quotes. """ return pipes.quote(s) def DoubleQuote(s): """Return an shell-escaped version of the string using double quotes. Reliably quote a string which may contain unsafe characters (e.g. space or quote characters), while retaining some shell features such as variable interpolation. The returned value can be used in a shell command line as one token that gets to be further interpreted by the shell. The set of characters that retain their special meaning may depend on the shell implementation. This set usually includes: '$', '`', '\', '!', '*', and '@'. Args: s: The string to quote. Return: The string quoted using double quotes. """ if not s: return '""' elif all(c in _SafeShellChars for c in s): return s else: return '"' + s.replace('"', '\\"') + '"' def ShrinkToSnippet(cmd_parts, var_name, var_value): """Constructs a shell snippet for a command using a variable to shrink it. Takes into account all quoting that needs to happen. Args: cmd_parts: A list of command arguments. var_name: The variable that holds var_value. var_value: The string to replace in cmd_parts with $var_name Returns: A shell snippet that does not include setting the variable. """ def shrink(value): parts = (x and SingleQuote(x) for x in value.split(var_value)) with_substitutions = ('"$%s"' % var_name).join(parts) return with_substitutions or "''" return ' '.join(shrink(part) for part in cmd_parts) def Popen(args, stdin=None, stdout=None, stderr=None, shell=None, cwd=None, env=None): # preexec_fn isn't supported on windows. # pylint: disable=unexpected-keyword-arg if sys.platform == 'win32': close_fds = (stdin is None and stdout is None and stderr is None) preexec_fn = None else: close_fds = True preexec_fn = lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL) if six.PY2: return subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn ) else: # opens stdout in text mode, so that caller side always get 'str', # and there will be no type mismatch error. # Ignore any decoding error, so that caller will not crash due to # uncaught exception. 
Decoding errors are unavoidable, as we # do not know the encoding of the output, and in some output there # will be multiple encodings (e.g. adb logcat) return subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn, universal_newlines=True, encoding='utf-8', errors='ignore' ) def Call(args, stdout=None, stderr=None, shell=None, cwd=None, env=None): pipe = Popen( args, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd, env=env) pipe.communicate() return pipe.wait() def RunCmd(args, cwd=None): """Opens a subprocess to execute a program and returns its return value. Args: args: A string or a sequence of program arguments. The program to execute is the string or the first item in the args sequence. cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. Returns: Return code from the command execution. """ logger.debug(str(args) + ' ' + (cwd or '')) return Call(args, cwd=cwd) def GetCmdOutput(args, cwd=None, shell=False, env=None): """Open a subprocess to execute a program and returns its output. Args: args: A string or a sequence of program arguments. The program to execute is the string or the first item in the args sequence. cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. shell: Whether to execute args as a shell command. env: If not None, a mapping that defines environment variables for the subprocess. Returns: Captures and returns the command's stdout. Prints the command's stderr to logger (which defaults to stdout). """ (_, output) = GetCmdStatusAndOutput(args, cwd, shell, env) return output def _ValidateAndLogCommand(args, cwd, shell): if isinstance(args, six.string_types): if not shell: raise Exception('string args must be run with shell=True') else: if shell: raise Exception('array args must be run with shell=False') args = ' '.join(SingleQuote(str(c)) for c in args) if cwd is None: cwd = '' else: cwd = ':' + cwd logger.debug('[host]%s> %s', cwd, args) return args def GetCmdStatusAndOutput(args, cwd=None, shell=False, env=None, merge_stderr=False): """Executes a subprocess and returns its exit code and output. Args: args: A string or a sequence of program arguments. The program to execute is the string or the first item in the args sequence. cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. shell: Whether to execute args as a shell command. Must be True if args is a string and False if args is a sequence. env: If not None, a mapping that defines environment variables for the subprocess. merge_stderr: If True, captures stderr as part of stdout. Returns: The 2-tuple (exit code, stdout). """ status, stdout, stderr = GetCmdStatusOutputAndError( args, cwd=cwd, shell=shell, env=env, merge_stderr=merge_stderr) if stderr: logger.critical('STDERR: %s', stderr) logger.debug('STDOUT: %s%s', stdout[:4096].rstrip(), '<truncated>' if len(stdout) > 4096 else '') return (status, stdout) def StartCmd(args, cwd=None, shell=False, env=None): """Starts a subprocess and returns a handle to the process. Args: args: A string or a sequence of program arguments. The program to execute is the string or the first item in the args sequence. cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. shell: Whether to execute args as a shell command. Must be True if args is a string and False if args is a sequence. 
env: If not None, a mapping that defines environment variables for the subprocess. Returns: A process handle from subprocess.Popen. """ _ValidateAndLogCommand(args, cwd, shell) return Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell, cwd=cwd, env=env) def GetCmdStatusOutputAndError(args, cwd=None, shell=False, env=None, merge_stderr=False): """Executes a subprocess and returns its exit code, output, and errors. Args: args: A string or a sequence of program arguments. The program to execute is the string or the first item in the args sequence. cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. shell: Whether to execute args as a shell command. Must be True if args is a string and False if args is a sequence. env: If not None, a mapping that defines environment variables for the subprocess. merge_stderr: If True, captures stderr as part of stdout. Returns: The 3-tuple (exit code, stdout, stderr). """ _ValidateAndLogCommand(args, cwd, shell) stderr = subprocess.STDOUT if merge_stderr else subprocess.PIPE pipe = Popen( args, stdout=subprocess.PIPE, stderr=stderr, shell=shell, cwd=cwd, env=env) stdout, stderr = pipe.communicate() return (pipe.returncode, stdout, stderr) class TimeoutError(base_error.BaseError): """Module-specific timeout exception.""" def __init__(self, output=None): super(TimeoutError, self).__init__('Timeout') self._output = output @property def output(self): return self._output def _read_and_decode(fd, buffer_size): data = os.read(fd, buffer_size) if data and six.PY3: data = data.decode('utf-8', errors='ignore') return data def _IterProcessStdoutFcntl(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): """An fcntl-based implementation of _IterProcessStdout.""" # pylint: disable=too-many-nested-blocks import fcntl try: # Enable non-blocking reads from the child's stdout. child_fd = process.stdout.fileno() fl = fcntl.fcntl(child_fd, fcntl.F_GETFL) fcntl.fcntl(child_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK) end_time = (time.time() + timeout) if timeout else None iter_end_time = (time.time() + iter_timeout) if iter_timeout else None while True: if end_time and time.time() > end_time: raise TimeoutError() if iter_end_time and time.time() > iter_end_time: yield None iter_end_time = time.time() + iter_timeout if iter_end_time: iter_aware_poll_interval = min(poll_interval, max(0, iter_end_time - time.time())) else: iter_aware_poll_interval = poll_interval read_fds, _, _ = select.select([child_fd], [], [], iter_aware_poll_interval) if child_fd in read_fds: data = _read_and_decode(child_fd, buffer_size) if not data: break yield data if process.poll() is not None: # If process is closed, keep checking for output data (because of timing # issues). while True: read_fds, _, _ = select.select([child_fd], [], [], iter_aware_poll_interval) if child_fd in read_fds: data = _read_and_decode(child_fd, buffer_size) if data: yield data continue break break finally: try: if process.returncode is None: # Make sure the process doesn't stick around if we fail with an # exception. process.kill() except OSError: pass process.wait() def _IterProcessStdoutQueue(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): """A Queue.Queue-based implementation of _IterProcessStdout. TODO(jbudorick): Evaluate whether this is a suitable replacement for _IterProcessStdoutFcntl on all platforms. 
""" # pylint: disable=unused-argument if six.PY3: import queue else: import Queue as queue import threading stdout_queue = queue.Queue() def read_process_stdout(): # TODO(jbudorick): Pick an appropriate read size here. while True: try: output_chunk = _read_and_decode(process.stdout.fileno(), buffer_size) except IOError: break stdout_queue.put(output_chunk, True) if not output_chunk and process.poll() is not None: break reader_thread = threading.Thread(target=read_process_stdout) reader_thread.start() end_time = (time.time() + timeout) if timeout else None try: while True: if end_time and time.time() > end_time: raise TimeoutError() try: s = stdout_queue.get(True, iter_timeout) if not s: break yield s except queue.Empty: yield None finally: try: if process.returncode is None: # Make sure the process doesn't stick around if we fail with an # exception. process.kill() except OSError: pass process.wait() reader_thread.join() _IterProcessStdout = (_IterProcessStdoutQueue if sys.platform == 'win32' else _IterProcessStdoutFcntl) """Iterate over a process's stdout. This is intentionally not public. Args: process: The process in question. iter_timeout: An optional length of time, in seconds, to wait in between each iteration. If no output is received in the given time, this generator will yield None. timeout: An optional length of time, in seconds, during which the process must finish. If it fails to do so, a TimeoutError will be raised. buffer_size: The maximum number of bytes to read (and thus yield) at once. poll_interval: The length of time to wait in calls to `select.select`. If iter_timeout is set, the remaining length of time in the iteration may take precedence. Raises: TimeoutError: if timeout is set and the process does not complete. Yields: basestrings of data or None. """ def GetCmdStatusAndOutputWithTimeout(args, timeout, cwd=None, shell=False, logfile=None, env=None): """Executes a subprocess with a timeout. Args: args: List of arguments to the program, the program to execute is the first element. timeout: the timeout in seconds or None to wait forever. cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. shell: Whether to execute args as a shell command. Must be True if args is a string and False if args is a sequence. logfile: Optional file-like object that will receive output from the command as it is running. env: If not None, a mapping that defines environment variables for the subprocess. Returns: The 2-tuple (exit code, output). Raises: TimeoutError on timeout. """ _ValidateAndLogCommand(args, cwd, shell) output = six.StringIO() process = Popen( args, cwd=cwd, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env) try: for data in _IterProcessStdout(process, timeout=timeout): if logfile: logfile.write(data) output.write(data) except TimeoutError: raise TimeoutError(output.getvalue()) str_output = output.getvalue() logger.debug('STDOUT+STDERR: %s%s', str_output[:4096].rstrip(), '<truncated>' if len(str_output) > 4096 else '') return process.returncode, str_output def IterCmdOutputLines(args, iter_timeout=None, timeout=None, cwd=None, shell=False, env=None, check_status=True): """Executes a subprocess and continuously yields lines from its output. Args: args: List of arguments to the program, the program to execute is the first element. iter_timeout: Timeout for each iteration, in seconds. timeout: Timeout for the entire command, in seconds. 
cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. shell: Whether to execute args as a shell command. Must be True if args is a string and False if args is a sequence. env: If not None, a mapping that defines environment variables for the subprocess. check_status: A boolean indicating whether to check the exit status of the process after all output has been read. Yields: The output of the subprocess, line by line. Raises: CalledProcessError if check_status is True and the process exited with a non-zero exit status. """ cmd = _ValidateAndLogCommand(args, cwd, shell) process = Popen( args, cwd=cwd, shell=shell, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) return _IterCmdOutputLines( process, cmd, iter_timeout=iter_timeout, timeout=timeout, check_status=check_status) def _IterCmdOutputLines(process, cmd, iter_timeout=None, timeout=None, check_status=True): buffer_output = '' iter_end = None cur_iter_timeout = None if iter_timeout: iter_end = time.time() + iter_timeout cur_iter_timeout = iter_timeout for data in _IterProcessStdout( process, iter_timeout=cur_iter_timeout, timeout=timeout): if iter_timeout: # Check whether the current iteration has timed out. cur_iter_timeout = iter_end - time.time() if data is None or cur_iter_timeout < 0: yield None iter_end = time.time() + iter_timeout continue else: assert data is not None, ( 'Iteration received no data despite no iter_timeout being set. ' 'cmd: %s' % cmd) # Construct lines to yield from raw data. buffer_output += data has_incomplete_line = buffer_output[-1] not in '\r\n' lines = buffer_output.splitlines() buffer_output = lines.pop() if has_incomplete_line else '' for line in lines: yield line if iter_timeout: iter_end = time.time() + iter_timeout if buffer_output: yield buffer_output if check_status and process.returncode: raise subprocess.CalledProcessError(process.returncode, cmd)
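# Usage sketch for the helpers above -- a minimal, hypothetical example, not
# part of the original module. It assumes the module is importable as
# devil.utils.cmd_helper (its location in catapult); the echo command and its
# arguments are placeholders.

from devil.utils import cmd_helper

def _demo():
  # One-shot: run a command and collect (exit_code, stdout) when it finishes.
  status, output = cmd_helper.GetCmdStatusAndOutput(['echo', 'hello'])
  assert status == 0 and output.strip() == 'hello'

  # Streaming: yield stdout line by line; a module-level TimeoutError is
  # raised if the whole command takes longer than 10 seconds.
  for line in cmd_helper.IterCmdOutputLines(['echo', 'hello'], timeout=10):
    print(line)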
[((485, 539), 'os.path.join', 'os.path.join', (['CATAPULT_ROOT_PATH', '"""third_party"""', '"""six"""'], {}), "(CATAPULT_ROOT_PATH, 'third_party', 'six')\n", (497, 539), False, 'import os\n'), ((650, 677), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (667, 677), False, 'import logging\n'), ((572, 597), 'sys.path.append', 'sys.path.append', (['SIX_PATH'], {}), '(SIX_PATH)\n', (587, 597), False, 'import sys\n'), ((878, 908), 'codecs.lookup', 'codecs.lookup', (['"""string-escape"""'], {}), "('string-escape')\n", (891, 908), False, 'import codecs\n'), ((1348, 1362), 'pipes.quote', 'pipes.quote', (['s'], {}), '(s)\n', (1359, 1362), False, 'import pipes\n'), ((9598, 9622), 'os.read', 'os.read', (['fd', 'buffer_size'], {}), '(fd, buffer_size)\n', (9605, 9622), False, 'import os\n'), ((12335, 12348), 'Queue.Queue', 'queue.Queue', ([], {}), '()\n', (12346, 12348), True, 'import Queue as queue\n'), ((12713, 12757), 'threading.Thread', 'threading.Thread', ([], {'target': 'read_process_stdout'}), '(target=read_process_stdout)\n', (12729, 12757), False, 'import threading\n'), ((15356, 15370), 'six.StringIO', 'six.StringIO', ([], {}), '()\n', (15368, 15370), False, 'import six\n'), ((428, 453), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (443, 453), False, 'import os\n'), ((3309, 3459), 'subprocess.Popen', 'subprocess.Popen', ([], {'args': 'args', 'cwd': 'cwd', 'stdin': 'stdin', 'stdout': 'stdout', 'stderr': 'stderr', 'shell': 'shell', 'close_fds': 'close_fds', 'env': 'env', 'preexec_fn': 'preexec_fn'}), '(args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=\n stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn)\n', (3325, 3459), False, 'import subprocess\n'), ((3910, 4125), 'subprocess.Popen', 'subprocess.Popen', ([], {'args': 'args', 'cwd': 'cwd', 'stdin': 'stdin', 'stdout': 'stdout', 'stderr': 'stderr', 'shell': 'shell', 'close_fds': 'close_fds', 'env': 'env', 'preexec_fn': 'preexec_fn', 'universal_newlines': '(True)', 'encoding': '"""utf-8"""', 'errors': '"""ignore"""'}), "(args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=\n stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=\n preexec_fn, universal_newlines=True, encoding='utf-8', errors='ignore')\n", (3926, 4125), False, 'import subprocess\n'), ((10159, 10195), 'fcntl.fcntl', 'fcntl.fcntl', (['child_fd', 'fcntl.F_GETFL'], {}), '(child_fd, fcntl.F_GETFL)\n', (10170, 10195), False, 'import fcntl\n'), ((10200, 10256), 'fcntl.fcntl', 'fcntl.fcntl', (['child_fd', 'fcntl.F_SETFL', '(fl | os.O_NONBLOCK)'], {}), '(child_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)\n', (10211, 10256), False, 'import fcntl\n'), ((18733, 18787), 'subprocess.CalledProcessError', 'subprocess.CalledProcessError', (['process.returncode', 'cmd'], {}), '(process.returncode, cmd)\n', (18762, 18787), False, 'import subprocess\n'), ((3237, 3282), 'signal.signal', 'signal.signal', (['signal.SIGPIPE', 'signal.SIG_DFL'], {}), '(signal.SIGPIPE, signal.SIG_DFL)\n', (3250, 3282), False, 'import signal\n'), ((10851, 10910), 'select.select', 'select.select', (['[child_fd]', '[]', '[]', 'iter_aware_poll_interval'], {}), '([child_fd], [], [], iter_aware_poll_interval)\n', (10864, 10910), False, 'import select\n'), ((12797, 12808), 'time.time', 'time.time', ([], {}), '()\n', (12806, 12808), False, 'import time\n'), ((17730, 17741), 'time.time', 'time.time', ([], {}), '()\n', (17739, 17741), False, 'import time\n'), ((10274, 10285), 'time.time', 'time.time', ([], {}), '()\n', (10283, 10285), False, 
'import time\n'), ((10339, 10350), 'time.time', 'time.time', ([], {}), '()\n', (10348, 10350), False, 'import time\n'), ((18008, 18019), 'time.time', 'time.time', ([], {}), '()\n', (18017, 18019), False, 'import time\n'), ((10432, 10443), 'time.time', 'time.time', ([], {}), '()\n', (10441, 10443), False, 'import time\n'), ((10512, 10523), 'time.time', 'time.time', ([], {}), '()\n', (10521, 10523), False, 'import time\n'), ((10584, 10595), 'time.time', 'time.time', ([], {}), '()\n', (10593, 10595), False, 'import time\n'), ((11275, 11334), 'select.select', 'select.select', (['[child_fd]', '[]', '[]', 'iter_aware_poll_interval'], {}), '([child_fd], [], [], iter_aware_poll_interval)\n', (11288, 11334), False, 'import select\n'), ((12887, 12898), 'time.time', 'time.time', ([], {}), '()\n', (12896, 12898), False, 'import time\n'), ((18105, 18116), 'time.time', 'time.time', ([], {}), '()\n', (18114, 18116), False, 'import time\n'), ((18609, 18620), 'time.time', 'time.time', ([], {}), '()\n', (18618, 18620), False, 'import time\n'), ((10752, 10763), 'time.time', 'time.time', ([], {}), '()\n', (10761, 10763), False, 'import time\n')]
AyanSamanta23/moni-moni
services/server/server/apps/checkout/migrations/0001_initial.py
8e8aa4edf4cd2e2b005f6dbe8c885ecc791e6a2b
# Generated by Django 4.0.2 on 2022-02-26 15:52 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='FundingOptions', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('funding_name', models.CharField(help_text='Required', max_length=255, verbose_name='funding_name')), ('funding_price', models.DecimalField(decimal_places=2, help_text='Required', max_digits=1000, verbose_name='funding price')), ('funding_timeframe', models.CharField(help_text='Required', max_length=255, verbose_name='funding timeframe')), ('funding_window', models.CharField(help_text='Required', max_length=255, verbose_name='funding window')), ], options={ 'verbose_name': 'Funding Option', 'verbose_name_plural': 'Funding Options', }, ), migrations.CreateModel( name='PaymentSelections', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(help_text='Required', max_length=255, verbose_name='name')), ('is_active', models.BooleanField(default=True)), ], options={ 'verbose_name': 'Payment Selection', 'verbose_name_plural': 'Payment Selections', }, ), ]
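# Sketch of the model classes this auto-generated migration implies. The
# field names and options are read directly from the operations above; the
# placement (a models.py in the checkout app) is an assumption.

from django.db import models


class FundingOptions(models.Model):
    funding_name = models.CharField('funding_name', max_length=255, help_text='Required')
    funding_price = models.DecimalField('funding price', max_digits=1000, decimal_places=2, help_text='Required')
    funding_timeframe = models.CharField('funding timeframe', max_length=255, help_text='Required')
    funding_window = models.CharField('funding window', max_length=255, help_text='Required')

    class Meta:
        verbose_name = 'Funding Option'
        verbose_name_plural = 'Funding Options'


class PaymentSelections(models.Model):
    name = models.CharField('name', max_length=255, help_text='Required')
    is_active = models.BooleanField(default=True)

    class Meta:
        verbose_name = 'Payment Selection'
        verbose_name_plural = 'Payment Selections'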
[((310, 406), 'django.db.models.BigAutoField', 'models.BigAutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (329, 406), False, 'from django.db import migrations, models\n'), ((438, 526), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""Required"""', 'max_length': '(255)', 'verbose_name': '"""funding_name"""'}), "(help_text='Required', max_length=255, verbose_name=\n 'funding_name')\n", (454, 526), False, 'from django.db import migrations, models\n'), ((558, 668), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'help_text': '"""Required"""', 'max_digits': '(1000)', 'verbose_name': '"""funding price"""'}), "(decimal_places=2, help_text='Required', max_digits=1000,\n verbose_name='funding price')\n", (577, 668), False, 'from django.db import migrations, models\n'), ((705, 798), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""Required"""', 'max_length': '(255)', 'verbose_name': '"""funding timeframe"""'}), "(help_text='Required', max_length=255, verbose_name=\n 'funding timeframe')\n", (721, 798), False, 'from django.db import migrations, models\n'), ((831, 921), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""Required"""', 'max_length': '(255)', 'verbose_name': '"""funding window"""'}), "(help_text='Required', max_length=255, verbose_name=\n 'funding window')\n", (847, 921), False, 'from django.db import migrations, models\n'), ((1204, 1300), 'django.db.models.BigAutoField', 'models.BigAutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1223, 1300), False, 'from django.db import migrations, models\n'), ((1324, 1399), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""Required"""', 'max_length': '(255)', 'verbose_name': '"""name"""'}), "(help_text='Required', max_length=255, verbose_name='name')\n", (1340, 1399), False, 'from django.db import migrations, models\n'), ((1432, 1465), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1451, 1465), False, 'from django.db import migrations, models\n')]
urchinpro/L2-forms
api/to_astm.py
37f33386984efbb2d1e92c73d915256247801109
import itertools from astm import codec from collections import defaultdict from django.utils import timezone import directions.models as directions import directory.models as directory import api.models as api import simplejson as json def get_astm_header() -> list: return ['H|\\^&', None, None, ['1', '2.00'], None, None, None, None, None, None, 'P', '1.00', timezone.now().strftime("%Y%m%d%H%M%S")] def get_leave() -> list: return ['L', 1, 'N'] def get_patient() -> list: return ['P', 1] def get_iss_direction(direction: directions.Napravleniya, analyzer: api.Analyzer, full=False) -> list: r = [] n = 0 iss_list = directions.Issledovaniya.objects.filter(napravleniye=direction) if not full: iss_list = iss_list.filter(doc_confirmation__isnull=True) for i in iss_list: researches = defaultdict(list) for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__analyzer=analyzer, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, analyzer=analyzer) if not rel.exists(): continue rel = rel[0] tube = directions.TubesRegistration.objects.filter(type__fractions=fraction) if not tube.exists(): continue tube = tube[0] researches[tube.pk].append(rel.astm_field) for tpk in researches: n += 1 r.append(['O', n, tpk, None, [[None, x, None, None] for x in researches[tpk]]]) return r def encode(m) -> str: return codec.iter_encode(m) def get_astm(directions_list, analyzer: api.Analyzer, full=False, out=None) -> str: iss = [get_iss_direction(x, analyzer, full) for x in directions_list] m = [get_astm_header(), get_patient()] m = list(itertools.chain(m, *iss)) m.append(get_leave()) if out: out.write(json.dumps(m)) return encode(m) def get_iss_astm(issledovaniya: list, app: api.Application, need_astm=False): m = [get_astm_header(), get_patient()] n = 0 researches = defaultdict(list) for row in issledovaniya: k = row["pk"] i = row["iss"] for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__application_api=app, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, application_api=app) if not rel.exists(): continue rel = rel[0] if rel.is_code: researches[k].append([None, None, None, rel.astm_field]) else: researches[k].append([None, rel.astm_field, None, None]) for tpk in researches: n += 1 m.append(['O', n, tpk, None, researches[tpk]]) m.append(get_leave()) return encode(m)
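# Usage sketch for the encoders above, assuming they are importable as
# api.to_astm inside the same Django project; the primary keys passed in are
# hypothetical.

import api.models as api
import directions.models as directions
from api.to_astm import get_astm

def encode_directions(direction_pks, analyzer_pk):
    analyzer = api.Analyzer.objects.get(pk=analyzer_pk)
    dirs = list(directions.Napravleniya.objects.filter(pk__in=direction_pks))
    # One ASTM message: header, patient record, one order per tube, trailer.
    return get_astm(dirs, analyzer, full=False)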
[((651, 714), 'directions.models.Issledovaniya.objects.filter', 'directions.Issledovaniya.objects.filter', ([], {'napravleniye': 'direction'}), '(napravleniye=direction)\n', (690, 714), True, 'import directions.models as directions\n'), ((1593, 1613), 'astm.codec.iter_encode', 'codec.iter_encode', (['m'], {}), '(m)\n', (1610, 1613), False, 'from astm import codec\n'), ((2099, 2116), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (2110, 2116), False, 'from collections import defaultdict\n'), ((842, 859), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (853, 859), False, 'from collections import defaultdict\n'), ((884, 996), 'directory.models.Fractions.objects.filter', 'directory.Fractions.objects.filter', ([], {'research': 'i.research', 'relationfractionastm__analyzer': 'analyzer', 'hide': '(False)'}), '(research=i.research,\n relationfractionastm__analyzer=analyzer, hide=False)\n', (918, 996), True, 'import directory.models as directory\n'), ((1830, 1854), 'itertools.chain', 'itertools.chain', (['m', '*iss'], {}), '(m, *iss)\n', (1845, 1854), False, 'import itertools\n'), ((2216, 2330), 'directory.models.Fractions.objects.filter', 'directory.Fractions.objects.filter', ([], {'research': 'i.research', 'relationfractionastm__application_api': 'app', 'hide': '(False)'}), '(research=i.research,\n relationfractionastm__application_api=app, hide=False)\n', (2250, 2330), True, 'import directory.models as directory\n'), ((1012, 1089), 'api.models.RelationFractionASTM.objects.filter', 'api.RelationFractionASTM.objects.filter', ([], {'fraction': 'fraction', 'analyzer': 'analyzer'}), '(fraction=fraction, analyzer=analyzer)\n', (1051, 1089), True, 'import api.models as api\n'), ((1192, 1261), 'directions.models.TubesRegistration.objects.filter', 'directions.TubesRegistration.objects.filter', ([], {'type__fractions': 'fraction'}), '(type__fractions=fraction)\n', (1235, 1261), True, 'import directions.models as directions\n'), ((1912, 1925), 'simplejson.dumps', 'json.dumps', (['m'], {}), '(m)\n', (1922, 1925), True, 'import simplejson as json\n'), ((2346, 2425), 'api.models.RelationFractionASTM.objects.filter', 'api.RelationFractionASTM.objects.filter', ([], {'fraction': 'fraction', 'application_api': 'app'}), '(fraction=fraction, application_api=app)\n', (2385, 2425), True, 'import api.models as api\n'), ((368, 382), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (380, 382), False, 'from django.utils import timezone\n')]
CospanDesign/nysa
test/unit/test_som_rom_parser.py
ffe07f0b8fe2f6217e7a862d89b80f1b17163be9
#!/usr/bin/python import unittest import json import sys import os import string sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) from nysa.cbuilder import sdb_component as sdbc from nysa.cbuilder import sdb_object_model as som from nysa.cbuilder.som_rom_parser import parse_rom_image from nysa.cbuilder.som_rom_generator import generate_rom_image from nysa.cbuilder.sdb import SDBInfo from nysa.cbuilder.sdb import SDBWarning from nysa.cbuilder.sdb import SDBError from nysa.common.status import StatusLevel from nysa.common.status import Status class Test (unittest.TestCase): """Unit test SDB Tree""" def setUp(self): pass ''' def test_simple_rom(self): rom_in = ROM1 som = parse_rom_image(rom_in) rom_out = generate_rom_image(som) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) self.assertEqual(rom_in, rom_out) ''' def test_full_dionysus_read(self): from nysa.host.platform_scanner import PlatformScanner pscanner = PlatformScanner() platform_dict = pscanner.get_platforms() platform_names = platform_dict.keys() if "dionysus" not in platform_names: return s = Status() platform_instance = platform_dict["dionysus"](s) platforms = platform_instance.scan() if len(platforms) == 0: return dionysus = platforms[platforms.keys()[0]] #print "Found Dionysus" s.set_level("fatal") s.Verbose("Read SDB") dionysus.read_sdb() def test_full_bus(self): sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral = sm.insert_bus() peripheral.set_name("peripheral") memory = sm.insert_bus() memory.set_name("memory") d1 = sdbc.create_device_record(name = "device 1", size = 0x100) d2 = sdbc.create_device_record(name = "device 2", size = 0x100) m1 = sdbc.create_device_record(name = "memory 1", size = 0x10000) m2 = sdbc.create_device_record(name = "memory 2", size = 0x20000) peripheral.set_child_spacing(0x0010000000) root.set_child_spacing (0x0100000000) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2) rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #rom_in = ROM2 #print_sdb_rom(rom_in) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) self.assertEqual(rom_in, rom_out) def test_full_bus_with_integration(self): sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral = sm.insert_bus() peripheral.set_name("peripheral") memory = sm.insert_bus() memory.set_name("memory") d1 = sdbc.create_device_record(name = "device 1", size = 0x100) d2 = sdbc.create_device_record(name = "device 2", size = 0x100) m1 = sdbc.create_device_record(name = "memory 1", size = 0x10000) m2 = sdbc.create_device_record(name = "memory 2", size = 0x20000) intr = sdbc.create_integration_record("Integration Data", vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2) rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #rom_in = ROM2 #print_sdb_rom(rom_in) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral = 
sm.insert_bus()
        peripheral.set_name("peripheral")
        memory = sm.insert_bus()
        memory.set_name("memory")

        d1 = sdbc.create_device_record(name = "device 1", size = 0x100)
        d2 = sdbc.create_device_record(name = "device 2", size = 0x100)
        m1 = sdbc.create_device_record(name = "memory 1", size = 0x10000)
        m2 = sdbc.create_device_record(name = "memory 2", size = 0x20000)
        intr = sdbc.create_integration_record("Integration Data",
                                              vendor_id = 0x800BEAF15DEADC03,
                                              device_id = 0x00000000)
        url = sdbc.create_repo_url_record("http://www.geocities.com")
        sm.insert_component(root, url)

        peripheral.set_child_spacing(0x0100000000)
        sm.insert_component(peripheral, intr)
        sm.insert_component(peripheral, d1)
        sm.insert_component(peripheral, d2)
        sm.insert_component(memory, m1)
        sm.insert_component(memory, m2)

        rom = generate_rom_image(sm)
        rom_in = sdbc.convert_rom_to_32bit_buffer(rom)
        #print_sdb(rom)
        sm = parse_rom_image(rom_in)
        rom_out = generate_rom_image(sm)
        rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out)
        #print_sdb_rom(rom_out)
        #compare_roms(rom_in, rom_out)
        self.assertEqual(rom_in, rom_out)

    def test_generate_one_sub_bus_with_url_and_synthesis(self):
        sm = som.SOM()
        sm.initialize_root()
        root = sm.get_root()
        peripheral = sm.insert_bus()
        peripheral.set_name("peripheral")
        memory = sm.insert_bus()
        memory.set_name("memory")

        d1 = sdbc.create_device_record(name = "device 1", size = 0x100)
        d2 = sdbc.create_device_record(name = "device 2", size = 0x100)
        m1 = sdbc.create_device_record(name = "memory 1", size = 0x10000)
        m2 = sdbc.create_device_record(name = "memory 2", size = 0x20000)
        intr = sdbc.create_integration_record("Integration Data",
                                              vendor_id = 0x800BEAF15DEADC03,
                                              device_id = 0x00000000)
        url = sdbc.create_repo_url_record("http://www.geocities.com")
        synthesis = sdbc.create_synthesis_record("Synthesis Name", 123, "cool tool", 1.0, "jeff")
        sm.insert_component(root, url)
        sm.insert_component(root, synthesis)

        peripheral.set_child_spacing(0x0100000000)
        sm.insert_component(peripheral, intr)
        sm.insert_component(peripheral, d1)
        sm.insert_component(peripheral, d2)
        sm.insert_component(memory, m1)
        sm.insert_component(memory, m2)

        rom = generate_rom_image(sm)
        rom_in = sdbc.convert_rom_to_32bit_buffer(rom)
        #print_sdb(rom)
        sm = parse_rom_image(rom_in)
        rom_out = generate_rom_image(sm)
        rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out)
        #print_sdb_rom(rom_out)
        #compare_roms(rom_in, rom_out)
        self.assertEqual(rom_in, rom_out)

    def test_parse_rom_image_with_url(self):
        rom_in = ROMD
        #print_sdb(rom)
        sm = parse_rom_image(rom_in)
        rom_out = generate_rom_image(sm)
        rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out)
        print_sdb_rom(rom_out)
        #compare_roms(rom_in, rom_out)
        self.assertEqual(rom_in, rom_out)


def compare_roms(rom_in, rom_out):
    if len(rom_in) != len(rom_out):
        print "Length of rom is not equal!"
        return
    rom_in = rom_in.splitlines()
    rom_out = rom_out.splitlines()
    for i in range (0, len(rom_in), 4):
        if (i % 16 == 0):
            magic = "0x%s" % (rom_in[i].lower())
            last_val = int(rom_in[i + 15], 16) & 0xFF
            print ""
            if (magic == hex(sdbc.SDB_INTERCONNECT_MAGIC) and last_val == 0):
                print "Interconnect"
            elif last_val == 0x01:
                print "Device"
            elif last_val == 0x02:
                print "Bridge"
            elif last_val == 0x80:
                print "Integration"
            elif last_val == 0x81:
                print "URL"
            elif last_val == 0x82:
                print "Synthesis"
            elif last_val == 0xFF:
                print "Empty"
            else:
                print "???"
if rom_in[i] == rom_out[i] and rom_in[i + 1] == rom_out[i + 1] and rom_in[i + 2] == rom_out[i + 2] and rom_in[i + 3] == rom_out[i + 3]: print "%s %s : %s %s" % (rom_in[i], rom_in[i + 1], rom_in[i + 2], rom_in[i + 3]) else: print "%s %s : %s %s != %s %s : %s %s" % (rom_in[i], rom_in[i + 1], rom_in[i + 2], rom_in[i + 3], rom_out[i], rom_out[i + 1], rom_out[i + 2], rom_out[i + 3]) def print_sdb_rom(rom): #rom = sdbc.convert_rom_to_32bit_buffer(rom) rom = rom.splitlines() print "ROM" for i in range (0, len(rom), 4): if (i % 16 == 0): magic = "0x%s" % (rom[i].lower()) last_val = int(rom[i + 15], 16) & 0xFF print "" if (magic == hex(sdbc.SDB_INTERCONNECT_MAGIC) and last_val == 0): print "Interconnect" elif last_val == 0x01: print "Device" elif last_val == 0x02: print "Bridge" elif last_val == 0x80: print "Integration" elif last_val == 0x81: print "URL" elif last_val == 0x82: print "Synthesis" elif last_val == 0xFF: print "Empty" else: print "???" print "%s %s : %s %s" % (rom[i], rom[i + 1], rom[i + 2], rom[i + 3]) ROM1 = "5344422D\n"\ "00010100\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000100\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0105\n"\ "746F7000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000207\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000100\n"\ "80000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "140F0105\n"\ "64657669\n"\ "63652031\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF" ROM2 = "5344422D\n"\ "00020100\n"\ "00000000\n"\ "00000000\n"\ "03000000\n"\ "00000000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0105\n"\ "746F7000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000020\n"\ "00000000\n"\ "00000000\n"\ "00000100\n"\ "00000000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0105\n"\ "70657269\n"\ "70686572\n"\ "616C0000\n"\ "00000000\n"\ "00000002\n"\ "00000000\n"\ "00000040\n"\ "00000100\n"\ "00000000\n"\ "00000200\n"\ "00030000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0105\n"\ "6D656D6F\n"\ "72790000\n"\ "00000000\n"\ "00000000\n"\ "00000002\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF\n"\ "5344422D\n"\ "00020100\n"\ "00000000\n"\ "00000000\n"\ "00000100\n"\ "00000000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0105\n"\ "70657269\n"\ "70686572\n"\ "616C0000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000207\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000100\n"\ "80000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "140F0105\n"\ "64657669\n"\ "63652031\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000207\n"\ "00000001\n"\ "00000000\n"\ "00000003\n"\ "00000100\n"\ "80000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "140F0105\n"\ "64657669\n"\ "63652032\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF\n"\ "5344422D\n"\ "00020100\n"\ "00000100\n"\ "00000000\n"\ 
"00000200\n"\ "00030000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0105\n"\ "6D656D6F\n"\ "72790000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000207\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00010000\n"\ "80000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "140F0105\n"\ "6D656D6F\n"\ "72792031\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000207\n"\ "00000000\n"\ "00010000\n"\ "00000000\n"\ "00030000\n"\ "80000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "140F0105\n"\ "6D656D6F\n"\ "72792032\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF" ROMD = "5344422D\n"\ "00020100\n"\ "00000000\n"\ "00000000\n"\ "00000002\n"\ "00000000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0106\n"\ "746F7000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000020\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "20000000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0106\n"\ "70657269\n"\ "70686572\n"\ "616C0000\n"\ "00000000\n"\ "00000002\n"\ "00000000\n"\ "00000040\n"\ "00000001\n"\ "00000000\n"\ "00000001\n"\ "00800000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0106\n"\ "6D656D6F\n"\ "72790000\n"\ "00000000\n"\ "00000000\n"\ "00000002\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF\n"\ "5344422D\n"\ "00020100\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "20000000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0106\n"\ "70657269\n"\ "70686572\n"\ "616C0000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000207\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000340\n"\ "80000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "140F0106\n"\ "53444200\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000101\n"\ "00000207\n"\ "00000000\n"\ "10000000\n"\ "00000000\n"\ "10000008\n"\ "80000000\n"\ "0000C594\n"\ "00000000\n"\ "00000001\n"\ "140F0107\n"\ "77625F67\n"\ "70696F00\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF\n"\ "5344422D\n"\ "00010100\n"\ "00000001\n"\ "00000000\n"\ "00000001\n"\ "00800000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0106\n"\ "6D656D6F\n"\ "72790000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000502\n"\ "00000207\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00800000\n"\ "80000000\n"\ "0000C594\n"\ "00000000\n"\ "00000001\n"\ "140F0107\n"\ "77625F73\n"\ "6472616D\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF"
[]
jeetsagar/turbojet
src/TF-gui/tftrain.py
9b17edde0a7e01d0fa320261fbc2734ce53577d2
#!python3 import os import pandas as pd import tensorflow as tf from tensorflow.keras import layers os.environ["CUDA_VISIBLE_DEVICES"] = "0" # gpu_devices = tf.config.experimental.list_physical_devices("GPU") # for device in gpu_devices: # tf.config.experimental.set_memory_growth(device, True) def trainModel(data_in, params_in): data_in = data_in.take(2048) data_in = data_in.shuffle(24) data_in = data_in.batch(1024) arch = params_in["Architecture"] dropout = params_in["Dropout"] lr = params_in["LearningRate"] attrs = params_in["Attrs"] epochs = params_in["Epochs"] if arch == "BaseCNN": if params_in["BatchNorm"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Flatten(), layers.Dense(50, "relu"), layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Flatten(), layers.Dense(50, "relu"), layers.Dense(1) ]) elif arch == "CNN-LSTM": if params_in["BatchNorm"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, "relu"), layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, "relu"), layers.Dense(1) ]) elif arch == "CNN-2LSTM": if params_in["BatchNorm"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), 
                layers.LSTM(30, return_sequences=False),
                layers.Dense(1)
            ])

    model.compile(loss=tf.losses.MeanSquaredError(), optimizer=tf.optimizers.Adam(learning_rate=lr, amsgrad=True))
    filepath = "./checkpoints/Model_in-" + arch + str(attrs) + ".h5"
    losses = []

    class CustomModelCheckPoint(tf.keras.callbacks.Callback):
        def __init__(self, **kargs):
            super(CustomModelCheckPoint, self).__init__(**kargs)
            self.epoch_loss = {}  # accuracy at given epoch

        def on_epoch_begin(self, epoch, logs={}):
            # Things done on beginning of epoch.
            return

        def on_epoch_end(self, epoch, logs={}):
            # things done on end of the epoch
            self.epoch_loss[epoch] = logs.get("loss")
            losses.append(self.epoch_loss[epoch])

    if params_in["ResumeTraining"]:
        model.load_weights(filepath)

    checkpoint2 = CustomModelCheckPoint()
    checkpoint = tf.keras.callbacks.ModelCheckpoint(filepath, monitor='loss', verbose=0, save_best_only=True,
                                                    save_freq='epoch')
    model.fit(data_in, epochs=epochs, callbacks=[checkpoint, checkpoint2])

    df_loss = pd.DataFrame()
    df_loss["Epochs"] = list(range(1, epochs + 1))
    df_loss["Loss"] = losses
    df_loss.to_csv("./losses/lossTrend.csv", index=False)
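# Usage sketch for trainModel above, with synthetic data shaped to match the
# Conv1D input_shape=(1, 50, attrs). The parameter values and import path are
# hypothetical, and ./checkpoints/ and ./losses/ must already exist because
# trainModel writes into them.

import numpy as np
import tensorflow as tf
from tftrain import trainModel  # assumed import path

attrs = 14
features = np.random.rand(4096, 1, 50, attrs).astype("float32")
targets = np.random.rand(4096).astype("float32")
dataset = tf.data.Dataset.from_tensor_slices((features, targets))

params = {
    "Architecture": "CNN-LSTM",
    "Dropout": 0.2,
    "LearningRate": 1e-3,
    "Attrs": attrs,
    "Epochs": 5,
    "BatchNorm": True,
    "ResumeTraining": False,
}
trainModel(dataset, params)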
[((5713, 5827), 'tensorflow.keras.callbacks.ModelCheckpoint', 'tf.keras.callbacks.ModelCheckpoint', (['filepath'], {'monitor': '"""loss"""', 'verbos': '(0)', 'save_best_only': '(True)', 'save_freq': '"""epoch"""'}), "(filepath, monitor='loss', verbos=0,\n save_best_only=True, save_freq='epoch')\n", (5747, 5827), True, 'import tensorflow as tf\n'), ((5966, 5980), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (5978, 5980), True, 'import pandas as pd\n'), ((4856, 4884), 'tensorflow.losses.MeanSquaredError', 'tf.losses.MeanSquaredError', ([], {}), '()\n', (4882, 4884), True, 'import tensorflow as tf\n'), ((4896, 4946), 'tensorflow.optimizers.Adam', 'tf.optimizers.Adam', ([], {'learning_rate': 'lr', 'amsgrad': '(True)'}), '(learning_rate=lr, amsgrad=True)\n', (4914, 4946), True, 'import tensorflow as tf\n'), ((735, 842), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""', 'input_shape': '(1, 50, attrs)'}), "(filters=10, kernel_size=5, padding='same', activation='relu',\n input_shape=(1, 50, attrs))\n", (748, 842), False, 'from tensorflow.keras import layers\n'), ((856, 879), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (870, 879), False, 'from tensorflow.keras import layers\n'), ((897, 972), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=10, kernel_size=5, padding='same', activation='relu')\n", (910, 972), False, 'from tensorflow.keras import layers\n'), ((990, 1013), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (1004, 1013), False, 'from tensorflow.keras import layers\n'), ((1031, 1105), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(1)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=1, kernel_size=5, padding='same', activation='relu')\n", (1044, 1105), False, 'from tensorflow.keras import layers\n'), ((1123, 1146), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (1137, 1146), False, 'from tensorflow.keras import layers\n'), ((1164, 1191), 'tensorflow.keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (1189, 1191), False, 'from tensorflow.keras import layers\n'), ((1209, 1225), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (1223, 1225), False, 'from tensorflow.keras import layers\n'), ((1243, 1267), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(50)', '"""relu"""'], {}), "(50, 'relu')\n", (1255, 1267), False, 'from tensorflow.keras import layers\n'), ((1285, 1300), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {}), '(1)\n', (1297, 1300), False, 'from tensorflow.keras import layers\n'), ((1388, 1495), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""', 'input_shape': '(1, 50, attrs)'}), "(filters=10, kernel_size=5, padding='same', activation='relu',\n input_shape=(1, 50, attrs))\n", (1401, 1495), False, 'from tensorflow.keras import layers\n'), ((1509, 1532), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (1523, 1532), False, 'from tensorflow.keras import layers\n'), ((1550, 1625), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 
'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=10, kernel_size=5, padding='same', activation='relu')\n", (1563, 1625), False, 'from tensorflow.keras import layers\n'), ((1643, 1666), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (1657, 1666), False, 'from tensorflow.keras import layers\n'), ((1684, 1758), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(1)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=1, kernel_size=5, padding='same', activation='relu')\n", (1697, 1758), False, 'from tensorflow.keras import layers\n'), ((1776, 1799), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (1790, 1799), False, 'from tensorflow.keras import layers\n'), ((1817, 1833), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (1831, 1833), False, 'from tensorflow.keras import layers\n'), ((1851, 1875), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(50)', '"""relu"""'], {}), "(50, 'relu')\n", (1863, 1875), False, 'from tensorflow.keras import layers\n'), ((1893, 1908), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {}), '(1)\n', (1905, 1908), False, 'from tensorflow.keras import layers\n'), ((2047, 2154), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""', 'input_shape': '(1, 50, attrs)'}), "(filters=10, kernel_size=5, padding='same', activation='relu',\n input_shape=(1, 50, attrs))\n", (2060, 2154), False, 'from tensorflow.keras import layers\n'), ((2168, 2191), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (2182, 2191), False, 'from tensorflow.keras import layers\n'), ((2209, 2284), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=10, kernel_size=5, padding='same', activation='relu')\n", (2222, 2284), False, 'from tensorflow.keras import layers\n'), ((2302, 2325), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (2316, 2325), False, 'from tensorflow.keras import layers\n'), ((2343, 2417), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(1)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=1, kernel_size=5, padding='same', activation='relu')\n", (2356, 2417), False, 'from tensorflow.keras import layers\n'), ((2435, 2458), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (2449, 2458), False, 'from tensorflow.keras import layers\n'), ((2476, 2503), 'tensorflow.keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (2501, 2503), False, 'from tensorflow.keras import layers\n'), ((2521, 2544), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(5, 10)'], {}), '((5, 10))\n', (2535, 2544), False, 'from tensorflow.keras import layers\n'), ((2562, 2601), 'tensorflow.keras.layers.LSTM', 'layers.LSTM', (['(30)'], {'return_sequences': '(False)'}), '(30, return_sequences=False)\n', (2573, 2601), False, 'from tensorflow.keras import layers\n'), ((2619, 2643), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(50)', '"""relu"""'], {}), "(50, 'relu')\n", (2631, 2643), False, 'from tensorflow.keras import layers\n'), ((2661, 2676), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {}), '(1)\n', (2673, 
2676), False, 'from tensorflow.keras import layers\n'), ((2764, 2871), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""', 'input_shape': '(1, 50, attrs)'}), "(filters=10, kernel_size=5, padding='same', activation='relu',\n input_shape=(1, 50, attrs))\n", (2777, 2871), False, 'from tensorflow.keras import layers\n'), ((2885, 2908), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (2899, 2908), False, 'from tensorflow.keras import layers\n'), ((2926, 3001), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=10, kernel_size=5, padding='same', activation='relu')\n", (2939, 3001), False, 'from tensorflow.keras import layers\n'), ((3019, 3042), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (3033, 3042), False, 'from tensorflow.keras import layers\n'), ((3060, 3134), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(1)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=1, kernel_size=5, padding='same', activation='relu')\n", (3073, 3134), False, 'from tensorflow.keras import layers\n'), ((3152, 3175), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (3166, 3175), False, 'from tensorflow.keras import layers\n'), ((3193, 3216), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(5, 10)'], {}), '((5, 10))\n', (3207, 3216), False, 'from tensorflow.keras import layers\n'), ((3234, 3273), 'tensorflow.keras.layers.LSTM', 'layers.LSTM', (['(30)'], {'return_sequences': '(False)'}), '(30, return_sequences=False)\n', (3245, 3273), False, 'from tensorflow.keras import layers\n'), ((3291, 3315), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(50)', '"""relu"""'], {}), "(50, 'relu')\n", (3303, 3315), False, 'from tensorflow.keras import layers\n'), ((3333, 3348), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {}), '(1)\n', (3345, 3348), False, 'from tensorflow.keras import layers\n'), ((3488, 3595), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""', 'input_shape': '(1, 50, attrs)'}), "(filters=10, kernel_size=5, padding='same', activation='relu',\n input_shape=(1, 50, attrs))\n", (3501, 3595), False, 'from tensorflow.keras import layers\n'), ((3609, 3632), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (3623, 3632), False, 'from tensorflow.keras import layers\n'), ((3650, 3725), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=10, kernel_size=5, padding='same', activation='relu')\n", (3663, 3725), False, 'from tensorflow.keras import layers\n'), ((3743, 3766), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (3757, 3766), False, 'from tensorflow.keras import layers\n'), ((3784, 3858), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(1)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=1, kernel_size=5, padding='same', activation='relu')\n", (3797, 3858), False, 'from tensorflow.keras import layers\n'), ((3876, 3899), 'tensorflow.keras.layers.Dropout', 
'layers.Dropout', (['dropout'], {}), '(dropout)\n', (3890, 3899), False, 'from tensorflow.keras import layers\n'), ((3917, 3944), 'tensorflow.keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (3942, 3944), False, 'from tensorflow.keras import layers\n'), ((3962, 3985), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(5, 10)'], {}), '((5, 10))\n', (3976, 3985), False, 'from tensorflow.keras import layers\n'), ((4003, 4041), 'tensorflow.keras.layers.LSTM', 'layers.LSTM', (['(30)'], {'return_sequences': '(True)'}), '(30, return_sequences=True)\n', (4014, 4041), False, 'from tensorflow.keras import layers\n'), ((4059, 4098), 'tensorflow.keras.layers.LSTM', 'layers.LSTM', (['(30)'], {'return_sequences': '(False)'}), '(30, return_sequences=False)\n', (4070, 4098), False, 'from tensorflow.keras import layers\n'), ((4116, 4131), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {}), '(1)\n', (4128, 4131), False, 'from tensorflow.keras import layers\n'), ((4219, 4326), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""', 'input_shape': '(1, 50, attrs)'}), "(filters=10, kernel_size=5, padding='same', activation='relu',\n input_shape=(1, 50, attrs))\n", (4232, 4326), False, 'from tensorflow.keras import layers\n'), ((4340, 4363), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (4354, 4363), False, 'from tensorflow.keras import layers\n'), ((4381, 4456), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=10, kernel_size=5, padding='same', activation='relu')\n", (4394, 4456), False, 'from tensorflow.keras import layers\n'), ((4474, 4497), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (4488, 4497), False, 'from tensorflow.keras import layers\n'), ((4515, 4589), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(1)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=1, kernel_size=5, padding='same', activation='relu')\n", (4528, 4589), False, 'from tensorflow.keras import layers\n'), ((4607, 4630), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (4621, 4630), False, 'from tensorflow.keras import layers\n'), ((4648, 4671), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(5, 10)'], {}), '((5, 10))\n', (4662, 4671), False, 'from tensorflow.keras import layers\n'), ((4689, 4727), 'tensorflow.keras.layers.LSTM', 'layers.LSTM', (['(30)'], {'return_sequences': '(True)'}), '(30, return_sequences=True)\n', (4700, 4727), False, 'from tensorflow.keras import layers\n'), ((4745, 4784), 'tensorflow.keras.layers.LSTM', 'layers.LSTM', (['(30)'], {'return_sequences': '(False)'}), '(30, return_sequences=False)\n', (4756, 4784), False, 'from tensorflow.keras import layers\n'), ((4802, 4817), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {}), '(1)\n', (4814, 4817), False, 'from tensorflow.keras import layers\n')]
sebastienc/ansible-kong-module
library/kong_api.py
c1e7b471a517d1ec99c5629f3729ebc34088bd64
#!/usr/bin/python

DOCUMENTATION = '''
---
module: kong
short_description: Configure a Kong API Gateway
'''

EXAMPLES = '''
- name: Register a site
  kong:
    kong_admin_uri: http://127.0.0.1:8001
    name: "Mockbin"
    upstream_url: "http://mockbin.com"
    request_host: "mockbin.com"
    state: present

- name: Delete a site
  kong:
    kong_admin_uri: http://127.0.0.1:8001
    name: "Mockbin"
    state: absent
'''

import json, requests, os

class KongAPI:

    def __init__(self, base_url, auth_username=None, auth_password=None):
        self.base_url = base_url
        if auth_username is not None and auth_password is not None:
            self.auth = (auth_username, auth_password)
        else:
            self.auth = None

    def __url(self, path):
        return "{}{}" . format (self.base_url, path)

    def _api_exists(self, name, api_list):
        for api in api_list:
            if name == api.get("name", None):
                return True
        return False

    def add_or_update(self, name, upstream_url, request_host=None, request_path=None, strip_request_path=False, preserve_host=False):
        method = "post"
        url = self.__url("/apis/")
        api_list = self.list().json().get("data", [])
        api_exists = self._api_exists(name, api_list)

        if api_exists:
            method = "patch"
            url = "{}{}" . format (url, name)

        data = {
            "name": name,
            "upstream_url": upstream_url,
            "strip_request_path": strip_request_path,
            "preserve_host": preserve_host
        }
        if request_host is not None:
            data['request_host'] = request_host
        if request_path is not None:
            data['request_path'] = request_path

        return getattr(requests, method)(url, data, auth=self.auth)

    def list(self):
        url = self.__url("/apis")
        return requests.get(url, auth=self.auth)

    def info(self, id):
        url = self.__url("/apis/{}" . format (id))
        return requests.get(url, auth=self.auth)

    def delete_by_name(self, name):
        info = self.info(name)
        id = info.json().get("id")
        return self.delete(id)

    def delete(self, id):
        path = "/apis/{}" . 
format (id) url = self.__url(path) return requests.delete(url, auth=self.auth) class ModuleHelper: def __init__(self, fields): self.fields = fields def get_module(self): args = dict( kong_admin_uri = dict(required=False, type='str'), kong_admin_username = dict(required=False, type='str'), kong_admin_password = dict(required=False, type='str'), name = dict(required=False, type='str'), upstream_url = dict(required=False, type='str'), request_host = dict(required=False, type='str'), request_path = dict(required=False, type='str'), strip_request_path = dict(required=False, default=False, type='bool'), preserve_host = dict(required=False, default=False, type='bool'), state = dict(required=False, default="present", choices=['present', 'absent', 'latest', 'list', 'info'], type='str'), ) return AnsibleModule(argument_spec=args,supports_check_mode=False) def prepare_inputs(self, module): url = module.params['kong_admin_uri'] auth_user = module.params['kong_admin_username'] auth_password = module.params['kong_admin_password'] state = module.params['state'] data = {} for field in self.fields: value = module.params.get(field, None) if value is not None: data[field] = value return (url, data, state, auth_user, auth_password) def get_response(self, response, state): if state == "present": meta = response.json() has_changed = response.status_code in [201, 200] if state == "absent": meta = {} has_changed = response.status_code == 204 if state == "list": meta = response.json() has_changed = False return (has_changed, meta) def main(): fields = [ 'name', 'upstream_url', 'request_host', 'request_path', 'strip_request_path', 'preserve_host' ] helper = ModuleHelper(fields) global module # might not need this module = helper.get_module() base_url, data, state, auth_user, auth_password = helper.prepare_inputs(module) api = KongAPI(base_url, auth_user, auth_password) if state == "present": response = api.add_or_update(**data) if state == "absent": response = api.delete_by_name(data.get("name")) if state == "list": response = api.list() if response.status_code == 401: module.fail_json(msg="Please specify kong_admin_username and kong_admin_password", meta=response.json()) elif response.status_code == 403: module.fail_json(msg="Please check kong_admin_username and kong_admin_password", meta=response.json()) else: has_changed, meta = helper.get_response(response, state) module.exit_json(changed=has_changed, meta=meta) from ansible.module_utils.basic import * from ansible.module_utils.urls import * if __name__ == '__main__': main()
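# Usage sketch driving the KongAPI wrapper above directly from Python,
# outside of Ansible. The admin URL and API values are placeholders; note
# that the class itself appends /apis/... to the base URL, so the base URL
# must not include that suffix.

from kong_api import KongAPI  # assumed import path

api = KongAPI("http://127.0.0.1:8001")
response = api.add_or_update(name="mockbin", upstream_url="http://mockbin.com",
                             request_host="mockbin.com")
print(response.status_code, response.json())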
[((1928, 1961), 'requests.get', 'requests.get', (['url'], {'auth': 'self.auth'}), '(url, auth=self.auth)\n', (1940, 1961), False, 'import json, requests, os\n'), ((2053, 2086), 'requests.get', 'requests.get', (['url'], {'auth': 'self.auth'}), '(url, auth=self.auth)\n', (2065, 2086), False, 'import json, requests, os\n'), ((2334, 2370), 'requests.delete', 'requests.delete', (['url'], {'auth': 'self.auth'}), '(url, auth=self.auth)\n', (2349, 2370), False, 'import json, requests, os\n')]
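A minimal sketch of exercising the KongAPI helper above outside of Ansible. The base URL assumes a Kong admin API on localhost:8001, and the service name and upstream are illustrative only:

# Illustrative smoke test for the KongAPI class above (not part of the module).
api = KongAPI("http://127.0.0.1:8001")  # hypothetical local Kong admin endpoint
resp = api.add_or_update("mockbin", "http://mockbin.com", request_host="mockbin.com")
print(resp.status_code)  # 201 on create, 200 on patch, matching get_response()
print([a.get("name") for a in api.list().json().get("data", [])])
api.delete_by_name("mockbin")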
XingxinHE/compas
src/compas_plotters/artists/lineartist.py
d2901dbbacdaf4694e5adae78ba8f093f10532bf
from compas_plotters.artists import Artist from matplotlib.lines import Line2D from compas.geometry import intersection_line_box_xy __all__ = ['LineArtist'] class LineArtist(Artist): """""" zorder = 1000 def __init__(self, line, draw_points=False, draw_as_segment=False, linewidth=1.0, linestyle='solid', color=(0, 0, 0)): super(LineArtist, self).__init__(line) self._mpl_line = None self._start_artist = None self._end_artist = None self._segment_artist = None self._draw_points = draw_points self._draw_as_segment = draw_as_segment self.line = line self.linewidth = linewidth self.linestyle = linestyle self.color = color def clip(self): xlim, ylim = self.plotter.viewbox xmin, xmax = xlim ymin, ymax = ylim box = [[xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax]] return intersection_line_box_xy(self.line, box) @property def data(self): return [self.line.start[:2], self.line.end[:2]] def draw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2] x1, y1 = self.line.end[:2] line2d = Line2D([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color, zorder=self.zorder) self._mpl_line = self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) else: points = self.clip() if points: p0, p1 = points x0, y0 = p0[:2] x1, y1 = p1[:2] line2d = Line2D([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color, zorder=self.zorder) self._mpl_line = self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) def redraw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2] x1, y1 = self.line.end[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth) else: points = self.clip() if points: p0, p1 = points x0, y0 = p0[:2] x1, y1 = p1[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth)
[((931, 971), 'compas.geometry.intersection_line_box_xy', 'intersection_line_box_xy', (['self.line', 'box'], {}), '(self.line, box)\n', (955, 971), False, 'from compas.geometry import intersection_line_box_xy\n'), ((1219, 1340), 'matplotlib.lines.Line2D', 'Line2D', (['[x0, x1]', '[y0, y1]'], {'linewidth': 'self.linewidth', 'linestyle': 'self.linestyle', 'color': 'self.color', 'zorder': 'self.zorder'}), '([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.\n linestyle, color=self.color, zorder=self.zorder)\n', (1225, 1340), False, 'from matplotlib.lines import Line2D\n'), ((1875, 1996), 'matplotlib.lines.Line2D', 'Line2D', (['[x0, x1]', '[y0, y1]'], {'linewidth': 'self.linewidth', 'linestyle': 'self.linestyle', 'color': 'self.color', 'zorder': 'self.zorder'}), '([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.\n linestyle, color=self.color, zorder=self.zorder)\n', (1881, 1996), False, 'from matplotlib.lines import Line2D\n')]
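The clip() method above delegates to compas.geometry.intersection_line_box_xy; a standalone sketch of that clipping step, using a hypothetical viewbox in place of the plotter's:

# Sketch of the clipping used by LineArtist.clip(); the box corners are hypothetical.
from compas.geometry import Line, intersection_line_box_xy

box = [[0, 0], [10, 0], [10, 10], [0, 10]]     # corners ordered as in clip()
line = Line([-5.0, 5.0, 0.0], [15.0, 5.0, 0.0])  # crosses the box horizontally
print(intersection_line_box_xy(line, box))     # the two points where the line meets the box edges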
manome/python-silvq
plot2d_artificial_dataset1_silvq.py
b50d7486e970fbe9a5b66dd3fc5beb8b5de8ca2f
# -*- encoding: utf8 -*- import numpy as np from sklearn.metrics import accuracy_score from sklearn.model_selection import train_test_split from lvq import SilvqModel from lvq.utils import plot2d def main(): # Load dataset dataset = np.loadtxt('data/artificial_dataset1.csv', delimiter=',') x = dataset[:, :-1].astype('float64') y = dataset[:, -1].astype('int64') # Split dataset into training set and test set x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=3, shuffle=True, stratify=y) # Generating model model = SilvqModel(x.shape[1], theta=0.8, bias_type='ls') # Training the model model.fit(x_train, y_train, epochs=30) # Predict the response for test dataset y_predict = model.predict(x_test) # Evaluating the model print('Accuracy: %.3f' %accuracy_score(y_test, y_predict)) # Plot prediction results and prototypes plot2d(model, x, y, title='Artificial dataset1') if __name__ == '__main__': main()
[((245, 302), 'numpy.loadtxt', 'np.loadtxt', (['"""data/artificial_dataset1.csv"""'], {'delimiter': '""","""'}), "('data/artificial_dataset1.csv', delimiter=',')\n", (255, 302), True, 'import numpy as np\n'), ((474, 553), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x', 'y'], {'test_size': '(0.2)', 'random_state': '(3)', 'shuffle': '(True)', 'stratify': 'y'}), '(x, y, test_size=0.2, random_state=3, shuffle=True, stratify=y)\n', (490, 553), False, 'from sklearn.model_selection import train_test_split\n'), ((590, 639), 'lvq.SilvqModel', 'SilvqModel', (['x.shape[1]'], {'theta': '(0.8)', 'bias_type': '"""ls"""'}), "(x.shape[1], theta=0.8, bias_type='ls')\n", (600, 639), False, 'from lvq import SilvqModel\n'), ((930, 978), 'lvq.utils.plot2d', 'plot2d', (['model', 'x', 'y'], {'title': '"""Artificial dataset1"""'}), "(model, x, y, title='Artificial dataset1')\n", (936, 978), False, 'from lvq.utils import plot2d\n'), ((846, 879), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'y_predict'], {}), '(y_test, y_predict)\n', (860, 879), False, 'from sklearn.metrics import accuracy_score\n')]
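A quick smoke test of the same SilvqModel API on synthetic data; the constructor and fit/predict signatures are taken from the script above, while the blob data and epoch count are illustrative:

# Smoke test on synthetic blobs (signatures as used in the script above).
from sklearn.datasets import make_blobs
from lvq import SilvqModel

x, y = make_blobs(n_samples=200, centers=2, n_features=2, random_state=0)
x, y = x.astype('float64'), y.astype('int64')
model = SilvqModel(x.shape[1], theta=0.8, bias_type='ls')
model.fit(x, y, epochs=10)
print('Train accuracy: %.3f' % (model.predict(x) == y).mean())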
ifr1m/hyper-kvasir
classification_experiments/Fine-Tuned-ResNet-50/Fine-Tuned-ResNet-50.py
21cc366e78c0cb4e180a26a0e441d6c0d5171da9
#!/usr/bin/env python # coding: utf-8 # In[ ]: #Importing all required libraries # In[ ]: from __future__ import absolute_import, division, print_function, unicode_literals # In[ ]: #Checking for correct cuda and tf versions from tensorflow.python.platform import build_info as tf_build_info print(tf_build_info.cuda_version_number) # 9.0 in v1.10.0 print(tf_build_info.cudnn_version_number) # 7 in v1.10.0 # In[ ]: import tensorflow as tf import pathlib from tensorflow.keras.models import Sequential from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D from tensorflow.keras.preprocessing.image import ImageDataGenerator import os import numpy as np import matplotlib.pyplot as plt # In[ ]: AUTOTUNE = tf.data.experimental.AUTOTUNE # In[ ]: import IPython.display as display from PIL import Image import numpy as np import matplotlib.pyplot as plt import os # In[ ]: tf.__version__ # In[ ]: #Train and test data folder train_data_dir = "\\hyper-kvasir\\splits\\all\\1" test_data_dir = "\\hyper-kvasir\\splits\\all\\0" # In[ ]: train_data_dir = pathlib.Path(train_data_dir) test_data_dir = pathlib.Path(test_data_dir) # In[ ]: #count how many images are there image_count = len(list(train_data_dir.glob('*/*.jpg'))) image_count # In[ ]: total_train = len(list(train_data_dir.glob('*/*.jpg'))) total_val = len(list(test_data_dir.glob('*/*.jpg'))) # In[ ]: #get the class names CLASS_NAMES = np.array([item.name for item in train_data_dir.glob('*') if item.name != "LICENSE.txt"]) CLASS_NAMES # In[ ]: #Define parameter for training batch_size = 32 IMG_HEIGHT = 224 IMG_WIDTH = 224 STEPS_PER_EPOCH = np.ceil(image_count/batch_size) epochs = 8 num_classes = len(CLASS_NAMES) #23 # In[ ]: #We use image data generators to load the images and prepare them for the training train_image_generator = ImageDataGenerator() # Generator for our training data validation_image_generator = ImageDataGenerator() # Generator for our validation data train_data_gen = train_image_generator.flow_from_directory(directory=str(train_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), classes = list(CLASS_NAMES), class_mode='categorical' ) val_data_gen = validation_image_generator.flow_from_directory(directory=str(test_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), class_mode='categorical', classes = list(CLASS_NAMES) ) #get class order from directories print(train_data_gen.class_indices.keys()) print(val_data_gen.class_indices.keys()) # In[ ]: IMG_SIZE = 224 IMG_SHAPE = (IMG_SIZE, IMG_SIZE, 3) # base model from the pre-trained model. 
Resnet 50 in this case base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE, include_top=False, weights='imagenet') base_model.trainable = False # In[ ]: #add new classification layer x = base_model.output x = tf.keras.layers.GlobalAveragePooling2D()(x) x = tf.keras.layers.Dense(num_classes,activation='softmax')(x) model = tf.keras.models.Model(inputs=base_model.input, outputs=x) base_learning_rate = 0.001 model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate), loss='categorical_crossentropy', metrics=['accuracy']) # In[ ]: #fit the model history = model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, epochs=epochs, validation_data=val_data_gen, validation_steps=total_val // batch_size ) # In[ ]: #create training plots history acc = history.history['accuracy'] val_acc = history.history['val_accuracy'] loss = history.history['loss'] val_loss = history.history['val_loss'] epochs_range = range(epochs) plt.figure(figsize=(8, 8)) plt.subplot(1, 2, 1) plt.plot(epochs_range, acc, label='Training Accuracy') plt.plot(epochs_range, val_acc, label='Validation Accuracy') plt.legend(loc='lower right') plt.title('Training and Validation Accuracy') plt.subplot(1, 2, 2) plt.plot(epochs_range, loss, label='Training Loss') plt.plot(epochs_range, val_loss, label='Validation Loss') plt.legend(loc='upper right') plt.title('Training and Validation Loss') plt.show() # In[ ]: base_model.trainable = True #now we want to train the base model # In[ ]: # How many layers are in the base model print("Layers base model: ", len(base_model.layers)) # Fine tune from layer x fine_tune_at = 100 # Freeze all the layers before the fine tune starting layer for layer in base_model.layers[:fine_tune_at]: layer.trainable = False # In[ ]: model.compile(loss='categorical_crossentropy', optimizer = tf.keras.optimizers.RMSprop(lr=base_learning_rate/10), metrics=['accuracy']) # In[ ]: model.summary() # In[ ]: #Fine tune step initial_epochs = 7 fine_tune_epochs = 3 total_epochs = initial_epochs + fine_tune_epochs train_batches = total_train // batch_size print(total_val // batch_size) validation_batches = total_val // batch_size history_fine = model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, epochs=total_epochs, initial_epoch = history.epoch[-1], validation_data=val_data_gen, validation_steps=total_val // batch_size ) # In[ ]: acc += history_fine.history['accuracy'] val_acc += history_fine.history['val_accuracy'] loss += history_fine.history['loss'] val_loss += history_fine.history['val_loss'] # In[ ]: #Plot fine tuning plt.figure(figsize=(8, 8)) plt.subplot(2, 1, 1) plt.plot(acc, label='Training Accuracy') plt.plot(val_acc, label='Validation Accuracy') plt.ylim([0.8, 1]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning') plt.legend(loc='lower right') plt.title('Training and Validation Accuracy') plt.subplot(2, 1, 2) plt.plot(loss, label='Training Loss') plt.plot(val_loss, label='Validation Loss') plt.ylim([0, 1.0]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning') plt.legend(loc='upper right') plt.title('Training and Validation Loss') plt.xlabel('epoch') plt.show() # In[ ]: #model save and load import os # In[ ]: #some time stamp from datetime import datetime # current date and time. 
now = datetime.now() timestamp = datetime.timestamp(now) print("timestamp =", timestamp) # In[ ]: model_filename = str(timestamp)+'mymodel.h5' model.save(model_filename) # In[ ]: #To apply the model on new data new_model = tf.keras.models.load_model(model_filename) # Show the model architecture new_model.summary() # In[ ]: from tensorflow.keras.preprocessing import image #image directory containing images to test img_dir="\\polyps" for i,img in enumerate(os.listdir(img_dir)): tmpimage = image.load_img(os.path.join(img_dir,img), target_size=(IMG_SIZE,IMG_SIZE)) tmpimage = np.expand_dims(tmpimage, axis=0).astype('float32') result_class=new_model.predict(tmpimage) print(img,";",CLASS_NAMES[result_class.argmax(axis=-1)])
[((1111, 1139), 'pathlib.Path', 'pathlib.Path', (['train_data_dir'], {}), '(train_data_dir)\n', (1123, 1139), False, 'import pathlib\n'), ((1156, 1183), 'pathlib.Path', 'pathlib.Path', (['test_data_dir'], {}), '(test_data_dir)\n', (1168, 1183), False, 'import pathlib\n'), ((1680, 1713), 'numpy.ceil', 'np.ceil', (['(image_count / batch_size)'], {}), '(image_count / batch_size)\n', (1687, 1713), True, 'import numpy as np\n'), ((1879, 1899), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {}), '()\n', (1897, 1899), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((1963, 1983), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {}), '()\n', (1981, 1983), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((3374, 3470), 'tensorflow.keras.applications.ResNet50', 'tf.keras.applications.ResNet50', ([], {'input_shape': 'IMG_SHAPE', 'include_top': '(False)', 'weights': '"""imagenet"""'}), "(input_shape=IMG_SHAPE, include_top=False,\n weights='imagenet')\n", (3404, 3470), True, 'import tensorflow as tf\n'), ((3777, 3834), 'tensorflow.keras.models.Model', 'tf.keras.models.Model', ([], {'inputs': 'base_model.input', 'outputs': 'x'}), '(inputs=base_model.input, outputs=x)\n', (3798, 3834), True, 'import tensorflow as tf\n'), ((4467, 4493), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (4477, 4493), True, 'import matplotlib.pyplot as plt\n'), ((4494, 4514), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (4505, 4514), True, 'import matplotlib.pyplot as plt\n'), ((4515, 4569), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs_range', 'acc'], {'label': '"""Training Accuracy"""'}), "(epochs_range, acc, label='Training Accuracy')\n", (4523, 4569), True, 'import matplotlib.pyplot as plt\n'), ((4570, 4630), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs_range', 'val_acc'], {'label': '"""Validation Accuracy"""'}), "(epochs_range, val_acc, label='Validation Accuracy')\n", (4578, 4630), True, 'import matplotlib.pyplot as plt\n'), ((4631, 4660), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""lower right"""'}), "(loc='lower right')\n", (4641, 4660), True, 'import matplotlib.pyplot as plt\n'), ((4661, 4706), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and Validation Accuracy"""'], {}), "('Training and Validation Accuracy')\n", (4670, 4706), True, 'import matplotlib.pyplot as plt\n'), ((4708, 4728), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (4719, 4728), True, 'import matplotlib.pyplot as plt\n'), ((4729, 4780), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs_range', 'loss'], {'label': '"""Training Loss"""'}), "(epochs_range, loss, label='Training Loss')\n", (4737, 4780), True, 'import matplotlib.pyplot as plt\n'), ((4781, 4838), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs_range', 'val_loss'], {'label': '"""Validation Loss"""'}), "(epochs_range, val_loss, label='Validation Loss')\n", (4789, 4838), True, 'import matplotlib.pyplot as plt\n'), ((4839, 4868), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper right"""'}), "(loc='upper right')\n", (4849, 4868), True, 'import matplotlib.pyplot as plt\n'), ((4869, 4910), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and Validation Loss"""'], {}), "('Training and Validation Loss')\n", (4878, 4910), True, 'import matplotlib.pyplot as plt\n'), ((4911, 4921), 
'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4919, 4921), True, 'import matplotlib.pyplot as plt\n'), ((6194, 6220), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (6204, 6220), True, 'import matplotlib.pyplot as plt\n'), ((6221, 6241), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(1)'], {}), '(2, 1, 1)\n', (6232, 6241), True, 'import matplotlib.pyplot as plt\n'), ((6242, 6282), 'matplotlib.pyplot.plot', 'plt.plot', (['acc'], {'label': '"""Training Accuracy"""'}), "(acc, label='Training Accuracy')\n", (6250, 6282), True, 'import matplotlib.pyplot as plt\n'), ((6283, 6329), 'matplotlib.pyplot.plot', 'plt.plot', (['val_acc'], {'label': '"""Validation Accuracy"""'}), "(val_acc, label='Validation Accuracy')\n", (6291, 6329), True, 'import matplotlib.pyplot as plt\n'), ((6330, 6348), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0.8, 1]'], {}), '([0.8, 1])\n', (6338, 6348), True, 'import matplotlib.pyplot as plt\n'), ((6444, 6473), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""lower right"""'}), "(loc='lower right')\n", (6454, 6473), True, 'import matplotlib.pyplot as plt\n'), ((6474, 6519), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and Validation Accuracy"""'], {}), "('Training and Validation Accuracy')\n", (6483, 6519), True, 'import matplotlib.pyplot as plt\n'), ((6521, 6541), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(2)'], {}), '(2, 1, 2)\n', (6532, 6541), True, 'import matplotlib.pyplot as plt\n'), ((6542, 6579), 'matplotlib.pyplot.plot', 'plt.plot', (['loss'], {'label': '"""Training Loss"""'}), "(loss, label='Training Loss')\n", (6550, 6579), True, 'import matplotlib.pyplot as plt\n'), ((6580, 6623), 'matplotlib.pyplot.plot', 'plt.plot', (['val_loss'], {'label': '"""Validation Loss"""'}), "(val_loss, label='Validation Loss')\n", (6588, 6623), True, 'import matplotlib.pyplot as plt\n'), ((6624, 6642), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0, 1.0]'], {}), '([0, 1.0])\n', (6632, 6642), True, 'import matplotlib.pyplot as plt\n'), ((6737, 6766), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper right"""'}), "(loc='upper right')\n", (6747, 6766), True, 'import matplotlib.pyplot as plt\n'), ((6767, 6808), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and Validation Loss"""'], {}), "('Training and Validation Loss')\n", (6776, 6808), True, 'import matplotlib.pyplot as plt\n'), ((6809, 6828), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""epoch"""'], {}), "('epoch')\n", (6819, 6828), True, 'import matplotlib.pyplot as plt\n'), ((6829, 6839), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6837, 6839), True, 'import matplotlib.pyplot as plt\n'), ((6976, 6990), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6988, 6990), False, 'from datetime import datetime\n'), ((7003, 7026), 'datetime.datetime.timestamp', 'datetime.timestamp', (['now'], {}), '(now)\n', (7021, 7026), False, 'from datetime import datetime\n'), ((7200, 7242), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', (['model_filename'], {}), '(model_filename)\n', (7226, 7242), True, 'import tensorflow as tf\n'), ((3661, 3701), 'tensorflow.keras.layers.GlobalAveragePooling2D', 'tf.keras.layers.GlobalAveragePooling2D', ([], {}), '()\n', (3699, 3701), True, 'import tensorflow as tf\n'), ((3709, 3765), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['num_classes'], {'activation': '"""softmax"""'}), "(num_classes, activation='softmax')\n", (3730, 3765), 
True, 'import tensorflow as tf\n'), ((6405, 6415), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {}), '()\n', (6413, 6415), True, 'import matplotlib.pyplot as plt\n'), ((6698, 6708), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {}), '()\n', (6706, 6708), True, 'import matplotlib.pyplot as plt\n'), ((7443, 7462), 'os.listdir', 'os.listdir', (['img_dir'], {}), '(img_dir)\n', (7453, 7462), False, 'import os\n'), ((3887, 3934), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', ([], {'lr': 'base_learning_rate'}), '(lr=base_learning_rate)\n', (3911, 3934), True, 'import tensorflow as tf\n'), ((5372, 5427), 'tensorflow.keras.optimizers.RMSprop', 'tf.keras.optimizers.RMSprop', ([], {'lr': '(base_learning_rate / 10)'}), '(lr=base_learning_rate / 10)\n', (5399, 5427), True, 'import tensorflow as tf\n'), ((7493, 7519), 'os.path.join', 'os.path.join', (['img_dir', 'img'], {}), '(img_dir, img)\n', (7505, 7519), False, 'import os\n'), ((7569, 7601), 'numpy.expand_dims', 'np.expand_dims', (['tmpimage'], {'axis': '(0)'}), '(tmpimage, axis=0)\n', (7583, 7601), True, 'import numpy as np\n')]
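Distilled from the notebook above, the freeze-then-partially-unfreeze transfer-learning pattern it follows looks like this (layer index 100 mirrors its fine_tune_at value; the rest is standard Keras):

# Freeze-then-unfreeze pattern, as used in the notebook above.
import tensorflow as tf

base = tf.keras.applications.ResNet50(input_shape=(224, 224, 3), include_top=False, weights='imagenet')
base.trainable = False               # stage 1: train only the new classification head
# ... build the head, compile, and fit here ...
base.trainable = True                # stage 2: unfreeze for fine-tuning
for layer in base.layers[:100]:      # keep the earliest layers frozen (fine_tune_at)
    layer.trainable = False
# recompile with a ~10x smaller learning rate before continuing training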
SlimKatLegacy/android_external_chromium_org
tools/android/android_tools.gyp
ee480ef5039d7c561fc66ccf52169ead186f1bea
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. { 'targets': [ # Intermediate target grouping the android tools needed to run native # unittests and instrumentation test apks. { 'target_name': 'android_tools', 'type': 'none', 'dependencies': [ 'adb_reboot/adb_reboot.gyp:adb_reboot', 'forwarder2/forwarder.gyp:forwarder2', 'md5sum/md5sum.gyp:md5sum', 'purge_ashmem/purge_ashmem.gyp:purge_ashmem', ], }, { 'target_name': 'memdump', 'type': 'none', 'dependencies': [ 'memdump/memdump.gyp:memdump', ], }, { 'target_name': 'memconsumer', 'type': 'none', 'dependencies': [ 'memconsumer/memconsumer.gyp:memconsumer', ], }, ], }
[]
FantasyGold/FantasyGold-Core
test/functional/fantasygold_opcall.py
afff8871e770045e468e2f536ede9db0dff889d5
#!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * from test_framework.script import * from test_framework.mininode import * from test_framework.fantasygold import * from test_framework.fantasygoldconfig import * import sys class OpCallTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.extra_args = [['-txindex=1']]*2 def send_one_op_call_tx_with_counter_check(self, outputs, counter_should_increase_by=0, input_value=500000000, should_throw=False): # 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, "61bc221a")['executionResult']['output'], 16) inpt = make_vin(self.node, input_value) tx = make_transaction(self.node, [inpt], outputs) if should_throw: try: self.node.sendrawtransaction(tx) assert(False) except JSONRPCException as e: print(e) pass else: self.node.sendrawtransaction(tx) self.node.generate(1) sync_blocks(self.nodes) for i in range(2): # 61bc221a counter() out = int(self.nodes[i].callcontract(self.contract_address, "61bc221a")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) def send_multiple_op_call_txs_with_counter_check(self, num_txs, outputs, counter_should_increase_by): # 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, "61bc221a")['executionResult']['output'], 16) i = 0 unspents = self.node.listunspent() while i < num_txs and len(unspents) > 0: # Select as input a tx which has at least 5 fantasygold spendable for tx_i in range(len(unspents)): if int(unspents[tx_i]['amount']*COIN) == 1000000*FGC_MIN_GAS_PRICE and unspents[tx_i]['spendable']: break else: assert(False) inpt = CTxIn(COutPoint(int(unspents[tx_i]['txid'], 16), unspents[tx_i]['vout']), nSequence=0) tx = make_transaction(self.node, [inpt], outputs) txid = self.node.sendrawtransaction(tx) unspents = self.node.listunspent() i += 1 self.node.generate(1) sync_blocks(self.nodes) for i in range(2): # 61bc221a counter() out = int(self.nodes[i].callcontract(self.contract_address, "61bc221a")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) # Deploy the testing contract def create_contract_test(self): """ pragma solidity ^0.4.10; contract Example { uint public counter; function inc() public { counter += 1; } function getBalance() public { return this.balance; } } """ contract_data = self.node.createcontract("6060604052341561000c57fe5b5b61011e8061001c6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806312065fe0146058578063371303c014607b57806361bc221a14608a578063d0e30db01460ad575bfe5b3415605f57fe5b606560b5565b6040518082815260200191505060405180910390f35b3415608257fe5b608860d5565b005b3415609157fe5b609760e9565b6040518082815260200191505060405180910390f35b60b360ef565b005b60003073ffffffffffffffffffffffffffffffffffffffff163190505b90565b60016000600082825401925050819055505b565b60005481565b5b5600a165627a7a72305820fe93d8cc66557a2a6c8347f481f6d334402a7f90f8b2288668a874c34416a4dc0029", 1000000) self.contract_address = contract_data['address'] block_height = self.node.getblockcount() self.node.generate(1) sync_blocks(self.nodes) for i in range(2): assert(self.nodes[i].getblockcount() == block_height+1) assert(len(self.nodes[i].listcontracts()) == 
1+NUM_DEFAULT_DGP_CONTRACTS) # Sends a tx containing 2 op_call outputs calling inc() def many_calls_in_same_tx_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=2, input_value=2*1000000*FGC_MIN_GAS_PRICE) # Sends a normal raw op_call tx with a single output. def normal_op_call_output_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", b"\xff\x7f", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=0x7fff*FGC_MIN_GAS_PRICE) # Sends a tx containing 1 op_call output where txfee == gas_price*gas_limit. def gas_equal_to_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=1000000*FGC_MIN_GAS_PRICE) # Sends a tx containing 1 op_call output where txfee < gas_price*gas_limit. def gas_exceeding_tx_fee_100001_1_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", CScriptNum(10000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000001*FGC_MIN_GAS_PRICE-1, should_throw=True) # Sends a tx containing 1 op_call output where txfee < gas_price*gas_limit. def gas_exceeding_tx_fee_100001_2_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000000*FGC_MIN_GAS_PRICE, should_throw=True) # Sends a tx containing 2 op_call outputs that has a combined gas_price*gas_limit exceeding the tx fee. # This tx should be rejected since executing such a tx would be unable to pay for its potential execution costs in the same way as a tx with one output where txfee < gas_price*gas_limit. 
def two_calls_in_same_tx_exceeding_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=2000000*FGC_MIN_GAS_PRICE-1, should_throw=True) # sends a tx containing 1 op_call output with a (if interpreted with a signed integer) negative gas limit calling inc() def gas_limit_signedness_test(self): outputs = [] gas_limit = b"\xff" while len(gas_limit) < 20: outputs.append(make_op_call_output(0, b"\x04", gas_limit, CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=min(max(int(bytes_to_hex_str(gas_limit), 16)*FGC_MIN_GAS_PRICE, 10000000), 1000000000)) gas_limit += b"\xff" # sends a tx containing 1 op_call output with a (if interpreted with a signed integer) negative gas limit calling inc() def gas_limit_signedness_one_valid_test(self): outputs = [] gas_limit = b"\xff" outputs.append(make_op_call_output(0, b"\x04", b"\xff\xff\x00", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b"\x04", b"\xff\xff", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=2*0xffff*FGC_MIN_GAS_PRICE) # sends a tx containing 1 op_call output with a (if interpreted with a signed integer) negative gas price calling inc() def gas_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", b"\x01\x00", b"\xff\xff", bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=10000000) # sends a tx containing 1 op_call output with a possible negative gas limit and price calling inc() def gas_limit_and_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", b"\xff\xff", b"\xff", bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=0xff*0xffff) # Sends 100 valid op_call txs def send_100_txs_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_multiple_op_call_txs_with_counter_check(100, outputs, 100) def send_tx_with_value_test(self): outputs = [] # d0e30db0 deposit() outputs.append(make_op_call_output(100000000, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("d0e30db0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=0, input_value=100000000+1000000*FGC_MIN_GAS_PRICE) # 12065fe0 getBalance() balance = int(self.node.callcontract(self.contract_address, "12065fe0")['executionResult']['output'], 16) assert(balance == 100000000) def run_test(self): self.node = self.nodes[0] connect_nodes(self.nodes[0], 1) self.nodes[0].generate(200+COINBASE_MATURITY) self.node.sendmany("", {self.node.getnewaddress(): 1000000*FGC_MIN_GAS_PRICE / 
Decimal('100000000') for i in range(200)}) print("Creating contract") self.create_contract_test() print("Calling inc() in two outputs") self.many_calls_in_same_tx_test() print("Calling inc() in one output") self.normal_op_call_output_test() print("Calling inc() in one output with txfee equal to gas_limit*gas_price") self.gas_equal_to_tx_fee_test() print("Calling inc() in one output with txfee < gas_limit*gas_price") self.gas_exceeding_tx_fee_100001_1_test() print("Second test of inc() in one outputs with txfee < gas_limit*gas_price") self.gas_exceeding_tx_fee_100001_2_test() print("Second test of inc() in one output with txfee < gas_limit*gas_price") self.two_calls_in_same_tx_exceeding_tx_fee_test() print("Mining a block with 100 txs each with an output calling inc()") self.send_100_txs_test() print("Checking that the value of txs are correctly updated") self.send_tx_with_value_test() print("Checking gas limit signedness where one tx is valid") self.gas_limit_signedness_one_valid_test() print("Checking gas limit signedness") self.gas_limit_signedness_test() print("Checking gas price signedness") self.gas_price_signedness_test() print("Checking gas limit and gas price signedness") self.gas_limit_and_price_signedness_test() if __name__ == '__main__': OpCallTest().main()
[]
AlbertDeFusco/intake-sklearn
intake_sklearn/source.py
6cd0e11b26703712eb338032518e5c55b725c48f
from intake.source.base import DataSource, Schema import joblib import fsspec import sklearn import re from . import __version__ class SklearnModelSource(DataSource): container = 'python' name = 'sklearn' version = __version__ partition_access = False def __init__(self, urlpath, storage_options=None, metadata=None): """ Parameters ---------- urlpath: str, location of model pkl file Either the absolute or relative path to the file or URL to be opened. Some examples: - ``{{ CATALOG_DIR }}/models/model.pkl`` - ``s3://some-bucket/models/model.pkl`` """ self._urlpath = urlpath self._storage_options = storage_options or {} super().__init__(metadata=metadata) def _load(self): with fsspec.open(self._urlpath, mode='rb', **self._storage_options) as f: return f.read() def _get_schema(self): as_binary = self._load() s = re.search(b'_sklearn_versionq(.*\x00)((\d+\.)?(\d+\.)?(\*|\d+))q', as_binary) if s: sklearn_version = s.group(2).decode() else: sklearn_version = None self._schema = Schema( npartitions=1, extra_metadata={ 'sklearn_version':sklearn_version } ) return self._schema def read(self): self._load_metadata() if not self.metadata['sklearn_version'] == sklearn.__version__: msg = ('The model was created with Scikit-Learn version {} ' 'but version {} has been installed in your current environment.' ).format(self.metadata['sklearn_version'], sklearn.__version__) raise RuntimeError(msg) with fsspec.open(self._urlpath, **self._storage_options) as f: return joblib.load(f)
[((994, 1081), 're.search', 're.search', (["b'_sklearn_versionq(.*\\x00)((\\\\d+\\\\.)?(\\\\d+\\\\.)?(\\\\*|\\\\d+))q'", 'as_binary'], {}), "(b'_sklearn_versionq(.*\\x00)((\\\\d+\\\\.)?(\\\\d+\\\\.)?(\\\\*|\\\\d+))q',\n as_binary)\n", (1003, 1081), False, 'import re\n'), ((1209, 1283), 'intake.source.base.Schema', 'Schema', ([], {'npartitions': '(1)', 'extra_metadata': "{'sklearn_version': sklearn_version}"}), "(npartitions=1, extra_metadata={'sklearn_version': sklearn_version})\n", (1215, 1283), False, 'from intake.source.base import DataSource, Schema\n'), ((823, 885), 'fsspec.open', 'fsspec.open', (['self._urlpath'], {'mode': '"""rb"""'}), "(self._urlpath, mode='rb', **self._storage_options)\n", (834, 885), False, 'import fsspec\n'), ((1790, 1841), 'fsspec.open', 'fsspec.open', (['self._urlpath'], {}), '(self._urlpath, **self._storage_options)\n', (1801, 1841), False, 'import fsspec\n'), ((1867, 1881), 'joblib.load', 'joblib.load', (['f'], {}), '(f)\n', (1878, 1881), False, 'import joblib\n')]
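A minimal usage sketch for the source class above. The path is hypothetical, and the pickle must have been written by joblib under the same scikit-learn version, since read() enforces that check:

# Hypothetical local usage of SklearnModelSource.
from intake_sklearn.source import SklearnModelSource

src = SklearnModelSource('models/model.pkl')  # hypothetical local path
model = src.read()  # loads metadata first; raises RuntimeError on a sklearn version mismatch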
hatamov/jedi
jedi/evaluate/dynamic.py
10df0f933f931a8e0e70304d823f6df0dc3000bd
""" One of the really important features of |jedi| is to have an option to understand code like this:: def foo(bar): bar. # completion here foo(1) There's no doubt wheter bar is an ``int`` or not, but if there's also a call like ``foo('str')``, what would happen? Well, we'll just show both. Because that's what a human would expect. It works as follows: - |Jedi| sees a param - search for function calls named ``foo`` - execute these calls and check the input. This work with a ``ParamListener``. """ from itertools import chain from jedi._compatibility import unicode from jedi.parser import tree as pr from jedi import settings from jedi import debug from jedi.evaluate.cache import memoize_default from jedi.evaluate import imports class ParamListener(object): """ This listener is used to get the params for a function. """ def __init__(self): self.param_possibilities = [] def execute(self, params): self.param_possibilities += params @debug.increase_indent def search_params(evaluator, param): """ A dynamic search for param values. If you try to complete a type: >>> def func(foo): ... foo >>> func(1) >>> func("") It is not known what the type ``foo`` without analysing the whole code. You have to look for all calls to ``func`` to find out what ``foo`` possibly is. """ if not settings.dynamic_params: return [] debug.dbg('Dynamic param search for %s', param) func = param.get_parent_until(pr.Function) # Compare the param names. names = [n for n in search_function_call(evaluator, func) if n.value == param.name.value] # Evaluate the ExecutedParams to types. result = list(chain.from_iterable(n.parent.eval(evaluator) for n in names)) debug.dbg('Dynamic param result %s', result) return result @memoize_default([], evaluator_is_first_arg=True) def search_function_call(evaluator, func): """ Returns a list of param names. """ from jedi.evaluate import representation as er def get_params_for_module(module): """ Returns the values of a param, or an empty array. """ @memoize_default([], evaluator_is_first_arg=True) def get_posibilities(evaluator, module, func_name): try: names = module.used_names[func_name] except KeyError: return [] for name in names: parent = name.parent if pr.is_node(parent, 'trailer'): parent = parent.parent trailer = None if pr.is_node(parent, 'power'): for t in parent.children[1:]: if t == '**': break if t.start_pos > name.start_pos and t.children[0] == '(': trailer = t break if trailer is not None: types = evaluator.goto_definition(name) # We have to remove decorators, because they are not the # "original" functions, this way we can easily compare. # At the same time we also have to remove InstanceElements. undec = [] for escope in types: if escope.isinstance(er.Function, er.Instance) \ and escope.decorates is not None: undec.append(escope.decorates) elif isinstance(escope, er.InstanceElement): undec.append(escope.var) else: undec.append(escope) if er.wrap(evaluator, compare) in undec: # Only if we have the correct function we execute # it, otherwise just ignore it. evaluator.eval_trailer(types, trailer) return listener.param_possibilities return get_posibilities(evaluator, module, func_name) current_module = func.get_parent_until() func_name = unicode(func.name) compare = func if func_name == '__init__': cls = func.get_parent_scope() if isinstance(cls, pr.Class): func_name = unicode(cls.name) compare = cls # add the listener listener = ParamListener() func.listeners.add(listener) try: result = [] # This is like backtracking: Get the first possible result. 
for mod in imports.get_modules_containing_name(evaluator, [current_module], func_name): result = get_params_for_module(mod) if result: break finally: # cleanup: remove the listener; important: should not stick. func.listeners.remove(listener) return result
[((1874, 1922), 'jedi.evaluate.cache.memoize_default', 'memoize_default', (['[]'], {'evaluator_is_first_arg': '(True)'}), '([], evaluator_is_first_arg=True)\n', (1889, 1922), False, 'from jedi.evaluate.cache import memoize_default\n'), ((1446, 1493), 'jedi.debug.dbg', 'debug.dbg', (['"""Dynamic param search for %s"""', 'param'], {}), "('Dynamic param search for %s', param)\n", (1455, 1493), False, 'from jedi import debug\n'), ((1808, 1852), 'jedi.debug.dbg', 'debug.dbg', (['"""Dynamic param result %s"""', 'result'], {}), "('Dynamic param result %s', result)\n", (1817, 1852), False, 'from jedi import debug\n'), ((4185, 4203), 'jedi._compatibility.unicode', 'unicode', (['func.name'], {}), '(func.name)\n', (4192, 4203), False, 'from jedi._compatibility import unicode\n'), ((2199, 2247), 'jedi.evaluate.cache.memoize_default', 'memoize_default', (['[]'], {'evaluator_is_first_arg': '(True)'}), '([], evaluator_is_first_arg=True)\n', (2214, 2247), False, 'from jedi.evaluate.cache import memoize_default\n'), ((4604, 4679), 'jedi.evaluate.imports.get_modules_containing_name', 'imports.get_modules_containing_name', (['evaluator', '[current_module]', 'func_name'], {}), '(evaluator, [current_module], func_name)\n', (4639, 4679), False, 'from jedi.evaluate import imports\n'), ((4355, 4372), 'jedi._compatibility.unicode', 'unicode', (['cls.name'], {}), '(cls.name)\n', (4362, 4372), False, 'from jedi._compatibility import unicode\n'), ((2521, 2550), 'jedi.parser.tree.is_node', 'pr.is_node', (['parent', '"""trailer"""'], {}), "(parent, 'trailer')\n", (2531, 2550), True, 'from jedi.parser import tree as pr\n'), ((2646, 2673), 'jedi.parser.tree.is_node', 'pr.is_node', (['parent', '"""power"""'], {}), "(parent, 'power')\n", (2656, 2673), True, 'from jedi.parser import tree as pr\n'), ((3782, 3809), 'jedi.evaluate.representation.wrap', 'er.wrap', (['evaluator', 'compare'], {}), '(evaluator, compare)\n', (3789, 3809), True, 'from jedi.evaluate import representation as er\n')]
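The docstring scenario above, driven through jedi's public API; the Script signature sketched here matches jedi releases contemporary with this module and differs in current versions:

# Illustrative only; the exact Script signature depends on the jedi version.
import jedi

source = "def foo(bar):\n    bar.\n\nfoo(1)\n"
script = jedi.Script(source, 2, len("    bar."))       # line 2, column right after the dot
print([c.name for c in script.completions()])  # int attributes, inferred from foo(1)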
moird/linux-game-report
steamcheck/views.py
8c3204d857134b0685bc3c213cd9d9e9f9a5f2fd
from steamcheck import app from flask import jsonify, render_template import os import steamapi import json @app.route('/') def index(): return render_template("index.html") @app.route('/report/<name>') def report(name=None): """ This will generate the report based on the user's Steam ID. Returns JSON :param name: Steam ID (either a numerical ID or a vanity url: steamcommunity.com/id/moird) :return: JSON object that contains a listing of all Linux games and general information about them: { "steamuser": "real steam name", "image": "steam user image url", "games": {"<appid>": {"name": "game title", "linux": true, "winehq": false}}, "error": "" } """ process_report = {} try: # See if we are running on heroku or not. Could probably set an environment variable for this as well. if os.path.exists('/app/assets/GAMES.json'): linux_game_list = '/app/assets/GAMES.json' winehq_list = '/app/assets/winehq.json' else: linux_game_list = './assets/GAMES.json' winehq_list = './assets/winehq.json' with open(linux_game_list) as linux_game_list_raw: linux_games = json.load(linux_game_list_raw) with open(winehq_list) as winehq_raw: winehq_apps = json.load(winehq_raw) steam_connection = steamapi.core.APIConnection(api_key=os.environ['steam_api_key']) try: user = steamapi.user.SteamUser(userid=int(name)) except ValueError: # When we get further this as a fallback will be taken out, really don't want to do this. user = steamapi.user.SteamUser(userurl=name) process_report['steamuser'] = user.name process_report['image'] = user.avatar process_report['games'] = {} for game in user.games: linux = False winehq = False if str(game.id) in linux_games: linux = True if game.name in winehq_apps: winehq = winehq_apps[game.name] process_report['games'][game.id] = {"name": game.name, "linux": linux, "winehq": winehq} except Exception as e: process_report['error'] = str(e) return jsonify(**process_report)
[((111, 125), 'steamcheck.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (120, 125), False, 'from steamcheck import app\n'), ((183, 210), 'steamcheck.app.route', 'app.route', (['"""/report/<name>"""'], {}), "('/report/<name>')\n", (192, 210), False, 'from steamcheck import app\n'), ((150, 179), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (165, 179), False, 'from flask import jsonify, render_template\n'), ((2220, 2245), 'flask.jsonify', 'jsonify', ([], {}), '(**process_report)\n', (2227, 2245), False, 'from flask import jsonify, render_template\n'), ((837, 877), 'os.path.exists', 'os.path.exists', (['"""/app/assets/GAMES.json"""'], {}), "('/app/assets/GAMES.json')\n", (851, 877), False, 'import os\n'), ((1341, 1405), 'steamapi.core.APIConnection', 'steamapi.core.APIConnection', ([], {'api_key': "os.environ['steam_api_key']"}), "(api_key=os.environ['steam_api_key'])\n", (1368, 1405), False, 'import steamapi\n'), ((1187, 1217), 'json.load', 'json.load', (['linux_game_list_raw'], {}), '(linux_game_list_raw)\n', (1196, 1217), False, 'import json\n'), ((1291, 1312), 'json.load', 'json.load', (['winehq_raw'], {}), '(winehq_raw)\n', (1300, 1312), False, 'import json\n'), ((1629, 1666), 'steamapi.user.SteamUser', 'steamapi.user.SteamUser', ([], {'userurl': 'name'}), '(userurl=name)\n', (1652, 1666), False, 'import steamapi\n')]
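The two routes above can be exercised without a running server via Flask's test client; the Steam ID is illustrative and the report call requires a valid steam_api_key in the environment:

# Exercising the routes with Flask's built-in test client.
from steamcheck import app

client = app.test_client()
print(client.get('/').status_code)             # 200, renders index.html
print(client.get('/report/moird').get_json())  # needs os.environ['steam_api_key'] set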
benjaminysmith/covidcast-indicators
validator/delphi_validator/run.py
b1474cd68a1497166fefe4beffd4d5ff867b9a61
# -*- coding: utf-8 -*- """Functions to call when running the tool. This module should contain a function called `run_module`, that is executed when the module is run with `python -m delphi_validator`. """ from delphi_utils import read_params from .validate import Validator def run_module(): """Run the validator as a module.""" parent_params = read_params() params = parent_params['validation'] validator = Validator(params) validator.validate(parent_params["export_dir"]).print_and_exit()
[((357, 370), 'delphi_utils.read_params', 'read_params', ([], {}), '()\n', (368, 370), False, 'from delphi_utils import read_params\n')]
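read_params() loads a params.json for the indicator; the minimal shape implied by run_module above is just the two keys it dereferences (values here are placeholders):

# Minimal params shape implied by run_module (placeholder values),
# mirroring what params.json must contain.
params = {
    "export_dir": "./receiving",  # directory whose CSV exports get validated
    "validation": {}              # validator-specific settings go here
}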
zenithfang/supervised_dispnet
datasets/validation_folders.py
f81dfccfdc944e015d8fae17e24b3e664bec14d6
import torch.utils.data as data import numpy as np from imageio import imread from path import Path import pdb def crawl_folders(folders_list): imgs = [] depth = [] for folder in folders_list: current_imgs = sorted(folder.files('*.jpg')) current_depth = [] for img in current_imgs: d = img.dirname()/(img.name[:-4] + '.npy') assert(d.isfile()), "depth file {} not found".format(str(d)) depth.append(d) imgs.extend(current_imgs) depth.extend(current_depth) return imgs, depth def load_as_float(path): return imread(path).astype(np.float32) class ValidationSet(data.Dataset): """A sequence data loader where the files are arranged in this way: root/scene_1/0000000.jpg root/scene_1/0000000.npy root/scene_1/0000001.jpg root/scene_1/0000001.npy .. root/scene_2/0000000.jpg root/scene_2/0000000.npy . transform functions must take in a list of images and a numpy array which can be None """ def __init__(self, root, transform=None): self.root = Path(root) scene_list_path = self.root/'val.txt' self.scenes = [self.root/folder[:-1] for folder in open(scene_list_path)] self.imgs, self.depth = crawl_folders(self.scenes) self.transform = transform def __getitem__(self, index): img = load_as_float(self.imgs[index]) depth = np.load(self.depth[index]).astype(np.float32) #;pdb.set_trace() if self.transform is not None: img, _, _ = self.transform([img], depth, None); #this depth is just used to fill the compose transform that is shared (no need for the result) img = img[0] return img, depth def __len__(self): return len(self.imgs)
[((1183, 1193), 'path.Path', 'Path', (['root'], {}), '(root)\n', (1187, 1193), False, 'from path import Path\n'), ((654, 666), 'imageio.imread', 'imread', (['path'], {}), '(path)\n', (660, 666), False, 'from imageio import imread\n'), ((1513, 1539), 'numpy.load', 'np.load', (['self.depth[index]'], {}), '(self.depth[index])\n', (1520, 1539), True, 'import numpy as np\n')]
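Typical consumption of the dataset above with a standard PyTorch loader; the root is hypothetical and must contain a val.txt listing scene folders, as crawl_folders expects. batch_size=1 avoids collation issues when depth maps differ in shape:

# Wrapping ValidationSet in a standard DataLoader (hypothetical root path).
import torch.utils.data

val_set = ValidationSet('/path/to/dataset', transform=None)
loader = torch.utils.data.DataLoader(val_set, batch_size=1, shuffle=False)
for img, depth in loader:
    print(img.shape, depth.shape)
    break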
tigertv/crypt
secretpy/ciphers/rot18.py
e464f998e5540f52e269fe360ec9d3a08e976b2e
#!/usr/bin/python from .rot13 import Rot13 import secretpy.alphabets as al class Rot18: """ The Rot18 Cipher """ __rot13 = Rot13() def __init__(self): alphabet = al.ENGLISH half = len(alphabet) >> 1 self.__alphabet = alphabet[:half] + al.DECIMAL[:5] + alphabet[half:] + al.DECIMAL[5:] def __crypt(self, text, alphabet): return self.__rot13.encrypt(text, alphabet=self.__alphabet) def encrypt(self, text, key=None, alphabet=None): """ Encryption method :param text: Text to encrypt :param key: is not used :param alphabet: is not used :type text: string :type key: integer :type alphabet: string :return: text :rtype: string """ return self.__crypt(text, self.__alphabet) def decrypt(self, text, key=None, alphabet=None): """ Decryption method :param text: Text to decrypt :param key: is not used :param alphabet: is not used :type text: string :type key: integer :type alphabet: string :return: text :rtype: string """ return self.__crypt(text, self.__alphabet) def get_fixed_alphabet(self): return self.__alphabet
[]
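A round-trip sketch for the cipher above: the combined 36-character alphabet shifts letters by 13 and digits by 5, so encrypting twice returns the original text (input assumed lowercase and restricted to that alphabet; import path per the repo layout above):

# Round-trip check: Rot18 is its own inverse.
from secretpy.ciphers.rot18 import Rot18

cipher = Rot18()
enc = cipher.encrypt("attackatdawn1234")
print(enc)                  # rot13 on the letters, rot5 on the digits
print(cipher.decrypt(enc))  # attackatdawn1234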
notoraptor/pysaurus
pysaurus/database/special_properties.py
3bf5fe8c15e0e0e580e5edaea05b4a1298641367
from abc import abstractmethod from pysaurus.database.properties import PropType from pysaurus.database.video import Video class SpecialPropType(PropType): __slots__ = () @abstractmethod def get(self, video: Video): raise NotImplementedError() class PropError(SpecialPropType): __slots__ = () def __init__(self): super().__init__("<error>", "", True) def get(self, video: Video): return sorted(set(video.errors) | set(video.properties.get(self.name, ()))) class SpecialProperties: properties = [PropError()] @classmethod def install(cls, database): to_save = False for expected in cls.properties: if ( not database.has_prop_type(expected.name) or database.get_prop_type(expected.name) != expected ): database.remove_prop_type(expected.name) database.add_prop_type(expected) to_save = True if to_save: database.save() @classmethod def all_in(cls, video: Video): return all(prop.name in video.properties for prop in cls.properties) @classmethod def set(cls, video: Video): for prop in cls.properties: video.properties[prop.name] = prop.get(video)
[]
openstack/patrole
patrole_tempest_plugin/rbac_utils.py
fa0ee135121a5e86301ad5ee1854b3a0bd70b69b
# Copyright 2017 AT&T Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import contextlib import sys import time from oslo_log import log as logging from oslo_utils import excutils from tempest import config from tempest.lib import exceptions as lib_exc from patrole_tempest_plugin import rbac_exceptions CONF = config.CONF LOG = logging.getLogger(__name__) class _ValidateListContext(object): """Context class responsible for validation of the list functions. This class is used in ``override_role_and_validate_list`` function and the result of a list function must be assigned to the ``ctx.resources`` variable. Example:: with self.override_role_and_validate_list(...) as ctx: ctx.resources = list_function() """ def __init__(self, admin_resources=None, admin_resource_id=None): """Constructor for ``ValidateListContext``. Either ``admin_resources`` or ``admin_resource_id`` should be used, not both. :param list admin_resources: The list of resources received before calling the ``override_role_and_validate_list`` function. To validate will be used the ``_validate_len`` function. :param UUID admin_resource_id: An ID of a resource created before calling the ``override_role_and_validate_list`` function. To validate will be used the ``_validate_resource`` function. :raises RbacValidateListException: if both ``admin_resources`` and ``admin_resource_id`` are set or unset. """ self.resources = None if admin_resources is not None and not admin_resource_id: self._admin_len = len(admin_resources) if not self._admin_len: raise rbac_exceptions.RbacValidateListException( reason="the list of admin resources cannot be empty") self._validate_func = self._validate_len elif admin_resource_id and admin_resources is None: self._admin_resource_id = admin_resource_id self._validate_func = self._validate_resource else: raise rbac_exceptions.RbacValidateListException( reason="admin_resources and admin_resource_id are mutually " "exclusive") def _validate_len(self): """Validates that the number of resources is less than admin resources. """ if not len(self.resources): raise rbac_exceptions.RbacEmptyResponseBody() elif self._admin_len > len(self.resources): raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate_resource(self): """Validates that the admin resource is present in the resources. """ for resource in self.resources: if resource['id'] == self._admin_resource_id: return raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate(self): """Calls the proper validation function. :raises RbacValidateListException: if the ``ctx.resources`` variable is not assigned. """ if self.resources is None: raise rbac_exceptions.RbacValidateListException( reason="ctx.resources is not assigned") self._validate_func() class RbacUtilsMixin(object): """Utility mixin responsible for switching ``os_primary`` role. Should be used as a mixin class alongside an instance of :py:class:`tempest.test.BaseTestCase` to perform Patrole class setup for a base RBAC class. 
    Child classes should not use this mixin. Example::

        class BaseRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest):

            @classmethod
            def setup_clients(cls):
                super(BaseRbacTest, cls).setup_clients()

                cls.hosts_client = cls.os_primary.hosts_client
                ...

    This class is responsible for overriding the value of the primary Tempest
    credential's role (i.e. ``os_primary`` role). By doing so, it is possible
    to seamlessly swap between admin credentials, needed for setup and clean
    up, and primary credentials, needed to perform the API call which does
    policy enforcement. The primary credentials always cycle between roles
    defined by ``CONF.identity.admin_role`` and
    ``CONF.patrole.rbac_test_roles``.
    """

    credentials = ['primary', 'admin']

    def __init__(self, *args, **kwargs):
        super(RbacUtilsMixin, self).__init__(*args, **kwargs)

        # Shows if override_role was called.
        self.__override_role_called = False
        # Shows if exception raised during override_role.
        self.__override_role_caught_exc = False

    _admin_role_id = None
    _rbac_role_ids = None
    _project_id = None
    _user_id = None
    _role_map = None
    _role_inferences_mapping = None

    _orig_roles = []

    admin_roles_client = None

    @classmethod
    def restore_roles(cls):
        if cls._orig_roles:
            LOG.info("Restoring original roles %s", cls._orig_roles)
            roles_already_present = cls._list_and_clear_user_roles_on_project(
                cls._orig_roles)
            if not roles_already_present:
                cls._create_user_role_on_project(cls._orig_roles)

    @classmethod
    def setup_clients(cls):
        if CONF.identity_feature_enabled.api_v3:
            admin_roles_client = cls.os_admin.roles_v3_client
        else:
            raise lib_exc.InvalidConfiguration(
                "Patrole role overriding only supports v3 identity API.")

        cls.admin_roles_client = admin_roles_client

        cls._project_id = cls.os_primary.credentials.tenant_id
        cls._user_id = cls.os_primary.credentials.user_id
        cls._role_inferences_mapping = cls._prepare_role_inferences_mapping()

        cls._init_roles()

        # Store the user's original roles and rollback after testing.
        roles = cls.admin_roles_client.list_user_roles_on_project(
            cls._project_id, cls._user_id)['roles']
        cls._orig_roles = [role['id'] for role in roles]
        cls.addClassResourceCleanup(cls.restore_roles)

        # Change default role to admin
        cls._override_role(False)

        super(RbacUtilsMixin, cls).setup_clients()

    @classmethod
    def _prepare_role_inferences_mapping(cls):
        """Preparing roles mapping to support role inferences

        Making query to `list-all-role-inference-rules`_ keystone API
        returns all inference rules, which makes it possible to prepare
        roles mapping.

        It walks recursively through the raw data::

            {"role_inferences": [
                {
                    "implies": [{"id": "3", "name": "reader"}],
                    "prior_role": {"id": "2", "name": "member"}
                },
                {
                    "implies": [{"id": "2", "name": "member"}],
                    "prior_role": {"id": "1", "name": "admin"}
                }
            ]
            }

        and converts it to the mapping::

            {
                "2": ["3"],      # "member": ["reader"],
                "1": ["2", "3"]  # "admin": ["member", "reader"]
            }

        .. _list-all-role-inference-rules: https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules
        """  # noqa: E501
        def process_roles(role_id, data):
            roles = data.get(role_id, set())
            for rid in roles.copy():
                roles.update(process_roles(rid, data))

            return roles

        def convert_data(data):
            res = {}
            for rule in data:
                prior_role = rule['prior_role']['id']
                implies = {r['id'] for r in rule['implies']}
                res[prior_role] = implies
            return res

        raw_data = cls.admin_roles_client.list_all_role_inference_rules()
        data = convert_data(raw_data['role_inferences'])
        res = {}
        for role_id in data:
            res[role_id] = process_roles(role_id, data)
        return res

    def get_all_needed_roles(self, roles):
        """Extending given roles with roles from mapping

        Examples::
            ["admin"] >> ["admin", "member", "reader"]
            ["member"] >> ["member", "reader"]
            ["reader"] >> ["reader"]
            ["custom_role"] >> ["custom_role"]

        :param roles: list of roles
        :return: extended list of roles
        """
        res = set(r for r in roles)
        for role in res.copy():
            role_id = self.__class__._role_map.get(role)
            implied_roles = self.__class__._role_inferences_mapping.get(
                role_id, set())
            role_names = {self.__class__._role_map[rid]
                          for rid in implied_roles}
            res.update(role_names)
        LOG.debug('All needed roles: %s; Base roles: %s', res, roles)
        return list(res)

    @contextlib.contextmanager
    def override_role(self):
        """Override the role used by ``os_primary`` Tempest credentials.

        Temporarily change the role used by ``os_primary`` credentials to:

        * ``[patrole] rbac_test_roles`` before test execution
        * ``[identity] admin_role`` after test execution

        Automatically switches to admin role after test execution.

        :returns: None

        .. warning::

            This function can alter user roles for pre-provisioned
            credentials. Work is underway to safely clean up after this
            function.

        Example::

            @rbac_rule_validation.action(service='test',
                                         rules=['a:test:rule'])
            def test_foo(self):
                # Allocate test-level resources here.
                with self.override_role():
                    # The role for `os_primary` has now been overridden. Within
                    # this block, call the API endpoint that enforces the
                    # expected policy specified by "rule" in the decorator.
                    self.foo_service.bar_api_call()
                # The role is switched back to admin automatically. Note that
                # if the API call above threw an exception, any code below this
                # point in the test is not executed.
        """
        self._set_override_role_called()
        self._override_role(True)
        try:
            # Execute the test.
            yield
        finally:
            # Check whether an exception was raised. If so, remember that
            # for future validation.
            exc = sys.exc_info()[0]
            if exc is not None:
                self._set_override_role_caught_exc()
            # This code block is always executed, no matter the result of the
            # test. Automatically switch back to the admin role for test clean
            # up.
            self._override_role(False)

    @classmethod
    def _override_role(cls, toggle_rbac_role=False):
        """Private helper for overriding ``os_primary`` Tempest credentials.

        :param toggle_rbac_role: Boolean value that controls the role that
            overrides default role of ``os_primary`` credentials.
            * If True: role is set to ``[patrole] rbac_test_role``
            * If False: role is set to ``[identity] admin_role``
        """
        LOG.debug('Overriding role to: %s.', toggle_rbac_role)
        roles_already_present = False

        try:
            target_roles = (cls._rbac_role_ids
                            if toggle_rbac_role else [cls._admin_role_id])
            roles_already_present = cls._list_and_clear_user_roles_on_project(
                target_roles)

            # Do not override roles if `target_role` already exists.
            if not roles_already_present:
                cls._create_user_role_on_project(target_roles)
        except Exception as exp:
            with excutils.save_and_reraise_exception():
                LOG.exception(exp)
        finally:
            auth_providers = cls.get_auth_providers()
            for provider in auth_providers:
                provider.clear_auth()
            # Fernet tokens are not subsecond aware so sleep to ensure we are
            # passing the second boundary before attempting to authenticate.
            # Only sleep if a token revocation occurred as a result of role
            # overriding. This will optimize test runtime in the case where
            # ``[identity] admin_role`` == ``[patrole] rbac_test_roles``.
            if not roles_already_present:
                time.sleep(1)
            for provider in auth_providers:
                provider.set_auth()

    @classmethod
    def _init_roles(cls):
        available_roles = cls.admin_roles_client.list_roles()['roles']
        cls._role_map = {r['name']: r['id'] for r in available_roles}
        LOG.debug('Available roles: %s', cls._role_map.keys())

        rbac_role_ids = []
        roles = CONF.patrole.rbac_test_roles
        # TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed
        if CONF.patrole.rbac_test_role:
            if not roles:
                roles.append(CONF.patrole.rbac_test_role)

        for role_name in roles:
            rbac_role_ids.append(cls._role_map.get(role_name))

        admin_role_id = cls._role_map.get(CONF.identity.admin_role)

        if not all([admin_role_id, all(rbac_role_ids)]):
            missing_roles = []
            msg = ("Could not find `[patrole] rbac_test_roles` or "
                   "`[identity] admin_role`, both of which are required for "
                   "RBAC testing.")
            if not admin_role_id:
                missing_roles.append(CONF.identity.admin_role)
            if not all(rbac_role_ids):
                missing_roles += [role_name for role_name in roles
                                  if role_name not in cls._role_map]

            msg += " Following roles were not found: %s." % (
                ", ".join(missing_roles))
            msg += " Available roles: %s." % ", ".join(cls._role_map)
            raise rbac_exceptions.RbacResourceSetupFailed(msg)

        cls._admin_role_id = admin_role_id
        cls._rbac_role_ids = rbac_role_ids
        # Adding backward mapping
        cls._role_map.update({v: k for k, v in cls._role_map.items()})

    @classmethod
    def _create_user_role_on_project(cls, role_ids):
        for role_id in role_ids:
            cls.admin_roles_client.create_user_role_on_project(
                cls._project_id, cls._user_id, role_id)

    @classmethod
    def _list_and_clear_user_roles_on_project(cls, role_ids):
        roles = cls.admin_roles_client.list_user_roles_on_project(
            cls._project_id, cls._user_id)['roles']
        all_role_ids = [role['id'] for role in roles]

        # NOTE(felipemonteiro): We do not use ``role_id in all_role_ids`` here
        # to avoid over-permission errors: if the current list of roles on the
        # project includes "admin" and "Member", and we are switching to the
        # "Member" role, then we must delete the "admin" role. Thus, we only
        # return early if the user's roles on the project are an exact match.
        if set(role_ids) == set(all_role_ids):
            return True

        for role in roles:
            cls.admin_roles_client.delete_role_from_user_on_project(
                cls._project_id, cls._user_id, role['id'])

        return False

    @contextlib.contextmanager
    def override_role_and_validate_list(self,
                                        admin_resources=None,
                                        admin_resource_id=None):
        """Call ``override_role`` and validate RBAC for a list API action.

        List actions usually do soft authorization: partial or empty response
        bodies are returned instead of exceptions. This helper validates that
        unauthorized roles only return a subset of the available resources.
        Should only be used for validating list API actions.

        :param test_obj: Instance of ``tempest.test.BaseTestCase``.
        :param list admin_resources: The list of resources received before
            calling the ``override_role_and_validate_list`` function.
        :param UUID admin_resource_id: An ID of a resource created before
            calling the ``override_role_and_validate_list`` function.
        :return: py:class:`_ValidateListContext` object.

        Example::

            # the resource created by admin
            admin_resource_id = (
                self.ntp_client.create_dscp_marking_rule()
                ["dscp_marking_rule"]["id'])
            with self.override_role_and_validate_list(
                    admin_resource_id=admin_resource_id) as ctx:
                # the list of resources available for member role
                ctx.resources = self.ntp_client.list_dscp_marking_rules(
                    policy_id=self.policy_id)["dscp_marking_rules"]
        """
        ctx = _ValidateListContext(admin_resources, admin_resource_id)
        with self.override_role():
            yield ctx
        ctx._validate()

    @classmethod
    def get_auth_providers(cls):
        """Returns list of auth_providers used within test.

        Tests may redefine this method to include their own or third party
        client auth_providers.
        """
        return [cls.os_primary.auth_provider]

    def _set_override_role_called(self):
        """Helper for tracking whether ``override_role`` was called."""
        self.__override_role_called = True

    def _set_override_role_caught_exc(self):
        """Helper for tracking whether exception was thrown inside
        ``override_role``.
        """
        self.__override_role_caught_exc = True

    def _validate_override_role_called(self):
        """Idempotently validate that ``override_role`` is called and reset
        its value to False for sequential tests.
        """
        was_called = self.__override_role_called
        self.__override_role_called = False
        return was_called

    def _validate_override_role_caught_exc(self):
        """Idempotently validate that exception was caught inside
        ``override_role``, so that, by process of elimination, it can be
        determined whether one was thrown outside (which is invalid).
        """
        caught_exception = self.__override_role_caught_exc
        self.__override_role_caught_exc = False
        return caught_exception


def is_admin():
    """Verifies whether the current test role equals the admin role.

    :returns: True if ``rbac_test_roles`` contain the admin role.
    """
    roles = CONF.patrole.rbac_test_roles
    # TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed
    if CONF.patrole.rbac_test_role:
        roles.append(CONF.patrole.rbac_test_role)
        roles = list(set(roles))

    # TODO(felipemonteiro): Make this more robust via a context is admin
    # lookup.
    return CONF.identity.admin_role in roles
[((895, 922), 'oslo_log.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (912, 922), True, 'from oslo_log import log as logging\n'), ((3463, 3523), 'patrole_tempest_plugin.rbac_exceptions.RbacPartialResponseBody', 'rbac_exceptions.RbacPartialResponseBody', ([], {'body': 'self.resources'}), '(body=self.resources)\n', (3502, 3523), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((3036, 3075), 'patrole_tempest_plugin.rbac_exceptions.RbacEmptyResponseBody', 'rbac_exceptions.RbacEmptyResponseBody', ([], {}), '()\n', (3073, 3075), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((3771, 3857), 'patrole_tempest_plugin.rbac_exceptions.RbacValidateListException', 'rbac_exceptions.RbacValidateListException', ([], {'reason': '"""ctx.resources is not assigned"""'}), "(reason=\n 'ctx.resources is not assigned')\n", (3812, 3857), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((6076, 6167), 'tempest.lib.exceptions.InvalidConfiguration', 'lib_exc.InvalidConfiguration', (['"""Patrole role overriding only supports v3 identity API."""'], {}), "(\n 'Patrole role overriding only supports v3 identity API.')\n", (6104, 6167), True, 'from tempest.lib import exceptions as lib_exc\n'), ((14696, 14740), 'patrole_tempest_plugin.rbac_exceptions.RbacResourceSetupFailed', 'rbac_exceptions.RbacResourceSetupFailed', (['msg'], {}), '(msg)\n', (14735, 14740), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((2328, 2428), 'patrole_tempest_plugin.rbac_exceptions.RbacValidateListException', 'rbac_exceptions.RbacValidateListException', ([], {'reason': '"""the list of admin resources cannot be empty"""'}), "(reason=\n 'the list of admin resources cannot be empty')\n", (2369, 2428), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((2704, 2821), 'patrole_tempest_plugin.rbac_exceptions.RbacValidateListException', 'rbac_exceptions.RbacValidateListException', ([], {'reason': '"""admin_resources and admin_resource_id are mutually exclusive"""'}), "(reason=\n 'admin_resources and admin_resource_id are mutually exclusive')\n", (2745, 2821), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((3146, 3206), 'patrole_tempest_plugin.rbac_exceptions.RbacPartialResponseBody', 'rbac_exceptions.RbacPartialResponseBody', ([], {'body': 'self.resources'}), '(body=self.resources)\n', (3185, 3206), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((11195, 11209), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (11207, 11209), False, 'import sys\n'), ((13183, 13196), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (13193, 13196), False, 'import time\n'), ((12517, 12554), 'oslo_utils.excutils.save_and_reraise_exception', 'excutils.save_and_reraise_exception', ([], {}), '()\n', (12552, 12554), False, 'from oslo_utils import excutils\n')]
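A minimal, self-contained sketch of the role-inference flattening that `_prepare_role_inferences_mapping` above performs; the two helpers are copied from the method and the payload is the sample from its docstring, so only the variable names here are invented:

def convert_data(rules):
    # {prior_role_id: {directly_implied_role_id, ...}}
    return {rule['prior_role']['id']: {r['id'] for r in rule['implies']}
            for rule in rules}

def process_roles(role_id, data):
    # Recursively collect every role implied, directly or transitively.
    roles = data.get(role_id, set())
    for rid in roles.copy():
        roles.update(process_roles(rid, data))
    return roles

sample = [
    {"implies": [{"id": "3", "name": "reader"}],
     "prior_role": {"id": "2", "name": "member"}},
    {"implies": [{"id": "2", "name": "member"}],
     "prior_role": {"id": "1", "name": "admin"}},
]
data = convert_data(sample)
print({role_id: process_roles(role_id, data) for role_id in data})
# {'2': {'3'}, '1': {'2', '3'}}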
kimera1999/pmpktn
core/my_widgets/drug_picker.py
5307b6684a08bac4b88617f097017b5ea4192ab2
from initialize import *
from core.db.db_func import query_linedrug_list
import os

import wx


class DrugPopup(wx.ComboPopup):

    def __init__(self, parent):
        super().__init__()
        self.lc = None
        self.mv = parent.mv
        self.init_d_l = query_linedrug_list(self.mv.sess).all()
        self.d_l = []

    def Create(self, parent):
        self.lc = wx.ListCtrl(
            parent, style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER)
        self.lc.AppendColumn('Thuốc', width=200)
        self.lc.AppendColumn('Thành phần', width=150)
        self.lc.AppendColumn('Số lượng')
        self.lc.AppendColumn('Đơn giá')
        self.lc.AppendColumn('Cách dùng', width=100)
        self.lc.Bind(wx.EVT_MOTION, self.OnMotion)
        self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
        self.lc.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)
        self.Update()
        return True

    def Init(self):
        self.value = -1
        self.curitem = -1

    def GetControl(self):
        return self.lc

    def SetStringValue(self, val):
        idx = self.lc.FindItem(-1, val)
        if idx != wx.NOT_FOUND:
            self.lc.Select(idx)

    def GetStringValue(self):
        if self.value >= 0:
            return self.lc.GetItemText(self.value, col=0)
        return ""

    def GetAdjustedSize(self, minWidth, prefHeight, maxHeight):
        return super().GetAdjustedSize(*popup_size)

    def Update(self, s=''):
        self.lc.DeleteAllItems()
        self.d_l = list(filter(
            lambda x: s.casefold() in x.name.casefold() or
            s.casefold() in x.element.casefold(),
            self.init_d_l))
        for index, item in enumerate(self.d_l):
            self.lc.Append(
                [item.name, item.element, item.quantity,
                 item.sale_price, item.usage])
            if item.quantity <= user_setting[
                    "so_luong_thuoc_toi_thieu_de_bao_dong_do"]:
                self.lc.SetItemTextColour(index, wx.Colour(252, 3, 57, 255))

    def OnMotion(self, e):
        item, flags = self.lc.HitTest(e.GetPosition())
        if item >= 0:
            self.lc.Select(item)
            self.curitem = item

    def OnLeftDown(self, e):
        try:
            self.value = self.curitem
            self.ComboCtrl.drugWH = self.d_l[self.value]
            self.Dismiss()
            self.ComboCtrl.SelectAll()
            self.ComboCtrl.SetInsertionPointEnd()
        except IndexError:
            self.Dismiss()

    def OnPopup(self):
        self.Init()
        self.Update(self.ComboCtrl.Value)
        if self.lc.ItemCount > 0:
            if self.curitem < (self.lc.ItemCount - 1):
                self.curitem += 1
            self.lc.Select(self.curitem)
            self.lc.EnsureVisible(self.curitem)

    def KeyDown(self):
        if self.lc.ItemCount > 0:
            if self.curitem < (self.lc.ItemCount - 1):
                self.curitem += 1
            self.lc.Select(self.curitem)
            self.lc.EnsureVisible(self.curitem)

    def KeyUp(self):
        if self.lc.ItemCount > 0:
            if self.curitem > 0:
                self.curitem -= 1
            self.lc.Select(self.curitem)
            self.lc.EnsureVisible(self.curitem)
        else:
            self.KeyESC()

    def KeyESC(self):
        a = self.ComboCtrl.Value
        self.Dismiss()
        self.ComboCtrl.ChangeValue(a)
        self.ComboCtrl.SetInsertionPointEnd()

    def KeyReturn(self):
        self.OnLeftDown(None)

    def onKeyPress(self, e):
        c = e.GetKeyCode()
        if c == wx.WXK_DOWN:
            self.KeyDown()
        elif c == wx.WXK_UP:
            self.KeyUp()
        elif c == wx.WXK_ESCAPE:
            self.KeyESC()
        elif c == wx.WXK_RETURN:
            self.KeyReturn()


class DrugPicker(wx.ComboCtrl):

    def __init__(self, parent):
        super().__init__(parent, size=drugctrl_size, style=wx.TE_PROCESS_ENTER)
        self.mv = parent.mv
        self.drug_popup = DrugPopup(self)
        self.SetPopupControl(self.drug_popup)
        self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)
        self.Bind(wx.EVT_TEXT, self.onTextChange)
        self.SetHint("Nhấn Enter để search thuốc")
        self._drugWH = None
        self.EnablePopupAnimation(enable=False)

    @property
    def drugWH(self):
        return self._drugWH

    @drugWH.setter
    def drugWH(self, dwh):
        self._drugWH = dwh
        pg = self.Parent
        if dwh:
            pg.usage_unit.Label = dwh.usage_unit + " "
            pg.sale_unit.Label = dwh.sale_unit + " "
        else:
            self.ChangeValue('')
            pg.dosage_per.ChangeValue('')
            pg.usage_unit.Label = '{Đơn vị} '
            pg.times.ChangeValue("")
            pg.quantity.ChangeValue("")
            pg.sale_unit.Label = '{Đơn vị} '
            pg.usage.ChangeValue("")

    def onKeyPress(self, e):
        if os.name == "posix":
            if e.GetKeyCode() in [wx.WXK_RETURN, wx.WXK_DOWN]:
                if not self.IsPopupShown():
                    self.Popup()
            else:
                e.Skip()
        else:
            if e.GetKeyCode() not in [wx.WXK_RETURN, wx.WXK_UP,
                                      wx.WXK_DOWN, wx.WXK_ESCAPE]:
                if self.IsPopupShown():
                    a = self.Value
                    self.Dismiss()
                    self.ChangeValue(a)
                    self.SetInsertionPointEnd()
            e.Skip()

    def onTextChange(self, e):
        if os.name == "nt":
            if e.String == "":
                self.Clear()
            elif len(e.String) >= 1:
                if not self.IsPopupShown():
                    self.Popup()
                    self.SetInsertionPointEnd()
        if os.name == "posix":
            if e.String == "":
                self.Clear()

    def Clear(self):
        self.drugWH = None

    def refreshPopup(self):
        self.drug_popup.init_d_l = query_linedrug_list(self.mv.sess).all()
[((373, 450), 'wx.ListCtrl', 'wx.ListCtrl', (['parent'], {'style': '(wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER)'}), '(parent, style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER)\n', (384, 450), False, 'import wx\n'), ((262, 295), 'core.db.db_func.query_linedrug_list', 'query_linedrug_list', (['self.mv.sess'], {}), '(self.mv.sess)\n', (281, 295), False, 'from core.db.db_func import query_linedrug_list\n'), ((5989, 6022), 'core.db.db_func.query_linedrug_list', 'query_linedrug_list', (['self.mv.sess'], {}), '(self.mv.sess)\n', (6008, 6022), False, 'from core.db.db_func import query_linedrug_list\n'), ((1950, 1976), 'wx.Colour', 'wx.Colour', (['(252)', '(3)', '(57)', '(255)'], {}), '(252, 3, 57, 255)\n', (1959, 1976), False, 'import wx\n')]
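DrugPopup/DrugPicker follow wxPython's standard ComboCtrl + ComboPopup pairing. A stripped-down, runnable sketch of that pattern, with none of the app-specific state (`parent.mv`, `popup_size`, the database query); all names here are invented for illustration:

import wx

class ListPopup(wx.ComboPopup):
    def Create(self, parent):
        # The popup's child window: a simple list the user can pick from.
        self.lc = wx.ListCtrl(parent, style=wx.LC_LIST | wx.LC_SINGLE_SEL)
        for word in ["alpha", "beta", "gamma"]:
            self.lc.InsertItem(self.lc.GetItemCount(), word)
        self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
        return True

    def GetControl(self):
        return self.lc

    def GetStringValue(self):
        idx = self.lc.GetFirstSelected()
        return self.lc.GetItemText(idx) if idx >= 0 else ""

    def OnLeftDown(self, event):
        item, _ = self.lc.HitTest(event.GetPosition())
        if item >= 0:
            self.lc.Select(item)
        self.Dismiss()  # close the popup; the combo shows GetStringValue()

if __name__ == "__main__":
    app = wx.App()
    frame = wx.Frame(None, title="ComboPopup demo")
    combo = wx.ComboCtrl(frame)
    combo.SetPopupControl(ListPopup())
    frame.Show()
    app.MainLoop()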
GuilhermeEsdras/Grafos
em Python/Roteiro4/Roteiro4__grafos.py
b6556c3d679496d576f65b798a1a584cd73e40f4
from Roteiro4.Roteiro4__funcoes import Grafo


class Grafos:

    # Grafo da Paraíba
    paraiba = Grafo(['J', 'C', 'E', 'P', 'M', 'T', 'Z'])
    for aresta in ['J-C', 'C-E', 'C-E', 'C-P', 'C-P', 'C-M', 'C-T', 'M-T', 'T-Z']:
        paraiba.adicionaAresta(aresta)
    # --- #

    # Grafo Completo
    grafo_completo = Grafo(['J', 'C', 'E', 'P'])
    for aresta in ['J-C', 'J-P', 'J-E', 'C-E', 'C-P', 'P-E']:
        grafo_completo.adicionaAresta(aresta)
    # --- #

    # K3
    k3 = Grafo(['A', 'B', 'C'])
    for aresta in ['A-B', 'B-C', 'C-A']:
        k3.adicionaAresta(aresta)
    # --- #
[((103, 145), 'Roteiro4.Roteiro4__funcoes.Grafo', 'Grafo', (["['J', 'C', 'E', 'P', 'M', 'T', 'Z']"], {}), "(['J', 'C', 'E', 'P', 'M', 'T', 'Z'])\n", (108, 145), False, 'from Roteiro4.Roteiro4__funcoes import Grafo\n'), ((332, 359), 'Roteiro4.Roteiro4__funcoes.Grafo', 'Grafo', (["['J', 'C', 'E', 'P']"], {}), "(['J', 'C', 'E', 'P'])\n", (337, 359), False, 'from Roteiro4.Roteiro4__funcoes import Grafo\n'), ((504, 526), 'Roteiro4.Roteiro4__funcoes.Grafo', 'Grafo', (["['A', 'B', 'C']"], {}), "(['A', 'B', 'C'])\n", (509, 526), False, 'from Roteiro4.Roteiro4__funcoes import Grafo\n')]
youtaya/knight
fuzzybee/joboard/views.py
6899e18ca6b1ef01daaae7d7fd14b50a26aa0aee
# -*- coding: utf-8 -*-
from django.shortcuts import get_object_or_404, render_to_response, render
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from joboard.models import Factory
from joboard.forms import FactoryForm
from django.template import RequestContext
from django.core.exceptions import ObjectDoesNotExist
from urllib import urlopen, urlencode
import urllib2
from fuzzybee.conf import b_url, b_ak, geo_table, l_url, app_id, app_key
from utils.pack_json import toJSON, fromJSON
from django.contrib.auth.decorators import login_required
from people.models import People

import logging

logger = logging.getLogger(__name__)


@login_required
def index(request):
    form = None
    if request.method == 'POST':
        form = FactoryForm(request.POST)
        print form
        if form.is_valid():
            factory = form.cleaned_data
            logger.debug("lat: " + str(factory['fact_lat']))
            logger.debug("addr: " + factory['fact_addr'])
            # save factory in model
            factmodel = form.save(commit=False)
            print request.user
            factmodel.fact_maintainer = People.objects.get(user=request.user)
            factmodel.save()
            factid = factmodel.id
            # save in public server: leancloud and baidu
            save_factory_cloud(factory, factid)
            return HttpResponseRedirect(reverse('board:detail', args=(factid,)))
    else:
        form = FactoryForm()
    return render_to_response('board/new.html', {'form': form},
                              context_instance=RequestContext(request))


@login_required
def detail(request, fact_id):
    print fact_id
    info = get_object_or_404(Factory, pk=fact_id)
    return render(request, 'board/detail.html', {'info':info})


@login_required
def manager(request):
    print "manager..."
    try:
        people = People.objects.get(user=request.user)
        factory = Factory.objects.get(fact_maintainer=people)
    except ObjectDoesNotExist:
        print 'no hire action...'
        return redirect(reverse('joboard.views.index', args=[]))
    return render(request, 'board/manager.html', {'info':factory})


def save_factory_cloud(fact_info, fact_id):
    title = fact_info['fact_name']
    address = fact_info['fact_addr']
    lat = fact_info['fact_lat']
    lng = fact_info['fact_lng']
    num = fact_info['hire_num']
    data = {
        'title': title.encode("utf-8"),
        'address': address.encode("utf-8"),
        'latitude': lat,
        'longitude': lng,
        'job_num': num,
        'factory_id': fact_id,
    }
    head = {
        'X-AVOSCloud-Application-Id': app_id,
        'X-AVOSCloud-Application-Key': app_key,
        'Content-Type': 'application/json',
    }
    req = urllib2.Request(l_url, toJSON(data), head)
    print str(req)
    response = urllib2.urlopen(req)
    #print respone.read()
    lean_response = fromJSON(response.read())
    print lean_response
    lean_objectId = lean_response['objectId']

    # save in Baidu Map
    params = urlencode({
        'title': title.encode("utf-8"),
        'address': address.encode("utf-8"),
        'latitude': lat,
        'longitude': lng,
        'coord_type': 3,
        'geotable_id': geo_table,
        'ak': b_ak,
        'job_num': num,
        'lean_id': lean_objectId,
    })
    req = urllib2.Request(b_url, params)
    #print str(req)
    response = urllib2.urlopen(req)
    #print respone.read()
[]
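The `save_factory_cloud` helper above does its LeanCloud POST with urllib2 (Python 2). For reference, a hedged sketch of the same request using the `requests` package; the endpoint, headers, and payload mirror the function above, and the function name here is invented:

import requests

def save_factory_cloud_requests(l_url, app_id, app_key, data):
    # Same JSON POST that save_factory_cloud() builds by hand with urllib2.
    headers = {
        'X-AVOSCloud-Application-Id': app_id,
        'X-AVOSCloud-Application-Key': app_key,
        'Content-Type': 'application/json',
    }
    resp = requests.post(l_url, json=data, headers=headers)
    resp.raise_for_status()
    # LeanCloud replies with the stored object's id, as the original parses.
    return resp.json().get('objectId')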
yatsu/react-flask-graphql-example
flask/app.py
18a38b7602c81a85a3cc38c74440ce34d63fc32a
from flask import Flask
from flask_cors import CORS
from flask_graphql import GraphQLView

from schema import Schema


def create_app(**kwargs):
    app = Flask(__name__)
    app.debug = True
    app.add_url_rule(
        '/graphql',
        view_func=GraphQLView.as_view('graphql', schema=Schema, **kwargs)
    )
    return app


if __name__ == '__main__':
    app = create_app(graphiql=True)
    CORS(app, resources={r'/graphql': {'origins': '*'}})
    app.run()
[((154, 169), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (159, 169), False, 'from flask import Flask\n'), ((397, 448), 'flask_cors.CORS', 'CORS', (['app'], {'resources': "{'/graphql': {'origins': '*'}}"}), "(app, resources={'/graphql': {'origins': '*'}})\n", (401, 448), False, 'from flask_cors import CORS\n'), ((251, 306), 'flask_graphql.GraphQLView.as_view', 'GraphQLView.as_view', (['"""graphql"""'], {'schema': 'Schema'}), "('graphql', schema=Schema, **kwargs)\n", (270, 306), False, 'from flask_graphql import GraphQLView\n')]
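A quick way to exercise the endpoint once the server above is running on Flask's default port; the `{ hello }` query is a placeholder, since the fields exposed by `schema.Schema` are not shown in this record:

import requests

resp = requests.post(
    "http://localhost:5000/graphql",
    json={"query": "{ hello }"},  # replace with a field your Schema defines
)
print(resp.json())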
AndrewChap/mortgagetvm
mortgagetvm/mortgageOptions.py
4ec39707d61bcb3224bdcddce84bf237f02352d1
# Factory-like class for mortgage options
class MortgageOptions:
    def __init__(self, kind, **inputOptions):
        self.set_default_options()
        self.set_kind_options(kind=kind)
        self.set_input_options(**inputOptions)

    def set_default_options(self):
        self.optionList = dict()
        self.optionList['commonDefaults'] = dict(
            name               = None,
            label              = None,
            color              = [0, 0, 0],
            houseCost          = '100%',  # how much you are paying for the house
            mortgageRate       = '0.0%',  # Mortgage annual interest rate
            mortgageLength     = '30Y',   # Mortgage length (in years)
            downPayment        = '0%',    # Percentage of house cost paid upfront
            startingCash       = '100%',  # Amount of money you have before purchase
            tvmRate            = '7.0%',  # Annual rate of return of savings
            inflationRate      = '1.8%',  # Annual rate of inflation - NOT IMPLEMENTED
            appreciationRate   = '5.0%',  # Annual rate of increase in value of house
            houseValue         = '100%',  # how much the house is worth when you bought it
            originationFees    = '0.0%',  # Mortgage fees as a percentage of the loan
            otherMortgageFees  = '0.0%',  # Other fees as a percentage of the loan
            otherPurchaseFees  = '0.0%',  # Other fees as a percentage of home value
            paymentsPerYear    = '12',    # Number of mortgage payments per year
            taxRate            = '0.0%',  # Annual taxes as percentage of home value
            insuranceRate      = '0.0%',  # Annual insurance as percentage of home value
            listingFee         = '0.0%',  # Cost of selling the house
            capitalGainsTax    = '0.0%',  # Paid if selling house within two years
            capitalGainsPeriod = '0',     # Years after which cap gains tax is not applied
            rentalIncome       = '0.0%',  # Monthly rental price as percentage of home value
            rentalPayment      = '0.0%',  # Monthly rental price as percentage of home value
        )
        self.optionList['mortgageDefaults'] = dict(
            name               = 'mortgage',
            label              = 'Mortgage',
            mortgageRate       = '4.5%',  # Mortgage annual interest rate
            mortgageLength     = '30Y',   # Mortgage length (in years)
            downPayment        = '20%',   # Percentage of house cost paid upfront
            startingCash       = '100%',  # Amount of money you have before purchase
            originationFees    = '0.5%',  # Mortgage fees as a percentage of the loan
            otherMortgageFees  = '0.5%',  # Other fees as a percentage of the loan
            otherPurchaseFees  = '0.5%',  # Other fees as a percentage of home value
            paymentsPerYear    = '12',    # Number of mortgage payments per year
            taxRate            = '0.6%',  # Annual taxes as percentage of home value
            insuranceRate      = '0.4%',  # Annual insurance as percentage of home value
            listingFee         = '6.0%',  # Cost of selling the house
            capitalGainsTax    = '15%',   # Paid if selling house within two years
            capitalGainsPeriod = '2',     # Years after which cap gains tax is not applied
        )
        self.optionList['rentalDefaults'] = dict(
            rentalPayment      = '0.6%',  # Monthly rental price as percentage of home value
        )
        self.optionList['investmentPropertyDefaults'] = dict(
            mortgageRate       = '4.5%',  # Mortgage annual interest rate
            mortgageLength     = '30Y',   # Mortgage length (in years)
            downPayment        = '20%',   # Percentage of house cost paid upfront
            startingCash       = '100%',  # Amount of money you have before purchase
            tvmRate            = '7.0%',  # Annual rate of return of savings
            inflationRate      = '1.8%',  # Annual rate of inflation - NOT IMPLEMENTED
            appreciationRate   = '5.0%',  # Annual rate of increase in value of house
            houseValue         = '100%',  # how much the house is worth when you bought it
            originationFees    = '0.5%',  # Mortgage fees as a percentage of the loan
            otherMortgageFees  = '0.5%',  # Other fees as a percentage of the loan
            otherPurchaseFees  = '0.5%',  # Other fees as a percentage of home value
            paymentsPerYear    = '12',    # Number of mortgage payments per year
            taxRate            = '0.6%',  # Annual taxes as percentage of home value
            insuranceRate      = '0.4%',  # Annual insurance as percentage of home value
            listingFee         = '6.0%',  # Cost of selling the house
            capitalGainsTax    = '15%',   # Paid if selling house within two years
            capitalGainsPeriod = '2',     # Years after which cap gains tax is not applied
            rentalIncome       = '0.6%',  # Monthly rental price as percentage of home value
        )

    def set_kind_options(self, kind, **inputOptions):
        self.options = self.optionList['commonDefaults']
        if kind == None:
            pass
        elif kind == 'mortgage':
            for key, val in self.optionList['mortgageDefaults'].items():
                self.options[key] = val
        elif kind == 'rental':
            for key, val in self.optionList['rentalDefaults'].items():
                self.options[key] = val
        elif kind == 'investmentProperty':
            for key, val in self.optionList['investmentPropertyDefaults'].items():
                self.options[key] = val

    def set_input_options(self, **inputOptions):
        for key, val in inputOptions.items():
            self.options[key] = val
[]
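A short usage sketch of the factory above; the option values passed in are made up, and the import path simply mirrors the repository layout:

from mortgagetvm.mortgageOptions import MortgageOptions

opts = MortgageOptions('mortgage', downPayment='25%', mortgageRate='3.9%')
print(opts.options['downPayment'])  # '25%'  (input override wins)
print(opts.options['listingFee'])   # '6.0%' (inherited from mortgageDefaults)
print(opts.options['tvmRate'])      # '7.0%' (inherited from commonDefaults)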
CodingBullywug/DDreshape
DD/Terrain.py
393e5ea336eb6cb78f31345731ccf52baf19bfac
from DD.utils import PoolByteArray2NumpyArray, NumpyArray2PoolByteArray
from DD.Entity import Entity
import numpy as np


class Terrain(Entity):
    def __init__(self, json, width, height, scale=4, terrain_types=4):
        super(Terrain, self).__init__(json)
        self._scale = scale
        self.terrain_types = terrain_types
        self.splat = PoolByteArray2NumpyArray(self._json['splat']).reshape(
            height*self._scale, width*self._scale, self.terrain_types, order='C')

    def get_json(self):
        json = self._json
        json['splat'] = NumpyArray2PoolByteArray(
            self.splat.reshape(np.prod(self.splat.shape), order='C'))
        return json

    def pad(self, top, bottom, left, right):
        self.splat = np.pad(self.splat,
                            ((top*self._scale, bottom*self._scale),
                             (left*self._scale, right*self._scale),
                             (0, 0)),
                            mode='edge')

    def crop(self, top, bottom, left, right):
        self.splat = self._crop_map_safe(self.splat, top, bottom, left, right, self._scale)

    def fliplr(self, width):
        self.splat = np.fliplr(self.splat)

    def flipud(self, height):
        self.splat = np.flipud(self.splat)

    def rot90(self, width, height):
        self.splat = self._rot90_map(self.splat)

    def rot180(self, width, height):
        self.splat = self._rot180_map(self.splat)

    def rot270(self, width, height):
        self.splat = self._rot270_map(self.splat)
[((720, 852), 'numpy.pad', 'np.pad', (['self.splat', '((top * self._scale, bottom * self._scale), (left * self._scale, right *\n self._scale), (0, 0))'], {'mode': '"""edge"""'}), "(self.splat, ((top * self._scale, bottom * self._scale), (left * self\n ._scale, right * self._scale), (0, 0)), mode='edge')\n", (726, 852), True, 'import numpy as np\n'), ((1090, 1111), 'numpy.fliplr', 'np.fliplr', (['self.splat'], {}), '(self.splat)\n', (1099, 1111), True, 'import numpy as np\n'), ((1168, 1189), 'numpy.flipud', 'np.flipud', (['self.splat'], {}), '(self.splat)\n', (1177, 1189), True, 'import numpy as np\n'), ((351, 396), 'DD.utils.PoolByteArray2NumpyArray', 'PoolByteArray2NumpyArray', (["self._json['splat']"], {}), "(self._json['splat'])\n", (375, 396), False, 'from DD.utils import PoolByteArray2NumpyArray, NumpyArray2PoolByteArray\n'), ((594, 619), 'numpy.prod', 'np.prod', (['self.splat.shape'], {}), '(self.splat.shape)\n', (601, 619), True, 'import numpy as np\n')]
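The `pad` method above leans on numpy's edge padding; a tiny standalone illustration of that behaviour on a toy splat-shaped array (the values are arbitrary):

import numpy as np

splat = np.arange(8, dtype=np.uint8).reshape(2, 2, 2)  # (rows, cols, terrain_types)
padded = np.pad(splat, ((1, 1), (0, 0), (0, 0)), mode='edge')
print(padded.shape)  # (4, 2, 2): one extra row replicated on top and bottom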
AbbyGi/bluesky
bluesky/tests/utils.py
759f9c55dce97dc47513cca749a69dd861bdf58d
from collections import defaultdict
import contextlib
import tempfile
import sys
import threading
import asyncio


@contextlib.contextmanager
def _print_redirect():
    old_stdout = sys.stdout
    try:
        fout = tempfile.TemporaryFile(mode="w+", encoding="utf-8")
        sys.stdout = fout
        yield fout
    finally:
        sys.stdout = old_stdout


class MsgCollector:
    def __init__(self, msg_hook=None):
        self.msgs = []
        self.msg_hook = msg_hook

    def __call__(self, msg):
        self.msgs.append(msg)
        if self.msg_hook:
            self.msg_hook(msg)


class DocCollector:
    def __init__(self):
        self.start = []
        self.stop = {}
        self.descriptor = defaultdict(list)
        self.event = {}

    def insert(self, name, doc):
        if name == "start":
            self.start.append(doc)
        elif name == "stop":
            self.stop[doc["run_start"]] = doc
        elif name == "descriptor":
            self.descriptor[doc["run_start"]].append(doc)
            self.event[doc["uid"]] = []
        elif name == 'bulk_events':
            for k, v in doc.items():
                self.event[k].extend(v)
        else:
            self.event[doc["descriptor"]].append(doc)


def _fabricate_asycio_event(loop):
    th_ev = threading.Event()
    aio_event = None

    def really_make_the_event():
        nonlocal aio_event
        aio_event = asyncio.Event()
        th_ev.set()

    h = loop.call_soon_threadsafe(really_make_the_event)
    if not th_ev.wait(0.1):
        h.cancel()
        raise Exception("failed to make asyncio event")
    return aio_event
[((1289, 1306), 'threading.Event', 'threading.Event', ([], {}), '()\n', (1304, 1306), False, 'import threading\n'), ((217, 268), 'tempfile.TemporaryFile', 'tempfile.TemporaryFile', ([], {'mode': '"""w+"""', 'encoding': '"""utf-8"""'}), "(mode='w+', encoding='utf-8')\n", (239, 268), False, 'import tempfile\n'), ((712, 729), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (723, 729), False, 'from collections import defaultdict\n'), ((1410, 1425), 'asyncio.Event', 'asyncio.Event', ([], {}), '()\n', (1423, 1425), False, 'import asyncio\n')]
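A small sketch of feeding documents into `DocCollector` by hand; the uids below are fabricated stand-ins for what bluesky's RunEngine would emit:

from bluesky.tests.utils import DocCollector

dc = DocCollector()
dc.insert("start", {"uid": "run1"})
dc.insert("descriptor", {"uid": "desc1", "run_start": "run1"})
dc.insert("event", {"uid": "ev1", "descriptor": "desc1"})
dc.insert("stop", {"uid": "stop1", "run_start": "run1"})
print(len(dc.descriptor["run1"]), len(dc.event["desc1"]))  # 1 1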
MJJojo97/openslides-backend
cli/check_json.py
af0d1edb0070e352d46f285a1ba0bbe3702d49ae
import json
import sys

from openslides_backend.models.checker import Checker, CheckException


def main() -> int:
    files = sys.argv[1:]
    if not files:
        print("No files specified.")
        return 1

    possible_modes = tuple(f"--{mode}" for mode in Checker.modes)
    modes = tuple(mode[2:] for mode in possible_modes if mode in files)
    if len(modes) == 0:
        mode = "all"
    elif len(modes) > 1:
        print(f"You can only choose one mode of {', '.join(possible_modes)}.")
        exit(1)
    else:
        mode = modes[0]

    if len(modes):
        files = [x for x in files if x not in possible_modes]

    failed = False
    for f in files:
        with open(f) as data:
            try:
                Checker(
                    json.load(data),
                    mode=mode,
                ).run_check()
            except CheckException as e:
                print(f"Check for {f} failed:\n", e)
                failed = True
            else:
                print(f"Check for {f} successful.")
    return 1 if failed else 0


if __name__ == "__main__":
    sys.exit(main())
[((765, 780), 'json.load', 'json.load', (['data'], {}), '(data)\n', (774, 780), False, 'import json\n')]
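The script above is driven purely through argv; a hedged example of invoking it programmatically (the JSON file name is a placeholder, and no --mode flag is passed so the default "all" mode applies):

import subprocess
import sys

result = subprocess.run(
    [sys.executable, "cli/check_json.py", "example-data.json"],
)
print("exit code:", result.returncode)  # 0 on success, 1 if any check failed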
robinagist/manic
utils/mgmt.py
b1bdefbb11c7489164a0c08bb092ffecb5900261
from utils.data import load_memfile_configs
from utils.server import plain_response
from sanic import response


def get_mappedfile_configs():
    cfgs = load_memfile_configs()
    return response.json(plain_response(cfgs, 0), status=200)


def created_mapped_file():
    pass


def delete_mapped_file():
    pass
[((156, 178), 'utils.data.load_memfile_configs', 'load_memfile_configs', ([], {}), '()\n', (176, 178), False, 'from utils.data import load_memfile_configs\n'), ((204, 227), 'utils.server.plain_response', 'plain_response', (['cfgs', '(0)'], {}), '(cfgs, 0)\n', (218, 227), False, 'from utils.server import plain_response\n')]
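A sketch of how the helper above might be wired into a Sanic route; the app name and URL here are invented for illustration:

from sanic import Sanic
from utils.mgmt import get_mappedfile_configs

app = Sanic("manic")

@app.route("/memfiles", methods=["GET"])
async def memfiles(request):
    # get_mappedfile_configs() already returns a sanic JSON response object.
    return get_mappedfile_configs()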
UpCoder/YNe
datasets/medicalImage.py
2f932456eda29b1e04f4c7e212e2ab0dacfe831b
# -*- coding=utf-8 -*-
import SimpleITK as itk
import pydicom
import numpy as np
from PIL import Image, ImageDraw
import gc
from skimage.morphology import disk, dilation
import nipy
import os
from glob import glob
import scipy
import cv2
from xml.dom.minidom import Document

typenames = ['CYST', 'FNH', 'HCC', 'HEM', 'METS']
typeids = [0, 1, 2, 3, 4]


def get_voxel_size(file_path):
    load_image_obj = nipy.load_image(file_path)
    header = load_image_obj.header
    x_size = header['srow_x'][0]
    y_size = header['srow_y'][1]
    z_size = header['srow_z'][2]
    return [x_size, y_size, z_size]


def read_nii(file_path):
    return nipy.load_image(file_path).get_data()


def read_nii_with_header(file_path):
    img_obj = nipy.load_image(file_path)
    header_obj = img_obj.header
    res_dict = {}
    res_dict['voxel_spacing'] = [header_obj['srow_x'][0], header_obj['srow_y'][1],
                                 header_obj['srow_z'][2]]
    img_arr = img_obj.get_data()
    return img_arr, res_dict


# 读取文件序列
def read_dicom_series(dir_name):
    reader = itk.ImageSeriesReader()
    dicom_series = reader.GetGDCMSeriesFileNames(dir_name)
    reader.SetFileNames(dicom_series)
    images = reader.Execute()
    image_array = itk.GetArrayFromImage(images)
    return image_array


# 将DICOM序列转化成MHD文件
def convert_dicomseries2mhd(dicom_series_dir, save_path):
    data = read_dicom_series(dicom_series_dir)
    save_mhd_image(data, save_path)


# 读取单个DICOM文件
def read_dicom_file(file_name):
    header = pydicom.read_file(file_name)
    image = header.pixel_array
    image = header.RescaleSlope * image + header.RescaleIntercept
    return image


# 读取mhd文件
def read_mhd_image(file_path, rejust=False):
    header = itk.ReadImage(file_path)
    image = np.array(itk.GetArrayFromImage(header))
    if rejust:
        image[image < -70] = -70
        image[image > 180] = 180
        image = image + 70
    return np.array(image)


# 保存mhd文件
def save_mhd_image(image, file_name):
    header = itk.GetImageFromArray(image)
    itk.WriteImage(header, file_name)


# 根据文件名返回期项名
def return_phasename(file_name):
    phasenames = ['NC', 'ART', 'PV']
    for phasename in phasenames:
        if file_name.find(phasename) != -1:
            return phasename


# 读取DICOM文件中包含的病例ID信息
def read_patientId(dicom_file_path):
    ds = pydicom.read_file(dicom_file_path)
    return ds.PatientID


# 返回病灶类型和ID的字典类型的数据 key是typename value是typeid
def return_type_nameid():
    res = {}
    res['CYST'] = 0
    res['FNH'] = 1
    res['HCC'] = 2
    res['HEM'] = 3
    res['METS'] = 4
    return res


# 返回病灶类型ID和名称的字典类型的数据 key是typeid value是typename
def return_type_idname():
    res = {}
    res[0] = 'CYST'
    res[1] = 'FNH'
    res[2] = 'HCC'
    res[3] = 'HEM'
    res[4] = 'METS'
    return res


# 根据病灶类型的ID返回类型的字符串
def return_typename_byid(typeid):
    idname_dict = return_type_idname()
    return idname_dict[typeid]


# 根据病灶类型的name返回id的字符串
def return_typeid_byname(typename):
    nameid_dict = return_type_nameid()
    return nameid_dict[typename]


# 填充图像
def fill_region(image):
    # image.show()
    from scipy import ndimage
    image = ndimage.binary_fill_holes(image).astype(np.uint8)
    return image


def close_operation(binary_image, kernel_size=5):
    kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (kernel_size, kernel_size))
    close_r = cv2.morphologyEx(binary_image, cv2.MORPH_CLOSE, kernel)
    return close_r


def open_operation(slice_image, kernel_size=3):
    opening = cv2.morphologyEx(slice_image, cv2.MORPH_OPEN,
                               cv2.getStructuringElement(cv2.MORPH_ELLIPSE,
                                                         (kernel_size, kernel_size)))
    return opening


def get_kernel_filters(kernel_size):
    '''
    返回进行kernel操作的5个模版 (1个是正常的dilated操作,还有四个是分别对四个方向进行单独进行dilated的操作)
    :param kernel_size:
    :return: [5, kernel_size, kernel_size]
    '''
    kernel_whole = np.ones([kernel_size, kernel_size], np.uint8)
    half_size = kernel_size // 2
    kernel_left = np.copy(kernel_whole)
    kernel_left[:, half_size + 1:] = 0
    kernel_right = np.copy(kernel_whole)
    kernel_right[:, :half_size] = 0
    kernel_top = np.copy(kernel_whole)
    kernel_top[half_size + 1:, :] = 0
    kernel_bottom = np.copy(kernel_whole)
    kernel_bottom[:half_size, :] = 0
    return np.concatenate([
        np.expand_dims(kernel_whole, axis=0),
        np.expand_dims(kernel_left, axis=0),
        np.expand_dims(kernel_right, axis=0),
        np.expand_dims(kernel_top, axis=0),
        np.expand_dims(kernel_bottom, axis=0),
    ], axis=0)


def image_erode(img, kernel_size=5):
    import cv2
    import numpy as np
    kernel = np.ones((kernel_size, kernel_size), np.uint8)
    erosion = cv2.erode(img, kernel, iterations=1)
    return erosion


def image_expand(img, kernel_size=5):
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size))
    image = cv2.dilate(img, kernel)
    return image


def image_erode(img, kernel_size=5):
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size))
    image = cv2.erode(img, kernel)
    return image


# 图像膨胀
# def image_expand(image, size):


def find_significant_layer(mask_image):
    '''
    找到显著层
    :param mask_image: [depth, width, height]
    :return: idx
    '''
    sum_res = np.sum(np.sum(mask_image, axis=1), axis=1)
    return np.argmax(sum_res)


# 将一个矩阵保存为图片
def save_image(image_arr, save_path):
    image = Image.fromarray(np.asarray(image_arr, np.uint8))
    image.save(save_path)


def show_image(image):
    img = np.asarray(image, np.uint8)
    import matplotlib.pyplot as plt
    plt.figure("Image")
    # 这里必须加 cmap='gray' ,否则尽管原图像是灰度图(下图1),但是显示的是伪彩色图像(下图2)(如果不加的话)
    plt.imshow(img, cmap='gray')
    plt.axis('on')
    plt.title('image')
    plt.show()


# 将图像画出来,并且画出标记的病灶
def save_image_with_mask(image_arr, mask_image, save_path):
    image_arr[image_arr < -70] = -70
    image_arr[image_arr > 180] = 180
    image_arr = image_arr + 70
    shape = list(np.shape(image_arr))
    image_arr_rgb = np.zeros(shape=[shape[0], shape[1], 3])
    image_arr_rgb[:, :, 0] = image_arr
    image_arr_rgb[:, :, 1] = image_arr
    image_arr_rgb[:, :, 2] = image_arr
    image = Image.fromarray(np.asarray(image_arr_rgb, np.uint8))
    image_draw = ImageDraw.Draw(image)
    [ys, xs] = np.where(mask_image != 0)
    miny = np.min(ys)
    maxy = np.max(ys)
    minx = np.min(xs)
    maxx = np.max(xs)
    ROI = image_arr_rgb[miny - 1:maxy + 1, minx - 1:maxx + 1, :]
    ROI_Image = Image.fromarray(np.asarray(ROI, np.uint8))
    for index, y in enumerate(ys):
        image_draw.point([xs[index], y], fill=(255, 0, 0))
    if save_path is None:
        image.show()
    else:
        image.save(save_path)
        ROI_Image.save(os.path.join(os.path.dirname(save_path),
                                    os.path.basename(save_path).split('.')[0] + '_ROI.jpg'))
    del image, ROI_Image
    gc.collect()


def compress22dim(image):
    '''
    将一个矩阵如果可能,压缩到三维的空间
    '''
    shape = list(np.shape(image))
    if len(shape) == 3:
        return np.squeeze(image)
    return image


def extract_ROI(image, mask_image):
    '''
    提取一幅图像中的ROI
    '''
    xs, ys = np.where(mask_image == 1)
    xs_min = np.min(xs)
    xs_max = np.max(xs)
    ys_min = np.min(ys)
    ys_max = np.max(ys)
    return image[xs_min: xs_max + 1, ys_min: ys_max + 1]


def resize_image(image, size):
    image = Image.fromarray(np.asarray(image, np.uint8))
    return image.resize((size, size))


# def image_expand(mask_image, r):
#     return dilation(mask_image, disk(r))


'''
将形式如(512, 512)格式的图像转化为(1, 512, 512)形式的图片
'''
def expand23D(mask_image):
    shape = list(np.shape(mask_image))
    if len(shape) == 2:
        mask_image = np.expand_dims(mask_image, axis=0)
    print('after expand23D', np.shape(mask_image))
    return mask_image


'''
返回一个mask图像的中心,是对xyz坐标计算平均值之后的结果
'''
def find_centroid3D(image, flag):
    [x, y, z] = np.where(image == flag)
    centroid_x = int(np.mean(x))
    centroid_y = int(np.mean(y))
    centroid_z = int(np.mean(z))
    return centroid_x, centroid_y, centroid_z


'''
将[w, h, d]reshape为[d, w, h]
'''
def convert2depthfirst(image):
    image = np.array(image)
    shape = np.shape(image)
    new_image = np.zeros([shape[2], shape[0], shape[1]])
    for i in range(shape[2]):
        new_image[i, :, :] = image[:, :, i]
    return new_image


# def test_convert2depthfirst():
#     zeros = np.zeros([100, 100, 30])
#     after_zeros = convert2depthfirst(zeros)
#     print np.shape(after_zeros)
# test_convert2depthfirst()


'''
将[d, w, h]reshape为[w, h, d]
'''
def convert2depthlastest(image):
    image = np.array(image)
    shape = np.shape(image)
    new_image = np.zeros([shape[1], shape[2], shape[0]])
    for i in range(shape[0]):
        new_image[:, :, i] = image[i, :, :]
    return new_image


def read_image_file(file_path):
    if file_path.endswith('.nii'):
        return read_nii(file_path)
    if file_path.endswith('.mhd'):
        return read_mhd_image(file_path)
    print('the format of image is not support in this version')
    return None


def processing(image, size_training):
    image = np.array(image)
    # numpy_clip
    bottom = -300.
    top = 500.
    image = np.clip(image, bottom, top)
    # to float
    minval = -350
    interv = 500 - (-350)
    image -= minval
    # scale down to 0 - 2
    image /= (interv / 2)
    # zoom
    desired_size = [size_training, size_training]
    desired_size = np.asarray(desired_size, dtype=np.int)
    zooms = desired_size / np.array(image[:, :, 0].shape, dtype=np.float)
    print(zooms)
    after_zoom = np.zeros([size_training, size_training, np.shape(image)[2]])
    for i in range(np.shape(after_zoom)[2]):
        after_zoom[:, :, i] = scipy.ndimage.zoom(image[:, :, i], zooms, order=1)  # order = 1 => biliniear interpolation
    return after_zoom


def preprocessing_agumentation(image, size_training):
    image = np.array(image)
    # numpy_clip
    c_minimum = -300.
    c_maximum = 500.
    s_maximum = 255.
    image = np.clip(image, c_minimum, c_maximum)
    interv = float(c_maximum - c_minimum)
    image = (image - c_minimum) / interv * s_maximum
    minval = 0.
    maxval = 255.
    image -= minval
    interv = maxval - minval
    # print('static scaler 0', interv)
    # scale down to 0 - 2
    # image /= (interv / 2)
    image = np.asarray(image, np.float32)
    image = image / interv
    image = image * 2.0
    # zoom
    desired_size = [size_training, size_training]
    desired_size = np.asarray(desired_size, dtype=np.int)
    zooms = desired_size / np.array(image[:, :, 0].shape, dtype=np.float)
    print(zooms)
    after_zoom = np.zeros([size_training, size_training, np.shape(image)[2]])
    for i in range(np.shape(after_zoom)[2]):
        after_zoom[:, :, i] = scipy.ndimage.zoom(image[:, :, i], zooms, order=1)  # order = 1 => biliniear interpolation
    return after_zoom


def MICCAI2018_Iterator(image_dir, execute_func, *parameters):
    '''
    遍历MICCAI2018文件夹的框架
    :param execute_func:
    :return:
    '''
    for sub_name in ['train', 'val', 'test']:
        names = os.listdir(os.path.join(image_dir, sub_name))
        for name in names:
            cur_slice_dir = os.path.join(image_dir, sub_name, name)
            execute_func(cur_slice_dir, *parameters)


def dicom2jpg_singlephase(slice_dir, save_dir, phase_name='PV'):
    mhd_image_path = glob(os.path.join(slice_dir, phase_name+'_Image*.mhd'))[0]
    mhd_mask_path = glob(os.path.join(slice_dir, phase_name + '_Mask*.mhd'))[0]
    mhd_image = read_mhd_image(mhd_image_path)
    mask_image = read_mhd_image(mhd_mask_path)
    mhd_image = np.asarray(np.squeeze(mhd_image), np.float32)
    mhd_image = np.expand_dims(mhd_image, axis=2)
    mhd_image = np.concatenate([mhd_image, mhd_image, mhd_image], axis=2)
    mask_image = np.asarray(np.squeeze(mask_image), np.uint8)
    max_v = 300.
    min_v = -350.
    mhd_image[mhd_image > max_v] = max_v
    mhd_image[mhd_image < min_v] = min_v
    print(np.mean(mhd_image, dtype=np.float32))
    mhd_image -= np.mean(mhd_image)
    min_v = np.min(mhd_image)
    max_v = np.max(mhd_image)
    interv = max_v - min_v
    mhd_image = (mhd_image - min_v) / interv
    file_name = os.path.basename(slice_dir)
    dataset_name = os.path.basename(os.path.dirname(slice_dir))
    save_path = os.path.join(save_dir, phase_name, dataset_name, file_name+'.jpg')
    if not os.path.exists(os.path.dirname(save_path)):
        os.makedirs(os.path.dirname(save_path))
    print('the shape of mhd_image is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image))
    cv2.imwrite(save_path, mhd_image * 255)

    xml_save_dir = os.path.join(save_dir, phase_name, dataset_name+'_xml')
    if not os.path.exists(xml_save_dir):
        os.makedirs(xml_save_dir)
    evulate_gt_dir = os.path.join(save_dir, phase_name, dataset_name+'_gt')
    if not os.path.exists(evulate_gt_dir):
        os.makedirs(evulate_gt_dir)
    xml_save_path = os.path.join(xml_save_dir, file_name + '.xml')
    gt_save_path = os.path.join(evulate_gt_dir, file_name + '.txt')  # for evulate
    doc = Document()
    root_node = doc.createElement('annotation')
    doc.appendChild(root_node)

    folder_name = os.path.basename(save_dir) + '/' + phase_name
    folder_node = doc.createElement('folder')
    root_node.appendChild(folder_node)
    folder_txt_node = doc.createTextNode(folder_name)
    folder_node.appendChild(folder_txt_node)

    file_name = file_name + '.jpg'
    filename_node = doc.createElement('filename')
    root_node.appendChild(filename_node)
    filename_txt_node = doc.createTextNode(file_name)
    filename_node.appendChild(filename_txt_node)

    shape = list(np.shape(mhd_image))
    size_node = doc.createElement('size')
    root_node.appendChild(size_node)
    width_node = doc.createElement('width')
    width_node.appendChild(doc.createTextNode(str(shape[0])))
    height_node = doc.createElement('height')
    height_node.appendChild(doc.createTextNode(str(shape[1])))
    depth_node = doc.createElement('depth')
    depth_node.appendChild(doc.createTextNode(str(3)))
    size_node.appendChild(width_node)
    size_node.appendChild(height_node)
    size_node.appendChild(depth_node)

    mask_image[mask_image != 1] = 0
    xs, ys = np.where(mask_image == 1)
    min_x = np.min(xs)
    min_y = np.min(ys)
    max_x = np.max(xs)
    max_y = np.max(ys)
    object_node = doc.createElement('object')
    root_node.appendChild(object_node)
    name_node = doc.createElement('name')
    name_node.appendChild(doc.createTextNode('Cyst'))
    object_node.appendChild(name_node)
    truncated_node = doc.createElement('truncated')
    object_node.appendChild(truncated_node)
    truncated_node.appendChild(doc.createTextNode('0'))
    difficult_node = doc.createElement('difficult')
    object_node.appendChild(difficult_node)
    difficult_node.appendChild(doc.createTextNode('0'))

    bndbox_node = doc.createElement('bndbox')
    object_node.appendChild(bndbox_node)
    xmin_node = doc.createElement('xmin')
    xmin_node.appendChild(doc.createTextNode(str(min_y)))
    bndbox_node.appendChild(xmin_node)
    ymin_node = doc.createElement('ymin')
    ymin_node.appendChild(doc.createTextNode(str(min_x)))
    bndbox_node.appendChild(ymin_node)
    xmax_node = doc.createElement('xmax')
    xmax_node.appendChild(doc.createTextNode(str(max_y)))
    bndbox_node.appendChild(xmax_node)
    ymax_node = doc.createElement('ymax')
    ymax_node.appendChild(doc.createTextNode(str(max_x)))
    bndbox_node.appendChild(ymax_node)

    with open(xml_save_path, 'wb') as f:
        f.write(doc.toprettyxml(indent='\t', encoding='utf-8'))
    line = '%s %d %d %d %d\n' % ('Cyst', min_y, min_x, max_y, max_x)
    print(line)
    lines = []
    lines.append(line)
    with open(gt_save_path, 'w') as f:
        f.writelines(lines)
        f.close()


def dicom2jpg_multiphase(slice_dir, save_dir, phasenames=['NC', 'ART', 'PV'],
                         target_phase='PV', suffix_name='npy'):
    target_mask = None
    mhd_images = []
    for phase_name in phasenames:
        mhd_image_path = glob(os.path.join(slice_dir, 'Image_%s*.mhd' % phase_name))[0]
        mhd_mask_path = glob(os.path.join(slice_dir, 'Mask_%s*.mhd' % phase_name))[0]
        mhd_image = read_mhd_image(mhd_image_path)
        mask_image = read_mhd_image(mhd_mask_path)
        mhd_image = np.asarray(np.squeeze(mhd_image), np.float32)
        mhd_images.append(mhd_image)
        mask_image = np.asarray(np.squeeze(mask_image), np.uint8)
        if phase_name == target_phase:
            target_mask = mask_image
    print(np.shape(mhd_images))
    mask_image = target_mask
    mask_image_shape = list(np.shape(mask_image))
    if len(mask_image_shape) == 3:
        mask_image = mask_image[1, :, :]
    print('the mask image shape is ', np.shape(mask_image))
    if suffix_name == 'jpg':
        mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0])
        mhd_image = mhd_images
    elif suffix_name == 'npy':
        mhd_images = np.concatenate(np.asarray(mhd_images, np.float), axis=0)
        mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0])
        mhd_image = mhd_images
    else:
        print('the suffix name does not support')
        assert False
    max_v = 300.
    min_v = -350.
    mhd_image[mhd_image > max_v] = max_v
    mhd_image[mhd_image < min_v] = min_v
    print(np.mean(mhd_image, dtype=np.float32))
    mhd_image -= np.mean(mhd_image)
    min_v = np.min(mhd_image)
    max_v = np.max(mhd_image)
    interv = max_v - min_v
    mhd_image = (mhd_image - min_v) / interv
    file_name = os.path.basename(slice_dir)
    dataset_name = os.path.basename(os.path.dirname(slice_dir))
    phase_name = ''.join(phasenames)
    save_path = os.path.join(save_dir, phase_name, dataset_name, file_name+'.' + suffix_name)
    if not os.path.exists(os.path.dirname(save_path)):
        os.makedirs(os.path.dirname(save_path))
    print('the shape of mhd_image is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image))
    #cv2.imwrite(save_path, mhd_image * 255)
    np.save(save_path, mhd_image * 255)

    xml_save_dir = os.path.join(save_dir, phase_name, dataset_name+'_xml')
    if not os.path.exists(xml_save_dir):
        os.makedirs(xml_save_dir)
    evulate_gt_dir = os.path.join(save_dir, phase_name, dataset_name+'_gt')
    if not os.path.exists(evulate_gt_dir):
        os.makedirs(evulate_gt_dir)
    xml_save_path = os.path.join(xml_save_dir, file_name + '.xml')
    gt_save_path = os.path.join(evulate_gt_dir, file_name + '.txt')  # for evulate
    doc = Document()
    root_node = doc.createElement('annotation')
    doc.appendChild(root_node)

    folder_name = os.path.basename(save_dir) + '/' + phase_name
    folder_node = doc.createElement('folder')
    root_node.appendChild(folder_node)
    folder_txt_node = doc.createTextNode(folder_name)
    folder_node.appendChild(folder_txt_node)

    file_name = file_name + '.jpg'
    filename_node = doc.createElement('filename')
    root_node.appendChild(filename_node)
    filename_txt_node = doc.createTextNode(file_name)
    filename_node.appendChild(filename_txt_node)

    shape = list(np.shape(mhd_image))
    size_node = doc.createElement('size')
    root_node.appendChild(size_node)
    width_node = doc.createElement('width')
    width_node.appendChild(doc.createTextNode(str(shape[0])))
    height_node = doc.createElement('height')
    height_node.appendChild(doc.createTextNode(str(shape[1])))
    depth_node = doc.createElement('depth')
    depth_node.appendChild(doc.createTextNode(str(3)))
    size_node.appendChild(width_node)
    size_node.appendChild(height_node)
    size_node.appendChild(depth_node)

    mask_image[mask_image != 1] = 0
    xs, ys = np.where(mask_image == 1)
    print(xs, ys)
    min_x = np.min(xs)
    min_y = np.min(ys)
    max_x = np.max(xs)
    max_y = np.max(ys)
    object_node = doc.createElement('object')
    root_node.appendChild(object_node)
    name_node = doc.createElement('name')
    name_node.appendChild(doc.createTextNode('Cyst'))
    object_node.appendChild(name_node)
    truncated_node = doc.createElement('truncated')
    object_node.appendChild(truncated_node)
    truncated_node.appendChild(doc.createTextNode('0'))
    difficult_node = doc.createElement('difficult')
    object_node.appendChild(difficult_node)
    difficult_node.appendChild(doc.createTextNode('0'))

    bndbox_node = doc.createElement('bndbox')
    object_node.appendChild(bndbox_node)
    xmin_node = doc.createElement('xmin')
    xmin_node.appendChild(doc.createTextNode(str(min_y)))
    bndbox_node.appendChild(xmin_node)
    ymin_node = doc.createElement('ymin')
    ymin_node.appendChild(doc.createTextNode(str(min_x)))
    bndbox_node.appendChild(ymin_node)
    xmax_node = doc.createElement('xmax')
    xmax_node.appendChild(doc.createTextNode(str(max_y)))
    bndbox_node.appendChild(xmax_node)
    ymax_node = doc.createElement('ymax')
    ymax_node.appendChild(doc.createTextNode(str(max_x)))
    bndbox_node.appendChild(ymax_node)

    with open(xml_save_path, 'wb') as f:
        f.write(doc.toprettyxml(indent='\t', encoding='utf-8'))
    line = '%s %d %d %d %d\n' % ('Cyst', min_y, min_x, max_y, max_x)
    print(line)
    lines = []
    lines.append(line)
    with open(gt_save_path, 'w') as f:
        f.writelines(lines)
        f.close()


def static_pixel_num(image_dir, target_phase='PV'):
    # {0: 217784361, 1: 1392043, 2: 209128, 3: 1486676, 4: 458278, 5: 705482}
    # {0: 1.0, 156, 1041, 146, 475, 308}
    static_res = {
        0: 0,
        1: 0,
        2: 0,
        3: 0,
        4: 0,
        5: 0
    }
    from convert2jpg import extract_bboxs_mask_from_mask
    from config import pixel2type, type2pixel
    for sub_name in ['train', 'val', 'test']:
        names = os.listdir(os.path.join(image_dir, sub_name))
        for name in names:
            cur_slice_dir = os.path.join(image_dir, sub_name, name)
            mhd_mask_path = glob(os.path.join(cur_slice_dir, 'Mask_%s*.mhd' % target_phase))[0]
            mask_image = read_mhd_image(mhd_mask_path)
            min_xs, min_ys, max_xs, max_ys, names, mask = extract_bboxs_mask_from_mask(
                mask_image, os.path.join(cur_slice_dir, 'tumor_types'))
            for key in pixel2type.keys():
                mask[mask == key] = type2pixel[pixel2type[key]][0]
            pixel_value_set = np.unique(mask)
            print pixel_value_set
            for value in list(pixel_value_set):
                static_res[value] += np.sum(mask == value)
    print(static_res)


def convertCase2PNGs(volume_path, seg_path, save_dir=None, z_axis=5.0, short_edge=64):
    '''
    将nii转化成PNG
    :param volume_path: nii的路径
    :param seg_path:
    :return:
    '''
    from skimage.measure import label
    volume, header = read_nii_with_header(volume_path)
    # volume = np.transpose(volume, [1, 0, 2])
    volume = np.asarray(volume, np.float32)
    max_v = 250.
    min_v = -200.
    # max_v = 180
    # min_v = -70
    volume[volume > max_v] = max_v
    volume[volume < min_v] = min_v
    volume -= np.mean(volume)
    min_v = np.min(volume)
    max_v = np.max(volume)
    interv = max_v - min_v
    volume = (volume - min_v) / interv
    z_axis_case = header['voxel_spacing'][-1]
    slice_num = int(z_axis / z_axis_case)
    if slice_num == 0:
        slice_num = 1
    seg = read_nii(seg_path)
    # print np.shape(volume), np.shape(seg)
    [_, _, channel] = np.shape(volume)
    imgs = []
    names = []
    masks = []
    tumor_weakly_masks = []
    liver_masks = []
    i = slice_num + 1
    pos_slice_num = np.sum(np.sum(np.sum(seg == 2, axis=0), axis=0) != 0)
    total_slice_num = np.shape(seg)[-1]
    print('pos_slice_num is ', pos_slice_num, total_slice_num)
    neg_rate = (3.0 * pos_slice_num) / total_slice_num  # 正样本是负样本的
    if neg_rate > 1.0:
        neg_rate = 1.0
    for i in range(channel):
        seg_slice = seg[:, :, i]
        mid_slice = np.expand_dims(volume[:, :, i], axis=0)
        pre_slice = []
        # pre_end = i - slice_num / 2
        # pre_end = i
        # for j in range(1, slice_num + 1):
        #     z = pre_end - j
        #     if z < 0:
        #         z = 0
        #     pre_slice.append(volume[:, :, z])
        if (i - 1) < 0:
            pre_slice = np.expand_dims(volume[:, :, i], axis=0)
        else:
            pre_slice = np.expand_dims(volume[:, :, i-1], axis=0)
        next_slice = []
        # next_start = i + slice_num / 2
        # next_start = i
        # for j in range(1, slice_num + 1):
        #     z = next_start + j
        #     if z >= channel:
        #         z = channel - 1
        #     next_slice.append(volume[:, :, z])
        if (i + 1) >= channel:
            next_slice = np.expand_dims(volume[:, :, i], axis=0)
        else:
            next_slice = np.expand_dims(volume[:, :, i+1], axis=0)
        # pre_slice = np.mean(pre_slice, axis=0, keepdims=True)
        # next_slice = np.mean(next_slice, axis=0, keepdims=True)
        imgs.append(
            np.transpose(np.concatenate([pre_slice, mid_slice, next_slice], axis=0), axes=[1, 2, 0]))
        names.append(os.path.basename(volume_path).split('.')[0].split('-')[1] + '-' + str(i))
        binary_seg_slice = np.asarray(seg_slice == 2, np.uint8)
        # print np.max(binary_seg_slice)
        masks.append(binary_seg_slice)
        labeled_mask = label(binary_seg_slice)
        weakly_label_mask = np.zeros_like(binary_seg_slice, np.uint8)
        for idx in range(1, np.max(labeled_mask) + 1):
            xs, ys = np.where(labeled_mask == idx)
            min_xs = np.min(xs)
            max_xs = np.max(xs)
            min_ys = np.min(ys)
            max_ys = np.max(ys)
            weakly_label_mask[min_xs: max_xs, min_ys: max_ys] = 1
        liver_masks.append(np.asarray(seg_slice == 1, np.uint8))
        tumor_weakly_masks.append(weakly_label_mask)
        # i += 1
    return np.asarray(imgs, np.float32), np.asarray(masks, np.uint8), np.asarray(liver_masks, np.uint8), np.asarray(
        tumor_weakly_masks, np.uint8)


def statics_num_slices_lesion(nii_dir):
    '''
    统计每个case,有多少slice具有病灶
    :param nii_dir:
    :return:
    '''
    mask_nii_paths = glob(os.path.join(nii_dir, 'segmentation-*.nii'))
    for mask_nii_path in mask_nii_paths:
        mask_img = read_nii(mask_nii_path)
        has_lesion = np.asarray(np.sum(np.sum(mask_img == 2, axis=0), axis=0) > 0, np.bool)
        num_lesion_slices = np.sum(has_lesion)
        print os.path.basename(mask_nii_path), num_lesion_slices, np.shape(mask_img)[-1]


if __name__ == '__main__':
    # for phasename in ['NC', 'ART', 'PV']:
    #     convert_dicomseries2mhd(
    #         '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/' + phasename,
    #         '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/MHD/' + phasename + '.mhd'
    #     )
    # names = os.listdir('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2')
    # for name in names:
    #     path = os.path.join('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2', name)
    #     image = read_nil(path)
    #     print(np.shape(image))

    # conver2JPG single phase
    # image_dir = '/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0'
    # save_dir = '/home/give/Documents/dataset/MICCAI2018_Detection/SinglePhase'
    # phase_name = 'NC'
    # MICCAI2018_Iterator(image_dir, dicom2jpg_singlephase, save_dir, phase_name)

    # conver2JPG multi phase
    # image_dir = '/home/give/Documents/dataset/LiverLesionDetection_Splited/0'
    # static_pixel_num(image_dir, 'PV')
    statics_num_slices_lesion('/media/give/CBMIR/ld/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2')
[]
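Several functions above repeat the same intensity-windowing idiom (clip HU values to a window, then rescale); a standalone numpy sketch of that step with the [-300, 500] window used in `preprocessing_agumentation`, on made-up values:

import numpy as np

volume = np.array([-1000.0, -300.0, 100.0, 500.0, 2000.0])  # toy HU values
volume = np.clip(volume, -300.0, 500.0)                     # window the scan
volume = (volume - (-300.0)) / 800.0 * 255.0                 # rescale to [0, 255]
print(volume)  # [   0.     0.   127.5  255.   255. ]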
marcus-luck/zohoreader
setup.py
e832f076a8a87bf27607980fb85a1d2bc8339743
from setuptools import setup


def readme():
    with open('README.rst') as f:
        return f.read()


setup(name='zohoreader',
      version='0.1',
      description='A simple reader for zoho projects API to get all projects, users and timereports',
      long_description=readme(),
      classifiers=[
          'Development Status :: 3 - Alpha',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python :: 3.5',
      ],
      keywords='zoho, API, zoho project',
      url='https://github.com/marcus-luck/zohoreader',
      author='Marcus Luck',
      author_email='[email protected]',
      license='MIT',
      packages=['zohoreader'],
      zip_safe=False,
      install_requires=[
          'requests>=2.12.4',
          'python-dateutil>=2.7.2'
      ],
      test_suite='nose.collector',
      tests_require=['nose', 'nose-cover3'],
      include_package_data=True
      )
[]
Ubehebe/rules_webtesting
web/repositories.bzl
c231866a3bccc0f27b31050a57dc2b4a700ad64e
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines external repositories needed by rules_webtesting."""

load("//web/internal:platform_http_file.bzl", "platform_http_file")
load("@bazel_gazelle//:deps.bzl", "go_repository")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:java.bzl", "java_import_external")

# NOTE: URLs are mirrored by an asynchronous review process. They must
#       be greppable for that to happen. It's OK to submit broken mirror
#       URLs, so long as they're correctly formatted. Bazel's downloader
#       has fast failover.

def web_test_repositories(**kwargs):
    """Defines external repositories required by Webtesting Rules.

    This function exists for other Bazel projects to call from their WORKSPACE
    file when depending on rules_webtesting using http_archive. This function
    makes it easy to import these transitive dependencies into the parent
    workspace. This will check to see if a repository has been previously
    defined before defining a new repository.

    Alternatively, individual dependencies may be excluded with an
    "omit_" + name parameter. This is useful for users who want to be rigorous
    about declaring their own direct dependencies, or when another Bazel
    project is depended upon (e.g. rules_closure) that defines the same
    dependencies as this one (e.g. com_google_guava.)

    Alternatively, a whitelist model may be used by calling the individual
    functions this method references.

    Please note that while these dependencies are defined, they are not
    actually downloaded, unless a target is built that depends on them.

    Args:
        **kwargs: omit_... parameters used to prevent importing specific
          dependencies.
    """
    if should_create_repository("bazel_skylib", kwargs):
        bazel_skylib()
    if should_create_repository("com_github_blang_semver", kwargs):
        com_github_blang_semver()
    if should_create_repository("com_github_gorilla_context", kwargs):
        com_github_gorilla_context()
    if should_create_repository("com_github_gorilla_mux", kwargs):
        com_github_gorilla_mux()
    if should_create_repository("com_github_tebeka_selenium", kwargs):
        com_github_tebeka_selenium()
    if should_create_repository("com_github_urllib3", kwargs):
        com_github_urllib3()
    if should_create_repository("com_google_code_findbugs_jsr305", kwargs):
        com_google_code_findbugs_jsr305()
    if should_create_repository("com_google_code_gson", kwargs):
        com_google_code_gson()
    if should_create_repository(
        "com_google_errorprone_error_prone_annotations",
        kwargs,
    ):
        com_google_errorprone_error_prone_annotations()
    if should_create_repository("com_google_guava", kwargs):
        com_google_guava()
    if should_create_repository("com_squareup_okhttp3_okhttp", kwargs):
        com_squareup_okhttp3_okhttp()
    if should_create_repository("com_squareup_okio", kwargs):
        com_squareup_okio()
    if should_create_repository("commons_codec", kwargs):
        commons_codec()
    if should_create_repository("commons_logging", kwargs):
        commons_logging()
    if should_create_repository("junit", kwargs):
        junit()
    if should_create_repository("net_bytebuddy", kwargs):
        net_bytebuddy()
    if should_create_repository("org_apache_commons_exec", kwargs):
        org_apache_commons_exec()
    if should_create_repository("org_apache_httpcomponents_httpclient", kwargs):
        org_apache_httpcomponents_httpclient()
    if should_create_repository("org_apache_httpcomponents_httpcore", kwargs):
        org_apache_httpcomponents_httpcore()
    if should_create_repository("org_hamcrest_core", kwargs):
        org_hamcrest_core()
    if should_create_repository("org_jetbrains_kotlin_stdlib", kwargs):
        org_jetbrains_kotlin_stdlib()
    if should_create_repository("org_json", kwargs):
        org_json()
    if should_create_repository("org_seleniumhq_py", kwargs):
        org_seleniumhq_py()
    if should_create_repository("org_seleniumhq_selenium_api", kwargs):
        org_seleniumhq_selenium_api()
    if should_create_repository("org_seleniumhq_selenium_remote_driver", kwargs):
        org_seleniumhq_selenium_remote_driver()
    if kwargs.keys():
        print("The following parameters are unknown: " + str(kwargs.keys()))

def should_create_repository(name, args):
    """Returns whether the name repository should be created.

    This allows creation of a repository to be disabled by either an
    "omit_" + name parameter or by previously defining a rule for the
    repository.

    The args dict will be mutated to remove "omit_" + name.

    Args:
        name: The name of the repository that should be checked.
        args: A dictionary that contains "omit_...": bool pairs.

    Returns:
        boolean indicating whether the repository should be created.
    """
    key = "omit_" + name
    if key in args:
        val = args.pop(key)
        if val:
            return False
    if native.existing_rule(name):
        return False
    return True

def browser_repositories(firefox = False, chromium = False, sauce = False):
    """Sets up repositories for browsers defined in //browsers/....

    This should only be used on an experimental basis; projects should define
    their own browsers.

    Args:
        firefox: Configure repositories for //browsers:firefox-native.
        chromium: Configure repositories for //browsers:chromium-native.
        sauce: Configure repositories for //browser/sauce:chrome-win10.
    """
    if chromium:
        org_chromium_chromedriver()
        org_chromium_chromium()
    if firefox:
        org_mozilla_firefox()
        org_mozilla_geckodriver()
    if sauce:
        com_saucelabs_sauce_connect()

def bazel_skylib():
    http_archive(
        name = "bazel_skylib",
        sha256 = "",
        strip_prefix = "bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a",
        urls = [
            "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz",
            "https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz",
        ],
    )

def com_github_blang_semver():
    go_repository(
        name = "com_github_blang_semver",
        importpath = "github.com/blang/semver",
        sha256 = "3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57",
        strip_prefix = "semver-3.5.1",
        urls = [
            "https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz",
            "https://github.com/blang/semver/archive/v3.5.1.tar.gz",
        ],
    )

def com_github_gorilla_context():
    go_repository(
        name = "com_github_gorilla_context",
        importpath = "github.com/gorilla/context",
        sha256 = "2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03",
        strip_prefix = "context-1.1.1",
        urls = [
            "https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz",
            "https://github.com/gorilla/context/archive/v1.1.1.tar.gz",
        ],
    )

def com_github_gorilla_mux():
    go_repository(
        name = "com_github_gorilla_mux",
        importpath = "github.com/gorilla/mux",
        sha256 = "0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3",
        strip_prefix = "mux-1.6.2",
        urls = [
            "https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz",
            "https://github.com/gorilla/mux/archive/v1.6.2.tar.gz",
        ],
    )

def com_github_tebeka_selenium():
    go_repository(
        name = "com_github_tebeka_selenium",
        importpath = "github.com/tebeka/selenium",
        sha256 = "c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd",
        strip_prefix = "selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a",
        urls = [
            "https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz",
            "https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz",
        ],
    )

def com_github_urllib3():
    http_archive(
        name = "com_github_urllib3",
        build_file = str(Label("//build_files:com_github_urllib3.BUILD")),
        sha256 = "a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf",
        strip_prefix = "urllib3-1.23",
        urls = [
            "https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz",
        ],
    )

def com_google_code_findbugs_jsr305():
    java_import_external(
        name = "com_google_code_findbugs_jsr305",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar",
            "https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar",
        ],
        jar_sha256 = "766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7",
        licenses = ["notice"],  # BSD 3-clause
    )

def com_google_code_gson():
    java_import_external(
        name = "com_google_code_gson",
        jar_sha256 = "233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar",
            "https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar",
        ],
        licenses = ["notice"],  # The Apache Software License, Version 2.0
    )

def com_google_errorprone_error_prone_annotations():
    java_import_external(
        name = "com_google_errorprone_error_prone_annotations",
        jar_sha256 = "10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar",
            "https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar",
        ],
        licenses = ["notice"],  # Apache 2.0
    )

def com_google_guava():
    java_import_external(
        name = "com_google_guava",
        jar_sha256 = "a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c",
        jar_urls = [
            # NOTE: the mirror URL originally read "guava-26.9-jre.jar", which
            # contradicts the 26.0-jre directory and canonical URL; fixed here.
            "https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar",
            "https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar",
        ],
        licenses = ["notice"],  # Apache 2.0
        exports = [
            "@com_google_code_findbugs_jsr305",
            "@com_google_errorprone_error_prone_annotations",
        ],
    )

def com_saucelabs_sauce_connect():
    platform_http_file(
        name = "com_saucelabs_sauce_connect",
        licenses = ["by_exception_only"],  # SauceLabs EULA
        amd64_sha256 = "dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52",
        amd64_urls = [
            "https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz",
        ],
        macos_sha256 = "920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66",
        macos_urls = [
            "https://saucelabs.com/downloads/sc-4.5.1-osx.zip",
        ],
        windows_sha256 = "ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5",
        windows_urls = [
            "https://saucelabs.com/downloads/sc-4.4.12-win32.zip",
        ],
    )

def com_squareup_okhttp3_okhttp():
    java_import_external(
        name = "com_squareup_okhttp3_okhttp",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar",
            "https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar",
        ],
        jar_sha256 = "a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e",
        licenses = ["notice"],  # Apache 2.0
        deps = [
            "@com_squareup_okio",
            "@com_google_code_findbugs_jsr305",
        ],
    )

def com_squareup_okio():
    java_import_external(
        name = "com_squareup_okio",
        jar_sha256 = "79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar",
            "https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar",
        ],
        licenses = ["notice"],  # Apache 2.0
        deps = [
            "@com_google_code_findbugs_jsr305",
            "@org_jetbrains_kotlin_stdlib",
        ],
    )

def commons_codec():
    java_import_external(
        name = "commons_codec",
        jar_sha256 = "e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar",
            "https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar",
        ],
        licenses = ["notice"],  # Apache License, Version 2.0
    )

def commons_logging():
    java_import_external(
        name = "commons_logging",
        jar_sha256 = "daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar",
            "https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar",
        ],
        licenses = ["notice"],  # The Apache Software License, Version 2.0
    )

def junit():
    java_import_external(
        name = "junit",
        jar_sha256 = "59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar",
            "https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar",
        ],
        licenses = ["reciprocal"],  # Eclipse Public License 1.0
        testonly_ = 1,
        deps = ["@org_hamcrest_core"],
    )

def net_bytebuddy():
    java_import_external(
        name = "net_bytebuddy",
        jar_sha256 = "4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar",
            "https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar",
        ],
        licenses = ["notice"],  # Apache 2.0
        deps = ["@com_google_code_findbugs_jsr305"],
    )

def org_apache_commons_exec():
    java_import_external(
        name = "org_apache_commons_exec",
        jar_sha256 = "cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar",
            "https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar",
        ],
        licenses = ["notice"],  # Apache License, Version 2.0
    )

def org_apache_httpcomponents_httpclient():
    java_import_external(
        name = "org_apache_httpcomponents_httpclient",
        jar_sha256 = "c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar",
            "https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar",
        ],
        licenses = ["notice"],  # Apache License, Version 2.0
        deps = [
            "@org_apache_httpcomponents_httpcore",
            "@commons_logging",
            "@commons_codec",
        ],
    )

def org_apache_httpcomponents_httpcore():
    java_import_external(
        name = "org_apache_httpcomponents_httpcore",
        jar_sha256 = "1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar",
            "https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar",
        ],
        licenses = ["notice"],  # Apache License, Version 2.0
    )

def org_chromium_chromedriver():
    platform_http_file(
        name = "org_chromium_chromedriver",
        licenses = ["reciprocal"],  # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT
        amd64_sha256 = "71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7",
        amd64_urls = [
            "https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip",
        ],
        macos_sha256 = "fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae",
        macos_urls = [
            "https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip",
        ],
        windows_sha256 = "a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e",
        windows_urls = [
            "https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip",
        ],
    )

def org_chromium_chromium():
    platform_http_file(
        name = "org_chromium_chromium",
        licenses = ["notice"],  # BSD 3-clause (maybe more?)
        amd64_sha256 = "6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8",
        amd64_urls = [
            "https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip",
        ],
        macos_sha256 = "084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32",
        macos_urls = [
            "https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip",
        ],
        windows_sha256 = "d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c",
        windows_urls = [
            "https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip",
        ],
    )

def org_hamcrest_core():
    java_import_external(
        name = "org_hamcrest_core",
        jar_sha256 = "66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar",
            "https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar",
        ],
        licenses = ["notice"],  # New BSD License
        testonly_ = 1,
    )

def org_jetbrains_kotlin_stdlib():
    java_import_external(
        name = "org_jetbrains_kotlin_stdlib",
        jar_sha256 = "62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar",
            "https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar",
        ],
        licenses = ["notice"],  # The Apache Software License, Version 2.0
    )

def org_json():
    java_import_external(
        name = "org_json",
        jar_sha256 = "518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar",
            "https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar",
        ],
        licenses = ["notice"],  # MIT-style license
    )

def org_mozilla_firefox():
    platform_http_file(
        name = "org_mozilla_firefox",
        licenses = ["reciprocal"],  # MPL 2.0
        amd64_sha256 = "3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de",
        amd64_urls = [
            "https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2",
            "https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2",
        ],
        macos_sha256 = "bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5",
        macos_urls = [
            "https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg",
            "https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg",
        ],
    )

def org_mozilla_geckodriver():
    platform_http_file(
        name = "org_mozilla_geckodriver",
        licenses = ["reciprocal"],  # MPL 2.0
        amd64_sha256 = "c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6",
        amd64_urls = [
            "https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz",
            "https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz",
        ],
        macos_sha256 = "ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce",
        macos_urls = [
            "https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz",
            "https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz",
        ],
    )

def org_seleniumhq_py():
    http_archive(
        name = "org_seleniumhq_py",
        build_file = str(Label("//build_files:org_seleniumhq_py.BUILD")),
        sha256 = "f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40",
        strip_prefix = "selenium-3.14.0",
        urls = [
            "https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz",
        ],
    )

def org_seleniumhq_selenium_api():
    java_import_external(
        name = "org_seleniumhq_selenium_api",
        jar_sha256 = "1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar",
            "https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar",
        ],
        licenses = ["notice"],  # The Apache Software License, Version 2.0
        testonly_ = 1,
    )

def org_seleniumhq_selenium_remote_driver():
    java_import_external(
        name = "org_seleniumhq_selenium_remote_driver",
        jar_sha256 = "284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2",
        jar_urls = [
            "https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar",
            "https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar",
        ],
        licenses = ["notice"],  # The Apache Software License, Version 2.0
        testonly_ = 1,
        deps = [
            "@com_google_code_gson",
            "@com_google_guava",
            "@net_bytebuddy",
            "@com_squareup_okhttp3_okhttp",
            "@com_squareup_okio",
            "@commons_codec",
            "@commons_logging",
            "@org_apache_commons_exec",
            "@org_apache_httpcomponents_httpclient",
            "@org_apache_httpcomponents_httpcore",
            "@org_seleniumhq_selenium_api",
        ],
    )
[]
santomon/taskonomy
code/tools/run_viz_single_task.py
4b22087a2686172b21b61589831061e7a386fe36
from __future__ import absolute_import, division, print_function

import argparse
import importlib
import itertools
import time
from multiprocessing import Pool
import numpy as np
import os
import pdb
import pickle
import subprocess
import sys
import tensorflow as tf
import tensorflow.contrib.slim as slim
import threading

import init_paths
from models.sample_models import *

target_tasks = "autoencoder colorization curvature denoise edge2d edge3d ego_motion fix_pose impainting_whole jigsaw keypoint2d keypoint3d non_fixated_pose point_match reshade rgb2depth rgb2mist rgb2sfnorm room_layout segment25d segment2d vanishing_point_well_defined segmentsemantic_rb class_selected class_1000"
list_of_tasks = target_tasks.split(" ")

ON_TEST_SET = True
IN_TRAIN_MODE = False

parser = argparse.ArgumentParser(description='Viz Single Task')
parser.add_argument('--idx', dest='idx', help='Task to run', type=int)
parser.add_argument('--hs', dest='hs', help='Hidden size to use', type=int)
parser.add_argument('--n-parallel', dest='n_parallel', help='Number of models to run in parallel', type=int)
parser.set_defaults(n_parallel=1)

tf.logging.set_verbosity(tf.logging.ERROR)

ipython_std_out = sys.stdout

# Disable print
def blockPrint():
    sys.stdout = open(os.devnull, 'w')

# Restore print
def enablePrint():
    sys.stdout = ipython_std_out

# Force print
def forcePrint(str):
    enablePrint()
    print(str)
    sys.stdout.flush()
    blockPrint()

def remove_dups(seq):
    seen = set()
    seen_add = seen.add
    return [x for x in seq if not (x in seen or seen_add(x))]

pairs = list(itertools.product(list_of_tasks, list_of_tasks))

args = parser.parse_args()
idx_to_run = args.idx
if idx_to_run == -1:
    pairs_to_run = pairs
else:
    pairs_to_run = pairs[idx_to_run:idx_to_run + 1]

def run_to_task(task_to):
    import general_utils
    from general_utils import RuntimeDeterminedEnviromentVars
    import models.architectures as architectures
    from data.load_ops import resize_rescale_image
    import utils
    from data.task_data_loading import load_and_specify_preprocessors_for_representation_extraction
    import lib.data.load_ops as load_ops

    tf.logging.set_verbosity(tf.logging.ERROR)

    all_outputs = {}
    pickle_dir = 'viz_output_single_task.pkl'
    import os
    if os.path.isfile(pickle_dir):
        with open(pickle_dir, 'rb') as fp:
            all_outputs = pickle.load(fp)

    for task in list_of_tasks:
        if task in all_outputs:
            print("{} already exists....\n\n\n".format(task))
            continue
        print("Doing {task}".format(task=task))

        general_utils = importlib.reload(general_utils)
        tf.reset_default_graph()
        training_runners = {
            'sess': tf.InteractiveSession(),
            'coord': tf.train.Coordinator()
        }

        # task = '{f}__{t}__{hs}'.format(f=task_from, t=task_to, hs=args.hs)
        CONFIG_DIR = '/home/ubuntu/task-taxonomy-331b/experiments/final/{TASK}'.format(TASK=task)

        ############## Load Configs ##############
        cfg = utils.load_config(CONFIG_DIR, nopause=True)
        RuntimeDeterminedEnviromentVars.register_dict(cfg)
        split_file = cfg['test_filenames'] if ON_TEST_SET else cfg['val_filenames']
        cfg['train_filenames'] = split_file
        cfg['val_filenames'] = split_file
        cfg['test_filenames'] = split_file
        cfg['num_epochs'] = 1
        cfg['randomize'] = False
        root_dir = cfg['root_dir']
        cfg['num_read_threads'] = 1
        print(cfg['log_root'])
        if task == 'jigsaw':
            continue
        cfg['model_path'] = os.path.join(cfg['log_root'], task, 'model.permanent-ckpt')
        print(cfg['model_path'])
        if cfg['model_path'] is None:
            continue

        ############## Set Up Inputs ##############
        # tf.logging.set_verbosity( tf.logging.INFO )
        inputs = utils.setup_input(cfg, is_training=ON_TEST_SET, use_filename_queue=False)
        # is_training determines whether to use train/validation
        RuntimeDeterminedEnviromentVars.load_dynamic_variables(inputs, cfg)
        RuntimeDeterminedEnviromentVars.populate_registered_variables()
        start_time = time.time()
        # utils.print_start_info(cfg, inputs['max_steps'], is_training=False)

        ############## Set Up Model ##############
        model = utils.setup_model(inputs, cfg, is_training=IN_TRAIN_MODE)
        m = model['model']
        model['saver_op'].restore(training_runners['sess'], cfg['model_path'])

        ############## Start dataloading workers ##############
        data_prefetch_init_fn = utils.get_data_prefetch_threads_init_fn(
            inputs, cfg, is_training=ON_TEST_SET, use_filename_queue=False)
        prefetch_threads = threading.Thread(
            target=data_prefetch_init_fn,
            args=(training_runners['sess'], training_runners['coord']))
        prefetch_threads.start()

        ############## Run First Batch ##############
        if not hasattr(m, 'masks'):
            (input_batch, target_batch, data_idx, predicted, loss,) = training_runners['sess'].run(
                [m.input_images, m.targets, model['data_idxs'], m.decoder_output, m.total_loss])
            mask_batch = 1.
        else:
            (input_batch, target_batch, mask_batch, data_idx, predicted, loss,) = training_runners['sess'].run(
                [m.input_images, m.targets, m.masks, model['data_idxs'], m.decoder_output, m.total_loss])

        if task == 'segment2d' or task == 'segment25d':
            from sklearn.decomposition import PCA
            x = np.zeros((32, 256, 256, 3), dtype='float')
            for i in range(predicted.shape[0]):
                embedding_flattened = np.squeeze(predicted[i]).reshape((-1, 64))
                pca = PCA(n_components=3)
                pca.fit(embedding_flattened)
                lower_dim = pca.transform(embedding_flattened).reshape((256, 256, -1))
                lower_dim = (lower_dim - lower_dim.min()) / (lower_dim.max() - lower_dim.min())
                x[i] = lower_dim
            predicted = x

        ############## Clean Up ##############
        training_runners['coord'].request_stop()
        training_runners['coord'].join()

        # if os.path.isfile(pickle_dir):
        #     with open(pickle_dir, 'rb') as fp:
        #         all_outputs = pickle.load(fp)

        ############## Store to dict ##############
        to_store = {
            'input': input_batch,
            'target': target_batch,
            'mask': mask_batch,
            'data_idx': data_idx,
            'output': predicted}
        all_outputs[task] = to_store
        print("Done: {}".format(task))

        # os.system("sudo cp {d} /home/ubuntu/s3/model_log".format(d=pickle_dir))

        ############## Reset graph and paths ##############
        tf.reset_default_graph()
        training_runners['sess'].close()
        try:
            del sys.modules['config']
        except:
            pass
        sys.path = remove_dups(sys.path)
        print("FINISHED: {}\n\n\n\n\n\n".format(task))

    pickle_dir = 'viz_output_single_task.pkl'
    with open(pickle_dir, 'wb') as fp:
        pickle.dump(all_outputs, fp)
    try:
        subprocess.call("aws s3 cp {} s3://task-preprocessing-512-oregon/visualizations/".format(pickle_dir), shell=True)
    except:
        subprocess.call("sudo cp {} /home/ubuntu/s3/visualizations/".format(pickle_dir), shell=True)
    return

if __name__ == '__main__':
    run_to_task(None)
    # with Pool(args.n_parallel) as p:
    #     p.map(run_to_task, list_of_tasks)
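# Usage sketch (not part of the original script): the script is driven by the
# argparse flags defined above, so a single visualization pass would typically
# be launched like this; the flag values are examples only.
#
#   python run_viz_single_task.py --idx 0 --hs 256
#
# --idx selects which (task_from, task_to) pair to run (-1 means all pairs),
# --hs sets the hidden size, and --n-parallel was intended for the
# commented-out multiprocessing Pool at the bottom of the file.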
[((790, 844), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Viz Single Task"""'}), "(description='Viz Single Task')\n", (813, 844), False, 'import argparse\n'), ((1198, 1240), 'tensorflow.logging.set_verbosity', 'tf.logging.set_verbosity', (['tf.logging.ERROR'], {}), '(tf.logging.ERROR)\n', (1222, 1240), True, 'import tensorflow as tf\n'), ((1478, 1496), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1494, 1496), False, 'import sys\n'), ((1654, 1701), 'itertools.product', 'itertools.product', (['list_of_tasks', 'list_of_tasks'], {}), '(list_of_tasks, list_of_tasks)\n', (1671, 1701), False, 'import itertools\n'), ((2240, 2282), 'tensorflow.logging.set_verbosity', 'tf.logging.set_verbosity', (['tf.logging.ERROR'], {}), '(tf.logging.ERROR)\n', (2264, 2282), True, 'import tensorflow as tf\n'), ((2372, 2398), 'os.path.isfile', 'os.path.isfile', (['pickle_dir'], {}), '(pickle_dir)\n', (2386, 2398), False, 'import os\n'), ((2713, 2744), 'importlib.reload', 'importlib.reload', (['general_utils'], {}), '(general_utils)\n', (2729, 2744), False, 'import importlib\n'), ((2753, 2777), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (2775, 2777), True, 'import tensorflow as tf\n'), ((3116, 3159), 'utils.load_config', 'utils.load_config', (['CONFIG_DIR'], {'nopause': '(True)'}), '(CONFIG_DIR, nopause=True)\n', (3133, 3159), False, 'import utils\n'), ((3170, 3220), 'general_utils.RuntimeDeterminedEnviromentVars.register_dict', 'RuntimeDeterminedEnviromentVars.register_dict', (['cfg'], {}), '(cfg)\n', (3215, 3220), False, 'from general_utils import RuntimeDeterminedEnviromentVars\n'), ((3681, 3740), 'os.path.join', 'os.path.join', (["cfg['log_root']", 'task', '"""model.permanent-ckpt"""'], {}), "(cfg['log_root'], task, 'model.permanent-ckpt')\n", (3693, 3740), False, 'import os\n'), ((4021, 4094), 'utils.setup_input', 'utils.setup_input', (['cfg'], {'is_training': 'ON_TEST_SET', 'use_filename_queue': '(False)'}), '(cfg, is_training=ON_TEST_SET, use_filename_queue=False)\n', (4038, 4094), False, 'import utils\n'), ((4162, 4229), 'general_utils.RuntimeDeterminedEnviromentVars.load_dynamic_variables', 'RuntimeDeterminedEnviromentVars.load_dynamic_variables', (['inputs', 'cfg'], {}), '(inputs, cfg)\n', (4216, 4229), False, 'from general_utils import RuntimeDeterminedEnviromentVars\n'), ((4240, 4303), 'general_utils.RuntimeDeterminedEnviromentVars.populate_registered_variables', 'RuntimeDeterminedEnviromentVars.populate_registered_variables', ([], {}), '()\n', (4301, 4303), False, 'from general_utils import RuntimeDeterminedEnviromentVars\n'), ((4325, 4336), 'time.time', 'time.time', ([], {}), '()\n', (4334, 4336), False, 'import time\n'), ((4487, 4544), 'utils.setup_model', 'utils.setup_model', (['inputs', 'cfg'], {'is_training': 'IN_TRAIN_MODE'}), '(inputs, cfg, is_training=IN_TRAIN_MODE)\n', (4504, 4544), False, 'import utils\n'), ((4760, 4868), 'utils.get_data_prefetch_threads_init_fn', 'utils.get_data_prefetch_threads_init_fn', (['inputs', 'cfg'], {'is_training': 'ON_TEST_SET', 'use_filename_queue': '(False)'}), '(inputs, cfg, is_training=\n ON_TEST_SET, use_filename_queue=False)\n', (4799, 4868), False, 'import utils\n'), ((4907, 5018), 'threading.Thread', 'threading.Thread', ([], {'target': 'data_prefetch_init_fn', 'args': "(training_runners['sess'], training_runners['coord'])"}), "(target=data_prefetch_init_fn, args=(training_runners[\n 'sess'], training_runners['coord']))\n", (4923, 5018), False, 'import threading\n'), ((7255, 
7279), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (7277, 7279), True, 'import tensorflow as tf\n'), ((2470, 2485), 'pickle.load', 'pickle.load', (['fp'], {}), '(fp)\n', (2481, 2485), False, 'import pickle\n'), ((2815, 2838), 'tensorflow.InteractiveSession', 'tf.InteractiveSession', ([], {}), '()\n', (2836, 2838), True, 'import tensorflow as tf\n'), ((2849, 2871), 'tensorflow.train.Coordinator', 'tf.train.Coordinator', ([], {}), '()\n', (2869, 2871), True, 'import tensorflow as tf\n'), ((5964, 6006), 'numpy.zeros', 'np.zeros', (['(32, 256, 256, 3)'], {'dtype': '"""float"""'}), "((32, 256, 256, 3), dtype='float')\n", (5972, 6006), True, 'import numpy as np\n'), ((7609, 7637), 'pickle.dump', 'pickle.dump', (['all_outputs', 'fp'], {}), '(all_outputs, fp)\n', (7620, 7637), False, 'import pickle\n'), ((6154, 6173), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': '(3)'}), '(n_components=3)\n', (6157, 6173), False, 'from sklearn.decomposition import PCA\n'), ((6090, 6114), 'numpy.squeeze', 'np.squeeze', (['predicted[i]'], {}), '(predicted[i])\n', (6100, 6114), True, 'import numpy as np\n')]
cholve/stratum
stratum/portage/build_defs.bzl
09ddb5acb604f7e694a6b7d2fe93fea79f801794
# Copyright 2018 Google LLC
# Copyright 2018-present Open Networking Foundation
# SPDX-License-Identifier: Apache-2.0

"""A portable build system for Stratum P4 switch stack.

To use this, load() this file in a BUILD file, specifying the symbols needed.

The public symbols are the macros:

  decorate(path)
  sc_cc_lib     Declare a portable Library.
  sc_proto_lib  Declare a portable .proto Library.
  sc_cc_bin     Declare a portable Binary.
  sc_package    Declare a portable tarball package.

and the variables/lists:

  ALL_ARCHES        All known arches.
  EMBEDDED_ARCHES   All embedded arches.
  EMBEDDED_PPC      Name of PowerPC arch - "ppc".
  EMBEDDED_X86      Name of "x86" arch.
  HOST_ARCH         Name of default "host" arch.
  HOST_ARCHES       All host arches.
  STRATUM_INTERNAL  For declaring Stratum internal visibility.

The macros are like cc_library(), proto_library(), and cc_binary(), but with
different options and some restrictions. The key difference: you can supply
lists of architectures for which they should be compiled - defaults to all
if left unstated.

Internally, libraries and binaries are generated for every listed
architecture. The names are decorated to keep them different and allow all to
be generated and addressed independently.

This aspect of the system is suboptimal - something along the lines of
augmenting context with a user defined configuration fragment would be a much
cleaner solution.

Currently supported architectures:
  ppc
  x86
"""

load("//tools/build_defs/label:def.bzl", "parse_label")
load(
    "//devtools/build_cleaner/skylark:build_defs.bzl",
    "register_extension_info",
)
load("@rules_proto//proto:defs.bzl", "proto_library")
load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_test")

# Generic path & label helpers. ============================================

def _normpath(path):
    """Normalize a path.

    Normalizes a path by removing unnecessary path-up segments and its
    corresponding directories. Providing own implementation because import os
    is not allowed in build defs.

    For example
      ../../dir/to/deeply/nested/path/../../../other/path
    will become
      ../../dir/to/other/path

    Args:
      path: A valid absolute or relative path to normalize.

    Returns:
      A path equivalent to the input path with minimal use of path-up
      segments. Invalid input paths will stay invalid.
    """
    sep = "/"
    level = 0
    result = []
    for d in path.split(sep):
        if d in ("", "."):
            if result:
                continue
        elif d == "..":
            if level > 0:
                result.pop()
                level += -1
                continue
        else:
            level += 1
        result.append(d)
    return sep.join(result)

# Adds a suffix to a label, expanding implicit targets if needed.
def decorate(label, suffix):
    if label.endswith(":"):  # .../bar: -> .../bar
        label = label[:-1]
    if ":" in label:  # .../bar:bat -> .../bar:bat_suffix
        return "%s_%s" % (label, suffix)
    elif label.startswith("//"):  # //foo/bar -> //foo/bar:bar_suffix
        return "%s:%s_%s" % (label, label.split("/")[-1], suffix)
    else:  # bar -> bar_suffix
        return "%s_%s" % (label, suffix)

# Creates a relative filename from a label, replacing "//" and ":".
def _make_filename(label):
    if label.startswith("//"):  # //foo/bar:bat/baz -> google3_foo/bar/bat/baz
        return label.replace("//", "google3/").replace(":", "/")
    elif label.startswith(":"):  # :bat/baz -> bat/baz
        return label[1:]
    else:  # bat/baz -> bat/baz
        return label

# Adds dquotes around a string.
def dquote(s):
    return '"' + s + '"'

# Adds squotes around a string.
def squote(s):
    return "'" + s + "'"

# Emulate Python 2.5+ str.startswith((prefix, ...))
def starts_with(s, prefix_list):
    for prefix in prefix_list:
        if s.startswith(prefix):
            return prefix
    return None

def sc_platform_select(host = None, ppc = None, x86 = None, default = None):
    """Public macro to alter blaze rules based on the platform architecture.

    Generates a blaze select(...) statement that can be used in most contexts
    to alter a blaze rule based on the target platform architecture. If no
    selection is provided for a given platform, {default} is used instead. A
    specific value or default must be provided for every target platform.

    Args:
      host: The value to use for host builds.
      ppc: The value to use for ppc builds.
      x86: The value to use for x86 builds.
      default: The value to use for any of {host,ppc,x86} that isn't specified.

    Returns:
      The requested selector.
    """
    if default == None and (host == None or ppc == None or x86 == None):
        fail("Missing a select value for at least one platform in " +
             "sc_platform_select. Please add.")
    config_label_prefix = "//stratum:stratum_"
    return select({
        "//conditions:default": (host or default),
        config_label_prefix + "ppc": (ppc or default),
        config_label_prefix + "x86": (x86 or default),
    })

# Generates an sc_platform_select based on a textual list of arches.
def sc_platform_filter(value, default, arches):
    return sc_platform_select(
        host = value if "host" in arches else default,
        ppc = value if "ppc" in arches else default,
        x86 = value if "x86" in arches else default,
    )

def sc_platform_alias(
        name,
        host = None,
        ppc = None,
        x86 = None,
        default = None,
        visibility = None):
    """Public macro to create an alias that changes based on target arch.

    Generates a blaze alias that will select the appropriate target. If no
    selection is provided for a given platform and no default is set, a
    dummy default target is used instead.

    Args:
      name: The name of the alias target.
      host: The result of the alias for host builds.
      ppc: The result of the alias for ppc builds.
      x86: The result of the alias for x86 builds.
      default: The result of the alias for any of {host,ppc,x86} that isn't
        specified.
      visibility: The visibility of the alias target.
    """
    native.alias(
        name = name,
        actual = sc_platform_select(
            default = default or "//stratum/portage:dummy",
            host = host,
            ppc = ppc,
            x86 = x86,
        ),
        visibility = visibility,
    )

# Embedded build definitions. ==============================================

EMBEDDED_PPC = "ppc"

EMBEDDED_X86 = "x86"

EMBEDDED_ARCHES = [
    EMBEDDED_PPC,
    EMBEDDED_X86,
]

HOST_ARCH = "host"

HOST_ARCHES = [HOST_ARCH]

ALL_ARCHES = EMBEDDED_ARCHES + HOST_ARCHES

# Identify Stratum platform arch for .pb.h shims and other portability hacks.
_ARCH_DEFINES = sc_platform_select(
    default = ["STRATUM_ARCH_HOST"],
    ppc = ["STRATUM_ARCH_PPC"],
    x86 = ["STRATUM_ARCH_X86"],
)

STRATUM_INTERNAL = [
    "//stratum:__subpackages__",
]

#
# Build options for all embedded architectures
#

# Set _TRACE_SRCS to show sources in embedded sc_cc_lib compile steps.
# This is more general than it may seem: genrule doesn't have hdrs or deps
# attributes, so all embedded dependencies appear as a `src'.
# TODO(unknown): if useful again then inject from cmdline else kill feature.
_TRACE_SRCS = False

# Used for all gcc invocations.
_EMBEDDED_FLAGS = [
    "-O0",  # Don't use this for program-sizing build
    #-- "-Os",  # Use this for program-sizing build
    "-g",  # Don't use this for program-sizing build
    "-Wall",
    "-Werror",  # Warn lots, and force fixing warnings.
    "-no-canonical-prefixes",  # Don't mangle paths and confuse blaze.
"-fno-builtin-malloc", # We'll use tcmalloc "-fno-builtin-calloc", "-fno-builtin-realloc", "-fno-builtin-free", "-D__STDC_FORMAT_MACROS=1", # TODO(unknown): Figure out how we can use $(CC_FLAGS) instead of this. "-D__GOOGLE_STL_LEGACY_COMPATIBILITY", ] # Used for C and C++ compiler invocations. _EMBEDDED_CFLAGS = [ "-I$(GENDIR)", ] # Used for C++ compiler invocations. _EMBEDDED_CXXFLAGS = [ "-std=gnu++11", # Allow C++11 features _and_ GNU extensions. ] # Used for linking binaries. _EMBEDDED_LDFLAGS = [ # "-static", # Use this for program-sizing build # "-Wl,--gc-sections,--no-wchar-size-warning", # Use this for program-sizing build ] # PPC ====================================================================== _PPC_GRTE = "//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot" # X86 ====================================================================== _X86_GRTE = "//grte/v4_x86/release/usr/grte/v4" # Portability definitions =================================================== def sc_cc_test( name, size = None, srcs = None, deps = None, data = None, defines = None, copts = None, linkopts = None, visibility = None): """Creates a cc_test rule that interacts safely with Stratum builds. Generates a cc_test rule that doesn't break the build when an embedded arch is selected. During embedded builds this target will generate a dummy binary and will not attempt to build any dependencies. Args: name: Analogous to cc_test name argument. size: Analogous to cc_test size argument. srcs: Analogous to cc_test srcs argument. deps: Analogous to cc_test deps argument. data: Analogous to cc_test data argument. defines: Analogous to cc_test defines argument. copts: Analogous to cc_test copts argument. linkopts: Analogous to cc_test linkopts argument. visibility: Analogous to cc_test visibility argument. """ cc_test( name = name, size = size or "small", srcs = sc_platform_select(host = srcs or [], default = []), deps = sc_platform_select( host = deps or [], default = ["//stratum/portage:dummy_with_main"], ), data = data or [], defines = defines, copts = copts, linkopts = linkopts, visibility = visibility, ) register_extension_info( extension_name = "sc_cc_test", label_regex_for_dep = "{extension_name}", ) def sc_cc_lib( name, deps = None, srcs = None, hdrs = None, arches = None, copts = None, defines = None, includes = None, include_prefix = None, strip_include_prefix = None, data = None, testonly = None, textual_hdrs = None, visibility = None, xdeps = None): """Creates rules for the given portable library and arches. Args: name: Analogous to cc_library name argument. deps: Analogous to cc_library deps argument. srcs: Analogous to cc_library srcs argument. hdrs: Analogous to cc_library hdrs argument. arches: List of architectures to generate this way. copts: Analogous to cc_library copts argument. defines: Symbols added as "-D" compilation options. includes: Paths to add as "-I" compilation options. include_prefix: Analogous to cc_library include_prefix argument. strip_include_prefix: Analogous to cc_library strip_include_prefix argument. data: Files to provide as data at runtime (host builds only). testonly: Standard blaze testonly parameter. textual_hdrs: Analogous to cc_library. visibility: Standard blaze visibility parameter. xdeps: External (file) dependencies of this library - no decorations assumed, used and exported as header, not for flags, libs, etc. 
""" alwayslink = 0 deps = depset(deps or []) srcs = depset(srcs or []) hdrs = depset(hdrs or []) xdeps = depset(xdeps or []) copts = depset(copts or []) includes = depset(includes or []) data = depset(data or []) textual_hdrs = depset(textual_hdrs or []) if srcs: if [s for s in srcs.to_list() if not s.endswith(".h")]: alwayslink = 1 if not arches: arches = ALL_ARCHES defs_plus = (defines or []) + _ARCH_DEFINES textual_plus = textual_hdrs | depset(deps.to_list()) cc_library( name = name, deps = sc_platform_filter(deps, [], arches), srcs = sc_platform_filter(srcs, [], arches), hdrs = sc_platform_filter(hdrs, [], arches), alwayslink = alwayslink, copts = sc_platform_filter(copts, [], arches), defines = defs_plus, includes = sc_platform_filter(includes, [], arches), include_prefix = include_prefix, strip_include_prefix = strip_include_prefix, testonly = testonly, textual_hdrs = sc_platform_filter( textual_plus | xdeps, [], arches, ), data = sc_platform_filter(data, [], arches), visibility = visibility, ) register_extension_info( extension_name = "sc_cc_lib", label_regex_for_dep = "{extension_name}", ) def sc_cc_bin( name, deps = None, srcs = None, arches = None, copts = None, defines = None, includes = None, testonly = None, visibility = None): """Creates rules for the given portable binary and arches. Args: name: Analogous to cc_binary name argument. deps: Analogous to cc_binary deps argument. srcs: Analogous to cc_binary srcs argument. arches: List of architectures to generate this way. copts: Analogous to cc_binary copts argument. defines: Symbols added as "-D" compilation options. includes: Paths to add as "-I" compilation options. testonly: Standard blaze testonly parameter. visibility: Standard blaze visibility parameter. """ deps = depset(deps or []) srcs = depset(srcs or []) if not arches: arches = ALL_ARCHES defs_plus = (defines or []) + _ARCH_DEFINES cc_binary( name = name, deps = sc_platform_filter( deps, ["//stratum/portage:dummy_with_main"], arches, ), srcs = sc_platform_filter(srcs, [], arches), copts = copts, defines = defs_plus, includes = includes, linkopts = ["-ldl", "-lutil"], testonly = testonly, visibility = visibility, ) register_extension_info( extension_name = "sc_cc_bin", label_regex_for_dep = "{extension_name}", ) # Protobuf ================================================================= _SC_GRPC_DEPS = [ "//sandblaze/prebuilt/grpc", "//sandblaze/prebuilt/grpc:grpc++_codegen_base", "//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib", ] _PROTOC = "@com_google_protobuf//:protobuf:protoc" _PROTOBUF = "@com_google_protobuf//:protobuf" _SC_GRPC_PLUGIN = "//sandblaze/prebuilt/protobuf:grpc_cpp_plugin" _GRPC_PLUGIN = "//grpc:grpc_cpp_plugin" def _loc(target): """Return target location for constructing commands. Args: target: Blaze target name available to this build. Returns: $(location target) """ return "$(location %s)" % target def _gen_proto_lib( name, srcs, hdrs, deps, arch, visibility, testonly, proto_include, grpc_shim_rule): """Creates rules and filegroups for embedded protobuf library. For every given ${src}.proto, generate: :${src}_${arch}.pb rule to run protoc ${src}.proto => ${src}.${arch}.pb.{h,cc} :${src}_${arch}.grpc.pb rule to run protoc w/ erpc plugin: ${src}.proto => ${src}.${arch}.grpc.pb.{h,cc} :${src}_${arch}_proto_rollup collects include options for protoc: ${src}_${arch}_proto_rollup.flags Feed each set into sc_cc_lib to wrap them them up into a usable library; note that ${src}_${arch}_erpc_proto depends on ${src}_${arch}_proto. 
Args: name: Base name for this library. srcs: List of proto files hdrs: More files to build into this library, but also exported for dependent rules to utilize. deps: List of deps for this library arch: Which architecture to build this library for. visibility: Standard blaze visibility parameter, passed through to subsequent rules. testonly: Standard blaze testonly parameter. proto_include: Include path for generated sc_cc_libs. grpc_shim_rule: If needed, the name of the grpc shim for this proto lib. """ bash_vars = ["g3=$${PWD}"] # TODO(unknown): Switch protobuf to using the proto_include mechanism protoc_label = _PROTOC protobuf_label = _PROTOBUF protobuf_hdrs = "%s:well_known_types_srcs" % protobuf_label protobuf_srcs = [protobuf_hdrs] protobuf_include = "$${g3}/protobuf/src" if arch in EMBEDDED_ARCHES: grpc_plugin = _SC_GRPC_PLUGIN else: grpc_plugin = _GRPC_PLUGIN protoc_deps = [] for dep in deps: if dep.endswith("_proto"): protoc_deps.append("%s_%s_headers" % (dep, arch)) name_arch = decorate(name, arch) # We use this filegroup to accumulate the set of .proto files needed to # compile this proto. native.filegroup( name = decorate(name_arch, "headers"), srcs = hdrs + protoc_deps, visibility = visibility, ) my_proto_rollup = decorate(name_arch, "proto_rollup.flags") protoc_srcs_set = (srcs + hdrs + protoc_deps + protobuf_srcs + [my_proto_rollup]) gen_srcs = [] gen_hdrs = [] grpc_gen_hdrs = [] grpc_gen_srcs = [] tools = [protoc_label] grpc_tools = [protoc_label, grpc_plugin] protoc = "$${g3}/%s" % _loc(protoc_label) grpc_plugin = "$${g3}/%s" % _loc(grpc_plugin) cpp_out = "$${g3}/$(GENDIR)/%s/%s" % (native.package_name(), arch) accum_flags = [] full_proto_include = None if proto_include == ".": full_proto_include = native.package_name() elif proto_include: full_proto_include = "%s/%s" % (native.package_name(), proto_include) if full_proto_include: temp_prefix = "%s/%s" % (cpp_out, native.package_name()[len(full_proto_include):]) # We do a bit of extra work with these include flags to avoid generating # warnings. accum_flags.append( "$$(if [[ -e $(GENDIR)/%s ]]; then echo -IG3LOC/$(GENDIR)/%s; fi)" % (full_proto_include, full_proto_include), ) accum_flags.append( "$$(if [[ -e %s ]]; then echo -IG3LOC/%s; fi)" % (full_proto_include, full_proto_include), ) else: temp_prefix = "%s/%s" % (cpp_out, native.package_name()) proto_rollups = [ decorate(decorate(dep, arch), "proto_rollup.flags") for dep in deps if dep.endswith("_proto") ] proto_rollup_cmds = ["printf '%%s\n' %s" % flag for flag in accum_flags] proto_rollup_cmds.append("cat $(SRCS)") proto_rollup_cmd = "{ %s; } | sort -u -o $(@)" % "; ".join(proto_rollup_cmds) native.genrule( name = decorate(name_arch, "proto_rollup"), srcs = proto_rollups, outs = [my_proto_rollup], cmd = proto_rollup_cmd, visibility = visibility, testonly = testonly, ) for src in srcs + hdrs: if src.endswith(".proto"): src_stem = src[0:-6] src_arch = "%s_%s" % (src_stem, arch) temp_stem = "%s/%s" % (temp_prefix, src_stem) gen_stem = "%s.%s" % (src_stem, arch) # We can't use $${PWD} until this step, because our rollup command # might be generated on another forge server. proto_path_cmds = ["rollup=$$(sed \"s,G3LOC,$${PWD},g\" %s)" % _loc(my_proto_rollup)] proto_rollup_flags = ["$${rollup}"] if proto_include: # We'll be cd-ing to another directory before protoc, so # adjust our .proto path accordingly. 
proto_src_loc = "%s/%s" % (native.package_name(), src) if proto_src_loc.startswith(full_proto_include + "/"): proto_src_loc = proto_src_loc[len(full_proto_include) + 1:] else: print("Invalid proto include '%s' doesn't match src %s" % (full_proto_include, proto_src_loc)) # By cd-ing to another directory, we force protoc to produce # different symbols. Careful, our proto might be in GENDIR! proto_path_cmds.append("; ".join([ "if [[ -e %s ]]" % ("%s/%s" % (full_proto_include, proto_src_loc)), "then cd %s" % full_proto_include, "else cd $(GENDIR)/%s" % full_proto_include, "fi", ])) gendir_include = ["-I$${g3}/$(GENDIR)", "-I$${g3}", "-I."] else: proto_src_loc = "%s/%s" % (native.package_name(), src) proto_path_cmds.append("[[ -e %s ]] || cd $(GENDIR)" % proto_src_loc) gendir_include = ["-I$(GENDIR)", "-I."] # Generate messages gen_pb_h = gen_stem + ".pb.h" gen_pb_cc = gen_stem + ".pb.cc" gen_hdrs.append(gen_pb_h) gen_srcs.append(gen_pb_cc) cmds = bash_vars + [ "mkdir -p %s" % temp_prefix, ] + proto_path_cmds + [ " ".join([protoc] + gendir_include + proto_rollup_flags + [ "-I%s" % protobuf_include, "--cpp_out=%s" % cpp_out, proto_src_loc, ]), "cd $${g3}", "cp %s.pb.h %s" % (temp_stem, _loc(gen_pb_h)), "cp %s.pb.cc %s" % (temp_stem, _loc(gen_pb_cc)), ] pb_outs = [gen_pb_h, gen_pb_cc] native.genrule( name = src_arch + ".pb", srcs = protoc_srcs_set, outs = pb_outs, tools = tools, cmd = " && ".join(cmds), heuristic_label_expansion = 0, visibility = visibility, ) # Generate GRPC if grpc_shim_rule: gen_grpc_pb_h = gen_stem + ".grpc.pb.h" gen_grpc_pb_cc = gen_stem + ".grpc.pb.cc" grpc_gen_hdrs.append(gen_grpc_pb_h) grpc_gen_srcs.append(gen_grpc_pb_cc) cmds = bash_vars + [ "mkdir -p %s" % temp_prefix, ] + proto_path_cmds + [ " ".join([ protoc, "--plugin=protoc-gen-grpc-cpp=%s" % grpc_plugin, ] + gendir_include + proto_rollup_flags + [ "-I%s" % protobuf_include, "--grpc-cpp_out=%s" % cpp_out, proto_src_loc, ]), "cd $${g3}", "cp %s.grpc.pb.h %s" % (temp_stem, _loc(gen_grpc_pb_h)), "cp %s.grpc.pb.cc %s" % (temp_stem, _loc(gen_grpc_pb_cc)), ] grpc_pb_outs = [gen_grpc_pb_h, gen_grpc_pb_cc] native.genrule( name = src_arch + ".grpc.pb", srcs = protoc_srcs_set, outs = grpc_pb_outs, tools = grpc_tools, cmd = " && ".join(cmds), heuristic_label_expansion = 0, visibility = visibility, ) dep_set = depset(deps) | [protobuf_label] includes = [] if proto_include: includes = [proto_include] # Note: Public sc_proto_lib invokes this once per (listed) arch; # which then calls sc_cc_lib with same name for each arch; # multiple such calls are OK as long as the arches are disjoint. sc_cc_lib( name = decorate(name, arch), deps = dep_set, srcs = gen_srcs, hdrs = hdrs + gen_hdrs, arches = [arch], copts = [], includes = includes, testonly = testonly, textual_hdrs = gen_hdrs, visibility = visibility, ) if grpc_shim_rule: grpc_name = name[:-6] + "_grpc_proto" grpc_dep_set = dep_set | [name] | _SC_GRPC_DEPS grpc_gen_hdrs_plus = grpc_gen_hdrs + gen_hdrs sc_cc_lib( name = decorate(grpc_name, arch), deps = grpc_dep_set, srcs = grpc_gen_srcs, hdrs = hdrs + grpc_gen_hdrs_plus + [grpc_shim_rule], arches = [arch], copts = [], includes = includes, testonly = testonly, textual_hdrs = grpc_gen_hdrs_plus, visibility = visibility, ) def _gen_proto_shims(name, pb_modifier, srcs, arches, visibility): """Macro to build .pb.h multi-arch master switch for sc_proto_lib. 
For each src path.proto, generates path.pb.h consisting of: #ifdef logic to select path.${arch}.pb.h Also generates an alias that will select the appropriate proto target based on the currently selected platform architecture. Args: name: Base name for this library. pb_modifier: protoc plugin-dependent file extension (e.g.: .pb) srcs: List of proto files. arches: List of arches this shim should support. visibility: The blaze visibility of the generated alias. Returns: Name of shim rule for use in follow-on hdrs and/or src lists. """ outs = [] cmds = [] hdr_ext = pb_modifier + ".h" for src in srcs: pkg, filename = parse_label(src) if not filename.endswith(".proto"): continue hdr_stem = filename[0:-6] new_hdr_name = hdr_stem + hdr_ext outs.append(new_hdr_name) # Generate lines for shim switch file. # Lines expand inside squotes, so quote accordingly. include_fmt = "#include " + dquote(pkg + "/" + hdr_stem + ".%s" + hdr_ext) lines = [ "#if defined(STRATUM_ARCH_%s)" % "PPC", include_fmt % "ppc", "#elif defined(STRATUM_ARCH_%s)" % "X86", include_fmt % "x86", "#elif defined(STRATUM_ARCH_%s)" % "HOST", include_fmt % "host", "#else", "#error Unknown STRATUM_ARCH", "#endif", ] gen_cmds = [("printf '%%s\\n' '%s'" % line) for line in lines] new_hdr_loc = "$(location %s)" % new_hdr_name cmds.append("{ %s; } > %s" % (" && ".join(gen_cmds), new_hdr_loc)) shim_rule = decorate(name, "shims") native.genrule( name = shim_rule, srcs = srcs, outs = outs, cmd = " && ".join(cmds) or "true", ) sc_platform_alias( name = name, host = decorate(name, "host") if "host" in arches else None, ppc = decorate(name, "ppc") if "ppc" in arches else None, x86 = decorate(name, "x86") if "x86" in arches else None, visibility = visibility, ) return shim_rule def _gen_py_proto_lib(name, srcs, deps, visibility, testonly): """Creates a py_proto_library from the given srcs. There's no clean way to make python protos work with sc_proto_lib's proto_include field, so we keep this simple. For library "name", generates: * ${name}_default_pb, a regular proto library. * ${name}_py, a py_proto_library based on ${name}_default_pb. Args: name: Standard blaze name argument. srcs: Standard blaze srcs argument. deps: Standard blaze deps argument. visibility: Standard blaze visibility argument. testonly: Standard blaze testonly argument. """ regular_proto_name = decorate(name, "default_pb") py_name = decorate(name, "py") proto_library( name = regular_proto_name, srcs = srcs, deps = [decorate(dep, "default_pb") for dep in deps], visibility = visibility, testonly = testonly, ) native.py_proto_library( name = py_name, api_version = 2, deps = [regular_proto_name], visibility = visibility, testonly = testonly, ) # TODO(unknown): Add support for depending on normal proto_library rules. def sc_proto_lib( name = None, srcs = [], hdrs = [], deps = [], arches = [], visibility = None, testonly = None, proto_include = None, python_support = False, services = []): """Public macro to build multi-arch library from Message protobuf(s). For library "name", generates: * ${name}_shim aka .pb.h master switch - see _gen_proto_shims, above. * ${name}_${arch}_pb protobuf compile rules - one for each arch. * sc_cc_lib(name) with those as input. * ${name}_py a py_proto_library version of this library. Only generated if python_support == True. Args: name: Base name for this library. srcs: List of .proto files - private to this library. hdrs: As above, but also exported for dependent rules to utilize. 
      deps: List of deps for this library
      arches: Which architectures to build this library for, None => ALL.
      visibility: Standard blaze visibility parameter, passed through to
                  subsequent rules.
      testonly: Standard blaze testonly parameter.
      proto_include: Path to add to include path. This will affect the
                     symbols generated by protoc, as well as the include
                     paths used for both sc_cc_lib and sc_proto_lib rules
                     that depend on this rule. Typically "."
      python_support: Defaults to False. If True, generate a python proto
                      library from this rule. Any sc_proto_lib with python
                      support may only depend on sc_proto_libs that also
                      have python support, and may not use the
                      proto_include field in this rule.
      services: List of services to enable {"grpc", "rpc"}; Only "grpc"
                is supported. So "rpc" and "grpc" are equivalent.
    """
    if not arches:
        if testonly:
            arches = HOST_ARCHES
        else:
            arches = ALL_ARCHES
    service_enable = {
        "grpc": 0,
    }
    for service in services or []:
        if service == "grpc":
            service_enable["grpc"] = 1
        elif service == "rpc":
            service_enable["grpc"] = 1
        else:
            fail("service='%s' not in (grpc, rpc)" % service)
    deps = depset(deps or [])
    shim_rule = _gen_proto_shims(
        name = name,
        pb_modifier = ".pb",
        srcs = srcs + hdrs,
        arches = arches,
        visibility = visibility,
    )
    grpc_shim_rule = None
    if (service_enable["grpc"]):
        grpc_shim_rule = _gen_proto_shims(
            name = decorate(name[:-6], "grpc_proto"),
            pb_modifier = ".grpc.pb",
            srcs = srcs + hdrs,
            arches = arches,
            visibility = visibility,
        )
    for arch in arches:
        _gen_proto_lib(
            name = name,
            srcs = srcs,
            hdrs = [shim_rule] + hdrs,
            deps = deps,
            arch = arch,
            visibility = visibility,
            testonly = testonly,
            proto_include = proto_include,
            grpc_shim_rule = grpc_shim_rule,
        )
    if python_support:
        if proto_include:
            fail("Cannot use proto_include on an sc_proto_lib with python support.")
        _gen_py_proto_lib(
            name = name,
            srcs = depset(srcs + hdrs),
            deps = deps,
            visibility = visibility,
            testonly = testonly,
        )

register_extension_info(
    extension_name = "sc_proto_lib",
    label_regex_for_dep = "{extension_name}",
)

def sc_package(
        name = None,
        bins = None,
        data = None,
        deps = None,
        arches = None,
        visibility = None):
    """Public macro to package binaries and data for deployment.

    For package "name", generates:
    * ${name}_${arch}_bin and ${name}_${arch}_data filesets containing
      respectively all of the binaries and all of the data needed for
      this package and all dependency packages.
    * ${name}_${arch} fileset containing the corresponding bin and data
      filesets, mapped to bin/ and share/ respectively.
    * ${name}_${arch}_tarball rule builds that .tar.gz package.

    Args:
      name: Base name for this package.
      bins: List of sc_cc_bin rules to be packaged.
      data: List of files (and file producing rules) to be packaged.
      deps: List of other sc_packages to add to this package.
      arches: Which architectures to build this library for,
              None => EMBEDDED_ARCHES (HOST_ARCHES not generally supported).
      visibility: Standard blaze visibility parameter, passed through to
                  all filesets.
    """
    bins = depset(bins or [])
    data = depset(data or [])
    deps = depset(deps or [])
    if not arches:
        arches = EMBEDDED_ARCHES
    fileset_name = decorate(name, "fs")
    for extension, inputs in [
        ("bin", ["%s.stripped" % b for b in bins.to_list()]),
        ("data", data),
    ]:
        native.Fileset(
            name = decorate(fileset_name, extension),
            out = decorate(name, extension),
            entries = [
                native.FilesetEntry(
                    files = inputs,
                ),
            ] + [
                native.FilesetEntry(srcdir = decorate(dep, extension))
                for dep in deps.to_list()
            ],
            visibility = visibility,
        )

    # Add any platform specific files to the final tarball.
    platform_entries = sc_platform_select(
        # We use a different ppc toolchain for Stratum.
        # This means that we must provide portable shared libs for our ppc
        # executables.
        ppc = [native.FilesetEntry(
            srcdir = "%s:BUILD" % _PPC_GRTE,
            files = [":libs"],
            destdir = "lib/stratum",
            symlinks = "dereference",
        )],
        default = [],
    )
    native.Fileset(
        name = fileset_name,
        out = name,
        entries = [
            native.FilesetEntry(
                srcdir = decorate(name, "bin"),
                destdir = "bin",
            ),
            native.FilesetEntry(
                srcdir = decorate(name, "data"),
                destdir = "share",
            ),
        ] + platform_entries,
        visibility = visibility,
    )

    outs = ["%s.tar.gz" % name]

    # Copy our files into a temporary directory and make any necessary changes
    # before tarballing.
    cmds = [
        "TEMP_DIR=$(@D)/stratum_packaging_temp",
        "mkdir $${TEMP_DIR}",
        "cp -r %s $${TEMP_DIR}/tarball" % _loc(fileset_name),
        "if [[ -e $${TEMP_DIR}/tarball/bin ]]",
        "then for f in $${TEMP_DIR}/tarball/bin/*.stripped",
        "    do mv $${f} $${f%.stripped}",  # rename not available.
        "done",
        "fi",
        "tar czf %s -h -C $${TEMP_DIR}/tarball ." % _loc(name + ".tar.gz"),
        "rm -rf $${TEMP_DIR}",
    ]
    native.genrule(
        name = decorate(name, "tarball"),
        srcs = [":%s" % fileset_name],
        outs = outs,
        cmd = "; ".join(cmds),
        visibility = visibility,
    )
[]
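
# Illustrative usage of the sc_proto_lib / sc_package macros above (a sketch
# added for clarity, not part of the original corpus record). Every target
# and file name here is hypothetical, and the calls are commented out
# because these macros are meant to be invoked from BUILD files rather than
# from the .bzl itself. One subtlety worth noting: when services includes
# "grpc", sc_proto_lib derives the grpc shim name via name[:-6], i.e. by
# blindly stripping six characters, so such libraries should carry a
# "_proto" suffix.
#
# sc_proto_lib(
#     name = "hal_proto",            # "_proto" suffix needed for grpc shims
#     hdrs = ["hal.proto"],          # exported to dependent rules
#     python_support = True,         # also generates ${name}_py
#     services = ["grpc"],
# )
#
# sc_package(
#     name = "hal_pkg",
#     bins = [":hal_bin"],           # stripped binaries land under bin/
#     data = ["hal_config.pb.txt"],  # data files land under share/
# )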
miuvlad/genieparser
src/genie/libs/parser/ios/tests/test_show_platform.py
60b1151e3c67c6b55d75e30359d0bf52825efad8
#!/bin/env python import unittest from unittest.mock import Mock from pyats.topology import Device from genie.metaparser.util.exceptions import SchemaEmptyParserError,\ SchemaMissingKeyError from genie.libs.parser.ios.show_platform import ShowVersion,\ Dir,\ ShowRedundancy,\ ShowInventory,\ ShowBootvar, \ ShowProcessesCpuSorted,\ ShowProcessesCpu,\ ShowVersionRp,\ ShowPlatform,\ ShowPlatformPower,\ ShowProcessesCpuHistory,\ ShowProcessesCpuPlatform,\ ShowPlatformSoftwareStatusControl,\ ShowPlatformSoftwareSlotActiveMonitorMem,\ ShowPlatformHardware,\ ShowPlatformHardwarePlim,\ ShowPlatformHardwareQfpBqsOpmMapping,\ ShowPlatformHardwareQfpBqsIpmMapping,\ ShowPlatformHardwareSerdes,\ ShowPlatformHardwareSerdesInternal,\ ShowPlatformHardwareQfpBqsStatisticsChannelAll,\ ShowPlatformHardwareQfpInterfaceIfnameStatistics,\ ShowPlatformHardwareQfpStatisticsDrop,\ ShowEnvironment,\ ShowModule,\ ShowSwitch, ShowSwitchDetail from genie.libs.parser.iosxe.tests.test_show_platform import TestShowPlatform as test_show_platform_iosxe,\ TestShowPlatformPower as test_show_platform_power_iosxe,\ TestShowVersionRp as test_show_version_rp_iosxe,\ TestShowProcessesCpu as test_show_processes_cpu_iosxe,\ TestShowProcessesCpuHistory as test_show_processes_cpu_history_iosxe,\ TestShowProcessesCpuPlatform as test_show_processes_cpu_platform_iosxe,\ TestShowPlatformSoftwareStatusControlProcessorBrief as test_show_platform_software_status_control_processor_brief_iosxe,\ TestShowPlatformSoftwareSlotActiveMonitorMemSwap as test_show_platform_software_slot_active_monitor_Mem_iosxe,\ TestShowPlatformHardware as test_show_platform_hardware_iosxe,\ TestShowPlatformHardwarePlim as test_show_platform_hardware_plim_iosxe,\ TestShowPlatformHardwareQfpBqsOpmMapping as test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\ TestShowPlatformHardwareQfpBqsIpmMapping as test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\ TestShowPlatformHardwareSerdesStatistics as test_show_platform_hardware_serdes_statistics_iosxe,\ TestShowPlatformHardwareSerdesStatisticsInternal as test_show_platform_hardware_serdes_statistics_internal_iosxe,\ ShowPlatformHardwareQfpBqsStatisticsChannelAll as show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\ ShowPlatformHardwareQfpInterface as show_platform_hardware_qfp_interface_iosxe,\ TestShowPlatformHardwareQfpStatisticsDrop as test_show_platform_hardware_qfp_statistics_drop_iosxe,\ TestShowEnv as test_show_env_iosxe,\ TestShowModule as test_show_module_iosxe,\ TestShowSwitch as test_show_switch_iosxe,\ TestShowSwitchDetail as test_show_switch_detail_iosxe class TestShowVersion(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} semi_empty_output = {'execute.return_value': '''\ ROM: Bootstrap program is IOSv '''} golden_parsed_output_iosv = { "version": { "last_reload_reason": "Unknown reason", "hostname": "N95_1", "os": "IOS", "version_short": "15.6", "number_of_intfs": { "Gigabit Ethernet": "6" }, "version": "15.6(3)M2", "rtr_type": "IOSv", "chassis_sn": "9K66Z7TOKAACDEQA24N7S", "chassis": "IOSv", "image_id": "VIOS-ADVENTERPRISEK9-M", 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 29-Mar-17 14:05', "processor_type": "revision 1.0", "platform": "IOSv", "image_type": "production image", 'processor_board_flash': '10080K', 'returned_to_rom_by': 'reload', "main_mem": "435457", "mem_size": { "non-volatile configuration": "256" }, "system_image": "flash0:/vios-adventerprisek9-m", "curr_config_register": "0x0", "rom": 
"Bootstrap program is IOSv", "uptime": "1 day, 16 hours, 42 minutes" } } golden_output_iosv = {'execute.return_value': '''\ Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2017 by Cisco Systems, Inc. Compiled Wed 29-Mar-17 14:05 by prod_rel_team ROM: Bootstrap program is IOSv N95_1 uptime is 1 day, 16 hours, 42 minutes System returned to ROM by reload System image file is "flash0:/vios-adventerprisek9-m" Last reload reason: Unknown reason This product contains cryptographic features and is subject to United States and local country laws governing import, export, transfer and use. Delivery of Cisco cryptographic products does not imply third-party authority to import, export, distribute or use encryption. Importers, exporters, distributors and users are responsible for compliance with U.S. and local country laws. By using this product you agree to comply with applicable laws and regulations. If you are unable to comply with U.S. and local laws, return this product immediately. A summary of U.S. laws governing Cisco cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please contact us by sending email to [email protected]. Cisco IOSv (revision 1.0) with with 435457K/87040K bytes of memory. Processor board ID 9K66Z7TOKAACDEQA24N7S 6 Gigabit Ethernet interfaces DRAM configuration is 72 bits wide with parity disabled. 256K bytes of non-volatile configuration memory. 2097152K bytes of ATA System CompactFlash 0 (Read/Write) 0K bytes of ATA CompactFlash 1 (Read/Write) 0K bytes of ATA CompactFlash 2 (Read/Write) 10080K bytes of ATA CompactFlash 3 (Read/Write) Configuration register is 0x0'''} golden_parsed_output_ios = { 'version': {'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version ' '15.2(3r)E, RELEASE SOFTWARE (fc1)', 'chassis': 'WS-C3750X-24P', 'chassis_sn': 'FDO2028F1WK', 'curr_config_register': '0xF', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 26-Jun-13 09:56', 'hostname': 'R5', 'image_id': 'C3750E-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent', 'main_mem': '262144', 'mem_size': {'flash-simulated non-volatile configuration': '512'}, 'next_reload_license_level': 'ipservices', 'number_of_intfs': {'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet': '2', 'Virtual Ethernet': '2', 'Gigabit Ethernet': '28', 'FastEthernet': '1' }, 'os': 'IOS', 'platform': 'C3750E', 'processor_type': 'PowerPC405', 'returned_to_rom_by': 'power-on', 'rom': 'Bootstrap program is C3750E boot loader', 'rtr_type': 'WS-C3750X-24P', 'system_image': 'flash:c3750e-universalk9-mz', 'system_restarted_at': '12:22:21 PDT Mon Sep 10 2018', 'uptime': '9 weeks, 4 days, 2 hours, 3 minutes', 'version': '12.2(55)SE8', 'version_short': '12.2' } } golden_output_ios = {'execute.return_value': '''\ Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 12.2(55)SE8, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2013 by Cisco Systems, Inc. 
Compiled Wed 26-Jun-13 09:56 by prod_rel_team Image text-base: 0x00003000, data-base: 0x02800000 ROM: Bootstrap program is C3750E boot loader BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 15.2(3r)E, RELEASE SOFTWARE (fc1) R5 uptime is 9 weeks, 4 days, 2 hours, 3 minutes System returned to ROM by power-on System restarted at 12:22:21 PDT Mon Sep 10 2018 System image file is "flash:c3750e-universalk9-mz" This product contains cryptographic features and is subject to United States and local country laws governing import, export, transfer and use. Delivery of Cisco cryptographic products does not imply third-party authority to import, export, distribute or use encryption. Importers, exporters, distributors and users are responsible for compliance with U.S. and local country laws. By using this product you agree to comply with applicable laws and regulations. If you are unable to comply with U.S. and local laws, return this product immediately. A summary of U.S. laws governing Cisco cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please contact us by sending email to [email protected]. License Level: ipservices License Type: Permanent Next reload license Level: ipservices cisco WS-C3750X-24P (PowerPC405) processor (revision W0) with 262144K bytes of memory. Processor board ID FDO2028F1WK Last reset from power-on 2 Virtual Ethernet interfaces 1 FastEthernet interface 28 Gigabit Ethernet interfaces 2 Ten Gigabit Ethernet interfaces The password-recovery mechanism is enabled. 512K bytes of flash-simulated non-volatile configuration memory. Base ethernet MAC Address : 84:3D:C6:FF:F1:B8 Motherboard assembly number : 73-15476-04 Motherboard serial number : FDO202907UH Model revision number : W0 Motherboard revision number : B0 Model number : WS-C3750X-24P-L Daughterboard assembly number : 800-32727-03 Daughterboard serial number : FDO202823P8 System serial number : FDO2028F1WK Top Assembly Part Number : 800-38990-01 Top Assembly Revision Number : F0 Version ID : V07 CLEI Code Number : CMMPP00DRB Hardware Board Revision Number : 0x05 Switch Ports Model SW Version SW Image ------ ----- ----- ---------- ---------- * 1 30 WS-C3750X-24P 12.2(55)SE8 C3750E-UNIVERSALK9-M Configuration register is 0xF '''} golden_parsed_output_ios_cat6k = { "version": { "os": "IOS", "version_short": "12.2", "platform": "s72033_rp", "version": "12.2(18)SXF7", "image_id": "s72033_rp-ADVENTERPRISEK9_WAN-M", 'compiled_by': 'kellythw', 'compiled_date': 'Thu 23-Nov-06 06:26', "image_type": "production image", "rom": "System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1)", "bootldr": "s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1)", "hostname": "cat6k_tb1", "uptime": "10 weeks, 5 days, 5 hours, 16 minutes", "system_image": "disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7", "chassis": "WS-C6503-E", "main_mem": "983008", "processor_type": "R7000", 'sp_by': 'power on', 'returned_to_rom_at': '21:57:23 UTC Sat Aug 28 2010', 'returned_to_rom_by': 'power cycle', "rtr_type": "WS-C6503-E", "chassis_sn": "FXS1821Q2H9", "last_reload_reason": "s/w reset", 'processor_board_flash': '65536K', "number_of_intfs": { "Gigabit Ethernet/IEEE 802.3": "50", 'Virtual Ethernet/IEEE 802.3': '1' }, "mem_size": {"non-volatile configuration": "1917", "packet buffer": "8192"}, "curr_config_register": "0x2102", } } golden_output_ios_cat6k = {'execute.return_value': ''' show version Cisco Internetwork Operating System 
Software IOS (tm) s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2006 by cisco Systems, Inc. Compiled Thu 23-Nov-06 06:26 by kellythw Image text-base: 0x40101040, data-base: 0x42D98000 ROM: System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1) BOOTLDR: s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) cat6k_tb1 uptime is 10 weeks, 5 days, 5 hours, 16 minutes Time since cat6k_tb1 switched to active is 10 weeks, 5 days, 5 hours, 15 minutes System returned to ROM by power cycle at 21:57:23 UTC Sat Aug 28 2010 (SP by power on) System image file is "disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7" This product contains cryptographic features and is subject to United States and local country laws governing import, export, transfer and use. Delivery of Cisco cryptographic products does not imply third-party authority to import, export, distribute or use encryption. Importers, exporters, distributors and users are responsible for compliance with U.S. and local country laws. By using this product you agree to comply with applicable laws and regulations. If you are unable to comply with U.S. and local laws, return this product immediately. A summary of U.S. laws governing Cisco cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please contact us by sending email to [email protected]. cisco WS-C6503-E (R7000) processor (revision 1.4) with 983008K/65536K bytes of memory. Processor board ID FXS1821Q2H9 SR71000 CPU at 600Mhz, Implementation 0x504, Rev 1.2, 512KB L2 Cache Last reset from s/w reset SuperLAT software (copyright 1990 by Meridian Technology Corp). X.25 software, Version 3.0.0. Bridging software. TN3270 Emulation software. 1 Virtual Ethernet/IEEE 802.3 interface 50 Gigabit Ethernet/IEEE 802.3 interfaces 1917K bytes of non-volatile configuration memory. 8192K bytes of packet buffer memory. 65536K bytes of Flash internal SIMM (Sector size 512K). Configuration register is 0x2102 '''} golden_output_ios_1 = {'execute.return_value': '''\ Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 15.2(2)E8, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2018 by Cisco Systems, Inc. Compiled Mon 22-Jan-18 04:07 by prod_rel_team ROM: Bootstrap program is C3750E boot loader BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1) sample_switch uptime is 8 weeks, 3 days, 10 hours, 27 minutes System returned to ROM by power-on System restarted at 05:06:40 GMT Tue Sep 10 2019 System image file is "flash:c3750e-universalk9-mz.152-2.E8.bin" Last reload reason: Reload command This product contains cryptographic features and is subject to United States and local country laws governing import, export, transfer and use. Delivery of Cisco cryptographic products does not imply third-party authority to import, export, distribute or use encryption. Importers, exporters, distributors and users are responsible for compliance with U.S. and local country laws. By using this product you agree to comply with applicable laws and regulations. If you are unable to comply with U.S. and local laws, return this product immediately. A summary of U.S. 
laws governing Cisco cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please contact us by sending email to [email protected]. License Level: ipservices License Type: Permanent Next reload license Level: ipservices cisco WS-C3750X-24S (PowerPC405) processor (revision A0) with 524288K bytes of memory. Processor board ID FDO1633Q14S Last reset from power-on 14 Virtual Ethernet interfaces 1 FastEthernet interface 28 Gigabit Ethernet interfaces 2 Ten Gigabit Ethernet interfaces The password-recovery mechanism is enabled. 512K bytes of flash-simulated non-volatile configuration memory. Base ethernet MAC Address : AC:F2:C5:FF:55:E7 Motherboard assembly number : 73-13061-04 Motherboard serial number : FDO1633Q14M Model revision number : A0 Motherboard revision number : A0 Model number : WS-C3750X-24S-E Daughterboard assembly number : 800-32727-03 Daughterboard serial number : FDO172217ED System serial number : FDO1633Q14S Top Assembly Part Number : 800-33746-04 Top Assembly Revision Number : B0 Version ID : V03 CLEI Code Number : CMMFF00ARC Hardware Board Revision Number : 0x04 Switch Ports Model SW Version SW Image ------ ----- ----- ---------- ---------- * 1 30 WS-C3750X-24S 15.2(2)E8 C3750E-UNIVERSALK9-M Configuration register is 0xF '''} golden_parsed_output_ios_1 = { 'version': {'version_short': '15.2', 'platform': 'C3750E', 'version': '15.2(2)E8', 'image_id': 'C3750E-UNIVERSALK9-M', 'os': 'IOS', 'image_type': 'production image', 'compiled_date': 'Mon 22-Jan-18 04:07', 'compiled_by': 'prod_rel_team', 'rom': 'Bootstrap program is C3750E boot loader', 'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1)', 'hostname': 'sample_switch', 'uptime': '8 weeks, 3 days, 10 hours, 27 minutes', 'returned_to_rom_by': 'power-on', 'system_restarted_at': '05:06:40 GMT Tue Sep 10 2019', 'system_image': 'flash:c3750e-universalk9-mz.152-2.E8.bin', 'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent', 'next_reload_license_level': 'ipservices', 'chassis': 'WS-C3750X-24S', 'main_mem': '524288', 'processor_type': 'PowerPC405', 'rtr_type': 'WS-C3750X-24S', 'chassis_sn': 'FDO1633Q14S', 'number_of_intfs': { 'Virtual Ethernet': '14', 'FastEthernet': '1', 'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet': '2' }, 'mem_size': { 'flash-simulated non-volatile configuration': '512' }, 'curr_config_register': '0xF' } } device_output = {'execute.return_value':''' best-c3945-IOS3#show version Cisco IOS Software, C3900 Software (C3900-UNIVERSALK9-M), Version 15.0(1)M7, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2011 by Cisco Systems, Inc. Compiled Fri 05-Aug-11 00:32 by prod_rel_team ROM: System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1) best-c3945-IOS3 uptime is 1 hour, 20 minutes System returned to ROM by reload at 10:26:47 EST Mon Dec 9 2019 System restarted at 10:27:57 EST Mon Dec 9 2019 System image file is "flash0:c3900-universalk9-mz.SPA.150-1.M7.bin" Last reload type: Normal Reload Last reload reason: Reload Command This product contains cryptographic features and is subject to United States and local country laws governing import, export, transfer and use. Delivery of Cisco cryptographic products does not imply third-party authority to import, export, distribute or use encryption. Importers, exporters, distributors and users are responsible for compliance with U.S. and local country laws. 
By using this product you agree to comply with applicable laws and regulations. If you are unable to comply with U.S. and local laws, return this product immediately. A summary of U.S. laws governing Cisco cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please contact us by sending email to [email protected]. Cisco CISCO3945-CHASSIS (revision 1.1) with C3900-SPE150/K9 with 2027520K/69632K bytes of memory. Processor board ID FGL161010K8 2 FastEthernet interfaces 3 Gigabit Ethernet interfaces 1 Virtual Private Network (VPN) Module DRAM configuration is 72 bits wide with parity enabled. 255K bytes of non-volatile configuration memory. 2000880K bytes of ATA System CompactFlash 0 (Read/Write) License Info: License UDI: ------------------------------------------------- Device# PID SN ------------------------------------------------- *0 C3900-SPE150/K9 FOC16050QP6 Technology Package License Information for Module:'c3900' ----------------------------------------------------------------- Technology Technology-package Technology-package Current Type Next reboot ------------------------------------------------------------------ ipbase ipbasek9 Permanent ipbasek9 security securityk9 Permanent securityk9 uc None None None data datak9 Permanent datak9 Configuration register is 0x2102 '''} parsed_output = { 'version': { 'chassis': 'CISCO3945-CHASSIS', 'chassis_sn': 'FGL161010K8', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Fri 05-Aug-11 00:32', 'curr_config_register': '0x2102', 'hostname': 'best-c3945-IOS3', 'image_id': 'C3900-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'Reload Command', 'last_reload_type': 'Normal Reload', 'license_udi': { 'device_num': { '*0': { 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6' } } }, 'license_package': { 'data': { 'license_level': 'datak9', 'license_type': 'Permanent', 'next_reload_license_level': 'datak9', }, 'ipbase': { 'license_level': 'ipbasek9', 'license_type': 'Permanent', 'next_reload_license_level': 'ipbasek9', }, 'security': { 'license_level': 'securityk9', 'license_type': 'Permanent', 'next_reload_license_level': 'securityk9', }, 'uc': { 'license_level': 'None', 'license_type': 'None', 'next_reload_license_level': 'None', }, }, 'main_mem': '2027520', 'mem_size': { 'non-volatile configuration': '255', }, 'number_of_intfs': { 'FastEthernet': '2', 'Gigabit Ethernet': '3', }, 'os': 'IOS', 'platform': 'C3900', 'processor_board_flash': '2000880K', 'processor_type': 'C3900-SPE150/K9', 'returned_to_rom_at': '10:26:47 EST Mon Dec 9 2019', 'returned_to_rom_by': 'reload', 'rom': 'System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1)', 'rtr_type': 'CISCO3945-CHASSIS', 'system_image': 'flash0:c3900-universalk9-mz.SPA.150-1.M7.bin', 'system_restarted_at': '10:27:57 EST Mon Dec 9 2019', 'uptime': '1 hour, 20 minutes', 'version': '15.0(1)M7', 'version_short': '15.0', }, } def test_empty(self): self.dev1 = Mock(**self.empty_output) version_obj = ShowVersion(device=self.dev1) with self.assertRaises(AttributeError): parsered_output = version_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) version_obj = ShowVersion(device=self.dev1) with self.assertRaises(KeyError): parsed_output = version_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, 
self.golden_parsed_output_iosv) def test_golden_ios(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios) def test_golden_ios_cat6k(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_cat6k) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_cat6k) def test_golden_ios_1(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_1) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_1) def test_golden_ios_2(self): self.maxDiff = None self.dev_iosv = Mock(**self.device_output) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.parsed_output) class test_dir(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} semi_empty_output = {'execute.return_value': '''\ Directory of flash:/ '''} golden_parsed_output_iosv = { "dir": { "flash0:/": { "files": { "e1000_bia.txt": { "last_modified_date": "Oct 17 2018 18:57:18 +00:00", "index": "269", "size": "119", "permissions": "-rw-" }, "config": { "last_modified_date": "Oct 14 2013 00:00:00 +00:00", "index": "264", "size": "0", "permissions": "drw-" }, "nvram": { "last_modified_date": "Oct 17 2018 18:57:10 +00:00", "index": "268", "size": "524288", "permissions": "-rw-" }, "boot": { "last_modified_date": "Jan 30 2013 00:00:00 +00:00", "index": "1", "size": "0", "permissions": "drw-" }, "vios-adventerprisek9-m": { "last_modified_date": "Mar 29 2017 00:00:00 +00:00", "index": "267", "size": "147988420", "permissions": "-rw-" } }, "bytes_total": "2142715904", "bytes_free": "1989595136" }, "dir": "flash0:/" } } golden_output_iosv = {'execute.return_value': '''\ Directory of flash0:/ 1 drw- 0 Jan 30 2013 00:00:00 +00:00 boot 264 drw- 0 Oct 14 2013 00:00:00 +00:00 config 267 -rw- 147988420 Mar 29 2017 00:00:00 +00:00 vios-adventerprisek9-m 268 -rw- 524288 Oct 17 2018 18:57:10 +00:00 nvram 269 -rw- 119 Oct 17 2018 18:57:18 +00:00 e1000_bia.txt 2142715904 bytes total (1989595136 bytes free) '''} def test_empty(self): self.dev1 = Mock(**self.empty_output) dir_obj = Dir(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsered_output = dir_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) dir_obj = Dir(device=self.dev1) with self.assertRaises(SchemaMissingKeyError): parsed_output = dir_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) dir_obj = Dir(device=self.dev_iosv) parsed_output = dir_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_redundancy(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { "red_sys_info": { "last_switchover_reason": "unsupported", "maint_mode": "Disabled", "switchovers_system_experienced": "0", "available_system_uptime": "0 minutes", "communications": "Down", "hw_mode": "Simplex", "communications_reason": "Failure", "standby_failures": "0" }, "slot": { "slot 0": { "image_ver": "Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE 
(fc2)", "uptime_in_curr_state": "1 day, 16 hours, 42 minutes", "config_register": "0x0", "curr_sw_state": "ACTIVE" } } } golden_output_iosv = {'execute.return_value': '''\ Redundant System Information : ------------------------------ Available system uptime = 0 minutes Switchovers system experienced = 0 Standby failures = 0 Last switchover reason = unsupported Hardware Mode = Simplex Maintenance Mode = Disabled Communications = Down Reason: Failure Current Processor Information : ------------------------------- Active Location = slot 0 Current Software state = ACTIVE Uptime in current state = 1 day, 16 hours, 42 minutes Image Version = Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2017 by Cisco Systems, Inc. Compiled Wed 29-Mar-17 14:05 by prod_rel_team Configuration register = 0x0 Peer (slot: 0) information is not available because it is in 'DISABLED' state '''} def test_empty(self): self.dev1 = Mock(**self.empty_output) redundancy_obj = ShowRedundancy(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = redundancy_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) redundancy_obj = ShowRedundancy(device=self.dev_iosv) parsed_output = redundancy_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class TestShowInventory(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { 'main': { 'chassis': { 'IOSv': { 'descr': 'IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0', 'name': 'IOSv', 'pid': 'IOSv', 'sn': '9K66Z7TOKAACDEQA24N7S', 'vid': '1.0', }, }, }, } golden_output_iosv = {'execute.return_value': '''\ NAME: "IOSv", DESCR: "IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0" PID: IOSv , VID: 1.0, SN: 9K66Z7TOKAACDEQA24N7S '''} golden_parsed_output_2 = { "main": { "chassis": { "WS-C6504-E": { "name": "WS-C6504-E", "descr": "Cisco Systems Cisco 6500 4-slot Chassis System", "pid": "WS-C6504-E", "vid": "V01", "sn": "FXS1712Q1R8", } } }, "slot": { "CLK-7600 1": { "other": { "CLK-7600 1": { "name": "CLK-7600 1", "descr": "OSR-7600 Clock FRU 1", "pid": "CLK-7600", "vid": "", "sn": "FXS170802GL", } } }, "CLK-7600 2": { "other": { "CLK-7600 2": { "name": "CLK-7600 2", "descr": "OSR-7600 Clock FRU 2", "pid": "CLK-7600", "vid": "", "sn": "FXS170802GL", } } }, "FAN-MOD-4HS 1": { "other": { "FAN-MOD-4HS 1": { "name": "FAN-MOD-4HS 1", "descr": "High Speed Fan Module for CISCO7604 1", "pid": "FAN-MOD-4HS", "vid": "V01", "sn": "DCH170900PF", } } }, "PS 1 PWR-2700-AC/4": { "other": { "PS 1 PWR-2700-AC/4": { "name": "PS 1 PWR-2700-AC/4", "descr": "2700W AC power supply for CISCO7604 1", "pid": "PWR-2700-AC/4", "vid": "V03", "sn": "APS1707008Y", } } }, "PS 2 PWR-2700-AC/4": { "other": { "PS 2 PWR-2700-AC/4": { "name": "PS 2 PWR-2700-AC/4", "descr": "2700W AC power supply for CISCO7604 2", "pid": "PWR-2700-AC/4", "vid": "V03", "sn": "APS17070093", } } }, "1": { "rp": { "VS-SUP2T-10G": { "name": "1", "descr": "VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE w/ CTS Rev. 1.5", "pid": "VS-SUP2T-10G", "vid": "V05", "sn": "SAL17152N0F", "subslot": { "0": { "VS-F6K-MSFC5": { "descr": "VS-F6K-MSFC5 CPU Daughterboard Rev. 
2.0", "name": "msfc sub-module of 1", "pid": "VS-F6K-MSFC5", "sn": "SAL17142D06", "vid": "", }, "VS-F6K-PFC4": { "descr": "VS-F6K-PFC4 Policy Feature Card 4 Rev. 2.0", "name": "VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of 1", "pid": "VS-F6K-PFC4", "sn": "SAL17163901", "vid": "V03", }, }, "4": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te1/4", "name": "Transceiver Te1/4", "pid": "X2-10GB-SR", "sn": "ONT170202T1", "vid": "V06 ", } }, "5": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te1/5", "name": "Transceiver Te1/5", "pid": "X2-10GB-SR", "sn": "ONT1702033D", "vid": "V06 ", } }, }, } } }, "2": { "lc": { "WS-X6816-10GE": { "name": "2", "descr": "WS-X6816-10GE CEF720 16 port 10GE Rev. 2.0", "pid": "WS-X6816-10GE", "vid": "V02", "sn": "SAL17152QB3", "subslot": { "0": { "WS-F6K-DFC4-E": { "descr": "WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 1.2", "name": "WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module of 2", "pid": "WS-F6K-DFC4-E", "sn": "SAL171846RF", "vid": "V02", } }, "1": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/1", "name": "Transceiver Te2/1", "pid": "X2-10GB-SR", "sn": "ONT17020338", "vid": "V06 ", } }, "2": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/2", "name": "Transceiver Te2/2", "pid": "X2-10GB-SR", "sn": "ONT1702020H", "vid": "V06 ", } }, "3": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/3", "name": "Transceiver Te2/3", "pid": "X2-10GB-SR", "sn": "ONT170202UU", "vid": "V06 ", } }, "4": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/4", "name": "Transceiver Te2/4", "pid": "X2-10GB-SR", "sn": "ONT170202T5", "vid": "V06 ", } }, "5": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/5", "name": "Transceiver Te2/5", "pid": "X2-10GB-SR", "sn": "AGA1515XZE2", "vid": "V05 ", } }, "6": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/6", "name": "Transceiver Te2/6", "pid": "X2-10GB-SR", "sn": "FNS153920YJ", "vid": "V06 ", } }, "16": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/16", "name": "Transceiver Te2/16", "pid": "X2-10GB-SR", "sn": "ONT170201TT", "vid": "V06 ", } }, }, } } }, "3": { "lc": { "WS-X6824-SFP": { "name": "3", "descr": "WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 1.0", "pid": "WS-X6824-SFP", "vid": "V01", "sn": "SAL17152EG9", "subslot": { "0": { "WS-F6K-DFC4-A": { "descr": "WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 1.0", "name": "WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module of 3", "pid": "WS-F6K-DFC4-A", "sn": "SAL171848KL", "vid": "V04", } } }, } } }, "4": { "lc": { "WS-X6748-GE-TX": { "name": "4", "descr": "WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 3.4", "pid": "WS-X6748-GE-TX", "vid": "V04", "sn": "SAL14017TWF", "subslot": { "0": { "WS-F6700-CFC": { "descr": "WS-F6700-CFC Centralized Forwarding Card Rev. 4.1", "name": "WS-F6700-CFC Centralized Forwarding Card EARL sub-module of 4", "pid": "WS-F6700-CFC", "sn": "SAL13516QS8", "vid": "V06", } } }, } } }, }, } golden_output_2 = {'execute.return_value': ''' NAME: "WS-C6504-E", DESCR: "Cisco Systems Cisco 6500 4-slot Chassis System" PID: WS-C6504-E , VID: V01, SN: FXS1712Q1R8 NAME: "CLK-7600 1", DESCR: "OSR-7600 Clock FRU 1" PID: CLK-7600 , VID: , SN: FXS170802GL NAME: "CLK-7600 2", DESCR: "OSR-7600 Clock FRU 2" PID: CLK-7600 , VID: , SN: FXS170802GL NAME: "1", DESCR: "VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE w/ CTS Rev. 1.5" PID: VS-SUP2T-10G , VID: V05, SN: SAL17152N0F NAME: "msfc sub-module of 1", DESCR: "VS-F6K-MSFC5 CPU Daughterboard Rev. 
2.0" PID: VS-F6K-MSFC5 , VID: , SN: SAL17142D06 NAME: "VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of 1", DESCR: "VS-F6K-PFC4 Policy Feature Card 4 Rev. 2.0" PID: VS-F6K-PFC4 , VID: V03, SN: SAL17163901 NAME: "Transceiver Te1/4", DESCR: "X2 Transceiver 10Gbase-SR Te1/4" PID: X2-10GB-SR , VID: V06 , SN: ONT170202T1 NAME: "Transceiver Te1/5", DESCR: "X2 Transceiver 10Gbase-SR Te1/5" PID: X2-10GB-SR , VID: V06 , SN: ONT1702033D NAME: "2", DESCR: "WS-X6816-10GE CEF720 16 port 10GE Rev. 2.0" PID: WS-X6816-10GE , VID: V02, SN: SAL17152QB3 NAME: "WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module of 2", DESCR: "WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 1.2" PID: WS-F6K-DFC4-E , VID: V02, SN: SAL171846RF NAME: "Transceiver Te2/1", DESCR: "X2 Transceiver 10Gbase-SR Te2/1" PID: X2-10GB-SR , VID: V06 , SN: ONT17020338 NAME: "Transceiver Te2/2", DESCR: "X2 Transceiver 10Gbase-SR Te2/2" PID: X2-10GB-SR , VID: V06 , SN: ONT1702020H NAME: "Transceiver Te2/3", DESCR: "X2 Transceiver 10Gbase-SR Te2/3" PID: X2-10GB-SR , VID: V06 , SN: ONT170202UU NAME: "Transceiver Te2/4", DESCR: "X2 Transceiver 10Gbase-SR Te2/4" PID: X2-10GB-SR , VID: V06 , SN: ONT170202T5 NAME: "Transceiver Te2/5", DESCR: "X2 Transceiver 10Gbase-SR Te2/5" PID: X2-10GB-SR , VID: V05 , SN: AGA1515XZE2 NAME: "Transceiver Te2/6", DESCR: "X2 Transceiver 10Gbase-SR Te2/6" PID: X2-10GB-SR , VID: V06 , SN: FNS153920YJ NAME: "Transceiver Te2/16", DESCR: "X2 Transceiver 10Gbase-SR Te2/16" PID: X2-10GB-SR , VID: V06 , SN: ONT170201TT NAME: "3", DESCR: "WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 1.0" PID: WS-X6824-SFP , VID: V01, SN: SAL17152EG9 NAME: "WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module of 3", DESCR: "WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 1.0" PID: WS-F6K-DFC4-A , VID: V04, SN: SAL171848KL NAME: "4", DESCR: "WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 3.4" PID: WS-X6748-GE-TX , VID: V04, SN: SAL14017TWF NAME: "WS-F6700-CFC Centralized Forwarding Card EARL sub-module of 4", DESCR: "WS-F6700-CFC Centralized Forwarding Card Rev. 4.1" PID: WS-F6700-CFC , VID: V06, SN: SAL13516QS8 NAME: "FAN-MOD-4HS 1", DESCR: "High Speed Fan Module for CISCO7604 1" PID: FAN-MOD-4HS , VID: V01, SN: DCH170900PF NAME: "PS 1 PWR-2700-AC/4", DESCR: "2700W AC power supply for CISCO7604 1" PID: PWR-2700-AC/4 , VID: V03, SN: APS1707008Y NAME: "PS 2 PWR-2700-AC/4", DESCR: "2700W AC power supply for CISCO7604 2" PID: PWR-2700-AC/4 , VID: V03, SN: APS17070093 '''} golden_parsed_output_3 = { "main": { "chassis": { "WS-C6503-E": { "name": "WS-C6503-E", "descr": "Cisco Systems Catalyst 6500 3-slot Chassis System", "pid": "WS-C6503-E", "vid": "V03", "sn": "FXS1821Q2H9", } } }, "slot": { "CLK-7600 1": { "other": { "CLK-7600 1": { "name": "CLK-7600 1", "descr": "OSR-7600 Clock FRU 1", "pid": "CLK-7600", "vid": "", "sn": "FXS181101V4", } } }, "CLK-7600 2": { "other": { "CLK-7600 2": { "name": "CLK-7600 2", "descr": "OSR-7600 Clock FRU 2", "pid": "CLK-7600", "vid": "", "sn": "FXS181101V4", } } }, "1": { "rp": { "WS-SUP720-3BXL": { "name": "1", "descr": "WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 5.6", "pid": "WS-SUP720-3BXL", "vid": "V05", "sn": "SAL11434P2C", "subslot": { "0": { "WS-SUP720": { "descr": "WS-SUP720 MSFC3 Daughterboard Rev. 3.1", "name": "msfc sub-module of 1", "pid": "WS-SUP720", "sn": "SAL11434N9G", "vid": "", }, "WS-F6K-PFC3BXL": { "descr": "WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 
1.8", "name": "switching engine sub-module of 1", "pid": "WS-F6K-PFC3BXL", "sn": "SAL11434LYG", "vid": "V01", }, } }, } } }, "2": { "lc": { "WS-X6748-GE-TX": { "name": "2", "descr": "WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 2.6", "pid": "WS-X6748-GE-TX", "vid": "V02", "sn": "SAL1128UPQ9", "subslot": { "0": { "WS-F6700-DFC3CXL": { "descr": "WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 1.1", "name": "switching engine sub-module of 2", "pid": "WS-F6700-DFC3CXL", "sn": "SAL1214LAG5", "vid": "V01", } } }, } } }, "WS-C6503-E-FAN 1": { "other": { "WS-C6503-E-FAN 1": { "name": "WS-C6503-E-FAN 1", "descr": "Enhanced 3-slot Fan Tray 1", "pid": "WS-C6503-E-FAN", "vid": "V02", "sn": "DCH183500KW", } } }, "PS 1 PWR-1400-AC": { "other": { "PS 1 PWR-1400-AC": { "name": "PS 1 PWR-1400-AC", "descr": "AC power supply, 1400 watt 1", "pid": "PWR-1400-AC", "vid": "V01", "sn": "ABC0830J127", } } }, }, } golden_output_3 = {'execute.return_value': ''' # show inventory NAME: "WS-C6503-E", DESCR: "Cisco Systems Catalyst 6500 3-slot Chassis System" PID: WS-C6503-E , VID: V03, SN: FXS1821Q2H9 NAME: "CLK-7600 1", DESCR: "OSR-7600 Clock FRU 1" PID: CLK-7600 , VID: , SN: FXS181101V4 NAME: "CLK-7600 2", DESCR: "OSR-7600 Clock FRU 2" PID: CLK-7600 , VID: , SN: FXS181101V4 NAME: "1", DESCR: "WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 5.6" PID: WS-SUP720-3BXL , VID: V05, SN: SAL11434P2C NAME: "msfc sub-module of 1", DESCR: "WS-SUP720 MSFC3 Daughterboard Rev. 3.1" PID: WS-SUP720 , VID: , SN: SAL11434N9G NAME: "switching engine sub-module of 1", DESCR: "WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 1.8" PID: WS-F6K-PFC3BXL , VID: V01, SN: SAL11434LYG NAME: "2", DESCR: "WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 2.6" PID: WS-X6748-GE-TX , VID: V02, SN: SAL1128UPQ9 NAME: "switching engine sub-module of 2", DESCR: "WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 
1.1" PID: WS-F6700-DFC3CXL , VID: V01, SN: SAL1214LAG5 NAME: "WS-C6503-E-FAN 1", DESCR: "Enhanced 3-slot Fan Tray 1" PID: WS-C6503-E-FAN , VID: V02, SN: DCH183500KW NAME: "PS 1 PWR-1400-AC", DESCR: "AC power supply, 1400 watt 1" PID: PWR-1400-AC , VID: V01, SN: ABC0830J127 '''} golden_output_4 = {'execute.return_value': ''' NAME: "1", DESCR: "WS-C8888X-88" PID: WS-C0123X-45T-S , VID: V00 , SN: FDO123R12W NAME: "Switch 1 - Power Supply 1", DESCR: "ABC Power Supply" PID: C3KX-PWR-350WAC , VID: V01D , SN: DTN1504L0E9 NAME: "TenGigabitEthernet1/1/1", DESCR: "SFP-10GBase-SR" PID: SFP-10G-SR , VID: V03 , SN: SPC1519005V NAME: "2", DESCR: "WS-C3210X-48" PID: WS-C3210X-48T-S , VID: V02 , SN: FD5678Z90P NAME: "Switch 2 - Power Supply 1", DESCR: "BCA Power Supply" PID: C3KX-PWR-007CBA , VID: V01L , SN: LTP13579L3R NAME: "TenGigabitEthernet2/1/1", DESCR: "SFP-10GBase-LR" PID: SFP-10G-LR , VID: V02 , SN: ONT182746GZ NAME: "1", DESCR: "WS-C1010XR-48FPS-I" PID: WS-C1010XR-48FPS-I, VID: V05 , SN: FD2043B0K3 NAME: "Switch 1 - Power Supply 1", DESCR: "LLL Power Supply" PID: PWR-C2-2929WAC , VID: V02L , SN: LIT03728KKK NAME: "Switch 1 - FlexStackPlus Module", DESCR: "Stacking Module" PID: C1010X-STACK , VID: V02 , SN: FD232323XXZ NAME: "GigabitEthernet1/0/49", DESCR: "1000BaseSX SFP" PID: GLC-SX-MMD , VID: V01 , SN: ACW102938VS '''} golden_parsed_output_4 = { 'slot': { '1': { 'rp': { 'WS-C0123X-45T-S': { 'descr': 'WS-C8888X-88', 'name': '1', 'pid': 'WS-C0123X-45T-S', 'sn': 'FDO123R12W', 'subslot': { '1': { 'C3KX-PWR-350WAC': { 'descr': 'ABC Power Supply', 'name': 'Switch 1 - Power Supply 1', 'pid': 'C3KX-PWR-350WAC', 'sn': 'DTN1504L0E9', 'vid': 'V01D ', }, }, '1/1/1': { 'SFP-10G-SR': { 'descr': 'SFP-10GBase-SR', 'name': 'TenGigabitEthernet1/1/1', 'pid': 'SFP-10G-SR', 'sn': 'SPC1519005V', 'vid': 'V03 ', }, }, }, 'vid': 'V00 ', }, 'WS-C1010XR-48FPS-I': { 'descr': 'WS-C1010XR-48FPS-I', 'name': '1', 'pid': 'WS-C1010XR-48FPS-I', 'sn': 'FD2043B0K3', 'subslot': { '1': { 'C1010X-STACK': { 'descr': 'Stacking Module', 'name': 'Switch 1 - FlexStackPlus Module', 'pid': 'C1010X-STACK', 'sn': 'FD232323XXZ', 'vid': 'V02 ', }, 'PWR-C2-2929WAC': { 'descr': 'LLL Power Supply', 'name': 'Switch 1 - Power Supply 1', 'pid': 'PWR-C2-2929WAC', 'sn': 'LIT03728KKK', 'vid': 'V02L ', }, }, '1/0/49': { 'GLC-SX-MMD': { 'descr': '1000BaseSX SFP', 'name': 'GigabitEthernet1/0/49', 'pid': 'GLC-SX-MMD', 'sn': 'ACW102938VS', 'vid': 'V01 ', }, }, }, 'vid': 'V05 ', }, }, }, '2': { 'rp': { 'WS-C3210X-48T-S': { 'descr': 'WS-C3210X-48', 'name': '2', 'pid': 'WS-C3210X-48T-S', 'sn': 'FD5678Z90P', 'subslot': { '2': { 'C3KX-PWR-007CBA': { 'descr': 'BCA Power Supply', 'name': 'Switch 2 - Power Supply 1', 'pid': 'C3KX-PWR-007CBA', 'sn': 'LTP13579L3R', 'vid': 'V01L ', }, }, '2/1/1': { 'SFP-10G-LR': { 'descr': 'SFP-10GBase-LR', 'name': 'TenGigabitEthernet2/1/1', 'pid': 'SFP-10G-LR', 'sn': 'ONT182746GZ', 'vid': 'V02 ', }, }, }, 'vid': 'V02 ', }, }, }, }, } golden_output_5 = {'execute.return_value': ''' best-c3945-IOS3#show inventory NAME: "CISCO3945-CHASSIS", DESCR: "CISCO3945-CHASSIS" PID: CISCO3945-CHASSIS , VID: V05 , SN: FGL161010K8 NAME: "Cisco Services Performance Engine 150 for Cisco 3900 ISR on Slot 0", DESCR: "Cisco Services Performance Engine 150 for Cisco 3900 ISR" PID: C3900-SPE150/K9 , VID: V05 , SN: FOC16050QP6 NAME: "Two-Port Fast Ethernet High Speed WAN Interface Card on Slot 0 SubSlot 3", DESCR: "Two-Port Fast Ethernet High Speed WAN Interface Card" PID: HWIC-2FE , VID: V02 , SN: FOC16062824 NAME: "C3900 AC Power Supply 1", DESCR: "C3900 
AC Power Supply 1" PID: PWR-3900-AC , VID: V03 , SN: QCS1604P0BT '''} golden_parsed_output_5 = { 'main': { 'chassis': { 'CISCO3945-CHASSIS': { 'descr': 'CISCO3945-CHASSIS', 'name': 'CISCO3945-CHASSIS', 'pid': 'CISCO3945-CHASSIS', 'sn': 'FGL161010K8', 'vid': 'V05 ', }, }, }, 'slot': { '0': { 'rp': { 'C3900-SPE150/K9': { 'descr': 'Cisco Services Performance Engine 150 for Cisco 3900 ISR', 'name': 'Cisco Services Performance Engine 150 for Cisco 3900 ISR on Slot 0', 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6', 'subslot': { '3': { 'HWIC-2FE': { 'descr': 'Two-Port Fast Ethernet High Speed WAN Interface Card', 'name': 'Two-Port Fast Ethernet High Speed WAN Interface Card on Slot 0 SubSlot 3', 'pid': 'HWIC-2FE', 'sn': 'FOC16062824', 'vid': 'V02 ', }, }, }, 'vid': 'V05 ', }, }, }, 'C3900 AC Power Supply 1': { 'other': { 'C3900 AC Power Supply 1': { 'descr': 'C3900 AC Power Supply 1', 'name': 'C3900 AC Power Supply 1', 'pid': 'PWR-3900-AC', 'sn': 'QCS1604P0BT', 'vid': 'V03 ', }, }, }, }, } golden_output_6 = {'execute.return_value': ''' NAME: "1", DESCR: "SM-ES2-16-P" PID: SM-ES2-16-P , VID: , SN: FOC09876NP3 '''} golden_parsed_output_6 = { 'slot': { '1': { 'lc': { 'SM-ES2-16-P': { 'descr': 'SM-ES2-16-P', 'name': '1', 'pid': 'SM-ES2-16-P', 'sn': 'FOC09876NP3', 'vid': '', }, }, }, }, } golden_output_7 = {'execute.return_value': ''' NAME: "2821 chassis", DESCR: "2821 chassis" PID: CISCO2821 , VID: V07 , SN: FTX1234AMWT NAME: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0", DESCR: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1" PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675U0D NAME: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 1", DESCR: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1" PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675W3E NAME: "Virtual Private Network (VPN) Module on Slot 0", DESCR: "Encryption AIM Element" PID: AIM-VPN/SSL-2 , VID: V01, SN: FOC2837465E '''} golden_parsed_output_7 = { 'main': { 'chassis': { 'CISCO2821': { 'descr': '2821 chassis', 'name': '2821 chassis', 'pid': 'CISCO2821', 'sn': 'FTX1234AMWT', 'vid': 'V07 ', }, }, }, 'slot': { '0': { 'other': { 'AIM-VPN/SSL-2': { 'descr': 'Encryption AIM Element', 'name': 'Virtual Private Network (VPN) Module on Slot 0', 'pid': 'AIM-VPN/SSL-2', 'sn': 'FOC2837465E', 'vid': 'V01', 'subslot': { '0': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1', 'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675U0D', 'vid': 'V01 ', }, }, '1': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1', 'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 1', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675W3E', 'vid': 'V01 ', }, }, }, }, }, }, }, } golden_output_8 = {'execute.return_value': ''' NAME: "3825 chassis", DESCR: "3825 chassis" PID: CISCO3825 , VID: V05 , SN: FTX7908A3RQ NAME: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0", DESCR: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1" PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC65428K9F NAME: "Wan Interface Card BRI U (2091, 3086) on Slot 0 SubSlot 1", DESCR: "Wan Interface Card BRI U (2091, 3086)" PID: WIC-1B-U-V2 , VID: V01, SN: 10293847 NAME: "PVDMII DSP SIMM with four DSPs on Slot 0 SubSlot 4", DESCR: "PVDMII DSP SIMM with four DSPs" PID: PVDM2-64 , VID: V01 , SN: FOC63358WSI NAME: "High Density Voice 
Module - 8FXS/DID on Slot 1", DESCR: "High Density Voice Module - 8FXS/DID" PID: EVM-HD-8FXS/DID , VID: V04 , SN: FOC65798TG8 NAME: "Six port FXO voice interface daughtercard on Slot 1 SubSlot 1", DESCR: "Six port FXO voice interface daughtercard" PID: EM-HDA-6FXO , VID: V03 , SN: FOC85389QXB '''} golden_parsed_output_8 = { 'main': { 'chassis': { 'CISCO3825': { 'descr': '3825 chassis', 'name': '3825 chassis', 'pid': 'CISCO3825', 'sn': 'FTX7908A3RQ', 'vid': 'V05 ', }, }, }, 'slot': { '0': { 'rp': { 'CISCO3825': { 'subslot': { '0': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1', 'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC65428K9F', 'vid': 'V01 ', }, }, '1': { 'WIC-1B-U-V2': { 'descr': 'Wan Interface Card BRI U (2091, 3086)', 'name': 'Wan Interface Card BRI U (2091, 3086) on Slot 0 SubSlot 1', 'pid': 'WIC-1B-U-V2', 'sn': '10293847', 'vid': 'V01', }, }, '4': { 'PVDM2-64': { 'descr': 'PVDMII DSP SIMM with four DSPs', 'name': 'PVDMII DSP SIMM with four DSPs on Slot 0 SubSlot 4', 'pid': 'PVDM2-64', 'sn': 'FOC63358WSI', 'vid': 'V01 ', }, }, }, }, }, }, '1': { 'other': { 'EVM-HD-8FXS/DID': { 'descr': 'High Density Voice Module - 8FXS/DID', 'name': 'High Density Voice Module - 8FXS/DID on Slot 1', 'pid': 'EVM-HD-8FXS/DID', 'sn': 'FOC65798TG8', 'subslot': { '1': { 'EM-HDA-6FXO': { 'descr': 'Six port FXO voice interface daughtercard', 'name': 'Six port FXO voice interface daughtercard on Slot 1 SubSlot 1', 'pid': 'EM-HDA-6FXO', 'sn': 'FOC85389QXB', 'vid': 'V03 ', }, }, }, 'vid': 'V04 ', }, }, }, }, } golden_output_9 = {'execute.return_value': ''' NAME: "3845 chassis", DESCR: "3845 chassis" PID: CISCO3845 , VID: V05 , SN: FTX6666ARJ9 NAME: "c3845 Motherboard with Gigabit Ethernet on Slot 0", DESCR: "c3845 Motherboard with Gigabit Ethernet" PID: CISCO3845-MB , VID: V09 , SN: FOC729346GQ NAME: "Virtual Private Network (VPN) Module on Slot 0", DESCR: "Encryption AIM Element" PID: AIM-VPN/SSL-3 , VID: V01, SN: FOC758693YO NAME: "Clear/Subrate T3/E3 WAN on Slot 1", DESCR: "Clear/Subrate T3/E3 WAN" PID: NM-1T3/E3= , VID: V01 , SN: FOC28476ADM NAME: "16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2", DESCR: "16 Port 10BaseT/100BaseTX EtherSwitch" PID: NM-16ESW , VID: V01 , SN: FOC135464KO NAME: "Gigabit(1000BaseT) module for EtherSwitch NM on Slot 2 SubSlot 0", DESCR: "Gigabit(1000BaseT) module for EtherSwitch NM" PID: GE-DCARD-ESW , VID: V01 , SN: FOC91864MNN '''} golden_parsed_output_9 = { 'main': { 'chassis': { 'CISCO3845': { 'descr': '3845 chassis', 'name': '3845 chassis', 'pid': 'CISCO3845', 'sn': 'FTX6666ARJ9', 'vid': 'V05 ', }, }, }, 'slot': { '0': { 'lc': { 'CISCO3845-MB': { 'descr': 'c3845 Motherboard with Gigabit Ethernet', 'name': 'c3845 Motherboard with Gigabit Ethernet on Slot 0', 'pid': 'CISCO3845-MB', 'sn': 'FOC729346GQ', 'vid': 'V09 ', }, }, 'other': { 'AIM-VPN/SSL-3': { 'descr': 'Encryption AIM Element', 'name': 'Virtual Private Network (VPN) Module on Slot 0', 'pid': 'AIM-VPN/SSL-3', 'sn': 'FOC758693YO', 'vid': 'V01', }, }, }, '1': { 'lc': { 'NM-1T3/E3=': { 'descr': 'Clear/Subrate T3/E3 WAN', 'name': 'Clear/Subrate T3/E3 WAN on Slot 1', 'pid': 'NM-1T3/E3=', 'sn': 'FOC28476ADM', 'vid': 'V01 ', }, }, }, '16': { 'lc': { 'NM-16ESW': { 'descr': '16 Port 10BaseT/100BaseTX EtherSwitch', 'name': '16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2', 'pid': 'NM-16ESW', 'sn': 'FOC135464KO', 'subslot': { '0': { 'GE-DCARD-ESW': { 'descr': 'Gigabit(1000BaseT) module for EtherSwitch NM', 
'name': 'Gigabit(1000BaseT) module for EtherSwitch NM on Slot 2 SubSlot 0', 'pid': 'GE-DCARD-ESW', 'sn': 'FOC91864MNN', 'vid': 'V01 ', }, }, }, 'vid': 'V01 ', }, }, }, }, } def test_empty(self): self.dev1 = Mock(**self.empty_output) inventory_obj = ShowInventory(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = inventory_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) inventory_obj = ShowInventory(device=self.dev_iosv) parsed_output = inventory_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_output_2(self): self.maxDiff = None self.device = Mock(**self.golden_output_2) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_2) def test_golden_output_3(self): self.maxDiff = None self.device = Mock(**self.golden_output_3) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_3) def test_golden_output_4(self): self.maxDiff = None self.device = Mock(**self.golden_output_4) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_4) def test_golden_output_5(self): self.maxDiff = None self.device = Mock(**self.golden_output_5) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_5) def test_golden_output_6(self): self.maxDiff = None self.device = Mock(**self.golden_output_6) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_6) def test_golden_output_7(self): self.maxDiff = None self.device = Mock(**self.golden_output_7) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_7) def test_golden_output_8(self): self.maxDiff = None self.device = Mock(**self.golden_output_8) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_8) def test_golden_output_9(self): self.maxDiff = None self.device = Mock(**self.golden_output_9) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_9) class test_show_bootvar(unittest.TestCase): dev = Device(name='ios') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { "active": { "boot_variable": "disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12", "configuration_register": "0x2012" }, "next_reload_boot_variable": "disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12" } golden_output_iosv = {'execute.return_value': '''\ BOOT variable = disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12; CONFIG_FILE variable = BOOTLDR variable = Configuration register is 0x2012 Standby not ready to show bootvar '''} def test_empty(self): self.dev = Mock(**self.empty_output) platform_obj = ShowBootvar(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) platform_obj = ShowBootvar(device=self.dev_iosv) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_processes_cpu_sorted_CPU(unittest.TestCase): dev = 
Device(name='c3850')
    # An empty CLI response; the parser must raise SchemaEmptyParserError on it.
    empty_output = {'execute.return_value': ''}

    golden_parsed_output = {
        "five_sec_cpu_total": 13,
        "five_min_cpu": 15,
        "one_min_cpu": 23,
        "five_sec_cpu_interrupts": 0
    }

    golden_output = {'execute.return_value': '''\
show processes cpu sorted 5min | inc CPU
CPU utilization for five seconds: 13%/0%; one minute: 23%; five minutes: 15%
'''}

    golden_parsed_output_1 = {
        "sort": {
            1: {
                "invoked": 3321960,
                "usecs": 109,
                "tty": 0,
                "one_min_cpu": 0.54,
                "process": "PIM Process",
                "five_min_cpu": 0.48,
                "runtime": 362874,
                "pid": 368,
                "five_sec_cpu": 1.03
            },
            2: {
                "invoked": 1466728,
                "usecs": 2442,
                "tty": 0,
                "one_min_cpu": 0.87,
                "process": "IOSv e1000",
                "five_min_cpu": 2.77,
                "runtime": 3582279,
                "pid": 84,
                "five_sec_cpu": 0.55
            },
            3: {
                "invoked": 116196,
                "usecs": 976,
                "tty": 0,
                "one_min_cpu": 0.07,
                "process": "OSPF-1 Hello",
                "five_min_cpu": 0.07,
                "runtime": 113457,
                "pid": 412,
                "five_sec_cpu": 0.15
            }
        },
        "five_sec_cpu_total": 4,
        "five_min_cpu": 9,
        "one_min_cpu": 4,
        "nonzero_cpu_processes": [
            "PIM Process",
            "IOSv e1000",
            "OSPF-1 Hello"
        ],
        "five_sec_cpu_interrupts": 0
    }

    golden_output_1 = {'execute.return_value': '''
CPU utilization for five seconds: 4%/0%; one minute: 4%; five minutes: 9%
 PID Runtime(ms)     Invoked      uSecs   5Sec   1Min   5Min TTY Process
 368      362874     3321960        109  1.03%  0.54%  0.48%   0 PIM Process
  84     3582279     1466728       2442  0.55%  0.87%  2.77%   0 IOSv e1000
 412      113457      116196        976  0.15%  0.07%  0.07%   0 OSPF-1 Hello
'''}

    def test_empty(self):
        self.dev = Mock(**self.empty_output)
        obj = ShowProcessesCpuSorted(device=self.dev)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_golden(self):
        self.maxDiff = None
        self.dev = Mock(**self.golden_output)
        obj = ShowProcessesCpuSorted(device=self.dev)
        parsed_output = obj.parse(key_word='CPU', sort_time='5min')
        self.assertEqual(parsed_output, self.golden_parsed_output)

    def test_golden_1(self):
        self.maxDiff = None
        self.dev = Mock(**self.golden_output_1)
        obj = ShowProcessesCpuSorted(device=self.dev)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_1)


# The classes below inherit their golden outputs and expected dictionaries
# from the corresponding *_iosxe test classes, so only test methods appear here.
class test_show_processes_cpu(test_show_processes_cpu_iosxe):

    def test_golden(self):
        self.device = Mock(**self.golden_output)
        obj = ShowProcessesCpu(device=self.device)
        parsed_output = obj.parse()
        self.maxDiff = None
        self.assertEqual(parsed_output, self.golden_parsed_output)

    def test_golden_1(self):
        self.maxDiff = None
        self.device = Mock(**self.golden_output_1)
        obj = ShowProcessesCpu(device=self.device)
        parsed_output = obj.parse(key_word='process')
        self.assertEqual(parsed_output, self.golden_parsed_output_1)

    def test_empty(self):
        self.device1 = Mock(**self.empty_output)
        obj = ShowProcessesCpu(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()


class test_show_version_rp(test_show_version_rp_iosxe):

    def test_golden_active(self):
        self.device = Mock(**self.golden_output_active)
        obj = ShowVersionRp(device=self.device)
        parsed_output = obj.parse(rp='active', status='running')
        self.maxDiff = None
        self.assertEqual(parsed_output, self.golden_parsed_output_active)

    def test_golden_standby(self):
        self.device = Mock(**self.golden_output_standby)
        obj = ShowVersionRp(device=self.device)
        parsed_output = obj.parse(rp='standby', status='running')
        self.maxDiff = None
        self.assertEqual(parsed_output, self.golden_parsed_output_standby)

    def test_golden_standby_offline(self):
        self.device = Mock(**self.golden_output_standby_offline)
        obj = ShowVersionRp(device=self.device)
        self.maxDiff = None
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse(rp='standby', status='running')

    def test_empty(self):
        self.device1 = Mock(**self.empty_output)
        obj = ShowVersionRp(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()


class test_show_platform(test_show_platform_iosxe):

    def test_empty(self):
        self.dev1 = Mock(**self.empty_output)
        platform_obj = ShowPlatform(device=self.dev1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = platform_obj.parse()

    def test_semi_empty(self):
        self.dev2 = Mock(**self.semi_empty_output)
        platform_obj = ShowPlatform(device=self.dev2)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = platform_obj.parse()

    def test_golden_c3850(self):
        self.maxDiff = None
        self.dev_c3850 = Mock(**self.golden_output_c3850)
        platform_obj = ShowPlatform(device=self.dev_c3850)
        parsed_output = platform_obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_c3850)

    def test_golden_asr1k(self):
        self.maxDiff = None
        self.dev_asr1k = Mock(**self.golden_output_asr1k)
        platform_obj = ShowPlatform(device=self.dev_asr1k)
        parsed_output = platform_obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_asr1k)


class test_show_platform_power(test_show_platform_power_iosxe):

    def test_empty(self):
        self.device = Mock(**self.empty_output)
        platform_obj = ShowPlatformPower(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = platform_obj.parse()

    def test_golden(self):
        self.maxDiff = None
        self.device = Mock(**self.golden_output)
        platform_obj = ShowPlatformPower(device=self.device)
        parsed_output = platform_obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)


class test_show_processes_cpu_history(test_show_processes_cpu_history_iosxe):

    def test_empty(self):
        self.device = Mock(**self.empty_output)
        platform_obj = ShowProcessesCpuHistory(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = platform_obj.parse()

    def test_golden(self):
        self.maxDiff = None
        self.device = Mock(**self.golden_output)
        platform_obj = ShowProcessesCpuHistory(device=self.device)
        parsed_output = platform_obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)


class test_show_processes_cpu_platform(test_show_processes_cpu_platform_iosxe):

    def test_golden(self):
        self.device = Mock(**self.golden_output)
        cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device)
        parsed_output = cpu_platform_obj.parse()
        self.maxDiff = None
        self.assertEqual(parsed_output, self.golden_parsed_output)

    def test_empty(self):
        self.device1 = Mock(**self.empty_output)
        cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = cpu_platform_obj.parse()


class test_show_platform_software_status_control_processor_brief(
        test_show_platform_software_status_control_processor_brief_iosxe):

    def test_empty(self):
        self.dev = Mock(**self.empty_output)
        obj = ShowPlatformSoftwareStatusControl(device=self.dev)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_golden(self):
        self.maxDiff = None
        self.dev = Mock(**self.golden_output)
        obj = ShowPlatformSoftwareStatusControl(device=self.dev)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)


class test_show_platform_software_slot_active_monitor_Mem(
        test_show_platform_software_slot_active_monitor_Mem_iosxe):

    def test_empty(self):
        self.dev = Mock(**self.empty_output)
        obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_golden(self):
        self.maxDiff = None
        self.dev = Mock(**self.golden_output)
        obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)


class test_show_platform_hardware(test_show_platform_hardware_iosxe):

    def test_golden_active(self):
        self.device = Mock(**self.golden_output_active)
        obj = ShowPlatformHardware(device=self.device)
        parsed_output = obj.parse()
        self.maxDiff = None
        self.assertEqual(parsed_output, self.golden_parsed_output_active)

    def test_empty(self):
        self.device1 = Mock(**self.empty_output)
        obj = ShowPlatformHardware(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()


class test_show_platform_hardware_plim(test_show_platform_hardware_plim_iosxe):

    def test_golden_port(self):
        self.device = Mock(**self.golden_output_port)
        obj = ShowPlatformHardwarePlim(device=self.device)
        parsed_output = obj.parse(port='0/0/0')
        self.maxDiff = None
        self.assertEqual(parsed_output, self.golden_parsed_output_port)

    def test_golden_slot(self):
        self.device = Mock(**self.golden_output_slot)
        obj = ShowPlatformHardwarePlim(device=self.device)
        parsed_output = obj.parse(slot='0')
        self.maxDiff = None
        self.assertEqual(parsed_output, self.golden_parsed_output_slot)

    def test_golden_subslot(self):
        self.device = Mock(**self.golden_output_subslot)
        obj = ShowPlatformHardwarePlim(device=self.device)
        parsed_output = obj.parse(subslot='0/1')
        self.maxDiff = None
        self.assertEqual(parsed_output, self.golden_parsed_output_subslot)

    def test_golden_slot_internal(self):
        self.device = Mock(**self.golden_output_slot_internal)
        obj = ShowPlatformHardwarePlim(device=self.device)
        parsed_output = obj.parse(slot='0', internal=True)
        self.maxDiff = None
        self.assertEqual(
            parsed_output, self.golden_parsed_output_slot_internal)

    def test_empty(self):
        self.device1 = Mock(**self.empty_output)
        obj = ShowPlatformHardwarePlim(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse(port='0/0/0')


class test_show_platform_hardware_qfp_bqs_opm_mapping(
        test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe):

    def test_golden_active_opm(self):
        self.device = Mock(**self.golden_output_active_opm)
        obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device)
        parsed_output = obj.parse(status='active', slot='0')
        self.maxDiff = None
        self.assertEqual(parsed_output, self.golden_parsed_output_active_opm)

    def test_empty(self):
        self.device1 = Mock(**self.empty_output)
        obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse(status='active', slot='0')


class test_show_platform_hardware_qfp_bqs_ipm_mapping(
        test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe):

    def test_golden_active_ipm(self):
        self.device = Mock(**self.golden_output_active_ipm)
        obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device)
        parsed_output = obj.parse(status='active', slot='0')
        self.maxDiff = None
        self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm)

    def test_empty(self):
        self.device1 = Mock(**self.empty_output)
        obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse(status='active', slot='0')


class test_show_platform_hardware_serdes_statistics(
        test_show_platform_hardware_serdes_statistics_iosxe):

    def test_golden_serdes(self):
        self.device = Mock(**self.golden_output_serdes)
        obj = ShowPlatformHardwareSerdes(device=self.device)
        parsed_output = obj.parse(slot='0')
        self.maxDiff = None
        self.assertEqual(parsed_output, self.golden_parsed_output_serdes)

    def test_empty(self):
        self.device1 = Mock(**self.empty_output)
        obj = ShowPlatformHardwareSerdes(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse(slot='0')


class test_show_platform_hardware_serdes_statistics_internal(
        test_show_platform_hardware_serdes_statistics_internal_iosxe):

    def test_golden(self):
        self.device = Mock(**self.golden_output_serdes_internal)
        obj = ShowPlatformHardwareSerdesInternal(device=self.device)
        parsed_output = obj.parse(slot='0')
        self.maxDiff = None
        self.assertEqual(
            parsed_output, self.golden_parsed_output_serdes_internal)

    def test_empty(self):
        self.device1 = Mock(**self.empty_output)
        obj = ShowPlatformHardwareSerdesInternal(device=self.device1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse(slot='0')


class show_platform_hardware_qfp_bqs_statistics_channel_all(
        show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe):

    def test_empty(self):
        self.device = Mock(**self.empty_output)
        platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll(
            device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = platform_obj.parse(
                status='active', slot='0', iotype='ipm')

    def test_golden_active_ipm(self):
        self.maxDiff = None
        self.device = Mock(**self.golden_output_active_ipm)
        platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll(
            device=self.device)
        parsed_output = platform_obj.parse(
            status='active', slot='0', iotype='ipm')
        self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm)

    def test_golden_active_opm(self):
        self.maxDiff = None
        self.device = Mock(**self.golden_output_active_opm)
        platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll(
            device=self.device)
        parsed_output = platform_obj.parse(
            status='active', slot='0', iotype='opm')
        self.assertEqual(parsed_output, self.golden_parsed_output_active_opm)


class show_platform_hardware_qfp_interface(show_platform_hardware_qfp_interface_iosxe):

    def test_empty(self):
        self.device = Mock(**self.empty_output)
        platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics(
            device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = platform_obj.parse(
                status='active', interface='gigabitEthernet 0/0/0')

    def test_golden(self):
        self.maxDiff = None
        self.device = Mock(**self.golden_output)
        platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics(
            device=self.device)
        parsed_output = platform_obj.parse(
            status='active', interface='gigabitEthernet 0/0/0')
        self.assertEqual(parsed_output, self.golden_parsed_output)


class test_show_platform_hardware_qfp_statistics_drop(
        test_show_platform_hardware_qfp_statistics_drop_iosxe):

    def test_empty(self):
        self.device = Mock(**self.empty_output)
        platform_obj = ShowPlatformHardwareQfpStatisticsDrop(
            device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = platform_obj.parse(status='active')

    def test_golden_active(self):
        self.maxDiff = None
        self.device = Mock(**self.golden_output_active)
        platform_obj = ShowPlatformHardwareQfpStatisticsDrop(
            device=self.device)
        parsed_output = platform_obj.parse(status='active')
        self.assertEqual(parsed_output, self.golden_parsed_output_active)


class test_show_env(test_show_env_iosxe):

    def test_empty(self):
        self.dev = Mock(**self.empty_output)
        obj = ShowEnvironment(device=self.dev)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_golden(self):
        self.maxDiff = None
        self.dev = Mock(**self.golden_output)
        obj = ShowEnvironment(device=self.dev)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)


class test_show_module(test_show_module_iosxe):

    def test_empty(self):
        self.dev1 = Mock(**self.empty_output)
        platform_obj = ShowModule(device=self.dev1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = platform_obj.parse()

    def test_golden(self):
        self.maxDiff = None
        self.dev_c3850 = Mock(**self.golden_output_c3850)
        platform_obj = ShowModule(device=self.dev_c3850)
        parsed_output = platform_obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_c3850)


class test_show_switch(test_show_switch_iosxe):

    def test_empty(self):
        self.dev1 = Mock(**self.empty_output)
        platform_obj = ShowSwitch(device=self.dev1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = platform_obj.parse()

    def test_golden(self):
        self.maxDiff = None
        self.dev_c3850 = Mock(**self.golden_output_c3850)
        platform_obj = ShowSwitch(device=self.dev_c3850)
        parsed_output = platform_obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_c3850)


class test_show_switch_detail(test_show_switch_detail_iosxe):

    def test_empty(self):
        self.dev1 = Mock(**self.empty_output)
        platform_obj = ShowSwitchDetail(device=self.dev1)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = platform_obj.parse()

    def test_golden(self):
        self.maxDiff = None
        self.dev_c3850 = Mock(**self.golden_output_c3850)
        platform_obj = ShowSwitchDetail(device=self.dev_c3850)
        parsed_output = platform_obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_c3850)


if __name__ == '__main__':
    unittest.main()
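Every test class above follows the same two-case pattern: a "golden" test feeds recorded CLI output to the parser through a mocked device and compares the parsed dictionary against a hand-written expectation, while an "empty" test checks that an empty response raises SchemaEmptyParserError. The sketch below distills that pattern using the ShowEnvironment parser exercised above; it is an illustrative sketch, and the SchemaEmptyParserError import path follows the usual genie convention as an assumption, since this file's import block is not part of this excerpt.

# Minimal sketch of the golden/empty test pattern used throughout this file.
import unittest
from unittest.mock import Mock

# Assumed import path for the schema exception (common genie convention).
from genie.metaparser.util.exceptions import SchemaEmptyParserError
from genie.libs.parser.ios.show_platform import ShowEnvironment


class MinimalParserPatternSketch(unittest.TestCase):

    def test_empty_raises(self):
        # The dotted keyword configures the child mock, so
        # dev.execute(<show command>) returns an empty string.
        dev = Mock(**{'execute.return_value': ''})
        obj = ShowEnvironment(device=dev)
        with self.assertRaises(SchemaEmptyParserError):
            obj.parse()

A golden test has the same shape, with 'execute.return_value' set to captured device output and an assertEqual against the expected dictionary, which is exactly what the classes above do for each parser.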
[((2878, 2898), 'pyats.topology.Device', 'Device', ([], {'name': '"""empty"""'}), "(name='empty')\n", (2884, 2898), False, 'from pyats.topology import Device\n'), ((2914, 2933), 'pyats.topology.Device', 'Device', ([], {'name': '"""iosv"""'}), "(name='iosv')\n", (2920, 2933), False, 'from pyats.topology import Device\n'), ((27613, 27633), 'pyats.topology.Device', 'Device', ([], {'name': '"""empty"""'}), "(name='empty')\n", (27619, 27633), False, 'from pyats.topology import Device\n'), ((27649, 27668), 'pyats.topology.Device', 'Device', ([], {'name': '"""iosv"""'}), "(name='iosv')\n", (27655, 27668), False, 'from pyats.topology import Device\n'), ((30676, 30696), 'pyats.topology.Device', 'Device', ([], {'name': '"""empty"""'}), "(name='empty')\n", (30682, 30696), False, 'from pyats.topology import Device\n'), ((30712, 30731), 'pyats.topology.Device', 'Device', ([], {'name': '"""iosv"""'}), "(name='iosv')\n", (30718, 30731), False, 'from pyats.topology import Device\n'), ((33346, 33366), 'pyats.topology.Device', 'Device', ([], {'name': '"""empty"""'}), "(name='empty')\n", (33352, 33366), False, 'from pyats.topology import Device\n'), ((33382, 33401), 'pyats.topology.Device', 'Device', ([], {'name': '"""iosv"""'}), "(name='iosv')\n", (33388, 33401), False, 'from pyats.topology import Device\n'), ((75887, 75905), 'pyats.topology.Device', 'Device', ([], {'name': '"""ios"""'}), "(name='ios')\n", (75893, 75905), False, 'from pyats.topology import Device\n'), ((75921, 75940), 'pyats.topology.Device', 'Device', ([], {'name': '"""iosv"""'}), "(name='iosv')\n", (75927, 75940), False, 'from pyats.topology import Device\n'), ((77192, 77212), 'pyats.topology.Device', 'Device', ([], {'name': '"""c3850"""'}), "(name='c3850')\n", (77198, 77212), False, 'from pyats.topology import Device\n'), ((96562, 96577), 'unittest.main', 'unittest.main', ([], {}), '()\n', (96575, 96577), False, 'import unittest\n'), ((25720, 25745), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (25724, 25745), False, 'from unittest.mock import Mock\n'), ((25768, 25797), 'genie.libs.parser.ios.show_platform.ShowVersion', 'ShowVersion', ([], {'device': 'self.dev1'}), '(device=self.dev1)\n', (25779, 25797), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((25948, 25978), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.semi_empty_output)\n', (25952, 25978), False, 'from unittest.mock import Mock\n'), ((26001, 26030), 'genie.libs.parser.ios.show_platform.ShowVersion', 'ShowVersion', ([], {'device': 'self.dev1'}), '(device=self.dev1)\n', (26012, 26030), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, 
ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((26206, 26237), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_iosv)\n', (26210, 26237), False, 'from unittest.mock import Mock\n'), ((26260, 26293), 'genie.libs.parser.ios.show_platform.ShowVersion', 'ShowVersion', ([], {'device': 'self.dev_iosv'}), '(device=self.dev_iosv)\n', (26271, 26293), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((26494, 26524), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_ios)\n', (26498, 26524), False, 'from unittest.mock import Mock\n'), ((26547, 26580), 'genie.libs.parser.ios.show_platform.ShowVersion', 'ShowVersion', ([], {'device': 'self.dev_iosv'}), '(device=self.dev_iosv)\n', (26558, 26580), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((26786, 26822), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_ios_cat6k)\n', (26790, 26822), False, 'from unittest.mock import Mock\n'), ((26845, 26878), 'genie.libs.parser.ios.show_platform.ShowVersion', 'ShowVersion', ([], {'device': 'self.dev_iosv'}), '(device=self.dev_iosv)\n', (26856, 26878), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((27086, 
27118), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_ios_1)\n', (27090, 27118), False, 'from unittest.mock import Mock\n'), ((27141, 27174), 'genie.libs.parser.ios.show_platform.ShowVersion', 'ShowVersion', ([], {'device': 'self.dev_iosv'}), '(device=self.dev_iosv)\n', (27152, 27174), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((27378, 27404), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.device_output)\n', (27382, 27404), False, 'from unittest.mock import Mock\n'), ((27427, 27460), 'genie.libs.parser.ios.show_platform.ShowVersion', 'ShowVersion', ([], {'device': 'self.dev_iosv'}), '(device=self.dev_iosv)\n', (27438, 27460), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((29953, 29978), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (29957, 29978), False, 'from unittest.mock import Mock\n'), ((29997, 30018), 'genie.libs.parser.ios.show_platform.Dir', 'Dir', ([], {'device': 'self.dev1'}), '(device=self.dev1)\n', (30000, 30018), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((30173, 30203), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.semi_empty_output)\n', (30177, 30203), False, 'from unittest.mock import Mock\n'), ((30222, 30243), 'genie.libs.parser.ios.show_platform.Dir', 'Dir', ([], {'device': 'self.dev1'}), '(device=self.dev1)\n', (30225, 30243), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, 
ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((30428, 30459), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_iosv)\n', (30432, 30459), False, 'from unittest.mock import Mock\n'), ((30478, 30503), 'genie.libs.parser.ios.show_platform.Dir', 'Dir', ([], {'device': 'self.dev_iosv'}), '(device=self.dev_iosv)\n', (30481, 30503), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((32800, 32825), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (32804, 32825), False, 'from unittest.mock import Mock\n'), ((32851, 32883), 'genie.libs.parser.ios.show_platform.ShowRedundancy', 'ShowRedundancy', ([], {'device': 'self.dev1'}), '(device=self.dev1)\n', (32865, 32883), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((33076, 33107), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_iosv)\n', (33080, 33107), False, 'from unittest.mock import Mock\n'), ((33133, 33169), 'genie.libs.parser.ios.show_platform.ShowRedundancy', 'ShowRedundancy', ([], {'device': 'self.dev_iosv'}), '(device=self.dev_iosv)\n', (33147, 33169), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, 
ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((73196, 73221), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (73200, 73221), False, 'from unittest.mock import Mock\n'), ((73246, 73277), 'genie.libs.parser.ios.show_platform.ShowInventory', 'ShowInventory', ([], {'device': 'self.dev1'}), '(device=self.dev1)\n', (73259, 73277), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((73469, 73500), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_iosv)\n', (73473, 73500), False, 'from unittest.mock import Mock\n'), ((73525, 73560), 'genie.libs.parser.ios.show_platform.ShowInventory', 'ShowInventory', ([], {'device': 'self.dev_iosv'}), '(device=self.dev_iosv)\n', (73538, 73560), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((73766, 73794), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_2)\n', (73770, 73794), False, 'from unittest.mock import Mock\n'), ((73809, 73842), 'genie.libs.parser.ios.show_platform.ShowInventory', 'ShowInventory', ([], {'device': 'self.device'}), '(device=self.device)\n', (73822, 73842), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((74035, 74063), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_3)\n', (74039, 74063), False, 'from unittest.mock import Mock\n'), ((74078, 74111), 'genie.libs.parser.ios.show_platform.ShowInventory', 'ShowInventory', ([], {'device': 'self.device'}), '(device=self.device)\n', (74091, 74111), False, 'from 
genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((74304, 74332), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_4)\n', (74308, 74332), False, 'from unittest.mock import Mock\n'), ((74347, 74380), 'genie.libs.parser.ios.show_platform.ShowInventory', 'ShowInventory', ([], {'device': 'self.device'}), '(device=self.device)\n', (74360, 74380), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((74573, 74601), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_5)\n', (74577, 74601), False, 'from unittest.mock import Mock\n'), ((74616, 74649), 'genie.libs.parser.ios.show_platform.ShowInventory', 'ShowInventory', ([], {'device': 'self.device'}), '(device=self.device)\n', (74629, 74649), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((74842, 74870), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_6)\n', (74846, 74870), False, 'from unittest.mock import Mock\n'), ((74885, 74918), 'genie.libs.parser.ios.show_platform.ShowInventory', 'ShowInventory', ([], {'device': 'self.device'}), '(device=self.device)\n', (74898, 74918), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, 
ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((75111, 75139), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_7)\n', (75115, 75139), False, 'from unittest.mock import Mock\n'), ((75154, 75187), 'genie.libs.parser.ios.show_platform.ShowInventory', 'ShowInventory', ([], {'device': 'self.device'}), '(device=self.device)\n', (75167, 75187), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((75380, 75408), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_8)\n', (75384, 75408), False, 'from unittest.mock import Mock\n'), ((75423, 75456), 'genie.libs.parser.ios.show_platform.ShowInventory', 'ShowInventory', ([], {'device': 'self.device'}), '(device=self.device)\n', (75436, 75456), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((75649, 75677), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_9)\n', (75653, 75677), False, 'from unittest.mock import Mock\n'), ((75692, 75725), 'genie.libs.parser.ios.show_platform.ShowInventory', 'ShowInventory', ([], {'device': 'self.device'}), '(device=self.device)\n', (75705, 75725), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((76649, 76674), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (76653, 76674), False, 'from unittest.mock import Mock\n'), ((76698, 76726), 
'genie.libs.parser.ios.show_platform.ShowBootvar', 'ShowBootvar', ([], {'device': 'self.dev'}), '(device=self.dev)\n', (76709, 76726), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((76912, 76943), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_iosv)\n', (76916, 76943), False, 'from unittest.mock import Mock\n'), ((76967, 77000), 'genie.libs.parser.ios.show_platform.ShowBootvar', 'ShowBootvar', ([], {'device': 'self.dev_iosv'}), '(device=self.dev_iosv)\n', (76978, 77000), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((79491, 79516), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (79495, 79516), False, 'from unittest.mock import Mock\n'), ((79531, 79570), 'genie.libs.parser.ios.show_platform.ShowProcessesCpuSorted', 'ShowProcessesCpuSorted', ([], {'device': 'self.dev'}), '(device=self.dev)\n', (79553, 79570), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((79744, 79770), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output)\n', (79748, 79770), False, 'from unittest.mock import Mock\n'), ((79785, 79824), 'genie.libs.parser.ios.show_platform.ShowProcessesCpuSorted', 'ShowProcessesCpuSorted', ([], {'device': 'self.dev'}), '(device=self.dev)\n', (79807, 79824), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, 
ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((80037, 80065), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_1)\n', (80041, 80065), False, 'from unittest.mock import Mock\n'), ((80080, 80119), 'genie.libs.parser.ios.show_platform.ShowProcessesCpuSorted', 'ShowProcessesCpuSorted', ([], {'device': 'self.dev'}), '(device=self.dev)\n', (80102, 80119), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((80339, 80365), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output)\n', (80343, 80365), False, 'from unittest.mock import Mock\n'), ((80380, 80416), 'genie.libs.parser.ios.show_platform.ShowProcessesCpu', 'ShowProcessesCpu', ([], {'device': 'self.device'}), '(device=self.device)\n', (80396, 80416), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((80628, 80656), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_1)\n', (80632, 80656), False, 'from unittest.mock import Mock\n'), ((80671, 80707), 'genie.libs.parser.ios.show_platform.ShowProcessesCpu', 'ShowProcessesCpu', ([], {'device': 'self.device'}), '(device=self.device)\n', (80687, 80707), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), 
((80881, 80906), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (80885, 80906), False, 'from unittest.mock import Mock\n'), ((80921, 80958), 'genie.libs.parser.ios.show_platform.ShowProcessesCpu', 'ShowProcessesCpu', ([], {'device': 'self.device1'}), '(device=self.device1)\n', (80937, 80958), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((81170, 81203), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_active)\n', (81174, 81203), False, 'from unittest.mock import Mock\n'), ((81218, 81251), 'genie.libs.parser.ios.show_platform.ShowVersionRp', 'ShowVersionRp', ([], {'device': 'self.device'}), '(device=self.device)\n', (81231, 81251), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((81477, 81511), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_standby)\n', (81481, 81511), False, 'from unittest.mock import Mock\n'), ((81526, 81559), 'genie.libs.parser.ios.show_platform.ShowVersionRp', 'ShowVersionRp', ([], {'device': 'self.device'}), '(device=self.device)\n', (81539, 81559), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((81795, 81837), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_standby_offline)\n', (81799, 81837), False, 'from unittest.mock import Mock\n'), ((81852, 81885), 'genie.libs.parser.ios.show_platform.ShowVersionRp', 'ShowVersionRp', ([], {'device': 'self.device'}), '(device=self.device)\n', (81865, 81885), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, 
ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((82090, 82115), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (82094, 82115), False, 'from unittest.mock import Mock\n'), ((82130, 82164), 'genie.libs.parser.ios.show_platform.ShowVersionRp', 'ShowVersionRp', ([], {'device': 'self.device1'}), '(device=self.device1)\n', (82143, 82164), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((82362, 82387), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (82366, 82387), False, 'from unittest.mock import Mock\n'), ((82411, 82441), 'genie.libs.parser.ios.show_platform.ShowPlatform', 'ShowPlatform', ([], {'device': 'self.dev1'}), '(device=self.dev1)\n', (82423, 82441), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((82599, 82629), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.semi_empty_output)\n', (82603, 82629), False, 'from unittest.mock import Mock\n'), ((82653, 82683), 'genie.libs.parser.ios.show_platform.ShowPlatform', 'ShowPlatform', ([], {'device': 'self.dev2'}), '(device=self.dev2)\n', (82665, 82683), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, 
ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((82876, 82908), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_c3850)\n', (82880, 82908), False, 'from unittest.mock import Mock\n'), ((82932, 82967), 'genie.libs.parser.ios.show_platform.ShowPlatform', 'ShowPlatform', ([], {'device': 'self.dev_c3850'}), '(device=self.dev_c3850)\n', (82944, 82967), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((83173, 83205), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_asr1k)\n', (83177, 83205), False, 'from unittest.mock import Mock\n'), ((83229, 83264), 'genie.libs.parser.ios.show_platform.ShowPlatform', 'ShowPlatform', ([], {'device': 'self.dev_asr1k'}), '(device=self.dev_asr1k)\n', (83241, 83264), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((83498, 83523), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (83502, 83523), False, 'from unittest.mock import Mock\n'), ((83547, 83584), 'genie.libs.parser.ios.show_platform.ShowPlatformPower', 'ShowPlatformPower', ([], {'device': 'self.device'}), '(device=self.device)\n', (83564, 83584), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((83768, 83794), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output)\n', (83772, 83794), False, 'from unittest.mock import Mock\n'), ((83818, 83855), 'genie.libs.parser.ios.show_platform.ShowPlatformPower', 'ShowPlatformPower', ([], {'device': 'self.device'}), 
'(device=self.device)\n', (83835, 83855), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((84097, 84122), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (84101, 84122), False, 'from unittest.mock import Mock\n'), ((84146, 84189), 'genie.libs.parser.ios.show_platform.ShowProcessesCpuHistory', 'ShowProcessesCpuHistory', ([], {'device': 'self.device'}), '(device=self.device)\n', (84169, 84189), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((84373, 84399), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output)\n', (84377, 84399), False, 'from unittest.mock import Mock\n'), ((84423, 84466), 'genie.libs.parser.ios.show_platform.ShowProcessesCpuHistory', 'ShowProcessesCpuHistory', ([], {'device': 'self.device'}), '(device=self.device)\n', (84446, 84466), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((84711, 84737), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output)\n', (84715, 84737), False, 'from unittest.mock import Mock\n'), ((84765, 84809), 'genie.libs.parser.ios.show_platform.ShowProcessesCpuPlatform', 'ShowProcessesCpuPlatform', ([], {'device': 'self.device'}), '(device=self.device)\n', (84789, 84809), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, 
ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((85004, 85029), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (85008, 85029), False, 'from unittest.mock import Mock\n'), ((85057, 85102), 'genie.libs.parser.ios.show_platform.ShowProcessesCpuPlatform', 'ShowProcessesCpuPlatform', ([], {'device': 'self.device1'}), '(device=self.device1)\n', (85081, 85102), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((85392, 85417), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (85396, 85417), False, 'from unittest.mock import Mock\n'), ((85432, 85482), 'genie.libs.parser.ios.show_platform.ShowPlatformSoftwareStatusControl', 'ShowPlatformSoftwareStatusControl', ([], {'device': 'self.dev'}), '(device=self.dev)\n', (85465, 85482), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((85656, 85682), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output)\n', (85660, 85682), False, 'from unittest.mock import Mock\n'), ((85697, 85747), 'genie.libs.parser.ios.show_platform.ShowPlatformSoftwareStatusControl', 'ShowPlatformSoftwareStatusControl', ([], {'device': 'self.dev'}), '(device=self.dev)\n', (85730, 85747), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), 
((86017, 86042), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (86021, 86042), False, 'from unittest.mock import Mock\n'), ((86057, 86114), 'genie.libs.parser.ios.show_platform.ShowPlatformSoftwareSlotActiveMonitorMem', 'ShowPlatformSoftwareSlotActiveMonitorMem', ([], {'device': 'self.dev'}), '(device=self.dev)\n', (86097, 86114), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((86288, 86314), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output)\n', (86292, 86314), False, 'from unittest.mock import Mock\n'), ((86329, 86386), 'genie.libs.parser.ios.show_platform.ShowPlatformSoftwareSlotActiveMonitorMem', 'ShowPlatformSoftwareSlotActiveMonitorMem', ([], {'device': 'self.dev'}), '(device=self.dev)\n', (86369, 86386), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((86619, 86652), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_active)\n', (86623, 86652), False, 'from unittest.mock import Mock\n'), ((86667, 86707), 'genie.libs.parser.ios.show_platform.ShowPlatformHardware', 'ShowPlatformHardware', ([], {'device': 'self.device'}), '(device=self.device)\n', (86687, 86707), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((86896, 86921), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (86900, 86921), False, 'from unittest.mock import Mock\n'), ((86936, 86977), 'genie.libs.parser.ios.show_platform.ShowPlatformHardware', 'ShowPlatformHardware', ([], {'device': 'self.device1'}), '(device=self.device1)\n', (86956, 86977), False, 'from 
genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((87211, 87242), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_port)\n', (87215, 87242), False, 'from unittest.mock import Mock\n'), ((87257, 87301), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwarePlim', 'ShowPlatformHardwarePlim', ([], {'device': 'self.device'}), '(device=self.device)\n', (87281, 87301), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((87505, 87536), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_slot)\n', (87509, 87536), False, 'from unittest.mock import Mock\n'), ((87551, 87595), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwarePlim', 'ShowPlatformHardwarePlim', ([], {'device': 'self.device'}), '(device=self.device)\n', (87575, 87595), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((87798, 87832), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_subslot)\n', (87802, 87832), False, 'from unittest.mock import Mock\n'), ((87847, 87891), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwarePlim', 'ShowPlatformHardwarePlim', ([], {'device': 'self.device'}), '(device=self.device)\n', (87871, 87891), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, 
ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((88108, 88148), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_slot_internal)\n', (88112, 88148), False, 'from unittest.mock import Mock\n'), ((88163, 88207), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwarePlim', 'ShowPlatformHardwarePlim', ([], {'device': 'self.device'}), '(device=self.device)\n', (88187, 88207), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((88439, 88464), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (88443, 88464), False, 'from unittest.mock import Mock\n'), ((88479, 88524), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwarePlim', 'ShowPlatformHardwarePlim', ([], {'device': 'self.device1'}), '(device=self.device1)\n', (88503, 88524), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((88806, 88843), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_active_opm)\n', (88810, 88843), False, 'from unittest.mock import Mock\n'), ((88858, 88914), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareQfpBqsOpmMapping', 'ShowPlatformHardwareQfpBqsOpmMapping', ([], {'device': 'self.device'}), '(device=self.device)\n', (88894, 88914), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), 
((89132, 89157), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (89136, 89157), False, 'from unittest.mock import Mock\n'), ((89172, 89229), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareQfpBqsOpmMapping', 'ShowPlatformHardwareQfpBqsOpmMapping', ([], {'device': 'self.device1'}), '(device=self.device1)\n', (89208, 89229), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((89524, 89561), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_active_ipm)\n', (89528, 89561), False, 'from unittest.mock import Mock\n'), ((89576, 89632), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareQfpBqsIpmMapping', 'ShowPlatformHardwareQfpBqsIpmMapping', ([], {'device': 'self.device'}), '(device=self.device)\n', (89612, 89632), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((89850, 89875), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (89854, 89875), False, 'from unittest.mock import Mock\n'), ((89890, 89947), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareQfpBqsIpmMapping', 'ShowPlatformHardwareQfpBqsIpmMapping', ([], {'device': 'self.device1'}), '(device=self.device1)\n', (89926, 89947), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((90234, 90267), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_serdes)\n', (90238, 90267), False, 'from unittest.mock import Mock\n'), ((90282, 90328), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareSerdes', 'ShowPlatformHardwareSerdes', ([], {'device': 'self.device'}), 
'(device=self.device)\n', (90308, 90328), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((90525, 90550), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (90529, 90550), False, 'from unittest.mock import Mock\n'), ((90565, 90612), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareSerdes', 'ShowPlatformHardwareSerdes', ([], {'device': 'self.device1'}), '(device=self.device1)\n', (90591, 90612), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((90893, 90935), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_serdes_internal)\n', (90897, 90935), False, 'from unittest.mock import Mock\n'), ((90950, 91004), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareSerdesInternal', 'ShowPlatformHardwareSerdesInternal', ([], {'device': 'self.device'}), '(device=self.device)\n', (90984, 91004), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((91223, 91248), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (91227, 91248), False, 'from unittest.mock import Mock\n'), ((91263, 91318), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareSerdesInternal', 'ShowPlatformHardwareSerdesInternal', ([], {'device': 'self.device1'}), '(device=self.device1)\n', (91297, 91318), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, 
ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((91596, 91621), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (91600, 91621), False, 'from unittest.mock import Mock\n'), ((91645, 91711), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareQfpBqsStatisticsChannelAll', 'ShowPlatformHardwareQfpBqsStatisticsChannelAll', ([], {'device': 'self.device'}), '(device=self.device)\n', (91691, 91711), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((91975, 92012), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_active_ipm)\n', (91979, 92012), False, 'from unittest.mock import Mock\n'), ((92036, 92102), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareQfpBqsStatisticsChannelAll', 'ShowPlatformHardwareQfpBqsStatisticsChannelAll', ([], {'device': 'self.device'}), '(device=self.device)\n', (92082, 92102), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((92380, 92417), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_active_opm)\n', (92384, 92417), False, 'from unittest.mock import Mock\n'), ((92441, 92507), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareQfpBqsStatisticsChannelAll', 'ShowPlatformHardwareQfpBqsStatisticsChannelAll', ([], {'device': 'self.device'}), '(device=self.device)\n', (92487, 92507), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, 
ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((92835, 92860), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (92839, 92860), False, 'from unittest.mock import Mock\n'), ((92884, 92952), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareQfpInterfaceIfnameStatistics', 'ShowPlatformHardwareQfpInterfaceIfnameStatistics', ([], {'device': 'self.device'}), '(device=self.device)\n', (92932, 92952), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((93216, 93242), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output)\n', (93220, 93242), False, 'from unittest.mock import Mock\n'), ((93266, 93334), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareQfpInterfaceIfnameStatistics', 'ShowPlatformHardwareQfpInterfaceIfnameStatistics', ([], {'device': 'self.device'}), '(device=self.device)\n', (93314, 93334), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((93684, 93709), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (93688, 93709), False, 'from unittest.mock import Mock\n'), ((93733, 93790), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareQfpStatisticsDrop', 'ShowPlatformHardwareQfpStatisticsDrop', ([], {'device': 'self.device'}), '(device=self.device)\n', (93770, 93790), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((94009, 94042), 'unittest.mock.Mock', 
'Mock', ([], {}), '(**self.golden_output_active)\n', (94013, 94042), False, 'from unittest.mock import Mock\n'), ((94066, 94123), 'genie.libs.parser.ios.show_platform.ShowPlatformHardwareQfpStatisticsDrop', 'ShowPlatformHardwareQfpStatisticsDrop', ([], {'device': 'self.device'}), '(device=self.device)\n', (94103, 94123), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((94361, 94386), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (94365, 94386), False, 'from unittest.mock import Mock\n'), ((94401, 94433), 'genie.libs.parser.ios.show_platform.ShowEnvironment', 'ShowEnvironment', ([], {'device': 'self.dev'}), '(device=self.dev)\n', (94416, 94433), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((94607, 94633), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output)\n', (94611, 94633), False, 'from unittest.mock import Mock\n'), ((94648, 94680), 'genie.libs.parser.ios.show_platform.ShowEnvironment', 'ShowEnvironment', ([], {'device': 'self.dev'}), '(device=self.dev)\n', (94663, 94680), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((94881, 94906), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (94885, 94906), False, 'from unittest.mock import Mock\n'), ((94930, 94958), 'genie.libs.parser.ios.show_platform.ShowModule', 'ShowModule', ([], {'device': 'self.dev1'}), '(device=self.dev1)\n', (94940, 94958), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, 
ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((95149, 95181), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_c3850)\n', (95153, 95181), False, 'from unittest.mock import Mock\n'), ((95205, 95238), 'genie.libs.parser.ios.show_platform.ShowModule', 'ShowModule', ([], {'device': 'self.dev_c3850'}), '(device=self.dev_c3850)\n', (95215, 95238), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((95456, 95481), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (95460, 95481), False, 'from unittest.mock import Mock\n'), ((95505, 95533), 'genie.libs.parser.ios.show_platform.ShowSwitch', 'ShowSwitch', ([], {'device': 'self.dev1'}), '(device=self.dev1)\n', (95515, 95533), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((95724, 95756), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_c3850)\n', (95728, 95756), False, 'from unittest.mock import Mock\n'), ((95780, 95813), 'genie.libs.parser.ios.show_platform.ShowSwitch', 'ShowSwitch', ([], {'device': 'self.dev_c3850'}), '(device=self.dev_c3850)\n', (95790, 95813), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, 
ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((96042, 96067), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.empty_output)\n', (96046, 96067), False, 'from unittest.mock import Mock\n'), ((96091, 96125), 'genie.libs.parser.ios.show_platform.ShowSwitchDetail', 'ShowSwitchDetail', ([], {'device': 'self.dev1'}), '(device=self.dev1)\n', (96107, 96125), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n'), ((96316, 96348), 'unittest.mock.Mock', 'Mock', ([], {}), '(**self.golden_output_c3850)\n', (96320, 96348), False, 'from unittest.mock import Mock\n'), ((96372, 96411), 'genie.libs.parser.ios.show_platform.ShowSwitchDetail', 'ShowSwitchDetail', ([], {'device': 'self.dev_c3850'}), '(device=self.dev_c3850)\n', (96388, 96411), False, 'from genie.libs.parser.ios.show_platform import ShowVersion, Dir, ShowRedundancy, ShowInventory, ShowBootvar, ShowProcessesCpuSorted, ShowProcessesCpu, ShowVersionRp, ShowPlatform, ShowPlatformPower, ShowProcessesCpuHistory, ShowProcessesCpuPlatform, ShowPlatformSoftwareStatusControl, ShowPlatformSoftwareSlotActiveMonitorMem, ShowPlatformHardware, ShowPlatformHardwarePlim, ShowPlatformHardwareQfpBqsOpmMapping, ShowPlatformHardwareQfpBqsIpmMapping, ShowPlatformHardwareSerdes, ShowPlatformHardwareSerdesInternal, ShowPlatformHardwareQfpBqsStatisticsChannelAll, ShowPlatformHardwareQfpInterfaceIfnameStatistics, ShowPlatformHardwareQfpStatisticsDrop, ShowEnvironment, ShowModule, ShowSwitch, ShowSwitchDetail\n')]
emmamcbryde/AuTuMN-1
autumn/projects/covid_19/sri_lanka/sri_lanka/project.py
b1e7de15ac6ef6bed95a80efab17f0780ec9ff6f
import numpy as np

from autumn.calibration.proposal_tuning import perform_all_params_proposal_tuning
from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, \
    use_tuned_proposal_sds
from autumn.calibration import Calibration
from autumn.calibration.priors import UniformPrior, BetaPrior, TruncNormalPrior
from autumn.calibration.targets import (
    NormalTarget,
    get_dispersion_priors_for_gaussian_targets,
)
from autumn.models.covid_19 import base_params, build_model
from autumn.settings import Region, Models
from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import get_all_scenario_dicts

# Load and configure model parameters.
default_path = build_rel_path("params/default.yml")
# scenario_paths = [build_rel_path(f"params/scenario-{i}.yml") for i in range(7, 9)]
mle_path = build_rel_path("params/mle-params.yml")
baseline_params = base_params.update(default_path).update(mle_path, calibration_format=True)
all_scenario_dicts = get_all_scenario_dicts("LKA")
# scenario_params = [baseline_params.update(p) for p in scenario_paths]
scenario_params = [baseline_params.update(sc_dict) for sc_dict in all_scenario_dicts]
param_set = ParameterSet(baseline=baseline_params, scenarios=scenario_params)

# Load and configure calibration targets.
ts_set = load_timeseries(build_rel_path("timeseries.json"))
notifications_ts = ts_set["notifications"].rolling(7).mean().loc[350::7]
death_ts = ts_set["infection_deaths"].loc[350:]
targets = [
    NormalTarget(notifications_ts),
    NormalTarget(death_ts),
]

priors = [
    # Dispersion parameters based on targets
    *get_dispersion_priors_for_gaussian_targets(targets),
    # Regional parameters
    UniformPrior("contact_rate", [0.024, 0.027]),
    UniformPrior("infectious_seed", [275.0, 450.0]),
    # Detection
    UniformPrior("testing_to_detection.assumed_cdr_parameter", [0.009, 0.025]),
    UniformPrior("infection_fatality.multiplier", [0.09, 0.13]),
    # VoC
    UniformPrior("voc_emergence.alpha_beta.start_time", [370, 410]),
    UniformPrior("voc_emergence.alpha_beta.contact_rate_multiplier", [3.2, 4.5]),
    UniformPrior("voc_emergence.delta.start_time", [475, 530]),
    UniformPrior("voc_emergence.delta.contact_rate_multiplier", [8.5, 11.5]),
]

# Load proposal sds from yml file
# use_tuned_proposal_sds(priors, build_rel_path("proposal_sds.yml"))

calibration = Calibration(priors, targets)

# FIXME: Replace with flexible Python plot request API.
import json

plot_spec_filepath = build_rel_path("timeseries.json")
with open(plot_spec_filepath) as f:
    plot_spec = json.load(f)

project = Project(
    Region.SRI_LANKA, Models.COVID_19, build_model, param_set, calibration, plots=plot_spec
)

# perform_all_params_proposal_tuning(project, calibration, priors, n_points=50, relative_likelihood_reduction=0.2)
[((738, 774), 'autumn.core.project.build_rel_path', 'build_rel_path', (['"""params/default.yml"""'], {}), "('params/default.yml')\n", (752, 774), False, 'from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, use_tuned_proposal_sds\n'), ((870, 909), 'autumn.core.project.build_rel_path', 'build_rel_path', (['"""params/mle-params.yml"""'], {}), "('params/mle-params.yml')\n", (884, 909), False, 'from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, use_tuned_proposal_sds\n'), ((1024, 1053), 'autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder.get_all_scenario_dicts', 'get_all_scenario_dicts', (['"""LKA"""'], {}), "('LKA')\n", (1046, 1053), False, 'from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import get_all_scenario_dicts\n'), ((1223, 1288), 'autumn.core.project.ParameterSet', 'ParameterSet', ([], {'baseline': 'baseline_params', 'scenarios': 'scenario_params'}), '(baseline=baseline_params, scenarios=scenario_params)\n', (1235, 1288), False, 'from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, use_tuned_proposal_sds\n'), ((2435, 2463), 'autumn.calibration.Calibration', 'Calibration', (['priors', 'targets'], {}), '(priors, targets)\n', (2446, 2463), False, 'from autumn.calibration import Calibration\n'), ((2555, 2588), 'autumn.core.project.build_rel_path', 'build_rel_path', (['"""timeseries.json"""'], {}), "('timeseries.json')\n", (2569, 2588), False, 'from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, use_tuned_proposal_sds\n'), ((2666, 2766), 'autumn.core.project.Project', 'Project', (['Region.SRI_LANKA', 'Models.COVID_19', 'build_model', 'param_set', 'calibration'], {'plots': 'plot_spec'}), '(Region.SRI_LANKA, Models.COVID_19, build_model, param_set,\n calibration, plots=plot_spec)\n', (2673, 2766), False, 'from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, use_tuned_proposal_sds\n'), ((1315, 1348), 'autumn.core.project.build_rel_path', 'build_rel_path', (['"""timeseries.json"""'], {}), "('timeseries.json')\n", (1329, 1348), False, 'from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, use_tuned_proposal_sds\n'), ((1487, 1517), 'autumn.calibration.targets.NormalTarget', 'NormalTarget', (['notifications_ts'], {}), '(notifications_ts)\n', (1499, 1517), False, 'from autumn.calibration.targets import NormalTarget, get_dispersion_priors_for_gaussian_targets\n'), ((1523, 1545), 'autumn.calibration.targets.NormalTarget', 'NormalTarget', (['death_ts'], {}), '(death_ts)\n', (1535, 1545), False, 'from autumn.calibration.targets import NormalTarget, get_dispersion_priors_for_gaussian_targets\n'), ((1752, 1796), 'autumn.calibration.priors.UniformPrior', 'UniformPrior', (['"""contact_rate"""', '[0.024, 0.027]'], {}), "('contact_rate', [0.024, 0.027])\n", (1764, 1796), False, 'from autumn.calibration.priors import UniformPrior, BetaPrior, TruncNormalPrior\n'), ((1802, 1849), 'autumn.calibration.priors.UniformPrior', 'UniformPrior', (['"""infectious_seed"""', '[275.0, 450.0]'], {}), "('infectious_seed', [275.0, 450.0])\n", (1814, 1849), False, 'from autumn.calibration.priors import UniformPrior, BetaPrior, TruncNormalPrior\n'), ((1871, 1945), 
'autumn.calibration.priors.UniformPrior', 'UniformPrior', (['"""testing_to_detection.assumed_cdr_parameter"""', '[0.009, 0.025]'], {}), "('testing_to_detection.assumed_cdr_parameter', [0.009, 0.025])\n", (1883, 1945), False, 'from autumn.calibration.priors import UniformPrior, BetaPrior, TruncNormalPrior\n'), ((1951, 2010), 'autumn.calibration.priors.UniformPrior', 'UniformPrior', (['"""infection_fatality.multiplier"""', '[0.09, 0.13]'], {}), "('infection_fatality.multiplier', [0.09, 0.13])\n", (1963, 2010), False, 'from autumn.calibration.priors import UniformPrior, BetaPrior, TruncNormalPrior\n'), ((2025, 2088), 'autumn.calibration.priors.UniformPrior', 'UniformPrior', (['"""voc_emergence.alpha_beta.start_time"""', '[370, 410]'], {}), "('voc_emergence.alpha_beta.start_time', [370, 410])\n", (2037, 2088), False, 'from autumn.calibration.priors import UniformPrior, BetaPrior, TruncNormalPrior\n'), ((2094, 2170), 'autumn.calibration.priors.UniformPrior', 'UniformPrior', (['"""voc_emergence.alpha_beta.contact_rate_multiplier"""', '[3.2, 4.5]'], {}), "('voc_emergence.alpha_beta.contact_rate_multiplier', [3.2, 4.5])\n", (2106, 2170), False, 'from autumn.calibration.priors import UniformPrior, BetaPrior, TruncNormalPrior\n'), ((2176, 2234), 'autumn.calibration.priors.UniformPrior', 'UniformPrior', (['"""voc_emergence.delta.start_time"""', '[475, 530]'], {}), "('voc_emergence.delta.start_time', [475, 530])\n", (2188, 2234), False, 'from autumn.calibration.priors import UniformPrior, BetaPrior, TruncNormalPrior\n'), ((2240, 2312), 'autumn.calibration.priors.UniformPrior', 'UniformPrior', (['"""voc_emergence.delta.contact_rate_multiplier"""', '[8.5, 11.5]'], {}), "('voc_emergence.delta.contact_rate_multiplier', [8.5, 11.5])\n", (2252, 2312), False, 'from autumn.calibration.priors import UniformPrior, BetaPrior, TruncNormalPrior\n'), ((2641, 2653), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2650, 2653), False, 'import json\n'), ((928, 960), 'autumn.models.covid_19.base_params.update', 'base_params.update', (['default_path'], {}), '(default_path)\n', (946, 960), False, 'from autumn.models.covid_19 import base_params, build_model\n'), ((1611, 1662), 'autumn.calibration.targets.get_dispersion_priors_for_gaussian_targets', 'get_dispersion_priors_for_gaussian_targets', (['targets'], {}), '(targets)\n', (1653, 1662), False, 'from autumn.calibration.targets import NormalTarget, get_dispersion_priors_for_gaussian_targets\n'), ((1669, 1720), 'autumn.calibration.targets.get_dispersion_priors_for_gaussian_targets', 'get_dispersion_priors_for_gaussian_targets', (['targets'], {}), '(targets)\n', (1711, 1720), False, 'from autumn.calibration.targets import NormalTarget, get_dispersion_priors_for_gaussian_targets\n')]
thanosbnt/SharingCitiesDashboard
Analytics/resources/themes/test_subthemes.py
5d123691d1f25d0b85e20e4e8293266bf23c9f8a
import unittest
from http import HTTPStatus
from unittest import TestCase

import bcrypt
from flask.ctx import AppContext
from flask.testing import FlaskClient

from app import create_app
from models.theme import Theme, SubTheme
from models.users import Users


class TestSubThemes(TestCase):
    """
    Unit tests for the creation, renaming and deletion of SubThemes
    """

    def setUp(self):
        """
        Set up a FlaskClient for testing, create an admin user, build the authorization header for requests to
        the Flask client and create a dummy theme
        """
        self.client, self.app_context = self.create_test_client()
        self.user = self.create_admin_user()
        self.auth_header = self.get_auth_header()
        self.theme = Theme.get_by_name("_test_add_Subtheme_")
        if not self.theme:
            self.theme = Theme("_test_add_Subtheme_")
            self.theme.save()
            self.theme.commit()
            self.theme = Theme.get_by_name("_test_add_Subtheme_")
        self.subtheme = self.create_dummy_subtheme()

    def create_test_client(self) -> (FlaskClient, AppContext):
        """
        Create flask testing client
        :return: FlaskClient for tests and AppContext
        """
        test_app = create_app(DATABASE_NAME='test_analysis', TESTING=True)
        testing_client = test_app.test_client()
        test_app_context = test_app.app_context()
        test_app_context.push()
        return testing_client, test_app_context

    def create_dummy_subtheme(self) -> SubTheme:
        """
        Create SubTheme for tests
        :return: SubTheme for tests
        """
        subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_')
        if not subtheme:
            subtheme = SubTheme(self.theme.id, '_TEST_SUB_THEME_')
            subtheme.save()
            subtheme.commit()
            subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_')
        return subtheme

    def create_admin_user(self) -> Users:
        """
        Create Admin user for tests
        :return: an admin user for tests
        """
        password_hash = bcrypt.hashpw("wfnbqk".encode("utf-8"), bcrypt.gensalt())
        user = Users.find_by_email("[email protected]")
        if not user:
            user = Users("Admin", "[email protected]", password_hash.decode("utf8"), True, True)
            try:
                user.save()
                user.commit()
            except Exception:
                pass
        return user

    def get_auth_header(self) -> {str: str}:
        """
        Create an Authorization header for test
        :return: An authorization header
        """
        response_login = self.client.post('/login',
                                          data=dict(email=self.user.email, password="wfnbqk", remember=True),
                                          follow_redirects=True)
        response_login_json = response_login.get_json()
        return {'Authorization': 'Bearer {}'.format(response_login_json["access_token"])}

    def test_add_subtheme(self):
        """
        Create a new SubTheme and check the client response status code for http status 200 (OK)
        Check JSON response data for the expected message 'sub theme created', the theme id and the SubTheme name
        """
        response = self.client.post('/admin/themes/add_subtheme',
                                    json={"theme_id": self.theme.id, "subtheme": "_TEST_SUB_THEME_2"},
                                    headers=self.auth_header)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        json_response = response.get_json()
        self.assertEqual(json_response["message"], "sub theme created")
        self.assertEqual(json_response["theme_id"], self.theme.id)
        self.assertEqual(json_response["subtheme"], "_TEST_SUB_THEME_2")

    def test_rename_subtheme_theme_id(self):
        """
        Rename a SubTheme by theme_id and check the client's response status code for http status 200 (OK)
        Check response data for the expected message 'Subtheme renamed' and that the Subtheme name has been changed
        """
        if not self.subtheme:
            self.subtheme = self.create_dummy_subtheme()
        current_name = self.subtheme.name
        response = self.client.post('/admin/themes/rename_subtheme',
                                    json={"theme_id": self.subtheme.t_id,
                                          "current_name": current_name,
                                          "new_name": "new_name_not_1"},
                                    headers=self.auth_header)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        response = response.get_json()
        self.assertEqual(response["id"], self.subtheme.id)
        self.assertEqual(response["message"], "Subtheme renamed")
        self.assertEqual(response["old_name"], current_name)
        self.assertEqual(response["new_name"], "new_name_not_1")

    def test_rename_subtheme_id(self):
        """
        Rename a SubTheme by id and check the client's response status code for http status 200 (OK)
        Check response data for the expected message 'Subtheme renamed' and that the Subtheme name has been changed
        """
        if not self.subtheme:
            self.subtheme = self.create_dummy_subtheme()
        current_name = self.subtheme.name
        response = self.client.post('/admin/themes/rename_subtheme',
                                    json={"id": self.subtheme.id,
                                          "current_name": current_name,
                                          "new_name": "new_name_not_1"},
                                    headers=self.auth_header)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        response = response.get_json()
        self.assertEqual(response["id"], self.subtheme.id)
        self.assertEqual(response["message"], "Subtheme renamed")
        self.assertEqual(response["old_name"], current_name)
        self.assertEqual(response["new_name"], "new_name_not_1")

    def test_rename_non_existent_subtheme(self):
        """
        Rename a SubTheme that does not exist and check the client's response status code for
        http status 404 (NOT_FOUND)
        """
        response = self.client.post('/admin/themes/rename_subtheme',
                                    json={"theme_id": -1,
                                          "current_name": "a3d4f5g6h7j8k0",
                                          "new_name": "new_name_not_1"},
                                    headers=self.auth_header)
        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)

    def test_delete_non_existent_subtheme(self):
        """
        Delete a SubTheme that does not exist and check the client's response status code for
        http status 404 (NOT_FOUND)
        """
        if not self.subtheme:
            self.subtheme = self.create_dummy_subtheme()
        response = self.client.post('/admin/themes/delete_subtheme',
                                    json={"name": "weA_gfj24fhurtyui", "theme_id": -1},
                                    headers=self.auth_header)
        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)

    def test_delete_subtheme_by_id(self):
        """
        Delete a SubTheme by id and check the client response status code for http status 204 (NO_CONTENT)
        """
        if not self.subtheme:
            self.subtheme = self.create_dummy_subtheme()
        response = self.client.post('/admin/themes/delete_subtheme',
                                    json={"id": self.subtheme.id},
                                    headers=self.auth_header)
        self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT)

    def test_delete_subtheme_by_theme_id_and_name(self):
        """
        Delete a SubTheme by theme_id and name: check the client response status code for
        http status 204 (NO_CONTENT)
        """
        if not self.subtheme:
            self.subtheme = self.create_dummy_subtheme()
        response = self.client.post('/admin/themes/delete_subtheme',
                                    json={"theme_id": self.subtheme.t_id, "name": self.subtheme.name},
                                    headers=self.auth_header)
        self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT)

    def tearDown(self):
        """Handle the cleanup after tests."""
        self.subtheme = SubTheme.get_by_name("new_name_not_1")
        if not self.subtheme:
            self.subtheme = SubTheme.get_by_name("_TEST_SUB_THEME_")
        if self.subtheme:
            self.subtheme.delete()
            self.subtheme.commit()
        test_sub = SubTheme.get_by_name("_TEST_SUB_THEME_2")
        if test_sub:
            test_sub.delete()
            test_sub.commit()
        if self.theme:
            self.theme.delete()
            self.theme.commit()
        self.client.post('/logout', headers=self.auth_header)
        if self.user:
            self.user.delete()
            self.user.commit()
        self.app_context.pop()


if __name__ == '__main__':
    unittest.main()
[((9229, 9244), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9242, 9244), False, 'import unittest\n'), ((759, 799), 'models.theme.Theme.get_by_name', 'Theme.get_by_name', (['"""_test_add_Subtheme_"""'], {}), "('_test_add_Subtheme_')\n", (776, 799), False, 'from models.theme import Theme, SubTheme\n'), ((1259, 1314), 'app.create_app', 'create_app', ([], {'DATABASE_NAME': '"""test_analysis"""', 'TESTING': '(True)'}), "(DATABASE_NAME='test_analysis', TESTING=True)\n", (1269, 1314), False, 'from app import create_app\n'), ((1656, 1696), 'models.theme.SubTheme.get_by_name', 'SubTheme.get_by_name', (['"""_TEST_SUB_THEME_"""'], {}), "('_TEST_SUB_THEME_')\n", (1676, 1696), False, 'from models.theme import Theme, SubTheme\n'), ((2176, 2212), 'models.users.Users.find_by_email', 'Users.find_by_email', (['"""[email protected]"""'], {}), "('[email protected]')\n", (2195, 2212), False, 'from models.users import Users\n'), ((8551, 8589), 'models.theme.SubTheme.get_by_name', 'SubTheme.get_by_name', (['"""new_name_not_1"""'], {}), "('new_name_not_1')\n", (8571, 8589), False, 'from models.theme import Theme, SubTheme\n'), ((8806, 8847), 'models.theme.SubTheme.get_by_name', 'SubTheme.get_by_name', (['"""_TEST_SUB_THEME_2"""'], {}), "('_TEST_SUB_THEME_2')\n", (8826, 8847), False, 'from models.theme import Theme, SubTheme\n'), ((852, 880), 'models.theme.Theme', 'Theme', (['"""_test_add_Subtheme_"""'], {}), "('_test_add_Subtheme_')\n", (857, 880), False, 'from models.theme import Theme, SubTheme\n'), ((968, 1008), 'models.theme.Theme.get_by_name', 'Theme.get_by_name', (['"""_test_add_Subtheme_"""'], {}), "('_test_add_Subtheme_')\n", (985, 1008), False, 'from models.theme import Theme, SubTheme\n'), ((1745, 1788), 'models.theme.SubTheme', 'SubTheme', (['self.theme.id', '"""_TEST_SUB_THEME_"""'], {}), "(self.theme.id, '_TEST_SUB_THEME_')\n", (1753, 1788), False, 'from models.theme import Theme, SubTheme\n'), ((1870, 1910), 'models.theme.SubTheme.get_by_name', 'SubTheme.get_by_name', (['"""_TEST_SUB_THEME_"""'], {}), "('_TEST_SUB_THEME_')\n", (1890, 1910), False, 'from models.theme import Theme, SubTheme\n'), ((2143, 2159), 'bcrypt.gensalt', 'bcrypt.gensalt', ([], {}), '()\n', (2157, 2159), False, 'import bcrypt\n'), ((8648, 8688), 'models.theme.SubTheme.get_by_name', 'SubTheme.get_by_name', (['"""_TEST_SUB_THEME_"""'], {}), "('_TEST_SUB_THEME_')\n", (8668, 8688), False, 'from models.theme import Theme, SubTheme\n')]
TC921/openpilot
selfdrive/sensord/rawgps/structs.py
d5d91e6e3be02e2525ed8d6137e5fdca2b81657c
from struct import unpack_from, calcsize LOG_GNSS_POSITION_REPORT = 0x1476 LOG_GNSS_GPS_MEASUREMENT_REPORT = 0x1477 LOG_GNSS_CLOCK_REPORT = 0x1478 LOG_GNSS_GLONASS_MEASUREMENT_REPORT = 0x1480 LOG_GNSS_BDS_MEASUREMENT_REPORT = 0x1756 LOG_GNSS_GAL_MEASUREMENT_REPORT = 0x1886 LOG_GNSS_OEMDRE_MEASUREMENT_REPORT = 0x14DE LOG_GNSS_OEMDRE_SVPOLY_REPORT = 0x14E1 LOG_GNSS_ME_DPO_STATUS = 0x1838 LOG_GNSS_CD_DB_REPORT = 0x147B LOG_GNSS_PRX_RF_HW_STATUS_REPORT = 0x147E LOG_CGPS_SLOW_CLOCK_CLIB_REPORT = 0x1488 LOG_GNSS_CONFIGURATION_STATE = 0x1516 glonass_measurement_report = """ uint8_t version; uint32_t f_count; uint8_t glonass_cycle_number; uint16_t glonass_number_of_days; uint32_t milliseconds; float time_bias; float clock_time_uncertainty; float clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count; """ glonass_measurement_report_sv = """ uint8_t sv_id; int8_t frequency_index; uint8_t observation_state; // SVObservationStates uint8_t observations; uint8_t good_observations; uint8_t hemming_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t latency; uint8_t predetect_interval; uint16_t postdetections; uint32_t unfiltered_measurement_integral; float unfiltered_measurement_fraction; float unfiltered_time_uncertainty; float unfiltered_speed; float unfiltered_speed_uncertainty; uint32_t measurement_status; uint8_t misc_status; uint32_t multipath_estimate; float azimuth; float elevation; int32_t carrier_phase_cycles_integral; uint16_t carrier_phase_cycles_fraction; float fine_speed; float fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t pad; """ gps_measurement_report = """ uint8_t version; uint32_t f_count; uint16_t week; uint32_t milliseconds; float time_bias; float clock_time_uncertainty; float clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count; """ gps_measurement_report_sv = """ uint8_t sv_id; uint8_t observation_state; // SVObservationStates uint8_t observations; uint8_t good_observations; uint16_t parity_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t latency; uint8_t predetect_interval; uint16_t postdetections; uint32_t unfiltered_measurement_integral; float unfiltered_measurement_fraction; float unfiltered_time_uncertainty; float unfiltered_speed; float unfiltered_speed_uncertainty; uint32_t measurement_status; uint8_t misc_status; uint32_t multipath_estimate; float azimuth; float elevation; int32_t carrier_phase_cycles_integral; uint16_t carrier_phase_cycles_fraction; float fine_speed; float fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t pad; """ position_report = """ uint8 u_Version; /* Version number of DM log */ uint32 q_Fcount; /* Local millisecond counter */ uint8 u_PosSource; /* Source of position information */ /* 0: None 1: Weighted least-squares 2: Kalman filter 3: Externally injected 4: Internal database */ uint32 q_Reserved1; /* Reserved memory field */ uint16 w_PosVelFlag; /* Position velocity bit field: (see DM log 0x1476 documentation) */ uint32 q_PosVelFlag2; /* Position velocity 2 bit field: (see DM log 0x1476 documentation) */ uint8 u_FailureCode; /* Failure code: (see DM log 0x1476 documentation) */ uint16 w_FixEvents; /* Fix events bit field: (see DM log 0x1476 documentation) */ uint32 _fake_align_week_number; uint16 w_GpsWeekNumber; /* GPS week number of position */ uint32 q_GpsFixTimeMs; /* GPS fix time of week of in milliseconds */ uint8 u_GloNumFourYear; /* Number of Glonass four year cycles */ uint16 w_GloNumDaysInFourYear; /* Glonass calendar day in four year cycle */ 
uint32 q_GloFixTimeMs; /* Glonass fix time of day in milliseconds */
uint32 q_PosCount; /* Integer count of the number of unique positions reported */
uint64 t_DblFinalPosLatLon[2]; /* Final latitude and longitude of position in radians */
uint32 q_FltFinalPosAlt; /* Final height-above-ellipsoid altitude of position */
uint32 q_FltHeadingRad; /* User heading in radians */
uint32 q_FltHeadingUncRad; /* User heading uncertainty in radians */
uint32 q_FltVelEnuMps[3]; /* User velocity in east, north, up coordinate frame. In meters per second. */
uint32 q_FltVelSigmaMps[3]; /* Gaussian 1-sigma value for east, north, up components of user velocity */
uint32 q_FltClockBiasMeters; /* Receiver clock bias in meters */
uint32 q_FltClockBiasSigmaMeters; /* Gaussian 1-sigma value for receiver clock bias in meters */
uint32 q_FltGGTBMeters; /* GPS to Glonass time bias in meters */
uint32 q_FltGGTBSigmaMeters; /* Gaussian 1-sigma value for GPS to Glonass time bias uncertainty in meters */
uint32 q_FltGBTBMeters; /* GPS to BeiDou time bias in meters */
uint32 q_FltGBTBSigmaMeters; /* Gaussian 1-sigma value for GPS to BeiDou time bias uncertainty in meters */
uint32 q_FltBGTBMeters; /* BeiDou to Glonass time bias in meters */
uint32 q_FltBGTBSigmaMeters; /* Gaussian 1-sigma value for BeiDou to Glonass time bias uncertainty in meters */
uint32 q_FltFiltGGTBMeters; /* Filtered GPS to Glonass time bias in meters */
uint32 q_FltFiltGGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to Glonass time bias uncertainty in meters */
uint32 q_FltFiltGBTBMeters; /* Filtered GPS to BeiDou time bias in meters */
uint32 q_FltFiltGBTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to BeiDou time bias uncertainty in meters */
uint32 q_FltFiltBGTBMeters; /* Filtered BeiDou to Glonass time bias in meters */
uint32 q_FltFiltBGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for BeiDou to Glonass time bias uncertainty in meters */
uint32 q_FltSftOffsetSec; /* SFT offset as computed by WLS in seconds */
uint32 q_FltSftOffsetSigmaSec; /* Gaussian 1-sigma value for SFT offset in seconds */
uint32 q_FltClockDriftMps; /* Clock drift (clock frequency bias) in meters per second */
uint32 q_FltClockDriftSigmaMps; /* Gaussian 1-sigma value for clock drift in meters per second */
uint32 q_FltFilteredAlt; /* Filtered height-above-ellipsoid altitude in meters as computed by WLS */
uint32 q_FltFilteredAltSigma; /* Gaussian 1-sigma value for filtered height-above-ellipsoid altitude in meters */
uint32 q_FltRawAlt; /* Raw height-above-ellipsoid altitude in meters as computed by WLS */
uint32 q_FltRawAltSigma; /* Gaussian 1-sigma value for raw height-above-ellipsoid altitude in meters */
uint32 align_Flt[14];
uint32 q_FltPdop; /* 3D position dilution of precision as computed from the unweighted least-squares covariance matrix */
uint32 q_FltHdop; /* Horizontal position dilution of precision as computed from the unweighted least-squares covariance matrix */
uint32 q_FltVdop; /* Vertical position dilution of precision as computed from the unweighted least-squares covariance matrix */
uint8 u_EllipseConfidence; /* Statistical measure of the confidence (percentage) associated with the uncertainty ellipse values */
uint32 q_FltEllipseAngle; /* Angle of semimajor axis with respect to true North, with increasing angles moving clockwise from North. In units of degrees. */
uint32 q_FltEllipseSemimajorAxis; /* Semimajor axis of final horizontal position uncertainty error ellipse. In units of meters. */
uint32 q_FltEllipseSemiminorAxis; /* Semiminor axis of final horizontal position uncertainty error ellipse. In units of meters. */
uint32 q_FltPosSigmaVertical; /* Gaussian 1-sigma value for final position height-above-ellipsoid altitude in meters */
uint8 u_HorizontalReliability; /* Horizontal position reliability 0: Not set 1: Very Low 2: Low 3: Medium 4: High */
uint8 u_VerticalReliability; /* Vertical position reliability */
uint16 w_Reserved2; /* Reserved memory field */
uint32 q_FltGnssHeadingRad; /* User heading in radians derived from GNSS only solution */
uint32 q_FltGnssHeadingUncRad; /* User heading uncertainty in radians derived from GNSS only solution */
uint32 q_SensorDataUsageMask; /* Denotes which additional sensor data were used to compute this position fix.
BIT[0] 0x00000001 - Accelerometer
BIT[1] 0x00000002 - Gyro
0x0000FFFC - Reserved
A bit set to 1 indicates that certain fields as defined by the SENSOR_AIDING_MASK were aided with sensor data */
uint32 q_SensorAidMask; /* Denotes which component of the position report was assisted with additional sensors defined in SENSOR_DATA_USAGE_MASK
BIT[0] 0x00000001 - Heading aided with sensor data
BIT[1] 0x00000002 - Speed aided with sensor data
BIT[2] 0x00000004 - Position aided with sensor data
BIT[3] 0x00000008 - Velocity aided with sensor data
0xFFFFFFF0 - Reserved */
uint8 u_NumGpsSvsUsed; /* The number of GPS SVs used in the fix */
uint8 u_TotalGpsSvs; /* Total number of GPS SVs detected by searcher, including ones not used in position calculation */
uint8 u_NumGloSvsUsed; /* The number of Glonass SVs used in the fix */
uint8 u_TotalGloSvs; /* Total number of Glonass SVs detected by searcher, including ones not used in position calculation */
uint8 u_NumBdsSvsUsed; /* The number of BeiDou SVs used in the fix */
uint8 u_TotalBdsSvs; /* Total number of BeiDou SVs detected by searcher, including ones not used in position calculation */
"""


def name_to_camelcase(nam):
  # Convert a snake_case field name (e.g. "sv_count") to camelCase ("svCount").
  ret = []
  i = 0
  while i < len(nam):
    if nam[i] == "_":
      ret.append(nam[i+1].upper())
      i += 2
    else:
      ret.append(nam[i])
      i += 1
  return ''.join(ret)


def parse_struct(ss):
  # Build a little-endian struct format string plus the matching field-name
  # list from one of the C-style definition strings above.
  st = "<"
  nams = []
  for l in ss.strip().split("\n"):
    typ, nam = l.split(";")[0].split()
    #print(typ, nam)
    if typ == "float" or '_Flt' in nam:
      st += "f"
    elif typ == "double" or '_Dbl' in nam:
      st += "d"
    elif typ in ["uint8", "uint8_t"]:
      st += "B"
    elif typ in ["int8", "int8_t"]:
      st += "b"
    elif typ in ["uint32", "uint32_t"]:
      st += "I"
    elif typ in ["int32", "int32_t"]:
      st += "i"
    elif typ in ["uint16", "uint16_t"]:
      st += "H"
    elif typ in ["int16", "int16_t"]:
      st += "h"
    elif typ == "uint64":
      st += "Q"
    else:
      print("unknown type", typ)
      assert False
    if '[' in nam:
      # Array fields expand into one entry per element: "name[0]", "name[1]", ...
      cnt = int(nam.split("[")[1].split("]")[0])
      st += st[-1]*(cnt-1)
      for i in range(cnt):
        nams.append("%s[%d]" % (nam.split("[")[0], i))
    else:
      nams.append(nam)
  return st, nams


def dict_unpacker(ss, camelcase=False):
  # Return (unpack_function, size): the function maps a raw buffer to a
  # {field_name: value} dict; size is the byte length of the packed struct.
  st, nams = parse_struct(ss)
  if camelcase:
    nams = [name_to_camelcase(x) for x in nams]
  sz = calcsize(st)
  return lambda x: dict(zip(nams, unpack_from(st, x))), sz
[((11349, 11361), 'struct.calcsize', 'calcsize', (['st'], {}), '(st)\n', (11357, 11361), False, 'from struct import unpack_from, calcsize\n'), ((11396, 11414), 'struct.unpack_from', 'unpack_from', (['st', 'x'], {}), '(st, x)\n', (11407, 11414), False, 'from struct import unpack_from, calcsize\n')]
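A minimal usage sketch for dict_unpacker above (not part of the original file; the zero-filled payload is only a stand-in for a captured DM log body):

unpack_report, report_size = dict_unpacker(glonass_measurement_report)
payload = b"\x00" * report_size           # placeholder bytes; a real log body goes here
header = unpack_report(payload)
print(report_size, header["sv_count"])    # fixed header size in bytes, then the SV count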
anvdev/Hammer-Tools
python2.7libs/hammer_tools/content_browser.py
0211ec837da6754e537c98624ecd07c23abab28e
from __future__ import print_function try: from PyQt5.QtWidgets import * from PyQt5.QtGui import * from PyQt5.QtCore import * except ImportError: from PySide2.QtWidgets import * from PySide2.QtGui import * from PySide2.QtCore import * import hou from hammer_tools.utils import createAction def isRevertToDefaultEvent(event): return event.modifiers() == Qt.ControlModifier and event.button() == Qt.MiddleButton class Slider(QSlider): def __init__(self, orientation=Qt.Horizontal, parent=None): super(Slider, self).__init__(orientation, parent) self.defaultValue = 0 self.valueLadderMode = False def revertToDefault(self): self.setValue(self.defaultValue) def setDefaultValue(self, value, reset=True): self.defaultValue = value if reset: self.revertToDefault() def mousePressEvent(self, event): if False: # Type hint event = QMouseEvent if event.button() == Qt.MiddleButton: return elif event.button() == Qt.LeftButton: event = QMouseEvent(QEvent.MouseButtonPress, event.pos(), Qt.MiddleButton, Qt.MiddleButton, Qt.NoModifier) super(Slider, self).mousePressEvent(event) def mouseMoveEvent(self, event): if False: # Type hint event = QMouseEvent if not self.valueLadderMode and event.buttons() == Qt.MiddleButton: try: hou.ui.openValueLadder(self.value(), self.setValue, data_type=hou.valueLadderDataType.Int) except hou.OperationFailed: return else: self.valueLadderMode = True elif self.valueLadderMode: hou.ui.updateValueLadder(event.globalX(), event.globalY(), bool(event.modifiers() & Qt.AltModifier), bool(event.modifiers() & Qt.ShiftModifier)) else: super(Slider, self).mouseMoveEvent(event) def mouseReleaseEvent(self, event): if False: # Type hint event = QMouseEvent if self.valueLadderMode and event.button() == Qt.MiddleButton: hou.ui.closeValueLadder() self.valueLadderMode = False elif isRevertToDefaultEvent(event): self.revertToDefault() else: super(Slider, self).mouseReleaseEvent(event) class SearchField(QComboBox): def __init__(self, parent=None): super(SearchField, self).__init__(parent) self.setEditable(True) edit = self.lineEdit() edit.setPlaceholderText('Search...') edit.installEventFilter(self) edit.setFont(QFont('Segoe UI')) self.setFixedHeight(26) comp = self.completer() comp.setCompletionMode(QCompleter.PopupCompletion) comp.setFilterMode(Qt.MatchContains) comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel) comp.setMaxVisibleItems(5) popup = comp.popup() popup.setStyleSheet(hou.qt.styleSheet()) def mouseReleaseEvent(self, event): if False: # Type hint event = QMouseEvent if isRevertToDefaultEvent(event): self.clearEditText() def eventFilter(self, watched, event): if False: # Type hint watched = QObject event = QEvent if watched == self.lineEdit(): if event.type() == QEvent.MouseButtonRelease and isRevertToDefaultEvent(event): self.clearEditText() event.accept() return True return False def keyPressEvent(self, event): if False: # Type hint event = QKeyEvent key = event.key() mod = event.modifiers() if mod == Qt.NoModifier and key == Qt.Key_Escape: self.clearEditText() else: super(SearchField, self).keyPressEvent(event) def hidePopup(self): super(SearchField, self).hidePopup() self.lineEdit().setFocus() link_or_state_icon = 'BUTTONS_link' embedded_icon = 'BUTTONS_pinned' class BrowserMode(QStandardItemModel): def __init__(self): super(BrowserMode, self).__init__() class BrowserTreeView(QTreeView): def __init__(self, parent=None): super(BrowserTreeView, self).__init__(parent) self.setAlternatingRowColors(True) class BrowserTableView(QListView): def __init__(self, parent=None): 
super(BrowserTableView, self).__init__(parent) self.setViewMode(QListView.IconMode) self.setResizeMode(QListView.Adjust) self.setSelectionMode(QAbstractItemView.ExtendedSelection) self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel) self.setIconSize(QSize(120, 90)) self.setUniformItemSizes(True) self.setContextMenuPolicy(Qt.CustomContextMenu) class ContentBrowser(QWidget): def __init__(self, parent=None): super(ContentBrowser, self).__init__(parent) self.setWindowTitle('Content Browser') self.setProperty('houdiniStyle', True) topLayout = QHBoxLayout() topLayout.setContentsMargins(4, 4, 4, 2) topLayout.setSpacing(2) self.refreshButton = QPushButton() self.refreshButton.setFixedSize(26, 26) self.refreshButton.setToolTip('Update\tF5') self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload', 18, 18)) self.refreshButton.setIconSize(QSize(18, 18)) topLayout.addWidget(self.refreshButton) sep = hou.qt.Separator() if False: # Type hint sep = QFrame sep.setFixedWidth(2) sep.setFrameShape(QFrame.VLine) topLayout.addWidget(sep) viewModeButtonGroup = QButtonGroup(self) viewModeButtonGroup.setExclusive(True) self.treeViewButton = QPushButton() self.treeViewButton.setFixedSize(26, 26) self.treeViewButton.setToolTip('Tree View\t\tCtrl+1') self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree', 18, 18)) self.treeViewButton.setIconSize(QSize(18, 18)) self.treeViewButton.setCheckable(True) viewModeButtonGroup.addButton(self.treeViewButton) topLayout.addWidget(self.treeViewButton) self.tableViewButton = QPushButton() self.tableViewButton.setFixedSize(26, 26) self.tableViewButton.setToolTip('Table View\tCtrl+2') self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette', 18, 18)) self.tableViewButton.setIconSize(QSize(18, 18)) self.tableViewButton.setCheckable(True) self.tableViewButton.toggle() viewModeButtonGroup.addButton(self.tableViewButton) topLayout.addWidget(self.tableViewButton) topLayout.addWidget(sep) self.searchField = SearchField() self.searchField.setToolTip('Search\tCtrl+F, F3') topLayout.addWidget(self.searchField) searchModeButtonGroup = QButtonGroup(self) searchModeButtonGroup.setExclusive(True) self.wholeSearchButton = QPushButton() self.wholeSearchButton.setFixedSize(26, 26) self.wholeSearchButton.setCheckable(True) self.wholeSearchButton.setToolTip('Whole word search') self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase', 18, 18)) self.wholeSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.wholeSearchButton) topLayout.addWidget(self.wholeSearchButton) self.fuzzySearchButton = QPushButton() self.fuzzySearchButton.setFixedSize(26, 26) self.fuzzySearchButton.setCheckable(True) self.fuzzySearchButton.toggle() self.fuzzySearchButton.setToolTip('Fuzzy search') self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith', 18, 18)) self.fuzzySearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.fuzzySearchButton) topLayout.addWidget(self.fuzzySearchButton) self.patternSearchButton = QPushButton() self.patternSearchButton.setFixedSize(26, 26) self.patternSearchButton.setCheckable(True) self.patternSearchButton.setToolTip('Search by Pattern') self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha', 18, 18)) self.patternSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.patternSearchButton) topLayout.addWidget(self.patternSearchButton) self.regexSearchButton = QPushButton() self.regexSearchButton.setFixedSize(26, 26) self.regexSearchButton.setCheckable(True) self.regexSearchButton.setToolTip('Search by 
Regular Expression') self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match', 18, 18)) self.regexSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.regexSearchButton) topLayout.addWidget(self.regexSearchButton) topLayout.addWidget(sep) topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser', 'Show Help\tF1')) middleLayout = QHBoxLayout() middleLayout.setContentsMargins(4, 0, 0, 4) middleLayout.setSpacing(4) self.viewLayout = QStackedLayout(middleLayout) model = QFileSystemModel() model.setRootPath('C:/') treeView = BrowserTreeView() treeView.setModel(model) treeView.setRootIndex(model.index('C:/')) self.viewLayout.addWidget(treeView) tableView = BrowserTableView() tableView.setModel(model) tableView.setRootIndex(model.index('C:/')) tableView.setSelectionModel(treeView.selectionModel()) self.viewLayout.addWidget(tableView) self.viewLayout.setCurrentIndex(1) self.treeViewButton.clicked.connect(self.switchToTreeView) self.addAction(createAction(self, 'Tree View', self.switchToTreeView, shortcut='Ctrl+1')) self.tableViewButton.clicked.connect(self.switchToTableView) self.addAction(createAction(self, 'Table View', self.switchToTableView, shortcut='Ctrl+2')) bottomLayout = QHBoxLayout() bottomLayout.setContentsMargins(4, 0, 4, 4) bottomLayout.setSpacing(2) settingsButton = QPushButton() settingsButton.setFixedSize(26, 26) settingsButton.setToolTip('Settings') settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini', 18, 18)) settingsButton.setIconSize(QSize(18, 18)) bottomLayout.addWidget(settingsButton) spacer = QSpacerItem(0, 0, QSizePolicy.Expanding, QSizePolicy.Ignored) bottomLayout.addSpacerItem(spacer) self.scaleSlider = Slider() self.scaleSlider.setDefaultValue(50) self.scaleSlider.setFixedWidth(120) self.scaleSlider.valueChanged.connect(lambda v: tableView.setIconSize(QSize(120, 90) * v / 100)) bottomLayout.addWidget(self.scaleSlider) mainLayout = QVBoxLayout(self) mainLayout.setContentsMargins(0, 0, 0, 0) mainLayout.setSpacing(4) mainLayout.addLayout(topLayout) mainLayout.addLayout(middleLayout) mainLayout.addLayout(bottomLayout) def switchToTreeView(self): self.viewLayout.setCurrentIndex(0) self.scaleSlider.hide() self.treeViewButton.setChecked(True) def switchToTableView(self): self.viewLayout.setCurrentIndex(1) self.scaleSlider.show() self.tableViewButton.setChecked(True) def keyPressEvent(self, event): if False: # Type hint event = QKeyEvent key = event.key() mod = event.modifiers() if mod == Qt.NoModifier and key == Qt.Key_F5: pass elif mod == Qt.ControlModifier and key == Qt.Key_F: self.searchField.setFocus() elif mod == Qt.NoModifier and key == Qt.Key_F3: self.searchField.setFocus() elif mod == Qt.ControlModifier and key == Qt.Key_Equal: pass elif mod == Qt.ControlModifier and key == Qt.Key_Minus: pass elif mod == Qt.ControlModifier and key == Qt.Key_1: pass elif mod == Qt.ControlModifier and key == Qt.Key_2: pass elif mod == Qt.NoModifier and key == Qt.Key_F1: pass else: super(ContentBrowser, self).keyPressEvent(event) if __name__ == '__main__': app = QApplication([]) window = ContentBrowser() window.show() app.exec_()
[((5595, 5613), 'hou.qt.Separator', 'hou.qt.Separator', ([], {}), '()\n', (5611, 5613), False, 'import hou\n'), ((2230, 2255), 'hou.ui.closeValueLadder', 'hou.ui.closeValueLadder', ([], {}), '()\n', (2253, 2255), False, 'import hou\n'), ((3082, 3101), 'hou.qt.styleSheet', 'hou.qt.styleSheet', ([], {}), '()\n', (3099, 3101), False, 'import hou\n'), ((5439, 5476), 'hou.qt.Icon', 'hou.qt.Icon', (['"""BUTTONS_reload"""', '(18)', '(18)'], {}), "('BUTTONS_reload', 18, 18)\n", (5450, 5476), False, 'import hou\n'), ((6060, 6095), 'hou.qt.Icon', 'hou.qt.Icon', (['"""BUTTONS_tree"""', '(18)', '(18)'], {}), "('BUTTONS_tree', 18, 18)\n", (6071, 6095), False, 'import hou\n'), ((6502, 6546), 'hou.qt.Icon', 'hou.qt.Icon', (['"""NETVIEW_shape_palette"""', '(18)', '(18)'], {}), "('NETVIEW_shape_palette', 18, 18)\n", (6513, 6546), False, 'import hou\n'), ((7333, 7369), 'hou.qt.Icon', 'hou.qt.Icon', (['"""VOP_titlecase"""', '(18)', '(18)'], {}), "('VOP_titlecase', 18, 18)\n", (7344, 7369), False, 'import hou\n'), ((7832, 7867), 'hou.qt.Icon', 'hou.qt.Icon', (['"""VOP_endswith"""', '(18)', '(18)'], {}), "('VOP_endswith', 18, 18)\n", (7843, 7867), False, 'import hou\n'), ((8305, 8339), 'hou.qt.Icon', 'hou.qt.Icon', (['"""VOP_isalpha"""', '(18)', '(18)'], {}), "('VOP_isalpha', 18, 18)\n", (8316, 8339), False, 'import hou\n'), ((8784, 8822), 'hou.qt.Icon', 'hou.qt.Icon', (['"""VOP_regex_match"""', '(18)', '(18)'], {}), "('VOP_regex_match', 18, 18)\n", (8795, 8822), False, 'import hou\n'), ((9061, 9122), 'hou.qt.HelpButton', 'hou.qt.HelpButton', (['"""/hammer/content_browser"""', '"""Show Help\tF1"""'], {}), "('/hammer/content_browser', 'Show Help\\tF1')\n", (9078, 9122), False, 'import hou\n'), ((9905, 9978), 'hammer_tools.utils.createAction', 'createAction', (['self', '"""Tree View"""', 'self.switchToTreeView'], {'shortcut': '"""Ctrl+1"""'}), "(self, 'Tree View', self.switchToTreeView, shortcut='Ctrl+1')\n", (9917, 9978), False, 'from hammer_tools.utils import createAction\n'), ((10072, 10147), 'hammer_tools.utils.createAction', 'createAction', (['self', '"""Table View"""', 'self.switchToTableView'], {'shortcut': '"""Ctrl+2"""'}), "(self, 'Table View', self.switchToTableView, shortcut='Ctrl+2')\n", (10084, 10147), False, 'from hammer_tools.utils import createAction\n'), ((10435, 10475), 'hou.qt.Icon', 'hou.qt.Icon', (['"""BUTTONS_gear_mini"""', '(18)', '(18)'], {}), "('BUTTONS_gear_mini', 18, 18)\n", (10446, 10475), False, 'import hou\n')]
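A small sketch of the revert gesture used throughout the module above (assumes PySide2 is installed and isRevertToDefaultEvent is importable; the event is constructed by hand and is not part of the original file):

from PySide2.QtCore import QEvent, QPointF, Qt
from PySide2.QtGui import QMouseEvent

event = QMouseEvent(QEvent.MouseButtonRelease, QPointF(0, 0),
                Qt.MiddleButton, Qt.MiddleButton, Qt.ControlModifier)
print(isRevertToDefaultEvent(event))  # True: Ctrl + middle button reverts a widget to its default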
luhuadong/stm32f769-disco-demo
rt-thread/applications/server/udp_sender.py
c7fb0d627b02c3f87959f43f1447bc79f62a7099
#!/usr/bin/python3
"""
UDP sender
"""
import socket
import time
import sys

smsg = b'\xaa\x08\xfe\x00\xc9\xe6\x5f\xee'

def main():
    ip_port = ('192.168.3.188', 8888)
    if len(sys.argv) < 2:
        port = 8888
    else:
        port = int(sys.argv[1])

    # 1. Create the UDP socket
    udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    # 2. Bind to the local port
    udp_socket.bind(('', port))

    cnt = 100
    loop = 4
    print("send %d..." % (cnt * loop))

    # 3. Send the data in bursts of `loop` datagrams
    while cnt > 0:
        for i in range(0, loop):
            udp_socket.sendto(smsg, ip_port)
            print('.', end=' ')
            # recv_data = udp_socket.recvfrom(1024)
            # print(recv_data.decode('utf-8'))
        cnt = cnt - 1
        time.sleep(0.005)

    print("")
    print("finished")

    # 4. Close the socket
    udp_socket.close()
    print("close")

if __name__ == '__main__':
    main()
[((305, 353), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (318, 353), False, 'import socket\n'), ((948, 965), 'time.sleep', 'time.sleep', (['(0.005)'], {}), '(0.005)\n', (958, 965), False, 'import time\n')]
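A hypothetical receiving side for exercising the sender above (port and buffer size mirror the script; the names are illustrative):

import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(('', 8888))              # the port the sender targets by default
data, addr = sock.recvfrom(1024)
print(addr, data.hex())            # expect 'aa08fe00c9e65fee'
sock.close()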
LunaProject-Discord/yudzuki.py
yudzuki/role.py
7ff2d1f9e39ed5300a46c48fb95df50046814ede
__all__ = (
    "Role",
)


class Role:
    def __init__(self, data):
        self.data = data
        self._update(data)

    def _get_json(self):
        return self.data

    def __repr__(self):
        return (
            f"<Role id={self.id} name={self.name}>"
        )

    def __str__(self):
        return (
            f"{self.name}"
        )

    def _update(self, data):
        self._id = data["id"]
        self._color = data["color"]
        self._managed = data["managed"]
        self._name = data["name"]
        self._guild_id = data["guild_id"]
        self._mentionable = data["mentionable"]
        self._position = data["position"]
        self._hoisted = data["hoisted"]

    @property
    def id(self):
        return self._id

    @property
    def color(self):
        return self._color

    @property
    def managed(self):
        return self._managed

    @property
    def name(self):
        return self._name

    @property
    def guild_id(self):
        return self._guild_id

    @property
    def mentionable(self):
        return self._mentionable

    @property
    def position(self):
        return self._position

    @property
    def hoisted(self):
        return self._hoisted
[]
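A usage sketch for the Role wrapper above; the payload values are invented for illustration and only need to carry the keys read by _update:

role = Role({
    "id": 1, "color": 0xFFA500, "managed": False, "name": "Moderator",
    "guild_id": 42, "mentionable": True, "position": 3, "hoisted": True,
})
print(repr(role))                 # <Role id=1 name=Moderator>
print(role.position, role.hoisted)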
cabilangan112/intern-drf-blog
jassen/django/project/project/urls.py
b2d6c7a4af1316b2c7ce38547bd9df99b4f3e8b9
"""project URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/2.0/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.conf.urls import url, include from rest_framework import routers from blog import views from blog.views import PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet from django.conf import settings from django.conf.urls.static import static router = routers.DefaultRouter() router.register(r'hide',HideViewSet, base_name='hiddinn') router.register(r'draft',DraftViewSet, base_name='draft') router.register(r'post', PostViewSet, base_name='post') router.register(r'comment', CommentViewSet, base_name='comment') router.register(r'tags', TagViewSet, base_name='tags') router.register(r'category', CategoryViewSet, base_name='category') from django.contrib import admin from django.urls import path urlpatterns = [ path('admin/', admin.site.urls), url(r'^', include(router.urls)), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) ] urlpatterns.extend( static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) )
[((919, 942), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (940, 942), False, 'from rest_framework import routers\n'), ((1390, 1421), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (1394, 1421), False, 'from django.urls import path\n'), ((1437, 1457), 'django.conf.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (1444, 1457), False, 'from django.conf.urls import url, include\n'), ((1483, 1541), 'django.conf.urls.include', 'include', (['"""rest_framework.urls"""'], {'namespace': '"""rest_framework"""'}), "('rest_framework.urls', namespace='rest_framework')\n", (1490, 1541), False, 'from django.conf.urls import url, include\n'), ((1569, 1632), 'django.conf.urls.static.static', 'static', (['settings.STATIC_URL'], {'document_root': 'settings.STATIC_ROOT'}), '(settings.STATIC_URL, document_root=settings.STATIC_ROOT)\n', (1575, 1632), False, 'from django.conf.urls.static import static\n'), ((1639, 1700), 'django.conf.urls.static.static', 'static', (['settings.MEDIA_URL'], {'document_root': 'settings.MEDIA_ROOT'}), '(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n', (1645, 1700), False, 'from django.conf.urls.static import static\n')]
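An illustrative check of the routes the DefaultRouter above generates (assumes it runs inside the configured Django project):

for url_pattern in router.urls:
    print(url_pattern.pattern, url_pattern.name)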
everbrez/Deep-Learning-based-Chemical-Graphics-Analysis-Platform
deep-learning-app/src/models/__init__.py
5ecaedadd74e96891c28d9f73384e07c1526916b
print('init')
[]
Andy-Wilkinson/ChemMLToolk
chemmltoolkit/tensorflow/callbacks/variableScheduler.py
83efc7ea66d2def860a3e04ccd70d77fb689fddc
import tensorflow as tf


class VariableScheduler(tf.keras.callbacks.Callback):
    """Schedules an arbitrary variable during training.

    Arguments:
        variable: The variable to modify the value of.
        schedule: A function that takes an epoch index (integer, indexed
            from 0) and current variable value as input and returns a new
            value to assign to the variable as output.
        verbose: int. 0: quiet, 1: update messages.
    """

    def __init__(self, variable, schedule, verbose=0):
        super(VariableScheduler, self).__init__()
        self.variable = variable
        self.schedule = schedule
        self.verbose = verbose

    def on_epoch_begin(self, epoch, logs=None):
        value = self.variable.read_value()
        value = self.schedule(epoch, value)
        self.variable.assign(value)
        if self.verbose > 0:
            print(f'\nEpoch {epoch + 1}: VariableScheduler assigning '
                  f'variable {self.variable.name} to {value}.')
[]
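A hypothetical usage sketch: decay a temperature variable by 10% per epoch during fit(); the model and data are assumed to exist elsewhere:

import tensorflow as tf

temperature = tf.Variable(1.0, name='temperature')
scheduler = VariableScheduler(temperature,
                              schedule=lambda epoch, value: value * 0.9,
                              verbose=1)
# model.fit(x_train, y_train, epochs=10, callbacks=[scheduler])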
nijibabulu/chip_tools
join_peaks.py
04def22059a6018b3b49247d69d7b04eee1dcd89
#! /usr/bin/env python import os import sys import math import csv import collections import docopt import peakzilla_qnorm_mapq_patched as pz __doc__ = ''' Usage: join_peaks.py [options] PEAKS CHIP INPUT [ (PEAKS CHIP INPUT) ... ] This script finds peaks in common between multiple ChIP experiments determined by peakzilla. For each ChIP experiment, input a PEAKS file as otuput by peakzilla, and 2 BED files (CHIP and INPUT) as input to peakzilla. This will output a table with 3 columns identifying the peaks (Chromosome, Start, End, Name,'NPeaks','Spread','ChipSE','EnrichSE'). NPeaks signifies the number of peaks that were called among all the ChIP experiments, Spread is the difference between the biggest and smallest ChIP peak, ChipSE and EnrichSE are the standard error on the mean among the ChIP and Enrich values for the peaks. For each experinent "X", information about the peaks are output: 'XPZName','XPZScore', 'XPZChip','XPZInput','XPZEnrich','XPZFDR','XChip','XInput','XEnrich','XMapq'. All 'PZ' columns are the original output from peakzilla and the remaining columns are re-calculated in this script (also output regardless of the presence of a peak). Options: --max-distance=DIST maximum summit distance to join peaks [default: 10] ''' args = docopt.docopt(__doc__) #np.set_printoptions(precision=1,suppress=True) def stddev(l): mean = sum(l)/float(len(l)) variance = sum((x-mean)**2 for x in l)/(len(l)-1) return math.sqrt(variance) def std_err(l): return stddev(l)/math.sqrt(len(l)) class Peak(object): def dist(self,other): if self.chrom == other.chrom: return abs(self.center-other.center) else: return -1 def compute_fold_enrichment(self): self.computed_fold_enrichment = float(self.computed_chip )/self.computed_control class SlavePeak(Peak): def __init__(self,set_name,center): self.name = 'Slave' self.set_name = set_name self.center = center class PZPeak(Peak): def __init__(self,set_name,chrom,start,end,name,summit,score,chip,control, fold_enrichment,distribution_score,fdr): self.set_name = set_name self.chrom = chrom self.start = int(start) self.end = int(end) self.name = name self.center = int(summit) self.score = float(score) self.chip = float(chip) self.control = float(control) self.fold_enrichment = float(fold_enrichment) self.distribution_score = float(distribution_score) self.fdr = float(fdr) def width(self): return self.end-self.start+1 class JoinedPeak(Peak): WIDTH = 0 HEADER = ['#Chromosome','Start','End','Name','NPeaks','Spread','ChipSE','EnrichSE'] HEADER_TYPES = set() def __init__(self,pzpeak): self.chrom = pzpeak.chrom self.peaks = {} self.center = self.add(pzpeak) #pzpeak.center def can_add(self,pzpeak): return not pzpeak.set_name in self.peaks def add(self,pzpeak): self.HEADER_TYPES.add(pzpeak.set_name) self.peaks[pzpeak.set_name] = pzpeak return sum(p.center for p in self.peaks.values())/len(self.peaks) def name(self): return '%s_%d' % (self.chrom,self.center) @classmethod def header(cls): s = '\t'.join(cls.HEADER) + '\t' #'#Chromosome\tPosition\tNPeaks\tSpread\t' for htype in cls.HEADER_TYPES: s += '\t'.join( htype + '_' + x for x in [ 'PZName','PZScore','PZChip','PZInput','PZEnrich','PZFDR','Chip','Input','Enrich','Mapq'] ) + '\t' return s def __str__(self): s = '' called_peaks = 0 peak_signals = [] peak_enrichs = [] for set_name,peak in self.peaks.items(): if hasattr(peak,'score'): s += peak.name + '\t' + '\t'.join('%.2f' % x for x in [peak.score,peak.chip,peak.control,peak.fold_enrichment,peak.fdr]) + '\t' called_peaks += 1 #s += '%.1f\t%.1f\t%.1f\t%.1f\t' % ( 
#peak.score,peak.chip,peak.control,peak.fold_enrichment) else: s += 'NA\tNA\tNA\tNA\tNA\tNA\t' if hasattr(peak,'pzpeak'): s += '\t'.join('%.2f' % x for x in [ peak.pzpeak.nrom_signal,peak.pzpeak.norm_background,peak.pzpeak.fold_enrichment,peak.pzpeak.mapq_score ]) + '\t' peak_signals.append(peak.pzpeak.nrom_signal) peak_enrichs.append(peak.pzpeak.fold_enrichment) else: s += 'NA\tNA\tNA\tNA\tNA\t' #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment #s += '%.1f\t%.1f\t%.1f\t' % ( #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment) #s += '\t'.join([str(x) for x in #[peak.score,peak.chip,peak.fold_enrichment]]) try: if len(peak_signals): s = '\t'.join([self.chrom,str(self.center-self.WIDTH/2),str(self.center+self.WIDTH/2), self.chrom+'_'+str(self.center),str(called_peaks)]) +\ '\t%.2f\t%.2f\t%.2f\t' % ( max(peak_signals)/(min(peak_signals) + sys.float_info.epsilon), std_err(peak_signals), std_err(peak_enrichs), ) + s else: s = '\t'.join([self.chrom,str(self.center), self.chrom+'_'+str(self.center),str(called_peaks)]) +\ '\tNA\tNA\tNA\t' + s except: print max(peak_signals),min(peak_signals) raise return s class PeakScorer(pz.PeakContainer): def __init__(self, ip_tags, control_tags, peak_size, plus_model, minus_model): self.ip_tags = ip_tags self.control_tags = control_tags self.peak_size = peak_size self.peak_shift = (peak_size - 1) / 2 self.score_threshold = 10 self.plus_model = plus_model self.minus_model = minus_model self.peaks = collections.defaultdict(list) self.peak_count = 0 self.plus_window = collections.deque([]) self.minus_window = collections.deque([]) self.position = 0 def fill_scores(self,chrom,libtype,scoretype): plus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '+')) plus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '+')) minus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '-')) minus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '-')) self.plus_window = collections.deque([]) self.minus_window = collections.deque([]) self.plus_mapq = collections.deque([]) self.minus_mapq = collections.deque([]) for peak in self.peaks[chrom]: # fill windows while plus_tags and plus_tags[0] <= (peak.position + self.peak_shift): self.plus_window.append(plus_tags.popleft()) self.plus_mapq.append(plus_mapq.popleft()) while minus_tags and minus_tags[0] <= (peak.position + self.peak_shift): self.minus_window.append(minus_tags.popleft()) self.minus_mapq.append(minus_mapq.popleft()) # get rid of old tags not fitting in the window any more while self.plus_window and self.plus_window[0] < (peak.position - self.peak_shift): self.plus_window.popleft() self.plus_mapq.popleft() while self.minus_window and self.minus_window[0] < (peak.position - self.peak_shift): self.minus_window.popleft() self.minus_mapq.popleft() # calculate normalized background level # add position to region if over threshold self.position = peak.position if libtype == 'ip': peak.mapq_score = float(sum(self.plus_mapq) + sum(self.minus_mapq) )/max(1,(len(self.plus_mapq) + len(self.minus_mapq))) #if peak.name == 'Peak_12869': #print zip(self.plus_window,self.plus_mapq) #print zip(self.minus_window,self.minus_mapq) #print sum(self.plus_mapq) , sum(self.minus_mapq), len(self.plus_mapq) , len(self.minus_mapq) #print peak.mapq_score setattr(peak,scoretype,self.calculate_score()) def score_peaks(self,peak_dict): for chrom,peaks in peak_dict.items(): for jp in peaks: jp.pzpeak = pz.Peak() 
jp.pzpeak.size = self.peak_size jp.pzpeak.shift = self.peak_shift jp.pzpeak.position = jp.center jp.pzpeak.name = jp.name self.peaks[chrom].append(jp.pzpeak) self.peak_count += 1 for chrom,peaks in self.peaks.items(): self.peaks[chrom] = sorted(self.peaks[chrom], lambda a,b: cmp(a.position,b.position)) self.fill_scores(chrom,'ip','score') self.fill_scores(chrom,'control','background') self.determine_fold_enrichment(chrom) self.determine_signal_over_background(chrom) class FileSet(object): def __init__(self,peakfile,chipfile,controlfile): self.peakfile = peakfile self.chip_file = chipfile self.chip_tags = pz.TagContainer(store_mapq=True) self.chip_tags(chipfile,True) self.control_file = controlfile self.control_tags = pz.TagContainer(store_mapq=True) self.control_tags(controlfile,True) #print self.chip_tags, self.control_tags def get_file(self,type): return getattr(self, '%s_file' % type) def get_tagcount(self,type): return getattr(self, '%s_tags' % type) maxdist = int(args['--max-distance']) peaksets = {} filesets = {} for peakfile,chipfile,controlfile in zip(args['PEAKS'],args['CHIP'],args['INPUT']): set_name = os.path.basename(peakfile).split('.')[0] peaksets[set_name] = collections.defaultdict(list) filesets[set_name] = FileSet(peakfile,chipfile,controlfile) r = csv.reader(open(peakfile),delimiter='\t') r.next() # header ''' #XXX: limit peaks maxpeaks = 20 peakcounter = 0 for row in r: if float(row[5]) >= 100 and float(row[8]) >= 10: peakcounter += 1 if peakcounter > maxpeaks: break peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) ''' for row in r: peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) JoinedPeak.WIDTH += peaksets[set_name].itervalues().next()[0].width() JoinedPeak.WIDTH /= len(peaksets) # find closest peak to each peak in the new set # make new peaks when there's no qualifying one npeaks = 0 joined_peaks = collections.defaultdict(list) for set_name,peakset in peaksets.items(): for chrom,peaks in peakset.items(): for peak in peaks: closest = None for jp in joined_peaks[chrom]: dist = jp.dist(peak) if dist >= 0 and dist <= maxdist: if closest is None or closest.dist(peak) > dist: closest = jp if closest is None or not closest.can_add(peak): npeaks += 1 joined_peaks[chrom].append(JoinedPeak(peak)) else: closest.add(peak) plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) for set_name,fileset in filesets.items(): scorer = PeakScorer(fileset.chip_tags,fileset.control_tags, JoinedPeak.WIDTH,plus_model,minus_model) peaks_to_score = collections.defaultdict(list) for chrom,peaks in joined_peaks.items(): for jp in peaks: if set_name not in jp.peaks: jp.peaks[set_name] = SlavePeak(set_name,jp.center) peaks_to_score[chrom].append(jp.peaks[set_name]) scorer.score_peaks(peaks_to_score) print JoinedPeak.header() for chrom,peaks in joined_peaks.items(): for peak in peaks: print peak #plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) #def get_coverage(fileset,type,jp,pseudocount=0): #score = 0 #start = max(0,jp.center-JoinedPeak.WIDTH/2) #for aln in fileset.get_file(type).fetch( #reference = jp.chrom, start = start, #end = jp.center+JoinedPeak.WIDTH/2): #if aln.is_reverse: #score += minus_model[aln.pos-start] #else: #score += plus_model[aln.pos-start] #return (score+pseudocount)*10.**6/fileset.get_tagcount(type) #return 10.**6*fileset.get_file(type).count( #reference = jp.chrom, #start = max(0,jp.center-JoinedPeak.WIDTH/2), #end = jp.center+JoinedPeak.WIDTH/2)/fileset.get_tagcount(type) #start = jp.center, #end = jp.center+1) #matrix = 
np.zeros((npeaks,len(peaksets)*2)) #i = 0 #for chrom,peaks in joined_peaks.items(): #for jp in peaks: #for j,set_name in enumerate(peaksets.keys()): #control_coverage = get_coverage(filesets[set_name],'control',jp,pseudocount=1) #chip_coverage = get_coverage(filesets[set_name],'chip',jp) #matrix[i][j] = float(chip_coverage) #matrix[i][j+len(peaksets)] = float(control_coverage) #i += 1 #quantile_normalize.quantile_norm(matrix) #i = 0 #for chrom,peaks in joined_peaks.items(): #for jp in peaks: #for j,set_name in enumerate(peaksets.keys()): #if set_name not in jp.peaks: #jp.peaks[set_name] = SlavePeak( #set_name,matrix[i][j],matrix[i][j + len(peaksets)]) #else: #jp.peaks[set_name].computed_chip = matrix[i][j] #jp.peaks[set_name].computed_control = matrix[i][j+len(peaksets)] #jp.peaks[set_name].compute_fold_enrichment() #print jp #i += 1 ''' i = 0 for chrom,peaks in joined_peaks.items(): for jp in peaks: for j,set_name in enumerate(filesets.keys()): matrix[i][j] = float(jp.peaks[set_name].computed_chip) matrix[i][j+len(peaksets)] = float(jp.peaks[set_name].computed_control) i += 1 '''
[]
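An illustrative invocation of the script above, matching the docopt usage string (all file names are placeholders):

$ python join_peaks.py --max-distance=20 exp1_peaks.tsv exp1_chip.bed exp1_input.bed \
      exp2_peaks.tsv exp2_chip.bed exp2_input.bed > joined_peaks.tsv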
uptown/django-town
django_town/rest_swagger/views.py
4c3b078a8ce5dcc275d65faa4a1cdfb7ebc74a50
from django_town.rest import RestApiView, rest_api_manager from django_town.http import http_json_response from django_town.cache.utlis import SimpleCache from django_town.oauth2.swagger import swagger_authorizations_data from django_town.social.oauth2.permissions import OAuth2Authenticated, OAuth2AuthenticatedOrReadOnly from django_town.social.permissions import Authenticated, AuthenticatedOrReadOnly class ApiDocsView(RestApiView): def read(self, request, api_version): def load_cache(api_version="alpha"): manager = rest_api_manager(api_version) ret = {'title': manager.name, 'description': manager.description, 'apiVersion': manager.api_version, 'swaggerVersion': "1.2", 'basePath': manager.base_url, 'resourcePath': manager.base_url, 'info': manager.info, 'authorizations': swagger_authorizations_data()} apis = [] models = { "Error": { "id": "Error", "required": ['error'], "properties": { "error": { "type": "string" }, "field": { "type": "string" }, "message": { "type": "string" }, "resource": { "type": "string" } } } } for view_cls in manager.api_list: operations = [] global_params = [] path = view_cls.path() if path == "": continue if '{}' in path: path = path.replace('{}', '{pk}') global_params.append( { "paramType": "path", "name": 'pk', "description": 'primary key for object', "dataType": 'integer', "format": 'int64', "required": True, } ) responseMessages = [ { 'code': 404, "message": "not_found", "responseModel": "Error" }, { 'code': 500, "message": "internal_error", "responseModel": "Error" }, { 'code': 409, "message": "method_not_allowed", "responseModel": "Error" }, { 'code': 409, "message": "conflict", "responseModel": "Error" }, { 'code': 403, "message": "forbidden", "responseModel": "Error" }, { 'code': 401, "message": "permission_denied", "responseModel": "Error" }, { 'code': 401, "message": "unauthorized", "responseModel": "Error" }, { 'code': 400, "message": "form_invalid", "responseModel": "Error" }, { 'code': 400, "message": "form_required", "responseModel": "Error" }, { 'code': 400, "message": "bad_request", "responseModel": "Error" }, ] current_api = { 'path': path, 'description': view_cls.__doc__, } operations = [] if 'create' in view_cls.crud_method_names and hasattr(view_cls, 'create'): create_op = { 'method': 'POST', 'parameters': global_params, 'responseMessages': responseMessages, 'nickname': 'create ' + path, } operations.append(create_op) if 'read' in view_cls.crud_method_names and hasattr(view_cls, 'read'): op = { 'method': 'GET', 'responseMessages': responseMessages, 'nickname': 'read ' + path } params = global_params.copy() for each_permission in view_cls.permission_classes: if issubclass(each_permission, OAuth2Authenticated): params.append( { "paramType": "query", "name": 'access_token', "dataType": 'string', "required": True, } ) if hasattr(view_cls, 'read_safe_parameters'): for each in view_cls.read_safe_parameters: if isinstance(each, tuple): if each[1] == int: params.append( { "paramType": "query", "name": each[0], "dataType": 'int', "format": 'int64', "required": True, } ) elif each[1] == float: params.append( { "paramType": "query", "name": each[0], "dataType": 'float', "format": 'float', "required": True, } ) else: params.append( { "paramType": "query", "name": each[0], "dataType": 'string', "required": True, } ) else: params.append( { "paramType": "query", "name": each, "dataType": 'string', "required": True, } ) pass pass op['parameters'] = params operations.append(op) if 'update' in view_cls.crud_method_names and hasattr(view_cls, 'update'): op = { 
                            'method': 'PUT',
                            'parameters': global_params,
                            'responseMessages': responseMessages,
                            'errorResponses': [],
                            'nickname': 'update ' + path,
                        }
                        operations.append(op)
                    if 'delete' in view_cls.crud_method_names and hasattr(view_cls, 'delete'):
                        op = {
                            'method': 'DELETE',
                            'parameters': global_params,
                            'responseMessages': responseMessages,
                            'errorResponses': [],
                            'nickname': 'delete ' + path,
                        }
                        operations.append(op)
                    current_api['operations'] = operations
                    apis.append(current_api)

            ret['apis'] = apis
            ret["models"] = models
            return ret

        ret = SimpleCache(key_format="api-doc:%(api_version)s", duration=60 * 60 * 24,
                    load_callback=load_cache).get(api_version=api_version)
        response = http_json_response(ret)
        response["Access-Control-Allow-Origin"] = "*"
        response["Access-Control-Allow-Methods"] = "GET"
        response["Access-Control-Max-Age"] = "1000"
        response["Access-Control-Allow-Headers"] = "*"
        return response
[((9207, 9230), 'django_town.http.http_json_response', 'http_json_response', (['ret'], {}), '(ret)\n', (9225, 9230), False, 'from django_town.http import http_json_response\n'), ((548, 577), 'django_town.rest.rest_api_manager', 'rest_api_manager', (['api_version'], {}), '(api_version)\n', (564, 577), False, 'from django_town.rest import RestApiView, rest_api_manager\n'), ((897, 926), 'django_town.oauth2.swagger.swagger_authorizations_data', 'swagger_authorizations_data', ([], {}), '()\n', (924, 926), False, 'from django_town.oauth2.swagger import swagger_authorizations_data\n'), ((9034, 9136), 'django_town.cache.utlis.SimpleCache', 'SimpleCache', ([], {'key_format': '"""api-doc:%(api_version)s"""', 'duration': '(60 * 60 * 24)', 'load_callback': 'load_cache'}), "(key_format='api-doc:%(api_version)s', duration=60 * 60 * 24,\n load_callback=load_cache)\n", (9045, 9136), False, 'from django_town.cache.utlis import SimpleCache\n')]
mastermind88/dash
components/dash-core-components/tests/integration/dropdown/test_dynamic_options.py
760af721980e18d91bdbc4e204d1d063c7ed325c
from dash import Dash, Input, Output, dcc, html from dash.exceptions import PreventUpdate def test_dddo001_dynamic_options(dash_dcc): dropdown_options = [ {"label": "New York City", "value": "NYC"}, {"label": "Montreal", "value": "MTL"}, {"label": "San Francisco", "value": "SF"}, ] app = Dash(__name__) app.layout = dcc.Dropdown(id="my-dynamic-dropdown", options=[]) @app.callback( Output("my-dynamic-dropdown", "options"), [Input("my-dynamic-dropdown", "search_value")], ) def update_options(search_value): if not search_value: raise PreventUpdate return [o for o in dropdown_options if search_value in o["label"]] dash_dcc.start_server(app) # Get the inner input used for search value. input_ = dash_dcc.find_element("#my-dynamic-dropdown input") # Focus on the input to open the options menu input_.send_keys("x") # No options to be found with `x` in them, should show the empty message. dash_dcc.wait_for_text_to_equal(".Select-noresults", "No results found") input_.clear() input_.send_keys("o") options = dash_dcc.find_elements("#my-dynamic-dropdown .VirtualizedSelectOption") # Should show all options. assert len(options) == 3 # Searching for `on` input_.send_keys("n") options = dash_dcc.find_elements("#my-dynamic-dropdown .VirtualizedSelectOption") assert len(options) == 1 print(options) assert options[0].text == "Montreal" assert dash_dcc.get_logs() == [] def test_dddo002_array_comma_value(dash_dcc): app = Dash(__name__) dropdown = dcc.Dropdown( options=["New York, NY", "Montreal, QC", "San Francisco, CA"], value=["San Francisco, CA"], multi=True, ) app.layout = html.Div(dropdown) dash_dcc.start_server(app) dash_dcc.wait_for_text_to_equal("#react-select-2--value-0", "San Francisco, CA\n ") assert dash_dcc.get_logs() == [] def test_dddo003_value_no_options(dash_dcc): app = Dash(__name__) app.layout = html.Div( [ dcc.Dropdown(value="foobar", id="dropdown"), ] ) dash_dcc.start_server(app) assert dash_dcc.get_logs() == [] dash_dcc.wait_for_element("#dropdown")
[((328, 342), 'dash.Dash', 'Dash', (['__name__'], {}), '(__name__)\n', (332, 342), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((360, 410), 'dash.dcc.Dropdown', 'dcc.Dropdown', ([], {'id': '"""my-dynamic-dropdown"""', 'options': '[]'}), "(id='my-dynamic-dropdown', options=[])\n", (372, 410), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((1616, 1630), 'dash.Dash', 'Dash', (['__name__'], {}), '(__name__)\n', (1620, 1630), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((1647, 1767), 'dash.dcc.Dropdown', 'dcc.Dropdown', ([], {'options': "['New York, NY', 'Montreal, QC', 'San Francisco, CA']", 'value': "['San Francisco, CA']", 'multi': '(True)'}), "(options=['New York, NY', 'Montreal, QC', 'San Francisco, CA'],\n value=['San Francisco, CA'], multi=True)\n", (1659, 1767), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((1812, 1830), 'dash.html.Div', 'html.Div', (['dropdown'], {}), '(dropdown)\n', (1820, 1830), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((2047, 2061), 'dash.Dash', 'Dash', (['__name__'], {}), '(__name__)\n', (2051, 2061), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((439, 479), 'dash.Output', 'Output', (['"""my-dynamic-dropdown"""', '"""options"""'], {}), "('my-dynamic-dropdown', 'options')\n", (445, 479), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((490, 534), 'dash.Input', 'Input', (['"""my-dynamic-dropdown"""', '"""search_value"""'], {}), "('my-dynamic-dropdown', 'search_value')\n", (495, 534), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((2112, 2155), 'dash.dcc.Dropdown', 'dcc.Dropdown', ([], {'value': '"""foobar"""', 'id': '"""dropdown"""'}), "(value='foobar', id='dropdown')\n", (2124, 2155), False, 'from dash import Dash, Input, Output, dcc, html\n')]
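A standalone sketch of the dynamic-options pattern exercised by test_dddo001 above (assumes dash is installed; names and option strings are illustrative):

from dash import Dash, Input, Output, dcc
from dash.exceptions import PreventUpdate

options = ["New York City", "Montreal", "San Francisco"]
app = Dash(__name__)
app.layout = dcc.Dropdown(id="dd", options=[])

@app.callback(Output("dd", "options"), Input("dd", "search_value"))
def update_options(search_value):
    if not search_value:
        raise PreventUpdate          # keep the options empty until the user types
    return [o for o in options if search_value in o]

if __name__ == "__main__":
    app.run_server(debug=True)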
dipghoshraj/live-video-streming-with-web-socket
Server.py
dda924e22a4c40d225ec39dd94ee1e489233c403
import cv2
import io
import socket
import struct
import time
import pickle
import zlib

client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('127.0.0.1', 8485))
connection = client_socket.makefile('wb')

cam = cv2.VideoCapture("E:/songs/Attention Charlie Puth(GabbarWorld.com) 1080p.mp4")
cam.set(3, 320)   # requested capture width (has no effect on video files)
cam.set(4, 240)   # requested capture height (has no effect on video files)

img_counter = 0
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]

while True:
    ret, frame = cam.read()
    if not ret:
        break  # end of stream; avoids passing None to imencode
    result, frame = cv2.imencode('.jpg', frame, encode_param)
    # data = zlib.compress(pickle.dumps(frame, 0))
    data = pickle.dumps(frame, 0)
    size = len(data)

    # Length-prefixed framing: a 4-byte big-endian size, then the pickled JPEG
    print("{}: {}".format(img_counter, size))
    client_socket.sendall(struct.pack(">L", size) + data)
    img_counter += 1

cam.release()
[((103, 152), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (116, 152), False, 'import socket\n'), ((244, 322), 'cv2.VideoCapture', 'cv2.VideoCapture', (['"""E:/songs/Attention Charlie Puth(GabbarWorld.com) 1080p.mp4"""'], {}), "('E:/songs/Attention Charlie Puth(GabbarWorld.com) 1080p.mp4')\n", (260, 322), False, 'import cv2\n'), ((482, 523), 'cv2.imencode', 'cv2.imencode', (['""".jpg"""', 'frame', 'encode_param'], {}), "('.jpg', frame, encode_param)\n", (494, 523), False, 'import cv2\n'), ((585, 607), 'pickle.dumps', 'pickle.dumps', (['frame', '(0)'], {}), '(frame, 0)\n', (597, 607), False, 'import pickle\n'), ((701, 724), 'struct.pack', 'struct.pack', (['""">L"""', 'size'], {}), "('>L', size)\n", (712, 724), False, 'import struct\n')]
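A hypothetical receiving end that matches the length-prefixed framing above (host, port, and window name are illustrative):

import pickle
import socket
import struct

import cv2

server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(('127.0.0.1', 8485))
server.listen(1)
conn, addr = server.accept()

buffer = b''
header_size = struct.calcsize(">L")
while True:
    while len(buffer) < header_size:            # read the 4-byte size prefix
        buffer += conn.recv(4096)
    size = struct.unpack(">L", buffer[:header_size])[0]
    buffer = buffer[header_size:]
    while len(buffer) < size:                   # read the pickled JPEG payload
        buffer += conn.recv(4096)
    frame = cv2.imdecode(pickle.loads(buffer[:size]), cv2.IMREAD_COLOR)
    buffer = buffer[size:]
    cv2.imshow('stream', frame)
    if cv2.waitKey(1) == ord('q'):
        break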
gunpowder78/google-research
hal/agent/tf2_utils.py
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Utilities for Tensorflow 2.0.

Partially adapted from:
https://www.tensorflow.org/tutorials/text/image_captioning
"""
# Lint as: python3
# pylint: disable=invalid-name

from __future__ import absolute_import
from __future__ import division

import tensorflow as tf


def film_params(sentence_embedding, n_layer_channel):
  """Generate FiLM parameters from a sentence embedding.

  Generate FiLM parameters from a sentence embedding. This method assumes a
  batch dimension exists.

  Args:
    sentence_embedding: a tensor containing batched sentence embeddings to be
      transformed
    n_layer_channel: a list of integers specifying how many channels are at
      each hidden layer to be FiLM'ed

  Returns:
    a tuple of tensors the same length as n_layer_channel. Each element
    contains all gamma_i and beta_i for a single hidden layer.
  """
  n_total = sum(n_layer_channel) * 2
  all_params = tf.keras.layers.Dense(
      n_total, activation=tf.nn.relu)(sentence_embedding)
  return tf.split(all_params, [c * 2 for c in n_layer_channel], 1)


def stack_conv_layer(layer_cfg, padding='same'):
  """Stack convolution layers per layer_cfg.

  Args:
    layer_cfg: list of integer tuples specifying the parameters of each layer;
      each tuple should be (channel, kernel size, strides)
    padding: what kind of padding the conv layers use

  Returns:
    the keras model with stacked conv layers
  """
  layers = []
  for cfg in layer_cfg[:-1]:
    layers.append(
        tf.keras.layers.Conv2D(
            filters=cfg[0],
            kernel_size=cfg[1],
            strides=cfg[2],
            activation=tf.nn.relu,
            padding=padding))
  final_cfg = layer_cfg[-1]
  layers.append(
      tf.keras.layers.Conv2D(
          final_cfg[0], final_cfg[1], final_cfg[2], padding=padding))
  return tf.keras.Sequential(layers)


def stack_dense_layer(layer_cfg):
  """Stack Dense layers.

  Args:
    layer_cfg: list of integers specifying the number of units at each layer

  Returns:
    the keras model with stacked dense layers
  """
  layers = []
  for cfg in layer_cfg[:-1]:
    layers.append(tf.keras.layers.Dense(cfg, activation=tf.nn.relu))
  layers.append(tf.keras.layers.Dense(layer_cfg[-1]))
  return tf.keras.Sequential(layers)


def soft_variables_update(source_variables, target_variables, polyak_rate=1.0):
  """Update the target variables using exponential moving average.

  Specifically, v_s' = v_s * polyak_rate + (1-polyak_rate) * v_t

  Args:
    source_variables: the moving average variables
    target_variables: the new observations
    polyak_rate: rate of moving average

  Returns:
    Operation that does the update
  """
  updates = []
  for (v_s, v_t) in zip(source_variables, target_variables):
    v_t.shape.assert_is_compatible_with(v_s.shape)

    def update_fn(v1, v2):
      """Update variables."""
      # For non-trainable variables do hard updates.
      return v1.assign(polyak_rate * v1 + (1 - polyak_rate) * v2)

    update = update_fn(v_t, v_s)
    updates.append(update)
  return updates


def vector_tensor_product(a, b):
  """Returns a keras layer that performs an outer product between a and b."""
  # a shape: [B, ?, d], b shape: [B, ?, d]
  shape_layer = tf.keras.layers.Lambda(tf.shape)
  shape = shape_layer(b)
  shape_numpy = b.get_shape()
  variable_length = shape[1]  # variable_len = ?
  expand_dims_layer_1 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1]))
  expand_dims_layer_2 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1]))
  a = expand_dims_layer_1(a)  # a shape: [B, ?, 1, d]
  b = expand_dims_layer_2(b)  # b shape: [B, ?, 1, d]
  tile_layer = tf.keras.layers.Lambda(
      lambda inputs: tf.tile(inputs[0], multiples=inputs[1]))
  a = tile_layer((a, [1, 1, variable_length, 1]))  # a shape: [B, ?, ?, d]
  b = tile_layer((b, [1, 1, variable_length, 1]))  # b shape: [B, ?, ?, d]
  b = tf.keras.layers.Permute((2, 1, 3))(b)  # b shape: [B, ?, ?, d]
  return tf.keras.layers.concatenate([a, b])  # shape: [B, ?, ?, 2*d]


class BahdanauAttention(tf.keras.Model):
  """Bahdanau Attention Layer.

  Attributes:
    w1: weights that process the feature
    w2: weights that process the memory state
    v: projection layer that projects the score vector to a scalar
  """

  def __init__(self, units):
    """Initialize Bahdanau attention layer.

    Args:
      units: size of the dense layers
    """
    super(BahdanauAttention, self).__init__()
    self.W1 = tf.keras.layers.Dense(units)
    self.W2 = tf.keras.layers.Dense(units)
    self.V = tf.keras.layers.Dense(1)

  def call(self, features, hidden):
    # features(CNN_encoder output) shape == (batch_size, 64, embedding_dim)
    # hidden shape == (batch_size, hidden_size)
    # hidden_with_time_axis shape == (batch_size, 1, hidden_size)
    hidden_with_time_axis = tf.expand_dims(hidden, 1)

    # score shape == (batch_size, 64, hidden_size)
    score = tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis))

    # attention_weights shape == (batch_size, 64, 1)
    # you get 1 at the last axis because you are applying score to self.V
    attention_weights = tf.nn.softmax(self.V(score), axis=1)

    # context_vector shape after sum == (batch_size, hidden_size)
    context_vector = attention_weights * features
    context_vector = tf.reduce_sum(context_vector, axis=1)

    return context_vector, attention_weights


class GRUEnecoder(tf.keras.Model):
  """TF2.0 GRU encoder.

  Attributes:
    embedding: word embedding matrix
    gru: the GRU layer
  """

  def __init__(self, embedding_dim, units, vocab_size):
    """Initialize the GRU encoder.

    Args:
      embedding_dim: dimension of word embedding
      units: number of units of the memory state
      vocab_size: total number of vocabulary
    """
    super(GRUEnecoder, self).__init__()
    self._units = units
    self.embedding = tf.keras.layers.Embedding(vocab_size, embedding_dim)
    self.gru = tf.keras.layers.GRU(
        self._units,
        return_sequences=True,
        return_state=True,
        recurrent_initializer='glorot_uniform')

  def call(self, x, hidden):
    # x shape after passing through embedding == (batch_size, 1, embedding_dim)
    x = self.embedding(x)
    # passing the concatenated vector to the GRU
    output, state = self.gru(x)
    return output, state

  def reset_state(self, batch_size):
    return tf.zeros((batch_size, self._units))
[((1519, 1563), 'tensorflow.layers.dense', 'tf.layers.dense', (['sentence_embedding', 'n_total'], {}), '(sentence_embedding, n_total)\n', (1534, 1563), True, 'import tensorflow as tf\n'), ((1579, 1650), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['(2 * sum * n_layer_channel)'], {'activation': 'tf.nn.relu'}), '(2 * sum * n_layer_channel, activation=tf.nn.relu)\n', (1600, 1650), True, 'import tensorflow as tf\n'), ((1669, 1728), 'tensorflow.split', 'tf.split', (['all_params', '[(c * 2) for c in n_layer_channel]', '(1)'], {}), '(all_params, [(c * 2) for c in n_layer_channel], 1)\n', (1677, 1728), True, 'import tensorflow as tf\n'), ((2484, 2511), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', (['layers'], {}), '(layers)\n', (2503, 2511), True, 'import tensorflow as tf\n'), ((2897, 2924), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', (['layers'], {}), '(layers)\n', (2916, 2924), True, 'import tensorflow as tf\n'), ((3889, 3921), 'tensorflow.keras.layers.Lambda', 'tf.keras.layers.Lambda', (['tf.shape'], {}), '(tf.shape)\n', (3911, 3921), True, 'import tensorflow as tf\n'), ((4050, 4099), 'tensorflow.keras.layers.Reshape', 'tf.keras.layers.Reshape', (['(-1, 1, shape_numpy[-1])'], {}), '((-1, 1, shape_numpy[-1]))\n', (4073, 4099), True, 'import tensorflow as tf\n'), ((4124, 4173), 'tensorflow.keras.layers.Reshape', 'tf.keras.layers.Reshape', (['(-1, 1, shape_numpy[-1])'], {}), '((-1, 1, shape_numpy[-1]))\n', (4147, 4173), True, 'import tensorflow as tf\n'), ((4611, 4646), 'tensorflow.keras.layers.concatenate', 'tf.keras.layers.concatenate', (['[a, b]'], {}), '([a, b])\n', (4638, 4646), True, 'import tensorflow as tf\n'), ((2381, 2467), 'tensorflow.keras.layers.Conv2D', 'tf.keras.layers.Conv2D', (['final_cfg[0]', 'final_cfg[1]', 'final_cfg[2]'], {'padding': 'padding'}), '(final_cfg[0], final_cfg[1], final_cfg[2], padding=\n padding)\n', (2403, 2467), True, 'import tensorflow as tf\n'), ((2850, 2886), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['layer_cfg[-1]'], {}), '(layer_cfg[-1])\n', (2871, 2886), True, 'import tensorflow as tf\n'), ((4539, 4573), 'tensorflow.keras.layers.Permute', 'tf.keras.layers.Permute', (['(2, 1, 3)'], {}), '((2, 1, 3))\n', (4562, 4573), True, 'import tensorflow as tf\n'), ((5105, 5133), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['units'], {}), '(units)\n', (5126, 5133), True, 'import tensorflow as tf\n'), ((5148, 5176), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['units'], {}), '(units)\n', (5169, 5176), True, 'import tensorflow as tf\n'), ((5190, 5214), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['(1)'], {}), '(1)\n', (5211, 5214), True, 'import tensorflow as tf\n'), ((5471, 5496), 'tensorflow.expand_dims', 'tf.expand_dims', (['hidden', '(1)'], {}), '(hidden, 1)\n', (5485, 5496), True, 'import tensorflow as tf\n'), ((5951, 5988), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['context_vector'], {'axis': '(1)'}), '(context_vector, axis=1)\n', (5964, 5988), True, 'import tensorflow as tf\n'), ((6517, 6569), 'tensorflow.keras.layers.Embedding', 'tf.keras.layers.Embedding', (['vocab_size', 'embedding_dim'], {}), '(vocab_size, embedding_dim)\n', (6542, 6569), True, 'import tensorflow as tf\n'), ((6585, 6702), 'tensorflow.keras.layers.GRU', 'tf.keras.layers.GRU', (['self.units'], {'return_sequences': '(True)', 'return_state': '(True)', 'recurrent_initializer': '"""glorot_uniform"""'}), "(self.units, return_sequences=True, return_state=True,\n recurrent_initializer='glorot_uniform')\n", 
(6604, 6702), True, 'import tensorflow as tf\n'), ((7023, 7058), 'tensorflow.zeros', 'tf.zeros', (['(batch_size, self._units)'], {}), '((batch_size, self._units))\n', (7031, 7058), True, 'import tensorflow as tf\n'), ((2153, 2271), 'tensorflow.keras.layers.Conv2D', 'tf.keras.layers.Conv2D', ([], {'filters': 'cfg[0]', 'kernel_size': 'cfg[1]', 'strides': 'cfg[2]', 'activation': 'tf.nn.relu', 'padding': 'padding'}), '(filters=cfg[0], kernel_size=cfg[1], strides=cfg[2],\n activation=tf.nn.relu, padding=padding)\n', (2175, 2271), True, 'import tensorflow as tf\n'), ((2783, 2832), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['cfg'], {'activation': 'tf.nn.relu'}), '(cfg, activation=tf.nn.relu)\n', (2804, 2832), True, 'import tensorflow as tf\n'), ((4342, 4381), 'tensorflow.tile', 'tf.tile', (['inputs[0]'], {'multiples': 'inputs[1]'}), '(inputs[0], multiples=inputs[1])\n', (4349, 4381), True, 'import tensorflow as tf\n')]
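A quick shape check for two of the helpers above, run eagerly under TF2 (the sizes are arbitrary):

import tensorflow as tf

embedding = tf.random.normal([4, 32])                  # a batch of sentence embeddings
gammas_betas = film_params(embedding, [8, 16])
print(gammas_betas[0].shape, gammas_betas[1].shape)    # (4, 16) (4, 32)

cnn = stack_conv_layer([(16, 3, 1), (32, 3, 2)])
print(cnn(tf.random.normal([4, 28, 28, 3])).shape)      # (4, 14, 14, 32)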
Wolkabout/WolkConnect-Python-
wolk/logger_factory.py
11412e3f88911170f587b5e857d07ab41c8f52b5
"""LoggerFactory Module.""" # Copyright 2020 WolkAbout Technology s.r.o. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from typing import List from typing import Optional class LoggerFactory: """Factory for issuing ready to use loggers in other modules.""" def __init__(self, level=logging.INFO, console=True, log_file=None): # type: ignore """ Create a factory that will give loggers through calls to get_logger(). :param level: Set the desired logging level :type level: int or None :param console: Should the log messages be outputted to the console :type console: bool or None :param log_file: Name of the log file to output to :type log_file: str or None """ self.level = level self.device_key = None self.console = console self.log_file = log_file self.loggers: List[logging.Logger] = [] def set_device_key(self, device_key: str) -> None: """ Set device key. :param device_key: Device key :type device_key: str """ self.device_key = device_key def get_logger( self, name: str, level: Optional[int] = None ) -> logging.Logger: """ Return a ready to use logger instance. :param name: Name of the logger :type name: str :param level: Override the log level :type level: int or None :returns: Logger instance :rtype: logger """ logger = logging.getLogger(name) if level is not None: logger.setLevel(level) else: logger.setLevel(self.level) if self.device_key is not None: formatter = logging.Formatter( "%(asctime)s - '" + str(self.device_key) + "' - %(levelname)s [%(filename)s:%(lineno)s" + " - %(funcName)s()] - %(message)s" ) else: formatter = logging.Formatter( "%(asctime)s - %(levelname)s [%(filename)s:%(lineno)s" + " - %(funcName)s()] - %(message)s" ) if self.console: console_handler = logging.StreamHandler() if level is not None: console_handler.setLevel(level) else: console_handler.setLevel(self.level) console_handler.setFormatter(formatter) logger.addHandler(console_handler) if self.log_file is not None: file_handler = logging.FileHandler(self.log_file) if level is not None: file_handler.setLevel(level) else: file_handler.setLevel(self.level) file_handler.setFormatter(formatter) logger.addHandler(file_handler) self.loggers.append(logger) return logger # Logging levels available: NOTSET, INFO, DEBUG logger_factory = LoggerFactory(level=logging.INFO) LEVELS = { "debug": logging.DEBUG, "info": logging.INFO, "warning": logging.WARNING, "error": logging.ERROR, "critical": logging.CRITICAL, "notset": logging.NOTSET, } def logging_config(level: str, log_file: Optional[str] = None) -> None: """ Set desired log level and designate a log file. 
    :param level: Available levels: debug, info, warning, error, critical, notset
    :type level: str
    :param log_file: path to log file
    :type log_file: str or None
    """
    if log_file is not None:
        logger_factory.log_file = log_file

    if level not in LEVELS:
        print(f"Invalid level '{level}'")
        return

    if LEVELS[level] == logger_factory.level:
        return

    logger_factory.level = LEVELS[level]

    for logger in logger_factory.loggers:
        logger.setLevel(logger_factory.level)
        for handler in logger.handlers:
            handler.setLevel(logger_factory.level)
[((2052, 2075), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (2069, 2075), False, 'import logging\n'), ((2521, 2635), 'logging.Formatter', 'logging.Formatter', (["('%(asctime)s - %(levelname)s [%(filename)s:%(lineno)s' +\n ' - %(funcName)s()] - %(message)s')"], {}), "('%(asctime)s - %(levelname)s [%(filename)s:%(lineno)s' +\n ' - %(funcName)s()] - %(message)s')\n", (2538, 2635), False, 'import logging\n'), ((2734, 2757), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (2755, 2757), False, 'import logging\n'), ((3077, 3111), 'logging.FileHandler', 'logging.FileHandler', (['self.log_file'], {}), '(self.log_file)\n', (3096, 3111), False, 'import logging\n')]
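For orientation, a minimal usage sketch of the module above; the importing module path and the device key are illustrative assumptions, not part of the original file:

# Hypothetical usage of wolk/logger_factory.py.
from wolk.logger_factory import logger_factory, logging_config

logging_config(level="debug", log_file="wolk.log")  # raise verbosity, add a file sink
logger_factory.set_device_key("ABC123")  # key gets embedded in the log format

log = logger_factory.get_logger(__name__)
log.debug("connection opened")  # emitted to the console and to wolk.log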
andre-marcos-perez/data-pipeline-demo
raw.py
2647cce6e90d39798eda352608dc0f6d6ab5255a
import json
import gzip
import requests
from datetime import datetime

import pendulum
import boto3
from botocore.exceptions import ClientError

from util.log import Log
from settings.aws_settings import AWSSettings
from settings.telegram_settings import TelegramSettings


def lambda_handler(event: dict, context: dict) -> dict:
    log = Log.setup(name='logger')
    aws_settings = AWSSettings()
    telegram_settings = TelegramSettings()

    # Timestamps are taken in the Sao Paulo timezone; `date` partitions the
    # S3 keys by day and `timestamp` makes each object name unique.
    timezone = pendulum.timezone('America/Sao_Paulo')
    date = datetime.now(tz=timezone).strftime('%Y-%m-%d')
    timestamp = datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S')

    try:
        token = telegram_settings.access_token
        base_url = f"https://api.telegram.org/bot{token}"

        # API Gateway proxy integration delivers the Telegram update as a
        # JSON string in the event body.
        data = json.loads(event["body"])
        chat_id = data["message"]["chat"]["id"]

        if chat_id == telegram_settings.chat_id:
            # Message comes from the configured chat: persist the raw update
            # to the landing bucket, partitioned by date.
            client = boto3.client('s3')
            bucket = aws_settings.raw_bucket
            root_path = aws_settings.root_path

            try:
                with open(f"{root_path}/{timestamp}.json", mode='w', encoding='utf8') as fp:
                    json.dump(data, fp)

                client.upload_file(f"{root_path}/{timestamp}.json", bucket, f"{date}/{timestamp}.json")

            except ClientError as exc:
                raise exc

        else:
            # Unknown sender: reply politely instead of archiving the message.
            text = "I can't talk to strangers, sorry mate!"
            data = {"text": text, "chat_id": chat_id}
            data = gzip.compress(json.dumps(data).encode('utf-8'))
            headers = {'content-type': 'application/json', 'content-encoding': 'gzip'}

            url = base_url + "/sendMessage"
            requests.post(url=url, data=data, headers=headers)

    except Exception as exc:
        log.error(msg=exc)

    finally:
        # Always acknowledge with 200 so Telegram does not keep retrying the
        # webhook delivery, even when processing failed.
        return dict(statusCode="200")
[((341, 365), 'util.log.Log.setup', 'Log.setup', ([], {'name': '"""logger"""'}), "(name='logger')\n", (350, 365), False, 'from util.log import Log\n'), ((385, 398), 'settings.aws_settings.AWSSettings', 'AWSSettings', ([], {}), '()\n', (396, 398), False, 'from settings.aws_settings import AWSSettings\n'), ((423, 441), 'settings.telegram_settings.TelegramSettings', 'TelegramSettings', ([], {}), '()\n', (439, 441), False, 'from settings.telegram_settings import TelegramSettings\n'), ((458, 496), 'pendulum.timezone', 'pendulum.timezone', (['"""America/Sao_Paulo"""'], {}), "('America/Sao_Paulo')\n", (475, 496), False, 'import pendulum\n'), ((753, 778), 'json.loads', 'json.loads', (["event['body']"], {}), "(event['body'])\n", (763, 778), False, 'import json\n'), ((508, 533), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'timezone'}), '(tz=timezone)\n', (520, 533), False, 'from datetime import datetime\n'), ((571, 596), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'timezone'}), '(tz=timezone)\n', (583, 596), False, 'from datetime import datetime\n'), ((899, 917), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (911, 917), False, 'import boto3\n'), ((1670, 1720), 'requests.post', 'requests.post', ([], {'url': 'url', 'data': 'data', 'headers': 'headers'}), '(url=url, data=data, headers=headers)\n', (1683, 1720), False, 'import requests\n'), ((1141, 1160), 'json.dump', 'json.dump', (['data', 'fp'], {}), '(data, fp)\n', (1150, 1160), False, 'import json\n'), ((1493, 1509), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1503, 1509), False, 'import json\n')]
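A local smoke test for the handler above could look like the sketch below; the chat id and payload are made up, and it assumes the AWSSettings/TelegramSettings classes can be instantiated outside AWS (real deliveries arrive from a Telegram webhook through API Gateway):

# Hypothetical local invocation of raw.py's lambda_handler.
import json

from raw import lambda_handler

fake_update = {"message": {"chat": {"id": 42}, "text": "hello"}}
event = {"body": json.dumps(fake_update)}  # API Gateway proxy-style event

response = lambda_handler(event, context={})
print(response)  # always {'statusCode': '200'}, even when processing fails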