code
stringlengths
22
1.05M
apis
listlengths
1
3.31k
extract_api
stringlengths
75
3.25M
import pathlib import os.path cwd = pathlib.Path(__file__).parent.absolute() cwd = os.path.abspath(cwd) fp = os.path.join(cwd, 'ProjectSettings', 'InputManager.asset') def output_unity_axis(f, joy_idx, name, button='', axis_type=0, joy_axis=0): f.write(' - serializedVersion: 3\n') f.write(' m_Name: joystick {} {}\n'.format(joy_idx, name)) f.write(' descriptiveName: Reconfigurable gamepad input\n') f.write(' descriptiveNegativeName: \n') f.write(' negativeButton: \n') f.write(' positiveButton: {}\n'.format(button)) f.write(' altNegativeButton: \n') f.write(' altPositiveButton: \n') f.write(' gravity: 3\n') f.write(' dead: 0.01\n') f.write(' sensitivity: 3\n') f.write(' snap: 1\n') f.write(' invert: 0\n') f.write(' type: {}\n'.format(axis_type)) f.write(' axis: {}\n'.format(joy_axis)) f.write(' joyNum: {}\n'.format(joy_idx)) def output_axis(f, joy_idx, joy_axis): name = 'axis {}'.format(joy_axis) output_unity_axis(f, joy_idx, name, axis_type=2, joy_axis=joy_axis) def output_button(f, joy_idx, button): name = 'button {}'.format(button) output_unity_axis(f, joy_idx, name, button='joystick button {}'.format(button)) joystick_count = 9 joystick_axis_count = 10 joystick_button_count = 20 with open(fp, 'wt') as f: f.write('%YAML 1.1\n') f.write('%TAG !u! tag:unity3d.com,2011:\n') f.write('--- !u!13 &1\n') f.write('InputManager:\n') f.write(' m_ObjectHideFlags: 0\n') f.write(' serializedVersion: 2\n') f.write(' m_Axes:\n') # Default values, required by the UI f.write(' - serializedVersion: 3\n') f.write(' m_Name: Horizontal\n') f.write(' descriptiveName: UI Horizontal\n') f.write(' descriptiveNegativeName: \n') f.write(' negativeButton: left\n') f.write(' positiveButton: right\n') f.write(' altNegativeButton: a\n') f.write(' altPositiveButton: d\n') f.write(' gravity: 3\n') f.write(' dead: 0.001\n') f.write(' sensitivity: 3\n') f.write(' snap: 1\n') f.write(' invert: 0\n') f.write(' type: 0\n') f.write(' axis: 0\n') f.write(' joyNum: 0\n') f.write(' - serializedVersion: 3\n') f.write(' m_Name: Vertical\n') f.write(' descriptiveName: UI Vertical\n') f.write(' descriptiveNegativeName: \n') f.write(' negativeButton: down\n') f.write(' positiveButton: up\n') f.write(' altNegativeButton: s\n') f.write(' altPositiveButton: w\n') f.write(' gravity: 3\n') f.write(' dead: 0.001\n') f.write(' sensitivity: 3\n') f.write(' snap: 1\n') f.write(' invert: 0\n') f.write(' type: 0\n') f.write(' axis: 0\n') f.write(' joyNum: 0\n') f.write(' - serializedVersion: 3\n') f.write(' m_Name: Horizontal\n') f.write(' descriptiveName: UI Horizontal (gamepad)\n') f.write(' descriptiveNegativeName: \n') f.write(' negativeButton: \n') f.write(' positiveButton: \n') f.write(' altNegativeButton: \n') f.write(' altPositiveButton: \n') f.write(' gravity: 3\n') f.write(' dead: 0.01\n') f.write(' sensitivity: 3\n') f.write(' snap: 1\n') f.write(' invert: 0\n') f.write(' type: 2\n') f.write(' axis: 0\n') f.write(' joyNum: 0\n') f.write(' - serializedVersion: 3\n') f.write(' m_Name: Vertical\n') f.write(' descriptiveName: UI Vertical (gamepad)\n') f.write(' descriptiveNegativeName: \n') f.write(' negativeButton: \n') f.write(' positiveButton: \n') f.write(' altNegativeButton: \n') f.write(' altPositiveButton: \n') f.write(' gravity: 3\n') f.write(' dead: 0.01\n') f.write(' sensitivity: 3\n') f.write(' snap: 1\n') f.write(' invert: 1\n') f.write(' type: 2\n') f.write(' axis: 1\n') f.write(' joyNum: 0\n') f.write(' - serializedVersion: 3\n') f.write(' m_Name: Submit\n') f.write(' descriptiveName: Unity UI...\n') f.write(' descriptiveNegativeName: \n') f.write(' negativeButton: \n') f.write(' positiveButton: enter\n') f.write(' altNegativeButton: \n') f.write(' altPositiveButton: joystick button 0\n') f.write(' gravity: 3\n') f.write(' dead: 0.01\n') f.write(' sensitivity: 3\n') f.write(' snap: 1\n') f.write(' invert: 0\n') f.write(' type: 0\n') f.write(' axis: 1\n') f.write(' joyNum: 0\n') f.write(' - serializedVersion: 3\n') f.write(' m_Name: Cancel\n') f.write(' descriptiveName: Unity UI...\n') f.write(' descriptiveNegativeName: \n') f.write(' negativeButton: \n') f.write(' positiveButton: escape\n') f.write(' altNegativeButton: \n') f.write(' altPositiveButton: joystick button 1\n') f.write(' gravity: 3\n') f.write(' dead: 0.01\n') f.write(' sensitivity: 3\n') f.write(' snap: 1\n') f.write(' invert: 0\n') f.write(' type: 0\n') f.write(' axis: 1\n') f.write(' joyNum: 0\n') for joy_idx in range(joystick_count): for joy_axis in range(joystick_axis_count): output_axis(f, joy_idx, joy_axis) for joy_bt in range(joystick_button_count): output_button(f, joy_idx, joy_bt)
[ "pathlib.Path" ]
[((37, 59), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (49, 59), False, 'import pathlib\n')]
# Copyright 2019 The Johns Hopkins University Applied Physics Laboratory # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #!/usr/bin/env python import argparse import sys import itertools import numpy as np sphere_radius = 5 #Take output of cell detect step, split into two streams- one list of cells, the other the map of cells def split_cells(args): cells = np.load(args.input) cell_map = cells[1] cell_list = cells[0] with open(args.map_output, 'wb') as f: np.save(f, cell_map) # Make volume out of cell_list cell_centroid_volume = np.zeros(cell_map.shape) for cell in cell_list: axes_range = [[],[],[]] for i,axes in enumerate(cell[:3]): min_range = max(int(axes-args.sphere_size), 0) max_range = min(int(axes+args.sphere_size), cell_map.shape[i]-1) axes_range[i]=range(min_range, max_range) coords = list(itertools.product(*axes_range)) for pixel in coords: if np.linalg.norm(np.array(cell[:3])-np.array(pixel)) <= args.sphere_size: cell_centroid_volume[pixel] = 1 with open(args.list_output, 'wb') as f: np.save(f, cell_list) with open(args.centroid_volume_output, 'wb') as f: np.save(f, cell_centroid_volume) def main(): parser = argparse.ArgumentParser(description='cell results splitting script') parser.set_defaults(func=lambda _: parser.print_help()) parser.add_argument('-i', '--input', required=True, help='Input file') parser.add_argument('--map_output', required=True, help='Map Output file') parser.add_argument('--list_output', required=True, help='List Output file') parser.add_argument('--centroid_volume_output', required=True, help='Output volume with spheres') parser.add_argument('--sphere_size', required=False, help='Size of the spheres in the centroids volume', default=5, type=int) args = parser.parse_args() split_cells(args) if __name__ == '__main__': main()
[ "numpy.load", "numpy.save", "argparse.ArgumentParser", "numpy.zeros", "numpy.array", "itertools.product" ]
[((862, 881), 'numpy.load', 'np.load', (['args.input'], {}), '(args.input)\n', (869, 881), True, 'import numpy as np\n'), ((1066, 1090), 'numpy.zeros', 'np.zeros', (['cell_map.shape'], {}), '(cell_map.shape)\n', (1074, 1090), True, 'import numpy as np\n'), ((1804, 1872), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""cell results splitting script"""'}), "(description='cell results splitting script')\n", (1827, 1872), False, 'import argparse\n'), ((982, 1002), 'numpy.save', 'np.save', (['f', 'cell_map'], {}), '(f, cell_map)\n', (989, 1002), True, 'import numpy as np\n'), ((1655, 1676), 'numpy.save', 'np.save', (['f', 'cell_list'], {}), '(f, cell_list)\n', (1662, 1676), True, 'import numpy as np\n'), ((1740, 1772), 'numpy.save', 'np.save', (['f', 'cell_centroid_volume'], {}), '(f, cell_centroid_volume)\n', (1747, 1772), True, 'import numpy as np\n'), ((1406, 1436), 'itertools.product', 'itertools.product', (['*axes_range'], {}), '(*axes_range)\n', (1423, 1436), False, 'import itertools\n'), ((1497, 1515), 'numpy.array', 'np.array', (['cell[:3]'], {}), '(cell[:3])\n', (1505, 1515), True, 'import numpy as np\n'), ((1516, 1531), 'numpy.array', 'np.array', (['pixel'], {}), '(pixel)\n', (1524, 1531), True, 'import numpy as np\n')]
# Copyright (C) 2010 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import re import sys from webkitpy.tool.steps.abstractstep import AbstractStep from webkitpy.tool.steps.options import Options if sys.version_info > (3, 0): from urllib.parse import urlparse else: from urlparse import urlparse class PromptForBugOrTitle(AbstractStep): @classmethod def options(cls): return AbstractStep.options() + [ Options.non_interactive, ] def run(self, state): # No need to prompt if we alrady have the bug_id. if state.get("bug_id"): return user_response = self._tool.user.prompt("Please enter a bug number/bugzilla URL or a title for a new bug:\n") # If the user responds with a number or a valid bugzilla URL, we assume it's bug number. # Otherwise we assume it's a bug subject. try: state["bug_id"] = int(user_response) except ValueError as TypeError: parsed_url = None try: parsed_url = urlparse(user_response) except ValueError: # urlparse can throw a value error for some strings. pass if parsed_url and re.match("bugs.webkit.org", parsed_url.netloc): match = re.match("id=(?P<bug_id>\d+)", parsed_url.query) if match: state["bug_id"] = int(match.group("bug_id")) return if not self._options.non_interactive and not self._tool.user.confirm("Are you sure you want to create a new bug?", default="n"): self._exit(1) state["bug_title"] = user_response # FIXME: This is kind of a lame description. state["bug_description"] = user_response
[ "webkitpy.tool.steps.abstractstep.AbstractStep.options", "urlparse.urlparse", "re.match" ]
[((1864, 1886), 'webkitpy.tool.steps.abstractstep.AbstractStep.options', 'AbstractStep.options', ([], {}), '()\n', (1884, 1886), False, 'from webkitpy.tool.steps.abstractstep import AbstractStep\n'), ((2516, 2539), 'urlparse.urlparse', 'urlparse', (['user_response'], {}), '(user_response)\n', (2524, 2539), False, 'from urlparse import urlparse\n'), ((2692, 2738), 're.match', 're.match', (['"""bugs.webkit.org"""', 'parsed_url.netloc'], {}), "('bugs.webkit.org', parsed_url.netloc)\n", (2700, 2738), False, 'import re\n'), ((2768, 2817), 're.match', 're.match', (['"""id=(?P<bug_id>\\\\d+)"""', 'parsed_url.query'], {}), "('id=(?P<bug_id>\\\\d+)', parsed_url.query)\n", (2776, 2817), False, 'import re\n')]
from PIL import Image from pylab import * img = Image.open('images/profile.jpg').convert('L') print(array(img)[500]) imgArray = array(img) figure() hist(imgArray.flatten(), 300) show() # img.show()
[ "PIL.Image.open" ]
[((52, 84), 'PIL.Image.open', 'Image.open', (['"""images/profile.jpg"""'], {}), "('images/profile.jpg')\n", (62, 84), False, 'from PIL import Image\n')]
# -*- coding: utf-8 -*- ## # @file data_context.py # @brief # @author wondereamer # @version 0.1 # @date 2016-11-27 import datetime from quantity.digger.engine.series import SeriesBase, NumberSeries, DateTimeSeries from quantity.digger.technicals.base import TechnicalBase from quantity.digger.util import elogger as logger from quantity.digger.datastruct import ( Bar ) class DataContext(object): """ A DataContext expose data should be visited by multiple strategie. which including bars of specific PContract, technicals and series of strategie. """ def __init__(self, Helper): data = Helper.data self.open = NumberSeries(data.open.values, 'open') self.close = NumberSeries(data.close.values, 'close') self.high = NumberSeries(data.high.values, 'high') self.low = NumberSeries(data.low.values, 'low') self.volume = NumberSeries(data.volume.values, 'volume') self.datetime = DateTimeSeries(data.index, 'datetime') self.ith_comb = -1 # 第i个组合 self.ith_strategy = -1 # 第j个策略 self.bar = Bar(None, None, None, None, None, None) self.new_row = False self.next_datetime = datetime.datetime(2100, 1, 1) self.technicals = [[{}]] self._curbar = -1 self._Helper = Helper self._series = [[{}]] self._variables = [[{}]] self._all_variables = [[{}]] self._size = len(data.close) @property def raw_data(self): return self._Helper.data @property def curbar(self): return self._curbar + 1 @property def pcontract(self): return self._Helper.pcontract @property def contract(self): return self._Helper.pcontract.contract def __getattr__(self, name): return self.get_item(name) def update_system_vars(self): # self.data = np.append(data, tracker.container_day) self._curbar = self.last_curbar self.open.update_curbar(self._curbar) self.close.update_curbar(self._curbar) self.high.update_curbar(self._curbar) self.low.update_curbar(self._curbar) self.volume.update_curbar(self._curbar) self.datetime.update_curbar(self._curbar) self.bar = Bar(self.datetime[0], self.open[0], self.close[0], self.high[0], self.low[0], self.volume[0]) self.new_row = False def update_user_vars(self): # Update series defined by user if exist. try: series = self._series[self.ith_comb][self.ith_strategy].values() except IndexError: pass else: for s in series: s.update_curbar(self._curbar) s.duplicate_last_element() # Update technicals if exist. try: technicals = self.technicals[self.ith_comb][self.ith_strategy].values() except IndexError: pass else: for tec in technicals: if tec.is_multiple: for s in tec.series.values(): s.update_curbar(self._curbar) else: for s in tec.series: s.update_curbar(self._curbar) def rolling_forward(self): """ 滚动读取下一步的数据。 """ self.new_row, self.last_curbar = self._Helper.rolling_forward() if not self.new_row: self.last_curbar -= 1 return False, None self.next_datetime = self._Helper.data.index[self.last_curbar] if self.datetime[0] >= self.next_datetime and self.curbar != 0: logger.error('合约[%s] 数据时间逆序或冗余' % self.pcontract) raise return True, self.new_row def __len__(self): return len(self._Helper) def get_item(self, name): """ 获取用户在策略on_init函数中初始化的变量 """ return self._all_variables[self.ith_comb][self.ith_strategy][name] def add_item(self, name, value): """ 添加用户初始化的变量。 """ # @TODO ... if self.ith_comb < len(self._all_variables): if self.ith_strategy < len(self._all_variables[self.ith_comb]): self._all_variables[self.ith_comb][self.ith_strategy][name] = value else: self._all_variables[self.ith_comb].append({name: value}) else: self._all_variables.append([{name: value}]) if isinstance(value, SeriesBase): self.add_series(name, value) elif isinstance(value, TechnicalBase): self.add_indicator(name, value) else: self.add_variable(name, value) def add_series(self, attr, s): """ 添加on_init中初始化的序列变量 Args: attr (str): 属性名 s (Series): 序列变量 """ s.reset_data([], self._size) if self.ith_comb < len(self._series): if self.ith_strategy < len(self._series[self.ith_comb]): self._series[self.ith_comb][self.ith_strategy][attr] = s else: self._series[self.ith_comb].append({attr: s}) else: self._series.append([{attr: s}]) def add_indicator(self, attr, indic): if self.ith_comb < len(self.technicals): if self.ith_strategy < len(self.technicals[self.ith_comb]): self.technicals[self.ith_comb][self.ith_strategy][attr] = indic else: self.technicals[self.ith_comb].append({attr: indic}) else: self.technicals.append([{attr: indic}]) def add_variable(self, attr, var): if self.ith_comb < len(self._variables): if self.ith_strategy < len(self._variables[self.ith_comb]): self._variables[self.ith_comb][self.ith_strategy][attr] = var else: self._variables[self.ith_comb].append({attr: var}) else: self._variables.append([{attr: var}]) class DataContextAttributeHelper(object): """""" def __init__(self, data): self.data = data def __setattr__(self, name, value): if name == 'data': super(DataContextAttributeHelper, self).__setattr__(name, value) return data = self.data if name in data._all_variables[data.ith_comb][data.ith_strategy]: data.add_item(name, value) def __getattr__(self, name): return getattr(self.data, name)
[ "quantity.digger.datastruct.Bar", "datetime.datetime", "quantity.digger.engine.series.DateTimeSeries", "quantity.digger.engine.series.NumberSeries", "quantity.digger.util.elogger.error" ]
[((659, 697), 'quantity.digger.engine.series.NumberSeries', 'NumberSeries', (['data.open.values', '"""open"""'], {}), "(data.open.values, 'open')\n", (671, 697), False, 'from quantity.digger.engine.series import SeriesBase, NumberSeries, DateTimeSeries\n'), ((719, 759), 'quantity.digger.engine.series.NumberSeries', 'NumberSeries', (['data.close.values', '"""close"""'], {}), "(data.close.values, 'close')\n", (731, 759), False, 'from quantity.digger.engine.series import SeriesBase, NumberSeries, DateTimeSeries\n'), ((780, 818), 'quantity.digger.engine.series.NumberSeries', 'NumberSeries', (['data.high.values', '"""high"""'], {}), "(data.high.values, 'high')\n", (792, 818), False, 'from quantity.digger.engine.series import SeriesBase, NumberSeries, DateTimeSeries\n'), ((838, 874), 'quantity.digger.engine.series.NumberSeries', 'NumberSeries', (['data.low.values', '"""low"""'], {}), "(data.low.values, 'low')\n", (850, 874), False, 'from quantity.digger.engine.series import SeriesBase, NumberSeries, DateTimeSeries\n'), ((897, 939), 'quantity.digger.engine.series.NumberSeries', 'NumberSeries', (['data.volume.values', '"""volume"""'], {}), "(data.volume.values, 'volume')\n", (909, 939), False, 'from quantity.digger.engine.series import SeriesBase, NumberSeries, DateTimeSeries\n'), ((964, 1002), 'quantity.digger.engine.series.DateTimeSeries', 'DateTimeSeries', (['data.index', '"""datetime"""'], {}), "(data.index, 'datetime')\n", (978, 1002), False, 'from quantity.digger.engine.series import SeriesBase, NumberSeries, DateTimeSeries\n'), ((1100, 1139), 'quantity.digger.datastruct.Bar', 'Bar', (['None', 'None', 'None', 'None', 'None', 'None'], {}), '(None, None, None, None, None, None)\n', (1103, 1139), False, 'from quantity.digger.datastruct import Bar\n'), ((1198, 1227), 'datetime.datetime', 'datetime.datetime', (['(2100)', '(1)', '(1)'], {}), '(2100, 1, 1)\n', (1215, 1227), False, 'import datetime\n'), ((2266, 2364), 'quantity.digger.datastruct.Bar', 'Bar', (['self.datetime[0]', 'self.open[0]', 'self.close[0]', 'self.high[0]', 'self.low[0]', 'self.volume[0]'], {}), '(self.datetime[0], self.open[0], self.close[0], self.high[0], self.low[0\n ], self.volume[0])\n', (2269, 2364), False, 'from quantity.digger.datastruct import Bar\n'), ((3627, 3676), 'quantity.digger.util.elogger.error', 'logger.error', (["('合约[%s] 数据时间逆序或冗余' % self.pcontract)"], {}), "('合约[%s] 数据时间逆序或冗余' % self.pcontract)\n", (3639, 3676), True, 'from quantity.digger.util import elogger as logger\n')]
#! /usr/bin/env python ''' Solvers and example test cases for Day 5 of the Advent of Code 2021. Problem description: <https://adventofcode.com/2021/day/5> ''' from collections import Counter from dataclasses import dataclass from typing import Iterable, List, Tuple import unittest def part1(lines: Iterable[str]) -> int: ''' Solver for Day 5, part 1 ''' vents = parse_input(lines) vent_points: Counter = Counter() for (start, end) in vents: if start.x == end.x or start.y == end.y: vent_points.update(points_between(start, end)) return sum(1 for (_, count) in vent_points.most_common() if count >= 2) def part2(lines: Iterable[str]) -> int: ''' Solver for Day 5, part 2 ''' vents = parse_input(lines) vent_points: Counter = Counter() for (start, end) in vents: vent_points.update(points_between(start, end)) return sum(1 for (_, count) in vent_points.most_common() if count >= 2) @dataclass(frozen=True) class Point: ''' Represents a single (x, y) coordinate. ''' x: int y: int def points_between(start: Point, end: Point) -> Iterable[Point]: ''' Iterates over the integral points between start and end (inclusive). Line must be either vertical, horizontal, or 45 degrees. ''' x_step = sign(end.x - start.x) y_step = sign(end.y - start.y) x = start.x y = start.y while x != end.x or y != end.y: yield Point(x, y) x += x_step y += y_step yield Point(x, y) def sign(value: int) -> int: ''' Returns the sign of value, i.e. 1 if value is positive, -1 if value is negative, or 0 if value is zero. ''' if value < 0: return -1 if value == 0: return 0 return 1 def parse_input(lines: Iterable[str]) -> List[Tuple[Point, Point]]: ''' Parses the problem input and returns a list of (Point, Point) tuples describing the vents. ''' vents = [] for line in lines: start, _, end = line.split() p1_x, p1_y = start.split(',') p2_x, p2_y = end.split(',') vents.append((Point(int(p1_x), int(p1_y)), Point(int(p2_x), int(p2_y)))) return vents class TestDay05(unittest.TestCase): ''' Example test cases for Day 5, as specified in the problem description ''' # pylint: disable=missing-function-docstring def setUp(self): self.data = [ '0,9 -> 5,9', '8,0 -> 0,8', '9,4 -> 3,4', '2,2 -> 2,1', '7,0 -> 7,4', '6,4 -> 2,0', '0,9 -> 2,9', '3,4 -> 1,4', '0,0 -> 8,8', '5,5 -> 8,2'] def test_part1_example(self): self.assertEqual(part1(self.data), 5) def test_part2_example(self): self.assertEqual(part2(self.data), 12)
[ "collections.Counter", "dataclasses.dataclass" ]
[((974, 996), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (983, 996), False, 'from dataclasses import dataclass\n'), ((429, 438), 'collections.Counter', 'Counter', ([], {}), '()\n', (436, 438), False, 'from collections import Counter\n'), ((799, 808), 'collections.Counter', 'Counter', ([], {}), '()\n', (806, 808), False, 'from collections import Counter\n')]
__copyright__ = """This code is licensed under the 3-clause BSD license. Copyright ETH Zurich, Laboratory of Physical Chemistry, Reiher Group. See LICENSE.txt for details. """ import pytest import scine_utilities as scine import numpy as np import os class SigmaVectorEvaluatorPython(scine.SigmaVectorEvaluator): def __init__(self, matrix): scine.SigmaVectorEvaluator.__init__(self) self.matrix = matrix def evaluate(self, guess_vectors): return np.dot(self.matrix, guess_vectors) def collapsed(self, newSubspaceDimension): return def swap(self, i, j): return def create_matrix(): # create a selfadjoint matrix matrix = np.random.rand(100,100) matrix = 0.5*(matrix + np.transpose(matrix)) matrix[np.diag_indices_from(matrix)] += 1 return matrix def initialize_diagonalizer(matrix): # Create sigma vector evaluator and preconditioner sve = scine.IndirectSigmaVectorEvaluator(matrix) prec = scine.IndirectPreconditionerEvaluator(matrix[np.diag_indices_from(matrix)]) # Create and fill Non Orthogonal Davidson diag = scine.NonOrthogonalDavidson(5,100) diag.sigma_vector_evaluator = sve diag.set_preconditioner(prec) return diag def test_SigmaVectorEvaluator(): ref = create_matrix() sve = scine.IndirectSigmaVectorEvaluator(ref) result = sve.evaluate(2.0 * np.identity(100)) assert np.all(2.0 * ref[:,:] == result[:,:]) def test_Preconditioner(): ''' Test that if you try to precondition a vector of ones, you just get -1.0 / (difference btw the diagonal and the current eigenvalue) ''' ref = create_matrix() diag = ref[np.diag_indices_from(ref)] ones_vector = np.ones(100) arbitrary_eigenvalue = 3.5 prec = scine.IndirectPreconditionerEvaluator(diag) result = prec.evaluate(ones_vector, arbitrary_eigenvalue) assert np.all(result[:] == -1.0 / (diag - arbitrary_eigenvalue)) def test_InitializeDiagonalizer(): diag = initialize_diagonalizer(create_matrix()) def test_DiagonalizeWithNonOrthogonalDavidson(): ref = create_matrix() diag = initialize_diagonalizer(ref) result = diag.solve(scine.core.Log.silent()) # Get reference numbers w, v = np.linalg.eig(ref) assert np.all(result.eigenvalues[:] - sorted(w)[:5] <= 1.0e-5) def test_DiagonalizeWithOrthogonalDavidson(): ref = create_matrix() # Create sigma vector evaluator and preconditioner sve = scine.IndirectSigmaVectorEvaluator(ref) prec = scine.IndirectPreconditionerEvaluator(ref[np.diag_indices_from(ref)]) # Create and fill Non Orthogonal Davidson diag = scine.OrthogonalDavidson(5,100) diag.sigma_vector_evaluator = sve diag.set_preconditioner(prec) result = diag.solve(scine.core.Log.silent()) # Get reference numbers w, v = np.linalg.eig(ref) assert np.all(result.eigenvalues[:] - sorted(w)[:5] <= 1.0e-5) def test_DiagonalizeWithPythonSigmaVectorEvaluator(): ref = create_matrix() diag = initialize_diagonalizer(ref) # Set python specific sigma vector evaluator # Note: first initialize, then assign to prevent auto casting. # If I write diag.sigma_vector_evaluator = SigmaVectorEvaluatorPython(ref) # then it it tried to look for the method SigmaVectorEvaluator::evaluate() # instead of SigmaVectorEvaluatorPython::evaluate() sve = SigmaVectorEvaluatorPython(ref) diag.sigma_vector_evaluator = sve result = diag.solve(scine.core.Log.silent()) # Get reference numbers w, v = np.linalg.eig(ref) assert np.all(result.eigenvalues[:] - sorted(w)[:5] <= 1.0e-5)
[ "scine_utilities.NonOrthogonalDavidson", "scine_utilities.OrthogonalDavidson", "numpy.diag_indices_from", "scine_utilities.core.Log.silent", "numpy.transpose", "numpy.ones", "numpy.linalg.eig", "scine_utilities.SigmaVectorEvaluator.__init__", "numpy.identity", "scine_utilities.IndirectSigmaVectorEvaluator", "numpy.random.rand", "numpy.dot", "scine_utilities.IndirectPreconditionerEvaluator", "numpy.all" ]
[((687, 711), 'numpy.random.rand', 'np.random.rand', (['(100)', '(100)'], {}), '(100, 100)\n', (701, 711), True, 'import numpy as np\n'), ((927, 969), 'scine_utilities.IndirectSigmaVectorEvaluator', 'scine.IndirectSigmaVectorEvaluator', (['matrix'], {}), '(matrix)\n', (961, 969), True, 'import scine_utilities as scine\n'), ((1115, 1150), 'scine_utilities.NonOrthogonalDavidson', 'scine.NonOrthogonalDavidson', (['(5)', '(100)'], {}), '(5, 100)\n', (1142, 1150), True, 'import scine_utilities as scine\n'), ((1309, 1348), 'scine_utilities.IndirectSigmaVectorEvaluator', 'scine.IndirectSigmaVectorEvaluator', (['ref'], {}), '(ref)\n', (1343, 1348), True, 'import scine_utilities as scine\n'), ((1410, 1449), 'numpy.all', 'np.all', (['(2.0 * ref[:, :] == result[:, :])'], {}), '(2.0 * ref[:, :] == result[:, :])\n', (1416, 1449), True, 'import numpy as np\n'), ((1718, 1730), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (1725, 1730), True, 'import numpy as np\n'), ((1773, 1816), 'scine_utilities.IndirectPreconditionerEvaluator', 'scine.IndirectPreconditionerEvaluator', (['diag'], {}), '(diag)\n', (1810, 1816), True, 'import scine_utilities as scine\n'), ((1890, 1947), 'numpy.all', 'np.all', (['(result[:] == -1.0 / (diag - arbitrary_eigenvalue))'], {}), '(result[:] == -1.0 / (diag - arbitrary_eigenvalue))\n', (1896, 1947), True, 'import numpy as np\n'), ((2240, 2258), 'numpy.linalg.eig', 'np.linalg.eig', (['ref'], {}), '(ref)\n', (2253, 2258), True, 'import numpy as np\n'), ((2464, 2503), 'scine_utilities.IndirectSigmaVectorEvaluator', 'scine.IndirectSigmaVectorEvaluator', (['ref'], {}), '(ref)\n', (2498, 2503), True, 'import scine_utilities as scine\n'), ((2643, 2675), 'scine_utilities.OrthogonalDavidson', 'scine.OrthogonalDavidson', (['(5)', '(100)'], {}), '(5, 100)\n', (2667, 2675), True, 'import scine_utilities as scine\n'), ((2835, 2853), 'numpy.linalg.eig', 'np.linalg.eig', (['ref'], {}), '(ref)\n', (2848, 2853), True, 'import numpy as np\n'), ((3558, 3576), 'numpy.linalg.eig', 'np.linalg.eig', (['ref'], {}), '(ref)\n', (3571, 3576), True, 'import numpy as np\n'), ((355, 396), 'scine_utilities.SigmaVectorEvaluator.__init__', 'scine.SigmaVectorEvaluator.__init__', (['self'], {}), '(self)\n', (390, 396), True, 'import scine_utilities as scine\n'), ((480, 514), 'numpy.dot', 'np.dot', (['self.matrix', 'guess_vectors'], {}), '(self.matrix, guess_vectors)\n', (486, 514), True, 'import numpy as np\n'), ((771, 799), 'numpy.diag_indices_from', 'np.diag_indices_from', (['matrix'], {}), '(matrix)\n', (791, 799), True, 'import numpy as np\n'), ((1673, 1698), 'numpy.diag_indices_from', 'np.diag_indices_from', (['ref'], {}), '(ref)\n', (1693, 1698), True, 'import numpy as np\n'), ((2176, 2199), 'scine_utilities.core.Log.silent', 'scine.core.Log.silent', ([], {}), '()\n', (2197, 2199), True, 'import scine_utilities as scine\n'), ((2771, 2794), 'scine_utilities.core.Log.silent', 'scine.core.Log.silent', ([], {}), '()\n', (2792, 2794), True, 'import scine_utilities as scine\n'), ((3494, 3517), 'scine_utilities.core.Log.silent', 'scine.core.Log.silent', ([], {}), '()\n', (3515, 3517), True, 'import scine_utilities as scine\n'), ((738, 758), 'numpy.transpose', 'np.transpose', (['matrix'], {}), '(matrix)\n', (750, 758), True, 'import numpy as np\n'), ((1026, 1054), 'numpy.diag_indices_from', 'np.diag_indices_from', (['matrix'], {}), '(matrix)\n', (1046, 1054), True, 'import numpy as np\n'), ((1381, 1397), 'numpy.identity', 'np.identity', (['(100)'], {}), '(100)\n', (1392, 1397), True, 'import numpy as np\n'), ((2557, 2582), 'numpy.diag_indices_from', 'np.diag_indices_from', (['ref'], {}), '(ref)\n', (2577, 2582), True, 'import numpy as np\n')]
import os from keras import layers from keras.layers import Input, merge from keras.layers.convolutional import (AveragePooling2D, Conv2D, MaxPooling2D, ZeroPadding2D) from keras.layers.core import Activation, Dense, Flatten from keras.layers.normalization import BatchNormalization from keras.models import Model,Sequential from keras.utils import plot_model os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' def identity_block(x, nb_filter, kernel_size=3): k1, k2, k3 = nb_filter shortcut = x out = Conv2D(k1, kernel_size=(1,1), strides=(1,1),padding="valid",activation="relu")(x) out = BatchNormalization(axis=3)(out) out = Conv2D(k2, kernel_size=(3,3), strides=(1,1), padding='same',activation="relu")(out) out = BatchNormalization(axis=3)(out) out = Conv2D(k3, kernel_size=(1,1), strides=(1,1),padding="valid")(out) out = BatchNormalization(axis=3)(out) # out = merge([out, shortcut], mode='sum') out= layers.add([out,shortcut]) out = Activation('relu')(out) return out def conv_block(x, nb_filter, kernel_size=3): k1, k2, k3 = nb_filter shortcut = x out = Conv2D(k1, kernel_size=(1,1), strides=(2,2), padding="valid",activation="relu")(x) out = BatchNormalization(axis=3)(out) out = out = Conv2D(k2, kernel_size=(kernel_size,kernel_size), strides=(1,1), padding="same",activation="relu")(out) out = BatchNormalization()(out) out = Conv2D(k3, kernel_size=(1,1), strides=(1,1), padding="valid")(out) out = BatchNormalization(axis=3)(out) shortcut = Conv2D(k3, kernel_size=(1,1), strides=(2,2), padding="valid")(shortcut) shortcut = BatchNormalization(axis=3)(shortcut) # out = merge([out, shortcut], mode='sum') out = layers.add([out, shortcut]) out = Activation('relu')(out) return out def buildNet(): inp = Input(shape=(224, 224, 3)) out = ZeroPadding2D((3, 3))(inp) out = Conv2D(64, kernel_size=(7, 7), strides=(2, 2),activation="relu")(out) out = BatchNormalization()(out) out = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding="same")(out) out = conv_block(out, [64, 64, 256]) out = identity_block(out, [64, 64, 256]) out = identity_block(out, [64, 64, 256]) out = conv_block(out, [128, 128, 512]) out = identity_block(out, [128, 128, 512]) out = identity_block(out, [128, 128, 512]) out = identity_block(out, [128, 128, 512]) out = conv_block(out, [256, 256, 1024]) out = identity_block(out, [256, 256, 1024]) out = identity_block(out, [256, 256, 1024]) out = identity_block(out, [256, 256, 1024]) out = identity_block(out, [256, 256, 1024]) out = identity_block(out, [256, 256, 1024]) out = conv_block(out, [512, 512, 2048]) out = identity_block(out, [512, 512, 2048]) out = identity_block(out, [512, 512, 2048]) out = AveragePooling2D((4, 4))(out) out = Flatten()(out) # 展平 out = Dense(1000, activation='softmax')(out) model = Model(inputs=inp, outputs=out) return model if __name__ == '__main__': # resNet18 = ResNet(block_num=[2,2,2,2]) # resNet34 = ResNet(block_num=[3,4,6,3]) # resNet50 = ResNet(block_num=[3,4,6,3]) # resNet101 = ResNet(block_num=[3,4,23,3]) # resNet152 = ResNet(block_num=[3,8,36,3]) net = buildNet() net.compile(optimizer='adam',loss='categorical_crossentropy', metrics=['accuracy']) plot_model(net, to_file='./models/resnet.png') net.summary()
[ "keras.layers.core.Dense", "keras.layers.convolutional.AveragePooling2D", "keras.layers.add", "keras.layers.core.Activation", "keras.layers.convolutional.MaxPooling2D", "keras.models.Model", "keras.utils.plot_model", "keras.layers.convolutional.Conv2D", "keras.layers.core.Flatten", "keras.layers.Input", "keras.layers.convolutional.ZeroPadding2D", "keras.layers.normalization.BatchNormalization" ]
[((987, 1014), 'keras.layers.add', 'layers.add', (['[out, shortcut]'], {}), '([out, shortcut])\n', (997, 1014), False, 'from keras import layers\n'), ((1772, 1799), 'keras.layers.add', 'layers.add', (['[out, shortcut]'], {}), '([out, shortcut])\n', (1782, 1799), False, 'from keras import layers\n'), ((1877, 1903), 'keras.layers.Input', 'Input', ([], {'shape': '(224, 224, 3)'}), '(shape=(224, 224, 3))\n', (1882, 1903), False, 'from keras.layers import Input, merge\n'), ((3013, 3043), 'keras.models.Model', 'Model', ([], {'inputs': 'inp', 'outputs': 'out'}), '(inputs=inp, outputs=out)\n', (3018, 3043), False, 'from keras.models import Model, Sequential\n'), ((3437, 3483), 'keras.utils.plot_model', 'plot_model', (['net'], {'to_file': '"""./models/resnet.png"""'}), "(net, to_file='./models/resnet.png')\n", (3447, 3483), False, 'from keras.utils import plot_model\n'), ((550, 637), 'keras.layers.convolutional.Conv2D', 'Conv2D', (['k1'], {'kernel_size': '(1, 1)', 'strides': '(1, 1)', 'padding': '"""valid"""', 'activation': '"""relu"""'}), "(k1, kernel_size=(1, 1), strides=(1, 1), padding='valid', activation=\n 'relu')\n", (556, 637), False, 'from keras.layers.convolutional import AveragePooling2D, Conv2D, MaxPooling2D, ZeroPadding2D\n'), ((642, 668), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)'}), '(axis=3)\n', (660, 668), False, 'from keras.layers.normalization import BatchNormalization\n'), ((685, 771), 'keras.layers.convolutional.Conv2D', 'Conv2D', (['k2'], {'kernel_size': '(3, 3)', 'strides': '(1, 1)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(k2, kernel_size=(3, 3), strides=(1, 1), padding='same', activation=\n 'relu')\n", (691, 771), False, 'from keras.layers.convolutional import AveragePooling2D, Conv2D, MaxPooling2D, ZeroPadding2D\n'), ((779, 805), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)'}), '(axis=3)\n', (797, 805), False, 'from keras.layers.normalization import BatchNormalization\n'), ((822, 885), 'keras.layers.convolutional.Conv2D', 'Conv2D', (['k3'], {'kernel_size': '(1, 1)', 'strides': '(1, 1)', 'padding': '"""valid"""'}), "(k3, kernel_size=(1, 1), strides=(1, 1), padding='valid')\n", (828, 885), False, 'from keras.layers.convolutional import AveragePooling2D, Conv2D, MaxPooling2D, ZeroPadding2D\n'), ((898, 924), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)'}), '(axis=3)\n', (916, 924), False, 'from keras.layers.normalization import BatchNormalization\n'), ((1026, 1044), 'keras.layers.core.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (1036, 1044), False, 'from keras.layers.core import Activation, Dense, Flatten\n'), ((1172, 1259), 'keras.layers.convolutional.Conv2D', 'Conv2D', (['k1'], {'kernel_size': '(1, 1)', 'strides': '(2, 2)', 'padding': '"""valid"""', 'activation': '"""relu"""'}), "(k1, kernel_size=(1, 1), strides=(2, 2), padding='valid', activation=\n 'relu')\n", (1178, 1259), False, 'from keras.layers.convolutional import AveragePooling2D, Conv2D, MaxPooling2D, ZeroPadding2D\n'), ((1265, 1291), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)'}), '(axis=3)\n', (1283, 1291), False, 'from keras.layers.normalization import BatchNormalization\n'), ((1314, 1420), 'keras.layers.convolutional.Conv2D', 'Conv2D', (['k2'], {'kernel_size': '(kernel_size, kernel_size)', 'strides': '(1, 1)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(k2, kernel_size=(kernel_size, kernel_size), strides=(1, 1), padding=\n 'same', activation='relu')\n", (1320, 1420), False, 'from keras.layers.convolutional import AveragePooling2D, Conv2D, MaxPooling2D, ZeroPadding2D\n'), ((1428, 1448), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (1446, 1448), False, 'from keras.layers.normalization import BatchNormalization\n'), ((1465, 1528), 'keras.layers.convolutional.Conv2D', 'Conv2D', (['k3'], {'kernel_size': '(1, 1)', 'strides': '(1, 1)', 'padding': '"""valid"""'}), "(k3, kernel_size=(1, 1), strides=(1, 1), padding='valid')\n", (1471, 1528), False, 'from keras.layers.convolutional import AveragePooling2D, Conv2D, MaxPooling2D, ZeroPadding2D\n'), ((1542, 1568), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)'}), '(axis=3)\n', (1560, 1568), False, 'from keras.layers.normalization import BatchNormalization\n'), ((1590, 1653), 'keras.layers.convolutional.Conv2D', 'Conv2D', (['k3'], {'kernel_size': '(1, 1)', 'strides': '(2, 2)', 'padding': '"""valid"""'}), "(k3, kernel_size=(1, 1), strides=(2, 2), padding='valid')\n", (1596, 1653), False, 'from keras.layers.convolutional import AveragePooling2D, Conv2D, MaxPooling2D, ZeroPadding2D\n'), ((1677, 1703), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)'}), '(axis=3)\n', (1695, 1703), False, 'from keras.layers.normalization import BatchNormalization\n'), ((1810, 1828), 'keras.layers.core.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (1820, 1828), False, 'from keras.layers.core import Activation, Dense, Flatten\n'), ((1915, 1936), 'keras.layers.convolutional.ZeroPadding2D', 'ZeroPadding2D', (['(3, 3)'], {}), '((3, 3))\n', (1928, 1936), False, 'from keras.layers.convolutional import AveragePooling2D, Conv2D, MaxPooling2D, ZeroPadding2D\n'), ((1952, 2017), 'keras.layers.convolutional.Conv2D', 'Conv2D', (['(64)'], {'kernel_size': '(7, 7)', 'strides': '(2, 2)', 'activation': '"""relu"""'}), "(64, kernel_size=(7, 7), strides=(2, 2), activation='relu')\n", (1958, 2017), False, 'from keras.layers.convolutional import AveragePooling2D, Conv2D, MaxPooling2D, ZeroPadding2D\n'), ((2032, 2052), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (2050, 2052), False, 'from keras.layers.normalization import BatchNormalization\n'), ((2068, 2130), 'keras.layers.convolutional.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(3, 3)', 'strides': '(2, 2)', 'padding': '"""same"""'}), "(pool_size=(3, 3), strides=(2, 2), padding='same')\n", (2080, 2130), False, 'from keras.layers.convolutional import AveragePooling2D, Conv2D, MaxPooling2D, ZeroPadding2D\n'), ((2890, 2914), 'keras.layers.convolutional.AveragePooling2D', 'AveragePooling2D', (['(4, 4)'], {}), '((4, 4))\n', (2906, 2914), False, 'from keras.layers.convolutional import AveragePooling2D, Conv2D, MaxPooling2D, ZeroPadding2D\n'), ((2930, 2939), 'keras.layers.core.Flatten', 'Flatten', ([], {}), '()\n', (2937, 2939), False, 'from keras.layers.core import Activation, Dense, Flatten\n'), ((2961, 2994), 'keras.layers.core.Dense', 'Dense', (['(1000)'], {'activation': '"""softmax"""'}), "(1000, activation='softmax')\n", (2966, 2994), False, 'from keras.layers.core import Activation, Dense, Flatten\n')]
""" <NAME> 2014-2016 Python Progress Indicator Utility Author: <NAME> <<EMAIL>> License: BSD 3 clause Contributors: https://github.com/rasbt/pyprind/graphs/contributors Code Repository: https://github.com/rasbt/pyprind PyPI: https://pypi.python.org/pypi/PyPrind """ import sys import time import pyprind n = 100 sleeptime = 0.02 def test_basic_percent(): perc = pyprind.ProgPercent(n) for i in range(n): time.sleep(sleeptime) perc.update() def test_stdout(): perc = pyprind.ProgPercent(n, stream=sys.stdout) for i in range(n): time.sleep(sleeptime) perc.update() def test_generator(): for i in pyprind.prog_percent(range(n), stream=sys.stdout): time.sleep(sleeptime) def test_monitoring(): perc = pyprind.ProgPercent(n, monitor=True) for i in range(n): time.sleep(sleeptime) perc.update() print(perc) def test_item_tracking(): items = ['file_%s.csv' % i for i in range(0, n)] perc = pyprind.ProgPercent(len(items)) for i in items: time.sleep(sleeptime) perc.update(item_id=i) def test_force_flush(): perc = pyprind.ProgPercent(n) for i in range(n): time.sleep(sleeptime) perc.update(force_flush=True) def test_update_interval(): perc = pyprind.ProgPercent(n, update_interval=4) for i in range(n): time.sleep(sleeptime) perc.update() if __name__ == "__main__": print('\n%s' % (80 * '=')) print('%s\n' % (80 * '=')) print('Testing Basic Percentage Indicator\n') test_basic_percent() print('\n%s' % (80 * '=')) print('%s\n' % (80 * '=')) print('Testing stdout Stream\n') test_stdout() print('\n%s' % (80 * '=')) print('%s\n' % (80 * '=')) print('Testing Percentage Indicator Generator\n') test_generator() print('\n%s' % (80 * '=')) print('%s\n' % (80 * '=')) print('Testing monitor function\n') test_monitoring() print('\n%s' % (80 * '=')) print('%s\n' % (80 * '=')) print('Testing Item Tracking\n') test_item_tracking() print('\n%s' % (80 * '=')) print('%s\n' % (80 * '=')) print('Testing Force Flush\n') test_force_flush() print('\n%s' % (80 * '=')) print('%s\n' % (80 * '=')) print('Testing Update Interval\n') test_update_interval()
[ "pyprind.ProgPercent", "time.sleep" ]
[((372, 394), 'pyprind.ProgPercent', 'pyprind.ProgPercent', (['n'], {}), '(n)\n', (391, 394), False, 'import pyprind\n'), ((502, 543), 'pyprind.ProgPercent', 'pyprind.ProgPercent', (['n'], {'stream': 'sys.stdout'}), '(n, stream=sys.stdout)\n', (521, 543), False, 'import pyprind\n'), ((773, 809), 'pyprind.ProgPercent', 'pyprind.ProgPercent', (['n'], {'monitor': '(True)'}), '(n, monitor=True)\n', (792, 809), False, 'import pyprind\n'), ((1143, 1165), 'pyprind.ProgPercent', 'pyprind.ProgPercent', (['n'], {}), '(n)\n', (1162, 1165), False, 'import pyprind\n'), ((1298, 1339), 'pyprind.ProgPercent', 'pyprind.ProgPercent', (['n'], {'update_interval': '(4)'}), '(n, update_interval=4)\n', (1317, 1339), False, 'import pyprind\n'), ((426, 447), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (436, 447), False, 'import time\n'), ((575, 596), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (585, 596), False, 'import time\n'), ((715, 736), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (725, 736), False, 'import time\n'), ((841, 862), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (851, 862), False, 'import time\n'), ((1053, 1074), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (1063, 1074), False, 'import time\n'), ((1197, 1218), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (1207, 1218), False, 'import time\n'), ((1371, 1392), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (1381, 1392), False, 'import time\n')]
# coding: utf-8 # # 使用预训练的VGG模型Fine-tune CNN # In[1]: # Import packs import numpy as np import os import scipy.io from scipy.misc import imread, imresize import matplotlib.pyplot as plt import skimage.io import skimage.transform import tensorflow as tf get_ipython().magic(u'matplotlib inline') cwd = os.getcwd() print ("Package loaded") print ("Current folder is %s" % (cwd) ) # In[2]: # 下载预先训练好的vgg-19模型,为Matlab的.mat格式,之后会用scipy读取 # (注意此版本模型与此处http://www.vlfeat.org/matconvnet/pretrained/最新版本不同) import os.path if not os.path.isfile('./data/imagenet-vgg-verydeep-19.mat'): get_ipython().system(u'wget -O data/imagenet-vgg-verydeep-19.mat http://www.vlfeat.org/matconvnet/models/beta16/imagenet-vgg-verydeep-19.mat') # # 载入图像,调节尺寸,生成数据集 # In[3]: # Configure the locations of the images and reshaping sizes # ------------------------------------------------------------------- # paths = {"images/cats", "images/dogs"} imgsize = [64, 64] # The reshape size use_gray = 0 # Grayscale data_name = "data4vgg" # Save name valid_exts = [".jpg",".gif",".png",".tga", ".jpeg"] # ------------------------------------------------------------------- # imgcnt = 0 nclass = len(paths) for relpath in paths: fullpath = cwd + "/" + relpath flist = os.listdir(fullpath) for f in flist: if os.path.splitext(f)[1].lower() not in valid_exts: continue fullpath = os.path.join(fullpath, f) imgcnt = imgcnt + 1 # Grayscale def rgb2gray(rgb): if len(rgb.shape) is 3: return np.dot(rgb[...,:3], [0.299, 0.587, 0.114]) else: print ("Current Image is GRAY!") return rgb if use_gray: totalimg = np.ndarray((imgcnt, imgsize[0]*imgsize[1])) else: totalimg = np.ndarray((imgcnt, imgsize[0]*imgsize[1]*3)) totallabel = np.ndarray((imgcnt, nclass)) imgcnt = 0 for i, relpath in zip(range(nclass), paths): path = cwd + "/" + relpath flist = os.listdir(path) for f in flist: if os.path.splitext(f)[1].lower() not in valid_exts: continue fullpath = os.path.join(path, f) currimg = imread(fullpath) # Convert to grayscale if use_gray: grayimg = rgb2gray(currimg) else: grayimg = currimg # Reshape graysmall = imresize(grayimg, [imgsize[0], imgsize[1]])/255. grayvec = np.reshape(graysmall, (1, -1)) # Save totalimg[imgcnt, :] = grayvec totallabel[imgcnt, :] = np.eye(nclass, nclass)[i] imgcnt = imgcnt + 1 # Divide total data into training and test set randidx = np.random.randint(imgcnt, size=imgcnt) trainidx = randidx[0:int(4*imgcnt/5)] testidx = randidx[int(4*imgcnt/5):imgcnt] trainimg = totalimg[trainidx, :] trainlabel = totallabel[trainidx, :] testimg = totalimg[testidx, :] testlabel = totallabel[testidx, :] ntrain = trainimg.shape[0] nclass = trainlabel.shape[1] dim = trainimg.shape[1] ntest = testimg.shape[0] print ("Number of total images is %d (train: %d, test: %d)" % (imgcnt, ntrain, ntest)) print ("Shape of an image is (%d, %d, %d)" % (imgsize[0], imgsize[1], 3)) # # 定义VGG网络结构 # In[4]: def net(data_path, input_image): layers = ( 'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1', 'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2', 'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3', 'relu3_3', 'conv3_4', 'relu3_4', 'pool3', 'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3', 'relu4_3', 'conv4_4', 'relu4_4', 'pool4', 'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'conv5_3', 'relu5_3', 'conv5_4', 'relu5_4' ) data = scipy.io.loadmat(data_path) mean = data['normalization'][0][0][0] mean_pixel = np.mean(mean, axis=(0, 1)) weights = data['layers'][0] net = {} current = input_image for i, name in enumerate(layers): kind = name[:4] if kind == 'conv': kernels, bias = weights[i][0][0][0][0] # matconvnet: weights are [width, height, in_channels, out_channels] # tensorflow: weights are [height, width, in_channels, out_channels] kernels = np.transpose(kernels, (1, 0, 2, 3)) bias = bias.reshape(-1) current = _conv_layer(current, kernels, bias) elif kind == 'relu': current = tf.nn.relu(current) elif kind == 'pool': current = _pool_layer(current) net[name] = current assert len(net) == len(layers) return net, mean_pixel def _conv_layer(input, weights, bias): conv = tf.nn.conv2d(input, tf.constant(weights), strides=(1, 1, 1, 1), padding='SAME') return tf.nn.bias_add(conv, bias) def _pool_layer(input): return tf.nn.max_pool(input, ksize=(1, 2, 2, 1), strides=(1, 2, 2, 1), padding='SAME') def preprocess(image, mean_pixel): return image - mean_pixel def unprocess(image, mean_pixel): return image + mean_pixel print ("VGG net ready") # # 使用VGG计算卷积特征图 # In[5]: # Preprocess trainimg_tensor = np.ndarray((ntrain, imgsize[0], imgsize[1], 3)) testimg_tensor = np.ndarray((ntest, imgsize[0], imgsize[1], 3)) for i in range(ntrain): currimg = trainimg[i, :] currimg = np.reshape(currimg, [imgsize[0], imgsize[1], 3]) trainimg_tensor[i, :, :, :] = currimg print ("Shape of trainimg_tensor is %s" % (trainimg_tensor.shape,)) for i in range(ntest): currimg = testimg[i, :] currimg = np.reshape(currimg, [imgsize[0], imgsize[1], 3]) testimg_tensor[i, :, :, :] = currimg print ("Shape of trainimg_tensor is %s" % (testimg_tensor.shape,)) # Get conv features VGG_PATH = cwd + "/data/imagenet-vgg-verydeep-19.mat" with tf.Graph().as_default(), tf.Session() as sess: with tf.device("/cpu:0"): img_placeholder = tf.placeholder(tf.float32 , shape=(None, imgsize[0], imgsize[1], 3)) nets, mean_pixel = net(VGG_PATH, img_placeholder) train_features = nets['relu5_4'].eval(feed_dict={img_placeholder: trainimg_tensor}) test_features = nets['relu5_4'].eval(feed_dict={img_placeholder: testimg_tensor}) print("Convolutional map extraction done") # # 卷积特征图的形状 # In[6]: print ("Shape of 'train_features' is %s" % (train_features.shape,)) print ("Shape of 'test_features' is %s" % (test_features.shape,)) # # 向量化 # In[7]: # Vectorize train_vectorized = np.ndarray((ntrain, 4*4*512)) test_vectorized = np.ndarray((ntest, 4*4*512)) for i in range(ntrain): curr_feat = train_features[i, :, :, :] curr_feat_vec = np.reshape(curr_feat, (1, -1)) train_vectorized[i, :] = curr_feat_vec for i in range(ntest): curr_feat = test_features[i, :, :, :] curr_feat_vec = np.reshape(curr_feat, (1, -1)) test_vectorized[i, :] = curr_feat_vec print ("Shape of 'train_vectorized' is %s" % (train_features.shape,)) print ("Shape of 'test_vectorized' is %s" % (test_features.shape,)) # # 定义finetuning的结构 # In[8]: # Parameters learning_rate = 0.0001 training_epochs = 100 batch_size = 100 display_step = 10 # tf Graph input x = tf.placeholder(tf.float32, [None, 4*4*512]) y = tf.placeholder(tf.float32, [None, nclass]) keepratio = tf.placeholder(tf.float32) # Network with tf.device("/cpu:0"): n_input = dim n_output = nclass weights = { 'wd1': tf.Variable(tf.random_normal([4*4*512, 1024], stddev=0.1)), 'wd2': tf.Variable(tf.random_normal([1024, n_output], stddev=0.1)) } biases = { 'bd1': tf.Variable(tf.random_normal([1024], stddev=0.1)), 'bd2': tf.Variable(tf.random_normal([n_output], stddev=0.1)) } def conv_basic(_input, _w, _b, _keepratio): # Input _input_r = _input # Vectorize _dense1 = tf.reshape(_input_r, [-1, _w['wd1'].get_shape().as_list()[0]]) # Fc1 _fc1 = tf.nn.relu(tf.add(tf.matmul(_dense1, _w['wd1']), _b['bd1'])) _fc_dr1 = tf.nn.dropout(_fc1, _keepratio) # Fc2 _out = tf.add(tf.matmul(_fc_dr1, _w['wd2']), _b['bd2']) # Return everything out = {'input_r': _input_r, 'dense1': _dense1, 'fc1': _fc1, 'fc_dr1': _fc_dr1, 'out': _out } return out # Functions! _pred = conv_basic(x, weights, biases, keepratio)['out'] cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=_pred, labels=y)) optm = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost) _corr = tf.equal(tf.argmax(_pred,1), tf.argmax(y,1)) accr = tf.reduce_mean(tf.cast(_corr, tf.float32)) init = tf.initialize_all_variables() print ("Network Ready to Go!") # # 优化 # In[9]: # Launch the graph sess = tf.Session() sess.run(init) # Training cycle for epoch in range(training_epochs): avg_cost = 0. num_batch = int(ntrain/batch_size)+1 # Loop over all batches for i in range(num_batch): randidx = np.random.randint(ntrain, size=batch_size) batch_xs = train_vectorized[randidx, :] batch_ys = trainlabel[randidx, :] # Fit training using batch data sess.run(optm, feed_dict={x: batch_xs, y: batch_ys, keepratio:0.7}) # Compute average loss avg_cost += sess.run(cost, feed_dict={x: batch_xs, y: batch_ys, keepratio:1.})/num_batch # Display logs per epoch step if epoch % display_step == 0: print ("Epoch: %03d/%03d cost: %.9f" % (epoch, training_epochs, avg_cost)) train_acc = sess.run(accr, feed_dict={x: batch_xs, y: batch_ys, keepratio:1.}) print (" Training accuracy: %.3f" % (train_acc)) test_acc = sess.run(accr, feed_dict={x: test_vectorized, y: testlabel, keepratio:1.}) print (" Test accuracy: %.3f" % (test_acc)) print ("Optimization Finished!")
[ "tensorflow.matmul", "os.path.isfile", "numpy.random.randint", "numpy.mean", "os.path.join", "numpy.ndarray", "tensorflow.nn.relu", "tensorflow.nn.softmax_cross_entropy_with_logits", "numpy.transpose", "tensorflow.placeholder", "tensorflow.cast", "numpy.reshape", "tensorflow.initialize_all_variables", "tensorflow.nn.bias_add", "tensorflow.Session", "tensorflow.constant", "tensorflow.nn.max_pool", "tensorflow.random_normal", "tensorflow.Graph", "scipy.misc.imresize", "numpy.dot", "os.listdir", "scipy.misc.imread", "os.getcwd", "tensorflow.argmax", "tensorflow.device", "os.path.splitext", "numpy.eye", "tensorflow.train.AdamOptimizer", "tensorflow.nn.dropout" ]
[((305, 316), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (314, 316), False, 'import os\n'), ((1812, 1840), 'numpy.ndarray', 'np.ndarray', (['(imgcnt, nclass)'], {}), '((imgcnt, nclass))\n', (1822, 1840), True, 'import numpy as np\n'), ((2630, 2668), 'numpy.random.randint', 'np.random.randint', (['imgcnt'], {'size': 'imgcnt'}), '(imgcnt, size=imgcnt)\n', (2647, 2668), True, 'import numpy as np\n'), ((5128, 5175), 'numpy.ndarray', 'np.ndarray', (['(ntrain, imgsize[0], imgsize[1], 3)'], {}), '((ntrain, imgsize[0], imgsize[1], 3))\n', (5138, 5175), True, 'import numpy as np\n'), ((5193, 5239), 'numpy.ndarray', 'np.ndarray', (['(ntest, imgsize[0], imgsize[1], 3)'], {}), '((ntest, imgsize[0], imgsize[1], 3))\n', (5203, 5239), True, 'import numpy as np\n'), ((6489, 6522), 'numpy.ndarray', 'np.ndarray', (['(ntrain, 4 * 4 * 512)'], {}), '((ntrain, 4 * 4 * 512))\n', (6499, 6522), True, 'import numpy as np\n'), ((6538, 6570), 'numpy.ndarray', 'np.ndarray', (['(ntest, 4 * 4 * 512)'], {}), '((ntest, 4 * 4 * 512))\n', (6548, 6570), True, 'import numpy as np\n'), ((7186, 7233), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, 4 * 4 * 512]'], {}), '(tf.float32, [None, 4 * 4 * 512])\n', (7200, 7233), True, 'import tensorflow as tf\n'), ((7234, 7276), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, nclass]'], {}), '(tf.float32, [None, nclass])\n', (7248, 7276), True, 'import tensorflow as tf\n'), ((7289, 7315), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {}), '(tf.float32)\n', (7303, 7315), True, 'import tensorflow as tf\n'), ((8773, 8785), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (8783, 8785), True, 'import tensorflow as tf\n'), ((527, 580), 'os.path.isfile', 'os.path.isfile', (['"""./data/imagenet-vgg-verydeep-19.mat"""'], {}), "('./data/imagenet-vgg-verydeep-19.mat')\n", (541, 580), False, 'import os\n'), ((1273, 1293), 'os.listdir', 'os.listdir', (['fullpath'], {}), '(fullpath)\n', (1283, 1293), False, 'import os\n'), ((1686, 1731), 'numpy.ndarray', 'np.ndarray', (['(imgcnt, imgsize[0] * imgsize[1])'], {}), '((imgcnt, imgsize[0] * imgsize[1]))\n', (1696, 1731), True, 'import numpy as np\n'), ((1753, 1802), 'numpy.ndarray', 'np.ndarray', (['(imgcnt, imgsize[0] * imgsize[1] * 3)'], {}), '((imgcnt, imgsize[0] * imgsize[1] * 3))\n', (1763, 1802), True, 'import numpy as np\n'), ((1944, 1960), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (1954, 1960), False, 'import os\n'), ((3822, 3848), 'numpy.mean', 'np.mean', (['mean'], {'axis': '(0, 1)'}), '(mean, axis=(0, 1))\n', (3829, 3848), True, 'import numpy as np\n'), ((4760, 4786), 'tensorflow.nn.bias_add', 'tf.nn.bias_add', (['conv', 'bias'], {}), '(conv, bias)\n', (4774, 4786), True, 'import tensorflow as tf\n'), ((4822, 4901), 'tensorflow.nn.max_pool', 'tf.nn.max_pool', (['input'], {'ksize': '(1, 2, 2, 1)', 'strides': '(1, 2, 2, 1)', 'padding': '"""SAME"""'}), "(input, ksize=(1, 2, 2, 1), strides=(1, 2, 2, 1), padding='SAME')\n", (4836, 4901), True, 'import tensorflow as tf\n'), ((5307, 5355), 'numpy.reshape', 'np.reshape', (['currimg', '[imgsize[0], imgsize[1], 3]'], {}), '(currimg, [imgsize[0], imgsize[1], 3])\n', (5317, 5355), True, 'import numpy as np\n'), ((5537, 5585), 'numpy.reshape', 'np.reshape', (['currimg', '[imgsize[0], imgsize[1], 3]'], {}), '(currimg, [imgsize[0], imgsize[1], 3])\n', (5547, 5585), True, 'import numpy as np\n'), ((5804, 5816), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (5814, 5816), True, 'import tensorflow as tf\n'), ((6654, 6684), 'numpy.reshape', 'np.reshape', (['curr_feat', '(1, -1)'], {}), '(curr_feat, (1, -1))\n', (6664, 6684), True, 'import numpy as np\n'), ((6813, 6843), 'numpy.reshape', 'np.reshape', (['curr_feat', '(1, -1)'], {}), '(curr_feat, (1, -1))\n', (6823, 6843), True, 'import numpy as np\n'), ((7331, 7350), 'tensorflow.device', 'tf.device', (['"""/cpu:0"""'], {}), "('/cpu:0')\n", (7340, 7350), True, 'import tensorflow as tf\n'), ((8665, 8694), 'tensorflow.initialize_all_variables', 'tf.initialize_all_variables', ([], {}), '()\n', (8692, 8694), True, 'import tensorflow as tf\n'), ((1415, 1440), 'os.path.join', 'os.path.join', (['fullpath', 'f'], {}), '(fullpath, f)\n', (1427, 1440), False, 'import os\n'), ((1543, 1586), 'numpy.dot', 'np.dot', (['rgb[..., :3]', '[0.299, 0.587, 0.114]'], {}), '(rgb[..., :3], [0.299, 0.587, 0.114])\n', (1549, 1586), True, 'import numpy as np\n'), ((2082, 2103), 'os.path.join', 'os.path.join', (['path', 'f'], {}), '(path, f)\n', (2094, 2103), False, 'import os\n'), ((2123, 2139), 'scipy.misc.imread', 'imread', (['fullpath'], {}), '(fullpath)\n', (2129, 2139), False, 'from scipy.misc import imread, imresize\n'), ((2387, 2417), 'numpy.reshape', 'np.reshape', (['graysmall', '(1, -1)'], {}), '(graysmall, (1, -1))\n', (2397, 2417), True, 'import numpy as np\n'), ((4677, 4697), 'tensorflow.constant', 'tf.constant', (['weights'], {}), '(weights)\n', (4688, 4697), True, 'import tensorflow as tf\n'), ((5835, 5854), 'tensorflow.device', 'tf.device', (['"""/cpu:0"""'], {}), "('/cpu:0')\n", (5844, 5854), True, 'import tensorflow as tf\n'), ((5882, 5949), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '(None, imgsize[0], imgsize[1], 3)'}), '(tf.float32, shape=(None, imgsize[0], imgsize[1], 3))\n', (5896, 5949), True, 'import tensorflow as tf\n'), ((8023, 8054), 'tensorflow.nn.dropout', 'tf.nn.dropout', (['_fc1', '_keepratio'], {}), '(_fc1, _keepratio)\n', (8036, 8054), True, 'import tensorflow as tf\n'), ((8398, 8461), 'tensorflow.nn.softmax_cross_entropy_with_logits', 'tf.nn.softmax_cross_entropy_with_logits', ([], {'logits': '_pred', 'labels': 'y'}), '(logits=_pred, labels=y)\n', (8437, 8461), True, 'import tensorflow as tf\n'), ((8562, 8581), 'tensorflow.argmax', 'tf.argmax', (['_pred', '(1)'], {}), '(_pred, 1)\n', (8571, 8581), True, 'import tensorflow as tf\n'), ((8582, 8597), 'tensorflow.argmax', 'tf.argmax', (['y', '(1)'], {}), '(y, 1)\n', (8591, 8597), True, 'import tensorflow as tf\n'), ((8625, 8651), 'tensorflow.cast', 'tf.cast', (['_corr', 'tf.float32'], {}), '(_corr, tf.float32)\n', (8632, 8651), True, 'import tensorflow as tf\n'), ((8994, 9036), 'numpy.random.randint', 'np.random.randint', (['ntrain'], {'size': 'batch_size'}), '(ntrain, size=batch_size)\n', (9011, 9036), True, 'import numpy as np\n'), ((2318, 2361), 'scipy.misc.imresize', 'imresize', (['grayimg', '[imgsize[0], imgsize[1]]'], {}), '(grayimg, [imgsize[0], imgsize[1]])\n', (2326, 2361), False, 'from scipy.misc import imread, imresize\n'), ((2504, 2526), 'numpy.eye', 'np.eye', (['nclass', 'nclass'], {}), '(nclass, nclass)\n', (2510, 2526), True, 'import numpy as np\n'), ((4244, 4279), 'numpy.transpose', 'np.transpose', (['kernels', '(1, 0, 2, 3)'], {}), '(kernels, (1, 0, 2, 3))\n', (4256, 4279), True, 'import numpy as np\n'), ((5779, 5789), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (5787, 5789), True, 'import tensorflow as tf\n'), ((7437, 7486), 'tensorflow.random_normal', 'tf.random_normal', (['[4 * 4 * 512, 1024]'], {'stddev': '(0.1)'}), '([4 * 4 * 512, 1024], stddev=0.1)\n', (7453, 7486), True, 'import tensorflow as tf\n'), ((7512, 7558), 'tensorflow.random_normal', 'tf.random_normal', (['[1024, n_output]'], {'stddev': '(0.1)'}), '([1024, n_output], stddev=0.1)\n', (7528, 7558), True, 'import tensorflow as tf\n'), ((7610, 7646), 'tensorflow.random_normal', 'tf.random_normal', (['[1024]'], {'stddev': '(0.1)'}), '([1024], stddev=0.1)\n', (7626, 7646), True, 'import tensorflow as tf\n'), ((7676, 7716), 'tensorflow.random_normal', 'tf.random_normal', (['[n_output]'], {'stddev': '(0.1)'}), '([n_output], stddev=0.1)\n', (7692, 7716), True, 'import tensorflow as tf\n'), ((8091, 8120), 'tensorflow.matmul', 'tf.matmul', (['_fc_dr1', "_w['wd2']"], {}), "(_fc_dr1, _w['wd2'])\n", (8100, 8120), True, 'import tensorflow as tf\n'), ((8474, 8525), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': 'learning_rate'}), '(learning_rate=learning_rate)\n', (8496, 8525), True, 'import tensorflow as tf\n'), ((4425, 4444), 'tensorflow.nn.relu', 'tf.nn.relu', (['current'], {}), '(current)\n', (4435, 4444), True, 'import tensorflow as tf\n'), ((7962, 7991), 'tensorflow.matmul', 'tf.matmul', (['_dense1', "_w['wd1']"], {}), "(_dense1, _w['wd1'])\n", (7971, 7991), True, 'import tensorflow as tf\n'), ((1325, 1344), 'os.path.splitext', 'os.path.splitext', (['f'], {}), '(f)\n', (1341, 1344), False, 'import os\n'), ((1992, 2011), 'os.path.splitext', 'os.path.splitext', (['f'], {}), '(f)\n', (2008, 2011), False, 'import os\n')]
from setuptools import setup requires = [ 'click==6.7', 'bucketstore==0.1.1' ] setup( name="s3env", version="0.0.4", author="<NAME>", description="Manipulate a key/value JSON object file in an S3 bucket through the CLI", author_email="<EMAIL>", url='https://github.com/cameronmaske/s3env', py_modules=['s3env'], license='MIT', install_requires=requires, entry_points=''' [console_scripts] s3env=s3env:cli ''', )
[ "setuptools.setup" ]
[((90, 456), 'setuptools.setup', 'setup', ([], {'name': '"""s3env"""', 'version': '"""0.0.4"""', 'author': '"""<NAME>"""', 'description': '"""Manipulate a key/value JSON object file in an S3 bucket through the CLI"""', 'author_email': '"""<EMAIL>"""', 'url': '"""https://github.com/cameronmaske/s3env"""', 'py_modules': "['s3env']", 'license': '"""MIT"""', 'install_requires': 'requires', 'entry_points': '"""\n [console_scripts]\n s3env=s3env:cli\n """'}), '(name=\'s3env\', version=\'0.0.4\', author=\'<NAME>\', description=\n \'Manipulate a key/value JSON object file in an S3 bucket through the CLI\',\n author_email=\'<EMAIL>\', url=\'https://github.com/cameronmaske/s3env\',\n py_modules=[\'s3env\'], license=\'MIT\', install_requires=requires,\n entry_points="""\n [console_scripts]\n s3env=s3env:cli\n """)\n', (95, 456), False, 'from setuptools import setup\n')]
"""Unit tests for the testplan.testing.multitest.result module.""" import collections import mock import pytest from testplan.testing.multitest import result as result_mod from testplan.testing.multitest.suite import testcase, testsuite from testplan.testing.multitest import MultiTest from testplan.common.utils import comparison, testing @testsuite class AssertionOrder(object): @testcase def case(self, env, result): summary = result.subresult() first = result.subresult() second = result.subresult() second.true(True, 'AssertionSecond') result.true(True, 'AssertionMain1') result.true(True, 'AssertionMain2') first.true(True, 'AssertionFirst1') first.true(True, 'AssertionFirst2') summary.append(first) result.true(first.passed, 'Report passed so far.') if first.passed: summary.append(second) result.prepend(summary) def test_assertion_orders(): mtest = MultiTest(name='AssertionsOrder', suites=[AssertionOrder()]) mtest.run() expected = ['AssertionFirst1', 'AssertionFirst2', 'AssertionSecond', 'AssertionMain1', 'AssertionMain2', 'Report passed so far.'] # pylint: disable=invalid-sequence-index assertions = ( entry for entry in mtest.report.flatten() if isinstance(entry, dict) and entry['meta_type'] == 'assertion') for idx, entry in enumerate(assertions): assert entry['description'] == expected[idx] @pytest.fixture def dict_ns(): """Dict namespace with a mocked out result object.""" mock_result = mock.MagicMock() mock_result.entries = collections.deque() return result_mod.DictNamespace(mock_result) @pytest.fixture def fix_ns(): """FIX namespace with a mocked out result object.""" mock_result = mock.MagicMock() mock_result.entries = collections.deque() return result_mod.FixNamespace(mock_result) class TestDictNamespace(object): """Unit testcases for the result.DictNamespace class.""" def test_basic_match(self, dict_ns): """ Test the match method against identical expected and actual dicts. """ expected = {'key': 123} actual = expected.copy() assert dict_ns.match( actual, expected, description='Basic dictmatch of identical dicts passes') assert dict_ns.match( actual, expected, description='Force type-check of values', value_cmp_func=comparison.COMPARE_FUNCTIONS['check_types']) assert dict_ns.match( actual, expected, description='Convert values to strings before comparing', value_cmp_func=comparison.COMPARE_FUNCTIONS['stringify']) def test_duck_match(self, dict_ns): """ Test the match method by seting different types that can be compared. Due to duck-typing, ints and floats can be equal if they refer to the same numeric value - in this case, 123 == 123.0. However if type-checking is forced by use of the check_types comparison method the assertion will fail. """ expected = {'key': 123} actual = {'key': 123.0} assert dict_ns.match( actual, expected, description='Dictmatch passes since the numeric values are equal.') assert not dict_ns.match( actual, expected, description='Dictmatch fails when type comparison is forced.', value_cmp_func=comparison.COMPARE_FUNCTIONS['check_types']) assert not dict_ns.match( actual, expected, description='Dictmatch with string conversion fails due to ' 'different string representations of int/float.', value_cmp_func=comparison.COMPARE_FUNCTIONS['stringify']) def test_fail_match(self, dict_ns): """ Test the match method for types that do not compare equal - in this case, 123 should not match "123". """ expected = {'key': 123} actual = {'key': '123'} assert not dict_ns.match( actual, expected, description='Dictmatch fails because 123 != "123') assert not dict_ns.match( actual, expected, description='Dictmatch fails due to type mismatch', value_cmp_func=comparison.COMPARE_FUNCTIONS['check_types']) assert dict_ns.match( actual, expected, description='Dictmatch passes when values are converted to strings', value_cmp_func=comparison.COMPARE_FUNCTIONS['stringify']) def test_custom_match(self, dict_ns): """Test a dict match using a user-defined comparison function.""" expected = {'key': 174.24} actual = {'key': 174.87} tolerance = 1.0 def cmp_with_tolerance(lhs, rhs): """Check that both values are within a given tolerance range.""" return abs(lhs - rhs) < tolerance assert not dict_ns.match( actual, expected, description='Values are not exactly equal') assert dict_ns.match( actual, expected, description='Values are equal within tolerance', value_cmp_func=cmp_with_tolerance) def test_report_modes(self, dict_ns): """Test controlling report modes for a dict match.""" expected = {'key{}'.format(i): i for i in range(10)} actual = expected.copy() expected['wrong'] = 'expected' actual['wrong'] = 'actual' assert not dict_ns.match( actual, expected, description='Keep all comparisons by default') assert len(dict_ns.result.entries) == 1 dict_assert = dict_ns.result.entries.popleft() assert len(dict_assert.comparison) == 11 assert dict_ns.match( actual, expected, description='Keep ignored comparisons', include_keys=['key{}'.format(i) for i in range(3)]) assert len(dict_ns.result.entries) == 1 dict_assert = dict_ns.result.entries.popleft() assert len(dict_assert.comparison) == 11 assert dict_ns.match( actual, expected, description='Discard ignored comparisons', include_keys=['key{}'.format(i) for i in range(3)], report_mode=comparison.ReportOptions.NO_IGNORED) assert len(dict_ns.result.entries) == 1 dict_assert = dict_ns.result.entries.popleft() assert len(dict_assert.comparison) == 3 assert not dict_ns.match( actual, expected, report_mode=comparison.ReportOptions.FAILS_ONLY, description='Discard passing comparisons') assert len(dict_ns.result.entries) == 1 dict_assert = dict_ns.result.entries.popleft() assert len(dict_assert.comparison) == 1 class TestFIXNamespace(object): """Unit testcases for the result.FixNamespace class.""" def test_untyped_fixmatch(self, fix_ns): """Test FIX matches between untyped FIX messages.""" expected = testing.FixMessage( ((35, 'D'), (38, '1000000'), (44, '125.83'))) actual = expected.copy() assert fix_ns.match(actual, expected, description='Basic FIX match') def test_typed_fixmatch(self, fix_ns): """Test FIX matches between typed FIX messages.""" expected = testing.FixMessage( ((35, 'D'), (38, 1000000), (44, 125.83)), typed_values=True) actual = expected.copy() assert fix_ns.match(actual, expected, description='Basic FIX match') # Now change the type of the actual 38 key's value to str. The assert # should fail since we are performing a typed match. actual[38] = '1000000' assert not fix_ns.match( actual, expected, description='Failing str/int comparison') # Change the type to a float. The match should still fail because of # the type difference, despite the numeric values being equal. actual[38] = 1000000.0 assert not fix_ns.match( actual, expected, description='Failing float/int comparison') def test_mixed_fixmatch(self, fix_ns): """Test FIX matches between typed and untyped FIX messages.""" expected = testing.FixMessage( ((35, 'D'), (38, '1000000'), (44, '125.83')), typed_values=False) actual = testing.FixMessage( ((35, 'D'), (38, '1000000'), (44, 125.83)), typed_values=True) assert fix_ns.match(actual, expected, description='Mixed FIX match') def test_report_modes(self, fix_ns): """Test controlling report modes for FIX match.""" expected = testing.FixMessage((i, (25 * i) - 4) for i in range(10)) actual = expected.copy() expected['wrong'] = 'expected' actual['wrong'] = 'actual' assert not fix_ns.match( actual, expected, description='Keep all comparisons by default') assert len(fix_ns.result.entries) == 1 dict_assert = fix_ns.result.entries.popleft() assert len(dict_assert.comparison) == 11 assert fix_ns.match( actual, expected, description='Keep ignored comparisons', include_tags=[0, 1, 2]) assert len(fix_ns.result.entries) == 1 dict_assert = fix_ns.result.entries.popleft() assert len(dict_assert.comparison) == 11 assert fix_ns.match( actual, expected, description='Discard ignored comparisons', include_tags=[0, 1, 2], report_mode=comparison.ReportOptions.NO_IGNORED) assert len(fix_ns.result.entries) == 1 dict_assert = fix_ns.result.entries.popleft() assert len(dict_assert.comparison) == 3 assert not fix_ns.match( actual, expected, report_mode=comparison.ReportOptions.FAILS_ONLY, description='Discard passing comparisons') assert len(fix_ns.result.entries) == 1 dict_assert = fix_ns.result.entries.popleft() assert len(dict_assert.comparison) == 1
[ "testplan.testing.multitest.result.DictNamespace", "testplan.testing.multitest.result.FixNamespace", "testplan.common.utils.testing.FixMessage", "mock.MagicMock", "collections.deque" ]
[((1615, 1631), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1629, 1631), False, 'import mock\n'), ((1658, 1677), 'collections.deque', 'collections.deque', ([], {}), '()\n', (1675, 1677), False, 'import collections\n'), ((1689, 1726), 'testplan.testing.multitest.result.DictNamespace', 'result_mod.DictNamespace', (['mock_result'], {}), '(mock_result)\n', (1713, 1726), True, 'from testplan.testing.multitest import result as result_mod\n'), ((1834, 1850), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1848, 1850), False, 'import mock\n'), ((1877, 1896), 'collections.deque', 'collections.deque', ([], {}), '()\n', (1894, 1896), False, 'import collections\n'), ((1908, 1944), 'testplan.testing.multitest.result.FixNamespace', 'result_mod.FixNamespace', (['mock_result'], {}), '(mock_result)\n', (1931, 1944), True, 'from testplan.testing.multitest import result as result_mod\n'), ((7320, 7384), 'testplan.common.utils.testing.FixMessage', 'testing.FixMessage', (["((35, 'D'), (38, '1000000'), (44, '125.83'))"], {}), "(((35, 'D'), (38, '1000000'), (44, '125.83')))\n", (7338, 7384), False, 'from testplan.common.utils import comparison, testing\n'), ((7631, 7710), 'testplan.common.utils.testing.FixMessage', 'testing.FixMessage', (["((35, 'D'), (38, 1000000), (44, 125.83))"], {'typed_values': '(True)'}), "(((35, 'D'), (38, 1000000), (44, 125.83)), typed_values=True)\n", (7649, 7710), False, 'from testplan.common.utils import comparison, testing\n'), ((8544, 8632), 'testplan.common.utils.testing.FixMessage', 'testing.FixMessage', (["((35, 'D'), (38, '1000000'), (44, '125.83'))"], {'typed_values': '(False)'}), "(((35, 'D'), (38, '1000000'), (44, '125.83')),\n typed_values=False)\n", (8562, 8632), False, 'from testplan.common.utils import comparison, testing\n'), ((8671, 8757), 'testplan.common.utils.testing.FixMessage', 'testing.FixMessage', (["((35, 'D'), (38, '1000000'), (44, 125.83))"], {'typed_values': '(True)'}), "(((35, 'D'), (38, '1000000'), (44, 125.83)), typed_values\n =True)\n", (8689, 8757), False, 'from testplan.common.utils import comparison, testing\n')]
#!/usr/bin/python3 # -*- coding: UTF-8 -*- # This module gets that daya from SoaringSpot and prepaeres the infor for the REAL TIME SCORING # # Author: <NAME> - May 2021 # #import sys import json import urllib.request import urllib.error import urllib.parse import hmac import hashlib import base64 import os import socket import os.path from datetime import datetime import pycountry from ognddbfuncs import getognreg, getognflarmid from simplehal import HalDocument, Resolver #-------------------------------------------------------------------------------------------------------------------# ######################################################################### global apiurl global auth # auth and apiurl are globals ######################################################################### def getapidata(url, autho): # get the data from the API server req = urllib.request.Request(url) req.add_header('Authorization', autho) # build the authorization header req.add_header("Accept", "application/json") req.add_header("Content-Type", "application/hal+json") r = urllib.request.urlopen(req) # open the url resource j_obj = json.load(r) # convert to JSON return j_obj # return the JSON object ################################################################### ######################################################################## def oksta(station): if (station != "FLYMASTER"): return(True) return(False) ##################### def chkfilati(latitude, flatil, flatiu): if (flatil == 0.0): return (False) if (flatil > 0): # northern hemisphere if (latitude < flatil or latitude > flatiu): return (True) else: # southern hemisfere if (latitude > flatil or latitude < flatiu): return (True) return(False) ######################################################################## # get the data from the soaring spot and return it as a HAL document def gdata(url, key, prt='no'): global auth # auth and apiurl are globals global apiurl j_obj = getapidata(url, auth) # call the fuction that get it # convert to HAL if prt == 'yes': # if print is required print(json.dumps(j_obj, indent=4)) cd = HalDocument.get_data(HalDocument.from_python( j_obj), apiurl+'rel/' + key) # get the data from the HAL document return cd def getemb(base, ctype): global apiurl return(base['_embedded'][apiurl+'rel/'+ctype]) def getlinks(base, ctype): global apiurl return (base['_links'][apiurl+'rel/'+ctype]['href']) ################################################################### # # ---------- main code --------------- # # gather the competition data from SoaringSpot def soa2rts(RTS, client, secretkey, prt=False): global apiurl global auth date = datetime.now() # get the date hostname = socket.gethostname() # directory where to store the IGC file # see if index day is requestedd # --------------------------------------# # ===== SETUP parameters =======================# # where to find the SQLITE3 database cwd = os.getcwd() # get the current working directory # where to find the clientid and secretkey files secpath = cwd+"/SoaringSpot/" apiurl = "http://api.soaringspot.com/" # soaringspot API URL rel = "v1" # we use API version 1 # ==============================================# utc = datetime.utcnow() # the UTC time date = utc.strftime("%Y-%m-%dT%H:%M:%SZ") # get the UTC time local_time = datetime.now() # the local time # print the time for information only if prt: print("Hostname:", hostname) print("UTC Time is now:", utc) print(date) # # print the time for information only print("Local Time is now:", local_time) print("Config params. SECpath:", secpath) # nonce=base64.b64encode(OpenSSL.rand.bytes(36)) # get the once base nonce = base64.b64encode(os.urandom(36)) # get the once base # build the message message = nonce+date.encode(encoding='utf-8') + \ client.encode(encoding='utf-8') # and the message digest digest = hmac.new(secretkey, msg=message, digestmod=hashlib.sha256).digest() signature = str(base64.b64encode(digest).decode() ) # build the digital signature # the AUTHORIZATION ID is built now auth = apiurl+rel+'/hmac/v1 ClientID="'+client+'",Signature="' + \ signature+'",Nonce="' + \ nonce.decode(encoding='utf-8')+'",Created="'+date+'" ' #print ("URLiauth:", auth) # get the initial base of the tree url1 = apiurl+rel # get the contest data, first instance cd = gdata(url1, 'contests', prt='no')[0] # get the main data from the contest category = cd['category'] eventname = cd['name'] compid = cd['id'] country = cd['country'] # country code - 2 chars code compcountry = country # contry as defaults for pilots # convert the 2 chars ID to the 3 chars ID ccc = pycountry.countries.get(alpha_2=country) country = ccc.alpha_3 endate = cd['end_date'] lc = getemb(cd, 'location') # location data lcname = lc['name'] # location name print("\n\n= Contest ===============================") print("Category:", category, "Comp name:", eventname, "Comp ID:", compid) print("Loc Name:", lcname, "Country: ", country, country, "End date:", endate) print("=========================================\n\n") if prt: print("Classes:\n========\n\n") npil = 0 # init the number of pilots classes = [] pilots = [] devicesid = "" # Build the tracks and turn points, exploring the contestants and task within each class # go thru the different classes now within the daya pilots = [] for cl in getemb(cd, 'classes'): # print "CLCLCL", cl classname = cl["type"] # search for each class if prt: print("Class:", classname, "\n\n") # search for each class # search for the contestants on each class url3 = getlinks(cl, "contestants") ctt = gdata(url3, "contestants") # get the contestants data # print "CTTCTT",ctt for contestants in ctt: # print "FT", ft, "\n\n" fname = getemb(contestants, 'pilot')[0]['first_name'] lname = getemb(contestants, 'pilot')[0]['last_name'] # convert it to utf8 in order to avoid problems pname = fname.encode('utf-8').decode('utf-8') + \ " "+lname.encode('utf-8').decode('utf-8') if 'club' in contestants: club = contestants['club'].encode('utf-8').decode('utf-8') else: club = "club_NOTYET" if 'aircraft_model' in contestants: ar = contestants['aircraft_model'] else: ar = "am_NOTYET" if 'contestant_number' in contestants: cn = contestants['contestant_number'] else: cn = "cn_NOTYET" if 'nationality' in getemb(contestants, 'pilot')[0]: nation = getemb(contestants, 'pilot')[0]['nationality'] else: if compcountry != '': nation = compcountry else: nation = "ES" # by default is SPAIN # convert the 2 chars ID to the 3 chars ID ccc = pycountry.countries.get(alpha_2=nation) country3 = ccc.alpha_3 igcid = getemb(contestants, 'pilot')[0]['igc_id'] idflarm = "" ognpair = "" ognid = "" idfreg = "" if 'live_track_id' in contestants: # check if we have the FlarmId from the SoaringSpot livetrk = contestants['live_track_id'] # flarmID and OGN pair if prt: print ("Live_track:", livetrk) if len(livetrk) == 9: idflarm = livetrk # case that just the FlarmID, no piaring if len(livetrk) == 19: # format: FLR123456 OGN654321 # case that just the FlarmID and OGN tracker pair idflarm = livetrk[0:9] ognpair = livetrk[10:] # OGN trackers paired if len(idflarm) == 6: # in case of missing FLR/ICA/OGN (deprecated) if idflarm[0] == 'D': idflarm = "FLR"+idflarm # assume a Flarm type elif idflarm[0].isdigit(): idflarm = "ICA"+idflarm # assume a ICAO type else: idflarm = "OGN"+idflarm # assume a OGN type # get the registration from OGN DDB idfreg = getognreg(idflarm[3:9]) if 'aircraft_registration' in contestants: regi = contestants['aircraft_registration'] # get the flarm if from the OGN DDB ognid = getognflarmid(regi) else: # if we do not have the registration ID on the soaringspota regi = "reg_NOTYET" if idflarm == '': idflarm = ognid if idflarm != '': devicesid += idflarm+'/' if prt: print("Pilot:", pname, "Club:", club, "CompID:", cn, "Nation:", nation, "Country Code", country3, "IGCID:", igcid, "Reg:", regi, "Model:", ar, "Flarm:", idflarm, "idf:", idfreg, "OGNpair", ognpair, ognid) npil += 1 pil = {"PilotName": pname, "Club": club, "CompID": cn, "Nation": nation, "CountryCode": country3, "Registration": regi, "Class": classname, "IgcID": igcid, "AcftModel": ar, "Flarmid": idflarm, "OGNpair": ognpair} pilots.append(pil) cll = {"Class": classname} classes.append(cll) if prt: print("----------------------------------------------------------------\n\n") # print the number of pilots as a reference and control if len(devicesid) > 0: devicesid = devicesid[0:-1] if prt: print("= Pilots ===========================", npil, "\n\n") print(devicesid) RTS = {"Compname": eventname, "Category": category, "Country": country, "EndDate": endate, "Location": lcname, "Classes": classes, "Pilots": pilots, "Devices": devicesid} return (RTS)
[ "ognddbfuncs.getognreg", "json.load", "ognddbfuncs.getognflarmid", "os.getcwd", "hmac.new", "pycountry.countries.get", "json.dumps", "socket.gethostname", "datetime.datetime.utcnow", "base64.b64encode", "simplehal.HalDocument.from_python", "datetime.datetime.now", "os.urandom" ]
[((1218, 1230), 'json.load', 'json.load', (['r'], {}), '(r)\n', (1227, 1230), False, 'import json\n'), ((3076, 3090), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3088, 3090), False, 'from datetime import datetime\n'), ((3136, 3156), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (3154, 3156), False, 'import socket\n'), ((3382, 3393), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3391, 3393), False, 'import os\n'), ((3733, 3750), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3748, 3750), False, 'from datetime import datetime\n'), ((3863, 3877), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3875, 3877), False, 'from datetime import datetime\n'), ((5437, 5477), 'pycountry.countries.get', 'pycountry.countries.get', ([], {'alpha_2': 'country'}), '(alpha_2=country)\n', (5460, 5477), False, 'import pycountry\n'), ((2501, 2531), 'simplehal.HalDocument.from_python', 'HalDocument.from_python', (['j_obj'], {}), '(j_obj)\n', (2524, 2531), False, 'from simplehal import HalDocument, Resolver\n'), ((4334, 4348), 'os.urandom', 'os.urandom', (['(36)'], {}), '(36)\n', (4344, 4348), False, 'import os\n'), ((2442, 2469), 'json.dumps', 'json.dumps', (['j_obj'], {'indent': '(4)'}), '(j_obj, indent=4)\n', (2452, 2469), False, 'import json\n'), ((4533, 4591), 'hmac.new', 'hmac.new', (['secretkey'], {'msg': 'message', 'digestmod': 'hashlib.sha256'}), '(secretkey, msg=message, digestmod=hashlib.sha256)\n', (4541, 4591), False, 'import hmac\n'), ((7973, 8012), 'pycountry.countries.get', 'pycountry.countries.get', ([], {'alpha_2': 'nation'}), '(alpha_2=nation)\n', (7996, 8012), False, 'import pycountry\n'), ((4643, 4667), 'base64.b64encode', 'base64.b64encode', (['digest'], {}), '(digest)\n', (4659, 4667), False, 'import base64\n'), ((9383, 9406), 'ognddbfuncs.getognreg', 'getognreg', (['idflarm[3:9]'], {}), '(idflarm[3:9])\n', (9392, 9406), False, 'from ognddbfuncs import getognreg, getognflarmid\n'), ((9598, 9617), 'ognddbfuncs.getognflarmid', 'getognflarmid', (['regi'], {}), '(regi)\n', (9611, 9617), False, 'from ognddbfuncs import getognreg, getognflarmid\n')]
#!/usr/bin/env python3 import json from pathlib import PurePath, Path import subprocess import tempfile import zipfile wasm_pack = Path("~/.cargo/bin/wasm-pack").expanduser() root_files = ["module.json", "README.md", "CHANGELOG.md", "LICENSE"] wasm_files = ["gridless_pathfinding_bg.wasm", "gridless_pathfinding.js"] output_dir = Path("artifact") copy_everything_directories = ["js", "lang", "templates"] wasm_dir = Path("wasm") root_dir = Path(".") rust_dir = Path("rust") build_dir_tmp = tempfile.TemporaryDirectory() build_dir = Path(build_dir_tmp.name) with open("module.json", "r") as file: manifest = json.load(file) zip_root = PurePath(f'{manifest["name"]}') filename = f'{manifest["name"]}-{manifest["version"]}.zip' result = subprocess.run([wasm_pack, "build", "--target", "web", "--out-dir", build_dir, root_dir / rust_dir]) if result.returncode != 0: raise Exception("Wasm build failed") output_dir.mkdir(parents=True, exist_ok=True) def write_directory(archive, d): for f in (root_dir / d).iterdir(): if f.is_dir(): write_directory(archive, f) else: assert(f.is_file()) archive.write(f, arcname=zip_root / d / f.name) with zipfile.ZipFile(output_dir / filename, mode="w", compression=zipfile.ZIP_DEFLATED, compresslevel=9) as archive: for f in root_files: archive.write(root_dir / f, arcname=zip_root / f) for d in copy_everything_directories: write_directory(archive, d) for f in wasm_files: archive.write(build_dir / f, arcname=zip_root / wasm_dir / f) print(f"Successfully built {output_dir / filename}")
[ "subprocess.run", "json.load", "tempfile.TemporaryDirectory", "zipfile.ZipFile", "pathlib.Path", "pathlib.PurePath" ]
[((333, 349), 'pathlib.Path', 'Path', (['"""artifact"""'], {}), "('artifact')\n", (337, 349), False, 'from pathlib import PurePath, Path\n'), ((419, 431), 'pathlib.Path', 'Path', (['"""wasm"""'], {}), "('wasm')\n", (423, 431), False, 'from pathlib import PurePath, Path\n'), ((443, 452), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (447, 452), False, 'from pathlib import PurePath, Path\n'), ((464, 476), 'pathlib.Path', 'Path', (['"""rust"""'], {}), "('rust')\n", (468, 476), False, 'from pathlib import PurePath, Path\n'), ((493, 522), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (520, 522), False, 'import tempfile\n'), ((535, 559), 'pathlib.Path', 'Path', (['build_dir_tmp.name'], {}), '(build_dir_tmp.name)\n', (539, 559), False, 'from pathlib import PurePath, Path\n'), ((640, 671), 'pathlib.PurePath', 'PurePath', (['f"""{manifest[\'name\']}"""'], {}), '(f"{manifest[\'name\']}")\n', (648, 671), False, 'from pathlib import PurePath, Path\n'), ((742, 846), 'subprocess.run', 'subprocess.run', (["[wasm_pack, 'build', '--target', 'web', '--out-dir', build_dir, root_dir /\n rust_dir]"], {}), "([wasm_pack, 'build', '--target', 'web', '--out-dir',\n build_dir, root_dir / rust_dir])\n", (756, 846), False, 'import subprocess\n'), ((612, 627), 'json.load', 'json.load', (['file'], {}), '(file)\n', (621, 627), False, 'import json\n'), ((1161, 1265), 'zipfile.ZipFile', 'zipfile.ZipFile', (['(output_dir / filename)'], {'mode': '"""w"""', 'compression': 'zipfile.ZIP_DEFLATED', 'compresslevel': '(9)'}), "(output_dir / filename, mode='w', compression=zipfile.\n ZIP_DEFLATED, compresslevel=9)\n", (1176, 1265), False, 'import zipfile\n'), ((133, 163), 'pathlib.Path', 'Path', (['"""~/.cargo/bin/wasm-pack"""'], {}), "('~/.cargo/bin/wasm-pack')\n", (137, 163), False, 'from pathlib import PurePath, Path\n')]
import functools import inspect import equinox from typing import Callable, Iterable def compose(workflow: Iterable[Callable], do_jit: bool=False) -> Callable: def pipeline(*args, **kwargs): # *args are for grad, **kwargs are the rest res = dict([]) for f in workflow: sig = inspect.signature(f) f_args = sig.parameters.keys() feed_args = False feed_kwargs = False arglist = [] for arg in f_args: if not feed_args or not feed_kwargs: if arg in kwargs.keys() and arg not in res.keys(): feed_kwargs = True arglist.append(arg) elif arg not in kwargs.keys() and arg not in res.keys(): feed_args = True elif arg in kwargs.keys() and arg in res.keys(): raise Exception(f'the keyword \'{arg}\' is already specified in the workflow') else: break f_kwargs = {k:kwargs[k] for k in arglist} if feed_args and feed_kwargs: res = f(*args, **res, **f_kwargs) elif feed_args and not feed_kwargs: res = f(*args, **res) elif not feed_args and feed_kwargs: res = f(**res, **f_kwargs) else: res = f(**res) return res # not really too helpful, since can't parse which of these are free params... workflow_pars = [] for i, f in enumerate(workflow): sig = inspect.signature(f) workflow_pars += list(sig.parameters.values()) workflow_pars = sorted(workflow_pars, key=lambda x: 0 if x.default is inspect.Parameter.empty else 1) # print(workflow_pars) last_sig = inspect.signature(workflow[-1]) an = last_sig.return_annotation pipeline.__signature__ = inspect.Signature(workflow_pars, return_annotation=an) if do_jit: return equinox.filter_jit( pipeline, filter_spec=equinox.is_array, filter_spec_return=equinox.is_array ) else: return pipeline
[ "inspect.Signature", "inspect.signature", "equinox.filter_jit" ]
[((1842, 1873), 'inspect.signature', 'inspect.signature', (['workflow[-1]'], {}), '(workflow[-1])\n', (1859, 1873), False, 'import inspect\n'), ((1939, 1993), 'inspect.Signature', 'inspect.Signature', (['workflow_pars'], {'return_annotation': 'an'}), '(workflow_pars, return_annotation=an)\n', (1956, 1993), False, 'import inspect\n'), ((1613, 1633), 'inspect.signature', 'inspect.signature', (['f'], {}), '(f)\n', (1630, 1633), False, 'import inspect\n'), ((2029, 2128), 'equinox.filter_jit', 'equinox.filter_jit', (['pipeline'], {'filter_spec': 'equinox.is_array', 'filter_spec_return': 'equinox.is_array'}), '(pipeline, filter_spec=equinox.is_array,\n filter_spec_return=equinox.is_array)\n', (2047, 2128), False, 'import equinox\n'), ((312, 332), 'inspect.signature', 'inspect.signature', (['f'], {}), '(f)\n', (329, 332), False, 'import inspect\n')]
import requests base_url = "https://stepic.org/media/attachments/course67/3.6.3/" with open("solutions/week-3/dataset_3378_3.txt") as f: first_url = f.readline().strip() r = requests.get(first_url) answer = r.text.strip() count = 1 while not answer.startswith("We"): r = requests.get(f"{base_url}{answer}") answer = r.text.strip() count += 1 print(f"Requesting next file with answer. Requested: {count}") else: final_answer = answer print(final_answer)
[ "requests.get" ]
[((181, 204), 'requests.get', 'requests.get', (['first_url'], {}), '(first_url)\n', (193, 204), False, 'import requests\n'), ((283, 318), 'requests.get', 'requests.get', (['f"""{base_url}{answer}"""'], {}), "(f'{base_url}{answer}')\n", (295, 318), False, 'import requests\n')]
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import subprocess import sys from contextlib import contextmanager from signal import Signals from subprocess import Popen as _Popen from typing import List def run_command(cmd: List[str], timeout: int): with Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True ) as process: stdout, stderr = process.communicate(timeout=timeout) if process.returncode: returncode = process.returncode try: # Try and decode the name of a signal. Signal returncodes # are negative. returncode = f"{returncode} ({Signals(abs(returncode)).name})" except ValueError: pass raise OSError( f"Compilation job failed with returncode {returncode}\n" f"Command: {' '.join(cmd)}\n" f"Stderr: {stderr.strip()}" ) return stdout def communicate(process, input=None, timeout=None): """subprocess.communicate() which kills subprocess on timeout.""" try: return process.communicate(input=input, timeout=timeout) except subprocess.TimeoutExpired: # kill() was added in Python 3.7. if sys.version_info >= (3, 7, 0): process.kill() else: process.terminate() # Wait for shutdown to complete. try: process.communicate(timeout=timeout) except subprocess.TimeoutExpired: pass # Stubborn process won't die, nothing can be done. raise @contextmanager def Popen(*args, **kwargs): """subprocess.Popen() with resilient process termination at end of scope.""" with _Popen(*args, **kwargs) as process: try: yield process finally: # Process has not yet terminated, kill it. if process.poll() is None: # kill() was added in Python 3.7. if sys.version_info >= (3, 7, 0): process.kill() else: process.terminate() # Wait for shutdown to complete. try: process.communicate(timeout=60) except subprocess.TimeoutExpired: pass # Stubborn process won't die, nothing can be done.
[ "subprocess.Popen" ]
[((1873, 1896), 'subprocess.Popen', '_Popen', (['*args'], {}), '(*args, **kwargs)\n', (1879, 1896), True, 'from subprocess import Popen as _Popen\n')]
# Twitter Parser # Parses Data and uploads to MongoDB import twitter_access import mongo_access from tweepy.streaming import StreamListener from tweepy import OAuthHandler from tweepy import Stream import json import re import tweepy import time from datetime import datetime from dateutil.parser import parse from pymongo import MongoClient import os import psycopg2 from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer import pandas as pd # Connect to MongoDB client_mongo = MongoClient(mongo_access.URL) db_mongo = client_mongo.get_database("alikhanlab-twitter") tweets_mongo = db_mongo.tweets # Twitter Parser Class class Output_Listener(StreamListener): def __init__(self, sec_limit, track): self.start_time = time.time() self.sec_limit = sec_limit self.track = track self.analyser = SentimentIntensityAnalyzer() self.cities = pd.read_excel('CitiesEnriched.xls') def on_data(self, data): def clean_tweet(x): x = x.encode('ascii', 'ignore').decode('ascii') x = re.sub(r'http\S+', '', x) return x if (time.time() - self.start_time) < self.sec_limit: tweet = json.loads(data) if tweet["retweeted"] == False: created_at = parse(tweet["created_at"]) tweet_id_str = tweet["id_str"] text = clean_tweet(tweet["text"]) retweet_count = tweet["retweet_count"] favorite_count = tweet["favorite_count"] user_id = tweet["user"]["id_str"] user_followers_count = tweet["user"]["followers_count"] # text sentiment tweet_sentiment = self.analyser.polarity_scores(text) # user geolocation city = self.cities.sample() longitude = city['Lng'].values[0] latitude = city['Lat'].values[0] obj = {"track":self.track[0],"created_at":created_at,"tweet_id_str":tweet_id_str,"text":text, "neg_score":tweet_sentiment["neg"], "neu_score":tweet_sentiment["neu"], "pos_score":tweet_sentiment["pos"], "retweet_count":retweet_count, "favorite_count":favorite_count, "user_id":user_id, "user_followers_count":user_followers_count, "user_long": longitude, "user_lat":latitude} tweets_mongo.insert_one(obj) print('Tweet is uploaded on MongoDB') return True else: print('End parsing.....') print('Time limit is reached') return False def on_error(self, status): print(status) def parse_and_populate(sec_limit, track): listener = Output_Listener(sec_limit, track) auth = OAuthHandler(twitter_access.API_KEY, twitter_access.API_SECRET_KEY) auth.set_access_token(twitter_access.ACCESS_TOKEN, twitter_access.ACCESS_TOKEN_SECRET) stream = Stream(auth, listener) stream.filter(languages = ['en'], track = track)
[ "pymongo.MongoClient", "dateutil.parser.parse", "json.loads", "time.time", "pandas.read_excel", "vaderSentiment.vaderSentiment.SentimentIntensityAnalyzer", "tweepy.Stream", "tweepy.OAuthHandler", "re.sub" ]
[((497, 526), 'pymongo.MongoClient', 'MongoClient', (['mongo_access.URL'], {}), '(mongo_access.URL)\n', (508, 526), False, 'from pymongo import MongoClient\n'), ((2883, 2950), 'tweepy.OAuthHandler', 'OAuthHandler', (['twitter_access.API_KEY', 'twitter_access.API_SECRET_KEY'], {}), '(twitter_access.API_KEY, twitter_access.API_SECRET_KEY)\n', (2895, 2950), False, 'from tweepy import OAuthHandler\n'), ((3055, 3077), 'tweepy.Stream', 'Stream', (['auth', 'listener'], {}), '(auth, listener)\n', (3061, 3077), False, 'from tweepy import Stream\n'), ((754, 765), 'time.time', 'time.time', ([], {}), '()\n', (763, 765), False, 'import time\n'), ((852, 880), 'vaderSentiment.vaderSentiment.SentimentIntensityAnalyzer', 'SentimentIntensityAnalyzer', ([], {}), '()\n', (878, 880), False, 'from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer\n'), ((903, 938), 'pandas.read_excel', 'pd.read_excel', (['"""CitiesEnriched.xls"""'], {}), "('CitiesEnriched.xls')\n", (916, 938), True, 'import pandas as pd\n'), ((1082, 1107), 're.sub', 're.sub', (['"""http\\\\S+"""', '""""""', 'x'], {}), "('http\\\\S+', '', x)\n", (1088, 1107), False, 'import re\n'), ((1211, 1227), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (1221, 1227), False, 'import json\n'), ((1142, 1153), 'time.time', 'time.time', ([], {}), '()\n', (1151, 1153), False, 'import time\n'), ((1301, 1327), 'dateutil.parser.parse', 'parse', (["tweet['created_at']"], {}), "(tweet['created_at'])\n", (1306, 1327), False, 'from dateutil.parser import parse\n')]
# Imports the Google Cloud client library from google.cloud import language from google.cloud.language import enums from google.cloud.language import types # Instantiates a client client = language.LanguageServiceClient() ENTITY_TYPE_TO_IGNORE = [8, 9, 10, 11, 12] # The text to analyze text = "Aside from course work, my passion for traveling was what prompted me to take part in the study aboard program in Dresden, Germany, in the spring of 2018 and Tokyo, Japan in the summer of 2017. While taking classes offered at both TU Dresden and Tokyo tech, I spent most of my off time traveling Europe and around the city. Combine with my study in the States, I believe that it is these experiences that taught me how to quickly adapt to changes in the environment and apply my ability in a different context. My passion for electronics and computers is also what prompts me to join the High-Performance Computing (HPC) club and continue to be an active member of the university maker space. My decision to take part in the leadership role of the BUHPC contained more than my interest in the subject matter; I wish to inspire others learning about HPC by sharing a subject that I enjoy learning. Similarly, by taking part in the engineering " def gapiAnalysisText(text): document = types.Document( content=text, type=enums.Document.Type.PLAIN_TEXT) encoding_type = enums.EncodingType.UTF8 response = client.analyze_entities(document, encoding_type=encoding_type) # Loop through entitites returned from the API key_words=list() for entity in response.entities: if (entity.type not in ENTITY_TYPE_TO_IGNORE): key_words.append(entity.name) key_words=list(dict.fromkeys(key_words)) key_words.sort() return ",".join(map(str,key_words)) char_str= gapiAnalysisText(text) print(char_str)
[ "google.cloud.language.LanguageServiceClient", "google.cloud.language.types.Document" ]
[((190, 222), 'google.cloud.language.LanguageServiceClient', 'language.LanguageServiceClient', ([], {}), '()\n', (220, 222), False, 'from google.cloud import language\n'), ((1282, 1347), 'google.cloud.language.types.Document', 'types.Document', ([], {'content': 'text', 'type': 'enums.Document.Type.PLAIN_TEXT'}), '(content=text, type=enums.Document.Type.PLAIN_TEXT)\n', (1296, 1347), False, 'from google.cloud.language import types\n')]
from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions from selenium.common.exceptions import NoSuchElementException import time, math class PageGetSet: def __init__(self): self.PET_OWNERS = 0 self.INVENTORY = 1 self.VETS = 2 self.VETS_DETAILS = 3 self.APPOINTMENTS = 4 self.PET_OWNER_DETAILS = 5 self.APPOINTMENT_DETAILS = 6 self.Browser = None self.WorkElement = None self.Pages = [ "PetOwners.aspx", "Inventory.aspx", "Vets.aspx", "VetDetails.aspx", "Appointments.aspx", "PetOwnerDetails.aspx", "AppointmentDetails.aspx" ] self.Statistics = { "success": 0, "fail": 0, "max": 0 } self.CurrentPage = self.PET_OWNERS self.InitializeBrowser("http://localhost:50452/" + self.Pages[self.CurrentPage]) self.RunCommands() def InitializeBrowser(self, address): self.Browser = webdriver.Firefox() self.Browser.get(address) def UpdateStatistics(self, status): self.Statistics[status] += 1 self.Statistics["max"] += 1 def GetElementById(self, id): try: self.WorkElement = self.Browser.find_element_by_id(id) return self.WorkElement except NoSuchElementException: return False def SendKeysToElement(self, element, keys): element.clear() element.send_keys(keys) def WaitForElementToExist(self, id): WebDriverWait(self.Browser, 3).until( expected_conditions.visibility_of_element_located( (By.ID, id) ) ) def SwitchToPage(self, pageID): self.CurrentPage = pageID self.Browser.get("http://localhost:50452/" + self.Pages[self.CurrentPage]) def ClickElement(self, element): element.click() def ClickElementById(self, elementId): self.GetElementById(elementId).click() def Sleep(self, length): time.sleep(length) def IfExistById(self, id): try: value = self.GetElementById(id) if(value == False): return False return True except NoSuchElementException: print("NO PAGE") return False def PetOwners(self): print("Testing Pet Owners Start") self.SwitchToPage(self.PET_OWNERS) self.SendKeysToElement(self.GetElementById("txtSearch"), "Arnold") self.Browser.find_element_by_xpath("//input[@id='btnSearch']").click() print("Testing Pet Owners Finish") self.Sleep(3) def PetOwnersDetails(self): print("Testing Pet Owners Details Start") self.SwitchToPage(self.PET_OWNER_DETAILS) self.SendKeysToElement(self.GetElementById("txtFirstname"), "Arnold") self.SendKeysToElement(self.GetElementById("txtLastname"), "Schwarzenegger") self.SendKeysToElement(self.GetElementById("txtMobile"), "424-288-2000") self.SendKeysToElement(self.GetElementById("txtEmail"), "<EMAIL>") self.Browser.find_element_by_xpath("//input[@id='chkID']").click() self.Browser.find_element_by_xpath("//input[@id='btnSave']").click() alert = self.Browser.switch_to_alert() alert.accept() self.SendKeysToElement(self.GetElementById("txtPetName"), "Arnold") self.SendKeysToElement(self.GetElementById("txtPetDOB"), "02/01/2023") self.SendKeysToElement(self.GetElementById("txtPetType"), "Schwarzenegger") self.SendKeysToElement(self.GetElementById("txtPetBreed"), "Arnold") self.Browser.find_element_by_xpath("//input[@id='btnAddPet']").click() alert = self.Browser.switch_to_alert() alert.accept() print("Testing Pet Owners Details Finish") self.Sleep(3) def Inventory(self): print("Testing Inventory Start") self.SwitchToPage(self.INVENTORY) self.SendKeysToElement(self.GetElementById("txtName"), "Arnold") self.SendKeysToElement(self.GetElementById("txtQuantity"), "50") self.SendKeysToElement(self.GetElementById("txtCost"), "2.0") self.SendKeysToElement(self.GetElementById("txtPrice"), "2.0") self.Browser.find_element_by_xpath("//input[@id='btnSave']").click() alert = self.Browser.switch_to_alert() alert.accept() self.SendKeysToElement(self.GetElementById("txtSearch"), "Arnold") self.Browser.find_element_by_xpath("//input[@id='btnSearch']").click() print("Testing Inventory Finish") self.Sleep(3) def Vets(self): print("Testing Vets Start") self.SwitchToPage(self.VETS) self.SendKeysToElement(self.GetElementById("txtSearch"), "Arnold") self.Browser.find_element_by_xpath("//input[@id='btnSearch']").click() print("Testing Vets Finish") self.Sleep(3) def VetsDetails(self): print("Testing Vets Details Start") self.SwitchToPage(self.VETS_DETAILS) self.SendKeysToElement(self.GetElementById("txtFirstName"), "Arnold") self.SendKeysToElement(self.GetElementById("txtLastName"), "Schwarzenegger") self.SendKeysToElement(self.GetElementById("txtMobileNo"), "56675675") self.SendKeysToElement(self.GetElementById("txtEmail"), "<EMAIL>") self.SendKeysToElement(self.GetElementById("txtAddress"), "50 apple cross ave") self.SendKeysToElement(self.GetElementById("txtPostcode"), "6023") self.SendKeysToElement(self.GetElementById("txtSkills"), "BodyBuilder") self.Browser.find_element_by_xpath("//input[@id='btnSave']").click() alert = self.Browser.switch_to_alert() alert.accept() self.Browser.find_element_by_xpath("//option[@value='Feb']").click() self.SendKeysToElement(self.GetElementById("txtSkills"), "BodyBuilder") self.Browser.find_element_by_xpath("//input[@id='chkTue']").click() self.Browser.find_element_by_xpath("//input[@id='btnAddAvailability']").click() alert = self.Browser.switch_to_alert() alert.accept() print("Testing Vets Details Finish") self.Sleep(3) def Appointments(self): print("Testing Appointments Start") self.SwitchToPage(self.APPOINTMENTS) print("Testing Appointments Finish") self.Sleep(3) def AppointmentsDetails(self): print("Testing Appointments Details Start") self.SwitchToPage(self.APPOINTMENT_DETAILS) self.SendKeysToElement(self.GetElementById("txtAppointmentDate"), "02/01/2023") self.Browser.find_element_by_xpath("//select[@id='DropDownListHour']").click() self.Browser.find_element_by_xpath("//option[@value='07']").click() self.Browser.find_element_by_xpath("//select[@id='DropDownListMinute']").click() self.Browser.find_element_by_xpath("//option[@value='45']").click() self.SendKeysToElement(self.GetElementById("txtPaid"), "Cakes") self.Browser.find_element_by_xpath("//input[@id='chkPaid']").click() self.SendKeysToElement(self.GetElementById("txtComments"), "Cakes are tasty and good yis") self.Browser.find_element_by_xpath("//div//div//div//table[@id='GridViewPets']//tbody//tr[2]//td[1]//input[1]").click() self.Browser.find_element_by_xpath("//div//div//div//table[@id='GridViewVets']//tbody//tr[2]//td[1]//input[1]").click() self.Browser.find_element_by_xpath("//input[@id='btnSave']").click() alert = self.Browser.switch_to_alert() alert.accept() self.Browser.find_element_by_xpath("//div[@id='Panel1']//div//div//tbody//tr[4]//td[1]//input[1]").click() self.Browser.find_element_by_xpath("//input[@id='txtQuantity']").click() self.SendKeysToElement(self.GetElementById("txtQuantity"), "2") self.Browser.find_element_by_xpath("//input[@id='btnAddMedication']").click() self.Browser.find_element_by_xpath("//input[@id='btnSave']").click() alert = self.Browser.switch_to_alert() alert.accept() self.SendKeysToElement(self.GetElementById("txtSearchPet"), "Arnold") self.Browser.find_element_by_xpath("//input[@id='btnSearchPet']").click() self.SendKeysToElement(self.GetElementById("txtSearchVet"), "Arnold") self.Browser.find_element_by_xpath("//input[@id='btnSearchVet']").click() self.SendKeysToElement(self.GetElementById("txtSearchInventory"), "Arnold") self.Browser.find_element_by_xpath("//input[@id='btnSearchInventory']").click() print("Testing Appointments Details Finish") self.Sleep(3) def RunCommands(self): self.PetOwners() self.PetOwnersDetails() self.Inventory() self.Vets() self.VetsDetails() self.Appointments() self.AppointmentsDetails() self.Appointments() print("---------------------------") print("-- All Tests Complete --") print("---------------------------") pgs = PageGetSet()
[ "selenium.webdriver.support.ui.WebDriverWait", "time.sleep", "selenium.webdriver.support.expected_conditions.visibility_of_element_located", "selenium.webdriver.Firefox" ]
[((1176, 1195), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (1193, 1195), False, 'from selenium import webdriver\n'), ((2213, 2231), 'time.sleep', 'time.sleep', (['length'], {}), '(length)\n', (2223, 2231), False, 'import time, math\n'), ((1765, 1827), 'selenium.webdriver.support.expected_conditions.visibility_of_element_located', 'expected_conditions.visibility_of_element_located', (['(By.ID, id)'], {}), '((By.ID, id))\n', (1814, 1827), False, 'from selenium.webdriver.support import expected_conditions\n'), ((1715, 1745), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['self.Browser', '(3)'], {}), '(self.Browser, 3)\n', (1728, 1745), False, 'from selenium.webdriver.support.ui import WebDriverWait\n')]
from unicodedata import name from accounts import views as account_views from django.urls import path, reverse from .views import (AboutPageView, AuctionCreateView, AuctionDetailView, AuctionListView, BidCreateView, BidDetailView, DashboardPageView, DatasourceView, HomePageView, StripeConnectionView,MyAuctionDetailView,AuctionUpdateView) urlpatterns = [ path('', HomePageView.as_view(), name='home'), path('about/', AboutPageView.as_view(), name='about'), path('dashboard/', DashboardPageView.as_view(), name='dashboard'), path('auctions/', AuctionListView.as_view(), name='auctions'), path('auction-detail/<int:pk>/', AuctionDetailView.as_view(), name='auction-detail'), path('auctions/create/',AuctionCreateView.as_view(), name= 'auction-create'), path('bids/create/',BidCreateView.as_view(), name= 'bid-create'), path('bid-detail/<int:pk>/', BidDetailView.as_view(), name='bid-detail'), path('stripe-connection/', StripeConnectionView.as_view(), name='stripe-connection'), path('data-source/',DatasourceView.as_view(), name='data-source'), path('dashboard/my-auction/', MyAuctionDetailView.as_view(),name='my-auction'), path('auction-update/<int:pk>/',AuctionUpdateView.as_view(), name='auction-update'), path('dashboard/company/my-company', account_views.MyCompanyDetailView.as_view(), name='my-company'), path('dashboard/company/<int:pk>/', account_views.CompanyUpdateView.as_view(), name="company-update"), path('dashboard/company/create/', account_views.CompanyCreateView.as_view(), name='company-create'), path('dashboard/connect/', account_views.connect, name="connect"), path('dashboard/my-account/', account_views.AccountDetailView.as_view(), name='my-account'), ]
[ "accounts.views.MyCompanyDetailView.as_view", "django.urls.path", "accounts.views.AccountDetailView.as_view", "accounts.views.CompanyUpdateView.as_view", "accounts.views.CompanyCreateView.as_view" ]
[((1608, 1673), 'django.urls.path', 'path', (['"""dashboard/connect/"""', 'account_views.connect'], {'name': '"""connect"""'}), "('dashboard/connect/', account_views.connect, name='connect')\n", (1612, 1673), False, 'from django.urls import path, reverse\n'), ((1327, 1370), 'accounts.views.MyCompanyDetailView.as_view', 'account_views.MyCompanyDetailView.as_view', ([], {}), '()\n', (1368, 1370), True, 'from accounts import views as account_views\n'), ((1432, 1473), 'accounts.views.CompanyUpdateView.as_view', 'account_views.CompanyUpdateView.as_view', ([], {}), '()\n', (1471, 1473), True, 'from accounts import views as account_views\n'), ((1537, 1578), 'accounts.views.CompanyCreateView.as_view', 'account_views.CompanyCreateView.as_view', ([], {}), '()\n', (1576, 1578), True, 'from accounts import views as account_views\n'), ((1709, 1750), 'accounts.views.AccountDetailView.as_view', 'account_views.AccountDetailView.as_view', ([], {}), '()\n', (1748, 1750), True, 'from accounts import views as account_views\n')]
"""A population model that creates samples with more and more variants. Suitable for the aligner paper experiments ^ = intersection E = subset vx ^ v0 = v0 vx ^ v1 = v0 ... vx ^ vn = v0 v0 E v1 v1 E v2 v2 E v3 ... v(n-1) E vn This plugin does not honor the site frequency spectrum model and ignores the original 'p' values """ import numpy as np __example_param_text = """ { "vn": { "p_vx": 0.2, "p_vn": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7], } } """ _description = __doc__ + '\nExample parameters:\n' + __example_param_text #_example_params = json.loads(__example_param_text) _example_params = eval(__example_param_text) class Model: def __init__(self, p_vx, p_vn): """A population model that creates samples with more and more variants. Suitable for the aligner paper experiments :param p_vx: probability value for vx set :param p_vn: probability values for v0, v1, v2, v3 .... set """ self.p_vx, self.p_vn = p_vx, p_vn def samples(self, chrom_no=None, ml=None, rng_seed=1, **kwargs): """This returns an iterator :param chrom_no: number of the chromosome being considered [1,2,3 ...] (ignored here) :param ml: VariantList. master list of variants as created by genomes program :param rng_seed: seed for random number generators :return: A generator returning (generation no, serial_no, chromosome, % samples done) for each sample in population Algorithm: (Repeat for each chromosome copy) Generate random numbers r same size as variants list Select vx <= r < p_vx Pick a random subset of v0 as v1 size(v1)/size(v0) = p_v1/p_v0 Set all r corresponding to v0 - v1 as 1.0 so we never select these again Pick v2, v3 ... by comparing r to p_v2, p_v3 and so on """ assert 0 <= self.p_vx <= 1.0, 'p_vx needs to be >= 0 and <= 1.0' assert self.p_vx > self.p_vn[0], 'p_vx needs to be > p_vn[0]' for n in range(len(self.p_vn) - 1): assert self.p_vn[n] < self.p_vn[n + 1], 'p_vn needs to be in ascending order' assert 0 <= self.p_vn[n] <= 1.0, 'p_vn needs to be >= 0 and <= 1.0' rng = np.random.RandomState(rng_seed) r = rng.rand(ml.variants.shape[0], 2) idx_vx = [None, None] for cpy in [0, 1]: idx_vx[cpy] = np.sort(rng.choice(ml.variants.shape[0], size=int(ml.variants.shape[0] * self.p_vx), replace=False)) # Take elements in vx that are not going to be in v0 completely out of circulation r[idx_vx[cpy][(r[idx_vx[cpy]] >= self.p_vn[0]).nonzero()[0]], cpy] = 1.1 # Now all elements for r < 1.0 are either in vx ^ v0 or not in vx for n in range(len(self.p_vn) + 1): if n == 0: this_idx, sample_name = idx_vx, 'vx' else: this_idx, sample_name = [(r[:, cpy] < self.p_vn[n - 1]).nonzero()[0] for cpy in [0, 1]], 'v{:d}'.format(n - 1) yield sample_name, ml.zip_up_chromosome(*this_idx), float(n + 1) / self.get_sample_count_estimate() def get_sample_count_estimate(self): """Give us an as exact as possible estimate of how many samples we will produce""" return 1 + len(self.p_vn)
[ "numpy.random.RandomState" ]
[((2112, 2143), 'numpy.random.RandomState', 'np.random.RandomState', (['rng_seed'], {}), '(rng_seed)\n', (2133, 2143), True, 'import numpy as np\n')]
#!/usr/bin/env python """Test that GoSubDag contains ancestors from only the user-specified relationships""" # tests/test_gosubdag_relationships_i126.py # goatools/gosubdag/gosubdag.py # goatools/gosubdag/godag_rcnt.py # goatools/gosubdag/godag_rcnt_init.py # goatools/godag/go_tasks.py # goatools/obo_parser.py from __future__ import print_function __copyright__ = "Copyright (C) 2016-2019, <NAME>, <NAME>, All rights reserved." from os.path import join from os import system import sys ## import timeit ## import datetime import collections as cx from goatools.base import get_godag from goatools.godag.consts import RELATIONSHIP_SET from goatools.gosubdag.gosubdag import GoSubDag from goatools.test_data.wr_subobo import WrSubObo from tests.utils import REPO # pylint: disable=line-too-long,unused-variable def test_gosubdag_relationships(wr_new_obo_subset=False): """Test that GoSubDag contains ancestors from only the user-specified relationships""" # Leaf GO: viral triggering of virus induced gene silencing goid_chosen = 'GO:0060150' # Load GODag with all relationships fin_obo = join(REPO, "tests/data/i126/viral_gene_silence.obo") # "go-basic.obo") godag_r0 = get_godag(fin_obo, loading_bar=None) godag_r1 = get_godag(fin_obo, loading_bar=None, optional_attrs=['relationship']) file_sub = join(REPO, "tests/data/viral_gene_silence.obo") # Get all GO terms above this low-level GO ID using all relationships if wr_new_obo_subset: _wr_sub_obo(file_sub, goid_chosen, godag_r1, fin_obo) # RELATIONSHIPS: None gosubdag_r0 = GoSubDag(set([goid_chosen]), godag_r0) assert len(gosubdag_r0.rcntobj.go2ancestors[goid_chosen]) == 12 # RELATIONSHIPS: ALL gosubdag_r1 = GoSubDag(set([goid_chosen]), godag_r1, relationships=True) assert gosubdag_r1.relationships == RELATIONSHIP_SET #### set(['part_of', 'regulates', 'positively_regulates', 'negatively_regulates']) assert len(gosubdag_r1.rcntobj.go2ancestors[goid_chosen]) == 50 # RELATIONSHIPS: part_of gosubdag_rp = GoSubDag(set([goid_chosen]), godag_r1, relationships={'part_of'}) assert gosubdag_rp.relationships == set(['part_of']) rp_par = gosubdag_rp.rcntobj.go2ancestors[goid_chosen] assert 'GO:0016441' not in gosubdag_rp.go2obj, '**FATAL: REGULATION TERM GoSubDag(part_of) go2obj' assert 'GO:0016441' not in rp_par, '**FATAL: REGULATION TERM GoSubDag(part_of) go2parents' # RELATIONSHIPS: regulates gosubdag_rr = GoSubDag(set([goid_chosen]), godag_r1, relationships={'regulates'}) assert gosubdag_rr.relationships == set(['regulates']) rp_par = gosubdag_rr.rcntobj.go2ancestors[goid_chosen] # assert 'GO:0016441' not in gosubdag_rp.go2obj, '**FATAL: REGULATION TERM GoSubDag(part_of) go2obj' # assert 'GO:0016441' not in rp_par, '**FATAL: REGULATION TERM GoSubDag(part_of) go2parents' # RELATIONSHIPS: positively_regulates gosubdag_rp = GoSubDag(set([goid_chosen]), godag_r1, relationships={'positively_regulates'}) assert gosubdag_rp.relationships == set(['positively_regulates']) rp_par = gosubdag_rp.rcntobj.go2ancestors[goid_chosen] # RELATIONSHIPS: negatively_regulates gosubdag_rn = GoSubDag(set([goid_chosen]), godag_r1, relationships={'negatively_regulates'}) assert gosubdag_rn.relationships == set(['negatively_regulates']) rp_par = gosubdag_rn.rcntobj.go2ancestors[goid_chosen] # RELATIONSHIPS: regulates positively_regulates negatively_regulates regs = {'positively_regulates', 'negatively_regulates'} gosubdag_rnp = GoSubDag(set([goid_chosen]), godag_r1, relationships=regs) assert gosubdag_rnp.relationships == regs rp_par = gosubdag_rnp.rcntobj.go2ancestors[goid_chosen] _run_baseline_r0(gosubdag_r0, gosubdag_r1) # BASELINE r1: Test that GOTerm.get_all_upper() is the same as GoSubDag ancestors for goid, term in gosubdag_r1.go2obj.items(): ancestors_r1 = gosubdag_r1.rcntobj.go2ancestors.get(goid, set()) assert ancestors_r1 == term.get_all_upper() #### # Test that #### gosubdag_rp = GoSubDag(set([goid_chosen]), godag_r1, relationships={'part_of'}, prt=sys.stdout) #### for goid, dag_term in godag_r1.items(): #### if goid in gosubdag_r1.rcntobj.go2ancestors: #### ancestors = gosubdag_rp.rcntobj.go2ancestors[goid] #### sub_term = gosubdag_rp.go2obj[goid] #### reldict = sub_term.relationship.items() #### # print(goid) #### # print('DAG', sorted(dag_term.get_all_upper())) #### # print('SUB', sorted(sub_term.get_all_upper())) #### # print('ANS', sorted(ancestors)) #### # for rel, pterms in cx.OrderedDict(reldict).items(): #### # print(rel, ' '.join(sorted(o.id for o in pterms))) #### # print('') #### print(gosubdag_rp.relationships) #### #assert 'GO:0016441' not in gosubdag_rp.rcntobj.go2ancestors['GO:0060150'] #### assert 'GO:0016441' in gosubdag_r1.go2nt #### assert 'GO:0010467' in gosubdag_r1.go2nt def _run_baseline_r0(gosubdag_r0, gosubdag_r1): """BASELINE r0: Test that GOTerm.get_all_parents() == GoSubDag ancestors""" r1_ancestors_more = set() # Loop through r0 GO IDs for goid, term in gosubdag_r0.go2obj.items(): ancestors_r0 = gosubdag_r0.rcntobj.go2ancestors.get(goid, set()) ancestors_r1 = gosubdag_r1.rcntobj.go2ancestors.get(goid, set()) assert ancestors_r0 == term.get_all_parents() assert ancestors_r0.issubset(ancestors_r1) if len(ancestors_r0) < len(ancestors_r1): r1_ancestors_more.add(goid) assert r1_ancestors_more print('{N} r1 GO terms in GoSubDag have more ancestors than r0'.format( N=len(r1_ancestors_more))) # scripts/go_plot.py --go_file=i126_goids_baseline.txt -r --obo=tests/data/viral_gene_silence.obo -o i126_goids_baseline.png fout_gos = 'i126_goids_baseline.txt' with open(fout_gos, 'w') as prt: prt.write('#cafffb {SRC_GO}\n'.format(SRC_GO=next(iter(gosubdag_r0.go_sources)))) _prt_goterms(r1_ancestors_more, gosubdag_r1.go2nt, prt) print(' WROTE: {GOs}'.format(GOs=fout_gos)) def _prt_goterms(goids, go2nt, prt): """Print details of GO terms""" fmt = ('#ffd1df {GO} # {NS} {dcnt:5} {childcnt:3} ' 'L{level:02} D{depth:02} R{reldepth:02} {D1:5} {REL} {rel} {GO_name}\n') nts = [nt for go, nt in go2nt.items() if go in goids] for ntd in sorted(nts, key=lambda nt: nt.dcnt, reverse=True): prt.write(fmt.format(**ntd._asdict())) #cafffb GO:0060150 #ffd1df GO:0050794 # BP 8278 64 D03 R03 regulation of cellular process #ffd1df GO:0019222 # BP 3382 20 D03 R03 regulation of metabolic process #ffd1df GO:0048522 # BP 2417 65 D04 R04 positive regulation of cellular process #ffd1df GO:0060255 # BP 2130 20 D04 R04 regulation of macromolecule metabolic process #ffd1df GO:0010468 # BP 862 20 D05 R05 regulation of gene expression #ffd1df GO:0060968 # BP 53 4 D06 R08 regulation of gene silencing #ffd1df GO:0060147 # BP 24 4 D07 R09 regulation of posttranscriptional gene silencing #ffd1df GO:0060148 # BP 8 3 D08 R10 positive regulation of posttranscriptional gene silencing #ffd1df GO:0060150 # BP 0 0 D09 R11 viral triggering of virus induced gene silencing # - Generate GO DAG subset for this test --------------------------------------------------------- def _wr_sub_obo(fout_obo, goid_chosen, godag_r1, fin_obo): """Sub plot used for visualizing this test file's elements""" # Load GO-DAG: Load optional 'relationship' godag = {go:o for go, o in godag_r1.items() if go == o.item_id} _prt_rtel_ctr(godag) rels_all = set(['part_of', 'regulates', 'negatively_regulates', 'positively_regulates']) goids_leaf_all = set(o.id for o in godag.values() if not o.children) gosubdag_r1 = GoSubDag(goids_leaf_all, godag, relationships=True, prt=sys.stdout) goids_src_r1_all = _get_leafs_w_relsinhier(rels_all, gosubdag_r1) gosubdag_r1.prt_goids(goids_src_r1_all) # Pick one of the GO IDs as a source for the subset DAG gosubdag_viral = GoSubDag({goid_chosen}, godag, relationships=True, prt=sys.stdout) goids_viral = set(gosubdag_viral.go2obj.keys()) with open(fout_obo, 'w') as prt: WrSubObo.prt_goterms(fin_obo, goids_viral, prt) print('{N} GO IDs WROTE: {OBO}'.format(N=len(goids_viral), OBO=fout_obo)) # Plot obo subset pat_r1 = '{REPO}/scripts/go_plot.py {GO} -o {PNG} -r' pat_r0 = '{REPO}/scripts/go_plot.py {GO} -o {PNG}' system(pat_r1.format(REPO=REPO, PNG=fout_obo.replace('.obo', '_r1.png'), GO=goid_chosen)) system(pat_r0.format(REPO=REPO, PNG=fout_obo.replace('.obo', '_r0.png'), GO=goid_chosen)) def _get_leafs_w_relsinhier(rels_usr, gosubdag_r1): """Get GO IDs that have all relationships up their hierarchy.""" gos_r1_relsinhier = set() goids_leaf = set(o.id for o in gosubdag_r1.go2obj.values() if not o.children) for goid in goids_leaf: go_parents = gosubdag_r1.rcntobj.go2ancestors[goid] rels = set(k for p in go_parents for k in gosubdag_r1.go2obj[p].relationship.keys()) if rels == rels_usr: gos_r1_relsinhier.add(goid) return gos_r1_relsinhier def _prt_rtel_ctr(godag): """Print the count of relationships.""" objs_r1_all = set(o for o in godag.values() if o.relationship.keys()) octr = cx.Counter(k for o in objs_r1_all for k in o.relationship.keys()) # objs_r1_sub = set(o.id for o in objs_r1_all if not rels_all.isdisjoint(o.relationship.keys())) print('{N:6,} GO Terms have relationships.'.format(N=len(objs_r1_all))) for key, cnt in octr.most_common(): print('{N:6,} {REL}'.format(N=cnt, REL=key)) # def _chk_child_parent(go2o_dag, go2o_sub): # """Check the differences between the two go2obb dicts.""" # pass if __name__ == '__main__': test_gosubdag_relationships(len(sys.argv) != 1) # Copyright (C) 2016-2019, <NAME>, <NAME>, All rights reserved.
[ "goatools.test_data.wr_subobo.WrSubObo.prt_goterms", "goatools.gosubdag.gosubdag.GoSubDag", "os.path.join", "goatools.base.get_godag" ]
[((1116, 1168), 'os.path.join', 'join', (['REPO', '"""tests/data/i126/viral_gene_silence.obo"""'], {}), "(REPO, 'tests/data/i126/viral_gene_silence.obo')\n", (1120, 1168), False, 'from os.path import join\n'), ((1203, 1239), 'goatools.base.get_godag', 'get_godag', (['fin_obo'], {'loading_bar': 'None'}), '(fin_obo, loading_bar=None)\n', (1212, 1239), False, 'from goatools.base import get_godag\n'), ((1255, 1324), 'goatools.base.get_godag', 'get_godag', (['fin_obo'], {'loading_bar': 'None', 'optional_attrs': "['relationship']"}), "(fin_obo, loading_bar=None, optional_attrs=['relationship'])\n", (1264, 1324), False, 'from goatools.base import get_godag\n'), ((1341, 1388), 'os.path.join', 'join', (['REPO', '"""tests/data/viral_gene_silence.obo"""'], {}), "(REPO, 'tests/data/viral_gene_silence.obo')\n", (1345, 1388), False, 'from os.path import join\n'), ((7913, 7980), 'goatools.gosubdag.gosubdag.GoSubDag', 'GoSubDag', (['goids_leaf_all', 'godag'], {'relationships': '(True)', 'prt': 'sys.stdout'}), '(goids_leaf_all, godag, relationships=True, prt=sys.stdout)\n', (7921, 7980), False, 'from goatools.gosubdag.gosubdag import GoSubDag\n'), ((8176, 8242), 'goatools.gosubdag.gosubdag.GoSubDag', 'GoSubDag', (['{goid_chosen}', 'godag'], {'relationships': '(True)', 'prt': 'sys.stdout'}), '({goid_chosen}, godag, relationships=True, prt=sys.stdout)\n', (8184, 8242), False, 'from goatools.gosubdag.gosubdag import GoSubDag\n'), ((8340, 8387), 'goatools.test_data.wr_subobo.WrSubObo.prt_goterms', 'WrSubObo.prt_goterms', (['fin_obo', 'goids_viral', 'prt'], {}), '(fin_obo, goids_viral, prt)\n', (8360, 8387), False, 'from goatools.test_data.wr_subobo import WrSubObo\n')]
from .wizards import Wizard from .data import Dataset, DatasetInfo, AutomaticAnnotator from .simulation import Renderer, DatasetRenderer, RobotLookupCreator from .prediction import Predictor, LiveCamera from .paths import Paths from .prediction.analysis import Grapher from .prediction.synthetic import SyntheticPredictor from .textfile_integration import JSONCoupling from .projection import Intrinsics import logging logging.basicConfig(level=logging.INFO) Paths().create()
[ "logging.basicConfig" ]
[((420, 459), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (439, 459), False, 'import logging\n')]
""" Functions to rotate a point by a known euler pole. """ import numpy as np from . import fault_vector_functions def point_rotation_by_Euler_Pole(Point, Euler_Pole): """ Compute the velocity of rotation of a point about an Euler pole on a spherical earth. This function is useful for computing the velocity of a stationary point in one reference frame with respect to another reference frame. The resulting velocity is assumed to be horizontal. :param Point: [longitude, latitude] of observation point, in degrees :type Point: array_like :param Euler_Pole: [longitude, latitude, omega] of Euler Pole, in degrees and degrees/Ma :type Euler_Pole: array_like :returns: [e_velocity, n_velocity, u_velocity] of point in rotated reference frame, in mm/yr :rtype: array_like """ R_point = get_r(Point[0], Point[1]); R_ep = get_r(Euler_Pole[0], Euler_Pole[1]); unit_ep = fault_vector_functions.get_unit_vector(R_ep); omega_raw = degma2radyr(Euler_Pole[2]); omega = omega_raw * unit_ep; # in radians per year velocity_of_transformation = np.cross(omega, R_point); # velocity at the station from the euler pole rotation velocity_of_transformation = velocity_of_transformation * 1000; # mm/yr in x, y, z xvel = velocity_of_transformation[0]; yvel = velocity_of_transformation[1]; zvel = velocity_of_transformation[2]; [east_transform, north_transform] = xyz2en(xvel, yvel, zvel, Point[0]); up_transform = 0; # by definition the velocity will be horizontal return [east_transform, north_transform, up_transform]; def degma2radyr(omega): """Convert omega from degrees/Ma to radians/yr""" radyr = omega * (np.pi / 180) * 1e-6; return radyr; def get_r(lon, lat): """ Vector from center of earth to the point in question assuming a spherical earth. The XYZ coordinate system has x=0 at longitude=0 and z=0 at the equator with positive to the north. :param lon: Longitude of initial point, in degrees :type lon: float :param lat: Latitude of initial point, in degrees :type lat: float :returns: [X, Y, Z] coordinates in meters. :rtype: [float, float, float] """ R_fixed = 6378000; # In meters R_equatorial_disk = R_fixed * np.cos(np.deg2rad(lat)); T_equatorial_disk = np.deg2rad(lon); X = R_equatorial_disk * np.cos(T_equatorial_disk); Y = R_equatorial_disk * np.sin(T_equatorial_disk); Z = np.sqrt(R_fixed * R_fixed - X * X - Y * Y); if lat < 0: Z = Z * -1; return [X, Y, Z]; def get_unit_east(lon): """ Unit east vector from a point on earth's surface in XYZ coordinates. The XYZ coordinate system has x=0 at longitude=0 and z=0 at the equator with positive to the north. The return value of Z is zero for eastward motion. :param lon: Longitude of initial point, in degrees :type lon: float :returns: [X, Y, Z] components :rtype: [float, float, float] """ T_equatorial_disk = np.deg2rad(lon); x = -np.sin(T_equatorial_disk); y = np.cos(T_equatorial_disk); return [x, y, 0]; def xyz2en(x, y, z, lon): """ Convert velocities from xyz to horizontal east and north, assuming spherical earth and no vertical motion. We take the dot product of the velocity with the unit east vector and the north component is the remainder. A more complex function xyz2enu(X, Y, Z, lon, lat) could be written later. :param x: x velocity at observation point :type x: float :param y: y velocity at observation point :type y: float :param z: z velocity at observation point :type z: float :param lon: Longitude of observation point, in degrees :type lon: float :returns: [east_vel, north_vel] :rtype: [float, float] """ vel_vector = [x, y, z]; unit_east = get_unit_east(lon); e = np.dot(vel_vector, unit_east); n = np.sqrt(x * x + y * y + z * z - e * e); if z < 0: n = n * -1; return [e, n]; if __name__ == "__main__": Euler_Pole = [69.9, -12.3, 0.55]; # Lon, Lat, Deg/Ma Point = [-124, 40.5]; # Lon, Lat [east_transform, north_transform, up_transform] = point_rotation_by_Euler_Pole(Point, Euler_Pole); total = np.sqrt(east_transform * east_transform + north_transform * north_transform); print("%.2f east, %.2f north, %.2f up, %.2f mm/yr total" % (east_transform, north_transform, up_transform, total));
[ "numpy.deg2rad", "numpy.cross", "numpy.sin", "numpy.cos", "numpy.dot", "numpy.sqrt" ]
[((1109, 1133), 'numpy.cross', 'np.cross', (['omega', 'R_point'], {}), '(omega, R_point)\n', (1117, 1133), True, 'import numpy as np\n'), ((2334, 2349), 'numpy.deg2rad', 'np.deg2rad', (['lon'], {}), '(lon)\n', (2344, 2349), True, 'import numpy as np\n'), ((2469, 2511), 'numpy.sqrt', 'np.sqrt', (['(R_fixed * R_fixed - X * X - Y * Y)'], {}), '(R_fixed * R_fixed - X * X - Y * Y)\n', (2476, 2511), True, 'import numpy as np\n'), ((3015, 3030), 'numpy.deg2rad', 'np.deg2rad', (['lon'], {}), '(lon)\n', (3025, 3030), True, 'import numpy as np\n'), ((3076, 3101), 'numpy.cos', 'np.cos', (['T_equatorial_disk'], {}), '(T_equatorial_disk)\n', (3082, 3101), True, 'import numpy as np\n'), ((3882, 3911), 'numpy.dot', 'np.dot', (['vel_vector', 'unit_east'], {}), '(vel_vector, unit_east)\n', (3888, 3911), True, 'import numpy as np\n'), ((3921, 3959), 'numpy.sqrt', 'np.sqrt', (['(x * x + y * y + z * z - e * e)'], {}), '(x * x + y * y + z * z - e * e)\n', (3928, 3959), True, 'import numpy as np\n'), ((4254, 4330), 'numpy.sqrt', 'np.sqrt', (['(east_transform * east_transform + north_transform * north_transform)'], {}), '(east_transform * east_transform + north_transform * north_transform)\n', (4261, 4330), True, 'import numpy as np\n'), ((2379, 2404), 'numpy.cos', 'np.cos', (['T_equatorial_disk'], {}), '(T_equatorial_disk)\n', (2385, 2404), True, 'import numpy as np\n'), ((2434, 2459), 'numpy.sin', 'np.sin', (['T_equatorial_disk'], {}), '(T_equatorial_disk)\n', (2440, 2459), True, 'import numpy as np\n'), ((3041, 3066), 'numpy.sin', 'np.sin', (['T_equatorial_disk'], {}), '(T_equatorial_disk)\n', (3047, 3066), True, 'import numpy as np\n'), ((2292, 2307), 'numpy.deg2rad', 'np.deg2rad', (['lat'], {}), '(lat)\n', (2302, 2307), True, 'import numpy as np\n')]
# Variables base_list = 'List_1' # this is the base list, each item in this list is checked for a match in the other list_2 = 'List_2' # List_2 is the name of the list in the excel file xlfile = 'DATA_IN.xlsx' # Importing Libs import pandas as pd import numpy as np # Smart Stuff df_0 = pd.read_excel(xlfile, dtype=str) #rename columns df_0.rename(columns={base_list: "base_list", list_2: "list_2"} , inplace=True) df = pd.DataFrame() #create new df df['base_list'] = df_0['base_list'] # create new columns df['Exact Matches'] = '' df['Words Matched'] = '' # unique list of words in list 2 list_2_words = ' '.join([i for i in df_0['list_2']]).split() list_2_words = list(dict.fromkeys(list_2_words)) # loop though keys for index, row in df.iterrows(): current_key = row['base_list'] current_key = str(current_key) count = (df_0.list_2 == current_key).sum() row['Exact Matches']= count current_key_list = current_key.split(' ') for item in current_key_list: if item in list_2_words: row['Words Matched'] = row['Words Matched'] + '|' + item # Dump Excel Sheet df.to_excel('DATA_OUT.xlsx')
[ "pandas.read_excel", "pandas.DataFrame" ]
[((289, 321), 'pandas.read_excel', 'pd.read_excel', (['xlfile'], {'dtype': 'str'}), '(xlfile, dtype=str)\n', (302, 321), True, 'import pandas as pd\n'), ((426, 440), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (438, 440), True, 'import pandas as pd\n')]
""" Common functions and parameters amongst the experiments. """ from edo.distributions import Uniform from sklearn.preprocessing import MinMaxScaler def scale_dataframe(individual): """ Scale the individual's dataframe to the unit square for calculating fitness. """ original = individual.dataframe.copy() dataframe = MinMaxScaler().fit_transform(original) return dataframe size = 100 row_limits = [50, 100] col_limits = [2, 2] max_iter = 100 best_prop = 0.1 mutation_prob = 0.01 Uniform.param_limits["bounds"] = [0, 1] distributions = [Uniform] root = "../data/"
[ "sklearn.preprocessing.MinMaxScaler" ]
[((339, 353), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {}), '()\n', (351, 353), False, 'from sklearn.preprocessing import MinMaxScaler\n')]
import numpy as np import random import os, sys from scipy import ndimage import healpy as hp from astropy.io import fits import matplotlib.pyplot as plt from scipy.signal import savgol_filter from astropy.io import fits from importlib import reload from pycs.misc.cosmostat_init import * from pycs.misc.mr_prog import * def make_healpix_map(ra, dec, weights, nside): pixels= hp.ang2pix(nside,theta = 0.5*np.pi - np.deg2rad(dec), phi = np.deg2rad(ra)) bincount = np.bincount(pixels, minlength = hp.nside2npix(nside)) bincount_weighted = np.bincount(pixels, minlength = hp.nside2npix(nside), weights=weights) return np.where(bincount>0.5, bincount_weighted/bincount, hp.UNSEEN) def get_bincount(ra, dec, nside): pixels= hp.ang2pix(nside,theta = 0.5*np.pi - np.deg2rad(dec), phi = np.deg2rad(ra)) bincount = np.bincount(pixels, minlength = hp.nside2npix(nside)) return bincount def mrs_read(FN): return hp.read_map(FN) def mrs_write(FN, mapin): hp.write_map(FN, mapin, overwrite=True) def rims(FN): return hp.read_map(FN) def mrs_resize(mapin, nsideout): k = hp.ud_grade(mapin, nsideout) return k # smoothing with sigma in arcmin def smooth(map, sigma): s= hp.smoothing(mapin, sigma=sigma/(360.*60.) * (np.pi*2),pol=False) # lut='rainbow' # 'inferno' 'gist_stern' def tvs(mapin,min=None,max=None,title=None,sigma=None,lut=None): if sigma is None: hp.mollview(mapin,max=max,min=min, title=title,cmap=lut) else: s= hp.smoothing(mapin, sigma=sigma/(360.*60.) * (np.pi*2),pol=False) hp.mollview(s,max=max,min=min, title=title,cmap=lut) hp.mollview def get_nside(Npix): return hp.npix2nside(Npix) def gnside(data): npix = data.shape[0] nside = hp.npix2nside(npix) return nside def pixel_size(nside): # Return the pixel size of a healpix map in arc minutes # SKI_SURFACE IN SQUARE DEGREES = 4. * !PI * (360. / (2*!PI))^2 = 41253 psize = 41253. / (float(nside)**2.*12.) * 60.**2. return np.sqrt(psize) def l2amin(l): a = 1. / l a = a * 180.* 60. / np.pi return a def amin2l(a): ar = a / (180.* 60.) * np.pi l = 1. / ar return l def g2eb(g1,g2): nside = gnside(g1) (ae,ab) = hp.map2alm_spin((g1,g2), 2) ke= hp.alm2map(ae, nside, pol=False) kb= hp.alm2map(ab, nside, pol=False) return ke,kb def g2k(g1,g2): nside = gnside(g1) (ae,ab) = hp.map2alm_spin((g1,g2), 2) ke= hp.alm2map(ae, nside, pol=False) return ke def k2g(ke): nside = gnside(ke) ae = hp.map2alm(ke, 1,pol=False) ab = np.copy(ae) * 0. (g1,g2) = hp.alm2map_spin((ae,ab), 2, lmax=lmax) return g1,g2 # it seems that hp.alm2map_spin crashes. def eb2g(ke,kb): nside = gnside(ke) lmax=nside*3 - 1 ae = hp.map2alm(ke, 1, pol=False) ab = hp.map2alm(kb, 1, pol=False) (g1,g2) = hp.alm2map_spin( (ae,ab), nside, 2, lmax) return g1,g2 def mrs_prog(data, prog="mrs_powspec", opt=None, path='./', remove_files=True, verbose=False, FileOut=None, InputFormatisHealpix=True, OutputFormatisHealpix=True): # Create a unique string using the current date and time. # print('mr_filter ', opt) unique_string = datetime.now().strftime('%Y.%m.%d_%H.%M.%S') result=0 # Set the ouput file names. file_name = path + 'mr_temp_' + unique_string file_fits = file_name + '.fits' if FileOut is not None: file_out = FileOut else: file_out = file_name + '_out.fits' # Write the input data to a fits file. if InputFormatisHealpix: mrs_write(file_fits, data) else: writefits(file_fits, data) # print("PROG: ", prog) cmd = prog if isinstance(opt, type(None)): optF=' ' else: optF= opt if verbose: optF = optF + " -v " cmd = cmd + " " + optF + " " + file_fits + " " + file_out if verbose: print ('CMD = ', cmd) args = shlex.split(cmd) # print('args ', args) call(args) # Retrieve wavelet filtered data. if OutputFormatisHealpix: result = mrs_read(file_out) else: result = readfits(file_out) # Return the mr_transform results (and the output file names). if remove_files: remove(file_fits) remove(file_out) return result else: return result def mrs_powspec(map, verbose=False): p = mrs_prog(map, prog="mrs_powspec", verbose=verbose, OutputFormatisHealpix=False) return p def mrs_smooth(map, opt=None, verbose=False): p = mrs_prog(map, prog="mrs_smooth", verbose=verbose, opt=opt, OutputFormatisHealpix=True) return p def mrs_almtrans(map, lmax=None, opt=None, verbose=False): optParam = ' -T ' if opt is not None: optParam = ' -T ' + opt if lmax is not None: optParam = ' -l ' + str(lmax) + optParam p = mrs_prog(map, prog="mrs_almtrans", verbose=verbose, opt=optParam, OutputFormatisHealpix=False) return p def mrs_almrec(map, opt=None, verbose=False,nside=None): optParam = ' -T ' if opt is not None: optParam = ' -T ' + opt if nside is not None: optParam = ' -n ' + str(nside) + optParam p = mrs_prog(map, prog="mrs_almrec", verbose=verbose, opt=optParam, InputFormatisHealpix=False, OutputFormatisHealpix=True) return p def tol(map,lmax_amin,amin=False): ns= gnside(map) lmax=lmax_amin if amin is True: lmax=amin2l(lmax_amin) a = mrs_almtrans(map, lmax=lmax) b = mrs_almrec(a, nside=ns) return b def mrs_uwttrans(map, lmax=None, opt=None, verbose=False, path='./',progpath=None): optParam = ' ' if opt is not None: optParam = ' ' + opt if lmax is not None: optParam = ' -l ' + str(lmax) + optParam if progpath is None: prog="mrs_uwttrans" else: prog=progpath+"mrs_uwttrans" p = mrs_prog(map, prog=prog, verbose=verbose, opt=optParam, OutputFormatisHealpix=False,path=path) return p def mrs_uwtrecons(Tmap, lmax=None, opt=None, verbose=False, path='./',progpath=None): optParam = ' ' if opt is not None: optParam = ' ' + opt if lmax is not None: optParam = ' -l ' + str(lmax) + optParam if progpath is None: prog="mrs_uwttrans" else: prog=progpath+"mrs_uwttrans -r " p = mrs_prog(Tmap, prog=prog, verbose=verbose, opt=optParam, InputFormatisHealpix=False, OutputFormatisHealpix=True,path=path) return p
[ "healpy.write_map", "healpy.alm2map", "healpy.mollview", "numpy.copy", "healpy.map2alm", "numpy.deg2rad", "healpy.ud_grade", "healpy.nside2npix", "numpy.where", "healpy.npix2nside", "healpy.map2alm_spin", "healpy.alm2map_spin", "healpy.smoothing", "healpy.read_map", "numpy.sqrt" ]
[((634, 699), 'numpy.where', 'np.where', (['(bincount > 0.5)', '(bincount_weighted / bincount)', 'hp.UNSEEN'], {}), '(bincount > 0.5, bincount_weighted / bincount, hp.UNSEEN)\n', (642, 699), True, 'import numpy as np\n'), ((938, 953), 'healpy.read_map', 'hp.read_map', (['FN'], {}), '(FN)\n', (949, 953), True, 'import healpy as hp\n'), ((985, 1024), 'healpy.write_map', 'hp.write_map', (['FN', 'mapin'], {'overwrite': '(True)'}), '(FN, mapin, overwrite=True)\n', (997, 1024), True, 'import healpy as hp\n'), ((1051, 1066), 'healpy.read_map', 'hp.read_map', (['FN'], {}), '(FN)\n', (1062, 1066), True, 'import healpy as hp\n'), ((1109, 1137), 'healpy.ud_grade', 'hp.ud_grade', (['mapin', 'nsideout'], {}), '(mapin, nsideout)\n', (1120, 1137), True, 'import healpy as hp\n'), ((1216, 1290), 'healpy.smoothing', 'hp.smoothing', (['mapin'], {'sigma': '(sigma / (360.0 * 60.0) * (np.pi * 2))', 'pol': '(False)'}), '(mapin, sigma=sigma / (360.0 * 60.0) * (np.pi * 2), pol=False)\n', (1228, 1290), True, 'import healpy as hp\n'), ((1685, 1704), 'healpy.npix2nside', 'hp.npix2nside', (['Npix'], {}), '(Npix)\n', (1698, 1704), True, 'import healpy as hp\n'), ((1761, 1780), 'healpy.npix2nside', 'hp.npix2nside', (['npix'], {}), '(npix)\n', (1774, 1780), True, 'import healpy as hp\n'), ((2024, 2038), 'numpy.sqrt', 'np.sqrt', (['psize'], {}), '(psize)\n', (2031, 2038), True, 'import numpy as np\n'), ((2249, 2277), 'healpy.map2alm_spin', 'hp.map2alm_spin', (['(g1, g2)', '(2)'], {}), '((g1, g2), 2)\n', (2264, 2277), True, 'import healpy as hp\n'), ((2285, 2317), 'healpy.alm2map', 'hp.alm2map', (['ae', 'nside'], {'pol': '(False)'}), '(ae, nside, pol=False)\n', (2295, 2317), True, 'import healpy as hp\n'), ((2326, 2358), 'healpy.alm2map', 'hp.alm2map', (['ab', 'nside'], {'pol': '(False)'}), '(ab, nside, pol=False)\n', (2336, 2358), True, 'import healpy as hp\n'), ((2430, 2458), 'healpy.map2alm_spin', 'hp.map2alm_spin', (['(g1, g2)', '(2)'], {}), '((g1, g2), 2)\n', (2445, 2458), True, 'import healpy as hp\n'), ((2466, 2498), 'healpy.alm2map', 'hp.alm2map', (['ae', 'nside'], {'pol': '(False)'}), '(ae, nside, pol=False)\n', (2476, 2498), True, 'import healpy as hp\n'), ((2559, 2587), 'healpy.map2alm', 'hp.map2alm', (['ke', '(1)'], {'pol': '(False)'}), '(ke, 1, pol=False)\n', (2569, 2587), True, 'import healpy as hp\n'), ((2627, 2666), 'healpy.alm2map_spin', 'hp.alm2map_spin', (['(ae, ab)', '(2)'], {'lmax': 'lmax'}), '((ae, ab), 2, lmax=lmax)\n', (2642, 2666), True, 'import healpy as hp\n'), ((2795, 2823), 'healpy.map2alm', 'hp.map2alm', (['ke', '(1)'], {'pol': '(False)'}), '(ke, 1, pol=False)\n', (2805, 2823), True, 'import healpy as hp\n'), ((2833, 2861), 'healpy.map2alm', 'hp.map2alm', (['kb', '(1)'], {'pol': '(False)'}), '(kb, 1, pol=False)\n', (2843, 2861), True, 'import healpy as hp\n'), ((2876, 2917), 'healpy.alm2map_spin', 'hp.alm2map_spin', (['(ae, ab)', 'nside', '(2)', 'lmax'], {}), '((ae, ab), nside, 2, lmax)\n', (2891, 2917), True, 'import healpy as hp\n'), ((1430, 1489), 'healpy.mollview', 'hp.mollview', (['mapin'], {'max': 'max', 'min': 'min', 'title': 'title', 'cmap': 'lut'}), '(mapin, max=max, min=min, title=title, cmap=lut)\n', (1441, 1489), True, 'import healpy as hp\n'), ((1507, 1581), 'healpy.smoothing', 'hp.smoothing', (['mapin'], {'sigma': '(sigma / (360.0 * 60.0) * (np.pi * 2))', 'pol': '(False)'}), '(mapin, sigma=sigma / (360.0 * 60.0) * (np.pi * 2), pol=False)\n', (1519, 1581), True, 'import healpy as hp\n'), ((1580, 1635), 'healpy.mollview', 'hp.mollview', (['s'], {'max': 'max', 'min': 'min', 'title': 'title', 'cmap': 'lut'}), '(s, max=max, min=min, title=title, cmap=lut)\n', (1591, 1635), True, 'import healpy as hp\n'), ((2596, 2607), 'numpy.copy', 'np.copy', (['ae'], {}), '(ae)\n', (2603, 2607), True, 'import numpy as np\n'), ((443, 457), 'numpy.deg2rad', 'np.deg2rad', (['ra'], {}), '(ra)\n', (453, 457), True, 'import numpy as np\n'), ((506, 526), 'healpy.nside2npix', 'hp.nside2npix', (['nside'], {}), '(nside)\n', (519, 526), True, 'import healpy as hp\n'), ((584, 604), 'healpy.nside2npix', 'hp.nside2npix', (['nside'], {}), '(nside)\n', (597, 604), True, 'import healpy as hp\n'), ((803, 817), 'numpy.deg2rad', 'np.deg2rad', (['ra'], {}), '(ra)\n', (813, 817), True, 'import numpy as np\n'), ((866, 886), 'healpy.nside2npix', 'hp.nside2npix', (['nside'], {}), '(nside)\n', (879, 886), True, 'import healpy as hp\n'), ((420, 435), 'numpy.deg2rad', 'np.deg2rad', (['dec'], {}), '(dec)\n', (430, 435), True, 'import numpy as np\n'), ((780, 795), 'numpy.deg2rad', 'np.deg2rad', (['dec'], {}), '(dec)\n', (790, 795), True, 'import numpy as np\n')]
"""Contract test cases for cases for all organization catalogs.""" import json import pytest import requests from tests import responses @pytest.mark.contract @pytest.mark.docker def test_all_catalogs(docker_service: str) -> None: """Should return the all_catalogs response.""" url = f"{docker_service}/organizationcatalogs" response = requests.get(url) assert response.status_code == 200 assert response.json() == json.loads(responses.all_catalogs) @pytest.mark.contract @pytest.mark.docker def test_all_catalogs_has_no_cache_headers(docker_service: str) -> None: """Should include no-cache headers.""" url = f"{docker_service}/organizationcatalogs" response = requests.get(url) assert response.status_code == 200 assert ( response.headers.get("Cache-Control") == "no-cache, no-store, max-age=0, must-revalidate" ) @pytest.mark.contract @pytest.mark.docker def test_all_nap_catalogs(docker_service: str) -> None: """Should return the all_nap response.""" url = f"{docker_service}/organizationcatalogs?filter=transportportal" response = requests.get(url) assert response.status_code == 200 assert response.json() == json.loads(responses.all_nap) @pytest.mark.contract @pytest.mark.docker def test_invalid_filter(docker_service: str) -> None: """Should return 400.""" url = f"{docker_service}/organizationcatalogs?filter=invalid" response = requests.get(url) assert response.status_code == 400
[ "json.loads", "requests.get" ]
[((352, 369), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (364, 369), False, 'import requests\n'), ((701, 718), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (713, 718), False, 'import requests\n'), ((1119, 1136), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1131, 1136), False, 'import requests\n'), ((1445, 1462), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1457, 1462), False, 'import requests\n'), ((440, 474), 'json.loads', 'json.loads', (['responses.all_catalogs'], {}), '(responses.all_catalogs)\n', (450, 474), False, 'import json\n'), ((1207, 1236), 'json.loads', 'json.loads', (['responses.all_nap'], {}), '(responses.all_nap)\n', (1217, 1236), False, 'import json\n')]
""" Builds a NEXUS file form a long-table format CSV. """ import csv from pathlib import Path from collections import defaultdict BASE = Path(__file__).parents[1] / "data" with open(BASE / "ielex.csv", encoding="utf-8") as h: data = list(csv.DictReader(h)) taxa = sorted(set([row["LANGUAGE"] for row in data])) cogs = defaultdict(list) all_cogs = defaultdict(set) for row in data: cogs[row["LANGUAGE"], row["CONCEPT"]].append(row["COGNATE"]) all_cogs[row["CONCEPT"]].add(row["COGNATE"]) all_cogs = {key: sorted(value) for key, value in all_cogs.items()} charstates = [] assumptions = [] cur_idx = 1 for cog in sorted(all_cogs): value = all_cogs[cog] k = value[0].split("_")[0] charstates.append(f"{k}_ascertainment") for sub in value: charstates.append(sub) end_idx = cur_idx + len(value) assumptions.append([cog, cur_idx, end_idx]) cur_idx = end_idx + 1 matrix = {} for taxon in taxa: buf = "" for concept in sorted(all_cogs): buf += "0" # ascert cogids = all_cogs[concept] # if empty if len(cogs[taxon, concept]) == 0: buf += "?" * len(cogids) else: vec = [cogid in cogs[taxon, concept] for cogid in cogids] buf += "".join([["0", "1"][v] for v in vec]) matrix[taxon] = buf ############ taxon_len = max([len(taxon) for taxon in taxa]) nexus = "" nexus += "#NEXUS\n\n" nexus += "BEGIN DATA;\n" nexus += "\tDIMENSIONS NTAX=%i NCHAR=%i;\n" % (len(taxa), len(matrix[taxa[0]])) nexus += '\tFORMAT DATATYPE=STANDARD MISSING=? GAP=- SYMBOLS="01";' nexus += "\tCHARSTATELABELS\n" nexus += ",\n".join(["\t\t%i %s" % (idx + 1, cs) for idx, cs in enumerate(charstates)]) nexus += "\n;\n" nexus += "MATRIX\n" for taxon, vector in matrix.items(): label = taxon.ljust(taxon_len + 4) nexus += "%s %s\n" % (label, vector) nexus += ";\n" nexus += "END;\n\n" nexus += "begin assumptions;\n" for assump in assumptions: v = all_cogs[assump[0]][0].split("_")[0] nexus += "\tcharset %s = %i-%i;\n" % (v, assump[1], assump[2]) nexus += "end;\n\n" print(nexus)
[ "collections.defaultdict", "csv.DictReader", "pathlib.Path" ]
[((328, 345), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (339, 345), False, 'from collections import defaultdict\n'), ((357, 373), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (368, 373), False, 'from collections import defaultdict\n'), ((246, 263), 'csv.DictReader', 'csv.DictReader', (['h'], {}), '(h)\n', (260, 263), False, 'import csv\n'), ((139, 153), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (143, 153), False, 'from pathlib import Path\n')]
from graia.application.message.chain import MessageChain from graia.application.message.elements.internal import Plain from graia.application.message.elements.internal import At from SAGIRIBOT.basics.aio_mysql_excute import execute_sql async def get_rank(group_id: int, memberList: list) -> list: sql = "select * from dragon where groupId=%d order by count desc" % group_id lsp_rank = await execute_sql(sql) print(lsp_rank) msg = [] text = "啊嘞嘞,从启动到现在都没有人要过涩图的嘛!呜呜呜~\n人家。。。人家好寂寞的,快来找我玩嘛~" if lsp_rank == (): return [ "None", MessageChain.create([ Plain(text=text) ]) ] else: lsp_champion_count = lsp_rank[0][3] if lsp_champion_count == 0: return [ "None", MessageChain.create([ Plain(text=text) ]) ] text = "目前lsp排行榜:" msg.append(Plain(text=text)) text = "" index = 0 add_bool = False add = 0 last = -1 for i in lsp_rank: if i[3] == 0: break if i[3] == last: add += 1 add_bool = True else: if add_bool: index += add index += 1 add = 0 add_bool=False last = i[3] text += "\n%i.%-20s %3d" % (index, qq2name(memberList,i[2]), i[3]) msg.append(Plain(text=text)) return msg
[ "SAGIRIBOT.basics.aio_mysql_excute.execute_sql", "graia.application.message.elements.internal.Plain" ]
[((402, 418), 'SAGIRIBOT.basics.aio_mysql_excute.execute_sql', 'execute_sql', (['sql'], {}), '(sql)\n', (413, 418), False, 'from SAGIRIBOT.basics.aio_mysql_excute import execute_sql\n'), ((953, 969), 'graia.application.message.elements.internal.Plain', 'Plain', ([], {'text': 'text'}), '(text=text)\n', (958, 969), False, 'from graia.application.message.elements.internal import Plain\n'), ((1515, 1531), 'graia.application.message.elements.internal.Plain', 'Plain', ([], {'text': 'text'}), '(text=text)\n', (1520, 1531), False, 'from graia.application.message.elements.internal import Plain\n'), ((622, 638), 'graia.application.message.elements.internal.Plain', 'Plain', ([], {'text': 'text'}), '(text=text)\n', (627, 638), False, 'from graia.application.message.elements.internal import Plain\n'), ((857, 873), 'graia.application.message.elements.internal.Plain', 'Plain', ([], {'text': 'text'}), '(text=text)\n', (862, 873), False, 'from graia.application.message.elements.internal import Plain\n')]
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup import re import os import sys name = "django-db-queue-exports" package = "django_dbq_exports" description = "An extension to django-db-queue for monitoring long running jobs" url = "https://www.dabapps.com/" project_urls = {"Source": "https://github.com/dabapps/{}".format(name)} author = "DabApps" author_email = "<EMAIL>" license = "BSD" with open("README.md") as f: readme = f.read() with open("requirements.txt") as f: requirements = f.read().split("\n") def get_version(package): """ Return package version as listed in `__version__` in `init.py`. """ init_py = open(os.path.join(package, "__init__.py")).read() return re.search("^__version__ = ['\"]([^'\"]+)['\"]", init_py, re.MULTILINE).group( 1 ) def get_packages(package): """ Return root package and all sub-packages. """ return [ dirpath for dirpath, dirnames, filenames in os.walk(package) if os.path.exists(os.path.join(dirpath, "__init__.py")) ] def get_package_data(package): """ Return all files under the root package, that are not in a package themselves. """ walk = [ (dirpath.replace(package + os.sep, "", 1), filenames) for dirpath, dirnames, filenames in os.walk(package) if not os.path.exists(os.path.join(dirpath, "__init__.py")) ] filepaths = [] for base, filenames in walk: filepaths.extend([os.path.join(base, filename) for filename in filenames]) return {package: filepaths} setup( name=name, version=get_version(package), url=url, project_urls=project_urls, license=license, description=description, long_description=readme, long_description_content_type="text/markdown", author=author, author_email=author_email, packages=get_packages(package), package_data=get_package_data(package), install_requires=requirements, classifiers=[], include_package_data=True, zip_safe=False, options={"build": {"build_base": "tmp_build"}}, )
[ "os.walk", "re.search", "os.path.join" ]
[((736, 806), 're.search', 're.search', (['"""^__version__ = [\'"]([^\'"]+)[\'"]"""', 'init_py', 're.MULTILINE'], {}), '(\'^__version__ = [\\\'"]([^\\\'"]+)[\\\'"]\', init_py, re.MULTILINE)\n', (745, 806), False, 'import re\n'), ((994, 1010), 'os.walk', 'os.walk', (['package'], {}), '(package)\n', (1001, 1010), False, 'import os\n'), ((1336, 1352), 'os.walk', 'os.walk', (['package'], {}), '(package)\n', (1343, 1352), False, 'import os\n'), ((680, 716), 'os.path.join', 'os.path.join', (['package', '"""__init__.py"""'], {}), "(package, '__init__.py')\n", (692, 716), False, 'import os\n'), ((1037, 1073), 'os.path.join', 'os.path.join', (['dirpath', '"""__init__.py"""'], {}), "(dirpath, '__init__.py')\n", (1049, 1073), False, 'import os\n'), ((1506, 1534), 'os.path.join', 'os.path.join', (['base', 'filename'], {}), '(base, filename)\n', (1518, 1534), False, 'import os\n'), ((1383, 1419), 'os.path.join', 'os.path.join', (['dirpath', '"""__init__.py"""'], {}), "(dirpath, '__init__.py')\n", (1395, 1419), False, 'import os\n')]
import mobula.layers as L import numpy as np def test_sigmoid(): X = ((np.arange(10000) - 5000) / 1000.0).reshape((-1, 1, 1, 1)) data = L.Data(X, "data") data.reshape() l = L.Sigmoid(data) l.reshape() assert l.Y.shape == X.shape l.forward() l.dY = np.random.random(l.Y.shape) * 10 l.backward() enx = np.exp(-X) assert np.allclose(l.Y.ravel(), (1.0 / (1.0 + enx)).ravel()) assert np.allclose(l.dX.ravel(), (enx / np.square(1 + enx) * l.dY).ravel()) def test_relu(): X = ((np.arange(10000) - 5000) / 1000.0).reshape((-1, 1, 1, 1)) data = L.Data(X, "data") data.reshape() l = L.ReLU(data) l.reshape() assert l.Y.shape == X.shape l.forward() l.dY = np.random.random(l.Y.shape) * 10 l.backward() Y = np.zeros(X.shape) b = (X > 0) Y[b] = X[b] dX = np.zeros(X.shape) dX[b] = l.dY[b] ''' d = (l.dX != dX) print (l.dX[d], dX[d]) ''' assert np.allclose(l.Y.ravel(), Y.ravel()) assert np.allclose(l.dX.ravel(), dX.ravel()) def test_selu(): X = ((np.arange(10000) - 5000) / 1000.0).reshape((-1, 1, 1, 1)) data = L.Data(X, "data") data.reshape() l = L.SELU(data) y = l.eval() ty = np.zeros(X.shape) ty[X > 0] = l.scale * X[X>0] ty[X<=0] = l.scale * (l.alpha * np.exp(X[X<=0]) - l.alpha) assert np.allclose(y, ty) l.dY = np.random.random(l.Y.shape) l.backward() dX = np.zeros(X.shape) dX[X > 0] = l.scale dX[X <= 0] = l.scale * l.alpha * np.exp(X[X<=0]) dX *= l.dY assert np.allclose(dX, l.dX) def test_PReLU(): X = ((np.arange(10000) - 5000) / 1000.0).reshape((-1, 1, 1, 1)) data = L.Data(X, "data") data.reshape() l = L.PReLU(data) y = l.eval() ty = np.zeros(X.shape) ty[X>0] = X[X>0] ty[X<=0] = l.alpha * X[X<=0] assert np.allclose(y, ty) l.dY = np.random.random(l.Y.shape) l.backward() dX = np.zeros(X.shape) dX[X>0] = 1 dX[X<=0] = l.alpha dX *= l.dY print (dX, l.dX) assert np.allclose(dX, l.dX) def test_tanh(): X = ((np.arange(10000) - 5000) / 1000.0).reshape((-1, 1, 1, 1)) data = L.Data(X, "data") data.reshape() l = L.Tanh(data) y = l.eval() p = np.exp(X) n = np.exp(-X) ty = (p - n) / (p + n) assert np.allclose(y, ty) l.dY = np.random.random(l.Y.shape) l.backward() dX = 1.0 - np.square(p - n) / np.square(p + n) dX *= l.dY assert np.allclose(dX, l.dX)
[ "mobula.layers.PReLU", "mobula.layers.Tanh", "numpy.allclose", "mobula.layers.ReLU", "numpy.zeros", "numpy.square", "numpy.random.random", "numpy.arange", "numpy.exp", "mobula.layers.Data", "mobula.layers.SELU", "mobula.layers.Sigmoid" ]
[((145, 162), 'mobula.layers.Data', 'L.Data', (['X', '"""data"""'], {}), "(X, 'data')\n", (151, 162), True, 'import mobula.layers as L\n'), ((190, 205), 'mobula.layers.Sigmoid', 'L.Sigmoid', (['data'], {}), '(data)\n', (199, 205), True, 'import mobula.layers as L\n'), ((342, 352), 'numpy.exp', 'np.exp', (['(-X)'], {}), '(-X)\n', (348, 352), True, 'import numpy as np\n'), ((595, 612), 'mobula.layers.Data', 'L.Data', (['X', '"""data"""'], {}), "(X, 'data')\n", (601, 612), True, 'import mobula.layers as L\n'), ((640, 652), 'mobula.layers.ReLU', 'L.ReLU', (['data'], {}), '(data)\n', (646, 652), True, 'import mobula.layers as L\n'), ((786, 803), 'numpy.zeros', 'np.zeros', (['X.shape'], {}), '(X.shape)\n', (794, 803), True, 'import numpy as np\n'), ((845, 862), 'numpy.zeros', 'np.zeros', (['X.shape'], {}), '(X.shape)\n', (853, 862), True, 'import numpy as np\n'), ((1140, 1157), 'mobula.layers.Data', 'L.Data', (['X', '"""data"""'], {}), "(X, 'data')\n", (1146, 1157), True, 'import mobula.layers as L\n'), ((1185, 1197), 'mobula.layers.SELU', 'L.SELU', (['data'], {}), '(data)\n', (1191, 1197), True, 'import mobula.layers as L\n'), ((1224, 1241), 'numpy.zeros', 'np.zeros', (['X.shape'], {}), '(X.shape)\n', (1232, 1241), True, 'import numpy as np\n'), ((1350, 1368), 'numpy.allclose', 'np.allclose', (['y', 'ty'], {}), '(y, ty)\n', (1361, 1368), True, 'import numpy as np\n'), ((1380, 1407), 'numpy.random.random', 'np.random.random', (['l.Y.shape'], {}), '(l.Y.shape)\n', (1396, 1407), True, 'import numpy as np\n'), ((1434, 1451), 'numpy.zeros', 'np.zeros', (['X.shape'], {}), '(X.shape)\n', (1442, 1451), True, 'import numpy as np\n'), ((1555, 1576), 'numpy.allclose', 'np.allclose', (['dX', 'l.dX'], {}), '(dX, l.dX)\n', (1566, 1576), True, 'import numpy as np\n'), ((1675, 1692), 'mobula.layers.Data', 'L.Data', (['X', '"""data"""'], {}), "(X, 'data')\n", (1681, 1692), True, 'import mobula.layers as L\n'), ((1720, 1733), 'mobula.layers.PReLU', 'L.PReLU', (['data'], {}), '(data)\n', (1727, 1733), True, 'import mobula.layers as L\n'), ((1760, 1777), 'numpy.zeros', 'np.zeros', (['X.shape'], {}), '(X.shape)\n', (1768, 1777), True, 'import numpy as np\n'), ((1843, 1861), 'numpy.allclose', 'np.allclose', (['y', 'ty'], {}), '(y, ty)\n', (1854, 1861), True, 'import numpy as np\n'), ((1873, 1900), 'numpy.random.random', 'np.random.random', (['l.Y.shape'], {}), '(l.Y.shape)\n', (1889, 1900), True, 'import numpy as np\n'), ((1927, 1944), 'numpy.zeros', 'np.zeros', (['X.shape'], {}), '(X.shape)\n', (1935, 1944), True, 'import numpy as np\n'), ((2031, 2052), 'numpy.allclose', 'np.allclose', (['dX', 'l.dX'], {}), '(dX, l.dX)\n', (2042, 2052), True, 'import numpy as np\n'), ((2150, 2167), 'mobula.layers.Data', 'L.Data', (['X', '"""data"""'], {}), "(X, 'data')\n", (2156, 2167), True, 'import mobula.layers as L\n'), ((2195, 2207), 'mobula.layers.Tanh', 'L.Tanh', (['data'], {}), '(data)\n', (2201, 2207), True, 'import mobula.layers as L\n'), ((2233, 2242), 'numpy.exp', 'np.exp', (['X'], {}), '(X)\n', (2239, 2242), True, 'import numpy as np\n'), ((2251, 2261), 'numpy.exp', 'np.exp', (['(-X)'], {}), '(-X)\n', (2257, 2261), True, 'import numpy as np\n'), ((2300, 2318), 'numpy.allclose', 'np.allclose', (['y', 'ty'], {}), '(y, ty)\n', (2311, 2318), True, 'import numpy as np\n'), ((2330, 2357), 'numpy.random.random', 'np.random.random', (['l.Y.shape'], {}), '(l.Y.shape)\n', (2346, 2357), True, 'import numpy as np\n'), ((2452, 2473), 'numpy.allclose', 'np.allclose', (['dX', 'l.dX'], {}), '(dX, l.dX)\n', (2463, 2473), True, 'import numpy as np\n'), ((281, 308), 'numpy.random.random', 'np.random.random', (['l.Y.shape'], {}), '(l.Y.shape)\n', (297, 308), True, 'import numpy as np\n'), ((728, 755), 'numpy.random.random', 'np.random.random', (['l.Y.shape'], {}), '(l.Y.shape)\n', (744, 755), True, 'import numpy as np\n'), ((1513, 1530), 'numpy.exp', 'np.exp', (['X[X <= 0]'], {}), '(X[X <= 0])\n', (1519, 1530), True, 'import numpy as np\n'), ((2390, 2406), 'numpy.square', 'np.square', (['(p - n)'], {}), '(p - n)\n', (2399, 2406), True, 'import numpy as np\n'), ((2409, 2425), 'numpy.square', 'np.square', (['(p + n)'], {}), '(p + n)\n', (2418, 2425), True, 'import numpy as np\n'), ((1312, 1329), 'numpy.exp', 'np.exp', (['X[X <= 0]'], {}), '(X[X <= 0])\n', (1318, 1329), True, 'import numpy as np\n'), ((76, 92), 'numpy.arange', 'np.arange', (['(10000)'], {}), '(10000)\n', (85, 92), True, 'import numpy as np\n'), ((526, 542), 'numpy.arange', 'np.arange', (['(10000)'], {}), '(10000)\n', (535, 542), True, 'import numpy as np\n'), ((1071, 1087), 'numpy.arange', 'np.arange', (['(10000)'], {}), '(10000)\n', (1080, 1087), True, 'import numpy as np\n'), ((1606, 1622), 'numpy.arange', 'np.arange', (['(10000)'], {}), '(10000)\n', (1615, 1622), True, 'import numpy as np\n'), ((2081, 2097), 'numpy.arange', 'np.arange', (['(10000)'], {}), '(10000)\n', (2090, 2097), True, 'import numpy as np\n'), ((462, 480), 'numpy.square', 'np.square', (['(1 + enx)'], {}), '(1 + enx)\n', (471, 480), True, 'import numpy as np\n')]
# Copyright 2019 <NAME> # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from __future__ import absolute_import from elasticsearch import Elasticsearch from flask import current_app as app import re import yaml from rhoci.database import Database class DFG(object): def __init__(self, name, squads=[], components=[], squad_to_components={}): self.name = name self.squads = squads self.components = components self.squad_to_components = squad_to_components def insert(self): """Inserts object to the database.""" if not Database.find_one("DFGs", {"name": self.name}): Database.insert(collection='DFGs', data=self.json()) def json(self): return { 'name': self.name, 'squads': self.squads, 'components': self.components, 'squad_to_components': self.squad_to_components, } @classmethod def get_all_DFGs_based_on_jobs(cls): """Returns a list of all DFGs based on job model where it cuts the DFG name from the job name and makes sure the set is unique. """ DFGs = [] es = Elasticsearch(app.config['custom']['elk']['es']['url']) body = { "size": 0, "aggs" : { "jobs" : { "terms" : { "field" : "DFG.keyword", "size" : 4000 } } } } result = es.search(index="logstash", body=body) for bucket in result["aggregations"]['jobs']['buckets']: DFGs.append(bucket['key']) return DFGs @classmethod def get_all_squads(cls): squads = [] for DFG_db in cls.find(): if DFG_db['squads']: squads.extend(DFG_db['squads']) return squads @classmethod def get_all_components(cls): components = [] for DFG_db in cls.find(): if DFG_db['components']: components.extend(DFG_db['components']) return components @classmethod def get_squad(cls, DFG_name, component): DFG_db = cls.find_one(name=DFG_name) if DFG_db['squad_to_components']: for squad, components in DFG_db['squad_to_components'].items(): for comp in components: if comp == component: return squad if component == components: return squad return @classmethod def get_squad_components(cls, DFG_name, squad): """Returns all the components of a given squad.""" DFG_db = cls.find_one(name=DFG_name) return DFG_db['squad_to_components'][squad] @classmethod def find(cls): """Returns find query.""" query = {} DFGs = Database.find(collection="DFGs", query=query) return DFGs @classmethod def find_one(cls, name): """Returns one query result.""" query = {} if name: query['name'] = name DFGs = Database.find_one(collection="DFGs", query=query) return DFGs @classmethod def count(cls, squads=False): """Returns the count of DFGs documents.""" query = {} if squads: return len(cls.get_all_squads()) else: DFGs = Database.find(collection='DFGs', query=query) return DFGs.count()
[ "elasticsearch.Elasticsearch", "rhoci.database.Database.find_one", "rhoci.database.Database.find" ]
[((1728, 1783), 'elasticsearch.Elasticsearch', 'Elasticsearch', (["app.config['custom']['elk']['es']['url']"], {}), "(app.config['custom']['elk']['es']['url'])\n", (1741, 1783), False, 'from elasticsearch import Elasticsearch\n'), ((3374, 3419), 'rhoci.database.Database.find', 'Database.find', ([], {'collection': '"""DFGs"""', 'query': 'query'}), "(collection='DFGs', query=query)\n", (3387, 3419), False, 'from rhoci.database import Database\n'), ((3611, 3660), 'rhoci.database.Database.find_one', 'Database.find_one', ([], {'collection': '"""DFGs"""', 'query': 'query'}), "(collection='DFGs', query=query)\n", (3628, 3660), False, 'from rhoci.database import Database\n'), ((1123, 1169), 'rhoci.database.Database.find_one', 'Database.find_one', (['"""DFGs"""', "{'name': self.name}"], {}), "('DFGs', {'name': self.name})\n", (1140, 1169), False, 'from rhoci.database import Database\n'), ((3900, 3945), 'rhoci.database.Database.find', 'Database.find', ([], {'collection': '"""DFGs"""', 'query': 'query'}), "(collection='DFGs', query=query)\n", (3913, 3945), False, 'from rhoci.database import Database\n')]
from prometheus_client.core import GaugeMetricFamily def make_metrics(queue): list_metrics = [] # Total items in Queue metric = GaugeMetricFamily( 'jenkins_queue_total', 'Total items in Queue', labels=None ) metric.add_metric( labels=[], value=queue.get_total_items() ) list_metrics.append(metric) # Duration of an item in Queue metric = GaugeMetricFamily( 'jenkins_queue_item_duration', 'Duration of a item in Queue in seconds', labels=['queue_id', 'item_name'] ) list_items = queue.get_list_items() for item_id in list_items: item = queue.get_item(item_id) item_name = item['name'] queue_id = str(item_id) metric.add_metric( labels=[queue_id, item_name], value=queue.get_in_queue_duration(item_id) ) list_metrics.append(metric) return list_metrics
[ "prometheus_client.core.GaugeMetricFamily" ]
[((144, 221), 'prometheus_client.core.GaugeMetricFamily', 'GaugeMetricFamily', (['"""jenkins_queue_total"""', '"""Total items in Queue"""'], {'labels': 'None'}), "('jenkins_queue_total', 'Total items in Queue', labels=None)\n", (161, 221), False, 'from prometheus_client.core import GaugeMetricFamily\n'), ((420, 548), 'prometheus_client.core.GaugeMetricFamily', 'GaugeMetricFamily', (['"""jenkins_queue_item_duration"""', '"""Duration of a item in Queue in seconds"""'], {'labels': "['queue_id', 'item_name']"}), "('jenkins_queue_item_duration',\n 'Duration of a item in Queue in seconds', labels=['queue_id', 'item_name'])\n", (437, 548), False, 'from prometheus_client.core import GaugeMetricFamily\n')]
from datetime import datetime from typing import List from sqlalchemy import insert, func, select, distinct from src import db from src.database.models import Artifact, LockedArtifact, ArtifactLabelRelation, FlaggedArtifact, LabelingData from src.helper.tools_common import string_none_or_empty, who_is_signed_in def get_artifact_by_id(art_id: int): return db.session.execute(select(Artifact).where(Artifact.id == art_id)).scalar() def add_artifacts(artifact_txt_list: List[str], artifact_identifier: str, creator: str, manually_uploaded: bool = False) -> List[int]: artifact_txt_list = filter(lambda s: not string_none_or_empty(s), artifact_txt_list) inserted_ids = [] for art in artifact_txt_list: stmt = insert(Artifact).values(text=art, identifier=artifact_identifier, created_by=creator, uploaded_manually=manually_uploaded) inserted_ids.append(db.session.execute(stmt).inserted_primary_key[0]) db.session.commit() return inserted_ids def get_artifacts_with_label(label_text: str) -> List[Artifact]: qry = select(Artifact).join(ArtifactLabelRelation.artifact).join(ArtifactLabelRelation.label).where( LabelingData.labeling == label_text) return [artifact for artifact, in db.session.execute(qry).all()] def unlock_artifacts_by(username): if not username: return my_lock = LockedArtifact.query.filter_by(created_by=username).first() if my_lock is not None: db.session.delete(my_lock) db.session.commit() def lock_artifact_by(username, artifact_id): if not username: return unlock_artifacts_by(username) db.session.add(LockedArtifact(created_by=username, artifact_id=artifact_id)) db.session.commit() def get_locked_artifacts(): update_api_locks() result = db.session.query(LockedArtifact.artifact_id, func.count(LockedArtifact.created_by)).group_by( LockedArtifact.artifact_id).all() all_locks = {row[0]: row[1] for row in result} return all_locks def update_api_locks(): all_locks = LockedArtifact.query.all() now_datetime = datetime.utcnow() for aLock in all_locks: if (now_datetime - aLock.created_at).total_seconds() / 60 >= 15: # 15min # print("Unlocking Artifact: {} -> {}:{}".format(aLock.username, aLock.sourceId, aLock.artifact_post_id)) db.session.delete(aLock) db.session.commit() def total_artifact_count() -> int: return len(db.session.execute(select(Artifact.id)).all()) def artifact_needs_labeling_count() -> int: query = select(Artifact.id).except_( select(ArtifactLabelRelation.artifact_id).group_by(ArtifactLabelRelation.artifact_id).having( func.count(ArtifactLabelRelation.created_by) > 1)) return len(db.session.execute(query).all()) def get_false_positive_artifacts(): """ Return artifacts marked as false positive by me, or marked as false positive by at least 2 people """ q_artifacts_marked_fp_by_me = db.session.query(distinct(FlaggedArtifact.artifact_id)).filter( FlaggedArtifact.created_by == who_is_signed_in()) q_artifacts_marked_fp_by_2 = db.session.query( distinct(FlaggedArtifact.artifact_id)).group_by(FlaggedArtifact.artifact_id).having(func.count() > 1) result = {row[0] for row in q_artifacts_marked_fp_by_me.union(q_artifacts_marked_fp_by_2).all()} return result
[ "sqlalchemy.insert", "src.database.models.LockedArtifact.query.all", "src.database.models.LockedArtifact", "src.database.models.LockedArtifact.query.filter_by", "src.db.session.execute", "sqlalchemy.distinct", "sqlalchemy.select", "datetime.datetime.utcnow", "src.helper.tools_common.who_is_signed_in", "src.helper.tools_common.string_none_or_empty", "src.db.session.commit", "sqlalchemy.func.count", "src.db.session.delete" ]
[((999, 1018), 'src.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1016, 1018), False, 'from src import db\n'), ((1769, 1788), 'src.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1786, 1788), False, 'from src import db\n'), ((2105, 2131), 'src.database.models.LockedArtifact.query.all', 'LockedArtifact.query.all', ([], {}), '()\n', (2129, 2131), False, 'from src.database.models import Artifact, LockedArtifact, ArtifactLabelRelation, FlaggedArtifact, LabelingData\n'), ((2151, 2168), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2166, 2168), False, 'from datetime import datetime\n'), ((2439, 2458), 'src.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2456, 2458), False, 'from src import db\n'), ((1512, 1538), 'src.db.session.delete', 'db.session.delete', (['my_lock'], {}), '(my_lock)\n', (1529, 1538), False, 'from src import db\n'), ((1547, 1566), 'src.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1564, 1566), False, 'from src import db\n'), ((1703, 1763), 'src.database.models.LockedArtifact', 'LockedArtifact', ([], {'created_by': 'username', 'artifact_id': 'artifact_id'}), '(created_by=username, artifact_id=artifact_id)\n', (1717, 1763), False, 'from src.database.models import Artifact, LockedArtifact, ArtifactLabelRelation, FlaggedArtifact, LabelingData\n'), ((1416, 1467), 'src.database.models.LockedArtifact.query.filter_by', 'LockedArtifact.query.filter_by', ([], {'created_by': 'username'}), '(created_by=username)\n', (1446, 1467), False, 'from src.database.models import Artifact, LockedArtifact, ArtifactLabelRelation, FlaggedArtifact, LabelingData\n'), ((2410, 2434), 'src.db.session.delete', 'db.session.delete', (['aLock'], {}), '(aLock)\n', (2427, 2434), False, 'from src import db\n'), ((2616, 2635), 'sqlalchemy.select', 'select', (['Artifact.id'], {}), '(Artifact.id)\n', (2622, 2635), False, 'from sqlalchemy import insert, func, select, distinct\n'), ((3150, 3168), 'src.helper.tools_common.who_is_signed_in', 'who_is_signed_in', ([], {}), '()\n', (3166, 3168), False, 'from src.helper.tools_common import string_none_or_empty, who_is_signed_in\n'), ((3313, 3325), 'sqlalchemy.func.count', 'func.count', ([], {}), '()\n', (3323, 3325), False, 'from sqlalchemy import insert, func, select, distinct\n'), ((640, 663), 'src.helper.tools_common.string_none_or_empty', 'string_none_or_empty', (['s'], {}), '(s)\n', (660, 663), False, 'from src.helper.tools_common import string_none_or_empty, who_is_signed_in\n'), ((755, 771), 'sqlalchemy.insert', 'insert', (['Artifact'], {}), '(Artifact)\n', (761, 771), False, 'from sqlalchemy import insert, func, select, distinct\n'), ((2759, 2803), 'sqlalchemy.func.count', 'func.count', (['ArtifactLabelRelation.created_by'], {}), '(ArtifactLabelRelation.created_by)\n', (2769, 2803), False, 'from sqlalchemy import insert, func, select, distinct\n'), ((2825, 2850), 'src.db.session.execute', 'db.session.execute', (['query'], {}), '(query)\n', (2843, 2850), False, 'from src import db\n'), ((3065, 3102), 'sqlalchemy.distinct', 'distinct', (['FlaggedArtifact.artifact_id'], {}), '(FlaggedArtifact.artifact_id)\n', (3073, 3102), False, 'from sqlalchemy import insert, func, select, distinct\n'), ((945, 969), 'src.db.session.execute', 'db.session.execute', (['stmt'], {}), '(stmt)\n', (963, 969), False, 'from src import db\n'), ((1298, 1321), 'src.db.session.execute', 'db.session.execute', (['qry'], {}), '(qry)\n', (1316, 1321), False, 'from src import db\n'), ((2530, 2549), 'sqlalchemy.select', 'select', (['Artifact.id'], {}), '(Artifact.id)\n', (2536, 2549), False, 'from sqlalchemy import insert, func, select, distinct\n'), ((384, 400), 'sqlalchemy.select', 'select', (['Artifact'], {}), '(Artifact)\n', (390, 400), False, 'from sqlalchemy import insert, func, select, distinct\n'), ((1900, 1937), 'sqlalchemy.func.count', 'func.count', (['LockedArtifact.created_by'], {}), '(LockedArtifact.created_by)\n', (1910, 1937), False, 'from sqlalchemy import insert, func, select, distinct\n'), ((2653, 2694), 'sqlalchemy.select', 'select', (['ArtifactLabelRelation.artifact_id'], {}), '(ArtifactLabelRelation.artifact_id)\n', (2659, 2694), False, 'from sqlalchemy import insert, func, select, distinct\n'), ((3229, 3266), 'sqlalchemy.distinct', 'distinct', (['FlaggedArtifact.artifact_id'], {}), '(FlaggedArtifact.artifact_id)\n', (3237, 3266), False, 'from sqlalchemy import insert, func, select, distinct\n'), ((1120, 1136), 'sqlalchemy.select', 'select', (['Artifact'], {}), '(Artifact)\n', (1126, 1136), False, 'from sqlalchemy import insert, func, select, distinct\n')]
from demo.exceptions import AuthException def token_auth(fn): def wrapper(*args, **kwargs): from flask import request if request.headers.get("token", None) != "123": raise AuthException("Fail to get access") return fn(*args, **kwargs) return wrapper
[ "demo.exceptions.AuthException", "flask.request.headers.get" ]
[((144, 178), 'flask.request.headers.get', 'request.headers.get', (['"""token"""', 'None'], {}), "('token', None)\n", (163, 178), False, 'from flask import request\n'), ((207, 242), 'demo.exceptions.AuthException', 'AuthException', (['"""Fail to get access"""'], {}), "('Fail to get access')\n", (220, 242), False, 'from demo.exceptions import AuthException\n')]
import matplotlib as mpl import matplotlib.pyplot as plt import numpy as np import scipy as sci import pandas as pd import numbers import csv import sys filepath = sys.argv[1] filename = filepath[filepath.rfind('/')+1:] print(filepath) data = pd.read_csv(filepath) print("Adding year column") data['year'] = data.apply(lambda row: str(row.QREDATE)[-6:-2], axis=1) print("Adding month column") data['month'] = data.apply(lambda row: str(row.QREDATE)[-10:-8].lstrip('0'), axis=1) print("Adding day column") data['day'] = data.apply(lambda row: str(row.QREDATE)[-8:-6].lstrip('0'), axis=1) print('Removing invalid users') invalid_users = [] users = data.NEWID.unique() for user in users: user_data = data.loc[data['NEWID'] == user] #Finding invalid users by determining if their QREDATE is invalid invalid = user_data.loc[user_data['QREDATE_'] == 'B'] if len(invalid) > 0: invalid_users.append(user) data = data[data.NEWID != user] invalid2 = user_data.loc[user_data.year == 'n'] if len(invalid2) > 0 and user not in invalid_users: invalid_users.append(user) data = data[data.NEWID != user] print("Mapping UCC to Category") uccdata = pd.read_csv("categorized_ucc_dictionary.csv") data['category'] = data['UCC'].map(uccdata.set_index('UCC')['CATEGORY']) print("Dropping unneeded columns") data = data.drop(columns=["UCC","ALLOC","GIFT","PUB_FLAG","QREDATE","QREDATE_"],axis=1) data.to_csv('clean_data/'+filename,index=False)
[ "pandas.read_csv" ]
[((244, 265), 'pandas.read_csv', 'pd.read_csv', (['filepath'], {}), '(filepath)\n', (255, 265), True, 'import pandas as pd\n'), ((1192, 1237), 'pandas.read_csv', 'pd.read_csv', (['"""categorized_ucc_dictionary.csv"""'], {}), "('categorized_ucc_dictionary.csv')\n", (1203, 1237), True, 'import pandas as pd\n')]
import arcpy from arcpy import env from arcpy.sa import * import file_functions from file_functions import * import create_centerline import create_station_lines from create_station_lines import create_station_lines_function import os from os import listdir from os.path import isfile, join import xlrd import shutil from openpyxl.workbook import Workbook from openpyxl.reader.excel import load_workbook, InvalidFileException def lidar_footptint(lasbin, lidardir, spatialref_shp): """This function converts LAZ files to LAS file format as well as producing a LiDAR extent polygon. in_folder must be a directory containing nothing but raw LAZ files spatial_ref must be an ArcGIS spatial reference object with units of feet. las_tools_bin must be the location of the 'bin' folder installed with LAStools by rapidlasso Returns: A shapefile w/ LiDAR coverage to be used to make a ground polygon for LAStools processing""" files_in_direct = [f for f in listdir(lidardir) if isfile(join(lidardir, f))] laspath = lidardir + '\\las_files' if not os.path.exists(laspath): os.makedirs(laspath) # Initiate temp files folder formatted for LAStools temp_files = lidardir + '\\temp_files' if not os.path.exists(temp_files): os.makedirs(temp_files) in_spatial_ref = arcpy.Describe(spatialref_shp).spatialReference try: # Convert laz files to LAS files for f in files_in_direct: if f[-4:] == ".laz": # Correct format, can alter between browse() input and default if lasbin[-1] != 'n': lasbin = lasbin[:-1] cmd("%s\\laszip.exe -i %s\\%s -o %s\\%s_noprj.las" % (lasbin, lidardir, f, laspath, f[:-4])) print("%s\\laszip.exe -i %s\\%s -o %s\\%s_noprj.las" % (lasbin, lidardir, f, laspath, f[:-4])) cmd("%s\\las2las.exe -i %s\\%s_noprj.las -o %s\\%s.las" % (lasbin, laspath, f[:-4], laspath, f[:-4])) print("%s\\las2las.exe -i %s\\%s_noprj.las -o %s\\%s.las" % (lasbin, laspath, f[:-4], laspath, f[:-4])) files_in_laspath = [f for f in listdir(laspath) if isfile(join(laspath, f))] # Delete unnecessary index files for f in files_in_laspath: if f[-4:] == 'lasx': os.remove(laspath + "\\%s" % f) if f[-5] == 'j': os.remove(laspath + "\\%s" % f) raw_las_dataset = arcpy.CreateLasDataset_management(laspath, lidardir + "\\raw_las_dataset.lasd", spatial_reference=in_spatial_ref, compute_stats=True) lidar_ras = CreateConstantRaster(1, extent=raw_las_dataset) lidar_footprint = arcpy.RasterToPolygon_conversion(lidar_ras, lidardir + '\\las_footprint.shp') except arcpy.ExecuteError: print(arcpy.GetMessages()) return lidar_footprint def define_ground_polygon(lidar_footprint, lidardir, naipdir, ndvi_thresh, aoi_shp): """This function takes the defined lidar footprint from the lidar_footprint() function, as well as a defined NAIP imagery location (in .jpg2) and makes a polygon of vegeation using a NDVI threshold of >0.4. This polygon is erased from the lidar footprint to give a ground_polygon used to define processing settings""" # Set processing extent to the LiDAR data extent arcpy.env.extent = lidar_footprint in_spatial_ref = arcpy.Describe(lidar_footprint).spatialReference # Find NAIP imagery in folder naip_imagery = [f for f in listdir(naipdir) if isfile(join(naipdir, f))] # Initiate temp files folder temp_files = lidardir + '\\temp_files' if not os.path.exists(temp_files): os.makedirs(temp_files) if len(naip_imagery) > 1: add_to_mosaic = [naipdir + "\\" + f for f in naip_imagery] naip_imagery = arcpy.MosaicToNewRaster_management(add_to_mosaic, output_location=lidardir, raster_dataset_name_with_extension="NAIP_mos.tif", coordinate_system_for_the_raster=in_spatial_ref, number_of_bands=4) else: naip_imagery = (naipdir + "\\%s" % naip_imagery[0]) naip_imagery = arcpy.ProjectRaster_management(naip_imagery, lidardir + "\\NAIP_prj.tif", in_spatial_ref) try: # Extract bands 1 (red) and 4 (NIR) red_lyr = arcpy.MakeRasterLayer_management(naip_imagery, temp_files + "\\rd_lyr", band_index=0) nir_lyr = arcpy.MakeRasterLayer_management(naip_imagery, temp_files + "\\nr_lyr", band_index=4) red_lyr = arcpy.SaveToLayerFile_management(red_lyr, temp_files + "\\red_ras.lyr") nir_lyr = arcpy.SaveToLayerFile_management(nir_lyr, temp_files + "\\nir_ras.lyr") red_ras = arcpy.CopyRaster_management(red_lyr, temp_files + "\\red_ras.tif", format="TIFF") nir_ras = arcpy.CopyRaster_management(nir_lyr, temp_files + "\\nir_ras.tif", format="TIFF") red_ras = Raster(red_ras) nir_ras = Raster(nir_ras) # Calculate ndvi and generate polygon delineating values > ndvi_thresh ndvi = lidardir + "\\NDVI.tif" ndvi_ras = ((nir_ras - red_ras) / (nir_ras + red_ras)) ndvi_ras.save(ndvi) veg_ras_raw = Con(arcpy.sa.Raster(ndvi) >= ndvi_thresh, 1) veg_ras_raw.save(temp_files + "\\veg_ras_raw.tif") veg_ras = MajorityFilter(veg_ras_raw, "EIGHT", "MAJORITY") veg_ras.save(temp_files + "\\veg_ras.tif") veg_poly = arcpy.RasterToPolygon_conversion(veg_ras, lidardir + "\\veg_poly_ndvi.shp", simplify=FALSE) # Make polygon representing bare ground if aoi_shp != '': ground_poly = arcpy.Erase_analysis(lidar_footprint, veg_poly, temp_files + "\\ground_poly_full.shp") aoi_prj = arcpy.Project_management(aoi_shp, temp_files + "\\aoi_prj_to_inref.shp", out_coor_system=in_spatial_ref) ground_poly = arcpy.Clip_analysis(ground_poly, aoi_prj, lidardir + "\\ground_poly.shp") else: ground_poly = arcpy.Erase_analysis(lidar_footprint, veg_poly, lidardir + "\\ground_poly.shp") ground_poly = arcpy.DefineProjection_management(ground_poly, in_spatial_ref) print("AOI bare-ground polygon @ %s" % ground_poly) except arcpy.ExecuteError: print(arcpy.GetMessages()) def lidar_to_raster(lidardir, spatialref_shp, aoi_shp, sample_meth, tri_meth, void_meth, m_cell_size=1): """Converts processed LAS files to a LAS dataset, and then to a raster with cell size of 1m Args: Folder containing LAS files, desired cell size in meters (default is 1m), and ft spatial reference Returns: Raster name for use in detrending """ # Create variables with relevant folders lasdir = lidardir + '\\las_files' ground_lasdir = lasdir + '\\09_ground_rm_duplicates' # Create addresses for generated .lasd, .tiff files out_dem = lidardir + "\\las_dem.tif" out_las = lasdir + '\\las_dataset.lasd' # Initiate temp files folder temp_files = lidardir + '\\temp_files' if not os.path.exists(temp_files): os.makedirs(temp_files) # Set up output spatial reference and convert units if necessary in_spatial_ref = arcpy.Describe(spatialref_shp).spatialReference out_spatial_ref = arcpy.Describe(aoi_shp).spatialReference if in_spatial_ref.linearUnitName == 'Meter': cell_size = m_cell_size print('LAS units are Meters') elif in_spatial_ref.linearUnitName == 'Foot_US': cell_size = (3.28 * m_cell_size) print('LAS units are Feet') else: return print('Linear unit name for %s uncertain, please use a PROJECTED COORDINATE SYSTEM' % os.path.basename(in_spatial_ref)) # Set up interpolation method string if sample_meth == 'BINNING': method_str = '%s AVERAGE %s' % (sample_meth, void_meth) else: method_str = "%s %s NO_THINNING MAXIMUM 0" % (sample_meth, tri_meth) print('Methods: %s' % method_str) try: no_prj_dem = temp_files + '\\noprj_dem.tif' las_dataset = arcpy.CreateLasDataset_management(ground_lasdir, out_las, spatial_reference=in_spatial_ref, compute_stats=True) lidar_raster = arcpy.LasDatasetToRaster_conversion(las_dataset, value_field='ELEVATION', data_type='FLOAT', interpolation_type=method_str, sampling_type='CELLSIZE', sampling_value=cell_size) arcpy.CopyRaster_management(lidar_raster, no_prj_dem) arcpy.ProjectRaster_management(no_prj_dem, out_raster=out_dem, out_coor_system=out_spatial_ref) except arcpy.ExecuteError: print(arcpy.GetMessages()) print("LAS -> DEM output @ %s" % out_dem) # Notify the user which units the DEM are in if out_spatial_ref.linearUnitName == 'Meter': print('DEM units are Meters') elif out_spatial_ref.linearUnitName == 'Foot_US': print('DEM units are Feet') else: print('Linear unit name for %s uncertain, please use a PROJECTED COORDINATE SYSTEM' % os.path.basename(out_spatial_ref)) return out_dem def detrend_prep(dem, flow_poly, aoi_shp, filt_passes, smooth_dist, m_spacing=1, centerline_verified=False): """This function takes the Lidar raster, creates a least-cost thalweg centerline from a smoothed raster. Station points are generated along the centerline at defined spacing (1/20th of channel width is a starting point) which are given the values of the lidar raster. Args: raster_name, upstream flow polygon, spatial extent (can be raster), station point spacing in ft (3ft is default). Run first with centerline_verified=False and visually inspect. Run again w/ True to return the [station_points, elevation_table]""" # Set up environment and output folder spatial_ref = arcpy.Describe(aoi_shp).spatialReference arcpy.env.extent = dem dem_dir = os.path.dirname(dem) # Initiate temp files folder temp_files = dem_dir + '\\temp_files' if not os.path.exists(temp_files): os.makedirs(temp_files) # Define input parameters params = [m_spacing, smooth_dist] # First item defines XS length and spacing, second item described smoothing distance if not spatial_ref.linearUnitName == 'Meter': params = [int(i * 3) for i in params] filt_passes = int(filt_passes) if not centerline_verified: print('Generating smooth thalweg centerline...') print("Smoothing DEM w/ %sx low pass filters..." % filt_passes) ticker = 0 filter_out = arcpy.sa.Filter(dem, "LOW") filter_out.save(temp_files + "\\filter_out%s" % ticker) while ticker < filt_passes: # Apply an iterative low pass filter 15x to the raster to smooth the topography filter_out = arcpy.sa.Filter((temp_files + "\\filter_out%s" % ticker), "LOW") filter_out.save(temp_files + "\\filter_out%s" % (ticker + 1)) ticker += 1 smooth_ras = (dem_dir + "\\filt_ras.tif") filter_out.save(dem_dir + "\\filt_ras.tif") # Create least cost centerline from 15x filtered raster print("Smoothed DEM made, least-cost centerline being calculated...") lidar_foot = dem_dir + '\\las_footprint.shp' create_centerline.make_centerline(smooth_ras, aoi_shp, lidar_foot, flow_poly, smooth_distance=10) for ticker in range(filt_passes + 1): # Delete intermediate filtered rasters file = (temp_files + "\\filter_out%s" % ticker) if os.path.exists(file): try: shutil.rmtree(file) except: print("Could not remove %s " % file) else: print("Path %s does not exist and can't be deleted...") print('Done') else: print('Generating thalweg elevation profile...') centerline = dem_dir + "\\thalweg_centerline.shp" # Define location of intermediate files, some of which will be deleted intermediates = ["thalweg_centerline_XS.shp", 'thalweg_station_points.shp', 'thalweg_station_points1.shp', 'sp_elevation_table.dbf'] intermediates = [temp_files + '\\%s' % i for i in intermediates] # Create a station point shapefile evenly sampling the thalweg centerline station_lines = create_station_lines.create_station_lines_function(centerline, spacing=params[0], xs_length=params[0]) station_points = arcpy.Intersect_analysis([intermediates[0], centerline], out_feature_class=intermediates[2], join_attributes="ALL", output_type="POINT") station_points = arcpy.MultipartToSinglepart_management(station_points, intermediates[1]) station_points = arcpy.AddXY_management(station_points) # Extract elevation values from each station point, and export to a .csv file elevation_table = arcpy.ExtractValuesToTable_ga(station_points, in_rasters=dem, out_table=intermediates[3]) station_points = arcpy.JoinField_management(station_points, in_field="ORIG_FID", join_table=elevation_table, join_field="SrcID_Feat", fields=["Value"]) # Add fields to override, but first adjust detrending functions elevation_table = dem_dir + '\\xyz_elevation_table.csv' elevation_table = file_functions.tableToCSV(input_table=station_points, csv_filepath=elevation_table, fld_to_remove_override=['FID_thal_1', 'Id_1', 'InLine_FID', 'ORIG_FID'], keep_fields=[]) elevation_df = pd.read_csv(elevation_table) # Flip rows if upside down max_loc = elevation_df['LOCATION'].max() elevation_df.sort_values('LOCATION', inplace=True) if elevation_df.iloc[0]['Value'] < elevation_df.iloc[-1]['Value']: loc_list = elevation_df.loc[:, ['LOCATION']].squeeze().to_list() loc_np = np.array([int(max_loc - i) for i in loc_list]) elevation_df['LOCATION'] = loc_np elevation_df.sort_values('LOCATION', inplace=True) elevation_df.to_csv(elevation_table) # Delete extra files for j in intermediates[2:]: delete_gis_files(j) print("Thalweg elevation profile (.csv) @ %s " % str(elevation_table)) print('Done') return elevation_table
[ "os.remove", "create_centerline.make_centerline", "arcpy.MultipartToSinglepart_management", "arcpy.SaveToLayerFile_management", "shutil.rmtree", "arcpy.MakeRasterLayer_management", "arcpy.JoinField_management", "os.path.join", "arcpy.Intersect_analysis", "arcpy.ExtractValuesToTable_ga", "arcpy.Clip_analysis", "os.path.dirname", "os.path.exists", "arcpy.CreateLasDataset_management", "file_functions.tableToCSV", "arcpy.MosaicToNewRaster_management", "arcpy.Erase_analysis", "arcpy.AddXY_management", "os.path.basename", "arcpy.Describe", "arcpy.ProjectRaster_management", "arcpy.Project_management", "arcpy.DefineProjection_management", "create_station_lines.create_station_lines_function", "arcpy.RasterToPolygon_conversion", "arcpy.sa.Raster", "arcpy.CopyRaster_management", "os.listdir", "os.makedirs", "arcpy.GetMessages", "arcpy.LasDatasetToRaster_conversion", "arcpy.sa.Filter" ]
[((10192, 10212), 'os.path.dirname', 'os.path.dirname', (['dem'], {}), '(dem)\n', (10207, 10212), False, 'import os\n'), ((1074, 1097), 'os.path.exists', 'os.path.exists', (['laspath'], {}), '(laspath)\n', (1088, 1097), False, 'import os\n'), ((1107, 1127), 'os.makedirs', 'os.makedirs', (['laspath'], {}), '(laspath)\n', (1118, 1127), False, 'import os\n'), ((1240, 1266), 'os.path.exists', 'os.path.exists', (['temp_files'], {}), '(temp_files)\n', (1254, 1266), False, 'import os\n'), ((1276, 1299), 'os.makedirs', 'os.makedirs', (['temp_files'], {}), '(temp_files)\n', (1287, 1299), False, 'import os\n'), ((1322, 1352), 'arcpy.Describe', 'arcpy.Describe', (['spatialref_shp'], {}), '(spatialref_shp)\n', (1336, 1352), False, 'import arcpy\n'), ((2455, 2596), 'arcpy.CreateLasDataset_management', 'arcpy.CreateLasDataset_management', (['laspath', "(lidardir + '\\\\raw_las_dataset.lasd')"], {'spatial_reference': 'in_spatial_ref', 'compute_stats': '(True)'}), "(laspath, lidardir +\n '\\\\raw_las_dataset.lasd', spatial_reference=in_spatial_ref,\n compute_stats=True)\n", (2488, 2596), False, 'import arcpy\n'), ((2743, 2820), 'arcpy.RasterToPolygon_conversion', 'arcpy.RasterToPolygon_conversion', (['lidar_ras', "(lidardir + '\\\\las_footprint.shp')"], {}), "(lidar_ras, lidardir + '\\\\las_footprint.shp')\n", (2775, 2820), False, 'import arcpy\n'), ((3447, 3478), 'arcpy.Describe', 'arcpy.Describe', (['lidar_footprint'], {}), '(lidar_footprint)\n', (3461, 3478), False, 'import arcpy\n'), ((3697, 3723), 'os.path.exists', 'os.path.exists', (['temp_files'], {}), '(temp_files)\n', (3711, 3723), False, 'import os\n'), ((3733, 3756), 'os.makedirs', 'os.makedirs', (['temp_files'], {}), '(temp_files)\n', (3744, 3756), False, 'import os\n'), ((3878, 4080), 'arcpy.MosaicToNewRaster_management', 'arcpy.MosaicToNewRaster_management', (['add_to_mosaic'], {'output_location': 'lidardir', 'raster_dataset_name_with_extension': '"""NAIP_mos.tif"""', 'coordinate_system_for_the_raster': 'in_spatial_ref', 'number_of_bands': '(4)'}), "(add_to_mosaic, output_location=lidardir,\n raster_dataset_name_with_extension='NAIP_mos.tif',\n coordinate_system_for_the_raster=in_spatial_ref, number_of_bands=4)\n", (3912, 4080), False, 'import arcpy\n'), ((4340, 4433), 'arcpy.ProjectRaster_management', 'arcpy.ProjectRaster_management', (['naip_imagery', "(lidardir + '\\\\NAIP_prj.tif')", 'in_spatial_ref'], {}), "(naip_imagery, lidardir + '\\\\NAIP_prj.tif',\n in_spatial_ref)\n", (4370, 4433), False, 'import arcpy\n'), ((4502, 4591), 'arcpy.MakeRasterLayer_management', 'arcpy.MakeRasterLayer_management', (['naip_imagery', "(temp_files + '\\\\rd_lyr')"], {'band_index': '(0)'}), "(naip_imagery, temp_files + '\\\\rd_lyr',\n band_index=0)\n", (4534, 4591), False, 'import arcpy\n'), ((4606, 4695), 'arcpy.MakeRasterLayer_management', 'arcpy.MakeRasterLayer_management', (['naip_imagery', "(temp_files + '\\\\nr_lyr')"], {'band_index': '(4)'}), "(naip_imagery, temp_files + '\\\\nr_lyr',\n band_index=4)\n", (4638, 4695), False, 'import arcpy\n'), ((4711, 4782), 'arcpy.SaveToLayerFile_management', 'arcpy.SaveToLayerFile_management', (['red_lyr', "(temp_files + '\\\\red_ras.lyr')"], {}), "(red_lyr, temp_files + '\\\\red_ras.lyr')\n", (4743, 4782), False, 'import arcpy\n'), ((4801, 4872), 'arcpy.SaveToLayerFile_management', 'arcpy.SaveToLayerFile_management', (['nir_lyr', "(temp_files + '\\\\nir_ras.lyr')"], {}), "(nir_lyr, temp_files + '\\\\nir_ras.lyr')\n", (4833, 4872), False, 'import arcpy\n'), ((4892, 4978), 'arcpy.CopyRaster_management', 'arcpy.CopyRaster_management', (['red_lyr', "(temp_files + '\\\\red_ras.tif')"], {'format': '"""TIFF"""'}), "(red_lyr, temp_files + '\\\\red_ras.tif', format=\n 'TIFF')\n", (4919, 4978), False, 'import arcpy\n'), ((4992, 5078), 'arcpy.CopyRaster_management', 'arcpy.CopyRaster_management', (['nir_lyr', "(temp_files + '\\\\nir_ras.tif')"], {'format': '"""TIFF"""'}), "(nir_lyr, temp_files + '\\\\nir_ras.tif', format=\n 'TIFF')\n", (5019, 5078), False, 'import arcpy\n'), ((5617, 5712), 'arcpy.RasterToPolygon_conversion', 'arcpy.RasterToPolygon_conversion', (['veg_ras', "(lidardir + '\\\\veg_poly_ndvi.shp')"], {'simplify': 'FALSE'}), "(veg_ras, lidardir + '\\\\veg_poly_ndvi.shp',\n simplify=FALSE)\n", (5649, 5712), False, 'import arcpy\n'), ((6315, 6377), 'arcpy.DefineProjection_management', 'arcpy.DefineProjection_management', (['ground_poly', 'in_spatial_ref'], {}), '(ground_poly, in_spatial_ref)\n', (6348, 6377), False, 'import arcpy\n'), ((7239, 7265), 'os.path.exists', 'os.path.exists', (['temp_files'], {}), '(temp_files)\n', (7253, 7265), False, 'import os\n'), ((7275, 7298), 'os.makedirs', 'os.makedirs', (['temp_files'], {}), '(temp_files)\n', (7286, 7298), False, 'import os\n'), ((7390, 7420), 'arcpy.Describe', 'arcpy.Describe', (['spatialref_shp'], {}), '(spatialref_shp)\n', (7404, 7420), False, 'import arcpy\n'), ((7460, 7483), 'arcpy.Describe', 'arcpy.Describe', (['aoi_shp'], {}), '(aoi_shp)\n', (7474, 7483), False, 'import arcpy\n'), ((8247, 8363), 'arcpy.CreateLasDataset_management', 'arcpy.CreateLasDataset_management', (['ground_lasdir', 'out_las'], {'spatial_reference': 'in_spatial_ref', 'compute_stats': '(True)'}), '(ground_lasdir, out_las, spatial_reference\n =in_spatial_ref, compute_stats=True)\n', (8280, 8363), False, 'import arcpy\n'), ((8438, 8622), 'arcpy.LasDatasetToRaster_conversion', 'arcpy.LasDatasetToRaster_conversion', (['las_dataset'], {'value_field': '"""ELEVATION"""', 'data_type': '"""FLOAT"""', 'interpolation_type': 'method_str', 'sampling_type': '"""CELLSIZE"""', 'sampling_value': 'cell_size'}), "(las_dataset, value_field='ELEVATION',\n data_type='FLOAT', interpolation_type=method_str, sampling_type=\n 'CELLSIZE', sampling_value=cell_size)\n", (8473, 8622), False, 'import arcpy\n'), ((8740, 8793), 'arcpy.CopyRaster_management', 'arcpy.CopyRaster_management', (['lidar_raster', 'no_prj_dem'], {}), '(lidar_raster, no_prj_dem)\n', (8767, 8793), False, 'import arcpy\n'), ((8802, 8901), 'arcpy.ProjectRaster_management', 'arcpy.ProjectRaster_management', (['no_prj_dem'], {'out_raster': 'out_dem', 'out_coor_system': 'out_spatial_ref'}), '(no_prj_dem, out_raster=out_dem,\n out_coor_system=out_spatial_ref)\n', (8832, 8901), False, 'import arcpy\n'), ((10110, 10133), 'arcpy.Describe', 'arcpy.Describe', (['aoi_shp'], {}), '(aoi_shp)\n', (10124, 10133), False, 'import arcpy\n'), ((10301, 10327), 'os.path.exists', 'os.path.exists', (['temp_files'], {}), '(temp_files)\n', (10315, 10327), False, 'import os\n'), ((10337, 10360), 'os.makedirs', 'os.makedirs', (['temp_files'], {}), '(temp_files)\n', (10348, 10360), False, 'import os\n'), ((10865, 10892), 'arcpy.sa.Filter', 'arcpy.sa.Filter', (['dem', '"""LOW"""'], {}), "(dem, 'LOW')\n", (10880, 10892), False, 'import arcpy\n'), ((11569, 11670), 'create_centerline.make_centerline', 'create_centerline.make_centerline', (['smooth_ras', 'aoi_shp', 'lidar_foot', 'flow_poly'], {'smooth_distance': '(10)'}), '(smooth_ras, aoi_shp, lidar_foot,\n flow_poly, smooth_distance=10)\n', (11602, 11670), False, 'import create_centerline\n'), ((12657, 12764), 'create_station_lines.create_station_lines_function', 'create_station_lines.create_station_lines_function', (['centerline'], {'spacing': 'params[0]', 'xs_length': 'params[0]'}), '(centerline, spacing=\n params[0], xs_length=params[0])\n', (12707, 12764), False, 'import create_station_lines\n'), ((12860, 13001), 'arcpy.Intersect_analysis', 'arcpy.Intersect_analysis', (['[intermediates[0], centerline]'], {'out_feature_class': 'intermediates[2]', 'join_attributes': '"""ALL"""', 'output_type': '"""POINT"""'}), "([intermediates[0], centerline], out_feature_class=\n intermediates[2], join_attributes='ALL', output_type='POINT')\n", (12884, 13001), False, 'import arcpy\n'), ((13072, 13144), 'arcpy.MultipartToSinglepart_management', 'arcpy.MultipartToSinglepart_management', (['station_points', 'intermediates[1]'], {}), '(station_points, intermediates[1])\n', (13110, 13144), False, 'import arcpy\n'), ((13170, 13208), 'arcpy.AddXY_management', 'arcpy.AddXY_management', (['station_points'], {}), '(station_points)\n', (13192, 13208), False, 'import arcpy\n'), ((13322, 13416), 'arcpy.ExtractValuesToTable_ga', 'arcpy.ExtractValuesToTable_ga', (['station_points'], {'in_rasters': 'dem', 'out_table': 'intermediates[3]'}), '(station_points, in_rasters=dem, out_table=\n intermediates[3])\n', (13351, 13416), False, 'import arcpy\n'), ((13437, 13576), 'arcpy.JoinField_management', 'arcpy.JoinField_management', (['station_points'], {'in_field': '"""ORIG_FID"""', 'join_table': 'elevation_table', 'join_field': '"""SrcID_Feat"""', 'fields': "['Value']"}), "(station_points, in_field='ORIG_FID', join_table=\n elevation_table, join_field='SrcID_Feat', fields=['Value'])\n", (13463, 13576), False, 'import arcpy\n'), ((13787, 13968), 'file_functions.tableToCSV', 'file_functions.tableToCSV', ([], {'input_table': 'station_points', 'csv_filepath': 'elevation_table', 'fld_to_remove_override': "['FID_thal_1', 'Id_1', 'InLine_FID', 'ORIG_FID']", 'keep_fields': '[]'}), "(input_table=station_points, csv_filepath=\n elevation_table, fld_to_remove_override=['FID_thal_1', 'Id_1',\n 'InLine_FID', 'ORIG_FID'], keep_fields=[])\n", (13812, 13968), False, 'import file_functions\n'), ((974, 991), 'os.listdir', 'listdir', (['lidardir'], {}), '(lidardir)\n', (981, 991), False, 'from os import listdir\n'), ((3562, 3578), 'os.listdir', 'listdir', (['naipdir'], {}), '(naipdir)\n', (3569, 3578), False, 'from os import listdir\n'), ((5810, 5900), 'arcpy.Erase_analysis', 'arcpy.Erase_analysis', (['lidar_footprint', 'veg_poly', "(temp_files + '\\\\ground_poly_full.shp')"], {}), "(lidar_footprint, veg_poly, temp_files +\n '\\\\ground_poly_full.shp')\n", (5830, 5900), False, 'import arcpy\n'), ((5919, 6027), 'arcpy.Project_management', 'arcpy.Project_management', (['aoi_shp', "(temp_files + '\\\\aoi_prj_to_inref.shp')"], {'out_coor_system': 'in_spatial_ref'}), "(aoi_shp, temp_files + '\\\\aoi_prj_to_inref.shp',\n out_coor_system=in_spatial_ref)\n", (5943, 6027), False, 'import arcpy\n'), ((6097, 6170), 'arcpy.Clip_analysis', 'arcpy.Clip_analysis', (['ground_poly', 'aoi_prj', "(lidardir + '\\\\ground_poly.shp')"], {}), "(ground_poly, aoi_prj, lidardir + '\\\\ground_poly.shp')\n", (6116, 6170), False, 'import arcpy\n'), ((6212, 6291), 'arcpy.Erase_analysis', 'arcpy.Erase_analysis', (['lidar_footprint', 'veg_poly', "(lidardir + '\\\\ground_poly.shp')"], {}), "(lidar_footprint, veg_poly, lidardir + '\\\\ground_poly.shp')\n", (6232, 6291), False, 'import arcpy\n'), ((11100, 11162), 'arcpy.sa.Filter', 'arcpy.sa.Filter', (["(temp_files + '\\\\filter_out%s' % ticker)", '"""LOW"""'], {}), "(temp_files + '\\\\filter_out%s' % ticker, 'LOW')\n", (11115, 11162), False, 'import arcpy\n'), ((11829, 11849), 'os.path.exists', 'os.path.exists', (['file'], {}), '(file)\n', (11843, 11849), False, 'import os\n'), ((1002, 1019), 'os.path.join', 'join', (['lidardir', 'f'], {}), '(lidardir, f)\n', (1006, 1019), False, 'from os.path import isfile, join\n'), ((2146, 2162), 'os.listdir', 'listdir', (['laspath'], {}), '(laspath)\n', (2153, 2162), False, 'from os import listdir\n'), ((2318, 2349), 'os.remove', 'os.remove', (["(laspath + '\\\\%s' % f)"], {}), "(laspath + '\\\\%s' % f)\n", (2327, 2349), False, 'import os\n'), ((2396, 2427), 'os.remove', 'os.remove', (["(laspath + '\\\\%s' % f)"], {}), "(laspath + '\\\\%s' % f)\n", (2405, 2427), False, 'import os\n'), ((2867, 2886), 'arcpy.GetMessages', 'arcpy.GetMessages', ([], {}), '()\n', (2884, 2886), False, 'import arcpy\n'), ((3589, 3605), 'os.path.join', 'join', (['naipdir', 'f'], {}), '(naipdir, f)\n', (3593, 3605), False, 'from os.path import isfile, join\n'), ((5380, 5401), 'arcpy.sa.Raster', 'arcpy.sa.Raster', (['ndvi'], {}), '(ndvi)\n', (5395, 5401), False, 'import arcpy\n'), ((6484, 6503), 'arcpy.GetMessages', 'arcpy.GetMessages', ([], {}), '()\n', (6501, 6503), False, 'import arcpy\n'), ((8944, 8963), 'arcpy.GetMessages', 'arcpy.GetMessages', ([], {}), '()\n', (8961, 8963), False, 'import arcpy\n'), ((2173, 2189), 'os.path.join', 'join', (['laspath', 'f'], {}), '(laspath, f)\n', (2177, 2189), False, 'from os.path import isfile, join\n'), ((7864, 7896), 'os.path.basename', 'os.path.basename', (['in_spatial_ref'], {}), '(in_spatial_ref)\n', (7880, 7896), False, 'import os\n'), ((9345, 9378), 'os.path.basename', 'os.path.basename', (['out_spatial_ref'], {}), '(out_spatial_ref)\n', (9361, 9378), False, 'import os\n'), ((11892, 11911), 'shutil.rmtree', 'shutil.rmtree', (['file'], {}), '(file)\n', (11905, 11911), False, 'import shutil\n')]
from model_new.config import Config from model_new.keras_model import KerasModel from model_new.utils import load_json config = Config() model = KerasModel(config) train_set = load_json('dataset/train.json') # dev_set = load_json('dataset/dev.json') # sub_set = dev_set[:config.batch_size * 50] model.train(train_set, None, None)
[ "model_new.utils.load_json", "model_new.keras_model.KerasModel", "model_new.config.Config" ]
[((129, 137), 'model_new.config.Config', 'Config', ([], {}), '()\n', (135, 137), False, 'from model_new.config import Config\n'), ((146, 164), 'model_new.keras_model.KerasModel', 'KerasModel', (['config'], {}), '(config)\n', (156, 164), False, 'from model_new.keras_model import KerasModel\n'), ((178, 209), 'model_new.utils.load_json', 'load_json', (['"""dataset/train.json"""'], {}), "('dataset/train.json')\n", (187, 209), False, 'from model_new.utils import load_json\n')]
from django.conf.urls import patterns, url from Standings import views urlpatterns = patterns( '', # ex: /standings/ url(r'^$', views.standings_index, name='index'), )
[ "django.conf.urls.url" ]
[((131, 177), 'django.conf.urls.url', 'url', (['"""^$"""', 'views.standings_index'], {'name': '"""index"""'}), "('^$', views.standings_index, name='index')\n", (134, 177), False, 'from django.conf.urls import patterns, url\n')]
#!/usr/bin/python3 # This script runs whenever a user tries to commit something in this repo. # It checks the commit for any text that resembled an encoded JSON web token, # and asks the user to verify that they want to commit a JWT if it finds any. import sys import subprocess import re import base64 import binascii import unittest # run test like so: # (cd .githooks/; python -m unittest pre-commit-python.py) class TestStringMethods(unittest.TestCase): def test_jwts(self): self.assertTrue(contains_jwt(["<KEY>"])) self.assertTrue(contains_jwt(["<KEY>"])) def test_ok(self): self.assertFalse(contains_jwt(["test test"])) self.assertFalse(contains_jwt(["thisisnotajwteventhoughitisalongstring"])) def contains_jwt(lines): jwtPattern = re.compile('JWT|iat|name|sub|alg|exp|k') raiseIssue = False for line in lines: # try to find long (20+ character) words consisting only of valid JWT characters longTokens = re.findall("[A-Za-z0-9_=-]{20,}", line) # try to decode any found tokens and see if they look like a JSONfragment # where :look like a JSON fragment" is defined as "contains any of the words in the 'jwtPattern' regex pattern" for token in longTokens: try: # python's base64 decoder fails if padding is missing; but does not fail if there's # extra padding; so always add padding utfOut = base64.urlsafe_b64decode(token+'==').decode("utf-8") match = jwtPattern.search(utfOut) if match: print("Probable JWT found in commit: " + token + " gets decoded into: " + utfOut) raiseIssue = True # be very specific about the exceptions we ignore: except (UnicodeDecodeError, binascii.Error) as e: continue return raiseIssue def main(): #get git diff lines lines = subprocess.check_output(['git', 'diff', '--staged']).decode("utf-8").split('\n') # filter out short lines and lines that don't begin with a '+' to only # test longer, newly added text filteredLines = list(filter(lambda line : len(line) > 20 and line[0] == '+', lines)) # found a likely JWT, send user through prompt sequence to double check if contains_jwt(filteredLines): prompt = "This commit appears to add a JSON web token, which is often accidental and can be problematic (unless it's for a test). Are you sure you want to commit these changes? (y/n): " failCount = 0 while True: inputLine = input(prompt).lower() if len(inputLine) > 0 and inputLine[0] == 'y': print("OK, proceeding with commit") return 0 elif len(inputLine) > 0 and inputLine[0] == 'n': print("Aborting commit") return 1 elif failCount == 0: prompt = "Please answer with 'y' or 'n'. Do you wish to proceed with this commit?: " elif failCount == 1: prompt = "That's still neither a 'y' nor an 'n'. Do you wish to proceed with this commit?: " else: prompt = "You've entered an incorrect input " + str(failCount) + " times now. Please respond with 'y' or 'n' (sans apostrophes) regarding whether or not you wish to proceed with this commit which possibly contains a JWT: " failCount += 1 else: print("No likely JWTs found, proceeding with commit") return 0 if __name__ == "__main__": sys.exit(main())
[ "subprocess.check_output", "re.findall", "base64.urlsafe_b64decode", "re.compile" ]
[((790, 830), 're.compile', 're.compile', (['"""JWT|iat|name|sub|alg|exp|k"""'], {}), "('JWT|iat|name|sub|alg|exp|k')\n", (800, 830), False, 'import re\n'), ((987, 1026), 're.findall', 're.findall', (['"""[A-Za-z0-9_=-]{20,}"""', 'line'], {}), "('[A-Za-z0-9_=-]{20,}', line)\n", (997, 1026), False, 'import re\n'), ((1949, 2001), 'subprocess.check_output', 'subprocess.check_output', (["['git', 'diff', '--staged']"], {}), "(['git', 'diff', '--staged'])\n", (1972, 2001), False, 'import subprocess\n'), ((1459, 1497), 'base64.urlsafe_b64decode', 'base64.urlsafe_b64decode', (["(token + '==')"], {}), "(token + '==')\n", (1483, 1497), False, 'import base64\n')]
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import logging from custos.clients.tenant_management_client import TenantManagementClient from custos.clients.super_tenant_management_client import SuperTenantManagementClient from custos.clients.identity_management_client import IdentityManagementClient from custos.transport.settings import CustosServerClientSettings import custos.clients.utils.utilities as utl logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) # create console handler with a higher log level handler = logging.StreamHandler() handler.setLevel(logging.DEBUG) custos_settings = CustosServerClientSettings() # load APIServerClient with default configuration client = TenantManagementClient(custos_settings) admin_client = SuperTenantManagementClient(custos_settings) id_client = IdentityManagementClient(custos_settings) token = utl.get_token(custos_settings) def create_tenant(): contacts = ["2345634324"] redirect_uris = ["http://localhost:8080,http://localhost:8080/user/external_ids"] response = client.create_admin_tenant("SAMPLE", "<EMAIL>", "First Name", "LastName", "email", "admin", "1234", contacts, redirect_uris, "https://domain.org/", "openid profile email org.cilogon.userinfo", "domain.org", "https://domain.org/static/favicon.png", "Galaxy Portal") print(response) def get_tenant(): client_id = "custos-8p4baddxvbiusmjorjch-10000401" response = client.get_tenant(client_token=token, client_id=client_id) print(response) def update_tenant(): client_id = "custos-6nwoqodstpe5mvcq09lh-10000101" contacts = ["8123915386"] redirect_uris = ["https://custos.scigap.org/callback ", "http://127.0.0.1:8000/auth/callback/", "http://127.0.0.1:8000/"] response = client.update_tenant(token, client_id, "Custos Portal", "<EMAIL>", "Isuru", "Ranawaka", "<EMAIL>", "isjarana", "Custos1234", contacts, redirect_uris, "https://custos.scigap.org/", "openid profile email org.cilogon.userinfo", "domain.org", "https://custos.scigap.org/", "Custos Portal") print(response) def add_tenant_roles(): roles = [{"name": "testing", "composite": False, "description": "testing realm"}] response = client.add_tenant_roles(token, roles, False) print(response) def add_protocol_mapper(): response = client.add_protocol_mapper(token, "phone_atr", "phone", "phone", "STRING", "USER_ATTRIBUTE", True, True, True, False, False) print(response) def get_child_tenants(): response = client.get_child_tenants(token, 0, 5, "ACTIVE") print(response) def get_all_tenants(): response = admin_client.get_all_tenants(token, 0, 5, "ACTIVE") print(response) def delete_tenant(): response = client.delete_tenant(token, "<PASSWORD>-pv<PASSWORD>ps<PASSWORD>t-10000000") print(response)
[ "custos.clients.super_tenant_management_client.SuperTenantManagementClient", "custos.clients.tenant_management_client.TenantManagementClient", "logging.StreamHandler", "custos.clients.utils.utilities.get_token", "custos.clients.identity_management_client.IdentityManagementClient", "custos.transport.settings.CustosServerClientSettings", "logging.getLogger" ]
[((1170, 1197), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1187, 1197), False, 'import logging\n'), ((1289, 1312), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (1310, 1312), False, 'import logging\n'), ((1364, 1392), 'custos.transport.settings.CustosServerClientSettings', 'CustosServerClientSettings', ([], {}), '()\n', (1390, 1392), False, 'from custos.transport.settings import CustosServerClientSettings\n'), ((1452, 1491), 'custos.clients.tenant_management_client.TenantManagementClient', 'TenantManagementClient', (['custos_settings'], {}), '(custos_settings)\n', (1474, 1491), False, 'from custos.clients.tenant_management_client import TenantManagementClient\n'), ((1507, 1551), 'custos.clients.super_tenant_management_client.SuperTenantManagementClient', 'SuperTenantManagementClient', (['custos_settings'], {}), '(custos_settings)\n', (1534, 1551), False, 'from custos.clients.super_tenant_management_client import SuperTenantManagementClient\n'), ((1564, 1605), 'custos.clients.identity_management_client.IdentityManagementClient', 'IdentityManagementClient', (['custos_settings'], {}), '(custos_settings)\n', (1588, 1605), False, 'from custos.clients.identity_management_client import IdentityManagementClient\n'), ((1615, 1645), 'custos.clients.utils.utilities.get_token', 'utl.get_token', (['custos_settings'], {}), '(custos_settings)\n', (1628, 1645), True, 'import custos.clients.utils.utilities as utl\n')]
# uncompyle6 version 2.13.2 # Python bytecode 3.5 (3351) # Decompiled from: Python 3.5.3 (default, Jan 19 2017, 14:11:04) # [GCC 6.3.0 20170118] # Embedded file name: db\tables\licenses.py from sqlalchemy import * from sqlalchemy.orm import mapper from db.tables import metadata LicensesTable = Table('licenses', metadata, Column('license_key', TEXT, primary_key=True)) class LicenseRow(object): license_key = None def __init__(self, license_key): self.license_key = license_key def __str__(self): return 'R_license[%s]' % (self.license_key,) def __repr__(self): return self.__str__() mapper(LicenseRow, LicensesTable)
[ "sqlalchemy.orm.mapper" ]
[((632, 665), 'sqlalchemy.orm.mapper', 'mapper', (['LicenseRow', 'LicensesTable'], {}), '(LicenseRow, LicensesTable)\n', (638, 665), False, 'from sqlalchemy.orm import mapper\n')]
#!/usr/bin/env python # -*- coding: UTF-8 -*- """ Author: enen92 License: I don't care version 3.0 """ import xbmc,xbmcgui,xbmcaddon,os addon_id = 'script.retrogames' selfAddon = xbmcaddon.Addon(id=addon_id) datapath = xbmc.translatePath(selfAddon.getAddonInfo('profile')).decode('utf-8') addonfolder = xbmc.translatePath(selfAddon.getAddonInfo('path')).decode('utf-8') artfolder = os.path.join(addonfolder,'resources','img') msgok = xbmcgui.Dialog().ok platformsave= os.path.join(datapath,'folders.txt') def translate(text): return selfAddon.getLocalizedString(text).encode('utf-8')
[ "xbmcgui.Dialog", "os.path.join", "xbmcaddon.Addon" ]
[((186, 214), 'xbmcaddon.Addon', 'xbmcaddon.Addon', ([], {'id': 'addon_id'}), '(id=addon_id)\n', (201, 214), False, 'import xbmc, xbmcgui, xbmcaddon, os\n'), ((389, 434), 'os.path.join', 'os.path.join', (['addonfolder', '"""resources"""', '"""img"""'], {}), "(addonfolder, 'resources', 'img')\n", (401, 434), False, 'import xbmc, xbmcgui, xbmcaddon, os\n'), ((475, 512), 'os.path.join', 'os.path.join', (['datapath', '"""folders.txt"""'], {}), "(datapath, 'folders.txt')\n", (487, 512), False, 'import xbmc, xbmcgui, xbmcaddon, os\n'), ((441, 457), 'xbmcgui.Dialog', 'xbmcgui.Dialog', ([], {}), '()\n', (455, 457), False, 'import xbmc, xbmcgui, xbmcaddon, os\n')]
"""Test Palmetto.""" import pytest from palmettopy.palmetto import Palmetto from palmettopy.exceptions import CoherenceTypeNotAvailable, EndpointDown, WrongContentType @pytest.fixture def words(): """Load test data fixture.""" words = ["cake", "apple", "banana", "cherry", "chocolate"] return words @pytest.fixture def words_underscore(): """Load test data fixture.""" words = ['label', 'type', 'character', 'foundation_garment'] return words @pytest.fixture def words_no_results(): """Load test data fixture.""" words = ['label', 'type', 'character', 'subject', 'discipline', 'topic', 'national', 'familycolor', 'fam', 'glotto', 'isoexception'] return words def test_get_coherence(capsys, words): palmetto = Palmetto() coherence = palmetto.get_coherence(words) assert(coherence == 0.5678879445677241) def test_get_coherence_fast(capsys, words): palmetto = Palmetto() coherence = palmetto.get_coherence_fast(words) assert(coherence == 1779.6591356383024) def test_wrong_endpoint(words): palmetto = Palmetto("http://example.com/nothinghere/") with pytest.raises(EndpointDown): coherence = palmetto.get_coherence(words) def test_wrong_coherence_type(words): palmetto = Palmetto() with pytest.raises(CoherenceTypeNotAvailable): coherence = palmetto.get_coherence(words, coherence_type="asdf") def test_all_coherence_types(words): palmetto = Palmetto() for coherence_type in palmetto.all_coherence_types: palmetto.get_coherence(words, coherence_type=coherence_type) def test_wrong_content_type(words): palmetto = Palmetto() with pytest.raises(WrongContentType): palmetto._request_by_service(words, "cv", "bla") def test_all_content_types(words): palmetto = Palmetto() for content_type in ["text", "bytes"]: palmetto._request_by_service(words, "umass", content_type) def test_get_df_for_words(words): palmetto = Palmetto() doc_ids = palmetto.get_df_for_words(words) for i in range(0, len(words)): assert(doc_ids[i][0] == words[i]) def test_get_df_for_words_underscore(words_underscore): """ This test case fails for some unknown reason Fails. Palmetto can not handle underscores. """ palmetto = Palmetto() doc_ids = palmetto.get_df_for_words(words_underscore) for i in range(0, len(words_underscore)): assert(doc_ids[i][0] == words_underscore[i]) def test_get_df_for_words_with_no_results(words_no_results): """ This test case fails for some unknown reason Fails. Palmetto can not handle underscores. """ palmetto = Palmetto() doc_ids = palmetto.get_df_for_words(words_no_results) for i in range(0, len(words_no_results)): assert(doc_ids[i][0] == words_no_results[i])
[ "pytest.raises", "palmettopy.palmetto.Palmetto" ]
[((759, 769), 'palmettopy.palmetto.Palmetto', 'Palmetto', ([], {}), '()\n', (767, 769), False, 'from palmettopy.palmetto import Palmetto\n'), ((921, 931), 'palmettopy.palmetto.Palmetto', 'Palmetto', ([], {}), '()\n', (929, 931), False, 'from palmettopy.palmetto import Palmetto\n'), ((1076, 1119), 'palmettopy.palmetto.Palmetto', 'Palmetto', (['"""http://example.com/nothinghere/"""'], {}), "('http://example.com/nothinghere/')\n", (1084, 1119), False, 'from palmettopy.palmetto import Palmetto\n'), ((1263, 1273), 'palmettopy.palmetto.Palmetto', 'Palmetto', ([], {}), '()\n', (1271, 1273), False, 'from palmettopy.palmetto import Palmetto\n'), ((1452, 1462), 'palmettopy.palmetto.Palmetto', 'Palmetto', ([], {}), '()\n', (1460, 1462), False, 'from palmettopy.palmetto import Palmetto\n'), ((1641, 1651), 'palmettopy.palmetto.Palmetto', 'Palmetto', ([], {}), '()\n', (1649, 1651), False, 'from palmettopy.palmetto import Palmetto\n'), ((1803, 1813), 'palmettopy.palmetto.Palmetto', 'Palmetto', ([], {}), '()\n', (1811, 1813), False, 'from palmettopy.palmetto import Palmetto\n'), ((1975, 1985), 'palmettopy.palmetto.Palmetto', 'Palmetto', ([], {}), '()\n', (1983, 1985), False, 'from palmettopy.palmetto import Palmetto\n'), ((2305, 2315), 'palmettopy.palmetto.Palmetto', 'Palmetto', ([], {}), '()\n', (2313, 2315), False, 'from palmettopy.palmetto import Palmetto\n'), ((2673, 2683), 'palmettopy.palmetto.Palmetto', 'Palmetto', ([], {}), '()\n', (2681, 2683), False, 'from palmettopy.palmetto import Palmetto\n'), ((1129, 1156), 'pytest.raises', 'pytest.raises', (['EndpointDown'], {}), '(EndpointDown)\n', (1142, 1156), False, 'import pytest\n'), ((1283, 1323), 'pytest.raises', 'pytest.raises', (['CoherenceTypeNotAvailable'], {}), '(CoherenceTypeNotAvailable)\n', (1296, 1323), False, 'import pytest\n'), ((1661, 1692), 'pytest.raises', 'pytest.raises', (['WrongContentType'], {}), '(WrongContentType)\n', (1674, 1692), False, 'import pytest\n')]
import model3 as M import numpy as np import tensorflow as tf params = np.load('lstmpm_d1.npy').item() params2 = np.load('lstmpm_d2.npy').item() def get_conv(name): res = [] # print(params[name]) res.append(params[name]['weights']) res.append(params[name]['bias']) # print(res[0].shape) return res def get_conv2(name): res = [] # print(params[name]) res.append(params2[name]['weights']) res.append(params2[name]['bias']) # print(res[0].shape) return res class Stage0(M.Model): def initialize(self): # init encoding self.c1_s1 = M.ConvLayer(9, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv('conv1_stage1')) self.p1_s1 = M.MaxPool(3, 2, pad='VALID') self.c2_s1 = M.ConvLayer(9, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv('conv2_stage1')) self.p2_s1 = M.MaxPool(3, 2, pad='VALID') self.c3_s1 = M.ConvLayer(9, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv('conv3_stage1')) self.p3_s1 = M.MaxPool(3, 2, pad='VALID') self.c4_s1 = M.ConvLayer(5, 32, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv('conv4_stage1')) self.c5_s1 = M.ConvLayer(9, 512, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv('conv5_stage1')) self.c6_s1 = M.ConvLayer(1, 512, activation=M.PARAM_RELU, values=get_conv('conv6_stage1')) self.c7_s1 = M.ConvLayer(1, 15, values=get_conv('conv7_stage1')) # frame encoding self.c1_s2 = M.ConvLayer(9, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv('conv1_stage2')) self.p1_s2 = M.MaxPool(3, 2, pad='VALID') self.c2_s2 = M.ConvLayer(9, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv('conv2_stage2')) self.p2_s2 = M.MaxPool(3, 2, pad='VALID') self.c3_s2 = M.ConvLayer(9, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv('conv3_stage2')) self.p3_s2 = M.MaxPool(3, 2, pad='VALID') self.c4_s2 = M.ConvLayer(5, 32, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv('conv4_stage2')) # center map self.pool = M.AvgPool(9,8, pad='VALID') # LSTM0 self.g = M.ConvLayer(3, 48, pad='SAME_LEFT', values=get_conv('g_x_stage2')) self.gb = tf.convert_to_tensor(params['g_stage2'][1].astype(np.float32)) self.gb = tf.Variable(self.gb) self.i = M.ConvLayer(3, 48, pad='SAME_LEFT', values=get_conv('i_x_stage2')) self.ib = tf.convert_to_tensor(params['i_stage2'][1].astype(np.float32)) self.ib = tf.Variable(self.ib) self.o = M.ConvLayer(3, 48, pad='SAME_LEFT', values=get_conv('o_x_stage2')) self.ob = tf.convert_to_tensor(params['o_stage2'][1].astype(np.float32)) self.ob = tf.Variable(self.ob) # decoder branch self.mc1 = M.ConvLayer(11, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv('Mconv1_stage2')) self.mc2 = M.ConvLayer(11, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv('Mconv2_stage2')) self.mc3 = M.ConvLayer(11, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv('Mconv3_stage2')) self.mc4 = M.ConvLayer(1, 128, activation=M.PARAM_RELU, values=get_conv('Mconv4_stage2')) self.mc5 = M.ConvLayer(1, 15, values=get_conv('Mconv5_stage2')) def forward(self, dt1, dt2, centermap): #init enc e = dt1 e = self.c1_s1(e) e = tf.pad(e, [[0,0],[0,1],[0,1],[0,0]], mode='SYMMETRIC') e = self.p1_s1(e) e = self.c2_s1(e) e = tf.pad(e, [[0,0],[0,1],[0,1],[0,0]], mode='SYMMETRIC') e = self.p2_s1(e) e = self.c3_s1(e) e = tf.pad(e, [[0,0],[0,1],[0,1],[0,0]], mode='SYMMETRIC') e = self.p3_s1(e) e = self.c4_s1(e) e = self.c5_s1(e) e = self.c6_s1(e) e = self.c7_s1(e) # frame encoding f = dt2 f = self.c1_s2(f) f = tf.pad(f, [[0,0],[0,1],[0,1],[0,0]], mode='SYMMETRIC') f = self.p1_s2(f) f = self.c2_s2(f) f = tf.pad(f, [[0,0],[0,1],[0,1],[0,0]], mode='SYMMETRIC') f = self.p2_s2(f) f = self.c3_s2(f) f = tf.pad(f, [[0,0],[0,1],[0,1],[0,0]], mode='SYMMETRIC') f = self.p3_s2(f) f = self.c4_s2(f) # centermap pooling x = tf.pad(centermap, [[0,0],[0,1],[0,1],[0,0]], mode='SYMMETRIC') x = self.pool(x) # LSTM branch x = tf.concat([f, e, x], axis=-1) g = self.g(x) + self.gb i = self.i(x) + self.ib o = self.o(x) + self.ob g = tf.tanh(g) i = tf.sigmoid(i) o = tf.sigmoid(o) c = g * i h = o * tf.tanh(c) # decoder branch x = self.mc1(h) x = self.mc2(x) x = self.mc3(x) x = self.mc4(x) out = self.mc5(x) return out class Stage1(M.Model): def initialize(self): # frame encoding self.c1_s2 = M.ConvLayer(9, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv2('conv1_stage2')) self.p1_s2 = M.MaxPool(3, 2, pad='VALID') self.c2_s2 = M.ConvLayer(9, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv2('conv2_stage2')) self.p2_s2 = M.MaxPool(3, 2, pad='VALID') self.c3_s2 = M.ConvLayer(9, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv2('conv3_stage2')) self.p3_s2 = M.MaxPool(3, 2, pad='VALID') self.c4_s2 = M.ConvLayer(5, 32, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv2('conv4_stage2')) # center map self.pool = M.AvgPool(9,8, pad='VALID') # lstm self.gx = M.ConvLayer(3, 48, pad='SAME_LEFT', values=get_conv2('g_x_stage3')) self.gh = M.ConvLayer(3, 48, pad='SAME_LEFT', values=get_conv2('g_h_stage3')) self.gb = tf.convert_to_tensor(params2['g_stage3'][1].astype(np.float32)) self.gb = tf.Variable(self.gb) self.fx = M.ConvLayer(3, 48, pad='SAME_LEFT', values=get_conv2('f_x_stage3')) self.fh = M.ConvLayer(3, 48, pad='SAME_LEFT', values=get_conv2('f_h_stage3')) self.fb = tf.convert_to_tensor(params2['f_stage3'][1].astype(np.float32)) self.fb = tf.Variable(self.fb) self.ox = M.ConvLayer(3, 48, pad='SAME_LEFT', values=get_conv2('o_x_stage3')) self.oh = M.ConvLayer(3, 48, pad='SAME_LEFT', values=get_conv2('o_h_stage3')) self.ob = tf.convert_to_tensor(params2['o_stage3'][1].astype(np.float32)) self.ob = tf.Variable(self.ob) self.ix = M.ConvLayer(3, 48, pad='SAME_LEFT', values=get_conv2('i_x_stage3')) self.ih = M.ConvLayer(3, 48, pad='SAME_LEFT', values=get_conv2('i_h_stage3')) self.ib = tf.convert_to_tensor(params2['i_stage3'][1].astype(np.float32)) self.ib = tf.Variable(self.ib) # decoder branch self.mc1 = M.ConvLayer(11, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv2('Mres1_stage3')) self.mc2 = M.ConvLayer(11, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv2('Mres2_stage3')) self.mc3 = M.ConvLayer(11, 128, pad='SAME_LEFT', activation=M.PARAM_RELU, values=get_conv2('Mres3_stage3')) self.mc4 = M.ConvLayer(1, 128, activation=M.PARAM_RELU, values=get_conv2('Mres4_stage3')) self.mc5 = M.ConvLayer(1, 15, values=get_conv2('Mres5_stage3')) def forward(self, x, hmap, centermap, h, c): # frame encoding f = x f = self.c1_s2(f) f = tf.pad(f, [[0,0],[0,1],[0,1],[0,0]], mode='SYMMETRIC') f = self.p1_s2(f) f = self.c2_s2(f) f = tf.pad(f, [[0,0],[0,1],[0,1],[0,0]], mode='SYMMETRIC') f = self.p2_s2(f) f = self.c3_s2(f) f = tf.pad(f, [[0,0],[0,1],[0,1],[0,0]], mode='SYMMETRIC') f = self.p3_s2(f) f = self.c4_s2(f) # centermap pooling ce = tf.pad(centermap, [[0,0],[0,1],[0,1],[0,0]], mode='SYMMETRIC') ce = self.pool(ce) # lstm branch x = tf.concat([f, hmap, ce], axis=-1) gx = self.gx(x) gh = self.gh(h) ox = self.ox(x) oh = self.oh(h) fx = self.fx(x) fh = self.fh(h) ix = self.ix(x) ih = self.ih(h) g = tf.tanh(gx + gh + self.gb) o = tf.sigmoid(ox + oh + self.ob) i = tf.sigmoid(ix + ih + self.ib) f = tf.sigmoid(fx + fh + self.fb) c = f*c + i*g h = o * tf.tanh(c) # decoder branch x = self.mc1(h) x = self.mc2(x) x = self.mc3(x) x = self.mc4(x) out = self.mc5(x) return out class ModelBundle(M.Model): def initialize(self): self.s0 = Stage0() self.s1 = Stage1() if __name__=='__main__': mods = ModelBundle() mod = mods.s0 x = np.ones([1,368,368,3]).astype(np.float32) cent = np.ones([1,368,368,1]).astype(np.float32) x = mod(x, x, cent) out = np.transpose(x,[0,3,1,2]) print(out) print(out.shape) input('Test deploy1 finished. Input for testing deploy2') mod = mods.s1 x = np.ones([1,368,368,3]).astype(np.float32) cent = np.ones([1,368,368,1]).astype(np.float32) h = c = np.ones([1,46,46, 48]).astype(np.float32) hmap = np.ones([1,46,46, 15]).astype(np.float32) x[:,-1] = 0 x = mod(x, hmap, cent, h, c) out = np.transpose(x,[0,3,1,2]) print(out) print(out.shape) input('Test deploy2 finished. Input for saving converted weights ') saver = M.Saver(mods) saver.save('./LSTMPM/lstmpm.ckpt')
[ "model3.Saver", "numpy.load", "model3.AvgPool", "model3.MaxPool", "tensorflow.pad", "numpy.transpose", "tensorflow.concat", "numpy.ones", "tensorflow.Variable", "tensorflow.tanh", "tensorflow.sigmoid" ]
[((7984, 8013), 'numpy.transpose', 'np.transpose', (['x', '[0, 3, 1, 2]'], {}), '(x, [0, 3, 1, 2])\n', (7996, 8013), True, 'import numpy as np\n'), ((8365, 8394), 'numpy.transpose', 'np.transpose', (['x', '[0, 3, 1, 2]'], {}), '(x, [0, 3, 1, 2])\n', (8377, 8394), True, 'import numpy as np\n'), ((8500, 8513), 'model3.Saver', 'M.Saver', (['mods'], {}), '(mods)\n', (8507, 8513), True, 'import model3 as M\n'), ((75, 99), 'numpy.load', 'np.load', (['"""lstmpm_d1.npy"""'], {}), "('lstmpm_d1.npy')\n", (82, 99), True, 'import numpy as np\n'), ((117, 141), 'numpy.load', 'np.load', (['"""lstmpm_d2.npy"""'], {}), "('lstmpm_d2.npy')\n", (124, 141), True, 'import numpy as np\n'), ((665, 693), 'model3.MaxPool', 'M.MaxPool', (['(3)', '(2)'], {'pad': '"""VALID"""'}), "(3, 2, pad='VALID')\n", (674, 693), True, 'import model3 as M\n'), ((819, 847), 'model3.MaxPool', 'M.MaxPool', (['(3)', '(2)'], {'pad': '"""VALID"""'}), "(3, 2, pad='VALID')\n", (828, 847), True, 'import model3 as M\n'), ((973, 1001), 'model3.MaxPool', 'M.MaxPool', (['(3)', '(2)'], {'pad': '"""VALID"""'}), "(3, 2, pad='VALID')\n", (982, 1001), True, 'import model3 as M\n'), ((1527, 1555), 'model3.MaxPool', 'M.MaxPool', (['(3)', '(2)'], {'pad': '"""VALID"""'}), "(3, 2, pad='VALID')\n", (1536, 1555), True, 'import model3 as M\n'), ((1681, 1709), 'model3.MaxPool', 'M.MaxPool', (['(3)', '(2)'], {'pad': '"""VALID"""'}), "(3, 2, pad='VALID')\n", (1690, 1709), True, 'import model3 as M\n'), ((1835, 1863), 'model3.MaxPool', 'M.MaxPool', (['(3)', '(2)'], {'pad': '"""VALID"""'}), "(3, 2, pad='VALID')\n", (1844, 1863), True, 'import model3 as M\n'), ((2003, 2031), 'model3.AvgPool', 'M.AvgPool', (['(9)', '(8)'], {'pad': '"""VALID"""'}), "(9, 8, pad='VALID')\n", (2012, 2031), True, 'import model3 as M\n'), ((2207, 2227), 'tensorflow.Variable', 'tf.Variable', (['self.gb'], {}), '(self.gb)\n', (2218, 2227), True, 'import tensorflow as tf\n'), ((2393, 2413), 'tensorflow.Variable', 'tf.Variable', (['self.ib'], {}), '(self.ib)\n', (2404, 2413), True, 'import tensorflow as tf\n'), ((2579, 2599), 'tensorflow.Variable', 'tf.Variable', (['self.ob'], {}), '(self.ob)\n', (2590, 2599), True, 'import tensorflow as tf\n'), ((3201, 3262), 'tensorflow.pad', 'tf.pad', (['e', '[[0, 0], [0, 1], [0, 1], [0, 0]]'], {'mode': '"""SYMMETRIC"""'}), "(e, [[0, 0], [0, 1], [0, 1], [0, 0]], mode='SYMMETRIC')\n", (3207, 3262), True, 'import tensorflow as tf\n'), ((3302, 3363), 'tensorflow.pad', 'tf.pad', (['e', '[[0, 0], [0, 1], [0, 1], [0, 0]]'], {'mode': '"""SYMMETRIC"""'}), "(e, [[0, 0], [0, 1], [0, 1], [0, 0]], mode='SYMMETRIC')\n", (3308, 3363), True, 'import tensorflow as tf\n'), ((3403, 3464), 'tensorflow.pad', 'tf.pad', (['e', '[[0, 0], [0, 1], [0, 1], [0, 0]]'], {'mode': '"""SYMMETRIC"""'}), "(e, [[0, 0], [0, 1], [0, 1], [0, 0]], mode='SYMMETRIC')\n", (3409, 3464), True, 'import tensorflow as tf\n'), ((3616, 3677), 'tensorflow.pad', 'tf.pad', (['f', '[[0, 0], [0, 1], [0, 1], [0, 0]]'], {'mode': '"""SYMMETRIC"""'}), "(f, [[0, 0], [0, 1], [0, 1], [0, 0]], mode='SYMMETRIC')\n", (3622, 3677), True, 'import tensorflow as tf\n'), ((3717, 3778), 'tensorflow.pad', 'tf.pad', (['f', '[[0, 0], [0, 1], [0, 1], [0, 0]]'], {'mode': '"""SYMMETRIC"""'}), "(f, [[0, 0], [0, 1], [0, 1], [0, 0]], mode='SYMMETRIC')\n", (3723, 3778), True, 'import tensorflow as tf\n'), ((3818, 3879), 'tensorflow.pad', 'tf.pad', (['f', '[[0, 0], [0, 1], [0, 1], [0, 0]]'], {'mode': '"""SYMMETRIC"""'}), "(f, [[0, 0], [0, 1], [0, 1], [0, 0]], mode='SYMMETRIC')\n", (3824, 3879), True, 'import tensorflow as tf\n'), ((3943, 4012), 'tensorflow.pad', 'tf.pad', (['centermap', '[[0, 0], [0, 1], [0, 1], [0, 0]]'], {'mode': '"""SYMMETRIC"""'}), "(centermap, [[0, 0], [0, 1], [0, 1], [0, 0]], mode='SYMMETRIC')\n", (3949, 4012), True, 'import tensorflow as tf\n'), ((4049, 4078), 'tensorflow.concat', 'tf.concat', (['[f, e, x]'], {'axis': '(-1)'}), '([f, e, x], axis=-1)\n', (4058, 4078), True, 'import tensorflow as tf\n'), ((4165, 4175), 'tensorflow.tanh', 'tf.tanh', (['g'], {}), '(g)\n', (4172, 4175), True, 'import tensorflow as tf\n'), ((4182, 4195), 'tensorflow.sigmoid', 'tf.sigmoid', (['i'], {}), '(i)\n', (4192, 4195), True, 'import tensorflow as tf\n'), ((4202, 4215), 'tensorflow.sigmoid', 'tf.sigmoid', (['o'], {}), '(o)\n', (4212, 4215), True, 'import tensorflow as tf\n'), ((4572, 4600), 'model3.MaxPool', 'M.MaxPool', (['(3)', '(2)'], {'pad': '"""VALID"""'}), "(3, 2, pad='VALID')\n", (4581, 4600), True, 'import model3 as M\n'), ((4727, 4755), 'model3.MaxPool', 'M.MaxPool', (['(3)', '(2)'], {'pad': '"""VALID"""'}), "(3, 2, pad='VALID')\n", (4736, 4755), True, 'import model3 as M\n'), ((4882, 4910), 'model3.MaxPool', 'M.MaxPool', (['(3)', '(2)'], {'pad': '"""VALID"""'}), "(3, 2, pad='VALID')\n", (4891, 4910), True, 'import model3 as M\n'), ((5051, 5079), 'model3.AvgPool', 'M.AvgPool', (['(9)', '(8)'], {'pad': '"""VALID"""'}), "(9, 8, pad='VALID')\n", (5060, 5079), True, 'import model3 as M\n'), ((5337, 5357), 'tensorflow.Variable', 'tf.Variable', (['self.gb'], {}), '(self.gb)\n', (5348, 5357), True, 'import tensorflow as tf\n'), ((5606, 5626), 'tensorflow.Variable', 'tf.Variable', (['self.fb'], {}), '(self.fb)\n', (5617, 5626), True, 'import tensorflow as tf\n'), ((5875, 5895), 'tensorflow.Variable', 'tf.Variable', (['self.ob'], {}), '(self.ob)\n', (5886, 5895), True, 'import tensorflow as tf\n'), ((6144, 6164), 'tensorflow.Variable', 'tf.Variable', (['self.ib'], {}), '(self.ib)\n', (6155, 6164), True, 'import tensorflow as tf\n'), ((6776, 6837), 'tensorflow.pad', 'tf.pad', (['f', '[[0, 0], [0, 1], [0, 1], [0, 0]]'], {'mode': '"""SYMMETRIC"""'}), "(f, [[0, 0], [0, 1], [0, 1], [0, 0]], mode='SYMMETRIC')\n", (6782, 6837), True, 'import tensorflow as tf\n'), ((6877, 6938), 'tensorflow.pad', 'tf.pad', (['f', '[[0, 0], [0, 1], [0, 1], [0, 0]]'], {'mode': '"""SYMMETRIC"""'}), "(f, [[0, 0], [0, 1], [0, 1], [0, 0]], mode='SYMMETRIC')\n", (6883, 6938), True, 'import tensorflow as tf\n'), ((6978, 7039), 'tensorflow.pad', 'tf.pad', (['f', '[[0, 0], [0, 1], [0, 1], [0, 0]]'], {'mode': '"""SYMMETRIC"""'}), "(f, [[0, 0], [0, 1], [0, 1], [0, 0]], mode='SYMMETRIC')\n", (6984, 7039), True, 'import tensorflow as tf\n'), ((7104, 7173), 'tensorflow.pad', 'tf.pad', (['centermap', '[[0, 0], [0, 1], [0, 1], [0, 0]]'], {'mode': '"""SYMMETRIC"""'}), "(centermap, [[0, 0], [0, 1], [0, 1], [0, 0]], mode='SYMMETRIC')\n", (7110, 7173), True, 'import tensorflow as tf\n'), ((7212, 7245), 'tensorflow.concat', 'tf.concat', (['[f, hmap, ce]'], {'axis': '(-1)'}), '([f, hmap, ce], axis=-1)\n', (7221, 7245), True, 'import tensorflow as tf\n'), ((7400, 7426), 'tensorflow.tanh', 'tf.tanh', (['(gx + gh + self.gb)'], {}), '(gx + gh + self.gb)\n', (7407, 7426), True, 'import tensorflow as tf\n'), ((7433, 7462), 'tensorflow.sigmoid', 'tf.sigmoid', (['(ox + oh + self.ob)'], {}), '(ox + oh + self.ob)\n', (7443, 7462), True, 'import tensorflow as tf\n'), ((7469, 7498), 'tensorflow.sigmoid', 'tf.sigmoid', (['(ix + ih + self.ib)'], {}), '(ix + ih + self.ib)\n', (7479, 7498), True, 'import tensorflow as tf\n'), ((7505, 7534), 'tensorflow.sigmoid', 'tf.sigmoid', (['(fx + fh + self.fb)'], {}), '(fx + fh + self.fb)\n', (7515, 7534), True, 'import tensorflow as tf\n'), ((4240, 4250), 'tensorflow.tanh', 'tf.tanh', (['c'], {}), '(c)\n', (4247, 4250), True, 'import tensorflow as tf\n'), ((7563, 7573), 'tensorflow.tanh', 'tf.tanh', (['c'], {}), '(c)\n', (7570, 7573), True, 'import tensorflow as tf\n'), ((7864, 7889), 'numpy.ones', 'np.ones', (['[1, 368, 368, 3]'], {}), '([1, 368, 368, 3])\n', (7871, 7889), True, 'import numpy as np\n'), ((7914, 7939), 'numpy.ones', 'np.ones', (['[1, 368, 368, 1]'], {}), '([1, 368, 368, 1])\n', (7921, 7939), True, 'import numpy as np\n'), ((8120, 8145), 'numpy.ones', 'np.ones', (['[1, 368, 368, 3]'], {}), '([1, 368, 368, 3])\n', (8127, 8145), True, 'import numpy as np\n'), ((8170, 8195), 'numpy.ones', 'np.ones', (['[1, 368, 368, 1]'], {}), '([1, 368, 368, 1])\n', (8177, 8195), True, 'import numpy as np\n'), ((8221, 8245), 'numpy.ones', 'np.ones', (['[1, 46, 46, 48]'], {}), '([1, 46, 46, 48])\n', (8228, 8245), True, 'import numpy as np\n'), ((8271, 8295), 'numpy.ones', 'np.ones', (['[1, 46, 46, 15]'], {}), '([1, 46, 46, 15])\n', (8278, 8295), True, 'import numpy as np\n')]
from django.urls import path, re_path from . import views # 这是应用Book中的url具体配置,请求到这里才能调用该应用的视图,上一级首先是调用BookManager的url,如果没有匹配admin # 那么进入应用的匹配Book.urls,之后才会进入这里应用的url urlpatterns = [ # http://127.0.0.1:8000/admin/ 匹配 # 正则匹配,对请求地址进行正则匹配,如果路径中包含admin,就把后台站点中的url信息包含到这个项目中,指明下一集路径如何匹配 # 如果匹配成功,那么直接调用指定的视图 # 正则匹配,对请求地址进行正则匹配,如果路径中不包含admin,就把Book中的url信息包含到这个项目中,指明下一集路径如何匹配 path('stockplot/', views.showlinediagram), path('index3', views.index3,name='index3'), path('json1', views.json1), # ex:/assetinfo/json2 path('json2', views.json2), path('ajax_add/', views.ajax_add), path('ajax_demo1/', views.ajax_demo1), path('data_fresh/', views.data_fresh, name="data_fresh"), path('stocklist/', views.stockList), # 这里的^表示开始,$表示结束,因为是正则表达式,所以必须严格 re_path(r'^([1-9]\d*)/$', views.dashBoard_m) # 调函数的时候传参数,采用正则的组,正则匹配加括号,然后传进去参数,按照顺序传 # 这里的地址最重要,代表了访问的url地址后面 ]
[ "django.urls.re_path", "django.urls.path" ]
[((403, 444), 'django.urls.path', 'path', (['"""stockplot/"""', 'views.showlinediagram'], {}), "('stockplot/', views.showlinediagram)\n", (407, 444), False, 'from django.urls import path, re_path\n'), ((451, 494), 'django.urls.path', 'path', (['"""index3"""', 'views.index3'], {'name': '"""index3"""'}), "('index3', views.index3, name='index3')\n", (455, 494), False, 'from django.urls import path, re_path\n'), ((500, 526), 'django.urls.path', 'path', (['"""json1"""', 'views.json1'], {}), "('json1', views.json1)\n", (504, 526), False, 'from django.urls import path, re_path\n'), ((560, 586), 'django.urls.path', 'path', (['"""json2"""', 'views.json2'], {}), "('json2', views.json2)\n", (564, 586), False, 'from django.urls import path, re_path\n'), ((593, 626), 'django.urls.path', 'path', (['"""ajax_add/"""', 'views.ajax_add'], {}), "('ajax_add/', views.ajax_add)\n", (597, 626), False, 'from django.urls import path, re_path\n'), ((633, 670), 'django.urls.path', 'path', (['"""ajax_demo1/"""', 'views.ajax_demo1'], {}), "('ajax_demo1/', views.ajax_demo1)\n", (637, 670), False, 'from django.urls import path, re_path\n'), ((677, 733), 'django.urls.path', 'path', (['"""data_fresh/"""', 'views.data_fresh'], {'name': '"""data_fresh"""'}), "('data_fresh/', views.data_fresh, name='data_fresh')\n", (681, 733), False, 'from django.urls import path, re_path\n'), ((740, 775), 'django.urls.path', 'path', (['"""stocklist/"""', 'views.stockList'], {}), "('stocklist/', views.stockList)\n", (744, 775), False, 'from django.urls import path, re_path\n'), ((816, 860), 'django.urls.re_path', 're_path', (['"""^([1-9]\\\\d*)/$"""', 'views.dashBoard_m'], {}), "('^([1-9]\\\\d*)/$', views.dashBoard_m)\n", (823, 860), False, 'from django.urls import path, re_path\n')]
from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() from builtins import * class OJAIList(list): def __init__(self): super(OJAIList, self).__init__() @staticmethod def set_list(value, tags=False): from mapr.ojai.ojai.OJAITagsBuilder import OJAITagsBuilder ojai_list = [] if tags: dump_document = OJAITagsBuilder() else: from mapr.ojai.ojai.OJAIDocument import OJAIDocument dump_document = OJAIDocument() for elem in value: if isinstance(elem, list): if isinstance(dump_document, OJAITagsBuilder): nested_list = OJAIList.set_list(elem, tags=True) else: nested_list = OJAIList.set_list(elem) ojai_list.append(nested_list) elif isinstance(elem, dict) and bool(elem): tmp_dict = {} for k, v in list(elem.items()): if isinstance(v, list): tmp_dict[k] = OJAIList.set_list(v) else: internal_value = dump_document.set('dump', v).as_dictionary()['dump'] tmp_dict[k] = internal_value dump_document.clear() ojai_list.append(tmp_dict) else: ojai_list.append(dump_document.set('dump', elem).as_dictionary()['dump']) dump_document.clear() return ojai_list
[ "mapr.ojai.ojai.OJAIDocument.OJAIDocument", "future.standard_library.install_aliases", "mapr.ojai.ojai.OJAITagsBuilder.OJAITagsBuilder" ]
[((185, 219), 'future.standard_library.install_aliases', 'standard_library.install_aliases', ([], {}), '()\n', (217, 219), False, 'from future import standard_library\n'), ((521, 538), 'mapr.ojai.ojai.OJAITagsBuilder.OJAITagsBuilder', 'OJAITagsBuilder', ([], {}), '()\n', (536, 538), False, 'from mapr.ojai.ojai.OJAITagsBuilder import OJAITagsBuilder\n'), ((646, 660), 'mapr.ojai.ojai.OJAIDocument.OJAIDocument', 'OJAIDocument', ([], {}), '()\n', (658, 660), False, 'from mapr.ojai.ojai.OJAIDocument import OJAIDocument\n')]
import os from copy import copy from typing import Any, List, Tuple from nubia import context, eventbus from nubia.internal import cmdloader from nubia.internal.cmdbase import AutoCommand from pygments.token import Name, Token from rich import box, inspect from rich.align import Align from rich.console import Console from rich.panel import Panel from rich.text import Text from sortedcontainers import SortedSet from ._nubia import _Exit, _Help from .constants import COMMAND_PACKAGES, NUBIA_OPTIONS, TITLE_ASCII, TITLE_TEXT class TurdshovelContext(context.Context): """Context for the Turdshovel app. Only allows interactive mode""" # Need to set this to allow initialization available_obj_types = SortedSet() def get_prompt_tokens(self) -> List[Tuple[Any, str]]: tokens = [ (Token.NewLine, "\n"), (Token.Title, "Turdshovel"), (Token.Space, ""), (Token.Pound, "> "), ] if self.target_friendly_name: tokens.insert(3, (Name.Command, self.target_friendly_name)) tokens.insert(3, (Token.At, "@")) return tokens def _replace_internal_cmds(self, override: bool): for k, v in copy(self._registry._cmd_instance_map).items(): if v.__module__.startswith("nubia.internal.commands"): self._registry._cmd_instance_map.pop(k) self._registry._completer.meta_dict.pop(k) self._registry._completer.words.remove(k) # Readd commands for exit and help with less aliases for cmd in [_Exit, _Help]: self._registry.register_command(cmd(), override) def reload_commands(self): """Reloads all the commands for the context""" self._replace_internal_cmds(override=True) for cmd in cmdloader.load_commands(COMMAND_PACKAGES): self._registry.register_command( AutoCommand(cmd, NUBIA_OPTIONS), override=True ) def on_interactive(self, args): self.verbose = args.verbose self.console = Console(soft_wrap=True) self.console.set_alt_screen() # This will be whatever the DataTarget is connected to and the related runtime self.target = None self.target_friendly_name = "" self.runtime = None self.available_obj_types = SortedSet() title_panel = Panel.fit( Text(TITLE_ASCII.rjust(33), style="bold #52311A", end="").append( TITLE_TEXT, style="bold #693F21" ), border_style="bold #52311A", subtitle=f"{':poop:' * 36}", box=box.SIMPLE, ) self.console.print(Align.center(title_panel)) self._replace_internal_cmds(override=False) self.registry.dispatch_message(eventbus.Message.CONNECTED)
[ "copy.copy", "nubia.internal.cmdloader.load_commands", "sortedcontainers.SortedSet", "nubia.internal.cmdbase.AutoCommand", "rich.console.Console", "rich.align.Align.center" ]
[((718, 729), 'sortedcontainers.SortedSet', 'SortedSet', ([], {}), '()\n', (727, 729), False, 'from sortedcontainers import SortedSet\n'), ((1816, 1857), 'nubia.internal.cmdloader.load_commands', 'cmdloader.load_commands', (['COMMAND_PACKAGES'], {}), '(COMMAND_PACKAGES)\n', (1839, 1857), False, 'from nubia.internal import cmdloader\n'), ((2078, 2101), 'rich.console.Console', 'Console', ([], {'soft_wrap': '(True)'}), '(soft_wrap=True)\n', (2085, 2101), False, 'from rich.console import Console\n'), ((2357, 2368), 'sortedcontainers.SortedSet', 'SortedSet', ([], {}), '()\n', (2366, 2368), False, 'from sortedcontainers import SortedSet\n'), ((2693, 2718), 'rich.align.Align.center', 'Align.center', (['title_panel'], {}), '(title_panel)\n', (2705, 2718), False, 'from rich.align import Align\n'), ((1212, 1250), 'copy.copy', 'copy', (['self._registry._cmd_instance_map'], {}), '(self._registry._cmd_instance_map)\n', (1216, 1250), False, 'from copy import copy\n'), ((1920, 1951), 'nubia.internal.cmdbase.AutoCommand', 'AutoCommand', (['cmd', 'NUBIA_OPTIONS'], {}), '(cmd, NUBIA_OPTIONS)\n', (1931, 1951), False, 'from nubia.internal.cmdbase import AutoCommand\n')]
# -*- coding: utf-8 -*- from pims import ImageReader from load_data.ILoadSupervised import ILoadSupervised from os.path import join, exists import csv class LoadPokemon(ILoadSupervised): def __init__(self, path="train_data/Folder_Videojuegos/pokemon-images-and-types"): self.path = path self.classes = set() def get_all(self, sum1=False): X = [] Y = [] Ys_not_processed = [] with open(join(self.path, "pokemon.csv"), "r") as csv_obj: csv_reader = csv.DictReader(csv_obj) for row in csv_reader: imagename = join(self.path, "images", row["Name"]+".png") if exists(imagename): im = ImageReader(imagename) X.append(im.get_frame(0)) self.classes.add(row["Type1"]) actual_ys = [] actual_ys.append(row["Type1"]) if row["Type2"] is not None: self.classes.add(row["Type2"]) actual_ys.append(row["Type2"]) Ys_not_processed.append(actual_ys) Y = self.make_targets(Ys_not_processed, sum1) return X, Y def make_targets(self, not_processed, sum1=False): Y = [] lcl = list(self.classes) for e in not_processed: target = [0 for _ in self.classes] for pktype in e: target[lcl.index(pktype)] = 1 Y.append(target) if sum1: for i in range(len(Y)): sum_i = sum(Y[i]) Y[i] = [e/float(sum_i) for e in Y[i]] return Y def get_classes(self): return self.classes def get_headers(self): return ["image"]# None #self.headers
[ "pims.ImageReader", "csv.DictReader", "os.path.join", "os.path.exists" ]
[((526, 549), 'csv.DictReader', 'csv.DictReader', (['csv_obj'], {}), '(csv_obj)\n', (540, 549), False, 'import csv\n'), ((452, 482), 'os.path.join', 'join', (['self.path', '"""pokemon.csv"""'], {}), "(self.path, 'pokemon.csv')\n", (456, 482), False, 'from os.path import join, exists\n'), ((613, 660), 'os.path.join', 'join', (['self.path', '"""images"""', "(row['Name'] + '.png')"], {}), "(self.path, 'images', row['Name'] + '.png')\n", (617, 660), False, 'from os.path import join, exists\n'), ((678, 695), 'os.path.exists', 'exists', (['imagename'], {}), '(imagename)\n', (684, 695), False, 'from os.path import join, exists\n'), ((722, 744), 'pims.ImageReader', 'ImageReader', (['imagename'], {}), '(imagename)\n', (733, 744), False, 'from pims import ImageReader\n')]
import discord from discord.ext import commands import random import asyncio class GameManager(): def __init__(self): self.setup() def setup(self): print("GameManager: Loaded") class GameManagerCog(commands.Cog): def __init__(self, client): self.client = client self.initiatives = {} self.gamemanager = GameManager() # Official Format: # Test (category channel) # t-session-planning (text channel) # t-notes (text-channel) # t-stars-and-wishes (text channel) # t-pc-basics (text channel) # t-pc-sheets (text channel) # t-pc-visuals (text channel) # t-music (text channel) # t-dice-rolls (text channel) # t-voice-chat (text channel) # T Sessions (voice channel) # Makes a game (category, channel, role, etc) in the server @commands.command(aliases=['Creategame','CreateGame','cg','Cg','cG','CG','gamecreate','Gamecreate','GameCreate','gc','Gc','gC','GC'],brief="Makes the necessary channels and roles for a game.", description="/creategame [arg1] [arg2] @member\n\n- arg1 = Game Name/Campaign\n- arg2 = Game Name Abbreviation\n- @member = Game Master\n\nMakes the necessary channels and roles for a game.") @commands.has_role("Mod") async def creategame(self, ctx, arg1=None, arg2=None, gm: discord.Member = None): if(arg1 != None and arg2 != None and gm != None): # Stuff guild = ctx.guild progress_msg = await ctx.send("Making...") pos = discord.utils.get(ctx.guild.roles, name="⊱ ───── {⭒|PERSONAL|⭒} ───── ⊰").position +2 member = discord.utils.get(ctx.guild.roles, name="Member") role = await guild.create_role(name=str(arg1), mentionable=True) await role.edit(position=pos) await gm.add_roles(role) overwrites = { guild.default_role: discord.PermissionOverwrite(add_reactions=False, administrator=False, attach_files=False,ban_members=False,change_nickname=False,connect=False,create_instant_invite=False,deafen_members=False,embed_links=False,external_emojis=False,kick_members=False,manage_channels=False,manage_emojis=False,manage_guild=False,manage_messages=False,manage_nicknames=False,manage_permissions=False,manage_roles=False,manage_webhooks=False,mention_everyone=False,move_members=False,mute_members=False,priority_speaker=False,read_message_history=False,read_messages=False,request_to_speak=False,send_messages=False,send_tts_messages=False,speak=False,stream=False,use_external_emojis=False,use_slash_commands=False,use_voice_activation=False,view_audit_log=False,view_channel=False,view_guild_insights=False), role: discord.PermissionOverwrite(add_reactions=None, administrator=None, attach_files=None,ban_members=None,change_nickname=None,connect=None,create_instant_invite=None,deafen_members=None,embed_links=None,external_emojis=None,kick_members=None,manage_channels=None,manage_emojis=None,manage_guild=None,manage_messages=None,manage_nicknames=None,manage_permissions=None,manage_roles=None,manage_webhooks=None,mention_everyone=None,move_members=None,mute_members=None,priority_speaker=None,read_message_history=None,read_messages=None,request_to_speak=None,send_messages=None,send_tts_messages=None,speak=True,stream=None,use_external_emojis=None,use_slash_commands=None,use_voice_activation=True,view_audit_log=None,view_channel=True,view_guild_insights=None), member: discord.PermissionOverwrite(add_reactions=True, administrator=None, attach_files=True,ban_members=None,change_nickname=None,connect=True,create_instant_invite=None,deafen_members=None,embed_links=True,external_emojis=True,kick_members=None,manage_channels=None,manage_emojis=None,manage_guild=None,manage_messages=None,manage_nicknames=None,manage_permissions=None,manage_roles=None,manage_webhooks=None,mention_everyone=None,move_members=None,mute_members=None,priority_speaker=None,read_message_history=True,read_messages=True,request_to_speak=None,send_messages=True,send_tts_messages=None,speak=None,stream=None,use_external_emojis=None,use_slash_commands=None,use_voice_activation=None,view_audit_log=None,view_channel=None,view_guild_insights=None), gm: discord.PermissionOverwrite(add_reactions=None, administrator=None, attach_files=None,ban_members=None,change_nickname=None,connect=None,create_instant_invite=None,deafen_members=None,embed_links=None,external_emojis=None,kick_members=None,manage_channels=None,manage_emojis=None,manage_guild=None,manage_messages=None,manage_nicknames=None,manage_permissions=None,manage_roles=None,manage_webhooks=None,mention_everyone=True,move_members=None,mute_members=True,priority_speaker=True,read_message_history=None,read_messages=None,request_to_speak=None,send_messages=None,send_tts_messages=None,speak=True,stream=None,use_external_emojis=None,use_slash_commands=True,use_voice_activation=True,view_audit_log=None,view_channel=True,view_guild_insights=None) } category = await guild.create_category_channel(str(arg1)) await category.create_text_channel(str(arg2) + " session planning", overwrites=overwrites) await category.create_text_channel(str(arg2) + " notes", overwrites=overwrites) await category.create_text_channel(str(arg2) + " star and wishes", overwrites=overwrites) await category.create_text_channel(str(arg2) + " house rules", overwrites=overwrites) await category.create_text_channel(str(arg2) + " pc basics", overwrites=overwrites) await category.create_text_channel(str(arg2) + " pc sheets", overwrites=overwrites) await category.create_text_channel(str(arg2) + " pc visuals", overwrites=overwrites) await category.create_text_channel(str(arg2) + " music", overwrites=overwrites) await category.create_text_channel(str(arg2) + " dice rolls", overwrites=overwrites) overwrites = { guild.default_role: discord.PermissionOverwrite(add_reactions=False, administrator=False, attach_files=False,ban_members=False,change_nickname=False,connect=False,create_instant_invite=False,deafen_members=False,embed_links=False,external_emojis=False,kick_members=False,manage_channels=False,manage_emojis=False,manage_guild=False,manage_messages=False,manage_nicknames=False,manage_permissions=False,manage_roles=False,manage_webhooks=False,mention_everyone=False,move_members=False,mute_members=False,priority_speaker=False,read_message_history=False,read_messages=False,request_to_speak=False,send_messages=False,send_tts_messages=False,speak=False,stream=False,use_external_emojis=False,use_slash_commands=False,use_voice_activation=False,view_audit_log=False,view_channel=False,view_guild_insights=False), role: discord.PermissionOverwrite(add_reactions=None, administrator=None, attach_files=None,ban_members=None,change_nickname=None,connect=None,create_instant_invite=None,deafen_members=None,embed_links=None,external_emojis=None,kick_members=None,manage_channels=None,manage_emojis=None,manage_guild=None,manage_messages=None,manage_nicknames=None,manage_permissions=None,manage_roles=None,manage_webhooks=None,mention_everyone=None,move_members=None,mute_members=None,priority_speaker=None,read_message_history=None,read_messages=None,request_to_speak=None,send_messages=None,send_tts_messages=None,speak=True,stream=None,use_external_emojis=None,use_slash_commands=None,use_voice_activation=True,view_audit_log=None,view_channel=True,view_guild_insights=None), member: discord.PermissionOverwrite(add_reactions=True, administrator=None, attach_files=True,ban_members=None,change_nickname=None,connect=True,create_instant_invite=None,deafen_members=None,embed_links=True,external_emojis=True,kick_members=None,manage_channels=None,manage_emojis=None,manage_guild=None,manage_messages=None,manage_nicknames=None,manage_permissions=None,manage_roles=None,manage_webhooks=None,mention_everyone=None,move_members=None,mute_members=None,priority_speaker=None,read_message_history=True,read_messages=True,request_to_speak=None,send_messages=True,send_tts_messages=None,speak=None,stream=None,use_external_emojis=None,use_slash_commands=None,use_voice_activation=None,view_audit_log=None,view_channel=True,view_guild_insights=None), gm: discord.PermissionOverwrite(add_reactions=None, administrator=None, attach_files=None,ban_members=None,change_nickname=None,connect=None,create_instant_invite=None,deafen_members=None,embed_links=None,external_emojis=None,kick_members=None,manage_channels=None,manage_emojis=None,manage_guild=None,manage_messages=None,manage_nicknames=None,manage_permissions=None,manage_roles=None,manage_webhooks=None,mention_everyone=True,move_members=None,mute_members=True,priority_speaker=True,read_message_history=None,read_messages=None,request_to_speak=None,send_messages=None,send_tts_messages=None,speak=True,stream=None,use_external_emojis=None,use_slash_commands=True,use_voice_activation=True,view_audit_log=None,view_channel=True,view_guild_insights=None) } await category.create_text_channel(str(arg2) + " voice chat", overwrites=overwrites) await category.create_voice_channel(str(arg2).upper() + " Sessions", overwrites=overwrites) await progress_msg.delete() await ctx.send("Done!") else: await ctx.send("Missing arguments!") @commands.command(aliases=['Deletegame','DeleteGame','dg','Dg','dG','DG','gamedelete','Gamedelete','GameDelete','gd','Gd','gD','GD'],brief="Deletes the appropriate channels and roles for a game.", description="/deletegame [arg]\n\n- arg = Game Name/Campaign\n\nDeletes the appropriate channels and roles for a game.") @commands.has_role("Mod") async def deletegame(self, ctx, arg1=None): if(arg1 != None): # Stuff msg = await ctx.send("Are you sure you want to delete " + str(arg1) + "?") await msg.add_reaction("✅") await msg.add_reaction("❌") def check(reaction, user): return user == ctx.author try: reaction = await self.client.wait_for('reaction_add', timeout=60.0, check=check) if(str(reaction[0]) == '✅'): # Stuff channel = discord.utils.get(ctx.guild.channels, name=str(arg1)) role = discord.utils.get(ctx.guild.roles, name=str(arg1)) await role.delete() category = self.client.get_channel(channel.id) for channel in category.channels: await channel.delete() await category.delete() await msg.delete() await ctx.send("Successfully deleted!") elif(str(reaction[0]) == '❌'): #More Stuff await msg.delete() await ctx.send("Deletion Aborted!") else: await ctx.send("That isn't right...") except asyncio.TimeoutError: await msg.delete() await ctx.send("Timed out!") else: await ctx.send("Missing arguments!") @commands.command(aliases=['Initiative','init','Init','i','I','initiate','Initiate'],brief="Allows you to set the current initiative for a game that can be used as a reminder.", description="/initiative [args]\n\n- args = Names separated by spaces to indicate order of initiative\n\nAllows you to set the current initiative for a game that can be used as a reminder.") async def initiative(self, ctx, *args): if(len(args) != 0): if(str(args).isdecimal()): await ctx.send("You can't have just a number for a name, sorry :(") else: game = ctx.channel.category_id self.initiatives[game] = [arg for arg in args] await ctx.send("Initiative saved!") else: game = ctx.channel.category_id msg = "```Initiative:\n" counter = 1 for arg in self.initiatives[game]: msg += "{}) {}\n".format(counter, arg) counter+=1 msg += "```" # print(self.initiatives[game]) await ctx.send(msg) @commands.command(aliases=['Addplayer','AddPlayer','initadd','Initadd','InitAdd'],brief='Adds a player to the initiative.', description='/addplayer [name] [idx]\n\n- name = The name of the player you are adding to the initiative\n- idx = Where in the list the player will go (optional).\n\nAdds a player to the initiative.') async def addplayer(self, ctx, name:str, idx=None): game = ctx.channel.category_id if(idx != None): if(not name.isdecimal()): self.initiatives[game].insert(int(idx)-1, name) await ctx.send("Successfully added player!") else: await ctx.send("No number for name >:T") else: if(not name.isdecimal()): self.initiatives[game].append(name) await ctx.send("Successfully added player!") else: await ctx.send("No number for name! >:T") @commands.command(aliases=['Removeplayer','RemovePlayer','initdel','Initdel','InitDel'],brief='Removes a player from the initiative.', description="/removeplayer [arg]\n\n- arg = The index or name of the player you'd like to remove from initiative.\n\nRemoves a player from the initiative.") async def removeplayer(self, ctx, arg): game = ctx.channel.category_id if(str(arg).isdecimal()): del self.initiatives[game][int(arg)-1] await ctx.send("Successfully removed player!") else: del self.initiatives[game][self.initiatives[game].index(str(arg))] await ctx.send("Successfully removed player!") def setup(client): client.add_cog(GameManager(client))
[ "discord.utils.get", "discord.ext.commands.has_role", "discord.ext.commands.command", "discord.PermissionOverwrite" ]
[((895, 1316), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['Creategame', 'CreateGame', 'cg', 'Cg', 'cG', 'CG', 'gamecreate',\n 'Gamecreate', 'GameCreate', 'gc', 'Gc', 'gC', 'GC']", 'brief': '"""Makes the necessary channels and roles for a game."""', 'description': '"""/creategame [arg1] [arg2] @member\n\n- arg1 = Game Name/Campaign\n- arg2 = Game Name Abbreviation\n- @member = Game Master\n\nMakes the necessary channels and roles for a game."""'}), '(aliases=[\'Creategame\', \'CreateGame\', \'cg\', \'Cg\', \'cG\',\n \'CG\', \'gamecreate\', \'Gamecreate\', \'GameCreate\', \'gc\', \'Gc\', \'gC\', \'GC\'],\n brief=\'Makes the necessary channels and roles for a game.\', description\n =\n """/creategame [arg1] [arg2] @member\n\n- arg1 = Game Name/Campaign\n- arg2 = Game Name Abbreviation\n- @member = Game Master\n\nMakes the necessary channels and roles for a game."""\n )\n', (911, 1316), False, 'from discord.ext import commands\n'), ((1289, 1313), 'discord.ext.commands.has_role', 'commands.has_role', (['"""Mod"""'], {}), "('Mod')\n", (1306, 1313), False, 'from discord.ext import commands\n'), ((9765, 10116), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['Deletegame', 'DeleteGame', 'dg', 'Dg', 'dG', 'DG', 'gamedelete',\n 'Gamedelete', 'GameDelete', 'gd', 'Gd', 'gD', 'GD']", 'brief': '"""Deletes the appropriate channels and roles for a game."""', 'description': '"""/deletegame [arg]\n\n- arg = Game Name/Campaign\n\nDeletes the appropriate channels and roles for a game."""'}), '(aliases=[\'Deletegame\', \'DeleteGame\', \'dg\', \'Dg\', \'dG\',\n \'DG\', \'gamedelete\', \'Gamedelete\', \'GameDelete\', \'gd\', \'Gd\', \'gD\', \'GD\'],\n brief=\'Deletes the appropriate channels and roles for a game.\',\n description=\n """/deletegame [arg]\n\n- arg = Game Name/Campaign\n\nDeletes the appropriate channels and roles for a game."""\n )\n', (9781, 10116), False, 'from discord.ext import commands\n'), ((10088, 10112), 'discord.ext.commands.has_role', 'commands.has_role', (['"""Mod"""'], {}), "('Mod')\n", (10105, 10112), False, 'from discord.ext import commands\n'), ((11661, 12059), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['Initiative', 'init', 'Init', 'i', 'I', 'initiate', 'Initiate']", 'brief': '"""Allows you to set the current initiative for a game that can be used as a reminder."""', 'description': '"""/initiative [args]\n\n- args = Names separated by spaces to indicate order of initiative\n\nAllows you to set the current initiative for a game that can be used as a reminder."""'}), '(aliases=[\'Initiative\', \'init\', \'Init\', \'i\', \'I\',\n \'initiate\', \'Initiate\'], brief=\n \'Allows you to set the current initiative for a game that can be used as a reminder.\'\n , description=\n """/initiative [args]\n\n- args = Names separated by spaces to indicate order of initiative\n\nAllows you to set the current initiative for a game that can be used as a reminder."""\n )\n', (11677, 12059), False, 'from discord.ext import commands\n'), ((12782, 13123), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['Addplayer', 'AddPlayer', 'initadd', 'Initadd', 'InitAdd']", 'brief': '"""Adds a player to the initiative."""', 'description': '"""/addplayer [name] [idx]\n\n- name = The name of the player you are adding to the initiative\n- idx = Where in the list the player will go (optional).\n\nAdds a player to the initiative."""'}), '(aliases=[\'Addplayer\', \'AddPlayer\', \'initadd\', \'Initadd\',\n \'InitAdd\'], brief=\'Adds a player to the initiative.\', description=\n """/addplayer [name] [idx]\n\n- name = The name of the player you are adding to the initiative\n- idx = Where in the list the player will go (optional).\n\nAdds a player to the initiative."""\n )\n', (12798, 13123), False, 'from discord.ext import commands\n'), ((13735, 14048), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['Removeplayer', 'RemovePlayer', 'initdel', 'Initdel', 'InitDel']", 'brief': '"""Removes a player from the initiative."""', 'description': '"""/removeplayer [arg]\n\n- arg = The index or name of the player you\'d like to remove from initiative.\n\nRemoves a player from the initiative."""'}), '(aliases=[\'Removeplayer\', \'RemovePlayer\', \'initdel\',\n \'Initdel\', \'InitDel\'], brief=\'Removes a player from the initiative.\',\n description=\n """/removeplayer [arg]\n\n- arg = The index or name of the player you\'d like to remove from initiative.\n\nRemoves a player from the initiative."""\n )\n', (13751, 14048), False, 'from discord.ext import commands\n'), ((1723, 1772), 'discord.utils.get', 'discord.utils.get', (['ctx.guild.roles'], {'name': '"""Member"""'}), "(ctx.guild.roles, name='Member')\n", (1740, 1772), False, 'import discord\n'), ((2039, 2909), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'add_reactions': '(False)', 'administrator': '(False)', 'attach_files': '(False)', 'ban_members': '(False)', 'change_nickname': '(False)', 'connect': '(False)', 'create_instant_invite': '(False)', 'deafen_members': '(False)', 'embed_links': '(False)', 'external_emojis': '(False)', 'kick_members': '(False)', 'manage_channels': '(False)', 'manage_emojis': '(False)', 'manage_guild': '(False)', 'manage_messages': '(False)', 'manage_nicknames': '(False)', 'manage_permissions': '(False)', 'manage_roles': '(False)', 'manage_webhooks': '(False)', 'mention_everyone': '(False)', 'move_members': '(False)', 'mute_members': '(False)', 'priority_speaker': '(False)', 'read_message_history': '(False)', 'read_messages': '(False)', 'request_to_speak': '(False)', 'send_messages': '(False)', 'send_tts_messages': '(False)', 'speak': '(False)', 'stream': '(False)', 'use_external_emojis': '(False)', 'use_slash_commands': '(False)', 'use_voice_activation': '(False)', 'view_audit_log': '(False)', 'view_channel': '(False)', 'view_guild_insights': '(False)'}), '(add_reactions=False, administrator=False,\n attach_files=False, ban_members=False, change_nickname=False, connect=\n False, create_instant_invite=False, deafen_members=False, embed_links=\n False, external_emojis=False, kick_members=False, manage_channels=False,\n manage_emojis=False, manage_guild=False, manage_messages=False,\n manage_nicknames=False, manage_permissions=False, manage_roles=False,\n manage_webhooks=False, mention_everyone=False, move_members=False,\n mute_members=False, priority_speaker=False, read_message_history=False,\n read_messages=False, request_to_speak=False, send_messages=False,\n send_tts_messages=False, speak=False, stream=False, use_external_emojis\n =False, use_slash_commands=False, use_voice_activation=False,\n view_audit_log=False, view_channel=False, view_guild_insights=False)\n', (2066, 2909), False, 'import discord\n'), ((2854, 3686), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'add_reactions': 'None', 'administrator': 'None', 'attach_files': 'None', 'ban_members': 'None', 'change_nickname': 'None', 'connect': 'None', 'create_instant_invite': 'None', 'deafen_members': 'None', 'embed_links': 'None', 'external_emojis': 'None', 'kick_members': 'None', 'manage_channels': 'None', 'manage_emojis': 'None', 'manage_guild': 'None', 'manage_messages': 'None', 'manage_nicknames': 'None', 'manage_permissions': 'None', 'manage_roles': 'None', 'manage_webhooks': 'None', 'mention_everyone': 'None', 'move_members': 'None', 'mute_members': 'None', 'priority_speaker': 'None', 'read_message_history': 'None', 'read_messages': 'None', 'request_to_speak': 'None', 'send_messages': 'None', 'send_tts_messages': 'None', 'speak': '(True)', 'stream': 'None', 'use_external_emojis': 'None', 'use_slash_commands': 'None', 'use_voice_activation': '(True)', 'view_audit_log': 'None', 'view_channel': '(True)', 'view_guild_insights': 'None'}), '(add_reactions=None, administrator=None,\n attach_files=None, ban_members=None, change_nickname=None, connect=None,\n create_instant_invite=None, deafen_members=None, embed_links=None,\n external_emojis=None, kick_members=None, manage_channels=None,\n manage_emojis=None, manage_guild=None, manage_messages=None,\n manage_nicknames=None, manage_permissions=None, manage_roles=None,\n manage_webhooks=None, mention_everyone=None, move_members=None,\n mute_members=None, priority_speaker=None, read_message_history=None,\n read_messages=None, request_to_speak=None, send_messages=None,\n send_tts_messages=None, speak=True, stream=None, use_external_emojis=\n None, use_slash_commands=None, use_voice_activation=True,\n view_audit_log=None, view_channel=True, view_guild_insights=None)\n', (2881, 3686), False, 'import discord\n'), ((3635, 4467), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'add_reactions': '(True)', 'administrator': 'None', 'attach_files': '(True)', 'ban_members': 'None', 'change_nickname': 'None', 'connect': '(True)', 'create_instant_invite': 'None', 'deafen_members': 'None', 'embed_links': '(True)', 'external_emojis': '(True)', 'kick_members': 'None', 'manage_channels': 'None', 'manage_emojis': 'None', 'manage_guild': 'None', 'manage_messages': 'None', 'manage_nicknames': 'None', 'manage_permissions': 'None', 'manage_roles': 'None', 'manage_webhooks': 'None', 'mention_everyone': 'None', 'move_members': 'None', 'mute_members': 'None', 'priority_speaker': 'None', 'read_message_history': '(True)', 'read_messages': '(True)', 'request_to_speak': 'None', 'send_messages': '(True)', 'send_tts_messages': 'None', 'speak': 'None', 'stream': 'None', 'use_external_emojis': 'None', 'use_slash_commands': 'None', 'use_voice_activation': 'None', 'view_audit_log': 'None', 'view_channel': 'None', 'view_guild_insights': 'None'}), '(add_reactions=True, administrator=None,\n attach_files=True, ban_members=None, change_nickname=None, connect=True,\n create_instant_invite=None, deafen_members=None, embed_links=True,\n external_emojis=True, kick_members=None, manage_channels=None,\n manage_emojis=None, manage_guild=None, manage_messages=None,\n manage_nicknames=None, manage_permissions=None, manage_roles=None,\n manage_webhooks=None, mention_everyone=None, move_members=None,\n mute_members=None, priority_speaker=None, read_message_history=True,\n read_messages=True, request_to_speak=None, send_messages=True,\n send_tts_messages=None, speak=None, stream=None, use_external_emojis=\n None, use_slash_commands=None, use_voice_activation=None,\n view_audit_log=None, view_channel=None, view_guild_insights=None)\n', (3662, 4467), False, 'import discord\n'), ((4412, 5244), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'add_reactions': 'None', 'administrator': 'None', 'attach_files': 'None', 'ban_members': 'None', 'change_nickname': 'None', 'connect': 'None', 'create_instant_invite': 'None', 'deafen_members': 'None', 'embed_links': 'None', 'external_emojis': 'None', 'kick_members': 'None', 'manage_channels': 'None', 'manage_emojis': 'None', 'manage_guild': 'None', 'manage_messages': 'None', 'manage_nicknames': 'None', 'manage_permissions': 'None', 'manage_roles': 'None', 'manage_webhooks': 'None', 'mention_everyone': '(True)', 'move_members': 'None', 'mute_members': '(True)', 'priority_speaker': '(True)', 'read_message_history': 'None', 'read_messages': 'None', 'request_to_speak': 'None', 'send_messages': 'None', 'send_tts_messages': 'None', 'speak': '(True)', 'stream': 'None', 'use_external_emojis': 'None', 'use_slash_commands': '(True)', 'use_voice_activation': '(True)', 'view_audit_log': 'None', 'view_channel': '(True)', 'view_guild_insights': 'None'}), '(add_reactions=None, administrator=None,\n attach_files=None, ban_members=None, change_nickname=None, connect=None,\n create_instant_invite=None, deafen_members=None, embed_links=None,\n external_emojis=None, kick_members=None, manage_channels=None,\n manage_emojis=None, manage_guild=None, manage_messages=None,\n manage_nicknames=None, manage_permissions=None, manage_roles=None,\n manage_webhooks=None, mention_everyone=True, move_members=None,\n mute_members=True, priority_speaker=True, read_message_history=None,\n read_messages=None, request_to_speak=None, send_messages=None,\n send_tts_messages=None, speak=True, stream=None, use_external_emojis=\n None, use_slash_commands=True, use_voice_activation=True,\n view_audit_log=None, view_channel=True, view_guild_insights=None)\n', (4439, 5244), False, 'import discord\n'), ((6228, 7098), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'add_reactions': '(False)', 'administrator': '(False)', 'attach_files': '(False)', 'ban_members': '(False)', 'change_nickname': '(False)', 'connect': '(False)', 'create_instant_invite': '(False)', 'deafen_members': '(False)', 'embed_links': '(False)', 'external_emojis': '(False)', 'kick_members': '(False)', 'manage_channels': '(False)', 'manage_emojis': '(False)', 'manage_guild': '(False)', 'manage_messages': '(False)', 'manage_nicknames': '(False)', 'manage_permissions': '(False)', 'manage_roles': '(False)', 'manage_webhooks': '(False)', 'mention_everyone': '(False)', 'move_members': '(False)', 'mute_members': '(False)', 'priority_speaker': '(False)', 'read_message_history': '(False)', 'read_messages': '(False)', 'request_to_speak': '(False)', 'send_messages': '(False)', 'send_tts_messages': '(False)', 'speak': '(False)', 'stream': '(False)', 'use_external_emojis': '(False)', 'use_slash_commands': '(False)', 'use_voice_activation': '(False)', 'view_audit_log': '(False)', 'view_channel': '(False)', 'view_guild_insights': '(False)'}), '(add_reactions=False, administrator=False,\n attach_files=False, ban_members=False, change_nickname=False, connect=\n False, create_instant_invite=False, deafen_members=False, embed_links=\n False, external_emojis=False, kick_members=False, manage_channels=False,\n manage_emojis=False, manage_guild=False, manage_messages=False,\n manage_nicknames=False, manage_permissions=False, manage_roles=False,\n manage_webhooks=False, mention_everyone=False, move_members=False,\n mute_members=False, priority_speaker=False, read_message_history=False,\n read_messages=False, request_to_speak=False, send_messages=False,\n send_tts_messages=False, speak=False, stream=False, use_external_emojis\n =False, use_slash_commands=False, use_voice_activation=False,\n view_audit_log=False, view_channel=False, view_guild_insights=False)\n', (6255, 7098), False, 'import discord\n'), ((7043, 7875), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'add_reactions': 'None', 'administrator': 'None', 'attach_files': 'None', 'ban_members': 'None', 'change_nickname': 'None', 'connect': 'None', 'create_instant_invite': 'None', 'deafen_members': 'None', 'embed_links': 'None', 'external_emojis': 'None', 'kick_members': 'None', 'manage_channels': 'None', 'manage_emojis': 'None', 'manage_guild': 'None', 'manage_messages': 'None', 'manage_nicknames': 'None', 'manage_permissions': 'None', 'manage_roles': 'None', 'manage_webhooks': 'None', 'mention_everyone': 'None', 'move_members': 'None', 'mute_members': 'None', 'priority_speaker': 'None', 'read_message_history': 'None', 'read_messages': 'None', 'request_to_speak': 'None', 'send_messages': 'None', 'send_tts_messages': 'None', 'speak': '(True)', 'stream': 'None', 'use_external_emojis': 'None', 'use_slash_commands': 'None', 'use_voice_activation': '(True)', 'view_audit_log': 'None', 'view_channel': '(True)', 'view_guild_insights': 'None'}), '(add_reactions=None, administrator=None,\n attach_files=None, ban_members=None, change_nickname=None, connect=None,\n create_instant_invite=None, deafen_members=None, embed_links=None,\n external_emojis=None, kick_members=None, manage_channels=None,\n manage_emojis=None, manage_guild=None, manage_messages=None,\n manage_nicknames=None, manage_permissions=None, manage_roles=None,\n manage_webhooks=None, mention_everyone=None, move_members=None,\n mute_members=None, priority_speaker=None, read_message_history=None,\n read_messages=None, request_to_speak=None, send_messages=None,\n send_tts_messages=None, speak=True, stream=None, use_external_emojis=\n None, use_slash_commands=None, use_voice_activation=True,\n view_audit_log=None, view_channel=True, view_guild_insights=None)\n', (7070, 7875), False, 'import discord\n'), ((7824, 8656), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'add_reactions': '(True)', 'administrator': 'None', 'attach_files': '(True)', 'ban_members': 'None', 'change_nickname': 'None', 'connect': '(True)', 'create_instant_invite': 'None', 'deafen_members': 'None', 'embed_links': '(True)', 'external_emojis': '(True)', 'kick_members': 'None', 'manage_channels': 'None', 'manage_emojis': 'None', 'manage_guild': 'None', 'manage_messages': 'None', 'manage_nicknames': 'None', 'manage_permissions': 'None', 'manage_roles': 'None', 'manage_webhooks': 'None', 'mention_everyone': 'None', 'move_members': 'None', 'mute_members': 'None', 'priority_speaker': 'None', 'read_message_history': '(True)', 'read_messages': '(True)', 'request_to_speak': 'None', 'send_messages': '(True)', 'send_tts_messages': 'None', 'speak': 'None', 'stream': 'None', 'use_external_emojis': 'None', 'use_slash_commands': 'None', 'use_voice_activation': 'None', 'view_audit_log': 'None', 'view_channel': '(True)', 'view_guild_insights': 'None'}), '(add_reactions=True, administrator=None,\n attach_files=True, ban_members=None, change_nickname=None, connect=True,\n create_instant_invite=None, deafen_members=None, embed_links=True,\n external_emojis=True, kick_members=None, manage_channels=None,\n manage_emojis=None, manage_guild=None, manage_messages=None,\n manage_nicknames=None, manage_permissions=None, manage_roles=None,\n manage_webhooks=None, mention_everyone=None, move_members=None,\n mute_members=None, priority_speaker=None, read_message_history=True,\n read_messages=True, request_to_speak=None, send_messages=True,\n send_tts_messages=None, speak=None, stream=None, use_external_emojis=\n None, use_slash_commands=None, use_voice_activation=None,\n view_audit_log=None, view_channel=True, view_guild_insights=None)\n', (7851, 8656), False, 'import discord\n'), ((8601, 9433), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'add_reactions': 'None', 'administrator': 'None', 'attach_files': 'None', 'ban_members': 'None', 'change_nickname': 'None', 'connect': 'None', 'create_instant_invite': 'None', 'deafen_members': 'None', 'embed_links': 'None', 'external_emojis': 'None', 'kick_members': 'None', 'manage_channels': 'None', 'manage_emojis': 'None', 'manage_guild': 'None', 'manage_messages': 'None', 'manage_nicknames': 'None', 'manage_permissions': 'None', 'manage_roles': 'None', 'manage_webhooks': 'None', 'mention_everyone': '(True)', 'move_members': 'None', 'mute_members': '(True)', 'priority_speaker': '(True)', 'read_message_history': 'None', 'read_messages': 'None', 'request_to_speak': 'None', 'send_messages': 'None', 'send_tts_messages': 'None', 'speak': '(True)', 'stream': 'None', 'use_external_emojis': 'None', 'use_slash_commands': '(True)', 'use_voice_activation': '(True)', 'view_audit_log': 'None', 'view_channel': '(True)', 'view_guild_insights': 'None'}), '(add_reactions=None, administrator=None,\n attach_files=None, ban_members=None, change_nickname=None, connect=None,\n create_instant_invite=None, deafen_members=None, embed_links=None,\n external_emojis=None, kick_members=None, manage_channels=None,\n manage_emojis=None, manage_guild=None, manage_messages=None,\n manage_nicknames=None, manage_permissions=None, manage_roles=None,\n manage_webhooks=None, mention_everyone=True, move_members=None,\n mute_members=True, priority_speaker=True, read_message_history=None,\n read_messages=None, request_to_speak=None, send_messages=None,\n send_tts_messages=None, speak=True, stream=None, use_external_emojis=\n None, use_slash_commands=True, use_voice_activation=True,\n view_audit_log=None, view_channel=True, view_guild_insights=None)\n', (8628, 9433), False, 'import discord\n'), ((1601, 1674), 'discord.utils.get', 'discord.utils.get', (['ctx.guild.roles'], {'name': '"""⊱ ───── {⭒|PERSONAL|⭒} ───── ⊰"""'}), "(ctx.guild.roles, name='⊱ ───── {⭒|PERSONAL|⭒} ───── ⊰')\n", (1618, 1674), False, 'import discord\n')]
# Copyright 2016 Isotoma Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 import datetime from touchdown.core.utils import force_bytes, force_str from .service import ServiceStubber class LaunchConfigurationStubber(ServiceStubber): client_service = "ec2" def add_describe_launch_configurations_empty_response(self): return self.add_response( "describe_launch_configurations", service_response={"LaunchConfigurations": []}, expected_params={}, ) def add_describe_launch_configurations_one_response(self, user_data=None): launch_config = { "LaunchConfigurationName": self.resource.name, "ImageId": "ami-cba130bc", "InstanceType": "t2.micro", "CreatedTime": datetime.datetime.now(), } if user_data: launch_config["UserData"] = force_str( base64.b64encode(force_bytes(user_data)) ) return self.add_response( "describe_launch_configurations", service_response={"LaunchConfigurations": [launch_config]}, expected_params={}, ) def add_describe_auto_scaling_groups(self): return self.add_response( "describe_auto_scaling_groups", service_response={"AutoScalingGroups": []}, expected_params={}, ) def add_create_launch_configuration(self, user_data=None): expected_params = { "ImageId": "ami-cba130bc", "InstanceMonitoring": {"Enabled": False}, "InstanceType": "t2.micro", "LaunchConfigurationName": "my-test-lc.1", } if user_data: expected_params["UserData"] = user_data return self.add_response( "create_launch_configuration", service_response={}, expected_params=expected_params, ) def add_delete_launch_configuration(self): return self.add_response( "delete_launch_configuration", service_response={}, expected_params={"LaunchConfigurationName": self.resource.name}, )
[ "touchdown.core.utils.force_bytes", "datetime.datetime.now" ]
[((1300, 1323), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1321, 1323), False, 'import datetime\n'), ((1441, 1463), 'touchdown.core.utils.force_bytes', 'force_bytes', (['user_data'], {}), '(user_data)\n', (1452, 1463), False, 'from touchdown.core.utils import force_bytes, force_str\n')]
import sys import click from .function import LambdaPoolFunction from . import utils from tabulate import tabulate from lambdapool import exceptions @click.group() def cli(): pass @cli.command() @click.option('--requirements', '-r', type=click.Path(exists=True), help="Specifies the dependencies to be installed along with the function") @click.option('--memory', type=click.INT, help="Sets the memory size of the function environment") @click.option('--timeout', type=click.INT, help="Sets the timeout for the function in seconds") @click.option('--layers', help="Sets the layers to be used when the function is ran. The Layers ARN's (a maximum of 5) should be specified.") @click.argument('function_name', nargs=1) @click.argument('paths', nargs=-1, type=click.Path(exists=True)) def create(function_name, paths, requirements, memory, timeout, layers): """Create a new function""" click.echo('=== Creating lambdapool function ===') try: func = LambdaPoolFunction( function_name=function_name, paths=paths, requirements=requirements, memory=memory, timeout=timeout, layers=layers.split(',') if layers else [] ) if func.exists(): click.echo(f'lambdapool function {function_name} already exists') sys.exit(1) func.create() except exceptions.LambdaFunctionError as e: click.echo(f'ERROR: {e}') sys.exit(1) click.echo(f'=== Succesfully created lambdapool function {function_name} ===') @cli.command() def list(): """List all deployed functions""" funcs = LambdaPoolFunction.list() funcs = sorted(funcs, key=lambda x: x['last_updated'], reverse=True) rows = [] for func in funcs: rows.append( [ func['function_name'], utils.convert_size(func['size']), utils.datestr(func['last_updated']), func['memory'], func['timeout'] ] ) click.echo(tabulate(rows, headers=['FUNCTION NAME', 'SIZE', 'WHEN', 'RUNTIME MEMORY (MB)', 'TIMEOUT (SEC)'])) @cli.command() @click.option('--requirements', '-r', type=click.Path(exists=True), help="Specifies the dependencies to be installed along with the function") @click.option('--memory', type=click.INT, help="Sets the memory size of the function environment") @click.option('--timeout', type=click.INT, help="Sets the timeout for the function in seconds") @click.option('--layers', help="Sets the layers to be used when the function is ran. The Layers ARN's (a maximum of 5) should be specified.") @click.argument('function_name', nargs=1) @click.argument('paths', nargs=-1) def update(function_name, paths, requirements, memory, timeout, layers): """Update an existing function""" click.echo('=== Updating lambdapool function ===') try: func = LambdaPoolFunction( function_name=function_name, paths=paths, requirements=requirements, memory=memory, timeout=timeout, layers=layers.split(',') if layers else [] ) func.update() except exceptions.LambdaFunctionError as e: click.echo(f'ERROR: {e}') sys.exit(1) click.echo(f'=== Updated lambdapool function {function_name} ===') @cli.command() @click.argument('function_name', nargs=1) def delete(function_name): """Delete a function""" click.echo('=== Deleting lambdapool function ===') func = LambdaPoolFunction(function_name=function_name) func.delete() click.echo(f'=== Deleted lambdapool function {function_name}===')
[ "click.argument", "click.option", "click.echo", "tabulate.tabulate", "click.Path", "click.group", "sys.exit" ]
[((153, 166), 'click.group', 'click.group', ([], {}), '()\n', (164, 166), False, 'import click\n'), ((347, 449), 'click.option', 'click.option', (['"""--memory"""'], {'type': 'click.INT', 'help': '"""Sets the memory size of the function environment"""'}), "('--memory', type=click.INT, help=\n 'Sets the memory size of the function environment')\n", (359, 449), False, 'import click\n'), ((446, 545), 'click.option', 'click.option', (['"""--timeout"""'], {'type': 'click.INT', 'help': '"""Sets the timeout for the function in seconds"""'}), "('--timeout', type=click.INT, help=\n 'Sets the timeout for the function in seconds')\n", (458, 545), False, 'import click\n'), ((542, 692), 'click.option', 'click.option', (['"""--layers"""'], {'help': '"""Sets the layers to be used when the function is ran. The Layers ARN\'s (a maximum of 5) should be specified."""'}), '(\'--layers\', help=\n "Sets the layers to be used when the function is ran. The Layers ARN\'s (a maximum of 5) should be specified."\n )\n', (554, 692), False, 'import click\n'), ((684, 724), 'click.argument', 'click.argument', (['"""function_name"""'], {'nargs': '(1)'}), "('function_name', nargs=1)\n", (698, 724), False, 'import click\n'), ((2312, 2414), 'click.option', 'click.option', (['"""--memory"""'], {'type': 'click.INT', 'help': '"""Sets the memory size of the function environment"""'}), "('--memory', type=click.INT, help=\n 'Sets the memory size of the function environment')\n", (2324, 2414), False, 'import click\n'), ((2411, 2510), 'click.option', 'click.option', (['"""--timeout"""'], {'type': 'click.INT', 'help': '"""Sets the timeout for the function in seconds"""'}), "('--timeout', type=click.INT, help=\n 'Sets the timeout for the function in seconds')\n", (2423, 2510), False, 'import click\n'), ((2507, 2657), 'click.option', 'click.option', (['"""--layers"""'], {'help': '"""Sets the layers to be used when the function is ran. The Layers ARN\'s (a maximum of 5) should be specified."""'}), '(\'--layers\', help=\n "Sets the layers to be used when the function is ran. The Layers ARN\'s (a maximum of 5) should be specified."\n )\n', (2519, 2657), False, 'import click\n'), ((2649, 2689), 'click.argument', 'click.argument', (['"""function_name"""'], {'nargs': '(1)'}), "('function_name', nargs=1)\n", (2663, 2689), False, 'import click\n'), ((2691, 2724), 'click.argument', 'click.argument', (['"""paths"""'], {'nargs': '(-1)'}), "('paths', nargs=-1)\n", (2705, 2724), False, 'import click\n'), ((3375, 3415), 'click.argument', 'click.argument', (['"""function_name"""'], {'nargs': '(1)'}), "('function_name', nargs=1)\n", (3389, 3415), False, 'import click\n'), ((899, 949), 'click.echo', 'click.echo', (['"""=== Creating lambdapool function ==="""'], {}), "('=== Creating lambdapool function ===')\n", (909, 949), False, 'import click\n'), ((1480, 1558), 'click.echo', 'click.echo', (['f"""=== Succesfully created lambdapool function {function_name} ==="""'], {}), "(f'=== Succesfully created lambdapool function {function_name} ===')\n", (1490, 1558), False, 'import click\n'), ((2840, 2890), 'click.echo', 'click.echo', (['"""=== Updating lambdapool function ==="""'], {}), "('=== Updating lambdapool function ===')\n", (2850, 2890), False, 'import click\n'), ((3291, 3357), 'click.echo', 'click.echo', (['f"""=== Updated lambdapool function {function_name} ==="""'], {}), "(f'=== Updated lambdapool function {function_name} ===')\n", (3301, 3357), False, 'import click\n'), ((3475, 3525), 'click.echo', 'click.echo', (['"""=== Deleting lambdapool function ==="""'], {}), "('=== Deleting lambdapool function ===')\n", (3485, 3525), False, 'import click\n'), ((3609, 3674), 'click.echo', 'click.echo', (['f"""=== Deleted lambdapool function {function_name}==="""'], {}), "(f'=== Deleted lambdapool function {function_name}===')\n", (3619, 3674), False, 'import click\n'), ((246, 269), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (256, 269), False, 'import click\n'), ((765, 788), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (775, 788), False, 'import click\n'), ((2053, 2154), 'tabulate.tabulate', 'tabulate', (['rows'], {'headers': "['FUNCTION NAME', 'SIZE', 'WHEN', 'RUNTIME MEMORY (MB)', 'TIMEOUT (SEC)']"}), "(rows, headers=['FUNCTION NAME', 'SIZE', 'WHEN',\n 'RUNTIME MEMORY (MB)', 'TIMEOUT (SEC)'])\n", (2061, 2154), False, 'from tabulate import tabulate\n'), ((2211, 2234), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (2221, 2234), False, 'import click\n'), ((1260, 1325), 'click.echo', 'click.echo', (['f"""lambdapool function {function_name} already exists"""'], {}), "(f'lambdapool function {function_name} already exists')\n", (1270, 1325), False, 'import click\n'), ((1338, 1349), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1346, 1349), False, 'import sys\n'), ((1429, 1454), 'click.echo', 'click.echo', (['f"""ERROR: {e}"""'], {}), "(f'ERROR: {e}')\n", (1439, 1454), False, 'import click\n'), ((1463, 1474), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1471, 1474), False, 'import sys\n'), ((3240, 3265), 'click.echo', 'click.echo', (['f"""ERROR: {e}"""'], {}), "(f'ERROR: {e}')\n", (3250, 3265), False, 'import click\n'), ((3274, 3285), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3282, 3285), False, 'import sys\n')]
import time import random def print_pause(message_to_print): print(message_to_print) time.sleep(2) def intro(item, option): print_pause("You find yourself standing in an open field, filled " "with grass and yellow wildflowers.\n") print_pause("Rumor has it that a " + option + " is somewhere around " "here, and has been terrifying the nearby village.\n") print_pause("In front of you is a house.\n") print_pause("To your right is a dark cave.\n") print_pause("In your hand you hold your trusty (but not very " "effective) dagger.\n") def cave(item, option): if "sword" in item: print_pause("\nYou peer cautiously into the cave.") print_pause("\nYou've been here before, and gotten all" " the good stuff. It's just an empty cave" " now.") print_pause("\nYou walk back to the field.\n") else: print_pause("\nYou peer cautiously into the cave.") print_pause("\nIt turns out to be only a very small cave.") print_pause("\nYour eye catches a glint of metal behind a " "rock.") print_pause("\nYou have found the magical Sword of Ogoroth!") print_pause("\nYou discard your silly old dagger and take " "the sword with you.") print_pause("\nYou walk back out to the field.\n") item.append("sword") field(item, option) def house(item, option): print_pause("\nYou approach the door of the house.") print_pause("\nYou are about to knock when the door " "opens and out steps a " + option + ".") print_pause("\nEep! This is the " + option + "'s house!") print_pause("\nThe " + option + " attacks you!\n") if "sword" not in item: print_pause("You feel a bit under-prepared for this, " "what with only having a tiny dagger.\n") while True: choice2 = input("Would you like to (1) fight or (2) " "run away?") if choice2 == "1": if "sward" in item: print_pause("\nAs the " + option + " moves to attack, " "you unsheath your new sword.") print_pause("\nThe Sword of Ogoroth shines brightly in " "your hand as you brace yourself for the " "attack.") print_pause("\nBut the " + option + "takes one look at " "your shiny new toy and runs away!") print_pause("\nYou have rid the town of the " + option + ". You are victorious!\n") else: print_pause("\nYou do your best...") print_pause("but your dagger is no match for the " + option + ".") print_pause("\nYou have been defeated!\n") play_again() break if choice2 == "2": print_pause("\nYou run back into the field. " "\nLuckily, you don't seem to have been " "followed.\n") field(item, option) break def field(item, option): print_pause("Enter 1 to knock on the door of the house.") print_pause("Enter 2 to peer into the cave.") print_pause("What would you like to do?") while True: choice1 = input("(Please enter 1 or 2.)\n") if choice1 == "1": house(item, option) break elif choice1 == "2": cave(item, option) break def play_again(): again = input("Would you like to play again? (y/n)").lower() if again == "y": print_pause("\n\n\nExcellent! Restarting the game ...\n\n\n") play_game() elif again == "n": print_pause("\n\n\nThanks for playing! See you next time.\n\n\n") else: play_again() def play_game(): item = [] option = random.choice(["pirate", "fairy", "dragon", "gorgon", "troll"]) intro(item, option) field(item, option) play_game()
[ "random.choice", "time.sleep" ]
[((94, 107), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (104, 107), False, 'import time\n'), ((3986, 4049), 'random.choice', 'random.choice', (["['pirate', 'fairy', 'dragon', 'gorgon', 'troll']"], {}), "(['pirate', 'fairy', 'dragon', 'gorgon', 'troll'])\n", (3999, 4049), False, 'import random\n')]
from builtins import str from django.test import TestCase from measure_mate.tests.factories import TemplateFactory, AttributeFactory, RatingFactory class RatingTestCases(TestCase): def test_creation_of_rating(self): template = TemplateFactory() attribute = AttributeFactory(template=template) rating = RatingFactory(attribute=attribute, rank=1) rating.clean() self.assertEqual("%s - %s - %s" % (template.name, attribute.name, rating.name), str(rating))
[ "builtins.str", "measure_mate.tests.factories.TemplateFactory", "measure_mate.tests.factories.AttributeFactory", "measure_mate.tests.factories.RatingFactory" ]
[((242, 259), 'measure_mate.tests.factories.TemplateFactory', 'TemplateFactory', ([], {}), '()\n', (257, 259), False, 'from measure_mate.tests.factories import TemplateFactory, AttributeFactory, RatingFactory\n'), ((280, 315), 'measure_mate.tests.factories.AttributeFactory', 'AttributeFactory', ([], {'template': 'template'}), '(template=template)\n', (296, 315), False, 'from measure_mate.tests.factories import TemplateFactory, AttributeFactory, RatingFactory\n'), ((333, 375), 'measure_mate.tests.factories.RatingFactory', 'RatingFactory', ([], {'attribute': 'attribute', 'rank': '(1)'}), '(attribute=attribute, rank=1)\n', (346, 375), False, 'from measure_mate.tests.factories import TemplateFactory, AttributeFactory, RatingFactory\n'), ((487, 498), 'builtins.str', 'str', (['rating'], {}), '(rating)\n', (490, 498), False, 'from builtins import str\n')]
""" File: mirror_lake.py ---------------------------------- This file reads in mt-rainier.jpg and makes a new image that creates a mirror lake vibe by placing an inverse image of mt-rainier.jpg below the original one. """ from simpleimage import SimpleImage def reflect(filename): """ :param filename: str, the file directory of the original image :return: flip-vertical image """ img = SimpleImage(filename) blank_img = SimpleImage.blank(img.width, img.height * 2) # generating a blank image of double height for x in range(img.width): for y in range(img.height): every_color_of_pixel = img.get_pixel(x, y) upper_blank = blank_img.get_pixel(x, y) # upper part of blank image lower_blank = blank_img.get_pixel(x, blank_img.height - 1 - y) # lower part of blank_image upper_blank.red = every_color_of_pixel.red upper_blank.green = every_color_of_pixel.green upper_blank.blue = every_color_of_pixel.blue lower_blank.red = every_color_of_pixel.red lower_blank.green = every_color_of_pixel.green lower_blank.blue = every_color_of_pixel.blue return blank_img def main(): """ This program generates a flip-vertical image. """ original_mt = SimpleImage('images/mt-rainier.jpg') original_mt.show() reflected = reflect('images/mt-rainier.jpg') reflected.show() if __name__ == '__main__': main()
[ "simpleimage.SimpleImage", "simpleimage.SimpleImage.blank" ]
[((409, 430), 'simpleimage.SimpleImage', 'SimpleImage', (['filename'], {}), '(filename)\n', (420, 430), False, 'from simpleimage import SimpleImage\n'), ((447, 491), 'simpleimage.SimpleImage.blank', 'SimpleImage.blank', (['img.width', '(img.height * 2)'], {}), '(img.width, img.height * 2)\n', (464, 491), False, 'from simpleimage import SimpleImage\n'), ((1328, 1364), 'simpleimage.SimpleImage', 'SimpleImage', (['"""images/mt-rainier.jpg"""'], {}), "('images/mt-rainier.jpg')\n", (1339, 1364), False, 'from simpleimage import SimpleImage\n')]
""" Tests the subsets_exp module. """ import mock import unittest from .context import subsets_exp from .context import config from .context import runner from .context import test_utils as tu class Subsets_ExperimentTestCase(unittest.TestCase): def setUp(self): config_obj = tu.sample_config() mock_runner_obj = mock.Mock(runner.Runner) self.test_obj = subsets_exp.Subsets_Experiment(config_obj, mock_runner_obj) def tearDown(self): self.test_obj = None def test_init(self): # setup test_obj = self.test_obj # assert self.assertTrue(isinstance(test_obj.config_obj, config.Config)) self.assertTrue(isinstance(test_obj.runner_obj, runner.Runner)) self.assertTrue(test_obj.config_obj.modified) self.assertTrue(test_obj.config_obj.pseudo) def test_divide_data_into_subsets(self): self.test_obj.config_obj.end = 4000 self.test_obj.config_obj.start = 0 self.test_obj.config_obj.fold = '0' result = self.test_obj.divide_data_into_subsets(num_subsets=4) exp = [(0, 1000, '0'), (1000, 2000, '1'), (2000, 3000, '2'), (3000, 4000, '3')] self.assertTrue(len(result) == 4) self.assertTrue(result == exp) def test_run_experiment(self): subsets = [(1, 2, '4'), (7, 77, '88'), (7, 88, '169')] self.test_obj.single_run = mock.Mock() self.test_obj.change_config_parameters = mock.Mock() self.test_obj.run_experiment(subsets) exp_ccp = [mock.call(1, 2, '4'), mock.call(7, 77, '88'), mock.call(7, 88, '169')] self.assertTrue(self.test_obj.single_run.call_count == 3) self.assertTrue(self.test_obj.change_config_parameters.call_args_list == exp_ccp) def test_single_run(self): self.test_obj.runner_obj.run_independent = mock.Mock() self.test_obj.runner_obj.run_independent.return_value = ('v', 't') self.test_obj.change_config_rel_op = mock.Mock() self.test_obj.runner_obj.run_relational = mock.Mock() self.test_obj.runner_obj.run_evaluation = mock.Mock() self.test_obj.single_run() exp_ccro = [mock.call(train=True), mock.call(train=False)] exp_rel = [mock.call('v', 't'), mock.call('v', 't')] self.test_obj.runner_obj.run_independent.assert_called_with() self.assertTrue(self.test_obj.change_config_rel_op.call_args_list == exp_ccro) self.assertTrue(self.test_obj.runner_obj.run_relational.call_args_list == exp_rel) self.test_obj.runner_obj.run_evaluation.assert_called_with('t') def test_change_config_parameters(self): self.test_obj.change_config_parameters(2, 4, '69') self.assertTrue(self.test_obj.config_obj.start == 2) self.assertTrue(self.test_obj.config_obj.end == 4) self.assertTrue(self.test_obj.config_obj.fold == '69') def test_change_config_rel_op(self): self.test_obj.change_config_rel_op(train=False) self.assertTrue(self.test_obj.config_obj.infer) def test_suite(): suite = unittest.TestLoader().loadTestsFromTestCase( Subsets_ExperimentTestCase) return suite if __name__ == '__main__': unittest.main()
[ "unittest.main", "mock.call", "unittest.TestLoader", "mock.Mock" ]
[((3303, 3318), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3316, 3318), False, 'import unittest\n'), ((335, 359), 'mock.Mock', 'mock.Mock', (['runner.Runner'], {}), '(runner.Runner)\n', (344, 359), False, 'import mock\n'), ((1426, 1437), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1435, 1437), False, 'import mock\n'), ((1487, 1498), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1496, 1498), False, 'import mock\n'), ((1908, 1919), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1917, 1919), False, 'import mock\n'), ((2040, 2051), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2049, 2051), False, 'import mock\n'), ((2102, 2113), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2111, 2113), False, 'import mock\n'), ((2164, 2175), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2173, 2175), False, 'import mock\n'), ((1566, 1586), 'mock.call', 'mock.call', (['(1)', '(2)', '"""4"""'], {}), "(1, 2, '4')\n", (1575, 1586), False, 'import mock\n'), ((1588, 1610), 'mock.call', 'mock.call', (['(7)', '(77)', '"""88"""'], {}), "(7, 77, '88')\n", (1597, 1610), False, 'import mock\n'), ((1628, 1651), 'mock.call', 'mock.call', (['(7)', '(88)', '"""169"""'], {}), "(7, 88, '169')\n", (1637, 1651), False, 'import mock\n'), ((2233, 2254), 'mock.call', 'mock.call', ([], {'train': '(True)'}), '(train=True)\n', (2242, 2254), False, 'import mock\n'), ((2256, 2278), 'mock.call', 'mock.call', ([], {'train': '(False)'}), '(train=False)\n', (2265, 2278), False, 'import mock\n'), ((2299, 2318), 'mock.call', 'mock.call', (['"""v"""', '"""t"""'], {}), "('v', 't')\n", (2308, 2318), False, 'import mock\n'), ((2320, 2339), 'mock.call', 'mock.call', (['"""v"""', '"""t"""'], {}), "('v', 't')\n", (2329, 2339), False, 'import mock\n'), ((3169, 3190), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (3188, 3190), False, 'import unittest\n')]
from timberscale import Timber import csv import math ##### REPORT DATA MODULE class Report(object): LOG_RANGE_LIST = [["40+ ft", range(41, 121)], ["31-40 ft", range(31, 41)], ["21-30 ft", range(21, 31)], ["11-20 ft", range(11, 21)], ["1-10 ft", range(1, 11)]] def __init__(self, CSV, Stand_to_Examine, Plots, Pref_Log, Min_Log): self.csv = CSV self.stand = Stand_to_Examine.upper() self.plots = Plots self.plog = Pref_Log self.mlog = Min_Log self.species_list = [] self.summary_conditions = [] self.summary_logs = [] self.conditions_dict = {} self.logs_dict = {} self.report() def report(self): AVG_HDR, self.species_list = self.get_HDR_Species() ## MAIN READ AND TREE (TIMBER CLASS) INITIALIZATION with open(self.csv, 'r') as tree_data: tree_data_reader = csv.reader(tree_data) next(tree_data_reader) for line in tree_data_reader: if line[0] == "": break elif str(line[0]).upper() != self.stand: next else: SPECIES = str(line[3]).upper() DBH = float(line[4]) if line[5] == "": HEIGHT = int(round(AVG_HDR * (DBH/12),0)) else: HEIGHT = int(float(line[5])) PLOT_FACTOR = float(line[6]) if DBH >= 6.0: tree = Timber(SPECIES, DBH, HEIGHT) merch_dib = tree.merch_dib() if merch_dib < 5: merch_dib = 5 single = tree.tree_single(merch_dib, self.plog, self.mlog) tree_per_acre = tree.tree_acre(merch_dib, self.plog, self.mlog, PLOT_FACTOR) log_per_acre = tree.log_acre(merch_dib, self.plog, self.mlog, PLOT_FACTOR) self.summary_conditions.append([single['SPP'][0], [tree_per_acre['TPA'], tree_per_acre['BA_AC'], tree_per_acre['RD_AC'], single['T_HGT'], single['HDR'], single['VBAR'], tree_per_acre['BF_AC'], tree_per_acre['CF_AC']]]) self.summary_logs.append(self.get_log_list(single['SPP'][0], log_per_acre)) else: tree = Timber(SPECIES, DBH, HEIGHT) self.summary_conditions.append([tree.SPP, [tree.get_TPA(PLOT_FACTOR), tree.get_BA_acre(PLOT_FACTOR), tree.get_RD_acre(PLOT_FACTOR), tree.HGT, tree.HDR, 0, 0, 0]]) ## SUMMARY STATISTICS self.conditions_dict = self.get_conditions_dict() self.logs_dict = self.get_logs_dict() return def get_HDR_Species(self): HDR_LIST = [] SPECIES_LIST = [] with open(self.csv, 'r') as tree_data: tree_data_reader = csv.reader(tree_data) next(tree_data_reader) for line in tree_data_reader: if line[0] == "": break elif str(line[0]).upper() != self.stand: next else: SPP = str(line[3]).upper() if SPP not in SPECIES_LIST: SPECIES_LIST.append(SPP) if line[5] != "": DBH = float(line[4]) HEIGHT = float(line[5]) HDR_LIST.append(HEIGHT / (DBH / 12)) AVG_HDR = round(sum(HDR_LIST) / len(HDR_LIST), 2) return AVG_HDR, SPECIES_LIST def get_log_list(self, Species, Log_Dict): master = [Species] for key in Log_Dict: rng = "" for ranges in self.LOG_RANGE_LIST: if Log_Dict[key]['L_LGT'] in ranges[1]: rng = ranges[0] temp_list = [Log_Dict[key]['L_GRD'][0], rng, Log_Dict[key]['L_CT_AC'], Log_Dict[key]['L_BF_AC'], Log_Dict[key]['L_CF_AC']] master.append(temp_list) return master def get_conditions_dict(self): # ORDER OF INITAL SPP LIST - [0SPPCOUNT, 1TPA, 2BA_AC, 3RD_AC, 4T_HGT, 5HDR, 6VBAR, 7BF_AC, 8CF_AC] # After Pop SPPCOUNT and Add QMD to 2 index # ORDER OF FINAL SPP LIST - [0TPA, 1BA_AC, 2QMD, 3RD_AC, 4T_HGT, 5HDR, 6VBAR, 7BF_AC, 8CF_AC] master = {} totals_temp = [0, 0, 0, 0, 0, 0, 0, 0, 0] for spp in self.species_list: master[spp] = [0, 0, 0, 0, 0, 0, 0, 0, 0] for data in self.summary_conditions: spp = data[0] master[spp][0] += 1 totals_temp[0] += 1 for i in range(1, len(data[1]) + 1): master[spp][i] += data[1][i - 1] totals_temp[i] += data[1][i - 1] master["TOTALS"] = totals_temp for key in master: sums = [1, 2, 3, 7, 8] for i in range(1, len(master[key])): if i in sums: master[key][i] = master[key][i] / self.plots else: master[key][i] = master[key][i] / master[key][0] master[key].pop(0) master[key].insert(2, math.sqrt((master[key][1] / master[key][0]) / .005454)) return master def get_logs_dict(self): log_rng = ["40+ ft", "31-40 ft", "21-30 ft", "11-20 ft", "1-10 ft", 'TGRD'] master = {} # Formatting Species into main keys for spp in self.species_list: master[spp] = {} master['TOTALS'] = {} # Formatting Grades and Ranges in correct order, as nested dicts of Species and Totals for key in master: for grade in Timber.GRADE_NAMES: master[key][grade] = {} for rng in log_rng: master[key][grade][rng] = [0, 0, 0] master[key]['TTL'] = {} for rng in log_rng: master[key]['TTL'][rng] = [0, 0, 0] # Adding data to Master Dict for data in self.summary_logs: spp = data[0] for i in range(1, len(data)): grade, rng = data[i][0], data[i][1] for j in range(2, len(data[i])): master[spp][grade][rng][j - 2] += (data[i][j] / self.plots) master[spp][grade]['TGRD'][j - 2] += (data[i][j] / self.plots) master[spp]['TTL'][rng][j - 2] += (data[i][j] / self.plots) master[spp]['TTL']['TGRD'][j - 2] += (data[i][j] / self.plots) master['TOTALS'][grade][rng][j - 2] += (data[i][j] / self.plots) master['TOTALS'][grade]['TGRD'][j - 2] += (data[i][j] / self.plots) master['TOTALS']['TTL'][rng][j - 2] += (data[i][j] / self.plots) master['TOTALS']['TTL']['TGRD'][j - 2] += (data[i][j] / self.plots) # Removing any Grades that have zero data ax_list = [] for key in master: for grade in master[key]: count = 0 for rng in master[key][grade]: count += master[key][grade][rng][0] if count == 0: ax_list.append((key, grade)) for ax in ax_list: del master[ax[0]][ax[1]] return master
[ "csv.reader", "timberscale.Timber", "math.sqrt" ]
[((937, 958), 'csv.reader', 'csv.reader', (['tree_data'], {}), '(tree_data)\n', (947, 958), False, 'import csv\n'), ((3322, 3343), 'csv.reader', 'csv.reader', (['tree_data'], {}), '(tree_data)\n', (3332, 3343), False, 'import csv\n'), ((5691, 5744), 'math.sqrt', 'math.sqrt', (['(master[key][1] / master[key][0] / 0.005454)'], {}), '(master[key][1] / master[key][0] / 0.005454)\n', (5700, 5744), False, 'import math\n'), ((1591, 1619), 'timberscale.Timber', 'Timber', (['SPECIES', 'DBH', 'HEIGHT'], {}), '(SPECIES, DBH, HEIGHT)\n', (1597, 1619), False, 'from timberscale import Timber\n'), ((2641, 2669), 'timberscale.Timber', 'Timber', (['SPECIES', 'DBH', 'HEIGHT'], {}), '(SPECIES, DBH, HEIGHT)\n', (2647, 2669), False, 'from timberscale import Timber\n')]
# -*- coding: UTF-8 -*- import ply.yacc from collections import OrderedDict from .lexer import tokens, lex # a:4:{s:4:"date";s:10:"2019-12-29";s:10:"type_fonds";s:11:"arko_seriel";s:4:"ref1";i:12;s:4:"ref2";i:4669;} from .models import Object start = 'expression' def p_expression(p): """expression : atom | associative""" p[0] = p[1] def p_atom(p): """atom : integer | float | boolean | string | null""" p[0] = p[1] def p_collection(p): """associative : array | object""" p[0] = p[1] def p_integer(p): """integer : I_SYMBOL COLON INTEGER""" p[0] = int(p[3]) def p_float(p): """float : D_SYMBOL COLON FLOAT""" p[0] = float(p[3]) def p_boolean(p): """boolean : B_SYMBOL COLON INTEGER""" p[0] = p[3] != "0" def p_string(p): """string : S_SYMBOL COLON INTEGER COLON STRING""" p[0] = p[5] def p_null(p): """null : N_SYMBOL""" p[0] = None def p_array(p): """array : A_SYMBOL raw_array""" p[0] = p[2] def p_raw_array(p): """raw_array : COLON INTEGER COLON LEFT_BRACKET array_expressions RIGHT_BRACKET""" d = OrderedDict() expressions = p[5] for i, k in enumerate(expressions[::2]): d[k] = expressions[i * 2 + 1] p[0] = d def p_array_expressions_array_expression(p): """array_expressions : expression SEMICOLON""" p[0] = [p[1]] def p_array_expressions_array_expression_array_expressions(p): """array_expressions : expression SEMICOLON array_expressions""" p[0] = [p[1]] + p[3] def p_object(p): """object : O_SYMBOL COLON INTEGER COLON STRING raw_array""" p[0] = Object(p[5], dict(p[6])) def eof(): raise RuntimeError('EOF Reached') def p_error(p): if p is None: eof() else: raise RuntimeError(str(p)) def parse(text): parser = ply.yacc.yacc() expression = parser.parse(text, lexer=lex()) return expression
[ "collections.OrderedDict" ]
[((1191, 1204), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1202, 1204), False, 'from collections import OrderedDict\n')]
import base64 import json import typing import marshmallow from boto3.dynamodb.conditions import Key from drf_yasg2.utils import swagger_auto_schema from flag_engine.api.schemas import APITraitSchema from flag_engine.identities.builders import ( build_identity_dict, build_identity_model, ) from rest_framework import status, viewsets from rest_framework.decorators import action from rest_framework.exceptions import NotFound, ValidationError from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from app.pagination import EdgeIdentityPagination from edge_api.identities.serializers import ( EdgeIdentityFeatureStateSerializer, EdgeIdentityFsQueryparamSerializer, EdgeIdentitySerializer, EdgeIdentityTraitsSerializer, ) from environments.identities.models import Identity from environments.models import Environment from environments.permissions.constants import MANAGE_IDENTITIES from environments.permissions.permissions import NestedEnvironmentPermissions from features.permissions import IdentityFeatureStatePermissions from projects.exceptions import DynamoNotEnabledError from .exceptions import TraitPersistenceError trait_schema = APITraitSchema() class EdgeIdentityViewSet(viewsets.ModelViewSet): serializer_class = EdgeIdentitySerializer pagination_class = EdgeIdentityPagination lookup_field = "identity_uuid" dynamo_identifier_search_functions = { "EQUAL": lambda identifier: Key("identifier").eq(identifier), "BEGINS_WITH": lambda identifier: Key("identifier").begins_with(identifier), } def initial(self, request, *args, **kwargs): environment = self.get_environment_from_request() if not environment.project.enable_dynamo_db: raise DynamoNotEnabledError() super().initial(request, *args, **kwargs) def _get_search_function_and_value( self, search_query: str, ) -> typing.Tuple[typing.Callable, str]: if search_query.startswith('"') and search_query.endswith('"'): return self.dynamo_identifier_search_functions[ "EQUAL" ], search_query.replace('"', "") return self.dynamo_identifier_search_functions["BEGINS_WITH"], search_query def get_object(self): return Identity.dynamo_wrapper.get_item_from_uuid_or_404( self.kwargs["identity_uuid"] ) def get_queryset(self): page_size = self.pagination_class().get_page_size(self.request) previous_last_evaluated_key = self.request.GET.get("last_evaluated_key") search_query = self.request.query_params.get("q") start_key = None if previous_last_evaluated_key: start_key = json.loads(base64.b64decode(previous_last_evaluated_key)) if not search_query: return Identity.dynamo_wrapper.get_all_items( self.kwargs["environment_api_key"], page_size, start_key ) search_func, search_identifier = self._get_search_function_and_value( search_query ) identity_documents = Identity.dynamo_wrapper.search_items_with_identifier( self.kwargs["environment_api_key"], search_identifier, search_func, page_size, start_key, ) return identity_documents def get_permissions(self): return [ IsAuthenticated(), NestedEnvironmentPermissions( action_permission_map={ "retrieve": MANAGE_IDENTITIES, "get_traits": MANAGE_IDENTITIES, "update_traits": MANAGE_IDENTITIES, } ), ] def get_environment_from_request(self): """ Get environment object from URL parameters in request. """ return Environment.objects.get(api_key=self.kwargs["environment_api_key"]) def perform_destroy(self, instance): Identity.dynamo_wrapper.delete_item(instance["composite_key"]) @swagger_auto_schema( responses={200: EdgeIdentityTraitsSerializer(many=True)}, ) @action(detail=True, methods=["get"], url_path="list-traits") def get_traits(self, request, *args, **kwargs): identity = self.get_object() data = trait_schema.dump(identity["identity_traits"], many=True) return Response(data=data, status=status.HTTP_200_OK) @swagger_auto_schema( method="put", request_body=EdgeIdentityTraitsSerializer, responses={200: EdgeIdentityTraitsSerializer()}, ) @action(detail=True, methods=["put"], url_path="update-traits") def update_traits(self, request, *args, **kwargs): environment = self.get_environment_from_request() if not environment.project.organisation.persist_trait_data: raise TraitPersistenceError() identity = build_identity_model(self.get_object()) try: trait = trait_schema.load(request.data) except marshmallow.ValidationError as validation_error: raise ValidationError(validation_error) from validation_error identity.update_traits([trait]) Identity.dynamo_wrapper.put_item(build_identity_dict(identity)) data = trait_schema.dump(trait) return Response(data, status=status.HTTP_200_OK) class EdgeIdentityFeatureStateViewSet(viewsets.ModelViewSet): permission_classes = [IsAuthenticated, IdentityFeatureStatePermissions] lookup_field = "featurestate_uuid" serializer_class = EdgeIdentityFeatureStateSerializer # Patch is not supported http_method_names = [ "get", "post", "put", "delete", "head", "options", "trace", ] pagination_class = None def initial(self, request, *args, **kwargs): super().initial(request, *args, **kwargs) identity_document = Identity.dynamo_wrapper.get_item_from_uuid_or_404( self.kwargs["edge_identity_identity_uuid"] ) self.identity = build_identity_model(identity_document) def get_object(self): featurestate_uuid = self.kwargs["featurestate_uuid"] try: featurestate = next( filter( lambda fs: fs.featurestate_uuid == featurestate_uuid, self.identity.identity_features, ) ) except StopIteration: raise NotFound() return featurestate @swagger_auto_schema(query_serializer=EdgeIdentityFsQueryparamSerializer()) def list(self, request, *args, **kwargs): q_params_serializer = EdgeIdentityFsQueryparamSerializer( data=self.request.query_params ) q_params_serializer.is_valid(raise_exception=True) identity_features = self.identity.identity_features feature = q_params_serializer.data.get("feature") if feature: identity_features = filter( lambda fs: fs.feature.id == feature, identity_features ) serializer = self.get_serializer(identity_features, many=True) return Response(data=serializer.data, status=status.HTTP_200_OK) def perform_destroy(self, instance): self.identity.identity_features.remove(instance) Identity.dynamo_wrapper.put_item(build_identity_dict(self.identity))
[ "projects.exceptions.DynamoNotEnabledError", "base64.b64decode", "environments.identities.models.Identity.dynamo_wrapper.get_all_items", "rest_framework.response.Response", "environments.models.Environment.objects.get", "rest_framework.exceptions.NotFound", "edge_api.identities.serializers.EdgeIdentityFsQueryparamSerializer", "environments.permissions.permissions.NestedEnvironmentPermissions", "environments.identities.models.Identity.dynamo_wrapper.search_items_with_identifier", "rest_framework.exceptions.ValidationError", "environments.identities.models.Identity.dynamo_wrapper.get_item_from_uuid_or_404", "environments.identities.models.Identity.dynamo_wrapper.delete_item", "flag_engine.api.schemas.APITraitSchema", "edge_api.identities.serializers.EdgeIdentityTraitsSerializer", "flag_engine.identities.builders.build_identity_model", "boto3.dynamodb.conditions.Key", "rest_framework.decorators.action", "flag_engine.identities.builders.build_identity_dict", "rest_framework.permissions.IsAuthenticated" ]
[((1215, 1231), 'flag_engine.api.schemas.APITraitSchema', 'APITraitSchema', ([], {}), '()\n', (1229, 1231), False, 'from flag_engine.api.schemas import APITraitSchema\n'), ((4174, 4234), 'rest_framework.decorators.action', 'action', ([], {'detail': '(True)', 'methods': "['get']", 'url_path': '"""list-traits"""'}), "(detail=True, methods=['get'], url_path='list-traits')\n", (4180, 4234), False, 'from rest_framework.decorators import action\n'), ((4627, 4689), 'rest_framework.decorators.action', 'action', ([], {'detail': '(True)', 'methods': "['put']", 'url_path': '"""update-traits"""'}), "(detail=True, methods=['put'], url_path='update-traits')\n", (4633, 4689), False, 'from rest_framework.decorators import action\n'), ((2323, 2402), 'environments.identities.models.Identity.dynamo_wrapper.get_item_from_uuid_or_404', 'Identity.dynamo_wrapper.get_item_from_uuid_or_404', (["self.kwargs['identity_uuid']"], {}), "(self.kwargs['identity_uuid'])\n", (2372, 2402), False, 'from environments.identities.models import Identity\n'), ((3129, 3280), 'environments.identities.models.Identity.dynamo_wrapper.search_items_with_identifier', 'Identity.dynamo_wrapper.search_items_with_identifier', (["self.kwargs['environment_api_key']", 'search_identifier', 'search_func', 'page_size', 'start_key'], {}), "(self.kwargs[\n 'environment_api_key'], search_identifier, search_func, page_size,\n start_key)\n", (3181, 3280), False, 'from environments.identities.models import Identity\n'), ((3889, 3956), 'environments.models.Environment.objects.get', 'Environment.objects.get', ([], {'api_key': "self.kwargs['environment_api_key']"}), "(api_key=self.kwargs['environment_api_key'])\n", (3912, 3956), False, 'from environments.models import Environment\n'), ((4007, 4069), 'environments.identities.models.Identity.dynamo_wrapper.delete_item', 'Identity.dynamo_wrapper.delete_item', (["instance['composite_key']"], {}), "(instance['composite_key'])\n", (4042, 4069), False, 'from environments.identities.models import Identity\n'), ((4412, 4458), 'rest_framework.response.Response', 'Response', ([], {'data': 'data', 'status': 'status.HTTP_200_OK'}), '(data=data, status=status.HTTP_200_OK)\n', (4420, 4458), False, 'from rest_framework.response import Response\n'), ((5342, 5383), 'rest_framework.response.Response', 'Response', (['data'], {'status': 'status.HTTP_200_OK'}), '(data, status=status.HTTP_200_OK)\n', (5350, 5383), False, 'from rest_framework.response import Response\n'), ((5955, 6053), 'environments.identities.models.Identity.dynamo_wrapper.get_item_from_uuid_or_404', 'Identity.dynamo_wrapper.get_item_from_uuid_or_404', (["self.kwargs['edge_identity_identity_uuid']"], {}), "(self.kwargs[\n 'edge_identity_identity_uuid'])\n", (6004, 6053), False, 'from environments.identities.models import Identity\n'), ((6095, 6134), 'flag_engine.identities.builders.build_identity_model', 'build_identity_model', (['identity_document'], {}), '(identity_document)\n', (6115, 6134), False, 'from flag_engine.identities.builders import build_identity_dict, build_identity_model\n'), ((6696, 6762), 'edge_api.identities.serializers.EdgeIdentityFsQueryparamSerializer', 'EdgeIdentityFsQueryparamSerializer', ([], {'data': 'self.request.query_params'}), '(data=self.request.query_params)\n', (6730, 6762), False, 'from edge_api.identities.serializers import EdgeIdentityFeatureStateSerializer, EdgeIdentityFsQueryparamSerializer, EdgeIdentitySerializer, EdgeIdentityTraitsSerializer\n'), ((7196, 7253), 'rest_framework.response.Response', 'Response', ([], {'data': 'serializer.data', 'status': 'status.HTTP_200_OK'}), '(data=serializer.data, status=status.HTTP_200_OK)\n', (7204, 7253), False, 'from rest_framework.response import Response\n'), ((1794, 1817), 'projects.exceptions.DynamoNotEnabledError', 'DynamoNotEnabledError', ([], {}), '()\n', (1815, 1817), False, 'from projects.exceptions import DynamoNotEnabledError\n'), ((2861, 2960), 'environments.identities.models.Identity.dynamo_wrapper.get_all_items', 'Identity.dynamo_wrapper.get_all_items', (["self.kwargs['environment_api_key']", 'page_size', 'start_key'], {}), "(self.kwargs['environment_api_key'],\n page_size, start_key)\n", (2898, 2960), False, 'from environments.identities.models import Identity\n'), ((3438, 3455), 'rest_framework.permissions.IsAuthenticated', 'IsAuthenticated', ([], {}), '()\n', (3453, 3455), False, 'from rest_framework.permissions import IsAuthenticated\n'), ((3469, 3629), 'environments.permissions.permissions.NestedEnvironmentPermissions', 'NestedEnvironmentPermissions', ([], {'action_permission_map': "{'retrieve': MANAGE_IDENTITIES, 'get_traits': MANAGE_IDENTITIES,\n 'update_traits': MANAGE_IDENTITIES}"}), "(action_permission_map={'retrieve':\n MANAGE_IDENTITIES, 'get_traits': MANAGE_IDENTITIES, 'update_traits':\n MANAGE_IDENTITIES})\n", (3497, 3629), False, 'from environments.permissions.permissions import NestedEnvironmentPermissions\n'), ((5256, 5285), 'flag_engine.identities.builders.build_identity_dict', 'build_identity_dict', (['identity'], {}), '(identity)\n', (5275, 5285), False, 'from flag_engine.identities.builders import build_identity_dict, build_identity_model\n'), ((6582, 6618), 'edge_api.identities.serializers.EdgeIdentityFsQueryparamSerializer', 'EdgeIdentityFsQueryparamSerializer', ([], {}), '()\n', (6616, 6618), False, 'from edge_api.identities.serializers import EdgeIdentityFeatureStateSerializer, EdgeIdentityFsQueryparamSerializer, EdgeIdentitySerializer, EdgeIdentityTraitsSerializer\n'), ((7394, 7428), 'flag_engine.identities.builders.build_identity_dict', 'build_identity_dict', (['self.identity'], {}), '(self.identity)\n', (7413, 7428), False, 'from flag_engine.identities.builders import build_identity_dict, build_identity_model\n'), ((2765, 2810), 'base64.b64decode', 'base64.b64decode', (['previous_last_evaluated_key'], {}), '(previous_last_evaluated_key)\n', (2781, 2810), False, 'import base64\n'), ((4121, 4160), 'edge_api.identities.serializers.EdgeIdentityTraitsSerializer', 'EdgeIdentityTraitsSerializer', ([], {'many': '(True)'}), '(many=True)\n', (4149, 4160), False, 'from edge_api.identities.serializers import EdgeIdentityFeatureStateSerializer, EdgeIdentityFsQueryparamSerializer, EdgeIdentitySerializer, EdgeIdentityTraitsSerializer\n'), ((5119, 5152), 'rest_framework.exceptions.ValidationError', 'ValidationError', (['validation_error'], {}), '(validation_error)\n', (5134, 5152), False, 'from rest_framework.exceptions import NotFound, ValidationError\n'), ((4583, 4613), 'edge_api.identities.serializers.EdgeIdentityTraitsSerializer', 'EdgeIdentityTraitsSerializer', ([], {}), '()\n', (4611, 4613), False, 'from edge_api.identities.serializers import EdgeIdentityFeatureStateSerializer, EdgeIdentityFsQueryparamSerializer, EdgeIdentitySerializer, EdgeIdentityTraitsSerializer\n'), ((6500, 6510), 'rest_framework.exceptions.NotFound', 'NotFound', ([], {}), '()\n', (6508, 6510), False, 'from rest_framework.exceptions import NotFound, ValidationError\n'), ((1490, 1507), 'boto3.dynamodb.conditions.Key', 'Key', (['"""identifier"""'], {}), "('identifier')\n", (1493, 1507), False, 'from boto3.dynamodb.conditions import Key\n'), ((1566, 1583), 'boto3.dynamodb.conditions.Key', 'Key', (['"""identifier"""'], {}), "('identifier')\n", (1569, 1583), False, 'from boto3.dynamodb.conditions import Key\n')]
import torch from collections import defaultdict from uninas.data.abstract import AbstractDataSet from uninas.models.networks.abstract import AbstractNetwork from uninas.training.result import ResultValue from uninas.utils.args import ArgsInterface, Namespace, Argument class AbstractMetric(ArgsInterface): """ Metrics during (supervised) network training, between network outputs and some targets """ def __init__(self, head_weights: list, **kwargs): super().__init__() self.head_weights = head_weights for k, v in kwargs.items(): self.__setattr__(k, v) def get_log_name(self) -> str: raise NotImplementedError @classmethod def from_args(cls, args: Namespace, index: int, data_set: AbstractDataSet, head_weights: list) -> 'AbstractMetric': """ :param args: global arguments namespace :param index: index of this metric :param data_set: data set that is evaluated on :param head_weights: how each head is weighted """ all_parsed = cls._all_parsed_arguments(args, index=index) return cls(head_weights=head_weights, **all_parsed) @classmethod def _to_dict(cls, key: str, prefix: str, name: str, dct: dict) -> dict: """ adds key and name to all dict entries """ s = "%s/%s" % (key, name) if len(prefix) == 0 else "%s/%s/%s" % (prefix, key, name) return {'%s/%s' % (s, k): v for k, v in dct.items()} @classmethod def _batchify_tensors(cls, logits: [torch.Tensor], targets: torch.Tensor) -> ([torch.Tensor], torch.Tensor): """ reshape all [batch, classes, n0, n1, ...] tensors into [batch, classes] :param logits: network outputs :param targets: output targets """ new_logits = [] for tensor in logits + [targets]: shape = tensor.shape if len(shape) > 2: new_logits.append(tensor.transpose(0, 1).reshape(shape[1], -1).transpose(0, 1)) else: new_logits.append(tensor) return new_logits[:-1], new_logits[-1] @classmethod def _remove_onehot(cls, targets: torch.Tensor) -> torch.Tensor: """ remove one-hot encoding from a [batch, classes] tensor """ if len(targets.shape) == 2: return torch.argmax(targets, dim=-1) return targets @classmethod def _ignore_with_index(cls, logits: [torch.Tensor], targets: torch.Tensor, ignore_target_index=-999, ignore_prediction_index=-999) ->\ ([torch.Tensor], torch.Tensor): """ remove all occurrences where the target equals the ignore index, prevent logits from predicting an ignored class :param logits: network outputs, each has the [batch, classes] shape :param targets: output targets, has the [batch] shape :param ignore_target_index: remove all samples where the target matches this index :param ignore_prediction_index: if the network predicts this index, choose the next most-likely prediction instead """ # remove all occurrences where the target equals the ignore index if ignore_target_index >= 0: to_use = targets != ignore_target_index logits = [lg[to_use] for lg in logits] targets = targets[to_use] # prevent logits from predicting an ignored class if ignore_prediction_index >= 0: new_logits = [lg.clone().detach_() for lg in logits] for lg in new_logits: min_ = lg.min(axis=1).values lg[:, ignore_prediction_index] = min_ logits = new_logits return logits, targets def get_accumulated_stats(self, key: str) -> {str: torch.Tensor}: """ get the averaged statistics for a specific key """ return {} def eval_accumulated_stats(self, save_dir: str, key: str, prefix="", epoch: int = None, stats: dict = None) -> dict: """ visualize/log this metric :param save_dir: if stats are visualized, where to save them :param key: key to log :param prefix: string prefix added in front of each dict key :param epoch: optional int :param stats: {str: tensor} or {str: [tensor]} :return: usually empty dict if stats are visualized, otherwise the result of accumulating the stats """ return {} def reset(self, key: str = None): """ reset tracked stats for a specific key, or all (if key == None) """ pass def on_epoch_start(self, epoch: int, is_last=False): pass def evaluate(self, net: AbstractNetwork, inputs: torch.Tensor, logits: [torch.Tensor], targets: torch.Tensor, key: str) -> {str: ResultValue}: """ :param net: evaluated network :param inputs: network inputs :param logits: network outputs :param targets: output targets :param key: prefix for the dict keys, e.g. "train" or "test" :return: dictionary of string keys with corresponding results """ raise NotImplementedError def _evaluate(self, net: AbstractNetwork, inputs: torch.Tensor, logits: [torch.Tensor], targets: torch.Tensor) -> {str: ResultValue}: """ :param net: evaluated network :param inputs: network inputs :param logits: network outputs :param targets: output targets :return: dictionary of string keys with corresponding results """ raise NotImplementedError class AbstractLogMetric(AbstractMetric): """ A metric that is logged epoch-wise to the output stream and loggers (e.g. tensorboard), all single results of _evaluate() are weighted averaged later, by how the batch sizes of each single result """ def get_log_name(self) -> str: raise NotImplementedError def evaluate(self, net: AbstractNetwork, inputs: torch.Tensor, logits: [torch.Tensor], targets: torch.Tensor, key: str) -> {str: ResultValue}: """ :param net: evaluated network :param inputs: network inputs :param logits: network outputs :param targets: output targets :param key: prefix for the dict keys, e.g. "train" or "test" :return: dictionary of string keys with corresponding results """ with torch.no_grad(): cur = self._evaluate(net, inputs, logits, targets) cur = {k: v.unsqueeze() for k, v in cur.items()} return self._to_dict(key, "", self.get_log_name(), cur) def _evaluate(self, net: AbstractNetwork, inputs: torch.Tensor, logits: [torch.Tensor], targets: torch.Tensor) -> {str: ResultValue}: """ :param net: evaluated network :param inputs: network inputs :param logits: network outputs :param targets: output targets :return: dictionary of string keys with corresponding results """ raise NotImplementedError class AbstractAccumulateMetric(AbstractMetric): """ A metric that accumulates stats first """ def __init__(self, head_weights: list, each_epochs=-1, **kwargs): super().__init__(head_weights, **kwargs) self.stats = defaultdict(dict) self.each_epochs = each_epochs self.is_active = False def get_log_name(self) -> str: raise NotImplementedError @classmethod def _combine_tensors(cls, dict_key: str, tensors: [torch.Tensor]) -> torch.Tensor: """ how to combine tensors if they are gathered from distributed training or from different batches """ return sum(tensors) @classmethod def args_to_add(cls, index=None) -> [Argument]: """ list arguments to add to argparse when this class (or a child class) is chosen """ return super().args_to_add(index) + [ Argument('each_epochs', default=-1, type=int, help='visualize each n epochs, only last if <=0'), ] def reset(self, key: str = None): """ reset tracked stats for a specific key, or all (if key == None) """ keys = [key] if isinstance(key, str) else list(self.stats.keys()) for k in keys: self.stats[k].clear() def on_epoch_start(self, epoch: int, is_last=False): self.reset(key=None) self.is_active = is_last or ((self.each_epochs > 0) and ((epoch + 1) % self.each_epochs == 0)) def evaluate(self, net: AbstractNetwork, inputs: torch.Tensor, logits: [torch.Tensor], targets: torch.Tensor, key: str) -> {str: torch.Tensor}: """ :param net: evaluated network :param inputs: network inputs :param logits: network outputs :param targets: output targets :param key: prefix for the dict keys, e.g. "train" or "test" :return: dictionary of string keys with corresponding [scalar] tensors """ if not self.is_active: return {} with torch.no_grad(): cur = self._evaluate(net, inputs, logits, targets) # add all values to current stat dict for k, v in cur.items(): if k in self.stats[key]: self.stats[key][k] = self._combine_tensors(k, [self.stats[key][k], v.value]) else: self.stats[key][k] = v.value return {} def _evaluate(self, net: AbstractNetwork, inputs: torch.Tensor, logits: [torch.Tensor], targets: torch.Tensor) -> {str: ResultValue}: """ :param net: evaluated network :param inputs: network inputs :param logits: network outputs :param targets: output targets :return: dictionary of string keys with corresponding results """ raise NotImplementedError def get_accumulated_stats(self, key: str) -> {str: torch.Tensor}: """ get the averaged statistics for a specific key """ return self.stats.get(key, {}) def eval_accumulated_stats(self, save_dir: str, key: str, prefix="", epoch: int = None, stats: dict = None) -> dict: """ visualize/log this metric :param save_dir: if stats are visualized, where to save them :param key: key to log :param prefix: string prefix added in front of each dict key :param epoch: optional int :param stats: {str: tensor} or {str: [tensor]} :return: usually empty dict if stats are visualized, otherwise the result of accumulating the stats """ if stats is None: stats = self.get_accumulated_stats(key) else: with torch.no_grad(): stats = {k: self._combine_tensors(k, v) if isinstance(v, list) else v for k, v in stats.items()} if len(stats) > 0: if isinstance(epoch, int): save_dir = '%s/epoch_%d/' % (save_dir, epoch) self._viz_stats(save_dir, key, prefix, stats) return self._to_dict(key, prefix, self.get_log_name(), self._compute_stats(save_dir, key, stats)) return {} def _compute_stats(self, save_dir: str, key: str, stats: dict) -> dict: """ compute this metric """ return {} def _viz_stats(self, save_dir: str, key: str, prefix: str, stats: dict): """ visualize this metric """ pass
[ "collections.defaultdict", "torch.argmax", "uninas.utils.args.Argument", "torch.no_grad" ]
[((7330, 7347), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (7341, 7347), False, 'from collections import defaultdict\n'), ((2332, 2361), 'torch.argmax', 'torch.argmax', (['targets'], {'dim': '(-1)'}), '(targets, dim=-1)\n', (2344, 2361), False, 'import torch\n'), ((6432, 6447), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (6445, 6447), False, 'import torch\n'), ((9063, 9078), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (9076, 9078), False, 'import torch\n'), ((7956, 8056), 'uninas.utils.args.Argument', 'Argument', (['"""each_epochs"""'], {'default': '(-1)', 'type': 'int', 'help': '"""visualize each n epochs, only last if <=0"""'}), "('each_epochs', default=-1, type=int, help=\n 'visualize each n epochs, only last if <=0')\n", (7964, 8056), False, 'from uninas.utils.args import ArgsInterface, Namespace, Argument\n'), ((10733, 10748), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (10746, 10748), False, 'import torch\n')]
import subprocess path_oommf = 'C:/Users/jmank/Desktop/oommf12b4_20200930_86_x64/oommf/oommf.tcl' mif_file = 'C:/Users/jmank/Desktop/oommf12b4_20200930_86_x64/oommf/Skyrmion/skyrmionDome2.mif' length = 2 param_string = ' boxsi '#-parameters "integer_length % s" ' % length threads_string = ' -threads 28 ' oommf_command = 'tclsh ' + path_oommf + param_string + threads_string + mif_file subprocess.call(oommf_command, shell=True)
[ "subprocess.call" ]
[((391, 433), 'subprocess.call', 'subprocess.call', (['oommf_command'], {'shell': '(True)'}), '(oommf_command, shell=True)\n', (406, 433), False, 'import subprocess\n')]
from __future__ import annotations from abc import abstractclassmethod, abstractmethod, abstractproperty from typing import ( TYPE_CHECKING, Any, Generator, Generic, Optional, Type, TypeVar, Union, ) import numpy as np from pandas import DataFrame from tanuki.data_store.data_type import DataType from tanuki.database.data_token import DataToken if TYPE_CHECKING: from tanuki.data_store.index.index import Index from tanuki.data_store.index.index_alias import IndexAlias from tanuki.data_store.query import Query B = TypeVar("B", bound="DataBackend") class LocIndexer(Generic[B]): @abstractmethod def __getitem__(self, item: Union[int, list, slice]) -> B: raise NotImplementedError() class ILocIndexer(Generic[B]): @abstractmethod def __getitem__(self, item: Union[Any, list, slice]) -> B: raise NotImplementedError() class DataBackend: @abstractmethod def is_link(self: B) -> bool: raise NotImplementedError() @abstractmethod def link_token(self: B) -> Optional[DataToken]: raise NotImplementedError() @abstractmethod def to_pandas(self) -> DataFrame: raise NotImplementedError() @abstractproperty def columns(self) -> list[str]: raise NotImplementedError() @abstractproperty def values(self) -> np.ndarray: raise NotImplementedError() @abstractproperty def dtypes(self) -> dict[str, DataType]: raise NotImplementedError() @abstractmethod def cast_columns(self, column_dtypes: dict[str, type]) -> DataBackend: raise NotImplementedError() @abstractmethod def to_dict(self, orient) -> dict[str, any]: raise NotImplementedError() @abstractproperty def index(self) -> Index: raise NotImplementedError() @abstractproperty def index_name(self) -> Union[str, list[str]]: raise NotImplementedError() @abstractproperty def loc(self: B) -> LocIndexer[B]: raise NotImplementedError() @abstractproperty def iloc(self: B) -> ILocIndexer[B]: raise NotImplementedError() @abstractmethod def equals(self, other: Any) -> bool: raise NotImplementedError() @abstractmethod def __eq__(self, other: Any) -> DataFrame: raise NotImplementedError() @abstractmethod def __ne__(self, other: Any) -> DataFrame: raise NotImplementedError() @abstractmethod def __gt__(self, other: Any) -> DataFrame: raise NotImplementedError() @abstractmethod def __ge__(self, other: Any) -> DataFrame: raise NotImplementedError() @abstractmethod def __lt__(self, other: Any) -> DataFrame: raise NotImplementedError() @abstractmethod def __le__(self, other: Any) -> DataFrame: raise NotImplementedError() @abstractmethod def __len__(self) -> int: raise NotImplementedError() @abstractmethod def __iter__(self) -> Generator[str, None, None]: raise NotImplementedError() @abstractmethod def iterrows(self) -> Generator[tuple[int, B], None, None]: raise NotImplementedError() @abstractmethod def itertuples(self, ignore_index: bool = False) -> Generator[tuple, None, None]: raise NotImplementedError() @abstractmethod def __getitem__(self, item: Union[str, list[bool]]) -> Any: raise NotImplementedError() @abstractmethod def getitems(self, item: list[str]) -> B: raise NotImplementedError() @abstractmethod def getmask(self, mask: list[bool]) -> B: raise NotImplementedError() @abstractmethod def query(self, query: Query) -> B: raise NotImplementedError() @abstractmethod def __setitem__(self, item: str, value: Any) -> None: raise NotImplementedError() @abstractmethod def get_index(self, index_alias: IndexAlias) -> Index: raise NotImplementedError() @abstractmethod def set_index(self: B, index: Union[Index, IndexAlias]) -> B: raise NotImplementedError() @abstractmethod def reset_index(self: B) -> B: raise NotImplementedError() @abstractmethod def append(self: B, new_backend: B, ignore_index: bool = False) -> B: raise NotImplementedError() @abstractmethod def drop_indices(self: B, indices: list[int]) -> B: raise NotImplementedError() @abstractclassmethod def concat(cls: Type[B], all_backends: list[B], ignore_index: bool = False) -> B: raise NotImplementedError() @abstractmethod def nunique(self: B) -> int: raise NotImplementedError() @abstractmethod def __str__(self: B) -> str: raise NotImplementedError() @abstractmethod def __repr__(self: B) -> str: raise NotImplementedError()
[ "typing.TypeVar" ]
[((567, 600), 'typing.TypeVar', 'TypeVar', (['"""B"""'], {'bound': '"""DataBackend"""'}), "('B', bound='DataBackend')\n", (574, 600), False, 'from typing import TYPE_CHECKING, Any, Generator, Generic, Optional, Type, TypeVar, Union\n')]
SERVER_HOST = '127.0.0.1' SERVER_PORT = 1335 MAX_GET_REQUESTS = 10 import re, socket import random import string MSG_AUTH_RE = re.compile('''^AUTH +(.+) +(.+)''') MSG_GET_RE = re.compile('''^GET +(.+) +(.+)''') MSG_RVK_RE = re.compile('''^RVK +(.+)''') def serve(srv): while 1: print('[S] Waiting for new connections') (s, address) = srv.accept() print('[S] New connection from', address) handle_connection(s) print('[S] Closing connection') s.close() def handle_connection(s): print('[S] Waiting for request') auth = False while (not auth): req = s.recv(1024).decode().strip() print('[S] Received: ' + req) m = MSG_AUTH_RE.match(req) if (m is not None): letters = string.ascii_letters token = ''.join(random.choice(letters) for i in range(20)) reply = 'SUCC ' + token print('[S] Replying: ', reply) s.sendall(str.encode(reply + '\n')) auth = True getRequests = 0 while(auth): req = s.recv(1024).decode().strip() print('[S] Received: ' + req) m_get = MSG_GET_RE.match(req) m_rvk = MSG_RVK_RE.match(req) if (m_get is not None): if (getRequests < MAX_GET_REQUESTS): reply = 'RES content' print('[S] Replying: ', reply) s.sendall(str.encode(reply + '\n')) getRequests += 1 else: reply = 'TIMEOUT' print('[S] Replying: ', reply) s.sendall(str.encode(reply + '\n')) auth = False elif (m_rvk is not None): auth = True break else: print('[S] Invalid message') if (__name__ == '__main__'): # SERVER_PORT = int(argv[1]) print('[S] Auth server starting. Press Ctrl+C to quit') srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # Avoid TIME_WAIT srv.bind((SERVER_HOST, SERVER_PORT)) print('[S] Listening on ', SERVER_HOST, SERVER_PORT) srv.listen(8) serve(srv) srv.close()
[ "socket.socket", "random.choice", "re.compile" ]
[((129, 160), 're.compile', 're.compile', (['"""^AUTH +(.+) +(.+)"""'], {}), "('^AUTH +(.+) +(.+)')\n", (139, 160), False, 'import re, socket\n'), ((178, 208), 're.compile', 're.compile', (['"""^GET +(.+) +(.+)"""'], {}), "('^GET +(.+) +(.+)')\n", (188, 208), False, 'import re, socket\n'), ((226, 250), 're.compile', 're.compile', (['"""^RVK +(.+)"""'], {}), "('^RVK +(.+)')\n", (236, 250), False, 'import re, socket\n'), ((2074, 2123), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (2087, 2123), False, 'import re, socket\n'), ((828, 850), 'random.choice', 'random.choice', (['letters'], {}), '(letters)\n', (841, 850), False, 'import random\n')]
#!/usr/bin/python3 import json state_file_path = "resources/state.json" version = 0 class StateVersionException(Exception): pass def _get_state(): with open(state_file_path, 'r') as state_file: data = json.load(state_file) if data['version'] == 0: data.pop('version', None) return data else: raise StateVersionException(f"No logic to parse state with version: {version} implemented") def set_state(d): dc = d.copy() dc['version'] = version with open(state_file_path, 'w') as state_file: json.dump(dc, state_file, sort_keys=True, indent=4) class State: def __init__(self): self.state = _get_state() pass def set_state_value(self, key, value): self.state[key] = value def get_value(self, key): return self.state[key] def __exit__(self, exc_type, exc_val, exc_tb): set_state(self.state)
[ "json.dump", "json.load" ]
[((222, 243), 'json.load', 'json.load', (['state_file'], {}), '(state_file)\n', (231, 243), False, 'import json\n'), ((582, 633), 'json.dump', 'json.dump', (['dc', 'state_file'], {'sort_keys': '(True)', 'indent': '(4)'}), '(dc, state_file, sort_keys=True, indent=4)\n', (591, 633), False, 'import json\n')]
from rain_alert.utils import RECEIVERS_FILE_PATH import unittest from .context import utils import os class TestGetReceivers(unittest.TestCase): def test_no_file(self): # set the path of the receivers to this folder because in tests folder # we dont have the receivers file utils.RECEIVERS_FILE_PATH = os.path.split(utils.RECEIVERS_FILE_PATH)[-1] self.assertTrue(True) """ class TestGetCredentials(unittest.TestCase): ... """ if __name__ == '__main__': unittest.main()
[ "unittest.main", "os.path.split" ]
[((504, 519), 'unittest.main', 'unittest.main', ([], {}), '()\n', (517, 519), False, 'import unittest\n'), ((335, 375), 'os.path.split', 'os.path.split', (['utils.RECEIVERS_FILE_PATH'], {}), '(utils.RECEIVERS_FILE_PATH)\n', (348, 375), False, 'import os\n')]
# Generated by Django 3.2.7 on 2021-10-18 12:21 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('perfil', '0001_initial'), ] operations = [ migrations.AlterField( model_name='perfil', name='endereco', field=models.CharField(max_length=50, verbose_name='Endereço'), ), migrations.AlterField( model_name='perfil', name='numero', field=models.CharField(max_length=5, verbose_name='Número'), ), migrations.AlterField( model_name='perfil', name='usuario', field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Usuário'), ), ]
[ "django.db.models.CharField", "django.db.models.OneToOneField", "django.db.migrations.swappable_dependency" ]
[((227, 284), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (258, 284), False, 'from django.db import migrations, models\n'), ((459, 515), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'verbose_name': '"""Endereço"""'}), "(max_length=50, verbose_name='Endereço')\n", (475, 515), False, 'from django.db import migrations, models\n'), ((637, 690), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(5)', 'verbose_name': '"""Número"""'}), "(max_length=5, verbose_name='Número')\n", (653, 690), False, 'from django.db import migrations, models\n'), ((813, 936), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Usuário"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n settings.AUTH_USER_MODEL, verbose_name='Usuário')\n", (833, 936), False, 'from django.db import migrations, models\n')]
import gpflow import matplotlib.pyplot as plt import numpy as np from robustgp import ConditionalVariance X = np.random.rand(150, 1) Y = 0.8 * np.cos(10 * X) + 1.2 * np.sin(8 * X + 0.3) + np.cos(17 * X) * 1.2 + np.random.randn(*X.shape) * 0.1 gpr = gpflow.models.GPR((X, Y), gpflow.kernels.SquaredExponential()) opt = gpflow.optimizers.Scipy() opt_logs = opt.minimize(gpr.training_loss, gpr.trainable_variables, options=dict(maxiter=100)) k = gpflow.kernels.SquaredExponential() gpflow.utilities.multiple_assign(k, gpflow.utilities.read_values(gpr.kernel)) Z_initer = ConditionalVariance() sp = gpflow.models.SGPR((X, Y), k, Z_initer.compute_initialisation(X, 6, k)[0]) gpflow.utilities.multiple_assign(sp, gpflow.utilities.read_values(gpr)) pX = np.linspace(0, 1, 3000)[:, None] m, v = sp.predict_f(pX) ipm, _ = sp.predict_f(sp.inducing_variable.Z.value()) fig, (ax1, ax2) = plt.subplots(2, 1) ax1.plot(X, Y, 'x') ax1.plot(pX, m) ax1.plot(sp.inducing_variable.Z.value(), ipm, 'o', color='C3') deviation = (2 * (v + sp.likelihood.variance.value()) ** 0.5).numpy().flatten() ax1.fill_between(pX.flatten(), m.numpy().flatten() - deviation, m.numpy().flatten() + deviation, alpha=0.3) ax1.axvline(pX[np.argmax(v)].item(), color='C2') ax1.set_ylabel("y") ax2.plot(pX, v ** 0.5) ax2.plot(sp.inducing_variable.Z.value(), sp.inducing_variable.Z.value() * 0.0, 'o', color='C3') ax2.axvline(pX[np.argmax(v)].item(), color='C2') ax2.set_xlabel("input $x$") ax2.set_ylabel("$\mathbb{V}\,[p(f(x) | \mathbf{u}]^{0.5}$") plt.show()
[ "robustgp.ConditionalVariance", "gpflow.kernels.SquaredExponential", "matplotlib.pyplot.show", "numpy.random.randn", "numpy.argmax", "gpflow.optimizers.Scipy", "gpflow.utilities.read_values", "numpy.sin", "numpy.linspace", "numpy.cos", "numpy.random.rand", "matplotlib.pyplot.subplots" ]
[((111, 133), 'numpy.random.rand', 'np.random.rand', (['(150)', '(1)'], {}), '(150, 1)\n', (125, 133), True, 'import numpy as np\n'), ((320, 345), 'gpflow.optimizers.Scipy', 'gpflow.optimizers.Scipy', ([], {}), '()\n', (343, 345), False, 'import gpflow\n'), ((446, 481), 'gpflow.kernels.SquaredExponential', 'gpflow.kernels.SquaredExponential', ([], {}), '()\n', (479, 481), False, 'import gpflow\n'), ((572, 593), 'robustgp.ConditionalVariance', 'ConditionalVariance', ([], {}), '()\n', (591, 593), False, 'from robustgp import ConditionalVariance\n'), ((882, 900), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(1)'], {}), '(2, 1)\n', (894, 900), True, 'import matplotlib.pyplot as plt\n'), ((1513, 1523), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1521, 1523), True, 'import matplotlib.pyplot as plt\n'), ((277, 312), 'gpflow.kernels.SquaredExponential', 'gpflow.kernels.SquaredExponential', ([], {}), '()\n', (310, 312), False, 'import gpflow\n'), ((518, 558), 'gpflow.utilities.read_values', 'gpflow.utilities.read_values', (['gpr.kernel'], {}), '(gpr.kernel)\n', (546, 558), False, 'import gpflow\n'), ((711, 744), 'gpflow.utilities.read_values', 'gpflow.utilities.read_values', (['gpr'], {}), '(gpr)\n', (739, 744), False, 'import gpflow\n'), ((752, 775), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(3000)'], {}), '(0, 1, 3000)\n', (763, 775), True, 'import numpy as np\n'), ((212, 237), 'numpy.random.randn', 'np.random.randn', (['*X.shape'], {}), '(*X.shape)\n', (227, 237), True, 'import numpy as np\n'), ((189, 203), 'numpy.cos', 'np.cos', (['(17 * X)'], {}), '(17 * X)\n', (195, 203), True, 'import numpy as np\n'), ((144, 158), 'numpy.cos', 'np.cos', (['(10 * X)'], {}), '(10 * X)\n', (150, 158), True, 'import numpy as np\n'), ((167, 186), 'numpy.sin', 'np.sin', (['(8 * X + 0.3)'], {}), '(8 * X + 0.3)\n', (173, 186), True, 'import numpy as np\n'), ((1203, 1215), 'numpy.argmax', 'np.argmax', (['v'], {}), '(v)\n', (1212, 1215), True, 'import numpy as np\n'), ((1391, 1403), 'numpy.argmax', 'np.argmax', (['v'], {}), '(v)\n', (1400, 1403), True, 'import numpy as np\n')]
import sys import time from src.github import Github from src.gitlab import Gitlab def exit(message): print(message) sys.exit() if __name__ == "__main__": # Get all gitlab repositories gitlab = Gitlab() gitlab_repos = gitlab.repositories() if gitlab_repos == None: exit('Not able to retreive gitlab repositories') elif gitlab_repos == dict(): exit('Zero repositories was fetched from the gitlab account') print ('Gitlab repositories found: ' + str(len(gitlab_repos))) # Get all github repositories github = Github() github_repos = github.repositories() if github_repos == None: exit('Not able to retreive github repositories') print ('Github repositories found: ' + str(len(github_repos))) # Skip repositories that already exists on github for key in github_repos.keys(): alternativeKey = str(key).replace('-', ' ') if key in gitlab_repos.keys(): gitlab_repos.pop(key) print(f'Repository "{key}" already exsists on Github and will not be exported from gitlab') if alternativeKey in gitlab_repos.keys(): gitlab_repos.pop(alternativeKey) print(f'Repository "{alternativeKey}" already exsists on Github and will not be exported from gitlab') for name, url in gitlab_repos.items(): name = str(name).replace(' ', '-') print(f'Starting import of repository: {name}') # Create repository that does not exist if github.repositoryCreate(name, '') == None: print(f'Unable to create repository: {name}') continue # Start import to repository if github.importStart(url, name) == None: exit(f'Unable to start import of "{url}" to github repo named "{name}"') # Check if import is done status = '' previousStatus = '' finishedStatus = [ 'complete', 'auth_failed', 'error', 'detection_needs_auth', 'detection_found_nothing', 'detection_found_multiple', None ] while status not in finishedStatus: status = github.importStatus(name) if previousStatus != status: print(f'Status: {status}') previousStatus = status if status == 'importing': # Enable transfer of git lfs files if github.getLargeFiles(name) == None: exit(f'Unable to get list of git lfs files in repo: {name}') if github.lfsPreference(name) == None: exit(f'Unable to set git lfs preference on: {name}') time.sleep(1) if status != 'complete': exit(f'Import of "{name}" to Github finished with status: {status}') print(f'Import of "{name}" to Github finished with status: {status}')
[ "time.sleep", "src.github.Github", "sys.exit", "src.gitlab.Gitlab" ]
[((126, 136), 'sys.exit', 'sys.exit', ([], {}), '()\n', (134, 136), False, 'import sys\n'), ((216, 224), 'src.gitlab.Gitlab', 'Gitlab', ([], {}), '()\n', (222, 224), False, 'from src.gitlab import Gitlab\n'), ((575, 583), 'src.github.Github', 'Github', ([], {}), '()\n', (581, 583), False, 'from src.github import Github\n'), ((2729, 2742), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2739, 2742), False, 'import time\n')]
# -*- coding: utf-8 -*- from flask import make_response, request, jsonify from flask_login import login_required import json from werkzeug.utils import secure_filename import os from app.models.fragment import Fragment from app.models.branch import Branch from app.models.tag import Tag from app.api import api from app.whoosh import search_helper from app import base_dir UPLOAD_FOLDER = 'static/resource/uploads/image/' ALLOWED_EXTENSIONS = set(['bmp', 'webp', 'png', 'jpg', 'jpeg', 'gif']) @api.route("/add_tag/", methods=['POST']) @login_required def add_tag(): # name = request.args.get('name', 0, type=int) response = {"status": 500, "msg": "name is Null!"} name = request.form['name'] if name != "": tag = Tag.add(name) if tag: res = {"id": tag.id, "name": tag.name} response['tag'] = res response["status"] = 200 else: response["msg"] = "tag has already exists!" return make_response(json.dumps(response)) @api.route("/add_branch/", methods=['POST']) @login_required def add_branch(): response = {"status": 500, "msg": "name is Null!"} name = request.form['name'] parent_id = request.form['parent'] if name != "": branch = Branch.add(name, parent_id=parent_id) if branch: res = {"id": branch.id, "name": branch.name} response['branch'] = res response["status"] = 200 else: response["msg"] = "branch has already exists!" return make_response(json.dumps(response)) @api.route("/search/<string:keyword>") def search(keyword): res = search_helper.search(keyword) data = {} data["result"] = res return jsonify(data) def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS @api.route("/upload_image/",methods=['POST']) def upload_image(): result = { "success" : 0, "message" : "", "url" : "" } if request.method == "POST": print(request.files) if 'editormd-image-file' not in request.files: result["message"] = "No file part" return jsonify(result) file = request.files['editormd-image-file'] if file.filename == '': result["message"] = "No selected file" return jsonify(result) if file and allowed_file(file.filename): filename = secure_filename(file.filename) save_path = os.path.join(base_dir,UPLOAD_FOLDER,filename) file.save(save_path) result["success"] = 1 result["message"] = "Success" result["url"] = "/"+ UPLOAD_FOLDER + filename return jsonify(result)
[ "app.api.api.route", "app.models.tag.Tag.add", "json.dumps", "werkzeug.utils.secure_filename", "app.models.branch.Branch.add", "flask.jsonify", "app.whoosh.search_helper.search", "os.path.join" ]
[((498, 538), 'app.api.api.route', 'api.route', (['"""/add_tag/"""'], {'methods': "['POST']"}), "('/add_tag/', methods=['POST'])\n", (507, 538), False, 'from app.api import api\n'), ((1013, 1056), 'app.api.api.route', 'api.route', (['"""/add_branch/"""'], {'methods': "['POST']"}), "('/add_branch/', methods=['POST'])\n", (1022, 1056), False, 'from app.api import api\n'), ((1564, 1601), 'app.api.api.route', 'api.route', (['"""/search/<string:keyword>"""'], {}), "('/search/<string:keyword>')\n", (1573, 1601), False, 'from app.api import api\n'), ((1859, 1904), 'app.api.api.route', 'api.route', (['"""/upload_image/"""'], {'methods': "['POST']"}), "('/upload_image/', methods=['POST'])\n", (1868, 1904), False, 'from app.api import api\n'), ((1633, 1662), 'app.whoosh.search_helper.search', 'search_helper.search', (['keyword'], {}), '(keyword)\n', (1653, 1662), False, 'from app.whoosh import search_helper\n'), ((1713, 1726), 'flask.jsonify', 'jsonify', (['data'], {}), '(data)\n', (1720, 1726), False, 'from flask import make_response, request, jsonify\n'), ((741, 754), 'app.models.tag.Tag.add', 'Tag.add', (['name'], {}), '(name)\n', (748, 754), False, 'from app.models.tag import Tag\n'), ((988, 1008), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (998, 1008), False, 'import json\n'), ((1253, 1290), 'app.models.branch.Branch.add', 'Branch.add', (['name'], {'parent_id': 'parent_id'}), '(name, parent_id=parent_id)\n', (1263, 1290), False, 'from app.models.branch import Branch\n'), ((1539, 1559), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (1549, 1559), False, 'import json\n'), ((2197, 2212), 'flask.jsonify', 'jsonify', (['result'], {}), '(result)\n', (2204, 2212), False, 'from flask import make_response, request, jsonify\n'), ((2367, 2382), 'flask.jsonify', 'jsonify', (['result'], {}), '(result)\n', (2374, 2382), False, 'from flask import make_response, request, jsonify\n'), ((2455, 2485), 'werkzeug.utils.secure_filename', 'secure_filename', (['file.filename'], {}), '(file.filename)\n', (2470, 2485), False, 'from werkzeug.utils import secure_filename\n'), ((2510, 2557), 'os.path.join', 'os.path.join', (['base_dir', 'UPLOAD_FOLDER', 'filename'], {}), '(base_dir, UPLOAD_FOLDER, filename)\n', (2522, 2557), False, 'import os\n'), ((2742, 2757), 'flask.jsonify', 'jsonify', (['result'], {}), '(result)\n', (2749, 2757), False, 'from flask import make_response, request, jsonify\n')]
from env.evrp import EVRPEnv from utils.config import read_config import numpy as np from env.worker import VectorizedEVRP from utils.plot import convert_plt_to_numpy, convert_plt_to_tf, plot_tours from utils import try_import_tensorflow tf = try_import_tensorflow() if __name__ == "__main__": config = read_config("config.json") env = EVRPEnv(config.env_config) tours = [[1, 69, 85, 129, 119, 55, 13, 90, 66, 46, 62, 65, 45, 12, 101, 75, 82, 63, 97, 146, 19, 91, 26, 128], [1, 37, 17, 33, 8, 20, 86, 29, 134, 146, 48, 126, 34, 105, 133, 24, 22, 124, 84, 57, 92, 36, 98], [1, 116, 125, 49, 59, 47, 31, 122, 145, 88, 99, 32, 7, 80, 61, 112, 2, 108, 6, 42, 94, 117, 137], [1, 79, 87, 9, 73, 103, 54, 111, 115, 44, 3, 18, 127], [1, 121, 143, 56, 138, 83, 15, 38, 123, 78, 23, 120, 81, 141, 147, 16, 28, 107, 25, 67, 100, 74, 89, 72, 10, 131], [1, 14, 110, 144, 51, 52, 39, 104, 64, 113, 27, 76, 114, 11, 135, 93, 109, 118, 102, 21, 53, 41, 71, 5], [1, 50, 58, 96, 140, 147, 142, 132, 70, 40, 30, 43, 95, 4, 77, 130, 106, 139, 35, 68, 136, 60]] plt = plot_tours(env, tours, 123) plt.show()
[ "utils.config.read_config", "env.evrp.EVRPEnv", "utils.plot.plot_tours", "utils.try_import_tensorflow" ]
[((249, 272), 'utils.try_import_tensorflow', 'try_import_tensorflow', ([], {}), '()\n', (270, 272), False, 'from utils import try_import_tensorflow\n'), ((317, 343), 'utils.config.read_config', 'read_config', (['"""config.json"""'], {}), "('config.json')\n", (328, 343), False, 'from utils.config import read_config\n'), ((355, 381), 'env.evrp.EVRPEnv', 'EVRPEnv', (['config.env_config'], {}), '(config.env_config)\n', (362, 381), False, 'from env.evrp import EVRPEnv\n'), ((1076, 1103), 'utils.plot.plot_tours', 'plot_tours', (['env', 'tours', '(123)'], {}), '(env, tours, 123)\n', (1086, 1103), False, 'from utils.plot import convert_plt_to_numpy, convert_plt_to_tf, plot_tours\n')]
# -*- coding: utf-8 -*- # # Copyright 2019 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utilities for defining CRM Tag arguments on a parser.""" from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals from googlecloudsdk.calliope import arg_parsers from googlecloudsdk.calliope import base def AddShortNameArgToParser(parser): """Adds positional argument to parser. Args: parser: ArgumentInterceptor, an argparse parser. """ parser.add_argument( "short_name", metavar="SHORT_NAME", help=("User specified, friendly name of the TagKey or TagValue. The field" " must be 1-63 characters, beginning and ending with an " "alphanumeric character ([a-z0-9A-Z]) with dashes (-), " "underscores ( _ ), dots (.), and alphanumerics between. ")) def AddParentArgToParser(parser, required=True, message=""): """Adds argument for the TagKey or TagValue's parent to the parser. Args: parser: ArgumentInterceptor, An argparse parser. required: Boolean, to enforce --parent as a required flag. message: String, replacement help text for flag. """ parser.add_argument( "--parent", metavar="PARENT", required=required, help=message if message else ("Parent of the resource.")) def AddDescriptionArgToParser(parser): """Adds argument for the TagKey's or TagValue's description to the parser. Args: parser: ArgumentInterceptor, An argparse parser. """ parser.add_argument( "--description", metavar="DESCRIPTION", help=("User-assigned description of the TagKey or TagValue. " "Must not exceed 256 characters.")) def AddPurposeArgToParser(parser): """Adds argument for the TagKey's purpose to the parser. Args: parser: ArgumentInterceptor, An argparse parser. """ parser.add_argument( "--purpose", metavar="PURPOSE", choices=["GCE_FIREWALL"], help=("Purpose specifier of the TagKey that can only be set on creation. " "Specifying this field adds additional validation from the policy " "system that corresponds to the purpose.")) def AddPurposeDataArgToParser(parser): """Adds argument for the TagKey's purpose data to the parser. Args: parser: ArgumentInterceptor, An argparse parser. """ parser.add_argument( "--purpose-data", type=arg_parsers.ArgDict( spec={"network": str}, max_length=1, ), help=("Purpose data of the TagKey that can only be set on creation. " "This data is validated by the policy system that corresponds" " to the purpose.")) def AddAsyncArgToParser(parser): """Adds async flag to the parser. Args: parser: ArgumentInterceptor, An argparse parser. """ base.ASYNC_FLAG.AddToParser(parser) def AddResourceNameArgToParser(parser): """Adds resource name argument for the namespaced name or resource name to the parser. Args: parser: ArgumentInterceptor, An argparse parser. """ parser.add_argument( "RESOURCE_NAME", metavar="RESOURCE_NAME", help=("Resource name or namespaced name. The resource name should " "be in the form {resource_type}/{numeric_id}. The namespaced name " "should be in the form {org_id}/{short_name} where short_name " "must be 1-63 characters, beginning and ending with an " "alphanumeric character ([a-z0-9A-Z]) with dashes (-), underscores " "( _ ), dots (.), and alphanumerics between.")) def AddForceArgToParser(parser): """Adds force argument to the parser. Args: parser: ArgumentInterceptor, An argparse parser. """ parser.add_argument( "--force", action="store_true", help=("Force argument to bypass checks.")) def AddPolicyFileArgToParser(parser): """Adds argument for the local Policy file to set. Args: parser: ArgumentInterceptor, An argparse parser. """ parser.add_argument( "POLICY_FILE", metavar="POLICY_FILE", help=( "Path to a local JSON or YAML formatted file containing a valid " "policy. The output of the `get-iam-policy` command is a valid " "file, as is any JSON or YAML file conforming to the structure of " "a [Policy](https://cloud.google.com/iam/reference/rest/v1/Policy).")) def AddTagValueArgToParser(parser): """Adds the TagValue argument to the parser. Args: parser: ArgumentInterceptor, An argparse parser. """ parser.add_argument( "--tag-value", metavar="TAG_VALUE", required=True, help=("Tag value name or namespaced name. The name should " "be in the form tagValues/{numeric_id}. The namespaced name " "should be in the form {org_id}/{tag_key_short_name}/{short_name} " "where short_name must be 1-63 characters, beginning and ending " "with an alphanumeric character ([a-z0-9A-Z]) with dashes (-), " "underscores (_), dots (.), and alphanumerics between.")) def AddLocationArgToParser(parser, message): """Adds argument for the location. Args: parser: ArgumentInterceptor, An argparse parser. message: String, help text for flag. """ parser.add_argument( "--location", metavar="LOCATION", required=False, help=message)
[ "googlecloudsdk.calliope.base.ASYNC_FLAG.AddToParser", "googlecloudsdk.calliope.arg_parsers.ArgDict" ]
[((3367, 3402), 'googlecloudsdk.calliope.base.ASYNC_FLAG.AddToParser', 'base.ASYNC_FLAG.AddToParser', (['parser'], {}), '(parser)\n', (3394, 3402), False, 'from googlecloudsdk.calliope import base\n'), ((2955, 3011), 'googlecloudsdk.calliope.arg_parsers.ArgDict', 'arg_parsers.ArgDict', ([], {'spec': "{'network': str}", 'max_length': '(1)'}), "(spec={'network': str}, max_length=1)\n", (2974, 3011), False, 'from googlecloudsdk.calliope import arg_parsers\n')]
from flask import jsonify from pyawsstarter import Logger from wordservice import create_app # Call the Application Factory function to construct a Flask application instance # using the standard configuration defined in /instance/flask.cfg application = create_app('flask.cfg') @application.errorhandler(Exception) def handle_invalid_usage(error): response = jsonify( { 'error': str(error) } ) response.status_code = 401 # Don't do it this way, just for an example return response if __name__ == '__main__': Logger.get_logger('wordservice').info('Starting wordservice') application.run(host='0.0.0.0', port=8080)
[ "pyawsstarter.Logger.get_logger", "wordservice.create_app" ]
[((257, 280), 'wordservice.create_app', 'create_app', (['"""flask.cfg"""'], {}), "('flask.cfg')\n", (267, 280), False, 'from wordservice import create_app\n'), ((565, 597), 'pyawsstarter.Logger.get_logger', 'Logger.get_logger', (['"""wordservice"""'], {}), "('wordservice')\n", (582, 597), False, 'from pyawsstarter import Logger\n')]
from django.conf.urls import url from django.contrib import admin import views admin.autodiscover() urlpatterns = [ url(r'^pass_forget/$', views.pass_forget, name='pass_forget'), # url(r'^pass_rec/$', views.pass_rec, name='pass_rec'), ]
[ "django.contrib.admin.autodiscover", "django.conf.urls.url" ]
[((81, 101), 'django.contrib.admin.autodiscover', 'admin.autodiscover', ([], {}), '()\n', (99, 101), False, 'from django.contrib import admin\n'), ((123, 183), 'django.conf.urls.url', 'url', (['"""^pass_forget/$"""', 'views.pass_forget'], {'name': '"""pass_forget"""'}), "('^pass_forget/$', views.pass_forget, name='pass_forget')\n", (126, 183), False, 'from django.conf.urls import url\n')]
from django.db import models from .utils import upload_to_file, generate_random_string from .validators import validate_file class CompanyManager(models.Manager): def get_or_none(self, **kwargs): try: return self.get(**kwargs) except Company.DoesNotExist: return None class Company(models.Model): objects = CompanyManager() label = models.CharField(max_length=60, default="default", unique=True) description = models.TextField(blank=True, null=True) metadata = models.TextField() created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) def __repr__(self): return self.__str__() def __str__(self): return f"<Company Label: {self.label} / Description: {self.description}>" class JobManager(models.Manager): def get_or_none(self, **kwargs): try: return self.get(**kwargs) except Job.DoesNotExist: return None class Job(models.Model): objects = JobManager() label = models.CharField(max_length=60, default="default") description = models.TextField(blank=True, null=True) company = models.ForeignKey('job_applications.Company', on_delete=models.SET_NULL, null=True, blank=True) hiring_contact = models.CharField(max_length=60, default="default") hiring_contact_email = models.EmailField(max_length=254) is_open = models.BooleanField(default=True) metadata = models.TextField() created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) def __repr__(self): return self.__str__() def __str__(self): return f"<Job Label: {self.label} / Company: {self.company.label}>" class ApplicationManager(models.Manager): def get_or_none(self, **kwargs): try: return self.get(**kwargs) except Application.DoesNotExist: return None class Application(models.Model): objects = ApplicationManager() PENDING = 1 REVIEWING = 2 SHORTLISTED = 3 INTERVIEWING = 4 ADVANCED_INTERVIEWING = 5 REJECTED = 6 OFFERED = 7 HIRED = 8 NEXT_STAGE = { PENDING: [REVIEWING, SHORTLISTED], REVIEWING: [REJECTED], SHORTLISTED: [INTERVIEWING], INTERVIEWING: [ADVANCED_INTERVIEWING, REJECTED, OFFERED], ADVANCED_INTERVIEWING: [REJECTED, OFFERED], OFFERED: [HIRED, REJECTED], REJECTED: [], HIRED: [] } STAGE_TO_TASKS = { PENDING: { "description": "Checked with hiring manager whether he/she has reviewed the application?", "deadline": 7 }, REVIEWING: { "description": "Have you waited a few days yet?", "deadline": 7 }, SHORTLISTED: { "description": "Have you checked with candidate whether he has gone for the interview?", "deadline": 7 }, INTERVIEWING: { "description": "Have you checked with candidate on the status of his interview?", "deadline": 7 }, ADVANCED_INTERVIEWING: { "description": "Have you checked with candidate on the status of his advanced interview?", "deadline": 7 }, OFFERED: { "description": "Have you checked with candidate whether he has taken up the offer?", "deadline": 7 } } categories = [ (PENDING, "Pending"), (REVIEWING, "Reviewing"), (SHORTLISTED, "Shortlisted"), (INTERVIEWING, "Interviewing"), (ADVANCED_INTERVIEWING, "Advanced Interviewing"), (REJECTED, "Rejected"), (OFFERED, "Offered"), (HIRED, "Hired"), ] user = models.ForeignKey('custom_user.User', on_delete=models.CASCADE, null=False, blank=False, related_name='user') recruiter = models.ForeignKey('custom_user.User', on_delete=models.SET_NULL, null=True, blank=True, related_name='recruiter') email = models.EmailField(max_length=254) job = models.ForeignKey('job_applications.Job', on_delete=models.CASCADE, null=False, blank=False) stage = models.IntegerField( choices=categories, ) resume = models.FileField( upload_to=upload_to_file, validators=[validate_file], help_text="Please upload only PDF or docx files", ) created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) def __repr__(self): return self.__str__() def __str__(self): return f"<Application Username: {self.user.username} / JobId: {self.job.pk} / Stage: {self.stage}>" @property def possible_next_stages(self) -> list: """ retrieves the possible next stages to for the application to move into """ return Application.NEXT_STAGE[self.stage]
[ "django.db.models.FileField", "django.db.models.TextField", "django.db.models.CharField", "django.db.models.ForeignKey", "django.db.models.BooleanField", "django.db.models.EmailField", "django.db.models.IntegerField", "django.db.models.DateTimeField" ]
[((392, 455), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(60)', 'default': '"""default"""', 'unique': '(True)'}), "(max_length=60, default='default', unique=True)\n", (408, 455), False, 'from django.db import models\n'), ((474, 513), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (490, 513), False, 'from django.db import models\n'), ((529, 547), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (545, 547), False, 'from django.db import models\n'), ((562, 601), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (582, 601), False, 'from django.db import models\n'), ((617, 652), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (637, 652), False, 'from django.db import models\n'), ((1064, 1114), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(60)', 'default': '"""default"""'}), "(max_length=60, default='default')\n", (1080, 1114), False, 'from django.db import models\n'), ((1133, 1172), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1149, 1172), False, 'from django.db import models\n'), ((1187, 1286), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""job_applications.Company"""'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'blank': '(True)'}), "('job_applications.Company', on_delete=models.SET_NULL,\n null=True, blank=True)\n", (1204, 1286), False, 'from django.db import models\n'), ((1304, 1354), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(60)', 'default': '"""default"""'}), "(max_length=60, default='default')\n", (1320, 1354), False, 'from django.db import models\n'), ((1382, 1415), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (1399, 1415), False, 'from django.db import models\n'), ((1430, 1463), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1449, 1463), False, 'from django.db import models\n'), ((1479, 1497), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1495, 1497), False, 'from django.db import models\n'), ((1512, 1551), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1532, 1551), False, 'from django.db import models\n'), ((1567, 1602), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (1587, 1602), False, 'from django.db import models\n'), ((3788, 3901), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""custom_user.User"""'], {'on_delete': 'models.CASCADE', 'null': '(False)', 'blank': '(False)', 'related_name': '"""user"""'}), "('custom_user.User', on_delete=models.CASCADE, null=False,\n blank=False, related_name='user')\n", (3805, 3901), False, 'from django.db import models\n'), ((3914, 4031), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""custom_user.User"""'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'blank': '(True)', 'related_name': '"""recruiter"""'}), "('custom_user.User', on_delete=models.SET_NULL, null=True,\n blank=True, related_name='recruiter')\n", (3931, 4031), False, 'from django.db import models\n'), ((4074, 4107), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (4091, 4107), False, 'from django.db import models\n'), ((4118, 4215), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""job_applications.Job"""'], {'on_delete': 'models.CASCADE', 'null': '(False)', 'blank': '(False)'}), "('job_applications.Job', on_delete=models.CASCADE, null=\n False, blank=False)\n", (4135, 4215), False, 'from django.db import models\n'), ((4223, 4262), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'categories'}), '(choices=categories)\n', (4242, 4262), False, 'from django.db import models\n'), ((4291, 4415), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': 'upload_to_file', 'validators': '[validate_file]', 'help_text': '"""Please upload only PDF or docx files"""'}), "(upload_to=upload_to_file, validators=[validate_file],\n help_text='Please upload only PDF or docx files')\n", (4307, 4415), False, 'from django.db import models\n'), ((4457, 4496), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (4477, 4496), False, 'from django.db import models\n'), ((4512, 4547), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (4532, 4547), False, 'from django.db import models\n')]
# Copyright 2020-present Kensho Technologies, LLC. import logging import sys import click import gpg from ..signing import ( DAYS_WARNING_FOR_KEY_EXPIRATION, add_trusted_keys_to_gpg_home_dir, get_days_until_expiry, ) from ..utils import get_temporary_directory logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) def check_gpg_key_expiry( days_warning_for_key_expiration: int = DAYS_WARNING_FOR_KEY_EXPIRATION, ) -> bool: """Check key expirations Args: days_warning_for_key_expiration: warn if a key expires within this number of days. Defaults to 30. Returns: True if no keys are soon to expire or already expired, False otherwise """ with get_temporary_directory() as gpg_homedir: add_trusted_keys_to_gpg_home_dir(gpg_homedir) with gpg.Context(home_dir=gpg_homedir) as ctx: fpr_to_expiry = get_days_until_expiry(ctx) no_keys_close_to_expiry = True for fpr, days_to_expiry in fpr_to_expiry.items(): if days_to_expiry < 0: no_keys_close_to_expiry = False action_message = "KEY IS EXPIRED!" elif days_to_expiry < days_warning_for_key_expiration: no_keys_close_to_expiry = False action_message = "UPDATE KEY ASAP!!!!" else: action_message = "OK for now, but stay tuned" logger.info( "Key (FPR: %s) expires in %s days. %s", fpr, days_to_expiry, action_message ) return no_keys_close_to_expiry @click.command() @click.argument("days_before_warning", required=False) def main(days_before_warning) -> None: """Log info about when GPG keys will expire""" no_keys_close_to_expiry = check_gpg_key_expiry(days_before_warning) if no_keys_close_to_expiry: sys.exit(0) sys.exit(1)
[ "click.argument", "logging.getLogger", "click.command", "gpg.Context", "sys.exit" ]
[((286, 313), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (303, 313), False, 'import logging\n'), ((1637, 1652), 'click.command', 'click.command', ([], {}), '()\n', (1650, 1652), False, 'import click\n'), ((1654, 1707), 'click.argument', 'click.argument', (['"""days_before_warning"""'], {'required': '(False)'}), "('days_before_warning', required=False)\n", (1668, 1707), False, 'import click\n'), ((1926, 1937), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1934, 1937), False, 'import sys\n'), ((1910, 1921), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1918, 1921), False, 'import sys\n'), ((837, 870), 'gpg.Context', 'gpg.Context', ([], {'home_dir': 'gpg_homedir'}), '(home_dir=gpg_homedir)\n', (848, 870), False, 'import gpg\n')]
from os import remove from os.path import isdir, join from pathlib import Path from gdown import download from zipfile import ZipFile def download_ffhq(path): path = join(path, 'ffhq') if not isdir(path): Path(path).mkdir(parents=True, exist_ok=True) path_zip = join(path, 'ffhq.zip') download(id='1EL0pQnON0SFOY8XXn8DX4T6cIcKf4CNu', output=path_zip) with ZipFile(path_zip, 'r') as f: f.extractall(path) remove(path_zip) def set_requires_grad(module, requires_grad): for p in module.parameters(): p.requires_grad = requires_grad return module
[ "os.remove", "zipfile.ZipFile", "os.path.isdir", "gdown.download", "pathlib.Path", "os.path.join" ]
[((171, 189), 'os.path.join', 'join', (['path', '"""ffhq"""'], {}), "(path, 'ffhq')\n", (175, 189), False, 'from os.path import isdir, join\n'), ((201, 212), 'os.path.isdir', 'isdir', (['path'], {}), '(path)\n', (206, 212), False, 'from os.path import isdir, join\n'), ((287, 309), 'os.path.join', 'join', (['path', '"""ffhq.zip"""'], {}), "(path, 'ffhq.zip')\n", (291, 309), False, 'from os.path import isdir, join\n'), ((318, 383), 'gdown.download', 'download', ([], {'id': '"""1EL0pQnON0SFOY8XXn8DX4T6cIcKf4CNu"""', 'output': 'path_zip'}), "(id='1EL0pQnON0SFOY8XXn8DX4T6cIcKf4CNu', output=path_zip)\n", (326, 383), False, 'from gdown import download\n'), ((465, 481), 'os.remove', 'remove', (['path_zip'], {}), '(path_zip)\n', (471, 481), False, 'from os import remove\n'), ((397, 419), 'zipfile.ZipFile', 'ZipFile', (['path_zip', '"""r"""'], {}), "(path_zip, 'r')\n", (404, 419), False, 'from zipfile import ZipFile\n'), ((222, 232), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (226, 232), False, 'from pathlib import Path\n')]
import pdb import time import lib.tf_silent import numpy as np import tensorflow as tf import matplotlib.pyplot as plt import matplotlib.cm as cm from matplotlib.colors import Normalize from matplotlib.gridspec import GridSpec import os import pickle import argparse from lib.pinn import PINN from lib.network import Network from lib.optimizer import Optimizer def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('-i', '--maxiter', type=int, default=2000) parser.add_argument('-ntr', '--num-train-samples', type=int, default=10000) parser.add_argument('-nte', '--num-test-samples', type=int, default=100) parser.add_argument('-n', '--network', type=str, default='pinn') parser.add_argument('-l', '--loss', type=str, default='l2') parser.add_argument('-gi', '--gradient-interval', type=int, default=100) parser.add_argument('--gt-path', type=str, default='data/pinn.pkl') return parser.parse_known_args()[0] def uv(network, xy): """ Compute flow velocities (u, v) for the network with output (psi, p). Args: xy: network input variables as ndarray. Returns: (u, v) as ndarray. """ xy = tf.constant(xy) with tf.GradientTape() as g: g.watch(xy) psi_p = network(xy) psi_p_j = g.batch_jacobian(psi_p, xy) u = psi_p_j[..., 0, 1] v = -psi_p_j[..., 0, 0] return u.numpy(), v.numpy() def contour(grid, x, y, z, title, levels=50): """ Contour plot. Args: grid: plot position. x: x-array. y: y-array. z: z-array. title: title string. levels: number of contour lines. """ # get the value range vmin = -2e-1 vmax = 2e-1 if (title == 'psi'): vmax = 1.2e-1 vmin = -1e-1 if (title == 'p'): vmax = 6.1e-1 vmin = -5e-1 if (title == 'u'): vmax = 1.1e+0 vmin = -2e-1 if (title == 'v'): vmax = 2.1e-1 vmin = -2e-1 if (title == 'dpsi'): vmax = 1.1e-2 vmin = 0.0 if (title == 'dp'): vmax = 4.1e-1 vmin = 0.0 if (title == 'du'): vmax = 1.1e-1 vmin = 0.0 if (title == 'dv'): vmax = 8.1e-2 vmin = 0.0 # plot a contour plt.subplot(grid) print(title, vmin, vmax) plt.contour(x, y, z, colors='k', linewidths=0.2, levels=levels, vmin=vmin, vmax=vmax) plt.contourf(x, y, z, cmap='rainbow', levels=levels, vmin=vmin, vmax=vmax) plt.title(title) m = plt.cm.ScalarMappable(cmap='rainbow', norm=Normalize(vmin=vmin, vmax=vmax)) m.set_array(z) m.set_clim(vmin, vmax) cbar = plt.colorbar(m, pad=0.03, aspect=25, format='%.0e') cbar.mappable.set_clim(vmin, vmax) if __name__ == '__main__': """ Test the physics informed neural network (PINN) model for the cavity flow governed by the steady Navier-Stokes equation. """ args = parse_args() # number of training samples num_train_samples = args.num_train_samples # number of test samples num_test_samples = args.num_test_samples # inlet flow velocity u0 = 1 # density rho = 1 # viscosity nu = 0.01 # build a core network model network = Network().build() network.summary() # build a PINN model model = PINN(network, rho=rho, nu=nu).build() # create training input xy_eqn = np.random.rand(num_train_samples, 2) xy_ub = np.random.rand(num_train_samples//2, 2) # top-bottom boundaries xy_ub[..., 1] = np.round(xy_ub[..., 1]) # y-position is 0 or 1 xy_lr = np.random.rand(num_train_samples//2, 2) # left-right boundaries xy_lr[..., 0] = np.round(xy_lr[..., 0]) # x-position is 0 or 1 xy_bnd = np.random.permutation(np.concatenate([xy_ub, xy_lr])) x_train = [xy_eqn, xy_bnd] # create training output zeros = np.zeros((num_train_samples, 2)) uv_bnd = np.zeros((num_train_samples, 2)) uv_bnd[..., 0] = u0 * np.floor(xy_bnd[..., 1]) y_train = [zeros, zeros, uv_bnd] # train the model using L-BFGS-B algorithm optimizer = Optimizer(model=model, x_train=x_train, y_train=y_train, dict_params=args.__dict__) optimizer.fit() # create meshgrid coordinates (x, y) for test plots x = np.linspace(0, 1, num_test_samples) y = np.linspace(0, 1, num_test_samples) x, y = np.meshgrid(x, y) xy = np.stack([x.flatten(), y.flatten()], axis=-1) # predict (psi, p) psi_p = network.predict(xy, batch_size=len(xy)) psi, p = [ psi_p[..., i].reshape(x.shape) for i in range(psi_p.shape[-1]) ] # compute (u, v) u, v = uv(network, xy) u = u.reshape(x.shape) v = v.reshape(x.shape) if os.path.isfile(args.gt_path): with open(args.gt_path, 'rb') as f: data = pickle.load(f) x_gt, y_gt, psi_gt, p_gt, u_gt, v_gt = data fig = plt.figure(figsize=(6, 5)) gs = GridSpec(2, 2) contour(gs[0, 0], x, y, np.abs(psi - psi_gt), 'dpsi') contour(gs[0, 1], x, y, np.abs(p - p_gt), 'dp') contour(gs[1, 0], x, y, np.abs(u - u_gt), 'du') contour(gs[1, 1], x, y, np.abs(v - v_gt), 'dv') plt.tight_layout() plt.savefig(os.path.join('figures', list(args.__dict__.values())[:-1].__str__() + str(time.time()) + '_error.png')) plt.show() plt.close() fig = plt.figure(figsize=(6, 5)) gs = GridSpec(2, 2) contour(gs[0, 0], x, y, psi, 'psi') contour(gs[0, 1], x, y, p, 'p') contour(gs[1, 0], x, y, u, 'u') contour(gs[1, 1], x, y, v, 'v') plt.tight_layout() plt.savefig(os.path.join('figures', list(args.__dict__.values())[:-1].__str__() + str(time.time()) + '.png')) plt.show() plt.close() else: # plot test results fig = plt.figure(figsize=(6, 5)) gs = GridSpec(2, 2) contour(gs[0, 0], x, y, psi, 'psi') contour(gs[0, 1], x, y, p, 'p') contour(gs[1, 0], x, y, u, 'u') contour(gs[1, 1], x, y, v, 'v') data = [x, y, psi, p, u, v] with open(args.gt_path, 'wb') as f: pickle.dump(data, f) plt.tight_layout() plt.savefig(os.path.join('figures', list(args.__dict__.values())[:-1].__str__() + str(time.time()) + '.png')) plt.show() plt.close()
[ "matplotlib.pyplot.title", "pickle.dump", "numpy.abs", "argparse.ArgumentParser", "numpy.floor", "os.path.isfile", "matplotlib.pyplot.figure", "matplotlib.pyplot.contourf", "matplotlib.pyplot.contour", "pickle.load", "numpy.round", "matplotlib.pyplot.tight_layout", "numpy.meshgrid", "matplotlib.colors.Normalize", "matplotlib.pyplot.close", "matplotlib.pyplot.colorbar", "numpy.linspace", "lib.pinn.PINN", "matplotlib.pyplot.show", "lib.network.Network", "tensorflow.constant", "numpy.concatenate", "matplotlib.pyplot.subplot", "lib.optimizer.Optimizer", "numpy.zeros", "time.time", "numpy.random.rand", "matplotlib.gridspec.GridSpec", "tensorflow.GradientTape" ]
[((395, 420), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (418, 420), False, 'import argparse\n'), ((1189, 1204), 'tensorflow.constant', 'tf.constant', (['xy'], {}), '(xy)\n', (1200, 1204), True, 'import tensorflow as tf\n'), ((2285, 2302), 'matplotlib.pyplot.subplot', 'plt.subplot', (['grid'], {}), '(grid)\n', (2296, 2302), True, 'import matplotlib.pyplot as plt\n'), ((2336, 2425), 'matplotlib.pyplot.contour', 'plt.contour', (['x', 'y', 'z'], {'colors': '"""k"""', 'linewidths': '(0.2)', 'levels': 'levels', 'vmin': 'vmin', 'vmax': 'vmax'}), "(x, y, z, colors='k', linewidths=0.2, levels=levels, vmin=vmin,\n vmax=vmax)\n", (2347, 2425), True, 'import matplotlib.pyplot as plt\n'), ((2426, 2500), 'matplotlib.pyplot.contourf', 'plt.contourf', (['x', 'y', 'z'], {'cmap': '"""rainbow"""', 'levels': 'levels', 'vmin': 'vmin', 'vmax': 'vmax'}), "(x, y, z, cmap='rainbow', levels=levels, vmin=vmin, vmax=vmax)\n", (2438, 2500), True, 'import matplotlib.pyplot as plt\n'), ((2505, 2521), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (2514, 2521), True, 'import matplotlib.pyplot as plt\n'), ((2666, 2717), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['m'], {'pad': '(0.03)', 'aspect': '(25)', 'format': '"""%.0e"""'}), "(m, pad=0.03, aspect=25, format='%.0e')\n", (2678, 2717), True, 'import matplotlib.pyplot as plt\n'), ((3409, 3445), 'numpy.random.rand', 'np.random.rand', (['num_train_samples', '(2)'], {}), '(num_train_samples, 2)\n', (3423, 3445), True, 'import numpy as np\n'), ((3458, 3499), 'numpy.random.rand', 'np.random.rand', (['(num_train_samples // 2)', '(2)'], {}), '(num_train_samples // 2, 2)\n', (3472, 3499), True, 'import numpy as np\n'), ((3543, 3566), 'numpy.round', 'np.round', (['xy_ub[..., 1]'], {}), '(xy_ub[..., 1])\n', (3551, 3566), True, 'import numpy as np\n'), ((3611, 3652), 'numpy.random.rand', 'np.random.rand', (['(num_train_samples // 2)', '(2)'], {}), '(num_train_samples // 2, 2)\n', (3625, 3652), True, 'import numpy as np\n'), ((3696, 3719), 'numpy.round', 'np.round', (['xy_lr[..., 0]'], {}), '(xy_lr[..., 0])\n', (3704, 3719), True, 'import numpy as np\n'), ((3892, 3924), 'numpy.zeros', 'np.zeros', (['(num_train_samples, 2)'], {}), '((num_train_samples, 2))\n', (3900, 3924), True, 'import numpy as np\n'), ((3938, 3970), 'numpy.zeros', 'np.zeros', (['(num_train_samples, 2)'], {}), '((num_train_samples, 2))\n', (3946, 3970), True, 'import numpy as np\n'), ((4123, 4211), 'lib.optimizer.Optimizer', 'Optimizer', ([], {'model': 'model', 'x_train': 'x_train', 'y_train': 'y_train', 'dict_params': 'args.__dict__'}), '(model=model, x_train=x_train, y_train=y_train, dict_params=args.\n __dict__)\n', (4132, 4211), False, 'from lib.optimizer import Optimizer\n'), ((4292, 4327), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'num_test_samples'], {}), '(0, 1, num_test_samples)\n', (4303, 4327), True, 'import numpy as np\n'), ((4336, 4371), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'num_test_samples'], {}), '(0, 1, num_test_samples)\n', (4347, 4371), True, 'import numpy as np\n'), ((4383, 4400), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (4394, 4400), True, 'import numpy as np\n'), ((4720, 4748), 'os.path.isfile', 'os.path.isfile', (['args.gt_path'], {}), '(args.gt_path)\n', (4734, 4748), False, 'import os\n'), ((1214, 1231), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (1229, 1231), True, 'import tensorflow as tf\n'), ((3787, 3817), 'numpy.concatenate', 'np.concatenate', (['[xy_ub, xy_lr]'], {}), '([xy_ub, xy_lr])\n', (3801, 3817), True, 'import numpy as np\n'), ((3997, 4021), 'numpy.floor', 'np.floor', (['xy_bnd[..., 1]'], {}), '(xy_bnd[..., 1])\n', (4005, 4021), True, 'import numpy as np\n'), ((4894, 4920), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 5)'}), '(figsize=(6, 5))\n', (4904, 4920), True, 'import matplotlib.pyplot as plt\n'), ((4934, 4948), 'matplotlib.gridspec.GridSpec', 'GridSpec', (['(2)', '(2)'], {}), '(2, 2)\n', (4942, 4948), False, 'from matplotlib.gridspec import GridSpec\n'), ((5187, 5205), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (5203, 5205), True, 'import matplotlib.pyplot as plt\n'), ((5371, 5381), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5379, 5381), True, 'import matplotlib.pyplot as plt\n'), ((5390, 5401), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (5399, 5401), True, 'import matplotlib.pyplot as plt\n'), ((5417, 5443), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 5)'}), '(figsize=(6, 5))\n', (5427, 5443), True, 'import matplotlib.pyplot as plt\n'), ((5457, 5471), 'matplotlib.gridspec.GridSpec', 'GridSpec', (['(2)', '(2)'], {}), '(2, 2)\n', (5465, 5471), False, 'from matplotlib.gridspec import GridSpec\n'), ((5644, 5662), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (5660, 5662), True, 'import matplotlib.pyplot as plt\n'), ((5789, 5799), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5797, 5799), True, 'import matplotlib.pyplot as plt\n'), ((5808, 5819), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (5817, 5819), True, 'import matplotlib.pyplot as plt\n'), ((5872, 5898), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 5)'}), '(figsize=(6, 5))\n', (5882, 5898), True, 'import matplotlib.pyplot as plt\n'), ((5912, 5926), 'matplotlib.gridspec.GridSpec', 'GridSpec', (['(2)', '(2)'], {}), '(2, 2)\n', (5920, 5926), False, 'from matplotlib.gridspec import GridSpec\n'), ((6212, 6230), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (6228, 6230), True, 'import matplotlib.pyplot as plt\n'), ((6357, 6367), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6365, 6367), True, 'import matplotlib.pyplot as plt\n'), ((6376, 6387), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (6385, 6387), True, 'import matplotlib.pyplot as plt\n'), ((2573, 2604), 'matplotlib.colors.Normalize', 'Normalize', ([], {'vmin': 'vmin', 'vmax': 'vmax'}), '(vmin=vmin, vmax=vmax)\n', (2582, 2604), False, 'from matplotlib.colors import Normalize\n'), ((3252, 3261), 'lib.network.Network', 'Network', ([], {}), '()\n', (3259, 3261), False, 'from lib.network import Network\n'), ((3329, 3358), 'lib.pinn.PINN', 'PINN', (['network'], {'rho': 'rho', 'nu': 'nu'}), '(network, rho=rho, nu=nu)\n', (3333, 3358), False, 'from lib.pinn import PINN\n'), ((4813, 4827), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (4824, 4827), False, 'import pickle\n'), ((4981, 5001), 'numpy.abs', 'np.abs', (['(psi - psi_gt)'], {}), '(psi - psi_gt)\n', (4987, 5001), True, 'import numpy as np\n'), ((5043, 5059), 'numpy.abs', 'np.abs', (['(p - p_gt)'], {}), '(p - p_gt)\n', (5049, 5059), True, 'import numpy as np\n'), ((5099, 5115), 'numpy.abs', 'np.abs', (['(u - u_gt)'], {}), '(u - u_gt)\n', (5105, 5115), True, 'import numpy as np\n'), ((5155, 5171), 'numpy.abs', 'np.abs', (['(v - v_gt)'], {}), '(v - v_gt)\n', (5161, 5171), True, 'import numpy as np\n'), ((6183, 6203), 'pickle.dump', 'pickle.dump', (['data', 'f'], {}), '(data, f)\n', (6194, 6203), False, 'import pickle\n'), ((5300, 5311), 'time.time', 'time.time', ([], {}), '()\n', (5309, 5311), False, 'import time\n'), ((5757, 5768), 'time.time', 'time.time', ([], {}), '()\n', (5766, 5768), False, 'import time\n'), ((6325, 6336), 'time.time', 'time.time', ([], {}), '()\n', (6334, 6336), False, 'import time\n')]
"""The command line application for running the advent of code solutions.""" import argparse import importlib import pathlib from typing import cast from aoc2021.lib import ModSolution def main() -> int: """Run the main CLI entry point.""" parser = argparse.ArgumentParser( description="Run the advent of code puzzle solutions." ) parser.add_argument("day", type=int, help="the day number") parser.add_argument( "-e", "--extra", action="store_true", help="run the alternative community-based solution", ) args = parser.parse_args() # Read the input data data_file = pathlib.Path(f"input/{args.day:02d}.txt") if not data_file.exists(): print(f"Input data file not found: {data_file}") return 1 with data_file.open() as fh: raw_data = fh.read() # Load the solution module if args.extra: submodule = "solutions_extra" else: submodule = "solutions" module = f"aoc2021.{submodule}.day{args.day:02d}" try: mod_solution = cast(ModSolution, importlib.import_module(module)) except ModuleNotFoundError as exc: print(exc) return 1 # Get the solutions part1, part2 = mod_solution.run(raw_data) print(f"Part 1: {part1}") print(f"Part 2: {part2}") return 0
[ "pathlib.Path", "argparse.ArgumentParser", "importlib.import_module" ]
[((260, 339), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run the advent of code puzzle solutions."""'}), "(description='Run the advent of code puzzle solutions.')\n", (283, 339), False, 'import argparse\n'), ((646, 687), 'pathlib.Path', 'pathlib.Path', (['f"""input/{args.day:02d}.txt"""'], {}), "(f'input/{args.day:02d}.txt')\n", (658, 687), False, 'import pathlib\n'), ((1090, 1121), 'importlib.import_module', 'importlib.import_module', (['module'], {}), '(module)\n', (1113, 1121), False, 'import importlib\n')]
import csv import logging import random from argparse import ArgumentParser from irc import IRC from irc.messages import IRCMessage from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer positives = [ "(˶‾᷄ ⁻̫ ‾᷅˵)", "(っˆڡˆς)", "♥‿♥", "(づ。◕‿‿◕。)づ", "٩( ๑╹ ꇴ╹)۶", "ᕕ( ᐛ )ᕗ", "٩(^‿^)۶", "\(^O^)/" ] negatives = [ "(ノ ゜Д゜)ノ ︵ ┻━┻", "(;´༎ຶД༎ຶ`)", "( ͡° ʖ̯ ͡°)", "(ノಠ益ಠ)ノ彡┻━┻", "t(ಠ益ಠt)", "༼ ༎ຶ ෴ ༎ຶ༽", "┻━┻ ︵ヽ(`Д´)ノ︵ ┻━┻" ] def main() -> None: """Main entrypoint of the bot.""" # Configure the default logging format logging.basicConfig( format="[%(asctime)s] [%(levelname)-5s] %(message)s", level=logging.INFO, datefmt="%Y-%m-%d %H:%M:%S" ) # Create an argument parser for parsing CLI arguments parser = ArgumentParser(description="An IRC bot providing sentiment analysis and reactions using ASCII emojis") # Add parameters for the server connection parser.add_argument("-s", "--server", required=True, type=str, help="The server to connect to") # Add optional parameters for the server connection parser.add_argument("-p", "--port", default=6697, type=int, help="The port to connect to") parser.add_argument("--use-tls", default=True, type=bool, help="Whether or not to use TLS") parser.add_argument("-t", "--timeout", default=300, type=float, help="Connection timeout in seconds") # Add optional parameters for authentication etc. parser.add_argument("-u", "--user", default="sentiment-bot", help="Username to use when connecting to the IRC server") parser.add_argument("-n", "--nick", default="sentiment-bot", help="Nick to use when connecting to the IRC server") parser.add_argument("-g", "--gecos", default="Sentiment Bot v1.0.2 (github.com/AlexGustafsson/irc-sentiment-bot)") parser.add_argument("-c", "--channel", required=True, action='append', help="Channel to join. May be used more than once") # Parse the arguments options = parser.parse_args() # Create an IRC connection irc = IRC( options.server, options.port, options.user, options.nick, timeout=options.timeout, use_tls=options.use_tls ) irc.connect() # Connect to specified channels for channel in options.channel: irc.join(channel) # The last analyzed result lastMessageValence = None # Handle all messages for message in irc.messages: if not isinstance(message, IRCMessage): continue target = message.author if message.target == options.nick else message.target if message.message == "{}: help".format(options.nick): irc.send_message(target, "I perform a simple sentiment analysis on your messages and respond with emojis") irc.send_message(target, "You can debug the sentiment analysis of the last message like so:") irc.send_message(target, "{}: debug".format(options.nick)) elif message.message == "{}: debug".format(options.nick): if lastMessageValence is not None: compound = "compound: {}".format(lastMessageValence["compound"]) debug = ", ".join(["'{}': {}".format(text, valence) for text, valence in lastMessageValence["debug"]]) irc.send_message(target, "{}. {}".format(compound, debug)) else: analyzer = SentimentIntensityAnalyzer() scores = analyzer.polarity_scores(message.message) if scores["compound"] >= 0.6: irc.send_message(target, random.choice(positives)) lastMessageValence = scores elif scores["compound"] <= -0.6: irc.send_message(target, random.choice(negatives)) lastMessageValence = scores if __name__ == "__main__": main()
[ "argparse.ArgumentParser", "logging.basicConfig", "random.choice", "irc.IRC", "vaderSentiment.vaderSentiment.SentimentIntensityAnalyzer" ]
[((604, 730), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""[%(asctime)s] [%(levelname)-5s] %(message)s"""', 'level': 'logging.INFO', 'datefmt': '"""%Y-%m-%d %H:%M:%S"""'}), "(format='[%(asctime)s] [%(levelname)-5s] %(message)s',\n level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S')\n", (623, 730), False, 'import logging\n'), ((829, 936), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""An IRC bot providing sentiment analysis and reactions using ASCII emojis"""'}), "(description=\n 'An IRC bot providing sentiment analysis and reactions using ASCII emojis')\n", (843, 936), False, 'from argparse import ArgumentParser\n'), ((2079, 2195), 'irc.IRC', 'IRC', (['options.server', 'options.port', 'options.user', 'options.nick'], {'timeout': 'options.timeout', 'use_tls': 'options.use_tls'}), '(options.server, options.port, options.user, options.nick, timeout=\n options.timeout, use_tls=options.use_tls)\n', (2082, 2195), False, 'from irc import IRC\n'), ((3426, 3454), 'vaderSentiment.vaderSentiment.SentimentIntensityAnalyzer', 'SentimentIntensityAnalyzer', ([], {}), '()\n', (3452, 3454), False, 'from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer\n'), ((3601, 3625), 'random.choice', 'random.choice', (['positives'], {}), '(positives)\n', (3614, 3625), False, 'import random\n'), ((3757, 3781), 'random.choice', 'random.choice', (['negatives'], {}), '(negatives)\n', (3770, 3781), False, 'import random\n')]
import gzip import pandas as pd import numpy as np import io import os import re import torch import torch.utils.data as data_utils import subprocess import zipfile import zlib from Bio import AlignIO from Bio.SeqIO.FastaIO import FastaIterator, as_fasta from Bio.Align.Applications import MuscleCommandline class IndexTensorDataset: """ Identical to torch.utils.data.Dataset.TensorDataset, but __getitem__ also returns indices as last value in tuple """ def __init__(self, *tensors): assert all(tensors[0].size(0) == tensor.size(0) for tensor in tensors) self.tensors = tensors def __getitem__(self, index): t = [tensor[index] for tensor in self.tensors] t.append(index) return(tuple(t)) def __len__(self): return self.tensors[0].size(0) class GeneDataset: """ Container object that provides access to the PyTorch Dataset and Dataloader objects needed for one experiment """ def __init__(self, data_file, batch_size, test_split, shuffle_dataset, random_seed, validation_split=0): # Load tensor data data = torch.load(data_file) dataset = IndexTensorDataset(data['X'], data['y']) # Test / train split dataset_size = len(dataset) indices = list(range(dataset_size)) split = int(np.floor(test_split * dataset_size)) if shuffle_dataset: np.random.seed(random_seed) np.random.shuffle(indices) train_indices, test_indices = indices[split:], indices[:split] # Initialize Dataloaders train_sampler = data_utils.SubsetRandomSampler(train_indices) test_sampler = data_utils.SubsetRandomSampler(test_indices) self.train_loader = data_utils.DataLoader(dataset, batch_size=batch_size, sampler=train_sampler) self.test_loader = data_utils.DataLoader(dataset, batch_size=batch_size, sampler=test_sampler) self.isolates = data['isolates'] def transform(input, output): """Snakemake function Split and transform input data """ genesdf = pd.read_csv(input[1], index_col=0, header=0) metadf = pd.read_csv(input[0]) all_isolates = metadf["Isolate"].to_numpy('U') encoding = { 'S': 0, 'I': 0.5, 'R': 1 } pattern = re.compile("(\w{3}).pt$") for f in output: m = pattern.match(f, len(f)-6) d = m.group(1) # print(d) y = metadf[d] omit = pd.isnull(y) isolates = all_isolates[~omit] y = y.loc[~omit] X = genesdf.loc[isolates].to_numpy() ylabels = np.array([ encoding[v] for v in y ]) # print(ylabels.shape) # print(X.shape) # print(isolates.shape) # print(isolates[0]) # print(isolates.dtype) y_tensor = torch.from_numpy(ylabels) X_tensor = torch.from_numpy(X) torch.save({'y': y_tensor, 'X': X_tensor, 'isolates': isolates}, f) def align(fh, transl=True): """ Translate and align pangenome cluster fasta file """ align_exe = MuscleCommandline( r'C:\Users\matthewwhiteside\workspace\b_ecoli\muscle\muscle3.8.31_i86win32.exe', clwstrict=True) # Align on stdin/stdout proc = subprocess.Popen(str(align_exe), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, shell=False) sequences = FastaIterator(fh) inp = [ ">"+record.id+"\n"+str(record.translate(table="Bacterial").seq)+"\n" for record in sequences ] inp = "".join(inp) align, err = proc.communicate(input=inp) return(align) def decompress(zipf, transl=True): """ Decompress gzipped fasta files in zip archive """ with zipfile.ZipFile(zipf, "r") as zh: i = 0 for z in zh.infolist(): if not z.is_dir(): print(z.filename) gz = zh.read(z.filename) fh = io.BytesIO(gz) with gzip.open(fh, 'rb') as gz: fn = gz.read() yield fn.decode('utf-8') if __name__ == "__main__": for fn in decompress("data/raw/ecoli/pan_genome_sequences.zip"): with io.StringIO(fn) as ifh: with open('data/tmp/test.aln', 'w') as ofh: ofh.write(align(ifh)) break
[ "torch.from_numpy", "io.StringIO", "zipfile.ZipFile", "numpy.random.seed", "torch.utils.data.DataLoader", "io.BytesIO", "pandas.read_csv", "Bio.SeqIO.FastaIO.FastaIterator", "torch.load", "numpy.floor", "gzip.open", "pandas.isnull", "Bio.Align.Applications.MuscleCommandline", "torch.save", "numpy.array", "torch.utils.data.SubsetRandomSampler", "numpy.random.shuffle", "re.compile" ]
[((2335, 2379), 'pandas.read_csv', 'pd.read_csv', (['input[1]'], {'index_col': '(0)', 'header': '(0)'}), '(input[1], index_col=0, header=0)\n', (2346, 2379), True, 'import pandas as pd\n'), ((2393, 2414), 'pandas.read_csv', 'pd.read_csv', (['input[0]'], {}), '(input[0])\n', (2404, 2414), True, 'import pandas as pd\n'), ((2549, 2575), 're.compile', 're.compile', (['"""(\\\\w{3}).pt$"""'], {}), "('(\\\\w{3}).pt$')\n", (2559, 2575), False, 'import re\n'), ((3328, 3457), 'Bio.Align.Applications.MuscleCommandline', 'MuscleCommandline', (['"""C:\\\\Users\\\\matthewwhiteside\\\\workspace\\\\b_ecoli\\\\muscle\\\\muscle3.8.31_i86win32.exe"""'], {'clwstrict': '(True)'}), "(\n 'C:\\\\Users\\\\matthewwhiteside\\\\workspace\\\\b_ecoli\\\\muscle\\\\muscle3.8.31_i86win32.exe'\n , clwstrict=True)\n", (3345, 3457), False, 'from Bio.Align.Applications import MuscleCommandline\n'), ((3699, 3716), 'Bio.SeqIO.FastaIO.FastaIterator', 'FastaIterator', (['fh'], {}), '(fh)\n', (3712, 3716), False, 'from Bio.SeqIO.FastaIO import FastaIterator, as_fasta\n'), ((1166, 1187), 'torch.load', 'torch.load', (['data_file'], {}), '(data_file)\n', (1176, 1187), False, 'import torch\n'), ((1650, 1695), 'torch.utils.data.SubsetRandomSampler', 'data_utils.SubsetRandomSampler', (['train_indices'], {}), '(train_indices)\n', (1680, 1695), True, 'import torch.utils.data as data_utils\n'), ((1719, 1763), 'torch.utils.data.SubsetRandomSampler', 'data_utils.SubsetRandomSampler', (['test_indices'], {}), '(test_indices)\n', (1749, 1763), True, 'import torch.utils.data as data_utils\n'), ((1793, 1869), 'torch.utils.data.DataLoader', 'data_utils.DataLoader', (['dataset'], {'batch_size': 'batch_size', 'sampler': 'train_sampler'}), '(dataset, batch_size=batch_size, sampler=train_sampler)\n', (1814, 1869), True, 'import torch.utils.data as data_utils\n'), ((1997, 2072), 'torch.utils.data.DataLoader', 'data_utils.DataLoader', (['dataset'], {'batch_size': 'batch_size', 'sampler': 'test_sampler'}), '(dataset, batch_size=batch_size, sampler=test_sampler)\n', (2018, 2072), True, 'import torch.utils.data as data_utils\n'), ((2715, 2727), 'pandas.isnull', 'pd.isnull', (['y'], {}), '(y)\n', (2724, 2727), True, 'import pandas as pd\n'), ((2856, 2890), 'numpy.array', 'np.array', (['[encoding[v] for v in y]'], {}), '([encoding[v] for v in y])\n', (2864, 2890), True, 'import numpy as np\n'), ((3063, 3088), 'torch.from_numpy', 'torch.from_numpy', (['ylabels'], {}), '(ylabels)\n', (3079, 3088), False, 'import torch\n'), ((3108, 3127), 'torch.from_numpy', 'torch.from_numpy', (['X'], {}), '(X)\n', (3124, 3127), False, 'import torch\n'), ((3137, 3204), 'torch.save', 'torch.save', (["{'y': y_tensor, 'X': X_tensor, 'isolates': isolates}", 'f'], {}), "({'y': y_tensor, 'X': X_tensor, 'isolates': isolates}, f)\n", (3147, 3204), False, 'import torch\n'), ((4029, 4055), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zipf', '"""r"""'], {}), "(zipf, 'r')\n", (4044, 4055), False, 'import zipfile\n'), ((1377, 1412), 'numpy.floor', 'np.floor', (['(test_split * dataset_size)'], {}), '(test_split * dataset_size)\n', (1385, 1412), True, 'import numpy as np\n'), ((1454, 1481), 'numpy.random.seed', 'np.random.seed', (['random_seed'], {}), '(random_seed)\n', (1468, 1481), True, 'import numpy as np\n'), ((1494, 1520), 'numpy.random.shuffle', 'np.random.shuffle', (['indices'], {}), '(indices)\n', (1511, 1520), True, 'import numpy as np\n'), ((4491, 4506), 'io.StringIO', 'io.StringIO', (['fn'], {}), '(fn)\n', (4502, 4506), False, 'import io\n'), ((4236, 4250), 'io.BytesIO', 'io.BytesIO', (['gz'], {}), '(gz)\n', (4246, 4250), False, 'import io\n'), ((4272, 4291), 'gzip.open', 'gzip.open', (['fh', '"""rb"""'], {}), "(fh, 'rb')\n", (4281, 4291), False, 'import gzip\n')]
"""construct landmark models """ import json def read_json(fname): with open(fname) as fid: data = json.load(fid) return data def write_json(model, fname): with open(fname, "w") as fid: json.dump(model, fid) index = dict( jaw=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], left_brow=[17, 18, 19, 20, 21], right_brow=[22, 23, 24, 25, 26], nose_bridge=[27, 28, 29, 30], nose_lower=[31, 32, 33, 34, 35], left_eye=[36, 37, 38, 39, 40, 41], right_eye=[42, 43, 44, 45, 46, 47], mouth_outer=[48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59], mouth_inner=[60, 61, 62, 63, 64, 65, 66, 67], left_pupil=[68], right_pupil=[69]) keys = ["jaw", "left_brow", "right_brow", "nose_bridge", "nose_lower", "left_eye", "right_eye", "mouth_outer", "mouth_inner"] pos = [ # jaw [219., 287.], [220., 334.], [227., 381.], [237., 428.], [254., 471.], [284., 511.], [317., 542.], [357., 568.], [400., 576.], [444., 566.], [482., 540.], [518., 507.], [546., 467.], [564., 424.], [571., 376.], [576., 329.], [576., 281.], # left_brow [254., 252.], [274., 233.], [304., 227.], [335., 232.], [363., 244.], # right_brow [422., 241.], [450., 229.], [482., 223.], [512., 228.], [535., 245.], # nose_bridge [394., 277.], [394., 309.], [394., 341.], [395., 371.], [360., 392.], # nose_lower [377., 399.], [396., 404.], [414., 398.], [430., 391.], # left_eye [288., 283.], [307., 271.], [330., 271.], [348., 285.], [329., 290.], [306., 290.], # right_eye [442., 283.], [459., 270.], [482., 269.], [501., 279.7], [484., 287.], [462., 288.], # mouth_outer [328., 448.], [353., 437.], [378., 434.], [396., 438.], [416., 432.], [442., 437.], [468., 446.], [444., 472.], [419., 484.], [398., 488.], [379., 486.], [353., 475.], # mouth_inner [340., 451.], [378., 448.], [397., 450.], [415., 448.], [457., 449.], [417., 462.], [397., 463.], [377., 460.], # left_pupil [319., 278.], # right_pupil [474., 277.] ] model = dict(pos=pos, index=index, keys=keys) if __name__ == "__main__": write_json(model, "model.json")
[ "json.dump", "json.load" ]
[((113, 127), 'json.load', 'json.load', (['fid'], {}), '(fid)\n', (122, 127), False, 'import json\n'), ((218, 239), 'json.dump', 'json.dump', (['model', 'fid'], {}), '(model, fid)\n', (227, 239), False, 'import json\n')]
from django.db import models import datetime from easy_thumbnails.files import get_thumbnailer from filer.fields.image import FilerImageField import shortuuid class Calendar(models.Model): name = models.CharField(max_length=250) uuid = models.CharField(max_length=22) YEAR_CHOICES = [(r, r) for r in range(1984, datetime.date.today().year+1)] year = models.IntegerField( null=True, max_length=4, choices=YEAR_CHOICES, default=datetime.datetime.now().year ) def create_uuid(self): return shortuuid.uuid() def save(self, *args, **kwargs): if not self.uuid: self.uuid = self.create_uuid() super(Calendar, self).save(*args, **kwargs) def existing_days(self): return self.days.all().count() def __str__(self): return self.name class Day(models.Model): class Meta: unique_together = (('day', 'calendar')) ordering = ['day', ] calendar = models.ForeignKey(Calendar, related_name="days") DAY_CHOICES = lambda x: [(i, '_' + str(i) + '_') for i in range(1, x + 1)] day = models.IntegerField(choices=DAY_CHOICES(24)) image_source = models.URLField(blank=True) original_image = FilerImageField(null=True) def get_image_small_url(self): # TODO: get these from the field height = 400 width = 400 return get_thumbnailer(self.original_image.file).get_thumbnail({ 'size': (width, height), 'crop': True, 'upscale': True, 'detail': True, 'subject_location': self.original_image.subject_location }).url def get_image_large_url(self): # TODO: get these from the field height = 1200 width = 1200 return get_thumbnailer(self.original_image.file).get_thumbnail({ 'size': (width, height), 'crop': True, 'upscale': True, 'detail': True, 'subject_location': self.original_image.subject_location }).url def __str__(self): return ' '.join([self.calendar.name, str(self.day)])
[ "django.db.models.URLField", "filer.fields.image.FilerImageField", "django.db.models.CharField", "shortuuid.uuid", "django.db.models.ForeignKey", "easy_thumbnails.files.get_thumbnailer", "datetime.date.today", "datetime.datetime.now" ]
[((207, 239), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)'}), '(max_length=250)\n', (223, 239), False, 'from django.db import models\n'), ((251, 282), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(22)'}), '(max_length=22)\n', (267, 282), False, 'from django.db import models\n'), ((977, 1025), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Calendar'], {'related_name': '"""days"""'}), "(Calendar, related_name='days')\n", (994, 1025), False, 'from django.db import models\n'), ((1180, 1207), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)'}), '(blank=True)\n', (1195, 1207), False, 'from django.db import models\n'), ((1230, 1256), 'filer.fields.image.FilerImageField', 'FilerImageField', ([], {'null': '(True)'}), '(null=True)\n', (1245, 1256), False, 'from filer.fields.image import FilerImageField\n'), ((547, 563), 'shortuuid.uuid', 'shortuuid.uuid', ([], {}), '()\n', (561, 563), False, 'import shortuuid\n'), ((465, 488), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (486, 488), False, 'import datetime\n'), ((1390, 1431), 'easy_thumbnails.files.get_thumbnailer', 'get_thumbnailer', (['self.original_image.file'], {}), '(self.original_image.file)\n', (1405, 1431), False, 'from easy_thumbnails.files import get_thumbnailer\n'), ((1791, 1832), 'easy_thumbnails.files.get_thumbnailer', 'get_thumbnailer', (['self.original_image.file'], {}), '(self.original_image.file)\n', (1806, 1832), False, 'from easy_thumbnails.files import get_thumbnailer\n'), ((331, 352), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (350, 352), False, 'import datetime\n')]
from flask import Blueprint ctr = Blueprint('ctr', __name__) from . import views
[ "flask.Blueprint" ]
[((35, 61), 'flask.Blueprint', 'Blueprint', (['"""ctr"""', '__name__'], {}), "('ctr', __name__)\n", (44, 61), False, 'from flask import Blueprint\n')]
import tkinter as tk from megawidget.tree import Hook class TreeHook(Hook): def __init__(self, parent_view, nodebar_builder, host): self._parent_view = parent_view self._nodebar_builder = nodebar_builder self._host = host self._stringvar_expander = tk.StringVar() self._stringvar_title = tk.StringVar() self._collapsable_frame = None self._nodebar = None self._formatter = None def on_change_database(self, path): self._parent_view.open_database(path) def on_click_truncate(self, table): sql = "DELETE FROM {}".format(table) formatter = "inline" self._parent_view.push_sql(sql, formatter, execute=True) def on_click_drop(self, table): sql = "DROP TABLE {}".format(table) formatter = "inline" self._parent_view.push_sql(sql, formatter, execute=True) def on_click_explore(self, table): sql = "SELECT * FROM {}".format(table) formatter = "inline" self._parent_view.push_sql(sql, formatter, execute=True) def build_node(self, tree, node, frame): node_id = node["node_id"] if node_id == 0: return # some vars title = node["title"] result = node["data"]["result"] datatype = node["data"]["type"] description = node["data"]["description"] file = node["data"]["file"] path = node["data"]["path"] real_path = node["data"]["realpath"] self._formatter = node["data"]["formatter"] # Populate stringvars self._stringvar_expander.set("-" if node["expanded"] else "+") self._stringvar_title.set(title) # config header frame frame.columnconfigure(0, weight=0) frame.columnconfigure(1, weight=0) frame.columnconfigure(2, weight=1) # Fill titlebar # - button expander command = (lambda tree=tree, node_id=node_id: tree.collexp(node_id)) button_expander = tk.Button(frame, name="treeExpanderButton", textvariable=self._stringvar_expander, command=command) # - button edit button_edit = tk.Button(frame, text="edit", name="buttonEdit", command=lambda self=self, node_id=node_id, tree=tree: self._on_click_edit(tree, node_id)) # - entry title entry_title = tk.Entry(frame, name="treeTitle", state="readonly", textvariable=self._stringvar_title) entry_title.bind("<Button-1>", lambda e, self=self, node_id=node_id, tree=tree: self._on_click_sql(tree, node_id)) # - install button_expander.grid(row=0, column=0, padx=(0, 5), sticky="w") button_edit.grid(row=0, column=1, padx=(0, 5), sticky="w") entry_title.grid(row=0, column=2, sticky="nswe") # collapsable_frame self._collapsable_frame = tk.Frame(frame, class_="CollapsableFrame") self._collapsable_frame.columnconfigure(0, weight=1) # - install self._collapsable_frame.grid(row=1, column=2, sticky="w", padx=(0, 20)) # Fill collapsable frame self._nodebar = self._nodebar_builder.build(self, node_id, self._collapsable_frame, file, path, real_path, result, datatype, description) def on_map_node(self, tree, node): pass def on_destroy_node(self, tree, node): pass def on_feed_node(self, tree, node, *args, **kwargs): pass def on_expand_node(self, tree, node): node_id = node["node_id"] if node_id == 0: return self._stringvar_expander.set("-") self._collapsable_frame.grid() def on_collapse_node(self, tree, node): node_id = node["node_id"] if node_id == 0: return self._stringvar_expander.set("+") self._collapsable_frame.grid_remove() def _on_click_sql(self, tree, node_id): tree.collexp(node_id) def _on_click_edit(self, tree, node_id): sql = self._stringvar_title.get() self._parent_view.push_sql(sql, self._formatter)
[ "tkinter.StringVar", "tkinter.Entry", "tkinter.Button", "tkinter.Frame" ]
[((287, 301), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (299, 301), True, 'import tkinter as tk\n'), ((334, 348), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (346, 348), True, 'import tkinter as tk\n'), ((2019, 2123), 'tkinter.Button', 'tk.Button', (['frame'], {'name': '"""treeExpanderButton"""', 'textvariable': 'self._stringvar_expander', 'command': 'command'}), "(frame, name='treeExpanderButton', textvariable=self.\n _stringvar_expander, command=command)\n", (2028, 2123), True, 'import tkinter as tk\n'), ((2684, 2776), 'tkinter.Entry', 'tk.Entry', (['frame'], {'name': '"""treeTitle"""', 'state': '"""readonly"""', 'textvariable': 'self._stringvar_title'}), "(frame, name='treeTitle', state='readonly', textvariable=self.\n _stringvar_title)\n", (2692, 2776), True, 'import tkinter as tk\n'), ((3351, 3393), 'tkinter.Frame', 'tk.Frame', (['frame'], {'class_': '"""CollapsableFrame"""'}), "(frame, class_='CollapsableFrame')\n", (3359, 3393), True, 'import tkinter as tk\n')]
#!/usr/bin/python # This file is licensed under MIT license. # See the LICENSE file in the project root for more information. import unittest import rostest import rosunit import numpy as np from numpy.testing import assert_almost_equal from std_msgs.msg import Header from geometry_msgs.msg import PoseStamped, Pose, Point, Quaternion from nav_msgs.msg import Path from car_core.common import msgs_helpers, geom_helpers def get_poses_helper(points): poses = [] for p in points: pose = PoseStamped() pose.pose.position = Point(p[0], p[1], p[2]) poses.append(pose) return poses class TestMsgsHelpers(unittest.TestCase): def test_quaterion_to_array_ok(self): q = Quaternion(1,2,3,4) arr = msgs_helpers.quaterion_to_array(q) assert_almost_equal(arr, np.array([1,2, 3, 4])) self.assertTrue(True) def test_point_to_array_ok(self): p = Point(1,2,3) arr = msgs_helpers.point_to_array(p) assert_almost_equal(arr, np.array([1,2])) self.assertTrue(True) def test_path_poses_to_array_ok(self): poses = get_poses_helper([[1,2,3], [4,5,6], [7,8,9]]) arr = msgs_helpers.path_poses_to_array(poses) assert_almost_equal(arr, np.array([[1,2], [4,5], [7,8]])) self.assertTrue(True) def test_array_to_point_ok(self): arr = np.array([1,2]) point = msgs_helpers.array_to_point(arr) self.assertEqual(point, Point(1,2,0)) def test_array_to_path_poses_ok(self): arr = np.array([[1,2], [4,5], [6,7]]) poses = msgs_helpers.array_to_path_poses(arr) poses_true = get_poses_helper([[1,2,0], [4,5,0], [6,7,0]]) self.assertEqual(poses, poses) class TestGeomHelpers(unittest.TestCase): def test_get_closest_path_point_regular(self): poses = np.array([[0,0], [1,1], [2,2], [3,3]]) point = np.array([0.9, 0.9]) index = geom_helpers.get_closest_path_point(poses, point) self.assertEqual(index, 1) def test_get_closest_path_point_far(self): poses = np.array([[0,0], [1,1], [2,2], [3,3]]) point = np.array([-1, 3]) index = geom_helpers.get_closest_path_point(poses, point) self.assertEqual(index, 1) def test_get_closest_path_point_first(self): poses = np.array([[0,0], [1,1], [2,2], [3,3]]) point = np.array([-1, 1]) index = geom_helpers.get_closest_path_point(poses, point) self.assertEqual(index, 0) def test_get_closest_path_point_last(self): poses = np.array([[0,0], [1,1], [2,2], [3,3]]) point = np.array([4, 4]) index = geom_helpers.get_closest_path_point(poses, point) self.assertEqual(index, 3) def test_get_closest_path_point_single_point(self): poses = np.array([[0,0]]) point = np.array([4, 4]) index = geom_helpers.get_closest_path_point(poses, point) self.assertEqual(index, 0) def test_get_closest_path_point_matching_points(self): poses = np.array([[0,0], [1,1], [1,1], [3,3]]) point = np.array([1.1, 1.1]) index = geom_helpers.get_closest_path_point(poses, point) self.assertEqual(index, 1) if __name__ == '__main__': import rosunit rosunit.unitrun("car_core", 'test_msgs_helpers', TestMsgsHelpers) rosunit.unitrun("car_core", 'test_geom_helpers', TestGeomHelpers)
[ "geometry_msgs.msg.PoseStamped", "car_core.common.msgs_helpers.path_poses_to_array", "car_core.common.msgs_helpers.array_to_point", "rosunit.unitrun", "car_core.common.msgs_helpers.array_to_path_poses", "car_core.common.geom_helpers.get_closest_path_point", "car_core.common.msgs_helpers.point_to_array", "geometry_msgs.msg.Quaternion", "geometry_msgs.msg.Point", "numpy.array", "car_core.common.msgs_helpers.quaterion_to_array" ]
[((3932, 3997), 'rosunit.unitrun', 'rosunit.unitrun', (['"""car_core"""', '"""test_msgs_helpers"""', 'TestMsgsHelpers'], {}), "('car_core', 'test_msgs_helpers', TestMsgsHelpers)\n", (3947, 3997), False, 'import rosunit\n'), ((4002, 4067), 'rosunit.unitrun', 'rosunit.unitrun', (['"""car_core"""', '"""test_geom_helpers"""', 'TestGeomHelpers'], {}), "('car_core', 'test_geom_helpers', TestGeomHelpers)\n", (4017, 4067), False, 'import rosunit\n'), ((506, 519), 'geometry_msgs.msg.PoseStamped', 'PoseStamped', ([], {}), '()\n', (517, 519), False, 'from geometry_msgs.msg import PoseStamped, Pose, Point, Quaternion\n'), ((549, 572), 'geometry_msgs.msg.Point', 'Point', (['p[0]', 'p[1]', 'p[2]'], {}), '(p[0], p[1], p[2])\n', (554, 572), False, 'from geometry_msgs.msg import PoseStamped, Pose, Point, Quaternion\n'), ((715, 737), 'geometry_msgs.msg.Quaternion', 'Quaternion', (['(1)', '(2)', '(3)', '(4)'], {}), '(1, 2, 3, 4)\n', (725, 737), False, 'from geometry_msgs.msg import PoseStamped, Pose, Point, Quaternion\n'), ((749, 783), 'car_core.common.msgs_helpers.quaterion_to_array', 'msgs_helpers.quaterion_to_array', (['q'], {}), '(q)\n', (780, 783), False, 'from car_core.common import msgs_helpers, geom_helpers\n'), ((921, 935), 'geometry_msgs.msg.Point', 'Point', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (926, 935), False, 'from geometry_msgs.msg import PoseStamped, Pose, Point, Quaternion\n'), ((948, 978), 'car_core.common.msgs_helpers.point_to_array', 'msgs_helpers.point_to_array', (['p'], {}), '(p)\n', (975, 978), False, 'from car_core.common import msgs_helpers, geom_helpers\n'), ((1247, 1286), 'car_core.common.msgs_helpers.path_poses_to_array', 'msgs_helpers.path_poses_to_array', (['poses'], {}), '(poses)\n', (1279, 1286), False, 'from car_core.common import msgs_helpers, geom_helpers\n'), ((1522, 1538), 'numpy.array', 'np.array', (['[1, 2]'], {}), '([1, 2])\n', (1530, 1538), True, 'import numpy as np\n'), ((1554, 1586), 'car_core.common.msgs_helpers.array_to_point', 'msgs_helpers.array_to_point', (['arr'], {}), '(arr)\n', (1581, 1586), False, 'from car_core.common import msgs_helpers, geom_helpers\n'), ((1691, 1725), 'numpy.array', 'np.array', (['[[1, 2], [4, 5], [6, 7]]'], {}), '([[1, 2], [4, 5], [6, 7]])\n', (1699, 1725), True, 'import numpy as np\n'), ((1787, 1824), 'car_core.common.msgs_helpers.array_to_path_poses', 'msgs_helpers.array_to_path_poses', (['arr'], {}), '(arr)\n', (1819, 1824), False, 'from car_core.common import msgs_helpers, geom_helpers\n'), ((2119, 2161), 'numpy.array', 'np.array', (['[[0, 0], [1, 1], [2, 2], [3, 3]]'], {}), '([[0, 0], [1, 1], [2, 2], [3, 3]])\n', (2127, 2161), True, 'import numpy as np\n'), ((2252, 2272), 'numpy.array', 'np.array', (['[0.9, 0.9]'], {}), '([0.9, 0.9])\n', (2260, 2272), True, 'import numpy as np\n'), ((2289, 2338), 'car_core.common.geom_helpers.get_closest_path_point', 'geom_helpers.get_closest_path_point', (['poses', 'point'], {}), '(poses, point)\n', (2324, 2338), False, 'from car_core.common import msgs_helpers, geom_helpers\n'), ((2438, 2480), 'numpy.array', 'np.array', (['[[0, 0], [1, 1], [2, 2], [3, 3]]'], {}), '([[0, 0], [1, 1], [2, 2], [3, 3]])\n', (2446, 2480), True, 'import numpy as np\n'), ((2571, 2588), 'numpy.array', 'np.array', (['[-1, 3]'], {}), '([-1, 3])\n', (2579, 2588), True, 'import numpy as np\n'), ((2605, 2654), 'car_core.common.geom_helpers.get_closest_path_point', 'geom_helpers.get_closest_path_point', (['poses', 'point'], {}), '(poses, point)\n', (2640, 2654), False, 'from car_core.common import msgs_helpers, geom_helpers\n'), ((2756, 2798), 'numpy.array', 'np.array', (['[[0, 0], [1, 1], [2, 2], [3, 3]]'], {}), '([[0, 0], [1, 1], [2, 2], [3, 3]])\n', (2764, 2798), True, 'import numpy as np\n'), ((2889, 2906), 'numpy.array', 'np.array', (['[-1, 1]'], {}), '([-1, 1])\n', (2897, 2906), True, 'import numpy as np\n'), ((2923, 2972), 'car_core.common.geom_helpers.get_closest_path_point', 'geom_helpers.get_closest_path_point', (['poses', 'point'], {}), '(poses, point)\n', (2958, 2972), False, 'from car_core.common import msgs_helpers, geom_helpers\n'), ((3073, 3115), 'numpy.array', 'np.array', (['[[0, 0], [1, 1], [2, 2], [3, 3]]'], {}), '([[0, 0], [1, 1], [2, 2], [3, 3]])\n', (3081, 3115), True, 'import numpy as np\n'), ((3206, 3222), 'numpy.array', 'np.array', (['[4, 4]'], {}), '([4, 4])\n', (3214, 3222), True, 'import numpy as np\n'), ((3239, 3288), 'car_core.common.geom_helpers.get_closest_path_point', 'geom_helpers.get_closest_path_point', (['poses', 'point'], {}), '(poses, point)\n', (3274, 3288), False, 'from car_core.common import msgs_helpers, geom_helpers\n'), ((3397, 3415), 'numpy.array', 'np.array', (['[[0, 0]]'], {}), '([[0, 0]])\n', (3405, 3415), True, 'import numpy as np\n'), ((3431, 3447), 'numpy.array', 'np.array', (['[4, 4]'], {}), '([4, 4])\n', (3439, 3447), True, 'import numpy as np\n'), ((3464, 3513), 'car_core.common.geom_helpers.get_closest_path_point', 'geom_helpers.get_closest_path_point', (['poses', 'point'], {}), '(poses, point)\n', (3499, 3513), False, 'from car_core.common import msgs_helpers, geom_helpers\n'), ((3625, 3667), 'numpy.array', 'np.array', (['[[0, 0], [1, 1], [1, 1], [3, 3]]'], {}), '([[0, 0], [1, 1], [1, 1], [3, 3]])\n', (3633, 3667), True, 'import numpy as np\n'), ((3758, 3778), 'numpy.array', 'np.array', (['[1.1, 1.1]'], {}), '([1.1, 1.1])\n', (3766, 3778), True, 'import numpy as np\n'), ((3795, 3844), 'car_core.common.geom_helpers.get_closest_path_point', 'geom_helpers.get_closest_path_point', (['poses', 'point'], {}), '(poses, point)\n', (3830, 3844), False, 'from car_core.common import msgs_helpers, geom_helpers\n'), ((817, 839), 'numpy.array', 'np.array', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (825, 839), True, 'import numpy as np\n'), ((1012, 1028), 'numpy.array', 'np.array', (['[1, 2]'], {}), '([1, 2])\n', (1020, 1028), True, 'import numpy as np\n'), ((1320, 1354), 'numpy.array', 'np.array', (['[[1, 2], [4, 5], [7, 8]]'], {}), '([[1, 2], [4, 5], [7, 8]])\n', (1328, 1354), True, 'import numpy as np\n'), ((1619, 1633), 'geometry_msgs.msg.Point', 'Point', (['(1)', '(2)', '(0)'], {}), '(1, 2, 0)\n', (1624, 1633), False, 'from geometry_msgs.msg import PoseStamped, Pose, Point, Quaternion\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- __author__ = 'ipetrash' from PyQt5.QtGui import QPixmap, QPainter, QFont from PyQt5.QtWidgets import QApplication, QLabel from PyQt5.QtCore import Qt, QRect app = QApplication([]) text = "Hello World!" pixmap = QPixmap(180, 130) pixmap.fill(Qt.white) painter = QPainter(pixmap) painter.setFont(QFont('Arial', 12)) rect = QRect(0, 0, 70, 50) painter.drawRect(rect) painter.drawText(rect, Qt.TextWordWrap, text) rect = QRect(0, 60, 70, 50) painter.drawRect(rect) painter.drawText(rect, Qt.AlignLeft, text) w = QLabel() w.setPixmap(pixmap) w.show() app.exec()
[ "PyQt5.QtGui.QPainter", "PyQt5.QtWidgets.QLabel", "PyQt5.QtCore.QRect", "PyQt5.QtGui.QFont", "PyQt5.QtGui.QPixmap", "PyQt5.QtWidgets.QApplication" ]
[((215, 231), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['[]'], {}), '([])\n', (227, 231), False, 'from PyQt5.QtWidgets import QApplication, QLabel\n'), ((265, 282), 'PyQt5.QtGui.QPixmap', 'QPixmap', (['(180)', '(130)'], {}), '(180, 130)\n', (272, 282), False, 'from PyQt5.QtGui import QPixmap, QPainter, QFont\n'), ((316, 332), 'PyQt5.QtGui.QPainter', 'QPainter', (['pixmap'], {}), '(pixmap)\n', (324, 332), False, 'from PyQt5.QtGui import QPixmap, QPainter, QFont\n'), ((377, 396), 'PyQt5.QtCore.QRect', 'QRect', (['(0)', '(0)', '(70)', '(50)'], {}), '(0, 0, 70, 50)\n', (382, 396), False, 'from PyQt5.QtCore import Qt, QRect\n'), ((474, 494), 'PyQt5.QtCore.QRect', 'QRect', (['(0)', '(60)', '(70)', '(50)'], {}), '(0, 60, 70, 50)\n', (479, 494), False, 'from PyQt5.QtCore import Qt, QRect\n'), ((566, 574), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (572, 574), False, 'from PyQt5.QtWidgets import QApplication, QLabel\n'), ((349, 367), 'PyQt5.QtGui.QFont', 'QFont', (['"""Arial"""', '(12)'], {}), "('Arial', 12)\n", (354, 367), False, 'from PyQt5.QtGui import QPixmap, QPainter, QFont\n')]
from django.dispatch import Signal # Arguments: "msg" email_sent = Signal()
[ "django.dispatch.Signal" ]
[((68, 76), 'django.dispatch.Signal', 'Signal', ([], {}), '()\n', (74, 76), False, 'from django.dispatch import Signal\n')]
# Copyright (c) Microsoft Corporation. # Licensed under the MIT license. import numpy as np from typing import Union from sklearn.manifold import TSNE def tsne( embedding: np.ndarray, num_components: int = 2, perplexity: float = 30.0, early_exaggeration: float = 12.0, learning_rate: float = 200.0, num_iterations: int = 1000, num_iterations_without_progress: int = 300, min_grad_norm: float = 1e-7, metric: str = "euclidean", init: str = "random", verbose: int = 1, random_state: Union[int, np.random.RandomState, None] = None, method: str = 'barnes_hut', angle: float = 0.5 ) -> np.ndarray: """ t-distributed Stochastic Neighbor Embedding. t-SNE is a tool to visualize high-dimensional data. It converts similarities between data points to joint probabilities and tries to minimize the Kullback-Leibler divergence between the joint probabilities of the low-dimensional embedding and the high-dimensional data. t-SNE has a cost function that is not convex, i.e. with different initializations we can get different results. It is highly recommended to use another dimensionality reduction method (e.g. PCA for dense data or TruncatedSVD for sparse data) to reduce the number of dimensions to a reasonable amount (e.g. 50) if the number of features is very high. This will suppress some noise and speed up the computation of pairwise distances between samples. :param numpy.ndarray embedding: The embedding in which PCA will be applied :param int num_components: Dimension of the embedded space. Default 2 :param float perplexity: The perplexity is related to the number of nearest neighbors that is used in other manifold learning algorithms. Larger datasets usually require a larger perplexity. Consider selecting a value between 5 and 50. The choice is not extremely critical since t-SNE is quite insensitive to this parameter. Default 30.0 :param float early_exaggeration: Controls how tight natural clusters in the original space are in the embedded space and how much space will be between them. For larger values, the space between natural clusters will be larger in the embedded space. Again, the choice of this parameter is not very critical. If the cost function increases during initial optimization, the early exaggeration factor or the learning rate might be too high. Default 12.0 :param float learning_rate: The learning rate for t-SNE is usually in the range [10.0, 1000.0]. If the learning rate is too high, the data may look like a 'ball' with any point approximately equidistant from its nearest neighbours. If the learning rate is too low, most points may look compressed in a dense cloud with few outliers. If the cost function gets stuck in a bad local minimum increasing the learning rate may help. Default 200.0 :param int num_iterations: Maximum number of iterations for the optimization. Should be at least 250. Default 1000 :param int num_iterations_without_progress: Maximum number of iterations without progress before we abort the optimization, used after 250 initial iterations with early exaggeration. Note that progress is only checked every 50 iterations so this value is rounded to the next multiple of 50. Default 300 :param float min_grad_norm: If the gradient norm is below this threshold, the optimization will be stopped. Default 1e-7 :param metric: The metric to use when calculating distance between instances in a feature array. If metric is a string, it must be one of the options allowed by scipy.spatial.distance.pdist for its metric parameter, or a metric listed in pairwise.PAIRWISE_DISTANCE_FUNCTIONS. If metric is "precomputed", X is assumed to be a distance matrix. Alternatively, if metric is a callable function, it is called on each pair of instances (rows) and the resulting value recorded. The callable should take two arrays from X as input and return a value indicating the distance between them. The default is "euclidean" which is interpreted as squared euclidean distance. Default 'euclidean' :type metric: Union[str, Callable] :param init: Initialization of embedding. Possible options are 'random', 'pca', and a numpy array of shape (n_samples, num_components). PCA initialization cannot be used with precomputed distances and is usually more globally stable than random initialization. Default 'random' :type init: Union[string, numpy.ndarray] :param int verbose: Verbosity level. Default 1 :param random_state: If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Note that different initializations might result in different local minima of the cost function. :type random_state: Optional[Union[int, numpy.random.RandomState]] :param str method: By default the gradient calculation algorithm uses Barnes-Hut approximation running in O(NlogN) time. method='exact' will run on the slower, but exact, algorithm in O(N^2) time. The exact algorithm should be used when nearest-neighbor errors need to be better than 3%. However, the exact method cannot scale to millions of examples. Default 'barnes_hut' :param float angle: Only used if method='barnes_hut' This is the trade-off between speed and accuracy for Barnpcaes-Hut T-SNE. 'angle' is the angular size (referred to as theta in [3]) of a distant node as measured from a point. If this size is below 'angle' then it is used as a summary node of all points contained within it. This method is not very sensitive to changes in this parameter in the range of 0.2 - 0.8. Angle less than 0.2 has quickly increasing computation time and angle greater 0.8 has quickly increasing error. Default 0.5 :return: A np.ndarray of principal axes in feature space, representing the directions of maximum variance in the data. The components are sorted by variance` :rtype: numpy.ndarray """ if embedding is None: raise ValueError('embedding must be specified but was None') if not num_components: raise ValueError('num_components must be specified but was None') model = TSNE( n_components=num_components, perplexity=perplexity, early_exaggeration=early_exaggeration, learning_rate=learning_rate, n_iter=num_iterations, n_iter_without_progress=num_iterations_without_progress, min_grad_norm=min_grad_norm, metric=metric, init=init, verbose=verbose, random_state=random_state, method=method, angle=angle ) return model.fit_transform(embedding)
[ "sklearn.manifold.TSNE" ]
[((6728, 7078), 'sklearn.manifold.TSNE', 'TSNE', ([], {'n_components': 'num_components', 'perplexity': 'perplexity', 'early_exaggeration': 'early_exaggeration', 'learning_rate': 'learning_rate', 'n_iter': 'num_iterations', 'n_iter_without_progress': 'num_iterations_without_progress', 'min_grad_norm': 'min_grad_norm', 'metric': 'metric', 'init': 'init', 'verbose': 'verbose', 'random_state': 'random_state', 'method': 'method', 'angle': 'angle'}), '(n_components=num_components, perplexity=perplexity, early_exaggeration\n =early_exaggeration, learning_rate=learning_rate, n_iter=num_iterations,\n n_iter_without_progress=num_iterations_without_progress, min_grad_norm=\n min_grad_norm, metric=metric, init=init, verbose=verbose, random_state=\n random_state, method=method, angle=angle)\n', (6732, 7078), False, 'from sklearn.manifold import TSNE\n')]
from django import forms import voxel_globe.meta.models as models class TiePointForm(forms.Form): image_set = forms.ModelChoiceField(label="Image Set", queryset=models.ImageSet.objects.all().order_by('name')) class PointCloudForm(forms.Form): point_cloud = forms.ModelChoiceField(label="Point Cloud", queryset=models.PointCloud.objects.all().order_by('name')) class ImageForm(forms.Form): image = forms.ModelChoiceField(label="Image", queryset=models.Image.objects.all().order_by('name')) class CameraForm(forms.Form): image_set = forms.ModelChoiceField(label="Image Set", queryset=models.ImageSet.objects.all().order_by('name')) camera_set = forms.ModelChoiceField(label="Camera Set", queryset=models.CameraSet.objects.all().order_by('name'))
[ "voxel_globe.meta.models.CameraSet.objects.all", "voxel_globe.meta.models.Image.objects.all", "voxel_globe.meta.models.ImageSet.objects.all", "voxel_globe.meta.models.PointCloud.objects.all" ]
[((171, 200), 'voxel_globe.meta.models.ImageSet.objects.all', 'models.ImageSet.objects.all', ([], {}), '()\n', (198, 200), True, 'import voxel_globe.meta.models as models\n'), ((330, 361), 'voxel_globe.meta.models.PointCloud.objects.all', 'models.PointCloud.objects.all', ([], {}), '()\n', (359, 361), True, 'import voxel_globe.meta.models as models\n'), ((474, 500), 'voxel_globe.meta.models.Image.objects.all', 'models.Image.objects.all', ([], {}), '()\n', (498, 500), True, 'import voxel_globe.meta.models as models\n'), ((622, 651), 'voxel_globe.meta.models.ImageSet.objects.all', 'models.ImageSet.objects.all', ([], {}), '()\n', (649, 651), True, 'import voxel_globe.meta.models as models\n'), ((744, 774), 'voxel_globe.meta.models.CameraSet.objects.all', 'models.CameraSet.objects.all', ([], {}), '()\n', (772, 774), True, 'import voxel_globe.meta.models as models\n')]
import cv2 import numpy as np imagen = cv2.imread('imagen.jpg') imagen = cv2.cvtColor(imagen,cv2.COLOR_BGR2RGB) print(imagen.shape) print(imagen[0][0][0]) imagen = cv2.resize(imagen,(256, 256)) imagen = cv2.imread('imagen.jpg') imagen = cv2.cvtColor(imagen,cv2.COLOR_BGR2GRAY) print(imagen.shape) print(imagen[0][0]) imagen[0][0] = 0 imagen[0][1] = 0 imagen[0][2] = 0 cv2.imwrite('grayimagen.jpg',imagen) matriz = np.zeros((256,256),np.float32) print(matriz.shape) cv2.imwrite('matrizImagen.jpg',matriz) imagen = cv2.cvtColor(matriz,cv2.COLOR_GRAY2BGR) print(imagen.shape) cv2.imwrite('matrizColorImagen.jpg',imagen) #cv2.imwrite('resizeImagen.jpg',imagen) #cv2.imshow('image',imagen) #cv2.waitKey(0)
[ "cv2.cvtColor", "cv2.imwrite", "numpy.zeros", "cv2.imread", "cv2.resize" ]
[((40, 64), 'cv2.imread', 'cv2.imread', (['"""imagen.jpg"""'], {}), "('imagen.jpg')\n", (50, 64), False, 'import cv2\n'), ((74, 113), 'cv2.cvtColor', 'cv2.cvtColor', (['imagen', 'cv2.COLOR_BGR2RGB'], {}), '(imagen, cv2.COLOR_BGR2RGB)\n', (86, 113), False, 'import cv2\n'), ((165, 195), 'cv2.resize', 'cv2.resize', (['imagen', '(256, 256)'], {}), '(imagen, (256, 256))\n', (175, 195), False, 'import cv2\n'), ((205, 229), 'cv2.imread', 'cv2.imread', (['"""imagen.jpg"""'], {}), "('imagen.jpg')\n", (215, 229), False, 'import cv2\n'), ((239, 279), 'cv2.cvtColor', 'cv2.cvtColor', (['imagen', 'cv2.COLOR_BGR2GRAY'], {}), '(imagen, cv2.COLOR_BGR2GRAY)\n', (251, 279), False, 'import cv2\n'), ((372, 409), 'cv2.imwrite', 'cv2.imwrite', (['"""grayimagen.jpg"""', 'imagen'], {}), "('grayimagen.jpg', imagen)\n", (383, 409), False, 'import cv2\n'), ((418, 450), 'numpy.zeros', 'np.zeros', (['(256, 256)', 'np.float32'], {}), '((256, 256), np.float32)\n', (426, 450), True, 'import numpy as np\n'), ((469, 508), 'cv2.imwrite', 'cv2.imwrite', (['"""matrizImagen.jpg"""', 'matriz'], {}), "('matrizImagen.jpg', matriz)\n", (480, 508), False, 'import cv2\n'), ((517, 557), 'cv2.cvtColor', 'cv2.cvtColor', (['matriz', 'cv2.COLOR_GRAY2BGR'], {}), '(matriz, cv2.COLOR_GRAY2BGR)\n', (529, 557), False, 'import cv2\n'), ((577, 621), 'cv2.imwrite', 'cv2.imwrite', (['"""matrizColorImagen.jpg"""', 'imagen'], {}), "('matrizColorImagen.jpg', imagen)\n", (588, 621), False, 'import cv2\n')]
# -------------------------------------------------------- # Faster R-CNN # Copyright (c) 2015 Microsoft # Licensed under The MIT License [see LICENSE for details] # Written by <NAME> and <NAME> # -------------------------------------------------------- # -------------------------------------------------------- # Reorganized and modified by <NAME> and <NAME> # -------------------------------------------------------- import torch import torch.nn as nn import numpy as np import numpy.random as npr from ..utils.config import cfg from bbox_transform import bbox_transform, bbox_overlaps, co_bbox_overlaps_batch2, bbox_transform_batch2, bbox_overlaps_batch2 import pdb DEBUG = False class _RelProposalTargetLayer(nn.Module): """ Assign object detection proposals to ground-truth targets. Produces proposal classification labels and bounding-box regression targets. """ def __init__(self, nclasses_rel): super(_RelProposalTargetLayer, self).__init__() self._num_classes_rel = nclasses_rel self.BBOX_NORMALIZE_MEANS = torch.FloatTensor(cfg.TRAIN.BBOX_NORMALIZE_MEANS) self.BBOX_NORMALIZE_STDS = torch.FloatTensor(cfg.TRAIN.BBOX_NORMALIZE_STDS) self.BBOX_INSIDE_WEIGHTS = torch.FloatTensor(cfg.TRAIN.BBOX_INSIDE_WEIGHTS) def forward(self, roi_pairs, gt_boxes, num_boxes): batch_size = gt_boxes.size(0) # compute overlap between gt rel pairs and all roi pairs gt_box_pairs = roi_pairs.new(batch_size, cfg.MAX_ROI_PAIR_NUMBER, 9).zero_() for i in range(batch_size): if (gt_boxes[i, :, 21:] > 0).sum() == 0: # no relation continue gt_pairs_i = (gt_boxes[i, :, 21:] > 0).nonzero() n_rel = min(gt_box_pairs[i].size(0), gt_pairs_i.size(0)) gt_box_pairs[i][:n_rel, 0:4] = gt_boxes[i][gt_pairs_i[:n_rel, 0]][:, :4] gt_box_pairs[i][:n_rel, 4:8] = gt_boxes[i][gt_pairs_i[:n_rel, 1]][:, :4] gt_box_pairs[i][:n_rel, 8] = gt_boxes[i][gt_pairs_i[:n_rel, 0], 21 + gt_pairs_i[:n_rel, 1]] # Include ground-truth boxes in the set of candidate rois # gt_box_pairs_append = roi_pairs.new(batch_size, gt_box_pairs.size(1), roi_pairs.size(2)).zero_() # gt_box_pairs_append[:,:,1:9] = gt_box_pairs[:,:,:8] # for i in range(batch_size): # gt_box_pairs_append[i, :, 0] = i # # roi_pairs = torch.cat([roi_pairs, gt_box_pairs_append], 1) roi_pairs = roi_pairs.contiguous() num_images = 1 rois_per_image = int(cfg.TRAIN.BATCH_SIZE / num_images) fg_rois_per_image = int(np.round(cfg.TRAIN.FG_FRACTION * rois_per_image)) labels, rois, keeps = self._sample_roi_pairs_pytorch(roi_pairs, gt_box_pairs, fg_rois_per_image, rois_per_image, self._num_classes_rel) return rois, labels, keeps def backward(self, top, propagate_down, bottom): """This layer does not propagate gradients.""" pass def _sample_roi_pairs_pytorch(self, all_roi_pairs, gt_box_pairs, fg_rois_per_image, rois_per_image, num_classes): """Generate a random sample of RoIs comprising foreground and background examples. """ # overlaps: (rois x gt_boxes) overlaps = co_bbox_overlaps_batch2(all_roi_pairs[:,:,1:].contiguous(), gt_box_pairs[:,:,:8].contiguous()) max_overlaps, gt_assignment = torch.max(overlaps, 2) batch_size = overlaps.size(0) num_proposal = overlaps.size(1) num_boxes_per_img = overlaps.size(2) offset = torch.arange(0, batch_size) * gt_box_pairs.size(1) offset = offset.view(-1, 1).type_as(gt_assignment) + gt_assignment labels = gt_box_pairs[:,:,8].contiguous().view(-1).index(offset.view(-1))\ .view(batch_size, -1) fg_mask = max_overlaps >= cfg.TRAIN.RELPN_FG_THRESH keep_inds_batch = labels.new(batch_size, rois_per_image).zero_() labels_rel_batch = labels.new(batch_size, rois_per_image).zero_() roi_pairs_batch = all_roi_pairs.new(batch_size, rois_per_image, 9).zero_() # Guard against the case when an image has fewer than max_fg_rois_per_image # foreground RoIs for i in range(batch_size): fg_inds = torch.nonzero(max_overlaps[i] >= cfg.TRAIN.RELPN_FG_THRESH).view(-1) fg_num_rois = fg_inds.numel() # Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI) bg_inds = torch.nonzero((max_overlaps[i] < cfg.TRAIN.RELPN_BG_THRESH_HI) & (max_overlaps[i] >= cfg.TRAIN.RELPN_BG_THRESH_LO)).view(-1) bg_num_rois = bg_inds.numel() # print(fg_num_rois, bg_num_rois) # pdb.set_trace() if fg_num_rois > 0 and bg_num_rois > 0: # sampling fg fg_rois_per_this_image = min(fg_rois_per_image, fg_num_rois) # rand_num = torch.randperm(fg_num_rois).long().cuda() rand_num = torch.from_numpy(np.random.permutation(fg_num_rois)).long().cuda() fg_inds = fg_inds[rand_num[:fg_rois_per_this_image]] # sampling bg bg_rois_per_this_image = rois_per_image - fg_rois_per_this_image # Seems torch.rand has a bug, it will generate very large number and make an error. # We use numpy rand instead. #rand_num = (torch.rand(bg_rois_per_this_image) * bg_num_rois).long().cuda() rand_num = np.floor(np.random.rand(bg_rois_per_this_image) * bg_num_rois) rand_num = torch.from_numpy(rand_num).long().cuda() bg_inds = bg_inds[rand_num] elif fg_num_rois > 0 and bg_num_rois == 0: # sampling fg #rand_num = torch.floor(torch.rand(rois_per_image) * fg_num_rois).long().cuda() rand_num = np.floor(np.random.rand(rois_per_image) * fg_num_rois) rand_num = torch.from_numpy(rand_num).long().cuda() fg_inds = fg_inds[rand_num] fg_rois_per_this_image = rois_per_image bg_rois_per_this_image = 0 elif bg_num_rois > 0 and fg_num_rois == 0: # sampling bg #rand_num = torch.floor(torch.rand(rois_per_image) * bg_num_rois).long().cuda() rand_num = np.floor(np.random.rand(rois_per_image) * bg_num_rois) rand_num = torch.from_numpy(rand_num).long().cuda() bg_inds = bg_inds[rand_num] bg_rois_per_this_image = rois_per_image fg_rois_per_this_image = 0 else: print("relpn: bg_num_rois = 0 and fg_num_rois = 0, this should not happen!") # The indices that we're selecting (both fg and bg) keep_inds = torch.cat([fg_inds, bg_inds], 0) keep_inds_batch[i].copy_(keep_inds) # Select sampled values from various arrays: labels_rel_batch[i].copy_(labels[i][keep_inds]) # Clamp relation labels for the background RoIs to 0 labels_rel_batch[i][fg_rois_per_this_image:] = 0 roi_pairs_batch[i].copy_(all_roi_pairs[i][keep_inds]) roi_pairs_batch[i,:,0] = i return labels_rel_batch, roi_pairs_batch, keep_inds_batch
[ "torch.FloatTensor", "torch.cat", "torch.nonzero", "torch.max", "torch.arange", "numpy.random.permutation", "numpy.random.rand", "numpy.round", "torch.from_numpy" ]
[((1065, 1114), 'torch.FloatTensor', 'torch.FloatTensor', (['cfg.TRAIN.BBOX_NORMALIZE_MEANS'], {}), '(cfg.TRAIN.BBOX_NORMALIZE_MEANS)\n', (1082, 1114), False, 'import torch\n'), ((1150, 1198), 'torch.FloatTensor', 'torch.FloatTensor', (['cfg.TRAIN.BBOX_NORMALIZE_STDS'], {}), '(cfg.TRAIN.BBOX_NORMALIZE_STDS)\n', (1167, 1198), False, 'import torch\n'), ((1234, 1282), 'torch.FloatTensor', 'torch.FloatTensor', (['cfg.TRAIN.BBOX_INSIDE_WEIGHTS'], {}), '(cfg.TRAIN.BBOX_INSIDE_WEIGHTS)\n', (1251, 1282), False, 'import torch\n'), ((3497, 3519), 'torch.max', 'torch.max', (['overlaps', '(2)'], {}), '(overlaps, 2)\n', (3506, 3519), False, 'import torch\n'), ((2625, 2673), 'numpy.round', 'np.round', (['(cfg.TRAIN.FG_FRACTION * rois_per_image)'], {}), '(cfg.TRAIN.FG_FRACTION * rois_per_image)\n', (2633, 2673), True, 'import numpy as np\n'), ((3662, 3689), 'torch.arange', 'torch.arange', (['(0)', 'batch_size'], {}), '(0, batch_size)\n', (3674, 3689), False, 'import torch\n'), ((7012, 7044), 'torch.cat', 'torch.cat', (['[fg_inds, bg_inds]', '(0)'], {}), '([fg_inds, bg_inds], 0)\n', (7021, 7044), False, 'import torch\n'), ((4418, 4477), 'torch.nonzero', 'torch.nonzero', (['(max_overlaps[i] >= cfg.TRAIN.RELPN_FG_THRESH)'], {}), '(max_overlaps[i] >= cfg.TRAIN.RELPN_FG_THRESH)\n', (4431, 4477), False, 'import torch\n'), ((4634, 4754), 'torch.nonzero', 'torch.nonzero', (['((max_overlaps[i] < cfg.TRAIN.RELPN_BG_THRESH_HI) & (max_overlaps[i] >= cfg\n .TRAIN.RELPN_BG_THRESH_LO))'], {}), '((max_overlaps[i] < cfg.TRAIN.RELPN_BG_THRESH_HI) & (\n max_overlaps[i] >= cfg.TRAIN.RELPN_BG_THRESH_LO))\n', (4647, 4754), False, 'import torch\n'), ((5696, 5734), 'numpy.random.rand', 'np.random.rand', (['bg_rois_per_this_image'], {}), '(bg_rois_per_this_image)\n', (5710, 5734), True, 'import numpy as np\n'), ((6080, 6110), 'numpy.random.rand', 'np.random.rand', (['rois_per_image'], {}), '(rois_per_image)\n', (6094, 6110), True, 'import numpy as np\n'), ((5777, 5803), 'torch.from_numpy', 'torch.from_numpy', (['rand_num'], {}), '(rand_num)\n', (5793, 5803), False, 'import torch\n'), ((6554, 6584), 'numpy.random.rand', 'np.random.rand', (['rois_per_image'], {}), '(rois_per_image)\n', (6568, 6584), True, 'import numpy as np\n'), ((5190, 5224), 'numpy.random.permutation', 'np.random.permutation', (['fg_num_rois'], {}), '(fg_num_rois)\n', (5211, 5224), True, 'import numpy as np\n'), ((6153, 6179), 'torch.from_numpy', 'torch.from_numpy', (['rand_num'], {}), '(rand_num)\n', (6169, 6179), False, 'import torch\n'), ((6627, 6653), 'torch.from_numpy', 'torch.from_numpy', (['rand_num'], {}), '(rand_num)\n', (6643, 6653), False, 'import torch\n')]
# -*- coding: utf-8 -*- """Model.ipynb Automatically generated by Colaboratory. Original file is located at https://colab.research.google.com/drive/1QPnK5YOh8kRYPOOue6txwrgUqwKOMS0I """ # # Use seaborn for pairplot # !pip install -q seaborn # !pip install tensorflow==2.0.0 # # Use some functions from tensorflow_docs # !pip install -q git+https://github.com/tensorflow/docs # !pip install h5py pyyaml from __future__ import absolute_import, division, print_function, unicode_literals import pathlib import numpy as np import pandas as pd import seaborn as sns import tensorflow as tf from tensorflow import keras import tensorflow_docs as tfdocs import tensorflow_docs.plots import tensorflow_docs.modeling # Commented out IPython magic to ensure Python compatibility. import matplotlib as mpl import matplotlib.pyplot as plt import matplotlib.patches as patches from matplotlib.patches import ConnectionPatch from collections import OrderedDict from matplotlib.gridspec import GridSpec from sklearn import metrics, linear_model from sklearn.preprocessing import PolynomialFeatures, StandardScaler, normalize from sklearn.preprocessing import LabelEncoder, OneHotEncoder from sklearn.model_selection import train_test_split, cross_val_score, cross_val_predict from scipy.optimize import curve_fit import warnings plt.rcParams["patch.force_edgecolor"] = True plt.style.use('fivethirtyeight') mpl.rc('patch', edgecolor = 'dimgray', linewidth=1) # from IPython.core.interactiveshell import InteractiveShell # InteractiveShell.ast_node_interactivity = "last_expr" pd.options.display.max_columns = 50 # %matplotlib inline warnings.filterwarnings("ignore") # import pickle # create and save all the models airlines = pd.read_csv('airlines.csv') carriers = list(airlines['IATA_CODE']) # print(carriers) global train_stats def norm(x): global train_stats return (x - train_stats['mean']) / train_stats['std'] def ret_stats(): return train_stats def build_model(train_ds): model = keras.Sequential([ tf.keras.layers.Dense(64, activation='relu', input_shape=[len(train_ds.keys())]), tf.keras.layers.Dense(64, activation='relu'), tf.keras.layers.Dense(64, activation='relu'), tf.keras.layers.Dense(1) ]) optimizer = tf.keras.optimizers.RMSprop(0.001) model.compile(loss='mse', optimizer=optimizer, metrics=['mae', 'mse']) return model def do_create_models(): for carrier in carriers: # create a model and save it for each carrier global train_stats df = pd.read_csv('carriers/carrier' + str(carrier) + 'data.csv') df.drop(['Unnamed: 0'], axis=1, inplace=True) # encode the origin encoder = LabelEncoder() encoder.fit(df['ORIGIN_AIRPORT']) encoded_data_map = dict(zip(encoder.classes_, encoder.transform(encoder.classes_))) df['ORIGIN_AIRPORT'] = encoder.fit_transform(df['ORIGIN_AIRPORT']) # create the train and test dataset train_dataset = df.sample(frac=0.8,random_state=0) test_dataset = df.drop(train_dataset.index) # getting the stats train_stats = train_dataset.describe() train_stats.pop("ARRIVAL_DELAY") train_stats = train_stats.transpose() train_stats.to_csv('stats/train_stats' + str(carrier) + '.csv') # defining the train and test labels train_labels = train_dataset.pop('ARRIVAL_DELAY') test_labels = test_dataset.pop('ARRIVAL_DELAY') # normalize the data normed_train_data = norm(train_dataset) normed_test_data = norm(test_dataset) # # define the model # model = build_model(train_dataset) # # train the model # EPOCHS = 100 # # The patience parameter is the amount of epochs to check for improvement # early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', patience=10) # early_history = model.fit(normed_train_data, train_labels, # epochs=EPOCHS, validation_split = 0.2, verbose=0, # callbacks=[early_stop, tfdocs.modeling.EpochDots()]) # # calculating the loss # loss, mae, mse = model.evaluate(normed_test_data, test_labels, verbose=2) # # weights = model.get_weights() # # fpkl = open('drive/My Drive/pickle_models/model-' + str(carrier) + '-weights.pkl', 'wb') # # pickle.dump(weights, fpkl, protocol=pickle.HIGHEST_PROTOCOL) # print("Testing set Mean Abs Error: {:5.2f} minutes".format(mae)) # model.save('models/model-' + str(carrier) + '.h5') print('OK ' + str(carrier)) # let's create the input pipeline from datetime import datetime def conv_to_datetime(str_): return datetime.strptime(str_, '%Y-%m-%d %H:%M:%S') def conv_to_time(str_): return datetime.strptime(str_, '%H:%M:%S') import datetime def string_to_time(time_string): if pd.isnull(time_string): return np.nan else: if time_string == 2400: time_string = 0 time_string = "{0:04d}".format(int(time_string)) time_ = datetime.time(int(time_string[0:2]), int(time_string[2:4])) return time_ def func(x): return x.hour * 3600 + x.minute * 60 + x.second dayOfWeek = 6 airline = 'AA' origin = 'LAX' dest = 'SEA' sd = 200 ddelay = -10 sa = 800 dist = 1200 do_create_models() # global train_stats # stats = ret_stats() # print(stats) def processInput(input_): global train_stats processed = [] time_sd = string_to_time(np.int64(input_["sd"])) time_sa = string_to_time(np.int64(input_["sa"])) time_sd = func(time_sd) time_sa = func(time_sa) # encode airlines to their numbers df = pd.read_csv('carriers/carrier' + str(input_["carrier"]) + 'data.csv') df.drop(['Unnamed: 0'], axis=1, inplace=True) encoder = LabelEncoder() encoder.fit(df['ORIGIN_AIRPORT']) encoded_data_map = dict(zip(encoder.classes_, encoder.transform(encoder.classes_))) carrier = input_["carrier"] for carr_ in carriers: # create a model and save it for each carrier if carr_ == carrier: df = pd.read_csv('carriers/carrier' + str(carr_) + 'data.csv') df.drop(['Unnamed: 0'], axis=1, inplace=True) # encode the origin encoder = LabelEncoder() encoder.fit(df['ORIGIN_AIRPORT']) encoded_data_map = dict(zip(encoder.classes_, encoder.transform(encoder.classes_))) # print(encoded_data_map) df['ORIGIN_AIRPORT'] = encoder.fit_transform(df['ORIGIN_AIRPORT']) # # create the train and test dataset # train_dataset = df.sample(frac=0.8,random_state=0) # test_dataset = df.drop(train_dataset.index) # # getting the stats # train_stats = train_dataset.describe() # train_stats.pop("ARRIVAL_DELAY") # train_stats = train_stats.transpose() # # defining the train and test labels # train_labels = train_dataset.pop('ARRIVAL_DELAY') # test_labels = test_dataset.pop('ARRIVAL_DELAY') # # normalize the data # normed_train_data = norm(train_dataset) # normed_test_data = norm(test_dataset) # # define the model # model = build_model(train_dataset) # # train the model # EPOCHS = 100 # # The patience parameter is the amount of epochs to check for improvement # early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', patience=10) # early_history = model.fit(normed_train_data, train_labels, # epochs=EPOCHS, validation_split = 0.2, verbose=0, # callbacks=[early_stop, tfdocs.modeling.EpochDots()]) # # calculating the loss # loss, mae, mse = model.evaluate(normed_test_data, test_labels, verbose=2) # print("Testing set Mean Abs Error: {:5.2f} minutes".format(mae)) # model.save('models/model-' + str(carrier) + '.h5') # weights = model.get_weights() # fpkl = open('model-' + str(carrier) + '-weights.pkl', 'wb') # pickle.dump(weights, fpkl, protocol=pickle.HIGHEST_PROTOCOL) # print('OK ' + str(carrier)) origin = input_["origin"] ddelay = input_["ddelay"] origin_ = encoded_data_map[origin] dist = input_["dist"] weekday = input_["dayOfWeek"] input_ = {"time_insec_dep" : time_sd, "time_insec_arr": time_sa, "ORIGIN_AIRPORT": origin_, "DEPARTURE_DELAY": ddelay, "DISTANCE": dist, "weekday": weekday } df = pd.DataFrame([input_]) df = norm(df) model = keras.models.load_model('models/model-' + str(carrier) +'.h5') print("OK") return df, model # input_ = { # "dayOfWeek": dayOfWeek, # "carrier": airline, # "origin": origin, # "sd": sd, # "ddelay": ddelay, # "sa": sa, # "dist": dist # } # test_input, model = processInput(input_) # from google.colab import drive # drive.mount('/content/drive') # !ls # test_predictions_input = model.predict(test_input).flatten() # print("The delay is: ", test_predictions_input[0], " minutes")
[ "pandas.DataFrame", "matplotlib.rc", "warnings.filterwarnings", "pandas.read_csv", "datetime.strptime", "tensorflow.keras.layers.Dense", "pandas.isnull", "sklearn.preprocessing.LabelEncoder", "matplotlib.pyplot.style.use", "numpy.int64", "tensorflow.keras.optimizers.RMSprop" ]
[((1373, 1405), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""fivethirtyeight"""'], {}), "('fivethirtyeight')\n", (1386, 1405), True, 'import matplotlib.pyplot as plt\n'), ((1406, 1455), 'matplotlib.rc', 'mpl.rc', (['"""patch"""'], {'edgecolor': '"""dimgray"""', 'linewidth': '(1)'}), "('patch', edgecolor='dimgray', linewidth=1)\n", (1412, 1455), True, 'import matplotlib as mpl\n'), ((1633, 1666), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (1656, 1666), False, 'import warnings\n'), ((1728, 1755), 'pandas.read_csv', 'pd.read_csv', (['"""airlines.csv"""'], {}), "('airlines.csv')\n", (1739, 1755), True, 'import pandas as pd\n'), ((2252, 2286), 'tensorflow.keras.optimizers.RMSprop', 'tf.keras.optimizers.RMSprop', (['(0.001)'], {}), '(0.001)\n', (2279, 2286), True, 'import tensorflow as tf\n'), ((4598, 4642), 'datetime.strptime', 'datetime.strptime', (['str_', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(str_, '%Y-%m-%d %H:%M:%S')\n", (4615, 4642), False, 'import datetime\n'), ((4679, 4714), 'datetime.strptime', 'datetime.strptime', (['str_', '"""%H:%M:%S"""'], {}), "(str_, '%H:%M:%S')\n", (4696, 4714), False, 'import datetime\n'), ((4773, 4795), 'pandas.isnull', 'pd.isnull', (['time_string'], {}), '(time_string)\n', (4782, 4795), True, 'import pandas as pd\n'), ((5703, 5717), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (5715, 5717), False, 'from sklearn.preprocessing import LabelEncoder, OneHotEncoder\n'), ((8420, 8442), 'pandas.DataFrame', 'pd.DataFrame', (['[input_]'], {}), '([input_])\n', (8432, 8442), True, 'import pandas as pd\n'), ((2691, 2705), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (2703, 2705), False, 'from sklearn.preprocessing import LabelEncoder, OneHotEncoder\n'), ((5388, 5410), 'numpy.int64', 'np.int64', (["input_['sd']"], {}), "(input_['sd'])\n", (5396, 5410), True, 'import numpy as np\n'), ((5441, 5463), 'numpy.int64', 'np.int64', (["input_['sa']"], {}), "(input_['sa'])\n", (5449, 5463), True, 'import numpy as np\n'), ((2107, 2151), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['(64)'], {'activation': '"""relu"""'}), "(64, activation='relu')\n", (2128, 2151), True, 'import tensorflow as tf\n'), ((2157, 2201), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['(64)'], {'activation': '"""relu"""'}), "(64, activation='relu')\n", (2178, 2201), True, 'import tensorflow as tf\n'), ((2207, 2231), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['(1)'], {}), '(1)\n', (2228, 2231), True, 'import tensorflow as tf\n'), ((6162, 6176), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (6174, 6176), False, 'from sklearn.preprocessing import LabelEncoder, OneHotEncoder\n')]
from datetime import datetime from utils.api import API from time import sleep from config import * import random def load_file(file): try: l = [] with open(file, 'r') as f: for line in f: l.append(line.rstrip()) return l except FileNotFoundError: with open('comment.db', 'w') as f: pass return [] def get_nft(): account = API(REDDIT_CLIENT_ID, REDDIT_CLIENT_SECRET, REDDIT_USERNAME, REDDIT_PASSWORD) account.shadowban_check() reddit = account.authorize() account.authorized(reddit) reddit.read_only = False commented = load_file("comment.db") subreddit = reddit.subreddit("NFTsMarketplace") keywords = ["wallet", "address"] sleep(1) while True: try: for post in subreddit.hot(limit=25): if (post not in commented and any(x in post.title.lower() for x in keywords) or post not in commented and keywords[1] in post.link_flair_text): commented.append(post) with open('comment.db', 'a') as f: f.write(f"{str(post)}\n") post.reply(ETH_ADDRESS) post.upvote() print(f'{post.title}') rndm_sleep = random.randint(300, 600); to_mins = rndm_sleep / 60; to_mins = round(to_mins, 1) print(f"zZz for {str(to_mins)} minutes") sleep(rndm_sleep) except: print("Error occurred, retrying.") sleep(500) print("+") print(f"[{datetime.now().replace(microsecond=0)}] zZz for 6 hours") sleep(21600) if __name__ == '__main__': get_nft()
[ "utils.api.API", "datetime.datetime.now", "random.randint", "time.sleep" ]
[((440, 517), 'utils.api.API', 'API', (['REDDIT_CLIENT_ID', 'REDDIT_CLIENT_SECRET', 'REDDIT_USERNAME', 'REDDIT_PASSWORD'], {}), '(REDDIT_CLIENT_ID, REDDIT_CLIENT_SECRET, REDDIT_USERNAME, REDDIT_PASSWORD)\n', (443, 517), False, 'from utils.api import API\n'), ((782, 790), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (787, 790), False, 'from time import sleep\n'), ((1788, 1800), 'time.sleep', 'sleep', (['(21600)'], {}), '(21600)\n', (1793, 1800), False, 'from time import sleep\n'), ((1671, 1681), 'time.sleep', 'sleep', (['(500)'], {}), '(500)\n', (1676, 1681), False, 'from time import sleep\n'), ((1369, 1393), 'random.randint', 'random.randint', (['(300)', '(600)'], {}), '(300, 600)\n', (1383, 1393), False, 'import random\n'), ((1575, 1592), 'time.sleep', 'sleep', (['rndm_sleep'], {}), '(rndm_sleep)\n', (1580, 1592), False, 'from time import sleep\n'), ((1721, 1735), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1733, 1735), False, 'from datetime import datetime\n')]
# -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # https://doc.scrapy.org/en/latest/topics/items.html import scrapy from scrapy.loader.processors import TakeFirst, Join, MapCompose from datetime import datetime, timedelta def comments_strip(string,loader_context): lang = loader_context['lang'] if lang == 'it': if string[0].rfind('Commenta') != -1: return else: return string[0].rstrip(' commenti') elif lang == 'en': if(string[0] == 'Share'): return '0' new_string = string[0].rstrip(' Comments') while new_string.rfind(',') != -1: new_string = new_string[0:new_string.rfind(',')] + new_string[new_string.rfind(',')+1:] return new_string else: return string def reactions_strip(string,loader_context): lang = loader_context['lang'] if lang == 'it': newstring = string[0] #19.298.873 if len(newstring.split()) == 1: while newstring.rfind('.') != -1: newstring = newstring[0:newstring.rfind('.')] + newstring[newstring.rfind('.')+1:] return newstring #Pamela, Luigi e altri 4 else: return string friends = newstring.count(' e ') + newstring.count(',') newstring = newstring.split()[::-1][0] while newstring.rfind('.') != -1: newstring = newstring[0:newstring.rfind('.')] + newstring[newstring.rfind('.')+1:] return int(newstring) + friends elif lang == 'en': newstring = string[0] #19,298,873 if len(newstring.split()) == 1: while newstring.rfind(',') != -1: newstring = newstring[0:newstring.rfind(',')] + newstring[newstring.rfind(',')+1:] return newstring # #Mark and other 254,134 # elif newstring.split()[::-1][1].isdigit(): # friends = newstring.count(' and ') + newstring.count(',') # newstring = newstring.split()[::-1][1] # while newstring.rfind(',') != -1: # newstring = newstring[0:newstring.rfind(',')] + newstring[newstring.rfind(',')+1:] # return int(newstring) + friends # #Philip and 1K others else: return newstring else: return string def url_strip(url): fullurl = url[0] #catchin '&id=' is enough to identify the post i = fullurl.find('&id=') if i != -1: return fullurl[:i+4] + fullurl[i+4:].split('&')[0] else: #catch photos i = fullurl.find('/photos/') if i != -1: return fullurl[:i+8] + fullurl[i+8:].split('/?')[0] else: #catch albums i = fullurl.find('/albums/') if i != -1: return fullurl[:i+8] + fullurl[i+8:].split('/?')[0] else: return fullurl def parse_date(date): import json d = json.loads(date[0]) #nested dict of features flat_d = dict() #only retain 'leaves' of d tree def recursive_items(dictionary): ''' Get most nested key:value pair of nested dict ''' for key, value in dictionary.items(): if type(value) is dict: yield from recursive_items(value) else: yield (key, value) for key, value in recursive_items(d): flat_d[key] = value #returns timestamp in localtime conversion from linux timestamp UTC return str(datetime.fromtimestamp(flat_d['publish_time'])) def id_strip(post_id): import json d = json.loads(post_id[::-1][0]) #nested dict of features return str(d['top_level_post_id']) class FbcrawlItem(scrapy.Item): source = scrapy.Field() date = scrapy.Field() text = scrapy.Field( output_processor=Join(separator=u'') ) # full text of the post comments = scrapy.Field( output_processor=comments_strip ) reactions = scrapy.Field( output_processor=reactions_strip ) # num of reactions likes = scrapy.Field( output_processor=reactions_strip ) ahah = scrapy.Field() love = scrapy.Field() wow = scrapy.Field() sigh = scrapy.Field() grrr = scrapy.Field() share = scrapy.Field() # num of shares url = scrapy.Field( output_processor=url_strip ) post_id = scrapy.Field( output_processor=id_strip ) shared_from = scrapy.Field() class CommentsItem(scrapy.Item): source = scrapy.Field() reply_to=scrapy.Field() date = scrapy.Field( # when was the post published output_processor=parse_date ) text = scrapy.Field( output_processor=Join(separator=u'') ) # full text of the post reactions = scrapy.Field( output_processor=reactions_strip ) # num of reactions likes = scrapy.Field( output_processor=reactions_strip ) source_url = scrapy.Field() url = scrapy.Field() #ahah = scrapy.Field() #love = scrapy.Field() #wow = scrapy.Field() #sigh = scrapy.Field() #grrr = scrapy.Field() #share = scrapy.Field() # num of shares
[ "scrapy.loader.processors.Join", "scrapy.Field", "json.loads", "datetime.datetime.fromtimestamp" ]
[((3018, 3037), 'json.loads', 'json.loads', (['date[0]'], {}), '(date[0])\n', (3028, 3037), False, 'import json\n'), ((3681, 3709), 'json.loads', 'json.loads', (['post_id[::-1][0]'], {}), '(post_id[::-1][0])\n', (3691, 3709), False, 'import json\n'), ((3825, 3839), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (3837, 3839), False, 'import scrapy\n'), ((3854, 3868), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (3866, 3868), False, 'import scrapy\n'), ((4013, 4058), 'scrapy.Field', 'scrapy.Field', ([], {'output_processor': 'comments_strip'}), '(output_processor=comments_strip)\n', (4025, 4058), False, 'import scrapy\n'), ((4128, 4174), 'scrapy.Field', 'scrapy.Field', ([], {'output_processor': 'reactions_strip'}), '(output_processor=reactions_strip)\n', (4140, 4174), False, 'import scrapy\n'), ((4237, 4283), 'scrapy.Field', 'scrapy.Field', ([], {'output_processor': 'reactions_strip'}), '(output_processor=reactions_strip)\n', (4249, 4283), False, 'import scrapy\n'), ((4331, 4345), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (4343, 4345), False, 'import scrapy\n'), ((4379, 4393), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (4391, 4393), False, 'import scrapy\n'), ((4426, 4440), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (4438, 4440), False, 'import scrapy\n'), ((4474, 4488), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (4486, 4488), False, 'import scrapy\n'), ((4522, 4536), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (4534, 4536), False, 'import scrapy\n'), ((4571, 4585), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (4583, 4585), False, 'import scrapy\n'), ((4633, 4673), 'scrapy.Field', 'scrapy.Field', ([], {'output_processor': 'url_strip'}), '(output_processor=url_strip)\n', (4645, 4673), False, 'import scrapy\n'), ((4702, 4741), 'scrapy.Field', 'scrapy.Field', ([], {'output_processor': 'id_strip'}), '(output_processor=id_strip)\n', (4714, 4741), False, 'import scrapy\n'), ((4774, 4788), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (4786, 4788), False, 'import scrapy\n'), ((4836, 4850), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (4848, 4850), False, 'import scrapy\n'), ((4867, 4881), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (4879, 4881), False, 'import scrapy\n'), ((4893, 4934), 'scrapy.Field', 'scrapy.Field', ([], {'output_processor': 'parse_date'}), '(output_processor=parse_date)\n', (4905, 4934), False, 'import scrapy\n'), ((5129, 5175), 'scrapy.Field', 'scrapy.Field', ([], {'output_processor': 'reactions_strip'}), '(output_processor=reactions_strip)\n', (5141, 5175), False, 'import scrapy\n'), ((5238, 5284), 'scrapy.Field', 'scrapy.Field', ([], {'output_processor': 'reactions_strip'}), '(output_processor=reactions_strip)\n', (5250, 5284), False, 'import scrapy\n'), ((5338, 5352), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (5350, 5352), False, 'import scrapy\n'), ((5385, 5399), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (5397, 5399), False, 'import scrapy\n'), ((3579, 3625), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (["flat_d['publish_time']"], {}), "(flat_d['publish_time'])\n", (3601, 3625), False, 'from datetime import datetime, timedelta\n'), ((3926, 3945), 'scrapy.loader.processors.Join', 'Join', ([], {'separator': 'u""""""'}), "(separator=u'')\n", (3930, 3945), False, 'from scrapy.loader.processors import TakeFirst, Join, MapCompose\n'), ((5041, 5060), 'scrapy.loader.processors.Join', 'Join', ([], {'separator': 'u""""""'}), "(separator=u'')\n", (5045, 5060), False, 'from scrapy.loader.processors import TakeFirst, Join, MapCompose\n')]
#!/usr/bin/env python3 import os, sys, subprocess, socket #import cgroups def start_mjpg_streamer(): print("Starting up mjpg_streamer.") # TODO: Add notification if either mjpg-streamer or # cvfilter_py.so aren't installed # TODO: Detect any error if process exits, # such as the uvcvideo crash I'm seeing subprocess.run(["mjpg_streamer", "-i", "input_opencv.so -r 640x480 --filter /usr/lib/mjpg-streamer/cvfilter_py.so --fargs " + os.path.realpath(__file__), "-o", "output_http.so -p 8090 -w /usr/share/mjpg-streamer/www"], stdin=subprocess.PIPE #, stdout=subprocess.PIPE #Commented to allow visibility of #, stderr=subprocess.PIPE #responses from the system on commandline ) if __name__ == "__main__": start_mjpg_streamer() # This method is called by the mjpg_streamer command run above. # This is what calls and executes the running code def init_filter(): ## Socket streams that were here previously are ## now moved to multiple sockets where they are used. import line_follower dc = dummy_car_control() f = line_follower.mjs_filter(dc) print("Returning process") return f.process # This class houses the car_control class class dummy_car_control(): def __init__(self): ## Commented per jkridner's advice import car_control self.c = car_control.car_control() #Output for the status in update method below self.status_port = 3004 self.status_out = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self.status_out.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) self.status_out.connect(("", self.status_port)) # This filehandle sends the data to the socket broadcast self.status_file = self.status_out.makefile('w', buffering=None) def tick(self): self.c.tick() return def update(self, line, threshold): (self.paused, self.throttle, self.steering, self.fps) = \ self.c.update(line) # Code has been reworked to output to a separate filehandle pointing # to the socket 3004, output to the dashboard under 'Status' # Replaced the Status output below to be a JSON string stri = "{" if self.paused: stri += '"Status":"Paused"' else: stri += '"Status":"Unpaused"' if line: stri += ', "Line_X":' + str(line[2]) + ', "Line_Y":' + str(line[3]) else: stri += ', "Line_X":"No Line", "Line_Y":"No Line"' stri += ',"Throttle":' + str(self.throttle) + ',"Steering":' + \ str(self.steering) stri += ',"FPS":' + str(self.fps) + ',"Min_Threshold":' + \ str(threshold) + '}' print(stri, "\r", end="", flush=True, file=self.status_file) return ""
[ "line_follower.mjs_filter", "os.path.realpath", "socket.socket", "car_control.car_control" ]
[((1140, 1168), 'line_follower.mjs_filter', 'line_follower.mjs_filter', (['dc'], {}), '(dc)\n', (1164, 1168), False, 'import line_follower\n'), ((1402, 1427), 'car_control.car_control', 'car_control.car_control', ([], {}), '()\n', (1425, 1427), False, 'import car_control\n'), ((1549, 1597), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (1562, 1597), False, 'import os, sys, subprocess, socket\n'), ((478, 504), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (494, 504), False, 'import os, sys, subprocess, socket\n')]
import os import json from flask import Flask from tqdm import tqdm from ..validator.schema_validator import Validator from ..utils.paths import get_path app = Flask(__name__) # Function to accept user-generated feedstock # Args: # path: Path to the feedstock # remove_old: Should fully accepted feedstock be removed? Default False. # verbose: Should status messages be printed? Default False. def accept_feedstock(path, remove_old=False, verbose=False): removed = False with open(path) as feedstock: val = Validator(json.loads(feedstock.readline())) for line in tqdm(feedstock, desc="Accepting " + os.path.basename(path), disable= not verbose): res = val.write_record(json.loads(line)) if not res["success"]: if not val.cancel_validation()["success"]: print("ERROR: Validation not cancelled. Feedstock may not have been removed.") raise ValueError(result["message"] + "\n" + result.get("details")) if remove_old: os.remove(path) removed = True return { "success": True, "source_deleted": removed } # Function to accept all feedstock in a directory # Args: # path: Path to the feedstock directory # remove_old: Should fully accepted feedstock be removed? Default False. # verbose: Should status messages be printed? Default False. def accept_all(path=None, remove_old=False, verbose=False): if not path: path = get_path(__file__, "submissions") if verbose: print("Accepting all feedstock from '", path, "'", sep="") removed = [] count = 0 for feedstock in tqdm(os.listdir(path), desc="Accepting feedstock", disable= not verbose): # Must be actual feedstock if feedstock.endswith("_all.json"): result = accept_feedstock(os.path.join(path, feedstock), remove_old=remove_old, verbose=verbose) count += 1 if result["source_deleted"]: removed.append(feedstock) if verbose: print("Accepted", count, "total feedstock files") return { "success": True, "removed": removed, "total": count }
[ "os.remove", "json.loads", "os.path.basename", "flask.Flask", "os.path.join", "os.listdir" ]
[((163, 178), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (168, 178), False, 'from flask import Flask\n'), ((1036, 1051), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (1045, 1051), False, 'import os\n'), ((1664, 1680), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (1674, 1680), False, 'import os\n'), ((715, 731), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (725, 731), False, 'import json\n'), ((1850, 1879), 'os.path.join', 'os.path.join', (['path', 'feedstock'], {}), '(path, feedstock)\n', (1862, 1879), False, 'import os\n'), ((633, 655), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (649, 655), False, 'import os\n')]
import os os.environ['MANTLE'] = 'lattice' from magma import * from mantle import And, XOr from simulator import testvectors main = DefineCircuit('main', "a", In(Bit), "b", In(Bit), "c", In(Bit), "d", Out(Bit), 'CLK', In(Bit)) t = And(2)(main.a,main.b) d = XOr(2)(t,main.c) wire(d,main.d) EndCircuit() print(testvectors(main))
[ "simulator.testvectors", "mantle.XOr", "mantle.And" ]
[((233, 239), 'mantle.And', 'And', (['(2)'], {}), '(2)\n', (236, 239), False, 'from mantle import And, XOr\n'), ((259, 265), 'mantle.XOr', 'XOr', (['(2)'], {}), '(2)\n', (262, 265), False, 'from mantle import And, XOr\n'), ((311, 328), 'simulator.testvectors', 'testvectors', (['main'], {}), '(main)\n', (322, 328), False, 'from simulator import testvectors\n')]
from __future__ import absolute_import # -------------------------------------------------------- # Spatial Attention Network withFeature Mimicking # Copyright (c) 2018 University of Illinois # Licensed under The MIT License [see LICENSE for details] # -------------------------------------------------------- # -------------------------------------------------------- # Reorganized and modified Modified by <NAME> # ------------------------------------------------------- import torch import torch.nn as nn import numpy as np import math import yaml from model.utils.config import cfg from model.rpn.generate_anchors import generate_anchors # from model.rpn.bbox_transform import bbox_transform_inv, clip_boxes, clip_boxes_batch from .bbox.bbox_transform import bbox_pred, clip_boxes, bbox_overlaps # from model.nms.nms_wrapper import nms from model.roi_layers import nms import pdb DEBUG = False class _DCRProposalLayer(nn.Module): def __init__(self, class_agnostic): super(_DCRProposalLayer, self).__init__() self.class_agnostic = class_agnostic self._top = cfg.DCR.TOP def forward(self, rois, cls_prob, bbox_pred_tensor, im_info): num_keep_index = int(rois.shape[0] * self._top) rois = rois[0].cpu().detach().numpy()[:, 1:] bbox_deltas = bbox_pred_tensor.cpu().detach().numpy()[:, 4:8] im_info = im_info.cpu().detach().numpy()[0, :] cls_prob = cls_prob.cpu().detach().numpy()[:, 1:] # ignore bg # sort scores max_scores = np.amax(cls_prob, axis=1) # keep top scores keep_index = np.argsort(-max_scores)[:num_keep_index] proposals = bbox_pred(rois, bbox_deltas) proposals = clip_boxes(proposals, im_info[:2]) batch_inds = np.zeros((proposals.shape[0], 1), dtype=np.float32) blob = np.hstack((batch_inds, proposals.astype(np.float32, copy=False))) return blob[keep_index, :], keep_index def backward(self, req, out_grad, in_data, out_data, in_grad, aux): pass def reshape(self, bottom, top): """Reshaping happens during the call to forward.""" pass
[ "numpy.amax", "numpy.argsort", "numpy.zeros" ]
[((1534, 1559), 'numpy.amax', 'np.amax', (['cls_prob'], {'axis': '(1)'}), '(cls_prob, axis=1)\n', (1541, 1559), True, 'import numpy as np\n'), ((1775, 1826), 'numpy.zeros', 'np.zeros', (['(proposals.shape[0], 1)'], {'dtype': 'np.float32'}), '((proposals.shape[0], 1), dtype=np.float32)\n', (1783, 1826), True, 'import numpy as np\n'), ((1607, 1630), 'numpy.argsort', 'np.argsort', (['(-max_scores)'], {}), '(-max_scores)\n', (1617, 1630), True, 'import numpy as np\n')]