repo_name (string, lengths 7-94) | repo_path (string, lengths 4-237) | repo_head_hexsha (string, lengths 40-40) | content (string, lengths 10-680k) | apis (string, lengths 2-680k)
---|---|---|---|---|
Aerdan/adventcode-2020 | python/day5-1.py | 83120aa8c7fc9d1f2d34780610401e3c6d4f583b | #!/usr/bin/env python3
def binary(code, max, bits):
ret = []
for i in range(max):
ret.append(bits[code[i]])
return int(''.join(ret), base=2)
mid = 0
with open('input5.txt') as f:
for line in f.readlines():
line = line[:-1]
row = binary(line[:7], 7, {'F': '0', 'B': '1'})
col = binary(line[7:], 3, {'R': '1', 'L': '0'})
sid = row * 8 + col
mid = sid if sid > mid else mid
print(mid)
| [] |
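The decoder above treats F/B and L/R as binary digits and recombines row and column with `row * 8 + col`. That is equivalent to reading the whole 10-character pass as one binary number; a minimal standalone sketch (the example pass and its expected ID of 357 are the worked example from the Advent of Code 2020 day 5 statement):

```python
def seat_id(boarding_pass: str) -> int:
    # F/L -> 0, B/R -> 1, then read the whole string as a single binary number
    bits = boarding_pass.translate(str.maketrans("FBLR", "0101"))
    return int(bits, base=2)

assert seat_id("FBFBBFFRLR") == 357  # row 44, column 5, so 44 * 8 + 5
```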
baramsalem/Custom-stocks-py | custom_stocks_py/base.py | 5beeb7b6f93755ec7c00c25763accf6a52f8bbaf | """
custom_stocks_py base module.
This is the principal module of the custom_stocks_py project.
Here you put your main classes and objects.
Be creative! Do whatever you want!
If you want to replace this with a Flask application run:
$ make init
and then choose `flask` as template.
"""
class BaseClass:
def base_method(self) -> str:
"""
Base method.
"""
return "hello from BaseClass"
def __call__(self) -> str:
return self.base_method()
def base_function() -> str:
"""
Base function.
"""
return "hello from base function"
| [] |
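A hypothetical usage sketch for the template module above; the import path is inferred from the file's repo_path, and everything else comes straight from the names defined in the file:

```python
from custom_stocks_py.base import BaseClass, base_function

instance = BaseClass()
print(instance())        # "hello from BaseClass", since __call__ delegates to base_method()
print(base_function())   # "hello from base function"
```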
dpmkl/heimdall | dummy_server.py | 184f169f0be9f6b6b708364725f5db8b1f249d9c | #!/usr/bin/env python
import SimpleHTTPServer
import SocketServer
import logging
PORT = 8000
class GetHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-type','text/html')
self.end_headers()
self.wfile.write("Hello World ! '{}'".format(self.path))
return
for i in range(4):
Handler = GetHandler
httpd = SocketServer.TCPServer(("", PORT + i), Handler)
httpd.serve_forever() | [((428, 475), 'SocketServer.TCPServer', 'SocketServer.TCPServer', (["('', PORT + i)", 'Handler'], {}), "(('', PORT + i), Handler)\n", (450, 475), False, 'import SocketServer\n')] |
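dummy_server.py above is Python 2 (SimpleHTTPServer/SocketServer), and because serve_forever() blocks, the loop as written only ever serves port 8000. A rough Python 3 sketch of the presumably intended behaviour, serving all four ports at once; running each server in a thread is my assumption, not something the original does:

```python
import threading
from http.server import BaseHTTPRequestHandler, HTTPServer

PORT = 8000

class GetHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        # http.server writes bytes, so encode the echoed path
        self.wfile.write("Hello World ! '{}'".format(self.path).encode('utf-8'))

servers = [HTTPServer(("", PORT + i), GetHandler) for i in range(4)]
for httpd in servers:
    threading.Thread(target=httpd.serve_forever, daemon=True).start()
threading.Event().wait()  # keep the main thread alive so the daemon threads keep serving
```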
nalu-svk/cimsparql | cimsparql/__init__.py | e69b0799a2bbd70027e2c8bb9970574991597ca5 | """Library for CIM sparql queries"""
__version__ = "1.9.0"
| [] |
jmviz/xd | scripts/49-cat-logs.py | f905e5c61b2835073b19cc3fa0d6917432fa7ece | #!/usr/bin/env python3
# Usage:
# $0 -o log.txt products/
#
# concatenates .log files (even those in subdirs or .zip) and combines into a single combined.log
from xdfile.utils import find_files_with_time, open_output, get_args
import boto3
# from boto.s3.connection import S3Connection
import os
def main():
args = get_args('aggregates all .log files')
outf = open_output()
s3 = boto3.resource('s3')
s3path = "logs/"
# bucket = conn.get_bucket(s3path)
bucket = s3.Bucket(os.environ['DOMAIN'])
for obj in sorted(bucket.objects.all(), key=lambda x: x.last_modified):
# last_modified
if s3path in obj.key:
print("Name: %s LastModified:%s" % (obj.key.encode('utf-8'), obj.last_modified))
for fn, contents, dt in sorted(find_files_with_time(*args.inputs, ext=".log"), key=lambda x: x[2]): # earliest first
outf.write_file(fn, contents.decode("utf-8"))
main()
| [((326, 363), 'xdfile.utils.get_args', 'get_args', (['"""aggregates all .log files"""'], {}), "('aggregates all .log files')\n", (334, 363), False, 'from xdfile.utils import find_files_with_time, open_output, get_args\n'), ((375, 388), 'xdfile.utils.open_output', 'open_output', ([], {}), '()\n', (386, 388), False, 'from xdfile.utils import find_files_with_time, open_output, get_args\n'), ((399, 419), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {}), "('s3')\n", (413, 419), False, 'import boto3\n'), ((785, 831), 'xdfile.utils.find_files_with_time', 'find_files_with_time', (['*args.inputs'], {'ext': '""".log"""'}), "(*args.inputs, ext='.log')\n", (805, 831), False, 'from xdfile.utils import find_files_with_time, open_output, get_args\n')] |
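The aggregation script above leans on xdfile.utils helpers; the S3 part alone can be reproduced with plain boto3. A read-only sketch, assuming the same DOMAIN environment variable names the bucket and that filtering on the "logs/" prefix is what the `s3path in obj.key` check is really after:

```python
import os
import boto3

s3 = boto3.resource('s3')
bucket = s3.Bucket(os.environ['DOMAIN'])

# Filter server-side on the prefix instead of checking membership client-side
for obj in sorted(bucket.objects.filter(Prefix="logs/"), key=lambda o: o.last_modified):
    print(obj.key, obj.last_modified)
```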
wuyang1002431655/tango_with_django_19 | manuscript/link_checker.py | 42d5878e4a12037daf04d785826357cd4351a16d | # Checks for broken links in the book chapters, printing the status of each link found to stdout.
# The Python package 'requests' must be installed and available for this simple module to work.
# Author: David Maxwell
# Date: 2017-02-14
import re
import requests
def main(chapters_list_filename, hide_success=True):
"""
hide_success = a boolean switch that determines whether to show URLs that return a HTTP 200.
If set to true, only URLs that fail will be printed.
"""
chapters_f = open(chapters_list_filename, 'r')
pattern = re.compile(r'\[([^]]+)]\(\s*(http[s]?://[^)]+)\s*\)') # http://stackoverflow.com/a/23395483
print 'filename\tline_no\ttitle\turl\tstatus_code'
for filename in chapters_f:
filename = filename.strip()
if not filename or filename.startswith('{'): # Skip non-filename lines
continue
chapter_f = open(filename, 'r')
line_no = 1
for line in chapter_f:
line = line.strip()
for match in re.findall(pattern, line):
title = match[0]
url = match[1]
if '127.0.0.1' in url or 'localhost' in url: # Don't check localhost URLs
continue
request = None
status_code = -1
try:
request = requests.get(url)
status_code = request.status_code
except requests.exceptions.ConnectionError:
request = None
status_code = 'FAILED_TO_CONNECT'
if hide_success and status_code == 200:
continue
title = title.replace('\t', ' ')
print '{filename}\t{line_no}\t{title}\t{url}\t{status_code}'.format(filename=filename,
line_no=line_no,
title=title,
url=url,
status_code=status_code)
line_no = line_no + 1
chapter_f.close()
chapters_f.close()
if __name__ == '__main__':
main('Book.txt', hide_success=False) | [] |
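link_checker.py is Python 2 (bare print statements). A small Python 3 sketch of its core step, reusing the same markdown-link regex; the sample line and the 10-second timeout are illustrative additions:

```python
import re
import requests

pattern = re.compile(r'\[([^]]+)]\(\s*(http[s]?://[^)]+)\s*\)')
line = "Read [the Django docs](https://docs.djangoproject.com/) before starting."

for title, url in re.findall(pattern, line):
    try:
        status_code = requests.get(url, timeout=10).status_code
    except requests.exceptions.ConnectionError:
        status_code = 'FAILED_TO_CONNECT'
    print('{title}\t{url}\t{status_code}'.format(title=title, url=url, status_code=status_code))
```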
2890841438/fast-index.py | service/__init__.py | fa59f38ed009b4bdf5dbf27d8619d31f8b681118 | # -*- coding = utf-8 -*-
# @Time: 2020/9/4 18:52
# @Author: dimples_yj
# @File: __init__.py.py
# @Software: PyCharm
| [] |
HermannLiang/CLIP-ViL | CLIP-ViL-Direct/vqa/pythia_clip_grid_feature.py | 49c28bc5ece1aacfcbfd9c8810db70663ca0516a | #!/usr/bin/env python3
"""
Grid features extraction script.
"""
import argparse
import os
import torch
import tqdm
from fvcore.common.file_io import PathManager
from detectron2.checkpoint import DetectionCheckpointer
from detectron2.config import get_cfg
from detectron2.engine import default_setup
from detectron2.evaluation import inference_context
from detectron2.modeling import build_model
import numpy as np
from clip.clip import load
import torch.nn as nn
from torchvision.transforms import Compose, Resize, CenterCrop, ToTensor, Normalize
from grid_feats import (
add_attribute_config,
build_detection_test_loader_with_attributes,
)
# from timm.models.vision_transformer import resize_pos_embed
# A simple mapper from object detection dataset to VQA dataset names
dataset_to_folder_mapper = {}
dataset_to_folder_mapper['coco_2014_train'] = 'train2014'
dataset_to_folder_mapper['coco_2014_val'] = 'val2014'
#dataset_to_folder_mapper['coco_2014_val'] = 'trainval2014'
#dataset_to_folder_mapper['coco_2014_train'] = 'trainval2014'
# One may need to change the Detectron2 code to support coco_2015_test
# insert "coco_2015_test": ("coco/test2015", "coco/annotations/image_info_test2015.json"),
# at: https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/datasets/builtin.py#L36
dataset_to_folder_mapper['coco_2015_test'] = 'test2015'
dataset_to_folder_mapper['coco_2015_test-dev'] = 'test-dev2015'
def extract_grid_feature_argument_parser():
parser = argparse.ArgumentParser(description="Grid feature extraction")
parser.add_argument("--config-file", default="", metavar="FILE", help="path to config file")
parser.add_argument("--dataset", help="name of the dataset", default="coco_2014_train",
choices=['coco_2014_train', 'coco_2014_val', 'coco_2015_test', 'coco_2015_test-dev'])
parser.add_argument('--model_type', default='RN50', type=str, help='RN50, RN101, RN50x4, ViT-B/32, vit_base_patch32_224_in21k')
parser.add_argument(
"opts",
help="Modify config options using the command-line",
default=None,
nargs=argparse.REMAINDER,
)
return parser
def extract_grid_feature_on_dataset(model, data_loader, dump_folder):
for idx, inputs in enumerate(tqdm.tqdm(data_loader)):
with torch.no_grad():
image_id = inputs[0]['image_id']
file_name = '%d.pth' % image_id
# compute features
images = model.preprocess_image(inputs)
features = model.backbone(images.tensor)
outputs = model.roi_heads.get_conv5_features(features)
# modify the filename
file_name = inputs[0]['file_name'].split("/")[-1].replace("jpg", "npy")
outputs = outputs.permute(0, 2, 3, 1)
exit()
with PathManager.open(os.path.join(dump_folder, file_name), "wb") as f:
np.save(f, outputs.cpu().numpy())
def do_feature_extraction(cfg, model, dataset_name, args):
with inference_context(model):
dump_folder = os.path.join(cfg.OUTPUT_DIR, "features", dataset_to_folder_mapper[dataset_name])
PathManager.mkdirs(dump_folder)
data_loader = build_detection_test_loader_with_attributes(cfg, dataset_name, model_type='clip')
extract_clip_feature_on_dataset(model, data_loader, dump_folder, args)
def setup(args):
"""
Create configs and perform basic setups.
"""
cfg = get_cfg()
add_attribute_config(cfg)
cfg.merge_from_file(args.config_file)
cfg.merge_from_list(args.opts)
# force the final residual block to have dilations 1
cfg.MODEL.RESNETS.RES5_DILATION = 1
cfg.freeze()
default_setup(cfg, args)
return cfg
def extract_clip_feature_on_dataset(model, data_loader, dump_folder, args):
save_args = argparse.Namespace(**vars(args))  # shallow copy so the parsed CLI args stay untouched
save_args.model_type = args.model_type.split("-")[0]
mean = torch.Tensor([0.48145466, 0.4578275, 0.40821073]).to("cuda").reshape(3, 1, 1)
std = torch.Tensor([0.26862954, 0.26130258, 0.27577711]).to("cuda").reshape(3, 1, 1)
dump_folder = f"clip/{save_args.model_type}/" + dump_folder.split("/")[-1]
if args.model_type == "ViT-B/32":
num_patches = 558 #600 * 1000 // 32 // 32
print(num_patches)
pos_embed = nn.Parameter(torch.zeros(num_patches + 1, 768, device='cuda'),)
pos_embed.weight = resize_pos_embed(model.visual.positional_embedding.unsqueeze(0), pos_embed.unsqueeze(0))
model.visual.positional_embedding = pos_embed
print(model.visual.positional_embedding.device)
# pass
dump_folder.replace( "rscratch", "dnn" )
dump_folder = "/dnn/sheng.s/clip_boi/grid-feats-vqa/" + dump_folder
if not os.path.exists(dump_folder):
os.makedirs(dump_folder)
for idx, inputs in enumerate(tqdm.tqdm(data_loader)):
with torch.no_grad():
image_id = inputs[0]['image_id']
file_name = '%d.pth' % image_id
# compute features
image = inputs[0]['image'].to("cuda").float() / 255.0
image = (image - mean) / std
image = image.unsqueeze(0)
outputs = model.encode_image(image)
if "RN" in args.model_type:
outputs = outputs.permute(0, 2, 3, 1)
else:
outputs = outputs[:, :, :].reshape(1, 13, 43, 768)
with PathManager.open(os.path.join(dump_folder, file_name), "wb") as f:
# save as CPU tensors
torch.save(outputs.cpu(), f)
def main(args):
cfg = setup(args)
model, transform = load(args.model_type, jit=False)
do_feature_extraction(cfg, model, args.dataset, args)
if __name__ == "__main__":
args = extract_grid_feature_argument_parser().parse_args()
print("Command Line Args:", args)
main(args)
| [((1494, 1556), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Grid feature extraction"""'}), "(description='Grid feature extraction')\n", (1517, 1556), False, 'import argparse\n'), ((3454, 3463), 'detectron2.config.get_cfg', 'get_cfg', ([], {}), '()\n', (3461, 3463), False, 'from detectron2.config import get_cfg\n'), ((3468, 3493), 'grid_feats.add_attribute_config', 'add_attribute_config', (['cfg'], {}), '(cfg)\n', (3488, 3493), False, 'from grid_feats import add_attribute_config, build_detection_test_loader_with_attributes\n'), ((3689, 3713), 'detectron2.engine.default_setup', 'default_setup', (['cfg', 'args'], {}), '(cfg, args)\n', (3702, 3713), False, 'from detectron2.engine import default_setup\n'), ((5580, 5612), 'clip.clip.load', 'load', (['args.model_type'], {'jit': '(False)'}), '(args.model_type, jit=False)\n', (5584, 5612), False, 'from clip.clip import load\n'), ((2275, 2297), 'tqdm.tqdm', 'tqdm.tqdm', (['data_loader'], {}), '(data_loader)\n', (2284, 2297), False, 'import tqdm\n'), ((3013, 3037), 'detectron2.evaluation.inference_context', 'inference_context', (['model'], {}), '(model)\n', (3030, 3037), False, 'from detectron2.evaluation import inference_context\n'), ((3061, 3146), 'os.path.join', 'os.path.join', (['cfg.OUTPUT_DIR', '"""features"""', 'dataset_to_folder_mapper[dataset_name]'], {}), "(cfg.OUTPUT_DIR, 'features', dataset_to_folder_mapper[dataset_name]\n )\n", (3073, 3146), False, 'import os\n'), ((3150, 3181), 'fvcore.common.file_io.PathManager.mkdirs', 'PathManager.mkdirs', (['dump_folder'], {}), '(dump_folder)\n', (3168, 3181), False, 'from fvcore.common.file_io import PathManager\n'), ((3204, 3290), 'grid_feats.build_detection_test_loader_with_attributes', 'build_detection_test_loader_with_attributes', (['cfg', 'dataset_name'], {'model_type': '"""clip"""'}), "(cfg, dataset_name, model_type=\n 'clip')\n", (3247, 3290), False, 'from grid_feats import add_attribute_config, build_detection_test_loader_with_attributes\n'), ((4690, 4717), 'os.path.exists', 'os.path.exists', (['dump_folder'], {}), '(dump_folder)\n', (4704, 4717), False, 'import os\n'), ((4727, 4751), 'os.makedirs', 'os.makedirs', (['dump_folder'], {}), '(dump_folder)\n', (4738, 4751), False, 'import os\n'), ((4785, 4807), 'tqdm.tqdm', 'tqdm.tqdm', (['data_loader'], {}), '(data_loader)\n', (4794, 4807), False, 'import tqdm\n'), ((2313, 2328), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2326, 2328), False, 'import torch\n'), ((4269, 4317), 'torch.zeros', 'torch.zeros', (['(num_patches + 1)', '(768)'], {'device': '"""cuda"""'}), "(num_patches + 1, 768, device='cuda')\n", (4280, 4317), False, 'import torch\n'), ((4823, 4838), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4836, 4838), False, 'import torch\n'), ((2844, 2880), 'os.path.join', 'os.path.join', (['dump_folder', 'file_name'], {}), '(dump_folder, file_name)\n', (2856, 2880), False, 'import os\n'), ((3874, 3923), 'torch.Tensor', 'torch.Tensor', (['[0.48145466, 0.4578275, 0.40821073]'], {}), '([0.48145466, 0.4578275, 0.40821073])\n', (3886, 3923), False, 'import torch\n'), ((3962, 4012), 'torch.Tensor', 'torch.Tensor', (['[0.26862954, 0.26130258, 0.27577711]'], {}), '([0.26862954, 0.26130258, 0.27577711])\n', (3974, 4012), False, 'import torch\n'), ((5383, 5419), 'os.path.join', 'os.path.join', (['dump_folder', 'file_name'], {}), '(dump_folder, file_name)\n', (5395, 5419), False, 'import os\n')] |
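A standalone sketch of just the per-image preprocessing that extract_clip_feature_on_dataset does by hand (scale to [0, 1], normalize with the CLIP mean/std used in the script, add a batch dimension). The random tensor stands in for inputs[0]['image'] so this runs on CPU without Detectron2, a GPU, or the dataset:

```python
import torch

mean = torch.tensor([0.48145466, 0.4578275, 0.40821073]).reshape(3, 1, 1)
std = torch.tensor([0.26862954, 0.26130258, 0.27577711]).reshape(3, 1, 1)

image = torch.randint(0, 256, (3, 360, 480)).float() / 255.0  # stand-in for a decoded RGB frame
image = (image - mean) / std
image = image.unsqueeze(0)   # encode_image expects a batched tensor
print(image.shape)           # torch.Size([1, 3, 360, 480])
```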
aerendon/blockchain-basics | src/node.py | e3168afd097b26d23a09fd30e74e07b695e577d1 | from flask import Flask, request
import time
import requests
import json
from blockchain import Blockchain
from block import Block
app = Flask(__name__)
blockchain = Blockchain()
peers = set()
@app.route('/add_nodes', methods=['POST'])
def register_new_peers():
nodes = request.get_json()
if not nodes:
return "Invalid data", 400
for node in nodes:
peers.add(node)
return "Success", 201
@app.route('/new_transaction', methods=['POST'])
def new_transaction():
tx_data = request.get_json()
required_fields = ["author", "content"]
for field in required_fields:
if not tx_data.get(field):
return "Invalid transaction data", 404
tx_data["timestamp"] = time.time()
blockchain.add_new_transaction(tx_data)
return "Sucess", 201
@app.route('/chain', methods=['GET'])
def get_chain():
chain_data = []
for block in blockchain.chain:
chain_data.append(block.__dict__)
return json.dumps({ "length": len(chain_data), "chain": chain_data })
@app.route('/mine', methods=['GET'])
def mine_unconfirmed_transactions():
result = blockchain.mine()
if not result:
return "No transactions to mine"
return "Block #{} is mined.".format(result)
@app.route('/pending_tx')
def get_pending_tx():
return json.dumps(blockchain.unconfirmed_transactions)
def consensus():
global blockchain
longest_chain = None
current_len = len(blockchain)
for node in peers:
response = requests.get('http://{}/chain'.format(node))
length = response.json()['length']
chain = response.json()['chain']
if length > current_len and blockchain.check_chain_validity(chain):
current_len = length
longest_chain = chain
if longest_chain:
blockchain = longest_chain
return True
return False
@app.route('/add_block', methods=['POST'])
def validate_and_add_block():
block_data = request.get_json()
block = Block(block_data["index"], block_data["transactions"], block_data["timestamp"], block_data["previous_hash"])
proof = block_data['hash']
added = blockchain.add_block(block, proof)
if not added:
return "The block was discarded by the node", 400
return "Block added to the chain", 201
def announce_new_block(block):
for peer in peers:
url = "http://{}/add_block".format(peer)
requests.post(url, data=json.dumps(block.__dict__, sort_keys=True))
app.run(debug=True, port=8000)
| [((139, 154), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (144, 154), False, 'from flask import Flask, request\n'), ((169, 181), 'blockchain.Blockchain', 'Blockchain', ([], {}), '()\n', (179, 181), False, 'from blockchain import Blockchain\n'), ((279, 297), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (295, 297), False, 'from flask import Flask, request\n'), ((517, 535), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (533, 535), False, 'from flask import Flask, request\n'), ((729, 740), 'time.time', 'time.time', ([], {}), '()\n', (738, 740), False, 'import time\n'), ((1311, 1358), 'json.dumps', 'json.dumps', (['blockchain.unconfirmed_transactions'], {}), '(blockchain.unconfirmed_transactions)\n', (1321, 1358), False, 'import json\n'), ((1961, 1979), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1977, 1979), False, 'from flask import Flask, request\n'), ((1992, 2105), 'block.Block', 'Block', (["block_data['index']", "block_data['transactions']", "block_data['timestamp', block_data['previous_hash']]"], {}), "(block_data['index'], block_data['transactions'], block_data[\n 'timestamp', block_data['previous_hash']])\n", (1997, 2105), False, 'from block import Block\n'), ((2437, 2479), 'json.dumps', 'json.dumps', (['block.__dict__'], {'sort_keys': '(True)'}), '(block.__dict__, sort_keys=True)\n', (2447, 2479), False, 'import json\n')] |
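A hypothetical client-side sketch for exercising the node above once it is running locally; the endpoint names and the author/content fields come from the Flask routes in the file, and the host/port from app.run:

```python
import requests

base = "http://127.0.0.1:8000"

# Submit a transaction, mine it, then read the chain back
requests.post(base + "/new_transaction", json={"author": "alice", "content": "hello chain"})
print(requests.get(base + "/mine").text)
print(requests.get(base + "/chain").json())
```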
Nilzone-/Knowit-Julekalender-2017 | Luke 02/02.py | 66ef8a651277e0fef7d9278f3f129410b5b98ee0 | import numpy as np
size = 1000
def create_wall(x, y):
return "{0:b}".format(x**3 + 12*x*y + 5*x*y**2).count("1") & 1
def build_grid():
return np.array([create_wall(j+1, i+1) for i in range(size) for j in range(size)]).reshape(size, size)
def visit(grid, x=0, y=0):
if grid[x][y]:
return
grid[x][y] = 1
if x > 0: visit(grid, x-1, y)
if x < size-1: visit(grid, x+1, y)
if y > 0: visit(grid, x, y-1)
if y < size-1: visit(grid, x, y+1)
grid = build_grid()
print "Original grid\n"
print grid
visit(grid)
print "\n\nAfter search\n"
print grid
print "\n%d unvisited points in grid" % (size**2 - np.count_nonzero(grid)) | [] |
danielicapui/programa-o-avancada | databases/music.py | d0e5b876b951ae04a46ffcda0dc0143e3f7114d9 | from utills import *
conn,cur=start('music')
criarTabela("tracks","title text,plays integer")
music=[('trunder',20),
('my way',15)]
insertInto("tracks","title,plays",music)
#cur.executemany("insert into tracks (title,plays) values (?,?)",music)
buscaTabela("tracks","title")
conn.commit()
conn.close()
| [] |
pdxcycling/carv.io | video_analysis/code/scene_postprocess.py | cce0f91a76d3ceed714b3625d415131fd9540899 | import pandas as pd
import numpy as np
import re
from collections import Counter
from flow_preprocess import FlowPreprocess
class ScenePostprocess(object):
"""
Heavy-lifting macro-feature class
"""
def __init__(self, flow_df, quality_df, remove_transitions=False):
"""
Default constructor
Args:
flow_df: Optical flow dataframe
quality_df: Image quality dataframe
remove_transitions: whether to remove frames around
scene transitions
Returns:
Nothing
"""
self.flow_df = flow_df.copy()
self.quality_df = quality_df.copy()
self.remove_transitions = remove_transitions
self.is_static = None
self.duration = self.get_duration()
self.num_frames = quality_df.shape[0]
## Do some rudimentary cleaning of/addding to the flow data
self.flow_df['distance'] = FlowPreprocess.flow_distances(self.flow_df)
self.flow_df['angle'] = FlowPreprocess.flow_angles(self.flow_df)
## Add scene-centric timestamps
## TODO: This has a few issues with actual start times...
scene_time_offset = self.quality_df['time'].min()
self.flow_df['time_scene'] = self.flow_df['time'] - scene_time_offset
self.quality_df['time_scene'] = self.quality_df['time'] - scene_time_offset
self.min_time_scene = self.quality_df['time_scene'].min()
self.max_time_scene =self.quality_df['time_scene'].max()
self.min_frame_num = self.quality_df['frame_number'].min()
self.max_frame_num = self.quality_df['frame_number'].max()
def _find_columns_by_name(self, df, name_re):
"""
Helper function to find binned features by the prefixes in their names
Args:
df: Dataframe
name_re: regular expression for finding colmns
Returns:
List of columns that have names that match name_re
"""
output = []
cols = df.columns
for c in cols:
if re.search(name_re, c):
output.append(c)
return output
def get_duration(self):
"""
Find scene duration (in seconds)
Args:
None
Returns:
Duration of scene in seconds
"""
min_time = np.min(self.quality_df['time'])
max_time = np.max(self.quality_df['time'])
return max_time - min_time
def get_avg_blur(self):
"""
Find average blur across entire scene
NOTE: The higher the number, the less the blur.
Args:
None
Returns:
Average blur as single float value
"""
avg_blur = np.mean(self.quality_df['blur'])
return avg_blur
def get_blur_percentage(self, blur_threshold=100):
"""
Proportion of frames in the scene that are blurry.
A frame is "blurry" if its average blur is below blur_threshold
Args:
blur_threshold: A float value that defines the threshold between
blurry and non-blurry
Returns:
Flow value of the proportion of the scene's frames that are blurry
"""
blur_pct = 1. * np.sum(self.quality_df['blur'] < blur_threshold)/self.quality_df.shape[0]
return blur_pct
def get_top_colors(self, num_colors=10):
"""
Find the dominant colors in all frames across the scene
NOTE: This can be sped if only a subset of frames are sampled.
Need to run experiments on the optimal sampling rate.
TODO: This approach should be changed in v2.0
Args:
num_colors: The number of most common colors to return.
This is 10 by default.
Returns:
Numpy array containing the most prevalent colors in the scene
"""
self.num_colors = num_colors
max_color_array = np.array(str)
cols = self._find_columns_by_name(self.quality_df, "hue")
for frame_num in range(self.min_frame_num, self.max_frame_num + 1):
frame_color_array = self.quality_df[cols].ix[frame_num].sort_values()[::-1].index.values[:self.num_colors]
max_color_array = np.append(max_color_array, frame_color_array)
## Find most common colors
color_count = Counter(max_color_array)
return map(lambda x: x[0], color_count.most_common(self.num_colors))
def _get_values_from_bin_names(self, cols):
"""
From a list of columns representing bins, return a list of the values
of those bins
Args:
cols: a list of column names of histogram bins
Returns:
A list of the value of each bin
"""
values = []
for c in cols:
matches = re.search('_(\d+.\d+)', c)
if matches:
values.append(float(matches.groups(0)[0]))
else:
## This should never happen, but just in case...
values.append(None)
return values
def get_avg_saturation(self):
"""
Find the average saturation across all frames in the scene
Args:
None
Returns:
A float value of average scene saturation
"""
cols = self._find_columns_by_name(self.quality_df, "sat")
vals = self._get_values_from_bin_names(cols)
sums = self.quality_df[cols].sum()
avg = np.sum((sums * vals).values)/np.sum(sums)
return avg
def get_avg_value(self):
"""
Find the average value (from HSV colorspace) across
all frames in the scene
Args:
None
Returns:
A float value of average scene HSV value
"""
cols = self._find_columns_by_name(self.quality_df, "val")
vals = self._get_values_from_bin_names(cols)
sums = self.quality_df[cols].sum()
avg = np.sum((sums * vals).values)/np.sum(sums)
return avg
def get_pixel_pct(self, col_name, frame_size=(480., 360.)):
"""
Calculate the proportion of the scene's pixels that fall in col_name
Args:
col_name: the name of the column of interest
frame_size: (width, height) of a frame, in pixels
Returns:
Proportion of pixels that are in the column of interest
"""
frame_pixels = frame_size[0] * frame_size[1]
num_frames = self.quality_df.shape[0]
total_pixels = frame_pixels * num_frames
pixel_cnt = np.sum(self.quality_df[col_name])
return pixel_cnt / total_pixels
"""
vvv Flow calculations vvv
"""
def get_flow_percentile(self, percentile=0.5):
"""
Find the distance traveled by optical flow point,
filtered by the specified percentile.
Args:
percentile: Flow distance percentile to return.
Percentile is between 0 and 1.
Returns:
A float value of the flow distance
"""
return self.flow_df['distance'].quantile(percentile)
def get_avg_flow(self):
"""
Find the average distance an optical flow point has traveled between
frames.
Args:
None
Returns:
A float value of the average distance an optical flow point
has traveled between frames
"""
return self.flow_df['distance'].mean()
def get_shake(self):
"""
Return the shakiness of the scene. Shake is calculated by finding the
median distance an optical flow point has traveled in each frame, and
averaging these values.
TODO: vector addition.
Args:
None.
Returns:
A float value representing the shakiness of a scene.
"""
if not self.flow_df.empty:
shake = np.mean((self.flow_df.groupby('frame_number').median())['distance'])
else:
shake = 0
return shake
def get_flow_angle(self):
"""
Find the average angle of travel of the optical flow points in a scene.
Args:
None
Returns:
A float value of the average optical flow angle
"""
return self.flow_df['angle'].mean()
def get_flow_angle_std_dev(self):
"""
Find the standard devation of all optical flows in a scene
Args:
None
Returns:
A float value of the standard deviation of optical flow angle
"""
return self.flow_df['angle'].std()
def is_static_scene(self, remove_transitions=False):
"""
Determines whether or not scene is a static scene (vs. action scene)
TODO: Ignore some time around scene transitions because of fades.
Ensure that scene is long enough.
Args:
remove_transitions: remove frames at beginning and end of scene
Returns:
A boolean value of whether a scene is static or not.
"""
is_static = None
motion_threshold = 1 # one pixel of movement
total_flow_points = self.flow_df.shape[0] ## number of frames in range
thresholded_df = self.flow_df[self.flow_df['distance'] > motion_threshold].copy()
if thresholded_df.empty:
is_static = True
else:
## Due to "artsy" transitions, ignore around beginning/end of scene
if remove_transitions:
## Amount of transition time between scenes
## This could be a percentage...
transition_time_buffer = 1 # in seconds
## Ensure that scene is long enough to remove buffer from analysis
if self.max_time_scene > transition_time_buffer:
thresholded_df = thresholded_df[thresholded_df['time_scene'] > transition_time_buffer]
thresholded_df = thresholded_df[thresholded_df['time_scene'] < self.max_time_scene - transition_time_buffer]
## Do not remove transitions if scene is too short
else:
pass
if not thresholded_df.empty:
##moving_flow_points = thresholded_df.shape[0]
moving_frames = thresholded_df.groupby(by=['frame_number']).mean().shape[0]
else:
##moving_flow_points = 0
moving_frames = 0
##pts_ratio = 1. * moving_flow_points/self.num_frames
pts_ratio = 1. * moving_frames/self.num_frames
# less than 1 moving frame per 4 frames
is_static = pts_ratio < .25
return is_static
def num_trackable_points_per_frame(self):
"""
Find the total number of optical flow points that are trackable per
frame.
"Trackability" is defined as being able to find a specific optical
flow point between frames.
Args:
None
Returns:
A dataframe with the number of trackable points, by frame.
"""
return self.flow_df.groupby('frame_number').size()
def avg_num_trackable_points_per_frame(self):
"""
Find the average number of optical flow points that are trackable,
over all frames in the frame.
"Trackability" is defined as being able to find a specific optical
flow point between frames.
Args:
None
Returns:
A float value of the average number of trackable optical flow
points in all of the scene's frames
"""
return 1. * len(self.flow_df) / self.num_frames
def to_df(self):
"""
Return a dataframe containing all features
TODO: better type checking
Args:
None
Returns:
Dataframe with all features
"""
scene_df = pd.DataFrame(index=[0])
top_colors = self.get_top_colors()
for n in range(self.num_colors):
scene_df['top_color_' + str(n)] = top_colors[n]
scene_df['avg_sat'] = self.get_avg_saturation()
scene_df['avg_val'] = self.get_avg_value()
scene_df['black_pixel_pct'] = self.get_pixel_pct('num_black_pixels')
scene_df['white_pixel_pct'] = self.get_pixel_pct('num_white_pixels')
scene_df['flow_percentile_25'] = self.get_flow_percentile(0.25)
scene_df['flow_percentile_50'] = self.get_flow_percentile(0.25)
scene_df['flow_percentile_75'] = self.get_flow_percentile(0.25)
scene_df['flow_avg'] = self.get_avg_flow()
scene_df['flow_angle'] = self.get_flow_angle()
scene_df['flow_angle_std_dev'] = self.get_flow_angle_std_dev()
scene_df['is_static_scene'] = self.is_static_scene()
##scene_df['action_peak_in_scene'] = None # where in the scene the action peaks
scene_df['shake_coeff'] = self.get_shake()
scene_df['avg_flow_pts_per_frame'] = self.avg_num_trackable_points_per_frame()
scene_df['blur'] = self.get_avg_blur()
scene_df['blur_pct'] = self.get_blur_percentage()
scene_df['duration'] = self.get_duration()
return scene_df
| [((951, 994), 'flow_preprocess.FlowPreprocess.flow_distances', 'FlowPreprocess.flow_distances', (['self.flow_df'], {}), '(self.flow_df)\n', (980, 994), False, 'from flow_preprocess import FlowPreprocess\n'), ((1027, 1067), 'flow_preprocess.FlowPreprocess.flow_angles', 'FlowPreprocess.flow_angles', (['self.flow_df'], {}), '(self.flow_df)\n', (1053, 1067), False, 'from flow_preprocess import FlowPreprocess\n'), ((2371, 2402), 'numpy.min', 'np.min', (["self.quality_df['time']"], {}), "(self.quality_df['time'])\n", (2377, 2402), True, 'import numpy as np\n'), ((2422, 2453), 'numpy.max', 'np.max', (["self.quality_df['time']"], {}), "(self.quality_df['time'])\n", (2428, 2453), True, 'import numpy as np\n'), ((2758, 2790), 'numpy.mean', 'np.mean', (["self.quality_df['blur']"], {}), "(self.quality_df['blur'])\n", (2765, 2790), True, 'import numpy as np\n'), ((4002, 4015), 'numpy.array', 'np.array', (['str'], {}), '(str)\n', (4010, 4015), True, 'import numpy as np\n'), ((4412, 4436), 'collections.Counter', 'Counter', (['max_color_array'], {}), '(max_color_array)\n', (4419, 4436), False, 'from collections import Counter\n'), ((6592, 6625), 'numpy.sum', 'np.sum', (['self.quality_df[col_name]'], {}), '(self.quality_df[col_name])\n', (6598, 6625), True, 'import numpy as np\n'), ((11967, 11990), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': '[0]'}), '(index=[0])\n', (11979, 11990), True, 'import pandas as pd\n'), ((2090, 2111), 're.search', 're.search', (['name_re', 'c'], {}), '(name_re, c)\n', (2099, 2111), False, 'import re\n'), ((4308, 4353), 'numpy.append', 'np.append', (['max_color_array', 'frame_color_array'], {}), '(max_color_array, frame_color_array)\n', (4317, 4353), True, 'import numpy as np\n'), ((4887, 4915), 're.search', 're.search', (['"""_(\\\\d+.\\\\d+)"""', 'c'], {}), "('_(\\\\d+.\\\\d+)', c)\n", (4896, 4915), False, 'import re\n'), ((5544, 5572), 'numpy.sum', 'np.sum', (['(sums * vals).values'], {}), '((sums * vals).values)\n', (5550, 5572), True, 'import numpy as np\n'), ((5573, 5585), 'numpy.sum', 'np.sum', (['sums'], {}), '(sums)\n', (5579, 5585), True, 'import numpy as np\n'), ((6030, 6058), 'numpy.sum', 'np.sum', (['(sums * vals).values'], {}), '((sums * vals).values)\n', (6036, 6058), True, 'import numpy as np\n'), ((6059, 6071), 'numpy.sum', 'np.sum', (['sums'], {}), '(sums)\n', (6065, 6071), True, 'import numpy as np\n'), ((3287, 3335), 'numpy.sum', 'np.sum', (["(self.quality_df['blur'] < blur_threshold)"], {}), "(self.quality_df['blur'] < blur_threshold)\n", (3293, 3335), True, 'import numpy as np\n')] |
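A hypothetical usage sketch for ScenePostprocess; the CSV filenames are made up, and the two dataframes are assumed to already carry the columns the class reads (frame_number, time, blur, the hue/sat/val histogram bins, and the optical-flow point columns that FlowPreprocess expects):

```python
import pandas as pd
from scene_postprocess import ScenePostprocess

flow_df = pd.read_csv("scene_042_flow.csv")        # per optical-flow-point rows
quality_df = pd.read_csv("scene_042_quality.csv")  # per-frame image-quality rows

scene = ScenePostprocess(flow_df, quality_df, remove_transitions=True)
features = scene.to_df()   # single-row dataframe of macro features
print(features.T)
```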
ThomasRot/rational_activations | examples/pytorch/mnist/plot.py | 1fa26d1ee5f3c916eda00c899afa96eccb960143 | import torch
import numpy as np
import pickle
torch.manual_seed(17)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
np.random.seed(17)
import argparse
import torch.nn as nn
import torch.nn.functional as F
import matplotlib
import os
from rational.torch import Rational, RecurrentRational, RecurrentRationalModule
from torchvision import datasets, transforms
from torch.utils.tensorboard import SummaryWriter
from mnist import VGG, LeNet5, actfvs
from matplotlib import pyplot as plt
font = {'family': 'normal',
'weight': 'bold',
'size': 22}
matplotlib.rc('font', **font)
torch.set_anomaly_enabled(True)
def test(args, model, device, test_loader, epoch):
model.eval()
test_loss = 0
correct = 0
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
output = model(data)
test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(test_loader.dataset)
acc = 100. * correct / len(test_loader.dataset)
print('\nTest set: Epoch: {}, Average loss: {:.4f}, Accuracy: {}/{} ({:.2f}%)\n'.format(epoch, test_loss,
correct,
len(test_loader.dataset),
acc))
return acc
def main():
# Training settings
parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
parser.add_argument('--batch-size', type=int, default=64, metavar='N',
help='input batch size for training (default: 64)')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--seed', type=int, default=17, metavar='S',
help='random seed (default: 1)')
parser.add_argument('--log-interval', type=int, default=10, metavar='N',
help='how many batches to wait before logging training status')
parser.add_argument('--dataset', type=str, default='mnist',
help='dataset to use')
parser.add_argument('--arch', type=str, required=True)
parser.add_argument('--init', type=str, default="", choices=["", "xavier", "he"])
args = parser.parse_args()
networks = dict({
"vgg": VGG,
"lenet": LeNet5,
})
network = networks[args.arch]
# activation_function_keys = [x for x in list(actfvs.keys()) if 'pau' in x]
# activation_function_keys = ['pau']
# activation_function_keys = ['recurrent_pau']
activation_function_keys = ['pau', 'recurrent_pau']
optimizer = 'sgd'
epochs = ['final']
for activation_function_key in activation_function_keys:
for epoch in epochs:
print("---" * 42)
print("Starting with dataset: {}, activation function: {}".format(args.dataset, activation_function_key))
print("---" * 42)
load_path = 'examples/runs/mnist/paper_{}_{}_{}{}_seed{}/'.format(args.dataset, args.arch, optimizer,
"_init_{}".format(args.init) if args.init != "" else "",
args.seed) + activation_function_key
use_cuda = not args.no_cuda and torch.cuda.is_available()
torch.manual_seed(args.seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
np.random.seed(args.seed)
device = torch.device("cuda" if use_cuda else "cpu")
kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}
if args.dataset == 'mnist':
test_loader = torch.utils.data.DataLoader(
datasets.MNIST('../data', train=False, transform=transforms.Compose([
transforms.Resize((32, 32)),
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])),
batch_size=args.batch_size, shuffle=True, **kwargs)
lr_scheduler_milestones = [30, 60, 90] # Simple CNN with 3 Conv
# lr_scheduler_milestones = [40, 80] # VGG
elif args.dataset == 'fmnist':
test_loader = torch.utils.data.DataLoader(
datasets.FashionMNIST('../data', train=False, transform=transforms.Compose([
transforms.Resize((32, 32)),
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])),
batch_size=args.batch_size, shuffle=True, **kwargs)
lr_scheduler_milestones = [40, 80]
else:
raise ValueError('dataset error')
model = network(activation_func=activation_function_key).to(device)
model.load_state_dict(torch.load(os.path.join(load_path, 'model_{}.pt'.format(epoch))))
paus = list()
for name, layer in model.named_modules():
if isinstance(layer, Rational):
layer.input_retrieve_mode(max_saves=10)
paus.append(('rational', name, layer))
if isinstance(layer, RecurrentRationalModule):
layer.input_retrieve_mode(max_saves=10)
paus.append(('recurrent_rational', name, layer))
if len(paus) > 0:
os.makedirs(os.path.join(load_path, 'plots'), exist_ok=True)
# dict(model.named_parameters())["features.3.0.bias"][0]
# dict(model.named_parameters())["features.4.2.numerator"][0]
print("Starting model eval")
acc = test(args, model, device, test_loader, epoch)
print("Finished model eval -> Plot")
# fig = plt.figure(1, figsize=(6*len(paus),6))
fig_dicts = []
for i, p in enumerate(paus):
fig = p[2].show(display=False)
print(fig)
fig_dicts.append(fig)
pickle.dump(fig_dicts, open(f'{args.dataset}_{args.arch}_{activation_function_key}_(acc{acc}%).fig.pkl', "wb"))
else:
print("No Rational Activations found. Exit without plotting")
if __name__ == '__main__':
main()
| [((47, 68), 'torch.manual_seed', 'torch.manual_seed', (['(17)'], {}), '(17)\n', (64, 68), False, 'import torch\n'), ((150, 168), 'numpy.random.seed', 'np.random.seed', (['(17)'], {}), '(17)\n', (164, 168), True, 'import numpy as np\n'), ((592, 621), 'matplotlib.rc', 'matplotlib.rc', (['"""font"""'], {}), "('font', **font)\n", (605, 621), False, 'import matplotlib\n'), ((623, 654), 'torch.set_anomaly_enabled', 'torch.set_anomaly_enabled', (['(True)'], {}), '(True)\n', (648, 654), False, 'import torch\n'), ((1771, 1831), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""PyTorch MNIST Example"""'}), "(description='PyTorch MNIST Example')\n", (1794, 1831), False, 'import argparse\n'), ((768, 783), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (781, 783), False, 'import torch\n'), ((3773, 3801), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (3790, 3801), False, 'import torch\n'), ((3919, 3944), 'numpy.random.seed', 'np.random.seed', (['args.seed'], {}), '(args.seed)\n', (3933, 3944), True, 'import numpy as np\n'), ((3967, 4010), 'torch.device', 'torch.device', (["('cuda' if use_cuda else 'cpu')"], {}), "('cuda' if use_cuda else 'cpu')\n", (3979, 4010), False, 'import torch\n'), ((3735, 3760), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3758, 3760), False, 'import torch\n'), ((946, 989), 'torch.nn.functional.nll_loss', 'F.nll_loss', (['output', 'target'], {'reduction': '"""sum"""'}), "(output, target, reduction='sum')\n", (956, 989), True, 'import torch.nn.functional as F\n'), ((5948, 5980), 'os.path.join', 'os.path.join', (['load_path', '"""plots"""'], {}), "(load_path, 'plots')\n", (5960, 5980), False, 'import os\n'), ((4305, 4332), 'torchvision.transforms.Resize', 'transforms.Resize', (['(32, 32)'], {}), '((32, 32))\n', (4322, 4332), False, 'from torchvision import datasets, transforms\n'), ((4358, 4379), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (4377, 4379), False, 'from torchvision import datasets, transforms\n'), ((4405, 4447), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['(0.1307,)', '(0.3081,)'], {}), '((0.1307,), (0.3081,))\n', (4425, 4447), False, 'from torchvision import datasets, transforms\n'), ((4909, 4936), 'torchvision.transforms.Resize', 'transforms.Resize', (['(32, 32)'], {}), '((32, 32))\n', (4926, 4936), False, 'from torchvision import datasets, transforms\n'), ((4962, 4983), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (4981, 4983), False, 'from torchvision import datasets, transforms\n'), ((5009, 5051), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['(0.1307,)', '(0.3081,)'], {}), '((0.1307,), (0.3081,))\n', (5029, 5051), False, 'from torchvision import datasets, transforms\n')] |
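A minimal sketch of the retrieve-and-plot pattern the script above relies on, applied to a single Rational activation instead of a full VGG/LeNet checkpoint. The calls used, input_retrieve_mode(max_saves=...) and show(display=False), are the same ones the script makes; feeding random batches is purely illustrative:

```python
import torch
from rational.torch import Rational

act = Rational()                        # a trainable rational activation function
act.input_retrieve_mode(max_saves=10)    # start recording the inputs it sees
for _ in range(10):
    act(torch.randn(64, 32))             # dummy batches stand in for real network activations
fig = act.show(display=False)            # input distribution plus the fitted function, as in the script
```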
AetherBlack/Veille-Informatique | fts/fluxrss.py | e80451c5eb21f43ac1a9baac3342ad0d4102d18b | #!/usr/bin/python3
from urllib.parse import urlparse
import feedparser
import requests
import asyncio
import discord
import hashlib
import os
from const import CHANNEL_RSS, WAIT_UNTIL_NEW_CHECK, \
SQLITE_FOLDER_NAME, SQLITE_FILE_NAME
from fts.database import Database
from fts.cleandatabase import CleanDatabase
class FluxRSS:
"""
Class of FluxRSS.
Get news of the feedrss url parse in args.
"""
def __init__(self, bot, cwd):
"""
Initialize class
@param => DiscordBot: `bot`: Discord Bot Instance.
@param => str: `cwd`: Current Working Directory of main.py file.
"""
# Discord
self.bot = bot
self.bot_username = self.bot.user.name
self.rss_channel = self.bot.get_channel(CHANNEL_RSS)
# Path
self.cwd = cwd
# Database
self.db_path = os.path.join(self.cwd, SQLITE_FOLDER_NAME)
self.database = Database(self.db_path, SQLITE_FILE_NAME)
def get_news(self, url):
"""
Get the news of the rss feed.
@param => str: `url`: url of the rss feed.
Return dict with an int index key and
title, description and link in a list for the value.
"""
dict_news = dict()
# Get the content of the requests
content = requests.get(url).text
# Parse the content
parser = feedparser.parse(content)
# Set the root
parser = parser["entries"]
# Get the number of news
news_number = len(parser)
# Construct the dict
for index in range(news_number):
# Get the title
title = parser[index]["title"]
# Get the description
description = parser[index]["description"]
# Get the link
link = parser[index]["links"][0]["href"]
# Set list
args = [
title, description, link
]
# Add the list to the dict
dict_news[str(index)] = args
# Return the dict
return dict_news
def is_new(self, root, name, title, description, link):
"""
Return True if the news in the feed is new.
@param => str: `title`: Title of the news.
@param => str: `description`: Description of the news.
@param => str: `link`: Link of the rss feed.
"""
# Hash description
hash_description = hashlib.sha256(bytes(description, "utf-8", errors="ignore")).hexdigest()
# True when the news is not already in the database
return not self.database.isNewsExists(root, name, title, hash_description, link)
def embeded_msg(self, root, name, title, content, link, color):
"""
Create the embeded message and send it to discord.
@param => str: `root`: Name of the Website.
@param => str: `name`: Name set in const. Categorie of the news
@param => str: `title`: Title of the news.
@param => str: `content`: Content description of the news.
@param => str: `link`: Link of the news.
@param => discord.Color: `color`: Color for the left panel.
"""
# Set the Name, description and color on the left
news = discord.Embed(title="{0} - {1}".format(root, name), description="News :", color=(color or 0x00ff00))
#Set bot name and profile picture
news.set_author(name=self.bot_username, icon_url=self.bot.user.avatar_url)
#Set the description and the link for the main message
content = content + "\n" + link
news.add_field(name=title, value=content[:1024], inline=False)
#Show the bot username in footer
news.set_footer(text="Generate by @{0}".format(self.bot_username))
# Return the final Discord embeded message
return news
async def feedrss(self, json_rss):
"""
Get the news and send it to the channel.
@param => dict: `json_rss`: JSON data of the RSS Flux.
"""
# Show const for the format
self.json_rss = json_rss
# While the connection is not closed
while not self.bot.is_closed():
# For each key
for key, sections in self.json_rss.items():
# Get the root name set in const
root = key
# For each sections
for index_section, section in enumerate(sections):
# Check customization of the section
if "custom" in section.keys():
# Check color
if "color" in section["custom"].keys():
color = getattr(discord.Color, section["custom"]["color"])()
else:
color = False
else:
color = False
# Get the name of the section
name = section["name"]
# Get the time until the cleaning of the database for the root and name given
wait_time = section["clean"]
# Check if the cleaning database is already launched
if isinstance(wait_time, str):
# Launch the function to clean the database
Thread = CleanDatabase(root, name, wait_time, self.db_path, SQLITE_FILE_NAME)
Thread.start()
# Change the variable type of the clean line in json_rss to launch relaunch the requests
self.json_rss[root][index_section]["clean"] = True
# For each link in the section
for link in section["link"]:
# Get title, description and link in a dict
dict_news = self.get_news(link)
# Verify if the news already exists
for value in dict_news.values():
# Get title
title = value[0]
# Get description
description = value[1]
# Get link
link = value[2]
# Check if the news is new
if self.is_new(root, name, title, description, link):
# Hash the description
hash_description = hashlib.sha256(bytes(description, "utf-8", errors="ignore")).hexdigest()
# write the news into the database
self.database.AddNews(root, name, title, hash_description, link)
#Create the discord message
message = self.embeded_msg(root, name, title, description, link, color)
#Send to discord
await self.rss_channel.send(embed=message)
# Wait until the next verification
await asyncio.sleep(WAIT_UNTIL_NEW_CHECK)
| [((866, 908), 'os.path.join', 'os.path.join', (['self.cwd', 'SQLITE_FOLDER_NAME'], {}), '(self.cwd, SQLITE_FOLDER_NAME)\n', (878, 908), False, 'import os\n'), ((933, 973), 'fts.database.Database', 'Database', (['self.db_path', 'SQLITE_FILE_NAME'], {}), '(self.db_path, SQLITE_FILE_NAME)\n', (941, 973), False, 'from fts.database import Database\n'), ((1382, 1407), 'feedparser.parse', 'feedparser.parse', (['content'], {}), '(content)\n', (1398, 1407), False, 'import feedparser\n'), ((1313, 1330), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1325, 1330), False, 'import requests\n'), ((7057, 7092), 'asyncio.sleep', 'asyncio.sleep', (['WAIT_UNTIL_NEW_CHECK'], {}), '(WAIT_UNTIL_NEW_CHECK)\n', (7070, 7092), False, 'import asyncio\n'), ((5316, 5384), 'fts.cleandatabase.CleanDatabase', 'CleanDatabase', (['root', 'name', 'wait_time', 'self.db_path', 'SQLITE_FILE_NAME'], {}), '(root, name, wait_time, self.db_path, SQLITE_FILE_NAME)\n', (5329, 5384), False, 'from fts.cleandatabase import CleanDatabase\n')] |
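A standalone sketch of the fetch-and-parse step that FluxRSS.get_news performs, without Discord or the SQLite layer; the feed URL is illustrative:

```python
import requests
import feedparser

content = requests.get("https://example.com/feed.xml").text
parsed = feedparser.parse(content)

for entry in parsed["entries"]:
    title = entry["title"]
    link = entry["links"][0]["href"]
    print(title, "->", link)
```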
kiyohiro8/poke-env | src/poke_env/player/player_network_interface.py | 7a1a4b155e8a73bd712d44e70c4192f8032d7e6f | # -*- coding: utf-8 -*-
"""This module defines a base class for communicating with showdown servers.
"""
import json
import logging
import requests
import websockets # pyre-ignore
from abc import ABC
from abc import abstractmethod
from asyncio import CancelledError
from asyncio import ensure_future
from asyncio import Event
from asyncio import Lock
from asyncio import sleep
from time import perf_counter
from typing import List
from typing import Optional
from aiologger import Logger # pyre-ignore
from poke_env.exceptions import ShowdownException
from poke_env.player_configuration import PlayerConfiguration
from poke_env.server_configuration import ServerConfiguration
class PlayerNetwork(ABC):
"""
Network interface of a player.
Responsible for communicating with showdown servers. Also implements some higher
level methods for basic tasks, such as changing avatar and low-level message
handling.
"""
def __init__(
self,
player_configuration: PlayerConfiguration,
*,
avatar: Optional[int] = None,
log_level: Optional[int] = None,
server_configuration: ServerConfiguration,
start_listening: bool = True,
) -> None:
"""
:param player_configuration: Player configuration.
:type player_configuration: PlayerConfiguration
:param avatar: Player avatar id. Optional.
:type avatar: int, optional
:param log_level: The player's logger level.
:type log_level: int. Defaults to logging's default level.
:param server_configuration: Server configuration.
:type server_configuration: ServerConfiguration
:param start_listening: Whether to start listening to the server. Defaults to
True.
:type start_listening: bool
"""
self._authentication_url = server_configuration.authentication_url
self._avatar = avatar
self._password = player_configuration.password
self._username = player_configuration.username
self._server_url = server_configuration.server_url
self._logged_in: Event = Event()
self._sending_lock = Lock()
self._websocket: websockets.client.WebSocketClientProtocol # pyre-ignore
self._logger: Logger = self._create_player_logger(log_level) # pyre-ignore
if start_listening:
self._listening_coroutine = ensure_future(self.listen())
async def _accept_challenge(self, username: str) -> None:
assert self.logged_in.is_set()
await self._set_team()
await self._send_message("/accept %s" % username)
async def _challenge(self, username: str, format_: str):
assert self.logged_in.is_set()
await self._set_team()
await self._send_message(f"/challenge {username}, {format_}")
async def _change_avatar(self, avatar_id: Optional[int]) -> None:
"""Changes the player's avatar.
:param avatar_id: The new avatar id. If None, nothing happens.
:type avatar_id: int
"""
await self._wait_for_login()
if avatar_id is not None:
await self._send_message(f"/avatar {avatar_id}")
def _create_player_logger(self, log_level: Optional[int]) -> Logger: # pyre-ignore
"""Creates a logger for the player.
Returns a Logger displaying asctime and the player's username before messages.
:param log_level: The logger's level.
:type log_level: int
:return: The logger.
:rtype: Logger
"""
logger = logging.getLogger(self._username)
stream_handler = logging.StreamHandler()
if log_level is not None:
logger.setLevel(log_level)
formatter = logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
return logger
async def _handle_message(self, message: str) -> None:
"""Handle received messages.
:param message: The message to parse.
:type message: str
"""
try:
self.logger.debug("Received message to handle: %s", message)
# Showdown websocket messages are pipe-separated sequences
split_message = message.split("|")
assert len(split_message) > 1
# The type of message is determined by the first entry in the message
# For battles, this is the zero-th entry
# Otherwise it is the one-th entry
if split_message[1] == "challstr":
# Confirms connection to the server: we can login
await self._log_in(split_message)
elif split_message[1] == "updateuser":
if split_message[2] == " " + self._username:
# Confirms successful login
self.logged_in.set()
elif not split_message[2].startswith(" Guest "):
self.logger.warning(
"""Trying to login as %s, showdown returned %s """
"""- this might prevent future actions from this agent. """
"""Changing the agent's username might solve this problem.""",
self.username,
split_message[2],
)
elif "updatechallenges" in split_message[1]:
# Contain information about current challenge
await self._update_challenges(split_message)
elif split_message[0].startswith(">battle"):
# Battle update
await self._handle_battle_message(message)
elif split_message[1] == "updatesearch":
self.logger.debug("Ignored message: %s", message)
pass
elif split_message[1] == "popup":
self.logger.warning("Popup message received: %s", message)
elif split_message[1] in ["nametaken"]:
self.logger.critical("Error message received: %s", message)
raise ShowdownException("Error message received: %s", message)
elif split_message[1] == "pm":
self.logger.info("Received pm: %s", split_message)
else:
self.logger.critical("Unhandled message: %s", message)
raise NotImplementedError("Unhandled message: %s" % message)
except CancelledError as e:
self.logger.critical("CancelledError intercepted. %s", e)
except Exception as exception:
self.logger.exception(
"Unhandled exception raised while handling message:\n%s", message
)
raise exception
async def _log_in(self, split_message: List[str]) -> None:
"""Log the player with specified username and password.
Split message contains information sent by the server. This information is
necessary to log in.
:param split_message: Message received from the server that triggers logging in.
:type split_message: List[str]
"""
if self._password:
log_in_request = requests.post(
self._authentication_url,
data={
"act": "login",
"name": self._username,
"pass": self._password,
"challstr": split_message[2] + "%7C" + split_message[3],
},
)
self.logger.info("Sending authentication request")
assertion = json.loads(log_in_request.text[1:])["assertion"]
else:
self.logger.info("Bypassing authentication request")
assertion = ""
await self._send_message(f"/trn {self._username},0,{assertion}")
await self._change_avatar(self._avatar)
async def _search_ladder_game(self, format_):
await self._set_team()
await self._send_message(f"/search {format_}")
async def _send_message(
self, message: str, room: str = "", message_2: Optional[str] = None
) -> None:
"""Sends a message to the specified room.
`message_2` can be used to send a sequence of length 2.
:param message: The message to send.
:type message: str
:param room: The room to which the message should be sent.
:type room: str
:param message_2: Second element of the sequence to be sent. Optional.
:type message_2: str, optional
"""
if message_2:
to_send = "|".join([room, message, message_2])
else:
to_send = "|".join([room, message])
await self._websocket.send(to_send)
self.logger.info(">>> %s", to_send)
async def _set_team(self):
if self._team is not None:
await self._send_message("/utm %s" % self._team.yield_team())
async def _wait_for_login(
self, checking_interval: float = 0.001, wait_for: int = 5
) -> None:
start = perf_counter()
while perf_counter() - start < wait_for:
await sleep(checking_interval)
if self.logged_in.is_set():
return
assert self.logged_in.is_set()
async def listen(self) -> None:
"""Listen to a showdown websocket and dispatch messages to be handled."""
self.logger.info("Starting listening to showdown websocket")
coroutines = []
try:
async with websockets.connect(
self.websocket_url, max_queue=None
) as websocket:
self._websocket = websocket
async for message in websocket:
self.logger.info("<<< %s", message)
coroutines.append(ensure_future(self._handle_message(message)))
except websockets.exceptions.ConnectionClosedOK:
self.logger.warning(
"Websocket connection with %s closed", self.websocket_url
)
except (CancelledError, RuntimeError) as e:
self.logger.critical("Listen interrupted by %s", e)
except Exception as e:
self.logger.exception(e)
finally:
for coroutine in coroutines:
coroutine.cancel()
async def stop_listening(self) -> None:
if self._listening_coroutine is not None:
self._listening_coroutine.cancel()
await self._websocket.close()
@abstractmethod
async def _handle_battle_message(self, message: str) -> None:
"""Abstract method.
Implementation should redirect messages to corresponding battles.
"""
@abstractmethod
async def _update_challenges(self, split_message: List[str]) -> None:
"""Abstract method.
Implementation should keep track of current challenges.
"""
@property
def logged_in(self) -> Event:
"""Event object associated with user login.
:return: The logged-in event
:rtype: Event
"""
return self._logged_in
@property
def logger(self) -> Logger: # pyre-ignore
"""Logger associated with the player.
:return: The logger.
:rtype: Logger
"""
return self._logger
@property
def username(self) -> str:
"""The player's username.
:return: The player's username.
:rtype: str
"""
return self._username
@property
def websocket_url(self) -> str:
"""The websocket url.
It is derived from the server url.
:return: The websocket url.
:rtype: str
"""
return f"ws://{self._server_url}/showdown/websocket"
| [((2127, 2134), 'asyncio.Event', 'Event', ([], {}), '()\n', (2132, 2134), False, 'from asyncio import Event\n'), ((2164, 2170), 'asyncio.Lock', 'Lock', ([], {}), '()\n', (2168, 2170), False, 'from asyncio import Lock\n'), ((3563, 3596), 'logging.getLogger', 'logging.getLogger', (['self._username'], {}), '(self._username)\n', (3580, 3596), False, 'import logging\n'), ((3623, 3646), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (3644, 3646), False, 'import logging\n'), ((3741, 3814), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (3758, 3814), False, 'import logging\n'), ((9041, 9055), 'time.perf_counter', 'perf_counter', ([], {}), '()\n', (9053, 9055), False, 'from time import perf_counter\n'), ((7194, 7370), 'requests.post', 'requests.post', (['self._authentication_url'], {'data': "{'act': 'login', 'name': self._username, 'pass': self._password, 'challstr':\n split_message[2] + '%7C' + split_message[3]}"}), "(self._authentication_url, data={'act': 'login', 'name': self.\n _username, 'pass': self._password, 'challstr': split_message[2] + '%7C' +\n split_message[3]})\n", (7207, 7370), False, 'import requests\n'), ((7595, 7630), 'json.loads', 'json.loads', (['log_in_request.text[1:]'], {}), '(log_in_request.text[1:])\n', (7605, 7630), False, 'import json\n'), ((9070, 9084), 'time.perf_counter', 'perf_counter', ([], {}), '()\n', (9082, 9084), False, 'from time import perf_counter\n'), ((9123, 9147), 'asyncio.sleep', 'sleep', (['checking_interval'], {}), '(checking_interval)\n', (9128, 9147), False, 'from asyncio import sleep\n'), ((9480, 9534), 'websockets.connect', 'websockets.connect', (['self.websocket_url'], {'max_queue': 'None'}), '(self.websocket_url, max_queue=None)\n', (9498, 9534), False, 'import websockets\n'), ((6119, 6175), 'poke_env.exceptions.ShowdownException', 'ShowdownException', (['"""Error message received: %s"""', 'message'], {}), "('Error message received: %s', message)\n", (6136, 6175), False, 'from poke_env.exceptions import ShowdownException\n')] |
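PlayerNetwork is abstract, so the smallest possible concrete subclass only has to supply the two abstract coroutines. A hedged sketch (real subclasses, such as poke-env's Player, implement full battle handling on top of this):

```python
from typing import List

from poke_env.player.player_network_interface import PlayerNetwork


class EchoPlayer(PlayerNetwork):
    """Toy subclass that just logs what the server sends."""

    async def _handle_battle_message(self, message: str) -> None:
        self.logger.info("battle update: %s", message)

    async def _update_challenges(self, split_message: List[str]) -> None:
        self.logger.info("challenge update: %s", split_message)
```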
Keesiu/meta-kaggle | data/external/repositories/42139/KDDCup13Track2-master/blocking.py | 87de739aba2399fd31072ee81b391f9b7a63f540 | #!/usr/bin/env python
from common import *
import csv
import argparse
from unidecode import unidecode
from nameparser import constants as npc
from collections import defaultdict
import cPickle as pickle
import re
stopwords_custom = set(['document', 'preparation', 'system', 'consortium', 'committee', 'international', 'artificial', 'network', 'distributed', 'based', 'research', 'language', 'technology', 'project', 'design', 'computer', 'control', 'object', 'internet', 'propulsion', 'corp', 'workshop', 'xml', 'world', 'work', 'thesis', 'test', 'tool', 'structure', 'statistical', 'laboratory', 'ltd', 'objects', 'process', 'scheduling', 'september', 'special', 'student', 'programs', 'capacitated', 'balancing', 'assembly', 'aspect', 'model', 'inc', 'psychological', 'psychology', 'mohammed', 'computing', 'software', 'programming', 'new', 'applications', 'jet', 'propulsion', 'classification', 'recommendation'])
stopwords = stopwords_custom | npc.TITLES | npc.PREFIXES | npc.SUFFIXES | npc.CONJUNCTIONS
def bin_exactsamename(authors):
bins = defaultdict(set)
for i, (id, a) in enumerate(authors.iteritems()):
bins[a['fullname']].add(id)
if (i+1) % 10000 == 0:
print_err(i+1)
return bins
def bin_samename(authors):
bins = defaultdict(set)
for i, (id, a) in enumerate(authors.iteritems()):
bins[a['fullname_joined']].add(id)
if (i+1) % 10000 == 0:
print_err(i+1)
return bins
def bin_fFfL(authors):
bins = defaultdict(set)
for i, (id, a) in enumerate(authors.iteritems()):
bins[a['fFfL']].add(id)
if (i+1) % 10000 == 0:
print_err(i+1)
return bins
def bin_fF3L(authors, max_bin_size=20):
bins = defaultdict(set)
for i, (id, a) in enumerate(authors.iteritems()):
if ':' not in a['fFiL'] and len(a['name_last']) >= 3 and len(a['fFiL']) > 2:
bins[a['fFiL'] + a['name_last'][1:3]].add(id)
if (i+1) % 10000 == 0:
print_err(i+1)
bk = bins.keys()
for b in bk:
if len(bins[b]) > max_bin_size:
del bins[b]
return bins
def bin_fFiL(authors, max_bin_size=20):
bins = defaultdict(set)
for i, (id, a) in enumerate(authors.iteritems()):
if len(a['fFiL']) > 2:
bins[a['fFiL']].add(id)
if (i+1) % 10000 == 0:
print_err(i+1)
bk = bins.keys()
for b in bk:
if len(bins[b]) > max_bin_size:
del bins[b]
return bins
def bin_iFfL(authors):
bins = defaultdict(set)
for i, (id, a) in enumerate(authors.iteritems()):
bins[a['iFfL']].add(id)
if (i+1) % 10000 == 0:
print_err(i+1)
return bins
def bin_fullparsedname(authors):
bins = defaultdict(set)
for i, (id, a) in enumerate(authors.iteritems()):
bins[a['fullname_parsed']].add(id)
if (i+1) % 10000 == 0:
print_err(i+1)
return bins
def bin_iFoffbyoneL(authors, max_bin_size=30):
bins = defaultdict(set)
for i, (id, a) in enumerate(authors.iteritems()):
if ':' not in a['fullname'] and a['name_first'] and a['name_last']:
bins[a['name_first'][0] + a['name_last']].add(id)
if len(a['name_last']) > 1:
bins[a['name_first'][0] + a['name_last'][:-1]].add(id)
if (i+1) % 10000 == 0:
print_err(i+1)
bk = bins.keys()
for b in bk:
if len(bins[b]) > max_bin_size:
del bins[b]
return bins
def bin_2FoffbyoneL(authors, max_bin_size=30):
bins = defaultdict(set)
for i, (id, a) in enumerate(authors.iteritems()):
if ':' not in a['fullname'] and len(a['name_first']) >= 2 and a['name_last']:
bins[a['name_first'][0:2] + a['name_last']].add(id)
if len(a['name_last']) > 1:
bins[a['name_first'][0:2] + a['name_last'][:-1]].add(id)
if (i+1) % 10000 == 0:
print_err(i+1)
bk = bins.keys()
for b in bk:
if len(bins[b]) > max_bin_size:
del bins[b]
return bins
def bin_metaphone(authors):
bins = defaultdict(set)
for i, (id, a) in enumerate(authors.iteritems()):
if a['metaphone_fullname']:
bins[a['metaphone_fullname']].add(id)
if (i+1) % 10000 == 0:
print_err(i+1)
# bk = bins.keys()
# for b in bk:
# if len(bins[b]) > max_bin_size:
# del bins[b]
return bins
def bin_offbylastone(authors):
bins = defaultdict(set)
for i, (id, a) in enumerate(authors.iteritems()):
if ':' not in a['fullname_joined']:
bins[a['fullname_joined']].add(id)
if len(a['fullname_joined']) > 1:
bins[a['fullname_joined'][:-1]].add(id)
if (i+1) % 10000 == 0:
print_err(i+1)
return bins
def bin_token(authors, nw=2, max_bin_size=100):
bins = defaultdict(set)
for i, (id, a) in enumerate(authors.iteritems()):
if ':' not in a['name']:
tokens = re.sub("[^\w]", " ", a['name']).split()
tokens = [v for v in tokens if len(v) > 2 and v not in stopwords]
ngrams = zip(*[tokens[j:] for j in range(nw)])
for p in ngrams:
pg = ' '.join(p)
if len(pg) > len(p)*2-1:
bins[pg].add(id)
if (i+1) % 10000 == 0:
print_err(i+1)
bk = bins.keys()
for b in bk:
if len(bins[b]) > max_bin_size:
del bins[b]
return bins
def bin_ngrams(authors, n=15, max_bin_size=30):
bins = defaultdict(set)
for i, (id, a) in enumerate(authors.iteritems()):
if ':' not in a['fullname']:
lname = a['fullname']
ngrams = zip(*[lname[j:] for j in range(n)])
for p in ngrams:
if not any(((s in p) for s in stopwords_custom)):
bins[''.join(p)].add(id)
if (i+1) % 10000 == 0:
print_err(i+1)
bk = bins.keys()
for b in bk:
if len(bins[b]) > max_bin_size:
del bins[b]
return bins
def main():
parser = argparse.ArgumentParser()
parser.add_argument('authorprefeat', nargs='?', default='generated/Author_prefeat.pickle')
parser.add_argument('type', nargs='?', default='iFfL')
args = parser.parse_args()
print_err("Loading pickled author pre-features")
authors = pickle.load(open(args.authorprefeat, 'rb'))
bins = globals()["bin_"+args.type](authors)
bins = sorted([(len(bv), blabel, bv) for blabel, bv in bins.iteritems()], reverse=True)
for _, binlabel, binv in bins:
print binlabel + ';' + ','.join(map(str, sorted(binv)))
if __name__ == "__main__":
main() | [] |
basepipe/developer_onboarding | resources/dot_PyCharm/system/python_stubs/-762174762/PySide/QtCore/QAbstractFileEngineIterator.py | 05b6a776f8974c89517868131b201f11c6c2a5ad | # encoding: utf-8
# module PySide.QtCore
# from C:\Python27\lib\site-packages\PySide\QtCore.pyd
# by generator 1.147
# no doc
# imports
import Shiboken as __Shiboken
class QAbstractFileEngineIterator(__Shiboken.Object):
# no doc
def currentFileInfo(self, *args, **kwargs): # real signature unknown
pass
def currentFileName(self, *args, **kwargs): # real signature unknown
pass
def currentFilePath(self, *args, **kwargs): # real signature unknown
pass
def filters(self, *args, **kwargs): # real signature unknown
pass
def hasNext(self, *args, **kwargs): # real signature unknown
pass
def nameFilters(self, *args, **kwargs): # real signature unknown
pass
def next(self, *args, **kwargs): # real signature unknown
pass
def path(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
| [] |
priyatharsan/beyond | tests/conftest.py | 1061b870407d316d43e4d1351a7ec026629685ae | import numpy as np
from pytest import fixture, mark, skip
from unittest.mock import patch
from pathlib import Path
from beyond.config import config
from beyond.dates.eop import Eop
from beyond.frames.stations import create_station
from beyond.io.tle import Tle
from beyond.propagators.keplernum import KeplerNum
from beyond.dates import Date, timedelta
from beyond.env.solarsystem import get_body
np.set_printoptions(linewidth=200)
@fixture(autouse=True, scope="session")
def config_override():
"""Create a dummy config dict containing basic data
"""
config.update({
"eop": {
"missing_policy": "pass",
}
})
@fixture
def common_env():
with patch('beyond.dates.date.EopDb.get') as m:
m.return_value = Eop(
x=-0.00951054166666622, y=0.31093590624999734, dpsi=-94.19544791666682,
deps=-10.295645833333051, dy=-0.10067361111115315, dx=-0.06829513888889051,
lod=1.6242802083331438, ut1_utc=0.01756018472222477, tai_utc=36.0
)
yield
@fixture
def station(common_env):
return create_station('Toulouse', (43.604482, 1.443962, 172.))
@fixture
def iss_tle(common_env):
return Tle("""ISS (ZARYA)
1 25544U 98067A 18124.55610684 .00001524 00000-0 30197-4 0 9997
2 25544 51.6421 236.2139 0003381 47.8509 47.6767 15.54198229111731""")
@fixture
def molniya_tle(common_env):
return Tle("""MOLNIYA 1-90
1 24960U 97054A 18123.22759647 .00000163 00000-0 24467-3 0 9999
2 24960 62.6812 182.7824 6470982 294.8616 12.8538 3.18684355160009""")
@fixture(params=["tle", "ephem"])
def orbit(request, iss_tle):
orb = iss_tle.orbit()
if request.param == "tle":
return orb
elif request.param == "ephem":
start = Date(2018, 4, 5, 16, 50)
stop = timedelta(hours=6)
step = timedelta(seconds=15)
return orb.ephem(start=start, stop=stop, step=step)
elif request.param == "kepler":
orb.propagator = KeplerNum(
timedelta(seconds=60),
get_body('Earth')
)
return orb
@fixture(params=["tle", "ephem"])
def molniya(request, molniya_tle):
orb = molniya_tle.orbit()
if request.param == "tle":
return orb
elif request.param == "ephem":
start = Date(2018, 4, 5, 16, 50)
stop = timedelta(hours=15)
step = timedelta(minutes=1)
return orb.ephem(start=start, stop=stop, step=step)
@fixture
def jplfiles():
config['env'] = {
'jpl': [
str(Path(__file__).parent / "data" / "jpl" / "de403_2000-2020.bsp"),
str(Path(__file__).parent / "data" / "jpl" / "pck00010.tpc"),
str(Path(__file__).parent / "data" / "jpl" / "gm_de431.tpc"),
]
}
def _skip_if_no_mpl():
"""Specific for dynamically skipping the test if matplotlib is not present
as it is not a dependency of the library, but merely a convenience
"""
try:
import matplotlib.pyplot as plt
except ImportError:
return True
else:
return False
def pytest_configure(config):
"""Declare the skip_if_no_mpl marker in pytest's '--markers' helper option
This has no actual effect on the tests
"""
config.addinivalue_line(
"markers", "skip_if_no_mpl: skip if matplotlib is not installed"
)
def pytest_runtest_setup(item):
"""This function is called for each test case.
    It checks whether the test case has the skip_if_no_mpl decorator. If so, the test case is skipped.
"""
if _skip_if_no_mpl() and list(item.iter_markers(name="skip_if_no_mpl")):
skip("matplotlib not installed")
| [((400, 434), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'linewidth': '(200)'}), '(linewidth=200)\n', (419, 434), True, 'import numpy as np\n'), ((438, 476), 'pytest.fixture', 'fixture', ([], {'autouse': '(True)', 'scope': '"""session"""'}), "(autouse=True, scope='session')\n", (445, 476), False, 'from pytest import fixture, mark, skip\n'), ((1573, 1605), 'pytest.fixture', 'fixture', ([], {'params': "['tle', 'ephem']"}), "(params=['tle', 'ephem'])\n", (1580, 1605), False, 'from pytest import fixture, mark, skip\n'), ((2090, 2122), 'pytest.fixture', 'fixture', ([], {'params': "['tle', 'ephem']"}), "(params=['tle', 'ephem'])\n", (2097, 2122), False, 'from pytest import fixture, mark, skip\n'), ((569, 619), 'beyond.config.config.update', 'config.update', (["{'eop': {'missing_policy': 'pass'}}"], {}), "({'eop': {'missing_policy': 'pass'}})\n", (582, 619), False, 'from beyond.config import config\n'), ((1089, 1145), 'beyond.frames.stations.create_station', 'create_station', (['"""Toulouse"""', '(43.604482, 1.443962, 172.0)'], {}), "('Toulouse', (43.604482, 1.443962, 172.0))\n", (1103, 1145), False, 'from beyond.frames.stations import create_station\n'), ((1192, 1359), 'beyond.io.tle.Tle', 'Tle', (['"""ISS (ZARYA)\n1 25544U 98067A 18124.55610684 .00001524 00000-0 30197-4 0 9997\n2 25544 51.6421 236.2139 0003381 47.8509 47.6767 15.54198229111731"""'], {}), '("""ISS (ZARYA)\n1 25544U 98067A 18124.55610684 .00001524 00000-0 30197-4 0 9997\n2 25544 51.6421 236.2139 0003381 47.8509 47.6767 15.54198229111731"""\n )\n', (1195, 1359), False, 'from beyond.io.tle import Tle\n'), ((1406, 1574), 'beyond.io.tle.Tle', 'Tle', (['"""MOLNIYA 1-90\n1 24960U 97054A 18123.22759647 .00000163 00000-0 24467-3 0 9999\n2 24960 62.6812 182.7824 6470982 294.8616 12.8538 3.18684355160009"""'], {}), '("""MOLNIYA 1-90\n1 24960U 97054A 18123.22759647 .00000163 00000-0 24467-3 0 9999\n2 24960 62.6812 182.7824 6470982 294.8616 12.8538 3.18684355160009"""\n )\n', (1409, 1574), False, 'from beyond.io.tle import Tle\n'), ((3232, 3325), 'beyond.config.config.addinivalue_line', 'config.addinivalue_line', (['"""markers"""', '"""skip_if_no_mpl: skip if matplotlib is not installed"""'], {}), "('markers',\n 'skip_if_no_mpl: skip if matplotlib is not installed')\n", (3255, 3325), False, 'from beyond.config import config\n'), ((695, 731), 'unittest.mock.patch', 'patch', (['"""beyond.dates.date.EopDb.get"""'], {}), "('beyond.dates.date.EopDb.get')\n", (700, 731), False, 'from unittest.mock import patch\n'), ((763, 995), 'beyond.dates.eop.Eop', 'Eop', ([], {'x': '(-0.00951054166666622)', 'y': '(0.31093590624999734)', 'dpsi': '(-94.19544791666682)', 'deps': '(-10.295645833333051)', 'dy': '(-0.10067361111115315)', 'dx': '(-0.06829513888889051)', 'lod': '(1.6242802083331438)', 'ut1_utc': '(0.01756018472222477)', 'tai_utc': '(36.0)'}), '(x=-0.00951054166666622, y=0.31093590624999734, dpsi=-94.19544791666682,\n deps=-10.295645833333051, dy=-0.10067361111115315, dx=-\n 0.06829513888889051, lod=1.6242802083331438, ut1_utc=\n 0.01756018472222477, tai_utc=36.0)\n', (766, 995), False, 'from beyond.dates.eop import Eop\n'), ((3604, 3636), 'pytest.skip', 'skip', (['"""matplotlib not installed"""'], {}), "('matplotlib not installed')\n", (3608, 3636), False, 'from pytest import fixture, mark, skip\n'), ((1764, 1788), 'beyond.dates.Date', 'Date', (['(2018)', '(4)', '(5)', '(16)', '(50)'], {}), '(2018, 4, 5, 16, 50)\n', (1768, 1788), False, 'from beyond.dates import Date, timedelta\n'), ((1804, 1822), 'beyond.dates.timedelta', 'timedelta', 
([], {'hours': '(6)'}), '(hours=6)\n', (1813, 1822), False, 'from beyond.dates import Date, timedelta\n'), ((1838, 1859), 'beyond.dates.timedelta', 'timedelta', ([], {'seconds': '(15)'}), '(seconds=15)\n', (1847, 1859), False, 'from beyond.dates import Date, timedelta\n'), ((2291, 2315), 'beyond.dates.Date', 'Date', (['(2018)', '(4)', '(5)', '(16)', '(50)'], {}), '(2018, 4, 5, 16, 50)\n', (2295, 2315), False, 'from beyond.dates import Date, timedelta\n'), ((2331, 2350), 'beyond.dates.timedelta', 'timedelta', ([], {'hours': '(15)'}), '(hours=15)\n', (2340, 2350), False, 'from beyond.dates import Date, timedelta\n'), ((2366, 2386), 'beyond.dates.timedelta', 'timedelta', ([], {'minutes': '(1)'}), '(minutes=1)\n', (2375, 2386), False, 'from beyond.dates import Date, timedelta\n'), ((2005, 2026), 'beyond.dates.timedelta', 'timedelta', ([], {'seconds': '(60)'}), '(seconds=60)\n', (2014, 2026), False, 'from beyond.dates import Date, timedelta\n'), ((2040, 2057), 'beyond.env.solarsystem.get_body', 'get_body', (['"""Earth"""'], {}), "('Earth')\n", (2048, 2057), False, 'from beyond.env.solarsystem import get_body\n'), ((2530, 2544), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2534, 2544), False, 'from pathlib import Path\n'), ((2611, 2625), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2615, 2625), False, 'from pathlib import Path\n'), ((2685, 2699), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2689, 2699), False, 'from pathlib import Path\n')] |
Electric-tric/diofant | diofant/tests/integrals/test_heurisch.py | 92c4bf0ef301e5d6f0cfab545b036e1cb7de3c0a | import pytest
from diofant import (Add, Derivative, Ei, Eq, Function, I, Integral, LambertW,
Piecewise, Rational, Sum, Symbol, acos, asin, asinh,
besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp,
root, simplify, sin, sinh, sqrt, symbols, tan)
from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper
__all__ = ()
x, y, z, nu = symbols('x,y,z,nu')
f = Function('f')
def test_components():
assert components(x*y, x) == {x}
assert components(1/(x + y), x) == {x}
assert components(sin(x), x) == {sin(x), x}
assert components(sin(x)*sqrt(log(x)), x) == \
{log(x), sin(x), sqrt(log(x)), x}
assert components(x*sin(exp(x)*y), x) == \
{sin(y*exp(x)), x, exp(x)}
assert components(x**Rational(17, 54)/sqrt(sin(x)), x) == \
{sin(x), root(x, 54), sqrt(sin(x)), x}
assert components(f(x), x) == \
{x, f(x)}
assert components(Derivative(f(x), x), x) == \
{x, f(x), Derivative(f(x), x)}
assert components(f(x)*diff(f(x), x), x) == \
{x, f(x), Derivative(f(x), x), Derivative(f(x), x)}
def test_heurisch_polynomials():
assert heurisch(1, x) == x
assert heurisch(x, x) == x**2/2
assert heurisch(x**17, x) == x**18/18
def test_heurisch_fractions():
assert heurisch(1/x, x) == log(x)
assert heurisch(1/(2 + x), x) == log(x + 2)
assert heurisch(1/(x + sin(y)), x) == log(x + sin(y))
# Up to a constant, where C = 5*pi*I/12, Mathematica gives identical
# result in the first case. The difference is because diofant changes
# signs of expressions without any care.
# XXX ^ ^ ^ is this still correct?
assert heurisch(5*x**5/(
2*x**6 - 5), x) in [5*log(2*x**6 - 5) / 12, 5*log(-2*x**6 + 5) / 12]
assert heurisch(5*x**5/(2*x**6 + 5), x) == 5*log(2*x**6 + 5) / 12
assert heurisch(1/x**2, x) == -1/x
assert heurisch(-1/x**5, x) == 1/(4*x**4)
def test_heurisch_log():
assert heurisch(log(x), x) == x*log(x) - x
assert heurisch(log(3*x), x) == -x + x*log(3) + x*log(x)
assert heurisch(log(x**2), x) in [x*log(x**2) - 2*x, 2*x*log(x) - 2*x]
def test_heurisch_exp():
assert heurisch(exp(x), x) == exp(x)
assert heurisch(exp(-x), x) == -exp(-x)
assert heurisch(exp(17*x), x) == exp(17*x) / 17
assert heurisch(x*exp(x), x) == x*exp(x) - exp(x)
assert heurisch(x*exp(x**2), x) == exp(x**2) / 2
assert heurisch(exp(-x**2), x) is None
assert heurisch(2**x, x) == 2**x/log(2)
assert heurisch(x*2**x, x) == x*2**x/log(2) - 2**x*log(2)**(-2)
assert heurisch(Integral(x**z*y, (y, 1, 2), (z, 2, 3)).function, x) == (x*x**z*y)/(z+1)
assert heurisch(Sum(x**z, (z, 1, 2)).function, z) == x**z/log(x)
def test_heurisch_trigonometric():
assert heurisch(sin(x), x) == -cos(x)
assert heurisch(pi*sin(x) + 1, x) == x - pi*cos(x)
assert heurisch(cos(x), x) == sin(x)
assert heurisch(tan(x), x) in [
log(1 + tan(x)**2)/2,
log(tan(x) + I) + I*x,
log(tan(x) - I) - I*x,
]
assert heurisch(sin(x)*sin(y), x) == -cos(x)*sin(y)
assert heurisch(sin(x)*sin(y), y) == -cos(y)*sin(x)
# gives sin(x) in answer when run via setup.py and cos(x) when run via py.test
assert heurisch(sin(x)*cos(x), x) in [sin(x)**2 / 2, -cos(x)**2 / 2]
assert heurisch(cos(x)/sin(x), x) == log(sin(x))
assert heurisch(x*sin(7*x), x) == sin(7*x) / 49 - x*cos(7*x) / 7
assert heurisch(1/pi/4 * x**2*cos(x), x) == 1/pi/4*(x**2*sin(x) -
2*sin(x) + 2*x*cos(x))
assert heurisch(acos(x/4) * asin(x/4), x) == 2*x - (sqrt(16 - x**2))*asin(x/4) \
+ (sqrt(16 - x**2))*acos(x/4) + x*asin(x/4)*acos(x/4)
def test_heurisch_hyperbolic():
assert heurisch(sinh(x), x) == cosh(x)
assert heurisch(cosh(x), x) == sinh(x)
assert heurisch(x*sinh(x), x) == x*cosh(x) - sinh(x)
assert heurisch(x*cosh(x), x) == x*sinh(x) - cosh(x)
assert heurisch(
x*asinh(x/2), x) == x**2*asinh(x/2)/2 + asinh(x/2) - x*sqrt(4 + x**2)/4
def test_heurisch_mixed():
assert heurisch(sin(x)*exp(x), x) == exp(x)*sin(x)/2 - exp(x)*cos(x)/2
def test_heurisch_radicals():
assert heurisch(1/sqrt(x), x) == 2*sqrt(x)
assert heurisch(1/sqrt(x)**3, x) == -2/sqrt(x)
assert heurisch(sqrt(x)**3, x) == 2*sqrt(x)**5/5
assert heurisch(sin(x)*sqrt(cos(x)), x) == -2*sqrt(cos(x))**3/3
y = Symbol('y')
assert heurisch(sin(y*sqrt(x)), x) == 2/y**2*sin(y*sqrt(x)) - \
2*sqrt(x)*cos(y*sqrt(x))/y
assert heurisch_wrapper(sin(y*sqrt(x)), x) == Piecewise(
(0, Eq(y, 0)),
(-2*sqrt(x)*cos(sqrt(x)*y)/y + 2*sin(sqrt(x)*y)/y**2, True))
y = Symbol('y', positive=True)
assert heurisch_wrapper(sin(y*sqrt(x)), x) == 2/y**2*sin(y*sqrt(x)) - \
2*sqrt(x)*cos(y*sqrt(x))/y
def test_heurisch_special():
assert heurisch(erf(x), x) == x*erf(x) + exp(-x**2)/sqrt(pi)
assert heurisch(exp(-x**2)*erf(x), x) == sqrt(pi)*erf(x)**2 / 4
def test_heurisch_symbolic_coeffs():
assert heurisch(1/(x + y), x) == log(x + y)
assert heurisch(1/(x + sqrt(2)), x) == log(x + sqrt(2))
assert simplify(diff(heurisch(log(x + y + z), y), y)) == log(x + y + z)
def test_heurisch_symbolic_coeffs_1130():
y = Symbol('y')
assert heurisch_wrapper(1/(x**2 + y), x) == Piecewise(
(-1/x, Eq(y, 0)),
(-I*log(x - I*sqrt(y))/(2*sqrt(y)) + I*log(x + I*sqrt(y))/(2*sqrt(y)), True))
y = Symbol('y', positive=True)
assert heurisch_wrapper(1/(x**2 + y), x) in [I/sqrt(y)*log(x + sqrt(-y))/2 -
I/sqrt(y)*log(x - sqrt(-y))/2, I*log(x + I*sqrt(y)) /
(2*sqrt(y)) - I*log(x - I*sqrt(y))/(2*sqrt(y))]
def test_heurisch_hacking():
assert (heurisch(sqrt(1 + 7*x**2), x, hints=[]) ==
x*sqrt(1 + 7*x**2)/2 + sqrt(7)*asinh(sqrt(7)*x)/14)
assert (heurisch(sqrt(1 - 7*x**2), x, hints=[]) ==
x*sqrt(1 - 7*x**2)/2 + sqrt(7)*asin(sqrt(7)*x)/14)
assert (heurisch(1/sqrt(1 + 7*x**2), x, hints=[]) ==
sqrt(7)*asinh(sqrt(7)*x)/7)
assert (heurisch(1/sqrt(1 - 7*x**2), x, hints=[]) ==
sqrt(7)*asin(sqrt(7)*x)/7)
assert (heurisch(exp(-7*x**2), x, hints=[]) == sqrt(7*pi)*erf(sqrt(7)*x)/14)
assert heurisch(1/sqrt(9 - 4*x**2), x, hints=[]) == asin(2*x/3)/2
assert heurisch(1/sqrt(9 + 4*x**2), x, hints=[]) == asinh(2*x/3)/2
assert heurisch(li(x), x, hints=[]) == x*li(x) - Ei(2*log(x))
def test_heurisch_function():
assert heurisch(f(x), x) is None
def test_heurisch_wrapper():
f = 1/(y + x)
assert heurisch_wrapper(f, x) == log(x + y)
f = 1/(y - x)
assert heurisch_wrapper(f, x) == -log(x - y)
f = 1/((y - x)*(y + x))
assert heurisch_wrapper(f, x) == \
Piecewise((1/x, Eq(y, 0)), (log(x + y)/2/y - log(x - y)/2/y, True))
# issue sympy/sympy#6926
f = sqrt(x**2/((y - x)*(y + x)))
assert heurisch_wrapper(f, x) == x*sqrt(x**2)*sqrt(1/(-x**2 + y**2)) \
- y**2*sqrt(x**2)*sqrt(1/(-x**2 + y**2))/x
def test_sympyissue_3609():
assert heurisch(1/(x * (1 + log(x)**2)), x) == I*log(log(x) + I)/2 - \
I*log(log(x) - I)/2
# These are examples from the Poor Man's Integrator
# http://www-sop.inria.fr/cafe/Manuel.Bronstein/pmint/examples/
def test_pmint_rat():
# TODO: heurisch() is off by a constant: -3/4. Possibly different permutation
# would give the optimal result?
def drop_const(expr, x):
if expr.is_Add:
return Add(*[ arg for arg in expr.args if arg.has(x) ])
else:
return expr
f = (x**7 - 24*x**4 - 4*x**2 + 8*x - 8)/(x**8 + 6*x**6 + 12*x**4 + 8*x**2)
g = (4 + 8*x**2 + 6*x + 3*x**3)/(x**5 + 4*x**3 + 4*x) + log(x)
assert drop_const(ratsimp(heurisch(f, x)), x) == g
def test_pmint_trig():
f = (x - tan(x)) / tan(x)**2 + tan(x)
g = -x**2/2 - x/tan(x) + log(tan(x)**2 + 1)/2
assert heurisch(f, x) == g
@pytest.mark.slow # 8 seconds on 3.4 GHz
def test_pmint_logexp():
f = (1 + x + x*exp(x))*(x + log(x) + exp(x) - 1)/(x + log(x) + exp(x))**2/x
g = log(x**2 + 2*x*exp(x) + 2*x*log(x) + exp(2*x) + 2*exp(x)*log(x) + log(x)**2)/2 + 1/(x + exp(x) + log(x))
# TODO: Optimal solution is g = 1/(x + log(x) + exp(x)) + log(x + log(x) + exp(x)),
# but Diofant requires a lot of guidance to properly simplify heurisch() output.
assert ratsimp(heurisch(f, x)) == g
@pytest.mark.slow # 8 seconds on 3.4 GHz
def test_pmint_erf():
f = exp(-x**2)*erf(x)/(erf(x)**3 - erf(x)**2 - erf(x) + 1)
g = sqrt(pi)*log(erf(x) - 1)/8 - sqrt(pi)*log(erf(x) + 1)/8 - sqrt(pi)/(4*erf(x) - 4)
assert ratsimp(heurisch(f, x)) == g
def test_pmint_LambertW():
f = LambertW(x)
g = x*LambertW(x) - x + x/LambertW(x)
assert heurisch(f, x) == g
@pytest.mark.xfail
def test_pmint_besselj():
# TODO: in both cases heurisch() gives None. Wrong besselj() derivative?
f = besselj(nu + 1, x)/besselj(nu, x)
g = nu*log(x) - log(besselj(nu, x))
assert simplify(heurisch(f, x) - g) == 0
f = (nu*besselj(nu, x) - x*besselj(nu + 1, x))/x
g = besselj(nu, x)
assert simplify(heurisch(f, x) - g) == 0
@pytest.mark.slow
def test_pmint_WrightOmega():
def omega(x):
return LambertW(exp(x))
f = (1 + omega(x) * (2 + cos(omega(x)) * (x + omega(x))))/(1 + omega(x))/(x + omega(x))
g = log(x + LambertW(exp(x))) + sin(LambertW(exp(x)))
assert heurisch(f, x) == g
def test_RR():
# Make sure the algorithm does the right thing if the ring is RR. See
# issue sympy/sympy#8685.
assert heurisch(sqrt(1 + 0.25*x**2), x, hints=[]) == \
0.5*x*sqrt(0.25*x**2 + 1) + 1.0*asinh(0.5*x)
# TODO: convert the rest of PMINT tests:
# Airy functions
# f = (x - AiryAi(x)*AiryAi(1, x)) / (x**2 - AiryAi(x)**2)
# g = Rational(1,2)*ln(x + AiryAi(x)) + Rational(1,2)*ln(x - AiryAi(x))
# f = x**2 * AiryAi(x)
# g = -AiryAi(x) + AiryAi(1, x)*x
# Whittaker functions
# f = WhittakerW(mu + 1, nu, x) / (WhittakerW(mu, nu, x) * x)
# g = x/2 - mu*ln(x) - ln(WhittakerW(mu, nu, x))
| [((423, 442), 'diofant.symbols', 'symbols', (['"""x,y,z,nu"""'], {}), "('x,y,z,nu')\n", (430, 442), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((447, 460), 'diofant.Function', 'Function', (['"""f"""'], {}), "('f')\n", (455, 460), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4471, 4482), 'diofant.Symbol', 'Symbol', (['"""y"""'], {}), "('y')\n", (4477, 4482), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4747, 4773), 'diofant.Symbol', 'Symbol', (['"""y"""'], {'positive': '(True)'}), "('y', positive=True)\n", (4753, 4773), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5324, 5335), 'diofant.Symbol', 'Symbol', (['"""y"""'], {}), "('y')\n", (5330, 5335), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5515, 5541), 'diofant.Symbol', 'Symbol', (['"""y"""'], {'positive': '(True)'}), "('y', positive=True)\n", (5521, 5541), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6989, 7023), 'diofant.sqrt', 'sqrt', (['(x ** 2 / ((y - x) * (y + x)))'], {}), '(x ** 2 / ((y - x) * (y + x)))\n', (6993, 7023), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8823, 8834), 'diofant.LambertW', 'LambertW', (['x'], {}), '(x)\n', (8831, 8834), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((9224, 9238), 'diofant.besselj', 'besselj', (['nu', 'x'], {}), '(nu, x)\n', (9231, 9238), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((497, 517), 'diofant.integrals.heurisch.components', 'components', (['(x * y)', 'x'], {}), '(x * y, x)\n', (507, 517), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((534, 560), 'diofant.integrals.heurisch.components', 'components', (['(1 / (x + y))', 'x'], {}), '(1 / (x + y), x)\n', (544, 560), False, 'from diofant.integrals.heurisch import components, 
heurisch, heurisch_wrapper\n'), ((1201, 1215), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['(1)', 'x'], {}), '(1, x)\n', (1209, 1215), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((1232, 1246), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['x', 'x'], {}), '(x, x)\n', (1240, 1246), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((1268, 1288), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['(x ** 17)', 'x'], {}), '(x ** 17, x)\n', (1276, 1288), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((1343, 1361), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['(1 / x)', 'x'], {}), '(1 / x, x)\n', (1351, 1361), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((1363, 1369), 'diofant.log', 'log', (['x'], {}), '(x)\n', (1366, 1369), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((1381, 1405), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['(1 / (2 + x))', 'x'], {}), '(1 / (2 + x), x)\n', (1389, 1405), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((1407, 1417), 'diofant.log', 'log', (['(x + 2)'], {}), '(x + 2)\n', (1410, 1417), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((1719, 1761), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['(5 * x ** 5 / (2 * x ** 6 - 5))', 'x'], {}), '(5 * x ** 5 / (2 * x ** 6 - 5), x)\n', (1727, 1761), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((1825, 1867), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['(5 * x ** 5 / (2 * x ** 6 + 5))', 'x'], {}), '(5 * x ** 5 / (2 * x ** 6 + 5), x)\n', (1833, 1867), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((1896, 1919), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['(1 / x ** 2)', 'x'], {}), '(1 / x ** 2, x)\n', (1904, 1919), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((1935, 1959), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['(-1 / x ** 5)', 'x'], {}), '(-1 / x ** 5, x)\n', (1943, 1959), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((2241, 2247), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (2244, 2247), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2507, 2526), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['(2 ** x)', 'x'], {}), '(2 ** x, x)\n', (2515, 2526), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((2551, 2574), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['(x * 2 ** x)', 'x'], {}), '(x * 2 ** x, x)\n', (2559, 2574), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((2939, 2945), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (2942, 2945), False, 'from diofant import 
Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3839, 3846), 'diofant.cosh', 'cosh', (['x'], {}), '(x)\n', (3843, 3846), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3882, 3889), 'diofant.sinh', 'sinh', (['x'], {}), '(x)\n', (3886, 3889), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5099, 5123), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['(1 / (x + y))', 'x'], {}), '(1 / (x + y), x)\n', (5107, 5123), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((5125, 5135), 'diofant.log', 'log', (['(x + y)'], {}), '(x + y)\n', (5128, 5135), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5257, 5271), 'diofant.log', 'log', (['(x + y + z)'], {}), '(x + y + z)\n', (5260, 5271), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5347, 5384), 'diofant.integrals.heurisch.heurisch_wrapper', 'heurisch_wrapper', (['(1 / (x ** 2 + y))', 'x'], {}), '(1 / (x ** 2 + y), x)\n', (5363, 5384), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((5553, 5590), 'diofant.integrals.heurisch.heurisch_wrapper', 'heurisch_wrapper', (['(1 / (x ** 2 + y))', 'x'], {}), '(1 / (x ** 2 + y), x)\n', (5569, 5590), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((6705, 6727), 'diofant.integrals.heurisch.heurisch_wrapper', 'heurisch_wrapper', (['f', 'x'], {}), '(f, x)\n', (6721, 6727), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((6731, 6741), 'diofant.log', 'log', (['(x + y)'], {}), '(x + y)\n', (6734, 6741), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6771, 6793), 'diofant.integrals.heurisch.heurisch_wrapper', 'heurisch_wrapper', (['f', 'x'], {}), '(f, x)\n', (6787, 6793), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((6848, 6870), 'diofant.integrals.heurisch.heurisch_wrapper', 'heurisch_wrapper', (['f', 'x'], {}), '(f, x)\n', (6864, 6870), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((7029, 7051), 'diofant.integrals.heurisch.heurisch_wrapper', 'heurisch_wrapper', (['f', 'x'], {}), '(f, x)\n', (7045, 7051), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((7837, 7843), 'diofant.log', 'log', (['x'], {}), '(x)\n', (7840, 7843), False, 'from diofant 
import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((7960, 7966), 'diofant.tan', 'tan', (['x'], {}), '(x)\n', (7963, 7966), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8029, 8043), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['f', 'x'], {}), '(f, x)\n', (8037, 8043), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((8889, 8903), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['f', 'x'], {}), '(f, x)\n', (8897, 8903), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((9042, 9060), 'diofant.besselj', 'besselj', (['(nu + 1)', 'x'], {}), '(nu + 1, x)\n', (9049, 9060), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((9061, 9075), 'diofant.besselj', 'besselj', (['nu', 'x'], {}), '(nu, x)\n', (9068, 9075), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((9548, 9562), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['f', 'x'], {}), '(f, x)\n', (9556, 9562), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((588, 594), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (591, 594), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((603, 609), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (606, 609), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((674, 680), 'diofant.log', 'log', (['x'], {}), '(x)\n', (677, 680), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((682, 688), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (685, 688), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((781, 787), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (784, 787), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((862, 868), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (865, 868), False, 'from diofant import Add, Derivative, Ei, Eq, 
Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((870, 881), 'diofant.root', 'root', (['x', '(54)'], {}), '(x, 54)\n', (874, 881), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2017, 2023), 'diofant.log', 'log', (['x'], {}), '(x)\n', (2020, 2023), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2064, 2074), 'diofant.log', 'log', (['(3 * x)'], {}), '(3 * x)\n', (2067, 2074), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2125, 2136), 'diofant.log', 'log', (['(x ** 2)'], {}), '(x ** 2)\n', (2128, 2136), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2227, 2233), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (2230, 2233), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2268, 2275), 'diofant.exp', 'exp', (['(-x)'], {}), '(-x)\n', (2271, 2275), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2284, 2291), 'diofant.exp', 'exp', (['(-x)'], {}), '(-x)\n', (2287, 2291), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2312, 2323), 'diofant.exp', 'exp', (['(17 * x)'], {}), '(17 * x)\n', (2315, 2323), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2329, 2340), 'diofant.exp', 'exp', (['(17 * x)'], {}), '(17 * x)\n', (2332, 2340), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2391, 2397), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (2394, 2397), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2437, 2448), 'diofant.exp', 'exp', (['(x ** 2)'], {}), '(x ** 2)\n', (2440, 2448), 
False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2472, 2484), 'diofant.exp', 'exp', (['(-x ** 2)'], {}), '(-x ** 2)\n', (2475, 2484), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2533, 2539), 'diofant.log', 'log', (['(2)'], {}), '(2)\n', (2536, 2539), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2763, 2769), 'diofant.log', 'log', (['x'], {}), '(x)\n', (2766, 2769), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2827, 2833), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (2830, 2833), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2842, 2848), 'diofant.cos', 'cos', (['x'], {}), '(x)\n', (2845, 2848), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2925, 2931), 'diofant.cos', 'cos', (['x'], {}), '(x)\n', (2928, 2931), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2966, 2972), 'diofant.tan', 'tan', (['x'], {}), '(x)\n', (2969, 2972), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3130, 3136), 'diofant.sin', 'sin', (['y'], {}), '(y)\n', (3133, 3136), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3186, 3192), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (3189, 3192), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3395, 3401), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (3398, 3401), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3824, 3831), 'diofant.sinh', 'sinh', (['x'], {}), '(x)\n', (3828, 3831), 
False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3867, 3874), 'diofant.cosh', 'cosh', (['x'], {}), '(x)\n', (3871, 3874), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3940, 3947), 'diofant.sinh', 'sinh', (['x'], {}), '(x)\n', (3944, 3947), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3997, 4004), 'diofant.cosh', 'cosh', (['x'], {}), '(x)\n', (4001, 4004), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4282, 4289), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4286, 4289), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4333, 4340), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4337, 4340), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4936, 4942), 'diofant.erf', 'erf', (['x'], {}), '(x)\n', (4939, 4942), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5875, 5895), 'diofant.sqrt', 'sqrt', (['(1 + 7 * x ** 2)'], {}), '(1 + 7 * x ** 2)\n', (5879, 5895), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5994, 6014), 'diofant.sqrt', 'sqrt', (['(1 - 7 * x ** 2)'], {}), '(1 - 7 * x ** 2)\n', (5998, 6014), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6307, 6323), 'diofant.exp', 'exp', (['(-7 * x ** 2)'], {}), '(-7 * x ** 2)\n', (6310, 6323), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6424, 6439), 'diofant.asin', 'asin', (['(2 * x / 3)'], {}), '(2 * x / 3)\n', (6428, 6439), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, 
symbols, tan\n'), ((6494, 6510), 'diofant.asinh', 'asinh', (['(2 * x / 3)'], {}), '(2 * x / 3)\n', (6499, 6510), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6530, 6535), 'diofant.li', 'li', (['x'], {}), '(x)\n', (6532, 6535), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6798, 6808), 'diofant.log', 'log', (['(x - y)'], {}), '(x - y)\n', (6801, 6808), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8505, 8519), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['f', 'x'], {}), '(f, x)\n', (8513, 8519), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((8600, 8612), 'diofant.exp', 'exp', (['(-x ** 2)'], {}), '(-x ** 2)\n', (8603, 8612), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8611, 8617), 'diofant.erf', 'erf', (['x'], {}), '(x)\n', (8614, 8617), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8721, 8729), 'diofant.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (8725, 8729), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8765, 8779), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['f', 'x'], {}), '(f, x)\n', (8773, 8779), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((8865, 8876), 'diofant.LambertW', 'LambertW', (['x'], {}), '(x)\n', (8873, 8876), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((9087, 9093), 'diofant.log', 'log', (['x'], {}), '(x)\n', (9090, 9093), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((9100, 9114), 'diofant.besselj', 'besselj', (['nu', 'x'], {}), '(nu, x)\n', (9107, 9114), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((9377, 9383), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (9380, 9383), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, 
Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((9709, 9732), 'diofant.sqrt', 'sqrt', (['(1 + 0.25 * x ** 2)'], {}), '(1 + 0.25 * x ** 2)\n', (9713, 9732), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((636, 642), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (639, 642), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((695, 701), 'diofant.log', 'log', (['x'], {}), '(x)\n', (698, 701), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((888, 894), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (891, 894), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((1468, 1474), 'diofant.sin', 'sin', (['y'], {}), '(y)\n', (1471, 1474), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((1863, 1882), 'diofant.log', 'log', (['(2 * x ** 6 + 5)'], {}), '(2 * x ** 6 + 5)\n', (1866, 1882), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2033, 2039), 'diofant.log', 'log', (['x'], {}), '(x)\n', (2036, 2039), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2098, 2104), 'diofant.log', 'log', (['x'], {}), '(x)\n', (2101, 2104), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2366, 2372), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (2369, 2372), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2382, 2388), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (2385, 2388), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2420, 2431), 'diofant.exp', 'exp', (['(x ** 2)'], {}), '(x ** 2)\n', (2423, 2431), False, 'from diofant import Add, Derivative, Ei, Eq, 
Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2581, 2587), 'diofant.log', 'log', (['(2)'], {}), '(2)\n', (2584, 2587), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2629, 2671), 'diofant.Integral', 'Integral', (['(x ** z * y)', '(y, 1, 2)', '(z, 2, 3)'], {}), '(x ** z * y, (y, 1, 2), (z, 2, 3))\n', (2637, 2671), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2721, 2743), 'diofant.Sum', 'Sum', (['(x ** z)', '(z, 1, 2)'], {}), '(x ** z, (z, 1, 2))\n', (2724, 2743), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2897, 2903), 'diofant.cos', 'cos', (['x'], {}), '(x)\n', (2900, 2903), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3101, 3107), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (3104, 3107), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3108, 3114), 'diofant.sin', 'sin', (['y'], {}), '(y)\n', (3111, 3114), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3123, 3129), 'diofant.cos', 'cos', (['x'], {}), '(x)\n', (3126, 3129), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3157, 3163), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (3160, 3163), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3164, 3170), 'diofant.sin', 'sin', (['y'], {}), '(y)\n', (3167, 3170), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3179, 3185), 'diofant.cos', 'cos', (['y'], {}), '(y)\n', (3182, 3185), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3297, 3303), 'diofant.sin', 
'sin', (['x'], {}), '(x)\n', (3300, 3303), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3304, 3310), 'diofant.cos', 'cos', (['x'], {}), '(x)\n', (3307, 3310), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3370, 3376), 'diofant.cos', 'cos', (['x'], {}), '(x)\n', (3373, 3376), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3377, 3383), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (3380, 3383), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3426, 3436), 'diofant.sin', 'sin', (['(7 * x)'], {}), '(7 * x)\n', (3429, 3436), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3442, 3452), 'diofant.sin', 'sin', (['(7 * x)'], {}), '(7 * x)\n', (3445, 3452), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3507, 3513), 'diofant.cos', 'cos', (['x'], {}), '(x)\n', (3510, 3513), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3643, 3654), 'diofant.acos', 'acos', (['(x / 4)'], {}), '(x / 4)\n', (3647, 3654), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3655, 3666), 'diofant.asin', 'asin', (['(x / 4)'], {}), '(x / 4)\n', (3659, 3666), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3760, 3771), 'diofant.acos', 'acos', (['(x / 4)'], {}), '(x / 4)\n', (3764, 3771), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3913, 3920), 'diofant.sinh', 'sinh', (['x'], {}), '(x)\n', (3917, 3920), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, 
tan\n'), ((3930, 3937), 'diofant.cosh', 'cosh', (['x'], {}), '(x)\n', (3934, 3937), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3970, 3977), 'diofant.cosh', 'cosh', (['x'], {}), '(x)\n', (3974, 3977), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3987, 3994), 'diofant.sinh', 'sinh', (['x'], {}), '(x)\n', (3991, 3994), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4037, 4049), 'diofant.asinh', 'asinh', (['(x / 2)'], {}), '(x / 2)\n', (4042, 4049), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4075, 4087), 'diofant.asinh', 'asinh', (['(x / 2)'], {}), '(x / 2)\n', (4080, 4087), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4156, 4162), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (4159, 4162), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4163, 4169), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (4166, 4169), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4265, 4272), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4269, 4272), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4361, 4368), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4365, 4368), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4415, 4421), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (4418, 4421), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4659, 4667), 'diofant.Eq', 'Eq', (['y', '(0)'], {}), '(y, 0)\n', (4661, 4667), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, 
sin, sinh, sqrt, symbols, tan\n'), ((4952, 4958), 'diofant.erf', 'erf', (['x'], {}), '(x)\n', (4955, 4958), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4961, 4973), 'diofant.exp', 'exp', (['(-x ** 2)'], {}), '(-x ** 2)\n', (4964, 4973), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4972, 4980), 'diofant.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (4976, 4980), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5001, 5013), 'diofant.exp', 'exp', (['(-x ** 2)'], {}), '(-x ** 2)\n', (5004, 5013), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5012, 5018), 'diofant.erf', 'erf', (['x'], {}), '(x)\n', (5015, 5018), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5026, 5034), 'diofant.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (5030, 5034), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5187, 5194), 'diofant.sqrt', 'sqrt', (['(2)'], {}), '(2)\n', (5191, 5194), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5410, 5418), 'diofant.Eq', 'Eq', (['y', '(0)'], {}), '(y, 0)\n', (5412, 5418), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6115, 6135), 'diofant.sqrt', 'sqrt', (['(1 + 7 * x ** 2)'], {}), '(1 + 7 * x ** 2)\n', (6119, 6135), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6161, 6168), 'diofant.sqrt', 'sqrt', (['(7)'], {}), '(7)\n', (6165, 6168), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6212, 6232), 'diofant.sqrt', 'sqrt', (['(1 - 7 * x ** 2)'], {}), '(1 - 7 * x ** 2)\n', (6216, 6232), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, 
acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6258, 6265), 'diofant.sqrt', 'sqrt', (['(7)'], {}), '(7)\n', (6262, 6265), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6337, 6349), 'diofant.sqrt', 'sqrt', (['(7 * pi)'], {}), '(7 * pi)\n', (6341, 6349), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6390, 6410), 'diofant.sqrt', 'sqrt', (['(9 - 4 * x ** 2)'], {}), '(9 - 4 * x ** 2)\n', (6394, 6410), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6460, 6480), 'diofant.sqrt', 'sqrt', (['(9 + 4 * x ** 2)'], {}), '(9 + 4 * x ** 2)\n', (6464, 6480), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6555, 6560), 'diofant.li', 'li', (['x'], {}), '(x)\n', (6557, 6560), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6900, 6908), 'diofant.Eq', 'Eq', (['y', '(0)'], {}), '(y, 0)\n', (6902, 6908), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((7068, 7096), 'diofant.sqrt', 'sqrt', (['(1 / (-x ** 2 + y ** 2))'], {}), '(1 / (-x ** 2 + y ** 2))\n', (7072, 7096), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((7875, 7889), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['f', 'x'], {}), '(f, x)\n', (7883, 7889), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((7938, 7944), 'diofant.tan', 'tan', (['x'], {}), '(x)\n', (7941, 7944), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((7948, 7954), 'diofant.tan', 'tan', (['x'], {}), '(x)\n', (7951, 7954), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((7987, 7993), 'diofant.tan', 'tan', (['x'], {}), '(x)\n', (7990, 7993), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, 
asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8303, 8309), 'diofant.log', 'log', (['x'], {}), '(x)\n', (8306, 8309), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8643, 8649), 'diofant.erf', 'erf', (['x'], {}), '(x)\n', (8646, 8649), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8845, 8856), 'diofant.LambertW', 'LambertW', (['x'], {}), '(x)\n', (8853, 8856), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((9137, 9151), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['f', 'x'], {}), '(f, x)\n', (9145, 9151), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((9175, 9189), 'diofant.besselj', 'besselj', (['nu', 'x'], {}), '(nu, x)\n', (9182, 9189), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((9194, 9212), 'diofant.besselj', 'besselj', (['(nu + 1)', 'x'], {}), '(nu + 1, x)\n', (9201, 9212), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((9260, 9274), 'diofant.integrals.heurisch.heurisch', 'heurisch', (['f', 'x'], {}), '(f, x)\n', (9268, 9274), False, 'from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper\n'), ((9527, 9533), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (9530, 9533), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((9762, 9785), 'diofant.sqrt', 'sqrt', (['(0.25 * x ** 2 + 1)'], {}), '(0.25 * x ** 2 + 1)\n', (9766, 9785), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((9788, 9802), 'diofant.asinh', 'asinh', (['(0.5 * x)'], {}), '(0.5 * x)\n', (9793, 9802), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((648, 654), 'diofant.log', 'log', (['x'], {}), '(x)\n', (651, 654), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((769, 775), 'diofant.exp', 'exp', 
(['x'], {}), '(x)\n', (772, 775), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((814, 830), 'diofant.Rational', 'Rational', (['(17)', '(54)'], {}), '(17, 54)\n', (822, 830), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((836, 842), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (839, 842), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((1445, 1451), 'diofant.sin', 'sin', (['y'], {}), '(y)\n', (1448, 1451), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((1767, 1786), 'diofant.log', 'log', (['(2 * x ** 6 - 5)'], {}), '(2 * x ** 6 - 5)\n', (1770, 1786), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((1791, 1811), 'diofant.log', 'log', (['(-2 * x ** 6 + 5)'], {}), '(-2 * x ** 6 + 5)\n', (1794, 1811), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2087, 2093), 'diofant.log', 'log', (['(3)'], {}), '(3)\n', (2090, 2093), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2145, 2156), 'diofant.log', 'log', (['(x ** 2)'], {}), '(x ** 2)\n', (2148, 2156), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2166, 2172), 'diofant.log', 'log', (['x'], {}), '(x)\n', (2169, 2172), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2595, 2601), 'diofant.log', 'log', (['(2)'], {}), '(2)\n', (2598, 2601), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2872, 2878), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (2875, 2878), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, 
sinh, sqrt, symbols, tan\n'), ((3319, 3325), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (3322, 3325), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3460, 3470), 'diofant.cos', 'cos', (['(7 * x)'], {}), '(7 * x)\n', (3463, 3470), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3614, 3620), 'diofant.cos', 'cos', (['x'], {}), '(x)\n', (3617, 3620), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3719, 3736), 'diofant.sqrt', 'sqrt', (['(16 - x ** 2)'], {}), '(16 - x ** 2)\n', (3723, 3736), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3736, 3747), 'diofant.acos', 'acos', (['(x / 4)'], {}), '(x / 4)\n', (3740, 3747), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3750, 3761), 'diofant.asin', 'asin', (['(x / 4)'], {}), '(x / 4)\n', (3754, 3761), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4090, 4106), 'diofant.sqrt', 'sqrt', (['(4 + x ** 2)'], {}), '(4 + x ** 2)\n', (4094, 4106), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4177, 4183), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (4180, 4183), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4184, 4190), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (4187, 4190), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4195, 4201), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (4198, 4201), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4202, 4208), 'diofant.cos', 'cos', (['x'], {}), '(x)\n', (4205, 4208), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, 
diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4312, 4319), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4316, 4319), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4381, 4388), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4385, 4388), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4427, 4433), 'diofant.cos', 'cos', (['x'], {}), '(x)\n', (4430, 4433), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4509, 4516), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4513, 4516), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4620, 4627), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4624, 4627), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4808, 4815), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4812, 4815), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5035, 5041), 'diofant.erf', 'erf', (['x'], {}), '(x)\n', (5038, 5041), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5163, 5170), 'diofant.sqrt', 'sqrt', (['(2)'], {}), '(2)\n', (5167, 5170), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5230, 5244), 'diofant.log', 'log', (['(x + y + z)'], {}), '(x + y + z)\n', (5233, 5244), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5923, 5943), 'diofant.sqrt', 'sqrt', (['(1 + 7 * x ** 2)'], {}), '(1 + 7 * x ** 2)\n', (5927, 5943), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5944, 5951), 'diofant.sqrt', 'sqrt', (['(7)'], {}), '(7)\n', (5948, 5951), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, 
Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6042, 6062), 'diofant.sqrt', 'sqrt', (['(1 - 7 * x ** 2)'], {}), '(1 - 7 * x ** 2)\n', (6046, 6062), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6063, 6070), 'diofant.sqrt', 'sqrt', (['(7)'], {}), '(7)\n', (6067, 6070), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6568, 6574), 'diofant.log', 'log', (['x'], {}), '(x)\n', (6571, 6574), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((7057, 7069), 'diofant.sqrt', 'sqrt', (['(x ** 2)'], {}), '(x ** 2)\n', (7061, 7069), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((7119, 7147), 'diofant.sqrt', 'sqrt', (['(1 / (-x ** 2 + y ** 2))'], {}), '(1 / (-x ** 2 + y ** 2))\n', (7123, 7147), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8185, 8191), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (8188, 8191), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8294, 8300), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (8297, 8300), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8663, 8671), 'diofant.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (8667, 8671), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8692, 8700), 'diofant.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (8696, 8700), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8733, 8739), 'diofant.erf', 'erf', (['x'], {}), '(x)\n', (8736, 8739), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((9503, 9509), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (9506, 9509), False, 'from diofant import Add, 
Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((735, 741), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (738, 741), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3024, 3030), 'diofant.tan', 'tan', (['x'], {}), '(x)\n', (3027, 3030), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3055, 3061), 'diofant.tan', 'tan', (['x'], {}), '(x)\n', (3058, 3061), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3335, 3341), 'diofant.cos', 'cos', (['x'], {}), '(x)\n', (3338, 3341), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3534, 3540), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (3537, 3540), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3601, 3607), 'diofant.sin', 'sin', (['x'], {}), '(x)\n', (3604, 3607), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3679, 3696), 'diofant.sqrt', 'sqrt', (['(16 - x ** 2)'], {}), '(16 - x ** 2)\n', (3683, 3696), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((3696, 3707), 'diofant.asin', 'asin', (['(x / 4)'], {}), '(x / 4)\n', (3700, 3707), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4060, 4072), 'diofant.asinh', 'asinh', (['(x / 2)'], {}), '(x / 2)\n', (4065, 4072), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4450, 4456), 'diofant.cos', 'cos', (['x'], {}), '(x)\n', (4453, 4456), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4538, 4545), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4542, 4545), False, 
'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4561, 4568), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4565, 4568), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4837, 4844), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4841, 4844), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4860, 4867), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4864, 4867), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5778, 5785), 'diofant.sqrt', 'sqrt', (['y'], {}), '(y)\n', (5782, 5785), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5813, 5820), 'diofant.sqrt', 'sqrt', (['y'], {}), '(y)\n', (5817, 5820), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6175, 6182), 'diofant.sqrt', 'sqrt', (['(7)'], {}), '(7)\n', (6179, 6182), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6271, 6278), 'diofant.sqrt', 'sqrt', (['(7)'], {}), '(7)\n', (6275, 6278), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6352, 6359), 'diofant.sqrt', 'sqrt', (['(7)'], {}), '(7)\n', (6356, 6359), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((7108, 7120), 'diofant.sqrt', 'sqrt', (['(x ** 2)'], {}), '(x ** 2)\n', (7112, 7120), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8000, 8006), 'diofant.tan', 'tan', (['x'], {}), '(x)\n', (8003, 8006), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8137, 8143), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (8140, 
8143), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8159, 8165), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (8162, 8165), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8176, 8182), 'diofant.log', 'log', (['x'], {}), '(x)\n', (8179, 8182), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8272, 8278), 'diofant.log', 'log', (['x'], {}), '(x)\n', (8275, 8278), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8619, 8625), 'diofant.erf', 'erf', (['x'], {}), '(x)\n', (8622, 8625), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8631, 8637), 'diofant.erf', 'erf', (['x'], {}), '(x)\n', (8634, 8637), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((2998, 3004), 'diofant.tan', 'tan', (['x'], {}), '(x)\n', (3001, 3004), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4575, 4582), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4579, 4582), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4874, 4881), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4878, 4881), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5455, 5462), 'diofant.sqrt', 'sqrt', (['y'], {}), '(y)\n', (5459, 5462), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5490, 5497), 'diofant.sqrt', 'sqrt', (['y'], {}), '(y)\n', (5494, 5497), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5593, 5600), 'diofant.sqrt', 'sqrt', (['y'], {}), '(y)\n', (5597, 5600), False, 
'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5674, 5681), 'diofant.sqrt', 'sqrt', (['y'], {}), '(y)\n', (5678, 5681), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5958, 5965), 'diofant.sqrt', 'sqrt', (['(7)'], {}), '(7)\n', (5962, 5965), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6076, 6083), 'diofant.sqrt', 'sqrt', (['(7)'], {}), '(7)\n', (6080, 6083), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6912, 6922), 'diofant.log', 'log', (['(x + y)'], {}), '(x + y)\n', (6915, 6922), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((6929, 6939), 'diofant.log', 'log', (['(x - y)'], {}), '(x - y)\n', (6932, 6939), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((7206, 7212), 'diofant.log', 'log', (['x'], {}), '(x)\n', (7209, 7212), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((7231, 7237), 'diofant.log', 'log', (['x'], {}), '(x)\n', (7234, 7237), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((7263, 7269), 'diofant.log', 'log', (['x'], {}), '(x)\n', (7266, 7269), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8150, 8156), 'diofant.log', 'log', (['x'], {}), '(x)\n', (8153, 8156), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8243, 8253), 'diofant.exp', 'exp', (['(2 * x)'], {}), '(2 * x)\n', (8246, 8253), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8263, 8269), 'diofant.log', 'log', (['x'], {}), '(x)\n', 
(8266, 8269), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8676, 8682), 'diofant.erf', 'erf', (['x'], {}), '(x)\n', (8679, 8682), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8705, 8711), 'diofant.erf', 'erf', (['x'], {}), '(x)\n', (8708, 8711), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4682, 4689), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4686, 4689), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5609, 5617), 'diofant.sqrt', 'sqrt', (['(-y)'], {}), '(-y)\n', (5613, 5617), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5690, 5698), 'diofant.sqrt', 'sqrt', (['(-y)'], {}), '(-y)\n', (5694, 5698), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8256, 8262), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (8259, 8262), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4694, 4701), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4698, 4701), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((4715, 4722), 'diofant.sqrt', 'sqrt', (['x'], {}), '(x)\n', (4719, 4722), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5715, 5722), 'diofant.sqrt', 'sqrt', (['y'], {}), '(y)\n', (5719, 5722), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5801, 5808), 'diofant.sqrt', 'sqrt', (['y'], {}), '(y)\n', (5805, 5808), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8234, 8240), 'diofant.log', 'log', (['x'], {}), '(x)\n', 
(8237, 8240), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5443, 5450), 'diofant.sqrt', 'sqrt', (['y'], {}), '(y)\n', (5447, 5450), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((5478, 5485), 'diofant.sqrt', 'sqrt', (['y'], {}), '(y)\n', (5482, 5485), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n'), ((8221, 8227), 'diofant.exp', 'exp', (['x'], {}), '(x)\n', (8224, 8227), False, 'from diofant import Add, Derivative, Ei, Eq, Function, I, Integral, LambertW, Piecewise, Rational, Sum, Symbol, acos, asin, asinh, besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp, root, simplify, sin, sinh, sqrt, symbols, tan\n')] |
carlosb1/kornia | kornia/color/adjust.py | a2b34d497314e7ed65f114401efdd3cc9ba2077c | from typing import Union
import torch
import torch.nn as nn
from kornia.color.hsv import rgb_to_hsv, hsv_to_rgb
from kornia.constants import pi
def adjust_saturation_raw(input: torch.Tensor, saturation_factor: Union[float, torch.Tensor]) -> torch.Tensor:
r"""Adjust color saturation of an image. Expecting input to be in hsv format already.
See :class:`~kornia.color.AdjustSaturation` for details.
"""
if not torch.is_tensor(input):
raise TypeError(f"Input type is not a torch.Tensor. Got {type(input)}")
if not isinstance(saturation_factor, (float, torch.Tensor,)):
raise TypeError(f"The saturation_factor should be a float number or torch.Tensor."
f"Got {type(saturation_factor)}")
if isinstance(saturation_factor, float):
saturation_factor = torch.tensor([saturation_factor])
saturation_factor = saturation_factor.to(input.device).to(input.dtype)
if (saturation_factor < 0).any():
raise ValueError(f"Saturation factor must be non-negative. Got {saturation_factor}")
for _ in input.shape[1:]:
saturation_factor = torch.unsqueeze(saturation_factor, dim=-1)
# unpack the hsv values
h, s, v = torch.chunk(input, chunks=3, dim=-3)
    # scale the saturation channel by the factor and clamp it to the valid range [0, 1]
s_out: torch.Tensor = torch.clamp(s * saturation_factor, min=0, max=1)
    # pack the corrected saturation channel back together with hue and value
out: torch.Tensor = torch.cat([h, s_out, v], dim=-3)
return out
def adjust_saturation(input: torch.Tensor, saturation_factor: Union[float, torch.Tensor]) -> torch.Tensor:
r"""Adjust color saturation of an image.
See :class:`~kornia.color.AdjustSaturation` for details.
"""
# convert the rgb image to hsv
x_hsv: torch.Tensor = rgb_to_hsv(input)
# perform the conversion
x_adjusted: torch.Tensor = adjust_saturation_raw(x_hsv, saturation_factor)
# convert back to rgb
out: torch.Tensor = hsv_to_rgb(x_adjusted)
return out
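# A minimal usage sketch for the functional form above, assuming a hypothetical batched
# RGB tensor `img` in [0, 1]; a tensor factor is broadcast over the batch, so each sample
# can receive its own saturation factor.
#
#     import torch
#     img = torch.rand(2, 3, 4, 5)                            # two RGB images
#     out = adjust_saturation(img, 2.)                        # same factor for the batch
#     out = adjust_saturation(img, torch.tensor([0.5, 2.]))   # one factor per sample
#     assert out.shape == img.shape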
def adjust_hue_raw(input: torch.Tensor, hue_factor: Union[float, torch.Tensor]) -> torch.Tensor:
r"""Adjust hue of an image. Expecting input to be in hsv format already.
See :class:`~kornia.color.AdjustHue` for details.
"""
if not torch.is_tensor(input):
raise TypeError(f"Input type is not a torch.Tensor. Got {type(input)}")
if not isinstance(hue_factor, (float, torch.Tensor)):
raise TypeError(f"The hue_factor should be a float number or torch.Tensor in the range between"
f" [-PI, PI]. Got {type(hue_factor)}")
if isinstance(hue_factor, float):
hue_factor = torch.tensor([hue_factor])
hue_factor = hue_factor.to(input.device).to(input.dtype)
if ((hue_factor < -pi) | (hue_factor > pi)).any():
raise ValueError(f"Hue-factor must be in the range [-PI, PI]. Got {hue_factor}")
for _ in input.shape[1:]:
hue_factor = torch.unsqueeze(hue_factor, dim=-1)
# unpack the hsv values
h, s, v = torch.chunk(input, chunks=3, dim=-3)
    # shift the hue channel and wrap it around modulo 2*pi
divisor: float = 2 * pi.item()
h_out: torch.Tensor = torch.fmod(h + hue_factor, divisor)
    # pack the corrected hue channel back together with saturation and value
out: torch.Tensor = torch.cat([h_out, s, v], dim=-3)
return out
def adjust_hue(input: torch.Tensor, hue_factor: Union[float, torch.Tensor]) -> torch.Tensor:
r"""Adjust hue of an image.
See :class:`~kornia.color.AdjustHue` for details.
"""
# convert the rgb image to hsv
x_hsv: torch.Tensor = rgb_to_hsv(input)
# perform the conversion
x_adjusted: torch.Tensor = adjust_hue_raw(x_hsv, hue_factor)
# convert back to rgb
out: torch.Tensor = hsv_to_rgb(x_adjusted)
return out
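# A minimal usage sketch, assuming a hypothetical RGB tensor `img` in [0, 1]; the hue
# shift is given in radians and must stay inside [-PI, PI].
#
#     import math
#     import torch
#     img = torch.rand(1, 3, 4, 5)
#     out = adjust_hue(img, math.pi / 2)                   # rotate the hue by +90 degrees
#     out = adjust_hue(img, torch.tensor([-math.pi / 2]))  # tensor form, -90 degrees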
def adjust_gamma(input: torch.Tensor, gamma: Union[float, torch.Tensor],
gain: Union[float, torch.Tensor] = 1.) -> torch.Tensor:
r"""Perform gamma correction on an image.
See :class:`~kornia.color.AdjustGamma` for details.
"""
if not torch.is_tensor(input):
raise TypeError(f"Input type is not a torch.Tensor. Got {type(input)}")
if not isinstance(gamma, (float, torch.Tensor)):
raise TypeError(f"The gamma should be a positive float or torch.Tensor. Got {type(gamma)}")
if not isinstance(gain, (float, torch.Tensor)):
raise TypeError(f"The gain should be a positive float or torch.Tensor. Got {type(gain)}")
if isinstance(gamma, float):
gamma = torch.tensor([gamma])
if isinstance(gain, float):
gain = torch.tensor([gain])
gamma = gamma.to(input.device).to(input.dtype)
gain = gain.to(input.device).to(input.dtype)
if (gamma < 0.0).any():
raise ValueError(f"Gamma must be non-negative. Got {gamma}")
if (gain < 0.0).any():
raise ValueError(f"Gain must be non-negative. Got {gain}")
for _ in input.shape[1:]:
gamma = torch.unsqueeze(gamma, dim=-1)
gain = torch.unsqueeze(gain, dim=-1)
# Apply the gamma correction
x_adjust: torch.Tensor = gain * torch.pow(input, gamma)
    # Clamp the result back to the valid pixel range [0, 1]
out: torch.Tensor = torch.clamp(x_adjust, 0.0, 1.0)
return out
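# A minimal usage sketch, assuming a hypothetical tensor `img` in [0, 1]; a gamma larger
# than 1 darkens the image, a gamma smaller than 1 brightens it, and `gain` is a constant
# multiplier applied after the power.
#
#     import torch
#     img = torch.rand(1, 1, 4, 5)
#     darker = adjust_gamma(img, gamma=2.2)              # darken with a display-like gamma
#     brighter = adjust_gamma(img, gamma=0.5, gain=1.5)  # brighten and amplify, then clamp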
def adjust_contrast(input: torch.Tensor,
contrast_factor: Union[float, torch.Tensor]) -> torch.Tensor:
r"""Adjust Contrast of an image.
See :class:`~kornia.color.AdjustContrast` for details.
"""
if not torch.is_tensor(input):
raise TypeError(f"Input type is not a torch.Tensor. Got {type(input)}")
if not isinstance(contrast_factor, (float, torch.Tensor,)):
raise TypeError(f"The factor should be either a float or torch.Tensor. "
f"Got {type(contrast_factor)}")
if isinstance(contrast_factor, float):
contrast_factor = torch.tensor([contrast_factor])
contrast_factor = contrast_factor.to(input.device).to(input.dtype)
if (contrast_factor < 0).any():
raise ValueError(f"Contrast factor must be non-negative. Got {contrast_factor}")
for _ in input.shape[1:]:
contrast_factor = torch.unsqueeze(contrast_factor, dim=-1)
# Apply contrast factor to each channel
x_adjust: torch.Tensor = input * contrast_factor
    # Clamp the result back to the valid pixel range [0, 1]
out: torch.Tensor = torch.clamp(x_adjust, 0.0, 1.0)
return out
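# A minimal usage sketch, assuming a hypothetical batched tensor `img` in [0, 1]; the
# factor is purely multiplicative (OpenCV convention), so 0 yields a black image and
# values above 1 amplify intensities before clamping.
#
#     import torch
#     img = torch.rand(2, 3, 4, 5)
#     out = adjust_contrast(img, 0.5)                        # same factor for the batch
#     out = adjust_contrast(img, torch.tensor([0.7, 1.3]))   # one factor per sample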
def adjust_brightness(input: torch.Tensor,
brightness_factor: Union[float, torch.Tensor]) -> torch.Tensor:
r"""Adjust Brightness of an image.
See :class:`~kornia.color.AdjustBrightness` for details.
"""
if not torch.is_tensor(input):
raise TypeError(f"Input type is not a torch.Tensor. Got {type(input)}")
if not isinstance(brightness_factor, (float, torch.Tensor,)):
raise TypeError(f"The factor should be either a float or torch.Tensor. "
f"Got {type(brightness_factor)}")
if isinstance(brightness_factor, float):
brightness_factor = torch.tensor([brightness_factor])
brightness_factor = brightness_factor.to(input.device).to(input.dtype)
for _ in input.shape[1:]:
brightness_factor = torch.unsqueeze(brightness_factor, dim=-1)
# Apply brightness factor to each channel
x_adjust: torch.Tensor = input + brightness_factor
# Truncate between pixel values
out: torch.Tensor = torch.clamp(x_adjust, 0.0, 1.0)
return out
class AdjustSaturation(nn.Module):
r"""Adjust color saturation of an image.
The input image is expected to be an RGB image in the range of [0, 1].
Args:
input (torch.Tensor): Image/Tensor to be adjusted in the shape of (\*, N).
saturation_factor (float): How much to adjust the saturation. 0 will give a black
and white image, 1 will give the original image while 2 will enhance the saturation
by a factor of 2.
Returns:
torch.Tensor: Adjusted image.
"""
def __init__(self, saturation_factor: Union[float, torch.Tensor]) -> None:
super(AdjustSaturation, self).__init__()
self.saturation_factor: Union[float, torch.Tensor] = saturation_factor
def forward(self, input: torch.Tensor) -> torch.Tensor: # type: ignore
return adjust_saturation(input, self.saturation_factor)
class AdjustHue(nn.Module):
r"""Adjust hue of an image.
The input image is expected to be an RGB image in the range of [0, 1].
Args:
input (torch.Tensor): Image/Tensor to be adjusted in the shape of (\*, N).
hue_factor (float): How much to shift the hue channel. Should be in [-PI, PI]. PI
and -PI give complete reversal of hue channel in HSV space in positive and negative
direction respectively. 0 means no shift. Therefore, both -PI and PI will give an
image with complementary colors while 0 gives the original image.
Returns:
torch.Tensor: Adjusted image.
"""
def __init__(self, hue_factor: Union[float, torch.Tensor]) -> None:
super(AdjustHue, self).__init__()
self.hue_factor: Union[float, torch.Tensor] = hue_factor
def forward(self, input: torch.Tensor) -> torch.Tensor: # type: ignore
return adjust_hue(input, self.hue_factor)
class AdjustGamma(nn.Module):
r"""Perform gamma correction on an image.
The input image is expected to be in the range of [0, 1].
Args:
input (torch.Tensor): Image/Tensor to be adjusted in the shape of (\*, N).
        gamma (float): Non negative real number, same as :math:`\gamma` in the equation.
            gamma larger than 1 makes the shadows darker, while gamma smaller than 1 makes
dark regions lighter.
gain (float, optional): The constant multiplier. Default 1.
Returns:
torch.Tensor: Adjusted image.
"""
def __init__(self, gamma: Union[float, torch.Tensor], gain: Union[float, torch.Tensor] = 1.) -> None:
super(AdjustGamma, self).__init__()
self.gamma: Union[float, torch.Tensor] = gamma
self.gain: Union[float, torch.Tensor] = gain
def forward(self, input: torch.Tensor) -> torch.Tensor: # type: ignore
return adjust_gamma(input, self.gamma, self.gain)
class AdjustContrast(nn.Module):
    r"""Adjust Contrast of an image. This implementation aligns with OpenCV, not PIL. Hence,
the output differs from TorchVision.
The input image is expected to be in the range of [0, 1].
Args:
input (torch.Tensor): Image to be adjusted in the shape of (\*, N).
contrast_factor (Union[float, torch.Tensor]): Contrast adjust factor per element
            in the batch. 0 generates a completely black image, 1 does not modify
            the input image, while any other non-negative number modifies the
            contrast by this factor.
Returns:
torch.Tensor: Adjusted image.
"""
def __init__(self, contrast_factor: Union[float, torch.Tensor]) -> None:
super(AdjustContrast, self).__init__()
self.contrast_factor: Union[float, torch.Tensor] = contrast_factor
def forward(self, input: torch.Tensor) -> torch.Tensor: # type: ignore
return adjust_contrast(input, self.contrast_factor)
class AdjustBrightness(nn.Module):
    r"""Adjust Brightness of an image. This implementation aligns with OpenCV, not PIL. Hence,
the output differs from TorchVision.
The input image is expected to be in the range of [0, 1].
Args:
input (torch.Tensor): Image/Input to be adjusted in the shape of (\*, N).
brightness_factor (Union[float, torch.Tensor]): Brightness adjust factor per element
            in the batch. 0 does not modify the input image, while any other number modifies the
brightness.
Returns:
torch.Tensor: Adjusted image.
"""
def __init__(self, brightness_factor: Union[float, torch.Tensor]) -> None:
super(AdjustBrightness, self).__init__()
self.brightness_factor: Union[float, torch.Tensor] = brightness_factor
def forward(self, input: torch.Tensor) -> torch.Tensor: # type: ignore
return adjust_brightness(input, self.brightness_factor)
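# A minimal usage sketch of the functional operators defined above, assuming torch
# is imported at the top of this module (as elsewhere in the file). The tensor shape
# and factor values below are illustrative assumptions, not values from the module.
if __name__ == "__main__":
    _imgs = torch.rand(2, 3, 8, 8)  # batch of RGB images with values in [0, 1]
    _out = adjust_brightness(_imgs, 0.25)  # add 0.25 to every pixel, then clamp to [0, 1]
    _out = adjust_contrast(_out, 0.9)  # scale pixel values by 0.9, then clamp to [0, 1]
    _out = adjust_gamma(_out, gamma=2.2, gain=1.0)  # per-pixel power-law correction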
| [((1212, 1248), 'torch.chunk', 'torch.chunk', (['input'], {'chunks': '(3)', 'dim': '(-3)'}), '(input, chunks=3, dim=-3)\n', (1223, 1248), False, 'import torch\n'), ((1322, 1370), 'torch.clamp', 'torch.clamp', (['(s * saturation_factor)'], {'min': '(0)', 'max': '(1)'}), '(s * saturation_factor, min=0, max=1)\n', (1333, 1370), False, 'import torch\n'), ((1435, 1467), 'torch.cat', 'torch.cat', (['[h, s_out, v]'], {'dim': '(-3)'}), '([h, s_out, v], dim=-3)\n', (1444, 1467), False, 'import torch\n'), ((1770, 1787), 'kornia.color.hsv.rgb_to_hsv', 'rgb_to_hsv', (['input'], {}), '(input)\n', (1780, 1787), False, 'from kornia.color.hsv import rgb_to_hsv, hsv_to_rgb\n'), ((1948, 1970), 'kornia.color.hsv.hsv_to_rgb', 'hsv_to_rgb', (['x_adjusted'], {}), '(x_adjusted)\n', (1958, 1970), False, 'from kornia.color.hsv import rgb_to_hsv, hsv_to_rgb\n'), ((2993, 3029), 'torch.chunk', 'torch.chunk', (['input'], {'chunks': '(3)', 'dim': '(-3)'}), '(input, chunks=3, dim=-3)\n', (3004, 3029), False, 'import torch\n'), ((3138, 3173), 'torch.fmod', 'torch.fmod', (['(h + hue_factor)', 'divisor'], {}), '(h + hue_factor, divisor)\n', (3148, 3173), False, 'import torch\n'), ((3238, 3270), 'torch.cat', 'torch.cat', (['[h_out, s, v]'], {'dim': '(-3)'}), '([h_out, s, v], dim=-3)\n', (3247, 3270), False, 'import torch\n'), ((3539, 3556), 'kornia.color.hsv.rgb_to_hsv', 'rgb_to_hsv', (['input'], {}), '(input)\n', (3549, 3556), False, 'from kornia.color.hsv import rgb_to_hsv, hsv_to_rgb\n'), ((3703, 3725), 'kornia.color.hsv.hsv_to_rgb', 'hsv_to_rgb', (['x_adjusted'], {}), '(x_adjusted)\n', (3713, 3725), False, 'from kornia.color.hsv import rgb_to_hsv, hsv_to_rgb\n'), ((5135, 5166), 'torch.clamp', 'torch.clamp', (['x_adjust', '(0.0)', '(1.0)'], {}), '(x_adjust, 0.0, 1.0)\n', (5146, 5166), False, 'import torch\n'), ((6288, 6319), 'torch.clamp', 'torch.clamp', (['x_adjust', '(0.0)', '(1.0)'], {}), '(x_adjust, 0.0, 1.0)\n', (6299, 6319), False, 'import torch\n'), ((7347, 7378), 'torch.clamp', 'torch.clamp', (['x_adjust', '(0.0)', '(1.0)'], {}), '(x_adjust, 0.0, 1.0)\n', (7358, 7378), False, 'import torch\n'), ((431, 453), 'torch.is_tensor', 'torch.is_tensor', (['input'], {}), '(input)\n', (446, 453), False, 'import torch\n'), ((825, 858), 'torch.tensor', 'torch.tensor', (['[saturation_factor]'], {}), '([saturation_factor])\n', (837, 858), False, 'import torch\n'), ((1126, 1168), 'torch.unsqueeze', 'torch.unsqueeze', (['saturation_factor'], {'dim': '(-1)'}), '(saturation_factor, dim=-1)\n', (1141, 1168), False, 'import torch\n'), ((2238, 2260), 'torch.is_tensor', 'torch.is_tensor', (['input'], {}), '(input)\n', (2253, 2260), False, 'import torch\n'), ((2628, 2654), 'torch.tensor', 'torch.tensor', (['[hue_factor]'], {}), '([hue_factor])\n', (2640, 2654), False, 'import torch\n'), ((2914, 2949), 'torch.unsqueeze', 'torch.unsqueeze', (['hue_factor'], {'dim': '(-1)'}), '(hue_factor, dim=-1)\n', (2929, 2949), False, 'import torch\n'), ((3102, 3111), 'kornia.constants.pi.item', 'pi.item', ([], {}), '()\n', (3109, 3111), False, 'from kornia.constants import pi\n'), ((4013, 4035), 'torch.is_tensor', 'torch.is_tensor', (['input'], {}), '(input)\n', (4028, 4035), False, 'import torch\n'), ((4472, 4493), 'torch.tensor', 'torch.tensor', (['[gamma]'], {}), '([gamma])\n', (4484, 4493), False, 'import torch\n'), ((4542, 4562), 'torch.tensor', 'torch.tensor', (['[gain]'], {}), '([gain])\n', (4554, 4562), False, 'import torch\n'), ((4904, 4934), 'torch.unsqueeze', 'torch.unsqueeze', (['gamma'], {'dim': '(-1)'}), '(gamma, dim=-1)\n', (4919, 
4934), False, 'import torch\n'), ((4950, 4979), 'torch.unsqueeze', 'torch.unsqueeze', (['gain'], {'dim': '(-1)'}), '(gain, dim=-1)\n', (4965, 4979), False, 'import torch\n'), ((5050, 5073), 'torch.pow', 'torch.pow', (['input', 'gamma'], {}), '(input, gamma)\n', (5059, 5073), False, 'import torch\n'), ((5425, 5447), 'torch.is_tensor', 'torch.is_tensor', (['input'], {}), '(input)\n', (5440, 5447), False, 'import torch\n'), ((5801, 5832), 'torch.tensor', 'torch.tensor', (['[contrast_factor]'], {}), '([contrast_factor])\n', (5813, 5832), False, 'import torch\n'), ((6088, 6128), 'torch.unsqueeze', 'torch.unsqueeze', (['contrast_factor'], {'dim': '(-1)'}), '(contrast_factor, dim=-1)\n', (6103, 6128), False, 'import torch\n'), ((6588, 6610), 'torch.is_tensor', 'torch.is_tensor', (['input'], {}), '(input)\n', (6603, 6610), False, 'import torch\n'), ((6972, 7005), 'torch.tensor', 'torch.tensor', (['[brightness_factor]'], {}), '([brightness_factor])\n', (6984, 7005), False, 'import torch\n'), ((7141, 7183), 'torch.unsqueeze', 'torch.unsqueeze', (['brightness_factor'], {'dim': '(-1)'}), '(brightness_factor, dim=-1)\n', (7156, 7183), False, 'import torch\n')] |
rmccann01/playground | pommerman/__init__.py | 354041cd1d9b70ffe82c18fb5b4035fab721eb92 | '''Entry point into the pommerman module'''
import gym
import inspect
from . import agents
from . import configs
from . import constants
from . import forward_model
from . import helpers
from . import utility
from . import network
gym.logger.set_level(40)
REGISTRY = None
def _register():
global REGISTRY
REGISTRY = []
for name, f in inspect.getmembers(configs, inspect.isfunction):
if not name.endswith('_env'):
continue
config = f()
gym.envs.registration.register(
id=config['env_id'],
entry_point=config['env_entry_point'],
kwargs=config['env_kwargs']
)
REGISTRY.append(config['env_id'])
# Register environments with gym
_register()
def make(config_id, agent_list, game_state_file=None, render_mode='human'):
'''Makes the pommerman env and registers it with gym'''
assert config_id in REGISTRY, "Unknown configuration '{}'. " \
"Possible values: {}".format(config_id, REGISTRY)
env = gym.make(config_id)
for id_, agent in enumerate(agent_list):
assert isinstance(agent, agents.BaseAgent)
# NOTE: This is IMPORTANT so that the agent character is initialized
agent.init_agent(id_, env.spec._kwargs['game_type'])
env.set_agents(agent_list)
env.set_init_game_state(game_state_file)
env.set_render_mode(render_mode)
return env
from . import cli
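# A hedged usage sketch for make(); the config id and agent class below are
# assumptions based on the configs/agents modules and are not verified in this file:
#
#     import pommerman
#     from pommerman import agents
#
#     agent_list = [agents.SimpleAgent() for _ in range(4)]
#     env = pommerman.make('PommeFFACompetition-v0', agent_list)
#     obs = env.reset()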
| [((232, 256), 'gym.logger.set_level', 'gym.logger.set_level', (['(40)'], {}), '(40)\n', (252, 256), False, 'import gym\n'), ((349, 396), 'inspect.getmembers', 'inspect.getmembers', (['configs', 'inspect.isfunction'], {}), '(configs, inspect.isfunction)\n', (367, 396), False, 'import inspect\n'), ((1014, 1033), 'gym.make', 'gym.make', (['config_id'], {}), '(config_id)\n', (1022, 1033), False, 'import gym\n'), ((487, 611), 'gym.envs.registration.register', 'gym.envs.registration.register', ([], {'id': "config['env_id']", 'entry_point': "config['env_entry_point']", 'kwargs': "config['env_kwargs']"}), "(id=config['env_id'], entry_point=config[\n 'env_entry_point'], kwargs=config['env_kwargs'])\n", (517, 611), False, 'import gym\n')] |
caravancoop/rest-auth-toolkit | demo/demo/accounts/urls.py | 425bf293987f7128d9538f27a5eca7e47ba84217 | from django.urls import path
from .views import ProfileView
urlpatterns = [
path('', ProfileView.as_view(), name='user-profile'),
]
| [] |
vardaan-raj/auto-sklearn | test/test_pipeline/components/classification/test_passive_aggressive.py | 4597152e3a60cd6f6e32719a3bef26e13951b102 | import sklearn.linear_model
from autosklearn.pipeline.components.classification.passive_aggressive import \
PassiveAggressive
from .test_base import BaseClassificationComponentTest
class PassiveAggressiveComponentTest(BaseClassificationComponentTest):
__test__ = True
res = dict()
res["default_iris"] = 0.92
res["iris_n_calls"] = 5
res["default_iris_iterative"] = 0.92
res["iris_iterative_n_iter"] = 32
res["default_iris_proba"] = 0.29271032477461295
res["default_iris_sparse"] = 0.4
res["default_digits"] = 0.9156041287188829
res["digits_n_calls"] = 6
res["default_digits_iterative"] = 0.9156041287188829
res["digits_iterative_n_iter"] = 64
res["default_digits_binary"] = 0.9927140255009107
res["default_digits_multilabel"] = 0.90997912489192
res["default_digits_multilabel_proba"] = 1.0
res['ignore_hps'] = ['max_iter']
sk_mod = sklearn.linear_model.PassiveAggressiveClassifier
module = PassiveAggressive
step_hyperparameter = {
'name': 'max_iter',
'value': module.get_max_iter(),
}
| [] |
harsh020/datasets | tensorflow_datasets/structured/dart/dart_test.py | b4ad3617b279ec65356e696c4c860458621976f6 | # coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dart dataset tests."""
import json
import mock
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
from tensorflow_datasets.structured.dart import dart
class DartTest(tfds.testing.DatasetBuilderTestCase):
DATASET_CLASS = dart.Dart
SPLITS = {
'train': 2,
'validation': 1,
'test': 2,
}
def test_split_generators(self):
json_str = """
[
{
"tripleset": [
[
"Mars Hill College",
"JOINED",
"1973"
],
[
"Mars Hill College",
"LOCATION",
"Mars Hill, North Carolina"
]
],
"subtree_was_extended": true,
"annotations": [
{
"source": "WikiSQL_decl_sents",
"text": "A school from Mars Hill, North Carolina, joined in 1973."
}
]
}
]
"""
expected_examples = [{
'input_text': {
'table': [
{
'column_header': 'subject',
'row_number': 0,
'content': 'Mars Hill College',
},
{
'column_header': 'predicate',
'row_number': 0,
'content': 'JOINED',
},
{
'column_header': 'object',
'row_number': 0,
'content': '1973',
},
{
'column_header': 'subject',
'row_number': 1,
'content': 'Mars Hill College',
},
{
'column_header': 'predicate',
'row_number': 1,
'content': 'LOCATION',
},
{
'column_header': 'object',
'row_number': 1,
'content': 'Mars Hill, North Carolina',
},
]
},
'target_text':
'A school from Mars Hill, North Carolina, joined in 1973.'
}]
dart_dataset = dart.Dart()
with mock.patch.object(
json, 'load',
return_value=json.loads(json_str)), mock.patch.object(
tf, 'io'):
for i, (_, example) in enumerate(dart_dataset._generate_examples('')):
self.assertCountEqual(example, expected_examples[i])
if __name__ == '__main__':
tfds.testing.test_main()
| [((3123, 3147), 'tensorflow_datasets.public_api.testing.test_main', 'tfds.testing.test_main', ([], {}), '()\n', (3145, 3147), True, 'import tensorflow_datasets.public_api as tfds\n'), ((2806, 2817), 'tensorflow_datasets.structured.dart.dart.Dart', 'dart.Dart', ([], {}), '()\n', (2815, 2817), False, 'from tensorflow_datasets.structured.dart import dart\n'), ((2912, 2939), 'mock.patch.object', 'mock.patch.object', (['tf', '"""io"""'], {}), "(tf, 'io')\n", (2929, 2939), False, 'import mock\n'), ((2889, 2909), 'json.loads', 'json.loads', (['json_str'], {}), '(json_str)\n', (2899, 2909), False, 'import json\n')] |
AdamLohSg/GTA | exp/exp_informer_dad.py | bf6a745a6e28e365466e76360a15ca10ce61e009 | from data.data_loader_dad import (
NASA_Anomaly,
WADI
)
from exp.exp_basic import Exp_Basic
from models.model import Informer
from utils.tools import EarlyStopping, adjust_learning_rate
from utils.metrics import metric
from sklearn.metrics import classification_report
import numpy as np
import torch
import torch.nn as nn
from torch import optim
from torch.utils.data import DataLoader
import os
import time
import warnings
warnings.filterwarnings('ignore')
class Exp_Informer_DAD(Exp_Basic):
def __init__(self, args):
super(Exp_Informer_DAD, self).__init__(args)
def _build_model(self):
model_dict = {
'informer':Informer,
}
if self.args.model=='informer':
model = model_dict[self.args.model](
self.args.enc_in,
self.args.dec_in,
self.args.c_out,
self.args.seq_len,
self.args.label_len,
self.args.pred_len,
self.args.factor,
self.args.d_model,
self.args.n_heads,
self.args.e_layers,
self.args.d_layers,
self.args.d_ff,
self.args.dropout,
self.args.attn,
self.args.embed,
self.args.data[:-1],
self.args.activation,
self.device
)
return model.double()
def _get_data(self, flag):
args = self.args
data_dict = {
'SMAP':NASA_Anomaly,
'MSL':NASA_Anomaly,
'WADI':WADI,
}
Data = data_dict[self.args.data]
if flag == 'test':
shuffle_flag = False; drop_last = True; batch_size = args.batch_size
else:
shuffle_flag = True; drop_last = True; batch_size = args.batch_size
data_set = Data(
root_path=args.root_path,
data_path=args.data_path,
flag=flag,
size=[args.seq_len, args.label_len, args.pred_len],
features=args.features,
target=args.target
)
print(flag, len(data_set))
data_loader = DataLoader(
data_set,
batch_size=batch_size,
shuffle=shuffle_flag,
num_workers=args.num_workers,
drop_last=drop_last)
return data_set, data_loader
def _select_optimizer(self):
model_optim = optim.Adam(self.model.parameters(), lr=self.args.learning_rate)
return model_optim
def _select_criterion(self):
criterion = nn.MSELoss()
return criterion
def vali(self, vali_data, vali_loader, criterion):
self.model.eval()
total_loss = []
for i, (batch_x,batch_y,batch_x_mark,batch_y_mark,batch_label) in enumerate(vali_loader):
batch_x = batch_x.double().to(self.device)
batch_y = batch_y.double()
batch_x_mark = batch_x_mark.double().to(self.device)
batch_y_mark = batch_y_mark.double().to(self.device)
# decoder input
dec_inp = torch.zeros_like(batch_y[:,-self.args.pred_len:,:]).double()
dec_inp = torch.cat([batch_y[:,:self.args.label_len,:], dec_inp], dim=1).double().to(self.device)
# encoder - decoder
outputs = self.model(batch_x, batch_x_mark, dec_inp, batch_y_mark)
batch_y = batch_y[:,-self.args.pred_len:,:].to(self.device)
pred = outputs.detach().cpu()
true = batch_y.detach().cpu()
loss = criterion(pred, true)
total_loss.append(loss)
total_loss = np.average(total_loss)
self.model.train()
return total_loss
def train(self, setting):
train_data, train_loader = self._get_data(flag = 'train')
vali_data, vali_loader = self._get_data(flag = 'val')
test_data, test_loader = self._get_data(flag = 'test')
path = './checkpoints/'+setting
if not os.path.exists(path):
os.makedirs(path)
time_now = time.time()
train_steps = len(train_loader)
early_stopping = EarlyStopping(patience=self.args.patience, verbose=True)
model_optim = self._select_optimizer()
criterion = self._select_criterion()
for epoch in range(self.args.train_epochs):
iter_count = 0
train_loss = []
self.model.train()
for i, (batch_x,batch_y,batch_x_mark,batch_y_mark) in enumerate(train_loader):
iter_count += 1
model_optim.zero_grad()
batch_x = batch_x.double().to(self.device)
batch_y = batch_y.double()
batch_x_mark = batch_x_mark.double().to(self.device)
batch_y_mark = batch_y_mark.double().to(self.device)
# decoder input
dec_inp = torch.zeros_like(batch_y[:,-self.args.pred_len:,:]).double()
dec_inp = torch.cat([batch_y[:,:self.args.label_len,:], dec_inp], dim=1).double().to(self.device)
# encoder - decoder
outputs = self.model(batch_x, batch_x_mark, dec_inp, batch_y_mark)
batch_y = batch_y[:,-self.args.pred_len:,:].to(self.device)
loss = criterion(outputs, batch_y)
train_loss.append(loss.item())
if (i+1) % 100==0:
print("\titers: {0}, epoch: {1} | loss: {2:.7f}".format(i + 1, epoch + 1, loss.item()))
speed = (time.time()-time_now)/iter_count
left_time = speed*((self.args.train_epochs - epoch)*train_steps - i)
print('\tspeed: {:.4f}s/iter; left time: {:.4f}s'.format(speed, left_time))
iter_count = 0
time_now = time.time()
loss.backward()
model_optim.step()
train_loss = np.average(train_loss)
vali_loss = self.vali(vali_data, vali_loader, criterion)
test_loss = self.vali(test_data, test_loader, criterion)
print("Epoch: {0}, Steps: {1} | Train Loss: {2:.7f} Vali Loss: {3:.7f} Test Loss: {4:.7f}".format(
epoch + 1, train_steps, train_loss, vali_loss, test_loss))
early_stopping(vali_loss, self.model, path)
if early_stopping.early_stop:
print("Early stopping")
break
adjust_learning_rate(model_optim, epoch+1, self.args)
best_model_path = path+'/'+'checkpoint.pth'
self.model.load_state_dict(torch.load(best_model_path))
return self.model
def test(self, setting):
test_data, test_loader = self._get_data(flag='test')
self.model.eval()
preds = []
trues = []
labels = []
with torch.no_grad():
for i, (batch_x,batch_y,batch_x_mark,batch_y_mark,batch_label) in enumerate(test_loader):
batch_x = batch_x.double().to(self.device)
batch_y = batch_y.double()
batch_x_mark = batch_x_mark.double().to(self.device)
batch_y_mark = batch_y_mark.double().to(self.device)
# decoder input
dec_inp = torch.zeros_like(batch_y[:,-self.args.pred_len:,:]).double()
dec_inp = torch.cat([batch_y[:,:self.args.label_len,:], dec_inp], dim=1).double().to(self.device)
# encoder - decoder
outputs = self.model(batch_x, batch_x_mark, dec_inp, batch_y_mark)
batch_y = batch_y[:,-self.args.pred_len:,:].to(self.device)
pred = outputs.detach().cpu().numpy()#.squeeze()
true = batch_y.detach().cpu().numpy()#.squeeze()
batch_label = batch_label.long().detach().numpy()
preds.append(pred)
trues.append(true)
labels.append(batch_label)
preds = np.array(preds)
trues = np.array(trues)
labels = np.array(labels)
print('test shape:', preds.shape, trues.shape)
preds = preds.reshape(-1, preds.shape[-2], preds.shape[-1])
trues = trues.reshape(-1, trues.shape[-2], trues.shape[-1])
labels = labels.reshape(-1, labels.shape[-1])
print('test shape:', preds.shape, trues.shape)
# result save
folder_path = './results/' + setting +'/'
if not os.path.exists(folder_path):
os.makedirs(folder_path)
mae, mse, rmse, mape, mspe = metric(preds, trues)
print('mse:{}, mae:{}'.format(mse, mae))
np.save(folder_path+'metrics.npy', np.array([mae, mse, rmse, mape, mspe]))
np.save(folder_path+'pred.npy', preds)
np.save(folder_path+'true.npy', trues)
np.save(folder_path+'label.npy', labels)
return | [((438, 471), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (461, 471), False, 'import warnings\n'), ((2215, 2335), 'torch.utils.data.DataLoader', 'DataLoader', (['data_set'], {'batch_size': 'batch_size', 'shuffle': 'shuffle_flag', 'num_workers': 'args.num_workers', 'drop_last': 'drop_last'}), '(data_set, batch_size=batch_size, shuffle=shuffle_flag,\n num_workers=args.num_workers, drop_last=drop_last)\n', (2225, 2335), False, 'from torch.utils.data import DataLoader\n'), ((2637, 2649), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (2647, 2649), True, 'import torch.nn as nn\n'), ((3705, 3727), 'numpy.average', 'np.average', (['total_loss'], {}), '(total_loss)\n', (3715, 3727), True, 'import numpy as np\n'), ((4139, 4150), 'time.time', 'time.time', ([], {}), '()\n', (4148, 4150), False, 'import time\n'), ((4225, 4281), 'utils.tools.EarlyStopping', 'EarlyStopping', ([], {'patience': 'self.args.patience', 'verbose': '(True)'}), '(patience=self.args.patience, verbose=True)\n', (4238, 4281), False, 'from utils.tools import EarlyStopping, adjust_learning_rate\n'), ((8199, 8214), 'numpy.array', 'np.array', (['preds'], {}), '(preds)\n', (8207, 8214), True, 'import numpy as np\n'), ((8231, 8246), 'numpy.array', 'np.array', (['trues'], {}), '(trues)\n', (8239, 8246), True, 'import numpy as np\n'), ((8264, 8280), 'numpy.array', 'np.array', (['labels'], {}), '(labels)\n', (8272, 8280), True, 'import numpy as np\n'), ((8773, 8793), 'utils.metrics.metric', 'metric', (['preds', 'trues'], {}), '(preds, trues)\n', (8779, 8793), False, 'from utils.metrics import metric\n'), ((8935, 8975), 'numpy.save', 'np.save', (["(folder_path + 'pred.npy')", 'preds'], {}), "(folder_path + 'pred.npy', preds)\n", (8942, 8975), True, 'import numpy as np\n'), ((8982, 9022), 'numpy.save', 'np.save', (["(folder_path + 'true.npy')", 'trues'], {}), "(folder_path + 'true.npy', trues)\n", (8989, 9022), True, 'import numpy as np\n'), ((9029, 9071), 'numpy.save', 'np.save', (["(folder_path + 'label.npy')", 'labels'], {}), "(folder_path + 'label.npy', labels)\n", (9036, 9071), True, 'import numpy as np\n'), ((4067, 4087), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (4081, 4087), False, 'import os\n'), ((4101, 4118), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (4112, 4118), False, 'import os\n'), ((6113, 6135), 'numpy.average', 'np.average', (['train_loss'], {}), '(train_loss)\n', (6123, 6135), True, 'import numpy as np\n'), ((6634, 6689), 'utils.tools.adjust_learning_rate', 'adjust_learning_rate', (['model_optim', '(epoch + 1)', 'self.args'], {}), '(model_optim, epoch + 1, self.args)\n', (6654, 6689), False, 'from utils.tools import EarlyStopping, adjust_learning_rate\n'), ((6788, 6815), 'torch.load', 'torch.load', (['best_model_path'], {}), '(best_model_path)\n', (6798, 6815), False, 'import torch\n'), ((7067, 7082), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (7080, 7082), False, 'import torch\n'), ((8669, 8696), 'os.path.exists', 'os.path.exists', (['folder_path'], {}), '(folder_path)\n', (8683, 8696), False, 'import os\n'), ((8710, 8734), 'os.makedirs', 'os.makedirs', (['folder_path'], {}), '(folder_path)\n', (8721, 8734), False, 'import os\n'), ((8887, 8925), 'numpy.array', 'np.array', (['[mae, mse, rmse, mape, mspe]'], {}), '([mae, mse, rmse, mape, mspe])\n', (8895, 8925), True, 'import numpy as np\n'), ((3156, 3209), 'torch.zeros_like', 'torch.zeros_like', (['batch_y[:, -self.args.pred_len:, :]'], {}), '(batch_y[:, 
-self.args.pred_len:, :])\n', (3172, 3209), False, 'import torch\n'), ((5991, 6002), 'time.time', 'time.time', ([], {}), '()\n', (6000, 6002), False, 'import time\n'), ((5049, 5102), 'torch.zeros_like', 'torch.zeros_like', (['batch_y[:, -self.args.pred_len:, :]'], {}), '(batch_y[:, -self.args.pred_len:, :])\n', (5065, 5102), False, 'import torch\n'), ((7485, 7538), 'torch.zeros_like', 'torch.zeros_like', (['batch_y[:, -self.args.pred_len:, :]'], {}), '(batch_y[:, -self.args.pred_len:, :])\n', (7501, 7538), False, 'import torch\n'), ((3239, 3303), 'torch.cat', 'torch.cat', (['[batch_y[:, :self.args.label_len, :], dec_inp]'], {'dim': '(1)'}), '([batch_y[:, :self.args.label_len, :], dec_inp], dim=1)\n', (3248, 3303), False, 'import torch\n'), ((5707, 5718), 'time.time', 'time.time', ([], {}), '()\n', (5716, 5718), False, 'import time\n'), ((5136, 5200), 'torch.cat', 'torch.cat', (['[batch_y[:, :self.args.label_len, :], dec_inp]'], {'dim': '(1)'}), '([batch_y[:, :self.args.label_len, :], dec_inp], dim=1)\n', (5145, 5200), False, 'import torch\n'), ((7572, 7636), 'torch.cat', 'torch.cat', (['[batch_y[:, :self.args.label_len, :], dec_inp]'], {'dim': '(1)'}), '([batch_y[:, :self.args.label_len, :], dec_inp], dim=1)\n', (7581, 7636), False, 'import torch\n')] |
liangleslie/core | tests/components/mysensors/conftest.py | cc807b4d597daaaadc92df4a93c6e30da4f570c6 | """Provide common mysensors fixtures."""
from __future__ import annotations
from collections.abc import AsyncGenerator, Callable, Generator
import json
from typing import Any
from unittest.mock import AsyncMock, MagicMock, patch
from mysensors import BaseSyncGateway
from mysensors.persistence import MySensorsJSONDecoder
from mysensors.sensor import Sensor
import pytest
from homeassistant.components.device_tracker.legacy import Device
from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN
from homeassistant.components.mysensors.config_flow import DEFAULT_BAUD_RATE
from homeassistant.components.mysensors.const import (
CONF_BAUD_RATE,
CONF_DEVICE,
CONF_GATEWAY_TYPE,
CONF_GATEWAY_TYPE_SERIAL,
CONF_VERSION,
DOMAIN,
)
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry, load_fixture
@pytest.fixture(autouse=True)
def device_tracker_storage(mock_device_tracker_conf: list[Device]) -> list[Device]:
"""Mock out device tracker known devices storage."""
devices = mock_device_tracker_conf
return devices
@pytest.fixture(name="mqtt")
def mock_mqtt_fixture(hass: HomeAssistant) -> None:
"""Mock the MQTT integration."""
hass.config.components.add(MQTT_DOMAIN)
@pytest.fixture(name="is_serial_port")
def is_serial_port_fixture() -> Generator[MagicMock, None, None]:
"""Patch the serial port check."""
with patch("homeassistant.components.mysensors.gateway.cv.isdevice") as is_device:
is_device.side_effect = lambda device: device
yield is_device
@pytest.fixture(name="gateway_nodes")
def gateway_nodes_fixture() -> dict[int, Sensor]:
"""Return the gateway nodes dict."""
return {}
@pytest.fixture(name="serial_transport")
async def serial_transport_fixture(
gateway_nodes: dict[int, Sensor],
is_serial_port: MagicMock,
) -> AsyncGenerator[dict[int, Sensor], None]:
"""Mock a serial transport."""
with patch(
"mysensors.gateway_serial.AsyncTransport", autospec=True
) as transport_class, patch("mysensors.task.OTAFirmware", autospec=True), patch(
"mysensors.task.load_fw", autospec=True
), patch(
"mysensors.task.Persistence", autospec=True
) as persistence_class:
persistence = persistence_class.return_value
mock_gateway_features(persistence, transport_class, gateway_nodes)
yield transport_class
def mock_gateway_features(
persistence: MagicMock, transport_class: MagicMock, nodes: dict[int, Sensor]
) -> None:
"""Mock the gateway features."""
    async def mock_schedule_save_sensors() -> None:
        """Load nodes via persistence."""
gateway = transport_class.call_args[0][0]
gateway.sensors.update(nodes)
persistence.schedule_save_sensors = AsyncMock(
side_effect=mock_schedule_save_sensors
)
# For some reason autospeccing does not recognize these methods.
persistence.safe_load_sensors = MagicMock()
persistence.save_sensors = MagicMock()
async def mock_connect() -> None:
"""Mock the start method."""
transport.connect_task = MagicMock()
gateway = transport_class.call_args[0][0]
gateway.on_conn_made(gateway)
transport = transport_class.return_value
transport.connect_task = None
transport.connect.side_effect = mock_connect
@pytest.fixture(name="transport")
def transport_fixture(serial_transport: MagicMock) -> MagicMock:
"""Return the default mocked transport."""
return serial_transport
@pytest.fixture
def transport_write(transport: MagicMock) -> MagicMock:
"""Return the transport mock that accepts string messages."""
return transport.return_value.send
@pytest.fixture(name="serial_entry")
async def serial_entry_fixture(hass: HomeAssistant) -> MockConfigEntry:
"""Create a config entry for a serial gateway."""
entry = MockConfigEntry(
domain=DOMAIN,
data={
CONF_GATEWAY_TYPE: CONF_GATEWAY_TYPE_SERIAL,
CONF_VERSION: "2.3",
CONF_DEVICE: "/test/device",
CONF_BAUD_RATE: DEFAULT_BAUD_RATE,
},
)
return entry
@pytest.fixture(name="config_entry")
def config_entry_fixture(serial_entry: MockConfigEntry) -> MockConfigEntry:
"""Provide the config entry used for integration set up."""
return serial_entry
@pytest.fixture(name="integration")
async def integration_fixture(
hass: HomeAssistant, transport: MagicMock, config_entry: MockConfigEntry
) -> AsyncGenerator[MockConfigEntry, None]:
"""Set up the mysensors integration with a config entry."""
config: dict[str, Any] = {}
config_entry.add_to_hass(hass)
with patch("homeassistant.components.mysensors.device.UPDATE_DELAY", new=0):
await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
yield config_entry
@pytest.fixture
def receive_message(
transport: MagicMock, integration: MockConfigEntry
) -> Callable[[str], None]:
"""Receive a message for the gateway."""
def receive_message_callback(message_string: str) -> None:
"""Receive a message with the transport.
The message_string parameter is a string in the MySensors message format.
"""
gateway = transport.call_args[0][0]
# node_id;child_id;command;ack;type;payload\n
gateway.logic(message_string)
return receive_message_callback
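# A hedged illustration of the callback above (the test name and message values are
# assumptions, not taken from this file): a test that requests the fixture can drive
# the gateway with a serial-format string, e.g.
#
#     def test_example(receive_message):
#         # node_id;child_id;command;ack;type;payload
#         receive_message("1;0;1;0;0;20.5\n")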
@pytest.fixture(name="gateway")
def gateway_fixture(
transport: MagicMock, integration: MockConfigEntry
) -> BaseSyncGateway:
"""Return a setup gateway."""
return transport.call_args[0][0]
def load_nodes_state(fixture_path: str) -> dict:
"""Load mysensors nodes fixture."""
return json.loads(load_fixture(fixture_path), cls=MySensorsJSONDecoder)
def update_gateway_nodes(
gateway_nodes: dict[int, Sensor], nodes: dict[int, Sensor]
) -> dict:
"""Update the gateway nodes."""
gateway_nodes.update(nodes)
return nodes
@pytest.fixture(name="gps_sensor_state", scope="session")
def gps_sensor_state_fixture() -> dict:
"""Load the gps sensor state."""
return load_nodes_state("mysensors/gps_sensor_state.json")
@pytest.fixture
def gps_sensor(gateway_nodes: dict[int, Sensor], gps_sensor_state: dict) -> Sensor:
"""Load the gps sensor."""
nodes = update_gateway_nodes(gateway_nodes, gps_sensor_state)
node = nodes[1]
return node
@pytest.fixture(name="power_sensor_state", scope="session")
def power_sensor_state_fixture() -> dict:
"""Load the power sensor state."""
return load_nodes_state("mysensors/power_sensor_state.json")
@pytest.fixture
def power_sensor(gateway_nodes: dict[int, Sensor], power_sensor_state: dict) -> Sensor:
"""Load the power sensor."""
nodes = update_gateway_nodes(gateway_nodes, power_sensor_state)
node = nodes[1]
return node
@pytest.fixture(name="energy_sensor_state", scope="session")
def energy_sensor_state_fixture() -> dict:
"""Load the energy sensor state."""
return load_nodes_state("mysensors/energy_sensor_state.json")
@pytest.fixture
def energy_sensor(
gateway_nodes: dict[int, Sensor], energy_sensor_state: dict
) -> Sensor:
"""Load the energy sensor."""
nodes = update_gateway_nodes(gateway_nodes, energy_sensor_state)
node = nodes[1]
return node
@pytest.fixture(name="sound_sensor_state", scope="session")
def sound_sensor_state_fixture() -> dict:
"""Load the sound sensor state."""
return load_nodes_state("mysensors/sound_sensor_state.json")
@pytest.fixture
def sound_sensor(gateway_nodes: dict[int, Sensor], sound_sensor_state: dict) -> Sensor:
"""Load the sound sensor."""
nodes = update_gateway_nodes(gateway_nodes, sound_sensor_state)
node = nodes[1]
return node
@pytest.fixture(name="distance_sensor_state", scope="session")
def distance_sensor_state_fixture() -> dict:
"""Load the distance sensor state."""
return load_nodes_state("mysensors/distance_sensor_state.json")
@pytest.fixture
def distance_sensor(
gateway_nodes: dict[int, Sensor], distance_sensor_state: dict
) -> Sensor:
"""Load the distance sensor."""
nodes = update_gateway_nodes(gateway_nodes, distance_sensor_state)
node = nodes[1]
return node
@pytest.fixture(name="temperature_sensor_state", scope="session")
def temperature_sensor_state_fixture() -> dict:
"""Load the temperature sensor state."""
return load_nodes_state("mysensors/temperature_sensor_state.json")
@pytest.fixture
def temperature_sensor(
gateway_nodes: dict[int, Sensor], temperature_sensor_state: dict
) -> Sensor:
"""Load the temperature sensor."""
nodes = update_gateway_nodes(gateway_nodes, temperature_sensor_state)
node = nodes[1]
return node
@pytest.fixture(name="text_node_state", scope="session")
def text_node_state_fixture() -> dict:
"""Load the text node state."""
return load_nodes_state("mysensors/text_node_state.json")
@pytest.fixture
def text_node(gateway_nodes: dict[int, Sensor], text_node_state: dict) -> Sensor:
"""Load the text child node."""
nodes = update_gateway_nodes(gateway_nodes, text_node_state)
node = nodes[1]
return node
| [((917, 945), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (931, 945), False, 'import pytest\n'), ((1148, 1175), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""mqtt"""'}), "(name='mqtt')\n", (1162, 1175), False, 'import pytest\n'), ((1312, 1349), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""is_serial_port"""'}), "(name='is_serial_port')\n", (1326, 1349), False, 'import pytest\n'), ((1623, 1659), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""gateway_nodes"""'}), "(name='gateway_nodes')\n", (1637, 1659), False, 'import pytest\n'), ((1768, 1807), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""serial_transport"""'}), "(name='serial_transport')\n", (1782, 1807), False, 'import pytest\n'), ((3414, 3446), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""transport"""'}), "(name='transport')\n", (3428, 3446), False, 'import pytest\n'), ((3769, 3804), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""serial_entry"""'}), "(name='serial_entry')\n", (3783, 3804), False, 'import pytest\n'), ((4213, 4248), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""config_entry"""'}), "(name='config_entry')\n", (4227, 4248), False, 'import pytest\n'), ((4416, 4450), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""integration"""'}), "(name='integration')\n", (4430, 4450), False, 'import pytest\n'), ((5495, 5525), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""gateway"""'}), "(name='gateway')\n", (5509, 5525), False, 'import pytest\n'), ((6052, 6108), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""gps_sensor_state"""', 'scope': '"""session"""'}), "(name='gps_sensor_state', scope='session')\n", (6066, 6108), False, 'import pytest\n'), ((6487, 6545), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""power_sensor_state"""', 'scope': '"""session"""'}), "(name='power_sensor_state', scope='session')\n", (6501, 6545), False, 'import pytest\n'), ((6938, 6997), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""energy_sensor_state"""', 'scope': '"""session"""'}), "(name='energy_sensor_state', scope='session')\n", (6952, 6997), False, 'import pytest\n'), ((7403, 7461), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""sound_sensor_state"""', 'scope': '"""session"""'}), "(name='sound_sensor_state', scope='session')\n", (7417, 7461), False, 'import pytest\n'), ((7854, 7915), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""distance_sensor_state"""', 'scope': '"""session"""'}), "(name='distance_sensor_state', scope='session')\n", (7868, 7915), False, 'import pytest\n'), ((8335, 8399), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""temperature_sensor_state"""', 'scope': '"""session"""'}), "(name='temperature_sensor_state', scope='session')\n", (8349, 8399), False, 'import pytest\n'), ((8840, 8895), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""text_node_state"""', 'scope': '"""session"""'}), "(name='text_node_state', scope='session')\n", (8854, 8895), False, 'import pytest\n'), ((2849, 2898), 'unittest.mock.AsyncMock', 'AsyncMock', ([], {'side_effect': 'mock_schedule_save_sensors'}), '(side_effect=mock_schedule_save_sensors)\n', (2858, 2898), False, 'from unittest.mock import AsyncMock, MagicMock, patch\n'), ((3018, 3029), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (3027, 3029), False, 'from unittest.mock import AsyncMock, MagicMock, patch\n'), ((3061, 3072), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (3070, 3072), False, 'from unittest.mock import AsyncMock, 
MagicMock, patch\n'), ((3943, 4118), 'tests.common.MockConfigEntry', 'MockConfigEntry', ([], {'domain': 'DOMAIN', 'data': "{CONF_GATEWAY_TYPE: CONF_GATEWAY_TYPE_SERIAL, CONF_VERSION: '2.3',\n CONF_DEVICE: '/test/device', CONF_BAUD_RATE: DEFAULT_BAUD_RATE}"}), "(domain=DOMAIN, data={CONF_GATEWAY_TYPE:\n CONF_GATEWAY_TYPE_SERIAL, CONF_VERSION: '2.3', CONF_DEVICE:\n '/test/device', CONF_BAUD_RATE: DEFAULT_BAUD_RATE})\n", (3958, 4118), False, 'from tests.common import MockConfigEntry, load_fixture\n'), ((1464, 1527), 'unittest.mock.patch', 'patch', (['"""homeassistant.components.mysensors.gateway.cv.isdevice"""'], {}), "('homeassistant.components.mysensors.gateway.cv.isdevice')\n", (1469, 1527), False, 'from unittest.mock import AsyncMock, MagicMock, patch\n'), ((2003, 2066), 'unittest.mock.patch', 'patch', (['"""mysensors.gateway_serial.AsyncTransport"""'], {'autospec': '(True)'}), "('mysensors.gateway_serial.AsyncTransport', autospec=True)\n", (2008, 2066), False, 'from unittest.mock import AsyncMock, MagicMock, patch\n'), ((2101, 2151), 'unittest.mock.patch', 'patch', (['"""mysensors.task.OTAFirmware"""'], {'autospec': '(True)'}), "('mysensors.task.OTAFirmware', autospec=True)\n", (2106, 2151), False, 'from unittest.mock import AsyncMock, MagicMock, patch\n'), ((2153, 2199), 'unittest.mock.patch', 'patch', (['"""mysensors.task.load_fw"""'], {'autospec': '(True)'}), "('mysensors.task.load_fw', autospec=True)\n", (2158, 2199), False, 'from unittest.mock import AsyncMock, MagicMock, patch\n'), ((2215, 2265), 'unittest.mock.patch', 'patch', (['"""mysensors.task.Persistence"""'], {'autospec': '(True)'}), "('mysensors.task.Persistence', autospec=True)\n", (2220, 2265), False, 'from unittest.mock import AsyncMock, MagicMock, patch\n'), ((3182, 3193), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (3191, 3193), False, 'from unittest.mock import AsyncMock, MagicMock, patch\n'), ((4744, 4814), 'unittest.mock.patch', 'patch', (['"""homeassistant.components.mysensors.device.UPDATE_DELAY"""'], {'new': '(0)'}), "('homeassistant.components.mysensors.device.UPDATE_DELAY', new=0)\n", (4749, 4814), False, 'from unittest.mock import AsyncMock, MagicMock, patch\n'), ((5808, 5834), 'tests.common.load_fixture', 'load_fixture', (['fixture_path'], {}), '(fixture_path)\n', (5820, 5834), False, 'from tests.common import MockConfigEntry, load_fixture\n'), ((4830, 4873), 'homeassistant.setup.async_setup_component', 'async_setup_component', (['hass', 'DOMAIN', 'config'], {}), '(hass, DOMAIN, config)\n', (4851, 4873), False, 'from homeassistant.setup import async_setup_component\n')] |
SymenYang/Vanish-Point-Detect | Detect.py | 0e83e2b2a86e9523ed4a86f592f3a8dee594d691 | import cv2 as cv
import numpy as np
import copy
import math
import Edges
import INTPoint
eps = 1e-7
votes = {}
Groups = []
VPoints = []
Centers = []
Cluster = []
voters = {}
def getEdges(image):
#moved to Edges.py
return Edges.getEdges(image)
def getLines(edges):
#moved to Edges.py
return Edges.getLines(edges)
def checkRound(pos,edges):
#moved to Edges.py
return Edges.checkRound(pos,edges)
def outOfSize(pos,edges):
#moved to Edges.py
return Edges.outOfSize(pos,edges)
def extenLine(line,edges):
#moved to Edges.py
return Edges.extenLine(line,edges)
def extenLines(lines,edges):
#moved to Edges.py
return Edges.extenLines(lines,edges)
def shouldMerge(line1,line2):
#moved to Edges.py
return Edges.shouldMerge(line1,line2)
def mergeLines(lines):
#moved to Edges.py
return Edges.mergeLines(lines)
def getLineABC(line):
#moved to Edges.py
return Edges.getLineABC(line)
def getCirAnch(a,b):
#moved to Edges.py
return Edges.getCirAnch(a,b)
def getCrossPoint(linea,lineb):
#moved to INTPoint.py
return INTPoint.getIntersectPoint(linea,lineb)
def sortLines(lines):
#moved to Edges.py
return Edges.sortLines(lines)
def getVPoints2(lines,arange = 0.2617):
#moved to INTPoint.py
global VPoints
VPoints = INTPoint.getVPoints2(lines,arange)
return VPoints
def getVPoints(num = 16):
    # this function has fallen into disuse because it is too slow
for i in range(0,num + 1,1):
lens = len(Groups[i])
for j in range(0,lens,1):
for k in range(j+1,lens,1):
VPoints.append(getCrossPoint(Groups[i][j],Groups[i][k]))
def removeSame(list):
#moved to INTPoint.py
return INTPoint.removeSame(list)
def getLinesLength(line):
#moved to INTPoint.py
return INTPoint.getLinesLength(line)
def getMidPoint(line):
#moved to INTPoint.py
return INTPoint.getMidPoint(line)
def getArch(line,point):
#moved to INTPoint.py
return INTPoint.getArch(line,point)
def voteForPoint(lines):
#moved to INTPoint.py
global votes
global voters
votes,voters = INTPoint.voteForPoint(lines,VPoints)
return
def getGraPoint(points):
count = 1.0
sumx = 0.0
sumy = 0.0
for point in points:
w = votes[point]
count += w
sumx += w * point[0]
sumy += w * point[1]
return (sumx/count,sumy/count)
def devideIntoPoints(Points):
global Cluster
lens = len(Cluster)
for i in range(0,lens,1):
Cluster[i] = []
for point in Points:
if point[0] == 'p' or point[0] == 'h' or point[0] == 'v':
continue
if votes[point] == 0:
continue
minlens = 1e15
minpos = 0
now = -1
for cen in Centers:
now += 1
lens = getLinesLength((point[0],point[1],cen[0],cen[1]))
if lens < minlens:
minlens = lens
minpos = now
Cluster[minpos].append(point)
def KMean(points,K = 3,step = 50):
global Cluster
global Centers
Cluster = []
Centers = []
if K == 1:
step = 1
for i in range(0,K,1):
Cluster.append([])
Centers.append([0,0])
count = 0
for point in points:
if point[0] != 'p' and point[0] != 'v' and point[0] != 'h' and votes[point] != 0:
Centers[count][0] = point[0]
Centers[count][1] = point[1]
count += 1
if count == K:
break
for i in range(0,step,1):
devideIntoPoints(points)
for i in range(0,K,1):
Centers[i] = getGraPoint(Cluster[i])
def getFinal(points):
count = 0.0
num = 0
p1 = 0.0
ret1 = []
p2 = 0.0
ret2 = []
for item in votes:
if item[0] == 'p' or item[0] == 'h' or item[0] == 'v':
if votes[item] > p1:
p2 = p1
ret2 = ret1
p1 = votes[item]
ret1 = item
else:
if votes[item] > p2:
p2 = votes[item]
ret2 = item
else:
count += votes[item]
num += 1
K = 3
ret = []
count = count / num * 0.1
if p1 > count:
K -= 1
ret.append(ret1)
if p2 > count:
K -= 1
ret.append(ret2)
KMean(points,K)
for i in range(0,K,1):
ret.append(Centers[i])
return ret
def deal(inputname,outputname):
global votes
global Groups
global VPoints
global Centers
global Cluster
global voters
votes = {}
Groups = []
VPoints = []
Centers = []
Cluster = []
voters = {}
image = cv.imread(inputname)
edges = getEdges(image)
cv.imwrite(outputname + 'edges.jpg',edges)
lines = getLines(edges)
lines2 = copy.deepcopy(lines)
lines2 = extenLines(lines2,edges)
lines2 = mergeLines(lines2)
#devideIntoGroups(lines2,3)
lines2 = sortLines(lines2)
getVPoints2(lines2)
VPoints = removeSame(VPoints)
voteForPoint(lines2)
votes2 = sorted(votes.iteritems(),key=lambda votes:votes[1],reverse=True)
lenofvotes = min(len(votes2),max(5,int(len(votes2) * 0.2)))
votesFinal = {}
VPoints = []
for i in range(0,lenofvotes,1):
votesFinal[votes2[i][0]] = votes2[i][1]
VPoints.append(votes2[i][0])
for i in range(lenofvotes,len(votes2),1):
if votes2[i][0][0] == 'h' or votes2[i][0][0] == 'v' or votes2[i][0][0] == 'p':
votesFinal[votes2[i][0]] = votes2[i][1]
VPoints.append(votes2[i][0])
votes = votesFinal
ans = getFinal(VPoints)
print ans
edges = cv.cvtColor(edges,cv.COLOR_GRAY2BGR)
edges2 = copy.deepcopy(edges)
for item in lines:
if item[0] == 'N':
continue
cv.line(edges,(item[0],item[1]),(item[2],item[3]),(0,0,255),2)
for item in lines2:
cv.line(edges2,(item[0],item[1]),(item[2],item[3]),(0,0,255),2)
color = [255,0,0,0]
for clu in Cluster:
for i in range(0,4,1):
if color[i] == 255:
color[i+1] = 255
color[i] = 0
break
for point in clu:
if point[0] > 0 and point[1] > 0:
if point[0] < edges.shape[1] and point[1] < edges.shape[0]:
if votes[point] == 0:
continue
cv.line(edges2,(int(point[0]),int(point[1])),(int(point[0]),int(point[1])),(color[1],color[2],color[3]),10)
for point in ans:
if point[0] > 0 and point[1] > 0:
if point[0] < edges.shape[1] and point[1] < edges.shape[0]:
cv.line(edges2,(int(point[0]),int(point[1])),(int(point[0]),int(point[1])),(255,255,255),10)
cv.imwrite(outputname + 'linedetect.jpg',edges)
cv.imwrite(outputname + 'answer.jpg',edges2)
fd = open(outputname + 'answer.txt','w')
fd.write('(' + str(ans[0][0]) + ',' + str(ans[0][1]) + ')(' + str(ans[1][0]) + ',' + str(ans[1][1]) + ')(' + str(ans[2][0]) + ',' + str(ans[2][1]) + ')')
    fd.close()
deal("data/1.jpg",'1') | [] |
wanasit/labelling-notebook | test/test_files.py | c9e7f6895cd4672e3b5af603bdddf08246d35094 | def test_list_example_directory(client):
response = client.get("/api/files")
assert response.status_code == 200
file_list = response.get_json()
assert len(file_list) == 5
assert file_list[0]['key'] == 'image_annotated.jpg'
assert file_list[1]['key'] == 'image.jpg'
assert file_list[2]['key'] == 'more_images/'
assert file_list[3]['key'] == 'more_images/01.jpg'
assert file_list[4]['key'] == 'more_images/02.png'
def test_list_example_directory_nested(client):
response = client.get("/api/files?path=more_images")
assert response.status_code == 200
file_list = response.get_json()
assert len(file_list) == 2
assert file_list[0]['key'] == '01.jpg'
assert file_list[1]['key'] == '02.png'
def test_get_example_image(client):
response = client.get("/api/files/image/x.jpg")
assert response.status_code == 404
response = client.get("/api/files/image/image.jpg")
assert response.status_code == 200
response = client.get("/api/files/image/more_images/01.jpg")
assert response.status_code == 200
def test_get_example_image_data(client):
response = client.get("/api/files/image_data/image.jpg")
assert response.status_code == 404
response = client.get("/api/files/image_data/image_annotated.jpg")
assert response.status_code == 200
data = response.get_json()
assert 'annotations' in data
assert 'tags' in data
def test_put_example_image_data(client):
response = client.get("/api/files/image_data/image.jpg")
assert response.status_code == 404
response = client.put("/api/files/image_data/image.jpg", json={
'annotations': [{'width': 10, 'height': 10, 'x': 0, 'y': 0}],
'tags': ['a', 'b']
})
assert response.status_code == 200
response = client.get("/api/files/image_data/image.jpg")
assert response.status_code == 200
data = response.get_json()
assert 'annotations' in data
assert 'tags' in data
| [] |
osoco/better-ways-of-thinking-about-software | Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/openedx/core/djangoapps/course_groups/migrations/0001_initial.py | 83e70d23c873509e22362a09a10d3510e10f6992 | from django.db import migrations, models
from django.conf import settings
from opaque_keys.edx.django.models import CourseKeyField
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='CohortMembership',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('course_id', CourseKeyField(max_length=255)),
],
),
migrations.CreateModel(
name='CourseCohort',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('assignment_type', models.CharField(default='manual', max_length=20, choices=[('random', 'Random'), ('manual', 'Manual')])),
],
),
migrations.CreateModel(
name='CourseCohortsSettings',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('is_cohorted', models.BooleanField(default=False)),
('course_id', CourseKeyField(help_text='Which course are these settings associated with?', unique=True, max_length=255, db_index=True)),
('_cohorted_discussions', models.TextField(null=True, db_column='cohorted_discussions', blank=True)),
('always_cohort_inline_discussions', models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name='CourseUserGroup',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(help_text='What is the name of this group? Must be unique within a course.', max_length=255)),
('course_id', CourseKeyField(help_text='Which course is this group associated with?', max_length=255, db_index=True)),
('group_type', models.CharField(max_length=20, choices=[('cohort', 'Cohort')])),
('users', models.ManyToManyField(help_text='Who is in this group?', related_name='course_groups', to=settings.AUTH_USER_MODEL, db_index=True)),
],
),
migrations.CreateModel(
name='CourseUserGroupPartitionGroup',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('partition_id', models.IntegerField(help_text='contains the id of a cohorted partition in this course')),
('group_id', models.IntegerField(help_text='contains the id of a specific group within the cohorted partition')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('course_user_group', models.OneToOneField(to='course_groups.CourseUserGroup', on_delete=models.CASCADE)),
],
),
migrations.AddField(
model_name='coursecohort',
name='course_user_group',
field=models.OneToOneField(related_name='cohort', to='course_groups.CourseUserGroup', on_delete=models.CASCADE),
),
migrations.AddField(
model_name='cohortmembership',
name='course_user_group',
field=models.ForeignKey(to='course_groups.CourseUserGroup', on_delete=models.CASCADE),
),
migrations.AddField(
model_name='cohortmembership',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE),
),
migrations.AlterUniqueTogether(
name='courseusergroup',
unique_together={('name', 'course_id')},
),
migrations.AlterUniqueTogether(
name='cohortmembership',
unique_together={('user', 'course_id')},
),
]
| [((202, 259), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (233, 259), False, 'from django.db import migrations, models\n'), ((3807, 3907), 'django.db.migrations.AlterUniqueTogether', 'migrations.AlterUniqueTogether', ([], {'name': '"""courseusergroup"""', 'unique_together': "{('name', 'course_id')}"}), "(name='courseusergroup', unique_together={(\n 'name', 'course_id')})\n", (3837, 3907), False, 'from django.db import migrations, models\n'), ((3947, 4048), 'django.db.migrations.AlterUniqueTogether', 'migrations.AlterUniqueTogether', ([], {'name': '"""cohortmembership"""', 'unique_together': "{('user', 'course_id')}"}), "(name='cohortmembership', unique_together={(\n 'user', 'course_id')})\n", (3977, 4048), False, 'from django.db import migrations, models\n'), ((3261, 3371), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'related_name': '"""cohort"""', 'to': '"""course_groups.CourseUserGroup"""', 'on_delete': 'models.CASCADE'}), "(related_name='cohort', to=\n 'course_groups.CourseUserGroup', on_delete=models.CASCADE)\n", (3281, 3371), False, 'from django.db import migrations, models\n'), ((3507, 3586), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""course_groups.CourseUserGroup"""', 'on_delete': 'models.CASCADE'}), "(to='course_groups.CourseUserGroup', on_delete=models.CASCADE)\n", (3524, 3586), False, 'from django.db import migrations, models\n'), ((3714, 3786), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': 'settings.AUTH_USER_MODEL', 'on_delete': 'models.CASCADE'}), '(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)\n', (3731, 3786), False, 'from django.db import migrations, models\n'), ((400, 493), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (416, 493), False, 'from django.db import migrations, models\n'), ((522, 552), 'opaque_keys.edx.django.models.CourseKeyField', 'CourseKeyField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (536, 552), False, 'from opaque_keys.edx.django.models import CourseKeyField\n'), ((690, 783), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (706, 783), False, 'from django.db import migrations, models\n'), ((818, 925), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""manual"""', 'max_length': '(20)', 'choices': "[('random', 'Random'), ('manual', 'Manual')]"}), "(default='manual', max_length=20, choices=[('random',\n 'Random'), ('manual', 'Manual')])\n", (834, 925), False, 'from django.db import migrations, models\n'), ((1068, 1161), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (1084, 1161), False, 'from django.db import migrations, models\n'), ((1192, 1226), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1211, 1226), False, 'from django.db import migrations, models\n'), ((1259, 1383), 
'opaque_keys.edx.django.models.CourseKeyField', 'CourseKeyField', ([], {'help_text': '"""Which course are these settings associated with?"""', 'unique': '(True)', 'max_length': '(255)', 'db_index': '(True)'}), "(help_text='Which course are these settings associated with?',\n unique=True, max_length=255, db_index=True)\n", (1273, 1383), False, 'from opaque_keys.edx.django.models import CourseKeyField\n'), ((1424, 1497), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'db_column': '"""cohorted_discussions"""', 'blank': '(True)'}), "(null=True, db_column='cohorted_discussions', blank=True)\n", (1440, 1497), False, 'from django.db import migrations, models\n'), ((1553, 1586), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1572, 1586), False, 'from django.db import migrations, models\n'), ((1727, 1820), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (1743, 1820), False, 'from django.db import migrations, models\n'), ((1844, 1963), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""What is the name of this group? Must be unique within a course."""', 'max_length': '(255)'}), "(help_text=\n 'What is the name of this group? Must be unique within a course.',\n max_length=255)\n", (1860, 1963), False, 'from django.db import migrations, models\n'), ((1987, 2093), 'opaque_keys.edx.django.models.CourseKeyField', 'CourseKeyField', ([], {'help_text': '"""Which course is this group associated with?"""', 'max_length': '(255)', 'db_index': '(True)'}), "(help_text='Which course is this group associated with?',\n max_length=255, db_index=True)\n", (2001, 2093), False, 'from opaque_keys.edx.django.models import CourseKeyField\n'), ((2123, 2186), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'choices': "[('cohort', 'Cohort')]"}), "(max_length=20, choices=[('cohort', 'Cohort')])\n", (2139, 2186), False, 'from django.db import migrations, models\n'), ((2215, 2351), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'help_text': '"""Who is in this group?"""', 'related_name': '"""course_groups"""', 'to': 'settings.AUTH_USER_MODEL', 'db_index': '(True)'}), "(help_text='Who is in this group?', related_name=\n 'course_groups', to=settings.AUTH_USER_MODEL, db_index=True)\n", (2237, 2351), False, 'from django.db import migrations, models\n'), ((2501, 2594), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (2517, 2594), False, 'from django.db import migrations, models\n'), ((2626, 2718), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'help_text': '"""contains the id of a cohorted partition in this course"""'}), "(help_text=\n 'contains the id of a cohorted partition in this course')\n", (2645, 2718), False, 'from django.db import migrations, models\n'), ((2745, 2848), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'help_text': '"""contains the id of a specific group within the cohorted partition"""'}), "(help_text=\n 'contains the id of a specific group within the cohorted partition')\n", (2764, 2848), False, 'from django.db import migrations, models\n'), 
((2877, 2916), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2897, 2916), False, 'from django.db import migrations, models\n'), ((2950, 2985), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (2970, 2985), False, 'from django.db import migrations, models\n'), ((3026, 3113), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'to': '"""course_groups.CourseUserGroup"""', 'on_delete': 'models.CASCADE'}), "(to='course_groups.CourseUserGroup', on_delete=models.\n CASCADE)\n", (3046, 3113), False, 'from django.db import migrations, models\n')] |
farkaskid/recipes | kafka-rockset-integration/generate_customers_data.py | 8eef799cda899ea266f2849d485917f9b0d83190 | """Generate Customer Data"""
import csv
import random
from config import MIN_CUSTOMER_ID, MAX_CUSTOMER_ID
ACQUISITION_SOURCES = [
'OrganicSearch',
'PaidSearch',
'Email',
'SocialMedia',
'Display',
    'Affiliate',
    'Referral'
]
def main():
with open('customers.csv', 'w') as fout:
writer = csv.DictWriter(fout, fieldnames=['CustomerID', 'AcquisitionSource'])
writer.writeheader()
for customer_id in range(MIN_CUSTOMER_ID, MAX_CUSTOMER_ID + 1):
record = {
'CustomerID': int(customer_id),
'AcquisitionSource': random.choices(ACQUISITION_SOURCES).pop()
}
writer.writerow(record)
if __name__ == '__main__':
main()
| [((328, 396), 'csv.DictWriter', 'csv.DictWriter', (['fout'], {'fieldnames': "['CustomerID', 'AcquisitionSource']"}), "(fout, fieldnames=['CustomerID', 'AcquisitionSource'])\n", (342, 396), False, 'import csv\n'), ((606, 641), 'random.choices', 'random.choices', (['ACQUISITION_SOURCES'], {}), '(ACQUISITION_SOURCES)\n', (620, 641), False, 'import random\n')] |
MatthewBM/parsl | parsl/tests/test_error_handling/test_resource_spec.py | f11417a0255ed290fd0d78ffa1bc52cfe7a06301 | import parsl
from parsl.app.app import python_app
from parsl.tests.configs.local_threads import config
from parsl.executors.errors import UnsupportedFeatureError
from parsl.executors import WorkQueueExecutor
@python_app
def double(x, parsl_resource_specification={}):
return x * 2
def test_resource(n=2):
spec = {'cores': 2, 'memory': '1GiB'}
fut = double(n, parsl_resource_specification=spec)
try:
fut.result()
except Exception as e:
assert isinstance(e, UnsupportedFeatureError)
else:
executors = parsl.dfk().executors
executor = None
for label in executors:
if label != 'data_manager':
executor = executors[label]
break
assert isinstance(executor, WorkQueueExecutor)
if __name__ == '__main__':
local_config = config
parsl.load(local_config)
x = test_resource(2)
| [((849, 873), 'parsl.load', 'parsl.load', (['local_config'], {}), '(local_config)\n', (859, 873), False, 'import parsl\n'), ((551, 562), 'parsl.dfk', 'parsl.dfk', ([], {}), '()\n', (560, 562), False, 'import parsl\n')] |
cincanproject/cincan-command | cincan/file_tool.py | b8cde81931b1c8583ac7daa1327520fb9f06856e | import pathlib
import re
from typing import List, Optional, Dict, Set, Tuple, Iterable
import shlex
class FileMatcher:
"""Match files based on a pattern"""
def __init__(self, match_string: str, include: bool):
self.match_string = match_string
self.exact = '*' not in match_string
self.absolute_path = match_string.startswith('/')
self.include = include
@classmethod
def parse(cls, match_strings: List[str]) -> List['FileMatcher']:
"""Parse pattens from a list"""
res = []
for m in match_strings:
if m.startswith('^'):
res.append(FileMatcher(m[1:], include=False))
else:
res.append(FileMatcher(m, include=True))
return res
def filter_upload_files(self, files: List[pathlib.Path]) -> List[pathlib.Path]:
"""Filter uploaded files by this pattern"""
return list(filter(lambda f: self.__match(f.as_posix()) == self.include, files))
def filter_download_files(self, files: List[str], work_dir: str) -> List[str]:
"""Filter downloaded files by this pattern"""
if self.absolute_path:
# matching absolute files
res = []
for file in files:
if self.__match(file) == self.include:
res.append(file)
return res
else:
# matching files relative to working directory
res = []
for file in files:
try:
rel_file = pathlib.Path(file).relative_to(work_dir).as_posix()
except ValueError:
if not self.include:
res.append(file)
continue
if self.__match(rel_file) == self.include:
res.append(file)
return res
def __match(self, value: str) -> bool:
"""Match value with this pattern"""
if self.exact:
return self.match_string == value
split = self.match_string.split("*")
i = 0
off = 0
len_v = len(value)
s = split[0]
len_s = len(s)
if len_s > 0:
if len_v < i + len_s or value[i:i + len_s] != s:
return False
off += len_s
i += 1
while i < len(split):
s = split[i]
len_s = len(s)
if len_s > 0:
off = value.find(s, off)
if off < 0:
return False
i += 1
off += len_s
if split[-1] != '' and off != len_v:
return False
return True
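# Illustrative sketch (not part of the original module): FileMatcher.parse turns a
# pattern list into matchers, where a leading '^' marks an exclusion pattern and '*'
# is a wildcard. With the hypothetical files below, only 'a.txt' survives:
#
#   matchers = FileMatcher.parse(["*.txt", "^secret*"])
#   files = [pathlib.Path("a.txt"), pathlib.Path("secret.txt")]
#   for m in matchers:
#       files = m.filter_upload_files(files)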
class FileResolver:
"""Resolve files from command line arguments"""
def __init__(self, args: List[str], directory: pathlib.Path, output_dirs: List[str] = None,
do_resolve: bool = True, input_filters: List[FileMatcher] = None):
self.original_args = args
self.directory = directory
self.host_files: List[pathlib.Path] = []
self.command_args = args.copy()
        # Additional punctuation chars on which we might split the command (on top of shlex's defaults)
self.additional_punc_chars = "=,"
# these are output directories, upload them without contents
for dir in output_dirs or []:
self.host_files.append(pathlib.Path(dir))
self.output_dirs = set([pathlib.Path(d) for d in (output_dirs or [])])
if do_resolve:
# autodetect input files
self.__analyze()
# exclude files by filters, perhaps?
for filth in input_filters or []:
self.host_files = filth.filter_upload_files(self.host_files)
def __file_exists(self, path: str, already_listed: Set[pathlib.Path], parent_check: bool = True) -> Optional[str]:
"""
        Evaluate whether an argument refers to an existing input file or a potential output directory.
        If a local file/directory matches, it is marked for upload into the container, and the path is
        rewritten to be relative to the container's working directory when the command is passed in.
        Special case: when the candidate argument comes from the first (unquoted) layer of arguments, is a
        valid path and contains no whitespace, it is processed later, because special markup such as
        % and & is supported there.
"""
o_file = pathlib.Path(path)
        # does the file/dir exist? No attempt to copy '/', leave it as it is...
file_exists = o_file.exists() and not all([c == '/' for c in path])
# When filename contains potentially spaces, were are only interested about absolute path
# Not checking parents
if not file_exists and not parent_check and not " " in path:
return None
if not file_exists and not o_file.is_absolute() and '..' not in o_file.as_posix():
# the file does not exist, but it is relative path to a file/directory...
o_parent = o_file.parent
while not file_exists and o_parent and o_parent.as_posix() != '.':
if o_parent.is_dir() and o_parent not in self.host_files:
file_exists = True # ...and there is existing parent directory, perhaps for output
o_parent = o_parent.parent
if file_exists:
h_file, a_name = self.__archive_name_for(o_file)
if h_file not in already_listed:
self.host_files.append(h_file)
already_listed.add(h_file)
# '/' in the end gets eaten away... fix
for p in range(len(path) - 1, 0, -1):
if path[p] != '/':
break
a_name += '/'
if file_exists and o_file.is_dir() and o_file not in self.output_dirs:
# include files in sub directories
self.__include_sub_dirs(o_file.iterdir(), already_listed)
if file_exists:
return a_name
else:
return None
def __analyze(self):
"""Analyze the command line"""
self.command_args = []
already_listed: Set[pathlib.Path] = self.output_dirs.copy()
for o_arg in self.original_args:
a_name = self.__file_exists(o_arg, already_listed, parent_check=False)
            # Potentially a path argument; don't split it into pieces yet for further analysis
if a_name:
self.command_args.append(a_name)
continue
# NOTE: Shlex not Windows compatible!
lex = shlex.shlex(o_arg, posix=True, punctuation_chars=self.additional_punc_chars)
split = list(lex)
modified_paths = []
for part in split:
a_name = self.__file_exists(part, already_listed)
if a_name:
modified_paths.append((part, a_name))
for m_part, m_name in modified_paths:
o_arg = o_arg.replace(m_part, m_name)
self.command_args.append(o_arg)
def __include_sub_dirs(self, files: Iterable[pathlib.Path], file_set: Set[pathlib.Path]):
"""Include files from sub directories"""
for f in files:
if f not in file_set:
self.host_files.append(f)
file_set.add(f)
if f.is_dir():
self.__include_sub_dirs(f.iterdir(), file_set)
def resolve_upload_files(self, upload_files: Dict[pathlib.Path, str]):
"""Resolve the files to upload"""
for up_file in self.detect_upload_files():
host_file, arc_name = self.__archive_name_for(up_file)
upload_files[host_file] = arc_name
cmd_args = self.command_args
return cmd_args
def detect_upload_files(self, files: Optional[Iterable[pathlib.Path]] = None) -> List[pathlib.Path]:
"""Detect files to upload"""
it_files = sorted(self.host_files) if files is None else files
res = []
# filter out files which do not exist nor should exists
for file in it_files:
if file.exists() or file in self.output_dirs:
res.append(file)
if files is None:
# make sure also paths leading to output files are uploaded
all_dirs = set()
for file in res:
all_dirs.add(file)
for p in file.parents:
all_dirs.add(p)
for file in filter(lambda f: not f.exists(), it_files):
                # file does not exist but is marked for upload - must mean some sub directory for output
p = file.parent
while not p.exists():
p = p.parent
if p not in all_dirs:
res.append(p)
return res
@classmethod
def __archive_name_for(cls, file: pathlib.Path) -> Tuple[pathlib.Path, str]:
"""Resolve host file and archive name for uploaded file"""
if cls.__use_absolute_path(file):
h_file = file.resolve()
a_file = file.resolve().as_posix()
a_file = a_file[1:] if a_file.startswith('/') else a_file
else:
h_file = file
a_file = file.as_posix()
return h_file, a_file
@classmethod
def __use_absolute_path(cls, file: pathlib.Path) -> bool:
"""Should use absolute path to refer a file path?"""
# - use absolute paths, if /../ used (ok, quite weak)
return file.is_absolute() or (".." in file.as_posix())
| [((4455, 4473), 'pathlib.Path', 'pathlib.Path', (['path'], {}), '(path)\n', (4467, 4473), False, 'import pathlib\n'), ((6606, 6682), 'shlex.shlex', 'shlex.shlex', (['o_arg'], {'posix': '(True)', 'punctuation_chars': 'self.additional_punc_chars'}), '(o_arg, posix=True, punctuation_chars=self.additional_punc_chars)\n', (6617, 6682), False, 'import shlex\n'), ((3348, 3365), 'pathlib.Path', 'pathlib.Path', (['dir'], {}), '(dir)\n', (3360, 3365), False, 'import pathlib\n'), ((3399, 3414), 'pathlib.Path', 'pathlib.Path', (['d'], {}), '(d)\n', (3411, 3414), False, 'import pathlib\n'), ((1537, 1555), 'pathlib.Path', 'pathlib.Path', (['file'], {}), '(file)\n', (1549, 1555), False, 'import pathlib\n')] |
hubaimaster/aws-interface | aws_interface/cloud/auth/set_me.py | 162dd056546d58b6eb29afcae1c3c2d78e4309b2 |
from cloud.permission import Permission, NeedPermission
from cloud.message import error
# Define the input output format of the function.
# This information is used when creating the *SDK*.
info = {
'input_format': {
'session_id': 'str',
'field': 'str',
'value?': 'str',
},
'output_format': {
'user_id?': 'str',
},
'description': 'Set my information'
}
@NeedPermission(Permission.Run.Auth.set_me)
def do(data, resource):
body = {}
params = data['params']
user = data['user']
user_id = user['id']
field = params.get('field')
value = params.get('value', None)
user = resource.db_get_item(user_id)
# For security
if field in ['id', 'email', 'password_hash', 'salt', 'groups', 'login_method']:
body['error'] = error.FORBIDDEN_MODIFICATION
return body
else:
user[field] = value
resource.db_update_item(user_id, user)
body['user_id'] = user_id
return body
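# Illustrative call shape (hypothetical values; in practice the framework builds `data`):
#   do({'params': {'field': 'nickname', 'value': 'neo'}, 'user': {'id': '<user-id>'}}, resource)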
| [((410, 452), 'cloud.permission.NeedPermission', 'NeedPermission', (['Permission.Run.Auth.set_me'], {}), '(Permission.Run.Auth.set_me)\n', (424, 452), False, 'from cloud.permission import Permission, NeedPermission\n')] |
celikten/armi | doc/gallery-src/analysis/run_blockMcnpMaterialCard.py | 4e100dd514a59caa9c502bd5a0967fd77fdaf00e | """
Write MCNP Material Cards
=========================
Here we load a test reactor and write each component of one fuel block out as
MCNP material cards.
Normally, code-specific utility code would belong in a code-specific ARMI
plugin. But in this case, the need for MCNP materials cards is so pervasive
that it made it into the framework.
"""
from armi.reactor.tests import test_reactors
from armi.reactor.flags import Flags
from armi.utils.densityTools import formatMaterialCard
from armi.nucDirectory import nuclideBases as nb
from armi import configure
configure(permissive=True)
_o, r = test_reactors.loadTestReactor()
bFuel = r.core.getBlocks(Flags.FUEL)[0]
for ci, component in enumerate(bFuel, start=1):
ndens = component.getNumberDensities()
# convert nucName (str) keys to nuclideBase keys
ndensByBase = {nb.byName[nucName]: dens for nucName, dens in ndens.items()}
print("".join(formatMaterialCard(ndensByBase, matNum=ci)))
| [((562, 588), 'armi.configure', 'configure', ([], {'permissive': '(True)'}), '(permissive=True)\n', (571, 588), False, 'from armi import configure\n'), ((598, 629), 'armi.reactor.tests.test_reactors.loadTestReactor', 'test_reactors.loadTestReactor', ([], {}), '()\n', (627, 629), False, 'from armi.reactor.tests import test_reactors\n'), ((914, 956), 'armi.utils.densityTools.formatMaterialCard', 'formatMaterialCard', (['ndensByBase'], {'matNum': 'ci'}), '(ndensByBase, matNum=ci)\n', (932, 956), False, 'from armi.utils.densityTools import formatMaterialCard\n')] |
mustaqimM/life_line_chart | life_line_chart/_autogenerate_data.py | a9bbbbdeb5568aa0cc3b3b585337a3d655f4b2d6 | import names
import os
import datetime
from random import random
def generate_gedcom_file():
"""generate some gedcom file"""
db = {}
db['n_individuals'] = 0
db['max_individuals'] = 8000
db['n_families'] = 0
db['yougest'] = None
gedcom_content = """
0 HEAD
1 SOUR Gramps
2 VERS 3.3.0
2 NAME Gramps
1 DATE {}
2 TIME 15:35:24
1 SUBM @SUBM@
1 COPR Copyright (c) 2020 Christian Schulze,,,.
1 GEDC
2 VERS 5.5
1 CHAR UTF-8
1 LANG German
""".format(datetime.date.today())
def generate_individual(db, birth_year, sex=None, last_name=None):
if not sex:
sex = 'F' if random() < 0.5 else 'M'
first_name = names.get_first_name(
gender='male' if sex == 'M' else 'female')
if random() < 0.3:
first_name += ' ' + \
names.get_first_name(gender='male' if sex == 'M' else 'female')
if not last_name:
last_name = names.get_last_name()
birth_place = 'Paris' if random() < 0.5 else 'Rome'
death_place = 'Zorge' if random() < 0.5 else 'Bruegge'
db['n_individuals'] += 1
individual_id = '@I{}@'.format(db["n_individuals"])
death_year = birth_year + 40 + int(random()*20)
db[individual_id] = {
'birth': birth_year,
'death': death_year,
'sex': sex,
'last_name': last_name
}
birth_date = '1 JUN {}'.format(birth_year)
        death_date = '1 JUN {}'.format(death_year)
if not db['yougest']:
db['yougest'] = individual_id
elif db[db['yougest']]['birth'] < birth_year:
db['yougest'] = individual_id
db[individual_id]['string'] = """0 {individual_id} INDI
1 NAME {first_name} /{last_name}/
1 SEX {sex}
1 BIRT
2 DATE {birth_date}
2 PLAC {birth_place}
1 DEAT
2 DATE {death_date}
2 PLAC {death_place}
""".format(**locals())
return individual_id
def generate_family(db, husband_id, wife_id, children_ids, marriage_year, marriage_place=None):
if not marriage_place:
marriage_place = 'London' if random() < 0.5 else 'Tokio'
db['n_families'] += 1
marriage_date = '1 MAY {}'.format(marriage_year)
family_id = "@F{}@".format(db['n_families'])
db[family_id] = {'string': """0 {family_id} FAM
1 HUSB {husband_id}
1 WIFE {wife_id}
1 MARR
2 DATE {marriage_date}
2 PLAC {marriage_place}
""".format(
**locals()
)}
for child_id in children_ids:
db[family_id]['string'] += "1 CHIL {}\n".format(child_id)
return family_id
def find_by_birth_date(db, from_year, to_year, sex, exclude=[]):
ids = []
for individual_id, data in db.items():
if not individual_id.startswith('@I'):
continue
if 'famc' in data:
if data['birth'] > from_year and data['birth'] < to_year:
if sex == data['sex']:
if individual_id not in exclude:
ids.append(individual_id)
if ids:
return ids[int(random()*len(ids))]
return None
def generate_recursive_family(db, start_year=1000, generations=2, husband_id=None, wife_id=None, siblings=[], max_children=5):
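        """Build one family subtree: pick or create a husband and wife, marry them,
        generate their children, and recurse one generation deeper for each child."""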
if not husband_id:
if random() < 0.2:
exclude = siblings.copy()
if wife_id:
exclude += [wife_id]
husband_id = find_by_birth_date(
db, start_year, start_year + 10, sex='M', exclude=exclude)
if not husband_id:
husband_id = generate_individual(
db, start_year + int(random()*5), sex='M')
else:
print('reused {}'.format(husband_id))
if not wife_id:
if random() < 10.9:
exclude = siblings.copy() + [husband_id]
wife_id = find_by_birth_date(
db, start_year, start_year + 10, sex='F', exclude=exclude)
if not wife_id:
wife_id = generate_individual(
db, start_year + int(random()*5), sex='F')
else:
print('reused {}'.format(wife_id))
n_children = int((1+random()*(max_children-1)) *
(1 - db['n_individuals'] / db['max_individuals']))
marriage_year = start_year + 20 + int(random()*5)
children_ids = []
for i in range(n_children):
children_ids.append(generate_individual(
db, birth_year=marriage_year + 1 + int(random()*10), last_name=db[husband_id]['last_name']))
family_id = generate_family(
db, husband_id, wife_id, children_ids, marriage_year)
for i in range(n_children):
db[children_ids[i]]['string'] += "1 FAMC "+family_id + '\n'
db[children_ids[i]]['famc'] = family_id
if generations > 0:
generate_recursive_family(
db,
db[children_ids[i]]['birth'],
generations - 1,
children_ids[i] if db[children_ids[i]
]['sex'] == 'M' else None,
children_ids[i] if db[children_ids[i]
]['sex'] == 'F' else None,
children_ids)
db[husband_id]['string'] += "1 FAMS "+family_id + '\n'
db[wife_id]['string'] += "1 FAMS "+family_id + '\n'
generate_recursive_family(db, generations=8, max_children=4)
for k, v in db.items():
if k.startswith('@I'):
gedcom_content += v['string']
for k, v in db.items():
if k.startswith('@F'):
gedcom_content += v['string']
gedcom_content += '0 TRLR\n'
open(os.path.join(os.path.dirname(__file__), '..', 'tests',
'autogenerated.ged'), 'w').write(gedcom_content)
# generate_gedcom_file()
def generate_individual_images():
from PIL import Image, ImageDraw, ImageFont
def generate_one_image(filename, text, font_size=22, pos=(15, 40), size=(100, 100), color=(160, 160, 160)):
img = Image.new('RGB', size, color=color)
d = ImageDraw.Draw(img)
font = ImageFont.truetype(r'arial.ttf', font_size)
d.text(pos, text, fill=(0, 0, 0), font=font)
img.save(filename)
for i in range(20):
generate_one_image(
'tests/images/individual_I6_image_age_{}.png'.format(
1+i*4
), 'Age {}'.format(
1+i*4,
))
generate_individual_images()
| [((475, 496), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (494, 496), False, 'import datetime\n'), ((660, 723), 'names.get_first_name', 'names.get_first_name', ([], {'gender': "('male' if sex == 'M' else 'female')"}), "(gender='male' if sex == 'M' else 'female')\n", (680, 723), False, 'import names\n'), ((6157, 6192), 'PIL.Image.new', 'Image.new', (['"""RGB"""', 'size'], {'color': 'color'}), "('RGB', size, color=color)\n", (6166, 6192), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((6206, 6225), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['img'], {}), '(img)\n', (6220, 6225), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((6241, 6283), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['"""arial.ttf"""', 'font_size'], {}), "('arial.ttf', font_size)\n", (6259, 6283), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((748, 756), 'random.random', 'random', ([], {}), '()\n', (754, 756), False, 'from random import random\n'), ((928, 949), 'names.get_last_name', 'names.get_last_name', ([], {}), '()\n', (947, 949), False, 'import names\n'), ((814, 877), 'names.get_first_name', 'names.get_first_name', ([], {'gender': "('male' if sex == 'M' else 'female')"}), "(gender='male' if sex == 'M' else 'female')\n", (834, 877), False, 'import names\n'), ((983, 991), 'random.random', 'random', ([], {}), '()\n', (989, 991), False, 'from random import random\n'), ((1043, 1051), 'random.random', 'random', ([], {}), '()\n', (1049, 1051), False, 'from random import random\n'), ((3293, 3301), 'random.random', 'random', ([], {}), '()\n', (3299, 3301), False, 'from random import random\n'), ((3803, 3811), 'random.random', 'random', ([], {}), '()\n', (3809, 3811), False, 'from random import random\n'), ((615, 623), 'random.random', 'random', ([], {}), '()\n', (621, 623), False, 'from random import random\n'), ((1209, 1217), 'random.random', 'random', ([], {}), '()\n', (1215, 1217), False, 'from random import random\n'), ((2089, 2097), 'random.random', 'random', ([], {}), '()\n', (2095, 2097), False, 'from random import random\n'), ((4388, 4396), 'random.random', 'random', ([], {}), '()\n', (4394, 4396), False, 'from random import random\n'), ((5808, 5833), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (5823, 5833), False, 'import os\n'), ((4237, 4245), 'random.random', 'random', ([], {}), '()\n', (4243, 4245), False, 'from random import random\n'), ((3079, 3087), 'random.random', 'random', ([], {}), '()\n', (3085, 3087), False, 'from random import random\n'), ((3670, 3678), 'random.random', 'random', ([], {}), '()\n', (3676, 3678), False, 'from random import random\n'), ((4118, 4126), 'random.random', 'random', ([], {}), '()\n', (4124, 4126), False, 'from random import random\n'), ((4570, 4578), 'random.random', 'random', ([], {}), '()\n', (4576, 4578), False, 'from random import random\n')] |
LiorAvrahami/arcade | arcade/examples/sprite_bullets_enemy_aims.py | fce254a9eb89629de1f99d57a63759a2953184e9 | """
Show how to have enemies shoot bullets aimed at the player.
If Python and Arcade are installed, this example can be run from the command line with:
python -m arcade.examples.sprite_bullets_enemy_aims
"""
import arcade
import math
import os
SCREEN_WIDTH = 800
SCREEN_HEIGHT = 600
SCREEN_TITLE = "Sprites and Bullets Enemy Aims Example"
BULLET_SPEED = 4
class MyGame(arcade.Window):
""" Main application class """
def __init__(self, width, height, title):
super().__init__(width, height, title)
# Set the working directory (where we expect to find files) to the same
# directory this .py file is in. You can leave this out of your own
# code, but it is needed to easily run the examples using "python -m"
# as mentioned at the top of this program.
file_path = os.path.dirname(os.path.abspath(__file__))
os.chdir(file_path)
arcade.set_background_color(arcade.color.BLACK)
self.frame_count = 0
self.enemy_list = None
self.bullet_list = None
self.player_list = None
self.player = None
def setup(self):
self.enemy_list = arcade.SpriteList()
self.bullet_list = arcade.SpriteList()
self.player_list = arcade.SpriteList()
# Add player ship
self.player = arcade.Sprite(":resources:images/space_shooter/playerShip1_orange.png", 0.5)
self.player_list.append(self.player)
# Add top-left enemy ship
enemy = arcade.Sprite(":resources:images/space_shooter/playerShip1_green.png", 0.5)
enemy.center_x = 120
enemy.center_y = SCREEN_HEIGHT - enemy.height
enemy.angle = 180
self.enemy_list.append(enemy)
# Add top-right enemy ship
enemy = arcade.Sprite(":resources:images/space_shooter/playerShip1_green.png", 0.5)
enemy.center_x = SCREEN_WIDTH - 120
enemy.center_y = SCREEN_HEIGHT - enemy.height
enemy.angle = 180
self.enemy_list.append(enemy)
def on_draw(self):
"""Render the screen. """
arcade.start_render()
self.enemy_list.draw()
self.bullet_list.draw()
self.player_list.draw()
def on_update(self, delta_time):
"""All the logic to move, and the game logic goes here. """
self.frame_count += 1
# Loop through each enemy that we have
for enemy in self.enemy_list:
# First, calculate the angle to the player. We could do this
# only when the bullet fires, but in this case we will rotate
# the enemy to face the player each frame, so we'll do this
# each frame.
# Position the start at the enemy's current location
start_x = enemy.center_x
start_y = enemy.center_y
# Get the destination location for the bullet
dest_x = self.player.center_x
dest_y = self.player.center_y
# Do math to calculate how to get the bullet to the destination.
# Calculation the angle in radians between the start points
# and end points. This is the angle the bullet will travel.
x_diff = dest_x - start_x
y_diff = dest_y - start_y
angle = math.atan2(y_diff, x_diff)
# Set the enemy to face the player.
enemy.angle = math.degrees(angle)-90
            # Shoot every 60 frames (instead of a random chance of shooting each frame)
if self.frame_count % 60 == 0:
bullet = arcade.Sprite(":resources:images/space_shooter/laserBlue01.png")
bullet.center_x = start_x
bullet.center_y = start_y
# Angle the bullet sprite
bullet.angle = math.degrees(angle)
# Taking into account the angle, calculate our change_x
# and change_y. Velocity is how fast the bullet travels.
bullet.change_x = math.cos(angle) * BULLET_SPEED
bullet.change_y = math.sin(angle) * BULLET_SPEED
self.bullet_list.append(bullet)
# Get rid of the bullet when it flies off-screen
for bullet in self.bullet_list:
if bullet.top < 0:
bullet.remove_from_sprite_lists()
self.bullet_list.update()
def on_mouse_motion(self, x, y, delta_x, delta_y):
"""Called whenever the mouse moves. """
self.player.center_x = x
self.player.center_y = y
def main():
""" Main method """
window = MyGame(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
window.setup()
arcade.run()
if __name__ == "__main__":
main()
| [((4594, 4606), 'arcade.run', 'arcade.run', ([], {}), '()\n', (4604, 4606), False, 'import arcade\n'), ((876, 895), 'os.chdir', 'os.chdir', (['file_path'], {}), '(file_path)\n', (884, 895), False, 'import os\n'), ((905, 952), 'arcade.set_background_color', 'arcade.set_background_color', (['arcade.color.BLACK'], {}), '(arcade.color.BLACK)\n', (932, 952), False, 'import arcade\n'), ((1154, 1173), 'arcade.SpriteList', 'arcade.SpriteList', ([], {}), '()\n', (1171, 1173), False, 'import arcade\n'), ((1201, 1220), 'arcade.SpriteList', 'arcade.SpriteList', ([], {}), '()\n', (1218, 1220), False, 'import arcade\n'), ((1248, 1267), 'arcade.SpriteList', 'arcade.SpriteList', ([], {}), '()\n', (1265, 1267), False, 'import arcade\n'), ((1317, 1393), 'arcade.Sprite', 'arcade.Sprite', (['""":resources:images/space_shooter/playerShip1_orange.png"""', '(0.5)'], {}), "(':resources:images/space_shooter/playerShip1_orange.png', 0.5)\n", (1330, 1393), False, 'import arcade\n'), ((1490, 1565), 'arcade.Sprite', 'arcade.Sprite', (['""":resources:images/space_shooter/playerShip1_green.png"""', '(0.5)'], {}), "(':resources:images/space_shooter/playerShip1_green.png', 0.5)\n", (1503, 1565), False, 'import arcade\n'), ((1765, 1840), 'arcade.Sprite', 'arcade.Sprite', (['""":resources:images/space_shooter/playerShip1_green.png"""', '(0.5)'], {}), "(':resources:images/space_shooter/playerShip1_green.png', 0.5)\n", (1778, 1840), False, 'import arcade\n'), ((2070, 2091), 'arcade.start_render', 'arcade.start_render', ([], {}), '()\n', (2089, 2091), False, 'import arcade\n'), ((841, 866), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (856, 866), False, 'import os\n'), ((3258, 3284), 'math.atan2', 'math.atan2', (['y_diff', 'x_diff'], {}), '(y_diff, x_diff)\n', (3268, 3284), False, 'import math\n'), ((3360, 3379), 'math.degrees', 'math.degrees', (['angle'], {}), '(angle)\n', (3372, 3379), False, 'import math\n'), ((3518, 3582), 'arcade.Sprite', 'arcade.Sprite', (['""":resources:images/space_shooter/laserBlue01.png"""'], {}), "(':resources:images/space_shooter/laserBlue01.png')\n", (3531, 3582), False, 'import arcade\n'), ((3741, 3760), 'math.degrees', 'math.degrees', (['angle'], {}), '(angle)\n', (3753, 3760), False, 'import math\n'), ((3941, 3956), 'math.cos', 'math.cos', (['angle'], {}), '(angle)\n', (3949, 3956), False, 'import math\n'), ((4006, 4021), 'math.sin', 'math.sin', (['angle'], {}), '(angle)\n', (4014, 4021), False, 'import math\n')] |
FreakX23/EBook_Training | app1.py | de445b0a9e56a1f1ffc51ae3c5e10ebe8297e9b6 | # This part will gather info and demonstrate the use of variables.
usrName = input("What is your Name?")
usrAge = int(input("What is your Age?"))
usrGPA = float(input("What is your GPA?"))
print()  # cheap way to get a new line
print ("Hello, %s" % (usrName))
print ("Did you know that in two years you will be %d years old? " % (usrAge +2))
print ("Also you need to improve your GPA by %f points to have a perfect score." % (4.0 - usrGPA))
print ()
| [] |
AmitHasanShuvo/Programming | borze.py | f47ecc626e518a0bf5f9f749afd15ce67bbe737b | a = input()
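# Decode a Borze ternary code: '--' -> 2, '-.' -> 1, '.' -> 0.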
a = a.replace('--', '2')
a = a.replace('-.', '1')
a = a.replace('.', '0')
print(a)
| [] |
charlesemurray/DistributedProgramming | distalg/message.py | f7b5001a6acb0583cd6b7bb611f27893b830c296 | class Message:
def __init__(self, from_channel=None, **kwargs):
self._channel = from_channel
if kwargs is not None:
for key, value in kwargs.items():
setattr(self, key, value)
@property
def carrier(self):
return self._channel
def sender(self):
return self._channel.sender
def receiver(self):
return self._channel.receiver
class CallbackMessage(Message):
def __init__(self, function):
super(CallbackMessage, self).__init__(function=function)
if __name__ == "__main__":
msg = Message(sender="A", receiver="B")
    assert msg.sender == "A"
    assert msg.receiver == "B"
| [] |
rupeshparab/techscan | myenv/lib/python3.5/site-packages/tests/handlers/logging/logging_tests.py | ce2558602ddad31873d7129f25b1cc61895b9939 | import logging
from opbeat.handlers.logging import OpbeatHandler
from opbeat.utils.stacks import iter_stack_frames
from tests.helpers import get_tempstoreclient
from tests.utils.compat import TestCase
class LoggingIntegrationTest(TestCase):
def setUp(self):
self.client = get_tempstoreclient(include_paths=['tests', 'opbeat'])
self.handler = OpbeatHandler(self.client)
self.logger = logging.getLogger(__name__)
self.logger.handlers = []
self.logger.addHandler(self.handler)
def test_logger_basic(self):
self.logger.error('This is a test error')
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['logger'], __name__)
self.assertEquals(event['level'], "error")
self.assertEquals(event['message'], 'This is a test error')
self.assertFalse('stacktrace' in event)
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test error')
self.assertEquals(msg['params'], ())
def test_logger_warning(self):
self.logger.warning('This is a test warning')
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['logger'], __name__)
self.assertEquals(event['level'], "warning")
self.assertFalse('stacktrace' in event)
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test warning')
self.assertEquals(msg['params'], ())
def test_logger_extra_data(self):
self.logger.info('This is a test info with a url', extra=dict(
data=dict(
url='http://example.com',
),
))
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['extra']['url'], 'http://example.com')
self.assertFalse('stacktrace' in event)
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test info with a url')
self.assertEquals(msg['params'], ())
def test_logger_exc_info(self):
try:
raise ValueError('This is a test ValueError')
except ValueError:
self.logger.info('This is a test info with an exception', exc_info=True)
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
# self.assertEquals(event['message'], 'This is a test info with an exception')
self.assertTrue('stacktrace' in event)
self.assertTrue('exception' in event)
exc = event['exception']
self.assertEquals(exc['type'], 'ValueError')
self.assertEquals(exc['value'], 'This is a test ValueError')
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test info with an exception')
self.assertEquals(msg['params'], ())
def test_message_params(self):
self.logger.info('This is a test of %s', 'args')
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
# self.assertEquals(event['message'], 'This is a test of args')
# print event.keys()
self.assertFalse('stacktrace' in event)
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test of %s')
self.assertEquals(msg['params'], ('args',))
def test_record_stack(self):
self.logger.info('This is a test of stacks', extra={'stack': True})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertTrue('stacktrace' in event)
frames = event['stacktrace']['frames']
self.assertNotEquals(len(frames), 1)
frame = frames[0]
self.assertEquals(frame['module'], __name__)
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test of stacks')
self.assertEquals(msg['params'], ())
self.assertEquals(event['culprit'], 'tests.handlers.logging.logging_tests.test_record_stack')
self.assertEquals(event['message'], 'This is a test of stacks')
def test_no_record_stack(self):
self.logger.info('This is a test of no stacks', extra={'stack': False})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event.get('culprit'), None)
self.assertEquals(event['message'], 'This is a test of no stacks')
self.assertFalse('stacktrace' in event)
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test of no stacks')
self.assertEquals(msg['params'], ())
def test_explicit_stack(self):
self.logger.info('This is a test of stacks', extra={'stack': iter_stack_frames()})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertTrue('culprit' in event, event)
self.assertEquals(event['culprit'], 'tests.handlers.logging.logging_tests.test_explicit_stack')
self.assertTrue('message' in event, event)
self.assertEquals(event['message'], 'This is a test of stacks')
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test of stacks')
self.assertEquals(msg['params'], ())
self.assertTrue('stacktrace' in event)
def test_extra_culprit(self):
self.logger.info('This is a test of stacks', extra={'culprit': 'foo.bar'})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['culprit'], 'foo.bar')
def test_logger_exception(self):
try:
raise ValueError('This is a test ValueError')
except ValueError:
self.logger.exception('This is a test with an exception')
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['message'], 'This is a test with an exception')
self.assertTrue('stacktrace' in event)
self.assertTrue('exception' in event)
exc = event['exception']
self.assertEquals(exc['type'], 'ValueError')
self.assertEquals(exc['value'], 'This is a test ValueError')
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test with an exception')
self.assertEquals(msg['params'], ())
class LoggingHandlerTest(TestCase):
def test_client_arg(self):
client = get_tempstoreclient(include_paths=['tests'])
handler = OpbeatHandler(client)
self.assertEquals(handler.client, client)
def test_client_kwarg(self):
client = get_tempstoreclient(include_paths=['tests'])
handler = OpbeatHandler(client=client)
self.assertEquals(handler.client, client)
def test_invalid_first_arg_type(self):
self.assertRaises(ValueError, OpbeatHandler, object)
| [((287, 341), 'tests.helpers.get_tempstoreclient', 'get_tempstoreclient', ([], {'include_paths': "['tests', 'opbeat']"}), "(include_paths=['tests', 'opbeat'])\n", (306, 341), False, 'from tests.helpers import get_tempstoreclient\n'), ((365, 391), 'opbeat.handlers.logging.OpbeatHandler', 'OpbeatHandler', (['self.client'], {}), '(self.client)\n', (378, 391), False, 'from opbeat.handlers.logging import OpbeatHandler\n'), ((414, 441), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (431, 441), False, 'import logging\n'), ((7366, 7410), 'tests.helpers.get_tempstoreclient', 'get_tempstoreclient', ([], {'include_paths': "['tests']"}), "(include_paths=['tests'])\n", (7385, 7410), False, 'from tests.helpers import get_tempstoreclient\n'), ((7429, 7450), 'opbeat.handlers.logging.OpbeatHandler', 'OpbeatHandler', (['client'], {}), '(client)\n', (7442, 7450), False, 'from opbeat.handlers.logging import OpbeatHandler\n'), ((7552, 7596), 'tests.helpers.get_tempstoreclient', 'get_tempstoreclient', ([], {'include_paths': "['tests']"}), "(include_paths=['tests'])\n", (7571, 7596), False, 'from tests.helpers import get_tempstoreclient\n'), ((7615, 7643), 'opbeat.handlers.logging.OpbeatHandler', 'OpbeatHandler', ([], {'client': 'client'}), '(client=client)\n', (7628, 7643), False, 'from opbeat.handlers.logging import OpbeatHandler\n'), ((5477, 5496), 'opbeat.utils.stacks.iter_stack_frames', 'iter_stack_frames', ([], {}), '()\n', (5494, 5496), False, 'from opbeat.utils.stacks import iter_stack_frames\n')] |
silenius/amnesia | amnesia/modules/mime/model.py | ba5e3ac79a89da599c22206ad1fd17541855f74c | # -*- coding: utf-8 -*-
# pylint: disable=E1101
from sqlalchemy import sql
from sqlalchemy import orm
from sqlalchemy.orm.exc import NoResultFound
from .. import Base
# http://www.iana.org/assignments/media-types/media-types.xhtml
class MimeMajor(Base):
"""Mime major"""
def __init__(self, name):
super().__init__()
self.name = name
class Mime(Base):
def __init__(self, name, template, major):
super().__init__()
self.name = name
self.template = template
self.major = major
@property
def full(self):
return '{0}/{1}'.format(self.major.name, self.name)
@staticmethod
def q_major_minor(dbsession, major, minor):
cond = sql.and_(
MimeMajor.name == major,
Mime.name == minor
)
result = dbsession.execute(
sql.select(Mime).join(Mime.major).options(
orm.contains_eager(Mime.major)
).filter(cond)
).scalar_one_or_none()
return result
###########
# Filters #
###########
@classmethod
def filter_mime(cls, value):
(major, minor) = value.split('/')
cond = sql.and_()
cond.append(MimeMajor.name == major)
if minor and minor != '*':
cond.append(Mime.name == minor)
return cond
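# Illustrative use (not part of the original module): Mime.filter_mime("image/*") builds a
# condition matching every mime whose major type is "image", while "image/png" also pins
# the minor type to "png".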
| [((722, 775), 'sqlalchemy.sql.and_', 'sql.and_', (['(MimeMajor.name == major)', '(Mime.name == minor)'], {}), '(MimeMajor.name == major, Mime.name == minor)\n', (730, 775), False, 'from sqlalchemy import sql\n'), ((1187, 1197), 'sqlalchemy.sql.and_', 'sql.and_', ([], {}), '()\n', (1195, 1197), False, 'from sqlalchemy import sql\n'), ((918, 948), 'sqlalchemy.orm.contains_eager', 'orm.contains_eager', (['Mime.major'], {}), '(Mime.major)\n', (936, 948), False, 'from sqlalchemy import orm\n'), ((859, 875), 'sqlalchemy.sql.select', 'sql.select', (['Mime'], {}), '(Mime)\n', (869, 875), False, 'from sqlalchemy import sql\n')] |
sunwei19910119/DjangoShop | apps/goods/views_base.py | 188102dc8ef9f4751f4eeeb7574e95c8cc270484 | # encoding: utf-8
from goods.models import Goods
from django.views.generic.base import View
class GoodsListView(View):
def get(self, request):
"""
通过django的view实现商品列表页
"""
json_list = []
goods = Goods.objects.all()[:10]
# for good in goods:
# json_dict = {}
# json_dict["name"] = good.name
# json_dict["category"] = good.category.name
# json_dict["market_price"] = good.market_price
# json_dict["add_time"] = good.add_time
# json_list.append(json_dict)
# from django.http import HttpResponse
# import json
# return HttpResponse(json.dumps(json_list),content_type="application/json")
from django.forms.models import model_to_dict
for good in goods:
json_dict = model_to_dict(good)
json_list.append(json_dict)
import json
from django.core import serializers
json_data = serializers.serialize('json', goods)
json_data = json.loads(json_data)
from django.http import HttpResponse, JsonResponse
        # JsonResponse simply adds the json.dumps call and the content_type for us
# return HttpResponse(json.dumps(json_data), content_type="application/json")
        # with the json.loads above commented out, the statement below works as-is
# return HttpResponse(json_data, content_type="application/json")
return JsonResponse(json_data, safe=False)
| [((986, 1022), 'django.core.serializers.serialize', 'serializers.serialize', (['"""json"""', 'goods'], {}), "('json', goods)\n", (1007, 1022), False, 'from django.core import serializers\n'), ((1043, 1064), 'json.loads', 'json.loads', (['json_data'], {}), '(json_data)\n', (1053, 1064), False, 'import json\n'), ((1376, 1411), 'django.http.JsonResponse', 'JsonResponse', (['json_data'], {'safe': '(False)'}), '(json_data, safe=False)\n', (1388, 1411), False, 'from django.http import HttpResponse, JsonResponse\n'), ((241, 260), 'goods.models.Goods.objects.all', 'Goods.objects.all', ([], {}), '()\n', (258, 260), False, 'from goods.models import Goods\n'), ((841, 860), 'django.forms.models.model_to_dict', 'model_to_dict', (['good'], {}), '(good)\n', (854, 860), False, 'from django.forms.models import model_to_dict\n')] |
iyersathya/airlift | launcher/src/main/scripts/bin/launcher.py | 27e981a50cee655ff4e1e13801ba5a55991f93ce | #!/usr/bin/env python
import errno
import os
import platform
import sys
import traceback
from fcntl import flock, LOCK_EX, LOCK_NB
from optparse import OptionParser
from os import O_RDWR, O_CREAT, O_WRONLY, O_APPEND
from os.path import basename, dirname, exists, realpath
from os.path import join as pathjoin
from signal import SIGTERM, SIGKILL
from stat import S_ISLNK
from time import sleep
COMMANDS = ['run', 'start', 'stop', 'restart', 'kill', 'status']
LSB_NOT_RUNNING = 3
LSB_STATUS_UNKNOWN = 4
def find_install_path(f):
"""Find canonical parent of bin/launcher.py"""
if basename(f) != 'launcher.py':
raise Exception("Expected file '%s' to be 'launcher.py' not '%s'" % (f, basename(f)))
p = realpath(dirname(f))
if basename(p) != 'bin':
raise Exception("Expected file '%s' directory to be 'bin' not '%s" % (f, basename(p)))
return dirname(p)
def makedirs(p):
"""Create directory and all intermediate ones"""
try:
os.makedirs(p)
except OSError as e:
if e.errno != errno.EEXIST:
raise
def load_properties(f):
"""Load key/value pairs from a file"""
properties = {}
for line in load_lines(f):
k, v = line.split('=', 1)
properties[k.strip()] = v.strip()
return properties
def load_lines(f):
"""Load lines from a file, ignoring blank or comment lines"""
lines = []
for line in open(f, 'r').readlines():
line = line.strip()
if len(line) > 0 and not line.startswith('#'):
lines.append(line)
return lines
def try_lock(f):
"""Try to open an exclusive lock (inheritable) on a file"""
try:
flock(f, LOCK_EX | LOCK_NB)
return True
except (IOError, OSError): # IOError in Python 2, OSError in Python 3.
return False
def open_read_write(f, mode):
"""Open file in read/write mode (without truncating it)"""
return os.fdopen(os.open(f, O_RDWR | O_CREAT, mode), 'r+')
class Process:
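    """Pid-file helper: a running launcher holds an exclusive flock on the pid file,
    so alive() treats an acquirable lock as "not running" and otherwise checks the
    recorded pid with a null signal."""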
def __init__(self, path):
makedirs(dirname(path))
self.path = path
self.pid_file = open_read_write(path, 0o600)
self.refresh()
def refresh(self):
self.locked = try_lock(self.pid_file)
def clear_pid(self):
assert self.locked, 'pid file not locked by us'
self.pid_file.seek(0)
self.pid_file.truncate()
def write_pid(self, pid):
self.clear_pid()
self.pid_file.write(str(pid) + '\n')
self.pid_file.flush()
def alive(self):
self.refresh()
if self.locked:
return False
pid = self.read_pid()
try:
os.kill(pid, 0)
return True
except OSError as e:
raise Exception('Signaling pid %s failed: %s' % (pid, e))
def read_pid(self):
assert not self.locked, 'pid file is locked by us'
self.pid_file.seek(0)
line = self.pid_file.readline().strip()
if len(line) == 0:
raise Exception("Pid file '%s' is empty" % self.path)
try:
pid = int(line)
except ValueError:
raise Exception("Pid file '%s' contains garbage: %s" % (self.path, line))
if pid <= 0:
raise Exception("Pid file '%s' contains an invalid pid: %s" % (self.path, pid))
return pid
def redirect_stdin_to_devnull():
"""Redirect stdin to /dev/null"""
fd = os.open(os.devnull, O_RDWR)
os.dup2(fd, sys.stdin.fileno())
os.close(fd)
def open_append(f):
"""Open a raw file descriptor in append mode"""
# noinspection PyTypeChecker
return os.open(f, O_WRONLY | O_APPEND | O_CREAT, 0o644)
def redirect_output(fd):
"""Redirect stdout and stderr to a file descriptor"""
os.dup2(fd, sys.stdout.fileno())
os.dup2(fd, sys.stderr.fileno())
def symlink_exists(p):
"""Check if symlink exists and raise if another type of file exists"""
try:
st = os.lstat(p)
if not S_ISLNK(st.st_mode):
raise Exception('Path exists and is not a symlink: %s' % p)
return True
except OSError as e:
if e.errno != errno.ENOENT:
raise
return False
def create_symlink(source, target):
"""Create a symlink, removing the target first if it is a symlink"""
if symlink_exists(target):
os.remove(target)
if exists(source):
os.symlink(source, target)
def create_app_symlinks(options):
"""
Symlink the 'etc' and 'plugin' directory into the data directory.
This is needed to support programs that reference 'etc/xyz' from within
their config files: log.levels-file=etc/log.properties
"""
if options.etc_dir != pathjoin(options.data_dir, 'etc'):
create_symlink(
options.etc_dir,
pathjoin(options.data_dir, 'etc'))
if options.install_path != options.data_dir:
create_symlink(
pathjoin(options.install_path, 'plugin'),
pathjoin(options.data_dir, 'plugin'))
def build_java_execution(options, daemon):
if not exists(options.config_path):
raise Exception('Config file is missing: %s' % options.config_path)
if not exists(options.jvm_config):
raise Exception('JVM config file is missing: %s' % options.jvm_config)
if not exists(options.launcher_config):
raise Exception('Launcher config file is missing: %s' % options.launcher_config)
if options.log_levels_set and not exists(options.log_levels):
raise Exception('Log levels file is missing: %s' % options.log_levels)
properties = options.properties.copy()
if exists(options.log_levels):
properties['log.levels-file'] = options.log_levels
if daemon:
properties['log.output-file'] = options.server_log
properties['log.enable-console'] = 'false'
jvm_properties = load_lines(options.jvm_config)
launcher_properties = load_properties(options.launcher_config)
try:
main_class = launcher_properties['main-class']
except KeyError:
raise Exception("Launcher config is missing 'main-class' property")
properties['config'] = options.config_path
system_properties = ['-D%s=%s' % i for i in properties.items()]
classpath = pathjoin(options.install_path, 'lib', '*')
command = ['java', '-cp', classpath]
command += jvm_properties + system_properties
command += [main_class]
command += options.arguments
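    # Illustrative shape of the resulting invocation:
    #   java -cp <install_path>/lib/* <jvm.config lines> -Dname=value ... <main-class> <program arguments>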
if options.verbose:
print(command)
print("")
env = os.environ.copy()
# set process name: https://github.com/electrum/procname
process_name = launcher_properties.get('process-name', '')
if len(process_name) > 0:
system = platform.system() + '-' + platform.machine()
shim = pathjoin(options.install_path, 'bin', 'procname', system, 'libprocname.so')
if exists(shim):
env['LD_PRELOAD'] = (env.get('LD_PRELOAD', '') + ':' + shim).strip()
env['PROCNAME'] = process_name
return command, env
def run(process, options):
if process.alive():
print('Already running as %s' % process.read_pid())
return
create_app_symlinks(options)
args, env = build_java_execution(options, False)
makedirs(options.data_dir)
os.chdir(options.data_dir)
process.write_pid(os.getpid())
redirect_stdin_to_devnull()
os.execvpe(args[0], args, env)
def start(process, options):
if process.alive():
print('Already running as %s' % process.read_pid())
return
create_app_symlinks(options)
args, env = build_java_execution(options, True)
makedirs(dirname(options.launcher_log))
log = open_append(options.launcher_log)
makedirs(options.data_dir)
os.chdir(options.data_dir)
pid = os.fork()
if pid > 0:
process.write_pid(pid)
print('Started as %s' % pid)
return
if hasattr(os, "set_inheritable"):
# See https://docs.python.org/3/library/os.html#inheritance-of-file-descriptors
# Since Python 3.4
os.set_inheritable(process.pid_file.fileno(), True)
os.setsid()
redirect_stdin_to_devnull()
redirect_output(log)
os.close(log)
os.execvpe(args[0], args, env)
def terminate(process, signal, message):
if not process.alive():
print('Not running')
return
pid = process.read_pid()
while True:
try:
os.kill(pid, signal)
except OSError as e:
if e.errno != errno.ESRCH:
raise Exception('Signaling pid %s failed: %s' % (pid, e))
if not process.alive():
process.clear_pid()
break
sleep(0.1)
print('%s %s' % (message, pid))
def stop(process):
terminate(process, SIGTERM, 'Stopped')
def kill(process):
terminate(process, SIGKILL, 'Killed')
def status(process):
if not process.alive():
print('Not running')
sys.exit(LSB_NOT_RUNNING)
print('Running as %s' % process.read_pid())
def handle_command(command, options):
process = Process(options.pid_file)
if command == 'run':
run(process, options)
elif command == 'start':
start(process, options)
elif command == 'stop':
stop(process)
elif command == 'restart':
stop(process)
start(process, options)
elif command == 'kill':
kill(process)
elif command == 'status':
status(process)
else:
raise AssertionError('Unhandled command: ' + command)
def create_parser():
commands = 'Commands: ' + ', '.join(COMMANDS)
parser = OptionParser(prog='launcher', usage='usage: %prog [options] command', description=commands)
parser.add_option('-v', '--verbose', action='store_true', default=False, help='Run verbosely')
parser.add_option('--etc-dir', metavar='DIR', help='Defaults to INSTALL_PATH/etc')
parser.add_option('--launcher-config', metavar='FILE', help='Defaults to INSTALL_PATH/bin/launcher.properties')
parser.add_option('--node-config', metavar='FILE', help='Defaults to ETC_DIR/node.properties')
parser.add_option('--jvm-config', metavar='FILE', help='Defaults to ETC_DIR/jvm.config')
parser.add_option('--config', metavar='FILE', help='Defaults to ETC_DIR/config.properties')
parser.add_option('--log-levels-file', metavar='FILE', help='Defaults to ETC_DIR/log.properties')
parser.add_option('--data-dir', metavar='DIR', help='Defaults to INSTALL_PATH')
parser.add_option('--pid-file', metavar='FILE', help='Defaults to DATA_DIR/var/run/launcher.pid')
parser.add_option('--arg', action='append', metavar='ARG', dest='arguments', help='Add a program argument of the Java application')
parser.add_option('--launcher-log-file', metavar='FILE', help='Defaults to DATA_DIR/var/log/launcher.log (only in daemon mode)')
parser.add_option('--server-log-file', metavar='FILE', help='Defaults to DATA_DIR/var/log/server.log (only in daemon mode)')
parser.add_option('-D', action='append', metavar='NAME=VALUE', dest='properties', help='Set a Java system property')
return parser
def parse_properties(parser, args):
properties = {}
for arg in args:
if '=' not in arg:
parser.error('property is malformed: %s' % arg)
key, value = [i.strip() for i in arg.split('=', 1)]
if key == 'config':
parser.error('cannot specify config using -D option (use --config)')
if key == 'log.output-file':
parser.error('cannot specify server log using -D option (use --server-log-file)')
if key == 'log.levels-file':
parser.error('cannot specify log levels using -D option (use --log-levels-file)')
properties[key] = value
return properties
def print_options(options):
if options.verbose:
for i in sorted(vars(options)):
print("%-15s = %s" % (i, getattr(options, i)))
print("")
class Options:
pass
def main():
parser = create_parser()
(options, args) = parser.parse_args()
if len(args) != 1:
if len(args) == 0:
parser.error('command name not specified')
else:
parser.error('too many arguments')
command = args[0]
if command not in COMMANDS:
parser.error('unsupported command: %s' % command)
try:
install_path = find_install_path(sys.argv[0])
except Exception as e:
print('ERROR: %s' % e)
sys.exit(LSB_STATUS_UNKNOWN)
o = Options()
o.verbose = options.verbose
o.install_path = install_path
o.launcher_config = realpath(options.launcher_config or pathjoin(o.install_path, 'bin/launcher.properties'))
o.etc_dir = realpath(options.etc_dir or pathjoin(o.install_path, 'etc'))
o.node_config = realpath(options.node_config or pathjoin(o.etc_dir, 'node.properties'))
o.jvm_config = realpath(options.jvm_config or pathjoin(o.etc_dir, 'jvm.config'))
o.config_path = realpath(options.config or pathjoin(o.etc_dir, 'config.properties'))
o.log_levels = realpath(options.log_levels_file or pathjoin(o.etc_dir, 'log.properties'))
o.log_levels_set = bool(options.log_levels_file)
if options.node_config and not exists(o.node_config):
parser.error('Node config file is missing: %s' % o.node_config)
node_properties = {}
if exists(o.node_config):
node_properties = load_properties(o.node_config)
data_dir = node_properties.get('node.data-dir')
o.data_dir = realpath(options.data_dir or data_dir or o.install_path)
o.pid_file = realpath(options.pid_file or pathjoin(o.data_dir, 'var/run/launcher.pid'))
o.launcher_log = realpath(options.launcher_log_file or pathjoin(o.data_dir, 'var/log/launcher.log'))
o.server_log = realpath(options.server_log_file or pathjoin(o.data_dir, 'var/log/server.log'))
o.properties = parse_properties(parser, options.properties or {})
for k, v in node_properties.items():
if k not in o.properties:
o.properties[k] = v
o.arguments = options.arguments or []
if o.verbose:
print_options(o)
try:
handle_command(command, o)
except SystemExit:
raise
except Exception as e:
if o.verbose:
traceback.print_exc()
else:
print('ERROR: %s' % e)
sys.exit(LSB_STATUS_UNKNOWN)
if __name__ == '__main__':
main()
| [((879, 889), 'os.path.dirname', 'dirname', (['p'], {}), '(p)\n', (886, 889), False, 'from os.path import basename, dirname, exists, realpath\n'), ((3408, 3435), 'os.open', 'os.open', (['os.devnull', 'O_RDWR'], {}), '(os.devnull, O_RDWR)\n', (3415, 3435), False, 'import os\n'), ((3476, 3488), 'os.close', 'os.close', (['fd'], {}), '(fd)\n', (3484, 3488), False, 'import os\n'), ((3607, 3653), 'os.open', 'os.open', (['f', '(O_WRONLY | O_APPEND | O_CREAT)', '(420)'], {}), '(f, O_WRONLY | O_APPEND | O_CREAT, 420)\n', (3614, 3653), False, 'import os\n'), ((4348, 4362), 'os.path.exists', 'exists', (['source'], {}), '(source)\n', (4354, 4362), False, 'from os.path import basename, dirname, exists, realpath\n'), ((5605, 5631), 'os.path.exists', 'exists', (['options.log_levels'], {}), '(options.log_levels)\n', (5611, 5631), False, 'from os.path import basename, dirname, exists, realpath\n'), ((6233, 6275), 'os.path.join', 'pathjoin', (['options.install_path', '"""lib"""', '"""*"""'], {}), "(options.install_path, 'lib', '*')\n", (6241, 6275), True, 'from os.path import join as pathjoin\n'), ((6506, 6523), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (6521, 6523), False, 'import os\n'), ((7257, 7283), 'os.chdir', 'os.chdir', (['options.data_dir'], {}), '(options.data_dir)\n', (7265, 7283), False, 'import os\n'), ((7358, 7388), 'os.execvpe', 'os.execvpe', (['args[0]', 'args', 'env'], {}), '(args[0], args, env)\n', (7368, 7388), False, 'import os\n'), ((7730, 7756), 'os.chdir', 'os.chdir', (['options.data_dir'], {}), '(options.data_dir)\n', (7738, 7756), False, 'import os\n'), ((7768, 7777), 'os.fork', 'os.fork', ([], {}), '()\n', (7775, 7777), False, 'import os\n'), ((8097, 8108), 'os.setsid', 'os.setsid', ([], {}), '()\n', (8106, 8108), False, 'import os\n'), ((8171, 8184), 'os.close', 'os.close', (['log'], {}), '(log)\n', (8179, 8184), False, 'import os\n'), ((8190, 8220), 'os.execvpe', 'os.execvpe', (['args[0]', 'args', 'env'], {}), '(args[0], args, env)\n', (8200, 8220), False, 'import os\n'), ((9593, 9688), 'optparse.OptionParser', 'OptionParser', ([], {'prog': '"""launcher"""', 'usage': '"""usage: %prog [options] command"""', 'description': 'commands'}), "(prog='launcher', usage='usage: %prog [options] command',\n description=commands)\n", (9605, 9688), False, 'from optparse import OptionParser\n'), ((13325, 13346), 'os.path.exists', 'exists', (['o.node_config'], {}), '(o.node_config)\n', (13331, 13346), False, 'from os.path import basename, dirname, exists, realpath\n'), ((13475, 13531), 'os.path.realpath', 'realpath', (['(options.data_dir or data_dir or o.install_path)'], {}), '(options.data_dir or data_dir or o.install_path)\n', (13483, 13531), False, 'from os.path import basename, dirname, exists, realpath\n'), ((591, 602), 'os.path.basename', 'basename', (['f'], {}), '(f)\n', (599, 602), False, 'from os.path import basename, dirname, exists, realpath\n'), ((732, 742), 'os.path.dirname', 'dirname', (['f'], {}), '(f)\n', (739, 742), False, 'from os.path import basename, dirname, exists, realpath\n'), ((751, 762), 'os.path.basename', 'basename', (['p'], {}), '(p)\n', (759, 762), False, 'from os.path import basename, dirname, exists, realpath\n'), ((979, 993), 'os.makedirs', 'os.makedirs', (['p'], {}), '(p)\n', (990, 993), False, 'import os\n'), ((1666, 1693), 'fcntl.flock', 'flock', (['f', '(LOCK_EX | LOCK_NB)'], {}), '(f, LOCK_EX | LOCK_NB)\n', (1671, 1693), False, 'from fcntl import flock, LOCK_EX, LOCK_NB\n'), ((1927, 1961), 'os.open', 'os.open', (['f', '(O_RDWR | O_CREAT)', 
'mode'], {}), '(f, O_RDWR | O_CREAT, mode)\n', (1934, 1961), False, 'import os\n'), ((3452, 3470), 'sys.stdin.fileno', 'sys.stdin.fileno', ([], {}), '()\n', (3468, 3470), False, 'import sys\n'), ((3757, 3776), 'sys.stdout.fileno', 'sys.stdout.fileno', ([], {}), '()\n', (3774, 3776), False, 'import sys\n'), ((3794, 3813), 'sys.stderr.fileno', 'sys.stderr.fileno', ([], {}), '()\n', (3811, 3813), False, 'import sys\n'), ((3937, 3948), 'os.lstat', 'os.lstat', (['p'], {}), '(p)\n', (3945, 3948), False, 'import os\n'), ((4323, 4340), 'os.remove', 'os.remove', (['target'], {}), '(target)\n', (4332, 4340), False, 'import os\n'), ((4372, 4398), 'os.symlink', 'os.symlink', (['source', 'target'], {}), '(source, target)\n', (4382, 4398), False, 'import os\n'), ((4683, 4716), 'os.path.join', 'pathjoin', (['options.data_dir', '"""etc"""'], {}), "(options.data_dir, 'etc')\n", (4691, 4716), True, 'from os.path import join as pathjoin\n'), ((5052, 5079), 'os.path.exists', 'exists', (['options.config_path'], {}), '(options.config_path)\n', (5058, 5079), False, 'from os.path import basename, dirname, exists, realpath\n'), ((5168, 5194), 'os.path.exists', 'exists', (['options.jvm_config'], {}), '(options.jvm_config)\n', (5174, 5194), False, 'from os.path import basename, dirname, exists, realpath\n'), ((5286, 5317), 'os.path.exists', 'exists', (['options.launcher_config'], {}), '(options.launcher_config)\n', (5292, 5317), False, 'from os.path import basename, dirname, exists, realpath\n'), ((6756, 6831), 'os.path.join', 'pathjoin', (['options.install_path', '"""bin"""', '"""procname"""', 'system', '"""libprocname.so"""'], {}), "(options.install_path, 'bin', 'procname', system, 'libprocname.so')\n", (6764, 6831), True, 'from os.path import join as pathjoin\n'), ((6843, 6855), 'os.path.exists', 'exists', (['shim'], {}), '(shim)\n', (6849, 6855), False, 'from os.path import basename, dirname, exists, realpath\n'), ((7307, 7318), 'os.getpid', 'os.getpid', ([], {}), '()\n', (7316, 7318), False, 'import os\n'), ((7619, 7648), 'os.path.dirname', 'dirname', (['options.launcher_log'], {}), '(options.launcher_log)\n', (7626, 7648), False, 'from os.path import basename, dirname, exists, realpath\n'), ((8663, 8673), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (8668, 8673), False, 'from time import sleep\n'), ((8926, 8951), 'sys.exit', 'sys.exit', (['LSB_NOT_RUNNING'], {}), '(LSB_NOT_RUNNING)\n', (8934, 8951), False, 'import sys\n'), ((2033, 2046), 'os.path.dirname', 'dirname', (['path'], {}), '(path)\n', (2040, 2046), False, 'from os.path import basename, dirname, exists, realpath\n'), ((2645, 2660), 'os.kill', 'os.kill', (['pid', '(0)'], {}), '(pid, 0)\n', (2652, 2660), False, 'import os\n'), ((3964, 3983), 'stat.S_ISLNK', 'S_ISLNK', (['st.st_mode'], {}), '(st.st_mode)\n', (3971, 3983), False, 'from stat import S_ISLNK\n'), ((4783, 4816), 'os.path.join', 'pathjoin', (['options.data_dir', '"""etc"""'], {}), "(options.data_dir, 'etc')\n", (4791, 4816), True, 'from os.path import join as pathjoin\n'), ((4904, 4944), 'os.path.join', 'pathjoin', (['options.install_path', '"""plugin"""'], {}), "(options.install_path, 'plugin')\n", (4912, 4944), True, 'from os.path import join as pathjoin\n'), ((4958, 4994), 'os.path.join', 'pathjoin', (['options.data_dir', '"""plugin"""'], {}), "(options.data_dir, 'plugin')\n", (4966, 4994), True, 'from os.path import join as pathjoin\n'), ((5446, 5472), 'os.path.exists', 'exists', (['options.log_levels'], {}), '(options.log_levels)\n', (5452, 5472), False, 'from os.path import 
basename, dirname, exists, realpath\n'), ((6722, 6740), 'platform.machine', 'platform.machine', ([], {}), '()\n', (6738, 6740), False, 'import platform\n'), ((8408, 8428), 'os.kill', 'os.kill', (['pid', 'signal'], {}), '(pid, signal)\n', (8415, 8428), False, 'import os\n'), ((12444, 12472), 'sys.exit', 'sys.exit', (['LSB_STATUS_UNKNOWN'], {}), '(LSB_STATUS_UNKNOWN)\n', (12452, 12472), False, 'import sys\n'), ((12618, 12669), 'os.path.join', 'pathjoin', (['o.install_path', '"""bin/launcher.properties"""'], {}), "(o.install_path, 'bin/launcher.properties')\n", (12626, 12669), True, 'from os.path import join as pathjoin\n'), ((12715, 12746), 'os.path.join', 'pathjoin', (['o.install_path', '"""etc"""'], {}), "(o.install_path, 'etc')\n", (12723, 12746), True, 'from os.path import join as pathjoin\n'), ((12800, 12838), 'os.path.join', 'pathjoin', (['o.etc_dir', '"""node.properties"""'], {}), "(o.etc_dir, 'node.properties')\n", (12808, 12838), True, 'from os.path import join as pathjoin\n'), ((12890, 12923), 'os.path.join', 'pathjoin', (['o.etc_dir', '"""jvm.config"""'], {}), "(o.etc_dir, 'jvm.config')\n", (12898, 12923), True, 'from os.path import join as pathjoin\n'), ((12972, 13012), 'os.path.join', 'pathjoin', (['o.etc_dir', '"""config.properties"""'], {}), "(o.etc_dir, 'config.properties')\n", (12980, 13012), True, 'from os.path import join as pathjoin\n'), ((13069, 13106), 'os.path.join', 'pathjoin', (['o.etc_dir', '"""log.properties"""'], {}), "(o.etc_dir, 'log.properties')\n", (13077, 13106), True, 'from os.path import join as pathjoin\n'), ((13197, 13218), 'os.path.exists', 'exists', (['o.node_config'], {}), '(o.node_config)\n', (13203, 13218), False, 'from os.path import basename, dirname, exists, realpath\n'), ((13579, 13623), 'os.path.join', 'pathjoin', (['o.data_dir', '"""var/run/launcher.pid"""'], {}), "(o.data_dir, 'var/run/launcher.pid')\n", (13587, 13623), True, 'from os.path import join as pathjoin\n'), ((13684, 13728), 'os.path.join', 'pathjoin', (['o.data_dir', '"""var/log/launcher.log"""'], {}), "(o.data_dir, 'var/log/launcher.log')\n", (13692, 13728), True, 'from os.path import join as pathjoin\n'), ((13785, 13827), 'os.path.join', 'pathjoin', (['o.data_dir', '"""var/log/server.log"""'], {}), "(o.data_dir, 'var/log/server.log')\n", (13793, 13827), True, 'from os.path import join as pathjoin\n'), ((14316, 14344), 'sys.exit', 'sys.exit', (['LSB_STATUS_UNKNOWN'], {}), '(LSB_STATUS_UNKNOWN)\n', (14324, 14344), False, 'import sys\n'), ((6696, 6713), 'platform.system', 'platform.system', ([], {}), '()\n', (6711, 6713), False, 'import platform\n'), ((14237, 14258), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (14256, 14258), False, 'import traceback\n'), ((701, 712), 'os.path.basename', 'basename', (['f'], {}), '(f)\n', (709, 712), False, 'from os.path import basename, dirname, exists, realpath\n'), ((854, 865), 'os.path.basename', 'basename', (['p'], {}), '(p)\n', (862, 865), False, 'from os.path import basename, dirname, exists, realpath\n')] |
vectorcrumb/Ballbot_IEE2913 | code/sim/test.py | 5ab54825b2bfadae251e2c6bfaaa7f8fcdae77a0 | from direct.showbase.ShowBase import ShowBase
from direct.task import Task
from direct.actor.Actor import Actor
import numpy as np
class MyApp(ShowBase):
def __init__(self):
ShowBase.__init__(self)
# Load environment model
self.scene = self.loader.loadModel("models/environment")
# Reparent model to render
self.scene.reparentTo(self.render)
# Scale and position model
self.scene.setScale(0.25, 0.25, 0.25)
self.scene.setPos(-8, 42, 0)
# Add spinCameraTask to task manager to execute
self.taskMgr.add(self.spinCameraTask, "SpinCameraTask")
# Load and transform panda actor
self.pandaActor = Actor("models/panda-model", {"walk": "models/panda-walk4"})
self.pandaActor.setScale(0.005, 0.005, 0.005)
self.pandaActor.reparentTo(self.render)
# Loop animation
self.pandaActor.loop("walk")
def spinCameraTask(self, task):
angleDegs = task.time * 6.0
angleRads = angleDegs * (np.pi / 180.0)
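        # Move the camera along a circle of radius 20 at height 3 and rotate its heading so it keeps roughly facing the scene center.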
self.camera.setPos(20*np.sin(angleRads), -20.0 * np.cos(angleRads), 3)
self.camera.setHpr(angleDegs, 0, 0)
return Task.cont
app = MyApp()
app.run() | [((188, 211), 'direct.showbase.ShowBase.ShowBase.__init__', 'ShowBase.__init__', (['self'], {}), '(self)\n', (205, 211), False, 'from direct.showbase.ShowBase import ShowBase\n'), ((695, 754), 'direct.actor.Actor.Actor', 'Actor', (['"""models/panda-model"""', "{'walk': 'models/panda-walk4'}"], {}), "('models/panda-model', {'walk': 'models/panda-walk4'})\n", (700, 754), False, 'from direct.actor.Actor import Actor\n'), ((1071, 1088), 'numpy.sin', 'np.sin', (['angleRads'], {}), '(angleRads)\n', (1077, 1088), True, 'import numpy as np\n'), ((1098, 1115), 'numpy.cos', 'np.cos', (['angleRads'], {}), '(angleRads)\n', (1104, 1115), True, 'import numpy as np\n')] |
nationalarchives/tdr-service-unavailable | run_locally.py | fcb5930f57459b1e4e6d2d14244ebeecee2f6907 | from app import app
app.run()
| [((22, 31), 'app.app.run', 'app.run', ([], {}), '()\n', (29, 31), False, 'from app import app\n')] |
briangrahamww/pandas-profiling | src/pandas_profiling/model/describe.py | 62f8e3fd81720d444041069191c4aacd03d79ad5 | """Organize the calculation of statistics for each series in this DataFrame."""
import warnings
from datetime import datetime
from typing import Optional
import pandas as pd
from tqdm.auto import tqdm
from visions import VisionsTypeset
from pandas_profiling.config import Settings
from pandas_profiling.model.correlations import calculate_correlation
from pandas_profiling.model.duplicates import get_duplicates
from pandas_profiling.model.sample import Sample, get_sample
from pandas_profiling.model.summarizer import BaseSummarizer
from pandas_profiling.model.summary import (
get_messages,
get_missing_diagrams,
get_scatter_matrix,
get_series_descriptions,
get_table_stats,
)
from pandas_profiling.version import __version__
def describe(
config: Settings,
df: pd.DataFrame,
summarizer: BaseSummarizer,
typeset: VisionsTypeset,
sample: Optional[dict] = None,
) -> dict:
"""Calculate the statistics for each series in this DataFrame.
Args:
        config: report Settings object.
        df: DataFrame to describe.
        summarizer: summarizer used to compute the per-series statistics.
        typeset: visions typeset used to infer variable types.
        sample: optional, dict with a custom sample to display.
Returns:
This function returns a dictionary containing:
- table: overall statistics.
- variables: descriptions per series.
- correlations: correlation matrices.
- missing: missing value diagrams.
- messages: direct special attention to these patterns in your data.
- package: package details.
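
    Illustrative usage (a sketch, not from the original source; ``config``,
    ``summarizer`` and ``typeset`` are assumed to be set up as in the signature above):

        description = describe(config, df, summarizer, typeset)
        table_stats = description["table"]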
"""
if df is None:
raise ValueError("Can not describe a `lazy` ProfileReport without a DataFrame.")
if not isinstance(df, pd.DataFrame):
warnings.warn("df is not of type pandas.DataFrame")
disable_progress_bar = not config.progress_bar
date_start = datetime.utcnow()
correlation_names = [
correlation_name
for correlation_name in [
"pearson",
"spearman",
"kendall",
"phi_k",
"cramers",
]
if config.correlations[correlation_name].calculate
]
number_of_tasks = 8 + len(df.columns) + len(correlation_names)
with tqdm(
total=number_of_tasks, desc="Summarize dataset", disable=disable_progress_bar
) as pbar:
series_description = get_series_descriptions(
config, df, summarizer, typeset, pbar
)
pbar.set_postfix_str("Get variable types")
variables = {
column: description["type"]
for column, description in series_description.items()
}
supported_columns = [
column
for column, type_name in variables.items()
if type_name != "Unsupported"
]
interval_columns = [
column for column, type_name in variables.items() if type_name == "Numeric"
]
pbar.update()
# Get correlations
correlations = {}
for correlation_name in correlation_names:
pbar.set_postfix_str(f"Calculate {correlation_name} correlation")
correlations[correlation_name] = calculate_correlation(
config, df, correlation_name, series_description
)
pbar.update()
# make sure correlations is not None
correlations = {
key: value for key, value in correlations.items() if value is not None
}
# Scatter matrix
pbar.set_postfix_str("Get scatter matrix")
scatter_matrix = get_scatter_matrix(config, df, interval_columns)
pbar.update()
# Table statistics
pbar.set_postfix_str("Get table statistics")
table_stats = get_table_stats(config, df, series_description)
pbar.update()
# missing diagrams
pbar.set_postfix_str("Get missing diagrams")
missing = get_missing_diagrams(config, df, table_stats)
pbar.update()
# Sample
pbar.set_postfix_str("Take sample")
if sample is None:
samples = get_sample(config, df)
else:
if "name" not in sample:
sample["name"] = None
if "caption" not in sample:
sample["caption"] = None
samples = [
Sample(
id="custom",
data=sample["data"],
name=sample["name"],
caption=sample["caption"],
)
]
pbar.update()
# Duplicates
pbar.set_postfix_str("Locating duplicates")
metrics, duplicates = get_duplicates(config, df, supported_columns)
table_stats.update(metrics)
pbar.update()
# Messages
pbar.set_postfix_str("Get messages/warnings")
messages = get_messages(config, table_stats, series_description, correlations)
pbar.update()
pbar.set_postfix_str("Get reproduction details")
package = {
"pandas_profiling_version": __version__,
"pandas_profiling_config": config.json(),
}
pbar.update()
pbar.set_postfix_str("Completed")
date_end = datetime.utcnow()
analysis = {
"title": config.title,
"date_start": date_start,
"date_end": date_end,
"duration": date_end - date_start,
}
return {
# Analysis metadata
"analysis": analysis,
# Overall dataset description
"table": table_stats,
# Per variable descriptions
"variables": series_description,
# Bivariate relations
"scatter": scatter_matrix,
# Correlation matrices
"correlations": correlations,
# Missing values
"missing": missing,
# Warnings
"messages": messages,
# Package
"package": package,
# Sample
"sample": samples,
# Duplicates
"duplicates": duplicates,
}
| [((1774, 1791), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1789, 1791), False, 'from datetime import datetime\n'), ((5130, 5147), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (5145, 5147), False, 'from datetime import datetime\n'), ((1652, 1703), 'warnings.warn', 'warnings.warn', (['"""df is not of type pandas.DataFrame"""'], {}), "('df is not of type pandas.DataFrame')\n", (1665, 1703), False, 'import warnings\n'), ((2145, 2233), 'tqdm.auto.tqdm', 'tqdm', ([], {'total': 'number_of_tasks', 'desc': '"""Summarize dataset"""', 'disable': 'disable_progress_bar'}), "(total=number_of_tasks, desc='Summarize dataset', disable=\n disable_progress_bar)\n", (2149, 2233), False, 'from tqdm.auto import tqdm\n'), ((2281, 2343), 'pandas_profiling.model.summary.get_series_descriptions', 'get_series_descriptions', (['config', 'df', 'summarizer', 'typeset', 'pbar'], {}), '(config, df, summarizer, typeset, pbar)\n', (2304, 2343), False, 'from pandas_profiling.model.summary import get_messages, get_missing_diagrams, get_scatter_matrix, get_series_descriptions, get_table_stats\n'), ((3483, 3531), 'pandas_profiling.model.summary.get_scatter_matrix', 'get_scatter_matrix', (['config', 'df', 'interval_columns'], {}), '(config, df, interval_columns)\n', (3501, 3531), False, 'from pandas_profiling.model.summary import get_messages, get_missing_diagrams, get_scatter_matrix, get_series_descriptions, get_table_stats\n'), ((3657, 3704), 'pandas_profiling.model.summary.get_table_stats', 'get_table_stats', (['config', 'df', 'series_description'], {}), '(config, df, series_description)\n', (3672, 3704), False, 'from pandas_profiling.model.summary import get_messages, get_missing_diagrams, get_scatter_matrix, get_series_descriptions, get_table_stats\n'), ((3826, 3871), 'pandas_profiling.model.summary.get_missing_diagrams', 'get_missing_diagrams', (['config', 'df', 'table_stats'], {}), '(config, df, table_stats)\n', (3846, 3871), False, 'from pandas_profiling.model.summary import get_messages, get_missing_diagrams, get_scatter_matrix, get_series_descriptions, get_table_stats\n'), ((4567, 4612), 'pandas_profiling.model.duplicates.get_duplicates', 'get_duplicates', (['config', 'df', 'supported_columns'], {}), '(config, df, supported_columns)\n', (4581, 4612), False, 'from pandas_profiling.model.duplicates import get_duplicates\n'), ((4764, 4831), 'pandas_profiling.model.summary.get_messages', 'get_messages', (['config', 'table_stats', 'series_description', 'correlations'], {}), '(config, table_stats, series_description, correlations)\n', (4776, 4831), False, 'from pandas_profiling.model.summary import get_messages, get_missing_diagrams, get_scatter_matrix, get_series_descriptions, get_table_stats\n'), ((3089, 3160), 'pandas_profiling.model.correlations.calculate_correlation', 'calculate_correlation', (['config', 'df', 'correlation_name', 'series_description'], {}), '(config, df, correlation_name, series_description)\n', (3110, 3160), False, 'from pandas_profiling.model.correlations import calculate_correlation\n'), ((4005, 4027), 'pandas_profiling.model.sample.get_sample', 'get_sample', (['config', 'df'], {}), '(config, df)\n', (4015, 4027), False, 'from pandas_profiling.model.sample import Sample, get_sample\n'), ((4239, 4332), 'pandas_profiling.model.sample.Sample', 'Sample', ([], {'id': '"""custom"""', 'data': "sample['data']", 'name': "sample['name']", 'caption': "sample['caption']"}), "(id='custom', data=sample['data'], name=sample['name'], caption=\n sample['caption'])\n", 
(4245, 4332), False, 'from pandas_profiling.model.sample import Sample, get_sample\n')] |
Ruanxingzhi/King-of-Pigeon | maxOfferNum.py | 38d6191c93c2d485b2e5cf163f06b9f2a5dacbec | import operator
class Std(object):
def __init__(self):
self.name = ''
self.offerNum = 0
self.offers = []
stds = []
stdsDict = {}
index = 0
def readStd(name,camper):
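    # Register a student the first time the name appears, then count each camp at most once as a distinct offer.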
global stds
global stdsDict
global index
if name not in stdsDict:
newStd = Std()
newStd.name = name
stds.append(newStd)
stdsDict[name] = index
index += 1
if camper not in stds[stdsDict[name]].offers:
stds[stdsDict[name]].offers.append(camper)
stds[stdsDict[name]].offerNum += 1
if __name__ == "__main__":
campers = ['PKUxk','THUsz_ai','THUsz_cs','THUsz_data','USTC_cs']
for camper in campers:
filename = camper + '.txt'
with open('data/%s'%(filename), "r") as f:
data = f.readlines()
for std in data:
readStd(std,camper)
cmpfun = operator.attrgetter('offerNum','name')
stds.sort(key = cmpfun,reverse = True)
for std in stds:
if std.name[-1] == '\n':
std.name = std.name[:-1]
        print(f'{std.name} received {std.offerNum} offers: {std.offers}') | [((872, 911), 'operator.attrgetter', 'operator.attrgetter', (['"""offerNum"""', '"""name"""'], {}), "('offerNum', 'name')\n", (891, 911), False, 'import operator\n')]
MrMonk3y/vimrc | tabnine-vim/third_party/ycmd/third_party/python-future/setup.py | 950230fb3fd7991d1234c2ab516ec03245945677 | #!/usr/bin/env python
from __future__ import absolute_import, print_function
import os
import os.path
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
NAME = "future"
PACKAGES = ["future",
"future.builtins",
"future.types",
"future.standard_library",
"future.backports",
"future.backports.email",
"future.backports.email.mime",
"future.backports.html",
"future.backports.http",
"future.backports.test",
"future.backports.urllib",
"future.backports.xmlrpc",
"future.moves",
"future.moves.dbm",
"future.moves.html",
"future.moves.http",
"future.moves.test",
"future.moves.tkinter",
"future.moves.urllib",
"future.moves.xmlrpc",
"future.tests", # for future.tests.base
# "future.tests.test_email",
"future.utils",
"past",
"past.builtins",
"past.types",
"past.utils",
# "past.tests",
"past.translation",
"libfuturize",
"libfuturize.fixes",
"libpasteurize",
"libpasteurize.fixes",
]
# PEP 3108 stdlib moves:
if sys.version_info[:2] < (3, 0):
PACKAGES += [
"builtins",
"configparser",
"copyreg",
"html",
"http",
"queue",
"reprlib",
"socketserver",
"tkinter",
"winreg",
"xmlrpc",
"_dummy_thread",
"_markupbase",
"_thread",
]
PACKAGE_DATA = {'': [
'README.rst',
'LICENSE.txt',
'futurize.py',
'pasteurize.py',
'discover_tests.py',
'check_rst.sh',
'TESTING.txt',
],
'tests': ['*.py'],
}
REQUIRES = []
TEST_REQUIRES = []
if sys.version_info[:2] == (2, 6):
REQUIRES += ['importlib', 'argparse']
TEST_REQUIRES += ['unittest2']
import src.future
VERSION = src.future.__version__
DESCRIPTION = "Clean single-source support for Python 3 and 2"
LONG_DESC = src.future.__doc__
AUTHOR = "Ed Schofield"
AUTHOR_EMAIL = "[email protected]"
URL="https://python-future.org"
LICENSE = "MIT"
KEYWORDS = "future past python3 migration futurize backport six 2to3 modernize pasteurize 3to2"
CLASSIFIERS = [
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"License :: OSI Approved",
"License :: OSI Approved :: MIT License",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
]
setup_kwds = {}
# * Important *
# We refuse to install from a stale build folder to avoid breaking the
# user's Py3 installation if they run "python2 setup.py
# build" and then "python3 setup.py install".
try:
# If the user happens to run:
# python2 setup.py build
# python3 setup.py install
# then folders like "configparser" will be in build/lib.
# If so, we CANNOT let the user install this, because
# this may break his/her Python 3 install, depending on the folder order in
# sys.path. (Running "import configparser" etc. may pick up our Py2
# substitute packages, instead of the intended system stdlib modules.)
SYSTEM_MODULES = set([
'_dummy_thread',
'_markupbase',
'_thread',
'builtins',
'configparser',
'copyreg',
'html',
'http',
'queue',
'reprlib',
'socketserver',
'tkinter',
'winreg',
'xmlrpc'
])
if sys.version_info[0] >= 3:
# Do any of the above folders exist in build/lib?
files = os.listdir(os.path.join('build', 'lib'))
if len(set(files) & set(SYSTEM_MODULES)) > 0:
print('ERROR: Your build folder is in an inconsistent state for '
'a Python 3.x install. Please remove it manually and run '
'setup.py again.', file=sys.stderr)
sys.exit(1)
except OSError:
pass
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
entry_points={
'console_scripts': [
'futurize = libfuturize.main:main',
'pasteurize = libpasteurize.main:main'
]
},
package_dir={'': 'src'},
packages=PACKAGES,
package_data=PACKAGE_DATA,
include_package_data=True,
install_requires=REQUIRES,
classifiers=CLASSIFIERS,
test_suite = "discover_tests",
tests_require=TEST_REQUIRES,
**setup_kwds
)
| [((4843, 5381), 'distutils.core.setup', 'setup', ([], {'name': 'NAME', 'version': 'VERSION', 'author': 'AUTHOR', 'author_email': 'AUTHOR_EMAIL', 'url': 'URL', 'description': 'DESCRIPTION', 'long_description': 'LONG_DESC', 'license': 'LICENSE', 'keywords': 'KEYWORDS', 'entry_points': "{'console_scripts': ['futurize = libfuturize.main:main',\n 'pasteurize = libpasteurize.main:main']}", 'package_dir': "{'': 'src'}", 'packages': 'PACKAGES', 'package_data': 'PACKAGE_DATA', 'include_package_data': '(True)', 'install_requires': 'REQUIRES', 'classifiers': 'CLASSIFIERS', 'test_suite': '"""discover_tests"""', 'tests_require': 'TEST_REQUIRES'}), "(name=NAME, version=VERSION, author=AUTHOR, author_email=AUTHOR_EMAIL,\n url=URL, description=DESCRIPTION, long_description=LONG_DESC, license=\n LICENSE, keywords=KEYWORDS, entry_points={'console_scripts': [\n 'futurize = libfuturize.main:main',\n 'pasteurize = libpasteurize.main:main']}, package_dir={'': 'src'},\n packages=PACKAGES, package_data=PACKAGE_DATA, include_package_data=True,\n install_requires=REQUIRES, classifiers=CLASSIFIERS, test_suite=\n 'discover_tests', tests_require=TEST_REQUIRES, **setup_kwds)\n", (4848, 5381), False, 'from distutils.core import setup\n'), ((248, 289), 'os.system', 'os.system', (['"""python setup.py sdist upload"""'], {}), "('python setup.py sdist upload')\n", (257, 289), False, 'import os\n'), ((294, 304), 'sys.exit', 'sys.exit', ([], {}), '()\n', (302, 304), False, 'import sys\n'), ((4500, 4528), 'os.path.join', 'os.path.join', (['"""build"""', '"""lib"""'], {}), "('build', 'lib')\n", (4512, 4528), False, 'import os\n'), ((4805, 4816), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4813, 4816), False, 'import sys\n')] |
Andrelpoj/hire.me | url_shortener/src/__init__.py | 79428e2094a6b56e762a7f958e1b75f395f59cef | from flask import Flask
from .extensions import db
from .routes import short
from . import config
def create_app():
""" Creates Flask App, connect to Database and register Blueprint of routes"""
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = config.DATABASE_CONNECTION_URI
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.app_context().push()
db.init_app(app)
db.create_all()
app.register_blueprint(short)
return app | [((217, 232), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (222, 232), False, 'from flask import Flask\n')] |
wangchuanli001/Project-experience | python-百度翻译调用/Baidu_translate/com/translate/baidu/stackoverflow_question_handler.py | b563c5c3afc07c913c2e1fd25dff41c70533f8de | import requests
from bs4 import BeautifulSoup
import urllib.request
import os
import random
import time
def html(url):
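    # Fetch the question page with a randomly chosen User-Agent, then scrape the title, view count, answer count, timestamps, tags and post bodies into `item`.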
user_agents = [
'Mozilla/5.0 (Windows; U; Windows NT 5.1; it; rv:1.8.1.11) Gecko/20071127 Firefox/2.0.0.11',
'Opera/9.25 (Windows NT 5.1; U; en)',
'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)',
'Mozilla/5.0 (compatible; Konqueror/3.5; Linux) KHTML/3.5.5 (like Gecko) (Kubuntu)',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.12) Gecko/20070731 Ubuntu/dapper-security Firefox/1.5.0.12',
'Lynx/2.8.5rel.1 libwww-FM/2.14 SSL-MM/1.4.1 GNUTLS/1.2.9',
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.7 (KHTML, like Gecko) Ubuntu/11.04 Chromium/16.0.912.77 Chrome/16.0.912.77 Safari/535.7",
"Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:10.0) Gecko/20100101 Firefox/10.0 "]
user_agent = random.choice(user_agents)
headers = {
'User-Agent': user_agent,
'Accept-Encoding': 'gzip'}
req = requests.get(url=url, headers=headers)
html_doc = req.text
soup = BeautifulSoup(html_doc, "html.parser")
times = soup.select("time")
views = soup.select("p.label-key > b")
active_str = str(views[2])
active = active_str[active_str.find("title=\"") + 7:active_str.find("Z")]
answers = soup.select("#answers-header > div > h2 >span")
question_content = soup.select("div.post-text")
tags = soup.select("#question > div.post-layout > div.postcell.post-layout--right > "
"div.post-taglist.grid.gs4.gsy.fd-column > div >a")
title = soup.select("h1 >a")
tags_str = ""
item = []
for tag in tags:
tags_str += tag.get_text() + ","
    answer_contents = []
    for i in range(1, len(question_content)):
        answer_contents.append(question_content[i])
for i in range(len(times)):
if len(times[i].get_text()) > 1:
asked_time = times[i].get("datetime").replace("T", " ")
    # item layout: title, views, answers_num, asked_time, tags_str, active_time, question_content, answer_contents
    item.append(title[0].get_text())
item.append(views[1].get_text())
item.append(answers[0].get_text())
item.append(asked_time)
item.append(tags_str)
item.append(active)
item.append(question_content[0])
    item.append(answer_contents)
print(item)
# updatetosql(item)
def updatetosql(item):
ansers_text = "[split]".join(item[7])
updatesql = "UPDATE `t_stackoverflow_question` " \
"SET `tags`='%s', `views`='%s', `answers_num`='%s', `asked_time`='%s', `last_active_time`='%s', `question_content`='%s', `answers_contetnt`='%s' " \
"WHERE (`question_id`='%s') " \
% (item[4], item[1], item[2], item[3], item[5], item[6], ansers_text, item[0],)
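    # Note: the UPDATE statement is only built here; nothing executes it against a database (the call site in html() is commented out).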
pass
if __name__ == '__main__':
html("https://stackoverflow.com/questions/50119673/nginx-fast-cgi-cache-on-error-page-404")
| [((923, 949), 'random.choice', 'random.choice', (['user_agents'], {}), '(user_agents)\n', (936, 949), False, 'import random\n'), ((1045, 1083), 'requests.get', 'requests.get', ([], {'url': 'url', 'headers': 'headers'}), '(url=url, headers=headers)\n', (1057, 1083), False, 'import requests\n'), ((1119, 1157), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html_doc', '"""html.parser"""'], {}), "(html_doc, 'html.parser')\n", (1132, 1157), False, 'from bs4 import BeautifulSoup\n')] |
ALEXKIRNAS/Kaggle-C-CORE-Iceberg-Classifier-Challenge | Research/data_loader.py | d8b06969c9393cfce6d9ac96b58c9d365ff4369d | import os
import numpy as np
import pandas as pd
from keras.utils import to_categorical
from sklearn.model_selection import KFold, train_test_split
def load_data(path):
train = pd.read_json(os.path.join(path, "./train.json"))
test = pd.read_json(os.path.join(path, "./test.json"))
return (train, test)
def preprocess(df,
means=(-22.159262, -24.953745, 40.021883465782651),
stds=(5.33146, 4.5463958, 4.0815391476694414)):
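    # Reshape both radar bands to 75x75 images, build a constant incidence-angle channel, standardize each channel, and stack them into a 3-channel image.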
X_band_1 = np.array([np.array(band).astype(np.float32).reshape(75, 75)
for band in df["band_1"]])
X_band_2 = np.array([np.array(band).astype(np.float32).reshape(75, 75)
for band in df["band_2"]])
    # Missing incidence angles ('na') fall back to the angle-channel mean so they normalize to ~0 below.
    angl = df['inc_angle'].map(lambda x: np.cos(x * np.pi / 180) if x != 'na' else means[2])
angl = np.array([np.full(shape=(75, 75), fill_value=angel).astype(np.float32)
for angel in angl])
X_band_1 = (X_band_1 - means[0]) / stds[0]
X_band_2 = (X_band_2 - means[1]) / stds[1]
angl = (angl - means[2]) / stds[2]
images = np.concatenate([X_band_1[:, :, :, np.newaxis],
X_band_2[:, :, :, np.newaxis],
angl[:, :, :, np.newaxis]],
axis=-1)
return images
def prepare_data_cv(path):
train, test = load_data(path)
X_train, y_train = (preprocess(train),
to_categorical(train['is_iceberg'].as_matrix().reshape(-1, 1)))
kfold_data = []
kf = KFold(n_splits=5, shuffle=True, random_state=0xCAFFE)
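    # Collect one (X_train, y_train, X_val, y_val) tuple per fold of a shuffled 5-fold split.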
for train_indices, val_indices in kf.split(y_train):
X_train_cv = X_train[train_indices]
y_train_cv = y_train[train_indices]
X_val = X_train[val_indices]
y_val = y_train[val_indices]
kfold_data.append((X_train_cv, y_train_cv, X_val, y_val))
X_test = preprocess(test)
return (kfold_data, X_test)
def prepare_data(path):
train, test = load_data(path)
X_train, y_train = (preprocess(train),
to_categorical(train['is_iceberg'].as_matrix().reshape(-1, 1)))
X_train_cv, X_valid, y_train_cv, y_valid = train_test_split(X_train,
y_train,
random_state=0xCAFFE,
train_size=0.8)
X_test = preprocess(test)
return ([(X_train_cv, y_train_cv, X_valid, y_valid)], X_test)
| [((1090, 1215), 'numpy.concatenate', 'np.concatenate', (['[X_band_1[:, :, :, (np.newaxis)], X_band_2[:, :, :, (np.newaxis)], angl[:,\n :, :, (np.newaxis)]]'], {'axis': '(-1)'}), '([X_band_1[:, :, :, (np.newaxis)], X_band_2[:, :, :, (np.\n newaxis)], angl[:, :, :, (np.newaxis)]], axis=-1)\n', (1104, 1215), True, 'import numpy as np\n'), ((1533, 1585), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': '(5)', 'shuffle': '(True)', 'random_state': '(831486)'}), '(n_splits=5, shuffle=True, random_state=831486)\n', (1538, 1585), False, 'from sklearn.model_selection import KFold, train_test_split\n'), ((2178, 2249), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X_train', 'y_train'], {'random_state': '(831486)', 'train_size': '(0.8)'}), '(X_train, y_train, random_state=831486, train_size=0.8)\n', (2194, 2249), False, 'from sklearn.model_selection import KFold, train_test_split\n'), ((198, 232), 'os.path.join', 'os.path.join', (['path', '"""./train.json"""'], {}), "(path, './train.json')\n", (210, 232), False, 'import os\n'), ((258, 291), 'os.path.join', 'os.path.join', (['path', '"""./test.json"""'], {}), "(path, './test.json')\n", (270, 291), False, 'import os\n'), ((767, 790), 'numpy.cos', 'np.cos', (['(x * np.pi / 180)'], {}), '(x * np.pi / 180)\n', (773, 790), True, 'import numpy as np\n'), ((840, 881), 'numpy.full', 'np.full', ([], {'shape': '(75, 75)', 'fill_value': 'angel'}), '(shape=(75, 75), fill_value=angel)\n', (847, 881), True, 'import numpy as np\n'), ((495, 509), 'numpy.array', 'np.array', (['band'], {}), '(band)\n', (503, 509), True, 'import numpy as np\n'), ((623, 637), 'numpy.array', 'np.array', (['band'], {}), '(band)\n', (631, 637), True, 'import numpy as np\n')] |
tiagopms/polyaxon-cli | polyaxon_cli/cli/experiment.py | eb13e3b8389ccf069a421a4dabc87aaa506ab61c | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import sys
import click
import rhea
from polyaxon_cli.cli.getters.experiment import (
get_experiment_job_or_local,
get_project_experiment_or_local
)
from polyaxon_cli.cli.upload import upload
from polyaxon_cli.client import PolyaxonClient
from polyaxon_cli.client.exceptions import PolyaxonHTTPError, PolyaxonShouldExitError
from polyaxon_cli.logger import clean_outputs
from polyaxon_cli.managers.experiment import ExperimentManager
from polyaxon_cli.managers.experiment_job import ExperimentJobManager
from polyaxon_cli.utils import cache
from polyaxon_cli.utils.formatting import (
Printer,
dict_tabulate,
get_meta_response,
get_resources,
list_dicts_to_tabulate
)
from polyaxon_cli.utils.log_handler import get_logs_handler
from polyaxon_cli.utils.validation import validate_tags
from polyaxon_client.exceptions import PolyaxonClientException
def get_experiment_details(experiment): # pylint:disable=redefined-outer-name
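    # Print the experiment description, resources, declarations and last metrics, then a condensed info table with status coloring.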
if experiment.description:
Printer.print_header("Experiment description:")
click.echo('{}\n'.format(experiment.description))
if experiment.resources:
get_resources(experiment.resources.to_dict(), header="Experiment resources:")
if experiment.declarations:
Printer.print_header("Experiment declarations:")
dict_tabulate(experiment.declarations)
if experiment.last_metric:
Printer.print_header("Experiment last metrics:")
dict_tabulate(experiment.last_metric)
response = experiment.to_light_dict(
humanize_values=True,
exclude_attrs=[
'uuid', 'config', 'project', 'experiments', 'description',
'declarations', 'last_metric', 'resources', 'jobs', 'run_env'
])
Printer.print_header("Experiment info:")
dict_tabulate(Printer.add_status_color(response))
@click.group()
@click.option('--project', '-p', type=str, help="The project name, e.g. 'mnist' or 'adam/mnist'.")
@click.option('--experiment', '-xp', type=int, help="The experiment id number.")
@click.pass_context
@clean_outputs
def experiment(ctx, project, experiment): # pylint:disable=redefined-outer-name
"""Commands for experiments."""
ctx.obj = ctx.obj or {}
ctx.obj['project'] = project
ctx.obj['experiment'] = experiment
@experiment.command()
@click.option('--job', '-j', type=int, help="The job id.")
@click.pass_context
@clean_outputs
def get(ctx, job):
"""Get experiment or experiment job.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples for getting an experiment:
\b
```bash
$ polyaxon experiment get # if experiment is cached
```
\b
```bash
$ polyaxon experiment --experiment=1 get
```
\b
```bash
$ polyaxon experiment -xp 1 --project=cats-vs-dogs get
```
\b
```bash
$ polyaxon experiment -xp 1 -p alain/cats-vs-dogs get
```
Examples for getting an experiment job:
\b
```bash
$ polyaxon experiment get -j 1 # if experiment is cached
```
\b
```bash
$ polyaxon experiment --experiment=1 get --job=10
```
\b
```bash
$ polyaxon experiment -xp 1 --project=cats-vs-dogs get -j 2
```
\b
```bash
$ polyaxon experiment -xp 1 -p alain/cats-vs-dogs get -j 2
```
"""
def get_experiment():
try:
response = PolyaxonClient().experiment.get_experiment(user, project_name, _experiment)
cache.cache(config_manager=ExperimentManager, response=response)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not load experiment `{}` info.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
get_experiment_details(response)
def get_experiment_job():
try:
response = PolyaxonClient().experiment_job.get_job(user,
project_name,
_experiment,
_job)
cache.cache(config_manager=ExperimentJobManager, response=response)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get job `{}`.'.format(_job))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
if response.resources:
get_resources(response.resources.to_dict(), header="Job resources:")
response = Printer.add_status_color(response.to_light_dict(
humanize_values=True,
exclude_attrs=['uuid', 'definition', 'experiment', 'unique_name', 'resources']
))
Printer.print_header("Job info:")
dict_tabulate(response)
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
if job:
_job = get_experiment_job_or_local(job)
get_experiment_job()
else:
get_experiment()
@experiment.command()
@click.pass_context
@clean_outputs
def delete(ctx):
"""Delete experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Example:
\b
```bash
$ polyaxon experiment delete
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
if not click.confirm("Are sure you want to delete experiment `{}`".format(_experiment)):
click.echo('Existing without deleting experiment.')
sys.exit(1)
try:
response = PolyaxonClient().experiment.delete_experiment(
user, project_name, _experiment)
# Purge caching
ExperimentManager.purge()
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not delete experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
if response.status_code == 204:
Printer.print_success("Experiment `{}` was delete successfully".format(_experiment))
@experiment.command()
@click.option('--name', type=str,
help='Name of the experiment, must be unique within the project, could be none.')
@click.option('--description', type=str, help='Description of the experiment.')
@click.option('--tags', type=str, help='Tags of the experiment, comma separated values.')
@click.pass_context
@clean_outputs
def update(ctx, name, description, tags):
"""Update experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment -xp 2 update --description="new description for my experiments"
```
\b
```bash
$ polyaxon experiment -xp 2 update --tags="foo, bar" --name="unique-name"
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
update_dict = {}
if name:
update_dict['name'] = name
if description:
update_dict['description'] = description
tags = validate_tags(tags)
if tags:
update_dict['tags'] = tags
if not update_dict:
Printer.print_warning('No argument was provided to update the experiment.')
sys.exit(0)
try:
response = PolyaxonClient().experiment.update_experiment(
user, project_name, _experiment, update_dict)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not update experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success("Experiment updated.")
get_experiment_details(response)
@experiment.command()
@click.option('--yes', '-y', is_flag=True, default=False,
help="Automatic yes to prompts. "
"Assume \"yes\" as answer to all prompts and run non-interactively.")
@click.pass_context
@clean_outputs
def stop(ctx, yes):
"""Stop experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment stop
```
\b
```bash
$ polyaxon experiment -xp 2 stop
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
    if not yes and not click.confirm("Are you sure you want to stop "
                                      "experiment `{}`".format(_experiment)):
        click.echo('Exiting without stopping experiment.')
sys.exit(0)
try:
PolyaxonClient().experiment.stop(user, project_name, _experiment)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not stop experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success("Experiment is being stopped.")
@experiment.command()
@click.option('--copy', '-c', is_flag=True, default=False,
help="To copy the experiment before restarting.")
@click.option('--file', '-f', multiple=True, type=click.Path(exists=True),
help="The polyaxon files to update with.")
@click.option('-u', is_flag=True, default=False,
help="To upload the repo before restarting.")
@click.pass_context
@clean_outputs
def restart(ctx, copy, file, u): # pylint:disable=redefined-builtin
"""Restart experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment --experiment=1 restart
```
"""
config = None
update_code = None
if file:
config = rhea.read(file)
# Check if we need to upload
if u:
ctx.invoke(upload, sync=False)
update_code = True
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
try:
if copy:
response = PolyaxonClient().experiment.copy(
user, project_name, _experiment, config=config, update_code=update_code)
Printer.print_success('Experiment was copied with id {}'.format(response.id))
else:
response = PolyaxonClient().experiment.restart(
user, project_name, _experiment, config=config, update_code=update_code)
Printer.print_success('Experiment was restarted with id {}'.format(response.id))
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not restart experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
@experiment.command()
@click.option('--file', '-f', multiple=True, type=click.Path(exists=True),
help="The polyaxon files to update with.")
@click.option('-u', is_flag=True, default=False,
help="To upload the repo before resuming.")
@click.pass_context
@clean_outputs
def resume(ctx, file, u): # pylint:disable=redefined-builtin
"""Resume experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment --experiment=1 resume
```
"""
config = None
update_code = None
if file:
config = rhea.read(file)
# Check if we need to upload
if u:
ctx.invoke(upload, sync=False)
update_code = True
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
try:
response = PolyaxonClient().experiment.resume(
user, project_name, _experiment, config=config, update_code=update_code)
Printer.print_success('Experiment was resumed with id {}'.format(response.id))
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not resume experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
@experiment.command()
@click.option('--page', type=int, help="To paginate through the list of jobs.")
@click.pass_context
@clean_outputs
def jobs(ctx, page):
"""List jobs for experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment --experiment=1 jobs
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
page = page or 1
try:
response = PolyaxonClient().experiment.list_jobs(
user, project_name, _experiment, page=page)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get jobs for experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
meta = get_meta_response(response)
if meta:
Printer.print_header('Jobs for experiment `{}`.'.format(_experiment))
Printer.print_header('Navigation:')
dict_tabulate(meta)
else:
Printer.print_header('No jobs found for experiment `{}`.'.format(_experiment))
objects = [Printer.add_status_color(o.to_light_dict(humanize_values=True))
for o in response['results']]
objects = list_dicts_to_tabulate(objects)
if objects:
Printer.print_header("Jobs:")
objects.pop('experiment', None)
dict_tabulate(objects, is_list_dict=True)
@experiment.command()
@click.option('--job', '-j', type=int, help="The job id.")
@click.option('--page', type=int, help="To paginate through the list of statuses.")
@click.pass_context
@clean_outputs
def statuses(ctx, job, page):
"""Get experiment or experiment job statuses.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples getting experiment statuses:
\b
```bash
$ polyaxon experiment statuses
```
\b
```bash
$ polyaxon experiment -xp 1 statuses
```
Examples getting experiment job statuses:
\b
```bash
$ polyaxon experiment statuses -j 3
```
\b
```bash
$ polyaxon experiment -xp 1 statuses --job 1
```
"""
def get_experiment_statuses():
try:
response = PolyaxonClient().experiment.get_statuses(
user, project_name, _experiment, page=page)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
            Printer.print_error('Could not get status for experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
meta = get_meta_response(response)
if meta:
Printer.print_header('Statuses for experiment `{}`.'.format(_experiment))
Printer.print_header('Navigation:')
dict_tabulate(meta)
else:
Printer.print_header('No statuses found for experiment `{}`.'.format(_experiment))
objects = list_dicts_to_tabulate(
[Printer.add_status_color(o.to_light_dict(humanize_values=True), status_key='status')
for o in response['results']])
if objects:
Printer.print_header("Statuses:")
objects.pop('experiment', None)
dict_tabulate(objects, is_list_dict=True)
def get_experiment_job_statuses():
try:
response = PolyaxonClient().experiment_job.get_statuses(user,
project_name,
_experiment,
_job,
page=page)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get status for job `{}`.'.format(job))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
meta = get_meta_response(response)
if meta:
Printer.print_header('Statuses for Job `{}`.'.format(_job))
Printer.print_header('Navigation:')
dict_tabulate(meta)
else:
Printer.print_header('No statuses found for job `{}`.'.format(_job))
objects = list_dicts_to_tabulate(
[Printer.add_status_color(o.to_light_dict(humanize_values=True), status_key='status')
for o in response['results']])
if objects:
Printer.print_header("Statuses:")
objects.pop('job', None)
dict_tabulate(objects, is_list_dict=True)
page = page or 1
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
if job:
_job = get_experiment_job_or_local(job)
get_experiment_job_statuses()
else:
get_experiment_statuses()
@experiment.command()
@click.option('--job', '-j', type=int, help="The job id.")
@click.option('--gpu', '-g', is_flag=True, help="List experiment GPU resources.")
@click.pass_context
@clean_outputs
def resources(ctx, job, gpu):
"""Get experiment or experiment job resources.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples for getting experiment resources:
\b
```bash
$ polyaxon experiment -xp 19 resources
```
For GPU resources
\b
```bash
$ polyaxon experiment -xp 19 resources --gpu
```
Examples for getting experiment job resources:
\b
```bash
$ polyaxon experiment -xp 19 resources -j 1
```
For GPU resources
\b
```bash
$ polyaxon experiment -xp 19 resources -j 1 --gpu
```
"""
def get_experiment_resources():
try:
message_handler = Printer.gpu_resources if gpu else Printer.resources
PolyaxonClient().experiment.resources(
user, project_name, _experiment, message_handler=message_handler)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get resources for experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
def get_experiment_job_resources():
try:
message_handler = Printer.gpu_resources if gpu else Printer.resources
PolyaxonClient().experiment_job.resources(user,
project_name,
_experiment,
_job,
message_handler=message_handler)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get resources for job `{}`.'.format(_job))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
if job:
_job = get_experiment_job_or_local(job)
get_experiment_job_resources()
else:
get_experiment_resources()
@experiment.command()
@click.option('--job', '-j', type=int, help="The job id.")
@click.option('--past', '-p', is_flag=True, help="Show the past logs.")
@click.option('--follow', '-f', is_flag=True, default=False,
help="Stream logs after showing past logs.")
@click.option('--hide_time', is_flag=True, default=False,
help="Whether or not to hide timestamps from the log stream.")
@click.pass_context
@clean_outputs
def logs(ctx, job, past, follow, hide_time):
"""Get experiment or experiment job logs.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples for getting experiment logs:
\b
```bash
$ polyaxon experiment logs
```
\b
```bash
$ polyaxon experiment -xp 10 -p mnist logs
```
Examples for getting experiment job logs:
\b
```bash
$ polyaxon experiment -xp 1 -j 1 logs
```
"""
def get_experiment_logs():
if past:
try:
response = PolyaxonClient().experiment.logs(
user, project_name, _experiment, stream=False)
get_logs_handler(handle_job_info=True,
show_timestamp=not hide_time,
stream=False)(response.content.decode().split('\n'))
print()
if not follow:
return
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
if not follow:
Printer.print_error(
'Could not get logs for experiment `{}`.'.format(_experiment))
Printer.print_error(
'Error message `{}`.'.format(e))
sys.exit(1)
try:
PolyaxonClient().experiment.logs(
user,
project_name,
_experiment,
message_handler=get_logs_handler(handle_job_info=True,
show_timestamp=not hide_time))
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get logs for experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
def get_experiment_job_logs():
if past:
try:
response = PolyaxonClient().experiment_job.logs(
user,
project_name,
_experiment,
_job,
stream=False)
get_logs_handler(handle_job_info=True,
show_timestamp=not hide_time,
stream=False)(response.content.decode().split('\n'))
print()
if not follow:
return
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
if not follow:
Printer.print_error(
'Could not get logs for experiment `{}`.'.format(_experiment))
Printer.print_error(
'Error message `{}`.'.format(e))
sys.exit(1)
try:
PolyaxonClient().experiment_job.logs(
user,
project_name,
_experiment,
_job,
message_handler=get_logs_handler(handle_job_info=True,
show_timestamp=not hide_time))
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get logs for job `{}`.'.format(_job))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
if job:
_job = get_experiment_job_or_local(job)
get_experiment_job_logs()
else:
get_experiment_logs()
@experiment.command()
@click.pass_context
@clean_outputs
def outputs(ctx):
"""Download outputs for experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment -xp 1 outputs
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
try:
PolyaxonClient().experiment.download_outputs(user, project_name, _experiment)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not download outputs for experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success('Files downloaded.')
@experiment.command()
@click.pass_context
@clean_outputs
def bookmark(ctx):
"""Bookmark experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment bookmark
```
\b
```bash
$ polyaxon experiment -xp 2 bookmark
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
try:
PolyaxonClient().experiment.bookmark(user, project_name, _experiment)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not bookmark experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success("Experiment is bookmarked.")
@experiment.command()
@click.pass_context
@clean_outputs
def unbookmark(ctx):
"""Unbookmark experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment unbookmark
```
\b
```bash
$ polyaxon experiment -xp 2 unbookmark
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
try:
PolyaxonClient().experiment.unbookmark(user, project_name, _experiment)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not unbookmark experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success("Experiment is unbookmarked.")
| [((1936, 1949), 'click.group', 'click.group', ([], {}), '()\n', (1947, 1949), False, 'import click\n'), ((1951, 2053), 'click.option', 'click.option', (['"""--project"""', '"""-p"""'], {'type': 'str', 'help': '"""The project name, e.g. \'mnist\' or \'adam/mnist\'."""'}), '(\'--project\', \'-p\', type=str, help=\n "The project name, e.g. \'mnist\' or \'adam/mnist\'.")\n', (1963, 2053), False, 'import click\n'), ((2050, 2129), 'click.option', 'click.option', (['"""--experiment"""', '"""-xp"""'], {'type': 'int', 'help': '"""The experiment id number."""'}), "('--experiment', '-xp', type=int, help='The experiment id number.')\n", (2062, 2129), False, 'import click\n'), ((2407, 2464), 'click.option', 'click.option', (['"""--job"""', '"""-j"""'], {'type': 'int', 'help': '"""The job id."""'}), "('--job', '-j', type=int, help='The job id.')\n", (2419, 2464), False, 'import click\n'), ((6492, 6616), 'click.option', 'click.option', (['"""--name"""'], {'type': 'str', 'help': '"""Name of the experiment, must be unique within the project, could be none."""'}), "('--name', type=str, help=\n 'Name of the experiment, must be unique within the project, could be none.'\n )\n", (6504, 6616), False, 'import click\n'), ((6622, 6700), 'click.option', 'click.option', (['"""--description"""'], {'type': 'str', 'help': '"""Description of the experiment."""'}), "('--description', type=str, help='Description of the experiment.')\n", (6634, 6700), False, 'import click\n'), ((6702, 6795), 'click.option', 'click.option', (['"""--tags"""'], {'type': 'str', 'help': '"""Tags of the experiment, comma separated values."""'}), "('--tags', type=str, help=\n 'Tags of the experiment, comma separated values.')\n", (6714, 6795), False, 'import click\n'), ((8235, 8400), 'click.option', 'click.option', (['"""--yes"""', '"""-y"""'], {'is_flag': '(True)', 'default': '(False)', 'help': '"""Automatic yes to prompts. Assume "yes" as answer to all prompts and run non-interactively."""'}), '(\'--yes\', \'-y\', is_flag=True, default=False, help=\n \'Automatic yes to prompts. 
Assume "yes" as answer to all prompts and run non-interactively.\'\n )\n', (8247, 8400), False, 'import click\n'), ((9543, 9655), 'click.option', 'click.option', (['"""--copy"""', '"""-c"""'], {'is_flag': '(True)', 'default': '(False)', 'help': '"""To copy the experiment before restarting."""'}), "('--copy', '-c', is_flag=True, default=False, help=\n 'To copy the experiment before restarting.')\n", (9555, 9655), False, 'import click\n'), ((9798, 9896), 'click.option', 'click.option', (['"""-u"""'], {'is_flag': '(True)', 'default': '(False)', 'help': '"""To upload the repo before restarting."""'}), "('-u', is_flag=True, default=False, help=\n 'To upload the repo before restarting.')\n", (9810, 9896), False, 'import click\n'), ((11510, 11606), 'click.option', 'click.option', (['"""-u"""'], {'is_flag': '(True)', 'default': '(False)', 'help': '"""To upload the repo before resuming."""'}), "('-u', is_flag=True, default=False, help=\n 'To upload the repo before resuming.')\n", (11522, 11606), False, 'import click\n'), ((12796, 12874), 'click.option', 'click.option', (['"""--page"""'], {'type': 'int', 'help': '"""To paginate through the list of jobs."""'}), "('--page', type=int, help='To paginate through the list of jobs.')\n", (12808, 12874), False, 'import click\n'), ((14350, 14407), 'click.option', 'click.option', (['"""--job"""', '"""-j"""'], {'type': 'int', 'help': '"""The job id."""'}), "('--job', '-j', type=int, help='The job id.')\n", (14362, 14407), False, 'import click\n'), ((14409, 14496), 'click.option', 'click.option', (['"""--page"""'], {'type': 'int', 'help': '"""To paginate through the list of statuses."""'}), "('--page', type=int, help=\n 'To paginate through the list of statuses.')\n", (14421, 14496), False, 'import click\n'), ((17906, 17963), 'click.option', 'click.option', (['"""--job"""', '"""-j"""'], {'type': 'int', 'help': '"""The job id."""'}), "('--job', '-j', type=int, help='The job id.')\n", (17918, 17963), False, 'import click\n'), ((17965, 18050), 'click.option', 'click.option', (['"""--gpu"""', '"""-g"""'], {'is_flag': '(True)', 'help': '"""List experiment GPU resources."""'}), "('--gpu', '-g', is_flag=True, help='List experiment GPU resources.'\n )\n", (17977, 18050), False, 'import click\n'), ((20328, 20385), 'click.option', 'click.option', (['"""--job"""', '"""-j"""'], {'type': 'int', 'help': '"""The job id."""'}), "('--job', '-j', type=int, help='The job id.')\n", (20340, 20385), False, 'import click\n'), ((20387, 20457), 'click.option', 'click.option', (['"""--past"""', '"""-p"""'], {'is_flag': '(True)', 'help': '"""Show the past logs."""'}), "('--past', '-p', is_flag=True, help='Show the past logs.')\n", (20399, 20457), False, 'import click\n'), ((20459, 20568), 'click.option', 'click.option', (['"""--follow"""', '"""-f"""'], {'is_flag': '(True)', 'default': '(False)', 'help': '"""Stream logs after showing past logs."""'}), "('--follow', '-f', is_flag=True, default=False, help=\n 'Stream logs after showing past logs.')\n", (20471, 20568), False, 'import click\n'), ((20579, 20703), 'click.option', 'click.option', (['"""--hide_time"""'], {'is_flag': '(True)', 'default': '(False)', 'help': '"""Whether or not to hide timestamps from the log stream."""'}), "('--hide_time', is_flag=True, default=False, help=\n 'Whether or not to hide timestamps from the log stream.')\n", (20591, 20703), False, 'import click\n'), ((1838, 1878), 'polyaxon_cli.utils.formatting.Printer.print_header', 'Printer.print_header', (['"""Experiment info:"""'], {}), "('Experiment info:')\n", 
(1858, 1878), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((7539, 7558), 'polyaxon_cli.utils.validation.validate_tags', 'validate_tags', (['tags'], {}), '(tags)\n', (7552, 7558), False, 'from polyaxon_cli.utils.validation import validate_tags\n'), ((8128, 8172), 'polyaxon_cli.utils.formatting.Printer.print_success', 'Printer.print_success', (['"""Experiment updated."""'], {}), "('Experiment updated.')\n", (8149, 8172), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((9464, 9517), 'polyaxon_cli.utils.formatting.Printer.print_success', 'Printer.print_success', (['"""Experiment is being stopped."""'], {}), "('Experiment is being stopped.')\n", (9485, 9517), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((13722, 13749), 'polyaxon_cli.utils.formatting.get_meta_response', 'get_meta_response', (['response'], {}), '(response)\n', (13739, 13749), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((14149, 14180), 'polyaxon_cli.utils.formatting.list_dicts_to_tabulate', 'list_dicts_to_tabulate', (['objects'], {}), '(objects)\n', (14171, 14180), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((25313, 25355), 'polyaxon_cli.utils.formatting.Printer.print_success', 'Printer.print_success', (['"""Files downloaded."""'], {}), "('Files downloaded.')\n", (25334, 25355), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((26210, 26260), 'polyaxon_cli.utils.formatting.Printer.print_success', 'Printer.print_success', (['"""Experiment is bookmarked."""'], {}), "('Experiment is bookmarked.')\n", (26231, 26260), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((27127, 27179), 'polyaxon_cli.utils.formatting.Printer.print_success', 'Printer.print_success', (['"""Experiment is unbookmarked."""'], {}), "('Experiment is unbookmarked.')\n", (27148, 27179), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((1087, 1134), 'polyaxon_cli.utils.formatting.Printer.print_header', 'Printer.print_header', (['"""Experiment description:"""'], {}), "('Experiment description:')\n", (1107, 1134), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((1350, 1398), 'polyaxon_cli.utils.formatting.Printer.print_header', 'Printer.print_header', (['"""Experiment declarations:"""'], {}), "('Experiment declarations:')\n", (1370, 1398), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((1407, 1445), 'polyaxon_cli.utils.formatting.dict_tabulate', 'dict_tabulate', (['experiment.declarations'], {}), '(experiment.declarations)\n', (1420, 1445), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((1486, 1534), 'polyaxon_cli.utils.formatting.Printer.print_header', 'Printer.print_header', (['"""Experiment 
last metrics:"""'], {}), "('Experiment last metrics:')\n", (1506, 1534), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((1543, 1580), 'polyaxon_cli.utils.formatting.dict_tabulate', 'dict_tabulate', (['experiment.last_metric'], {}), '(experiment.last_metric)\n', (1556, 1580), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((1897, 1931), 'polyaxon_cli.utils.formatting.Printer.add_status_color', 'Printer.add_status_color', (['response'], {}), '(response)\n', (1921, 1931), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((4918, 4951), 'polyaxon_cli.utils.formatting.Printer.print_header', 'Printer.print_header', (['"""Job info:"""'], {}), "('Job info:')\n", (4938, 4951), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((4960, 4983), 'polyaxon_cli.utils.formatting.dict_tabulate', 'dict_tabulate', (['response'], {}), '(response)\n', (4973, 4983), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((5204, 5236), 'polyaxon_cli.cli.getters.experiment.get_experiment_job_or_local', 'get_experiment_job_or_local', (['job'], {}), '(job)\n', (5231, 5236), False, 'from polyaxon_cli.cli.getters.experiment import get_experiment_job_or_local, get_project_experiment_or_local\n'), ((5833, 5884), 'click.echo', 'click.echo', (['"""Existing without deleting experiment."""'], {}), "('Existing without deleting experiment.')\n", (5843, 5884), False, 'import click\n'), ((5893, 5904), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5901, 5904), False, 'import sys\n'), ((6058, 6083), 'polyaxon_cli.managers.experiment.ExperimentManager.purge', 'ExperimentManager.purge', ([], {}), '()\n', (6081, 6083), False, 'from polyaxon_cli.managers.experiment import ExperimentManager\n'), ((7640, 7715), 'polyaxon_cli.utils.formatting.Printer.print_warning', 'Printer.print_warning', (['"""No argument was provided to update the experiment."""'], {}), "('No argument was provided to update the experiment.')\n", (7661, 7715), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((7724, 7735), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (7732, 7735), False, 'import sys\n'), ((9052, 9103), 'click.echo', 'click.echo', (['"""Existing without stopping experiment."""'], {}), "('Existing without stopping experiment.')\n", (9062, 9103), False, 'import click\n'), ((9112, 9123), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (9120, 9123), False, 'import sys\n'), ((10263, 10278), 'rhea.read', 'rhea.read', (['file'], {}), '(file)\n', (10272, 10278), False, 'import rhea\n'), ((9715, 9738), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (9725, 9738), False, 'import click\n'), ((11964, 11979), 'rhea.read', 'rhea.read', (['file'], {}), '(file)\n', (11973, 11979), False, 'import rhea\n'), ((11427, 11450), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (11437, 11450), False, 'import click\n'), ((13849, 13884), 'polyaxon_cli.utils.formatting.Printer.print_header', 'Printer.print_header', (['"""Navigation:"""'], {}), "('Navigation:')\n", (13869, 13884), False, 'from 
polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((13893, 13912), 'polyaxon_cli.utils.formatting.dict_tabulate', 'dict_tabulate', (['meta'], {}), '(meta)\n', (13906, 13912), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((14205, 14234), 'polyaxon_cli.utils.formatting.Printer.print_header', 'Printer.print_header', (['"""Jobs:"""'], {}), "('Jobs:')\n", (14225, 14234), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((14283, 14324), 'polyaxon_cli.utils.formatting.dict_tabulate', 'dict_tabulate', (['objects'], {'is_list_dict': '(True)'}), '(objects, is_list_dict=True)\n', (14296, 14324), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((15500, 15527), 'polyaxon_cli.utils.formatting.get_meta_response', 'get_meta_response', (['response'], {}), '(response)\n', (15517, 15527), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((16890, 16917), 'polyaxon_cli.utils.formatting.get_meta_response', 'get_meta_response', (['response'], {}), '(response)\n', (16907, 16917), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((17766, 17798), 'polyaxon_cli.cli.getters.experiment.get_experiment_job_or_local', 'get_experiment_job_or_local', (['job'], {}), '(job)\n', (17793, 17798), False, 'from polyaxon_cli.cli.getters.experiment import get_experiment_job_or_local, get_project_experiment_or_local\n'), ((20186, 20218), 'polyaxon_cli.cli.getters.experiment.get_experiment_job_or_local', 'get_experiment_job_or_local', (['job'], {}), '(job)\n', (20213, 20218), False, 'from polyaxon_cli.cli.getters.experiment import get_experiment_job_or_local, get_project_experiment_or_local\n'), ((24386, 24418), 'polyaxon_cli.cli.getters.experiment.get_experiment_job_or_local', 'get_experiment_job_or_local', (['job'], {}), '(job)\n', (24413, 24418), False, 'from polyaxon_cli.cli.getters.experiment import get_experiment_job_or_local, get_project_experiment_or_local\n'), ((3546, 3610), 'polyaxon_cli.utils.cache.cache', 'cache.cache', ([], {'config_manager': 'ExperimentManager', 'response': 'response'}), '(config_manager=ExperimentManager, response=response)\n', (3557, 3610), False, 'from polyaxon_cli.utils import cache\n'), ((4272, 4339), 'polyaxon_cli.utils.cache.cache', 'cache.cache', ([], {'config_manager': 'ExperimentJobManager', 'response': 'response'}), '(config_manager=ExperimentJobManager, response=response)\n', (4283, 4339), False, 'from polyaxon_cli.utils import cache\n'), ((6325, 6336), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6333, 6336), False, 'import sys\n'), ((8111, 8122), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8119, 8122), False, 'import sys\n'), ((9447, 9458), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (9455, 9458), False, 'import sys\n'), ((11341, 11352), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (11349, 11352), False, 'import sys\n'), ((12759, 12770), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (12767, 12770), False, 'import sys\n'), ((13698, 13709), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (13706, 13709), False, 'import sys\n'), ((15643, 15678), 
'polyaxon_cli.utils.formatting.Printer.print_header', 'Printer.print_header', (['"""Navigation:"""'], {}), "('Navigation:')\n", (15663, 15678), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((15691, 15710), 'polyaxon_cli.utils.formatting.dict_tabulate', 'dict_tabulate', (['meta'], {}), '(meta)\n', (15704, 15710), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((16037, 16070), 'polyaxon_cli.utils.formatting.Printer.print_header', 'Printer.print_header', (['"""Statuses:"""'], {}), "('Statuses:')\n", (16057, 16070), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((16127, 16168), 'polyaxon_cli.utils.formatting.dict_tabulate', 'dict_tabulate', (['objects'], {'is_list_dict': '(True)'}), '(objects, is_list_dict=True)\n', (16140, 16168), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((17019, 17054), 'polyaxon_cli.utils.formatting.Printer.print_header', 'Printer.print_header', (['"""Navigation:"""'], {}), "('Navigation:')\n", (17039, 17054), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((17067, 17086), 'polyaxon_cli.utils.formatting.dict_tabulate', 'dict_tabulate', (['meta'], {}), '(meta)\n', (17080, 17086), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((17399, 17432), 'polyaxon_cli.utils.formatting.Printer.print_header', 'Printer.print_header', (['"""Statuses:"""'], {}), "('Statuses:')\n", (17419, 17432), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((17482, 17523), 'polyaxon_cli.utils.formatting.dict_tabulate', 'dict_tabulate', (['objects'], {'is_list_dict': '(True)'}), '(objects, is_list_dict=True)\n', (17495, 17523), False, 'from polyaxon_cli.utils.formatting import Printer, dict_tabulate, get_meta_response, get_resources, list_dicts_to_tabulate\n'), ((25297, 25308), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (25305, 25308), False, 'import sys\n'), ((26193, 26204), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (26201, 26204), False, 'import sys\n'), ((27110, 27121), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (27118, 27121), False, 'import sys\n'), ((3871, 3882), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3879, 3882), False, 'import sys\n'), ((4580, 4591), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4588, 4591), False, 'import sys\n'), ((15472, 15483), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (15480, 15483), False, 'import sys\n'), ((16862, 16873), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (16870, 16873), False, 'import sys\n'), ((19210, 19221), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (19218, 19221), False, 'import sys\n'), ((19954, 19965), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (19962, 19965), False, 'import sys\n'), ((22616, 22627), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (22624, 22627), False, 'import sys\n'), ((24154, 24165), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (24162, 24165), False, 'import sys\n'), ((5934, 5950), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (5948, 5950), 
False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((7765, 7781), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (7779, 7781), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((9142, 9158), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (9156, 9158), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((12310, 12326), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (12324, 12326), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((13356, 13372), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (13370, 13372), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((21406, 21493), 'polyaxon_cli.utils.log_handler.get_logs_handler', 'get_logs_handler', ([], {'handle_job_info': '(True)', 'show_timestamp': '(not hide_time)', 'stream': '(False)'}), '(handle_job_info=True, show_timestamp=not hide_time, stream\n =False)\n', (21422, 21493), False, 'from polyaxon_cli.utils.log_handler import get_logs_handler\n'), ((22234, 22302), 'polyaxon_cli.utils.log_handler.get_logs_handler', 'get_logs_handler', ([], {'handle_job_info': '(True)', 'show_timestamp': '(not hide_time)'}), '(handle_job_info=True, show_timestamp=not hide_time)\n', (22250, 22302), False, 'from polyaxon_cli.utils.log_handler import get_logs_handler\n'), ((22932, 23019), 'polyaxon_cli.utils.log_handler.get_logs_handler', 'get_logs_handler', ([], {'handle_job_info': '(True)', 'show_timestamp': '(not hide_time)', 'stream': '(False)'}), '(handle_job_info=True, show_timestamp=not hide_time, stream\n =False)\n', (22948, 23019), False, 'from polyaxon_cli.utils.log_handler import get_logs_handler\n'), ((23786, 23854), 'polyaxon_cli.utils.log_handler.get_logs_handler', 'get_logs_handler', ([], {'handle_job_info': '(True)', 'show_timestamp': '(not hide_time)'}), '(handle_job_info=True, show_timestamp=not hide_time)\n', (23802, 23854), False, 'from polyaxon_cli.utils.log_handler import get_logs_handler\n'), ((24964, 24980), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (24978, 24980), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((25880, 25896), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (25894, 25896), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((26793, 26809), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (26807, 26809), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((3458, 3474), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (3472, 3474), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((3992, 4008), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (4006, 4008), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((10630, 10646), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (10644, 10646), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((10880, 10896), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (10894, 10896), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((15109, 15125), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (15123, 15125), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((16245, 16261), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (16259, 16261), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((18821, 18837), 
'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (18835, 18837), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((19370, 19386), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (19384, 19386), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((22049, 22060), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (22057, 22060), False, 'import sys\n'), ((22087, 22103), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (22101, 22103), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((23575, 23586), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (23583, 23586), False, 'import sys\n'), ((23613, 23629), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (23627, 23629), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((21289, 21305), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (21303, 21305), False, 'from polyaxon_cli.client import PolyaxonClient\n'), ((22725, 22741), 'polyaxon_cli.client.PolyaxonClient', 'PolyaxonClient', ([], {}), '()\n', (22739, 22741), False, 'from polyaxon_cli.client import PolyaxonClient\n')] |
Habbo3/Project-Euler | Problem_09.py | 1a01d67f72b9cfb606d13df91af89159b588216e | """
A Pythagorean triplet is a set of three natural numbers, a < b < c, for which,
a^2 + b^2 = c^2
For example, 3^2 + 4^2 = 9 + 16 = 25 = 5^2.
There exists exactly one Pythagorean triplet for which a + b + c = 1000.
Find the product abc.
"""
solved = False
for a in range(1, 1000):
for b in range(1, 1000):
for c in range(1, 1000):
if a < b < c:
if a + b + c == 1000:
if a**2 + b**2 == c**2:
solved = True
break
if solved:
break
if solved:
break
product = a*b*c
print("The product of only triplet who exists is : ", product) | [] |
fanscribed/fanscribed | fanscribed/apps/transcripts/tests/test_transcripts.py | 89b14496459f81a152df38ed5098fba2b087a1d7 | from decimal import Decimal
import os
from django.test import TestCase
from unipath import Path
from ....utils import refresh
from ...media import tests
from ..models import Transcript, TranscriptMedia
MEDIA_TESTDATA_PATH = Path(tests.__file__).parent.child('testdata')
RAW_MEDIA_PATH = MEDIA_TESTDATA_PATH.child('raw').child(
'NA-472-2012-12-23-Final-excerpt.mp3').absolute()
class TranscriptsTestCase(TestCase):
def test_transcript_starts_out_with_unknown_length(self):
transcript = Transcript.objects.create(title='test')
self.assertEqual(transcript.length, None)
def test_setting_transcript_length_creates_fragments_and_stitches(self):
t = Transcript.objects.create(title='test')
t.set_length('3.33')
f0, = t.fragments.all()
self.assertEqual(f0.start, Decimal('0.00'))
self.assertEqual(f0.end, Decimal('3.33'))
self.assertEqual(t.stitches.count(), 0)
t = Transcript.objects.create(title='test')
t.set_length('7.77')
f0, = t.fragments.all()
self.assertEqual(f0.start, Decimal('0.00'))
self.assertEqual(f0.end, Decimal('7.77'))
self.assertEqual(t.stitches.count(), 0)
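        # A longer transcript should be cut into 5-second fragments, with the
        # sub-5-second remainder folded into the final fragment, and one stitch
        # created for each pair of adjacent fragments.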
t = Transcript.objects.create(title='test')
t.set_length('17.77')
f0, f1, f2 = t.fragments.all()
self.assertEqual(f0.start, Decimal('0.00'))
self.assertEqual(f0.end, Decimal('5.00'))
self.assertEqual(f1.start, Decimal('5.00'))
self.assertEqual(f1.end, Decimal('10.00'))
self.assertEqual(f2.start, Decimal('10.00'))
self.assertEqual(f2.end, Decimal('17.77'))
s0, s1 = t.stitches.all()
self.assertEqual(s0.left, f0)
self.assertEqual(s0.right, f1)
self.assertEqual(s0.state, 'notready')
self.assertEqual(s1.left, f1)
self.assertEqual(s1.right, f2)
self.assertEqual(s1.state, 'notready')
if os.environ.get('FAST_TEST') != '1':
from django.core.files import File
class SlowTranscriptsTestCase(TestCase):
def test_transcript_with_processed_media_has_length(self):
transcript = Transcript.objects.create(
title='test transcript',
)
raw_media = TranscriptMedia(
transcript=transcript,
is_processed=False,
is_full_length=True,
)
with open(RAW_MEDIA_PATH, 'rb') as f:
raw_media.file.save('{transcript.id}_raw.mp3'.format(**locals()), File(f))
raw_media.save()
# Process raw media.
raw_media.create_processed_task()
transcript = refresh(transcript)
# Check length.
expected_length = 5 * 60 # 5 minutes.
self.assertAlmostEqual(
transcript.length, expected_length, delta=0.2)
| [((1922, 1949), 'os.environ.get', 'os.environ.get', (['"""FAST_TEST"""'], {}), "('FAST_TEST')\n", (1936, 1949), False, 'import os\n'), ((228, 248), 'unipath.Path', 'Path', (['tests.__file__'], {}), '(tests.__file__)\n', (232, 248), False, 'from unipath import Path\n'), ((825, 840), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (832, 840), False, 'from decimal import Decimal\n'), ((875, 890), 'decimal.Decimal', 'Decimal', (['"""3.33"""'], {}), "('3.33')\n", (882, 890), False, 'from decimal import Decimal\n'), ((1089, 1104), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (1096, 1104), False, 'from decimal import Decimal\n'), ((1139, 1154), 'decimal.Decimal', 'Decimal', (['"""7.77"""'], {}), "('7.77')\n", (1146, 1154), False, 'from decimal import Decimal\n'), ((1361, 1376), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (1368, 1376), False, 'from decimal import Decimal\n'), ((1411, 1426), 'decimal.Decimal', 'Decimal', (['"""5.00"""'], {}), "('5.00')\n", (1418, 1426), False, 'from decimal import Decimal\n'), ((1463, 1478), 'decimal.Decimal', 'Decimal', (['"""5.00"""'], {}), "('5.00')\n", (1470, 1478), False, 'from decimal import Decimal\n'), ((1513, 1529), 'decimal.Decimal', 'Decimal', (['"""10.00"""'], {}), "('10.00')\n", (1520, 1529), False, 'from decimal import Decimal\n'), ((1566, 1582), 'decimal.Decimal', 'Decimal', (['"""10.00"""'], {}), "('10.00')\n", (1573, 1582), False, 'from decimal import Decimal\n'), ((1617, 1633), 'decimal.Decimal', 'Decimal', (['"""17.77"""'], {}), "('17.77')\n", (1624, 1633), False, 'from decimal import Decimal\n'), ((2520, 2527), 'django.core.files.File', 'File', (['f'], {}), '(f)\n', (2524, 2527), False, 'from django.core.files import File\n')] |
BurcinSayin/pf2 | buildAncestryFeats.py | bcd362dc0a750b8ee59cd19ecff9cf5be4f34b19 | from bs4 import BeautifulSoup
import requests
import json
import datetime
import codecs
import re
featHolder = {}
featHolder['name'] = 'Pathfinder 2.0 Ancestry feat list'
featHolder['date'] = datetime.date.today().strftime("%B %d, %Y")
def get_details(link):
res = requests.get(link)
res.raise_for_status()
soup = BeautifulSoup(res.text, 'lxml')
feat = soup.find_all("div", {'class':'main'})
detailraw = soup.find("meta", {'name':'description'})['content'] #First we grab the content from the meta tag
    detailsplit = re.split('<(.*?)>', detailraw) #Now we split it into groups of strings separated by < >, to pull out any links
detail = ''.join(detailsplit[::2]) #Finally, we join every other group together (passing over the link groups) into one string
#print(detail)
return detail
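# Illustration of the tag-stripping trick above (hypothetical snippet, not taken from the site):
#   re.split('<(.*?)>', 'Gain a <a href="x">+1 bonus</a> to Perception')
#     -> ['Gain a ', 'a href="x"', '+1 bonus', '/a', ' to Perception']
#   and joining every other element gives 'Gain a +1 bonus to Perception'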
def get_feats(link):
feats = []
res = requests.get(link)
res.raise_for_status()
soup = BeautifulSoup(res.text, 'lxml')
table = soup.find(lambda tag: tag.name=='table' and tag.has_attr('id') and tag['id']=="ctl00_MainContent_TableElement")
rows = table.findAll(lambda tag: tag.name=='tr')
t = 0
for row in rows:
t += 1
#print(row)
#print("-----------------------------------")
feat = {}
entries = row.find_all(lambda tag: tag.name=='td')
if entries is not None:
if len(entries) > 0:
name = entries[0].find("a").next_sibling.text #We do next_sibling here because the source puts PFS links first, which we want to skip over.
link = entries[0].find("a").next_sibling.a['href']
#for entry in entries:
# print(entry)
# print("row---------------")
level = entries[1].text
traits = entries[2].text
prereq = entries[3].text
source = entries[4].text
feat['name'] = name
feat['level'] = level
feat['traits'] = traits.split(", ")
feat['link'] = "https://2e.aonprd.com/" +link
feat['prereq'] = prereq
feat['benefits'] = source
details = get_details(feat['link'])
feat['text'] = details
feats.append(feat)
#if t > 5:
#break
return feats
listOfPages = codecs.open("ancestryFeats.csv", encoding='utf-8')
for line in listOfPages:
featMD = line.split(",")
print("Getting feats for :", featMD[0],"This url:", featMD[2])
featHolder[featMD[1]] = get_feats(featMD[2].strip('\n'))
json_data = json.dumps(featHolder, indent=4)
#print(json_data)
filename = "ancestry-feats-pf2.json"
f = open(filename, "w")
f.write(json_data)
f.close()
| [((2384, 2434), 'codecs.open', 'codecs.open', (['"""ancestryFeats.csv"""'], {'encoding': '"""utf-8"""'}), "('ancestryFeats.csv', encoding='utf-8')\n", (2395, 2434), False, 'import codecs\n'), ((2632, 2664), 'json.dumps', 'json.dumps', (['featHolder'], {'indent': '(4)'}), '(featHolder, indent=4)\n', (2642, 2664), False, 'import json\n'), ((273, 291), 'requests.get', 'requests.get', (['link'], {}), '(link)\n', (285, 291), False, 'import requests\n'), ((330, 361), 'bs4.BeautifulSoup', 'BeautifulSoup', (['res.text', '"""lxml"""'], {}), "(res.text, 'lxml')\n", (343, 361), False, 'from bs4 import BeautifulSoup\n'), ((544, 574), 're.split', 're.split', (['"""<(.*?)>"""', 'detailraw'], {}), "('<(.*?)>', detailraw)\n", (552, 574), False, 'import re\n'), ((880, 898), 'requests.get', 'requests.get', (['link'], {}), '(link)\n', (892, 898), False, 'import requests\n'), ((937, 968), 'bs4.BeautifulSoup', 'BeautifulSoup', (['res.text', '"""lxml"""'], {}), "(res.text, 'lxml')\n", (950, 968), False, 'from bs4 import BeautifulSoup\n'), ((193, 214), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (212, 214), False, 'import datetime\n')] |
Jahronimo/public_question_book_framework | Random_item_selector_module.py | 812bd11b104de013e930536713b8134d046642d5 | import random
def Randomise(questions_lists, num_question_to_display):
    import secrets
    secure_random = secrets.SystemRandom()  # creates a cryptographically secure random object
    group_of_items = questions_lists
    num_question_to_select = num_question_to_display
    # randomly pick the requested number of questions, without repeats
    list_of_random_items = secure_random.sample(group_of_items, num_question_to_select)
    # print each selected question with a numbered blank answer line underneath
    for each_question in range(0, num_question_to_select):
        print("Q.", each_question + 1, list_of_random_items[each_question])
        print("A.", each_question + 1, "_______________________")
        print("\n")
| [((130, 152), 'secrets.SystemRandom', 'secrets.SystemRandom', ([], {}), '()\n', (150, 152), False, 'import secrets\n')] |
jose-marquez89/tech-job-landscape | python_scrape/test_functions.py | 0b509536e7ba22885f50c82da8cf990b65373090 | import unittest
import scrape
class TestScrapeFunctions(unittest.TestCase):
def test_build_url(self):
url = scrape.build_url("indeed",
"/jobs?q=Data+Scientist&l=Texas&start=10",
join_next=True)
expected = ("https://www.indeed.com/"
"jobs?q=Data+Scientist&l=Texas&start=10")
url2 = scrape.build_url("indeed", job="Data Scientist", state="Texas")
expected2 = ("https://www.indeed.com/"
"jobs?q=Data%20Scientist&l=Texas&start=0")
self.assertEqual(url, expected)
self.assertEqual(url2, expected2)
def test_fetch_page(self):
fpl = scrape.fetch_page_listings
job_data = fpl("indeed",
job="Data Scientist",
state="Texas")
self.assertNotEqual(len(job_data), 0)
self.assertIsInstance(job_data, tuple)
self.assertIsInstance(job_data[0][0], dict)
self.assertIsInstance(job_data[1], str)
job_data = fpl("indeed",
next_page="/jobs?q=Data+Scientist"
"&l=Texas&start=10")
if __name__ == '__main__':
unittest.main()
| [((1212, 1227), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1225, 1227), False, 'import unittest\n'), ((123, 212), 'scrape.build_url', 'scrape.build_url', (['"""indeed"""', '"""/jobs?q=Data+Scientist&l=Texas&start=10"""'], {'join_next': '(True)'}), "('indeed', '/jobs?q=Data+Scientist&l=Texas&start=10',\n join_next=True)\n", (139, 212), False, 'import scrape\n'), ((395, 458), 'scrape.build_url', 'scrape.build_url', (['"""indeed"""'], {'job': '"""Data Scientist"""', 'state': '"""Texas"""'}), "('indeed', job='Data Scientist', state='Texas')\n", (411, 458), False, 'import scrape\n')] |
jaeheeLee17/BOJ_Algorithms | Level1_Input_Output/10172.py | c14641693d7ef0f5bba0a6637166c7ceadb2a0be | def main():
print("|\_/|")
print("|q p| /}")
print("( 0 )\"\"\"\\")
print("|\"^\"` |")
print("||_/=\\\\__|")
if __name__ == "__main__":
main()
| [] |
rupakc/Kaggle-Compendium | Whats Cooking/KaggleCookingComparison.py | 61634ba742f9a0239f2d1e45973c4bb477ac6306 | # -*- coding: utf-8 -*-
"""
Created on Sat Dec 26 13:20:45 2015
Code for Kaggle What's Cooking Competition
It uses the following classifiers with tf-idf, hashing vector and bag-of-words features:
1. Adaboost
2. Extratrees
3. Bagging
4. Random Forests
@author: Rupak Chakraborty
"""
import numpy as np
import time
import json
import ClassificationUtils
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.ensemble import BaggingClassifier
from sklearn import metrics
# Create the feature extractors
bag_of_words = CountVectorizer(stop_words='english')
tfidf = TfidfVectorizer(stop_words='english')
hashvec = HashingVectorizer(stop_words='english')
# Create the Classifier objects
adaboost = AdaBoostClassifier()
randomforest = RandomForestClassifier()
extratrees = ExtraTreesClassifier()
bagging = BaggingClassifier()
filepath = "train.json"
f = open(filepath,"r")
content = f.read()
jsonData = json.loads(content)
cuisine_set = set([])
ingredient_set = set([])
cuisine_map = {}
cuisine_numerical_map = {}
ingredient_numerical_map = {}
ingredient_map = {}
ingredient_list = list([])
c = 0
print "Size of the data set : ", len(jsonData)
print "Starting Loading of Data Set...."
start = time.time()
for recipe in jsonData:
if "cuisine" in recipe:
s = ""
if recipe["cuisine"] in cuisine_set:
cuisine_map[recipe["cuisine"]] = cuisine_map[recipe["cuisine"]] + 1
else:
cuisine_map[recipe["cuisine"]] = 1
cuisine_set.add(recipe["cuisine"])
for ingredient in recipe["ingredients"]:
if ingredient in ingredient_set:
ingredient_map[ingredient] = ingredient_map[ingredient] + 1
else:
ingredient_map[ingredient] = 1
ingredient_set.add(ingredient)
s = s + " " + ingredient
ingredient_list.append(s)
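# Each ingredient_list entry is now one recipe's ingredients joined into a single
# space-separated string (e.g., hypothetically, " romaine lettuce black olives feta cheese"),
# which is the document form the CountVectorizer/TfidfVectorizer/HashingVectorizer expect.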
end = time.time()
print "Time Taken to Load the Dataset : ",end-start
for cuisine in cuisine_set:
cuisine_numerical_map[cuisine] = c
c = c+1
c = 0
for ingredient in ingredient_set:
ingredient_numerical_map[ingredient] = c
c = c+1
print "Starting Feature Extracting ......"
start = time.time()
train_labels = np.zeros(len(ingredient_list))
train_data_tfidf = tfidf.fit_transform(ingredient_list)
train_data_hash = hashvec.fit_transform(ingredient_list)
train_data_bag = bag_of_words.fit_transform(ingredient_list)
c = 0
for recipe in jsonData:
if "cuisine" in recipe:
train_labels[c] = cuisine_numerical_map[recipe["cuisine"]]
c = c+1
end = time.time()
print "Time Taken to Train Extract Different Features : ", end-start
test_labels = train_labels[1:30000]
test_data_tfidf = tfidf.transform(ingredient_list[1:30000])
test_data_hash = hashvec.transform(ingredient_list[1:30000])
test_data_bag = bag_of_words.transform(ingredient_list[1:30000])
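# Note: the "test" slices above are taken from the same records the models are fit on,
# so the accuracy figures printed below measure fit on training data rather than
# generalisation to unseen recipes.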
print "Starting Training of Models for Hash Vectorizer Feature....."
start = time.time()
adaboost.fit(train_data_bag,train_labels)
randomforest.fit(train_data_bag,train_labels)
extratrees.fit(train_data_bag,train_labels)
bagging.fit(train_data_bag,train_labels)
end=time.time()
print "Time Taken to train all Ensemble Models : ", end-start
print "Starting Prediction of Test Labels ...."
start = time.time()
ada_predict = adaboost.predict(test_data_bag)
rf_predict = randomforest.predict(test_data_bag)
extree_predict = extratrees.predict(test_data_bag)
bagging_predict = bagging.predict(test_data_bag)
end = time.time()
print "Time Taken to Test the models : ", end-start
print "Accuracy of AdaBoost Algorithm : ", metrics.accuracy_score(test_labels,ada_predict)
print "Accuracy of Random Forests : ", metrics.accuracy_score(test_labels,rf_predict)
print "Accuracy of Extra Trees : ", metrics.accuracy_score(test_labels,extree_predict)
print "Accuracy of Bagging : ", metrics.accuracy_score(test_labels,bagging_predict)
# Saving the tf-idf model and classifiers
ClassificationUtils.save_classifier("ada_bag_cook.pickle",adaboost)
ClassificationUtils.save_classifier("rf_bag_cook.pickle",randomforest)
ClassificationUtils.save_classifier("extree_bag_cook.pickle",extratrees)
ClassificationUtils.save_classifier("bagging_bag_cook.pickle",bagging)
ClassificationUtils.save_classifier("bag_of_words.pickle",tfidf)
def printIngredientDistribution():
print "----------- Distribution of the Recipe Ingredients ------------------"
for key in ingredient_map.keys():
print key, " : " ,ingredient_map[key]
def printCuisineDistribution():
print "----------- Distribution of the Cuisines ------------------"
for key in cuisine_map.keys():
print key, " : " ,cuisine_map[key]
| [] |
YanhaoXu/python-learning | pybook/ch10/DeckOfCards.py | 856687a71635a2ca67dab49d396c238f128e5ec0 | import random
# Create a deck of cards
deck = [x for x in range(52)]
# Create suits and ranks lists
suits = ["Spades", "Hearts", "Diamonds", "Clubs"]
ranks = ["Ace", "2", "3", "4", "5", "6", "7", "8", "9",
"10", "Jack", "Queen", "King"]
# Shuffle the cards
random.shuffle(deck)
# Display the first four cards
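# Each card number 0-51 encodes both suit and rank: suit = card // 13, rank = card % 13
# (e.g. a hypothetical card 37 -> 37 // 13 = 2 -> Diamonds, 37 % 13 = 11 -> Queen).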
for i in range(4):
suit = suits[deck[i] // 13]
rank = ranks[deck[i] % 13]
print("Card number", deck[i], "is the", rank, "of", suit)
| [((269, 289), 'random.shuffle', 'random.shuffle', (['deck'], {}), '(deck)\n', (283, 289), False, 'import random\n')] |
ojipadeson/NLPGNN | nlpgnn/gnn/RGCNConv.py | 7c43d2f0cb2b16c046c930037fd505c5c4f36db4 | #! usr/bin/env python3
# -*- coding:utf-8 -*-
"""
@Author:Kaiyin Zhou
Usage:
node_embeddings = tf.random.normal(shape=(5, 3))
adjacency_lists = [
tf.constant([[0, 1], [2, 4], [2, 4]], dtype=tf.int32),
tf.constant([[0, 1], [2, 4], [2, 4]], dtype=tf.int32)
]
layer = RGraphConvolution(out_features=12)
x = layer(GNNInput(node_embeddings, adjacency_lists))
"""
import tensorflow as tf
from nlpgnn.gnn.messagepassing import MessagePassing
class RGraphConvolution(MessagePassing):
def __init__(self,
out_features,
epsion=1e-7,
aggr="sum",
normalize=True,
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
use_bias=True,
**kwargs):
super(RGraphConvolution, self).__init__(aggr, **kwargs)
self.kernel_initializer = tf.keras.initializers.get(kernel_initializer)
self.bias_initializer = tf.keras.initializers.get(bias_initializer)
self.use_bias = use_bias
self.normalize = normalize
self.out_features = out_features
self.epsion = epsion
def build(self, input_shapes):
node_embedding_shapes = input_shapes.node_embeddings
adjacency_list_shapes = input_shapes.adjacency_lists
num_edge_type = len(adjacency_list_shapes)
in_features = node_embedding_shapes[-1]
self._edge_type_weights = []
self._edge_type_bias = []
for i in range(num_edge_type):
weight = self.add_weight(
shape=(in_features, self.out_features),
initializer=self.kernel_initializer,
name='wt_{}'.format(i),
)
self._edge_type_weights.append(weight)
if self.use_bias:
self.bias = self.add_weight(
                    shape=(self.out_features,),
initializer=self.bias_initializer,
name='b',
)
else:
self.bias = None
self.weight_o = self.add_weight(
shape=(in_features, self.out_features),
initializer=self.kernel_initializer,
name='wo',
)
self.built = True
def message_function(self, edge_source_states,
edge_target_states,
num_incoming_to_node_per_message,
num_outing_to_node_per_message,
edge_type_idx):
"""
:param edge_source_states: [M,H]
:param edge_target_states: [M,H]
:param num_incoming_to_node_per_message:[M]
:param edge_type_idx:
:param training:
:return:
"""
weight_r = self._edge_type_weights[edge_type_idx]
messages = tf.linalg.matmul(edge_source_states, weight_r)
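        # optionally scale each message by the inverse in-degree of the receiving node
        # for this relation (the 1/c_{i,r} normalisation in R-GCN), with a small constant
        # added to avoid division by zero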
if self.normalize:
messages = (
tf.expand_dims(1.0 / (tf.cast(num_incoming_to_node_per_message,
tf.float32) + self.epsion), axis=-1) * messages
)
return messages
def call(self, inputs):
aggr_out = self.propagate(inputs) # message_passing + update
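        # add the self-connection term: the node's own features transformed by weight_o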
aggr_out += tf.linalg.matmul(inputs.node_embeddings, self.weight_o)
if self.bias is not None:
aggr_out += self.bias
return aggr_out
| [((938, 983), 'tensorflow.keras.initializers.get', 'tf.keras.initializers.get', (['kernel_initializer'], {}), '(kernel_initializer)\n', (963, 983), True, 'import tensorflow as tf\n'), ((1016, 1059), 'tensorflow.keras.initializers.get', 'tf.keras.initializers.get', (['bias_initializer'], {}), '(bias_initializer)\n', (1041, 1059), True, 'import tensorflow as tf\n'), ((2824, 2870), 'tensorflow.linalg.matmul', 'tf.linalg.matmul', (['edge_source_states', 'weight_r'], {}), '(edge_source_states, weight_r)\n', (2840, 2870), True, 'import tensorflow as tf\n'), ((3262, 3317), 'tensorflow.linalg.matmul', 'tf.linalg.matmul', (['inputs.node_embeddings', 'self.weight_o'], {}), '(inputs.node_embeddings, self.weight_o)\n', (3278, 3317), True, 'import tensorflow as tf\n'), ((2965, 3018), 'tensorflow.cast', 'tf.cast', (['num_incoming_to_node_per_message', 'tf.float32'], {}), '(num_incoming_to_node_per_message, tf.float32)\n', (2972, 3018), True, 'import tensorflow as tf\n')] |
erikwebb/google-cloud-python | automl/google/cloud/automl_v1beta1/gapic/auto_ml_client.py | 288a878e9a07239015c78a193eca1cc15e926127 | # -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.cloud.automl.v1beta1 AutoMl API."""
import functools
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.grpc_helpers
import google.api_core.operation
import google.api_core.operations_v1
import google.api_core.page_iterator
import google.api_core.path_template
import grpc
from google.cloud.automl_v1beta1.gapic import auto_ml_client_config
from google.cloud.automl_v1beta1.gapic import enums
from google.cloud.automl_v1beta1.gapic.transports import auto_ml_grpc_transport
from google.cloud.automl_v1beta1.proto import data_items_pb2
from google.cloud.automl_v1beta1.proto import dataset_pb2
from google.cloud.automl_v1beta1.proto import io_pb2
from google.cloud.automl_v1beta1.proto import model_evaluation_pb2
from google.cloud.automl_v1beta1.proto import model_pb2
from google.cloud.automl_v1beta1.proto import operations_pb2 as proto_operations_pb2
from google.cloud.automl_v1beta1.proto import prediction_service_pb2
from google.cloud.automl_v1beta1.proto import prediction_service_pb2_grpc
from google.cloud.automl_v1beta1.proto import service_pb2
from google.cloud.automl_v1beta1.proto import service_pb2_grpc
from google.longrunning import operations_pb2 as longrunning_operations_pb2
from google.protobuf import empty_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-automl").version
class AutoMlClient(object):
"""
AutoML Server API.
The resource names are assigned by the server. The server never reuses
names that it has created after the resources with those names are
deleted.
An ID of a resource is the last element of the item's resource name. For
``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``,
then the id for the item is ``{dataset_id}``.
"""
SERVICE_ADDRESS = "automl.googleapis.com:443"
"""The default address of the service."""
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = "google.cloud.automl.v1beta1.AutoMl"
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
AutoMlClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@classmethod
def location_path(cls, project, location):
"""Return a fully-qualified location string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}",
project=project,
location=location,
)
@classmethod
def dataset_path(cls, project, location, dataset):
"""Return a fully-qualified dataset string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/datasets/{dataset}",
project=project,
location=location,
dataset=dataset,
)
@classmethod
def model_path(cls, project, location, model):
"""Return a fully-qualified model string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/models/{model}",
project=project,
location=location,
model=model,
)
@classmethod
def model_evaluation_path(cls, project, location, model, model_evaluation):
"""Return a fully-qualified model_evaluation string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/models/{model}/modelEvaluations/{model_evaluation}",
project=project,
location=location,
model=model,
model_evaluation=model_evaluation,
)
def __init__(
self,
transport=None,
channel=None,
credentials=None,
client_config=None,
client_info=None,
):
"""Constructor.
Args:
transport (Union[~.AutoMlGrpcTransport,
Callable[[~.Credentials, type], ~.AutoMlGrpcTransport]): A transport
instance, responsible for actually making the API calls.
The default transport uses the gRPC protocol.
This argument may also be a callable which returns a
transport instance. Callables will be sent the credentials
as the first argument and the default transport class as
the second argument.
channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
through which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is mutually exclusive with providing a
transport instance to ``transport``; doing so will raise
an exception.
client_config (dict): DEPRECATED. A dictionary of call options for
each method. If not specified, the default configuration is used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
# Raise deprecation warnings for things we want to go away.
if client_config is not None:
warnings.warn(
"The `client_config` argument is deprecated.",
PendingDeprecationWarning,
stacklevel=2,
)
else:
client_config = auto_ml_client_config.config
if channel:
warnings.warn(
"The `channel` argument is deprecated; use " "`transport` instead.",
PendingDeprecationWarning,
stacklevel=2,
)
# Instantiate the transport.
# The transport is responsible for handling serialization and
# deserialization and actually sending data to the service.
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=auto_ml_grpc_transport.AutoMlGrpcTransport,
)
else:
if credentials:
raise ValueError(
"Received both a transport instance and "
"credentials; these are mutually exclusive."
)
self.transport = transport
else:
self.transport = auto_ml_grpc_transport.AutoMlGrpcTransport(
address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
gapic_version=_GAPIC_LIBRARY_VERSION
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
# Parse out the default settings for retry and timeout for each RPC
# from the client configuration.
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config["interfaces"][self._INTERFACE_NAME]
)
# Save a dictionary of cached API call functions.
# These are the actual callables which invoke the proper
# transport methods, wrapped with `wrap_method` to add retry,
# timeout, and the like.
self._inner_api_calls = {}
# Service calls
def create_dataset(
self,
parent,
dataset,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a dataset.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `dataset`:
>>> dataset = {}
>>>
>>> response = client.create_dataset(parent, dataset)
Args:
parent (str): The resource name of the project to create the dataset for.
dataset (Union[dict, ~google.cloud.automl_v1beta1.types.Dataset]): The dataset to create.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.automl_v1beta1.types.Dataset`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.automl_v1beta1.types.Dataset` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_dataset" not in self._inner_api_calls:
self._inner_api_calls[
"create_dataset"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_dataset,
default_retry=self._method_configs["CreateDataset"].retry,
default_timeout=self._method_configs["CreateDataset"].timeout,
client_info=self._client_info,
)
request = service_pb2.CreateDatasetRequest(parent=parent, dataset=dataset)
return self._inner_api_calls["create_dataset"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def get_dataset(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets a dataset.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]')
>>>
>>> response = client.get_dataset(name)
Args:
name (str): The resource name of the dataset to retrieve.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.automl_v1beta1.types.Dataset` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_dataset" not in self._inner_api_calls:
self._inner_api_calls[
"get_dataset"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_dataset,
default_retry=self._method_configs["GetDataset"].retry,
default_timeout=self._method_configs["GetDataset"].timeout,
client_info=self._client_info,
)
request = service_pb2.GetDatasetRequest(name=name)
return self._inner_api_calls["get_dataset"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def list_datasets(
self,
parent,
filter_=None,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists datasets in a project.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # Iterate over all results
>>> for element in client.list_datasets(parent):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_datasets(parent).pages:
... for element in page:
... # process element
... pass
Args:
parent (str): The resource name of the project from which to list datasets.
filter_ (str): An expression for filtering the results of the request.
- ``dataset_metadata`` - for existence of the case.
An example of using the filter is:
- ``translation_dataset_metadata:*`` --> The dataset has
translation\_dataset\_metadata.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.automl_v1beta1.types.Dataset` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "list_datasets" not in self._inner_api_calls:
self._inner_api_calls[
"list_datasets"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_datasets,
default_retry=self._method_configs["ListDatasets"].retry,
default_timeout=self._method_configs["ListDatasets"].timeout,
client_info=self._client_info,
)
request = service_pb2.ListDatasetsRequest(
parent=parent, filter=filter_, page_size=page_size
)
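        # GRPCIterator lazily calls the wrapped method, yielding items from the
        # ``datasets`` field and following ``next_page_token``/``page_token``
        # to fetch additional pages only when they are actually consumed.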
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_datasets"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="datasets",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator
def delete_dataset(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Deletes a dataset and all of its contents. Returns empty response in the
``response`` field when it completes, and ``delete_details`` in the
``metadata`` field.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]')
>>>
>>> response = client.delete_dataset(name)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
name (str): The resource name of the dataset to delete.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "delete_dataset" not in self._inner_api_calls:
self._inner_api_calls[
"delete_dataset"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_dataset,
default_retry=self._method_configs["DeleteDataset"].retry,
default_timeout=self._method_configs["DeleteDataset"].timeout,
client_info=self._client_info,
)
request = service_pb2.DeleteDatasetRequest(name=name)
operation = self._inner_api_calls["delete_dataset"](
request, retry=retry, timeout=timeout, metadata=metadata
)
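        # Wrap the long-running operation so callers can block on ``result()``
        # (an ``Empty`` message here) or poll ``metadata()`` for progress
        # (``OperationMetadata``).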
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
empty_pb2.Empty,
metadata_type=proto_operations_pb2.OperationMetadata,
)
def import_data(
self,
name,
input_config,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Imports data into a dataset. Returns an empty response in the
``response`` field when it completes.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]')
>>>
>>> # TODO: Initialize `input_config`:
>>> input_config = {}
>>>
>>> response = client.import_data(name, input_config)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
name (str): Required. Dataset name. Dataset must already exist. All imported
annotations and examples will be added.
input_config (Union[dict, ~google.cloud.automl_v1beta1.types.InputConfig]): Required. The desired input location.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.automl_v1beta1.types.InputConfig`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "import_data" not in self._inner_api_calls:
self._inner_api_calls[
"import_data"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.import_data,
default_retry=self._method_configs["ImportData"].retry,
default_timeout=self._method_configs["ImportData"].timeout,
client_info=self._client_info,
)
request = service_pb2.ImportDataRequest(name=name, input_config=input_config)
operation = self._inner_api_calls["import_data"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
empty_pb2.Empty,
metadata_type=proto_operations_pb2.OperationMetadata,
)
def export_data(
self,
name,
output_config,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Exports dataset's data to a Google Cloud Storage bucket. Returns an
empty response in the ``response`` field when it completes.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]')
>>>
>>> # TODO: Initialize `output_config`:
>>> output_config = {}
>>>
>>> response = client.export_data(name, output_config)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
name (str): Required. The resource name of the dataset.
output_config (Union[dict, ~google.cloud.automl_v1beta1.types.OutputConfig]): Required. The desired output location.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.automl_v1beta1.types.OutputConfig`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "export_data" not in self._inner_api_calls:
self._inner_api_calls[
"export_data"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.export_data,
default_retry=self._method_configs["ExportData"].retry,
default_timeout=self._method_configs["ExportData"].timeout,
client_info=self._client_info,
)
request = service_pb2.ExportDataRequest(name=name, output_config=output_config)
operation = self._inner_api_calls["export_data"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
empty_pb2.Empty,
metadata_type=proto_operations_pb2.OperationMetadata,
)
def create_model(
self,
parent,
model,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a model. Returns a Model in the ``response`` field when it
completes. When you create a model, several model evaluations are
created for it: a global evaluation, and one evaluation for each
annotation spec.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `model`:
>>> model = {}
>>>
>>> response = client.create_model(parent, model)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (str): Resource name of the parent project where the model is being created.
model (Union[dict, ~google.cloud.automl_v1beta1.types.Model]): The model to create.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.automl_v1beta1.types.Model`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_model" not in self._inner_api_calls:
self._inner_api_calls[
"create_model"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_model,
default_retry=self._method_configs["CreateModel"].retry,
default_timeout=self._method_configs["CreateModel"].timeout,
client_info=self._client_info,
)
request = service_pb2.CreateModelRequest(parent=parent, model=model)
operation = self._inner_api_calls["create_model"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
model_pb2.Model,
metadata_type=proto_operations_pb2.OperationMetadata,
)
def get_model(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets a model.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]')
>>>
>>> response = client.get_model(name)
Args:
name (str): Resource name of the model.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.automl_v1beta1.types.Model` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_model" not in self._inner_api_calls:
self._inner_api_calls[
"get_model"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_model,
default_retry=self._method_configs["GetModel"].retry,
default_timeout=self._method_configs["GetModel"].timeout,
client_info=self._client_info,
)
request = service_pb2.GetModelRequest(name=name)
return self._inner_api_calls["get_model"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def list_models(
self,
parent,
filter_=None,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists models.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # Iterate over all results
>>> for element in client.list_models(parent):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_models(parent).pages:
... for element in page:
... # process element
... pass
Args:
parent (str): Resource name of the project, from which to list the models.
filter_ (str): An expression for filtering the results of the request.
- ``model_metadata`` - for existence of the case.
- ``dataset_id`` - for = or !=.
Some examples of using the filter are:
- ``image_classification_model_metadata:*`` --> The model has
image\_classification\_model\_metadata.
- ``dataset_id=5`` --> The model was created from a sibling dataset
with ID 5.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.automl_v1beta1.types.Model` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "list_models" not in self._inner_api_calls:
self._inner_api_calls[
"list_models"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_models,
default_retry=self._method_configs["ListModels"].retry,
default_timeout=self._method_configs["ListModels"].timeout,
client_info=self._client_info,
)
request = service_pb2.ListModelsRequest(
parent=parent, filter=filter_, page_size=page_size
)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_models"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="model",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator
def delete_model(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Deletes a model. If a model is already deployed, this only deletes the
model in AutoML BE, and does not change the status of the deployed model
in the production environment. Returns ``google.protobuf.Empty`` in the
``response`` field when it completes, and ``delete_details`` in the
``metadata`` field.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]')
>>>
>>> response = client.delete_model(name)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
name (str): Resource name of the model being deleted.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "delete_model" not in self._inner_api_calls:
self._inner_api_calls[
"delete_model"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_model,
default_retry=self._method_configs["DeleteModel"].retry,
default_timeout=self._method_configs["DeleteModel"].timeout,
client_info=self._client_info,
)
request = service_pb2.DeleteModelRequest(name=name)
operation = self._inner_api_calls["delete_model"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
empty_pb2.Empty,
metadata_type=proto_operations_pb2.OperationMetadata,
)
def deploy_model(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Deploys model. Returns a ``DeployModelResponse`` in the ``response``
field when it completes.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]')
>>>
>>> response = client.deploy_model(name)
Args:
name (str): Resource name of the model to deploy.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.automl_v1beta1.types.Operation` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "deploy_model" not in self._inner_api_calls:
self._inner_api_calls[
"deploy_model"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.deploy_model,
default_retry=self._method_configs["DeployModel"].retry,
default_timeout=self._method_configs["DeployModel"].timeout,
client_info=self._client_info,
)
request = service_pb2.DeployModelRequest(name=name)
return self._inner_api_calls["deploy_model"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def undeploy_model(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Undeploys model. Returns an ``UndeployModelResponse`` in the
``response`` field when it completes.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]')
>>>
>>> response = client.undeploy_model(name)
Args:
name (str): Resource name of the model to undeploy.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.automl_v1beta1.types.Operation` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "undeploy_model" not in self._inner_api_calls:
self._inner_api_calls[
"undeploy_model"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.undeploy_model,
default_retry=self._method_configs["UndeployModel"].retry,
default_timeout=self._method_configs["UndeployModel"].timeout,
client_info=self._client_info,
)
request = service_pb2.UndeployModelRequest(name=name)
return self._inner_api_calls["undeploy_model"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def get_model_evaluation(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets a model evaluation.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> name = client.model_evaluation_path('[PROJECT]', '[LOCATION]', '[MODEL]', '[MODEL_EVALUATION]')
>>>
>>> response = client.get_model_evaluation(name)
Args:
name (str): Resource name for the model evaluation.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.automl_v1beta1.types.ModelEvaluation` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_model_evaluation" not in self._inner_api_calls:
self._inner_api_calls[
"get_model_evaluation"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_model_evaluation,
default_retry=self._method_configs["GetModelEvaluation"].retry,
default_timeout=self._method_configs["GetModelEvaluation"].timeout,
client_info=self._client_info,
)
request = service_pb2.GetModelEvaluationRequest(name=name)
return self._inner_api_calls["get_model_evaluation"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def list_model_evaluations(
self,
parent,
filter_=None,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists model evaluations.
Example:
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.AutoMlClient()
>>>
>>> parent = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]')
>>>
>>> # Iterate over all results
>>> for element in client.list_model_evaluations(parent):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_model_evaluations(parent).pages:
... for element in page:
... # process element
... pass
Args:
parent (str): Resource name of the model to list the model evaluations for.
If modelId is set as "-", this will list model evaluations from across all
models of the parent location.
filter_ (str): An expression for filtering the results of the request.
- ``annotation_spec_id`` - for =, != or existence. See example below
for the last.
Some examples of using the filter are:
- ``annotation_spec_id!=4`` --> The model evaluation was done for
annotation spec with ID different than 4.
- ``NOT annotation_spec_id:*`` --> The model evaluation was done for
aggregate of all annotation specs.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.automl_v1beta1.types.ModelEvaluation` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "list_model_evaluations" not in self._inner_api_calls:
self._inner_api_calls[
"list_model_evaluations"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_model_evaluations,
default_retry=self._method_configs["ListModelEvaluations"].retry,
default_timeout=self._method_configs["ListModelEvaluations"].timeout,
client_info=self._client_info,
)
request = service_pb2.ListModelEvaluationsRequest(
parent=parent, filter=filter_, page_size=page_size
)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_model_evaluations"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="model_evaluation",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator
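# A minimal end-to-end sketch (illustrative only, not part of the generated
# client). The dict payloads follow the v1beta1 translation protos and the
# bucket/URI values are placeholders to adapt:
#
#     from google.cloud import automl_v1beta1
#
#     client = automl_v1beta1.AutoMlClient()
#     parent = client.location_path('[PROJECT]', '[LOCATION]')
#     dataset = client.create_dataset(parent, {
#         'display_name': 'my_dataset',
#         'translation_dataset_metadata': {
#             'source_language_code': 'en',
#             'target_language_code': 'es',
#         },
#     })
#     client.import_data(dataset.name, {
#         'gcs_source': {'input_uris': ['gs://my-bucket/pairs.tsv']},
#     }).result()
#     for model in client.list_models(parent):
#         print(model.name)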
| [((2057, 2110), 'pkg_resources.get_distribution', 'pkg_resources.get_distribution', (['"""google-cloud-automl"""'], {}), "('google-cloud-automl')\n", (2087, 2110), False, 'import pkg_resources\n'), ((3380, 3443), 'google.oauth2.service_account.Credentials.from_service_account_file', 'service_account.Credentials.from_service_account_file', (['filename'], {}), '(filename)\n', (3433, 3443), False, 'from google.oauth2 import service_account\n'), ((11996, 12060), 'google.cloud.automl_v1beta1.proto.service_pb2.CreateDatasetRequest', 'service_pb2.CreateDatasetRequest', ([], {'parent': 'parent', 'dataset': 'dataset'}), '(parent=parent, dataset=dataset)\n', (12028, 12060), False, 'from google.cloud.automl_v1beta1.proto import service_pb2\n'), ((14322, 14362), 'google.cloud.automl_v1beta1.proto.service_pb2.GetDatasetRequest', 'service_pb2.GetDatasetRequest', ([], {'name': 'name'}), '(name=name)\n', (14351, 14362), False, 'from google.cloud.automl_v1beta1.proto import service_pb2\n'), ((18051, 18139), 'google.cloud.automl_v1beta1.proto.service_pb2.ListDatasetsRequest', 'service_pb2.ListDatasetsRequest', ([], {'parent': 'parent', 'filter': 'filter_', 'page_size': 'page_size'}), '(parent=parent, filter=filter_, page_size=\n page_size)\n', (18082, 18139), False, 'from google.cloud.automl_v1beta1.proto import service_pb2\n'), ((21286, 21329), 'google.cloud.automl_v1beta1.proto.service_pb2.DeleteDatasetRequest', 'service_pb2.DeleteDatasetRequest', ([], {'name': 'name'}), '(name=name)\n', (21318, 21329), False, 'from google.cloud.automl_v1beta1.proto import service_pb2\n'), ((24750, 24817), 'google.cloud.automl_v1beta1.proto.service_pb2.ImportDataRequest', 'service_pb2.ImportDataRequest', ([], {'name': 'name', 'input_config': 'input_config'}), '(name=name, input_config=input_config)\n', (24779, 24817), False, 'from google.cloud.automl_v1beta1.proto import service_pb2\n'), ((28194, 28263), 'google.cloud.automl_v1beta1.proto.service_pb2.ExportDataRequest', 'service_pb2.ExportDataRequest', ([], {'name': 'name', 'output_config': 'output_config'}), '(name=name, output_config=output_config)\n', (28223, 28263), False, 'from google.cloud.automl_v1beta1.proto import service_pb2\n'), ((31700, 31758), 'google.cloud.automl_v1beta1.proto.service_pb2.CreateModelRequest', 'service_pb2.CreateModelRequest', ([], {'parent': 'parent', 'model': 'model'}), '(parent=parent, model=model)\n', (31730, 31758), False, 'from google.cloud.automl_v1beta1.proto import service_pb2\n'), ((34211, 34249), 'google.cloud.automl_v1beta1.proto.service_pb2.GetModelRequest', 'service_pb2.GetModelRequest', ([], {'name': 'name'}), '(name=name)\n', (34238, 34249), False, 'from google.cloud.automl_v1beta1.proto import service_pb2\n'), ((38081, 38167), 'google.cloud.automl_v1beta1.proto.service_pb2.ListModelsRequest', 'service_pb2.ListModelsRequest', ([], {'parent': 'parent', 'filter': 'filter_', 'page_size': 'page_size'}), '(parent=parent, filter=filter_, page_size=\n page_size)\n', (38110, 38167), False, 'from google.cloud.automl_v1beta1.proto import service_pb2\n'), ((41448, 41489), 'google.cloud.automl_v1beta1.proto.service_pb2.DeleteModelRequest', 'service_pb2.DeleteModelRequest', ([], {'name': 'name'}), '(name=name)\n', (41478, 41489), False, 'from google.cloud.automl_v1beta1.proto import service_pb2\n'), ((44065, 44106), 'google.cloud.automl_v1beta1.proto.service_pb2.DeployModelRequest', 'service_pb2.DeployModelRequest', ([], {'name': 'name'}), '(name=name)\n', (44095, 44106), False, 'from google.cloud.automl_v1beta1.proto import 
service_pb2\n'), ((46470, 46513), 'google.cloud.automl_v1beta1.proto.service_pb2.UndeployModelRequest', 'service_pb2.UndeployModelRequest', ([], {'name': 'name'}), '(name=name)\n', (46502, 46513), False, 'from google.cloud.automl_v1beta1.proto import service_pb2\n'), ((48876, 48924), 'google.cloud.automl_v1beta1.proto.service_pb2.GetModelEvaluationRequest', 'service_pb2.GetModelEvaluationRequest', ([], {'name': 'name'}), '(name=name)\n', (48913, 48924), False, 'from google.cloud.automl_v1beta1.proto import service_pb2\n'), ((53055, 53150), 'google.cloud.automl_v1beta1.proto.service_pb2.ListModelEvaluationsRequest', 'service_pb2.ListModelEvaluationsRequest', ([], {'parent': 'parent', 'filter': 'filter_', 'page_size': 'page_size'}), '(parent=parent, filter=filter_,\n page_size=page_size)\n', (53094, 53150), False, 'from google.cloud.automl_v1beta1.proto import service_pb2\n'), ((7146, 7251), 'warnings.warn', 'warnings.warn', (['"""The `client_config` argument is deprecated."""', 'PendingDeprecationWarning'], {'stacklevel': '(2)'}), "('The `client_config` argument is deprecated.',\n PendingDeprecationWarning, stacklevel=2)\n", (7159, 7251), False, 'import warnings\n'), ((7415, 7539), 'warnings.warn', 'warnings.warn', (['"""The `channel` argument is deprecated; use `transport` instead."""', 'PendingDeprecationWarning'], {'stacklevel': '(2)'}), "('The `channel` argument is deprecated; use `transport` instead.',\n PendingDeprecationWarning, stacklevel=2)\n", (7428, 7539), False, 'import warnings\n'), ((8352, 8470), 'google.cloud.automl_v1beta1.gapic.transports.auto_ml_grpc_transport.AutoMlGrpcTransport', 'auto_ml_grpc_transport.AutoMlGrpcTransport', ([], {'address': 'self.SERVICE_ADDRESS', 'channel': 'channel', 'credentials': 'credentials'}), '(address=self.SERVICE_ADDRESS,\n channel=channel, credentials=credentials)\n', (8394, 8470), False, 'from google.cloud.automl_v1beta1.gapic.transports import auto_ml_grpc_transport\n'), ((18264, 18374), 'functools.partial', 'functools.partial', (["self._inner_api_calls['list_datasets']"], {'retry': 'retry', 'timeout': 'timeout', 'metadata': 'metadata'}), "(self._inner_api_calls['list_datasets'], retry=retry,\n timeout=timeout, metadata=metadata)\n", (18281, 18374), False, 'import functools\n'), ((38292, 38400), 'functools.partial', 'functools.partial', (["self._inner_api_calls['list_models']"], {'retry': 'retry', 'timeout': 'timeout', 'metadata': 'metadata'}), "(self._inner_api_calls['list_models'], retry=retry,\n timeout=timeout, metadata=metadata)\n", (38309, 38400), False, 'import functools\n'), ((53276, 53396), 'functools.partial', 'functools.partial', (["self._inner_api_calls['list_model_evaluations']"], {'retry': 'retry', 'timeout': 'timeout', 'metadata': 'metadata'}), "(self._inner_api_calls['list_model_evaluations'], retry=\n retry, timeout=timeout, metadata=metadata)\n", (53293, 53396), False, 'import functools\n')] |
SHIVJITH/Odoo_Machine_Test | addons/project/models/project.py | 310497a9872db7844b521e6dab5f7a9f61d365a4 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import ast
from datetime import timedelta, datetime
from random import randint
from odoo import api, fields, models, tools, SUPERUSER_ID, _
from odoo.exceptions import UserError, AccessError, ValidationError, RedirectWarning
from odoo.tools.misc import format_date, get_lang
from odoo.osv.expression import OR
from .project_task_recurrence import DAYS, WEEKS
class ProjectTaskType(models.Model):
_name = 'project.task.type'
_description = 'Task Stage'
_order = 'sequence, id'
def _get_default_project_ids(self):
default_project_id = self.env.context.get('default_project_id')
return [default_project_id] if default_project_id else None
active = fields.Boolean('Active', default=True)
name = fields.Char(string='Stage Name', required=True, translate=True)
description = fields.Text(translate=True)
sequence = fields.Integer(default=1)
project_ids = fields.Many2many('project.project', 'project_task_type_rel', 'type_id', 'project_id', string='Projects',
default=_get_default_project_ids)
legend_blocked = fields.Char(
'Red Kanban Label', default=lambda s: _('Blocked'), translate=True, required=True,
help='Override the default value displayed for the blocked state for kanban selection, when the task or issue is in that stage.')
legend_done = fields.Char(
'Green Kanban Label', default=lambda s: _('Ready'), translate=True, required=True,
help='Override the default value displayed for the done state for kanban selection, when the task or issue is in that stage.')
legend_normal = fields.Char(
'Grey Kanban Label', default=lambda s: _('In Progress'), translate=True, required=True,
help='Override the default value displayed for the normal state for kanban selection, when the task or issue is in that stage.')
mail_template_id = fields.Many2one(
'mail.template',
string='Email Template',
domain=[('model', '=', 'project.task')],
help="If set an email will be sent to the customer when the task or issue reaches this step.")
fold = fields.Boolean(string='Folded in Kanban',
help='This stage is folded in the kanban view when there are no records in that stage to display.')
rating_template_id = fields.Many2one(
'mail.template',
string='Rating Email Template',
domain=[('model', '=', 'project.task')],
help="If set and if the project's rating configuration is 'Rating when changing stage', then an email will be sent to the customer when the task reaches this step.")
auto_validation_kanban_state = fields.Boolean('Automatic kanban status', default=False,
help="Automatically modify the kanban state when the customer replies to the feedback for this stage.\n"
" * A good feedback from the customer will update the kanban state to 'ready for the new stage' (green bullet).\n"
" * A medium or a bad feedback will set the kanban state to 'blocked' (red bullet).\n")
is_closed = fields.Boolean('Closing Stage', help="Tasks in this stage are considered as closed.")
disabled_rating_warning = fields.Text(compute='_compute_disabled_rating_warning')
def unlink_wizard(self, stage_view=False):
self = self.with_context(active_test=False)
        # retrieves all the projects with at least 1 task in that stage
# a task can be in a stage even if the project is not assigned to the stage
readgroup = self.with_context(active_test=False).env['project.task'].read_group([('stage_id', 'in', self.ids)], ['project_id'], ['project_id'])
project_ids = list(set([project['project_id'][0] for project in readgroup] + self.project_ids.ids))
wizard = self.with_context(project_ids=project_ids).env['project.task.type.delete.wizard'].create({
'project_ids': project_ids,
'stage_ids': self.ids
})
context = dict(self.env.context)
context['stage_view'] = stage_view
return {
'name': _('Delete Stage'),
'view_mode': 'form',
'res_model': 'project.task.type.delete.wizard',
'views': [(self.env.ref('project.view_project_task_type_delete_wizard').id, 'form')],
'type': 'ir.actions.act_window',
'res_id': wizard.id,
'target': 'new',
'context': context,
}
def write(self, vals):
if 'active' in vals and not vals['active']:
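            # Archiving a stage also archives every task currently in that stage.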
self.env['project.task'].search([('stage_id', 'in', self.ids)]).write({'active': False})
return super(ProjectTaskType, self).write(vals)
@api.depends('project_ids', 'project_ids.rating_active')
def _compute_disabled_rating_warning(self):
for stage in self:
disabled_projects = stage.project_ids.filtered(lambda p: not p.rating_active)
if disabled_projects:
stage.disabled_rating_warning = '\n'.join('- %s' % p.name for p in disabled_projects)
else:
stage.disabled_rating_warning = False
class Project(models.Model):
_name = "project.project"
_description = "Project"
_inherit = ['portal.mixin', 'mail.alias.mixin', 'mail.thread', 'rating.parent.mixin']
_order = "sequence, name, id"
_rating_satisfaction_days = False # takes all existing ratings
_check_company_auto = True
def _compute_attached_docs_count(self):
Attachment = self.env['ir.attachment']
for project in self:
project.doc_count = Attachment.search_count([
'|',
'&',
('res_model', '=', 'project.project'), ('res_id', '=', project.id),
'&',
('res_model', '=', 'project.task'), ('res_id', 'in', project.task_ids.ids)
])
def _compute_task_count(self):
task_data = self.env['project.task'].read_group([('project_id', 'in', self.ids), '|', '&', ('stage_id.is_closed', '=', False), ('stage_id.fold', '=', False), ('stage_id', '=', False)], ['project_id'], ['project_id'])
result = dict((data['project_id'][0], data['project_id_count']) for data in task_data)
for project in self:
project.task_count = result.get(project.id, 0)
def attachment_tree_view(self):
action = self.env['ir.actions.act_window']._for_xml_id('base.action_attachment')
action['domain'] = str([
'|',
'&',
('res_model', '=', 'project.project'),
('res_id', 'in', self.ids),
'&',
('res_model', '=', 'project.task'),
('res_id', 'in', self.task_ids.ids)
])
action['context'] = "{'default_res_model': '%s','default_res_id': %d}" % (self._name, self.id)
return action
def _compute_is_favorite(self):
for project in self:
project.is_favorite = self.env.user in project.favorite_user_ids
def _inverse_is_favorite(self):
favorite_projects = not_fav_projects = self.env['project.project'].sudo()
for project in self:
if self.env.user in project.favorite_user_ids:
favorite_projects |= project
else:
not_fav_projects |= project
        # Project users have no write access to the project, hence the sudo() recordsets above.
not_fav_projects.write({'favorite_user_ids': [(4, self.env.uid)]})
favorite_projects.write({'favorite_user_ids': [(3, self.env.uid)]})
def _get_default_favorite_user_ids(self):
return [(6, 0, [self.env.uid])]
name = fields.Char("Name", index=True, required=True, tracking=True)
description = fields.Html()
active = fields.Boolean(default=True,
help="If the active field is set to False, it will allow you to hide the project without removing it.")
sequence = fields.Integer(default=10, help="Gives the sequence order when displaying a list of Projects.")
partner_id = fields.Many2one('res.partner', string='Customer', auto_join=True, tracking=True, domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]")
partner_email = fields.Char(
compute='_compute_partner_email', inverse='_inverse_partner_email',
string='Email', readonly=False, store=True, copy=False)
partner_phone = fields.Char(
compute='_compute_partner_phone', inverse='_inverse_partner_phone',
string="Phone", readonly=False, store=True, copy=False)
company_id = fields.Many2one('res.company', string='Company', required=True, default=lambda self: self.env.company)
currency_id = fields.Many2one('res.currency', related="company_id.currency_id", string="Currency", readonly=True)
analytic_account_id = fields.Many2one('account.analytic.account', string="Analytic Account", copy=False, ondelete='set null',
domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]", check_company=True,
help="Analytic account to which this project is linked for financial management. "
"Use an analytic account to record cost and revenue on your project.")
favorite_user_ids = fields.Many2many(
'res.users', 'project_favorite_user_rel', 'project_id', 'user_id',
default=_get_default_favorite_user_ids,
string='Members')
is_favorite = fields.Boolean(compute='_compute_is_favorite', inverse='_inverse_is_favorite', string='Show Project on dashboard',
help="Whether this project should be displayed on your dashboard.")
label_tasks = fields.Char(string='Use Tasks as', default='Tasks', help="Label used for the tasks of the project.", translate=True)
tasks = fields.One2many('project.task', 'project_id', string="Task Activities")
resource_calendar_id = fields.Many2one(
'resource.calendar', string='Working Time',
related='company_id.resource_calendar_id')
type_ids = fields.Many2many('project.task.type', 'project_task_type_rel', 'project_id', 'type_id', string='Tasks Stages')
task_count = fields.Integer(compute='_compute_task_count', string="Task Count")
task_ids = fields.One2many('project.task', 'project_id', string='Tasks',
domain=['|', ('stage_id.fold', '=', False), ('stage_id', '=', False)])
color = fields.Integer(string='Color Index')
user_id = fields.Many2one('res.users', string='Project Manager', default=lambda self: self.env.user, tracking=True)
alias_enabled = fields.Boolean(string='Use email alias', compute='_compute_alias_enabled', readonly=False)
alias_id = fields.Many2one('mail.alias', string='Alias', ondelete="restrict", required=True,
help="Internal email associated with this project. Incoming emails are automatically synchronized "
"with Tasks (or optionally Issues if the Issue Tracker module is installed).")
privacy_visibility = fields.Selection([
('followers', 'Invited internal users'),
('employees', 'All internal users'),
('portal', 'Invited portal users and all internal users'),
],
string='Visibility', required=True,
default='portal',
help="Defines the visibility of the tasks of the project:\n"
"- Invited internal users: employees may only see the followed project and tasks.\n"
"- All internal users: employees may see all project and tasks.\n"
"- Invited portal and all internal users: employees may see everything."
" Portal users may see project and tasks followed by\n"
" them or by someone of their company.")
allowed_user_ids = fields.Many2many('res.users', compute='_compute_allowed_users', inverse='_inverse_allowed_user')
allowed_internal_user_ids = fields.Many2many('res.users', 'project_allowed_internal_users_rel',
string="Allowed Internal Users", default=lambda self: self.env.user, domain=[('share', '=', False)])
allowed_portal_user_ids = fields.Many2many('res.users', 'project_allowed_portal_users_rel', string="Allowed Portal Users", domain=[('share', '=', True)])
doc_count = fields.Integer(compute='_compute_attached_docs_count', string="Number of documents attached")
date_start = fields.Date(string='Start Date')
date = fields.Date(string='Expiration Date', index=True, tracking=True)
subtask_project_id = fields.Many2one('project.project', string='Sub-task Project', ondelete="restrict",
help="Project in which sub-tasks of the current project will be created. It can be the current project itself.")
allow_subtasks = fields.Boolean('Sub-tasks', default=lambda self: self.env.user.has_group('project.group_subtask_project'))
allow_recurring_tasks = fields.Boolean('Recurring Tasks', default=lambda self: self.env.user.has_group('project.group_project_recurring_tasks'))
# rating fields
rating_request_deadline = fields.Datetime(compute='_compute_rating_request_deadline', store=True)
rating_active = fields.Boolean('Customer Ratings', default=lambda self: self.env.user.has_group('project.group_project_rating'))
rating_status = fields.Selection(
[('stage', 'Rating when changing stage'),
('periodic', 'Periodical Rating')
], 'Customer Ratings Status', default="stage", required=True,
help="How to get customer feedback?\n"
"- Rating when changing stage: an email will be sent when a task is pulled in another stage.\n"
"- Periodical Rating: email will be sent periodically.\n\n"
"Don't forget to set up the mail templates on the stages for which you want to get the customer's feedbacks.")
rating_status_period = fields.Selection([
('daily', 'Daily'),
('weekly', 'Weekly'),
('bimonthly', 'Twice a Month'),
('monthly', 'Once a Month'),
('quarterly', 'Quarterly'),
('yearly', 'Yearly')], 'Rating Frequency', required=True, default='monthly')
_sql_constraints = [
('project_date_greater', 'check(date >= date_start)', 'Error! project start-date must be lower than project end-date.')
]
@api.depends('partner_id.email')
def _compute_partner_email(self):
for project in self:
if project.partner_id and project.partner_id.email != project.partner_email:
project.partner_email = project.partner_id.email
def _inverse_partner_email(self):
for project in self:
if project.partner_id and project.partner_email != project.partner_id.email:
project.partner_id.email = project.partner_email
@api.depends('partner_id.phone')
def _compute_partner_phone(self):
for project in self:
if project.partner_id and project.partner_phone != project.partner_id.phone:
project.partner_phone = project.partner_id.phone
def _inverse_partner_phone(self):
for project in self:
if project.partner_id and project.partner_phone != project.partner_id.phone:
project.partner_id.phone = project.partner_phone
@api.onchange('alias_enabled')
def _onchange_alias_name(self):
if not self.alias_enabled:
self.alias_name = False
def _compute_alias_enabled(self):
for project in self:
project.alias_enabled = project.alias_domain and project.alias_id.alias_name
@api.depends('allowed_internal_user_ids', 'allowed_portal_user_ids')
def _compute_allowed_users(self):
for project in self:
users = project.allowed_internal_user_ids | project.allowed_portal_user_ids
project.allowed_user_ids = users
def _inverse_allowed_user(self):
for project in self:
allowed_users = project.allowed_user_ids
project.allowed_portal_user_ids = allowed_users.filtered('share')
project.allowed_internal_user_ids = allowed_users - project.allowed_portal_user_ids
def _compute_access_url(self):
super(Project, self)._compute_access_url()
for project in self:
project.access_url = '/my/project/%s' % project.id
def _compute_access_warning(self):
super(Project, self)._compute_access_warning()
for project in self.filtered(lambda x: x.privacy_visibility != 'portal'):
project.access_warning = _(
"The project cannot be shared with the recipient(s) because the privacy of the project is too restricted. Set the privacy to 'Visible by following customers' in order to make it accessible by the recipient(s).")
@api.depends('rating_status', 'rating_status_period')
def _compute_rating_request_deadline(self):
periods = {'daily': 1, 'weekly': 7, 'bimonthly': 15, 'monthly': 30, 'quarterly': 90, 'yearly': 365}
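        # e.g. with a 'monthly' rating frequency the next request becomes due 30 days from now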
for project in self:
project.rating_request_deadline = fields.datetime.now() + timedelta(days=periods.get(project.rating_status_period, 0))
@api.model
def _map_tasks_default_valeus(self, task, project):
""" get the default value for the copied task on project duplication """
return {
'stage_id': task.stage_id.id,
'name': task.name,
'company_id': project.company_id.id,
}
def map_tasks(self, new_project_id):
""" copy and map tasks from old to new project """
project = self.browse(new_project_id)
tasks = self.env['project.task']
        # We want to copy archived tasks, but without propagating an active_test key in the context
task_ids = self.env['project.task'].with_context(active_test=False).search([('project_id', '=', self.id)], order='parent_id').ids
old_to_new_tasks = {}
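        # Ordering by parent_id is meant to duplicate parent tasks before their
        # subtasks, so old_to_new_tasks already knows the new parent id; if it
        # does not, the copied task simply gets no parent (False).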
for task in self.env['project.task'].browse(task_ids):
# preserve task name and stage, normally altered during copy
defaults = self._map_tasks_default_valeus(task, project)
if task.parent_id:
# set the parent to the duplicated task
defaults['parent_id'] = old_to_new_tasks.get(task.parent_id.id, False)
new_task = task.copy(defaults)
old_to_new_tasks[task.id] = new_task.id
tasks += new_task
return project.write({'tasks': [(6, 0, tasks.ids)]})
@api.returns('self', lambda value: value.id)
def copy(self, default=None):
if default is None:
default = {}
if not default.get('name'):
default['name'] = _("%s (copy)") % (self.name)
project = super(Project, self).copy(default)
if self.subtask_project_id == self:
project.subtask_project_id = project
for follower in self.message_follower_ids:
project.message_subscribe(partner_ids=follower.partner_id.ids, subtype_ids=follower.subtype_ids.ids)
if 'tasks' not in default:
self.map_tasks(project.id)
return project
@api.model
def create(self, vals):
# Prevent double project creation
self = self.with_context(mail_create_nosubscribe=True)
project = super(Project, self).create(vals)
if not vals.get('subtask_project_id'):
project.subtask_project_id = project.id
if project.privacy_visibility == 'portal' and project.partner_id.user_ids:
project.allowed_user_ids |= project.partner_id.user_ids
return project
def write(self, vals):
allowed_users_changed = 'allowed_portal_user_ids' in vals or 'allowed_internal_user_ids' in vals
if allowed_users_changed:
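            # Snapshot the users currently allowed on each project so that, after
            # the write, anyone who lost access can be unsubscribed and removed
            # from the project's tasks.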
allowed_users = {project: project.allowed_user_ids for project in self}
        # compute is_favorite directly to bypass the write access check on the project
if 'is_favorite' in vals:
vals.pop('is_favorite')
self._fields['is_favorite'].determine_inverse(self)
res = super(Project, self).write(vals) if vals else True
if allowed_users_changed:
for project in self:
permission_removed = allowed_users.get(project) - project.allowed_user_ids
allowed_portal_users_removed = permission_removed.filtered('share')
project.message_unsubscribe(allowed_portal_users_removed.partner_id.commercial_partner_id.ids)
for task in project.task_ids:
task.allowed_user_ids -= permission_removed
if 'allow_recurring_tasks' in vals and not vals.get('allow_recurring_tasks'):
self.env['project.task'].search([('project_id', 'in', self.ids), ('recurring_task', '=', True)]).write({'recurring_task': False})
if 'active' in vals:
            # archiving/unarchiving a project also archives/unarchives its tasks
self.with_context(active_test=False).mapped('tasks').write({'active': vals['active']})
if vals.get('partner_id') or vals.get('privacy_visibility'):
for project in self.filtered(lambda project: project.privacy_visibility == 'portal'):
project.allowed_user_ids |= project.partner_id.user_ids
return res
def action_unlink(self):
wizard = self.env['project.delete.wizard'].create({
'project_ids': self.ids
})
return {
'name': _('Confirmation'),
'view_mode': 'form',
'res_model': 'project.delete.wizard',
'views': [(self.env.ref('project.project_delete_wizard_form').id, 'form')],
'type': 'ir.actions.act_window',
'res_id': wizard.id,
'target': 'new',
'context': self.env.context,
}
def unlink(self):
# Check project is empty
for project in self.with_context(active_test=False):
if project.tasks:
raise UserError(_('You cannot delete a project containing tasks. You can either archive it or first delete all of its tasks.'))
# Delete the empty related analytic account
analytic_accounts_to_delete = self.env['account.analytic.account']
for project in self:
if project.analytic_account_id and not project.analytic_account_id.line_ids:
analytic_accounts_to_delete |= project.analytic_account_id
result = super(Project, self).unlink()
analytic_accounts_to_delete.unlink()
return result
def message_subscribe(self, partner_ids=None, channel_ids=None, subtype_ids=None):
"""
        Subscribe to all existing active tasks when subscribing to a project,
        and add the subscribed portal users to the allowed portal users.
"""
res = super(Project, self).message_subscribe(partner_ids=partner_ids, channel_ids=channel_ids, subtype_ids=subtype_ids)
project_subtypes = self.env['mail.message.subtype'].browse(subtype_ids) if subtype_ids else None
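        # Map the selected project subtypes onto the matching task subtypes
        # (keeping internal/default ones) so the same notification preferences
        # are applied when subscribing to the project's tasks below.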
task_subtypes = (project_subtypes.mapped('parent_id') | project_subtypes.filtered(lambda sub: sub.internal or sub.default)).ids if project_subtypes else None
if not subtype_ids or task_subtypes:
self.mapped('tasks').message_subscribe(
partner_ids=partner_ids, channel_ids=channel_ids, subtype_ids=task_subtypes)
if partner_ids:
all_users = self.env['res.partner'].browse(partner_ids).user_ids
portal_users = all_users.filtered('share')
internal_users = all_users - portal_users
self.allowed_portal_user_ids |= portal_users
self.allowed_internal_user_ids |= internal_users
return res
def message_unsubscribe(self, partner_ids=None, channel_ids=None):
""" Unsubscribe from all tasks when unsubscribing from a project """
self.mapped('tasks').message_unsubscribe(partner_ids=partner_ids, channel_ids=channel_ids)
return super(Project, self).message_unsubscribe(partner_ids=partner_ids, channel_ids=channel_ids)
def _alias_get_creation_values(self):
values = super(Project, self)._alias_get_creation_values()
values['alias_model_id'] = self.env['ir.model']._get('project.task').id
if self.id:
values['alias_defaults'] = defaults = ast.literal_eval(self.alias_defaults or "{}")
defaults['project_id'] = self.id
return values
# ---------------------------------------------------
# Actions
# ---------------------------------------------------
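    # Add or remove the current user from 'favorite_user_ids', depending on whether
    # the project is already one of their favorites.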
def toggle_favorite(self):
favorite_projects = not_fav_projects = self.env['project.project'].sudo()
for project in self:
if self.env.user in project.favorite_user_ids:
favorite_projects |= project
else:
not_fav_projects |= project
        # Project users have no write access on projects; the sudo()'ed recordsets above let the writes below succeed.
not_fav_projects.write({'favorite_user_ids': [(4, self.env.uid)]})
favorite_projects.write({'favorite_user_ids': [(3, self.env.uid)]})
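    # Open the standard task action, scoped to the current project.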
def action_view_tasks(self):
action = self.with_context(active_id=self.id, active_ids=self.ids) \
.env.ref('project.act_project_project_2_project_task_all') \
.sudo().read()[0]
action['display_name'] = self.name
return action
def action_view_account_analytic_line(self):
""" return the action to see all the analytic lines of the project's analytic account """
action = self.env["ir.actions.actions"]._for_xml_id("analytic.account_analytic_line_action")
action['context'] = {'default_account_id': self.analytic_account_id.id}
action['domain'] = [('account_id', '=', self.analytic_account_id.id)]
return action
def action_view_all_rating(self):
""" return the action to see all the rating of the project and activate default filters"""
action = self.env['ir.actions.act_window']._for_xml_id('project.rating_rating_action_view_project_rating')
action['name'] = _('Ratings of %s') % (self.name,)
action_context = ast.literal_eval(action['context']) if action['context'] else {}
action_context.update(self._context)
action_context['search_default_parent_res_name'] = self.name
action_context.pop('group_by', None)
return dict(action, context=action_context)
# ---------------------------------------------------
# Business Methods
# ---------------------------------------------------
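    # Helpers creating the analytic account used for the project's financial follow-up.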
@api.model
def _create_analytic_account_from_values(self, values):
analytic_account = self.env['account.analytic.account'].create({
'name': values.get('name', _('Unknown Analytic Account')),
'company_id': values.get('company_id') or self.env.company.id,
'partner_id': values.get('partner_id'),
'active': True,
})
return analytic_account
def _create_analytic_account(self):
for project in self:
analytic_account = self.env['account.analytic.account'].create({
'name': project.name,
'company_id': project.company_id.id,
'partner_id': project.partner_id.id,
'active': True,
})
project.write({'analytic_account_id': analytic_account.id})
# ---------------------------------------------------
# Rating business
# ---------------------------------------------------
# This method should be called once a day by the scheduler
@api.model
def _send_rating_all(self):
projects = self.search([
('rating_active', '=', True),
('rating_status', '=', 'periodic'),
('rating_request_deadline', '<=', fields.Datetime.now())
])
for project in projects:
project.task_ids._send_task_rating_mail()
project._compute_rating_request_deadline()
self.env.cr.commit()
class Task(models.Model):
_name = "project.task"
_description = "Task"
_date_name = "date_assign"
_inherit = ['portal.mixin', 'mail.thread.cc', 'mail.activity.mixin', 'rating.mixin']
_mail_post_access = 'read'
_order = "priority desc, sequence, id desc"
_check_company_auto = True
def _get_default_stage_id(self):
""" Gives default stage_id """
project_id = self.env.context.get('default_project_id')
if not project_id:
return False
return self.stage_find(project_id, [('fold', '=', False), ('is_closed', '=', False)])
@api.model
def _default_company_id(self):
if self._context.get('default_project_id'):
return self.env['project.project'].browse(self._context['default_project_id']).company_id
return self.env.company
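    # group_expand method for 'stage_id': always display the stages of the current
    # project in grouped views, even when they contain no task.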
@api.model
def _read_group_stage_ids(self, stages, domain, order):
search_domain = [('id', 'in', stages.ids)]
if 'default_project_id' in self.env.context:
search_domain = ['|', ('project_ids', '=', self.env.context['default_project_id'])] + search_domain
stage_ids = stages._search(search_domain, order=order, access_rights_uid=SUPERUSER_ID)
return stages.browse(stage_ids)
active = fields.Boolean(default=True)
name = fields.Char(string='Title', tracking=True, required=True, index=True)
description = fields.Html(string='Description')
priority = fields.Selection([
('0', 'Normal'),
('1', 'Important'),
], default='0', index=True, string="Priority")
sequence = fields.Integer(string='Sequence', index=True, default=10,
help="Gives the sequence order when displaying a list of tasks.")
stage_id = fields.Many2one('project.task.type', string='Stage', compute='_compute_stage_id',
store=True, readonly=False, ondelete='restrict', tracking=True, index=True,
default=_get_default_stage_id, group_expand='_read_group_stage_ids',
domain="[('project_ids', '=', project_id)]", copy=False)
tag_ids = fields.Many2many('project.tags', string='Tags')
kanban_state = fields.Selection([
('normal', 'In Progress'),
('done', 'Ready'),
('blocked', 'Blocked')], string='Kanban State',
copy=False, default='normal', required=True)
kanban_state_label = fields.Char(compute='_compute_kanban_state_label', string='Kanban State Label', tracking=True)
create_date = fields.Datetime("Created On", readonly=True, index=True)
write_date = fields.Datetime("Last Updated On", readonly=True, index=True)
date_end = fields.Datetime(string='Ending Date', index=True, copy=False)
date_assign = fields.Datetime(string='Assigning Date', index=True, copy=False, readonly=True)
date_deadline = fields.Date(string='Deadline', index=True, copy=False, tracking=True)
date_last_stage_update = fields.Datetime(string='Last Stage Update',
index=True,
copy=False,
readonly=True)
project_id = fields.Many2one('project.project', string='Project',
compute='_compute_project_id', store=True, readonly=False,
index=True, tracking=True, check_company=True, change_default=True)
planned_hours = fields.Float("Initially Planned Hours", help='Time planned to achieve this task (including its sub-tasks).', tracking=True)
    subtask_planned_hours = fields.Float("Sub-tasks Planned Hours", compute='_compute_subtask_planned_hours', help="Sum of the time planned of all the sub-tasks linked to this task. Usually less than or equal to the initially planned time of this task.")
user_id = fields.Many2one('res.users',
string='Assigned to',
default=lambda self: self.env.uid,
index=True, tracking=True)
partner_id = fields.Many2one('res.partner',
string='Customer',
compute='_compute_partner_id', store=True, readonly=False,
domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]")
partner_is_company = fields.Boolean(related='partner_id.is_company', readonly=True)
commercial_partner_id = fields.Many2one(related='partner_id.commercial_partner_id')
partner_email = fields.Char(
compute='_compute_partner_email', inverse='_inverse_partner_email',
string='Email', readonly=False, store=True, copy=False)
partner_phone = fields.Char(
compute='_compute_partner_phone', inverse='_inverse_partner_phone',
string="Phone", readonly=False, store=True, copy=False)
ribbon_message = fields.Char('Ribbon message', compute='_compute_ribbon_message')
partner_city = fields.Char(related='partner_id.city', readonly=False)
manager_id = fields.Many2one('res.users', string='Project Manager', related='project_id.user_id', readonly=True)
company_id = fields.Many2one(
'res.company', string='Company', compute='_compute_company_id', store=True, readonly=False,
required=True, copy=True, default=_default_company_id)
color = fields.Integer(string='Color Index')
user_email = fields.Char(related='user_id.email', string='User Email', readonly=True, related_sudo=False)
attachment_ids = fields.One2many('ir.attachment', compute='_compute_attachment_ids', string="Main Attachments",
help="Attachment that don't come from message.")
    # In the domain of displayed_image_id, we couldn't use attachment_ids because a one2many is represented as a list of commands, so we use res_model & res_id instead.
displayed_image_id = fields.Many2one('ir.attachment', domain="[('res_model', '=', 'project.task'), ('res_id', '=', id), ('mimetype', 'ilike', 'image')]", string='Cover Image')
legend_blocked = fields.Char(related='stage_id.legend_blocked', string='Kanban Blocked Explanation', readonly=True, related_sudo=False)
legend_done = fields.Char(related='stage_id.legend_done', string='Kanban Valid Explanation', readonly=True, related_sudo=False)
legend_normal = fields.Char(related='stage_id.legend_normal', string='Kanban Ongoing Explanation', readonly=True, related_sudo=False)
is_closed = fields.Boolean(related="stage_id.is_closed", string="Closing Stage", readonly=True, related_sudo=False)
parent_id = fields.Many2one('project.task', string='Parent Task', index=True)
child_ids = fields.One2many('project.task', 'parent_id', string="Sub-tasks", context={'active_test': False})
subtask_project_id = fields.Many2one('project.project', related="project_id.subtask_project_id", string='Sub-task Project', readonly=True)
allow_subtasks = fields.Boolean(string="Allow Sub-tasks", related="project_id.allow_subtasks", readonly=True)
subtask_count = fields.Integer("Sub-task count", compute='_compute_subtask_count')
    email_from = fields.Char(string='Email From', help="These people will receive email.", index=True,
        compute='_compute_email_from', store=True, readonly=False)
allowed_user_ids = fields.Many2many('res.users', string="Visible to", groups='project.group_project_manager', compute='_compute_allowed_user_ids', store=True, readonly=False, copy=False)
project_privacy_visibility = fields.Selection(related='project_id.privacy_visibility', string="Project Visibility")
# Computed field about working time elapsed between record creation and assignation/closing.
working_hours_open = fields.Float(compute='_compute_elapsed', string='Working hours to assign', store=True, group_operator="avg")
working_hours_close = fields.Float(compute='_compute_elapsed', string='Working hours to close', store=True, group_operator="avg")
working_days_open = fields.Float(compute='_compute_elapsed', string='Working days to assign', store=True, group_operator="avg")
working_days_close = fields.Float(compute='_compute_elapsed', string='Working days to close', store=True, group_operator="avg")
# customer portal: include comment and incoming emails in communication history
website_message_ids = fields.One2many(domain=lambda self: [('model', '=', self._name), ('message_type', 'in', ['email', 'comment'])])
# recurrence fields
allow_recurring_tasks = fields.Boolean(related='project_id.allow_recurring_tasks')
recurring_task = fields.Boolean(string="Recurrent")
recurring_count = fields.Integer(string="Tasks in Recurrence", compute='_compute_recurring_count')
recurrence_id = fields.Many2one('project.task.recurrence', copy=False)
recurrence_update = fields.Selection([
('this', 'This task'),
('subsequent', 'This and following tasks'),
('all', 'All tasks'),
], default='this', store=False)
    recurrence_message = fields.Char(string='Next Recurrences', compute='_compute_recurrence_message')
repeat_interval = fields.Integer(string='Repeat Every', default=1, compute='_compute_repeat', readonly=False)
repeat_unit = fields.Selection([
('day', 'Days'),
('week', 'Weeks'),
('month', 'Months'),
('year', 'Years'),
], default='week', compute='_compute_repeat', readonly=False)
repeat_type = fields.Selection([
('forever', 'Forever'),
('until', 'End Date'),
('after', 'Number of Repetitions'),
], default="forever", string="Until", compute='_compute_repeat', readonly=False)
repeat_until = fields.Date(string="End Date", compute='_compute_repeat', readonly=False)
repeat_number = fields.Integer(string="Repetitions", default=1, compute='_compute_repeat', readonly=False)
repeat_on_month = fields.Selection([
('date', 'Date of the Month'),
('day', 'Day of the Month'),
], default='date', compute='_compute_repeat', readonly=False)
repeat_on_year = fields.Selection([
('date', 'Date of the Year'),
('day', 'Day of the Year'),
], default='date', compute='_compute_repeat', readonly=False)
mon = fields.Boolean(string="Mon", compute='_compute_repeat', readonly=False)
tue = fields.Boolean(string="Tue", compute='_compute_repeat', readonly=False)
wed = fields.Boolean(string="Wed", compute='_compute_repeat', readonly=False)
thu = fields.Boolean(string="Thu", compute='_compute_repeat', readonly=False)
fri = fields.Boolean(string="Fri", compute='_compute_repeat', readonly=False)
sat = fields.Boolean(string="Sat", compute='_compute_repeat', readonly=False)
sun = fields.Boolean(string="Sun", compute='_compute_repeat', readonly=False)
repeat_day = fields.Selection([
(str(i), str(i)) for i in range(1, 32)
], compute='_compute_repeat', readonly=False)
repeat_week = fields.Selection([
('first', 'First'),
('second', 'Second'),
('third', 'Third'),
('last', 'Last'),
], default='first', compute='_compute_repeat', readonly=False)
repeat_weekday = fields.Selection([
('mon', 'Monday'),
('tue', 'Tuesday'),
('wed', 'Wednesday'),
('thu', 'Thursday'),
('fri', 'Friday'),
('sat', 'Saturday'),
('sun', 'Sunday'),
], string='Day Of The Week', compute='_compute_repeat', readonly=False)
repeat_month = fields.Selection([
('january', 'January'),
('february', 'February'),
('march', 'March'),
('april', 'April'),
('may', 'May'),
('june', 'June'),
('july', 'July'),
('august', 'August'),
('september', 'September'),
('october', 'October'),
('november', 'November'),
('december', 'December'),
], compute='_compute_repeat', readonly=False)
repeat_show_dow = fields.Boolean(compute='_compute_repeat_visibility')
repeat_show_day = fields.Boolean(compute='_compute_repeat_visibility')
repeat_show_week = fields.Boolean(compute='_compute_repeat_visibility')
repeat_show_month = fields.Boolean(compute='_compute_repeat_visibility')
@api.model
def _get_recurrence_fields(self):
return ['repeat_interval', 'repeat_unit', 'repeat_type', 'repeat_until', 'repeat_number',
'repeat_on_month', 'repeat_on_year', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat',
'sun', 'repeat_day', 'repeat_week', 'repeat_month', 'repeat_weekday']
@api.depends('recurring_task', 'repeat_unit', 'repeat_on_month', 'repeat_on_year')
def _compute_repeat_visibility(self):
for task in self:
            task.repeat_show_day = task.recurring_task and ((task.repeat_unit == 'month' and task.repeat_on_month == 'date') or (task.repeat_unit == 'year' and task.repeat_on_year == 'date'))
            task.repeat_show_week = task.recurring_task and ((task.repeat_unit == 'month' and task.repeat_on_month == 'day') or (task.repeat_unit == 'year' and task.repeat_on_year == 'day'))
task.repeat_show_dow = task.recurring_task and task.repeat_unit == 'week'
task.repeat_show_month = task.recurring_task and task.repeat_unit == 'year'
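    # Mirror the recurrence settings on the task: use the values of the linked
    # recurrence record if any, otherwise fall back to the model defaults.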
@api.depends('recurring_task')
def _compute_repeat(self):
rec_fields = self._get_recurrence_fields()
defaults = self.default_get(rec_fields)
for task in self:
for f in rec_fields:
if task.recurrence_id:
task[f] = task.recurrence_id[f]
else:
if task.recurring_task:
task[f] = defaults.get(f)
else:
task[f] = False
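    # Translate the selected weekday flags (mon..sun) into the weekday helpers mapped
    # in DAYS (defined outside this excerpt); 'n' is the occurrence of the weekday in the month.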
def _get_weekdays(self, n=1):
self.ensure_one()
if self.repeat_unit == 'week':
return [fn(n) for day, fn in DAYS.items() if self[day]]
return [DAYS.get(self.repeat_weekday)(n)]
@api.depends(
'recurring_task', 'repeat_interval', 'repeat_unit', 'repeat_type', 'repeat_until',
'repeat_number', 'repeat_on_month', 'repeat_on_year', 'mon', 'tue', 'wed', 'thu', 'fri',
'sat', 'sun', 'repeat_day', 'repeat_week', 'repeat_month', 'repeat_weekday')
def _compute_recurrence_message(self):
self.recurrence_message = False
for task in self.filtered(lambda t: t.recurring_task and t._is_recurrence_valid()):
date = fields.Date.today()
number_occurrences = min(5, task.repeat_number if task.repeat_type == 'after' else 5)
delta = task.repeat_interval if task.repeat_unit == 'day' else 1
recurring_dates = self.env['project.task.recurrence']._get_next_recurring_dates(
date + timedelta(days=delta),
task.repeat_interval,
task.repeat_unit,
task.repeat_type,
task.repeat_until,
task.repeat_on_month,
task.repeat_on_year,
task._get_weekdays(WEEKS.get(task.repeat_week)),
task.repeat_day,
task.repeat_week,
task.repeat_month,
count=number_occurrences)
date_format = self.env['res.lang']._lang_get(self.env.user.lang).date_format
task.recurrence_message = '<ul>'
for date in recurring_dates[:5]:
task.recurrence_message += '<li>%s</li>' % date.strftime(date_format)
if task.repeat_type == 'after' and task.repeat_number > 5 or task.repeat_type == 'forever' or len(recurring_dates) > 5:
task.recurrence_message += '<li>...</li>'
task.recurrence_message += '</ul>'
if task.repeat_type == 'until':
task.recurrence_message += _('<p><em>Number of tasks: %(tasks_count)s</em></p>') % {'tasks_count': len(recurring_dates)}
def _is_recurrence_valid(self):
self.ensure_one()
return self.repeat_interval > 0 and\
(not self.repeat_show_dow or self._get_weekdays()) and\
(self.repeat_type != 'after' or self.repeat_number) and\
(self.repeat_type != 'until' or self.repeat_until and self.repeat_until > fields.Date.today())
@api.depends('recurrence_id')
def _compute_recurring_count(self):
self.recurring_count = 0
recurring_tasks = self.filtered(lambda l: l.recurrence_id)
count = self.env['project.task'].read_group([('recurrence_id', 'in', recurring_tasks.recurrence_id.ids)], ['id'], 'recurrence_id')
tasks_count = {c.get('recurrence_id')[0]: c.get('recurrence_id_count') for c in count}
for task in recurring_tasks:
task.recurring_count = tasks_count.get(task.recurrence_id.id, 0)
@api.depends('partner_id.email')
def _compute_partner_email(self):
for task in self:
if task.partner_id and task.partner_id.email != task.partner_email:
task.partner_email = task.partner_id.email
def _inverse_partner_email(self):
for task in self:
if task.partner_id and task.partner_email != task.partner_id.email:
task.partner_id.email = task.partner_email
@api.depends('partner_id.phone')
def _compute_partner_phone(self):
for task in self:
if task.partner_id and task.partner_phone != task.partner_id.phone:
task.partner_phone = task.partner_id.phone
def _inverse_partner_phone(self):
for task in self:
if task.partner_id and task.partner_phone != task.partner_id.phone:
task.partner_id.phone = task.partner_phone
@api.depends('partner_email', 'partner_phone', 'partner_id')
def _compute_ribbon_message(self):
for task in self:
will_write_email = task.partner_id and task.partner_email != task.partner_id.email
will_write_phone = task.partner_id and task.partner_phone != task.partner_id.phone
if will_write_email and will_write_phone:
task.ribbon_message = _('By saving this change, the customer email and phone number will also be updated.')
elif will_write_email:
task.ribbon_message = _('By saving this change, the customer email will also be updated.')
elif will_write_phone:
task.ribbon_message = _('By saving this change, the customer phone number will also be updated.')
else:
task.ribbon_message = False
@api.constrains('parent_id')
def _check_parent_id(self):
if not self._check_recursion():
raise ValidationError(_('Error! You cannot create recursive hierarchy of tasks.'))
@api.constrains('allowed_user_ids')
def _check_no_portal_allowed(self):
for task in self.filtered(lambda t: t.project_id.privacy_visibility != 'portal'):
portal_users = task.allowed_user_ids.filtered('share')
if portal_users:
user_names = ', '.join(portal_users[:10].mapped('name'))
raise ValidationError(_("The project visibility setting doesn't allow portal users to see the project's tasks. (%s)", user_names))
def _compute_attachment_ids(self):
for task in self:
attachment_ids = self.env['ir.attachment'].search([('res_id', '=', task.id), ('res_model', '=', 'project.task')]).ids
message_attachment_ids = task.mapped('message_ids.attachment_ids').ids # from mail_thread
task.attachment_ids = [(6, 0, list(set(attachment_ids) - set(message_attachment_ids)))]
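    # Keep the task's allowed users consistent with the project's visibility:
    # 'followers' projects only accept the project's allowed internal users, while
    # 'portal' projects also accept the project's allowed portal users.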
@api.depends('project_id.allowed_user_ids', 'project_id.privacy_visibility')
def _compute_allowed_user_ids(self):
for task in self:
portal_users = task.allowed_user_ids.filtered('share')
internal_users = task.allowed_user_ids - portal_users
if task.project_id.privacy_visibility == 'followers':
task.allowed_user_ids |= task.project_id.allowed_internal_user_ids
task.allowed_user_ids -= portal_users
elif task.project_id.privacy_visibility == 'portal':
task.allowed_user_ids |= task.project_id.allowed_portal_user_ids
if task.project_id.privacy_visibility != 'portal':
task.allowed_user_ids -= portal_users
elif task.project_id.privacy_visibility != 'followers':
task.allowed_user_ids -= internal_users
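    # Compute the working time elapsed between the task creation and its
    # assignment/closing, based on the working calendar of the project.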
@api.depends('create_date', 'date_end', 'date_assign')
def _compute_elapsed(self):
task_linked_to_calendar = self.filtered(
lambda task: task.project_id.resource_calendar_id and task.create_date
)
for task in task_linked_to_calendar:
dt_create_date = fields.Datetime.from_string(task.create_date)
if task.date_assign:
dt_date_assign = fields.Datetime.from_string(task.date_assign)
duration_data = task.project_id.resource_calendar_id.get_work_duration_data(dt_create_date, dt_date_assign, compute_leaves=True)
task.working_hours_open = duration_data['hours']
task.working_days_open = duration_data['days']
else:
task.working_hours_open = 0.0
task.working_days_open = 0.0
if task.date_end:
dt_date_end = fields.Datetime.from_string(task.date_end)
duration_data = task.project_id.resource_calendar_id.get_work_duration_data(dt_create_date, dt_date_end, compute_leaves=True)
task.working_hours_close = duration_data['hours']
task.working_days_close = duration_data['days']
else:
task.working_hours_close = 0.0
task.working_days_close = 0.0
(self - task_linked_to_calendar).update(dict.fromkeys(
['working_hours_open', 'working_hours_close', 'working_days_open', 'working_days_close'], 0.0))
@api.depends('stage_id', 'kanban_state')
def _compute_kanban_state_label(self):
for task in self:
if task.kanban_state == 'normal':
task.kanban_state_label = task.legend_normal
elif task.kanban_state == 'blocked':
task.kanban_state_label = task.legend_blocked
else:
task.kanban_state_label = task.legend_done
def _compute_access_url(self):
super(Task, self)._compute_access_url()
for task in self:
task.access_url = '/my/task/%s' % task.id
def _compute_access_warning(self):
super(Task, self)._compute_access_warning()
for task in self.filtered(lambda x: x.project_id.privacy_visibility != 'portal'):
task.access_warning = _(
"The task cannot be shared with the recipient(s) because the privacy of the project is too restricted. Set the privacy of the project to 'Visible by following customers' in order to make it accessible by the recipient(s).")
@api.depends('child_ids.planned_hours')
def _compute_subtask_planned_hours(self):
for task in self:
task.subtask_planned_hours = sum(child_task.planned_hours + child_task.subtask_planned_hours for child_task in task.child_ids)
@api.depends('child_ids')
def _compute_subtask_count(self):
for task in self:
task.subtask_count = len(task._get_all_subtasks())
@api.onchange('company_id')
def _onchange_task_company(self):
if self.project_id.company_id != self.company_id:
self.project_id = False
@api.depends('project_id.company_id')
def _compute_company_id(self):
for task in self.filtered(lambda task: task.project_id):
task.company_id = task.project_id.company_id
@api.depends('project_id')
def _compute_stage_id(self):
for task in self:
if task.project_id:
if task.project_id not in task.stage_id.project_ids:
task.stage_id = task.stage_find(task.project_id.id, [
('fold', '=', False), ('is_closed', '=', False)])
else:
task.stage_id = False
@api.returns('self', lambda value: value.id)
def copy(self, default=None):
if default is None:
default = {}
if not default.get('name'):
default['name'] = _("%s (copy)", self.name)
if self.recurrence_id:
default['recurrence_id'] = self.recurrence_id.copy().id
return super(Task, self).copy(default)
@api.constrains('parent_id')
def _check_parent_id(self):
for task in self:
if not task._check_recursion():
raise ValidationError(_('Error! You cannot create recursive hierarchy of task(s).'))
@api.model
def get_empty_list_help(self, help):
tname = _("task")
project_id = self.env.context.get('default_project_id', False)
if project_id:
name = self.env['project.project'].browse(project_id).label_tasks
            if name:
                tname = name.lower()
self = self.with_context(
empty_list_help_id=self.env.context.get('default_project_id'),
empty_list_help_model='project.project',
empty_list_help_document_name=tname,
)
return super(Task, self).get_empty_list_help(help)
def message_subscribe(self, partner_ids=None, channel_ids=None, subtype_ids=None):
"""
Add the users subscribed to allowed portal users
"""
res = super(Task, self).message_subscribe(partner_ids=partner_ids, channel_ids=channel_ids, subtype_ids=subtype_ids)
if partner_ids:
new_allowed_users = self.env['res.partner'].browse(partner_ids).user_ids.filtered('share')
tasks = self.filtered(lambda task: task.project_id.privacy_visibility == 'portal')
tasks.sudo().write({'allowed_user_ids': [(4, user.id) for user in new_allowed_users]})
return res
# ----------------------------------------
# Case management
# ----------------------------------------
    def stage_find(self, section_id, domain=None, order='sequence'):
""" Override of the base.stage method
Parameter of the stage search taken from the lead:
- section_id: if set, stages must belong to this section or
be a default stage; if not set, stages must be default
stages
"""
# collect all section_ids
section_ids = []
if section_id:
section_ids.append(section_id)
section_ids.extend(self.mapped('project_id').ids)
search_domain = []
if section_ids:
search_domain = [('|')] * (len(section_ids) - 1)
for section_id in section_ids:
search_domain.append(('project_ids', '=', section_id))
        search_domain += list(domain or [])
# perform search, return the first found
return self.env['project.task.type'].search(search_domain, order=order, limit=1).id
# ------------------------------------------------
# CRUD overrides
# ------------------------------------------------
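    # default_get is extended to pre-fill the recurrence fields from today's date
    # (current weekday, day of month, month, and a one-week default end date).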
@api.model
def default_get(self, default_fields):
vals = super(Task, self).default_get(default_fields)
days = list(DAYS.keys())
week_start = fields.Datetime.today().weekday()
if all(d in default_fields for d in days):
vals[days[week_start]] = True
if 'repeat_day' in default_fields:
vals['repeat_day'] = str(fields.Datetime.today().day)
if 'repeat_month' in default_fields:
vals['repeat_month'] = self._fields.get('repeat_month').selection[fields.Datetime.today().month - 1][0]
if 'repeat_until' in default_fields:
vals['repeat_until'] = fields.Date.today() + timedelta(days=7)
if 'repeat_weekday' in default_fields:
vals['repeat_weekday'] = self._fields.get('repeat_weekday').selection[week_start][0]
return vals
@api.model_create_multi
def create(self, vals_list):
default_stage = dict()
for vals in vals_list:
project_id = vals.get('project_id') or self.env.context.get('default_project_id')
if project_id and not "company_id" in vals:
vals["company_id"] = self.env["project.project"].browse(
project_id
).company_id.id or self.env.company.id
if project_id and "stage_id" not in vals:
                # 1) Allows keeping the batch creation of tasks
                # 2) Ensures the defaults are correct (and computed once per project)
                #    by using default_get (instead of _get_default_stage_id or _stage_find).
if project_id not in default_stage:
default_stage[project_id] = self.with_context(
default_project_id=project_id
).default_get(['stage_id']).get('stage_id')
vals["stage_id"] = default_stage[project_id]
# user_id change: update date_assign
if vals.get('user_id'):
vals['date_assign'] = fields.Datetime.now()
# Stage change: Update date_end if folded stage and date_last_stage_update
if vals.get('stage_id'):
vals.update(self.update_date_end(vals['stage_id']))
vals['date_last_stage_update'] = fields.Datetime.now()
# recurrence
rec_fields = vals.keys() & self._get_recurrence_fields()
if rec_fields and vals.get('recurring_task') is True:
rec_values = {rec_field: vals[rec_field] for rec_field in rec_fields}
rec_values['next_recurrence_date'] = fields.Datetime.today()
recurrence = self.env['project.task.recurrence'].create(rec_values)
vals['recurrence_id'] = recurrence.id
tasks = super().create(vals_list)
for task in tasks:
if task.project_id.privacy_visibility == 'portal':
task._portal_ensure_token()
return tasks
def write(self, vals):
now = fields.Datetime.now()
if 'parent_id' in vals and vals['parent_id'] in self.ids:
raise UserError(_("Sorry. You can't set a task as its parent task."))
if 'active' in vals and not vals.get('active') and any(self.mapped('recurrence_id')):
# TODO: show a dialog to stop the recurrence
raise UserError(_('You cannot archive recurring tasks. Please, disable the recurrence first.'))
# stage change: update date_last_stage_update
if 'stage_id' in vals:
vals.update(self.update_date_end(vals['stage_id']))
vals['date_last_stage_update'] = now
# reset kanban state when changing stage
if 'kanban_state' not in vals:
vals['kanban_state'] = 'normal'
# user_id change: update date_assign
if vals.get('user_id') and 'date_assign' not in vals:
vals['date_assign'] = now
# recurrence fields
rec_fields = vals.keys() & self._get_recurrence_fields()
if rec_fields:
rec_values = {rec_field: vals[rec_field] for rec_field in rec_fields}
for task in self:
if task.recurrence_id:
task.recurrence_id.write(rec_values)
elif vals.get('recurring_task'):
rec_values['next_recurrence_date'] = fields.Datetime.today()
recurrence = self.env['project.task.recurrence'].create(rec_values)
task.recurrence_id = recurrence.id
if 'recurring_task' in vals and not vals.get('recurring_task'):
self.recurrence_id.unlink()
tasks = self
recurrence_update = vals.pop('recurrence_update', 'this')
if recurrence_update != 'this':
recurrence_domain = []
if recurrence_update == 'subsequent':
for task in self:
recurrence_domain = OR([recurrence_domain, ['&', ('recurrence_id', '=', task.recurrence_id.id), ('create_date', '>=', task.create_date)]])
else:
recurrence_domain = [('recurrence_id', 'in', self.recurrence_id.ids)]
tasks |= self.env['project.task'].search(recurrence_domain)
result = super(Task, tasks).write(vals)
# rating on stage
if 'stage_id' in vals and vals.get('stage_id'):
self.filtered(lambda x: x.project_id.rating_active and x.project_id.rating_status == 'stage')._send_task_rating_mail(force_send=True)
return result
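    # When moving to a folded or closed stage, stamp 'date_end'; otherwise reset it.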
def update_date_end(self, stage_id):
project_task_type = self.env['project.task.type'].browse(stage_id)
if project_task_type.fold or project_task_type.is_closed:
return {'date_end': fields.Datetime.now()}
return {'date_end': False}
def unlink(self):
if any(self.mapped('recurrence_id')):
# TODO: show a dialog to stop the recurrence
raise UserError(_('You cannot delete recurring tasks. Please, disable the recurrence first.'))
return super().unlink()
# ---------------------------------------------------
# Subtasks
# ---------------------------------------------------
@api.depends('parent_id.partner_id', 'project_id.partner_id')
def _compute_partner_id(self):
"""
If a task has no partner_id, use the project partner_id if any, or else the parent task partner_id.
Once the task partner_id has been set:
1) if the project partner_id changes, the task partner_id is automatically changed also.
2) if the parent task partner_id changes, the task partner_id remains the same.
"""
for task in self:
if task.partner_id:
if task.project_id.partner_id:
task.partner_id = task.project_id.partner_id
else:
task.partner_id = task.project_id.partner_id or task.parent_id.partner_id
@api.depends('partner_id.email', 'parent_id.email_from')
def _compute_email_from(self):
for task in self:
task.email_from = task.partner_id.email or ((task.partner_id or task.parent_id) and task.email_from) or task.parent_id.email_from
@api.depends('parent_id.project_id.subtask_project_id')
def _compute_project_id(self):
for task in self:
if not task.project_id:
task.project_id = task.parent_id.project_id.subtask_project_id
# ---------------------------------------------------
# Mail gateway
# ---------------------------------------------------
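    # When the stage changes and the new stage has a mail template, send it as a note
    # (light email layout) on top of the standard tracking message.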
def _track_template(self, changes):
res = super(Task, self)._track_template(changes)
test_task = self[0]
if 'stage_id' in changes and test_task.stage_id.mail_template_id:
res['stage_id'] = (test_task.stage_id.mail_template_id, {
'auto_delete_message': True,
'subtype_id': self.env['ir.model.data'].xmlid_to_res_id('mail.mt_note'),
'email_layout_xmlid': 'mail.mail_notification_light'
})
return res
def _creation_subtype(self):
return self.env.ref('project.mt_task_new')
def _track_subtype(self, init_values):
self.ensure_one()
if 'kanban_state_label' in init_values and self.kanban_state == 'blocked':
return self.env.ref('project.mt_task_blocked')
elif 'kanban_state_label' in init_values and self.kanban_state == 'done':
return self.env.ref('project.mt_task_ready')
elif 'stage_id' in init_values:
return self.env.ref('project.mt_task_stage')
return super(Task, self)._track_subtype(init_values)
def _notify_get_groups(self, msg_vals=None):
""" Handle project users and managers recipients that can assign
tasks and create new one directly from notification emails. Also give
access button to portal users and portal customers. If they are notified
they should probably have access to the document. """
groups = super(Task, self)._notify_get_groups(msg_vals=msg_vals)
local_msg_vals = dict(msg_vals or {})
self.ensure_one()
project_user_group_id = self.env.ref('project.group_project_user').id
group_func = lambda pdata: pdata['type'] == 'user' and project_user_group_id in pdata['groups']
if self.project_id.privacy_visibility == 'followers':
allowed_user_ids = self.project_id.allowed_internal_user_ids.partner_id.ids
group_func = lambda pdata: pdata['type'] == 'user' and project_user_group_id in pdata['groups'] and pdata['id'] in allowed_user_ids
new_group = ('group_project_user', group_func, {})
if not self.user_id and not self.stage_id.fold:
take_action = self._notify_get_action_link('assign', **local_msg_vals)
project_actions = [{'url': take_action, 'title': _('I take it')}]
new_group[2]['actions'] = project_actions
groups = [new_group] + groups
if self.project_id.privacy_visibility == 'portal':
allowed_user_ids = self.project_id.allowed_portal_user_ids.partner_id.ids
groups.insert(0, (
'allowed_portal_users',
lambda pdata: pdata['type'] == 'portal' and pdata['id'] in allowed_user_ids,
{}
))
portal_privacy = self.project_id.privacy_visibility == 'portal'
for group_name, group_method, group_data in groups:
if group_name in ('customer', 'user') or group_name == 'portal_customer' and not portal_privacy:
group_data['has_button_access'] = False
elif group_name == 'portal_customer' and portal_privacy:
group_data['has_button_access'] = True
return groups
def _notify_get_reply_to(self, default=None, records=None, company=None, doc_names=None):
""" Override to set alias of tasks to their project if any. """
aliases = self.sudo().mapped('project_id')._notify_get_reply_to(default=default, records=None, company=company, doc_names=None)
res = {task.id: aliases.get(task.project_id.id) for task in self}
leftover = self.filtered(lambda rec: not rec.project_id)
if leftover:
res.update(super(Task, leftover)._notify_get_reply_to(default=default, records=None, company=company, doc_names=doc_names))
return res
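    # Extract the recipient addresses of an incoming message, ignoring addresses whose
    # local part matches a project alias (to avoid subscribing the alias itself).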
def email_split(self, msg):
email_list = tools.email_split((msg.get('to') or '') + ',' + (msg.get('cc') or ''))
# check left-part is not already an alias
aliases = self.mapped('project_id.alias_name')
return [x for x in email_list if x.split('@')[0] not in aliases]
@api.model
def message_new(self, msg, custom_values=None):
""" Overrides mail_thread message_new that is called by the mailgateway
through message_process.
This override updates the document according to the email.
"""
# remove default author when going through the mail gateway. Indeed we
# do not want to explicitly set user_id to False; however we do not
# want the gateway user to be responsible if no other responsible is
# found.
create_context = dict(self.env.context or {})
create_context['default_user_id'] = False
if custom_values is None:
custom_values = {}
defaults = {
'name': msg.get('subject') or _("No Subject"),
'email_from': msg.get('from'),
'planned_hours': 0.0,
'partner_id': msg.get('author_id')
}
defaults.update(custom_values)
task = super(Task, self.with_context(create_context)).message_new(msg, custom_values=defaults)
email_list = task.email_split(msg)
partner_ids = [p.id for p in self.env['mail.thread']._mail_find_partner_from_emails(email_list, records=task, force_create=False) if p]
task.message_subscribe(partner_ids)
return task
def message_update(self, msg, update_vals=None):
""" Override to update the task according to the email. """
email_list = self.email_split(msg)
partner_ids = [p.id for p in self.env['mail.thread']._mail_find_partner_from_emails(email_list, records=self, force_create=False) if p]
self.message_subscribe(partner_ids)
return super(Task, self).message_update(msg, update_vals=update_vals)
def _message_get_suggested_recipients(self):
recipients = super(Task, self)._message_get_suggested_recipients()
for task in self:
if task.partner_id:
reason = _('Customer Email') if task.partner_id.email else _('Customer')
task._message_add_suggested_recipient(recipients, partner=task.partner_id, reason=reason)
elif task.email_from:
task._message_add_suggested_recipient(recipients, email=task.email_from, reason=_('Customer Email'))
return recipients
def _notify_email_header_dict(self):
headers = super(Task, self)._notify_email_header_dict()
if self.project_id:
current_objects = [h for h in headers.get('X-Odoo-Objects', '').split(',') if h]
current_objects.insert(0, 'project.project-%s, ' % self.project_id.id)
headers['X-Odoo-Objects'] = ','.join(current_objects)
if self.tag_ids:
headers['X-Odoo-Tags'] = ','.join(self.tag_ids.mapped('name'))
return headers
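    # After posting a message: use the first image attachment as cover image if none is
    # set, and try to link the customer from the email address of the new recipient.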
def _message_post_after_hook(self, message, msg_vals):
if message.attachment_ids and not self.displayed_image_id:
image_attachments = message.attachment_ids.filtered(lambda a: a.mimetype == 'image')
if image_attachments:
self.displayed_image_id = image_attachments[0]
if self.email_from and not self.partner_id:
            # We consider that posting a message with a specified recipient (not a follower, a specific one)
            # on a document without a customer means that it was created through the chatter using
            # suggested recipients. This heuristic avoids ugly hacks in JS.
new_partner = message.partner_ids.filtered(lambda partner: partner.email == self.email_from)
if new_partner:
self.search([
('partner_id', '=', False),
('email_from', '=', new_partner.email),
('stage_id.fold', '=', False)]).write({'partner_id': new_partner.id})
return super(Task, self)._message_post_after_hook(message, msg_vals)
def action_assign_to_me(self):
self.write({'user_id': self.env.user.id})
# If depth == 1, return only direct children
    # If depth == 3, return children up to the third generation
# If depth <= 0, return all children without depth limit
def _get_all_subtasks(self, depth=0):
children = self.mapped('child_ids').filtered(lambda children: children.active)
if not children:
return self.env['project.task']
if depth == 1:
return children
return children + children._get_all_subtasks(depth - 1)
def action_open_parent_task(self):
return {
'name': _('Parent Task'),
'view_mode': 'form',
'res_model': 'project.task',
'res_id': self.parent_id.id,
'type': 'ir.actions.act_window',
'context': dict(self._context, create=False)
}
def action_subtask(self):
action = self.env["ir.actions.actions"]._for_xml_id("project.project_task_action_sub_task")
# display all subtasks of current task
action['domain'] = [('id', 'child_of', self.id), ('id', '!=', self.id)]
# update context, with all default values as 'quick_create' does not contains all field in its view
if self._context.get('default_project_id'):
default_project = self.env['project.project'].browse(self.env.context['default_project_id'])
else:
default_project = self.project_id.subtask_project_id or self.project_id
ctx = dict(self.env.context)
ctx = {k: v for k, v in ctx.items() if not k.startswith('search_default_')}
ctx.update({
'default_name': self.env.context.get('name', self.name) + ':',
'default_parent_id': self.id, # will give default subtask field in `default_get`
'default_company_id': default_project.company_id.id if default_project else self.env.company.id,
})
action['context'] = ctx
return action
def action_recurring_tasks(self):
return {
'name': 'Tasks in Recurrence',
'type': 'ir.actions.act_window',
'res_model': 'project.task',
'view_mode': 'tree,form',
'domain': [('recurrence_id', 'in', self.recurrence_id.ids)],
}
# ---------------------------------------------------
# Rating business
# ---------------------------------------------------
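    # Send the rating request defined on the task's stage (if any) to the customer.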
def _send_task_rating_mail(self, force_send=False):
for task in self:
rating_template = task.stage_id.rating_template_id
if rating_template:
task.rating_send_request(rating_template, lang=task.partner_id.lang, force_send=force_send)
def rating_get_partner_id(self):
res = super(Task, self).rating_get_partner_id()
if not res and self.project_id.partner_id:
return self.project_id.partner_id
return res
def rating_apply(self, rate, token=None, feedback=None, subtype_xmlid=None):
return super(Task, self).rating_apply(rate, token=token, feedback=feedback, subtype_xmlid="project.mt_task_rating")
def _rating_get_parent_field_name(self):
return 'project_id'
class ProjectTags(models.Model):
""" Tags of project's tasks """
_name = "project.tags"
_description = "Project Tags"
def _get_default_color(self):
return randint(1, 11)
name = fields.Char('Name', required=True)
color = fields.Integer(string='Color', default=_get_default_color)
_sql_constraints = [
('name_uniq', 'unique (name)', "Tag name already exists!"),
]
| [((786, 824), 'odoo.fields.Boolean', 'fields.Boolean', (['"""Active"""'], {'default': '(True)'}), "('Active', default=True)\n", (800, 824), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((836, 899), 'odoo.fields.Char', 'fields.Char', ([], {'string': '"""Stage Name"""', 'required': '(True)', 'translate': '(True)'}), "(string='Stage Name', required=True, translate=True)\n", (847, 899), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((918, 945), 'odoo.fields.Text', 'fields.Text', ([], {'translate': '(True)'}), '(translate=True)\n', (929, 945), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((961, 986), 'odoo.fields.Integer', 'fields.Integer', ([], {'default': '(1)'}), '(default=1)\n', (975, 986), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((1005, 1147), 'odoo.fields.Many2many', 'fields.Many2many', (['"""project.project"""', '"""project_task_type_rel"""', '"""type_id"""', '"""project_id"""'], {'string': '"""Projects"""', 'default': '_get_default_project_ids'}), "('project.project', 'project_task_type_rel', 'type_id',\n 'project_id', string='Projects', default=_get_default_project_ids)\n", (1021, 1147), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((1961, 2168), 'odoo.fields.Many2one', 'fields.Many2one', (['"""mail.template"""'], {'string': '"""Email Template"""', 'domain': "[('model', '=', 'project.task')]", 'help': '"""If set an email will be sent to the customer when the task or issue reaches this step."""'}), "('mail.template', string='Email Template', domain=[('model',\n '=', 'project.task')], help=\n 'If set an email will be sent to the customer when the task or issue reaches this step.'\n )\n", (1976, 2168), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((2199, 2350), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'string': '"""Folded in Kanban"""', 'help': '"""This stage is folded in the kanban view when there are no records in that stage to display."""'}), "(string='Folded in Kanban', help=\n 'This stage is folded in the kanban view when there are no records in that stage to display.'\n )\n", (2213, 2350), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((2374, 2660), 'odoo.fields.Many2one', 'fields.Many2one', (['"""mail.template"""'], {'string': '"""Rating Email Template"""', 'domain': "[('model', '=', 'project.task')]", 'help': '"""If set and if the project\'s rating configuration is \'Rating when changing stage\', then an email will be sent to the customer when the task reaches this step."""'}), '(\'mail.template\', string=\'Rating Email Template\', domain=[(\n \'model\', \'=\', \'project.task\')], help=\n "If set and if the project\'s rating configuration is \'Rating when changing stage\', then an email will be sent to the customer when the task reaches this step."\n )\n', (2389, 2660), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((2714, 3083), 'odoo.fields.Boolean', 'fields.Boolean', (['"""Automatic kanban status"""'], {'default': '(False)', 'help': '"""Automatically modify the kanban state when the customer replies to the feedback for this stage.\n * A good feedback from the customer will update the kanban state to \'ready for the new stage\' (green bullet).\n * A medium or a bad feedback will set the kanban state to \'blocked\' (red bullet).\n"""'}), '(\'Automatic kanban status\', default=False, help=\n """Automatically modify the kanban state when the 
customer replies to the feedback for this stage.\n * A good feedback from the customer will update the kanban state to \'ready for the new stage\' (green bullet).\n * A medium or a bad feedback will set the kanban state to \'blocked\' (red bullet).\n"""\n )\n', (2728, 3083), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((3127, 3217), 'odoo.fields.Boolean', 'fields.Boolean', (['"""Closing Stage"""'], {'help': '"""Tasks in this stage are considered as closed."""'}), "('Closing Stage', help=\n 'Tasks in this stage are considered as closed.')\n", (3141, 3217), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((3243, 3298), 'odoo.fields.Text', 'fields.Text', ([], {'compute': '"""_compute_disabled_rating_warning"""'}), "(compute='_compute_disabled_rating_warning')\n", (3254, 3298), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((4732, 4787), 'odoo.api.depends', 'api.depends', (['"""project_ids"""', '"""project_ids.rating_active"""'], {}), "('project_ids', 'project_ids.rating_active')\n", (4743, 4787), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((7647, 7708), 'odoo.fields.Char', 'fields.Char', (['"""Name"""'], {'index': '(True)', 'required': '(True)', 'tracking': '(True)'}), "('Name', index=True, required=True, tracking=True)\n", (7658, 7708), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((7727, 7740), 'odoo.fields.Html', 'fields.Html', ([], {}), '()\n', (7738, 7740), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((7754, 7896), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'default': '(True)', 'help': '"""If the active field is set to False, it will allow you to hide the project without removing it."""'}), "(default=True, help=\n 'If the active field is set to False, it will allow you to hide the project without removing it.'\n )\n", (7768, 7896), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((7910, 8010), 'odoo.fields.Integer', 'fields.Integer', ([], {'default': '(10)', 'help': '"""Gives the sequence order when displaying a list of Projects."""'}), "(default=10, help=\n 'Gives the sequence order when displaying a list of Projects.')\n", (7924, 8010), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((8023, 8190), 'odoo.fields.Many2one', 'fields.Many2one', (['"""res.partner"""'], {'string': '"""Customer"""', 'auto_join': '(True)', 'tracking': '(True)', 'domain': '"""[\'|\', (\'company_id\', \'=\', False), (\'company_id\', \'=\', company_id)]"""'}), '(\'res.partner\', string=\'Customer\', auto_join=True, tracking=\n True, domain=\n "[\'|\', (\'company_id\', \'=\', False), (\'company_id\', \'=\', company_id)]")\n', (8038, 8190), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((8201, 8345), 'odoo.fields.Char', 'fields.Char', ([], {'compute': '"""_compute_partner_email"""', 'inverse': '"""_inverse_partner_email"""', 'string': '"""Email"""', 'readonly': '(False)', 'store': '(True)', 'copy': '(False)'}), "(compute='_compute_partner_email', inverse=\n '_inverse_partner_email', string='Email', readonly=False, store=True,\n copy=False)\n", (8212, 8345), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((8374, 8518), 'odoo.fields.Char', 'fields.Char', ([], {'compute': '"""_compute_partner_phone"""', 'inverse': '"""_inverse_partner_phone"""', 'string': '"""Phone"""', 'readonly': '(False)', 'store': '(True)', 'copy': '(False)'}), 
"(compute='_compute_partner_phone', inverse=\n '_inverse_partner_phone', string='Phone', readonly=False, store=True,\n copy=False)\n", (8385, 8518), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((8544, 8650), 'odoo.fields.Many2one', 'fields.Many2one', (['"""res.company"""'], {'string': '"""Company"""', 'required': '(True)', 'default': '(lambda self: self.env.company)'}), "('res.company', string='Company', required=True, default=lambda\n self: self.env.company)\n", (8559, 8650), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((8665, 8769), 'odoo.fields.Many2one', 'fields.Many2one', (['"""res.currency"""'], {'related': '"""company_id.currency_id"""', 'string': '"""Currency"""', 'readonly': '(True)'}), "('res.currency', related='company_id.currency_id', string=\n 'Currency', readonly=True)\n", (8680, 8769), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((8791, 9166), 'odoo.fields.Many2one', 'fields.Many2one', (['"""account.analytic.account"""'], {'string': '"""Analytic Account"""', 'copy': '(False)', 'ondelete': '"""set null"""', 'domain': '"""[\'|\', (\'company_id\', \'=\', False), (\'company_id\', \'=\', company_id)]"""', 'check_company': '(True)', 'help': '"""Analytic account to which this project is linked for financial management. Use an analytic account to record cost and revenue on your project."""'}), '(\'account.analytic.account\', string=\'Analytic Account\', copy\n =False, ondelete=\'set null\', domain=\n "[\'|\', (\'company_id\', \'=\', False), (\'company_id\', \'=\', company_id)]",\n check_company=True, help=\n \'Analytic account to which this project is linked for financial management. Use an analytic account to record cost and revenue on your project.\'\n )\n', (8806, 9166), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((9200, 9345), 'odoo.fields.Many2many', 'fields.Many2many', (['"""res.users"""', '"""project_favorite_user_rel"""', '"""project_id"""', '"""user_id"""'], {'default': '_get_default_favorite_user_ids', 'string': '"""Members"""'}), "('res.users', 'project_favorite_user_rel', 'project_id',\n 'user_id', default=_get_default_favorite_user_ids, string='Members')\n", (9216, 9345), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((9385, 9577), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'compute': '"""_compute_is_favorite"""', 'inverse': '"""_inverse_is_favorite"""', 'string': '"""Show Project on dashboard"""', 'help': '"""Whether this project should be displayed on your dashboard."""'}), "(compute='_compute_is_favorite', inverse=\n '_inverse_is_favorite', string='Show Project on dashboard', help=\n 'Whether this project should be displayed on your dashboard.')\n", (9399, 9577), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((9594, 9715), 'odoo.fields.Char', 'fields.Char', ([], {'string': '"""Use Tasks as"""', 'default': '"""Tasks"""', 'help': '"""Label used for the tasks of the project."""', 'translate': '(True)'}), "(string='Use Tasks as', default='Tasks', help=\n 'Label used for the tasks of the project.', translate=True)\n", (9605, 9715), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((9723, 9794), 'odoo.fields.One2many', 'fields.One2many', (['"""project.task"""', '"""project_id"""'], {'string': '"""Task Activities"""'}), "('project.task', 'project_id', string='Task Activities')\n", (9738, 9794), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), 
((9822, 9929), 'odoo.fields.Many2one', 'fields.Many2one', (['"""resource.calendar"""'], {'string': '"""Working Time"""', 'related': '"""company_id.resource_calendar_id"""'}), "('resource.calendar', string='Working Time', related=\n 'company_id.resource_calendar_id')\n", (9837, 9929), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((9957, 10071), 'odoo.fields.Many2many', 'fields.Many2many', (['"""project.task.type"""', '"""project_task_type_rel"""', '"""project_id"""', '"""type_id"""'], {'string': '"""Tasks Stages"""'}), "('project.task.type', 'project_task_type_rel', 'project_id',\n 'type_id', string='Tasks Stages')\n", (9973, 10071), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((10085, 10151), 'odoo.fields.Integer', 'fields.Integer', ([], {'compute': '"""_compute_task_count"""', 'string': '"""Task Count"""'}), "(compute='_compute_task_count', string='Task Count')\n", (10099, 10151), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((10167, 10303), 'odoo.fields.One2many', 'fields.One2many', (['"""project.task"""', '"""project_id"""'], {'string': '"""Tasks"""', 'domain': "['|', ('stage_id.fold', '=', False), ('stage_id', '=', False)]"}), "('project.task', 'project_id', string='Tasks', domain=['|',\n ('stage_id.fold', '=', False), ('stage_id', '=', False)])\n", (10182, 10303), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((10343, 10379), 'odoo.fields.Integer', 'fields.Integer', ([], {'string': '"""Color Index"""'}), "(string='Color Index')\n", (10357, 10379), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((10394, 10503), 'odoo.fields.Many2one', 'fields.Many2one', (['"""res.users"""'], {'string': '"""Project Manager"""', 'default': '(lambda self: self.env.user)', 'tracking': '(True)'}), "('res.users', string='Project Manager', default=lambda self:\n self.env.user, tracking=True)\n", (10409, 10503), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((10520, 10614), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'string': '"""Use email alias"""', 'compute': '"""_compute_alias_enabled"""', 'readonly': '(False)'}), "(string='Use email alias', compute='_compute_alias_enabled',\n readonly=False)\n", (10534, 10614), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((10626, 10898), 'odoo.fields.Many2one', 'fields.Many2one', (['"""mail.alias"""'], {'string': '"""Alias"""', 'ondelete': '"""restrict"""', 'required': '(True)', 'help': '"""Internal email associated with this project. Incoming emails are automatically synchronized with Tasks (or optionally Issues if the Issue Tracker module is installed)."""'}), "('mail.alias', string='Alias', ondelete='restrict', required\n =True, help=\n 'Internal email associated with this project. 
Incoming emails are automatically synchronized with Tasks (or optionally Issues if the Issue Tracker module is installed).'\n )\n", (10641, 10898), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((10933, 11536), 'odoo.fields.Selection', 'fields.Selection', (["[('followers', 'Invited internal users'), ('employees',\n 'All internal users'), ('portal',\n 'Invited portal users and all internal users')]"], {'string': '"""Visibility"""', 'required': '(True)', 'default': '"""portal"""', 'help': '"""Defines the visibility of the tasks of the project:\n- Invited internal users: employees may only see the followed project and tasks.\n- All internal users: employees may see all project and tasks.\n- Invited portal and all internal users: employees may see everything. Portal users may see project and tasks followed by\n them or by someone of their company."""'}), '([(\'followers\', \'Invited internal users\'), (\'employees\',\n \'All internal users\'), (\'portal\',\n \'Invited portal users and all internal users\')], string=\'Visibility\',\n required=True, default=\'portal\', help=\n """Defines the visibility of the tasks of the project:\n- Invited internal users: employees may only see the followed project and tasks.\n- All internal users: employees may see all project and tasks.\n- Invited portal and all internal users: employees may see everything. Portal users may see project and tasks followed by\n them or by someone of their company."""\n )\n', (10949, 11536), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((11705, 11806), 'odoo.fields.Many2many', 'fields.Many2many', (['"""res.users"""'], {'compute': '"""_compute_allowed_users"""', 'inverse': '"""_inverse_allowed_user"""'}), "('res.users', compute='_compute_allowed_users', inverse=\n '_inverse_allowed_user')\n", (11721, 11806), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((11834, 12012), 'odoo.fields.Many2many', 'fields.Many2many', (['"""res.users"""', '"""project_allowed_internal_users_rel"""'], {'string': '"""Allowed Internal Users"""', 'default': '(lambda self: self.env.user)', 'domain': "[('share', '=', False)]"}), "('res.users', 'project_allowed_internal_users_rel', string=\n 'Allowed Internal Users', default=lambda self: self.env.user, domain=[(\n 'share', '=', False)])\n", (11850, 12012), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((12082, 12214), 'odoo.fields.Many2many', 'fields.Many2many', (['"""res.users"""', '"""project_allowed_portal_users_rel"""'], {'string': '"""Allowed Portal Users"""', 'domain': "[('share', '=', True)]"}), "('res.users', 'project_allowed_portal_users_rel', string=\n 'Allowed Portal Users', domain=[('share', '=', True)])\n", (12098, 12214), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((12226, 12324), 'odoo.fields.Integer', 'fields.Integer', ([], {'compute': '"""_compute_attached_docs_count"""', 'string': '"""Number of documents attached"""'}), "(compute='_compute_attached_docs_count', string=\n 'Number of documents attached')\n", (12240, 12324), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((12337, 12369), 'odoo.fields.Date', 'fields.Date', ([], {'string': '"""Start Date"""'}), "(string='Start Date')\n", (12348, 12369), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((12381, 12445), 'odoo.fields.Date', 'fields.Date', ([], {'string': '"""Expiration Date"""', 'index': '(True)', 'tracking': '(True)'}), 
"(string='Expiration Date', index=True, tracking=True)\n", (12392, 12445), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((12471, 12681), 'odoo.fields.Many2one', 'fields.Many2one', (['"""project.project"""'], {'string': '"""Sub-task Project"""', 'ondelete': '"""restrict"""', 'help': '"""Project in which sub-tasks of the current project will be created. It can be the current project itself."""'}), "('project.project', string='Sub-task Project', ondelete=\n 'restrict', help=\n 'Project in which sub-tasks of the current project will be created. It can be the current project itself.'\n )\n", (12486, 12681), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((13003, 13074), 'odoo.fields.Datetime', 'fields.Datetime', ([], {'compute': '"""_compute_rating_request_deadline"""', 'store': '(True)'}), "(compute='_compute_rating_request_deadline', store=True)\n", (13018, 13074), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((13228, 13696), 'odoo.fields.Selection', 'fields.Selection', (["[('stage', 'Rating when changing stage'), ('periodic', 'Periodical Rating')]", '"""Customer Ratings Status"""'], {'default': '"""stage"""', 'required': '(True)', 'help': '"""How to get customer feedback?\n- Rating when changing stage: an email will be sent when a task is pulled in another stage.\n- Periodical Rating: email will be sent periodically.\n\nDon\'t forget to set up the mail templates on the stages for which you want to get the customer\'s feedbacks."""'}), '([(\'stage\', \'Rating when changing stage\'), (\'periodic\',\n \'Periodical Rating\')], \'Customer Ratings Status\', default=\'stage\',\n required=True, help=\n """How to get customer feedback?\n- Rating when changing stage: an email will be sent when a task is pulled in another stage.\n- Periodical Rating: email will be sent periodically.\n\nDon\'t forget to set up the mail templates on the stages for which you want to get the customer\'s feedbacks."""\n )\n', (13244, 13696), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((13789, 14026), 'odoo.fields.Selection', 'fields.Selection', (["[('daily', 'Daily'), ('weekly', 'Weekly'), ('bimonthly', 'Twice a Month'),\n ('monthly', 'Once a Month'), ('quarterly', 'Quarterly'), ('yearly',\n 'Yearly')]", '"""Rating Frequency"""'], {'required': '(True)', 'default': '"""monthly"""'}), "([('daily', 'Daily'), ('weekly', 'Weekly'), ('bimonthly',\n 'Twice a Month'), ('monthly', 'Once a Month'), ('quarterly',\n 'Quarterly'), ('yearly', 'Yearly')], 'Rating Frequency', required=True,\n default='monthly')\n", (13805, 14026), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((14230, 14261), 'odoo.api.depends', 'api.depends', (['"""partner_id.email"""'], {}), "('partner_id.email')\n", (14241, 14261), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((14711, 14742), 'odoo.api.depends', 'api.depends', (['"""partner_id.phone"""'], {}), "('partner_id.phone')\n", (14722, 14742), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((15192, 15221), 'odoo.api.onchange', 'api.onchange', (['"""alias_enabled"""'], {}), "('alias_enabled')\n", (15204, 15221), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((15492, 15559), 'odoo.api.depends', 'api.depends', (['"""allowed_internal_user_ids"""', '"""allowed_portal_user_ids"""'], {}), "('allowed_internal_user_ids', 'allowed_portal_user_ids')\n", (15503, 15559), False, 'from odoo import api, 
fields, models, tools, SUPERUSER_ID, _\n'), ((16684, 16736), 'odoo.api.depends', 'api.depends', (['"""rating_status"""', '"""rating_status_period"""'], {}), "('rating_status', 'rating_status_period')\n", (16695, 16736), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((18372, 18415), 'odoo.api.returns', 'api.returns', (['"""self"""', '(lambda value: value.id)'], {}), "('self', lambda value: value.id)\n", (18383, 18415), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((29166, 29194), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'default': '(True)'}), '(default=True)\n', (29180, 29194), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((29206, 29275), 'odoo.fields.Char', 'fields.Char', ([], {'string': '"""Title"""', 'tracking': '(True)', 'required': '(True)', 'index': '(True)'}), "(string='Title', tracking=True, required=True, index=True)\n", (29217, 29275), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((29294, 29327), 'odoo.fields.Html', 'fields.Html', ([], {'string': '"""Description"""'}), "(string='Description')\n", (29305, 29327), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((29343, 29447), 'odoo.fields.Selection', 'fields.Selection', (["[('0', 'Normal'), ('1', 'Important')]"], {'default': '"""0"""', 'index': '(True)', 'string': '"""Priority"""'}), "([('0', 'Normal'), ('1', 'Important')], default='0', index=\n True, string='Priority')\n", (29359, 29447), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((29481, 29609), 'odoo.fields.Integer', 'fields.Integer', ([], {'string': '"""Sequence"""', 'index': '(True)', 'default': '(10)', 'help': '"""Gives the sequence order when displaying a list of tasks."""'}), "(string='Sequence', index=True, default=10, help=\n 'Gives the sequence order when displaying a list of tasks.')\n", (29495, 29609), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((29628, 29929), 'odoo.fields.Many2one', 'fields.Many2one', (['"""project.task.type"""'], {'string': '"""Stage"""', 'compute': '"""_compute_stage_id"""', 'store': '(True)', 'readonly': '(False)', 'ondelete': '"""restrict"""', 'tracking': '(True)', 'index': '(True)', 'default': '_get_default_stage_id', 'group_expand': '"""_read_group_stage_ids"""', 'domain': '"""[(\'project_ids\', \'=\', project_id)]"""', 'copy': '(False)'}), '(\'project.task.type\', string=\'Stage\', compute=\n \'_compute_stage_id\', store=True, readonly=False, ondelete=\'restrict\',\n tracking=True, index=True, default=_get_default_stage_id, group_expand=\n \'_read_group_stage_ids\', domain="[(\'project_ids\', \'=\', project_id)]",\n copy=False)\n', (29643, 29929), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((29950, 29997), 'odoo.fields.Many2many', 'fields.Many2many', (['"""project.tags"""'], {'string': '"""Tags"""'}), "('project.tags', string='Tags')\n", (29966, 29997), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((30017, 30181), 'odoo.fields.Selection', 'fields.Selection', (["[('normal', 'In Progress'), ('done', 'Ready'), ('blocked', 'Blocked')]"], {'string': '"""Kanban State"""', 'copy': '(False)', 'default': '"""normal"""', 'required': '(True)'}), "([('normal', 'In Progress'), ('done', 'Ready'), ('blocked',\n 'Blocked')], string='Kanban State', copy=False, default='normal',\n required=True)\n", (30033, 30181), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), 
((30232, 30331), 'odoo.fields.Char', 'fields.Char', ([], {'compute': '"""_compute_kanban_state_label"""', 'string': '"""Kanban State Label"""', 'tracking': '(True)'}), "(compute='_compute_kanban_state_label', string=\n 'Kanban State Label', tracking=True)\n", (30243, 30331), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((30345, 30401), 'odoo.fields.Datetime', 'fields.Datetime', (['"""Created On"""'], {'readonly': '(True)', 'index': '(True)'}), "('Created On', readonly=True, index=True)\n", (30360, 30401), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((30419, 30480), 'odoo.fields.Datetime', 'fields.Datetime', (['"""Last Updated On"""'], {'readonly': '(True)', 'index': '(True)'}), "('Last Updated On', readonly=True, index=True)\n", (30434, 30480), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((30496, 30557), 'odoo.fields.Datetime', 'fields.Datetime', ([], {'string': '"""Ending Date"""', 'index': '(True)', 'copy': '(False)'}), "(string='Ending Date', index=True, copy=False)\n", (30511, 30557), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((30576, 30655), 'odoo.fields.Datetime', 'fields.Datetime', ([], {'string': '"""Assigning Date"""', 'index': '(True)', 'copy': '(False)', 'readonly': '(True)'}), "(string='Assigning Date', index=True, copy=False, readonly=True)\n", (30591, 30655), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((30676, 30745), 'odoo.fields.Date', 'fields.Date', ([], {'string': '"""Deadline"""', 'index': '(True)', 'copy': '(False)', 'tracking': '(True)'}), "(string='Deadline', index=True, copy=False, tracking=True)\n", (30687, 30745), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((30775, 30861), 'odoo.fields.Datetime', 'fields.Datetime', ([], {'string': '"""Last Stage Update"""', 'index': '(True)', 'copy': '(False)', 'readonly': '(True)'}), "(string='Last Stage Update', index=True, copy=False,\n readonly=True)\n", (30790, 30861), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((30899, 31088), 'odoo.fields.Many2one', 'fields.Many2one', (['"""project.project"""'], {'string': '"""Project"""', 'compute': '"""_compute_project_id"""', 'store': '(True)', 'readonly': '(False)', 'index': '(True)', 'tracking': '(True)', 'check_company': '(True)', 'change_default': '(True)'}), "('project.project', string='Project', compute=\n '_compute_project_id', store=True, readonly=False, index=True, tracking\n =True, check_company=True, change_default=True)\n", (30914, 31088), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((31115, 31247), 'odoo.fields.Float', 'fields.Float', (['"""Initially Planned Hours"""'], {'help': '"""Time planned to achieve this task (including its sub-tasks)."""', 'tracking': '(True)'}), "('Initially Planned Hours', help=\n 'Time planned to achieve this task (including its sub-tasks).',\n tracking=True)\n", (31127, 31247), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((31267, 31503), 'odoo.fields.Float', 'fields.Float', (['"""Sub-tasks Planned Hours"""'], {'compute': '"""_compute_subtask_planned_hours"""', 'help': '"""Sum of the time planned of all the sub-tasks linked to this task. Usually less or equal to the initially time planned of this task."""'}), "('Sub-tasks Planned Hours', compute=\n '_compute_subtask_planned_hours', help=\n 'Sum of the time planned of all the sub-tasks linked to this task. 
Usually less or equal to the initially time planned of this task.'\n )\n", (31279, 31503), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((31503, 31619), 'odoo.fields.Many2one', 'fields.Many2one', (['"""res.users"""'], {'string': '"""Assigned to"""', 'default': '(lambda self: self.env.uid)', 'index': '(True)', 'tracking': '(True)'}), "('res.users', string='Assigned to', default=lambda self:\n self.env.uid, index=True, tracking=True)\n", (31518, 31619), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((31657, 31852), 'odoo.fields.Many2one', 'fields.Many2one', (['"""res.partner"""'], {'string': '"""Customer"""', 'compute': '"""_compute_partner_id"""', 'store': '(True)', 'readonly': '(False)', 'domain': '"""[\'|\', (\'company_id\', \'=\', False), (\'company_id\', \'=\', company_id)]"""'}), '(\'res.partner\', string=\'Customer\', compute=\n \'_compute_partner_id\', store=True, readonly=False, domain=\n "[\'|\', (\'company_id\', \'=\', False), (\'company_id\', \'=\', company_id)]")\n', (31672, 31852), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((31892, 31954), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'related': '"""partner_id.is_company"""', 'readonly': '(True)'}), "(related='partner_id.is_company', readonly=True)\n", (31906, 31954), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((31983, 32042), 'odoo.fields.Many2one', 'fields.Many2one', ([], {'related': '"""partner_id.commercial_partner_id"""'}), "(related='partner_id.commercial_partner_id')\n", (31998, 32042), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((32063, 32207), 'odoo.fields.Char', 'fields.Char', ([], {'compute': '"""_compute_partner_email"""', 'inverse': '"""_inverse_partner_email"""', 'string': '"""Email"""', 'readonly': '(False)', 'store': '(True)', 'copy': '(False)'}), "(compute='_compute_partner_email', inverse=\n '_inverse_partner_email', string='Email', readonly=False, store=True,\n copy=False)\n", (32074, 32207), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((32236, 32380), 'odoo.fields.Char', 'fields.Char', ([], {'compute': '"""_compute_partner_phone"""', 'inverse': '"""_inverse_partner_phone"""', 'string': '"""Phone"""', 'readonly': '(False)', 'store': '(True)', 'copy': '(False)'}), "(compute='_compute_partner_phone', inverse=\n '_inverse_partner_phone', string='Phone', readonly=False, store=True,\n copy=False)\n", (32247, 32380), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((32410, 32474), 'odoo.fields.Char', 'fields.Char', (['"""Ribbon message"""'], {'compute': '"""_compute_ribbon_message"""'}), "('Ribbon message', compute='_compute_ribbon_message')\n", (32421, 32474), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((32494, 32548), 'odoo.fields.Char', 'fields.Char', ([], {'related': '"""partner_id.city"""', 'readonly': '(False)'}), "(related='partner_id.city', readonly=False)\n", (32505, 32548), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((32566, 32670), 'odoo.fields.Many2one', 'fields.Many2one', (['"""res.users"""'], {'string': '"""Project Manager"""', 'related': '"""project_id.user_id"""', 'readonly': '(True)'}), "('res.users', string='Project Manager', related=\n 'project_id.user_id', readonly=True)\n", (32581, 32670), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((32683, 32855), 'odoo.fields.Many2one', 'fields.Many2one', 
(['"""res.company"""'], {'string': '"""Company"""', 'compute': '"""_compute_company_id"""', 'store': '(True)', 'readonly': '(False)', 'required': '(True)', 'copy': '(True)', 'default': '_default_company_id'}), "('res.company', string='Company', compute=\n '_compute_company_id', store=True, readonly=False, required=True, copy=\n True, default=_default_company_id)\n", (32698, 32855), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((32875, 32911), 'odoo.fields.Integer', 'fields.Integer', ([], {'string': '"""Color Index"""'}), "(string='Color Index')\n", (32889, 32911), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((32929, 33025), 'odoo.fields.Char', 'fields.Char', ([], {'related': '"""user_id.email"""', 'string': '"""User Email"""', 'readonly': '(True)', 'related_sudo': '(False)'}), "(related='user_id.email', string='User Email', readonly=True,\n related_sudo=False)\n", (32940, 33025), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((33043, 33191), 'odoo.fields.One2many', 'fields.One2many', (['"""ir.attachment"""'], {'compute': '"""_compute_attachment_ids"""', 'string': '"""Main Attachments"""', 'help': '"""Attachment that don\'t come from message."""'}), '(\'ir.attachment\', compute=\'_compute_attachment_ids\', string=\n \'Main Attachments\', help="Attachment that don\'t come from message.")\n', (33058, 33191), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((33379, 33543), 'odoo.fields.Many2one', 'fields.Many2one', (['"""ir.attachment"""'], {'domain': '"""[(\'res_model\', \'=\', \'project.task\'), (\'res_id\', \'=\', id), (\'mimetype\', \'ilike\', \'image\')]"""', 'string': '"""Cover Image"""'}), '(\'ir.attachment\', domain=\n "[(\'res_model\', \'=\', \'project.task\'), (\'res_id\', \'=\', id), (\'mimetype\', \'ilike\', \'image\')]"\n , string=\'Cover Image\')\n', (33394, 33543), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((33555, 33678), 'odoo.fields.Char', 'fields.Char', ([], {'related': '"""stage_id.legend_blocked"""', 'string': '"""Kanban Blocked Explanation"""', 'readonly': '(True)', 'related_sudo': '(False)'}), "(related='stage_id.legend_blocked', string=\n 'Kanban Blocked Explanation', readonly=True, related_sudo=False)\n", (33566, 33678), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((33692, 33810), 'odoo.fields.Char', 'fields.Char', ([], {'related': '"""stage_id.legend_done"""', 'string': '"""Kanban Valid Explanation"""', 'readonly': '(True)', 'related_sudo': '(False)'}), "(related='stage_id.legend_done', string=\n 'Kanban Valid Explanation', readonly=True, related_sudo=False)\n", (33703, 33810), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((33826, 33948), 'odoo.fields.Char', 'fields.Char', ([], {'related': '"""stage_id.legend_normal"""', 'string': '"""Kanban Ongoing Explanation"""', 'readonly': '(True)', 'related_sudo': '(False)'}), "(related='stage_id.legend_normal', string=\n 'Kanban Ongoing Explanation', readonly=True, related_sudo=False)\n", (33837, 33948), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((33960, 34067), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'related': '"""stage_id.is_closed"""', 'string': '"""Closing Stage"""', 'readonly': '(True)', 'related_sudo': '(False)'}), "(related='stage_id.is_closed', string='Closing Stage',\n readonly=True, related_sudo=False)\n", (33974, 34067), False, 'from odoo import api, fields, models, tools, 
SUPERUSER_ID, _\n'), ((34080, 34145), 'odoo.fields.Many2one', 'fields.Many2one', (['"""project.task"""'], {'string': '"""Parent Task"""', 'index': '(True)'}), "('project.task', string='Parent Task', index=True)\n", (34095, 34145), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((34162, 34263), 'odoo.fields.One2many', 'fields.One2many', (['"""project.task"""', '"""parent_id"""'], {'string': '"""Sub-tasks"""', 'context': "{'active_test': False}"}), "('project.task', 'parent_id', string='Sub-tasks', context={\n 'active_test': False})\n", (34177, 34263), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((34284, 34405), 'odoo.fields.Many2one', 'fields.Many2one', (['"""project.project"""'], {'related': '"""project_id.subtask_project_id"""', 'string': '"""Sub-task Project"""', 'readonly': '(True)'}), "('project.project', related='project_id.subtask_project_id',\n string='Sub-task Project', readonly=True)\n", (34299, 34405), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((34423, 34520), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'string': '"""Allow Sub-tasks"""', 'related': '"""project_id.allow_subtasks"""', 'readonly': '(True)'}), "(string='Allow Sub-tasks', related=\n 'project_id.allow_subtasks', readonly=True)\n", (34437, 34520), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((34536, 34602), 'odoo.fields.Integer', 'fields.Integer', (['"""Sub-task count"""'], {'compute': '"""_compute_subtask_count"""'}), "('Sub-task count', compute='_compute_subtask_count')\n", (34550, 34602), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((34620, 34770), 'odoo.fields.Char', 'fields.Char', ([], {'string': '"""Email From"""', 'help': '"""These people will receive email."""', 'index': '(True)', 'compute': '"""_compute_email_from"""', 'store': '"""True"""', 'readonly': '(False)'}), "(string='Email From', help='These people will receive email.',\n index=True, compute='_compute_email_from', store='True', readonly=False)\n", (34631, 34770), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((34798, 34974), 'odoo.fields.Many2many', 'fields.Many2many', (['"""res.users"""'], {'string': '"""Visible to"""', 'groups': '"""project.group_project_manager"""', 'compute': '"""_compute_allowed_user_ids"""', 'store': '(True)', 'readonly': '(False)', 'copy': '(False)'}), "('res.users', string='Visible to', groups=\n 'project.group_project_manager', compute='_compute_allowed_user_ids',\n store=True, readonly=False, copy=False)\n", (34814, 34974), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((34999, 35090), 'odoo.fields.Selection', 'fields.Selection', ([], {'related': '"""project_id.privacy_visibility"""', 'string': '"""Project Visibility"""'}), "(related='project_id.privacy_visibility', string=\n 'Project Visibility')\n", (35015, 35090), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((35208, 35320), 'odoo.fields.Float', 'fields.Float', ([], {'compute': '"""_compute_elapsed"""', 'string': '"""Working hours to assign"""', 'store': '(True)', 'group_operator': '"""avg"""'}), "(compute='_compute_elapsed', string='Working hours to assign',\n store=True, group_operator='avg')\n", (35220, 35320), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((35343, 35454), 'odoo.fields.Float', 'fields.Float', ([], {'compute': '"""_compute_elapsed"""', 'string': '"""Working hours to close"""', 'store': 
'(True)', 'group_operator': '"""avg"""'}), "(compute='_compute_elapsed', string='Working hours to close',\n store=True, group_operator='avg')\n", (35355, 35454), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((35475, 35586), 'odoo.fields.Float', 'fields.Float', ([], {'compute': '"""_compute_elapsed"""', 'string': '"""Working days to assign"""', 'store': '(True)', 'group_operator': '"""avg"""'}), "(compute='_compute_elapsed', string='Working days to assign',\n store=True, group_operator='avg')\n", (35487, 35586), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((35608, 35718), 'odoo.fields.Float', 'fields.Float', ([], {'compute': '"""_compute_elapsed"""', 'string': '"""Working days to close"""', 'store': '(True)', 'group_operator': '"""avg"""'}), "(compute='_compute_elapsed', string='Working days to close',\n store=True, group_operator='avg')\n", (35620, 35718), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((35825, 35941), 'odoo.fields.One2many', 'fields.One2many', ([], {'domain': "(lambda self: [('model', '=', self._name), ('message_type', 'in', ['email',\n 'comment'])])"}), "(domain=lambda self: [('model', '=', self._name), (\n 'message_type', 'in', ['email', 'comment'])])\n", (35840, 35941), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((35990, 36048), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'related': '"""project_id.allow_recurring_tasks"""'}), "(related='project_id.allow_recurring_tasks')\n", (36004, 36048), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((36070, 36104), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'string': '"""Recurrent"""'}), "(string='Recurrent')\n", (36084, 36104), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((36127, 36212), 'odoo.fields.Integer', 'fields.Integer', ([], {'string': '"""Tasks in Recurrence"""', 'compute': '"""_compute_recurring_count"""'}), "(string='Tasks in Recurrence', compute='_compute_recurring_count'\n )\n", (36141, 36212), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((36228, 36282), 'odoo.fields.Many2one', 'fields.Many2one', (['"""project.task.recurrence"""'], {'copy': '(False)'}), "('project.task.recurrence', copy=False)\n", (36243, 36282), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((36307, 36451), 'odoo.fields.Selection', 'fields.Selection', (["[('this', 'This task'), ('subsequent', 'This and following tasks'), ('all',\n 'All tasks')]"], {'default': '"""this"""', 'store': '(False)'}), "([('this', 'This task'), ('subsequent',\n 'This and following tasks'), ('all', 'All tasks')], default='this',\n store=False)\n", (36323, 36451), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((36500, 36578), 'odoo.fields.Char', 'fields.Char', ([], {'string': '"""Next Recurrencies"""', 'compute': '"""_compute_recurrence_message"""'}), "(string='Next Recurrencies', compute='_compute_recurrence_message')\n", (36511, 36578), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((36602, 36697), 'odoo.fields.Integer', 'fields.Integer', ([], {'string': '"""Repeat Every"""', 'default': '(1)', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "(string='Repeat Every', default=1, compute='_compute_repeat',\n readonly=False)\n", (36616, 36697), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((36712, 36874), 'odoo.fields.Selection', 
'fields.Selection', (["[('day', 'Days'), ('week', 'Weeks'), ('month', 'Months'), ('year', 'Years')]"], {'default': '"""week"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "([('day', 'Days'), ('week', 'Weeks'), ('month', 'Months'),\n ('year', 'Years')], default='week', compute='_compute_repeat', readonly\n =False)\n", (36728, 36874), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((36923, 37111), 'odoo.fields.Selection', 'fields.Selection', (["[('forever', 'Forever'), ('until', 'End Date'), ('after',\n 'Number of Repetitions')]"], {'default': '"""forever"""', 'string': '"""Until"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "([('forever', 'Forever'), ('until', 'End Date'), ('after',\n 'Number of Repetitions')], default='forever', string='Until', compute=\n '_compute_repeat', readonly=False)\n", (36939, 37111), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((37153, 37226), 'odoo.fields.Date', 'fields.Date', ([], {'string': '"""End Date"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "(string='End Date', compute='_compute_repeat', readonly=False)\n", (37164, 37226), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((37247, 37341), 'odoo.fields.Integer', 'fields.Integer', ([], {'string': '"""Repetitions"""', 'default': '(1)', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "(string='Repetitions', default=1, compute='_compute_repeat',\n readonly=False)\n", (37261, 37341), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((37361, 37503), 'odoo.fields.Selection', 'fields.Selection', (["[('date', 'Date of the Month'), ('day', 'Day of the Month')]"], {'default': '"""date"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "([('date', 'Date of the Month'), ('day', 'Day of the Month'\n )], default='date', compute='_compute_repeat', readonly=False)\n", (37377, 37503), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((37544, 37683), 'odoo.fields.Selection', 'fields.Selection', (["[('date', 'Date of the Year'), ('day', 'Day of the Year')]"], {'default': '"""date"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "([('date', 'Date of the Year'), ('day', 'Day of the Year')],\n default='date', compute='_compute_repeat', readonly=False)\n", (37560, 37683), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((37714, 37785), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'string': '"""Mon"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "(string='Mon', compute='_compute_repeat', readonly=False)\n", (37728, 37785), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((37796, 37867), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'string': '"""Tue"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "(string='Tue', compute='_compute_repeat', readonly=False)\n", (37810, 37867), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((37878, 37949), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'string': '"""Wed"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "(string='Wed', compute='_compute_repeat', readonly=False)\n", (37892, 37949), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((37960, 38031), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'string': '"""Thu"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "(string='Thu', 
compute='_compute_repeat', readonly=False)\n", (37974, 38031), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((38042, 38113), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'string': '"""Fri"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "(string='Fri', compute='_compute_repeat', readonly=False)\n", (38056, 38113), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((38124, 38195), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'string': '"""Sat"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "(string='Sat', compute='_compute_repeat', readonly=False)\n", (38138, 38195), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((38206, 38277), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'string': '"""Sun"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "(string='Sun', compute='_compute_repeat', readonly=False)\n", (38220, 38277), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((38430, 38596), 'odoo.fields.Selection', 'fields.Selection', (["[('first', 'First'), ('second', 'Second'), ('third', 'Third'), ('last', 'Last')\n ]"], {'default': '"""first"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "([('first', 'First'), ('second', 'Second'), ('third',\n 'Third'), ('last', 'Last')], default='first', compute='_compute_repeat',\n readonly=False)\n", (38446, 38596), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((38649, 38890), 'odoo.fields.Selection', 'fields.Selection', (["[('mon', 'Monday'), ('tue', 'Tuesday'), ('wed', 'Wednesday'), ('thu',\n 'Thursday'), ('fri', 'Friday'), ('sat', 'Saturday'), ('sun', 'Sunday')]"], {'string': '"""Day Of The Week"""', 'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "([('mon', 'Monday'), ('tue', 'Tuesday'), ('wed',\n 'Wednesday'), ('thu', 'Thursday'), ('fri', 'Friday'), ('sat',\n 'Saturday'), ('sun', 'Sunday')], string='Day Of The Week', compute=\n '_compute_repeat', readonly=False)\n", (38665, 38890), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((38960, 39307), 'odoo.fields.Selection', 'fields.Selection', (["[('january', 'January'), ('february', 'February'), ('march', 'March'), (\n 'april', 'April'), ('may', 'May'), ('june', 'June'), ('july', 'July'),\n ('august', 'August'), ('september', 'September'), ('october', 'October'\n ), ('november', 'November'), ('december', 'December')]"], {'compute': '"""_compute_repeat"""', 'readonly': '(False)'}), "([('january', 'January'), ('february', 'February'), (\n 'march', 'March'), ('april', 'April'), ('may', 'May'), ('june', 'June'),\n ('july', 'July'), ('august', 'August'), ('september', 'September'), (\n 'october', 'October'), ('november', 'November'), ('december',\n 'December')], compute='_compute_repeat', readonly=False)\n", (38976, 39307), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((39416, 39468), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'compute': '"""_compute_repeat_visibility"""'}), "(compute='_compute_repeat_visibility')\n", (39430, 39468), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((39491, 39543), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'compute': '"""_compute_repeat_visibility"""'}), "(compute='_compute_repeat_visibility')\n", (39505, 39543), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((39567, 39619), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'compute': 
'"""_compute_repeat_visibility"""'}), "(compute='_compute_repeat_visibility')\n", (39581, 39619), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((39644, 39696), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'compute': '"""_compute_repeat_visibility"""'}), "(compute='_compute_repeat_visibility')\n", (39658, 39696), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((40036, 40121), 'odoo.api.depends', 'api.depends', (['"""recurring_task"""', '"""repeat_unit"""', '"""repeat_on_month"""', '"""repeat_on_year"""'], {}), "('recurring_task', 'repeat_unit', 'repeat_on_month',\n 'repeat_on_year')\n", (40047, 40121), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((40745, 40774), 'odoo.api.depends', 'api.depends', (['"""recurring_task"""'], {}), "('recurring_task')\n", (40756, 40774), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((41461, 41733), 'odoo.api.depends', 'api.depends', (['"""recurring_task"""', '"""repeat_interval"""', '"""repeat_unit"""', '"""repeat_type"""', '"""repeat_until"""', '"""repeat_number"""', '"""repeat_on_month"""', '"""repeat_on_year"""', '"""mon"""', '"""tue"""', '"""wed"""', '"""thu"""', '"""fri"""', '"""sat"""', '"""sun"""', '"""repeat_day"""', '"""repeat_week"""', '"""repeat_month"""', '"""repeat_weekday"""'], {}), "('recurring_task', 'repeat_interval', 'repeat_unit',\n 'repeat_type', 'repeat_until', 'repeat_number', 'repeat_on_month',\n 'repeat_on_year', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun',\n 'repeat_day', 'repeat_week', 'repeat_month', 'repeat_weekday')\n", (41472, 41733), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((43753, 43781), 'odoo.api.depends', 'api.depends', (['"""recurrence_id"""'], {}), "('recurrence_id')\n", (43764, 43781), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((44276, 44307), 'odoo.api.depends', 'api.depends', (['"""partner_id.email"""'], {}), "('partner_id.email')\n", (44287, 44307), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((44721, 44752), 'odoo.api.depends', 'api.depends', (['"""partner_id.phone"""'], {}), "('partner_id.phone')\n", (44732, 44752), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((45166, 45225), 'odoo.api.depends', 'api.depends', (['"""partner_email"""', '"""partner_phone"""', '"""partner_id"""'], {}), "('partner_email', 'partner_phone', 'partner_id')\n", (45177, 45225), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((46019, 46046), 'odoo.api.constrains', 'api.constrains', (['"""parent_id"""'], {}), "('parent_id')\n", (46033, 46046), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((46220, 46254), 'odoo.api.constrains', 'api.constrains', (['"""allowed_user_ids"""'], {}), "('allowed_user_ids')\n", (46234, 46254), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((47106, 47181), 'odoo.api.depends', 'api.depends', (['"""project_id.allowed_user_ids"""', '"""project_id.privacy_visibility"""'], {}), "('project_id.allowed_user_ids', 'project_id.privacy_visibility')\n", (47117, 47181), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((47978, 48031), 'odoo.api.depends', 'api.depends', (['"""create_date"""', '"""date_end"""', '"""date_assign"""'], {}), "('create_date', 'date_end', 'date_assign')\n", (47989, 48031), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((49486, 
49525), 'odoo.api.depends', 'api.depends', (['"""stage_id"""', '"""kanban_state"""'], {}), "('stage_id', 'kanban_state')\n", (49497, 49525), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((50519, 50557), 'odoo.api.depends', 'api.depends', (['"""child_ids.planned_hours"""'], {}), "('child_ids.planned_hours')\n", (50530, 50557), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((50775, 50799), 'odoo.api.depends', 'api.depends', (['"""child_ids"""'], {}), "('child_ids')\n", (50786, 50799), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((50933, 50959), 'odoo.api.onchange', 'api.onchange', (['"""company_id"""'], {}), "('company_id')\n", (50945, 50959), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((51098, 51134), 'odoo.api.depends', 'api.depends', (['"""project_id.company_id"""'], {}), "('project_id.company_id')\n", (51109, 51134), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((51298, 51323), 'odoo.api.depends', 'api.depends', (['"""project_id"""'], {}), "('project_id')\n", (51309, 51323), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((51694, 51737), 'odoo.api.returns', 'api.returns', (['"""self"""', '(lambda value: value.id)'], {}), "('self', lambda value: value.id)\n", (51705, 51737), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((52069, 52096), 'odoo.api.constrains', 'api.constrains', (['"""parent_id"""'], {}), "('parent_id')\n", (52083, 52096), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((60873, 60933), 'odoo.api.depends', 'api.depends', (['"""parent_id.partner_id"""', '"""project_id.partner_id"""'], {}), "('parent_id.partner_id', 'project_id.partner_id')\n", (60884, 60933), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((61626, 61681), 'odoo.api.depends', 'api.depends', (['"""partner_id.email"""', '"""parent_id.email_from"""'], {}), "('partner_id.email', 'parent_id.email_from')\n", (61637, 61681), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((61891, 61945), 'odoo.api.depends', 'api.depends', (['"""parent_id.project_id.subtask_project_id"""'], {}), "('parent_id.project_id.subtask_project_id')\n", (61902, 61945), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((73717, 73751), 'odoo.fields.Char', 'fields.Char', (['"""Name"""'], {'required': '(True)'}), "('Name', required=True)\n", (73728, 73751), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((73764, 73822), 'odoo.fields.Integer', 'fields.Integer', ([], {'string': '"""Color"""', 'default': '_get_default_color'}), "(string='Color', default=_get_default_color)\n", (73778, 73822), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((52373, 52382), 'odoo._', '_', (['"""task"""'], {}), "('task')\n", (52374, 52382), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((57689, 57710), 'odoo.fields.Datetime.now', 'fields.Datetime.now', ([], {}), '()\n', (57708, 57710), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((73690, 73704), 'random.randint', 'randint', (['(1)', '(11)'], {}), '(1, 11)\n', (73697, 73704), False, 'from random import randint\n'), ((4130, 4147), 'odoo._', '_', (['"""Delete Stage"""'], {}), "('Delete Stage')\n", (4131, 4147), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((16447, 16665), 'odoo._', 
'_', (['"""The project cannot be shared with the recipient(s) because the privacy of the project is too restricted. Set the privacy to \'Visible by following customers\' in order to make it accessible by the recipient(s)."""'], {}), '("The project cannot be shared with the recipient(s) because the privacy of the project is too restricted. Set the privacy to \'Visible by following customers\' in order to make it accessible by the recipient(s)."\n )\n', (16448, 16665), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((21330, 21347), 'odoo._', '_', (['"""Confirmation"""'], {}), "('Confirmation')\n", (21331, 21347), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((24209, 24254), 'ast.literal_eval', 'ast.literal_eval', (["(self.alias_defaults or '{}')"], {}), "(self.alias_defaults or '{}')\n", (24225, 24254), False, 'import ast\n'), ((25957, 25975), 'odoo._', '_', (['"""Ratings of %s"""'], {}), "('Ratings of %s')\n", (25958, 25975), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((26016, 26051), 'ast.literal_eval', 'ast.literal_eval', (["action['context']"], {}), "(action['context'])\n", (26032, 26051), False, 'import ast\n'), ((41941, 41960), 'odoo.fields.Date.today', 'fields.Date.today', ([], {}), '()\n', (41958, 41960), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((48280, 48325), 'odoo.fields.Datetime.from_string', 'fields.Datetime.from_string', (['task.create_date'], {}), '(task.create_date)\n', (48307, 48325), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((50270, 50500), 'odoo._', '_', (['"""The task cannot be shared with the recipient(s) because the privacy of the project is too restricted. Set the privacy of the project to \'Visible by following customers\' in order to make it accessible by the recipient(s)."""'], {}), '("The task cannot be shared with the recipient(s) because the privacy of the project is too restricted. 
Set the privacy of the project to \'Visible by following customers\' in order to make it accessible by the recipient(s)."\n )\n', (50271, 50500), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((51891, 51916), 'odoo._', '_', (['"""%s (copy)"""', 'self.name'], {}), "('%s (copy)', self.name)\n", (51892, 51916), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((70932, 70948), 'odoo._', '_', (['"""Parent Task"""'], {}), "('Parent Task')\n", (70933, 70948), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((1232, 1244), 'odoo._', '_', (['"""Blocked"""'], {}), "('Blocked')\n", (1233, 1244), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((1494, 1504), 'odoo._', '_', (['"""Ready"""'], {}), "('Ready')\n", (1495, 1504), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((1752, 1768), 'odoo._', '_', (['"""In Progress"""'], {}), "('In Progress')\n", (1753, 1768), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((16968, 16989), 'odoo.fields.datetime.now', 'fields.datetime.now', ([], {}), '()\n', (16987, 16989), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((18569, 18583), 'odoo._', '_', (['"""%s (copy)"""'], {}), "('%s (copy)')\n", (18570, 18583), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((45574, 45664), 'odoo._', '_', (['"""By saving this change, the customer email and phone number will also be updated."""'], {}), "('By saving this change, the customer email and phone number will also be updated.'\n )\n", (45575, 45664), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((46153, 46212), 'odoo._', '_', (['"""Error! You cannot create recursive hierarchy of tasks."""'], {}), "('Error! You cannot create recursive hierarchy of tasks.')\n", (46154, 46212), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((48393, 48438), 'odoo.fields.Datetime.from_string', 'fields.Datetime.from_string', (['task.date_assign'], {}), '(task.date_assign)\n', (48420, 48438), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((48882, 48924), 'odoo.fields.Datetime.from_string', 'fields.Datetime.from_string', (['task.date_end'], {}), '(task.date_end)\n', (48909, 48924), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((54874, 54897), 'odoo.fields.Datetime.today', 'fields.Datetime.today', ([], {}), '()\n', (54895, 54897), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((55352, 55371), 'odoo.fields.Date.today', 'fields.Date.today', ([], {}), '()\n', (55369, 55371), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((55374, 55391), 'datetime.timedelta', 'timedelta', ([], {'days': '(7)'}), '(days=7)\n', (55383, 55391), False, 'from datetime import timedelta, datetime\n'), ((56704, 56725), 'odoo.fields.Datetime.now', 'fields.Datetime.now', ([], {}), '()\n', (56723, 56725), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((56967, 56988), 'odoo.fields.Datetime.now', 'fields.Datetime.now', ([], {}), '()\n', (56986, 56988), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((57288, 57311), 'odoo.fields.Datetime.today', 'fields.Datetime.today', ([], {}), '()\n', (57309, 57311), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((57805, 57857), 'odoo._', '_', (['"""Sorry. 
You can\'t set a task as its parent task."""'], {}), '("Sorry. You can\'t set a task as its parent task.")\n', (57806, 57857), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((58038, 58116), 'odoo._', '_', (['"""You cannot archive recurring tasks. Please, disable the recurrence first."""'], {}), "('You cannot archive recurring tasks. Please, disable the recurrence first.')\n", (58039, 58116), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((60412, 60433), 'odoo.fields.Datetime.now', 'fields.Datetime.now', ([], {}), '()\n', (60431, 60433), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((60624, 60701), 'odoo._', '_', (['"""You cannot delete recurring tasks. Please, disable the recurrence first."""'], {}), "('You cannot delete recurring tasks. Please, disable the recurrence first.')\n", (60625, 60701), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((67155, 67170), 'odoo._', '_', (['"""No Subject"""'], {}), "('No Subject')\n", (67156, 67170), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((21857, 21972), 'odoo._', '_', (['"""You cannot delete a project containing tasks. You can either archive it or first delete all of its tasks."""'], {}), "('You cannot delete a project containing tasks. You can either archive it or first delete all of its tasks.'\n )\n", (21858, 21972), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((26621, 26650), 'odoo._', '_', (['"""Unknown Analytic Account"""'], {}), "('Unknown Analytic Account')\n", (26622, 26650), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((27680, 27701), 'odoo.fields.Datetime.now', 'fields.Datetime.now', ([], {}), '()\n', (27699, 27701), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((42252, 42273), 'datetime.timedelta', 'timedelta', ([], {'days': 'delta'}), '(days=delta)\n', (42261, 42273), False, 'from datetime import timedelta, datetime\n'), ((43289, 43342), 'odoo._', '_', (['"""<p><em>Number of tasks: %(tasks_count)s</em></p>"""'], {}), "('<p><em>Number of tasks: %(tasks_count)s</em></p>')\n", (43290, 43342), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((45733, 45801), 'odoo._', '_', (['"""By saving this change, the customer email will also be updated."""'], {}), "('By saving this change, the customer email will also be updated.')\n", (45734, 45801), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((46592, 46704), 'odoo._', '_', (['"""The project visibility setting doesn\'t allow portal users to see the project\'s tasks. (%s)"""', 'user_names'], {}), '("The project visibility setting doesn\'t allow portal users to see the project\'s tasks. (%s)"\n , user_names)\n', (46593, 46704), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((52237, 52298), 'odoo._', '_', (['"""Error! You cannot create recursive hierarchy of task(s)."""'], {}), "('Error! 
You cannot create recursive hierarchy of task(s).')\n", (52238, 52298), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((55082, 55105), 'odoo.fields.Datetime.today', 'fields.Datetime.today', ([], {}), '()\n', (55103, 55105), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((59603, 59725), 'odoo.osv.expression.OR', 'OR', (["[recurrence_domain, ['&', ('recurrence_id', '=', task.recurrence_id.id), (\n 'create_date', '>=', task.create_date)]]"], {}), "([recurrence_domain, ['&', ('recurrence_id', '=', task.recurrence_id.id),\n ('create_date', '>=', task.create_date)]])\n", (59605, 59725), False, 'from odoo.osv.expression import OR\n'), ((64586, 64600), 'odoo._', '_', (['"""I take it"""'], {}), "('I take it')\n", (64587, 64600), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((68339, 68358), 'odoo._', '_', (['"""Customer Email"""'], {}), "('Customer Email')\n", (68340, 68358), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((68389, 68402), 'odoo._', '_', (['"""Customer"""'], {}), "('Customer')\n", (68390, 68402), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((43726, 43745), 'odoo.fields.Date.today', 'fields.Date.today', ([], {}), '()\n', (43743, 43745), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((45875, 45950), 'odoo._', '_', (['"""By saving this change, the customer phone number will also be updated."""'], {}), "('By saving this change, the customer phone number will also be updated.')\n", (45876, 45950), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((59036, 59059), 'odoo.fields.Datetime.today', 'fields.Datetime.today', ([], {}), '()\n', (59057, 59059), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((68639, 68658), 'odoo._', '_', (['"""Customer Email"""'], {}), "('Customer Email')\n", (68640, 68658), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n'), ((55234, 55257), 'odoo.fields.Datetime.today', 'fields.Datetime.today', ([], {}), '()\n', (55255, 55257), False, 'from odoo import api, fields, models, tools, SUPERUSER_ID, _\n')] |
Maethorin/pivocram | app/config.py | f1709f5ee76d0280601efa87f3af8e89c2968f43 | # -*- coding: utf-8 -*-
"""
Config File for environment variables
"""
import os
from importlib import import_module
class Config(object):
"""
Base class for all config variables
"""
DEBUG = False
TESTING = False
DEVELOPMENT = False
CSRF_ENABLED = True
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SECRET_KEY = os.environ['SECRET_KEY']
class ProductionConfig(Config):
"""
Production Config... this is the real thing
"""
DEBUG = False
class StagingConfig(Config):
"""
Staging Config is for... staging things
"""
DEBUG = True
class DevelopmentConfig(Config):
"""
    Development Config... this is your home, developer!
"""
DEVELOPMENT = True
DEBUG = True
class TestingConfig(Config):
"""
    Test Config... You should be testing right now instead of reading docs!!!
"""
TESTING = True
KEY_ON_TEST = 'KEY ON TEST'
class ConfigClassNotFound(Exception):
"""
    Raised when the APP_SETTINGS environment variable has a value that does not point to an instantiable class.
"""
pass
def get_config():
"""
    Get the config class instance defined in the APP_SETTINGS environment variable.
    :return: The config class instance
:rtype: Config
"""
config_imports = os.environ['APP_SETTINGS'].split('.')
config_class_name = config_imports[-1]
config_module = import_module('.'.join(config_imports[:-1]))
config_class = getattr(config_module, config_class_name, None)
if not config_class:
raise ConfigClassNotFound('Unable to find a config class in {}'.format(os.environ['APP_SETTINGS']))
return config_class() | [] |
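A minimal usage sketch (an editor's illustration, not part of the repository): placeholder values are assumed for the two required environment variables, and the module path app.config is assumed from the file's location; APP_SETTINGS then selects which config class get_config() instantiates.

import os
os.environ.setdefault('DATABASE_URL', 'sqlite://')        # placeholder, assumed value
os.environ.setdefault('SECRET_KEY', 'not-a-real-secret')  # placeholder, assumed value
os.environ['APP_SETTINGS'] = 'app.config.TestingConfig'

from app.config import get_config

config = get_config()
assert config.TESTING is True
assert config.KEY_ON_TEST == 'KEY ON TEST'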
hongyuanChrisLi/RealEstateDBConvert | initial_load.py | 0fd04f5213ff3fd3548db3f322828bd80cf41791 | from mysql_dao.select_dao import SelectDao as MysqlSelectDao
from postgres_dao.ddl_dao import DdlDao
from postgres_dao.dml_dao import DmlDao as PsqlDmlDao
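# One-shot initial load: create the Postgres schema, then copy the MySQL data across through the DAO helpers below.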
psql_ddl_dao = DdlDao()
mysql_select_dao = MysqlSelectDao()
psql_dml_dao = PsqlDmlDao()
psql_ddl_dao.create_tables()
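# Location reference tables (county, city, zipcode) are copied across unchanged.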
county_data = mysql_select_dao.select_all_counties()
psql_dml_dao.insert_county(county_data)
city_data = mysql_select_dao.select_all_cities()
psql_dml_dao.insert_city(city_data)
zipcode_data = mysql_select_dao.select_all_zipcodes()
psql_dml_dao.insert_zipcode(zipcode_data)
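# Report tables are truncated in Postgres and reloaded from the full MySQL result sets.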
data = mysql_select_dao.select_full_addr_month_rpt()
psql_dml_dao.trunc_addr_month_rpt()
psql_dml_dao.insert_addr_month_rpt(data)
data = mysql_select_dao.select_full_mls_daily_rpt()
psql_dml_dao.trunc_mls_rpt()
psql_dml_dao.insert_mls_rpt(data)
mysql_select_dao.close()
psql_dml_dao.close()
| [((171, 179), 'postgres_dao.ddl_dao.DdlDao', 'DdlDao', ([], {}), '()\n', (177, 179), False, 'from postgres_dao.ddl_dao import DdlDao\n'), ((199, 215), 'mysql_dao.select_dao.SelectDao', 'MysqlSelectDao', ([], {}), '()\n', (213, 215), True, 'from mysql_dao.select_dao import SelectDao as MysqlSelectDao\n'), ((231, 243), 'postgres_dao.dml_dao.DmlDao', 'PsqlDmlDao', ([], {}), '()\n', (241, 243), True, 'from postgres_dao.dml_dao import DmlDao as PsqlDmlDao\n')] |
ramalingam-cb/testrunner | pytests/docs/docs.py | 81cea7a5a493cf0c67fca7f97c667cd3c6ad2142 | import time
import logger
from basetestcase import BaseTestCase
from couchbase_helper.documentgenerator import DocumentGenerator
from membase.api.rest_client import RestConnection
from couchbase_helper.documentgenerator import BlobGenerator
class DocsTests(BaseTestCase):
def setUp(self):
super(DocsTests, self).setUp()
def tearDown(self):
super(DocsTests, self).tearDown()
def test_docs_int_big_values(self):
degree = self.input.param("degree", 53)
error = self.input.param("error", False)
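        # the default degree of 53 is presumably chosen because integers above 2**53 are no longer all exactly representable as IEEE-754 doubles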
number = 2**degree
first = ['james', 'sharon']
template = '{{ "number": {0}, "first_name": "{1}" }}'
gen_load = DocumentGenerator('test_docs', template, [number,], first,
start=0, end=self.num_items)
self.log.info("create %s documents..." % (self.num_items))
try:
self._load_all_buckets(self.master, gen_load, "create", 0)
self._verify_stats_all_buckets([self.master])
except Exception as e:
if error:
self.log.info("Unable to create documents as expected: %s" % str(e))
else:
raise e
else:
if error:
self.fail("Able to create documents with value: %s" % str(number))
#docs.docs.DocsTests.test_load_memory,nodes_init=3,standard_buckets=3,memcached_buckets=1,replicas=2,quota_percent=75
"""
    1) Configure a cluster with 4 Couchbase Buckets and 1 Memcached Bucket.
    2) Total memory quota allocated for Couchbase should be approx. 75% (12G) of total RAM.
    3) Load initial data on all buckets up to 60% of each memory quota
    4) Pick one bucket and do the following (5) to (8)
    5) Insert new items up to high_wat_mark (75% of memory quota)
6) Expire/Delete/update random items (ratio of expiration vs delete ~= 8:2)
7) Repeat (6) until "ep_total_del_items" is ~= (3 X # of items being loaded in (3))
8) Expire 90% of remaining items
9) Insert new items or update existing items across buckets
10) See if we can run into "Hard out of Memory" error (UI)
"""
def test_load_memory(self):
num_items = self.quota * 1024 * 0.6 / self.value_size
num_items = num_items / len(self.buckets)
self.log.info("Load initial data on all buckets upto 60% of each memory quota")
gen_load = BlobGenerator('mike', 'mike-', self.value_size, start=0,
end=num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
self.log.info("Insert new items upto high_wat_mark (75% of memory quota)")
for bucket in self.buckets:
if bucket.type != 'memcached':
bucket_to_load = bucket
break
new_num_items = self.quota * 1024 * 0.15 / self.value_size
gen_load = BlobGenerator('mike', 'mike-', self.value_size, start=num_items,
end=new_num_items + num_items)
load = self.cluster.async_load_gen_docs(self.master, bucket_to_load.name, gen_load,
bucket_to_load.kvs[1], 'create', compression=self.sdk_compression)
load.result()
end_time = time.time() + 60*60*3
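        # for three hours: log every bucket's memUsed, churn items until ep_total_del_items reaches 3x the initial load, then expire 90% of the remaining keys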
while time.time() < end_time:
self.log.info("check memUsed")
rest = RestConnection(self.master)
for bucket in rest.get_buckets():
self.log.info("*****************************\
bucket %s: memUsed %s\
****************************" % (bucket.name,
bucket.stats.memUsed))
self.log.info("Expire/Delete/update random items (ratio \
of expiration vs delete ~= 8:2)")
current_num = 0
wait_task = self.cluster.async_wait_for_stats(self.servers[:self.nodes_init], bucket_to_load,
'all', 'ep_total_del_items', '==', num_items * 3)
while wait_task.state != "FINISHED":
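                # each pass touches 7000 keys: 5000 updates, 1600 one-second expirations and 400 deletes (~8:2 expire/delete ratio, as described in the docstring)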
gen_update = BlobGenerator('mike', 'mike-', self.value_size, start=current_num,
end=current_num + 5000)
gen_expire = BlobGenerator('mike', 'mike-', self.value_size, start=current_num + 5000,
end=current_num + 6600)
gen_delete = BlobGenerator('mike', 'mike-', self.value_size, start=current_num + 6600,
end=current_num + 7000)
tasks = []
tasks.append(self.cluster.async_load_gen_docs(self.master, bucket_to_load.name,
gen_update, bucket_to_load.kvs[1], 'update', compression=self.sdk_compression))
tasks.append(self.cluster.async_load_gen_docs(self.master, bucket_to_load.name,
gen_expire, bucket_to_load.kvs[1], 'update', exp=1,
compression=self.sdk_compression))
tasks.append(self.cluster.async_load_gen_docs(self.master, bucket_to_load.name,
gen_delete, bucket_to_load.kvs[1], 'delete', compression=self.sdk_compression))
for task in tasks:
task.result()
current_num += 7000
self.log.info("Expire 90% of remaining items")
remain_keys, _ = bucket_to_load.kvs[1].key_set()
        last_key_to_expire = remain_keys[int(0.9 * len(remain_keys))][4:]
gen_expire = BlobGenerator('mike', 'mike-', self.value_size, start=0,
end=last_key_to_expire)
load = self.cluster.async_load_gen_docs(self.master, bucket_to_load.name,
gen_expire, bucket_to_load.kvs[1], 'update', exp=1, compression=self.sdk_compression)
load.result()
self.log.info("Insert new items or update existing items across buckets")
gen_load = BlobGenerator('mike', 'mike-', self.value_size, start=new_num_items + num_items,
end=new_num_items * 2 + num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
| [((683, 774), 'couchbase_helper.documentgenerator.DocumentGenerator', 'DocumentGenerator', (['"""test_docs"""', 'template', '[number]', 'first'], {'start': '(0)', 'end': 'self.num_items'}), "('test_docs', template, [number], first, start=0, end=self\n .num_items)\n", (700, 774), False, 'from couchbase_helper.documentgenerator import DocumentGenerator\n'), ((2397, 2468), 'couchbase_helper.documentgenerator.BlobGenerator', 'BlobGenerator', (['"""mike"""', '"""mike-"""', 'self.value_size'], {'start': '(0)', 'end': 'num_items'}), "('mike', 'mike-', self.value_size, start=0, end=num_items)\n", (2410, 2468), False, 'from couchbase_helper.documentgenerator import BlobGenerator\n'), ((2876, 2976), 'couchbase_helper.documentgenerator.BlobGenerator', 'BlobGenerator', (['"""mike"""', '"""mike-"""', 'self.value_size'], {'start': 'num_items', 'end': '(new_num_items + num_items)'}), "('mike', 'mike-', self.value_size, start=num_items, end=\n new_num_items + num_items)\n", (2889, 2976), False, 'from couchbase_helper.documentgenerator import BlobGenerator\n'), ((5651, 5736), 'couchbase_helper.documentgenerator.BlobGenerator', 'BlobGenerator', (['"""mike"""', '"""mike-"""', 'self.value_size'], {'start': '(0)', 'end': 'last_key_to_expire'}), "('mike', 'mike-', self.value_size, start=0, end=last_key_to_expire\n )\n", (5664, 5736), False, 'from couchbase_helper.documentgenerator import BlobGenerator\n'), ((6090, 6209), 'couchbase_helper.documentgenerator.BlobGenerator', 'BlobGenerator', (['"""mike"""', '"""mike-"""', 'self.value_size'], {'start': '(new_num_items + num_items)', 'end': '(new_num_items * 2 + num_items)'}), "('mike', 'mike-', self.value_size, start=new_num_items +\n num_items, end=new_num_items * 2 + num_items)\n", (6103, 6209), False, 'from couchbase_helper.documentgenerator import BlobGenerator\n'), ((3238, 3249), 'time.time', 'time.time', ([], {}), '()\n', (3247, 3249), False, 'import time\n'), ((3274, 3285), 'time.time', 'time.time', ([], {}), '()\n', (3283, 3285), False, 'import time\n'), ((3360, 3387), 'membase.api.rest_client.RestConnection', 'RestConnection', (['self.master'], {}), '(self.master)\n', (3374, 3387), False, 'from membase.api.rest_client import RestConnection\n'), ((4150, 4245), 'couchbase_helper.documentgenerator.BlobGenerator', 'BlobGenerator', (['"""mike"""', '"""mike-"""', 'self.value_size'], {'start': 'current_num', 'end': '(current_num + 5000)'}), "('mike', 'mike-', self.value_size, start=current_num, end=\n current_num + 5000)\n", (4163, 4245), False, 'from couchbase_helper.documentgenerator import BlobGenerator\n'), ((4308, 4409), 'couchbase_helper.documentgenerator.BlobGenerator', 'BlobGenerator', (['"""mike"""', '"""mike-"""', 'self.value_size'], {'start': '(current_num + 5000)', 'end': '(current_num + 6600)'}), "('mike', 'mike-', self.value_size, start=current_num + 5000,\n end=current_num + 6600)\n", (4321, 4409), False, 'from couchbase_helper.documentgenerator import BlobGenerator\n'), ((4473, 4574), 'couchbase_helper.documentgenerator.BlobGenerator', 'BlobGenerator', (['"""mike"""', '"""mike-"""', 'self.value_size'], {'start': '(current_num + 6600)', 'end': '(current_num + 7000)'}), "('mike', 'mike-', self.value_size, start=current_num + 6600,\n end=current_num + 7000)\n", (4486, 4574), False, 'from couchbase_helper.documentgenerator import BlobGenerator\n')] |
truongaxin123/lichthidtu | lichthi.py | 77ba75974769ab1fdd1281b6088a1734dc0a3a83 | from bs4 import BeautifulSoup
import requests
from urllib.request import urlretrieve
ROOT = 'http://pdaotao.duytan.edu.vn'
def get_url_sub(sub, id_, page):
all_td_tag = []
for i in range(1, page+1):
print('http://pdaotao.duytan.edu.vn/EXAM_LIST/?page={}&lang=VN'.format(i))
r = requests.get('http://pdaotao.duytan.edu.vn/EXAM_LIST/?page={}&lang=VN'.format(i))
soup = BeautifulSoup(r.text, 'lxml')
list_td_tag = soup.find_all('td', attrs={'style': 'padding-top:10px'})
all_td_tag = all_td_tag + list_td_tag
for td_tag in all_td_tag:
if (((sub+id_) in str(td_tag.a.contents[0])) or
((sub+' '+id_) in str(td_tag.a.contents[0])) or
((sub+'_'+id_) in str(td_tag.a.contents[0]))):
print('\nComplete!!!')
print(' '.join(str(td_tag.a.string).split()))
print(str(td_tag.a['href']).replace('..', ROOT))
return str(td_tag.a['href']).replace('..', ROOT)
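# Example usage (illustrative; the course name and id are hypothetical):
#   get_url_sub('LAW', '101', 4)
# scans the first 4 listing pages and returns the detail-page URL of the first
# exam entry whose title contains "LAW 101" (also matching "LAW101" / "LAW_101").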
def get_excel_url(url):
r = requests.get(url)
soup = BeautifulSoup(r.text,'lxml')
list_span_tags = soup.find_all('span',class_='txt_l4')
excel_url = list_span_tags[1].a['href'].replace('..',ROOT)
return excel_url
# a = get_excel_url('http://pdaotao.duytan.edu.vn/EXAM_LIST_Detail/?ID=52289&lang=VN')
def main():
    sub = input('Enter the subject name: ')
    id_ = input('Enter the subject id: ')
    url = get_url_sub(sub, id_, 4)
    if url is None:
        print('Could not find any subject matching ({} {}) :('.format(sub, id_))
return
else:
print('get excel URL!!!')
excel_url = get_excel_url(url)
excel_url = excel_url.replace(' ','%20')
print('Download excel file!!!')
save_at = 'C:/Users/truon/Desktop/'
filename = save_at + excel_url.split('/')[-1].replace('%20',' ')
urlretrieve(excel_url,filename)
print('Done!')
main()
| [((1004, 1021), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1016, 1021), False, 'import requests\n'), ((1033, 1062), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""lxml"""'], {}), "(r.text, 'lxml')\n", (1046, 1062), False, 'from bs4 import BeautifulSoup\n'), ((401, 430), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""lxml"""'], {}), "(r.text, 'lxml')\n", (414, 430), False, 'from bs4 import BeautifulSoup\n'), ((1818, 1850), 'urllib.request.urlretrieve', 'urlretrieve', (['excel_url', 'filename'], {}), '(excel_url, filename)\n', (1829, 1850), False, 'from urllib.request import urlretrieve\n')] |
isabella232/feedloader | appengine/uploader/main.py | c0417480804d406a83d1aedcb7e7d719058fdbfd | # coding=utf-8
# Copyright 2021 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Uploader module that handles batch jobs sent from Task Queue.
This module receives batch jobs from TaskQueue. For each job, the module loads
data from BigQuery and sends it to Merchant Center.
"""
import http
import json
import logging
import socket
from typing import List, Tuple
import flask
from google.cloud import bigquery
from google.cloud import logging as cloud_logging
from googleapiclient import errors
import batch_creator
import bigquery_client
import constants
import content_api_client
import result_recorder
import shoptimizer_client
from models import failure
from models import process_result
from models import upload_task
app = flask.Flask(__name__)
_logging_client = cloud_logging.Client()
_logging_client.setup_logging(log_level=logging.DEBUG)
_SHOPTIMIZER_CONFIG_FILE_PATH = 'config/shoptimizer_config.json'
OPERATION_TO_METHOD = {
constants.Operation.UPSERT: constants.Method.INSERT,
constants.Operation.DELETE: constants.Method.DELETE,
constants.Operation.PREVENT_EXPIRING: constants.Method.INSERT
}
# Used to check if this is the last retry for alerting purposes.
# Should match task_retry_limit in appengine/initiator/queue.yaml.
TASK_RETRY_LIMIT = 5
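# Illustrative reference only (hypothetical snippet, not part of this module): the
# corresponding definition in appengine/initiator/queue.yaml would look roughly like
#   queue:
#   - name: <queue-name>
#     retry_parameters:
#       task_retry_limit: 5
# where only task_retry_limit needs to stay in sync with the constant above.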
@app.route('/insert_items', methods=['POST'])
def run_insert_process() -> Tuple[str, http.HTTPStatus]:
"""Handles uploading tasks pushed from Task Queue."""
return _run_process(constants.Operation.UPSERT)
@app.route('/delete_items', methods=['POST'])
def run_delete_process() -> Tuple[str, http.HTTPStatus]:
"""Handles deleting tasks pushed from Task Queue."""
return _run_process(constants.Operation.DELETE)
@app.route('/prevent_expiring_items', methods=['POST'])
def run_prevent_expiring_process() -> Tuple[str, http.HTTPStatus]:
"""Handles prevent expiring tasks pushed from Task Queue."""
return _run_process(constants.Operation.PREVENT_EXPIRING)
def _run_process(operation: constants.Operation) -> Tuple[str, http.HTTPStatus]:
"""Handles tasks pushed from Task Queue.
When tasks are enqueued to Task Queue by initiator, this method will be
called. It extracts necessary information from a Task Queue message. The
following processes are executed in this function:
- Loading items to process from BigQuery.
- Converts items into a batch that can be sent to Content API for Shopping.
- Sending items to Content API for Shopping (Merchant Center).
- Records the results of the Content API for Shopping call.
Args:
operation: Type of operation to perform on the items.
Returns:
The result of HTTP request.
"""
request_body = json.loads(flask.request.data.decode('utf-8'))
task = upload_task.UploadTask.from_json(request_body)
if task.batch_size == 0:
return 'OK', http.HTTPStatus.OK
batch_number = int(task.start_index / task.batch_size) + 1
logging.info(
      '%s started. Batch #%d info: start_index: %d, batch_size: %d, '
'initiation timestamp: %s', operation.value, batch_number,
task.start_index, task.batch_size, task.timestamp)
try:
items = _load_items_from_bigquery(operation, task)
except errors.HttpError:
return 'Error loading items from BigQuery', http.HTTPStatus.INTERNAL_SERVER_ERROR
result = process_result.ProcessResult([], [], [])
try:
if not items:
logging.error(
          'Batch #%d, operation %s: 0 items loaded from BigQuery so batch not sent to Content API. start_index: %d, batch_size: %d, '
'initiation timestamp: %s', batch_number, operation.value,
task.start_index, task.batch_size, task.timestamp)
return 'No items to process', http.HTTPStatus.OK
method = OPERATION_TO_METHOD.get(operation)
# Creates batch from items loaded from BigQuery
original_batch, skipped_item_ids, batch_id_to_item_id = batch_creator.create_batch(
batch_number, items, method)
# Optimizes batch via Shoptimizer for upsert/prevent_expiring operations
if operation != constants.Operation.DELETE and constants.SHOPTIMIZER_API_INTEGRATION_ON:
batch_to_send_to_content_api = _create_optimized_batch(
original_batch, batch_number, operation)
else:
batch_to_send_to_content_api = original_batch
# Sends batch of items to Content API for Shopping
api_client = content_api_client.ContentApiClient()
successful_item_ids, item_failures = api_client.process_items(
batch_to_send_to_content_api, batch_number, batch_id_to_item_id, method)
result = process_result.ProcessResult(
successfully_processed_item_ids=successful_item_ids,
content_api_failures=item_failures,
skipped_item_ids=skipped_item_ids)
except errors.HttpError as http_error:
error_status_code = http_error.resp.status
error_reason = http_error.resp.reason
result = _handle_content_api_error(error_status_code, error_reason,
batch_number, http_error, items,
operation, task)
return error_reason, error_status_code
except socket.timeout as timeout_error:
error_status_code = http.HTTPStatus.REQUEST_TIMEOUT
error_reason = 'Socket timeout'
result = _handle_content_api_error(error_status_code, error_reason,
batch_number, timeout_error, items,
operation, task)
return error_reason, error_status_code
else:
logging.info(
'Batch #%d with operation %s and initiation timestamp %s successfully processed %s items, failed to process %s items and skipped %s items.',
batch_number, operation.value, task.timestamp,
result.get_success_count(), result.get_failure_count(),
result.get_skipped_count())
finally:
recorder = result_recorder.ResultRecorder.from_service_account_json(
constants.GCP_SERVICE_ACCOUNT_PATH, constants.DATASET_ID_FOR_MONITORING,
constants.TABLE_ID_FOR_RESULT_COUNTS_MONITORING,
constants.TABLE_ID_FOR_ITEM_RESULTS_MONITORING)
recorder.insert_result(operation.value, result, task.timestamp,
batch_number)
return 'OK', http.HTTPStatus.OK
def _load_items_from_bigquery(
operation: constants.Operation,
task: upload_task.UploadTask) -> List[bigquery.Row]:
"""Loads items from BigQuery.
Args:
operation: The operation to be performed on this batch of items.
task: The Cloud Task object that initiated this request.
Returns:
The list of items loaded from BigQuery.
"""
table_id = f'process_items_to_{operation.value}_{task.timestamp}'
bq_client = bigquery_client.BigQueryClient.from_service_account_json(
constants.GCP_SERVICE_ACCOUNT_PATH, constants.DATASET_ID_FOR_PROCESSING,
table_id)
try:
items_iterator = bq_client.load_items(task.start_index, task.batch_size)
except errors.HttpError as http_error:
logging.exception(
'Error loading items from %s.%s. HTTP status: %s. Error: %s',
constants.DATASET_ID_FOR_PROCESSING, table_id, http_error.resp.status,
http_error.resp.reason)
raise
return list(items_iterator)
def _create_optimized_batch(batch: constants.Batch, batch_number: int,
operation: constants.Operation) -> constants.Batch:
"""Creates an optimized batch by calling the Shoptimizer API.
Args:
batch: The batch of product data to be optimized.
batch_number: The number that identifies this batch.
operation: The operation to be performed on this batch (upsert, delete,
prevent_expiring).
Returns:
The batch returned from the Shoptimizer API Client.
"""
try:
optimization_client = shoptimizer_client.ShoptimizerClient(
batch_number, operation)
except (OSError, ValueError):
return batch
return optimization_client.shoptimize(batch)
def _handle_content_api_error(
error_status_code: int, error_reason: str, batch_num: int, error: Exception,
item_rows: List[bigquery.Row], operation: constants.Operation,
task: upload_task.UploadTask) -> process_result.ProcessResult:
"""Logs network related errors returned from Content API and returns a list of item failures.
Args:
error_status_code: HTTP status code from Content API.
error_reason: The reason for the error.
batch_num: The batch number.
error: The error thrown by Content API.
item_rows: The items being processed in this batch.
operation: The operation to be performed on this batch of items.
task: The Cloud Task object that initiated this request.
Returns:
The list of items that failed due to the error, wrapped in a
process_result.
"""
logging.warning(
'Batch #%d with operation %s and initiation timestamp %s failed. HTTP status: %s. Error: %s',
batch_num, operation.value, task.timestamp, error_status_code,
error_reason)
# If the batch API call received an HttpError, mark every id as failed.
item_failures = [
failure.Failure(str(item_row.get('item_id', 'Missing ID')), error_reason)
for item_row in item_rows
]
api_result = process_result.ProcessResult([], item_failures, [])
if content_api_client.suggest_retry(
error_status_code) and _get_execution_attempt() < TASK_RETRY_LIMIT:
logging.warning(
'Batch #%d with operation %s and initiation timestamp %s will be requeued for retry',
batch_num, operation.value, task.timestamp)
else:
logging.error(
'Batch #%d with operation %s and initiation timestamp %s failed and will not be retried. Error: %s',
batch_num, operation.value, task.timestamp, error)
return api_result
def _get_execution_attempt() -> int:
"""Returns the number of times this task has previously been executed.
If the execution count header does not exist, it means the request did not
come from Cloud Tasks.
In this case, there will be no retry, so set execution attempt to the retry
limit.
Returns:
int, the number of times this task has previously been executed.
"""
execution_attempt = flask.request.headers.get(
'X-AppEngine-TaskExecutionCount', '')
if execution_attempt:
return int(execution_attempt)
else:
return TASK_RETRY_LIMIT
if __name__ == '__main__':
# This is used when running locally. Gunicorn is used to run the
# application on Google App Engine. See entrypoint in app.yaml.
app.run(host='127.0.0.1', port=8080, debug=True)
| [((1246, 1267), 'flask.Flask', 'flask.Flask', (['__name__'], {}), '(__name__)\n', (1257, 1267), False, 'import flask\n'), ((1287, 1309), 'google.cloud.logging.Client', 'cloud_logging.Client', ([], {}), '()\n', (1307, 1309), True, 'from google.cloud import logging as cloud_logging\n'), ((3229, 3275), 'models.upload_task.UploadTask.from_json', 'upload_task.UploadTask.from_json', (['request_body'], {}), '(request_body)\n', (3261, 3275), False, 'from models import upload_task\n'), ((3404, 3600), 'logging.info', 'logging.info', (['"""%s started. Batch #%d info: start_index: %d, batch_size: %d,initiation timestamp: %s"""', 'operation.value', 'batch_number', 'task.start_index', 'task.batch_size', 'task.timestamp'], {}), "(\n '%s started. Batch #%d info: start_index: %d, batch_size: %d,initiation timestamp: %s'\n , operation.value, batch_number, task.start_index, task.batch_size,\n task.timestamp)\n", (3416, 3600), False, 'import logging\n'), ((3797, 3837), 'models.process_result.ProcessResult', 'process_result.ProcessResult', (['[]', '[]', '[]'], {}), '([], [], [])\n', (3825, 3837), False, 'from models import process_result\n'), ((7171, 7315), 'bigquery_client.BigQueryClient.from_service_account_json', 'bigquery_client.BigQueryClient.from_service_account_json', (['constants.GCP_SERVICE_ACCOUNT_PATH', 'constants.DATASET_ID_FOR_PROCESSING', 'table_id'], {}), '(constants.\n GCP_SERVICE_ACCOUNT_PATH, constants.DATASET_ID_FOR_PROCESSING, table_id)\n', (7227, 7315), False, 'import bigquery_client\n'), ((9229, 9429), 'logging.warning', 'logging.warning', (['"""Batch #%d with operation %s and initiation timestamp %s failed. HTTP status: %s. Error: %s"""', 'batch_num', 'operation.value', 'task.timestamp', 'error_status_code', 'error_reason'], {}), "(\n 'Batch #%d with operation %s and initiation timestamp %s failed. HTTP status: %s. 
Error: %s'\n , batch_num, operation.value, task.timestamp, error_status_code,\n error_reason)\n", (9244, 9429), False, 'import logging\n'), ((9660, 9711), 'models.process_result.ProcessResult', 'process_result.ProcessResult', (['[]', 'item_failures', '[]'], {}), '([], item_failures, [])\n', (9688, 9711), False, 'from models import process_result\n'), ((10620, 10683), 'flask.request.headers.get', 'flask.request.headers.get', (['"""X-AppEngine-TaskExecutionCount"""', '""""""'], {}), "('X-AppEngine-TaskExecutionCount', '')\n", (10645, 10683), False, 'import flask\n'), ((3184, 3218), 'flask.request.data.decode', 'flask.request.data.decode', (['"""utf-8"""'], {}), "('utf-8')\n", (3209, 3218), False, 'import flask\n'), ((4364, 4419), 'batch_creator.create_batch', 'batch_creator.create_batch', (['batch_number', 'items', 'method'], {}), '(batch_number, items, method)\n', (4390, 4419), False, 'import batch_creator\n'), ((4848, 4885), 'content_api_client.ContentApiClient', 'content_api_client.ContentApiClient', ([], {}), '()\n', (4883, 4885), False, 'import content_api_client\n'), ((5048, 5209), 'models.process_result.ProcessResult', 'process_result.ProcessResult', ([], {'successfully_processed_item_ids': 'successful_item_ids', 'content_api_failures': 'item_failures', 'skipped_item_ids': 'skipped_item_ids'}), '(successfully_processed_item_ids=\n successful_item_ids, content_api_failures=item_failures,\n skipped_item_ids=skipped_item_ids)\n', (5076, 5209), False, 'from models import process_result\n'), ((6335, 6575), 'result_recorder.ResultRecorder.from_service_account_json', 'result_recorder.ResultRecorder.from_service_account_json', (['constants.GCP_SERVICE_ACCOUNT_PATH', 'constants.DATASET_ID_FOR_MONITORING', 'constants.TABLE_ID_FOR_RESULT_COUNTS_MONITORING', 'constants.TABLE_ID_FOR_ITEM_RESULTS_MONITORING'], {}), '(constants.\n GCP_SERVICE_ACCOUNT_PATH, constants.DATASET_ID_FOR_MONITORING,\n constants.TABLE_ID_FOR_RESULT_COUNTS_MONITORING, constants.\n TABLE_ID_FOR_ITEM_RESULTS_MONITORING)\n', (6391, 6575), False, 'import result_recorder\n'), ((8238, 8299), 'shoptimizer_client.ShoptimizerClient', 'shoptimizer_client.ShoptimizerClient', (['batch_number', 'operation'], {}), '(batch_number, operation)\n', (8274, 8299), False, 'import shoptimizer_client\n'), ((9718, 9769), 'content_api_client.suggest_retry', 'content_api_client.suggest_retry', (['error_status_code'], {}), '(error_status_code)\n', (9750, 9769), False, 'import content_api_client\n'), ((9830, 9985), 'logging.warning', 'logging.warning', (['"""Batch #%d with operation %s and initiation timestamp %s will be requeued for retry"""', 'batch_num', 'operation.value', 'task.timestamp'], {}), "(\n 'Batch #%d with operation %s and initiation timestamp %s will be requeued for retry'\n , batch_num, operation.value, task.timestamp)\n", (9845, 9985), False, 'import logging\n'), ((10005, 10180), 'logging.error', 'logging.error', (['"""Batch #%d with operation %s and initiation timestamp %s failed and will not be retried. Error: %s"""', 'batch_num', 'operation.value', 'task.timestamp', 'error'], {}), "(\n 'Batch #%d with operation %s and initiation timestamp %s failed and will not be retried. Error: %s'\n , batch_num, operation.value, task.timestamp, error)\n", (10018, 10180), False, 'import logging\n'), ((3869, 4126), 'logging.error', 'logging.error', (['"""Batch #%d, operation %s: 0 items loaded from BigQuery so batch not sent to Content API. 
Start_index: %d, batch_size: %d,initiation timestamp: %s"""', 'batch_number', 'operation.value', 'task.start_index', 'task.batch_size', 'task.timestamp'], {}), "(\n 'Batch #%d, operation %s: 0 items loaded from BigQuery so batch not sent to Content API. Start_index: %d, batch_size: %d,initiation timestamp: %s'\n , batch_number, operation.value, task.start_index, task.batch_size,\n task.timestamp)\n", (3882, 4126), False, 'import logging\n'), ((7453, 7635), 'logging.exception', 'logging.exception', (['"""Error loading items from %s.%s. HTTP status: %s. Error: %s"""', 'constants.DATASET_ID_FOR_PROCESSING', 'table_id', 'http_error.resp.status', 'http_error.resp.reason'], {}), "('Error loading items from %s.%s. HTTP status: %s. Error: %s',\n constants.DATASET_ID_FOR_PROCESSING, table_id, http_error.resp.status,\n http_error.resp.reason)\n", (7470, 7635), False, 'import logging\n')] |
BadDevCode/lumberyard | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/transforms.py | 3d688932f919dbf5821f0cb8a210ce24abe39e9e | """
Implement transformation on Numba IR
"""
from __future__ import absolute_import, print_function
from collections import namedtuple, defaultdict
import logging
from numba.analysis import compute_cfg_from_blocks, find_top_level_loops
from numba import ir, errors, ir_utils
from numba.analysis import compute_use_defs
_logger = logging.getLogger(__name__)
def _extract_loop_lifting_candidates(cfg, blocks):
"""
    Returns a list of loops that are candidates for loop lifting
"""
# check well-formed-ness of the loop
def same_exit_point(loop):
"all exits must point to the same location"
outedges = set()
for k in loop.exits:
succs = set(x for x, _ in cfg.successors(k))
if not succs:
# If the exit point has no successor, it contains an return
# statement, which is not handled by the looplifting code.
# Thus, this loop is not a candidate.
_logger.debug("return-statement in loop.")
return False
outedges |= succs
ok = len(outedges) == 1
_logger.debug("same_exit_point=%s (%s)", ok, outedges)
return ok
def one_entry(loop):
"there is one entry"
ok = len(loop.entries) == 1
_logger.debug("one_entry=%s", ok)
return ok
def cannot_yield(loop):
"cannot have yield inside the loop"
insiders = set(loop.body) | set(loop.entries) | set(loop.exits)
for blk in map(blocks.__getitem__, insiders):
for inst in blk.body:
if isinstance(inst, ir.Assign):
if isinstance(inst.value, ir.Yield):
_logger.debug("has yield")
return False
_logger.debug("no yield")
return True
_logger.info('finding looplift candidates')
# the check for cfg.entry_point in the loop.entries is to prevent a bad
# rewrite where a prelude for a lifted loop would get written into block -1
# if a loop entry were in block 0
candidates = []
for loop in find_top_level_loops(cfg):
_logger.debug("top-level loop: %s", loop)
if (same_exit_point(loop) and one_entry(loop) and cannot_yield(loop) and
cfg.entry_point() not in loop.entries):
candidates.append(loop)
_logger.debug("add candidate: %s", loop)
return candidates
def find_region_inout_vars(blocks, livemap, callfrom, returnto, body_block_ids):
"""Find input and output variables to a block region.
"""
inputs = livemap[callfrom]
outputs = livemap[returnto]
# ensure live variables are actually used in the blocks, else remove,
# saves having to create something valid to run through postproc
# to achieve similar
loopblocks = {}
for k in body_block_ids:
loopblocks[k] = blocks[k]
used_vars = set()
def_vars = set()
defs = compute_use_defs(loopblocks)
for vs in defs.usemap.values():
used_vars |= vs
for vs in defs.defmap.values():
def_vars |= vs
used_or_defined = used_vars | def_vars
# note: sorted for stable ordering
inputs = sorted(set(inputs) & used_or_defined)
outputs = sorted(set(outputs) & used_or_defined & def_vars)
return inputs, outputs
_loop_lift_info = namedtuple('loop_lift_info',
'loop,inputs,outputs,callfrom,returnto')
def _loop_lift_get_candidate_infos(cfg, blocks, livemap):
"""
Returns information on looplifting candidates.
"""
loops = _extract_loop_lifting_candidates(cfg, blocks)
loopinfos = []
for loop in loops:
[callfrom] = loop.entries # requirement checked earlier
        an_exit = next(iter(loop.exits))  # any one of the exit blocks
if len(loop.exits) > 1:
# Pre-Py3.8 may have multiple exits
[(returnto, _)] = cfg.successors(an_exit) # requirement checked earlier
else:
# Post-Py3.8 DO NOT have multiple exits
returnto = an_exit
local_block_ids = set(loop.body) | set(loop.entries)
inputs, outputs = find_region_inout_vars(
blocks=blocks,
livemap=livemap,
callfrom=callfrom,
returnto=returnto,
body_block_ids=local_block_ids,
)
lli = _loop_lift_info(loop=loop, inputs=inputs, outputs=outputs,
callfrom=callfrom, returnto=returnto)
loopinfos.append(lli)
return loopinfos
def _loop_lift_modify_call_block(liftedloop, block, inputs, outputs, returnto):
"""
Transform calling block from top-level function to call the lifted loop.
"""
scope = block.scope
loc = block.loc
blk = ir.Block(scope=scope, loc=loc)
ir_utils.fill_block_with_call(
newblock=blk,
callee=liftedloop,
label_next=returnto,
inputs=inputs,
outputs=outputs,
)
return blk
def _loop_lift_prepare_loop_func(loopinfo, blocks):
"""
Inplace transform loop blocks for use as lifted loop.
"""
entry_block = blocks[loopinfo.callfrom]
scope = entry_block.scope
loc = entry_block.loc
# Lowering assumes the first block to be the one with the smallest offset
firstblk = min(blocks) - 1
blocks[firstblk] = ir_utils.fill_callee_prologue(
block=ir.Block(scope=scope, loc=loc),
inputs=loopinfo.inputs,
label_next=loopinfo.callfrom,
)
blocks[loopinfo.returnto] = ir_utils.fill_callee_epilogue(
block=ir.Block(scope=scope, loc=loc),
outputs=loopinfo.outputs,
)
def _loop_lift_modify_blocks(func_ir, loopinfo, blocks,
typingctx, targetctx, flags, locals):
"""
Modify the block inplace to call to the lifted-loop.
Returns a dictionary of blocks of the lifted-loop.
"""
from numba.dispatcher import LiftedLoop
# Copy loop blocks
loop = loopinfo.loop
loopblockkeys = set(loop.body) | set(loop.entries)
if len(loop.exits) > 1:
# Pre-Py3.8 may have multiple exits
loopblockkeys |= loop.exits
loopblocks = dict((k, blocks[k].copy()) for k in loopblockkeys)
# Modify the loop blocks
_loop_lift_prepare_loop_func(loopinfo, loopblocks)
# Create a new IR for the lifted loop
lifted_ir = func_ir.derive(blocks=loopblocks,
arg_names=tuple(loopinfo.inputs),
arg_count=len(loopinfo.inputs),
force_non_generator=True)
liftedloop = LiftedLoop(lifted_ir,
typingctx, targetctx, flags, locals)
# modify for calling into liftedloop
callblock = _loop_lift_modify_call_block(liftedloop, blocks[loopinfo.callfrom],
loopinfo.inputs, loopinfo.outputs,
loopinfo.returnto)
# remove blocks
for k in loopblockkeys:
del blocks[k]
# update main interpreter callsite into the liftedloop
blocks[loopinfo.callfrom] = callblock
return liftedloop
def loop_lifting(func_ir, typingctx, targetctx, flags, locals):
"""
Loop lifting transformation.
    Given an interpreter `func_ir`, returns a 2-tuple of
`(toplevel_interp, [loop0_interp, loop1_interp, ....])`
"""
blocks = func_ir.blocks.copy()
cfg = compute_cfg_from_blocks(blocks)
loopinfos = _loop_lift_get_candidate_infos(cfg, blocks,
func_ir.variable_lifetime.livemap)
loops = []
if loopinfos:
_logger.debug('loop lifting this IR with %d candidates:\n%s',
len(loopinfos), func_ir.dump_to_string())
for loopinfo in loopinfos:
lifted = _loop_lift_modify_blocks(func_ir, loopinfo, blocks,
typingctx, targetctx, flags, locals)
loops.append(lifted)
# Make main IR
main = func_ir.derive(blocks=blocks)
return main, loops
def canonicalize_cfg_single_backedge(blocks):
"""
Rewrite loops that have multiple backedges.
"""
cfg = compute_cfg_from_blocks(blocks)
newblocks = blocks.copy()
def new_block_id():
return max(newblocks.keys()) + 1
def has_multiple_backedges(loop):
count = 0
for k in loop.body:
blk = blocks[k]
edges = blk.terminator.get_targets()
# is a backedge?
if loop.header in edges:
count += 1
if count > 1:
# early exit
return True
return False
def yield_loops_with_multiple_backedges():
for lp in cfg.loops().values():
if has_multiple_backedges(lp):
yield lp
def replace_target(term, src, dst):
def replace(target):
return (dst if target == src else target)
if isinstance(term, ir.Branch):
return ir.Branch(cond=term.cond,
truebr=replace(term.truebr),
falsebr=replace(term.falsebr),
loc=term.loc)
elif isinstance(term, ir.Jump):
return ir.Jump(target=replace(term.target), loc=term.loc)
else:
assert not term.get_targets()
return term
def rewrite_single_backedge(loop):
"""
Add new tail block that gathers all the backedges
"""
header = loop.header
tailkey = new_block_id()
for blkkey in loop.body:
blk = newblocks[blkkey]
if header in blk.terminator.get_targets():
newblk = blk.copy()
# rewrite backedge into jumps to new tail block
newblk.body[-1] = replace_target(blk.terminator, header,
tailkey)
newblocks[blkkey] = newblk
# create new tail block
entryblk = newblocks[header]
tailblk = ir.Block(scope=entryblk.scope, loc=entryblk.loc)
# add backedge
tailblk.append(ir.Jump(target=header, loc=tailblk.loc))
newblocks[tailkey] = tailblk
for loop in yield_loops_with_multiple_backedges():
rewrite_single_backedge(loop)
return newblocks
def canonicalize_cfg(blocks):
"""
Rewrite the given blocks to canonicalize the CFG.
Returns a new dictionary of blocks.
"""
return canonicalize_cfg_single_backedge(blocks)
def with_lifting(func_ir, typingctx, targetctx, flags, locals):
"""With-lifting transformation
Rewrite the IR to extract all withs.
Only the top-level withs are extracted.
Returns the (the_new_ir, the_lifted_with_ir)
"""
from numba import postproc
def dispatcher_factory(func_ir, objectmode=False, **kwargs):
from numba.dispatcher import LiftedWith, ObjModeLiftedWith
myflags = flags.copy()
if objectmode:
# Lifted with-block cannot looplift
myflags.enable_looplift = False
# Lifted with-block uses object mode
myflags.enable_pyobject = True
myflags.force_pyobject = True
myflags.no_cpython_wrapper = False
cls = ObjModeLiftedWith
else:
cls = LiftedWith
return cls(func_ir, typingctx, targetctx, myflags, locals, **kwargs)
postproc.PostProcessor(func_ir).run() # ensure we have variable lifetime
assert func_ir.variable_lifetime
vlt = func_ir.variable_lifetime
blocks = func_ir.blocks.copy()
# find where with-contexts regions are
withs = find_setupwiths(blocks)
cfg = vlt.cfg
_legalize_withs_cfg(withs, cfg, blocks)
# For each with-regions, mutate them according to
# the kind of contextmanager
sub_irs = []
for (blk_start, blk_end) in withs:
body_blocks = []
for node in _cfg_nodes_in_region(cfg, blk_start, blk_end):
body_blocks.append(node)
_legalize_with_head(blocks[blk_start])
# Find the contextmanager
cmkind, extra = _get_with_contextmanager(func_ir, blocks, blk_start)
# Mutate the body and get new IR
sub = cmkind.mutate_with_body(func_ir, blocks, blk_start, blk_end,
body_blocks, dispatcher_factory,
extra)
sub_irs.append(sub)
if not sub_irs:
# Unchanged
new_ir = func_ir
else:
new_ir = func_ir.derive(blocks)
return new_ir, sub_irs
def _get_with_contextmanager(func_ir, blocks, blk_start):
"""Get the global object used for the context manager
"""
_illegal_cm_msg = "Illegal use of context-manager."
def get_var_dfn(var):
"""Get the definition given a variable"""
return func_ir.get_definition(var)
def get_ctxmgr_obj(var_ref):
"""Return the context-manager object and extra info.
The extra contains the arguments if the context-manager is used
as a call.
"""
# If the contextmanager used as a Call
dfn = func_ir.get_definition(var_ref)
if isinstance(dfn, ir.Expr) and dfn.op == 'call':
args = [get_var_dfn(x) for x in dfn.args]
kws = {k: get_var_dfn(v) for k, v in dfn.kws}
extra = {'args': args, 'kwargs': kws}
var_ref = dfn.func
else:
extra = None
ctxobj = ir_utils.guard(ir_utils.find_global_value, func_ir, var_ref)
# check the contextmanager object
if ctxobj is ir.UNDEFINED:
raise errors.CompilerError(
"Undefined variable used as context manager",
loc=blocks[blk_start].loc,
)
if ctxobj is None:
raise errors.CompilerError(_illegal_cm_msg, loc=dfn.loc)
return ctxobj, extra
# Scan the start of the with-region for the contextmanager
for stmt in blocks[blk_start].body:
if isinstance(stmt, ir.EnterWith):
var_ref = stmt.contextmanager
ctxobj, extra = get_ctxmgr_obj(var_ref)
if not hasattr(ctxobj, 'mutate_with_body'):
raise errors.CompilerError(
"Unsupported context manager in use",
loc=blocks[blk_start].loc,
)
return ctxobj, extra
# No contextmanager found?
raise errors.CompilerError(
"malformed with-context usage",
loc=blocks[blk_start].loc,
)
def _legalize_with_head(blk):
"""Given *blk*, the head block of the with-context, check that it doesn't
do anything else.
"""
counters = defaultdict(int)
for stmt in blk.body:
counters[type(stmt)] += 1
if counters.pop(ir.EnterWith) != 1:
raise errors.CompilerError(
"with's head-block must have exactly 1 ENTER_WITH",
loc=blk.loc,
)
if counters.pop(ir.Jump) != 1:
raise errors.CompilerError(
"with's head-block must have exactly 1 JUMP",
loc=blk.loc,
)
# Can have any number of del
counters.pop(ir.Del, None)
# There MUST NOT be any other statements
if counters:
raise errors.CompilerError(
"illegal statements in with's head-block",
loc=blk.loc,
)
def _cfg_nodes_in_region(cfg, region_begin, region_end):
"""Find the set of CFG nodes that are in the given region
"""
region_nodes = set()
stack = [region_begin]
while stack:
tos = stack.pop()
succs, _ = zip(*cfg.successors(tos))
nodes = set([node for node in succs
if node not in region_nodes and
node != region_end])
stack.extend(nodes)
region_nodes |= nodes
return region_nodes
def _legalize_withs_cfg(withs, cfg, blocks):
"""Verify the CFG of the with-context(s).
"""
doms = cfg.dominators()
postdoms = cfg.post_dominators()
# Verify that the with-context has no side-exits
for s, e in withs:
loc = blocks[s].loc
if s not in doms[e]:
# Not sure what condition can trigger this error.
msg = "Entry of with-context not dominating the exit."
raise errors.CompilerError(msg, loc=loc)
if e not in postdoms[s]:
msg = (
"Does not support with-context that contain branches "
"(i.e. break/return/raise) that can leave the with-context. "
"Details: exit of with-context not post-dominating the entry. "
)
raise errors.CompilerError(msg, loc=loc)
def find_setupwiths(blocks):
"""Find all top-level with.
Returns a list of ranges for the with-regions.
"""
def find_ranges(blocks):
for blk in blocks.values():
for ew in blk.find_insts(ir.EnterWith):
yield ew.begin, ew.end
def previously_occurred(start, known_ranges):
for a, b in known_ranges:
            if start >= a and start < b:
return True
return False
known_ranges = []
for s, e in sorted(find_ranges(blocks)):
if not previously_occurred(s, known_ranges):
if e not in blocks:
                # this is possible if there's an exit path in the with-block
raise errors.CompilerError(
'unsupported controlflow due to return/raise '
'statements inside with block'
)
assert s in blocks, 'starting offset is not a label'
known_ranges.append((s, e))
return known_ranges
| [((334, 361), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (351, 361), False, 'import logging\n'), ((3335, 3404), 'collections.namedtuple', 'namedtuple', (['"""loop_lift_info"""', '"""loop,inputs,outputs,callfrom,returnto"""'], {}), "('loop_lift_info', 'loop,inputs,outputs,callfrom,returnto')\n", (3345, 3404), False, 'from collections import namedtuple, defaultdict\n'), ((2102, 2127), 'numba.analysis.find_top_level_loops', 'find_top_level_loops', (['cfg'], {}), '(cfg)\n', (2122, 2127), False, 'from numba.analysis import compute_cfg_from_blocks, find_top_level_loops\n'), ((2942, 2970), 'numba.analysis.compute_use_defs', 'compute_use_defs', (['loopblocks'], {}), '(loopblocks)\n', (2958, 2970), False, 'from numba.analysis import compute_use_defs\n'), ((4766, 4796), 'numba.ir.Block', 'ir.Block', ([], {'scope': 'scope', 'loc': 'loc'}), '(scope=scope, loc=loc)\n', (4774, 4796), False, 'from numba import ir, errors, ir_utils\n'), ((4802, 4922), 'numba.ir_utils.fill_block_with_call', 'ir_utils.fill_block_with_call', ([], {'newblock': 'blk', 'callee': 'liftedloop', 'label_next': 'returnto', 'inputs': 'inputs', 'outputs': 'outputs'}), '(newblock=blk, callee=liftedloop, label_next=\n returnto, inputs=inputs, outputs=outputs)\n', (4831, 4922), False, 'from numba import ir, errors, ir_utils\n'), ((6600, 6658), 'numba.dispatcher.LiftedLoop', 'LiftedLoop', (['lifted_ir', 'typingctx', 'targetctx', 'flags', 'locals'], {}), '(lifted_ir, typingctx, targetctx, flags, locals)\n', (6610, 6658), False, 'from numba.dispatcher import LiftedLoop\n'), ((7426, 7457), 'numba.analysis.compute_cfg_from_blocks', 'compute_cfg_from_blocks', (['blocks'], {}), '(blocks)\n', (7449, 7457), False, 'from numba.analysis import compute_cfg_from_blocks, find_top_level_loops\n'), ((8182, 8213), 'numba.analysis.compute_cfg_from_blocks', 'compute_cfg_from_blocks', (['blocks'], {}), '(blocks)\n', (8205, 8213), False, 'from numba.analysis import compute_cfg_from_blocks, find_top_level_loops\n'), ((14476, 14555), 'numba.errors.CompilerError', 'errors.CompilerError', (['"""malformed with-context usage"""'], {'loc': 'blocks[blk_start].loc'}), "('malformed with-context usage', loc=blocks[blk_start].loc)\n", (14496, 14555), False, 'from numba import ir, errors, ir_utils\n'), ((14738, 14754), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (14749, 14754), False, 'from collections import namedtuple, defaultdict\n'), ((10067, 10115), 'numba.ir.Block', 'ir.Block', ([], {'scope': 'entryblk.scope', 'loc': 'entryblk.loc'}), '(scope=entryblk.scope, loc=entryblk.loc)\n', (10075, 10115), False, 'from numba import ir, errors, ir_utils\n'), ((13505, 13565), 'numba.ir_utils.guard', 'ir_utils.guard', (['ir_utils.find_global_value', 'func_ir', 'var_ref'], {}), '(ir_utils.find_global_value, func_ir, var_ref)\n', (13519, 13565), False, 'from numba import ir, errors, ir_utils\n'), ((14870, 14959), 'numba.errors.CompilerError', 'errors.CompilerError', (['"""with\'s head-block must have exactly 1 ENTER_WITH"""'], {'loc': 'blk.loc'}), '("with\'s head-block must have exactly 1 ENTER_WITH",\n loc=blk.loc)\n', (14890, 14959), False, 'from numba import ir, errors, ir_utils\n'), ((15044, 15123), 'numba.errors.CompilerError', 'errors.CompilerError', (['"""with\'s head-block must have exactly 1 JUMP"""'], {'loc': 'blk.loc'}), '("with\'s head-block must have exactly 1 JUMP", loc=blk.loc)\n', (15064, 15123), False, 'from numba import ir, errors, ir_utils\n'), ((15303, 15379), 'numba.errors.CompilerError', 
'errors.CompilerError', (['"""illegal statements in with\'s head-block"""'], {'loc': 'blk.loc'}), '("illegal statements in with\'s head-block", loc=blk.loc)\n', (15323, 15379), False, 'from numba import ir, errors, ir_utils\n'), ((5386, 5416), 'numba.ir.Block', 'ir.Block', ([], {'scope': 'scope', 'loc': 'loc'}), '(scope=scope, loc=loc)\n', (5394, 5416), False, 'from numba import ir, errors, ir_utils\n'), ((5571, 5601), 'numba.ir.Block', 'ir.Block', ([], {'scope': 'scope', 'loc': 'loc'}), '(scope=scope, loc=loc)\n', (5579, 5601), False, 'from numba import ir, errors, ir_utils\n'), ((10162, 10201), 'numba.ir.Jump', 'ir.Jump', ([], {'target': 'header', 'loc': 'tailblk.loc'}), '(target=header, loc=tailblk.loc)\n', (10169, 10201), False, 'from numba import ir, errors, ir_utils\n'), ((11447, 11478), 'numba.postproc.PostProcessor', 'postproc.PostProcessor', (['func_ir'], {}), '(func_ir)\n', (11469, 11478), False, 'from numba import postproc\n'), ((13662, 13760), 'numba.errors.CompilerError', 'errors.CompilerError', (['"""Undefined variable used as context manager"""'], {'loc': 'blocks[blk_start].loc'}), "('Undefined variable used as context manager', loc=\n blocks[blk_start].loc)\n", (13682, 13760), False, 'from numba import ir, errors, ir_utils\n'), ((13853, 13903), 'numba.errors.CompilerError', 'errors.CompilerError', (['_illegal_cm_msg'], {'loc': 'dfn.loc'}), '(_illegal_cm_msg, loc=dfn.loc)\n', (13873, 13903), False, 'from numba import ir, errors, ir_utils\n'), ((16357, 16391), 'numba.errors.CompilerError', 'errors.CompilerError', (['msg'], {'loc': 'loc'}), '(msg, loc=loc)\n', (16377, 16391), False, 'from numba import ir, errors, ir_utils\n'), ((16706, 16740), 'numba.errors.CompilerError', 'errors.CompilerError', (['msg'], {'loc': 'loc'}), '(msg, loc=loc)\n', (16726, 16740), False, 'from numba import ir, errors, ir_utils\n'), ((14253, 14343), 'numba.errors.CompilerError', 'errors.CompilerError', (['"""Unsupported context manager in use"""'], {'loc': 'blocks[blk_start].loc'}), "('Unsupported context manager in use', loc=blocks[\n blk_start].loc)\n", (14273, 14343), False, 'from numba import ir, errors, ir_utils\n'), ((17438, 17539), 'numba.errors.CompilerError', 'errors.CompilerError', (['"""unsupported controlflow due to return/raise statements inside with block"""'], {}), "(\n 'unsupported controlflow due to return/raise statements inside with block')\n", (17458, 17539), False, 'from numba import ir, errors, ir_utils\n')] |
HabibMrad/MONAI | tests/test_masked_inference_wsi_dataset.py | 1314701c15623422574b0153d746666dc6004454 | import os
import unittest
from unittest import skipUnless
import numpy as np
from numpy.testing import assert_array_equal
from parameterized import parameterized
from monai.apps.pathology.datasets import MaskedInferenceWSIDataset
from monai.apps.utils import download_url
from monai.utils import optional_import
from tests.utils import skip_if_quick
_, has_cim = optional_import("cucim")
_, has_osl = optional_import("openslide")
FILE_URL = "http://openslide.cs.cmu.edu/download/openslide-testdata/Generic-TIFF/CMU-1.tiff"
FILE_PATH = os.path.join(os.path.dirname(__file__), "testing_data", os.path.basename(FILE_URL))
MASK1 = os.path.join(os.path.dirname(__file__), "testing_data", "tissue_mask1.npy")
MASK2 = os.path.join(os.path.dirname(__file__), "testing_data", "tissue_mask2.npy")
MASK4 = os.path.join(os.path.dirname(__file__), "testing_data", "tissue_mask4.npy")
HEIGHT = 32914
WIDTH = 46000
def prepare_data():
mask = np.zeros((WIDTH // 2, HEIGHT // 2))
mask[100, 100] = 1
np.save(MASK1, mask)
mask[100, 100:102] = 1
np.save(MASK2, mask)
mask[100:102, 100:102] = 1
np.save(MASK4, mask)
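# Clarifying note (added; not in the original file): MASK1, MASK2 and MASK4 contain
# 1, 2 and 4 foreground pixels respectively, which is why the test cases below expect
# 1, 2 and 4 extracted patches at the corresponding mask locations.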
TEST_CASE_0 = [
{
"data": [
{"image": FILE_PATH, "mask": MASK1},
],
"patch_size": 1,
"image_reader_name": "cuCIM",
},
[
{
"image": np.array([[[243]], [[243]], [[243]]], dtype=np.uint8),
"name": "CMU-1",
"mask_location": [100, 100],
},
],
]
TEST_CASE_1 = [
{
"data": [{"image": FILE_PATH, "mask": MASK2}],
"patch_size": 1,
"image_reader_name": "cuCIM",
},
[
{
"image": np.array([[[243]], [[243]], [[243]]], dtype=np.uint8),
"name": "CMU-1",
"mask_location": [100, 100],
},
{
"image": np.array([[[243]], [[243]], [[243]]], dtype=np.uint8),
"name": "CMU-1",
"mask_location": [101, 100],
},
],
]
TEST_CASE_2 = [
{
"data": [{"image": FILE_PATH, "mask": MASK4}],
"patch_size": 1,
"image_reader_name": "cuCIM",
},
[
{
"image": np.array([[[243]], [[243]], [[243]]], dtype=np.uint8),
"name": "CMU-1",
"mask_location": [100, 100],
},
{
"image": np.array([[[243]], [[243]], [[243]]], dtype=np.uint8),
"name": "CMU-1",
"mask_location": [100, 101],
},
{
"image": np.array([[[243]], [[243]], [[243]]], dtype=np.uint8),
"name": "CMU-1",
"mask_location": [101, 100],
},
{
"image": np.array([[[243]], [[243]], [[243]]], dtype=np.uint8),
"name": "CMU-1",
"mask_location": [101, 101],
},
],
]
TEST_CASE_3 = [
{
"data": [
{"image": FILE_PATH, "mask": MASK1},
],
"patch_size": 2,
"image_reader_name": "cuCIM",
},
[
{
"image": np.array(
[
[[243, 243], [243, 243]],
[[243, 243], [243, 243]],
[[243, 243], [243, 243]],
],
dtype=np.uint8,
),
"name": "CMU-1",
"mask_location": [100, 100],
},
],
]
TEST_CASE_4 = [
{
"data": [
{"image": FILE_PATH, "mask": MASK1},
{"image": FILE_PATH, "mask": MASK2},
],
"patch_size": 1,
"image_reader_name": "cuCIM",
},
[
{
"image": np.array([[[243]], [[243]], [[243]]], dtype=np.uint8),
"name": "CMU-1",
"mask_location": [100, 100],
},
{
"image": np.array([[[243]], [[243]], [[243]]], dtype=np.uint8),
"name": "CMU-1",
"mask_location": [100, 100],
},
{
"image": np.array([[[243]], [[243]], [[243]]], dtype=np.uint8),
"name": "CMU-1",
"mask_location": [101, 100],
},
],
]
TEST_CASE_OPENSLIDE_0 = [
{
"data": [
{"image": FILE_PATH, "mask": MASK1},
],
"patch_size": 1,
"image_reader_name": "OpenSlide",
},
[
{
"image": np.array([[[243]], [[243]], [[243]]], dtype=np.uint8),
"name": "CMU-1",
"mask_location": [100, 100],
},
],
]
TEST_CASE_OPENSLIDE_1 = [
{
"data": [{"image": FILE_PATH, "mask": MASK2}],
"patch_size": 1,
"image_reader_name": "OpenSlide",
},
[
{
"image": np.array([[[243]], [[243]], [[243]]], dtype=np.uint8),
"name": "CMU-1",
"mask_location": [100, 100],
},
{
"image": np.array([[[243]], [[243]], [[243]]], dtype=np.uint8),
"name": "CMU-1",
"mask_location": [101, 100],
},
],
]
class TestMaskedInferenceWSIDataset(unittest.TestCase):
def setUp(self):
prepare_data()
download_url(FILE_URL, FILE_PATH, "5a3cfd4fd725c50578ddb80b517b759f")
@parameterized.expand(
[
TEST_CASE_0,
TEST_CASE_1,
TEST_CASE_2,
TEST_CASE_3,
TEST_CASE_4,
]
)
@skipUnless(has_cim, "Requires CuCIM")
@skip_if_quick
def test_read_patches_cucim(self, input_parameters, expected):
dataset = MaskedInferenceWSIDataset(**input_parameters)
self.compare_samples_expected(dataset, expected)
@parameterized.expand(
[
TEST_CASE_OPENSLIDE_0,
TEST_CASE_OPENSLIDE_1,
]
)
@skipUnless(has_osl, "Requires OpenSlide")
@skip_if_quick
def test_read_patches_openslide(self, input_parameters, expected):
dataset = MaskedInferenceWSIDataset(**input_parameters)
self.compare_samples_expected(dataset, expected)
def compare_samples_expected(self, dataset, expected):
for i in range(len(dataset)):
self.assertTupleEqual(dataset[i][0]["image"].shape, expected[i]["image"].shape)
self.assertIsNone(assert_array_equal(dataset[i][0]["image"], expected[i]["image"]))
self.assertEqual(dataset[i][0]["name"], expected[i]["name"])
self.assertListEqual(dataset[i][0]["mask_location"], expected[i]["mask_location"])
if __name__ == "__main__":
unittest.main()
| [((366, 390), 'monai.utils.optional_import', 'optional_import', (['"""cucim"""'], {}), "('cucim')\n", (381, 390), False, 'from monai.utils import optional_import\n'), ((404, 432), 'monai.utils.optional_import', 'optional_import', (['"""openslide"""'], {}), "('openslide')\n", (419, 432), False, 'from monai.utils import optional_import\n'), ((552, 577), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (567, 577), False, 'import os\n'), ((595, 621), 'os.path.basename', 'os.path.basename', (['FILE_URL'], {}), '(FILE_URL)\n', (611, 621), False, 'import os\n'), ((645, 670), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (660, 670), False, 'import os\n'), ((729, 754), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (744, 754), False, 'import os\n'), ((813, 838), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (828, 838), False, 'import os\n'), ((940, 975), 'numpy.zeros', 'np.zeros', (['(WIDTH // 2, HEIGHT // 2)'], {}), '((WIDTH // 2, HEIGHT // 2))\n', (948, 975), True, 'import numpy as np\n'), ((1003, 1023), 'numpy.save', 'np.save', (['MASK1', 'mask'], {}), '(MASK1, mask)\n', (1010, 1023), True, 'import numpy as np\n'), ((1055, 1075), 'numpy.save', 'np.save', (['MASK2', 'mask'], {}), '(MASK2, mask)\n', (1062, 1075), True, 'import numpy as np\n'), ((1111, 1131), 'numpy.save', 'np.save', (['MASK4', 'mask'], {}), '(MASK4, mask)\n', (1118, 1131), True, 'import numpy as np\n'), ((5145, 5236), 'parameterized.parameterized.expand', 'parameterized.expand', (['[TEST_CASE_0, TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4]'], {}), '([TEST_CASE_0, TEST_CASE_1, TEST_CASE_2, TEST_CASE_3,\n TEST_CASE_4])\n', (5165, 5236), False, 'from parameterized import parameterized\n'), ((5323, 5360), 'unittest.skipUnless', 'skipUnless', (['has_cim', '"""Requires CuCIM"""'], {}), "(has_cim, 'Requires CuCIM')\n", (5333, 5360), False, 'from unittest import skipUnless\n'), ((5574, 5642), 'parameterized.parameterized.expand', 'parameterized.expand', (['[TEST_CASE_OPENSLIDE_0, TEST_CASE_OPENSLIDE_1]'], {}), '([TEST_CASE_OPENSLIDE_0, TEST_CASE_OPENSLIDE_1])\n', (5594, 5642), False, 'from parameterized import parameterized\n'), ((5697, 5738), 'unittest.skipUnless', 'skipUnless', (['has_osl', '"""Requires OpenSlide"""'], {}), "(has_osl, 'Requires OpenSlide')\n", (5707, 5738), False, 'from unittest import skipUnless\n'), ((6437, 6452), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6450, 6452), False, 'import unittest\n'), ((5069, 5138), 'monai.apps.utils.download_url', 'download_url', (['FILE_URL', 'FILE_PATH', '"""5a3cfd4fd725c50578ddb80b517b759f"""'], {}), "(FILE_URL, FILE_PATH, '5a3cfd4fd725c50578ddb80b517b759f')\n", (5081, 5138), False, 'from monai.apps.utils import download_url\n'), ((5465, 5510), 'monai.apps.pathology.datasets.MaskedInferenceWSIDataset', 'MaskedInferenceWSIDataset', ([], {}), '(**input_parameters)\n', (5490, 5510), False, 'from monai.apps.pathology.datasets import MaskedInferenceWSIDataset\n'), ((5847, 5892), 'monai.apps.pathology.datasets.MaskedInferenceWSIDataset', 'MaskedInferenceWSIDataset', ([], {}), '(**input_parameters)\n', (5872, 5892), False, 'from monai.apps.pathology.datasets import MaskedInferenceWSIDataset\n'), ((1341, 1394), 'numpy.array', 'np.array', (['[[[243]], [[243]], [[243]]]'], {'dtype': 'np.uint8'}), '([[[243]], [[243]], [[243]]], dtype=np.uint8)\n', (1349, 1394), True, 'import numpy as np\n'), ((1671, 1724), 'numpy.array', 'np.array', (['[[[243]], [[243]], [[243]]]'], 
{'dtype': 'np.uint8'}), '([[[243]], [[243]], [[243]]], dtype=np.uint8)\n', (1679, 1724), True, 'import numpy as np\n'), ((1838, 1891), 'numpy.array', 'np.array', (['[[[243]], [[243]], [[243]]]'], {'dtype': 'np.uint8'}), '([[[243]], [[243]], [[243]]], dtype=np.uint8)\n', (1846, 1891), True, 'import numpy as np\n'), ((2168, 2221), 'numpy.array', 'np.array', (['[[[243]], [[243]], [[243]]]'], {'dtype': 'np.uint8'}), '([[[243]], [[243]], [[243]]], dtype=np.uint8)\n', (2176, 2221), True, 'import numpy as np\n'), ((2335, 2388), 'numpy.array', 'np.array', (['[[[243]], [[243]], [[243]]]'], {'dtype': 'np.uint8'}), '([[[243]], [[243]], [[243]]], dtype=np.uint8)\n', (2343, 2388), True, 'import numpy as np\n'), ((2502, 2555), 'numpy.array', 'np.array', (['[[[243]], [[243]], [[243]]]'], {'dtype': 'np.uint8'}), '([[[243]], [[243]], [[243]]], dtype=np.uint8)\n', (2510, 2555), True, 'import numpy as np\n'), ((2669, 2722), 'numpy.array', 'np.array', (['[[[243]], [[243]], [[243]]]'], {'dtype': 'np.uint8'}), '([[[243]], [[243]], [[243]]], dtype=np.uint8)\n', (2677, 2722), True, 'import numpy as np\n'), ((3022, 3130), 'numpy.array', 'np.array', (['[[[243, 243], [243, 243]], [[243, 243], [243, 243]], [[243, 243], [243, 243]]]'], {'dtype': 'np.uint8'}), '([[[243, 243], [243, 243]], [[243, 243], [243, 243]], [[243, 243],\n [243, 243]]], dtype=np.uint8)\n', (3030, 3130), True, 'import numpy as np\n'), ((3601, 3654), 'numpy.array', 'np.array', (['[[[243]], [[243]], [[243]]]'], {'dtype': 'np.uint8'}), '([[[243]], [[243]], [[243]]], dtype=np.uint8)\n', (3609, 3654), True, 'import numpy as np\n'), ((3768, 3821), 'numpy.array', 'np.array', (['[[[243]], [[243]], [[243]]]'], {'dtype': 'np.uint8'}), '([[[243]], [[243]], [[243]]], dtype=np.uint8)\n', (3776, 3821), True, 'import numpy as np\n'), ((3935, 3988), 'numpy.array', 'np.array', (['[[[243]], [[243]], [[243]]]'], {'dtype': 'np.uint8'}), '([[[243]], [[243]], [[243]]], dtype=np.uint8)\n', (3943, 3988), True, 'import numpy as np\n'), ((4303, 4356), 'numpy.array', 'np.array', (['[[[243]], [[243]], [[243]]]'], {'dtype': 'np.uint8'}), '([[[243]], [[243]], [[243]]], dtype=np.uint8)\n', (4311, 4356), True, 'import numpy as np\n'), ((4647, 4700), 'numpy.array', 'np.array', (['[[[243]], [[243]], [[243]]]'], {'dtype': 'np.uint8'}), '([[[243]], [[243]], [[243]]], dtype=np.uint8)\n', (4655, 4700), True, 'import numpy as np\n'), ((4814, 4867), 'numpy.array', 'np.array', (['[[[243]], [[243]], [[243]]]'], {'dtype': 'np.uint8'}), '([[[243]], [[243]], [[243]]], dtype=np.uint8)\n', (4822, 4867), True, 'import numpy as np\n'), ((6170, 6234), 'numpy.testing.assert_array_equal', 'assert_array_equal', (["dataset[i][0]['image']", "expected[i]['image']"], {}), "(dataset[i][0]['image'], expected[i]['image'])\n", (6188, 6234), False, 'from numpy.testing import assert_array_equal\n')] |
paulolimac/manga-py | manga_py/providers/doujins_com.py | 3d180846750a4e770b5024eb8cd15629362875b1 | from manga_py.provider import Provider
from .helpers.std import Std
class DoujinsCom(Provider, Std):
img_selector = '#image-container img.doujin'
def get_archive_name(self) -> str:
return 'archive'
def get_chapter_index(self) -> str:
return '0'
def get_main_content(self):
return self._get_content('{}/gallery/{}')
def get_manga_name(self) -> str:
return self._get_name('/gallery/([^/]+)')
def get_chapters(self):
return [b'']
def get_files(self):
items = self.document_fromstring(self.content, self.img_selector)
return [i.get('data-file').replace('&', '&') for i in items]
def get_cover(self) -> str:
return self._cover_from_content(self.img_selector)
def book_meta(self) -> dict:
# todo meta
pass
def chapter_for_json(self):
return self.get_url()
main = DoujinsCom
| [] |
awesome-archive/urh | src/urh/ui/delegates/CheckBoxDelegate.py | c8c3aabc9d637ca660d8c72c3d8372055e0f3ec7 | from PyQt5.QtCore import QModelIndex, QAbstractItemModel, Qt, pyqtSlot
from PyQt5.QtWidgets import QItemDelegate, QWidget, QStyleOptionViewItem, QCheckBox
class CheckBoxDelegate(QItemDelegate):
def __init__(self, parent=None):
super().__init__(parent)
self.enabled = True
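    # Typical usage (illustrative assumption, not taken from this project):
    #   view.setItemDelegateForColumn(column, CheckBoxDelegate(view))
    # so that boolean cells in that column are edited through a QCheckBox editor.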
def createEditor(self, parent: QWidget, option: QStyleOptionViewItem, index: QModelIndex):
editor = QCheckBox(parent)
editor.stateChanged.connect(self.stateChanged)
return editor
def setEditorData(self, editor: QCheckBox, index: QModelIndex):
editor.blockSignals(True)
editor.setChecked(index.model().data(index))
self.enabled = editor.isChecked()
editor.blockSignals(False)
def setModelData(self, editor: QCheckBox, model: QAbstractItemModel, index: QModelIndex):
model.setData(index, editor.isChecked(), Qt.EditRole)
@pyqtSlot()
def stateChanged(self):
self.commitData.emit(self.sender()) | [((898, 908), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (906, 908), False, 'from PyQt5.QtCore import QModelIndex, QAbstractItemModel, Qt, pyqtSlot\n'), ((407, 424), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', (['parent'], {}), '(parent)\n', (416, 424), False, 'from PyQt5.QtWidgets import QItemDelegate, QWidget, QStyleOptionViewItem, QCheckBox\n')] |
acceleratedmaterials/AMDworkshop_demo | neural_network/backup_casestudy/denbigh/tf_RNN.py | e7c2b931e023fc00ff7494b8acb2181f5c75bc4e | # -*- coding: utf-8 -*-
'''
Framework: Tensorflow
Training samples: 1600
Validation samples: 400
RNN with 128 units
Optimizer: Adam
Epoch: 100
Loss: Cross Entropy
Activation function: Relu for network and Soft-max for regression
Regularization: Drop-out, keep_prob = 0.8
Accuracy of Validation set: 95%
'''
from __future__ import division, print_function, absolute_import
import tflearn
from tflearn.data_utils import to_categorical, pad_sequences
from data_denbigh import *
X, Y = getDenbighData()
#Hyperparams
neurons_num = 128 # Number of neurons in the RNN layer
keep_prob = 0.5 # Keep probability for the drop-out regularization
learning_rate = 0.001 # Learning rate for mini-batch SGD
batch_size = 32 # Batch size
n_epoch = 100 # Number of epoch
#Data preprocessing: convert each sequence to a fixed-length vector for the network
X = pad_sequences(X, maxlen=5, value=0.)
Y = to_categorical(Y, 2)
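# After these two calls X is a (num_samples, 5) matrix of padded sequences and
# Y is a one-hot label matrix with 2 columns (one per class).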
#Build the network
net = tflearn.input_data([None, 5])
net = tflearn.embedding(net, input_dim=10000, output_dim=128)
net = tflearn.simple_rnn(net, neurons_num, dropout=keep_prob)
net = tflearn.fully_connected(net, 2, activation='softmax')
net = tflearn.regression(net, optimizer='adam', learning_rate=learning_rate,
loss='categorical_crossentropy')
model = tflearn.DNN(net, tensorboard_verbose=0)
model.fit(X, Y, validation_set=0.2, show_metric=True,
batch_size=batch_size, n_epoch=n_epoch)
model.save('./model.tfl') | [((824, 861), 'tflearn.data_utils.pad_sequences', 'pad_sequences', (['X'], {'maxlen': '(5)', 'value': '(0.0)'}), '(X, maxlen=5, value=0.0)\n', (837, 861), False, 'from tflearn.data_utils import to_categorical, pad_sequences\n'), ((865, 885), 'tflearn.data_utils.to_categorical', 'to_categorical', (['Y', '(2)'], {}), '(Y, 2)\n', (879, 885), False, 'from tflearn.data_utils import to_categorical, pad_sequences\n'), ((911, 940), 'tflearn.input_data', 'tflearn.input_data', (['[None, 5]'], {}), '([None, 5])\n', (929, 940), False, 'import tflearn\n'), ((947, 1002), 'tflearn.embedding', 'tflearn.embedding', (['net'], {'input_dim': '(10000)', 'output_dim': '(128)'}), '(net, input_dim=10000, output_dim=128)\n', (964, 1002), False, 'import tflearn\n'), ((1009, 1064), 'tflearn.simple_rnn', 'tflearn.simple_rnn', (['net', 'neurons_num'], {'dropout': 'keep_prob'}), '(net, neurons_num, dropout=keep_prob)\n', (1027, 1064), False, 'import tflearn\n'), ((1071, 1124), 'tflearn.fully_connected', 'tflearn.fully_connected', (['net', '(2)'], {'activation': '"""softmax"""'}), "(net, 2, activation='softmax')\n", (1094, 1124), False, 'import tflearn\n'), ((1131, 1239), 'tflearn.regression', 'tflearn.regression', (['net'], {'optimizer': '"""adam"""', 'learning_rate': 'learning_rate', 'loss': '"""categorical_crossentropy"""'}), "(net, optimizer='adam', learning_rate=learning_rate, loss\n ='categorical_crossentropy')\n", (1149, 1239), False, 'import tflearn\n'), ((1244, 1283), 'tflearn.DNN', 'tflearn.DNN', (['net'], {'tensorboard_verbose': '(0)'}), '(net, tensorboard_verbose=0)\n', (1255, 1283), False, 'import tflearn\n')] |
Nocty-chan/cs224n-squad | code/tests/test_tile_tf.py | 0c0b342621e038aba8e20ff411da13dfa173351d | import numpy as np
import tensorflow as tf
H = 2
N = 2
M = 3
BS = 10
def my_softmax(arr):
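    # Numerically stable reference softmax over axis 2: subtracting the
    # per-row maximum before exponentiating avoids overflow without changing
    # the resulting probabilities.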
max_elements = np.reshape(np.max(arr, axis = 2), (BS, N, 1))
arr = arr - max_elements
exp_array = np.exp(arr)
print (exp_array)
sum_array = np.reshape(np.sum(exp_array, axis=2), (BS, N, 1))
return exp_array /sum_array
def masked_softmax(logits, mask, dim):
"""
Takes masked softmax over given dimension of logits.
Inputs:
logits: Numpy array. We want to take softmax over dimension dim.
mask: Numpy array of same shape as logits.
Has 1s where there's real data in logits, 0 where there's padding
dim: int. dimension over which to take softmax
Returns:
masked_logits: Numpy array same shape as logits.
This is the same as logits, but with 1e30 subtracted
(i.e. very large negative number) in the padding locations.
prob_dist: Numpy array same shape as logits.
The result of taking softmax over masked_logits in given dimension.
Should be 0 in padding locations.
Should sum to 1 over given dimension.
"""
exp_mask = (1 - tf.cast(mask, 'float64')) * (-1e30) # -large where there's padding, 0 elsewhere
print (exp_mask)
masked_logits = tf.add(logits, exp_mask) # where there's padding, set logits to -large
prob_dist = tf.nn.softmax(masked_logits, dim)
return masked_logits, prob_dist
def test_build_similarity(contexts, questions):
w_sim_1 = tf.get_variable('w_sim_1',
initializer=w_1) # 2 * H
w_sim_2 = tf.get_variable('w_sim_2',
initializer=w_2) # 2 * self.hidden_size
w_sim_3 = tf.get_variable('w_sim_3',
initializer=w_3) # 2 * self.hidden_size
q_tile = tf.tile(tf.expand_dims(questions, 0), [N, 1, 1, 1]) # N x BS x M x 2H
q_tile = tf.transpose(q_tile, (1, 0, 3, 2)) # BS x N x 2H x M
contexts = tf.expand_dims(contexts, -1) # BS x N x 2H x 1
result = (contexts * q_tile) # BS x N x 2H x M
tf.assert_equal(tf.shape(result), [BS, N, 2 * H, M])
result = tf.transpose(result, (0, 1, 3, 2)) # BS x N x M x 2H
result = tf.reshape(result, (-1, N * M, 2 * H)) # BS x (NxM) x 2H
tf.assert_equal(tf.shape(result), [BS, N*M, 2*H])
# w_sim_1 = tf.tile(tf.expand_dims(w_sim_1, 0), [BS, 1])
# w_sim_2 = tf.tile(tf.expand_dims(w_sim_2, 0), [BS, 1])
# w_sim_3 = tf.tile(tf.expand_dims(w_sim_3, 0), [BS, 1])
term1 = tf.matmul(tf.reshape(contexts, (BS * N, 2*H)), tf.expand_dims(w_sim_1, -1)) # BS x N
term1 = tf.reshape(term1, (-1, N))
term2 = tf.matmul(tf.reshape(questions, (BS * M, 2*H)), tf.expand_dims(w_sim_2, -1)) # BS x M
term2 = tf.reshape(term2, (-1, M))
term3 = tf.matmul(tf.reshape(result, (BS * N * M, 2* H)), tf.expand_dims(w_sim_3, -1))
term3 = tf.reshape(term3, (-1, N, M)) # BS x N x M
S = tf.reshape(term1,(-1, N, 1)) + term3 + tf.reshape(term2, (-1, 1, M))
return S
def test_build_sim_mask():
context_mask = np.array([True, True]) # BS x N
question_mask = np.array([True, True, False]) # BS x M
context_mask = np.tile(context_mask, [BS, 1])
question_mask = np.tile(question_mask, [BS, 1])
context_mask = tf.get_variable('context_mask', initializer=context_mask)
question_mask = tf.get_variable('question_mask', initializer=question_mask)
context_mask = tf.expand_dims(context_mask, -1) # BS x N x 1
question_mask = tf.expand_dims(question_mask, -1) # BS x M x 1
question_mask = tf.transpose(question_mask, (0, 2, 1)) # BS x 1 x M
sim_mask = tf.matmul(tf.cast(context_mask, dtype=tf.int32),
tf.cast(question_mask, dtype=tf.int32)) # BS x N x M
return sim_mask
def test_build_c2q(S, S_mask, questions):
    _, alpha = masked_softmax(S, S_mask, 2) # BS x N x M
return tf.matmul(alpha, questions)
def test_build_q2c(S, S_mask, contexts):
# S = BS x N x M
# contexts = BS x N x 2H
m = tf.reduce_max(S * tf.cast(S_mask, dtype=tf.float64), axis=2) # BS x N
beta = tf.expand_dims(tf.nn.softmax(m), -1) # BS x N x 1
beta = tf.transpose(beta, (0, 2, 1))
q2c = tf.matmul(beta, contexts)
return m, beta, q2c
def test_concatenation(c2q, q2c):
q2c = tf.tile(q2c, (1, N, 1))
output = tf.concat([c2q, q2c], axis=2)
tf.assert_equal(tf.shape(output), [BS, N, 4*H])
return output
if __name__== "__main__":
w_1 = np.array([1., 2., 3., 4.])
w_2 = np.array([5., 6., 7., 8.])
w_3 = np.array([13., 12., 11., 10.])
c = np.array([[[1., 2., 3., 4.], [5., 6., 7., 8.]]]) # BS x N x 2H
q = np.array([[[1., 2., 3., 0.], [5., 6., 7., 4.], [8., 9. , 10., 11.]]]) # BS x M x 2H
c = np.tile(c, [BS, 1, 1])
q = np.tile(q, [BS, 1, 1])
questions = tf.get_variable('questions', initializer=q)
contexts = tf.get_variable('contexts', initializer=c)
S = test_build_similarity(contexts, questions)
mask = test_build_sim_mask()
c2q = test_build_c2q(S, mask, questions)
m, beta, q2c = test_build_q2c(S, mask, contexts)
output = test_concatenation(c2q, q2c)
init = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init)
S_result, mask_result, c2q_r = sess.run([S, mask, c2q])
actual_result = np.tile(np.array([[228, 772, 1372], [548, 1828, 3140]]), [BS, 1, 1])
assert np.array_equal(actual_result, S_result), 'Arrays are not equal'
print ("Building similarity matrix is successful!")
print ("Context 2 Question attention")
m_r, beta_r, q2c_r = sess.run([m, beta, q2c])
output_r = sess.run(output)
| [((201, 212), 'numpy.exp', 'np.exp', (['arr'], {}), '(arr)\n', (207, 212), True, 'import numpy as np\n'), ((1260, 1284), 'tensorflow.add', 'tf.add', (['logits', 'exp_mask'], {}), '(logits, exp_mask)\n', (1266, 1284), True, 'import tensorflow as tf\n'), ((1347, 1380), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['masked_logits', 'dim'], {}), '(masked_logits, dim)\n', (1360, 1380), True, 'import tensorflow as tf\n'), ((1481, 1524), 'tensorflow.get_variable', 'tf.get_variable', (['"""w_sim_1"""'], {'initializer': 'w_1'}), "('w_sim_1', initializer=w_1)\n", (1496, 1524), True, 'import tensorflow as tf\n'), ((1555, 1598), 'tensorflow.get_variable', 'tf.get_variable', (['"""w_sim_2"""'], {'initializer': 'w_2'}), "('w_sim_2', initializer=w_2)\n", (1570, 1598), True, 'import tensorflow as tf\n'), ((1644, 1687), 'tensorflow.get_variable', 'tf.get_variable', (['"""w_sim_3"""'], {'initializer': 'w_3'}), "('w_sim_3', initializer=w_3)\n", (1659, 1687), True, 'import tensorflow as tf\n'), ((1816, 1850), 'tensorflow.transpose', 'tf.transpose', (['q_tile', '(1, 0, 3, 2)'], {}), '(q_tile, (1, 0, 3, 2))\n', (1828, 1850), True, 'import tensorflow as tf\n'), ((1884, 1912), 'tensorflow.expand_dims', 'tf.expand_dims', (['contexts', '(-1)'], {}), '(contexts, -1)\n', (1898, 1912), True, 'import tensorflow as tf\n'), ((2052, 2086), 'tensorflow.transpose', 'tf.transpose', (['result', '(0, 1, 3, 2)'], {}), '(result, (0, 1, 3, 2))\n', (2064, 2086), True, 'import tensorflow as tf\n'), ((2118, 2156), 'tensorflow.reshape', 'tf.reshape', (['result', '(-1, N * M, 2 * H)'], {}), '(result, (-1, N * M, 2 * H))\n', (2128, 2156), True, 'import tensorflow as tf\n'), ((2522, 2548), 'tensorflow.reshape', 'tf.reshape', (['term1', '(-1, N)'], {}), '(term1, (-1, N))\n', (2532, 2548), True, 'import tensorflow as tf\n'), ((2659, 2685), 'tensorflow.reshape', 'tf.reshape', (['term2', '(-1, M)'], {}), '(term2, (-1, M))\n', (2669, 2685), True, 'import tensorflow as tf\n'), ((2789, 2818), 'tensorflow.reshape', 'tf.reshape', (['term3', '(-1, N, M)'], {}), '(term3, (-1, N, M))\n', (2799, 2818), True, 'import tensorflow as tf\n'), ((2969, 2991), 'numpy.array', 'np.array', (['[True, True]'], {}), '([True, True])\n', (2977, 2991), True, 'import numpy as np\n'), ((3021, 3050), 'numpy.array', 'np.array', (['[True, True, False]'], {}), '([True, True, False])\n', (3029, 3050), True, 'import numpy as np\n'), ((3079, 3109), 'numpy.tile', 'np.tile', (['context_mask', '[BS, 1]'], {}), '(context_mask, [BS, 1])\n', (3086, 3109), True, 'import numpy as np\n'), ((3130, 3161), 'numpy.tile', 'np.tile', (['question_mask', '[BS, 1]'], {}), '(question_mask, [BS, 1])\n', (3137, 3161), True, 'import numpy as np\n'), ((3181, 3238), 'tensorflow.get_variable', 'tf.get_variable', (['"""context_mask"""'], {'initializer': 'context_mask'}), "('context_mask', initializer=context_mask)\n", (3196, 3238), True, 'import tensorflow as tf\n'), ((3259, 3318), 'tensorflow.get_variable', 'tf.get_variable', (['"""question_mask"""'], {'initializer': 'question_mask'}), "('question_mask', initializer=question_mask)\n", (3274, 3318), True, 'import tensorflow as tf\n'), ((3338, 3370), 'tensorflow.expand_dims', 'tf.expand_dims', (['context_mask', '(-1)'], {}), '(context_mask, -1)\n', (3352, 3370), True, 'import tensorflow as tf\n'), ((3404, 3437), 'tensorflow.expand_dims', 'tf.expand_dims', (['question_mask', '(-1)'], {}), '(question_mask, -1)\n', (3418, 3437), True, 'import tensorflow as tf\n'), ((3471, 3509), 'tensorflow.transpose', 'tf.transpose', (['question_mask', '(0, 2, 1)'], 
{}), '(question_mask, (0, 2, 1))\n', (3483, 3509), True, 'import tensorflow as tf\n'), ((3781, 3808), 'tensorflow.matmul', 'tf.matmul', (['alpha', 'questions'], {}), '(alpha, questions)\n', (3790, 3808), True, 'import tensorflow as tf\n'), ((4051, 4080), 'tensorflow.transpose', 'tf.transpose', (['beta', '(0, 2, 1)'], {}), '(beta, (0, 2, 1))\n', (4063, 4080), True, 'import tensorflow as tf\n'), ((4091, 4116), 'tensorflow.matmul', 'tf.matmul', (['beta', 'contexts'], {}), '(beta, contexts)\n', (4100, 4116), True, 'import tensorflow as tf\n'), ((4186, 4209), 'tensorflow.tile', 'tf.tile', (['q2c', '(1, N, 1)'], {}), '(q2c, (1, N, 1))\n', (4193, 4209), True, 'import tensorflow as tf\n'), ((4223, 4252), 'tensorflow.concat', 'tf.concat', (['[c2q, q2c]'], {'axis': '(2)'}), '([c2q, q2c], axis=2)\n', (4232, 4252), True, 'import tensorflow as tf\n'), ((4360, 4390), 'numpy.array', 'np.array', (['[1.0, 2.0, 3.0, 4.0]'], {}), '([1.0, 2.0, 3.0, 4.0])\n', (4368, 4390), True, 'import numpy as np\n'), ((4397, 4427), 'numpy.array', 'np.array', (['[5.0, 6.0, 7.0, 8.0]'], {}), '([5.0, 6.0, 7.0, 8.0])\n', (4405, 4427), True, 'import numpy as np\n'), ((4434, 4468), 'numpy.array', 'np.array', (['[13.0, 12.0, 11.0, 10.0]'], {}), '([13.0, 12.0, 11.0, 10.0])\n', (4442, 4468), True, 'import numpy as np\n'), ((4474, 4530), 'numpy.array', 'np.array', (['[[[1.0, 2.0, 3.0, 4.0], [5.0, 6.0, 7.0, 8.0]]]'], {}), '([[[1.0, 2.0, 3.0, 4.0], [5.0, 6.0, 7.0, 8.0]]])\n', (4482, 4530), True, 'import numpy as np\n'), ((4545, 4630), 'numpy.array', 'np.array', (['[[[1.0, 2.0, 3.0, 0.0], [5.0, 6.0, 7.0, 4.0], [8.0, 9.0, 10.0, 11.0]]]'], {}), '([[[1.0, 2.0, 3.0, 0.0], [5.0, 6.0, 7.0, 4.0], [8.0, 9.0, 10.0, 11.0]]]\n )\n', (4553, 4630), True, 'import numpy as np\n'), ((4637, 4659), 'numpy.tile', 'np.tile', (['c', '[BS, 1, 1]'], {}), '(c, [BS, 1, 1])\n', (4644, 4659), True, 'import numpy as np\n'), ((4668, 4690), 'numpy.tile', 'np.tile', (['q', '[BS, 1, 1]'], {}), '(q, [BS, 1, 1])\n', (4675, 4690), True, 'import numpy as np\n'), ((4709, 4752), 'tensorflow.get_variable', 'tf.get_variable', (['"""questions"""'], {'initializer': 'q'}), "('questions', initializer=q)\n", (4724, 4752), True, 'import tensorflow as tf\n'), ((4768, 4810), 'tensorflow.get_variable', 'tf.get_variable', (['"""contexts"""'], {'initializer': 'c'}), "('contexts', initializer=c)\n", (4783, 4810), True, 'import tensorflow as tf\n'), ((5048, 5081), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (5079, 5081), True, 'import tensorflow as tf\n'), ((121, 140), 'numpy.max', 'np.max', (['arr'], {'axis': '(2)'}), '(arr, axis=2)\n', (127, 140), True, 'import numpy as np\n'), ((262, 287), 'numpy.sum', 'np.sum', (['exp_array'], {'axis': '(2)'}), '(exp_array, axis=2)\n', (268, 287), True, 'import numpy as np\n'), ((1740, 1768), 'tensorflow.expand_dims', 'tf.expand_dims', (['questions', '(0)'], {}), '(questions, 0)\n', (1754, 1768), True, 'import tensorflow as tf\n'), ((2002, 2018), 'tensorflow.shape', 'tf.shape', (['result'], {}), '(result)\n', (2010, 2018), True, 'import tensorflow as tf\n'), ((2195, 2211), 'tensorflow.shape', 'tf.shape', (['result'], {}), '(result)\n', (2203, 2211), True, 'import tensorflow as tf\n'), ((2435, 2472), 'tensorflow.reshape', 'tf.reshape', (['contexts', '(BS * N, 2 * H)'], {}), '(contexts, (BS * N, 2 * H))\n', (2445, 2472), True, 'import tensorflow as tf\n'), ((2472, 2499), 'tensorflow.expand_dims', 'tf.expand_dims', (['w_sim_1', '(-1)'], {}), '(w_sim_1, -1)\n', (2486, 2499), True, 'import tensorflow as 
tf\n'), ((2571, 2609), 'tensorflow.reshape', 'tf.reshape', (['questions', '(BS * M, 2 * H)'], {}), '(questions, (BS * M, 2 * H))\n', (2581, 2609), True, 'import tensorflow as tf\n'), ((2609, 2636), 'tensorflow.expand_dims', 'tf.expand_dims', (['w_sim_2', '(-1)'], {}), '(w_sim_2, -1)\n', (2623, 2636), True, 'import tensorflow as tf\n'), ((2708, 2747), 'tensorflow.reshape', 'tf.reshape', (['result', '(BS * N * M, 2 * H)'], {}), '(result, (BS * N * M, 2 * H))\n', (2718, 2747), True, 'import tensorflow as tf\n'), ((2748, 2775), 'tensorflow.expand_dims', 'tf.expand_dims', (['w_sim_3', '(-1)'], {}), '(w_sim_3, -1)\n', (2762, 2775), True, 'import tensorflow as tf\n'), ((2879, 2908), 'tensorflow.reshape', 'tf.reshape', (['term2', '(-1, 1, M)'], {}), '(term2, (-1, 1, M))\n', (2889, 2908), True, 'import tensorflow as tf\n'), ((3548, 3585), 'tensorflow.cast', 'tf.cast', (['context_mask'], {'dtype': 'tf.int32'}), '(context_mask, dtype=tf.int32)\n', (3555, 3585), True, 'import tensorflow as tf\n'), ((3599, 3637), 'tensorflow.cast', 'tf.cast', (['question_mask'], {'dtype': 'tf.int32'}), '(question_mask, dtype=tf.int32)\n', (3606, 3637), True, 'import tensorflow as tf\n'), ((4005, 4021), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['m'], {}), '(m)\n', (4018, 4021), True, 'import tensorflow as tf\n'), ((4273, 4289), 'tensorflow.shape', 'tf.shape', (['output'], {}), '(output)\n', (4281, 4289), True, 'import tensorflow as tf\n'), ((5091, 5103), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (5101, 5103), True, 'import tensorflow as tf\n'), ((5308, 5347), 'numpy.array_equal', 'np.array_equal', (['actual_result', 'S_result'], {}), '(actual_result, S_result)\n', (5322, 5347), True, 'import numpy as np\n'), ((1139, 1163), 'tensorflow.cast', 'tf.cast', (['mask', '"""float64"""'], {}), "(mask, 'float64')\n", (1146, 1163), True, 'import tensorflow as tf\n'), ((2840, 2869), 'tensorflow.reshape', 'tf.reshape', (['term1', '(-1, N, 1)'], {}), '(term1, (-1, N, 1))\n', (2850, 2869), True, 'import tensorflow as tf\n'), ((3927, 3960), 'tensorflow.cast', 'tf.cast', (['S_mask'], {'dtype': 'tf.float64'}), '(S_mask, dtype=tf.float64)\n', (3934, 3960), True, 'import tensorflow as tf\n'), ((5232, 5279), 'numpy.array', 'np.array', (['[[228, 772, 1372], [548, 1828, 3140]]'], {}), '([[228, 772, 1372], [548, 1828, 3140]])\n', (5240, 5279), True, 'import numpy as np\n')] |
hamogu/specutils | specutils/tests/test_smoothing.py | b873f2ac9b3c207c9e670246d102f46a9606d6ed | import numpy as np
import pytest
from astropy import convolution
from scipy.signal import medfilt
import astropy.units as u
from ..spectra.spectrum1d import Spectrum1D
from ..tests.spectral_examples import simulated_spectra
from ..manipulation.smoothing import (convolution_smooth, box_smooth,
gaussian_smooth, trapezoid_smooth,
median_smooth)
def compare_flux(flux_smooth1, flux_smooth2, flux_original, rtol=0.01):
"""
There are two things to compare for each set of smoothing:
1. Compare the smoothed flux from the astropy machinery vs
the smoothed flux from specutils. This is done by
comparing flux_smooth1 and flux_smooth2.
2. Next we want to compare the smoothed flux to the original
flux. This is a little more difficult as smoothing will
make a difference for median filter, but less so for
convolution based smoothing if the kernel is normalized
(area under the kernel = 1).
In this second case the rtol (relative tolerance) is used
judiciously.
"""
# Compare, element by element, the two smoothed fluxes.
assert np.allclose(flux_smooth1, flux_smooth2)
# Compare the total spectral flux of the smoothed to the original.
assert np.allclose(sum(flux_smooth1), sum(flux_original), rtol=rtol)
def test_smooth_custom_kernel(simulated_spectra):
"""
    Test CustomKernel smoothing with correct parameters.
"""
# Create the original spectrum
spec1 = simulated_spectra.s1_um_mJy_e1
flux_original = spec1.flux
# Create a custom kernel (some weird asymmetric-ness)
numpy_kernel = np.array([0.5, 1, 2, 0.5, 0.2])
numpy_kernel = numpy_kernel / np.sum(numpy_kernel)
custom_kernel = convolution.CustomKernel(numpy_kernel)
flux_smoothed_astropy = convolution.convolve(flux_original, custom_kernel)
# Calculate the custom smoothed
spec1_smoothed = convolution_smooth(spec1, custom_kernel)
compare_flux(spec1_smoothed.flux.value, flux_smoothed_astropy, flux_original.value)
@pytest.mark.parametrize("width", [1, 2.3])
def test_smooth_box_good(simulated_spectra, width):
"""
    Test Box1DKernel smoothing with correct parameters.
Width values need to be a number greater than 0.
"""
# Create the original spectrum
spec1 = simulated_spectra.s1_um_mJy_e1
flux_original = spec1.flux
# Calculate the smoothed flux using Astropy
box_kernel = convolution.Box1DKernel(width)
flux_smoothed_astropy = convolution.convolve(flux_original, box_kernel)
# Calculate the box smoothed
spec1_smoothed = box_smooth(spec1, width)
compare_flux(spec1_smoothed.flux.value, flux_smoothed_astropy, flux_original.value)
# Check the input and output units
assert spec1.wavelength.unit == spec1_smoothed.wavelength.unit
assert spec1.flux.unit == spec1_smoothed.flux.unit
@pytest.mark.parametrize("width", [-1, 0, 'a'])
def test_smooth_box_bad(simulated_spectra, width):
"""
    Test Box1DKernel smoothing with incorrect parameters.
Width values need to be a number greater than 0.
"""
# Create the spectrum
spec1 = simulated_spectra.s1_um_mJy_e1
# Test bad input parameters
with pytest.raises(ValueError):
box_smooth(spec1, width)
@pytest.mark.parametrize("stddev", [1, 2.3])
def test_smooth_gaussian_good(simulated_spectra, stddev):
"""
    Test Gaussian1DKernel smoothing with correct parameters.
Standard deviation values need to be a number greater than 0.
"""
# Create the spectrum
spec1 = simulated_spectra.s1_um_mJy_e1
flux_original = spec1.flux
# Calculate the smoothed flux using Astropy
gaussian_kernel = convolution.Gaussian1DKernel(stddev)
flux_smoothed_astropy = convolution.convolve(flux_original, gaussian_kernel)
# Test gaussian smoothing
spec1_smoothed = gaussian_smooth(spec1, stddev)
compare_flux(spec1_smoothed.flux.value, flux_smoothed_astropy, flux_original.value, rtol=0.02)
# Check the input and output units
assert spec1.wavelength.unit == spec1_smoothed.wavelength.unit
assert spec1.flux.unit == spec1_smoothed.flux.unit
@pytest.mark.parametrize("stddev", [-1, 0, 'a'])
def test_smooth_gaussian_bad(simulated_spectra, stddev):
"""
    Test Gaussian1DKernel smoothing with incorrect parameters.
Standard deviation values need to be a number greater than 0.
"""
# Create the spectrum
spec1 = simulated_spectra.s1_um_mJy_e1
    # Test bad input parameters
with pytest.raises(ValueError):
gaussian_smooth(spec1, stddev)
@pytest.mark.parametrize("stddev", [1, 2.3])
def test_smooth_trapezoid_good(simulated_spectra, stddev):
"""
    Test Trapezoid1DKernel smoothing with correct parameters.
Standard deviation values need to be a number greater than 0.
"""
# Create the spectrum
spec1 = simulated_spectra.s1_um_mJy_e1
flux_original = spec1.flux
# Create the flux_smoothed which is what we want to compare to
trapezoid_kernel = convolution.Trapezoid1DKernel(stddev)
flux_smoothed_astropy = convolution.convolve(flux_original, trapezoid_kernel)
# Test trapezoid smoothing
spec1_smoothed = trapezoid_smooth(spec1, stddev)
compare_flux(spec1_smoothed.flux.value, flux_smoothed_astropy, flux_original.value)
# Check the input and output units
assert spec1.wavelength.unit == spec1_smoothed.wavelength.unit
assert spec1.flux.unit == spec1_smoothed.flux.unit
@pytest.mark.parametrize("stddev", [-1, 0, 'a'])
def test_smooth_trapezoid_bad(simulated_spectra, stddev):
"""
    Test Trapezoid1DKernel smoothing with incorrect parameters.
Standard deviation values need to be a number greater than 0.
"""
# Create the spectrum
spec1 = simulated_spectra.s1_um_mJy_e1
# Test bad parameters
with pytest.raises(ValueError):
trapezoid_smooth(spec1, stddev)
@pytest.mark.parametrize("width", [1, 3, 9])
def test_smooth_median_good(simulated_spectra, width):
"""
    Test Median smoothing with correct parameters.
Width values need to be a number greater than 0.
"""
# Create the spectrum
spec1 = simulated_spectra.s1_um_mJy_e1
flux_original = spec1.flux
# Create the flux_smoothed which is what we want to compare to
flux_smoothed_astropy = medfilt(flux_original, width)
# Test median smoothing
spec1_smoothed = median_smooth(spec1, width)
compare_flux(spec1_smoothed.flux.value, flux_smoothed_astropy, flux_original.value, rtol=0.15)
# Check the input and output units
assert spec1.wavelength.unit == spec1_smoothed.wavelength.unit
assert spec1.flux.unit == spec1_smoothed.flux.unit
@pytest.mark.parametrize("width", [-1, 0, 'a'])
def test_smooth_median_bad(simulated_spectra, width):
"""
    Test Median smoothing with incorrect parameters.
Width values need to be a number greater than 0.
"""
# Create the spectrum
spec1 = simulated_spectra.s1_um_mJy_e1
# Test bad parameters
with pytest.raises(ValueError):
median_smooth(spec1, width)
| [((2105, 2147), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""width"""', '[1, 2.3]'], {}), "('width', [1, 2.3])\n", (2128, 2147), False, 'import pytest\n'), ((2941, 2987), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""width"""', "[-1, 0, 'a']"], {}), "('width', [-1, 0, 'a'])\n", (2964, 2987), False, 'import pytest\n'), ((3343, 3386), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""stddev"""', '[1, 2.3]'], {}), "('stddev', [1, 2.3])\n", (3366, 3386), False, 'import pytest\n'), ((4227, 4274), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""stddev"""', "[-1, 0, 'a']"], {}), "('stddev', [-1, 0, 'a'])\n", (4250, 4274), False, 'import pytest\n'), ((4661, 4704), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""stddev"""', '[1, 2.3]'], {}), "('stddev', [1, 2.3])\n", (4684, 4704), False, 'import pytest\n'), ((5560, 5607), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""stddev"""', "[-1, 0, 'a']"], {}), "('stddev', [-1, 0, 'a'])\n", (5583, 5607), False, 'import pytest\n'), ((5990, 6033), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""width"""', '[1, 3, 9]'], {}), "('width', [1, 3, 9])\n", (6013, 6033), False, 'import pytest\n'), ((6780, 6826), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""width"""', "[-1, 0, 'a']"], {}), "('width', [-1, 0, 'a'])\n", (6803, 6826), False, 'import pytest\n'), ((1191, 1230), 'numpy.allclose', 'np.allclose', (['flux_smooth1', 'flux_smooth2'], {}), '(flux_smooth1, flux_smooth2)\n', (1202, 1230), True, 'import numpy as np\n'), ((1689, 1720), 'numpy.array', 'np.array', (['[0.5, 1, 2, 0.5, 0.2]'], {}), '([0.5, 1, 2, 0.5, 0.2])\n', (1697, 1720), True, 'import numpy as np\n'), ((1797, 1835), 'astropy.convolution.CustomKernel', 'convolution.CustomKernel', (['numpy_kernel'], {}), '(numpy_kernel)\n', (1821, 1835), False, 'from astropy import convolution\n'), ((1864, 1914), 'astropy.convolution.convolve', 'convolution.convolve', (['flux_original', 'custom_kernel'], {}), '(flux_original, custom_kernel)\n', (1884, 1914), False, 'from astropy import convolution\n'), ((2502, 2532), 'astropy.convolution.Box1DKernel', 'convolution.Box1DKernel', (['width'], {}), '(width)\n', (2525, 2532), False, 'from astropy import convolution\n'), ((2561, 2608), 'astropy.convolution.convolve', 'convolution.convolve', (['flux_original', 'box_kernel'], {}), '(flux_original, box_kernel)\n', (2581, 2608), False, 'from astropy import convolution\n'), ((3762, 3798), 'astropy.convolution.Gaussian1DKernel', 'convolution.Gaussian1DKernel', (['stddev'], {}), '(stddev)\n', (3790, 3798), False, 'from astropy import convolution\n'), ((3827, 3879), 'astropy.convolution.convolve', 'convolution.convolve', (['flux_original', 'gaussian_kernel'], {}), '(flux_original, gaussian_kernel)\n', (3847, 3879), False, 'from astropy import convolution\n'), ((5102, 5139), 'astropy.convolution.Trapezoid1DKernel', 'convolution.Trapezoid1DKernel', (['stddev'], {}), '(stddev)\n', (5131, 5139), False, 'from astropy import convolution\n'), ((5168, 5221), 'astropy.convolution.convolve', 'convolution.convolve', (['flux_original', 'trapezoid_kernel'], {}), '(flux_original, trapezoid_kernel)\n', (5188, 5221), False, 'from astropy import convolution\n'), ((6408, 6437), 'scipy.signal.medfilt', 'medfilt', (['flux_original', 'width'], {}), '(flux_original, width)\n', (6415, 6437), False, 'from scipy.signal import medfilt\n'), ((1755, 1775), 'numpy.sum', 'np.sum', (['numpy_kernel'], {}), '(numpy_kernel)\n', (1761, 1775), True, 'import numpy as 
np\n'), ((3280, 3305), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3293, 3305), False, 'import pytest\n'), ((4592, 4617), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (4605, 4617), False, 'import pytest\n'), ((5920, 5945), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (5933, 5945), False, 'import pytest\n'), ((7111, 7136), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (7124, 7136), False, 'import pytest\n')] |
buulikduong/1d_sgl_solver | modules/interpolator.py | 03ce0b362d45acbbd3bb35e7b604ba97982eea92 | """Module interpolating mathematical functions out of support points"""
from scipy.interpolate import interp1d, lagrange, CubicSpline
def interpolator(x_sup, y_sup, method):
"""Interpolates a mathematical function from a given set of
points using either linear, polynomial or cubic spline for the
interpolation.
Args:
x_sup (list): x-coordinates of the function
y_sup (list): y-coordinates of the function
method (string): name of the interpolation method to be used
Returns:
intfunc: interpolated function
"""
if method == "linear":
intfunc = interp1d(x_sup, y_sup, kind="linear")
return intfunc
elif method == "polynomial":
intfunc = lagrange(x_sup, y_sup)
return intfunc
elif method == "cspline":
intfunc = CubicSpline(x_sup, y_sup, bc_type="natural")
return intfunc
return None
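# Illustrative usage (arbitrary example values):
#   x_sup = [0.0, 0.5, 1.0, 1.5, 2.0]
#   f = interpolator(x_sup, [x ** 2 for x in x_sup], "cspline")
#   f(0.75)  # evaluate the interpolant between support points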
| [((618, 655), 'scipy.interpolate.interp1d', 'interp1d', (['x_sup', 'y_sup'], {'kind': '"""linear"""'}), "(x_sup, y_sup, kind='linear')\n", (626, 655), False, 'from scipy.interpolate import interp1d, lagrange, CubicSpline\n'), ((730, 752), 'scipy.interpolate.lagrange', 'lagrange', (['x_sup', 'y_sup'], {}), '(x_sup, y_sup)\n', (738, 752), False, 'from scipy.interpolate import interp1d, lagrange, CubicSpline\n'), ((824, 868), 'scipy.interpolate.CubicSpline', 'CubicSpline', (['x_sup', 'y_sup'], {'bc_type': '"""natural"""'}), "(x_sup, y_sup, bc_type='natural')\n", (835, 868), False, 'from scipy.interpolate import interp1d, lagrange, CubicSpline\n')] |
chentaoz/frappe | frappe/patches/v13_0/remove_web_view.py | ee3c4943bf6177ad3b410cdb0d802af486751a65 | import frappe
def execute():
frappe.delete_doc_if_exists("DocType", "Web View")
frappe.delete_doc_if_exists("DocType", "Web View Component")
frappe.delete_doc_if_exists("DocType", "CSS Class") | [((31, 81), 'frappe.delete_doc_if_exists', 'frappe.delete_doc_if_exists', (['"""DocType"""', '"""Web View"""'], {}), "('DocType', 'Web View')\n", (58, 81), False, 'import frappe\n'), ((83, 143), 'frappe.delete_doc_if_exists', 'frappe.delete_doc_if_exists', (['"""DocType"""', '"""Web View Component"""'], {}), "('DocType', 'Web View Component')\n", (110, 143), False, 'import frappe\n'), ((145, 196), 'frappe.delete_doc_if_exists', 'frappe.delete_doc_if_exists', (['"""DocType"""', '"""CSS Class"""'], {}), "('DocType', 'CSS Class')\n", (172, 196), False, 'import frappe\n')] |
cpratim/DSA-Research-Paper | games.py | ebb856ef62f8a04aa72380e39afdde958eed529a | import json
import matplotlib.pyplot as plt
from pprint import pprint
import numpy as np
from scipy.stats import linregress
from util.stats import *
with open('data/game_stats.json', 'r') as f:
df = json.load(f)
X, y = [], []
for match, stats in df.items():
home, away = stats['home'], stats['away']
    if home['mp'] != '240' or away['mp'] != '240': continue
try:
ft_dif = float(home['fta']) - float(away['fta'])
pt_dif = float(home['pts']) - float(away['pts'])
if abs(pt_dif) > 10: continue
except:
continue
X.append(ft_dif)
y.append(pt_dif)
c = 0
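# Count the games in which the free-throw-attempt differential and the point
# differential have the same sign.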
for f, p in zip(X, y):
if f * p > 0:
c += 1
print(c / len(X))
slope, intercept, r, p, std = linregress(X, y)
f = lambda x: x*slope + intercept
fit_y = [f(min(X)), f(max(X))]
plt.xlabel('Free Throw Attempts')
plt.ylabel('Point Differential')
plt.title('FTA vs Point Differential')
print(correlation(X, y))
plt.plot([min(X), max(X)], fit_y, color = 'red')
plt.scatter(X, y)
plt.show() | [((654, 670), 'scipy.stats.linregress', 'linregress', (['X', 'y'], {}), '(X, y)\n', (664, 670), False, 'from scipy.stats import linregress\n'), ((737, 770), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Free Throw Attempts"""'], {}), "('Free Throw Attempts')\n", (747, 770), True, 'import matplotlib.pyplot as plt\n'), ((771, 803), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Point Differential"""'], {}), "('Point Differential')\n", (781, 803), True, 'import matplotlib.pyplot as plt\n'), ((804, 842), 'matplotlib.pyplot.title', 'plt.title', (['"""FTA vs Point Differential"""'], {}), "('FTA vs Point Differential')\n", (813, 842), True, 'import matplotlib.pyplot as plt\n'), ((917, 934), 'matplotlib.pyplot.scatter', 'plt.scatter', (['X', 'y'], {}), '(X, y)\n', (928, 934), True, 'import matplotlib.pyplot as plt\n'), ((936, 946), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (944, 946), True, 'import matplotlib.pyplot as plt\n'), ((201, 213), 'json.load', 'json.load', (['f'], {}), '(f)\n', (210, 213), False, 'import json\n')] |
gycggd/leaf-classification | src/generate_data.py | b37dd4a6a262562c454038218c1472329e54128b | import os
import numpy as np
import pandas as pd
import tensorflow as tf
from keras.preprocessing.image import ImageDataGenerator
from keras.preprocessing.image import img_to_array, load_img
from keras.utils.np_utils import to_categorical
from sklearn.model_selection import StratifiedShuffleSplit
from sklearn.preprocessing import LabelEncoder, StandardScaler
def load_numeric_training(standardize=True):
data = pd.read_csv('../train.csv')
ID = data.pop('id')
y = data.pop('species')
y = LabelEncoder().fit(y).transform(y)
X = StandardScaler().fit(data).transform(data) if standardize else data.values
return ID.values, X, y
def load_numeric_test(standardize=True):
data = pd.read_csv('../test.csv')
ID = data.pop('id')
test = StandardScaler().fit(data).transform(data) if standardize else data.values
return ID.values, test
def resize_img(img, max_dim=96):
max_axis = np.argmax(img.size)
scale = max_dim / img.size[max_axis]
return img.resize((int(img.size[0] * scale), int(img.size[1] * scale)))
def load_img_data(ids, max_dim=96, center=True):
X = np.empty((len(ids), max_dim, max_dim, 1))
for i, id in enumerate(ids):
img = load_img('../images/{}.jpg'.format(id), grayscale=True)
img = resize_img(img, max_dim=max_dim)
x = img_to_array(img)
h, w = x.shape[:2]
if center:
h1 = (max_dim - h) >> 1
h2 = h1 + h
w1 = (max_dim - w) >> 1
w2 = w1 + w
else:
h1, h2, w1, w2 = 0, h, 0, w
X[i][h1:h2, w1:w2][:] = x
return np.around(X / 255)
def load_train_data(split=0.9, random_state=7):
ID, X_num_train, y = load_numeric_training()
X_img_train = load_img_data(ID)
sss = StratifiedShuffleSplit(n_splits=1, train_size=split, test_size=1 - split, random_state=random_state)
train_idx, val_idx = next(sss.split(X_num_train, y))
ID_tr, X_num_tr, X_img_tr, y_tr = ID[train_idx], X_num_train[train_idx], X_img_train[train_idx], y[train_idx]
ID_val, X_num_val, X_img_val, y_val = ID[val_idx], X_num_train[val_idx], X_img_train[val_idx], y[val_idx]
return (ID_tr, X_num_tr, X_img_tr, y_tr), (ID_val, X_num_val, X_img_val, y_val)
def load_test_data():
ID, X_num_test = load_numeric_test()
X_img_test = load_img_data(ID)
return ID, X_num_test, X_img_test
print('Loading train data ...')
(ID_train, X_num_tr, X_img_tr, y_tr), (ID_val, X_num_val, X_img_val, y_val) = load_train_data()
# Prepare ID-to-label and ID-to-numerical dictionary
ID_y_dic, ID_num_dic = {}, {}
for i in range(len(ID_train)):
ID_y_dic[ID_train[i]] = y_tr[i]
ID_num_dic[ID_train[i]] = X_num_tr[i, :]
print('Loading test data ...')
ID_test, X_num_test, X_img_test = load_test_data()
# Convert label to categorical/one-hot
ID_train, y_tr, y_val = to_categorical(ID_train), to_categorical(y_tr), to_categorical((y_val))
def _bytes_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def _int64_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def _float32_feature(value):
return tf.train.Feature(float_list=tf.train.FloatList(value=value))
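# The three helpers above wrap raw bytes / ints / floats in tf.train.Feature
# protos so they can be packed into the tf.train.Example records written below.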
def write_val_data():
val_data_path = '../tfrecords/val_data_1.tfrecords'
if os.path.exists(val_data_path):
print('Warning: old file exists, removed.')
os.remove(val_data_path)
val_image, val_num, val_label = X_img_val.astype(np.bool), X_num_val.astype(np.float64), y_val.astype(np.bool)
print(val_image.shape, val_num.shape, val_label.shape)
val_writer = tf.python_io.TFRecordWriter(val_data_path)
print('Writing data into tfrecord ...')
for i in range(len(val_image)):
image, num, label = val_image[i], val_num[i], val_label[i]
feature = {'image': _bytes_feature(image.tostring()),
'num': _bytes_feature(num.tostring()),
'label': _bytes_feature(label.tostring())}
example = tf.train.Example(features=tf.train.Features(feature=feature))
val_writer.write(example.SerializeToString())
print('Done!')
def write_train_data():
imgen = ImageDataGenerator(rotation_range=20, zoom_range=0.2, horizontal_flip=True,
vertical_flip=True, fill_mode='nearest')
imgen_train = imgen.flow(X_img_tr, ID_train, batch_size=32, seed=7)
print('Generating augmented images')
all_images = []
all_ID = []
p = True
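    # 28 batches of 32 augmented images is roughly one pass over the ~891
    # training samples; the loop repeats this for 200 epochs of augmentation.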
for i in range(28 * 200):
print('Generating augmented images for epoch {}, batch {}'.format(i // 28, i % 28))
X, ID = imgen_train.next()
all_images.append(X)
all_ID.append(np.argmax(ID, axis=1))
all_images = np.concatenate(all_images).astype(np.bool)
all_ID = np.concatenate(all_ID)
all_y = np.zeros(all_ID.shape)
all_nums = np.zeros((all_ID.shape[0], X_num_tr.shape[1]))
for i in range(len(all_ID)):
all_nums[i, :] = ID_num_dic[all_ID[i]]
all_y[i] = ID_y_dic[all_ID[i]]
all_y = to_categorical(all_y).astype(np.bool)
print('Data shapes:')
print('Image:', all_images.shape)
print('Label:', all_y.shape)
print('Numerical:', all_nums.shape)
train_data_path = '../tfrecords/train_data_1.tfrecords'
if os.path.exists(train_data_path):
print('Warning: old file exists, removed.')
os.remove(train_data_path)
# compression = tf.python_io.TFRecordCompressionType.GZIP
# train_writer = tf.python_io.TFRecordWriter(train_data_path, options=tf.python_io.TFRecordOptions(compression))
train_writer = tf.python_io.TFRecordWriter(train_data_path)
print('Writing data into tfrecord ...')
for i in range(len(all_images)):
if i % 891 == 0:
print('Writing {} th epoch data ...'.format(i // 891))
image, num, label = all_images[i], all_nums[i], all_y[i]
feature = {'image': _bytes_feature(image.tostring()),
'num': _bytes_feature(num.tostring()),
'label': _bytes_feature(label.tostring())}
example = tf.train.Example(features=tf.train.Features(feature=feature))
train_writer.write(example.SerializeToString())
print('Done!')
write_val_data()
| [((419, 446), 'pandas.read_csv', 'pd.read_csv', (['"""../train.csv"""'], {}), "('../train.csv')\n", (430, 446), True, 'import pandas as pd\n'), ((706, 732), 'pandas.read_csv', 'pd.read_csv', (['"""../test.csv"""'], {}), "('../test.csv')\n", (717, 732), True, 'import pandas as pd\n'), ((920, 939), 'numpy.argmax', 'np.argmax', (['img.size'], {}), '(img.size)\n', (929, 939), True, 'import numpy as np\n'), ((1603, 1621), 'numpy.around', 'np.around', (['(X / 255)'], {}), '(X / 255)\n', (1612, 1621), True, 'import numpy as np\n'), ((1767, 1871), 'sklearn.model_selection.StratifiedShuffleSplit', 'StratifiedShuffleSplit', ([], {'n_splits': '(1)', 'train_size': 'split', 'test_size': '(1 - split)', 'random_state': 'random_state'}), '(n_splits=1, train_size=split, test_size=1 - split,\n random_state=random_state)\n', (1789, 1871), False, 'from sklearn.model_selection import StratifiedShuffleSplit\n'), ((2844, 2868), 'keras.utils.np_utils.to_categorical', 'to_categorical', (['ID_train'], {}), '(ID_train)\n', (2858, 2868), False, 'from keras.utils.np_utils import to_categorical\n'), ((2870, 2890), 'keras.utils.np_utils.to_categorical', 'to_categorical', (['y_tr'], {}), '(y_tr)\n', (2884, 2890), False, 'from keras.utils.np_utils import to_categorical\n'), ((2892, 2913), 'keras.utils.np_utils.to_categorical', 'to_categorical', (['y_val'], {}), '(y_val)\n', (2906, 2913), False, 'from keras.utils.np_utils import to_categorical\n'), ((3312, 3341), 'os.path.exists', 'os.path.exists', (['val_data_path'], {}), '(val_data_path)\n', (3326, 3341), False, 'import os\n'), ((3620, 3662), 'tensorflow.python_io.TFRecordWriter', 'tf.python_io.TFRecordWriter', (['val_data_path'], {}), '(val_data_path)\n', (3647, 3662), True, 'import tensorflow as tf\n'), ((4184, 4304), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rotation_range': '(20)', 'zoom_range': '(0.2)', 'horizontal_flip': '(True)', 'vertical_flip': '(True)', 'fill_mode': '"""nearest"""'}), "(rotation_range=20, zoom_range=0.2, horizontal_flip=True,\n vertical_flip=True, fill_mode='nearest')\n", (4202, 4304), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((4800, 4822), 'numpy.concatenate', 'np.concatenate', (['all_ID'], {}), '(all_ID)\n', (4814, 4822), True, 'import numpy as np\n'), ((4835, 4857), 'numpy.zeros', 'np.zeros', (['all_ID.shape'], {}), '(all_ID.shape)\n', (4843, 4857), True, 'import numpy as np\n'), ((4873, 4919), 'numpy.zeros', 'np.zeros', (['(all_ID.shape[0], X_num_tr.shape[1])'], {}), '((all_ID.shape[0], X_num_tr.shape[1]))\n', (4881, 4919), True, 'import numpy as np\n'), ((5295, 5326), 'os.path.exists', 'os.path.exists', (['train_data_path'], {}), '(train_data_path)\n', (5309, 5326), False, 'import os\n'), ((5614, 5658), 'tensorflow.python_io.TFRecordWriter', 'tf.python_io.TFRecordWriter', (['train_data_path'], {}), '(train_data_path)\n', (5641, 5658), True, 'import tensorflow as tf\n'), ((1320, 1337), 'keras.preprocessing.image.img_to_array', 'img_to_array', (['img'], {}), '(img)\n', (1332, 1337), False, 'from keras.preprocessing.image import img_to_array, load_img\n'), ((3403, 3427), 'os.remove', 'os.remove', (['val_data_path'], {}), '(val_data_path)\n', (3412, 3427), False, 'import os\n'), ((5388, 5414), 'os.remove', 'os.remove', (['train_data_path'], {}), '(train_data_path)\n', (5397, 5414), False, 'import os\n'), ((2984, 3017), 'tensorflow.train.BytesList', 'tf.train.BytesList', ([], {'value': '[value]'}), '(value=[value])\n', (3002, 3017), True, 'import tensorflow as tf\n'), ((3087, 
3120), 'tensorflow.train.Int64List', 'tf.train.Int64List', ([], {'value': '[value]'}), '(value=[value])\n', (3105, 3120), True, 'import tensorflow as tf\n'), ((3192, 3223), 'tensorflow.train.FloatList', 'tf.train.FloatList', ([], {'value': 'value'}), '(value=value)\n', (3210, 3223), True, 'import tensorflow as tf\n'), ((4703, 4724), 'numpy.argmax', 'np.argmax', (['ID'], {'axis': '(1)'}), '(ID, axis=1)\n', (4712, 4724), True, 'import numpy as np\n'), ((4744, 4770), 'numpy.concatenate', 'np.concatenate', (['all_images'], {}), '(all_images)\n', (4758, 4770), True, 'import numpy as np\n'), ((5051, 5072), 'keras.utils.np_utils.to_categorical', 'to_categorical', (['all_y'], {}), '(all_y)\n', (5065, 5072), False, 'from keras.utils.np_utils import to_categorical\n'), ((4036, 4070), 'tensorflow.train.Features', 'tf.train.Features', ([], {'feature': 'feature'}), '(feature=feature)\n', (4053, 4070), True, 'import tensorflow as tf\n'), ((6124, 6158), 'tensorflow.train.Features', 'tf.train.Features', ([], {'feature': 'feature'}), '(feature=feature)\n', (6141, 6158), True, 'import tensorflow as tf\n'), ((507, 521), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (519, 521), False, 'from sklearn.preprocessing import LabelEncoder, StandardScaler\n'), ((550, 566), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (564, 566), False, 'from sklearn.preprocessing import LabelEncoder, StandardScaler\n'), ((768, 784), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (782, 784), False, 'from sklearn.preprocessing import LabelEncoder, StandardScaler\n')] |
ErickSimoes/URI-Online-Judge | 2650-construindo-muralhas.py | 7e6f141db2647b1d0d69951b064bd95b0ce4ba1a | # -*- coding: utf-8 -*-
n, w = map(int, input().split())
for _ in range(n):
entrada = input()
last_space = entrada.rfind(' ')
if int(entrada[last_space:]) > w:
print(entrada[:last_space])
| [] |
yarix/m2cgen | tests/assemblers/test_ensemble.py | f1aa01e4c70a6d1a8893e27bfbe3c36fcb1e8546 | from sklearn import ensemble
from m2cgen import assemblers, ast
from tests import utils
def test_single_condition():
estimator = ensemble.RandomForestRegressor(n_estimators=2, random_state=1)
estimator.fit([[1], [2]], [1, 2])
assembler = assemblers.RandomForestModelAssembler(estimator)
actual = assembler.assemble()
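    # For a two-tree forest the assembled expression is the mean of the two
    # tree outputs, i.e. (tree_1 + tree_2) * 0.5, which the expected AST mirrors.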
expected = ast.BinNumExpr(
ast.BinNumExpr(
ast.NumVal(1.0),
ast.IfExpr(
ast.CompExpr(
ast.FeatureRef(0),
ast.NumVal(1.5),
ast.CompOpType.LTE),
ast.NumVal(1.0),
ast.NumVal(2.0)),
ast.BinNumOpType.ADD),
ast.NumVal(0.5),
ast.BinNumOpType.MUL)
assert utils.cmp_exprs(actual, expected)
def test_two_conditions():
estimator = ensemble.RandomForestRegressor(n_estimators=2, random_state=13)
estimator.fit([[1], [2], [3]], [1, 2, 3])
assembler = assemblers.RandomForestModelAssembler(estimator)
actual = assembler.assemble()
expected = ast.BinNumExpr(
ast.BinNumExpr(
ast.IfExpr(
ast.CompExpr(
ast.FeatureRef(0),
ast.NumVal(1.5),
ast.CompOpType.LTE),
ast.NumVal(1.0),
ast.NumVal(2.0)),
ast.IfExpr(
ast.CompExpr(
ast.FeatureRef(0),
ast.NumVal(2.5),
ast.CompOpType.LTE),
ast.NumVal(2.0),
ast.NumVal(3.0)),
ast.BinNumOpType.ADD),
ast.NumVal(0.5),
ast.BinNumOpType.MUL)
assert utils.cmp_exprs(actual, expected)
def test_multi_class():
estimator = ensemble.RandomForestClassifier(
n_estimators=2, random_state=13)
estimator.fit([[1], [2], [3]], [1, -1, 1])
assembler = assemblers.RandomForestModelAssembler(estimator)
actual = assembler.assemble()
expected = ast.BinVectorNumExpr(
ast.BinVectorExpr(
ast.IfExpr(
ast.CompExpr(
ast.FeatureRef(0),
ast.NumVal(1.5),
ast.CompOpType.LTE),
ast.VectorVal([
ast.NumVal(0.0),
ast.NumVal(1.0)]),
ast.VectorVal([
ast.NumVal(1.0),
ast.NumVal(0.0)])),
ast.IfExpr(
ast.CompExpr(
ast.FeatureRef(0),
ast.NumVal(2.5),
ast.CompOpType.LTE),
ast.VectorVal([
ast.NumVal(1.0),
ast.NumVal(0.0)]),
ast.VectorVal([
ast.NumVal(0.0),
ast.NumVal(1.0)])),
ast.BinNumOpType.ADD),
ast.NumVal(0.5),
ast.BinNumOpType.MUL)
assert utils.cmp_exprs(actual, expected)
| [((136, 198), 'sklearn.ensemble.RandomForestRegressor', 'ensemble.RandomForestRegressor', ([], {'n_estimators': '(2)', 'random_state': '(1)'}), '(n_estimators=2, random_state=1)\n', (166, 198), False, 'from sklearn import ensemble\n'), ((255, 303), 'm2cgen.assemblers.RandomForestModelAssembler', 'assemblers.RandomForestModelAssembler', (['estimator'], {}), '(estimator)\n', (292, 303), False, 'from m2cgen import assemblers, ast\n'), ((763, 796), 'tests.utils.cmp_exprs', 'utils.cmp_exprs', (['actual', 'expected'], {}), '(actual, expected)\n', (778, 796), False, 'from tests import utils\n'), ((842, 905), 'sklearn.ensemble.RandomForestRegressor', 'ensemble.RandomForestRegressor', ([], {'n_estimators': '(2)', 'random_state': '(13)'}), '(n_estimators=2, random_state=13)\n', (872, 905), False, 'from sklearn import ensemble\n'), ((970, 1018), 'm2cgen.assemblers.RandomForestModelAssembler', 'assemblers.RandomForestModelAssembler', (['estimator'], {}), '(estimator)\n', (1007, 1018), False, 'from m2cgen import assemblers, ast\n'), ((1687, 1720), 'tests.utils.cmp_exprs', 'utils.cmp_exprs', (['actual', 'expected'], {}), '(actual, expected)\n', (1702, 1720), False, 'from tests import utils\n'), ((1763, 1827), 'sklearn.ensemble.RandomForestClassifier', 'ensemble.RandomForestClassifier', ([], {'n_estimators': '(2)', 'random_state': '(13)'}), '(n_estimators=2, random_state=13)\n', (1794, 1827), False, 'from sklearn import ensemble\n'), ((1902, 1950), 'm2cgen.assemblers.RandomForestModelAssembler', 'assemblers.RandomForestModelAssembler', (['estimator'], {}), '(estimator)\n', (1939, 1950), False, 'from m2cgen import assemblers, ast\n'), ((2928, 2961), 'tests.utils.cmp_exprs', 'utils.cmp_exprs', (['actual', 'expected'], {}), '(actual, expected)\n', (2943, 2961), False, 'from tests import utils\n'), ((704, 719), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(0.5)'], {}), '(0.5)\n', (714, 719), False, 'from m2cgen import assemblers, ast\n'), ((1628, 1643), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(0.5)'], {}), '(0.5)\n', (1638, 1643), False, 'from m2cgen import assemblers, ast\n'), ((2869, 2884), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(0.5)'], {}), '(0.5)\n', (2879, 2884), False, 'from m2cgen import assemblers, ast\n'), ((406, 421), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(1.0)'], {}), '(1.0)\n', (416, 421), False, 'from m2cgen import assemblers, ast\n'), ((610, 625), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(1.0)'], {}), '(1.0)\n', (620, 625), False, 'from m2cgen import assemblers, ast\n'), ((643, 658), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(2.0)'], {}), '(2.0)\n', (653, 658), False, 'from m2cgen import assemblers, ast\n'), ((1296, 1311), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(1.0)'], {}), '(1.0)\n', (1306, 1311), False, 'from m2cgen import assemblers, ast\n'), ((1329, 1344), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(2.0)'], {}), '(2.0)\n', (1339, 1344), False, 'from m2cgen import assemblers, ast\n'), ((1534, 1549), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(2.0)'], {}), '(2.0)\n', (1544, 1549), False, 'from m2cgen import assemblers, ast\n'), ((1567, 1582), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(3.0)'], {}), '(3.0)\n', (1577, 1582), False, 'from m2cgen import assemblers, ast\n'), ((497, 514), 'm2cgen.ast.FeatureRef', 'ast.FeatureRef', (['(0)'], {}), '(0)\n', (511, 514), False, 'from m2cgen import assemblers, ast\n'), ((536, 551), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(1.5)'], {}), '(1.5)\n', (546, 551), False, 'from m2cgen import assemblers, ast\n'), ((1183, 1200), 'm2cgen.ast.FeatureRef', 'ast.FeatureRef', (['(0)'], {}), '(0)\n', 
(1197, 1200), False, 'from m2cgen import assemblers, ast\n'), ((1222, 1237), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(1.5)'], {}), '(1.5)\n', (1232, 1237), False, 'from m2cgen import assemblers, ast\n'), ((1421, 1438), 'm2cgen.ast.FeatureRef', 'ast.FeatureRef', (['(0)'], {}), '(0)\n', (1435, 1438), False, 'from m2cgen import assemblers, ast\n'), ((1460, 1475), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(2.5)'], {}), '(2.5)\n', (1470, 1475), False, 'from m2cgen import assemblers, ast\n'), ((2124, 2141), 'm2cgen.ast.FeatureRef', 'ast.FeatureRef', (['(0)'], {}), '(0)\n', (2138, 2141), False, 'from m2cgen import assemblers, ast\n'), ((2163, 2178), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(1.5)'], {}), '(1.5)\n', (2173, 2178), False, 'from m2cgen import assemblers, ast\n'), ((2512, 2529), 'm2cgen.ast.FeatureRef', 'ast.FeatureRef', (['(0)'], {}), '(0)\n', (2526, 2529), False, 'from m2cgen import assemblers, ast\n'), ((2551, 2566), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(2.5)'], {}), '(2.5)\n', (2561, 2566), False, 'from m2cgen import assemblers, ast\n'), ((2273, 2288), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(0.0)'], {}), '(0.0)\n', (2283, 2288), False, 'from m2cgen import assemblers, ast\n'), ((2310, 2325), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(1.0)'], {}), '(1.0)\n', (2320, 2325), False, 'from m2cgen import assemblers, ast\n'), ((2381, 2396), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(1.0)'], {}), '(1.0)\n', (2391, 2396), False, 'from m2cgen import assemblers, ast\n'), ((2418, 2433), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(0.0)'], {}), '(0.0)\n', (2428, 2433), False, 'from m2cgen import assemblers, ast\n'), ((2661, 2676), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(1.0)'], {}), '(1.0)\n', (2671, 2676), False, 'from m2cgen import assemblers, ast\n'), ((2698, 2713), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(0.0)'], {}), '(0.0)\n', (2708, 2713), False, 'from m2cgen import assemblers, ast\n'), ((2769, 2784), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(0.0)'], {}), '(0.0)\n', (2779, 2784), False, 'from m2cgen import assemblers, ast\n'), ((2806, 2821), 'm2cgen.ast.NumVal', 'ast.NumVal', (['(1.0)'], {}), '(1.0)\n', (2816, 2821), False, 'from m2cgen import assemblers, ast\n')] |
Parquery/pynumenc | setup.py | f14abab40b7d08c55824bf1da5b2a7026c0a7282 | """A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
import os
from setuptools import setup, find_packages, Extension
import pynumenc_meta
# pylint: disable=redefined-builtin
here = os.path.abspath(os.path.dirname(__file__)) # pylint: disable=invalid-name
with open(os.path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read() # pylint: disable=invalid-name
setup(
name=pynumenc_meta.__title__,
version=pynumenc_meta.__version__,
description=pynumenc_meta.__description__,
long_description=long_description,
url=pynumenc_meta.__url__,
author=pynumenc_meta.__author__,
author_email=pynumenc_meta.__author_email__,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
license='License :: OSI Approved :: MIT License',
keywords='C++ encode decode bytes encoding decoding sorted',
packages=find_packages(exclude=['docs', 'tests']),
install_requires=[],
extras_require={
'dev': [
# yapf: disable,
'docutils>=0.14,<1',
'mypy==0.641',
'hypothesis==3.82.1',
'pygments>=2.2.0,<3',
'pydocstyle>=3.0.0,<4',
'pylint==2.1.1',
'yapf==0.24.0'
# yapf: enable
]
},
ext_modules=[
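        # The encoder/decoder is implemented in C++ and compiled into the
        # "numenc" extension module from the sources listed here.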
Extension('numenc', sources=['numenc-cpp/encoder_decoder.cpp'])
],
scripts=['bin/pynumenc'],
py_modules=['pynumenc_meta'],
package_data={'pynumenc': ['py.typed']},
data_files=[('.', ['LICENSE.txt', 'README.rst'])])
| [((290, 315), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (305, 315), False, 'import os\n'), ((360, 392), 'os.path.join', 'os.path.join', (['here', '"""README.rst"""'], {}), "(here, 'README.rst')\n", (372, 392), False, 'import os\n'), ((1175, 1215), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['docs', 'tests']"}), "(exclude=['docs', 'tests'])\n", (1188, 1215), False, 'from setuptools import setup, find_packages, Extension\n'), ((1599, 1662), 'setuptools.Extension', 'Extension', (['"""numenc"""'], {'sources': "['numenc-cpp/encoder_decoder.cpp']"}), "('numenc', sources=['numenc-cpp/encoder_decoder.cpp'])\n", (1608, 1662), False, 'from setuptools import setup, find_packages, Extension\n')] |
nipunjain099/AutoGuard | Models/License-Plate-Recognition-Nigerian-vehicles-master/License-Plate-Recognition-Nigerian-vehicles-master/ocr.py | 8217cd03af7927590ef3a160ecb7d9bc9f50d101 | import numpy as np
from skimage.transform import resize
from skimage import measure
from skimage.measure import regionprops
class OCROnObjects():
def __init__(self, license_plate):
character_objects = self.identify_boundary_objects(license_plate)
self.get_regions(character_objects, license_plate)
def identify_boundary_objects(self, a_license_plate):
labelImage = measure.label(a_license_plate)
character_dimensions = (0.4*a_license_plate.shape[0], 0.85*a_license_plate.shape[0], 0.04*a_license_plate.shape[1], 0.15*a_license_plate.shape[1])
minHeight, maxHeight, minWidth, maxWidth = character_dimensions
regionLists = regionprops(labelImage)
return regionLists
def get_regions(self, character_objects, a_license_plate):
"""
        Used to map out regions where the license plate characters are,
        based on the principle of connected component analysis and
        labelling.
Parameters:
-----------
a_license_plate: 2D numpy binary image of the license plate
Returns:
--------
a dictionary containing the index
        fullscale: 3D array containing a 2D array for each character
        columnsVal: 1D array of the starting column of each character
coordinates:
"""
cord = []
counter=0
column_list = []
character_dimensions = (0.35*a_license_plate.shape[0], 0.60*a_license_plate.shape[0], 0.05*a_license_plate.shape[1], 0.15*a_license_plate.shape[1])
minHeight, maxHeight, minWidth, maxWidth = character_dimensions
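        # Only connected regions whose height and width fall within these
        # bounds (fractions of the plate size) are treated as characters.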
for regions in character_objects:
minimumRow, minimumCol, maximumRow, maximumCol = regions.bbox
character_height = maximumRow - minimumRow
character_width = maximumCol - minimumCol
roi = a_license_plate[minimumRow:maximumRow, minimumCol:maximumCol]
if character_height > minHeight and character_height < maxHeight and character_width > minWidth and character_width < maxWidth:
if counter == 0:
samples = resize(roi, (20,20))
cord.append(regions.bbox)
counter += 1
elif counter == 1:
roismall = resize(roi, (20,20))
samples = np.concatenate((samples[None,:,:], roismall[None,:,:]), axis=0)
cord.append(regions.bbox)
counter+=1
else:
roismall = resize(roi, (20,20))
samples = np.concatenate((samples[:,:,:], roismall[None,:,:]), axis=0)
cord.append(regions.bbox)
column_list.append(minimumCol)
if len(column_list) == 0:
self.candidates = {}
else:
self.candidates = {
'fullscale': samples,
'coordinates': np.array(cord),
'columnsVal': column_list
}
return self.candidates | [((412, 442), 'skimage.measure.label', 'measure.label', (['a_license_plate'], {}), '(a_license_plate)\n', (425, 442), False, 'from skimage import measure\n'), ((692, 715), 'skimage.measure.regionprops', 'regionprops', (['labelImage'], {}), '(labelImage)\n', (703, 715), False, 'from skimage.measure import regionprops\n'), ((2943, 2957), 'numpy.array', 'np.array', (['cord'], {}), '(cord)\n', (2951, 2957), True, 'import numpy as np\n'), ((2129, 2150), 'skimage.transform.resize', 'resize', (['roi', '(20, 20)'], {}), '(roi, (20, 20))\n', (2135, 2150), False, 'from skimage.transform import resize\n'), ((2295, 2316), 'skimage.transform.resize', 'resize', (['roi', '(20, 20)'], {}), '(roi, (20, 20))\n', (2301, 2316), False, 'from skimage.transform import resize\n'), ((2346, 2417), 'numpy.concatenate', 'np.concatenate', (['(samples[(None), :, :], roismall[(None), :, :])'], {'axis': '(0)'}), '((samples[(None), :, :], roismall[(None), :, :]), axis=0)\n', (2360, 2417), True, 'import numpy as np\n'), ((2540, 2561), 'skimage.transform.resize', 'resize', (['roi', '(20, 20)'], {}), '(roi, (20, 20))\n', (2546, 2561), False, 'from skimage.transform import resize\n'), ((2591, 2657), 'numpy.concatenate', 'np.concatenate', (['(samples[:, :, :], roismall[(None), :, :])'], {'axis': '(0)'}), '((samples[:, :, :], roismall[(None), :, :]), axis=0)\n', (2605, 2657), True, 'import numpy as np\n')] |
mvlima/flask-jwt-auth | project/server/models.py | 6cb210b50888b1e9a41ea9e63a80eafcbe436560 | # project/server/models.py
import jwt
import datetime
from project.server import app, db, bcrypt
class User(db.Model):
""" User Model for storing user related details """
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
username = db.Column(db.String(255), unique=True, nullable=False)
email = db.Column(db.String(255), unique=True, nullable=False)
password = db.Column(db.String(255), nullable=False)
name = db.Column(db.String(255), nullable=False)
age = db.Column(db.Integer, nullable=False)
    address = db.Column(db.String(255), nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
def __init__(self, email, username, password, name, age, address, admin=False):
self.email = email
self.username = username
self.password = bcrypt.generate_password_hash(
password, app.config.get('BCRYPT_LOG_ROUNDS')
).decode()
self.name = name
self.age = age
self.address = address
self.registered_on = datetime.datetime.now()
self.admin = admin
def encode_auth_token(self, user_id):
"""
Generates the Auth Token
:return: string
"""
try:
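            # Token payload: expiry (exp), issued-at (iat) and subject (sub, the user id)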
payload = {
'exp': datetime.datetime.utcnow() + datetime.timedelta(days=0, seconds=5),
'iat': datetime.datetime.utcnow(),
'sub': user_id
}
return jwt.encode(
payload,
app.config.get('SECRET_KEY'),
algorithm='HS256'
)
except Exception as e:
return e
@staticmethod
def decode_auth_token(auth_token):
"""
Validates the auth token
:param auth_token:
:return: integer|string
"""
try:
payload = jwt.decode(auth_token, app.config.get('SECRET_KEY'))
is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)
if is_blacklisted_token:
return 'Token blacklisted. Please log in again.'
else:
return payload['sub']
except jwt.ExpiredSignatureError:
return 'Signature expired. Please log in again.'
except jwt.InvalidTokenError:
return 'Invalid token. Please log in again.'
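    # Minimal usage sketch (hypothetical, for illustration only):
    #   token = user.encode_auth_token(user.id)
    #   user_id_or_error = User.decode_auth_token(token)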
class BlacklistToken(db.Model):
"""
Token Model for storing JWT tokens
"""
__tablename__ = 'blacklist_tokens'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
token = db.Column(db.String(500), unique=True, nullable=False)
blacklisted_on = db.Column(db.DateTime, nullable=False)
def __init__(self, token):
self.token = token
self.blacklisted_on = datetime.datetime.now()
def __repr__(self):
return '<id: token: {}'.format(self.token)
@staticmethod
def check_blacklist(auth_token):
# Check whether auth token has been blacklisted
res = BlacklistToken.query.filter_by(token=str(auth_token)).first()
if res:
return True
else:
return False
| [((216, 275), 'project.server.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)'}), '(db.Integer, primary_key=True, autoincrement=True)\n', (225, 275), False, 'from project.server import app, db, bcrypt\n'), ((533, 570), 'project.server.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(False)'}), '(db.Integer, nullable=False)\n', (542, 570), False, 'from project.server import app, db, bcrypt\n'), ((648, 686), 'project.server.db.Column', 'db.Column', (['db.DateTime'], {'nullable': '(False)'}), '(db.DateTime, nullable=False)\n', (657, 686), False, 'from project.server import app, db, bcrypt\n'), ((699, 751), 'project.server.db.Column', 'db.Column', (['db.Boolean'], {'nullable': '(False)', 'default': '(False)'}), '(db.Boolean, nullable=False, default=False)\n', (708, 751), False, 'from project.server import app, db, bcrypt\n'), ((2572, 2631), 'project.server.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)'}), '(db.Integer, primary_key=True, autoincrement=True)\n', (2581, 2631), False, 'from project.server import app, db, bcrypt\n'), ((2720, 2758), 'project.server.db.Column', 'db.Column', (['db.DateTime'], {'nullable': '(False)'}), '(db.DateTime, nullable=False)\n', (2729, 2758), False, 'from project.server import app, db, bcrypt\n'), ((301, 315), 'project.server.db.String', 'db.String', (['(255)'], {}), '(255)\n', (310, 315), False, 'from project.server import app, db, bcrypt\n'), ((368, 382), 'project.server.db.String', 'db.String', (['(255)'], {}), '(255)\n', (377, 382), False, 'from project.server import app, db, bcrypt\n'), ((438, 452), 'project.server.db.String', 'db.String', (['(255)'], {}), '(255)\n', (447, 452), False, 'from project.server import app, db, bcrypt\n'), ((491, 505), 'project.server.db.String', 'db.String', (['(255)'], {}), '(255)\n', (500, 505), False, 'from project.server import app, db, bcrypt\n'), ((595, 610), 'project.server.db.Integer', 'db.Integer', (['(255)'], {}), '(255)\n', (605, 610), False, 'from project.server import app, db, bcrypt\n'), ((1137, 1160), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1158, 1160), False, 'import datetime\n'), ((2654, 2668), 'project.server.db.String', 'db.String', (['(500)'], {}), '(500)\n', (2663, 2668), False, 'from project.server import app, db, bcrypt\n'), ((2848, 2871), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2869, 2871), False, 'import datetime\n'), ((1463, 1489), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1487, 1489), False, 'import datetime\n'), ((1608, 1636), 'project.server.app.config.get', 'app.config.get', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (1622, 1636), False, 'from project.server import app, db, bcrypt\n'), ((1970, 1998), 'project.server.app.config.get', 'app.config.get', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (1984, 1998), False, 'from project.server import app, db, bcrypt\n'), ((974, 1009), 'project.server.app.config.get', 'app.config.get', (['"""BCRYPT_LOG_ROUNDS"""'], {}), "('BCRYPT_LOG_ROUNDS')\n", (988, 1009), False, 'from project.server import app, db, bcrypt\n'), ((1372, 1398), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1396, 1398), False, 'import datetime\n'), ((1401, 1438), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(0)', 'seconds': '(5)'}), '(days=0, seconds=5)\n', (1419, 1438), False, 'import datetime\n')] |
ccppuu/certbot | letsencrypt/setup.py | 9fead41aaf93dde0d36d4aef6fded8dd306c1ddc | import codecs
import os
import sys
from setuptools import setup
from setuptools import find_packages
def read_file(filename, encoding='utf8'):
"""Read unicode from given file."""
with codecs.open(filename, encoding=encoding) as fd:
return fd.read()
here = os.path.abspath(os.path.dirname(__file__))
readme = read_file(os.path.join(here, 'README.rst'))
# This package is a simple shim around certbot
install_requires = ['certbot']
version = '0.7.0.dev0'
setup(
name='letsencrypt',
version=version,
description="ACME client",
long_description=readme,
url='https://github.com/letsencrypt/letsencrypt',
author="Certbot Project",
author_email='[email protected]',
license='Apache License 2.0',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Environment :: Console :: Curses',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
'Topic :: System :: Installation/Setup',
'Topic :: System :: Networking',
'Topic :: System :: Systems Administration',
'Topic :: Utilities',
],
packages=find_packages(),
include_package_data=True,
install_requires=install_requires,
entry_points={
'console_scripts': [
'letsencrypt = certbot.main:main',
],
},
)
| [((293, 318), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (308, 318), False, 'import os\n'), ((339, 371), 'os.path.join', 'os.path.join', (['here', '"""README.rst"""'], {}), "(here, 'README.rst')\n", (351, 371), False, 'import os\n'), ((195, 235), 'codecs.open', 'codecs.open', (['filename'], {'encoding': 'encoding'}), '(filename, encoding=encoding)\n', (206, 235), False, 'import codecs\n'), ((1509, 1524), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1522, 1524), False, 'from setuptools import find_packages\n')] |
dekhrekh/elastalert | elastalert/alerts.py | 0c1ce30302c575bd0be404582cd452f38c01c774 | # -*- coding: utf-8 -*-
import copy
import datetime
import json
import logging
import subprocess
import sys
import warnings
from email.mime.text import MIMEText
from email.utils import formatdate
from smtplib import SMTP
from smtplib import SMTP_SSL
from smtplib import SMTPAuthenticationError
from smtplib import SMTPException
from socket import error
import boto3
import requests
import stomp
from exotel import Exotel
from jira.client import JIRA
from jira.exceptions import JIRAError
from requests.exceptions import RequestException
from staticconf.loader import yaml_loader
from texttable import Texttable
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client as TwilioClient
from util import EAException
from util import elastalert_logger
from util import lookup_es_key
from util import pretty_ts
from util import ts_now
from util import ts_to_dt
class DateTimeEncoder(json.JSONEncoder):
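    # Lets json.dumps handle datetime-like values by emitting their isoformat() string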
def default(self, obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
else:
return json.JSONEncoder.default(self, obj)
class BasicMatchString(object):
""" Creates a string containing fields in match for the given rule. """
def __init__(self, rule, match):
self.rule = rule
self.match = match
def _ensure_new_line(self):
while self.text[-2:] != '\n\n':
self.text += '\n'
def _add_custom_alert_text(self):
missing = '<MISSING VALUE>'
alert_text = unicode(self.rule.get('alert_text', ''))
if 'alert_text_args' in self.rule:
alert_text_args = self.rule.get('alert_text_args')
alert_text_values = [lookup_es_key(self.match, arg) for arg in alert_text_args]
# Support referencing other top-level rule properties
# This technically may not work if there is a top-level rule property with the same name
# as an es result key, since it would have been matched in the lookup_es_key call above
for i in xrange(len(alert_text_values)):
if alert_text_values[i] is None:
alert_value = self.rule.get(alert_text_args[i])
if alert_value:
alert_text_values[i] = alert_value
alert_text_values = [missing if val is None else val for val in alert_text_values]
alert_text = alert_text.format(*alert_text_values)
elif 'alert_text_kw' in self.rule:
kw = {}
for name, kw_name in self.rule.get('alert_text_kw').items():
val = lookup_es_key(self.match, name)
# Support referencing other top-level rule properties
# This technically may not work if there is a top-level rule property with the same name
# as an es result key, since it would have been matched in the lookup_es_key call above
if val is None:
val = self.rule.get(name)
kw[kw_name] = missing if val is None else val
alert_text = alert_text.format(**kw)
self.text += alert_text
def _add_rule_text(self):
self.text += self.rule['type'].get_match_str(self.match)
def _add_top_counts(self):
for key, counts in self.match.items():
if key.startswith('top_events_'):
self.text += '%s:\n' % (key[11:])
top_events = counts.items()
if not top_events:
self.text += 'No events found.\n'
else:
top_events.sort(key=lambda x: x[1], reverse=True)
for term, count in top_events:
self.text += '%s: %s\n' % (term, count)
self.text += '\n'
def _add_match_items(self):
match_items = self.match.items()
match_items.sort(key=lambda x: x[0])
for key, value in match_items:
if key.startswith('top_events_'):
continue
value_str = unicode(value)
value_str.replace('\\n', '\n')
if type(value) in [list, dict]:
try:
value_str = self._pretty_print_as_json(value)
except TypeError:
# Non serializable object, fallback to str
pass
self.text += '%s: %s\n' % (key, value_str)
def _pretty_print_as_json(self, blob):
try:
return json.dumps(blob, cls=DateTimeEncoder, sort_keys=True, indent=4, ensure_ascii=False)
except UnicodeDecodeError:
# This blob contains non-unicode, so lets pretend it's Latin-1 to show something
return json.dumps(blob, cls=DateTimeEncoder, sort_keys=True, indent=4, encoding='Latin-1', ensure_ascii=False)
def __str__(self):
self.text = ''
if 'alert_text' not in self.rule:
self.text += self.rule['name'] + '\n\n'
self._add_custom_alert_text()
self._ensure_new_line()
if self.rule.get('alert_text_type') != 'alert_text_only':
self._add_rule_text()
self._ensure_new_line()
if self.rule.get('top_count_keys'):
self._add_top_counts()
if self.rule.get('alert_text_type') != 'exclude_fields':
self._add_match_items()
return self.text
class JiraFormattedMatchString(BasicMatchString):
def _add_match_items(self):
match_items = dict([(x, y) for x, y in self.match.items() if not x.startswith('top_events_')])
json_blob = self._pretty_print_as_json(match_items)
preformatted_text = u'{{code:json}}{0}{{code}}'.format(json_blob)
self.text += preformatted_text
class Alerter(object):
""" Base class for types of alerts.
:param rule: The rule configuration.
"""
required_options = frozenset([])
def __init__(self, rule):
self.rule = rule
# pipeline object is created by ElastAlerter.send_alert()
# and attached to each alerters used by a rule before calling alert()
self.pipeline = None
self.resolve_rule_references(self.rule)
def resolve_rule_references(self, root):
# Support referencing other top-level rule properties to avoid redundant copy/paste
if type(root) == list:
# Make a copy since we may be modifying the contents of the structure we're walking
for i, item in enumerate(copy.copy(root)):
if type(item) == dict or type(item) == list:
self.resolve_rule_references(root[i])
else:
root[i] = self.resolve_rule_reference(item)
elif type(root) == dict:
# Make a copy since we may be modifying the contents of the structure we're walking
for key, value in root.copy().iteritems():
if type(value) == dict or type(value) == list:
self.resolve_rule_references(root[key])
else:
root[key] = self.resolve_rule_reference(value)
def resolve_rule_reference(self, value):
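        # A value wrapped in '$' (e.g. '$name$') is replaced with the matching top-level rule property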
strValue = unicode(value)
if strValue.startswith('$') and strValue.endswith('$') and strValue[1:-1] in self.rule:
if type(value) == int:
return int(self.rule[strValue[1:-1]])
else:
return self.rule[strValue[1:-1]]
else:
return value
def alert(self, match):
""" Send an alert. Match is a dictionary of information about the alert.
:param match: A dictionary of relevant information to the alert.
"""
raise NotImplementedError()
def get_info(self):
""" Returns a dictionary of data related to this alert. At minimum, this should contain
a field type corresponding to the type of Alerter. """
return {'type': 'Unknown'}
def create_title(self, matches):
""" Creates custom alert title to be used, e.g. as an e-mail subject or JIRA issue summary.
:param matches: A list of dictionaries of relevant information to the alert.
"""
if 'alert_subject' in self.rule:
return self.create_custom_title(matches)
return self.create_default_title(matches)
def create_custom_title(self, matches):
alert_subject = unicode(self.rule['alert_subject'])
if 'alert_subject_args' in self.rule:
alert_subject_args = self.rule['alert_subject_args']
alert_subject_values = [lookup_es_key(matches[0], arg) for arg in alert_subject_args]
# Support referencing other top-level rule properties
# This technically may not work if there is a top-level rule property with the same name
# as an es result key, since it would have been matched in the lookup_es_key call above
for i in xrange(len(alert_subject_values)):
if alert_subject_values[i] is None:
alert_value = self.rule.get(alert_subject_args[i])
if alert_value:
alert_subject_values[i] = alert_value
alert_subject_values = ['<MISSING VALUE>' if val is None else val for val in alert_subject_values]
return alert_subject.format(*alert_subject_values)
return alert_subject
def create_alert_body(self, matches):
body = self.get_aggregation_summary_text(matches)
for match in matches:
body += unicode(BasicMatchString(self.rule, match))
# Separate text of aggregated alerts with dashes
if len(matches) > 1:
body += '\n----------------------------------------\n'
return body
def get_aggregation_summary_text(self, matches):
text = ''
if 'aggregation' in self.rule and 'summary_table_fields' in self.rule:
summary_table_fields = self.rule['summary_table_fields']
if not isinstance(summary_table_fields, list):
summary_table_fields = [summary_table_fields]
# Include a count aggregation so that we can see at a glance how many of each aggregation_key were encountered
summary_table_fields_with_count = summary_table_fields + ['count']
text += "Aggregation resulted in the following data for summary_table_fields ==> {0}:\n\n".format(
summary_table_fields_with_count
)
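            # Render one row per unique combination of the summary fields, with its count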
text_table = Texttable()
text_table.header(summary_table_fields_with_count)
match_aggregation = {}
# Maintain an aggregate count for each unique key encountered in the aggregation period
for match in matches:
key_tuple = tuple([unicode(lookup_es_key(match, key)) for key in summary_table_fields])
if key_tuple not in match_aggregation:
match_aggregation[key_tuple] = 1
else:
match_aggregation[key_tuple] = match_aggregation[key_tuple] + 1
for keys, count in match_aggregation.iteritems():
text_table.add_row([key for key in keys] + [count])
text += text_table.draw() + '\n\n'
return unicode(text)
def create_default_title(self, matches):
return self.rule['name']
def get_account(self, account_file):
""" Gets the username and password from an account file.
:param account_file: Name of the file which contains user and password information.
"""
account_conf = yaml_loader(account_file)
if 'user' not in account_conf or 'password' not in account_conf:
raise EAException('Account file must have user and password fields')
self.user = account_conf['user']
self.password = account_conf['password']
class StompAlerter(Alerter):
""" The stomp alerter publishes alerts via stomp to a broker. """
required_options = frozenset(['stomp_hostname', 'stomp_hostport', 'stomp_login', 'stomp_password'])
def alert(self, matches):
alerts = []
qk = self.rule.get('query_key', None)
fullmessage = {}
for match in matches:
if qk in match:
elastalert_logger.info(
'Alert for %s, %s at %s:' % (self.rule['name'], match[qk], lookup_es_key(match, self.rule['timestamp_field'])))
alerts.append(
'1)Alert for %s, %s at %s:' % (self.rule['name'], match[qk], lookup_es_key(match, self.rule['timestamp_field']))
)
fullmessage['match'] = match[qk]
else:
elastalert_logger.info('Alert for %s at %s:' % (self.rule['name'], lookup_es_key(match, self.rule['timestamp_field'])))
alerts.append(
'2)Alert for %s at %s:' % (self.rule['name'], lookup_es_key(match, self.rule['timestamp_field']))
)
fullmessage['match'] = lookup_es_key(match, self.rule['timestamp_field'])
elastalert_logger.info(unicode(BasicMatchString(self.rule, match)))
fullmessage['alerts'] = alerts
fullmessage['rule'] = self.rule['name']
fullmessage['matching'] = unicode(BasicMatchString(self.rule, match))
fullmessage['alertDate'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
fullmessage['body'] = self.create_alert_body(matches)
self.stomp_hostname = self.rule.get('stomp_hostname', 'localhost')
self.stomp_hostport = self.rule.get('stomp_hostport', '61613')
self.stomp_login = self.rule.get('stomp_login', 'admin')
self.stomp_password = self.rule.get('stomp_password', 'admin')
self.stomp_destination = self.rule.get('stomp_destination', '/queue/ALERT')
conn = stomp.Connection([(self.stomp_hostname, self.stomp_hostport)])
conn.start()
conn.connect(self.stomp_login, self.stomp_password)
conn.send(self.stomp_destination, json.dumps(fullmessage))
conn.disconnect()
def get_info(self):
return {'type': 'stomp'}
class DebugAlerter(Alerter):
""" The debug alerter uses a Python logger (by default, alerting to terminal). """
def alert(self, matches):
qk = self.rule.get('query_key', None)
for match in matches:
if qk in match:
elastalert_logger.info(
'Alert for %s, %s at %s:' % (self.rule['name'], match[qk], lookup_es_key(match, self.rule['timestamp_field'])))
else:
elastalert_logger.info('Alert for %s at %s:' % (self.rule['name'], lookup_es_key(match, self.rule['timestamp_field'])))
elastalert_logger.info(unicode(BasicMatchString(self.rule, match)))
def get_info(self):
return {'type': 'debug'}
class EmailAlerter(Alerter):
""" Sends an email alert """
required_options = frozenset(['email'])
def __init__(self, *args):
super(EmailAlerter, self).__init__(*args)
self.smtp_host = self.rule.get('smtp_host', 'localhost')
self.smtp_ssl = self.rule.get('smtp_ssl', False)
self.from_addr = self.rule.get('from_addr', 'ElastAlert')
self.smtp_port = self.rule.get('smtp_port')
if self.rule.get('smtp_auth_file'):
self.get_account(self.rule['smtp_auth_file'])
self.smtp_key_file = self.rule.get('smtp_key_file')
self.smtp_cert_file = self.rule.get('smtp_cert_file')
# Convert email to a list if it isn't already
if isinstance(self.rule['email'], basestring):
self.rule['email'] = [self.rule['email']]
# If there is a cc then also convert it a list if it isn't
cc = self.rule.get('cc')
if cc and isinstance(cc, basestring):
self.rule['cc'] = [self.rule['cc']]
# If there is a bcc then also convert it to a list if it isn't
bcc = self.rule.get('bcc')
if bcc and isinstance(bcc, basestring):
self.rule['bcc'] = [self.rule['bcc']]
add_suffix = self.rule.get('email_add_domain')
if add_suffix and not add_suffix.startswith('@'):
self.rule['email_add_domain'] = '@' + add_suffix
def alert(self, matches):
body = self.create_alert_body(matches)
# Add JIRA ticket if it exists
if self.pipeline is not None and 'jira_ticket' in self.pipeline:
url = '%s/browse/%s' % (self.pipeline['jira_server'], self.pipeline['jira_ticket'])
body += '\nJIRA ticket: %s' % (url)
to_addr = self.rule['email']
if 'email_from_field' in self.rule:
recipient = lookup_es_key(matches[0], self.rule['email_from_field'])
if isinstance(recipient, basestring):
if '@' in recipient:
to_addr = [recipient]
elif 'email_add_domain' in self.rule:
to_addr = [recipient + self.rule['email_add_domain']]
elif isinstance(recipient, list):
to_addr = recipient
if 'email_add_domain' in self.rule:
to_addr = [name + self.rule['email_add_domain'] for name in to_addr]
email_msg = MIMEText(body.encode('UTF-8'), _charset='UTF-8')
email_msg['Subject'] = self.create_title(matches)
email_msg['To'] = ', '.join(to_addr)
email_msg['From'] = self.from_addr
email_msg['Reply-To'] = self.rule.get('email_reply_to', email_msg['To'])
email_msg['Date'] = formatdate()
if self.rule.get('cc'):
email_msg['CC'] = ','.join(self.rule['cc'])
to_addr = to_addr + self.rule['cc']
if self.rule.get('bcc'):
to_addr = to_addr + self.rule['bcc']
try:
if self.smtp_ssl:
if self.smtp_port:
self.smtp = SMTP_SSL(self.smtp_host, self.smtp_port, keyfile=self.smtp_key_file, certfile=self.smtp_cert_file)
else:
self.smtp = SMTP_SSL(self.smtp_host, keyfile=self.smtp_key_file, certfile=self.smtp_cert_file)
else:
if self.smtp_port:
self.smtp = SMTP(self.smtp_host, self.smtp_port)
else:
self.smtp = SMTP(self.smtp_host)
self.smtp.ehlo()
if self.smtp.has_extn('STARTTLS'):
self.smtp.starttls(keyfile=self.smtp_key_file, certfile=self.smtp_cert_file)
if 'smtp_auth_file' in self.rule:
self.smtp.login(self.user, self.password)
except (SMTPException, error) as e:
raise EAException("Error connecting to SMTP host: %s" % (e))
except SMTPAuthenticationError as e:
raise EAException("SMTP username/password rejected: %s" % (e))
self.smtp.sendmail(self.from_addr, to_addr, email_msg.as_string())
self.smtp.close()
elastalert_logger.info("Sent email to %s" % (to_addr))
def create_default_title(self, matches):
subject = 'ElastAlert: %s' % (self.rule['name'])
# If the rule has a query_key, add that value plus timestamp to subject
if 'query_key' in self.rule:
qk = matches[0].get(self.rule['query_key'])
if qk:
subject += ' - %s' % (qk)
return subject
def get_info(self):
return {'type': 'email',
'recipients': self.rule['email']}
class JiraAlerter(Alerter):
""" Creates a Jira ticket for each alert """
required_options = frozenset(['jira_server', 'jira_account_file', 'jira_project', 'jira_issuetype'])
# Maintain a static set of built-in fields that we explicitly know how to set
# For anything else, we will do best-effort and try to set a string value
known_field_list = [
'jira_account_file',
'jira_assignee',
'jira_bump_after_inactivity',
'jira_bump_in_statuses',
'jira_bump_not_in_statuses',
'jira_bump_tickets',
'jira_component',
'jira_components',
'jira_description',
'jira_ignore_in_title',
'jira_issuetype',
'jira_label',
'jira_labels',
'jira_max_age',
'jira_priority',
'jira_project',
'jira_server',
'jira_watchers',
]
# Some built-in jira types that can be used as custom fields require special handling
# Here is a sample of one of them:
# {"id":"customfield_12807","name":"My Custom Field","custom":true,"orderable":true,"navigable":true,"searchable":true,
# "clauseNames":["cf[12807]","My Custom Field"],"schema":{"type":"array","items":"string",
# "custom":"com.atlassian.jira.plugin.system.customfieldtypes:multiselect","customId":12807}}
# There are likely others that will need to be updated on a case-by-case basis
custom_string_types_with_special_handling = [
'com.atlassian.jira.plugin.system.customfieldtypes:multicheckboxes',
'com.atlassian.jira.plugin.system.customfieldtypes:multiselect',
'com.atlassian.jira.plugin.system.customfieldtypes:radiobuttons',
]
def __init__(self, rule):
super(JiraAlerter, self).__init__(rule)
self.server = self.rule['jira_server']
self.get_account(self.rule['jira_account_file'])
self.project = self.rule['jira_project']
self.issue_type = self.rule['jira_issuetype']
# We used to support only a single component. This allows us to maintain backwards compatibility
# while also giving the user-facing API a more representative name
self.components = self.rule.get('jira_components', self.rule.get('jira_component'))
# We used to support only a single label. This allows us to maintain backwards compatibility
# while also giving the user-facing API a more representative name
self.labels = self.rule.get('jira_labels', self.rule.get('jira_label'))
self.description = self.rule.get('jira_description', '')
self.assignee = self.rule.get('jira_assignee')
self.max_age = self.rule.get('jira_max_age', 30)
self.priority = self.rule.get('jira_priority')
self.bump_tickets = self.rule.get('jira_bump_tickets', False)
self.bump_not_in_statuses = self.rule.get('jira_bump_not_in_statuses')
self.bump_in_statuses = self.rule.get('jira_bump_in_statuses')
self.bump_after_inactivity = self.rule.get('jira_bump_after_inactivity', self.max_age)
self.watchers = self.rule.get('jira_watchers')
if self.bump_in_statuses and self.bump_not_in_statuses:
msg = 'Both jira_bump_in_statuses (%s) and jira_bump_not_in_statuses (%s) are set.' % \
(','.join(self.bump_in_statuses), ','.join(self.bump_not_in_statuses))
            intersection = list(set(self.bump_in_statuses) & set(self.bump_not_in_statuses))
if intersection:
msg = '%s Both have common statuses of (%s). As such, no tickets will ever be found.' % (
msg, ','.join(intersection))
msg += ' This should be simplified to use only one or the other.'
logging.warning(msg)
self.jira_args = {'project': {'key': self.project},
'issuetype': {'name': self.issue_type}}
if self.components:
# Support single component or list
if type(self.components) != list:
self.jira_args['components'] = [{'name': self.components}]
else:
self.jira_args['components'] = [{'name': component} for component in self.components]
if self.labels:
# Support single label or list
if type(self.labels) != list:
self.labels = [self.labels]
self.jira_args['labels'] = self.labels
if self.watchers:
# Support single watcher or list
if type(self.watchers) != list:
self.watchers = [self.watchers]
if self.assignee:
self.jira_args['assignee'] = {'name': self.assignee}
try:
self.client = JIRA(self.server, basic_auth=(self.user, self.password))
self.get_priorities()
self.get_arbitrary_fields()
except JIRAError as e:
# JIRAError may contain HTML, pass along only first 1024 chars
raise EAException("Error connecting to JIRA: %s" % (str(e)[:1024]))
try:
if self.priority is not None:
self.jira_args['priority'] = {'id': self.priority_ids[self.priority]}
except KeyError:
logging.error("Priority %s not found. Valid priorities are %s" % (self.priority, self.priority_ids.keys()))
def get_arbitrary_fields(self):
# This API returns metadata about all the fields defined on the jira server (built-ins and custom ones)
fields = self.client.fields()
for jira_field, value in self.rule.iteritems():
# If we find a field that is not covered by the set that we are aware of, it means it is either:
# 1. A built-in supported field in JIRA that we don't have on our radar
# 2. A custom field that a JIRA admin has configured
if jira_field.startswith('jira_') and jira_field not in self.known_field_list:
# Remove the jira_ part. Convert underscores to spaces
normalized_jira_field = jira_field[5:].replace('_', ' ').lower()
# All jira fields should be found in the 'id' or the 'name' field. Therefore, try both just in case
for identifier in ['name', 'id']:
field = next((f for f in fields if normalized_jira_field == f[identifier].replace('_', ' ').lower()), None)
if field:
break
if not field:
# Log a warning to ElastAlert saying that we couldn't find that type?
# OR raise and fail to load the alert entirely? Probably the latter...
raise Exception("Could not find a definition for the jira field '{0}'".format(normalized_jira_field))
arg_name = field['id']
# Check the schema information to decide how to set the value correctly
# If the schema information is not available, raise an exception since we don't know how to set it
# Note this is only the case for two built-in types, id: issuekey and id: thumbnail
                if 'schema' not in field or 'type' not in field['schema']:
raise Exception("Could not determine schema information for the jira field '{0}'".format(normalized_jira_field))
arg_type = field['schema']['type']
# Handle arrays of simple types like strings or numbers
if arg_type == 'array':
# As a convenience, support the scenario wherein the user only provides
# a single value for a multi-value field e.g. jira_labels: Only_One_Label
if type(value) != list:
value = [value]
array_items = field['schema']['items']
# Simple string types
if array_items in ['string', 'date', 'datetime']:
# Special case for multi-select custom types (the JIRA metadata says that these are strings, but
                        # in reality, they are required to be provided as an object).
if 'custom' in field['schema'] and field['schema']['custom'] in self.custom_string_types_with_special_handling:
self.jira_args[arg_name] = [{'value': v} for v in value]
else:
self.jira_args[arg_name] = value
elif array_items == 'number':
self.jira_args[arg_name] = [int(v) for v in value]
# Also attempt to handle arrays of complex types that have to be passed as objects with an identifier 'key'
elif array_items == 'option':
self.jira_args[arg_name] = [{'value': v} for v in value]
else:
# Try setting it as an object, using 'name' as the key
# This may not work, as the key might actually be 'key', 'id', 'value', or something else
# If it works, great! If not, it will manifest itself as an API error that will bubble up
self.jira_args[arg_name] = [{'name': v} for v in value]
# Handle non-array types
else:
# Simple string types
if arg_type in ['string', 'date', 'datetime']:
# Special case for custom types (the JIRA metadata says that these are strings, but
                        # in reality, they are required to be provided as an object).
if 'custom' in field['schema'] and field['schema']['custom'] in self.custom_string_types_with_special_handling:
self.jira_args[arg_name] = {'value': value}
else:
self.jira_args[arg_name] = value
# Number type
elif arg_type == 'number':
self.jira_args[arg_name] = int(value)
elif arg_type == 'option':
self.jira_args[arg_name] = {'value': value}
# Complex type
else:
self.jira_args[arg_name] = {'name': value}
def get_priorities(self):
""" Creates a mapping of priority index to id. """
priorities = self.client.priorities()
self.priority_ids = {}
for x in range(len(priorities)):
self.priority_ids[x] = priorities[x].id
def set_assignee(self, assignee):
self.assignee = assignee
if assignee:
self.jira_args['assignee'] = {'name': assignee}
elif 'assignee' in self.jira_args:
self.jira_args.pop('assignee')
def find_existing_ticket(self, matches):
# Default title, get stripped search version
if 'alert_subject' not in self.rule:
title = self.create_default_title(matches, True)
else:
title = self.create_title(matches)
if 'jira_ignore_in_title' in self.rule:
title = title.replace(matches[0].get(self.rule['jira_ignore_in_title'], ''), '')
# This is necessary for search to work. Other special characters and dashes
# directly adjacent to words appear to be ok
title = title.replace(' - ', ' ')
title = title.replace('\\', '\\\\')
date = (datetime.datetime.now() - datetime.timedelta(days=self.max_age)).strftime('%Y-%m-%d')
jql = 'project=%s AND summary~"%s" and created >= "%s"' % (self.project, title, date)
if self.bump_in_statuses:
jql = '%s and status in (%s)' % (jql, ','.join(self.bump_in_statuses))
if self.bump_not_in_statuses:
jql = '%s and status not in (%s)' % (jql, ','.join(self.bump_not_in_statuses))
try:
issues = self.client.search_issues(jql)
except JIRAError as e:
logging.exception("Error while searching for JIRA ticket using jql '%s': %s" % (jql, e))
return None
if len(issues):
return issues[0]
def comment_on_ticket(self, ticket, match):
text = unicode(JiraFormattedMatchString(self.rule, match))
timestamp = pretty_ts(lookup_es_key(match, self.rule['timestamp_field']))
comment = "This alert was triggered again at %s\n%s" % (timestamp, text)
self.client.add_comment(ticket, comment)
def alert(self, matches):
title = self.create_title(matches)
if self.bump_tickets:
ticket = self.find_existing_ticket(matches)
if ticket:
inactivity_datetime = ts_now() - datetime.timedelta(days=self.bump_after_inactivity)
if ts_to_dt(ticket.fields.updated) >= inactivity_datetime:
if self.pipeline is not None:
self.pipeline['jira_ticket'] = None
self.pipeline['jira_server'] = self.server
return None
elastalert_logger.info('Commenting on existing ticket %s' % (ticket.key))
for match in matches:
try:
self.comment_on_ticket(ticket, match)
except JIRAError as e:
logging.exception("Error while commenting on ticket %s: %s" % (ticket, e))
if self.pipeline is not None:
self.pipeline['jira_ticket'] = ticket
self.pipeline['jira_server'] = self.server
return None
self.jira_args['summary'] = title
self.jira_args['description'] = self.create_alert_body(matches)
try:
self.issue = self.client.create_issue(**self.jira_args)
# You can not add watchers on initial creation. Only as a follow-up action
if self.watchers:
for watcher in self.watchers:
try:
self.client.add_watcher(self.issue.key, watcher)
except Exception as ex:
# Re-raise the exception, preserve the stack-trace, and give some
# context as to which watcher failed to be added
raise Exception(
"Exception encountered when trying to add '{0}' as a watcher. Does the user exist?\n{1}" .format(
watcher,
ex
)), None, sys.exc_info()[2]
except JIRAError as e:
raise EAException("Error creating JIRA ticket using jira_args (%s): %s" % (self.jira_args, e))
elastalert_logger.info("Opened Jira ticket: %s" % (self.issue))
if self.pipeline is not None:
self.pipeline['jira_ticket'] = self.issue
self.pipeline['jira_server'] = self.server
def create_alert_body(self, matches):
body = self.description + '\n'
body += self.get_aggregation_summary_text(matches)
for match in matches:
body += unicode(JiraFormattedMatchString(self.rule, match))
if len(matches) > 1:
body += '\n----------------------------------------\n'
return body
def get_aggregation_summary_text(self, matches):
text = super(JiraAlerter, self).get_aggregation_summary_text(matches)
if text:
text = u'{{noformat}}{0}{{noformat}}'.format(text)
return text
def create_default_title(self, matches, for_search=False):
# If there is a query_key, use that in the title
if 'query_key' in self.rule and lookup_es_key(matches[0], self.rule['query_key']):
title = 'ElastAlert: %s matched %s' % (lookup_es_key(matches[0], self.rule['query_key']), self.rule['name'])
else:
title = 'ElastAlert: %s' % (self.rule['name'])
if for_search:
return title
title += ' - %s' % (pretty_ts(matches[0][self.rule['timestamp_field']], self.rule.get('use_local_time')))
# Add count for spikes
count = matches[0].get('spike_count')
if count:
title += ' - %s+ events' % (count)
return title
def get_info(self):
return {'type': 'jira'}
class CommandAlerter(Alerter):
required_options = set(['command'])
def __init__(self, *args):
super(CommandAlerter, self).__init__(*args)
self.last_command = []
self.shell = False
if isinstance(self.rule['command'], basestring):
self.shell = True
if '%' in self.rule['command']:
logging.warning('Warning! You could be vulnerable to shell injection!')
self.rule['command'] = [self.rule['command']]
self.new_style_string_format = False
if 'new_style_string_format' in self.rule and self.rule['new_style_string_format']:
self.new_style_string_format = True
def alert(self, matches):
# Format the command and arguments
try:
if self.new_style_string_format:
command = [command_arg.format(match=matches[0]) for command_arg in self.rule['command']]
else:
command = [command_arg % matches[0] for command_arg in self.rule['command']]
self.last_command = command
except KeyError as e:
raise EAException("Error formatting command: %s" % (e))
# Run command and pipe data
try:
subp = subprocess.Popen(command, stdin=subprocess.PIPE, shell=self.shell)
if self.rule.get('pipe_match_json'):
match_json = json.dumps(matches, cls=DateTimeEncoder) + '\n'
stdout, stderr = subp.communicate(input=match_json)
if self.rule.get("fail_on_non_zero_exit", False) and subp.wait():
raise EAException("Non-zero exit code while running command %s" % (' '.join(command)))
except OSError as e:
raise EAException("Error while running command %s: %s" % (' '.join(command), e))
def get_info(self):
return {'type': 'command',
'command': ' '.join(self.last_command)}
class SnsAlerter(Alerter):
""" Send alert using AWS SNS service """
required_options = frozenset(['sns_topic_arn'])
def __init__(self, *args):
super(SnsAlerter, self).__init__(*args)
self.sns_topic_arn = self.rule.get('sns_topic_arn', '')
self.aws_access_key_id = self.rule.get('aws_access_key_id')
self.aws_secret_access_key = self.rule.get('aws_secret_access_key')
self.aws_region = self.rule.get('aws_region', 'us-east-1')
self.profile = self.rule.get('boto_profile', None) # Deprecated
self.profile = self.rule.get('aws_profile', None)
def create_default_title(self, matches):
subject = 'ElastAlert: %s' % (self.rule['name'])
return subject
def alert(self, matches):
body = self.create_alert_body(matches)
session = boto3.Session(
aws_access_key_id=self.aws_access_key_id,
aws_secret_access_key=self.aws_secret_access_key,
region_name=self.aws_region,
profile_name=self.profile
)
sns_client = session.client('sns')
sns_client.publish(
TopicArn=self.sns_topic_arn,
Message=body,
Subject=self.create_title(matches)
)
elastalert_logger.info("Sent sns notification to %s" % (self.sns_topic_arn))
class HipChatAlerter(Alerter):
""" Creates a HipChat room notification for each alert """
required_options = frozenset(['hipchat_auth_token', 'hipchat_room_id'])
def __init__(self, rule):
super(HipChatAlerter, self).__init__(rule)
self.hipchat_msg_color = self.rule.get('hipchat_msg_color', 'red')
self.hipchat_message_format = self.rule.get('hipchat_message_format', 'html')
self.hipchat_auth_token = self.rule['hipchat_auth_token']
self.hipchat_room_id = self.rule['hipchat_room_id']
self.hipchat_domain = self.rule.get('hipchat_domain', 'api.hipchat.com')
self.hipchat_ignore_ssl_errors = self.rule.get('hipchat_ignore_ssl_errors', False)
self.hipchat_notify = self.rule.get('hipchat_notify', True)
self.hipchat_from = self.rule.get('hipchat_from', '')
self.url = 'https://%s/v2/room/%s/notification?auth_token=%s' % (
self.hipchat_domain, self.hipchat_room_id, self.hipchat_auth_token)
self.hipchat_proxy = self.rule.get('hipchat_proxy', None)
def alert(self, matches):
body = self.create_alert_body(matches)
# HipChat sends 400 bad request on messages longer than 10000 characters
if (len(body) > 9999):
body = body[:9980] + '..(truncated)'
# Use appropriate line ending for text/html
if self.hipchat_message_format == 'html':
body = body.replace('\n', '<br />')
# Post to HipChat
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.hipchat_proxy} if self.hipchat_proxy else None
payload = {
'color': self.hipchat_msg_color,
'message': body,
'message_format': self.hipchat_message_format,
'notify': self.hipchat_notify,
'from': self.hipchat_from
}
try:
if self.hipchat_ignore_ssl_errors:
requests.packages.urllib3.disable_warnings()
response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers,
verify=not self.hipchat_ignore_ssl_errors,
proxies=proxies)
warnings.resetwarnings()
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to HipChat: %s" % e)
elastalert_logger.info("Alert sent to HipChat room %s" % self.hipchat_room_id)
def get_info(self):
return {'type': 'hipchat',
'hipchat_room_id': self.hipchat_room_id}
class MsTeamsAlerter(Alerter):
""" Creates a Microsoft Teams Conversation Message for each alert """
required_options = frozenset(['ms_teams_webhook_url', 'ms_teams_alert_summary'])
def __init__(self, rule):
super(MsTeamsAlerter, self).__init__(rule)
self.ms_teams_webhook_url = self.rule['ms_teams_webhook_url']
if isinstance(self.ms_teams_webhook_url, basestring):
self.ms_teams_webhook_url = [self.ms_teams_webhook_url]
self.ms_teams_proxy = self.rule.get('ms_teams_proxy', None)
self.ms_teams_alert_summary = self.rule.get('ms_teams_alert_summary', 'ElastAlert Message')
self.ms_teams_alert_fixed_width = self.rule.get('ms_teams_alert_fixed_width', False)
self.ms_teams_theme_color = self.rule.get('ms_teams_theme_color', '')
def format_body(self, body):
body = body.encode('UTF-8')
if self.ms_teams_alert_fixed_width:
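            # Wrap every line in markdown code fences so Teams renders the alert in a fixed-width font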
body = body.replace('`', "'")
body = "```{0}```".format('```\n\n```'.join(x for x in body.split('\n'))).replace('\n``````', '')
return body
def alert(self, matches):
body = self.create_alert_body(matches)
body = self.format_body(body)
# post to Teams
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.ms_teams_proxy} if self.ms_teams_proxy else None
payload = {
'@type': 'MessageCard',
'@context': 'http://schema.org/extensions',
'summary': self.ms_teams_alert_summary,
'title': self.create_title(matches),
'text': body
}
if self.ms_teams_theme_color != '':
payload['themeColor'] = self.ms_teams_theme_color
for url in self.ms_teams_webhook_url:
try:
response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to ms teams: %s" % e)
elastalert_logger.info("Alert sent to MS Teams")
def get_info(self):
return {'type': 'ms_teams',
'ms_teams_webhook_url': self.ms_teams_webhook_url}
class SlackAlerter(Alerter):
""" Creates a Slack room message for each alert """
required_options = frozenset(['slack_webhook_url'])
def __init__(self, rule):
super(SlackAlerter, self).__init__(rule)
self.slack_webhook_url = self.rule['slack_webhook_url']
if isinstance(self.slack_webhook_url, basestring):
self.slack_webhook_url = [self.slack_webhook_url]
self.slack_proxy = self.rule.get('slack_proxy', None)
self.slack_username_override = self.rule.get('slack_username_override', 'elastalert')
self.slack_channel_override = self.rule.get('slack_channel_override', '')
self.slack_emoji_override = self.rule.get('slack_emoji_override', ':ghost:')
self.slack_icon_url_override = self.rule.get('slack_icon_url_override', '')
self.slack_msg_color = self.rule.get('slack_msg_color', 'danger')
self.slack_parse_override = self.rule.get('slack_parse_override', 'none')
self.slack_text_string = self.rule.get('slack_text_string', '')
def format_body(self, body):
# https://api.slack.com/docs/formatting
body = body.encode('UTF-8')
body = body.replace('&', '&')
body = body.replace('<', '<')
body = body.replace('>', '>')
return body
def alert(self, matches):
body = self.create_alert_body(matches)
body = self.format_body(body)
# post to slack
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.slack_proxy} if self.slack_proxy else None
payload = {
'username': self.slack_username_override,
'channel': self.slack_channel_override,
'parse': self.slack_parse_override,
'text': self.slack_text_string,
'attachments': [
{
'color': self.slack_msg_color,
'title': self.create_title(matches),
'text': body,
'mrkdwn_in': ['text', 'pretext'],
'fields': []
}
]
}
if self.slack_icon_url_override != '':
payload['icon_url'] = self.slack_icon_url_override
else:
payload['icon_emoji'] = self.slack_emoji_override
for url in self.slack_webhook_url:
try:
response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to slack: %s" % e)
elastalert_logger.info("Alert sent to Slack")
def get_info(self):
return {'type': 'slack',
'slack_username_override': self.slack_username_override,
'slack_webhook_url': self.slack_webhook_url}
class PagerDutyAlerter(Alerter):
""" Create an incident on PagerDuty for each alert """
required_options = frozenset(['pagerduty_service_key', 'pagerduty_client_name'])
def __init__(self, rule):
super(PagerDutyAlerter, self).__init__(rule)
self.pagerduty_service_key = self.rule['pagerduty_service_key']
self.pagerduty_client_name = self.rule['pagerduty_client_name']
self.pagerduty_incident_key = self.rule.get('pagerduty_incident_key', '')
self.pagerduty_incident_key_args = self.rule.get('pagerduty_incident_key_args', None)
self.pagerduty_proxy = self.rule.get('pagerduty_proxy', None)
self.url = 'https://events.pagerduty.com/generic/2010-04-15/create_event.json'
def alert(self, matches):
body = self.create_alert_body(matches)
# post to pagerduty
headers = {'content-type': 'application/json'}
payload = {
'service_key': self.pagerduty_service_key,
'description': self.create_title(matches),
'event_type': 'trigger',
'incident_key': self.get_incident_key(matches),
'client': self.pagerduty_client_name,
'details': {
"information": body.encode('UTF-8'),
},
}
# set https proxy, if it was provided
proxies = {'https': self.pagerduty_proxy} if self.pagerduty_proxy else None
try:
response = requests.post(
self.url,
data=json.dumps(payload, cls=DateTimeEncoder, ensure_ascii=False),
headers=headers,
proxies=proxies
)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to pagerduty: %s" % e)
elastalert_logger.info("Trigger sent to PagerDuty")
def get_incident_key(self, matches):
if self.pagerduty_incident_key_args:
incident_key_values = [lookup_es_key(matches[0], arg) for arg in self.pagerduty_incident_key_args]
# Populate values with rule level properties too
for i in range(len(incident_key_values)):
if incident_key_values[i] is None:
key_value = self.rule.get(self.pagerduty_incident_key_args[i])
if key_value:
incident_key_values[i] = key_value
incident_key_values = ['<MISSING VALUE>' if val is None else val for val in incident_key_values]
return self.pagerduty_incident_key.format(*incident_key_values)
else:
return self.pagerduty_incident_key
def get_info(self):
return {'type': 'pagerduty',
'pagerduty_client_name': self.pagerduty_client_name}
class ExotelAlerter(Alerter):
required_options = frozenset(['exotel_account_sid', 'exotel_auth_token', 'exotel_to_number', 'exotel_from_number'])
def __init__(self, rule):
super(ExotelAlerter, self).__init__(rule)
self.exotel_account_sid = self.rule['exotel_account_sid']
self.exotel_auth_token = self.rule['exotel_auth_token']
self.exotel_to_number = self.rule['exotel_to_number']
self.exotel_from_number = self.rule['exotel_from_number']
self.sms_body = self.rule.get('exotel_message_body', '')
def alert(self, matches):
client = Exotel(self.exotel_account_sid, self.exotel_auth_token)
try:
message_body = self.rule['name'] + self.sms_body
response = client.sms(self.rule['exotel_from_number'], self.rule['exotel_to_number'], message_body)
if response != 200:
raise EAException("Error posting to Exotel, response code is %s" % response)
except:
raise EAException("Error posting to Exotel"), None, sys.exc_info()[2]
elastalert_logger.info("Trigger sent to Exotel")
def get_info(self):
return {'type': 'exotel', 'exotel_account': self.exotel_account_sid}
class TwilioAlerter(Alerter):
required_options = frozenset(['twilio_account_sid', 'twilio_auth_token', 'twilio_to_number', 'twilio_from_number'])
def __init__(self, rule):
super(TwilioAlerter, self).__init__(rule)
self.twilio_account_sid = self.rule['twilio_account_sid']
self.twilio_auth_token = self.rule['twilio_auth_token']
self.twilio_to_number = self.rule['twilio_to_number']
self.twilio_from_number = self.rule['twilio_from_number']
def alert(self, matches):
client = TwilioClient(self.twilio_account_sid, self.twilio_auth_token)
try:
client.messages.create(body=self.rule['name'],
to=self.twilio_to_number,
from_=self.twilio_from_number)
except TwilioRestException as e:
raise EAException("Error posting to twilio: %s" % e)
elastalert_logger.info("Trigger sent to Twilio")
def get_info(self):
return {'type': 'twilio',
'twilio_client_name': self.twilio_from_number}
class VictorOpsAlerter(Alerter):
""" Creates a VictorOps Incident for each alert """
required_options = frozenset(['victorops_api_key', 'victorops_routing_key', 'victorops_message_type'])
def __init__(self, rule):
super(VictorOpsAlerter, self).__init__(rule)
self.victorops_api_key = self.rule['victorops_api_key']
self.victorops_routing_key = self.rule['victorops_routing_key']
self.victorops_message_type = self.rule['victorops_message_type']
self.victorops_entity_display_name = self.rule.get('victorops_entity_display_name', 'no entity display name')
self.url = 'https://alert.victorops.com/integrations/generic/20131114/alert/%s/%s' % (
self.victorops_api_key, self.victorops_routing_key)
self.victorops_proxy = self.rule.get('victorops_proxy', None)
def alert(self, matches):
body = self.create_alert_body(matches)
# post to victorops
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.victorops_proxy} if self.victorops_proxy else None
payload = {
"message_type": self.victorops_message_type,
"entity_display_name": self.victorops_entity_display_name,
"monitoring_tool": "ElastAlert",
"state_message": body
}
try:
response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to VictorOps: %s" % e)
elastalert_logger.info("Trigger sent to VictorOps")
def get_info(self):
return {'type': 'victorops',
'victorops_routing_key': self.victorops_routing_key}
class TelegramAlerter(Alerter):
""" Send a Telegram message via bot api for each alert """
required_options = frozenset(['telegram_bot_token', 'telegram_room_id'])
def __init__(self, rule):
super(TelegramAlerter, self).__init__(rule)
self.telegram_bot_token = self.rule['telegram_bot_token']
self.telegram_room_id = self.rule['telegram_room_id']
self.telegram_api_url = self.rule.get('telegram_api_url', 'api.telegram.org')
self.url = 'https://%s/bot%s/%s' % (self.telegram_api_url, self.telegram_bot_token, "sendMessage")
self.telegram_proxy = self.rule.get('telegram_proxy', None)
def alert(self, matches):
body = u'⚠ *%s* ⚠ ```\n' % (self.create_title(matches))
for match in matches:
body += unicode(BasicMatchString(self.rule, match))
# Separate text of aggregated alerts with dashes
if len(matches) > 1:
body += '\n----------------------------------------\n'
body += u' ```'
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.telegram_proxy} if self.telegram_proxy else None
payload = {
'chat_id': self.telegram_room_id,
'text': body,
'parse_mode': 'markdown',
'disable_web_page_preview': True
}
try:
response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies)
warnings.resetwarnings()
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to Telegram: %s" % e)
elastalert_logger.info(
"Alert sent to Telegram room %s" % self.telegram_room_id)
def get_info(self):
return {'type': 'telegram',
'telegram_room_id': self.telegram_room_id}
class GitterAlerter(Alerter):
""" Creates a Gitter activity message for each alert """
required_options = frozenset(['gitter_webhook_url'])
def __init__(self, rule):
super(GitterAlerter, self).__init__(rule)
self.gitter_webhook_url = self.rule['gitter_webhook_url']
self.gitter_proxy = self.rule.get('gitter_proxy', None)
self.gitter_msg_level = self.rule.get('gitter_msg_level', 'error')
def alert(self, matches):
body = self.create_alert_body(matches)
# post to Gitter
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.gitter_proxy} if self.gitter_proxy else None
payload = {
'message': body,
'level': self.gitter_msg_level
}
try:
response = requests.post(self.gitter_webhook_url, json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to Gitter: %s" % e)
elastalert_logger.info("Alert sent to Gitter")
def get_info(self):
return {'type': 'gitter',
'gitter_webhook_url': self.gitter_webhook_url}
class ServiceNowAlerter(Alerter):
""" Creates a ServiceNow alert """
required_options = set([
'username',
'password',
'servicenow_rest_url',
'short_description',
'comments',
'assignment_group',
'category',
'subcategory',
'cmdb_ci',
'caller_id'
])
def __init__(self, rule):
super(ServiceNowAlerter, self).__init__(rule)
self.servicenow_rest_url = self.rule['servicenow_rest_url']
self.servicenow_proxy = self.rule.get('servicenow_proxy', None)
def alert(self, matches):
for match in matches:
# Parse everything into description.
description = str(BasicMatchString(self.rule, match))
# Set proper headers
headers = {
"Content-Type": "application/json",
"Accept": "application/json;charset=utf-8"
}
proxies = {'https': self.servicenow_proxy} if self.servicenow_proxy else None
payload = {
"description": description,
"short_description": self.rule['short_description'],
"comments": self.rule['comments'],
"assignment_group": self.rule['assignment_group'],
"category": self.rule['category'],
"subcategory": self.rule['subcategory'],
"cmdb_ci": self.rule['cmdb_ci'],
"caller_id": self.rule["caller_id"]
}
try:
response = requests.post(
self.servicenow_rest_url,
auth=(self.rule['username'], self.rule['password']),
headers=headers,
data=json.dumps(payload, cls=DateTimeEncoder),
proxies=proxies
)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to ServiceNow: %s" % e)
elastalert_logger.info("Alert sent to ServiceNow")
def get_info(self):
return {'type': 'ServiceNow',
'self.servicenow_rest_url': self.servicenow_rest_url}
class HTTPPostAlerter(Alerter):
""" Requested elasticsearch indices are sent by HTTP POST. Encoded with JSON. """
def __init__(self, rule):
super(HTTPPostAlerter, self).__init__(rule)
post_url = self.rule.get('http_post_url')
if isinstance(post_url, basestring):
post_url = [post_url]
self.post_url = post_url
self.post_proxy = self.rule.get('http_post_proxy')
self.post_payload = self.rule.get('http_post_payload', {})
self.post_static_payload = self.rule.get('http_post_static_payload', {})
self.post_all_values = self.rule.get('http_post_all_values', not self.post_payload)
def alert(self, matches):
""" Each match will trigger a POST to the specified endpoint(s). """
for match in matches:
payload = match if self.post_all_values else {}
payload.update(self.post_static_payload)
for post_key, es_key in self.post_payload.items():
payload[post_key] = lookup_es_key(match, es_key)
headers = {
"Content-Type": "application/json",
"Accept": "application/json;charset=utf-8"
}
proxies = {'https': self.post_proxy} if self.post_proxy else None
for url in self.post_url:
try:
response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder),
headers=headers, proxies=proxies)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting HTTP Post alert: %s" % e)
elastalert_logger.info("HTTP Post alert sent.")
def get_info(self):
return {'type': 'http_post',
'http_post_webhook_url': self.post_url}
| [] |
DDC-NDRS/fledge-iot_fledge | tests/unit/python/fledge/services/core/scheduler/test_scheduler.py | 27a5e66a55daaab1aca14ce6e66f9f1e6efaef51 | # -*- coding: utf-8 -*-
# FLEDGE_BEGIN
# See: http://fledge-iot.readthedocs.io/
# FLEDGE_END
import asyncio
import datetime
import uuid
import time
import json
from unittest.mock import MagicMock, call
import sys
import copy
import pytest
from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager
from fledge.services.core.scheduler.entities import *
from fledge.services.core.scheduler.exceptions import *
from fledge.common.storage_client.storage_client import StorageClientAsync
__author__ = "Amarendra K Sinha"
__copyright__ = "Copyright (c) 2017 OSIsoft, LLC"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"
async def mock_task():
return ""
async def mock_process():
m = MagicMock()
m.pid = 9999
m.terminate = lambda: True
return m
@pytest.allure.feature("unit")
@pytest.allure.story("scheduler")
class TestScheduler:
async def scheduler_fixture(self, mocker):
# Changed in version 3.8: patch() now returns an AsyncMock if the target is an async function.
if sys.version_info.major == 3 and sys.version_info.minor >= 8:
_rv = await mock_process()
else:
_rv = asyncio.ensure_future(mock_process())
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
mocker.patch.object(scheduler, '_schedule_first_task')
mocker.patch.object(scheduler, '_ready', True)
mocker.patch.object(scheduler, '_paused', False)
mocker.patch.object(scheduler, '_process_scripts', return_value="North Readings to PI")
mocker.patch.object(scheduler, '_wait_for_task_completion', return_value=asyncio.ensure_future(mock_task()))
mocker.patch.object(scheduler, '_terminate_child_processes')
mocker.patch.object(asyncio, 'create_subprocess_exec', return_value=_rv)
await scheduler._get_schedules()
schedule = scheduler._ScheduleRow(
id=uuid.UUID("2b614d26-760f-11e7-b5a5-be2e44b06b34"),
process_name="North Readings to PI",
name="OMF to PI north",
type=Schedule.Type.INTERVAL,
repeat=datetime.timedelta(seconds=30),
repeat_seconds=30,
time=None,
day=None,
exclusive=True,
enabled=True)
log_exception = mocker.patch.object(scheduler._logger, "exception")
log_error = mocker.patch.object(scheduler._logger, "error")
log_debug = mocker.patch.object(scheduler._logger, "debug")
log_info = mocker.patch.object(scheduler._logger, "info")
return scheduler, schedule, log_info, log_exception, log_error, log_debug
@pytest.mark.asyncio
async def test__resume_check_schedules(self, mocker):
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
# WHEN
# Check IF part
mocker.patch.object(scheduler, '_scheduler_loop_sleep_task', asyncio.Task(asyncio.sleep(5)))
scheduler._resume_check_schedules()
# THEN
assert scheduler._check_processes_pending is False
# WHEN
# Check ELSE part
mocker.patch.object(scheduler, '_scheduler_loop_sleep_task', None)
scheduler._resume_check_schedules()
# THEN
assert scheduler._check_processes_pending is True
@pytest.mark.asyncio
async def test__wait_for_task_completion(self, mocker):
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
log_info = mocker.patch.object(scheduler._logger, "info")
mock_schedules = dict()
mock_schedule = scheduler._ScheduleRow(
id=uuid.UUID("2b614d26-760f-11e7-b5a5-be2e44b06b34"),
process_name="North Readings to PI",
name="OMF to PI north",
type=Schedule.Type.INTERVAL,
repeat=datetime.timedelta(seconds=30),
repeat_seconds=30,
time=None,
day=None,
exclusive=True,
enabled=True)
mock_schedules[mock_schedule.id] = mock_schedule
mock_task_process = scheduler._TaskProcess()
mock_task_processes = dict()
mock_task_process.process = await asyncio.create_subprocess_exec("sleep", ".1")
mock_task_process.schedule = mock_schedule
mock_task_id = uuid.uuid4()
mock_task_process.task_id = mock_task_id
mock_task_processes[mock_task_process.task_id] = mock_task_process
mock_schedule_executions = dict()
mock_schedule_execution = scheduler._ScheduleExecution()
mock_schedule_executions[mock_schedule.id] = mock_schedule_execution
mock_schedule_executions[mock_schedule.id].task_processes[mock_task_id] = mock_task_process
mocker.patch.object(scheduler, '_resume_check_schedules')
mocker.patch.object(scheduler, '_schedule_next_task')
mocker.patch.multiple(scheduler, _schedules=mock_schedules,
_task_processes=mock_task_processes,
_schedule_executions=mock_schedule_executions)
mocker.patch.object(scheduler, '_process_scripts', return_value="North Readings to PI")
# WHEN
await scheduler._wait_for_task_completion(mock_task_process)
# THEN
# After task completion, sleep above, no task processes should be left pending
assert 0 == len(scheduler._task_processes)
assert 0 == len(scheduler._schedule_executions[mock_schedule.id].task_processes)
args, kwargs = log_info.call_args_list[0]
assert 'OMF to PI north' in args
assert 'North Readings to PI' in args
@pytest.mark.asyncio
async def test__start_task(self, mocker):
# TODO: Mandatory - Add negative tests for full code coverage
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
log_info = mocker.patch.object(scheduler._logger, "info")
mocker.patch.object(scheduler, '_schedule_first_task')
await scheduler._get_schedules()
schedule = scheduler._ScheduleRow(
id=uuid.UUID("2b614d26-760f-11e7-b5a5-be2e44b06b34"),
process_name="North Readings to PI",
name="OMF to PI north",
type=Schedule.Type.INTERVAL,
repeat=datetime.timedelta(seconds=30),
repeat_seconds=30,
time=None,
day=None,
exclusive=True,
enabled=True)
mocker.patch.object(scheduler, '_ready', True)
mocker.patch.object(scheduler, '_resume_check_schedules')
        # Assert that there is no task queued for this schedule yet
with pytest.raises(KeyError) as excinfo:
assert scheduler._schedule_executions[schedule.id] is True
# Now queue task and assert that the task has been queued
await scheduler.queue_task(schedule.id)
assert isinstance(scheduler._schedule_executions[schedule.id], scheduler._ScheduleExecution)
# Changed in version 3.8: patch() now returns an AsyncMock if the target is an async function.
if sys.version_info.major == 3 and sys.version_info.minor >= 8:
_rv = await mock_process()
else:
_rv = asyncio.ensure_future(mock_process())
mocker.patch.object(asyncio, 'create_subprocess_exec', return_value=_rv)
mocker.patch.object(asyncio, 'ensure_future', return_value=asyncio.ensure_future(mock_task()))
mocker.patch.object(scheduler, '_resume_check_schedules')
mocker.patch.object(scheduler, '_process_scripts', return_value="North Readings to PI")
mocker.patch.object(scheduler, '_wait_for_task_completion')
# Confirm that task has not started yet
assert 0 == len(scheduler._schedule_executions[schedule.id].task_processes)
# WHEN
await scheduler._start_task(schedule)
# THEN
# Confirm that task has started
assert 1 == len(scheduler._schedule_executions[schedule.id].task_processes)
assert 1 == log_info.call_count
# assert call("Queued schedule '%s' for execution", 'OMF to PI north') == log_info.call_args_list[0]
args, kwargs = log_info.call_args_list[0]
assert "Process started: Schedule '%s' process '%s' task %s pid %s, %s running tasks\n%s" in args
assert 'OMF to PI north' in args
assert 'North Readings to PI' in args
@pytest.mark.asyncio
async def test_purge_tasks(self, mocker):
# TODO: Mandatory - Add negative tests for full code coverage
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
mocker.patch.multiple(scheduler, _ready=True, _paused=False)
mocker.patch.object(scheduler, '_max_completed_task_age', datetime.datetime.now())
# WHEN
await scheduler.purge_tasks()
# THEN
assert scheduler._purge_tasks_task is None
assert scheduler._last_task_purge_time is not None
@pytest.mark.asyncio
async def test__check_purge_tasks(self, mocker):
# TODO: Mandatory - Add negative tests for full code coverage
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
mocker.patch.multiple(scheduler, _purge_tasks_task=None,
_last_task_purge_time=None)
mocker.patch.object(scheduler, 'purge_tasks', return_value=asyncio.ensure_future(mock_task()))
# WHEN
scheduler._check_purge_tasks()
# THEN
assert scheduler._purge_tasks_task is not None
@pytest.mark.asyncio
async def test__check_schedules(self, mocker):
# TODO: Mandatory - Add negative tests for full code coverage
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
log_info = mocker.patch.object(scheduler._logger, "info")
current_time = time.time()
mocker.patch.multiple(scheduler, _max_running_tasks=10,
_start_time=current_time)
await scheduler._get_schedules()
mocker.patch.object(scheduler, '_start_task', return_value=asyncio.ensure_future(mock_task()))
# WHEN
earliest_start_time = await scheduler._check_schedules()
# THEN
assert earliest_start_time is not None
assert 3 == log_info.call_count
args0, kwargs0 = log_info.call_args_list[0]
args1, kwargs1 = log_info.call_args_list[1]
args2, kwargs2 = log_info.call_args_list[2]
assert 'stats collection' in args0
assert 'COAP listener south' in args1
assert 'OMF to PI north' in args2
@pytest.mark.asyncio
@pytest.mark.skip("_scheduler_loop() not suitable for unit testing. Will be tested during System tests.")
async def test__scheduler_loop(self, mocker):
pass
@pytest.mark.asyncio
async def test__schedule_next_timed_task(self, mocker):
# TODO: Mandatory - Add negative tests for full code coverage
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
log_info = mocker.patch.object(scheduler._logger, "info")
current_time = time.time()
mocker.patch.multiple(scheduler, _max_running_tasks=10,
_start_time=current_time)
await scheduler._get_schedules()
sch_id = uuid.UUID("2176eb68-7303-11e7-8cf7-a6006ad3dba0") # stat collector
sch = scheduler._schedules[sch_id]
sch_execution = scheduler._schedule_executions[sch_id]
time_before_call = sch_execution.next_start_time
# WHEN
next_dt = datetime.datetime.fromtimestamp(sch_execution.next_start_time)
next_dt += datetime.timedelta(seconds=sch.repeat_seconds)
scheduler._schedule_next_timed_task(sch, sch_execution, next_dt)
time_after_call = sch_execution.next_start_time
# THEN
assert time_after_call > time_before_call
assert 3 == log_info.call_count
args0, kwargs0 = log_info.call_args_list[0]
args1, kwargs1 = log_info.call_args_list[1]
args2, kwargs2 = log_info.call_args_list[2]
assert 'stats collection' in args0
assert 'COAP listener south' in args1
assert 'OMF to PI north' in args2
@pytest.mark.asyncio
async def test__schedule_next_task(self, mocker):
# TODO: Mandatory - Add negative tests for full code coverage
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
log_info = mocker.patch.object(scheduler._logger, "info")
current_time = time.time()
mocker.patch.multiple(scheduler, _max_running_tasks=10,
_start_time=current_time-3600)
await scheduler._get_schedules()
sch_id = uuid.UUID("2176eb68-7303-11e7-8cf7-a6006ad3dba0") # stat collector
sch = scheduler._schedules[sch_id]
sch_execution = scheduler._schedule_executions[sch_id]
time_before_call = sch_execution.next_start_time
# WHEN
scheduler._schedule_next_task(sch)
time_after_call = sch_execution.next_start_time
# THEN
assert time_after_call > time_before_call
assert 4 == log_info.call_count
args0, kwargs0 = log_info.call_args_list[0]
args1, kwargs1 = log_info.call_args_list[1]
args2, kwargs2 = log_info.call_args_list[2]
args3, kwargs3 = log_info.call_args_list[3]
assert 'stats collection' in args0
assert 'COAP listener south' in args1
assert 'OMF to PI north' in args2
# As part of scheduler._get_schedules(), scheduler._schedule_first_task() also gets executed, hence
# "stat collector" appears twice in this list.
assert 'stats collection' in args3
@pytest.mark.asyncio
async def test__schedule_first_task(self, mocker):
# TODO: Mandatory - Add negative tests for full code coverage
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
log_info = mocker.patch.object(scheduler._logger, "info")
current_time = time.time()
curr_time = datetime.datetime.fromtimestamp(current_time)
mocker.patch.multiple(scheduler, _max_running_tasks=10,
_start_time=current_time)
await scheduler._get_schedules()
sch_id = uuid.UUID("2176eb68-7303-11e7-8cf7-a6006ad3dba0") # stat collector
sch = scheduler._schedules[sch_id]
sch_execution = scheduler._schedule_executions[sch_id]
# WHEN
scheduler._schedule_first_task(sch, current_time)
time_after_call = sch_execution.next_start_time
# THEN
assert time_after_call > time.mktime(curr_time.timetuple())
assert 4 == log_info.call_count
args0, kwargs0 = log_info.call_args_list[0]
args1, kwargs1 = log_info.call_args_list[1]
args2, kwargs2 = log_info.call_args_list[2]
args3, kwargs3 = log_info.call_args_list[3]
assert 'stats collection' in args0
assert 'COAP listener south' in args1
assert 'OMF to PI north' in args2
# As part of scheduler._get_schedules(), scheduler._schedule_first_task() also gets executed, hence
# "stat collector" appears twice in this list.
assert 'stats collection' in args3
@pytest.mark.asyncio
async def test__get_process_scripts(self, mocker):
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
# WHEN
await scheduler._get_process_scripts()
# THEN
assert len(scheduler._storage_async.scheduled_processes) == len(scheduler._process_scripts)
@pytest.mark.asyncio
async def test__get_process_scripts_exception(self, mocker):
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
log_debug = mocker.patch.object(scheduler._logger, "debug", side_effect=Exception())
log_exception = mocker.patch.object(scheduler._logger, "exception")
# WHEN
# THEN
with pytest.raises(Exception):
await scheduler._get_process_scripts()
log_args = 'Query failed: %s', 'scheduled_processes'
log_exception.assert_called_once_with(*log_args)
@pytest.mark.asyncio
@pytest.mark.parametrize("test_interval, is_exception", [
('"Blah" 0 days', True),
('12:30:11', False),
('0 day 12:30:11', False),
('1 day 12:40:11', False),
('2 days', True),
('2 days 00:00:59', False),
('00:25:61', True)
])
async def test__get_schedules(self, test_interval, is_exception, mocker):
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
mocker.patch.object(scheduler, '_schedule_first_task')
log_exception = mocker.patch.object(scheduler._logger, "exception")
new_schedules = copy.deepcopy(MockStorageAsync.schedules)
new_schedules[5]['schedule_interval'] = test_interval
mocker.patch.object(MockStorageAsync, 'schedules', new_schedules)
# WHEN
# THEN
if is_exception is True:
with pytest.raises(Exception):
await scheduler._get_schedules()
assert 1 == log_exception.call_count
else:
await scheduler._get_schedules()
assert len(scheduler._storage_async.schedules) == len(scheduler._schedules)
@pytest.mark.asyncio
async def test__get_schedules_exception(self, mocker):
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
log_debug = mocker.patch.object(scheduler._logger, "debug", side_effect=Exception())
log_exception = mocker.patch.object(scheduler._logger, "exception")
mocker.patch.object(scheduler, '_schedule_first_task', side_effect=Exception())
# WHEN
# THEN
with pytest.raises(Exception):
await scheduler._get_schedules()
log_args = 'Query failed: %s', 'schedules'
log_exception.assert_called_once_with(*log_args)
@pytest.mark.asyncio
async def test__read_storage(self, mocker):
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
mocker.patch.object(scheduler, '_schedule_first_task')
# WHEN
await scheduler._read_storage()
# THEN
assert len(scheduler._storage_async.scheduled_processes) == len(scheduler._process_scripts)
assert len(scheduler._storage_async.schedules) == len(scheduler._schedules)
@pytest.mark.asyncio
@pytest.mark.skip("_mark_tasks_interrupted() not implemented in main Scheduler class.")
async def test__mark_tasks_interrupted(self, mocker):
pass
@pytest.mark.asyncio
async def test__read_config(self, mocker):
async def get_cat():
return {
"max_running_tasks": {
"description": "The maximum number of tasks that can be running at any given time",
"type": "integer",
"default": str(Scheduler._DEFAULT_MAX_RUNNING_TASKS),
"value": str(Scheduler._DEFAULT_MAX_RUNNING_TASKS)
},
"max_completed_task_age_days": {
"description": "The maximum age, in days (based on the start time), for a rows "
"in the tasks table that do not have a status of running",
"type": "integer",
"default": str(Scheduler._DEFAULT_MAX_COMPLETED_TASK_AGE_DAYS),
"value": str(Scheduler._DEFAULT_MAX_COMPLETED_TASK_AGE_DAYS)
},
}
# Changed in version 3.8: patch() now returns an AsyncMock if the target is an async function.
if sys.version_info.major == 3 and sys.version_info.minor >= 8:
_rv = await get_cat()
else:
_rv = asyncio.ensure_future(get_cat())
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
cr_cat = mocker.patch.object(ConfigurationManager, "create_category", return_value=asyncio.ensure_future(mock_task()))
get_cat = mocker.patch.object(ConfigurationManager, "get_category_all_items", return_value=_rv)
# WHEN
assert scheduler._max_running_tasks is None
assert scheduler._max_completed_task_age is None
await scheduler._read_config()
# THEN
assert 1 == cr_cat.call_count
assert 1 == get_cat.call_count
assert scheduler._max_running_tasks is not None
assert scheduler._max_completed_task_age is not None
@pytest.mark.asyncio
async def test_start(self, mocker):
# TODO: Mandatory - Add negative tests for full code coverage
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
log_debug = mocker.patch.object(scheduler._logger, "debug")
log_info = mocker.patch.object(scheduler._logger, "info")
current_time = time.time()
mocker.patch.object(scheduler, '_schedule_first_task')
mocker.patch.object(scheduler, '_scheduler_loop', return_value=asyncio.ensure_future(mock_task()))
mocker.patch.multiple(scheduler, _core_management_port=9999,
_core_management_host="0.0.0.0",
current_time=current_time - 3600)
# TODO: Remove after implementation of above test test__read_config()
mocker.patch.object(scheduler, '_read_config', return_value=asyncio.ensure_future(mock_task()))
assert scheduler._ready is False
# WHEN
await scheduler.start()
# THEN
assert scheduler._ready is True
assert len(scheduler._storage_async.scheduled_processes) == len(scheduler._process_scripts)
assert len(scheduler._storage_async.schedules) == len(scheduler._schedules)
calls = [call('Starting'),
call('Starting Scheduler: Management port received is %d', 9999)]
log_info.assert_has_calls(calls, any_order=True)
calls = [call('Database command: %s', 'scheduled_processes'),
call('Database command: %s', 'schedules')]
log_debug.assert_has_calls(calls, any_order=True)
@pytest.mark.asyncio
async def test_stop(self, mocker):
# TODO: Mandatory - Add negative tests for full code coverage
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
log_info = mocker.patch.object(scheduler._logger, "info")
log_exception = mocker.patch.object(scheduler._logger, "exception")
mocker.patch.object(scheduler, '_scheduler_loop', return_value=asyncio.ensure_future(mock_task()))
mocker.patch.object(scheduler, '_resume_check_schedules', return_value=asyncio.ensure_future(mock_task()))
mocker.patch.object(scheduler, '_purge_tasks_task', return_value=asyncio.ensure_future(asyncio.sleep(.1)))
mocker.patch.object(scheduler, '_scheduler_loop_task', return_value=asyncio.ensure_future(asyncio.sleep(.1)))
current_time = time.time()
mocker.patch.multiple(scheduler, _core_management_port=9999,
_core_management_host="0.0.0.0",
_start_time=current_time - 3600,
_paused=False,
_task_processes={})
# WHEN
retval = await scheduler.stop()
# THEN
assert retval is True
assert scheduler._schedule_executions is None
assert scheduler._task_processes is None
assert scheduler._schedules is None
assert scheduler._process_scripts is None
assert scheduler._ready is False
assert scheduler._paused is False
assert scheduler._start_time is None
calls = [call('Processing stop request'), call('Stopped')]
log_info.assert_has_calls(calls, any_order=True)
# TODO: Find why these exceptions are being raised despite mocking _purge_tasks_task, _scheduler_loop_task
calls = [call('An exception was raised by Scheduler._purge_tasks %s', "object MagicMock can't be used in 'await' expression"),
call('An exception was raised by Scheduler._scheduler_loop %s', "object MagicMock can't be used in 'await' expression")]
log_exception.assert_has_calls(calls)
@pytest.mark.asyncio
async def test_get_scheduled_processes(self, mocker):
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
await scheduler._get_process_scripts()
mocker.patch.object(scheduler, '_ready', True)
# WHEN
processes = await scheduler.get_scheduled_processes()
# THEN
assert len(scheduler._storage_async.scheduled_processes) == len(processes)
@pytest.mark.asyncio
async def test_schedule_row_to_schedule(self, mocker):
# GIVEN
scheduler = Scheduler()
schedule_id = uuid.uuid4()
schedule_row = scheduler._ScheduleRow(
id=schedule_id,
name='Test Schedule',
type=Schedule.Type.INTERVAL,
day=0,
time=0,
repeat=10,
repeat_seconds=10,
exclusive=False,
enabled=True,
process_name='TestProcess')
# WHEN
schedule = scheduler._schedule_row_to_schedule(schedule_id, schedule_row)
# THEN
assert isinstance(schedule, Schedule)
assert schedule.schedule_id == schedule_row[0]
assert schedule.name == schedule_row[1]
assert schedule.schedule_type == schedule_row[2]
        assert schedule_row[3] == 0  # 0 for Interval Schedule
        assert schedule_row[4] == 0  # 0 for Interval Schedule
assert schedule.repeat == schedule_row[5]
assert schedule.exclusive == schedule_row[7]
assert schedule.enabled == schedule_row[8]
assert schedule.process_name == schedule_row[9]
@pytest.mark.asyncio
async def test_get_schedules(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
# WHEN
schedules = await scheduler.get_schedules()
# THEN
assert len(scheduler._storage_async.schedules) == len(schedules)
@pytest.mark.asyncio
async def test_get_schedule(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
schedule_id = uuid.UUID("cea17db8-6ccc-11e7-907b-a6006ad3dba0") # purge schedule
# WHEN
schedule = await scheduler.get_schedule(schedule_id)
# THEN
assert isinstance(schedule, Schedule)
assert schedule.schedule_id == schedule_id
assert schedule.name == "purge"
assert schedule.schedule_type == Schedule.Type.MANUAL
assert schedule.repeat == datetime.timedelta(0, 3600)
assert schedule.exclusive is True
assert schedule.enabled is True
assert schedule.process_name == "purge"
@pytest.mark.asyncio
async def test_get_schedule_exception(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
schedule_id = uuid.uuid4()
# WHEN
# THEN
with pytest.raises(ScheduleNotFoundError):
schedule = await scheduler.get_schedule(schedule_id)
@pytest.mark.asyncio
async def test_save_schedule_new(self, mocker):
@asyncio.coroutine
def mock_coro():
return ""
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
audit_logger = mocker.patch.object(AuditLogger, 'information', return_value=asyncio.ensure_future(mock_task()))
first_task = mocker.patch.object(scheduler, '_schedule_first_task')
resume_sch = mocker.patch.object(scheduler, '_resume_check_schedules')
log_info = mocker.patch.object(scheduler._logger, "info")
enable_schedule = mocker.patch.object(scheduler, "enable_schedule", return_value=mock_coro())
disable_schedule = mocker.patch.object(scheduler, "disable_schedule", return_value=mock_coro())
schedule_id = uuid.uuid4()
schedule_row = scheduler._ScheduleRow(
id=schedule_id,
name='Test Schedule',
type=Schedule.Type.INTERVAL,
day=0,
time=0,
repeat=datetime.timedelta(seconds=30),
repeat_seconds=30,
exclusive=False,
enabled=True,
process_name='TestProcess')
schedule = scheduler._schedule_row_to_schedule(schedule_id, schedule_row)
# WHEN
await scheduler.save_schedule(schedule)
# THEN
assert len(scheduler._storage_async.schedules) + 1 == len(scheduler._schedules)
assert 1 == audit_logger.call_count
        calls = [call('SCHAD', {'schedule': {'name': 'Test Schedule', 'processName': 'TestProcess',
'type': Schedule.Type.INTERVAL, 'repeat': 30.0, 'enabled': True,
'exclusive': False}})]
audit_logger.assert_has_calls(calls, any_order=True)
assert 1 == first_task.call_count
assert 1 == resume_sch.call_count
assert 0 == enable_schedule.call_count
assert 0 == disable_schedule.call_count
@pytest.mark.asyncio
async def test_save_schedule_new_with_enable_modified(self, mocker):
@asyncio.coroutine
def mock_coro():
return ""
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
audit_logger = mocker.patch.object(AuditLogger, 'information', return_value=asyncio.ensure_future(mock_task()))
first_task = mocker.patch.object(scheduler, '_schedule_first_task')
resume_sch = mocker.patch.object(scheduler, '_resume_check_schedules')
log_info = mocker.patch.object(scheduler._logger, "info")
enable_schedule = mocker.patch.object(scheduler, "enable_schedule", return_value=mock_coro())
disable_schedule = mocker.patch.object(scheduler, "disable_schedule", return_value=mock_coro())
schedule_id = uuid.uuid4()
schedule_row = scheduler._ScheduleRow(
id=schedule_id,
name='Test Schedule',
type=Schedule.Type.INTERVAL,
day=0,
time=0,
repeat=datetime.timedelta(seconds=30),
repeat_seconds=30,
exclusive=False,
enabled=True,
process_name='TestProcess')
schedule = scheduler._schedule_row_to_schedule(schedule_id, schedule_row)
# WHEN
await scheduler.save_schedule(schedule, is_enabled_modified=True)
# THEN
assert len(scheduler._storage_async.schedules) + 1 == len(scheduler._schedules)
assert 1 == audit_logger.call_count
        calls = [call('SCHAD', {'schedule': {'name': 'Test Schedule', 'processName': 'TestProcess',
'type': Schedule.Type.INTERVAL, 'repeat': 30.0, 'enabled': True,
'exclusive': False}})]
audit_logger.assert_has_calls(calls, any_order=True)
assert 1 == first_task.call_count
assert 1 == resume_sch.call_count
assert 1 == enable_schedule.call_count
assert 0 == disable_schedule.call_count
# WHEN
await scheduler.save_schedule(schedule, is_enabled_modified=False)
# THEN
assert 1 == disable_schedule.call_count
@pytest.mark.asyncio
async def test_save_schedule_update(self, mocker):
@asyncio.coroutine
def mock_coro():
return ""
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
audit_logger = mocker.patch.object(AuditLogger, 'information', return_value=asyncio.ensure_future(mock_task()))
first_task = mocker.patch.object(scheduler, '_schedule_first_task')
resume_sch = mocker.patch.object(scheduler, '_resume_check_schedules')
log_info = mocker.patch.object(scheduler._logger, "info")
schedule_id = uuid.UUID("2b614d26-760f-11e7-b5a5-be2e44b06b34") # OMF to PI North
schedule_row = scheduler._ScheduleRow(
id=schedule_id,
name='Test Schedule',
type=Schedule.Type.TIMED,
day=1,
time=datetime.time(),
repeat=datetime.timedelta(seconds=30),
repeat_seconds=30,
exclusive=False,
enabled=True,
process_name='TestProcess')
schedule = scheduler._schedule_row_to_schedule(schedule_id, schedule_row)
enable_schedule = mocker.patch.object(scheduler, "enable_schedule", return_value=mock_coro())
disable_schedule = mocker.patch.object(scheduler, "disable_schedule", return_value=mock_coro())
# WHEN
await scheduler.save_schedule(schedule)
# THEN
assert len(scheduler._storage_async.schedules) == len(scheduler._schedules)
assert 1 == audit_logger.call_count
calls = [call('SCHCH', {'schedule': {'name': 'Test Schedule', 'enabled': True, 'repeat': 30.0,
'exclusive': False, 'day': 1, 'time': '0:0:0',
'processName': 'TestProcess', 'type': Schedule.Type.TIMED}})]
audit_logger.assert_has_calls(calls, any_order=True)
assert 1 == first_task.call_count
assert 1 == resume_sch.call_count
assert 0 == enable_schedule.call_count
assert 0 == disable_schedule.call_count
@pytest.mark.asyncio
async def test_save_schedule_update_with_enable_modified(self, mocker):
@asyncio.coroutine
def mock_coro():
return ""
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
audit_logger = mocker.patch.object(AuditLogger, 'information', return_value=asyncio.ensure_future(mock_task()))
first_task = mocker.patch.object(scheduler, '_schedule_first_task')
resume_sch = mocker.patch.object(scheduler, '_resume_check_schedules')
log_info = mocker.patch.object(scheduler._logger, "info")
schedule_id = uuid.UUID("2b614d26-760f-11e7-b5a5-be2e44b06b34") # OMF to PI North
schedule_row = scheduler._ScheduleRow(
id=schedule_id,
name='Test Schedule',
type=Schedule.Type.TIMED,
day=1,
time=datetime.time(),
repeat=datetime.timedelta(seconds=30),
repeat_seconds=30,
exclusive=False,
enabled=True,
process_name='TestProcess')
schedule = scheduler._schedule_row_to_schedule(schedule_id, schedule_row)
enable_schedule = mocker.patch.object(scheduler, "enable_schedule", return_value=mock_coro())
disable_schedule = mocker.patch.object(scheduler, "disable_schedule", return_value=mock_coro())
# WHEN
await scheduler.save_schedule(schedule, is_enabled_modified=True)
# THEN
assert len(scheduler._storage_async.schedules) == len(scheduler._schedules)
assert 1 == audit_logger.call_count
calls = [call('SCHCH', {'schedule': {'name': 'Test Schedule', 'enabled': True, 'repeat': 30.0,
'exclusive': False, 'day': 1, 'time': '0:0:0',
'processName': 'TestProcess', 'type': Schedule.Type.TIMED}})]
audit_logger.assert_has_calls(calls, any_order=True)
assert 1 == first_task.call_count
assert 1 == resume_sch.call_count
assert 1 == enable_schedule.call_count
assert 0 == disable_schedule.call_count
# WHEN
await scheduler.save_schedule(schedule, is_enabled_modified=False)
# THEN
assert 1 == disable_schedule.call_count
@pytest.mark.asyncio
async def test_save_schedule_exception(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
schedule_id = uuid.uuid4()
schedule_row = scheduler._ScheduleRow(
id=schedule_id,
name='Test Schedule',
type=Schedule.Type.TIMED,
day=0,
time=0,
repeat=datetime.timedelta(seconds=30),
repeat_seconds=30,
exclusive=False,
enabled=True,
process_name='TestProcess')
# WHEN
# THEN
with pytest.raises(ValueError) as ex:
temp_schedule = scheduler._schedule_row_to_schedule(schedule_id, schedule_row)
temp_schedule.name = None
await scheduler.save_schedule(temp_schedule)
del temp_schedule
assert str(ex).endswith("name can not be empty")
with pytest.raises(ValueError) as ex:
temp_schedule = scheduler._schedule_row_to_schedule(schedule_id, schedule_row)
temp_schedule.name = ""
await scheduler.save_schedule(temp_schedule)
del temp_schedule
assert str(ex).endswith("name can not be empty")
with pytest.raises(ValueError) as ex:
temp_schedule = scheduler._schedule_row_to_schedule(schedule_id, schedule_row)
temp_schedule.repeat = 1234
await scheduler.save_schedule(temp_schedule)
del temp_schedule
assert str(ex).endswith('repeat must be of type datetime.timedelta')
with pytest.raises(ValueError) as ex:
temp_schedule = scheduler._schedule_row_to_schedule(schedule_id, schedule_row)
temp_schedule.exclusive = None
await scheduler.save_schedule(temp_schedule)
del temp_schedule
assert str(ex).endswith('exclusive can not be None')
with pytest.raises(ValueError) as ex:
temp_schedule = scheduler._schedule_row_to_schedule(schedule_id, schedule_row)
temp_schedule.time = 1234
await scheduler.save_schedule(temp_schedule)
del temp_schedule
assert str(ex).endswith('time must be of type datetime.time')
with pytest.raises(ValueError) as ex:
temp_schedule = scheduler._schedule_row_to_schedule(schedule_id, schedule_row)
temp_schedule.day = 0
temp_schedule.time = datetime.time()
await scheduler.save_schedule(temp_schedule)
del temp_schedule
assert str(ex).endswith('day must be between 1 and 7')
@pytest.mark.asyncio
@pytest.mark.skip(reason="To be done")
async def test_remove_service_from_task_processes(self):
pass
@pytest.mark.asyncio
async def test_disable_schedule(self, mocker):
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
mocker.patch.object(scheduler, '_schedule_first_task')
await scheduler._get_schedules()
mocker.patch.object(scheduler, '_ready', True)
mocker.patch.object(scheduler, '_task_processes')
audit_logger = mocker.patch.object(AuditLogger, 'information', return_value=asyncio.ensure_future(mock_task()))
log_info = mocker.patch.object(scheduler._logger, "info")
sch_id = uuid.UUID("2b614d26-760f-11e7-b5a5-be2e44b06b34") # OMF to PI North
# WHEN
status, message = await scheduler.disable_schedule(sch_id)
# THEN
assert status is True
assert message == "Schedule successfully disabled"
assert (scheduler._schedules[sch_id]).id == sch_id
assert (scheduler._schedules[sch_id]).enabled is False
assert 2 == log_info.call_count
calls = [call('No Task running for Schedule %s', '2b614d26-760f-11e7-b5a5-be2e44b06b34'),
call("Disabled Schedule '%s/%s' process '%s'\n", 'OMF to PI north',
'2b614d26-760f-11e7-b5a5-be2e44b06b34', 'North Readings to PI')]
log_info.assert_has_calls(calls)
assert 1 == audit_logger.call_count
calls = [call('SCHCH', {'schedule': {'name': 'OMF to PI north', 'repeat': 30.0, 'enabled': False,
'type': Schedule.Type.INTERVAL, 'exclusive': True,
'processName': 'North Readings to PI'}})]
audit_logger.assert_has_calls(calls, any_order=True)
@pytest.mark.asyncio
async def test_disable_schedule_wrong_schedule_id(self, mocker):
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
mocker.patch.object(scheduler, '_schedule_first_task')
await scheduler._get_schedules()
mocker.patch.object(scheduler, '_ready', True)
mocker.patch.object(scheduler, '_task_processes')
log_exception = mocker.patch.object(scheduler._logger, "exception")
random_schedule_id = uuid.uuid4()
# WHEN
await scheduler.disable_schedule(random_schedule_id)
# THEN
log_params = "No such Schedule %s", str(random_schedule_id)
log_exception.assert_called_with(*log_params)
@pytest.mark.asyncio
async def test_disable_schedule_already_disabled(self, mocker):
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
mocker.patch.object(scheduler, '_schedule_first_task')
await scheduler._get_schedules()
mocker.patch.object(scheduler, '_ready', True)
mocker.patch.object(scheduler, '_task_processes')
log_info = mocker.patch.object(scheduler._logger, "info")
sch_id = uuid.UUID("d1631422-9ec6-11e7-abc4-cec278b6b50a") # backup
# WHEN
status, message = await scheduler.disable_schedule(sch_id)
# THEN
assert status is True
assert message == "Schedule {} already disabled".format(str(sch_id))
assert (scheduler._schedules[sch_id]).id == sch_id
assert (scheduler._schedules[sch_id]).enabled is False
log_params = "Schedule %s already disabled", str(sch_id)
log_info.assert_called_with(*log_params)
@pytest.mark.asyncio
async def test_enable_schedule(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
sch_id = uuid.UUID("d1631422-9ec6-11e7-abc4-cec278b6b50a") # backup
queue_task = mocker.patch.object(scheduler, 'queue_task', return_value=asyncio.ensure_future(mock_task()))
audit_logger = mocker.patch.object(AuditLogger, 'information', return_value=asyncio.ensure_future(mock_task()))
# WHEN
status, message = await scheduler.enable_schedule(sch_id)
# THEN
assert status is True
assert message == "Schedule successfully enabled"
assert (scheduler._schedules[sch_id]).id == sch_id
assert (scheduler._schedules[sch_id]).enabled is True
assert 1 == queue_task.call_count
calls = [call("Enabled Schedule '%s/%s' process '%s'\n", 'backup hourly', 'd1631422-9ec6-11e7-abc4-cec278b6b50a', 'backup')]
log_info.assert_has_calls(calls, any_order=True)
assert 1 == audit_logger.call_count
calls = [call('SCHCH', {'schedule': {'name': 'backup hourly', 'type': Schedule.Type.INTERVAL, 'processName': 'backup', 'exclusive': True, 'repeat': 3600.0, 'enabled': True}})]
audit_logger.assert_has_calls(calls, any_order=True)
@pytest.mark.asyncio
async def test_enable_schedule_already_enabled(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
sch_id = uuid.UUID("ada12840-68d3-11e7-907b-a6006ad3dba0") #Coap
mocker.patch.object(scheduler, 'queue_task', return_value=asyncio.ensure_future(mock_task()))
# WHEN
status, message = await scheduler.enable_schedule(sch_id)
# THEN
assert status is True
assert message == "Schedule is already enabled"
assert (scheduler._schedules[sch_id]).id == sch_id
assert (scheduler._schedules[sch_id]).enabled is True
log_params = "Schedule %s already enabled", str(sch_id)
log_info.assert_called_with(*log_params)
@pytest.mark.asyncio
async def test_enable_schedule_wrong_schedule_id(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
random_schedule_id = uuid.uuid4()
# WHEN
await scheduler.enable_schedule(random_schedule_id)
# THEN
log_params = "No such Schedule %s", str(random_schedule_id)
log_exception.assert_called_with(*log_params)
@pytest.mark.asyncio
async def test_queue_task(self, mocker):
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
mocker.patch.object(scheduler, '_schedule_first_task')
# log_info = mocker.patch.object(scheduler._logger, "info")
await scheduler._get_schedules()
sch_id = uuid.UUID("cea17db8-6ccc-11e7-907b-a6006ad3dba0") # backup
mocker.patch.object(scheduler, '_ready', True)
mocker.patch.object(scheduler, '_resume_check_schedules')
# Assert that there is no task queued for this schedule at first
with pytest.raises(KeyError) as excinfo:
assert scheduler._schedule_executions[sch_id] is True
# WHEN
await scheduler.queue_task(sch_id)
# THEN
assert isinstance(scheduler._schedule_executions[sch_id], scheduler._ScheduleExecution)
# log_params = "Queued schedule '%s' for execution", 'purge'
# log_info.assert_called_with(*log_params)
@pytest.mark.asyncio
async def test_queue_task_schedule_not_found(self, mocker):
# GIVEN
scheduler = Scheduler()
scheduler._storage = MockStorage(core_management_host=None, core_management_port=None)
scheduler._storage_async = MockStorageAsync(core_management_host=None, core_management_port=None)
mocker.patch.object(scheduler, '_schedule_first_task')
mocker.patch.object(scheduler, '_ready', True)
mocker.patch.object(scheduler, '_resume_check_schedules')
# WHEN
# THEN
with pytest.raises(ScheduleNotFoundError) as excinfo:
await scheduler.queue_task(uuid.uuid4())
@pytest.mark.asyncio
async def test_delete_schedule(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
sch_id = uuid.UUID("d1631422-9ec6-11e7-abc4-cec278b6b50a") # backup
await scheduler._get_schedules()
# Confirm no. of schedules
assert len(scheduler._storage_async.schedules) == len(scheduler._schedules)
mocker.patch.object(scheduler, '_ready', True)
# WHEN
# Now delete schedule
await scheduler.delete_schedule(sch_id)
# THEN
# Now confirm there is one schedule less
assert len(scheduler._storage_async.schedules) - 1 == len(scheduler._schedules)
@pytest.mark.asyncio
async def test_delete_schedule_enabled_schedule(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
sch_id = uuid.UUID("ada12840-68d3-11e7-907b-a6006ad3dba0") #Coap
await scheduler._get_schedules()
mocker.patch.object(scheduler, '_ready', True)
        # Confirm all schedules from storage are present
assert len(scheduler._storage_async.schedules) == len(scheduler._schedules)
# WHEN
# Now delete schedule
with pytest.raises(RuntimeWarning):
await scheduler.delete_schedule(sch_id)
# THEN
# Now confirm no schedule is deleted
assert len(scheduler._storage_async.schedules) == len(scheduler._schedules)
assert 1 == log_exception.call_count
log_params = 'Attempt to delete an enabled Schedule %s. Not deleted.', str(sch_id)
log_exception.assert_called_with(*log_params)
@pytest.mark.asyncio
async def test_delete_schedule_exception(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
log_debug = mocker.patch.object(scheduler._logger, 'debug', side_effect=Exception())
sch_id = uuid.UUID("d1631422-9ec6-11e7-abc4-cec278b6b50a") # backup
# WHEN
# THEN
with pytest.raises(ScheduleNotFoundError) as excinfo:
await scheduler.delete_schedule(uuid.uuid4())
@pytest.mark.asyncio
async def test_delete_schedule_not_found(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
# WHEN
# THEN
with pytest.raises(ScheduleNotFoundError) as excinfo:
await scheduler.delete_schedule(uuid.uuid4())
@pytest.mark.asyncio
async def test_get_running_tasks(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
# Assert that there is no task queued for schedule
with pytest.raises(KeyError) as excinfo:
assert scheduler._schedule_executions[schedule.id] is True
# Now queue task and assert that the task has been queued
await scheduler.queue_task(schedule.id)
assert isinstance(scheduler._schedule_executions[schedule.id], scheduler._ScheduleExecution)
# Confirm that no task has started yet
assert 0 == len(scheduler._schedule_executions[schedule.id].task_processes)
await scheduler._start_task(schedule)
# Confirm that task has started
assert 1 == len(scheduler._schedule_executions[schedule.id].task_processes)
# WHEN
tasks = await scheduler.get_running_tasks()
# THEN
assert 1 == len(tasks)
assert schedule.process_name == tasks[0].process_name
assert tasks[0].reason is None
assert tasks[0].state == Task.State.RUNNING
assert tasks[0].cancel_requested is None
assert tasks[0].start_time is not None
assert tasks[0].end_time is None
assert tasks[0].exit_code is None
@pytest.mark.asyncio
async def test_get_task(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
# Assert that there is no North task queued for schedule
with pytest.raises(KeyError) as excinfo:
assert scheduler._schedule_executions[schedule.id] is True
# Now queue task and assert that the North task has been queued
await scheduler.queue_task(schedule.id)
assert isinstance(scheduler._schedule_executions[schedule.id], scheduler._ScheduleExecution)
# Confirm that no task has started yet
assert 0 == len(scheduler._schedule_executions[schedule.id].task_processes)
await scheduler._start_task(schedule)
# Confirm that task has started
assert 1 == len(scheduler._schedule_executions[schedule.id].task_processes)
task_id = list(scheduler._schedule_executions[schedule.id].task_processes.keys())[0]
# WHEN
task = await scheduler.get_task(task_id)
# THEN
assert schedule.process_name == task.process_name
        assert task.reason == ''
assert task.state is not None
assert task.cancel_requested is None
assert task.start_time is not None
assert task.end_time is not None
        assert task.exit_code == '0'
@pytest.mark.skip("Need a suitable fixture")
@pytest.mark.asyncio
async def test_get_task_not_found(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
# WHEN
# THEN
with pytest.raises(TaskNotFoundError) as excinfo:
tasks = await scheduler.get_task(uuid.uuid4())
@pytest.mark.asyncio
async def test_get_task_exception(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
log_debug = mocker.patch.object(scheduler._logger, 'debug', side_effect=Exception())
# WHEN
# THEN
task_id = uuid.uuid4()
with pytest.raises(Exception) as excinfo:
await scheduler.get_task(task_id)
# THEN
payload = {"return": ["id", "process_name", "schedule_name", "state", {"alias": "start_time", "format": "YYYY-MM-DD HH24:MI:SS.MS", "column": "start_time"}, {"alias": "end_time", "format": "YYYY-MM-DD HH24:MI:SS.MS", "column": "end_time"}, "reason", "exit_code"], "where": {"column": "id", "condition": "=", "value": str(task_id)}}
args, kwargs = log_exception.call_args
assert 'Query failed: %s' == args[0]
p = json.loads(args[1])
assert payload == p
@pytest.mark.asyncio
async def test_get_tasks(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
# Assert that there is no North task queued for schedule
with pytest.raises(KeyError) as excinfo:
assert scheduler._schedule_executions[schedule.id] is True
# Now queue task and assert that the North task has been queued
await scheduler.queue_task(schedule.id)
assert isinstance(scheduler._schedule_executions[schedule.id], scheduler._ScheduleExecution)
# Confirm that no task has started yet
assert 0 == len(scheduler._schedule_executions[schedule.id].task_processes)
await scheduler._start_task(schedule)
# Confirm that task has started
assert 1 == len(scheduler._schedule_executions[schedule.id].task_processes)
task_id = list(scheduler._schedule_executions[schedule.id].task_processes.keys())[0]
# WHEN
tasks = await scheduler.get_tasks()
# THEN
assert schedule.process_name == tasks[0].process_name
        assert tasks[0].reason == ''
assert tasks[0].state is not None
assert tasks[0].cancel_requested is None
assert tasks[0].start_time is not None
assert tasks[0].end_time is not None
        assert tasks[0].exit_code == '0'
@pytest.mark.asyncio
async def test_get_tasks_exception(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
log_debug = mocker.patch.object(scheduler._logger, 'debug', side_effect=Exception())
# WHEN
with pytest.raises(Exception) as excinfo:
tasks = await scheduler.get_tasks()
# THEN
payload = {"return": ["id", "process_name", "schedule_name", "state", {"alias": "start_time", "column": "start_time", "format": "YYYY-MM-DD HH24:MI:SS.MS"}, {"alias": "end_time", "column": "end_time", "format": "YYYY-MM-DD HH24:MI:SS.MS"}, "reason", "exit_code"], "limit": 100}
args, kwargs = log_exception.call_args
assert 'Query failed: %s' == args[0]
p = json.loads(args[1])
assert payload == p
@pytest.mark.asyncio
async def test_cancel_task_all_ok(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
# Assert that there is no task queued for schedule
with pytest.raises(KeyError) as excinfo:
assert scheduler._schedule_executions[schedule.id] is True
# Now queue task and assert that the task has been queued
await scheduler.queue_task(schedule.id)
assert isinstance(scheduler._schedule_executions[schedule.id], scheduler._ScheduleExecution)
# Confirm that no task has started yet
assert 0 == len(scheduler._schedule_executions[schedule.id].task_processes)
await scheduler._start_task(schedule)
# Confirm that task has started
assert 1 == len(scheduler._schedule_executions[schedule.id].task_processes)
task_id = list(scheduler._schedule_executions[schedule.id].task_processes.keys())[0]
# Confirm that cancel request has not been made
assert scheduler._schedule_executions[schedule.id].task_processes[task_id].cancel_requested is None
# WHEN
await scheduler.cancel_task(task_id)
# THEN
assert scheduler._schedule_executions[schedule.id].task_processes[task_id].cancel_requested is not None
assert 2 == log_info.call_count
# args, kwargs = log_info.call_args_list[0]
# assert ("Queued schedule '%s' for execution", 'OMF to PI north') == args
args, kwargs = log_info.call_args_list[0]
assert "Process started: Schedule '%s' process '%s' task %s pid %s, %s running tasks\n%s" in args
assert 'OMF to PI north' in args
assert 'North Readings to PI' in args
args, kwargs = log_info.call_args_list[1]
assert "Stopping process: Schedule '%s' process '%s' task %s pid %s\n%s" in args
assert 'OMF to PI north' in args
assert 'North Readings to PI' in args
@pytest.mark.asyncio
async def test_cancel_task_exception(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
# WHEN
# THEN
with pytest.raises(TaskNotRunningError) as excinfo:
await scheduler.cancel_task(uuid.uuid4())
@pytest.mark.asyncio
async def test_not_ready_and_paused(self, mocker):
# GIVEN
scheduler, schedule, log_info, log_exception, log_error, log_debug = await self.scheduler_fixture(mocker)
mocker.patch.object(scheduler, '_ready', False)
mocker.patch.object(scheduler, '_paused', True)
# WHEN
# THEN
with pytest.raises(NotReadyError) as excinfo:
await scheduler.start()
with pytest.raises(NotReadyError) as excinfo:
await scheduler.get_scheduled_processes()
with pytest.raises(NotReadyError) as excinfo:
await scheduler.get_schedules()
with pytest.raises(NotReadyError) as excinfo:
await scheduler.get_schedule(uuid.uuid4())
with pytest.raises(NotReadyError) as excinfo:
await scheduler.save_schedule(Schedule(Schedule.Type.INTERVAL))
with pytest.raises(NotReadyError) as excinfo:
await scheduler.disable_schedule(uuid.uuid4())
with pytest.raises(NotReadyError) as excinfo:
await scheduler.enable_schedule(uuid.uuid4())
with pytest.raises(NotReadyError) as excinfo:
await scheduler.queue_task(uuid.uuid4())
with pytest.raises(NotReadyError) as excinfo:
await scheduler.delete_schedule(uuid.uuid4())
with pytest.raises(NotReadyError) as excinfo:
await scheduler.get_running_tasks()
with pytest.raises(NotReadyError) as excinfo:
await scheduler.cancel_task(uuid.uuid4())
@pytest.mark.skip("_terminate_child_processes() not fit for unit test.")
@pytest.mark.asyncio
async def test__terminate_child_processes(self, mocker):
pass
class MockStorage(StorageClientAsync):
def __init__(self, core_management_host=None, core_management_port=None):
super().__init__(core_management_host, core_management_port)
def _get_storage_service(self, host, port):
return {
"id": uuid.uuid4(),
"name": "Fledge Storage",
"type": "Storage",
"service_port": 9999,
"management_port": 9999,
"address": "0.0.0.0",
"protocol": "http"
}
class MockStorageAsync(StorageClientAsync):
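    # In-memory stand-in for the async storage client with canned schedules, processes and tasks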
schedules = [
{
"id": "cea17db8-6ccc-11e7-907b-a6006ad3dba0",
"process_name": "purge",
"schedule_name": "purge",
"schedule_type": 4,
"schedule_interval": "01:00:00",
"schedule_time": "",
"schedule_day": 0,
"exclusive": "t",
"enabled": "t"
},
{
"id": "2176eb68-7303-11e7-8cf7-a6006ad3dba0",
"process_name": "stats collector",
"schedule_name": "stats collection",
"schedule_type": 2,
"schedule_interval": "00:00:15",
"schedule_time": "00:00:15",
"schedule_day": 3,
"exclusive": "f",
"enabled": "t"
},
{
"id": "d1631422-9ec6-11e7-abc4-cec278b6b50a",
"process_name": "backup",
"schedule_name": "backup hourly",
"schedule_type": 3,
"schedule_interval": "01:00:00",
"schedule_time": "",
"schedule_day": 0,
"exclusive": "t",
"enabled": "f"
},
{
"id": "ada12840-68d3-11e7-907b-a6006ad3dba0",
"process_name": "COAP",
"schedule_name": "COAP listener south",
"schedule_type": 1,
"schedule_interval": "00:00:00",
"schedule_time": "",
"schedule_day": 0,
"exclusive": "t",
"enabled": "t"
},
{
"id": "2b614d26-760f-11e7-b5a5-be2e44b06b34",
"process_name": "North Readings to PI",
"schedule_name": "OMF to PI north",
"schedule_type": 3,
"schedule_interval": "00:00:30",
"schedule_time": "",
"schedule_day": 0,
"exclusive": "t",
"enabled": "t"
},
{
"id": "5d7fed92-fb9a-11e7-8c3f-9a214cf093ae",
"process_name": "North Readings to OCS",
"schedule_name": "OMF to OCS north",
"schedule_type": 3,
"schedule_interval": "1 day 00:00:40",
"schedule_time": "",
"schedule_day": 0,
"exclusive": "t",
"enabled": "f"
},
]
scheduled_processes = [
{
"name": "purge",
"script": [
"tasks/purge"
]
},
{
"name": "stats collector",
"script": [
"tasks/statistics"
]
},
{
"name": "backup",
"script": [
"tasks/backup_postgres"
]
},
{
"name": "COAP",
"script": [
"services/south"
]
},
{
"name": "North Readings to PI",
"script": [
"tasks/north",
"--stream_id",
"1",
"--debug_level",
"1"
]
},
{
"name": "North Readings to OCS",
"script": [
"tasks/north",
"--stream_id",
"4",
"--debug_level",
"1"
]
},
]
tasks = [
{
"id": "259b8570-65c1-4b92-8c62-e9642631a600",
"process_name": "North Readings to PI",
"state": 1,
"start_time": "2018-02-06 13:28:14.477868",
"end_time": "2018-02-06 13:28:14.856375",
"exit_code": "0",
"reason": ""
}
]
def __init__(self, core_management_host=None, core_management_port=None):
super().__init__(core_management_host, core_management_port)
def _get_storage_service(self, host, port):
return {
"id": uuid.uuid4(),
"name": "Fledge Storage",
"type": "Storage",
"service_port": 9999,
"management_port": 9999,
"address": "0.0.0.0",
"protocol": "http"
}
@classmethod
async def insert_into_tbl(cls, table_name, payload):
pass
@classmethod
async def update_tbl(cls, table_name, payload):
# Only valid for test_save_schedule_update
if table_name == "schedules":
return {"count": 1}
@classmethod
async def delete_from_tbl(cls, table_name, condition=None):
pass
@classmethod
async def query_tbl_with_payload(cls, table_name, query_payload):
if table_name == 'tasks':
return {
"count": len(MockStorageAsync.tasks),
"rows": MockStorageAsync.tasks
}
@classmethod
async def query_tbl(cls, table_name, query=None):
if table_name == 'schedules':
return {
"count": len(MockStorageAsync.schedules),
"rows": MockStorageAsync.schedules
}
if table_name == 'scheduled_processes':
return {
"count": len(MockStorageAsync.scheduled_processes),
"rows": MockStorageAsync.scheduled_processes
}
| [((813, 842), 'pytest.allure.feature', 'pytest.allure.feature', (['"""unit"""'], {}), "('unit')\n", (834, 842), False, 'import pytest\n'), ((844, 876), 'pytest.allure.story', 'pytest.allure.story', (['"""scheduler"""'], {}), "('scheduler')\n", (863, 876), False, 'import pytest\n'), ((737, 748), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (746, 748), False, 'from unittest.mock import MagicMock, call\n'), ((11819, 11933), 'pytest.mark.skip', 'pytest.mark.skip', (['"""_scheduler_loop() not suitable for unit testing. Will be tested during System tests."""'], {}), "(\n '_scheduler_loop() not suitable for unit testing. Will be tested during System tests.'\n )\n", (11835, 11933), False, 'import pytest\n'), ((18280, 18516), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""test_interval, is_exception"""', '[(\'"Blah" 0 days\', True), (\'12:30:11\', False), (\'0 day 12:30:11\', False), (\n \'1 day 12:40:11\', False), (\'2 days\', True), (\'2 days 00:00:59\', False),\n (\'00:25:61\', True)]'], {}), '(\'test_interval, is_exception\', [(\'"Blah" 0 days\', \n True), (\'12:30:11\', False), (\'0 day 12:30:11\', False), (\n \'1 day 12:40:11\', False), (\'2 days\', True), (\'2 days 00:00:59\', False),\n (\'00:25:61\', True)])\n', (18303, 18516), False, 'import pytest\n'), ((21078, 21169), 'pytest.mark.skip', 'pytest.mark.skip', (['"""_mark_tasks_interrupted() not implemented in main Scheduler class."""'], {}), "(\n '_mark_tasks_interrupted() not implemented in main Scheduler class.')\n", (21094, 21169), False, 'import pytest\n'), ((42221, 42258), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""To be done"""'}), "(reason='To be done')\n", (42237, 42258), False, 'import pytest\n'), ((56112, 56155), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Need a suitable fixture"""'], {}), "('Need a suitable fixture')\n", (56128, 56155), False, 'import pytest\n'), ((63670, 63741), 'pytest.mark.skip', 'pytest.mark.skip', (['"""_terminate_child_processes() not fit for unit test."""'], {}), "('_terminate_child_processes() not fit for unit test.')\n", (63686, 63741), False, 'import pytest\n'), ((1258, 1269), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (1267, 1269), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((2950, 2961), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (2959, 2961), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((3780, 3791), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (3789, 3791), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((4823, 4835), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (4833, 4835), False, 'import uuid\n'), ((6325, 6336), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (6334, 6336), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((9252, 9263), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (9261, 9263), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((9990, 10001), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (9999, 10001), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, 
AuditLogger, ConfigurationManager\n'), ((10738, 10749), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (10747, 10749), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((11041, 11052), 'time.time', 'time.time', ([], {}), '()\n', (11050, 11052), False, 'import time\n'), ((12179, 12190), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (12188, 12190), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((12482, 12493), 'time.time', 'time.time', ([], {}), '()\n', (12491, 12493), False, 'import time\n'), ((12673, 12722), 'uuid.UUID', 'uuid.UUID', (['"""2176eb68-7303-11e7-8cf7-a6006ad3dba0"""'], {}), "('2176eb68-7303-11e7-8cf7-a6006ad3dba0')\n", (12682, 12722), False, 'import uuid\n'), ((12938, 13000), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['sch_execution.next_start_time'], {}), '(sch_execution.next_start_time)\n', (12969, 13000), False, 'import datetime\n'), ((13020, 13066), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'sch.repeat_seconds'}), '(seconds=sch.repeat_seconds)\n', (13038, 13066), False, 'import datetime\n'), ((13775, 13786), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (13784, 13786), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((14078, 14089), 'time.time', 'time.time', ([], {}), '()\n', (14087, 14089), False, 'import time\n'), ((14274, 14323), 'uuid.UUID', 'uuid.UUID', (['"""2176eb68-7303-11e7-8cf7-a6006ad3dba0"""'], {}), "('2176eb68-7303-11e7-8cf7-a6006ad3dba0')\n", (14283, 14323), False, 'import uuid\n'), ((15458, 15469), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (15467, 15469), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((15761, 15772), 'time.time', 'time.time', ([], {}), '()\n', (15770, 15772), False, 'import time\n'), ((15793, 15838), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['current_time'], {}), '(current_time)\n', (15824, 15838), False, 'import datetime\n'), ((16018, 16067), 'uuid.UUID', 'uuid.UUID', (['"""2176eb68-7303-11e7-8cf7-a6006ad3dba0"""'], {}), "('2176eb68-7303-11e7-8cf7-a6006ad3dba0')\n", (16027, 16067), False, 'import uuid\n'), ((17108, 17119), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (17117, 17119), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((17627, 17638), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (17636, 17638), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((18679, 18690), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (18688, 18690), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((19056, 19097), 'copy.deepcopy', 'copy.deepcopy', (['MockStorageAsync.schedules'], {}), '(MockStorageAsync.schedules)\n', (19069, 19097), False, 'import copy\n'), ((19711, 19722), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (19720, 19722), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, 
ConfigurationManager\n'), ((20515, 20526), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (20524, 20526), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((22566, 22577), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (22575, 22577), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((23556, 23567), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (23565, 23567), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((23927, 23938), 'time.time', 'time.time', ([], {}), '()\n', (23936, 23938), False, 'import time\n'), ((25352, 25363), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (25361, 25363), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((26186, 26197), 'time.time', 'time.time', ([], {}), '()\n', (26195, 26197), False, 'import time\n'), ((27594, 27605), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (27603, 27605), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((28207, 28218), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (28216, 28218), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((28241, 28253), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (28251, 28253), False, 'import uuid\n'), ((29834, 29883), 'uuid.UUID', 'uuid.UUID', (['"""cea17db8-6ccc-11e7-907b-a6006ad3dba0"""'], {}), "('cea17db8-6ccc-11e7-907b-a6006ad3dba0')\n", (29843, 29883), False, 'import uuid\n'), ((30621, 30633), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (30631, 30633), False, 'import uuid\n'), ((31635, 31647), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (31645, 31647), False, 'import uuid\n'), ((33698, 33710), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (33708, 33710), False, 'import uuid\n'), ((35715, 35764), 'uuid.UUID', 'uuid.UUID', (['"""2b614d26-760f-11e7-b5a5-be2e44b06b34"""'], {}), "('2b614d26-760f-11e7-b5a5-be2e44b06b34')\n", (35724, 35764), False, 'import uuid\n'), ((37870, 37919), 'uuid.UUID', 'uuid.UUID', (['"""2b614d26-760f-11e7-b5a5-be2e44b06b34"""'], {}), "('2b614d26-760f-11e7-b5a5-be2e44b06b34')\n", (37879, 37919), False, 'import uuid\n'), ((39771, 39783), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (39781, 39783), False, 'import uuid\n'), ((42446, 42457), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (42455, 42457), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((43079, 43128), 'uuid.UUID', 'uuid.UUID', (['"""2b614d26-760f-11e7-b5a5-be2e44b06b34"""'], {}), "('2b614d26-760f-11e7-b5a5-be2e44b06b34')\n", (43088, 43128), False, 'import uuid\n'), ((44334, 44345), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (44343, 44345), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((44869, 44881), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (44879, 44881), False, 'import uuid\n'), ((45227, 45238), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (45236, 45238), False, 'from 
fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((45740, 45789), 'uuid.UUID', 'uuid.UUID', (['"""d1631422-9ec6-11e7-abc4-cec278b6b50a"""'], {}), "('d1631422-9ec6-11e7-abc4-cec278b6b50a')\n", (45749, 45789), False, 'import uuid\n'), ((46465, 46514), 'uuid.UUID', 'uuid.UUID', (['"""d1631422-9ec6-11e7-abc4-cec278b6b50a"""'], {}), "('d1631422-9ec6-11e7-abc4-cec278b6b50a')\n", (46474, 46514), False, 'import uuid\n'), ((47827, 47876), 'uuid.UUID', 'uuid.UUID', (['"""ada12840-68d3-11e7-907b-a6006ad3dba0"""'], {}), "('ada12840-68d3-11e7-907b-a6006ad3dba0')\n", (47836, 47876), False, 'import uuid\n'), ((48657, 48669), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (48667, 48669), False, 'import uuid\n'), ((48991, 49002), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (49000, 49002), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((49393, 49442), 'uuid.UUID', 'uuid.UUID', (['"""cea17db8-6ccc-11e7-907b-a6006ad3dba0"""'], {}), "('cea17db8-6ccc-11e7-907b-a6006ad3dba0')\n", (49402, 49442), False, 'import uuid\n'), ((50181, 50192), 'fledge.services.core.scheduler.scheduler.Scheduler', 'Scheduler', ([], {}), '()\n', (50190, 50192), False, 'from fledge.services.core.scheduler.scheduler import Scheduler, AuditLogger, ConfigurationManager\n'), ((50947, 50996), 'uuid.UUID', 'uuid.UUID', (['"""d1631422-9ec6-11e7-abc4-cec278b6b50a"""'], {}), "('d1631422-9ec6-11e7-abc4-cec278b6b50a')\n", (50956, 50996), False, 'import uuid\n'), ((51711, 51760), 'uuid.UUID', 'uuid.UUID', (['"""ada12840-68d3-11e7-907b-a6006ad3dba0"""'], {}), "('ada12840-68d3-11e7-907b-a6006ad3dba0')\n", (51720, 51760), False, 'import uuid\n'), ((52793, 52842), 'uuid.UUID', 'uuid.UUID', (['"""d1631422-9ec6-11e7-abc4-cec278b6b50a"""'], {}), "('d1631422-9ec6-11e7-abc4-cec278b6b50a')\n", (52802, 52842), False, 'import uuid\n'), ((56863, 56875), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (56873, 56875), False, 'import uuid\n'), ((57432, 57451), 'json.loads', 'json.loads', (['args[1]'], {}), '(args[1])\n', (57442, 57451), False, 'import json\n'), ((59707, 59726), 'json.loads', 'json.loads', (['args[1]'], {}), '(args[1])\n', (59717, 59726), False, 'import json\n'), ((4703, 4748), 'asyncio.create_subprocess_exec', 'asyncio.create_subprocess_exec', (['"""sleep"""', '""".1"""'], {}), "('sleep', '.1')\n", (4733, 4748), False, 'import asyncio\n'), ((7325, 7348), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (7338, 7348), False, 'import pytest\n'), ((9600, 9623), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (9621, 9623), False, 'import datetime\n'), ((18053, 18077), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (18066, 18077), False, 'import pytest\n'), ((20225, 20249), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (20238, 20249), False, 'import pytest\n'), ((24835, 24851), 'unittest.mock.call', 'call', (['"""Starting"""'], {}), "('Starting')\n", (24839, 24851), False, 'from unittest.mock import MagicMock, call\n'), ((24870, 24934), 'unittest.mock.call', 'call', (['"""Starting Scheduler: Management port received is %d"""', '(9999)'], {}), "('Starting Scheduler: Management port received is %d', 9999)\n", (24874, 24934), False, 'from unittest.mock import MagicMock, call\n'), ((25010, 25061), 'unittest.mock.call', 'call', (['"""Database command: %s"""', '"""scheduled_processes"""'], {}), "('Database command: %s', 
'scheduled_processes')\n", (25014, 25061), False, 'from unittest.mock import MagicMock, call\n'), ((25080, 25121), 'unittest.mock.call', 'call', (['"""Database command: %s"""', '"""schedules"""'], {}), "('Database command: %s', 'schedules')\n", (25084, 25121), False, 'from unittest.mock import MagicMock, call\n'), ((26932, 26963), 'unittest.mock.call', 'call', (['"""Processing stop request"""'], {}), "('Processing stop request')\n", (26936, 26963), False, 'from unittest.mock import MagicMock, call\n'), ((26965, 26980), 'unittest.mock.call', 'call', (['"""Stopped"""'], {}), "('Stopped')\n", (26969, 26980), False, 'from unittest.mock import MagicMock, call\n'), ((27172, 27292), 'unittest.mock.call', 'call', (['"""An exception was raised by Scheduler._purge_tasks %s"""', '"""object MagicMock can\'t be used in \'await\' expression"""'], {}), '(\'An exception was raised by Scheduler._purge_tasks %s\',\n "object MagicMock can\'t be used in \'await\' expression")\n', (27176, 27292), False, 'from unittest.mock import MagicMock, call\n'), ((27307, 27430), 'unittest.mock.call', 'call', (['"""An exception was raised by Scheduler._scheduler_loop %s"""', '"""object MagicMock can\'t be used in \'await\' expression"""'], {}), '(\'An exception was raised by Scheduler._scheduler_loop %s\',\n "object MagicMock can\'t be used in \'await\' expression")\n', (27311, 27430), False, 'from unittest.mock import MagicMock, call\n'), ((30228, 30255), 'datetime.timedelta', 'datetime.timedelta', (['(0)', '(3600)'], {}), '(0, 3600)\n', (30246, 30255), False, 'import datetime\n'), ((30678, 30714), 'pytest.raises', 'pytest.raises', (['ScheduleNotFoundError'], {}), '(ScheduleNotFoundError)\n', (30691, 30714), False, 'import pytest\n'), ((32324, 32501), 'unittest.mock.call', 'call', (['"""SCHAD"""', "{'schedule': {'name': 'Test Schedule', 'processName': 'TestProcess', 'type':\n Schedule.Type.INTERVAL, 'repeat': 30.0, 'enabled': True, 'exclusive': \n False}}"], {}), "('SCHAD', {'schedule': {'name': 'Test Schedule', 'processName':\n 'TestProcess', 'type': Schedule.Type.INTERVAL, 'repeat': 30.0,\n 'enabled': True, 'exclusive': False}})\n", (32328, 32501), False, 'from unittest.mock import MagicMock, call\n'), ((34413, 34590), 'unittest.mock.call', 'call', (['"""SCHAD"""', "{'schedule': {'name': 'Test Schedule', 'processName': 'TestProcess', 'type':\n Schedule.Type.INTERVAL, 'repeat': 30.0, 'enabled': True, 'exclusive': \n False}}"], {}), "('SCHAD', {'schedule': {'name': 'Test Schedule', 'processName':\n 'TestProcess', 'type': Schedule.Type.INTERVAL, 'repeat': 30.0,\n 'enabled': True, 'exclusive': False}})\n", (34417, 34590), False, 'from unittest.mock import MagicMock, call\n'), ((36675, 36876), 'unittest.mock.call', 'call', (['"""SCHCH"""', "{'schedule': {'name': 'Test Schedule', 'enabled': True, 'repeat': 30.0,\n 'exclusive': False, 'day': 1, 'time': '0:0:0', 'processName':\n 'TestProcess', 'type': Schedule.Type.TIMED}}"], {}), "('SCHCH', {'schedule': {'name': 'Test Schedule', 'enabled': True,\n 'repeat': 30.0, 'exclusive': False, 'day': 1, 'time': '0:0:0',\n 'processName': 'TestProcess', 'type': Schedule.Type.TIMED}})\n", (36679, 36876), False, 'from unittest.mock import MagicMock, call\n'), ((38856, 39057), 'unittest.mock.call', 'call', (['"""SCHCH"""', "{'schedule': {'name': 'Test Schedule', 'enabled': True, 'repeat': 30.0,\n 'exclusive': False, 'day': 1, 'time': '0:0:0', 'processName':\n 'TestProcess', 'type': Schedule.Type.TIMED}}"], {}), "('SCHCH', {'schedule': {'name': 'Test Schedule', 'enabled': True,\n 'repeat': 
30.0, 'exclusive': False, 'day': 1, 'time': '0:0:0',\n 'processName': 'TestProcess', 'type': Schedule.Type.TIMED}})\n", (38860, 39057), False, 'from unittest.mock import MagicMock, call\n'), ((40191, 40216), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (40204, 40216), False, 'import pytest\n'), ((40511, 40536), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (40524, 40536), False, 'import pytest\n'), ((40829, 40854), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (40842, 40854), False, 'import pytest\n'), ((41171, 41196), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (41184, 41196), False, 'import pytest\n'), ((41500, 41525), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (41513, 41525), False, 'import pytest\n'), ((41833, 41858), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (41846, 41858), False, 'import pytest\n'), ((42024, 42039), 'datetime.time', 'datetime.time', ([], {}), '()\n', (42037, 42039), False, 'import datetime\n'), ((43515, 43594), 'unittest.mock.call', 'call', (['"""No Task running for Schedule %s"""', '"""2b614d26-760f-11e7-b5a5-be2e44b06b34"""'], {}), "('No Task running for Schedule %s', '2b614d26-760f-11e7-b5a5-be2e44b06b34')\n", (43519, 43594), False, 'from unittest.mock import MagicMock, call\n'), ((43613, 43748), 'unittest.mock.call', 'call', (['"""Disabled Schedule \'%s/%s\' process \'%s\'\n"""', '"""OMF to PI north"""', '"""2b614d26-760f-11e7-b5a5-be2e44b06b34"""', '"""North Readings to PI"""'], {}), '("Disabled Schedule \'%s/%s\' process \'%s\'\\n", \'OMF to PI north\',\n \'2b614d26-760f-11e7-b5a5-be2e44b06b34\', \'North Readings to PI\')\n', (43617, 43748), False, 'from unittest.mock import MagicMock, call\n'), ((43870, 44058), 'unittest.mock.call', 'call', (['"""SCHCH"""', "{'schedule': {'name': 'OMF to PI north', 'repeat': 30.0, 'enabled': False,\n 'type': Schedule.Type.INTERVAL, 'exclusive': True, 'processName':\n 'North Readings to PI'}}"], {}), "('SCHCH', {'schedule': {'name': 'OMF to PI north', 'repeat': 30.0,\n 'enabled': False, 'type': Schedule.Type.INTERVAL, 'exclusive': True,\n 'processName': 'North Readings to PI'}})\n", (43874, 44058), False, 'from unittest.mock import MagicMock, call\n'), ((47126, 47244), 'unittest.mock.call', 'call', (['"""Enabled Schedule \'%s/%s\' process \'%s\'\n"""', '"""backup hourly"""', '"""d1631422-9ec6-11e7-abc4-cec278b6b50a"""', '"""backup"""'], {}), '("Enabled Schedule \'%s/%s\' process \'%s\'\\n", \'backup hourly\',\n \'d1631422-9ec6-11e7-abc4-cec278b6b50a\', \'backup\')\n', (47130, 47244), False, 'from unittest.mock import MagicMock, call\n'), ((47360, 47534), 'unittest.mock.call', 'call', (['"""SCHCH"""', "{'schedule': {'name': 'backup hourly', 'type': Schedule.Type.INTERVAL,\n 'processName': 'backup', 'exclusive': True, 'repeat': 3600.0, 'enabled':\n True}}"], {}), "('SCHCH', {'schedule': {'name': 'backup hourly', 'type': Schedule.Type.\n INTERVAL, 'processName': 'backup', 'exclusive': True, 'repeat': 3600.0,\n 'enabled': True}})\n", (47364, 47534), False, 'from unittest.mock import MagicMock, call\n'), ((49662, 49685), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (49675, 49685), False, 'import pytest\n'), ((50622, 50658), 'pytest.raises', 'pytest.raises', (['ScheduleNotFoundError'], {}), '(ScheduleNotFoundError)\n', (50635, 50658), False, 'import pytest\n'), ((52049, 52078), 'pytest.raises', 'pytest.raises', (['RuntimeWarning'], 
{}), '(RuntimeWarning)\n', (52062, 52078), False, 'import pytest\n'), ((52897, 52933), 'pytest.raises', 'pytest.raises', (['ScheduleNotFoundError'], {}), '(ScheduleNotFoundError)\n', (52910, 52933), False, 'import pytest\n'), ((53264, 53300), 'pytest.raises', 'pytest.raises', (['ScheduleNotFoundError'], {}), '(ScheduleNotFoundError)\n', (53277, 53300), False, 'import pytest\n'), ((53652, 53675), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (53665, 53675), False, 'import pytest\n'), ((55004, 55027), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (55017, 55027), False, 'import pytest\n'), ((56408, 56440), 'pytest.raises', 'pytest.raises', (['TaskNotFoundError'], {}), '(TaskNotFoundError)\n', (56421, 56440), False, 'import pytest\n'), ((56889, 56913), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (56902, 56913), False, 'import pytest\n'), ((57759, 57782), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (57772, 57782), False, 'import pytest\n'), ((59216, 59240), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (59229, 59240), False, 'import pytest\n'), ((60037, 60060), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (60050, 60060), False, 'import pytest\n'), ((62010, 62044), 'pytest.raises', 'pytest.raises', (['TaskNotRunningError'], {}), '(TaskNotRunningError)\n', (62023, 62044), False, 'import pytest\n'), ((62478, 62506), 'pytest.raises', 'pytest.raises', (['NotReadyError'], {}), '(NotReadyError)\n', (62491, 62506), False, 'import pytest\n'), ((62569, 62597), 'pytest.raises', 'pytest.raises', (['NotReadyError'], {}), '(NotReadyError)\n', (62582, 62597), False, 'import pytest\n'), ((62678, 62706), 'pytest.raises', 'pytest.raises', (['NotReadyError'], {}), '(NotReadyError)\n', (62691, 62706), False, 'import pytest\n'), ((62777, 62805), 'pytest.raises', 'pytest.raises', (['NotReadyError'], {}), '(NotReadyError)\n', (62790, 62805), False, 'import pytest\n'), ((62887, 62915), 'pytest.raises', 'pytest.raises', (['NotReadyError'], {}), '(NotReadyError)\n', (62900, 62915), False, 'import pytest\n'), ((63018, 63046), 'pytest.raises', 'pytest.raises', (['NotReadyError'], {}), '(NotReadyError)\n', (63031, 63046), False, 'import pytest\n'), ((63132, 63160), 'pytest.raises', 'pytest.raises', (['NotReadyError'], {}), '(NotReadyError)\n', (63145, 63160), False, 'import pytest\n'), ((63245, 63273), 'pytest.raises', 'pytest.raises', (['NotReadyError'], {}), '(NotReadyError)\n', (63258, 63273), False, 'import pytest\n'), ((63353, 63381), 'pytest.raises', 'pytest.raises', (['NotReadyError'], {}), '(NotReadyError)\n', (63366, 63381), False, 'import pytest\n'), ((63466, 63494), 'pytest.raises', 'pytest.raises', (['NotReadyError'], {}), '(NotReadyError)\n', (63479, 63494), False, 'import pytest\n'), ((63569, 63597), 'pytest.raises', 'pytest.raises', (['NotReadyError'], {}), '(NotReadyError)\n', (63582, 63597), False, 'import pytest\n'), ((64116, 64128), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (64126, 64128), False, 'import uuid\n'), ((68249, 68261), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (68259, 68261), False, 'import uuid\n'), ((2110, 2159), 'uuid.UUID', 'uuid.UUID', (['"""2b614d26-760f-11e7-b5a5-be2e44b06b34"""'], {}), "('2b614d26-760f-11e7-b5a5-be2e44b06b34')\n", (2119, 2159), False, 'import uuid\n'), ((2306, 2336), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (2324, 2336), False, 'import datetime\n'), ((3285, 
3301), 'asyncio.sleep', 'asyncio.sleep', (['(5)'], {}), '(5)\n', (3298, 3301), False, 'import asyncio\n'), ((4155, 4204), 'uuid.UUID', 'uuid.UUID', (['"""2b614d26-760f-11e7-b5a5-be2e44b06b34"""'], {}), "('2b614d26-760f-11e7-b5a5-be2e44b06b34')\n", (4164, 4204), False, 'import uuid\n'), ((4351, 4381), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (4369, 4381), False, 'import datetime\n'), ((6767, 6816), 'uuid.UUID', 'uuid.UUID', (['"""2b614d26-760f-11e7-b5a5-be2e44b06b34"""'], {}), "('2b614d26-760f-11e7-b5a5-be2e44b06b34')\n", (6776, 6816), False, 'import uuid\n'), ((6963, 6993), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (6981, 6993), False, 'import datetime\n'), ((19315, 19339), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (19328, 19339), False, 'import pytest\n'), ((31856, 31886), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (31874, 31886), False, 'import datetime\n'), ((33919, 33949), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (33937, 33949), False, 'import datetime\n'), ((35967, 35982), 'datetime.time', 'datetime.time', ([], {}), '()\n', (35980, 35982), False, 'import datetime\n'), ((36003, 36033), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (36021, 36033), False, 'import datetime\n'), ((38122, 38137), 'datetime.time', 'datetime.time', ([], {}), '()\n', (38135, 38137), False, 'import datetime\n'), ((38158, 38188), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (38176, 38188), False, 'import datetime\n'), ((39989, 40019), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (40007, 40019), False, 'import datetime\n'), ((26025, 26043), 'asyncio.sleep', 'asyncio.sleep', (['(0.1)'], {}), '(0.1)\n', (26038, 26043), False, 'import asyncio\n'), ((26143, 26161), 'asyncio.sleep', 'asyncio.sleep', (['(0.1)'], {}), '(0.1)\n', (26156, 26161), False, 'import asyncio\n'), ((50710, 50722), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (50720, 50722), False, 'import uuid\n'), ((52990, 53002), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (53000, 53002), False, 'import uuid\n'), ((53357, 53369), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (53367, 53369), False, 'import uuid\n'), ((56498, 56510), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (56508, 56510), False, 'import uuid\n'), ((62097, 62109), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (62107, 62109), False, 'import uuid\n'), ((62859, 62871), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (62869, 62871), False, 'import uuid\n'), ((63104, 63116), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (63114, 63116), False, 'import uuid\n'), ((63217, 63229), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (63227, 63229), False, 'import uuid\n'), ((63325, 63337), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (63335, 63337), False, 'import uuid\n'), ((63438, 63450), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (63448, 63450), False, 'import uuid\n'), ((63650, 63662), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (63660, 63662), False, 'import uuid\n')] |
kcc3/hackerrank-solutions | problem_solving/python/algorithms/greedy/marcs_cakewalk.py | f862b44b840bd447d99dc148f6bb5e2f5bfb8a86 | def marcs_cakewalk(calorie):
"""Hackerrank Problem: https://www.hackerrank.com/challenges/marcs-cakewalk/problem
Marc loves cupcakes, but he also likes to stay fit. Each cupcake has a calorie count, and Marc can walk a distance
to expend those calories. If Marc has eaten j cupcakes so far, after eating a cupcake with c calories he must walk
at least 2**j x c miles to maintain his weight.
    Solve:
    To minimize the total miles, eat the cupcakes from highest to lowest calorie count, so the
    largest calorie counts are paired with the smallest multipliers (2**0, 2**1, ...).
    Args:
        calorie (list): List of integers denoting the calories for each cupcake
    Returns:
        int: The minimum number of miles Marc must walk to maintain his weight
"""
calories = 0
for i, c in enumerate(sorted(calorie, reverse=True)):
calories += (2 ** i * c)
return calories
if __name__ == "__main__":
assert marcs_cakewalk([5, 10, 7]) == 44
assert marcs_cakewalk([1, 3, 2]) == 11
assert marcs_cakewalk([7, 4, 9, 6]) == 79
| [] |
ankitgoswami23/CoronaIndiaTracker | coronaindiatracker/coronatracker/views.py | b2e116a595b3c69ccefa93b60833c09aa07b5eed | from django.shortcuts import render
import requests
from bs4 import BeautifulSoup
def corona_data(request):
"Testaaaa"
corona_html = requests.get("https://www.mygov.in/covid-19")
soup = BeautifulSoup(corona_html.content, 'html.parser')
state_wise_data = soup.find_all('div', class_='views-row')
information = soup.find('div', class_='information_row')
info = {
'update_data': information.find('div', class_='info_title').find('span').string,
'active_case': information.find('div', class_='active-case').find('span', class_='icount').string,
'discharge': information.find('div', class_='discharge').find('span', class_='icount').string,
'death': information.find('div', class_='death_case').find('span', class_='icount').string
}
corona_info = [
{
"state_name": state.find_all('span', class_='st_name')[0].string,
"confirm_case": state.find_all('div', class_='tick-confirmed')[0].find_all('small')[0].string,
"active_case": state.find_all('div', class_='tick-active')[0].find_all('small')[0].string,
"discharge": state.find_all('div', class_='tick-discharged')[0].find_all('small')[0].string,
"death": state.find_all('div', class_='tick-death')[0].find_all('small')[0].string
} for state in state_wise_data
]
context = {
'corona_info': info,
'data': sorted(corona_info, key=lambda i: int(''.join(i['confirm_case'].replace(',', ''))), reverse=True)
}
return render(request, 'coronainfo/index.html', context)
| [((143, 188), 'requests.get', 'requests.get', (['"""https://www.mygov.in/covid-19"""'], {}), "('https://www.mygov.in/covid-19')\n", (155, 188), False, 'import requests\n'), ((200, 249), 'bs4.BeautifulSoup', 'BeautifulSoup', (['corona_html.content', '"""html.parser"""'], {}), "(corona_html.content, 'html.parser')\n", (213, 249), False, 'from bs4 import BeautifulSoup\n'), ((1533, 1582), 'django.shortcuts.render', 'render', (['request', '"""coronainfo/index.html"""', 'context'], {}), "(request, 'coronainfo/index.html', context)\n", (1539, 1582), False, 'from django.shortcuts import render\n')] |
geohackweek/ghw2019_wiggles | compare.py | 9b636db8d97986e038a301e36b808e820ccc525f | # Script tests GPD model using UW truth data
# Test outputs:
# - type of event tested [EQS, EQP, SUS, SUP, THS, THP, SNS, SNP, PXS, PXP]
# - phase [P, S, N] Note: N - not detected
# - model time offset (t_truth - t_model_pick)
import numpy
import math
import string
import datetime
import sys
import os
import csv
from datetime import datetime
from datetime import timedelta
# params
padding_time = 10
fudge_factor = timedelta(seconds=27)
time_diff = timedelta(seconds=10)
# file dirs
parsed_arrivals = []
model_in = []
model_out = []
comp_out = []
for etype in ['EQS','EQP','SUS','SUP','THS','THP','SNS','SNP','PXS','PXP']:
arrival = "parsed_arrivals/" + etype + ".arrivals.txt"
infile = "input_files/GPD." + etype + ".in"
outfile = "output_files/GPD." + etype + ".out"
parsed_arrivals.append(arrival)
model_in.append(infile)
model_out.append(outfile)
comp_out.append("comparison_out/comp." + etype + ".out")
# ------------------
# read in UW arrival times as an array
def read_arrivals_to_arr(filename):
model_list = []
with open(filename) as f:
for ln in f:
row = ln.split()
line = []
line.extend([row[0].strip(), row[1].strip(), row[2].strip()])
formatted_time = datetime.strptime(row[3], "%Y-%m-%dT%H:%M:%S.%f") - fudge_factor
line.extend([formatted_time, row[4].strip(), row[5].strip()])
model_list.append(line)
return model_list
def arrivals_to_dictionary(arrivals):
picks = {}
for arr in arrivals:
key = datetime.strftime(arr[3], "%Y-%m-%dT%H:%M:%S.%f")
key = key[0:-7]
picks[key] = arr
return picks
def model_in_to_array(file):
timestamps = []
with open(file) as f:
for ln in f:
entry = ln.split()
entry = entry[0].strip()
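            # the first token embeds a 14-digit YYYYMMDDHHMMSS stamp ending six
            # characters before the end of the name; slice it out and rebuild an
            # ISO-style timestamp from it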
entry = entry[len(entry)-20:len(entry)-6]
entry = entry[0:4] + "-" + entry[4:6] + "-" + entry[6:8] + "T" + entry[8:10] + ":" + entry[10:12] + ":" + entry[12:14]
# ------------- TIME STAMP ISSUES --------------------
# case 1: run if .mseed files have correct timestamps
"""
time = datetime.strptime(entry, "%Y-%m-%dT%H:%M:%S") - fudge_factor # + time_diff (might need to add this)
time = datetime.strftime(time, "%Y-%m-%dT%H:%M:%S")
"""
# case 2: run if .mseed files have buggy minutes in the timestamps
time = datetime.strptime(entry, "%Y-%m-%dT%H:%M:%S")
if time.second >=37 and time.second <=51:
time = time + timedelta(seconds=23) + time_diff
time = datetime.strftime(time, "%Y-%m-%dT%H:%M:%S")
else:
sec_int = time.second + 23
if sec_int > 59:
sec_int = sec_int - 60
sec_int = str(sec_int).zfill(2)
time = datetime.strftime(time, "%Y-%m-%dT%H:%M:%S")
time = time[:-2] + sec_int
time = datetime.strptime(time, "%Y-%m-%dT%H:%M:%S") + time_diff
time = datetime.strftime(time, "%Y-%m-%dT%H:%M:%S")
# -----------------------------------------------------
timestamps.append(time)
return timestamps
def filter_times(arrivals, model_in):
filtered = []
for key in model_in:
if key in arrivals:
filtered.append(arrivals[key])
return filtered
# read in Caltech model output and create a dictionary
def read_output_to_dict(filename):
model_dict = {}
with open(filename) as f:
for line in f:
tmp = line.split()
key = tmp[0] + "-" + tmp[1] + "-" + tmp[2]
try: # fails if date is missing floating point numbers
formatted_time = datetime.strptime(tmp[3], "%Y-%m-%dT%H:%M:%S.%f")
if key not in model_dict:
model_dict[key] = []
model_dict[key].append(formatted_time)
except:
pass
return model_dict
# lookup time in the dictionary
def key_lookup(event, phase, model_dict):
key = event[0] + "-" + event[1] + "-" + phase
times = []
if key in model_dict.keys():
times = model_dict[key]
times = time_lookup(event[3], times)
return times
# search for arrivals within the padding time window
def time_lookup(t, time_arr):
t_lower = t - timedelta(seconds=padding_time)
t_upper = t + timedelta(seconds=padding_time)
offsets = []
for time in time_arr:
if time > t_lower and time < t_upper:
offset = t - time # or format time to absolute value: abs(t - time)
offset = offset.total_seconds()
offsets.append('{:.6f}'.format(offset))
return offsets
def execute_script(arrival, inf, outf, comp_out):
# write outputs to file
outp_file = open(comp_out, 'w')
truth_arr = read_arrivals_to_arr(arrival) # read in the arrival times to a list
truth_dict = arrivals_to_dictionary(truth_arr) # convert arrivals to a dictionary (key=truncated timestamp)
model_in = model_in_to_array(inf) # read in model .in file as a list
truth_arr = filter_times(truth_dict, model_in) # filter arrivals to picks that were passed to the model (.in file)
model_dict = read_output_to_dict(outf) # read output file
for event in truth_arr:
phase = event[2]
times = key_lookup(event, phase, model_dict)
if len(times) == 0:
if phase == 'P':
phase = 'S'
else:
phase = 'P'
times = key_lookup(event, phase, model_dict)
if len(times) == 0:
phase = 'N'
times = ['nan']
outp_file.write(str(event[5]) + " " + phase)
for offset in times:
outp_file.write(" " + str(offset))
outp_file.write('\n')
outp_file.close()
for i in range(len(model_out)):
execute_script(parsed_arrivals[i], model_in[i], model_out[i], comp_out[i])
| [((433, 454), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(27)'}), '(seconds=27)\n', (442, 454), False, 'from datetime import timedelta\n'), ((467, 488), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(10)'}), '(seconds=10)\n', (476, 488), False, 'from datetime import timedelta\n'), ((1567, 1616), 'datetime.datetime.strftime', 'datetime.strftime', (['arr[3]', '"""%Y-%m-%dT%H:%M:%S.%f"""'], {}), "(arr[3], '%Y-%m-%dT%H:%M:%S.%f')\n", (1584, 1616), False, 'from datetime import datetime\n'), ((4429, 4460), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'padding_time'}), '(seconds=padding_time)\n', (4438, 4460), False, 'from datetime import timedelta\n'), ((4479, 4510), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'padding_time'}), '(seconds=padding_time)\n', (4488, 4510), False, 'from datetime import timedelta\n'), ((2483, 2528), 'datetime.datetime.strptime', 'datetime.strptime', (['entry', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(entry, '%Y-%m-%dT%H:%M:%S')\n", (2500, 2528), False, 'from datetime import datetime\n'), ((1277, 1326), 'datetime.datetime.strptime', 'datetime.strptime', (['row[3]', '"""%Y-%m-%dT%H:%M:%S.%f"""'], {}), "(row[3], '%Y-%m-%dT%H:%M:%S.%f')\n", (1294, 1326), False, 'from datetime import datetime\n'), ((2670, 2714), 'datetime.datetime.strftime', 'datetime.strftime', (['time', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(time, '%Y-%m-%dT%H:%M:%S')\n", (2687, 2714), False, 'from datetime import datetime\n'), ((2923, 2967), 'datetime.datetime.strftime', 'datetime.strftime', (['time', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(time, '%Y-%m-%dT%H:%M:%S')\n", (2940, 2967), False, 'from datetime import datetime\n'), ((3114, 3158), 'datetime.datetime.strftime', 'datetime.strftime', (['time', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(time, '%Y-%m-%dT%H:%M:%S')\n", (3131, 3158), False, 'from datetime import datetime\n'), ((3808, 3857), 'datetime.datetime.strptime', 'datetime.strptime', (['tmp[3]', '"""%Y-%m-%dT%H:%M:%S.%f"""'], {}), "(tmp[3], '%Y-%m-%dT%H:%M:%S.%f')\n", (3825, 3857), False, 'from datetime import datetime\n'), ((3034, 3078), 'datetime.datetime.strptime', 'datetime.strptime', (['time', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(time, '%Y-%m-%dT%H:%M:%S')\n", (3051, 3078), False, 'from datetime import datetime\n'), ((2613, 2634), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(23)'}), '(seconds=23)\n', (2622, 2634), False, 'from datetime import timedelta\n')] |
atheheath/ultitracker-api | ultitrackerapi/ultitrackerapi/extract_and_upload_video.py | 5d7ea7ae97c53faf02416f17baf11ed09fd55276 | import argparse
import boto3
import datetime
import json
import os
import posixpath
import re
import shutil
import tempfile
import uuid
from concurrent import futures
from multiprocessing import Pool
from ultitrackerapi import get_backend, get_logger, get_s3Client, video
backend_instance = get_backend()
logger = get_logger(__name__, level="DEBUG")
s3Client = get_s3Client()
def update_game_video_length(game_id, video_length):
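    # the doubled braces in '{{length}}' survive str.format() as the literal
    # jsonb path '{length}'; video_length and game_id are substituted below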
command = """
UPDATE ultitracker.game_metadata
SET data = jsonb_set(data, '{{length}}', '"{video_length}"', true)
WHERE game_id = '{game_id}'
""".format(
video_length=video_length,
game_id=game_id
)
backend_instance.client.execute(command)
def get_frame_number(key, chunk_multiplier=60):
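    # keys look like .../chunk_<n>/<name>_<m>.<ext>; the global frame index is
    # n * chunk_multiplier + m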
frame_number = int(posixpath.splitext(posixpath.basename(key))[0].split("_")[1])
chunk_number = int(posixpath.basename(posixpath.dirname(key)).split("_")[1])
return chunk_number * chunk_multiplier + frame_number
def insert_images(
img_raw_paths,
img_types,
img_metadatas,
game_id,
frame_numbers
):
command = """
INSERT INTO ultitracker.img_location (img_id, img_raw_path, img_type, img_metadata, game_id, frame_number) VALUES
"""
for i, (img_raw_path, img_type, img_metadata, frame_number) in enumerate(zip(img_raw_paths, img_types, img_metadatas, frame_numbers)):
command += """('{img_id}', '{img_raw_path}', '{img_type}', '{img_metadata}', '{game_id}', {frame_number}){include_comma}
""".format(
img_id=uuid.uuid4(),
img_raw_path=img_raw_path,
img_type=img_type,
img_metadata=json.dumps(img_metadata),
game_id=game_id,
frame_number=frame_number,
include_comma="," if i < (len(img_raw_paths) - 1) else ""
)
backend_instance.client.execute(command)
def extract_and_upload_video(
bucket,
video_filename,
thumbnail_filename,
video_key,
thumbnail_key,
game_id
):
logger.debug("extract_and_upload_video: Getting video length")
video_length_seconds = int(video.get_video_duration(video_filename))
video_length = str(datetime.timedelta(seconds=video_length_seconds))
logger.debug("extract_and_upload_video: Finished getting video length")
logger.debug("extract_and_upload_video: Getting video height and width")
video_height_width = video.get_video_height_width(video_filename)
logger.debug("extract_and_upload_video: Finished getting height and width")
logger.debug("extract_and_upload_video: Updating length in db")
update_game_video_length(game_id, video_length)
logger.debug("extract_and_upload_video: Finished updating length in db")
logger.debug("extract_and_upload_video: Extracting thumbnail")
video.get_thumbnail(video_filename, thumbnail_filename, time=video_length_seconds // 2)
logger.debug("extract_and_upload_video: Finished extracting thumbnail")
logger.debug("extract_and_upload_video: Uploading thumbnail")
s3Client.upload_file(
thumbnail_filename,
bucket,
thumbnail_key
)
logger.debug("extract_and_upload_video: Finished uploading thumbnail")
logger.debug("extract_and_upload_video: Uploading video to S3")
s3Client.upload_file(
video_filename,
bucket,
video_key
)
logger.debug("extract_and_upload_video: Finished uploading video to S3")
logger.debug("extract_and_upload_video: Chunking video")
chunked_video_dir = tempfile.mkdtemp()
video.chunk_video(video_filename, chunked_video_dir, chunk_size=60)
logger.debug("extract_and_upload_video: Finished chunking video")
logger.debug("extract_and_upload_video: Uploading video chunks")
with futures.ThreadPoolExecutor(8) as ex:
for vid in os.listdir(chunked_video_dir):
ex.submit(
s3Client.upload_file,
os.path.join(chunked_video_dir, vid),
bucket,
posixpath.join(
posixpath.dirname(video_key),
"chunks",
vid
)
)
logger.debug("extract_and_upload_video: Finished uploading video chunks")
logger.debug("extract_and_upload_video: Submitting lambda frame extraction")
aws_lambda_payloads = [
json.dumps({
"s3_bucket_path": bucket,
"s3_video_path": posixpath.join(posixpath.dirname(video_key), "chunks", basename),
"s3_output_frames_path": posixpath.join(posixpath.dirname(video_key), "frames", posixpath.splitext(basename)[0]),
"video_metadata": video_height_width
}).encode()
for basename in os.listdir(chunked_video_dir)
]
client = boto3.client('lambda')
aws_lambda_responses = []
with futures.ThreadPoolExecutor(max_workers=16) as ex:
result_futures = []
for payload in aws_lambda_payloads:
result_futures.append(ex.submit(
client.invoke,
FunctionName="extractFrames",
# InvocationType="Event",
Payload=payload
))
logger.debug("extract_and_upload_video: Submitted lambda frame extraction")
for result_future in futures.as_completed(result_futures):
aws_lambda_response = json.loads(result_future.result()["Payload"].read().decode("utf-8"))
aws_lambda_responses.append(aws_lambda_response)
raw_paths = ["s3://" + posixpath.join(frame["bucket"], frame["key"]) for frame in aws_lambda_response["frames"]]
img_types = ["png" for frame in aws_lambda_response["frames"]]
metadatas = [
{"bucket": bucket}
for frame in aws_lambda_response["frames"]
]
frame_numbers = [-1 for frame in aws_lambda_response["frames"]]
insert_images(
raw_paths,
img_types,
metadatas,
game_id,
frame_numbers
)
logger.debug("extract_and_upload_video: Received all lambda responses")
logger.debug("extract_and_upload_video: Finished inserting image metadata")
os.remove(video_filename)
os.remove(thumbnail_filename)
shutil.rmtree(chunked_video_dir)
def main():
parser = argparse.ArgumentParser()
parser.add_argument("bucket")
parser.add_argument("video_filename")
parser.add_argument("thumbnail_filename")
parser.add_argument("video_key")
parser.add_argument("thumbnail_key")
parser.add_argument("game_id")
args = parser.parse_args()
extract_and_upload_video(
bucket=args.bucket,
video_filename=args.video_filename,
thumbnail_filename=args.thumbnail_filename,
video_key=args.video_key,
thumbnail_key=args.thumbnail_key,
game_id=args.game_id
)
if __name__ == "__main__":
main() | [((297, 310), 'ultitrackerapi.get_backend', 'get_backend', ([], {}), '()\n', (308, 310), False, 'from ultitrackerapi import get_backend, get_logger, get_s3Client, video\n'), ((320, 355), 'ultitrackerapi.get_logger', 'get_logger', (['__name__'], {'level': '"""DEBUG"""'}), "(__name__, level='DEBUG')\n", (330, 355), False, 'from ultitrackerapi import get_backend, get_logger, get_s3Client, video\n'), ((367, 381), 'ultitrackerapi.get_s3Client', 'get_s3Client', ([], {}), '()\n', (379, 381), False, 'from ultitrackerapi import get_backend, get_logger, get_s3Client, video\n'), ((2423, 2467), 'ultitrackerapi.video.get_video_height_width', 'video.get_video_height_width', (['video_filename'], {}), '(video_filename)\n', (2451, 2467), False, 'from ultitrackerapi import get_backend, get_logger, get_s3Client, video\n'), ((2822, 2914), 'ultitrackerapi.video.get_thumbnail', 'video.get_thumbnail', (['video_filename', 'thumbnail_filename'], {'time': '(video_length_seconds // 2)'}), '(video_filename, thumbnail_filename, time=\n video_length_seconds // 2)\n', (2841, 2914), False, 'from ultitrackerapi import get_backend, get_logger, get_s3Client, video\n'), ((3552, 3570), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (3568, 3570), False, 'import tempfile\n'), ((3575, 3642), 'ultitrackerapi.video.chunk_video', 'video.chunk_video', (['video_filename', 'chunked_video_dir'], {'chunk_size': '(60)'}), '(video_filename, chunked_video_dir, chunk_size=60)\n', (3592, 3642), False, 'from ultitrackerapi import get_backend, get_logger, get_s3Client, video\n'), ((4806, 4828), 'boto3.client', 'boto3.client', (['"""lambda"""'], {}), "('lambda')\n", (4818, 4828), False, 'import boto3\n'), ((6287, 6312), 'os.remove', 'os.remove', (['video_filename'], {}), '(video_filename)\n', (6296, 6312), False, 'import os\n'), ((6317, 6346), 'os.remove', 'os.remove', (['thumbnail_filename'], {}), '(thumbnail_filename)\n', (6326, 6346), False, 'import os\n'), ((6351, 6383), 'shutil.rmtree', 'shutil.rmtree', (['chunked_video_dir'], {}), '(chunked_video_dir)\n', (6364, 6383), False, 'import shutil\n'), ((6411, 6436), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (6434, 6436), False, 'import argparse\n'), ((2129, 2169), 'ultitrackerapi.video.get_video_duration', 'video.get_video_duration', (['video_filename'], {}), '(video_filename)\n', (2153, 2169), False, 'from ultitrackerapi import get_backend, get_logger, get_s3Client, video\n'), ((2194, 2242), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'video_length_seconds'}), '(seconds=video_length_seconds)\n', (2212, 2242), False, 'import datetime\n'), ((3792, 3821), 'concurrent.futures.ThreadPoolExecutor', 'futures.ThreadPoolExecutor', (['(8)'], {}), '(8)\n', (3818, 3821), False, 'from concurrent import futures\n'), ((3848, 3877), 'os.listdir', 'os.listdir', (['chunked_video_dir'], {}), '(chunked_video_dir)\n', (3858, 3877), False, 'import os\n'), ((4870, 4912), 'concurrent.futures.ThreadPoolExecutor', 'futures.ThreadPoolExecutor', ([], {'max_workers': '(16)'}), '(max_workers=16)\n', (4896, 4912), False, 'from concurrent import futures\n'), ((5331, 5367), 'concurrent.futures.as_completed', 'futures.as_completed', (['result_futures'], {}), '(result_futures)\n', (5351, 5367), False, 'from concurrent import futures\n'), ((4756, 4785), 'os.listdir', 'os.listdir', (['chunked_video_dir'], {}), '(chunked_video_dir)\n', (4766, 4785), False, 'import os\n'), ((1559, 1571), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1569, 1571), False, 'import 
uuid\n'), ((1668, 1692), 'json.dumps', 'json.dumps', (['img_metadata'], {}), '(img_metadata)\n', (1678, 1692), False, 'import json\n'), ((3956, 3992), 'os.path.join', 'os.path.join', (['chunked_video_dir', 'vid'], {}), '(chunked_video_dir, vid)\n', (3968, 3992), False, 'import os\n'), ((4070, 4098), 'posixpath.dirname', 'posixpath.dirname', (['video_key'], {}), '(video_key)\n', (4087, 4098), False, 'import posixpath\n'), ((5569, 5614), 'posixpath.join', 'posixpath.join', (["frame['bucket']", "frame['key']"], {}), "(frame['bucket'], frame['key'])\n", (5583, 5614), False, 'import posixpath\n'), ((898, 920), 'posixpath.dirname', 'posixpath.dirname', (['key'], {}), '(key)\n', (915, 920), False, 'import posixpath\n'), ((813, 836), 'posixpath.basename', 'posixpath.basename', (['key'], {}), '(key)\n', (831, 836), False, 'import posixpath\n'), ((4486, 4514), 'posixpath.dirname', 'posixpath.dirname', (['video_key'], {}), '(video_key)\n', (4503, 4514), False, 'import posixpath\n'), ((4589, 4617), 'posixpath.dirname', 'posixpath.dirname', (['video_key'], {}), '(video_key)\n', (4606, 4617), False, 'import posixpath\n'), ((4629, 4657), 'posixpath.splitext', 'posixpath.splitext', (['basename'], {}), '(basename)\n', (4647, 4657), False, 'import posixpath\n')] |
PacktPublishing/Hands-On-Ensemble-Learning-with-Python | Chapter03/scikit_soft_voting_2knn.py | db9b90189dbebbc6ab5ebba0e2e173ba80197c35 | # --- SECTION 1 ---
# Import the required libraries
from sklearn import datasets, naive_bayes, svm, neighbors
from sklearn.ensemble import VotingClassifier
from sklearn.metrics import accuracy_score
# Load the dataset
breast_cancer = datasets.load_breast_cancer()
x, y = breast_cancer.data, breast_cancer.target
# Split the train and test samples
test_samples = 100
x_train, y_train = x[:-test_samples], y[:-test_samples]
x_test, y_test = x[-test_samples:], y[-test_samples:]
# --- SECTION 2 ---
# Instantiate the learners (classifiers)
learner_1 = neighbors.KNeighborsClassifier(n_neighbors=5)
learner_2 = naive_bayes.GaussianNB()
learner_3 = neighbors.KNeighborsClassifier(n_neighbors=50)
# --- SECTION 3 ---
# Instantiate the voting classifier
voting = VotingClassifier([('5NN', learner_1),
('NB', learner_2),
('50NN', learner_3)],
voting='soft')
# --- SECTION 4 ---
# Fit classifier with the training data
voting.fit(x_train, y_train)
learner_1.fit(x_train, y_train)
learner_2.fit(x_train, y_train)
learner_3.fit(x_train, y_train)
# --- SECTION 5 ---
# Predict the most probable class
hard_predictions = voting.predict(x_test)
# --- SECTION 6 ---
# Get the base learner predictions
predictions_1 = learner_1.predict(x_test)
predictions_2 = learner_2.predict(x_test)
predictions_3 = learner_3.predict(x_test)
# --- SECTION 7 ---
# Accuracies of base learners
print('L1:', accuracy_score(y_test, predictions_1))
print('L2:', accuracy_score(y_test, predictions_2))
print('L3:', accuracy_score(y_test, predictions_3))
# Accuracy of the soft voting ensemble
print('-'*30)
print('Soft Voting:', accuracy_score(y_test, hard_predictions))
# --- SECTION 1 ---
# Import the required libraries
import matplotlib as mpl
import matplotlib.pyplot as plt
mpl.style.use('seaborn-paper')
# --- SECTION 2 ---
# Get the wrongly predicted instances
# and the predicted probabilities for the whole test set
errors = y_test-hard_predictions
probabilities_1 = learner_1.predict_proba(x_test)
probabilities_2 = learner_2.predict_proba(x_test)
probabilities_3 = learner_3.predict_proba(x_test)
# --- SECTION 3 ---
# Store the predicted probability for
# each wrongly predicted instance, for each base learner
x = []
y_1 = []
y_2 = []
y_3 = []
y_avg = []
y_avg=[]
for i in range(len(errors)):
if not errors[i] == 0:
x.append(i)
y_1.append(probabilities_1[i][0])
y_2.append(probabilities_2[i][0])
y_3.append(probabilities_3[i][0])
y_avg.append((probabilities_1[i][0]+probabilities_2[i][0]+probabilities_3[i][0])/3)
# --- SECTION 4 ---
# Plot the predicted probability of each base learner as
# a bar and the average probability as an X
plt.bar(x, y_1, 3, label='5NN')
plt.bar(x, y_2, 2, label='NB')
plt.bar(x, y_3, 1, label='50NN')
plt.scatter(x, y_avg, marker='x', c='k', s=150, label='Average Positive', zorder=10)
y = [0.5 for x in range(len(errors))]
plt.plot(y, c='k', linestyle='--')
plt.title('Positive Probability')
plt.xlabel('Test sample')
plt.ylabel('probability')
plt.legend()
| [((241, 270), 'sklearn.datasets.load_breast_cancer', 'datasets.load_breast_cancer', ([], {}), '()\n', (268, 270), False, 'from sklearn import datasets, naive_bayes, svm, neighbors\n'), ((569, 614), 'sklearn.neighbors.KNeighborsClassifier', 'neighbors.KNeighborsClassifier', ([], {'n_neighbors': '(5)'}), '(n_neighbors=5)\n', (599, 614), False, 'from sklearn import datasets, naive_bayes, svm, neighbors\n'), ((628, 652), 'sklearn.naive_bayes.GaussianNB', 'naive_bayes.GaussianNB', ([], {}), '()\n', (650, 652), False, 'from sklearn import datasets, naive_bayes, svm, neighbors\n'), ((666, 712), 'sklearn.neighbors.KNeighborsClassifier', 'neighbors.KNeighborsClassifier', ([], {'n_neighbors': '(50)'}), '(n_neighbors=50)\n', (696, 712), False, 'from sklearn import datasets, naive_bayes, svm, neighbors\n'), ((784, 882), 'sklearn.ensemble.VotingClassifier', 'VotingClassifier', (["[('5NN', learner_1), ('NB', learner_2), ('50NN', learner_3)]"], {'voting': '"""soft"""'}), "([('5NN', learner_1), ('NB', learner_2), ('50NN', learner_3\n )], voting='soft')\n", (800, 882), False, 'from sklearn.ensemble import VotingClassifier\n'), ((1892, 1922), 'matplotlib.style.use', 'mpl.style.use', (['"""seaborn-paper"""'], {}), "('seaborn-paper')\n", (1905, 1922), True, 'import matplotlib as mpl\n'), ((2880, 2911), 'matplotlib.pyplot.bar', 'plt.bar', (['x', 'y_1', '(3)'], {'label': '"""5NN"""'}), "(x, y_1, 3, label='5NN')\n", (2887, 2911), True, 'import matplotlib.pyplot as plt\n'), ((2915, 2945), 'matplotlib.pyplot.bar', 'plt.bar', (['x', 'y_2', '(2)'], {'label': '"""NB"""'}), "(x, y_2, 2, label='NB')\n", (2922, 2945), True, 'import matplotlib.pyplot as plt\n'), ((2949, 2981), 'matplotlib.pyplot.bar', 'plt.bar', (['x', 'y_3', '(1)'], {'label': '"""50NN"""'}), "(x, y_3, 1, label='50NN')\n", (2956, 2981), True, 'import matplotlib.pyplot as plt\n'), ((2985, 3073), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x', 'y_avg'], {'marker': '"""x"""', 'c': '"""k"""', 's': '(150)', 'label': '"""Average Positive"""', 'zorder': '(10)'}), "(x, y_avg, marker='x', c='k', s=150, label='Average Positive',\n zorder=10)\n", (2996, 3073), True, 'import matplotlib.pyplot as plt\n'), ((3112, 3146), 'matplotlib.pyplot.plot', 'plt.plot', (['y'], {'c': '"""k"""', 'linestyle': '"""--"""'}), "(y, c='k', linestyle='--')\n", (3120, 3146), True, 'import matplotlib.pyplot as plt\n'), ((3150, 3183), 'matplotlib.pyplot.title', 'plt.title', (['"""Positive Probability"""'], {}), "('Positive Probability')\n", (3159, 3183), True, 'import matplotlib.pyplot as plt\n'), ((3185, 3210), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Test sample"""'], {}), "('Test sample')\n", (3195, 3210), True, 'import matplotlib.pyplot as plt\n'), ((3212, 3237), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""probability"""'], {}), "('probability')\n", (3222, 3237), True, 'import matplotlib.pyplot as plt\n'), ((3239, 3251), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3249, 3251), True, 'import matplotlib.pyplot as plt\n'), ((1523, 1560), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'predictions_1'], {}), '(y_test, predictions_1)\n', (1537, 1560), False, 'from sklearn.metrics import accuracy_score\n'), ((1576, 1613), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'predictions_2'], {}), '(y_test, predictions_2)\n', (1590, 1613), False, 'from sklearn.metrics import accuracy_score\n'), ((1629, 1666), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'predictions_3'], {}), '(y_test, predictions_3)\n', (1643, 
1666), False, 'from sklearn.metrics import accuracy_score\n'), ((1733, 1773), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'hard_predictions'], {}), '(y_test, hard_predictions)\n', (1747, 1773), False, 'from sklearn.metrics import accuracy_score\n')] |
kgarchie/ReSTful-Django-API | API/migrations/0005_alter_news_date_time_alter_news_headline.py | 851c76eb75747042ceac0a6c164266409ca935d4 | # Generated by Django 4.0.3 on 2022-03-23 14:31
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('API', '0004_alter_news_date_time_alter_news_headline'),
]
operations = [
migrations.AlterField(
model_name='news',
name='date_time',
field=models.DateTimeField(default=datetime.datetime(2022, 3, 23, 17, 31, 17, 27766)),
),
migrations.AlterField(
model_name='news',
name='headline',
field=models.CharField(max_length=100),
),
]
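    # Editorial note: the hard-coded datetime default above is baked in because the
    # model most likely evaluates datetime.datetime(...)/now() at makemigrations time;
    # using django.utils.timezone.now (or auto_now_add=True) as the default is the
    # usual way to avoid regenerating this AlterField on every makemigrations run.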
| [((572, 604), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (588, 604), False, 'from django.db import migrations, models\n'), ((400, 449), 'datetime.datetime', 'datetime.datetime', (['(2022)', '(3)', '(23)', '(17)', '(31)', '(17)', '(27766)'], {}), '(2022, 3, 23, 17, 31, 17, 27766)\n', (417, 449), False, 'import datetime\n')] |
stefan-woerner/aqua | qiskit/ml/datasets/iris.py | 12e1b867e254977d9c5992612a7919d8fe016cb4 | # This code is part of Qiskit.
#
# (C) Copyright IBM 2018, 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
iris dataset
"""
import numpy as np
from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from sklearn.decomposition import PCA
from qiskit.aqua import MissingOptionalLibraryError
def iris(training_size, test_size, n, plot_data=False):
""" returns iris dataset """
class_labels = [r'A', r'B', r'C']
data, target = datasets.load_iris(return_X_y=True)
sample_train, sample_test, label_train, label_test = \
train_test_split(data, target, test_size=1, random_state=42)
# Now we standardize for gaussian around 0 with unit variance
std_scale = StandardScaler().fit(sample_train)
sample_train = std_scale.transform(sample_train)
sample_test = std_scale.transform(sample_test)
# Now reduce number of features to number of qubits
pca = PCA(n_components=n).fit(sample_train)
sample_train = pca.transform(sample_train)
sample_test = pca.transform(sample_test)
# Scale to the range (-1,+1)
samples = np.append(sample_train, sample_test, axis=0)
minmax_scale = MinMaxScaler((-1, 1)).fit(samples)
sample_train = minmax_scale.transform(sample_train)
sample_test = minmax_scale.transform(sample_test)
# Pick training size number of samples from each distro
training_input = {key: (sample_train[label_train == k, :])[:training_size]
for k, key in enumerate(class_labels)}
test_input = {key: (sample_test[label_test == k, :])[:test_size]
for k, key in enumerate(class_labels)}
if plot_data:
try:
import matplotlib.pyplot as plt
except ImportError as ex:
raise MissingOptionalLibraryError(
libname='Matplotlib',
name='iris',
pip_install='pip install matplotlib') from ex
for k in range(0, 3):
plt.scatter(sample_train[label_train == k, 0][:training_size],
sample_train[label_train == k, 1][:training_size])
plt.title("Iris dataset")
plt.show()
return sample_train, training_input, test_input, class_labels
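# Editorial usage sketch (not part of the original module): the sizes below are
# illustrative assumptions, and running it needs the same optional dependencies
# as the function itself (scikit-learn; matplotlib only when plot_data=True).
if __name__ == "__main__":
    sample_train, training_input, test_input, class_labels = iris(
        training_size=20, test_size=10, n=2, plot_data=False)
    print(class_labels)
    print({key: value.shape for key, value in training_input.items()})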
| [((908, 943), 'sklearn.datasets.load_iris', 'datasets.load_iris', ([], {'return_X_y': '(True)'}), '(return_X_y=True)\n', (926, 943), False, 'from sklearn import datasets\n'), ((1011, 1071), 'sklearn.model_selection.train_test_split', 'train_test_split', (['data', 'target'], {'test_size': '(1)', 'random_state': '(42)'}), '(data, target, test_size=1, random_state=42)\n', (1027, 1071), False, 'from sklearn.model_selection import train_test_split\n'), ((1539, 1583), 'numpy.append', 'np.append', (['sample_train', 'sample_test'], {'axis': '(0)'}), '(sample_train, sample_test, axis=0)\n', (1548, 1583), True, 'import numpy as np\n'), ((2550, 2575), 'matplotlib.pyplot.title', 'plt.title', (['"""Iris dataset"""'], {}), "('Iris dataset')\n", (2559, 2575), True, 'import matplotlib.pyplot as plt\n'), ((2584, 2594), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2592, 2594), True, 'import matplotlib.pyplot as plt\n'), ((1155, 1171), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (1169, 1171), False, 'from sklearn.preprocessing import StandardScaler, MinMaxScaler\n'), ((1361, 1380), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': 'n'}), '(n_components=n)\n', (1364, 1380), False, 'from sklearn.decomposition import PCA\n'), ((1603, 1624), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', (['(-1, 1)'], {}), '((-1, 1))\n', (1615, 1624), False, 'from sklearn.preprocessing import StandardScaler, MinMaxScaler\n'), ((2403, 2521), 'matplotlib.pyplot.scatter', 'plt.scatter', (['sample_train[label_train == k, 0][:training_size]', 'sample_train[label_train == k, 1][:training_size]'], {}), '(sample_train[label_train == k, 0][:training_size], sample_train\n [label_train == k, 1][:training_size])\n', (2414, 2521), True, 'import matplotlib.pyplot as plt\n'), ((2203, 2308), 'qiskit.aqua.MissingOptionalLibraryError', 'MissingOptionalLibraryError', ([], {'libname': '"""Matplotlib"""', 'name': '"""iris"""', 'pip_install': '"""pip install matplotlib"""'}), "(libname='Matplotlib', name='iris', pip_install=\n 'pip install matplotlib')\n", (2230, 2308), False, 'from qiskit.aqua import MissingOptionalLibraryError\n')] |
discodavey/h | tests/h/views/api_auth_test.py | 7bff8478b3a5b936de82ac9fcd89b355f4afd3aa | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import json
import mock
import pytest
from oauthlib.oauth2 import InvalidRequestFatalError
from oauthlib.common import Request as OAuthRequest
from pyramid import httpexceptions
from h._compat import urlparse
from h.exceptions import OAuthTokenError
from h.models.auth_client import ResponseType
from h.services.auth_token import auth_token_service_factory
from h.services.oauth_provider import OAuthProviderService
from h.services.oauth_validator import DEFAULT_SCOPES
from h.services.user import user_service_factory
from h.util.datetime import utc_iso8601
from h.views import api_auth as views
@pytest.mark.usefixtures('routes', 'oauth_provider', 'user_svc')
class TestOAuthAuthorizeController(object):
@pytest.mark.usefixtures('authenticated_user')
@pytest.mark.parametrize('view_name', ['get', 'get_web_message'])
def test_get_validates_request(self, controller, pyramid_request, view_name):
view = getattr(controller, view_name)
view()
controller.oauth.validate_authorization_request.assert_called_once_with(
pyramid_request.url)
@pytest.mark.parametrize('view_name', ['get', 'get_web_message'])
def test_get_raises_for_invalid_request(self, controller, view_name):
controller.oauth.validate_authorization_request.side_effect = InvalidRequestFatalError('boom!')
with pytest.raises(InvalidRequestFatalError) as exc:
view = getattr(controller, view_name)
view()
assert exc.value.description == 'boom!'
@pytest.mark.parametrize('view_name', ['get', 'get_web_message'])
def test_get_redirects_to_login_when_not_authenticated(self, controller, pyramid_request, view_name):
with pytest.raises(httpexceptions.HTTPFound) as exc:
view = getattr(controller, view_name)
view()
parsed_url = urlparse.urlparse(exc.value.location)
assert parsed_url.path == '/login'
assert urlparse.parse_qs(parsed_url.query) == {'next': [pyramid_request.url],
'for_oauth': ['True']}
@pytest.mark.parametrize('response_mode,view_name', [
(None, 'get'),
('web_message', 'get_web_message'),
])
def test_get_returns_expected_context(self, controller, auth_client, authenticated_user, oauth_request, response_mode, view_name):
oauth_request.response_mode = response_mode
view = getattr(controller, view_name)
assert view() == {
'client_id': auth_client.id,
'client_name': auth_client.name,
'response_mode': response_mode,
'response_type': auth_client.response_type.value,
'state': 'foobar',
'username': authenticated_user.username,
}
@pytest.mark.parametrize('view_name', ['get', 'get_web_message'])
def test_get_creates_authorization_response_for_trusted_clients(self, controller, auth_client, authenticated_user, pyramid_request, view_name):
auth_client.trusted = True
view = getattr(controller, view_name)
view()
controller.oauth.create_authorization_response.assert_called_once_with(
pyramid_request.url,
credentials={'user': authenticated_user},
scopes=DEFAULT_SCOPES)
def test_get_returns_redirect_immediately_for_trusted_clients(self, controller, auth_client, authenticated_user, pyramid_request):
auth_client.trusted = True
response = controller.get()
expected = '{}?code=abcdef123456&state=foobar'.format(auth_client.redirect_uri)
assert response.location == expected
@pytest.mark.usefixtures('authenticated_user')
def test_get_web_message_renders_template_for_trusted_clients(self, controller, auth_client):
auth_client.trusted = True
assert controller.request.override_renderer is None
controller.get_web_message()
assert controller.request.override_renderer == 'h:templates/oauth/authorize_web_message.html.jinja2'
@pytest.mark.usefixtures('authenticated_user')
def test_get_web_message_returns_context_for_trusted_clients(self, controller, auth_client):
auth_client.trusted = True
response = controller.get_web_message()
assert response == {
'code': 'abcdef123456',
'origin': 'http://client.com',
'state': 'foobar',
}
@pytest.mark.usefixtures('authenticated_user')
def test_get_web_message_allows_empty_state_in_context_for_trusted_clients(self, controller, auth_client, oauth_provider):
auth_client.trusted = True
headers = {'Location': '{}?code=abcdef123456'.format(auth_client.redirect_uri)}
oauth_provider.create_authorization_response.return_value = (headers, None, 302)
response = controller.get_web_message()
assert response['state'] is None
@pytest.mark.parametrize('view_name', ['post', 'post_web_message'])
def test_post_creates_authorization_response(self, controller, pyramid_request, authenticated_user, view_name):
pyramid_request.url = 'http://example.com/auth?client_id=the-client-id' + \
'&response_type=code' + \
'&state=foobar' + \
'&scope=exploit'
view = getattr(controller, view_name)
view()
controller.oauth.create_authorization_response.assert_called_once_with(
pyramid_request.url,
credentials={'user': authenticated_user},
scopes=DEFAULT_SCOPES)
@pytest.mark.usefixtures('authenticated_user')
@pytest.mark.parametrize('view_name', ['post', 'post_web_message'])
def test_post_raises_for_invalid_request(self, controller, view_name):
controller.oauth.create_authorization_response.side_effect = InvalidRequestFatalError('boom!')
with pytest.raises(InvalidRequestFatalError) as exc:
view = getattr(controller, view_name)
view()
assert exc.value.description == 'boom!'
def test_post_redirects_to_client(self, controller, auth_client):
response = controller.post()
expected = '{}?code=abcdef123456&state=foobar'.format(auth_client.redirect_uri)
assert response.location == expected
def test_post_web_message_returns_expected_context(self, controller, auth_client):
response = controller.post_web_message()
assert response == {
'code': 'abcdef123456',
'origin': 'http://client.com',
'state': 'foobar',
}
def test_post_web_message_allows_empty_state_in_context(self, controller, auth_client, oauth_provider):
auth_client.trusted = True
headers = {'Location': '{}?code=abcdef123456'.format(auth_client.redirect_uri)}
oauth_provider.create_authorization_response.return_value = (headers, None, 302)
response = controller.post_web_message()
assert response['state'] is None
@pytest.fixture
def controller(self, pyramid_request):
pyramid_request.override_renderer = None
return views.OAuthAuthorizeController(None, pyramid_request)
@pytest.fixture
def oauth_request(self):
return OAuthRequest('/')
@pytest.fixture
def oauth_provider(self, pyramid_config, auth_client, oauth_request):
svc = mock.create_autospec(OAuthProviderService, instance=True)
scopes = ['annotation:read', 'annotation:write']
credentials = {'client_id': auth_client.id, 'state': 'foobar', 'request': oauth_request}
svc.validate_authorization_request.return_value = (scopes, credentials)
headers = {'Location': '{}?code=abcdef123456&state=foobar'.format(auth_client.redirect_uri)}
body = None
status = 302
svc.create_authorization_response.return_value = (headers, body, status)
pyramid_config.register_service(svc, name='oauth_provider')
return svc
@pytest.fixture
def auth_client(self, factories):
return factories.AuthClient(name='Test Client',
redirect_uri='http://client.com/auth/callback',
response_type=ResponseType.code)
@pytest.fixture
def user_svc(self, pyramid_config, pyramid_request):
svc = mock.Mock(spec_set=user_service_factory(None, pyramid_request))
pyramid_config.register_service(svc, name='user')
return svc
@pytest.fixture
def pyramid_request(self, pyramid_request):
pyramid_request.url = 'http://example.com/auth?client_id=the-client-id&response_type=code&state=foobar'
return pyramid_request
@pytest.fixture
def authenticated_user(self, factories, pyramid_config, user_svc):
user = factories.User.build()
pyramid_config.testing_securitypolicy(user.userid)
def fake_fetch(userid):
if userid == user.userid:
return user
user_svc.fetch.side_effect = fake_fetch
return user
@pytest.fixture
def routes(self, pyramid_config):
pyramid_config.add_route('login', '/login')
@pytest.mark.usefixtures('oauth_provider')
class TestOAuthAccessTokenController(object):
def test_it_creates_token_response(self, pyramid_request, controller, oauth_provider):
controller.post()
oauth_provider.create_token_response.assert_called_once_with(
pyramid_request.url, pyramid_request.method, pyramid_request.POST, pyramid_request.headers)
def test_it_returns_correct_response_on_success(self, controller, oauth_provider):
body = json.dumps({'access_token': 'the-access-token'})
oauth_provider.create_token_response.return_value = ({}, body, 200)
assert controller.post() == {'access_token': 'the-access-token'}
def test_it_raises_when_error(self, controller, oauth_provider):
body = json.dumps({'error': 'invalid_request'})
oauth_provider.create_token_response.return_value = ({}, body, 400)
with pytest.raises(httpexceptions.HTTPBadRequest) as exc:
controller.post()
assert exc.value.body == body
@pytest.fixture
def controller(self, pyramid_request):
pyramid_request.method = 'POST'
pyramid_request.POST['grant_type'] = 'authorization_code'
pyramid_request.POST['code'] = 'the-authz-code'
pyramid_request.headers = {'X-Test-ID': '1234'}
return views.OAuthAccessTokenController(pyramid_request)
@pytest.fixture
def oauth_provider(self, pyramid_config):
svc = mock.Mock(spec_set=['create_token_response'])
svc.create_token_response.return_value = ({}, '{}', 200)
pyramid_config.register_service(svc, name='oauth_provider')
return svc
@pytest.mark.usefixtures('oauth_provider')
class TestOAuthRevocationController(object):
def test_it_creates_revocation_response(self, pyramid_request, controller, oauth_provider):
controller.post()
oauth_provider.create_revocation_response.assert_called_once_with(
pyramid_request.url, pyramid_request.method, pyramid_request.POST, pyramid_request.headers)
def test_it_returns_empty_response_on_success(self, controller):
response = controller.post()
assert response == {}
def test_it_raises_when_error(self, controller, oauth_provider):
body = json.dumps({'error': 'invalid_request'})
oauth_provider.create_revocation_response.return_value = ({}, body, 400)
with pytest.raises(httpexceptions.HTTPBadRequest) as exc:
controller.post()
assert exc.value.body == body
@pytest.fixture
def controller(self, pyramid_request):
pyramid_request.method = 'POST'
pyramid_request.POST['token'] = 'the-token'
pyramid_request.headers = {'X-Test-ID': '1234'}
return views.OAuthRevocationController(pyramid_request)
@pytest.fixture
def oauth_provider(self, pyramid_config):
svc = mock.Mock(spec_set=['create_revocation_response'])
svc.create_revocation_response.return_value = ({}, '{}', 200)
pyramid_config.register_service(svc, name='oauth_provider')
return svc
class TestDebugToken(object):
def test_it_raises_error_when_token_is_missing(self, pyramid_request):
pyramid_request.auth_token = None
with pytest.raises(OAuthTokenError) as exc:
views.debug_token(pyramid_request)
assert exc.value.type == 'missing_token'
assert 'Bearer token is missing' in exc.value.message
def test_it_raises_error_when_token_is_empty(self, pyramid_request):
pyramid_request.auth_token = ''
with pytest.raises(OAuthTokenError) as exc:
views.debug_token(pyramid_request)
assert exc.value.type == 'missing_token'
assert 'Bearer token is missing' in exc.value.message
def test_it_validates_token(self, pyramid_request, token_service):
pyramid_request.auth_token = 'the-access-token'
views.debug_token(pyramid_request)
token_service.validate.assert_called_once_with('the-access-token')
def test_it_raises_error_when_token_is_invalid(self, pyramid_request, token_service):
pyramid_request.auth_token = 'the-token'
token_service.validate.return_value = None
with pytest.raises(OAuthTokenError) as exc:
views.debug_token(pyramid_request)
assert exc.value.type == 'missing_token'
assert 'Bearer token does not exist or is expired' in exc.value.message
def test_returns_debug_data_for_oauth_token(self, pyramid_request, token_service, oauth_token):
pyramid_request.auth_token = oauth_token.value
token_service.fetch.return_value = oauth_token
result = views.debug_token(pyramid_request)
assert result == {'userid': oauth_token.userid,
'client': {'id': oauth_token.authclient.id,
'name': oauth_token.authclient.name},
'issued_at': utc_iso8601(oauth_token.created),
'expires_at': utc_iso8601(oauth_token.expires),
'expired': oauth_token.expired}
def test_returns_debug_data_for_developer_token(self, pyramid_request, token_service, developer_token):
pyramid_request.auth_token = developer_token.value
token_service.fetch.return_value = developer_token
result = views.debug_token(pyramid_request)
assert result == {'userid': developer_token.userid,
'issued_at': utc_iso8601(developer_token.created),
'expires_at': None,
'expired': False}
@pytest.fixture
def token_service(self, pyramid_config, pyramid_request):
svc = mock.Mock(spec_set=auth_token_service_factory(None, pyramid_request))
pyramid_config.register_service(svc, name='auth_token')
return svc
@pytest.fixture
def oauth_token(self, factories):
authclient = factories.AuthClient(name='Example Client')
expires = datetime.datetime.utcnow() + datetime.timedelta(minutes=10)
return factories.DeveloperToken(authclient=authclient, expires=expires)
@pytest.fixture
def developer_token(self, factories):
return factories.DeveloperToken()
class TestAPITokenError(object):
def test_it_sets_the_response_status_code(self, pyramid_request):
context = OAuthTokenError('the error message', 'error_type', status_code=403)
views.api_token_error(context, pyramid_request)
assert pyramid_request.response.status_code == 403
def test_it_returns_the_error(self, pyramid_request):
context = OAuthTokenError('', 'error_type')
result = views.api_token_error(context, pyramid_request)
assert result['error'] == 'error_type'
def test_it_returns_error_description(self, pyramid_request):
context = OAuthTokenError('error description', 'error_type')
result = views.api_token_error(context, pyramid_request)
assert result['error_description'] == 'error description'
def test_it_skips_description_when_missing(self, pyramid_request):
context = OAuthTokenError(None, 'invalid_request')
result = views.api_token_error(context, pyramid_request)
assert 'error_description' not in result
def test_it_skips_description_when_empty(self, pyramid_request):
context = OAuthTokenError('', 'invalid_request')
result = views.api_token_error(context, pyramid_request)
assert 'error_description' not in result
| [((685, 748), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""routes"""', '"""oauth_provider"""', '"""user_svc"""'], {}), "('routes', 'oauth_provider', 'user_svc')\n", (708, 748), False, 'import pytest\n'), ((9321, 9362), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""oauth_provider"""'], {}), "('oauth_provider')\n", (9344, 9362), False, 'import pytest\n'), ((10973, 11014), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""oauth_provider"""'], {}), "('oauth_provider')\n", (10996, 11014), False, 'import pytest\n'), ((798, 843), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""authenticated_user"""'], {}), "('authenticated_user')\n", (821, 843), False, 'import pytest\n'), ((849, 913), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""view_name"""', "['get', 'get_web_message']"], {}), "('view_name', ['get', 'get_web_message'])\n", (872, 913), False, 'import pytest\n'), ((1178, 1242), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""view_name"""', "['get', 'get_web_message']"], {}), "('view_name', ['get', 'get_web_message'])\n", (1201, 1242), False, 'import pytest\n'), ((1607, 1671), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""view_name"""', "['get', 'get_web_message']"], {}), "('view_name', ['get', 'get_web_message'])\n", (1630, 1671), False, 'import pytest\n'), ((2181, 2289), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""response_mode,view_name"""', "[(None, 'get'), ('web_message', 'get_web_message')]"], {}), "('response_mode,view_name', [(None, 'get'), (\n 'web_message', 'get_web_message')])\n", (2204, 2289), False, 'import pytest\n'), ((2861, 2925), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""view_name"""', "['get', 'get_web_message']"], {}), "('view_name', ['get', 'get_web_message'])\n", (2884, 2925), False, 'import pytest\n'), ((3722, 3767), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""authenticated_user"""'], {}), "('authenticated_user')\n", (3745, 3767), False, 'import pytest\n'), ((4114, 4159), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""authenticated_user"""'], {}), "('authenticated_user')\n", (4137, 4159), False, 'import pytest\n'), ((4497, 4542), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""authenticated_user"""'], {}), "('authenticated_user')\n", (4520, 4542), False, 'import pytest\n'), ((4979, 5045), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""view_name"""', "['post', 'post_web_message']"], {}), "('view_name', ['post', 'post_web_message'])\n", (5002, 5045), False, 'import pytest\n'), ((5739, 5784), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""authenticated_user"""'], {}), "('authenticated_user')\n", (5762, 5784), False, 'import pytest\n'), ((5790, 5856), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""view_name"""', "['post', 'post_web_message']"], {}), "('view_name', ['post', 'post_web_message'])\n", (5813, 5856), False, 'import pytest\n'), ((1387, 1420), 'oauthlib.oauth2.InvalidRequestFatalError', 'InvalidRequestFatalError', (['"""boom!"""'], {}), "('boom!')\n", (1411, 1420), False, 'from oauthlib.oauth2 import InvalidRequestFatalError\n'), ((1930, 1967), 'h._compat.urlparse.urlparse', 'urlparse.urlparse', (['exc.value.location'], {}), '(exc.value.location)\n', (1947, 1967), False, 'from h._compat import urlparse\n'), ((6001, 6034), 'oauthlib.oauth2.InvalidRequestFatalError', 'InvalidRequestFatalError', (['"""boom!"""'], {}), "('boom!')\n", (6025, 6034), False, 'from 
oauthlib.oauth2 import InvalidRequestFatalError\n'), ((7285, 7338), 'h.views.api_auth.OAuthAuthorizeController', 'views.OAuthAuthorizeController', (['None', 'pyramid_request'], {}), '(None, pyramid_request)\n', (7315, 7338), True, 'from h.views import api_auth as views\n'), ((7404, 7421), 'oauthlib.common.Request', 'OAuthRequest', (['"""/"""'], {}), "('/')\n", (7416, 7421), True, 'from oauthlib.common import Request as OAuthRequest\n'), ((7531, 7588), 'mock.create_autospec', 'mock.create_autospec', (['OAuthProviderService'], {'instance': '(True)'}), '(OAuthProviderService, instance=True)\n', (7551, 7588), False, 'import mock\n'), ((9807, 9855), 'json.dumps', 'json.dumps', (["{'access_token': 'the-access-token'}"], {}), "({'access_token': 'the-access-token'})\n", (9817, 9855), False, 'import json\n'), ((10091, 10131), 'json.dumps', 'json.dumps', (["{'error': 'invalid_request'}"], {}), "({'error': 'invalid_request'})\n", (10101, 10131), False, 'import json\n'), ((10641, 10690), 'h.views.api_auth.OAuthAccessTokenController', 'views.OAuthAccessTokenController', (['pyramid_request'], {}), '(pyramid_request)\n', (10673, 10690), True, 'from h.views import api_auth as views\n'), ((10772, 10817), 'mock.Mock', 'mock.Mock', ([], {'spec_set': "['create_token_response']"}), "(spec_set=['create_token_response'])\n", (10781, 10817), False, 'import mock\n'), ((11587, 11627), 'json.dumps', 'json.dumps', (["{'error': 'invalid_request'}"], {}), "({'error': 'invalid_request'})\n", (11597, 11627), False, 'import json\n'), ((12072, 12120), 'h.views.api_auth.OAuthRevocationController', 'views.OAuthRevocationController', (['pyramid_request'], {}), '(pyramid_request)\n', (12103, 12120), True, 'from h.views import api_auth as views\n'), ((12202, 12252), 'mock.Mock', 'mock.Mock', ([], {'spec_set': "['create_revocation_response']"}), "(spec_set=['create_revocation_response'])\n", (12211, 12252), False, 'import mock\n'), ((13234, 13268), 'h.views.api_auth.debug_token', 'views.debug_token', (['pyramid_request'], {}), '(pyramid_request)\n', (13251, 13268), True, 'from h.views import api_auth as views\n'), ((13995, 14029), 'h.views.api_auth.debug_token', 'views.debug_token', (['pyramid_request'], {}), '(pyramid_request)\n', (14012, 14029), True, 'from h.views import api_auth as views\n'), ((14682, 14716), 'h.views.api_auth.debug_token', 'views.debug_token', (['pyramid_request'], {}), '(pyramid_request)\n', (14699, 14716), True, 'from h.views import api_auth as views\n'), ((15705, 15772), 'h.exceptions.OAuthTokenError', 'OAuthTokenError', (['"""the error message"""', '"""error_type"""'], {'status_code': '(403)'}), "('the error message', 'error_type', status_code=403)\n", (15720, 15772), False, 'from h.exceptions import OAuthTokenError\n'), ((15781, 15828), 'h.views.api_auth.api_token_error', 'views.api_token_error', (['context', 'pyramid_request'], {}), '(context, pyramid_request)\n', (15802, 15828), True, 'from h.views import api_auth as views\n'), ((15965, 15998), 'h.exceptions.OAuthTokenError', 'OAuthTokenError', (['""""""', '"""error_type"""'], {}), "('', 'error_type')\n", (15980, 15998), False, 'from h.exceptions import OAuthTokenError\n'), ((16016, 16063), 'h.views.api_auth.api_token_error', 'views.api_token_error', (['context', 'pyramid_request'], {}), '(context, pyramid_request)\n', (16037, 16063), True, 'from h.views import api_auth as views\n'), ((16196, 16246), 'h.exceptions.OAuthTokenError', 'OAuthTokenError', (['"""error description"""', '"""error_type"""'], {}), "('error description', 'error_type')\n", 
(16211, 16246), False, 'from h.exceptions import OAuthTokenError\n'), ((16264, 16311), 'h.views.api_auth.api_token_error', 'views.api_token_error', (['context', 'pyramid_request'], {}), '(context, pyramid_request)\n', (16285, 16311), True, 'from h.views import api_auth as views\n'), ((16468, 16508), 'h.exceptions.OAuthTokenError', 'OAuthTokenError', (['None', '"""invalid_request"""'], {}), "(None, 'invalid_request')\n", (16483, 16508), False, 'from h.exceptions import OAuthTokenError\n'), ((16526, 16573), 'h.views.api_auth.api_token_error', 'views.api_token_error', (['context', 'pyramid_request'], {}), '(context, pyramid_request)\n', (16547, 16573), True, 'from h.views import api_auth as views\n'), ((16711, 16749), 'h.exceptions.OAuthTokenError', 'OAuthTokenError', (['""""""', '"""invalid_request"""'], {}), "('', 'invalid_request')\n", (16726, 16749), False, 'from h.exceptions import OAuthTokenError\n'), ((16767, 16814), 'h.views.api_auth.api_token_error', 'views.api_token_error', (['context', 'pyramid_request'], {}), '(context, pyramid_request)\n', (16788, 16814), True, 'from h.views import api_auth as views\n'), ((1435, 1474), 'pytest.raises', 'pytest.raises', (['InvalidRequestFatalError'], {}), '(InvalidRequestFatalError)\n', (1448, 1474), False, 'import pytest\n'), ((1791, 1830), 'pytest.raises', 'pytest.raises', (['httpexceptions.HTTPFound'], {}), '(httpexceptions.HTTPFound)\n', (1804, 1830), False, 'import pytest\n'), ((2026, 2061), 'h._compat.urlparse.parse_qs', 'urlparse.parse_qs', (['parsed_url.query'], {}), '(parsed_url.query)\n', (2043, 2061), False, 'from h._compat import urlparse\n'), ((6049, 6088), 'pytest.raises', 'pytest.raises', (['InvalidRequestFatalError'], {}), '(InvalidRequestFatalError)\n', (6062, 6088), False, 'import pytest\n'), ((10222, 10266), 'pytest.raises', 'pytest.raises', (['httpexceptions.HTTPBadRequest'], {}), '(httpexceptions.HTTPBadRequest)\n', (10235, 10266), False, 'import pytest\n'), ((11723, 11767), 'pytest.raises', 'pytest.raises', (['httpexceptions.HTTPBadRequest'], {}), '(httpexceptions.HTTPBadRequest)\n', (11736, 11767), False, 'import pytest\n'), ((12573, 12603), 'pytest.raises', 'pytest.raises', (['OAuthTokenError'], {}), '(OAuthTokenError)\n', (12586, 12603), False, 'import pytest\n'), ((12624, 12658), 'h.views.api_auth.debug_token', 'views.debug_token', (['pyramid_request'], {}), '(pyramid_request)\n', (12641, 12658), True, 'from h.views import api_auth as views\n'), ((12899, 12929), 'pytest.raises', 'pytest.raises', (['OAuthTokenError'], {}), '(OAuthTokenError)\n', (12912, 12929), False, 'import pytest\n'), ((12950, 12984), 'h.views.api_auth.debug_token', 'views.debug_token', (['pyramid_request'], {}), '(pyramid_request)\n', (12967, 12984), True, 'from h.views import api_auth as views\n'), ((13550, 13580), 'pytest.raises', 'pytest.raises', (['OAuthTokenError'], {}), '(OAuthTokenError)\n', (13563, 13580), False, 'import pytest\n'), ((13601, 13635), 'h.views.api_auth.debug_token', 'views.debug_token', (['pyramid_request'], {}), '(pyramid_request)\n', (13618, 13635), True, 'from h.views import api_auth as views\n'), ((15337, 15363), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (15361, 15363), False, 'import datetime\n'), ((15366, 15396), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(10)'}), '(minutes=10)\n', (15384, 15396), False, 'import datetime\n'), ((8516, 8559), 'h.services.user.user_service_factory', 'user_service_factory', (['None', 'pyramid_request'], {}), '(None, pyramid_request)\n', (8536, 
8559), False, 'from h.services.user import user_service_factory\n'), ((14271, 14303), 'h.util.datetime.utc_iso8601', 'utc_iso8601', (['oauth_token.created'], {}), '(oauth_token.created)\n', (14282, 14303), False, 'from h.util.datetime import utc_iso8601\n'), ((14345, 14377), 'h.util.datetime.utc_iso8601', 'utc_iso8601', (['oauth_token.expires'], {}), '(oauth_token.expires)\n', (14356, 14377), False, 'from h.util.datetime import utc_iso8601\n'), ((14817, 14853), 'h.util.datetime.utc_iso8601', 'utc_iso8601', (['developer_token.created'], {}), '(developer_token.created)\n', (14828, 14853), False, 'from h.util.datetime import utc_iso8601\n'), ((15061, 15110), 'h.services.auth_token.auth_token_service_factory', 'auth_token_service_factory', (['None', 'pyramid_request'], {}), '(None, pyramid_request)\n', (15087, 15110), False, 'from h.services.auth_token import auth_token_service_factory\n')] |
juslee/boost-svn | tools/build/v2/test/conditionals.py | 6d5a03c1f5ed3e2b23bd0f3ad98d13ff33d4dcbb | #!/usr/bin/python
# Copyright 2003 Dave Abrahams
# Copyright 2002, 2003, 2004 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# Test conditional properties.
import BoostBuild
t = BoostBuild.Tester()
# Arrange a project which will build only if 'a.cpp' is compiled with "STATIC"
# define.
t.write("a.cpp", """\
#ifdef STATIC
int main() {}
#endif
""")
# Test conditionals in target requirements.
t.write("jamroot.jam", "exe a : a.cpp : <link>static:<define>STATIC ;")
t.run_build_system(["link=static"])
t.expect_addition("bin/$toolset/debug/link-static/a.exe")
t.rm("bin")
# Test conditionals in project requirements.
t.write("jamroot.jam", """
project : requirements <link>static:<define>STATIC ;
exe a : a.cpp ;
""")
t.run_build_system(["link=static"])
t.expect_addition("bin/$toolset/debug/link-static/a.exe")
t.rm("bin")
# Regression test for a bug found by Ali Azarbayejani. Conditionals inside
# usage requirement were not being evaluated.
t.write("jamroot.jam", """
lib l : l.cpp : : : <link>static:<define>STATIC ;
exe a : a.cpp l ;
""")
t.write("l.cpp", "int i;")
t.run_build_system(["link=static"])
t.expect_addition("bin/$toolset/debug/link-static/a.exe")
t.cleanup()
| [((302, 321), 'BoostBuild.Tester', 'BoostBuild.Tester', ([], {}), '()\n', (319, 321), False, 'import BoostBuild\n')] |
FriendRat/pyo3 | examples/setuptools-rust-starter/tests/test_setuptools_rust_starter.py | 5446fe2062cb3bf11bf61bd4a2c58a7ed8b408d2 | from setuptools_rust_starter import PythonClass, ExampleClass
def test_python_class() -> None:
py_class = PythonClass(value=10)
assert py_class.value == 10
def test_example_class() -> None:
example = ExampleClass(value=11)
assert example.value == 11
| [((112, 133), 'setuptools_rust_starter.PythonClass', 'PythonClass', ([], {'value': '(10)'}), '(value=10)\n', (123, 133), False, 'from setuptools_rust_starter import PythonClass, ExampleClass\n'), ((216, 238), 'setuptools_rust_starter.ExampleClass', 'ExampleClass', ([], {'value': '(11)'}), '(value=11)\n', (228, 238), False, 'from setuptools_rust_starter import PythonClass, ExampleClass\n')] |
sunhailin-Leo/TeamLeoX_BlogsCrawler | spiders/juejin_spider.py | 389ff31e02bdff415c8bc470a3a48da1acb14c4c | import time
from typing import Dict, List, Tuple, Optional
from utils.logger_utils import LogManager
from utils.str_utils import check_is_json
from config import LOG_LEVEL, PROCESS_STATUS_FAIL
from utils.time_utils import datetime_str_change_fmt
from utils.exception_utils import LoginException, ParseDataException
from spiders import BaseSpider, BaseSpiderParseMethodType, CookieUtils
from utils.str_utils import check_is_phone_number, check_is_email_address
logger = LogManager(__name__).get_logger_and_add_handlers(
formatter_template=5, log_level_int=LOG_LEVEL
)
class JuejinSpider(BaseSpider):
def __init__(self, task_id: str, username: str, password: str):
self._main_url = "https://juejin.im/auth/type"
self._blogs_url = "https://timeline-merger-ms.juejin.im/v1/get_entry_by_self"
self._like_blogs_url = "https://user-like-wrapper-ms.juejin.im/v1/user"
self._task_id = task_id
self._login_username = username
self._login_password = password
self._spider_name: str = f"juejin:{self._login_username}"
self._login_cookies: Optional[str] = None
self._login_token: Optional[str] = None
self._login_uid: Optional[str] = None
self._login_client_id: Optional[str] = None
self._response_data = None
self._blogs_data: List = []
self._like_blogs_data: List = []
self._like_blogs_total_page: int = 0
super().__init__()
self._login_cookies = self.get_cookies(spider_name=self._spider_name)
def _check_username(self) -> Optional[Tuple[str, Dict]]:
"""
        Parse the login username and build the matching login payload.
        :return: a tuple of (login url, login data)
"""
phone_login = check_is_phone_number(data=self._login_username)
email_login = check_is_email_address(data=self._login_username)
login_data: Dict = {"password": self._login_password}
if phone_login is None and email_login is None:
raise ValueError("Your login username is illegal!")
if phone_login is not None:
login_data.update(phoneNumber=self._login_username)
return f"{self._main_url}/phoneNumber", login_data
if email_login is not None:
login_data.update(email=self._login_username)
return f"{self._main_url}/email", login_data
return None
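    # Editorial note: for a phone-number login the method above returns
    # ("https://juejin.im/auth/type/phoneNumber", {"password": ..., "phoneNumber": ...}),
    # while an email login targets the ".../email" endpoint with an "email" key instead.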
def parse_data_with_method(self, method: str):
if method == BaseSpiderParseMethodType.LoginResult:
self._parse_login_data()
elif method == BaseSpiderParseMethodType.PersonalBlogs:
self._parse_personal_blogs()
self._parse_personal_like_blogs()
elif method == BaseSpiderParseMethodType.Finish:
self.send_data()
def login(self):
if self._login_cookies is None:
login_url, login_data = self._check_username()
response = self.make_request(
url=login_url,
headers=self._common_headers,
method="POST",
json=login_data,
)
if response.content.decode() != "":
logger.info("登录成功!")
self._response_data = response.json()
self._login_cookies = CookieUtils(
cookie_list=response.cookies.items()
).to_str()
logger.debug(self._login_cookies)
self.set_cookies(
spider_name=self._spider_name, cookies=self._login_cookies
)
self.parse_data_with_method(
method=BaseSpiderParseMethodType.LoginResult
)
else:
logger.error("登录失败!")
raise LoginException()
else:
get_result: str = self.get_data(spider_name=f"{self._spider_name}:params")
if get_result is None:
self.parse_data_with_method(
method=BaseSpiderParseMethodType.LoginResult
)
else:
try:
login_params = get_result.split("&")[1:-1]
self._login_uid = [d for d in login_params if "uid" in d][
0
].replace("uid=", "")
self._login_token = [d for d in login_params if "token" in d][
0
].replace("token=", "")
self._login_client_id = [
d for d in login_params if "device_id" in d
][0].replace("device_id=", "")
self.parse_data_with_method(
method=BaseSpiderParseMethodType.PersonalBlogs
)
except Exception as err:
logger.error(f"解析 Redis 返回数据失败! 错误原因: {err}")
self.parse_data_with_method(
method=BaseSpiderParseMethodType.LoginResult
)
def _parse_login_data(self):
        # Common request parameters
self._login_token = self._response_data["token"]
self._login_uid = self._response_data["userId"]
self._login_client_id = self._response_data["clientId"]
        # Persist the important parameters
params: str = f"?src=web&uid={self._login_uid}" f"&token={self._login_token}" f"&device_id={self._login_client_id}" f"¤t_uid={self._login_uid}"
self.set_data(spider_name=f"{self._spider_name}:params", data=params)
        # Personal profile data
username = self._response_data["user"]["username"]
description = self._response_data["user"]["selfDescription"]
avatar_img = self._response_data["user"]["avatarLarge"]
followee = self._response_data["user"]["followeesCount"]
follower = self._response_data["user"]["followersCount"]
like_blogs = self._response_data["user"]["collectedEntriesCount"]
personal_data: Dict = {
"username": username,
"description": description,
"avatarImg": avatar_img,
"followee": followee,
"follower": follower,
"likeBlogs": like_blogs,
}
logger.debug(personal_data)
self.data_model.set_personal_data(data=personal_data)
self.parse_data_with_method(method=BaseSpiderParseMethodType.PersonalBlogs)
def _parse_personal_blogs(self, next_params: Optional[str] = None):
req_data: dict = {
"src": "web",
"uid": self._login_uid,
"device_id": self._login_client_id,
"token": self._login_token,
"targetUid": self._login_uid,
"type": "post",
"limit": "20",
"order": "createdAt",
}
if next_params is not None:
req_data.update(before=next_params)
url_params: str = ""
for index, data in enumerate(req_data.items()):
if index == 0:
url_params += f"?{data[0]}={data[1]}"
else:
url_params += f"&{data[0]}={data[1]}"
blogs_url: str = f"{self._blogs_url}{url_params}"
response = self.make_request(url=blogs_url, headers=self._common_headers)
if response.content.decode() != "":
self._response_data = response.json()
if self._response_data is not None and self._response_data["m"] == "ok":
next_page_variable = None
entry_list = self._response_data["d"]["entrylist"]
if len(entry_list) > 0:
for personal_blog in entry_list:
blog_create_time = datetime_str_change_fmt(
time_str=personal_blog["createdAt"],
prev_fmt="%Y-%m-%dT%H:%M:%S.%fZ",
)
blog_data: Dict = {
"blogId": personal_blog["objectId"],
"blogTitle": personal_blog["title"],
"blogHref": personal_blog["originalUrl"],
"blogViewers": personal_blog["viewsCount"],
"blogCreateTime": blog_create_time,
}
self._blogs_data.append(blog_data)
next_page_variable = personal_blog["verifyCreatedAt"]
if self._response_data["d"]["total"] > 20:
time.sleep(0.5)
self._parse_personal_blogs(next_params=next_page_variable)
else:
logger.debug(self._blogs_data)
self.data_model.set_personal_blogs_data(data=self._blogs_data)
logger.info("获取个人博客数据成功!")
else:
logger.error("查询个人博客失败!")
self.update_task_status(
task_id=self._task_id, data=str(PROCESS_STATUS_FAIL)
)
raise LoginException()
def _parse_personal_like_blogs(self, page_no: int = 0):
like_blogs_url: str = f"{self._like_blogs_url}/{self._login_uid}/like/entry?page={page_no}&pageSize=20"
self._common_headers.update(
{
"X-Juejin-Client": str(self._login_client_id),
"X-Juejin-Src": "web",
"X-Juejin-Token": self._login_token,
"X-Juejin-Uid": self._login_uid,
}
)
response = self.make_request(url=like_blogs_url, headers=self._common_headers)
if response.content.decode() != "":
self._response_data = response.json()
if (
self._response_data is not None
and self._response_data["m"] == "success"
):
logger.info(f"当前正在获取第{page_no + 1}页的数据!")
if page_no == 0:
total_count = self._response_data["d"]["total"]
total_pages = total_count // 20
rest_count = total_count % 20
if rest_count != 0:
total_pages += 1
self._like_blogs_total_page = total_pages
entry_list = self._response_data["d"]["entryList"]
if len(entry_list) > 0:
for entry_data in entry_list:
if entry_data is None:
continue
blog_data: Dict = {
"blogId": entry_data["objectId"],
"blogTitle": entry_data["title"],
"blogHref": entry_data["originalUrl"],
"blogViewers": entry_data["viewsCount"],
"blogCreateTime": datetime_str_change_fmt(
time_str=entry_data["createdAt"],
prev_fmt="%Y-%m-%dT%H:%M:%S.%fZ",
),
}
self._like_blogs_data.append(blog_data)
page_no += 1
if page_no <= self._like_blogs_total_page:
                        # TODO: consider splitting this across threads later to fetch pages concurrently
time.sleep(0.5)
self._parse_personal_like_blogs(page_no=page_no)
else:
# logger.debug(self._like_blogs_data)
logger.debug(f"获取到 {len(self._like_blogs_data)} 条个人点赞博客")
self.data_model.set_personal_like_blogs_data(
data=self._like_blogs_data
)
logger.info("获取个人点赞博客成功!")
# 任务末尾
self.parse_data_with_method(method=BaseSpiderParseMethodType.Finish)
else:
logger.error("查询个人点赞博客失败!")
self.update_task_status(
task_id=self._task_id, data=str(PROCESS_STATUS_FAIL)
)
raise ParseDataException()
def _test_cookies(self, cookies: Optional[str] = None) -> bool:
params = self.get_data(spider_name=f"{self._spider_name}:params")
if params is None:
return False
test_user_url: str = f"https://user-storage-api-ms.juejin.im/v1/getUserInfo{params}"
test_request_headers: Dict = self.get_default_headers()
test_response = self.make_request(
url=test_user_url, headers=test_request_headers
)
if (
test_response.status_code != 200
or check_is_json(test_response.content.decode()) is not True
):
logger.error(f"当前掘金账号登录状态: 已退出!")
self._async_task.remove_async_scheduler(job_id=self._spider_name)
return False
test_json_response = test_response.json()
if test_json_response["s"] == 1:
logger.info(f"当前掘金账号为: {self._login_username}, 状态: 已登录")
return True
else:
logger.error(f"当前掘金账号登录状态: 已退出!")
return False
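# Editorial sketch of how this spider might be driven; the task id and
# credentials are placeholders, and the Redis/queue plumbing that BaseSpider
# relies on has to be configured separately, so the sketch is left as comments:
#
#     spider = JuejinSpider(task_id="demo-task", username="user@example.com",
#                           password="secret")
#     spider.login()  # logs in (or reuses cached cookies) and kicks off parsing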
| [((471, 491), 'utils.logger_utils.LogManager', 'LogManager', (['__name__'], {}), '(__name__)\n', (481, 491), False, 'from utils.logger_utils import LogManager\n'), ((1681, 1729), 'utils.str_utils.check_is_phone_number', 'check_is_phone_number', ([], {'data': 'self._login_username'}), '(data=self._login_username)\n', (1702, 1729), False, 'from utils.str_utils import check_is_phone_number, check_is_email_address\n'), ((1752, 1801), 'utils.str_utils.check_is_email_address', 'check_is_email_address', ([], {'data': 'self._login_username'}), '(data=self._login_username)\n', (1774, 1801), False, 'from utils.str_utils import check_is_phone_number, check_is_email_address\n'), ((8826, 8842), 'utils.exception_utils.LoginException', 'LoginException', ([], {}), '()\n', (8840, 8842), False, 'from utils.exception_utils import LoginException, ParseDataException\n'), ((11766, 11786), 'utils.exception_utils.ParseDataException', 'ParseDataException', ([], {}), '()\n', (11784, 11786), False, 'from utils.exception_utils import LoginException, ParseDataException\n'), ((3687, 3703), 'utils.exception_utils.LoginException', 'LoginException', ([], {}), '()\n', (3701, 3703), False, 'from utils.exception_utils import LoginException, ParseDataException\n'), ((8338, 8353), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (8348, 8353), False, 'import time\n'), ((11045, 11060), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (11055, 11060), False, 'import time\n'), ((7536, 7635), 'utils.time_utils.datetime_str_change_fmt', 'datetime_str_change_fmt', ([], {'time_str': "personal_blog['createdAt']", 'prev_fmt': '"""%Y-%m-%dT%H:%M:%S.%fZ"""'}), "(time_str=personal_blog['createdAt'], prev_fmt=\n '%Y-%m-%dT%H:%M:%S.%fZ')\n", (7559, 7635), False, 'from utils.time_utils import datetime_str_change_fmt\n'), ((10610, 10706), 'utils.time_utils.datetime_str_change_fmt', 'datetime_str_change_fmt', ([], {'time_str': "entry_data['createdAt']", 'prev_fmt': '"""%Y-%m-%dT%H:%M:%S.%fZ"""'}), "(time_str=entry_data['createdAt'], prev_fmt=\n '%Y-%m-%dT%H:%M:%S.%fZ')\n", (10633, 10706), False, 'from utils.time_utils import datetime_str_change_fmt\n')] |
naviocean/SimpleCVReproduction | NAS/PaddleSlim/train_supernet.py | 61b43e3583977f42e6f91ef176ec5e1701e98d33 | from paddle.vision.transforms import (
ToTensor, RandomHorizontalFlip, RandomResizedCrop, SaturationTransform, Compose,
HueTransform, BrightnessTransform, ContrastTransform, RandomCrop, Normalize, RandomRotation
)
from paddle.vision.datasets import Cifar100
from paddle.io import DataLoader
from paddle.optimizer.lr import CosineAnnealingDecay, MultiStepDecay, LinearWarmup
import random
from resnet20 import *
import paddle
# Supernet training based on the PaddleSlim model compression toolkit
# https://github.com/PaddlePaddle/PaddleSlim (stars are welcome)
from paddleslim.nas.ofa.convert_super import Convert, supernet
from paddleslim.nas.ofa import OFA, RunConfig, DistillConfig
from paddleslim.nas.ofa.utils import utils
channel_list = []
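# Editorial note: the loop below pins every layer to its maximum width
# (16/32/64 channels, matching the three stages of ResNet20); the commented-out
# random.choice calls show the narrower widths the supernet is allowed to sample.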
for i in range(1, 21):
if 0 < i <= 7:
# channel_list.append(random.choice([ 4, 8, 12, 16]))
channel_list.append(16)
elif 7 < i <= 13:
# channel_list.append(random.choice([ 4, 8, 12, 16, 20, 24, 28, 32]))
channel_list.append(32)
elif 13 < i <= 19:
# channel_list.append(random.choice([ 4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 44, 48, 52, 56,60, 64]))
channel_list.append(64)
else:
# channel_list.append(random.choice([ 4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 44, 48, 52, 56,60, 64]))
channel_list.append(64)
net = ResNet20(100, channel_list)
net2 = ResNet20(100, channel_list)
net2.set_state_dict(paddle.load('./pretrained_model/resnet20.pdparams'))
channel_optional = []
for i in range(0, 23):
if i <= 7:
channel_optional.append([4, 8, 12, 16])
# channel_optional.append([12, 16])
elif 7 < i <= 14:
channel_optional.append([4, 8, 12, 16, 20, 24, 28, 32])
# channel_optional.append([20, 24, 28, 32])
elif 14 < i <= 21:
channel_optional.append(
[4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 44, 48, 52, 56, 60, 64])
# channel_optional.append([36, 40, 44, 48, 52, 56,60, 64])
else:
channel_optional.append(
[4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 44, 48, 52, 56, 60, 64])
# channel_optional.append([36, 40, 44, 48, 52, 56,60, 64])
distill_config = DistillConfig(teacher_model=net2)
sp_net_config = supernet(channel=channel_optional)
sp_model = Convert(sp_net_config).convert(net)
ofa_net = OFA(sp_model, distill_config=distill_config)
ofa_net.set_task('channel')
model = paddle.Model(ofa_net)
MAX_EPOCH = 300
LR = 0.1
WEIGHT_DECAY = 5e-4
MOMENTUM = 0.9
BATCH_SIZE = 128
CIFAR_MEAN = [0.5071, 0.4865, 0.4409]
CIFAR_STD = [0.1942, 0.1918, 0.1958]
DATA_FILE = './data/data76994/cifar-100-python.tar.gz'
model.prepare(
paddle.optimizer.Momentum(
learning_rate=LinearWarmup(
CosineAnnealingDecay(LR, MAX_EPOCH), 2000, 0., LR),
momentum=MOMENTUM,
parameters=model.parameters(),
weight_decay=WEIGHT_DECAY),
CrossEntropyLoss(),
paddle.metric.Accuracy(topk=(1, 5)))
transforms = Compose([
RandomCrop(32, padding=4),
RandomApply(BrightnessTransform(0.1)),
RandomApply(ContrastTransform(0.1)),
RandomHorizontalFlip(),
RandomRotation(15),
ToArray(),
Normalize(CIFAR_MEAN, CIFAR_STD),
])
val_transforms = Compose([ToArray(), Normalize(CIFAR_MEAN, CIFAR_STD)])
train_set = Cifar100(DATA_FILE, mode='train', transform=transforms)
test_set = Cifar100(DATA_FILE, mode='test', transform=val_transforms)
callbacks = [LRSchedulerM(), callbacks.VisualDL('vis_logs/ofa_resnet20')]
model.fit(
train_set,
test_set,
epochs=MAX_EPOCH,
batch_size=BATCH_SIZE,
save_dir='checkpoints',
save_freq=100,
shuffle=True,
num_workers=4,
verbose=1,
callbacks=callbacks,
)
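# Editorial note: RunConfig and utils are imported above but unused in this
# script; once the supernet has finished training, sub-networks restricted to
# the widths listed in channel_optional can be sampled from ofa_net for
# evaluation or export.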
| [((2136, 2169), 'paddleslim.nas.ofa.DistillConfig', 'DistillConfig', ([], {'teacher_model': 'net2'}), '(teacher_model=net2)\n', (2149, 2169), False, 'from paddleslim.nas.ofa import OFA, RunConfig, DistillConfig\n'), ((2186, 2220), 'paddleslim.nas.ofa.convert_super.supernet', 'supernet', ([], {'channel': 'channel_optional'}), '(channel=channel_optional)\n', (2194, 2220), False, 'from paddleslim.nas.ofa.convert_super import Convert, supernet\n'), ((2278, 2322), 'paddleslim.nas.ofa.OFA', 'OFA', (['sp_model'], {'distill_config': 'distill_config'}), '(sp_model, distill_config=distill_config)\n', (2281, 2322), False, 'from paddleslim.nas.ofa import OFA, RunConfig, DistillConfig\n'), ((2361, 2382), 'paddle.Model', 'paddle.Model', (['ofa_net'], {}), '(ofa_net)\n', (2373, 2382), False, 'import paddle\n'), ((3237, 3292), 'paddle.vision.datasets.Cifar100', 'Cifar100', (['DATA_FILE'], {'mode': '"""train"""', 'transform': 'transforms'}), "(DATA_FILE, mode='train', transform=transforms)\n", (3245, 3292), False, 'from paddle.vision.datasets import Cifar100\n'), ((3304, 3362), 'paddle.vision.datasets.Cifar100', 'Cifar100', (['DATA_FILE'], {'mode': '"""test"""', 'transform': 'val_transforms'}), "(DATA_FILE, mode='test', transform=val_transforms)\n", (3312, 3362), False, 'from paddle.vision.datasets import Cifar100\n'), ((1389, 1440), 'paddle.load', 'paddle.load', (['"""./pretrained_model/resnet20.pdparams"""'], {}), "('./pretrained_model/resnet20.pdparams')\n", (1400, 1440), False, 'import paddle\n'), ((2868, 2903), 'paddle.metric.Accuracy', 'paddle.metric.Accuracy', ([], {'topk': '(1, 5)'}), '(topk=(1, 5))\n', (2890, 2903), False, 'import paddle\n'), ((2232, 2254), 'paddleslim.nas.ofa.convert_super.Convert', 'Convert', (['sp_net_config'], {}), '(sp_net_config)\n', (2239, 2254), False, 'from paddleslim.nas.ofa.convert_super import Convert, supernet\n'), ((2933, 2958), 'paddle.vision.transforms.RandomCrop', 'RandomCrop', (['(32)'], {'padding': '(4)'}), '(32, padding=4)\n', (2943, 2958), False, 'from paddle.vision.transforms import ToTensor, RandomHorizontalFlip, RandomResizedCrop, SaturationTransform, Compose, HueTransform, BrightnessTransform, ContrastTransform, RandomCrop, Normalize, RandomRotation\n'), ((3048, 3070), 'paddle.vision.transforms.RandomHorizontalFlip', 'RandomHorizontalFlip', ([], {}), '()\n', (3068, 3070), False, 'from paddle.vision.transforms import ToTensor, RandomHorizontalFlip, RandomResizedCrop, SaturationTransform, Compose, HueTransform, BrightnessTransform, ContrastTransform, RandomCrop, Normalize, RandomRotation\n'), ((3076, 3094), 'paddle.vision.transforms.RandomRotation', 'RandomRotation', (['(15)'], {}), '(15)\n', (3090, 3094), False, 'from paddle.vision.transforms import ToTensor, RandomHorizontalFlip, RandomResizedCrop, SaturationTransform, Compose, HueTransform, BrightnessTransform, ContrastTransform, RandomCrop, Normalize, RandomRotation\n'), ((3115, 3147), 'paddle.vision.transforms.Normalize', 'Normalize', (['CIFAR_MEAN', 'CIFAR_STD'], {}), '(CIFAR_MEAN, CIFAR_STD)\n', (3124, 3147), False, 'from paddle.vision.transforms import ToTensor, RandomHorizontalFlip, RandomResizedCrop, SaturationTransform, Compose, HueTransform, BrightnessTransform, ContrastTransform, RandomCrop, Normalize, RandomRotation\n'), ((3190, 3222), 'paddle.vision.transforms.Normalize', 'Normalize', (['CIFAR_MEAN', 'CIFAR_STD'], {}), '(CIFAR_MEAN, CIFAR_STD)\n', (3199, 3222), False, 'from paddle.vision.transforms import ToTensor, RandomHorizontalFlip, RandomResizedCrop, SaturationTransform, Compose, 
HueTransform, BrightnessTransform, ContrastTransform, RandomCrop, Normalize, RandomRotation\n'), ((2976, 3000), 'paddle.vision.transforms.BrightnessTransform', 'BrightnessTransform', (['(0.1)'], {}), '(0.1)\n', (2995, 3000), False, 'from paddle.vision.transforms import ToTensor, RandomHorizontalFlip, RandomResizedCrop, SaturationTransform, Compose, HueTransform, BrightnessTransform, ContrastTransform, RandomCrop, Normalize, RandomRotation\n'), ((3019, 3041), 'paddle.vision.transforms.ContrastTransform', 'ContrastTransform', (['(0.1)'], {}), '(0.1)\n', (3036, 3041), False, 'from paddle.vision.transforms import ToTensor, RandomHorizontalFlip, RandomResizedCrop, SaturationTransform, Compose, HueTransform, BrightnessTransform, ContrastTransform, RandomCrop, Normalize, RandomRotation\n'), ((2686, 2721), 'paddle.optimizer.lr.CosineAnnealingDecay', 'CosineAnnealingDecay', (['LR', 'MAX_EPOCH'], {}), '(LR, MAX_EPOCH)\n', (2706, 2721), False, 'from paddle.optimizer.lr import CosineAnnealingDecay, MultiStepDecay, LinearWarmup\n')] |
Myst1c-a/phen-cogs | slashtags/mixins/commands.py | 672f9022ddbbd9a84b0a05357347e99e64a776fc | """
MIT License
Copyright (c) 2020-present phenom4n4n
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import asyncio
import logging
import re
import types
from collections import Counter
from copy import copy
from typing import Dict, List, Union
import discord
from redbot.core import commands
from redbot.core.utils.chat_formatting import box, humanize_list, inline, pagify
from redbot.core.utils.menus import DEFAULT_CONTROLS, menu
from redbot.core.utils.predicates import MessagePredicate
from tabulate import tabulate
from ..abc import MixinMeta
from ..converters import (
GlobalTagConverter,
GuildTagConverter,
PastebinConverter,
TagConverter,
TagName,
TagScriptConverter,
)
from ..http import ApplicationOptionChoice, SlashOptionType
from ..objects import ApplicationCommand, ApplicationCommandType, SlashOption, SlashTag
from ..testing.button_menus import menu as button_menu
from ..utils import ARGUMENT_NAME_DESCRIPTION, chunks, dev_check
TAG_RE = re.compile(r"(?i)(\[p\])?\b(slash\s?)?tag'?s?\b")
CHOICE_RE = re.compile(r".{1,100}:.{1,100}")
CHOICE_LIMIT = 25
log = logging.getLogger("red.phenom4n4n.slashtags.commands")
def _sub(match: re.Match) -> str:
if match.group(1):
return "[p]slashtag global"
repl = "global "
name = match.group(0)
repl += name
if name.istitle():
repl = repl.title()
return repl
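# For example (editorial note): TAG_RE.sub(_sub, "Use [p]slashtag to manage tags")
# produces "Use [p]slashtag global to manage global tags", which is how copy_doc
# below rewrites help text for the global command group.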
def copy_doc(original: Union[commands.Command, types.FunctionType]):
def decorator(overriden: Union[commands.Command, types.FunctionType]):
doc = original.help if isinstance(original, commands.Command) else original.__doc__
doc = TAG_RE.sub(_sub, doc)
if isinstance(overriden, commands.Command):
overriden._help_override = doc
else:
overriden.__doc__ = doc
return overriden
return decorator
class Commands(MixinMeta):
@commands.guild_only()
@commands.group(aliases=["st"])
async def slashtag(self, ctx: commands.Context):
"""
Slash Tag management with TagScript.
These commands use TagScriptEngine.
[This site](https://phen-cogs.readthedocs.io/en/latest/index.html) has documentation on how to use TagScript blocks.
"""
@commands.mod_or_permissions(manage_guild=True)
@slashtag.command("add", aliases=["create", "+"])
async def slashtag_add(
self,
ctx: commands.Context,
tag_name: TagName(check_global=False),
*,
tagscript: TagScriptConverter,
):
"""
Add a slash tag with TagScript.
[Slash tag usage guide](https://phen-cogs.readthedocs.io/en/latest/slashtags/slashtags.html)
"""
await self.create_slash_tag(ctx, tag_name, tagscript, is_global=False)
async def create_slash_tag(
self,
ctx: commands.Context,
tag_name: str,
tagscript: str,
*,
is_global: bool = False,
command_type: ApplicationCommandType = ApplicationCommandType.CHAT_INPUT,
):
options: List[SlashOption] = []
guild_id = None if is_global else ctx.guild.id
if command_type == ApplicationCommandType.CHAT_INPUT:
try:
description = await self.send_and_query_response(
ctx,
"What should the tag description to be? (maximum 100 characters)",
pred=MessagePredicate.length_less(101, ctx),
)
except asyncio.TimeoutError:
return await ctx.send("Tag addition timed out.")
else:
description = ""
if command_type == ApplicationCommandType.CHAT_INPUT:
pred = MessagePredicate.yes_or_no(ctx)
try:
await self.send_and_query_response(
ctx, "Would you like to add arguments to this tag? (Y/n)", pred
)
except asyncio.TimeoutError:
await ctx.send("Query timed out, not adding arguments.")
else:
if pred.result is True:
await self.get_options(ctx, options)
command = ApplicationCommand(
self,
name=tag_name,
description=description,
guild_id=guild_id,
options=options,
type=command_type,
)
try:
await command.register()
except discord.Forbidden as error:
log.error(
"Failed to create command {command!r} on guild {ctx.guild!r}", exc_info=error
)
text = (
"Looks like I don't have permission to add Slash Commands here. Reinvite me "
"with this invite link and try again: <https://discordapp.com/oauth2/authorize"
f"?client_id={self.bot.user.id}&scope=bot%20applications.commands>"
)
return await ctx.send(text)
except Exception:
log.error("Failed to create command {command!r} on guild {ctx.guild!r}")
# exc info unneeded since error handler should print it, however info on the command options is needed
raise
tag = SlashTag(
self,
tagscript,
guild_id=guild_id,
author_id=ctx.author.id,
command=command,
)
await ctx.send(await tag.initialize())
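    # Interactively collects up to 10 arguments. Once an optional argument is added, all
    # later arguments are forced to be optional as well, since required options must come first.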
async def get_options(
self, ctx: commands.Context, options: List[SlashOption]
) -> List[SlashOption]:
added_required = False
for i in range(1, 11):
try:
option = await self.get_option(ctx, added_required=added_required)
if not option.required:
added_required = True
except asyncio.TimeoutError:
await ctx.send("Adding this argument timed out.", delete_after=15)
break
options.append(option)
if i == 10:
break
pred = MessagePredicate.yes_or_no(ctx)
try:
await self.send_and_query_response(
ctx, "Would you like to add another argument? (Y/n)", pred
)
except asyncio.TimeoutError:
await ctx.send("Query timed out, not adding additional arguments.")
break
if pred.result is False:
break
return options
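    # Prompt helper: sends `query`, waits for a reply matching `pred` (same-context check by
    # default), cleans up both messages, and returns the reply content. Raises
    # asyncio.TimeoutError if no reply arrives within `timeout` seconds.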
async def send_and_query_response(
self,
ctx: commands.Context,
query: str,
pred: MessagePredicate = None,
*,
timeout: int = 60,
) -> str:
if pred is None:
pred = MessagePredicate.same_context(ctx)
ask = await ctx.send(query)
try:
message = await self.bot.wait_for("message", check=pred, timeout=timeout)
except asyncio.TimeoutError:
await self.delete_quietly(ask)
raise
await self.delete_quietly(ask)
await self.delete_quietly(message)
return message.content
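    # Parses "name:value" pairs separated by "|" into ApplicationOptionChoice objects,
    # skipping malformed entries and stopping once CHOICE_LIMIT (25) choices are collected.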
async def get_choices(self, ctx: commands.Context) -> List[ApplicationOptionChoice]:
query = (
"Send the list of choice names and values you would like to add as choices to "
"the tag. Choice names and values should be seperated by `:`, and each choice "
"should be seperated by `|`. Example:\n`dog:Doggo|cat:Catto`"
)
response = await self.send_and_query_response(ctx, query)
choices = []
for choice_text in response.split("|"):
if ":" not in choice_text:
await ctx.send(
f"Failed to parse `{choice_text}` to a choice as its name and value "
"weren't seperated by a `:`.",
delete_after=15,
)
continue
if not CHOICE_RE.match(choice_text):
await ctx.send(
f"Failed to parse `{choice_text}` to a choice as "
"its name or value exceeded the 100 character limit.",
delete_after=15,
)
continue
choice = ApplicationOptionChoice(*choice_text.split(":", 1))
choices.append(choice)
if len(choices) >= CHOICE_LIMIT:
await ctx.send(f"Reached max choices ({CHOICE_LIMIT}).")
break
return choices
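    # Builds a single SlashOption: asks for a "name:description" pair, an option type
    # ("choices" yields a string option with fixed choices), and whether it is required.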
async def get_option(
self, ctx: commands.Context, *, added_required: bool = False
) -> SlashOption:
name_desc = [
"What should the argument name be and description be?",
"The argument name and description should be split by a `:`.",
"Example: `member:A member of this server.`\n",
"*Slash argument names may not exceed 32 characters and can only contain characters "
"that are alphanumeric or '_' or '-'.",
"The argument description must be less than or equal to 100 characters.*",
]
name_pred = MessagePredicate.regex(ARGUMENT_NAME_DESCRIPTION, ctx)
await self.send_and_query_response(ctx, "\n".join(name_desc), name_pred)
match = name_pred.result
name, description = match.group(1), match.group(2)
valid_option_types = [
name.lower()
for name in SlashOptionType.__members__.keys()
if not name.startswith("SUB")
]
valid_option_types.append("choices")
option_query = [
"What should the argument type be?",
f"Valid option types: {humanize_list([inline(n) for n in valid_option_types])}",
"(select `string` if you don't understand)",
]
option_type = await self.send_and_query_response(
ctx,
"\n".join(option_query),
MessagePredicate.lower_contained_in(valid_option_types, ctx),
)
if option_type.lower() == "choices":
choices = await self.get_choices(ctx)
option_type = "STRING"
else:
choices = []
option_type = SlashOptionType[option_type.upper()]
if not added_required:
pred = MessagePredicate.yes_or_no(ctx)
await self.send_and_query_response(
ctx,
"Is this argument required? (Y/n)\n*Keep in mind that if you choose to make this argument optional, all following arguments must also be optional.*",
pred,
)
required = pred.result
else:
await ctx.send(
"This argument was automatically made optional as the previous one was optional.",
delete_after=15,
)
required = False
return SlashOption(
name=name.lower(),
description=description,
option_type=option_type,
required=required,
choices=choices,
)
@commands.mod_or_permissions(manage_guild=True)
@slashtag.command("message")
async def slashtag_message(
self,
ctx: commands.Context,
tag_name: TagName(check_global=False, check_regex=False),
*,
tagscript: TagScriptConverter,
):
"""
Add a message command tag with TagScript.
[Slash tag usage guide](https://phen-cogs.readthedocs.io/en/latest/slashtags/slashtags.html)
"""
await self.create_slash_tag(
ctx, tag_name, tagscript, is_global=False, command_type=ApplicationCommandType.MESSAGE
)
@commands.mod_or_permissions(manage_guild=True)
@slashtag.command("user")
async def slashtag_user(
self,
ctx: commands.Context,
tag_name: TagName(check_global=False, check_regex=False),
*,
tagscript: TagScriptConverter,
):
"""
Add a user command tag with TagScript.
[Slash tag usage guide](https://phen-cogs.readthedocs.io/en/latest/slashtags/slashtags.html)
"""
await self.create_slash_tag(
ctx, tag_name, tagscript, is_global=False, command_type=ApplicationCommandType.USER
)
@commands.mod_or_permissions(manage_guild=True)
@slashtag.command("pastebin", aliases=["++"])
async def slashtag_pastebin(
self,
ctx: commands.Context,
tag_name: TagName(check_global=False),
*,
link: PastebinConverter,
):
"""
Add a slash tag with a Pastebin link.
"""
await self.create_slash_tag(ctx, tag_name, link, is_global=False)
@commands.mod_or_permissions(manage_guild=True)
@slashtag.group("edit", aliases=["e"], invoke_without_command=True)
async def slashtag_edit(
self, ctx: commands.Context, tag: GuildTagConverter, *, tagscript: TagScriptConverter
):
"""Edit a slash tag."""
await ctx.send(await tag.edit_tagscript(tagscript))
@slashtag_edit.command("tagscript")
async def slashtag_edit_tagscript(
self, ctx: commands.Context, tag: GuildTagConverter, *, tagscript: TagScriptConverter
):
"""Edit a slash tag's TagScript."""
await self.slashtag_edit(ctx, tag, tagscript=tagscript)
@slashtag_edit.command("name")
async def slashtag_edit_name(
self, ctx: commands.Context, tag: GuildTagConverter, *, name: TagName(check_global=False)
):
"""Edit a slash tag's name."""
await ctx.send(await tag.edit_name(name))
@slashtag_edit.command("description")
async def slashtag_edit_description(
self, ctx: commands.Context, tag: GuildTagConverter, *, description: str
):
"""Edit a slash tag's description."""
await ctx.send(await tag.edit_description(description))
@slashtag_edit.command("arguments", aliases=["options"])
async def slashtag_edit_arguments(self, ctx: commands.Context, tag: GuildTagConverter):
"""
Edit a slash tag's arguments.
See [this documentation page](https://phen-cogs.readthedocs.io/en/latest/slashtags/slash_arguments.html) for more information on slash tag arguments.
"""
await tag.edit_options(ctx)
@slashtag_edit.command("argument", aliases=["option"])
async def slashtag_edit_argument(
self, ctx: commands.Context, tag: GuildTagConverter, argument: str
):
"""Edit a single slash tag's argument by name."""
await tag.edit_single_option(ctx, argument)
@commands.mod_or_permissions(manage_guild=True)
@slashtag.command("remove", aliases=["delete", "-"])
async def slashtag_remove(self, ctx: commands.Context, *, tag: GuildTagConverter):
"""Delete a slash tag."""
await ctx.send(await tag.delete())
@slashtag.command("info")
async def slashtag_info(self, ctx: commands.Context, *, tag: TagConverter):
"""Get info about a slash tag that is stored on this server."""
await tag.send_info(ctx)
@slashtag.command("raw")
async def slashtag_raw(self, ctx: commands.Context, *, tag: GuildTagConverter):
"""Get a slash tag's raw content."""
await tag.send_raw_tagscript(ctx)
@classmethod
def format_tagscript(cls, tag: SlashTag, limit: int = 60) -> str:
title = f"`{tag.type.get_prefix()}{tag.name}` - "
limit -= len(title)
tagscript = tag.tagscript
if len(tagscript) > limit - 3:
tagscript = tagscript[:limit] + "..."
tagscript = tagscript.replace("\n", " ")
return f"{title}{discord.utils.escape_markdown(tagscript)}"
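    # Renders the given tags as paginated embeds, one line per tag via format_tagscript,
    # and pages through them with the button-based menu from the testing module.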
async def view_slash_tags(
self,
ctx: commands.Context,
tags: Dict[int, SlashTag],
*,
is_global: bool,
):
description = [
self.format_tagscript(tag) for tag in sorted(tags.values(), key=lambda t: t.name)
]
description = "\n".join(description)
e = discord.Embed(color=await ctx.embed_color())
if is_global:
slash_tags = "global slash tags"
e.set_author(name="Global Slash Tags", icon_url=ctx.me.avatar_url)
else:
slash_tags = "slash tags"
e.set_author(name="Stored Slash Tags", icon_url=ctx.guild.icon_url)
embeds = []
pages = list(pagify(description))
for index, page in enumerate(pages, 1):
embed = e.copy()
embed.description = page
embed.set_footer(text=f"{index}/{len(pages)} | {len(tags)} {slash_tags}")
embeds.append(embed)
# await menu(ctx, embeds, DEFAULT_CONTROLS)
await button_menu(ctx, embeds)
@slashtag.command("list")
async def slashtag_list(self, ctx: commands.Context):
"""View stored slash tags."""
tags = self.guild_tag_cache[ctx.guild.id]
if not tags:
return await ctx.send("There are no slash tags on this server.")
await self.view_slash_tags(ctx, tags, is_global=False)
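    # Tabulates usage counts (10 tags per page) and pages through them; serves both the
    # guild-level and global usage commands depending on whether `guild` is passed.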
async def show_slash_tag_usage(self, ctx: commands.Context, guild: discord.Guild = None):
tags = self.guild_tag_cache[guild.id] if guild else self.global_tag_cache
if not tags:
message = (
"This server has no slash tags." if guild else "There are no global slash tags."
)
return await ctx.send(message)
counter = Counter({tag.name: tag.uses for tag in tags.copy().values()})
e = discord.Embed(title="Slash Tag Stats", color=await ctx.embed_color())
embeds = []
for usage_data in chunks(counter.most_common(), 10):
usage_chart = box(tabulate(usage_data, headers=("Tag", "Uses")), "prolog")
embed = e.copy()
embed.description = usage_chart
embeds.append(embed)
await menu(ctx, embeds, DEFAULT_CONTROLS)
@slashtag.command("usage", aliases=["stats"])
async def slashtag_usage(self, ctx: commands.Context):
"""
        See slash tag usage stats for this server.
**Example:**
`[p]slashtag usage`
"""
await self.show_slash_tag_usage(ctx, ctx.guild)
@commands.is_owner()
@slashtag.command("restore", hidden=True)
async def slashtag_restore(self, ctx: commands.Context):
"""Restore all slash tags from the database."""
await self.restore_tags(ctx, ctx.guild)
@commands.is_owner()
@slashtag.command("clear", hidden=True)
async def slashtag_clear(self, ctx: commands.Context):
"""Clear all slash tags for this server."""
pred = MessagePredicate.yes_or_no(ctx)
try:
await self.send_and_query_response(
ctx, "Are you sure you want to delete all slash tags on this server? (Y/n)", pred
)
except asyncio.TimeoutError:
return await ctx.send("Timed out, not deleting slash tags.")
if not pred.result:
return await ctx.send("Ok, not deleting slash tags.")
guild: discord.Guild = ctx.guild
await self.http.put_guild_slash_commands(guild.id, [])
for tag in copy(self.guild_tag_cache[guild.id]).values():
tag.remove_from_cache()
tag.command.remove_from_cache()
del tag
self.guild_tag_cache[guild.id].clear()
await self.config.guild(guild).tags.clear()
await ctx.send("Tags deleted.")
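    # The "slashtag global" group mirrors the guild-level commands above; copy_doc reuses
    # their help text with "global" substituted in.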
@commands.is_owner()
@slashtag.group("global")
@copy_doc(slashtag)
async def slashtag_global(self, ctx: commands.Context):
pass
@slashtag_global.command("add")
@copy_doc(slashtag_add)
async def slashtag_global_add(
self,
ctx: commands.Context,
tag_name: TagName(global_priority=True),
*,
tagscript: TagScriptConverter,
):
await self.create_slash_tag(ctx, tag_name, tagscript, is_global=True)
@commands.mod_or_permissions(manage_guild=True)
@slashtag_global.command("message")
@copy_doc(slashtag_message)
async def slashtag_global_message(
self,
ctx: commands.Context,
tag_name: TagName(global_priority=True, check_regex=False),
*,
tagscript: TagScriptConverter,
):
await self.create_slash_tag(
ctx, tag_name, tagscript, is_global=True, command_type=ApplicationCommandType.MESSAGE
)
@commands.mod_or_permissions(manage_guild=True)
@slashtag_global.command("user")
@copy_doc(slashtag_user)
async def slashtag_global_user(
self,
ctx: commands.Context,
tag_name: TagName(global_priority=True, check_regex=False),
*,
tagscript: TagScriptConverter,
):
await self.create_slash_tag(
ctx, tag_name, tagscript, is_global=True, command_type=ApplicationCommandType.USER
)
@slashtag_global.command("pastebin", aliases=["++"])
@copy_doc(slashtag_pastebin)
async def slashtag_global_pastebin(
self,
ctx: commands.Context,
tag_name: TagName(check_global=False),
*,
link: PastebinConverter,
):
await self.create_slash_tag(ctx, tag_name, link, is_global=True)
@slashtag_global.group("edit", aliases=["e"], invoke_without_command=True)
@copy_doc(slashtag_edit)
async def slashtag_global_edit(
self, ctx: commands.Context, tag: GlobalTagConverter, *, tagscript: TagScriptConverter
):
await ctx.send(await tag.edit_tagscript(tagscript))
@slashtag_global_edit.command("tagscript")
@copy_doc(slashtag_edit_tagscript)
async def slashtag_global_edit_tagscript(
self, ctx: commands.Context, tag: GlobalTagConverter, *, tagscript: TagScriptConverter
):
await self.slashtag_global_edit(ctx, tag, tagscript=tagscript)
@slashtag_global_edit.command("name")
@copy_doc(slashtag_edit_name)
async def slashtag_global_edit_name(
self,
ctx: commands.Context,
tag: GlobalTagConverter,
*,
name: TagName(global_priority=True),
):
await ctx.send(await tag.edit_name(name))
@slashtag_global_edit.command("description")
@copy_doc(slashtag_edit_description)
async def slashtag_global_edit_description(
self, ctx: commands.Context, tag: GlobalTagConverter, *, description: str
):
await ctx.send(await tag.edit_description(description))
@slashtag_global_edit.command("arguments", aliases=["options"])
@copy_doc(slashtag_edit_arguments)
async def slashtag_global_edit_arguments(self, ctx: commands.Context, tag: GlobalTagConverter):
await tag.edit_options(ctx)
@slashtag_global_edit.command("argument", aliases=["option"])
@copy_doc(slashtag_edit_argument)
async def slashtag_global_edit_argument(
        self, ctx: commands.Context, tag: GlobalTagConverter, argument: str
):
await tag.edit_single_option(ctx, argument)
@slashtag_global.command("remove", aliases=["delete", "-"])
@copy_doc(slashtag_remove)
async def slashtag_global_remove(self, ctx: commands.Context, *, tag: GlobalTagConverter):
await ctx.send(await tag.delete())
@slashtag_global.command("raw")
@copy_doc(slashtag_raw)
async def slashtag_global_raw(self, ctx: commands.Context, *, tag: GlobalTagConverter):
await tag.send_raw_tagscript(ctx)
@slashtag_global.command("list")
@copy_doc(slashtag_list)
async def slashtag_global_list(self, ctx: commands.Context):
tags = self.global_tag_cache
if not tags:
return await ctx.send("There are no global slash tags.")
await self.view_slash_tags(ctx, tags, is_global=True)
@slashtag_global.command("usage", aliases=["stats"])
@copy_doc(slashtag_usage)
async def slashtag_global_usage(self, ctx: commands.Context):
await self.show_slash_tag_usage(ctx)
@slashtag_global.command("restore", hidden=True)
@copy_doc(slashtag_restore)
async def slashtag_global_restore(self, ctx: commands.Context):
await self.restore_tags(ctx, None)
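    # Owner-only configuration: application ID override, the /eval debugging command, and
    # the interaction testing cog toggle.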
@commands.is_owner()
@commands.group(aliases=["slashset"])
async def slashtagset(self, ctx: commands.Context):
"""Manage SlashTags settings."""
@slashtagset.command("settings")
async def slashtagset_settings(self, ctx: commands.Context):
"""View SlashTags settings."""
eval_command = f"✅ (**{self.eval_command}**)" if self.eval_command else "❎"
testing_enabled = "✅" if self.testing_enabled else "❎"
description = [
f"Application ID: **{self.application_id}**",
f"Eval command: {eval_command}",
f"Test cog loaded: {testing_enabled}",
]
embed = discord.Embed(
color=0xC9C9C9, title="SlashTags Settings", description="\n".join(description)
)
await ctx.send(embed=embed)
@slashtagset.command("appid")
async def slashtagset_appid(self, ctx: commands.Context, id: int = None):
"""
Manually set the application ID for [botname] slash commands if it differs from the bot user ID.
This only applies to legacy bots. If you don't know what this means, you don't need to worry about it.
"""
app_id = id or self.bot.user.id
await self.config.application_id.set(app_id)
self.application_id = app_id
        await ctx.send(f"Application ID set to `{app_id}`.")
@commands.check(dev_check)
@slashtagset.command("addeval")
async def slashtagset_addeval(self, ctx: commands.Context):
"""Add a slash eval command for debugging."""
if self.eval_command:
return await ctx.send("An eval command is already registered.")
slasheval = ApplicationCommand(
self,
name="eval",
description="SlashTags debugging eval command. Only bot owners can use this.",
options=[
SlashOption(name="body", description="Code body to evaluate.", required=True)
],
)
await slasheval.register()
await self.config.eval_command.set(slasheval.id)
self.eval_command = slasheval.id
await ctx.send("`/eval` has been registered.")
@commands.check(dev_check)
@slashtagset.command("rmeval")
async def slashtagset_rmeval(self, ctx: commands.Context):
"""Remove the slash eval command."""
if not self.eval_command:
return await ctx.send("The eval command hasn't been registered.")
try:
await self.http.remove_slash_command(self.eval_command)
except discord.HTTPException:
pass
await self.config.eval_command.clear()
self.eval_command = None
await ctx.send("`/eval` has been deleted.")
@slashtagset.command("testing")
async def slashtagset_testing(self, ctx: commands.Context, true_or_false: bool = None):
"""
Load or unload the SlashTag interaction development test cog.
"""
target_state = (
true_or_false if true_or_false is not None else not await self.config.testing_enabled()
)
if target_state is self.testing_enabled:
loaded = "loaded" if target_state else "unloaded"
return await ctx.send(f"The SlashTag interaction testing cog is already {loaded}.")
await self.config.testing_enabled.set(target_state)
if target_state:
loaded = "Loaded"
self.add_test_cog()
else:
loaded = "Unloaded"
self.remove_test_cog()
await ctx.send(f"{loaded} the SlashTag interaction testing cog.")
| [((1966, 2019), 're.compile', 're.compile', (['"""(?i)(\\\\[p\\\\])?\\\\b(slash\\\\s?)?tag\'?s?\\\\b"""'], {}), '("(?i)(\\\\[p\\\\])?\\\\b(slash\\\\s?)?tag\'?s?\\\\b")\n', (1976, 2019), False, 'import re\n'), ((2029, 2060), 're.compile', 're.compile', (['""".{1,100}:.{1,100}"""'], {}), "('.{1,100}:.{1,100}')\n", (2039, 2060), False, 'import re\n'), ((2088, 2142), 'logging.getLogger', 'logging.getLogger', (['"""red.phenom4n4n.slashtags.commands"""'], {}), "('red.phenom4n4n.slashtags.commands')\n", (2105, 2142), False, 'import logging\n'), ((2871, 2892), 'redbot.core.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (2890, 2892), False, 'from redbot.core import commands\n'), ((2898, 2928), 'redbot.core.commands.group', 'commands.group', ([], {'aliases': "['st']"}), "(aliases=['st'])\n", (2912, 2928), False, 'from redbot.core import commands\n'), ((3227, 3273), 'redbot.core.commands.mod_or_permissions', 'commands.mod_or_permissions', ([], {'manage_guild': '(True)'}), '(manage_guild=True)\n', (3254, 3273), False, 'from redbot.core import commands\n'), ((11933, 11979), 'redbot.core.commands.mod_or_permissions', 'commands.mod_or_permissions', ([], {'manage_guild': '(True)'}), '(manage_guild=True)\n', (11960, 11979), False, 'from redbot.core import commands\n'), ((12541, 12587), 'redbot.core.commands.mod_or_permissions', 'commands.mod_or_permissions', ([], {'manage_guild': '(True)'}), '(manage_guild=True)\n', (12568, 12587), False, 'from redbot.core import commands\n'), ((13137, 13183), 'redbot.core.commands.mod_or_permissions', 'commands.mod_or_permissions', ([], {'manage_guild': '(True)'}), '(manage_guild=True)\n', (13164, 13183), False, 'from redbot.core import commands\n'), ((13560, 13606), 'redbot.core.commands.mod_or_permissions', 'commands.mod_or_permissions', ([], {'manage_guild': '(True)'}), '(manage_guild=True)\n', (13587, 13606), False, 'from redbot.core import commands\n'), ((15443, 15489), 'redbot.core.commands.mod_or_permissions', 'commands.mod_or_permissions', ([], {'manage_guild': '(True)'}), '(manage_guild=True)\n', (15470, 15489), False, 'from redbot.core import commands\n'), ((19079, 19098), 'redbot.core.commands.is_owner', 'commands.is_owner', ([], {}), '()\n', (19096, 19098), False, 'from redbot.core import commands\n'), ((19316, 19335), 'redbot.core.commands.is_owner', 'commands.is_owner', ([], {}), '()\n', (19333, 19335), False, 'from redbot.core import commands\n'), ((20330, 20349), 'redbot.core.commands.is_owner', 'commands.is_owner', ([], {}), '()\n', (20347, 20349), False, 'from redbot.core import commands\n'), ((20812, 20858), 'redbot.core.commands.mod_or_permissions', 'commands.mod_or_permissions', ([], {'manage_guild': '(True)'}), '(manage_guild=True)\n', (20839, 20858), False, 'from redbot.core import commands\n'), ((21291, 21337), 'redbot.core.commands.mod_or_permissions', 'commands.mod_or_permissions', ([], {'manage_guild': '(True)'}), '(manage_guild=True)\n', (21318, 21337), False, 'from redbot.core import commands\n'), ((24997, 25016), 'redbot.core.commands.is_owner', 'commands.is_owner', ([], {}), '()\n', (25014, 25016), False, 'from redbot.core import commands\n'), ((25022, 25058), 'redbot.core.commands.group', 'commands.group', ([], {'aliases': "['slashset']"}), "(aliases=['slashset'])\n", (25036, 25058), False, 'from redbot.core import commands\n'), ((26348, 26373), 'redbot.core.commands.check', 'commands.check', (['dev_check'], {}), '(dev_check)\n', (26362, 26373), False, 'from redbot.core import commands\n'), ((27143, 27168), 
'redbot.core.commands.check', 'commands.check', (['dev_check'], {}), '(dev_check)\n', (27157, 27168), False, 'from redbot.core import commands\n'), ((10017, 10071), 'redbot.core.utils.predicates.MessagePredicate.regex', 'MessagePredicate.regex', (['ARGUMENT_NAME_DESCRIPTION', 'ctx'], {}), '(ARGUMENT_NAME_DESCRIPTION, ctx)\n', (10039, 10071), False, 'from redbot.core.utils.predicates import MessagePredicate\n'), ((19506, 19537), 'redbot.core.utils.predicates.MessagePredicate.yes_or_no', 'MessagePredicate.yes_or_no', (['ctx'], {}), '(ctx)\n', (19532, 19537), False, 'from redbot.core.utils.predicates import MessagePredicate\n'), ((4674, 4705), 'redbot.core.utils.predicates.MessagePredicate.yes_or_no', 'MessagePredicate.yes_or_no', (['ctx'], {}), '(ctx)\n', (4700, 4705), False, 'from redbot.core.utils.predicates import MessagePredicate\n'), ((6977, 7008), 'redbot.core.utils.predicates.MessagePredicate.yes_or_no', 'MessagePredicate.yes_or_no', (['ctx'], {}), '(ctx)\n', (7003, 7008), False, 'from redbot.core.utils.predicates import MessagePredicate\n'), ((7645, 7679), 'redbot.core.utils.predicates.MessagePredicate.same_context', 'MessagePredicate.same_context', (['ctx'], {}), '(ctx)\n', (7674, 7679), False, 'from redbot.core.utils.predicates import MessagePredicate\n'), ((11168, 11199), 'redbot.core.utils.predicates.MessagePredicate.yes_or_no', 'MessagePredicate.yes_or_no', (['ctx'], {}), '(ctx)\n', (11194, 11199), False, 'from redbot.core.utils.predicates import MessagePredicate\n'), ((17248, 17267), 'redbot.core.utils.chat_formatting.pagify', 'pagify', (['description'], {}), '(description)\n', (17254, 17267), False, 'from redbot.core.utils.chat_formatting import box, humanize_list, inline, pagify\n'), ((18757, 18792), 'redbot.core.utils.menus.menu', 'menu', (['ctx', 'embeds', 'DEFAULT_CONTROLS'], {}), '(ctx, embeds, DEFAULT_CONTROLS)\n', (18761, 18792), False, 'from redbot.core.utils.menus import DEFAULT_CONTROLS, menu\n'), ((10817, 10877), 'redbot.core.utils.predicates.MessagePredicate.lower_contained_in', 'MessagePredicate.lower_contained_in', (['valid_option_types', 'ctx'], {}), '(valid_option_types, ctx)\n', (10852, 10877), False, 'from redbot.core.utils.predicates import MessagePredicate\n'), ((16499, 16539), 'discord.utils.escape_markdown', 'discord.utils.escape_markdown', (['tagscript'], {}), '(tagscript)\n', (16528, 16539), False, 'import discord\n'), ((18580, 18625), 'tabulate.tabulate', 'tabulate', (['usage_data'], {'headers': "('Tag', 'Uses')"}), "(usage_data, headers=('Tag', 'Uses'))\n", (18588, 18625), False, 'from tabulate import tabulate\n'), ((20038, 20074), 'copy.copy', 'copy', (['self.guild_tag_cache[guild.id]'], {}), '(self.guild_tag_cache[guild.id])\n', (20042, 20074), False, 'from copy import copy\n'), ((4385, 4423), 'redbot.core.utils.predicates.MessagePredicate.length_less', 'MessagePredicate.length_less', (['(101)', 'ctx'], {}), '(101, ctx)\n', (4413, 4423), False, 'from redbot.core.utils.predicates import MessagePredicate\n'), ((10583, 10592), 'redbot.core.utils.chat_formatting.inline', 'inline', (['n'], {}), '(n)\n', (10589, 10592), False, 'from redbot.core.utils.chat_formatting import box, humanize_list, inline, pagify\n')] |