code stringlengths 22-1.05M | apis listlengths 1-3.31k | extract_api stringlengths 75-3.25M |
---|---|---|
from django.contrib import admin
from apps.iotdb_cloud_core.models import IoTDBRelease
admin.site.register(IoTDBRelease)
|
[
"django.contrib.admin.site.register"
] |
[((89, 122), 'django.contrib.admin.site.register', 'admin.site.register', (['IoTDBRelease'], {}), '(IoTDBRelease)\n', (108, 122), False, 'from django.contrib import admin\n')]
|
import copy
import numpy as np
PXL2CM = 0.035277778
def print_formatted_stats(stats):
"""
Print formatted results for result tables
"""
print("& {:.2f} & {:.2f} & {:.2f} & {:.2f} \\\\" .format(np.mean(stats['tracked_until_end_ratio']),
np.mean(stats['global_tracking_ratio']),
np.mean(stats['alignment_errors_mean'])*PXL2CM,
np.mean(stats['alignment_errors_std'])*PXL2CM))
def compute_alignment_stats(evaluation_data):
"""
Compute alignment stats
"""
alignment_errors = []
tracking_ratios = []
tracked_until_end = 0
tracked_onsets = 0
total_onsets = 0
for date_entry in evaluation_data:
alignment_errors += date_entry['alignment_errors']
tracking_ratios.append(date_entry['onsets_tracked'] / float(date_entry['total_onsets']))
if date_entry['onsets_tracked'] == date_entry['total_onsets']:
tracked_until_end += 1
tracked_onsets += date_entry['onsets_tracked']
total_onsets += date_entry['total_onsets']
alignment_errors = np.asarray(alignment_errors)
abs_alignment_errors = np.abs(alignment_errors)
tracking_ratios = np.asarray(tracking_ratios)
ae_mean, ae_median, ae_std = -1, -1, -1
if len(abs_alignment_errors) > 0:
ae_mean = abs_alignment_errors.mean()
ae_median = np.median(abs_alignment_errors)
ae_std = abs_alignment_errors.std()
tracking_ratios_mean = tracking_ratios.mean()
tracked_to_end_ratio = tracked_until_end / float(len(evaluation_data))
global_tracking_ratio = float(tracked_onsets) / total_onsets
stats = dict()
stats['alignment_errors_mean'] = ae_mean
stats['alignment_errors_median'] = ae_median
stats['alignment_errors_std'] = ae_std
stats['tracking_ratios_mean'] = tracking_ratios_mean
stats['global_tracking_ratio'] = global_tracking_ratio
stats['tracked_until_end_ratio'] = tracked_to_end_ratio
return stats
class Evaluator:
def __init__(self, make_env, evaluation_pools, config, trials=1, render_mode=None):
self.make_env = make_env
self.evaluation_pools = evaluation_pools
self.config = config
self.render_mode = render_mode
self.trials = trials
def _eval_pool(self, agent, pool, verbose):
pool.reset()
if verbose:
print(pool.get_current_song_name().ljust(60), end=" ")
env = self.make_env(pool, self.config, render_mode=self.render_mode)
alignment_errors = []
# get observations
episode_reward = 0
observation = env.reset()
onset_list = pool.get_current_song_onsets()
while True:
# choose action
action = agent.select_action(observation, train=False)
# perform step and observe
observation, reward, done, info = env.step(action)
episode_reward += reward
# keep alignment errors, only store tracking error if an onset occurs
if pool.curr_perf_frame in onset_list:
alignment_errors.append(pool.tracking_error())
if done:
break
# compute number of tracked onsets
onsets_tracked = np.sum(onset_list <= pool.curr_perf_frame)
song_data = {'alignment_errors': alignment_errors, 'onsets_tracked': onsets_tracked,
'total_onsets': len(onset_list)}
return song_data
def evaluate(self, agent, log_writer=None, log_step=0, verbose=False):
raise NotImplementedError
class PerformanceEvaluator(Evaluator):
def __init__(self, make_env, evaluation_pools, config, trials=1, render_mode=None):
Evaluator.__init__(self, make_env, evaluation_pools, config, trials, render_mode)
def evaluate(self, agent, log_writer=None, log_step=0, verbose=False):
mean_stats = None
for _ in range(self.trials):
evaluation_data = []
for pool in self.evaluation_pools:
song_data = self._eval_pool(agent, pool, verbose)
evaluation_data.append(song_data)
if verbose:
song_stats = compute_alignment_stats([song_data])
string = "tracking ratio: %.2f" % song_stats['global_tracking_ratio']
if song_stats['global_tracking_ratio'] == 1.0:
string += " +"
print(string)
# compute alignment stats
stats = compute_alignment_stats(evaluation_data)
stats['evaluation_data'] = evaluation_data
if mean_stats is None:
mean_stats = dict()
for key in stats.keys():
if key != "evaluation_data":
mean_stats[key] = []
for key in mean_stats.keys():
mean_stats[key].append(stats[key])
for key in mean_stats.keys():
mean_stats[key] = np.mean(mean_stats[key])
if log_writer is not None:
log_writer.add_scalar('eval/alignment_errors_mean', mean_stats['alignment_errors_mean'], log_step)
log_writer.add_scalar('eval/alignment_errors_median', mean_stats['alignment_errors_median'], log_step)
log_writer.add_scalar('eval/alignment_errors_std', mean_stats['alignment_errors_std'], log_step)
log_writer.add_scalar('eval/tracking_ratios_mean', mean_stats['tracking_ratios_mean'], log_step)
log_writer.add_scalar('eval/global_tracking_ratio', mean_stats['global_tracking_ratio'], log_step)
log_writer.add_scalar('eval/tracked_until_end_ratio', mean_stats['tracked_until_end_ratio'], log_step)
return mean_stats
class EmbeddingEvaluator(Evaluator):
def __init__(self, make_env, evaluation_pools, config, trials=1, render_mode=None):
Evaluator.__init__(self, make_env, evaluation_pools, config, trials, render_mode)
self.embedding = None
def store_embedding(self, module, input_, output_):
self.embedding = input_[0]
def register_hook(self, net):
embedding_layer = net._modules.get('policy_fc')
embedding_layer.register_forward_hook(self.store_embedding)
def _eval_pool(self, agent, pool, verbose):
self.register_hook(agent.model.net)
pool.reset()
if verbose:
print(pool.get_current_song_name())
env = self.make_env(pool, self.config, render_mode=self.render_mode)
plain_env = self.make_env(copy.deepcopy(pool), self.config, render_mode=self.render_mode)
while not hasattr(plain_env, 'rl_pool'):
plain_env = plain_env.env
plain_env.reset()
# get observations
observation = env.reset()
return_dicts = {'state': [],
'value': [],
'embedding': [],
'onsets_in_state': [],
'target_lost': [],
'song_name': [],
'tracking_error': [],
'speed': []}
# song_onsets = plain_env.rl_pool.curr_song.get_perf_onsets()
song_onsets = plain_env.rl_pool.curr_song.cur_perf['onsets_padded']
while True:
# choose action
action = agent.select_action(observation)
# perform step and observe
observation, reward, done, info = env.step(action)
cur_perf_frame = plain_env.rl_pool.curr_perf_frame
in_len = plain_env.rl_pool.perf_shape[-1]
onsets_in_input = len(list(filter(lambda o: cur_perf_frame-in_len <= o <= cur_perf_frame, song_onsets)))
# perform a step in the plain env to get the original observation
obs_org, r, d, _ = plain_env.step(action)
return_dicts['state'].append(obs_org)
return_dicts['value'].append(agent.predict_value(observation))
return_dicts['embedding'].append(self.embedding.cpu().data.numpy())
return_dicts['onsets_in_state'].append(onsets_in_input)
return_dicts['target_lost'].append(done)
return_dicts['song_name'].append(plain_env.rl_pool.curr_song.song_name)
return_dicts['tracking_error'].append(plain_env.rl_pool.tracking_error())
return_dicts['speed'].append(plain_env.rl_pool.sheet_speed)
if done:
break
tue = np.sum(song_onsets <= plain_env.rl_pool.curr_perf_frame) == len(song_onsets)
return_dicts['tue'] = [tue for _ in range(len(return_dicts['state']))]
return return_dicts
def evaluate(self, agent, log_writer=None, log_step=0, verbose=False):
return_dicts = {'state': [],
'value': [],
'embedding': [],
'onsets_in_state': [],
'tue': [],
'target_lost': [],
'song_name': [],
'tracking_error': [],
'speed': []}
for _ in range(self.trials):
for pool in self.evaluation_pools:
res = self._eval_pool(agent, pool, verbose)
return_dicts['state'].extend(res['state'])
return_dicts['value'].extend(res['value'])
return_dicts['embedding'].extend(res['embedding'])
return_dicts['onsets_in_state'].extend(res['onsets_in_state'])
return_dicts['tue'].extend(res['tue'])
return_dicts['target_lost'].extend(res['target_lost'])
return_dicts['song_name'].extend(res['song_name'])
return_dicts['tracking_error'].extend(res['tracking_error'])
return_dicts['speed'].extend(res['speed'])
return return_dicts
|
[
"copy.deepcopy",
"numpy.abs",
"numpy.sum",
"numpy.median",
"numpy.asarray",
"numpy.mean"
] |
[((1224, 1252), 'numpy.asarray', 'np.asarray', (['alignment_errors'], {}), '(alignment_errors)\n', (1234, 1252), True, 'import numpy as np\n'), ((1280, 1304), 'numpy.abs', 'np.abs', (['alignment_errors'], {}), '(alignment_errors)\n', (1286, 1304), True, 'import numpy as np\n'), ((1327, 1354), 'numpy.asarray', 'np.asarray', (['tracking_ratios'], {}), '(tracking_ratios)\n', (1337, 1354), True, 'import numpy as np\n'), ((1504, 1535), 'numpy.median', 'np.median', (['abs_alignment_errors'], {}), '(abs_alignment_errors)\n', (1513, 1535), True, 'import numpy as np\n'), ((3384, 3426), 'numpy.sum', 'np.sum', (['(onset_list <= pool.curr_perf_frame)'], {}), '(onset_list <= pool.curr_perf_frame)\n', (3390, 3426), True, 'import numpy as np\n'), ((214, 255), 'numpy.mean', 'np.mean', (["stats['tracked_until_end_ratio']"], {}), "(stats['tracked_until_end_ratio'])\n", (221, 255), True, 'import numpy as np\n'), ((318, 357), 'numpy.mean', 'np.mean', (["stats['global_tracking_ratio']"], {}), "(stats['global_tracking_ratio'])\n", (325, 357), True, 'import numpy as np\n'), ((5123, 5147), 'numpy.mean', 'np.mean', (['mean_stats[key]'], {}), '(mean_stats[key])\n', (5130, 5147), True, 'import numpy as np\n'), ((6678, 6697), 'copy.deepcopy', 'copy.deepcopy', (['pool'], {}), '(pool)\n', (6691, 6697), False, 'import copy\n'), ((8597, 8653), 'numpy.sum', 'np.sum', (['(song_onsets <= plain_env.rl_pool.curr_perf_frame)'], {}), '(song_onsets <= plain_env.rl_pool.curr_perf_frame)\n', (8603, 8653), True, 'import numpy as np\n'), ((420, 459), 'numpy.mean', 'np.mean', (["stats['alignment_errors_mean']"], {}), "(stats['alignment_errors_mean'])\n", (427, 459), True, 'import numpy as np\n'), ((529, 567), 'numpy.mean', 'np.mean', (["stats['alignment_errors_std']"], {}), "(stats['alignment_errors_std'])\n", (536, 567), True, 'import numpy as np\n')]
|
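The evaluator sample above reduces per-song tracking results to aggregate statistics. As an illustration only (assuming the definitions above are in scope; the song entries and numbers below are invented), `compute_alignment_stats` can be exercised on toy data:

```python
# Toy input in the shape produced by Evaluator._eval_pool: one dict per song
# with raw alignment errors and onset-tracking counts (values are made up).
evaluation_data = [
    {'alignment_errors': [2.0, -1.5, 0.5], 'onsets_tracked': 3, 'total_onsets': 3},
    {'alignment_errors': [4.0], 'onsets_tracked': 1, 'total_onsets': 2},
]

stats = compute_alignment_stats(evaluation_data)
print(stats['global_tracking_ratio'])    # 4 tracked of 5 onsets -> 0.8
print(stats['tracked_until_end_ratio'])  # 1 of 2 songs fully tracked -> 0.5
```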
from pathlib import Path
from torchvision.datasets import VisionDataset
import numpy as np
from PIL import Image
class MHPv1(VisionDataset):
"""
MHP dataset: Multi-Human Parsing.
V1 only provides human parsing annotations; V2 additionally includes pose annotations.
https://github.com/ZhaoJ9014/Multi-Human-Parsing
or https://lv-mhp.github.io/
The MHP v1.0 dataset contains 4,980 images,
each with at least two persons (average is 3).
We randomly choose 980 images and their corresponding annotations as the testing set.
The rest form a training set of 3,000 images and a validation set of 1,000 images.
For each instance, 18 semantic categories are defined and annotated except for the
"background" category, i.e. “hat”, “hair”, “sunglasses”, “upper clothes”, “skirt”,
“pants”, “dress”, “belt”, “left shoe”, “right shoe”, “face”, “left leg”, “right leg”,
“left arm”, “right arm”, “bag”, “scarf” and “torso skin”.
Each instance has a complete set of annotations whenever the corresponding category
appears in the current image.
List of contents:
./images:
All images in the dataset.
./annotations
The segmentation annotation files corresponding to the images.
One image is corresponding to multiple annotation files with the same prefix, one file per person. In each annotation file, the label represents:
0: 'background',
1: 'hat',
2: 'hair',
3: 'sunglass',
4: 'upper-clothes',
5: 'skirt',
6: 'pants',
7: 'dress',
8: 'belt',
9: 'left-shoe',
10: 'right-shoe',
11: 'face',
12: 'left-leg',
13: 'right-leg',
14: 'left-arm',
15: 'right-arm',
16: 'bag',
17: 'scarf',
18: 'torso-skin',
./visualization.m
Matlab script to visualize the annotations
./train_list.txt (4,000 entries)
The list of images for training and validation
./test_list.txt (980 entries)
The list of images for testing
"""
root = '/data/public/rw/datasets/human/parsing/LV-MHP-v1'
category = ('__background__', 'hat', 'hair', 'sunglass', 'upper-clothes',
'skirt', 'pants', 'dress', 'belt', 'left-shoe',
'right-shoe', 'face', 'left-leg', 'right-leg', 'left-arm',
'right-arm', 'bag', 'scarf', 'torso-skin',)
def __init__(self, what='train', transforms=None, transform=None, target_transform=None, root=None):
root = root or MHPv1.root
super(MHPv1, self).__init__(root=root, transforms=transforms,
transform=transform, target_transform=target_transform)
assert what in ('train', 'test')
self.what = what
root = Path(root)
self.imagepath = root / 'images'
self.annopath = root / 'annotations'
fname = root / f'{what}_list.txt'
with open(fname, 'r') as f:
image_ids = [line.split('.jpg')[0] for line in f.readlines()]
self.image_ids = image_ids
def __len__(self):
return len(self.image_ids)
def __getitem__(self, index):
i = self.image_ids[index]
fname = self.imagepath / f'{i}.jpg'
image = Image.open(fname)
files = self.annopath.glob(f'{i}_*.png')
anno = [Image.open(f) for f in files]
return image, anno
|
[
"pathlib.Path",
"PIL.Image.open"
] |
[((2751, 2761), 'pathlib.Path', 'Path', (['root'], {}), '(root)\n', (2755, 2761), False, 'from pathlib import Path\n'), ((3223, 3240), 'PIL.Image.open', 'Image.open', (['fname'], {}), '(fname)\n', (3233, 3240), False, 'from PIL import Image\n'), ((3306, 3319), 'PIL.Image.open', 'Image.open', (['f'], {}), '(f)\n', (3316, 3319), False, 'from PIL import Image\n')]
|
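A minimal usage sketch of the `MHPv1` class above, assuming the LV-MHP-v1 files actually exist under the class's default root (that path is site-specific):

```python
# Illustrative only: requires the dataset files to be present on disk.
dataset = MHPv1(what='train')              # uses MHPv1.root unless a root is passed
print(len(dataset))                        # number of ids listed in train_list.txt
image, annotations = dataset[0]            # PIL image plus one annotation PNG per person
print(image.size, len(annotations))
```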
from sys import stdin
def count(S, m, n):
tabla = [[0 for x in range(m)] for x in range(n+1)]
for i in range(m):
tabla[0][i] = 1
for i in range(1, n+1):
for j in range(m):
x = tabla[i - S[j]][j] if i-S[j] >= 0 else 0
y = tabla[i][j-1] if j >= 1 else 0
tabla[i][j] = x + y
return tabla[n][m-1]
def main():
n = int(stdin.readline().strip())
arr = list(map(int,stdin.readline().strip().split(',')))
m = len(arr)
print(count(arr, m, n))
main()
|
[
"sys.stdin.readline"
] |
[((404, 420), 'sys.stdin.readline', 'stdin.readline', ([], {}), '()\n', (418, 420), False, 'from sys import stdin\n'), ((453, 469), 'sys.stdin.readline', 'stdin.readline', ([], {}), '()\n', (467, 469), False, 'from sys import stdin\n')]
|
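The `count` function above is the classic coin-change counting DP: `tabla[i][j]` holds the number of ways to form amount `i` using the first `j+1` coin values. A quick check, purely illustrative (assuming `count` from the script above is in scope):

```python
# Ways to form 4 from coins {1, 2, 3}: 1+1+1+1, 1+1+2, 2+2, 1+3 -> 4 ways.
assert count([1, 2, 3], 3, 4) == 4
# With amount 0 there is exactly one way: use no coins at all.
assert count([1, 2, 3], 3, 0) == 1
```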
from django.db import models
from django.utils import timezone
from django.forms import ModelForm
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.core.exceptions import ValidationError
import secrets
import os
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
email_confirmed = models.BooleanField(default=False)
email = models.EmailField(max_length=254, default = "")
@receiver(post_save, sender=User)
def update_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
instance.profile.save()
def file_size(value):
limit = 100 * 1024 * 1024
print("Value Size: ", value.size)
if value.size > limit:
raise ValidationError('File too large. Size should not exceed 100 MB.')
def user_directory_path(instance, filename):
return os.path.join('videos', secrets.token_urlsafe(64) + '.mp4')
class Video(models.Model):
UserID = models.ForeignKey(User, on_delete=models.CASCADE, default=1)
VideoPath = models.FileField(upload_to=user_directory_path, validators=[file_size], null=True, verbose_name="",)
Name = models.CharField(max_length=400)
def __str__(self):
return "Video: " + str(self.VideoPath)
class Split(models.Model):
SplitPath = models.CharField(max_length=400)
def __str__(self):
return str(str(self.id) + ":" + self.SplitPath)
class VideoSplit(models.Model):
VideoID = models.ForeignKey(Video, on_delete=models.CASCADE)
SplitID = models.ForeignKey(Split, on_delete=models.CASCADE)
def __str__(self):
return str(self.VideoID)
class Meta:
unique_together = (('VideoID', 'SplitID'),)
class SplitTranscript(models.Model):
SplitID = models.OneToOneField(Split, on_delete=models.CASCADE, primary_key=True)
Transcript = models.TextField()
def __str__(self):
return self.Transcript
class SplitSpeech(models.Model):
SplitID = models.OneToOneField(Split, on_delete=models.CASCADE, primary_key=True)
SpeechPath = models.TextField()
def __str__(self):
return str(self.SpeechPath)
class SplitSummary(models.Model):
SplitID = models.ForeignKey(Split, on_delete=models.CASCADE)
Summary = models.TextField()
def __str__(self):
return str(self.Summary)
class SplitTag(models.Model):
SplitID = models.ForeignKey(Split, on_delete=models.CASCADE)
Tag = models.TextField()
def __str__(self):
return str(self.Tag)
|
[
"django.db.models.FileField",
"django.db.models.OneToOneField",
"django.db.models.TextField",
"django.core.exceptions.ValidationError",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.dispatch.receiver",
"secrets.token_urlsafe",
"django.db.models.BooleanField",
"django.db.models.EmailField"
] |
[((516, 548), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'User'}), '(post_save, sender=User)\n', (524, 548), False, 'from django.dispatch import receiver\n'), ((343, 395), 'django.db.models.OneToOneField', 'models.OneToOneField', (['User'], {'on_delete': 'models.CASCADE'}), '(User, on_delete=models.CASCADE)\n', (363, 395), False, 'from django.db import models\n'), ((418, 452), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (437, 452), False, 'from django.db import models\n'), ((465, 510), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(254)', 'default': '""""""'}), "(max_length=254, default='')\n", (482, 510), False, 'from django.db import models\n'), ((1057, 1117), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'default': '(1)'}), '(User, on_delete=models.CASCADE, default=1)\n', (1074, 1117), False, 'from django.db import models\n'), ((1134, 1237), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': 'user_directory_path', 'validators': '[file_size]', 'null': '(True)', 'verbose_name': '""""""'}), "(upload_to=user_directory_path, validators=[file_size],\n null=True, verbose_name='')\n", (1150, 1237), False, 'from django.db import models\n'), ((1246, 1278), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(400)'}), '(max_length=400)\n', (1262, 1278), False, 'from django.db import models\n'), ((1394, 1426), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(400)'}), '(max_length=400)\n', (1410, 1426), False, 'from django.db import models\n'), ((1554, 1604), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Video'], {'on_delete': 'models.CASCADE'}), '(Video, on_delete=models.CASCADE)\n', (1571, 1604), False, 'from django.db import models\n'), ((1619, 1669), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Split'], {'on_delete': 'models.CASCADE'}), '(Split, on_delete=models.CASCADE)\n', (1636, 1669), False, 'from django.db import models\n'), ((1849, 1920), 'django.db.models.OneToOneField', 'models.OneToOneField', (['Split'], {'on_delete': 'models.CASCADE', 'primary_key': '(True)'}), '(Split, on_delete=models.CASCADE, primary_key=True)\n', (1869, 1920), False, 'from django.db import models\n'), ((1938, 1956), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1954, 1956), False, 'from django.db import models\n'), ((2060, 2131), 'django.db.models.OneToOneField', 'models.OneToOneField', (['Split'], {'on_delete': 'models.CASCADE', 'primary_key': '(True)'}), '(Split, on_delete=models.CASCADE, primary_key=True)\n', (2080, 2131), False, 'from django.db import models\n'), ((2149, 2167), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2165, 2167), False, 'from django.db import models\n'), ((2278, 2328), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Split'], {'on_delete': 'models.CASCADE'}), '(Split, on_delete=models.CASCADE)\n', (2295, 2328), False, 'from django.db import models\n'), ((2343, 2361), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2359, 2361), False, 'from django.db import models\n'), ((2465, 2515), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Split'], {'on_delete': 'models.CASCADE'}), '(Split, on_delete=models.CASCADE)\n', (2482, 2515), False, 'from django.db import models\n'), ((2526, 2544), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2542, 2544), False, 'from django.db import models\n'), ((834, 899), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""File too large. Size should not exceed 100 MB."""'], {}), "('File too large. Size should not exceed 100 MB.')\n", (849, 899), False, 'from django.core.exceptions import ValidationError\n'), ((980, 1005), 'secrets.token_urlsafe', 'secrets.token_urlsafe', (['(64)'], {}), '(64)\n', (1001, 1005), False, 'import secrets\n')]
|
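The `file_size` validator above only inspects the `.size` attribute of whatever it is given, so it can be exercised without a database or a real upload; the `FakeUpload` stand-in below is made up for illustration:

```python
from django.core.exceptions import ValidationError

class FakeUpload:
    # Stand-in for an uploaded file object; file_size() only reads .size.
    size = 150 * 1024 * 1024  # 150 MB, above the 100 MB limit

try:
    file_size(FakeUpload())
except ValidationError as exc:
    print(exc.messages)  # ['File too large. Size should not exceed 100 MB.']
```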
__source__ = 'https://leetcode.com/problems/k-empty-slots/'
# Time: O()
# Space: O()
#
# Description: Leetcode # 683. K Empty Slots
#
# There is a garden with N slots. In each slot, there is a flower. The N flowers will bloom one by one in N days.
# In each day, there will be exactly one flower blooming and it will be in the status of blooming since then.
#
# Given an array flowers consists of number from 1 to N.
# Each number in the array represents the place where the flower will open in that day.
#
# For example, flowers[i] = x means that the unique flower that blooms at day i will be at position x,
# where i and x will be in the range from 1 to N.
#
# Also given an integer k, you need to output in which day there exists two flowers in the status of blooming,
# and also the number of flowers between them is k and these flowers are not blooming.
#
# If there isn't such day, output -1.
#
# Example 1:
# Input:
# flowers: [1,3,2]
# k: 1
# Output: 2
# Explanation: In the second day, the first and the third flower have become blooming.
#
# Example 2:
# Input:
# flowers: [1,2,3]
# k: 1
# Output: -1
# Note:
# The given array will be in the range [1, 20000].
#
# Companies
# Google
# Related Topics
# Array
#
import unittest
class Solution(object):
pass # your function here
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/k-empty-slots/solution/
It seems that this question has some mistakes. I think there are two places that might lead to misunderstandings:
(please feel free to tell me if I'm incorrect)
flowers[i] = x should mean that the unique flower that blooms at day i+1 (not i) will be at position x.
If you can get multiple possible results, then you need to return the minimum one.
The idea is to use an array days[] to record each position's flower's blooming day.
That means days[i] is the blooming day of the flower in position i+1.
We just need to find a subarray days[left, left+1,..., left+k-1, right] which satisfies:
for any i = left+1,..., left+k-1, we can have days[left] < days[i] && days[right] < days[i].
Then, the result is max(days[left], days[right]).
# 112ms 10.06%
class Solution {
public int kEmptySlots(int[] flowers, int k) {
TreeSet<Integer> blooming = new TreeSet<>();
int day = 0;
for (int slot: flowers) {
day++;
blooming.add(slot);
for (Integer neighbor : new Integer[]{blooming.lower(slot), blooming.higher(slot)}){
if (neighbor != null && Math.abs(neighbor - slot) - 1 == k) return day;
}
}
return -1;
}
}
# 8ms 98%
class Solution {
public int kEmptySlots(int[] flowers, int k) {
if(k < 0 || k > flowers.length - 2) {
return -1;
}
k++;
int[] mins = new int[flowers.length / k + 3];
int[] maxs = new int[mins.length];
Arrays.fill(mins, Integer.MAX_VALUE);
Arrays.fill(maxs, Integer.MIN_VALUE);
for(int i = 0; i < flowers.length; i++) {
int flower = flowers[i];
int index = flower / k + 1;
if(flower < mins[index]) {
mins[index] = flower;
if(maxs[index - 1] + k == flower) {
return i + 1;
}
}
if(flower > maxs[index]) {
maxs[index] = flower;
if(flower + k == mins[index + 1]) {
return i + 1;
}
}
}
return -1;
}
}
'''
|
[
"unittest.main"
] |
[((1422, 1437), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1435, 1437), False, 'import unittest\n')]
|
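The solution notes above describe the `days[]` formulation, but the Python class in this sample is left as a stub. Below is a minimal sliding-window sketch of that idea (not the repository's own solution, and assuming the 1-indexed day/position convention stated in the notes):

```python
def k_empty_slots(flowers, k):
    n = len(flowers)
    days = [0] * n                                # days[i] = blooming day of position i+1
    for day, position in enumerate(flowers, 1):
        days[position - 1] = day
    best = float('inf')
    left, right = 0, k + 1
    while right < n:
        for i in range(left + 1, right):
            if days[i] < days[left] or days[i] < days[right]:
                # an interior flower blooms earlier, so this window can never work
                left, right = i, i + k + 1
                break
        else:
            # every flower strictly between the ends blooms later than both ends
            best = min(best, max(days[left], days[right]))
            left, right = right, right + k + 1
    return best if best != float('inf') else -1

assert k_empty_slots([1, 3, 2], 1) == 2
assert k_empty_slots([1, 2, 3], 1) == -1
```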
from flask import render_template,request,redirect,url_for
from . import main
from ..request import get_news, get_news_articles, search_article
# Views
@main.route('/')
def index():
'''
function that returns the index page and its data
'''
#Get popular news
general_news = get_news('general')
sports_news = get_news('sports')
entertainment_news = get_news('entertainment')
health_news = get_news('health')
business_news = get_news('business')
tech_news = get_news('technology')
science_news = get_news('science')
title = 'Home - Welcome to The best News Website Online'
search_article = request.args.get('news_query')
if search_article:
return redirect(url_for('search',article_name=search_article))
else:
return render_template('index.html', title = title, general = general_news, sports = sports_news, entertainment = entertainment_news, health = health_news, business = business_news, technology = tech_news, science = science_news)
@main.route('/articles/<id>')
def articles(id):
'''
View article function that returns the articles in a source
'''
articles = get_news_articles(id)
return render_template('articles.html', id = id, articles = articles)
@main.route('/search/<article_name>')
def search(article_name):
'''
View function to display the search results
'''
article_name_list = article_name.split(" ")
article_name_format = "+".join(article_name_list)
searched_articles = search_article(article_name_format)
title = f'search results for {article_name}'
return render_template('search.html',article = searched_articles)
|
[
"flask.render_template",
"flask.url_for",
"flask.request.args.get"
] |
[((677, 707), 'flask.request.args.get', 'request.args.get', (['"""news_query"""'], {}), "('news_query')\n", (693, 707), False, 'from flask import render_template, request, redirect, url_for\n'), ((1227, 1285), 'flask.render_template', 'render_template', (['"""articles.html"""'], {'id': 'id', 'articles': 'articles'}), "('articles.html', id=id, articles=articles)\n", (1242, 1285), False, 'from flask import render_template, request, redirect, url_for\n'), ((1640, 1697), 'flask.render_template', 'render_template', (['"""search.html"""'], {'article': 'searched_articles'}), "('search.html', article=searched_articles)\n", (1655, 1697), False, 'from flask import render_template, request, redirect, url_for\n'), ((825, 1040), 'flask.render_template', 'render_template', (['"""index.html"""'], {'title': 'title', 'general': 'general_news', 'sports': 'sports_news', 'entertainment': 'entertainment_news', 'health': 'health_news', 'business': 'business_news', 'technology': 'tech_news', 'science': 'science_news'}), "('index.html', title=title, general=general_news, sports=\n sports_news, entertainment=entertainment_news, health=health_news,\n business=business_news, technology=tech_news, science=science_news)\n", (840, 1040), False, 'from flask import render_template, request, redirect, url_for\n'), ((756, 802), 'flask.url_for', 'url_for', (['"""search"""'], {'article_name': 'search_article'}), "('search', article_name=search_article)\n", (763, 802), False, 'from flask import render_template, request, redirect, url_for\n')]
|
#!/usr/bin/env python
"""
hostlists plugin to recursively query plugins based on type.
This makes it possible to obtain lists of hosts by recursively
querying multiple backends.
For example:
* Query dns for www.foo.com
* Get a list of two hostnames back haproxy1.ny.foo.com and
haproxy1.lax.foo.com.
* Query reverse proxies and load balancers for the
above two hostnames and the names of any hosts serving
the traffic for them. haproxy1.ny.foo.com is a vip being
served by apache1.ny.foo.com and apache2.ny.foo.com.
haproxy1.lax.foo.com is a vip being serviced by
apache2.lax.foo.com, apache3.lax.foo.com and
joesdesktop.foo.com.
* Return apache[1-2].ny.foo.com, apache[2-3].lax.foo.com,
joesdesktop.foo.com
"""
# Copyright (c) 2010-2015 Yahoo! Inc. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
from hostlists.plugin_manager import get_plugins
def name():
return ['type', 'type_vip', 'type_vip_up', 'type_vip_down']
def expand(value, name=None):
""" Try all plugins of a specific type for a result, if none
are able to expand the value further then return just the value """
mod_type = 'vip'
if not name:
return [value]
if name.lower() in ['type_vip']:
mod_type = 'vip'
filter_append = ''
if name.lower() in ['type_vip_down']:
mod_type = 'vip_down'
filter_append = '_down'
if name.lower() in ['type_vip_up']:
mod_type = 'vip_up'
filter_append = '_up'
plugins = get_plugins()
for plugin_name in plugins.keys():
if (
(filter_append != '' and plugin_name.endswith(filter_append)) or (filter_append == '' and plugin_name.find('_') == -1)
):
try:
if mod_type in plugins[plugin_name].type():
name = plugin_name + filter_append
result = plugins[plugin_name].expand(value, name=name)
if len(result):
return result
except AttributeError:
pass
return [value]
|
[
"hostlists.plugin_manager.get_plugins"
] |
[((2060, 2073), 'hostlists.plugin_manager.get_plugins', 'get_plugins', ([], {}), '()\n', (2071, 2073), False, 'from hostlists.plugin_manager import get_plugins\n')]
|
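Assuming the module above is importable and at least one installed plugin reports type `'vip'`, a hypothetical call looks like this (the hostnames are taken from the docstring example; the expansion result is illustrative only):

```python
# expand() falls back to echoing the value when no plugin of the requested type matches.
hosts = expand('haproxy1.ny.foo.com', name='type_vip')
print(hosts)  # e.g. ['apache1.ny.foo.com', 'apache2.ny.foo.com'], or ['haproxy1.ny.foo.com']
```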
import asyncio
from PIL import Image
from ..Base import Base
class BiasGame(Base):
def __init__(self, *args):
super().__init__(*args)
async def create_bias_game_image(self, first_idol_id, second_idol_id):
"""Uses thread pool to create bias game image to prevent IO blocking."""
# (self.ex.thread_pool.submit(self.merge_images, first_idol_id, second_idol_id)).result()
await self.ex.run_blocking_code(self.merge_images, first_idol_id, second_idol_id)
return f"{ self.ex.keys.bias_game_location}{first_idol_id}_{second_idol_id}.png"
def merge_images(self, first_idol_id, second_idol_id):
"""Merge Idol Images if the merge doesn't exist already."""
file_name = f"{first_idol_id}_{second_idol_id}.png"
if not self.ex.check_file_exists(f"{self.ex.keys.bias_game_location}{file_name}"):
# open the images.
with Image.open(f'{self.ex.keys.bias_game_location}versus.png') as versus_image, \
Image.open(f'{self.ex.keys.idol_avatar_location}{first_idol_id}_IDOL.png') as first_idol_image, \
Image.open(f'{self.ex.keys.idol_avatar_location}{second_idol_id}_IDOL.png') as second_idol_image:
# define the dimensions
idol_image_width = 150
idol_image_height = 150
first_image_area = (0, 0)
second_image_area = (versus_image.width - idol_image_width, 0)
image_size = (idol_image_width, idol_image_height)
# resize the idol images
first_idol_image = first_idol_image.resize(image_size)
second_idol_image = second_idol_image.resize(image_size)
# add the idol images onto the VS image.
versus_image.paste(first_idol_image, first_image_area)
versus_image.paste(second_idol_image, second_image_area)
# save the versus image.
versus_image.save(f"{self.ex.keys.bias_game_location}{file_name}")
async def create_bias_game_bracket(self, all_games, user_id, bracket_winner):
# (self.ex.thread_pool.submit(self.create_bracket, all_games, user_id, bracket_winner)).result()
await self.ex.run_blocking_code(self.create_bracket, all_games, user_id, bracket_winner)
return f"{self.ex.keys.bias_game_location}{user_id}.png"
def create_bracket(self, all_games, user_id, bracket_winner):
def resize_images(first_img, second_img, first_img_size, second_img_size):
return first_img.resize(first_img_size), second_img.resize(second_img_size)
def paste_image(first_idol_img, second_idol_img, first_img_area, second_img_area):
bracket.paste(first_idol_img, first_img_area)
bracket.paste(second_idol_img, second_img_area)
with Image.open(f'{self.ex.keys.bias_game_location}bracket8.png') as bracket:
count = 1
for c_round in all_games:
if len(c_round) > 4:
continue
for first_idol, second_idol in c_round:
first_idol_info = self.ex.cache.stored_bracket_positions.get(count)
second_idol_info = self.ex.cache.stored_bracket_positions.get(count + 1)
with Image.open(f'{self.ex.keys.idol_avatar_location}{first_idol.id}_IDOL.png') as first_idol_image, \
Image.open(f'{self.ex.keys.idol_avatar_location}{second_idol.id}_IDOL.png') as second_idol_image:
# resize images
first_idol_image, second_idol_image = resize_images(first_idol_image, second_idol_image,
first_idol_info.get('img_size'),
second_idol_info.get('img_size'))
# paste image to bracket
paste_image(first_idol_image, second_idol_image, first_idol_info.get('pos'),
second_idol_info.get('pos'))
count = count + 2
# add winner
idol_info = self.ex.cache.stored_bracket_positions.get(count)
with Image.open(f'{self.ex.keys.idol_avatar_location}{bracket_winner.id}_IDOL.png') as idol_image:
idol_image = idol_image.resize(idol_info.get('img_size'))
bracket.paste(idol_image, idol_info.get('pos'))
bracket.save(f"{self.ex.keys.bias_game_location}{user_id}.png")
|
[
"PIL.Image.open"
] |
[((2858, 2918), 'PIL.Image.open', 'Image.open', (['f"""{self.ex.keys.bias_game_location}bracket8.png"""'], {}), "(f'{self.ex.keys.bias_game_location}bracket8.png')\n", (2868, 2918), False, 'from PIL import Image\n'), ((910, 968), 'PIL.Image.open', 'Image.open', (['f"""{self.ex.keys.bias_game_location}versus.png"""'], {}), "(f'{self.ex.keys.bias_game_location}versus.png')\n", (920, 968), False, 'from PIL import Image\n'), ((1008, 1082), 'PIL.Image.open', 'Image.open', (['f"""{self.ex.keys.idol_avatar_location}{first_idol_id}_IDOL.png"""'], {}), "(f'{self.ex.keys.idol_avatar_location}{first_idol_id}_IDOL.png')\n", (1018, 1082), False, 'from PIL import Image\n'), ((1126, 1201), 'PIL.Image.open', 'Image.open', (['f"""{self.ex.keys.idol_avatar_location}{second_idol_id}_IDOL.png"""'], {}), "(f'{self.ex.keys.idol_avatar_location}{second_idol_id}_IDOL.png')\n", (1136, 1201), False, 'from PIL import Image\n'), ((4293, 4371), 'PIL.Image.open', 'Image.open', (['f"""{self.ex.keys.idol_avatar_location}{bracket_winner.id}_IDOL.png"""'], {}), "(f'{self.ex.keys.idol_avatar_location}{bracket_winner.id}_IDOL.png')\n", (4303, 4371), False, 'from PIL import Image\n'), ((3320, 3394), 'PIL.Image.open', 'Image.open', (['f"""{self.ex.keys.idol_avatar_location}{first_idol.id}_IDOL.png"""'], {}), "(f'{self.ex.keys.idol_avatar_location}{first_idol.id}_IDOL.png')\n", (3330, 3394), False, 'from PIL import Image\n'), ((3446, 3521), 'PIL.Image.open', 'Image.open', (['f"""{self.ex.keys.idol_avatar_location}{second_idol.id}_IDOL.png"""'], {}), "(f'{self.ex.keys.idol_avatar_location}{second_idol.id}_IDOL.png')\n", (3456, 3521), False, 'from PIL import Image\n')]
|
# -*-mode: python; encoding: utf-8; test-case-name: tests.test_app-*-
# ========================================================================
"""
Copyright |(c)| 2017 `Dropbox, Inc.`_
.. |(c)| unicode:: u+a9
.. _`Dropbox, Inc.`: https://www.dropbox.com/
Please see the accompanying ``LICENSE`` and ``CREDITS`` file(s) for
rights and restrictions governing use of this software. All rights not
expressly waived or licensed are reserved. If such a file did not
accompany this software, then please contact the author before viewing
or using this software in any capacity.
"""
# ========================================================================
from __future__ import (
absolute_import, division, print_function, unicode_literals,
)
from builtins import * # noqa: F401,F403; pylint: disable=redefined-builtin,unused-wildcard-import,useless-suppression,wildcard-import
from future.builtins.disabled import * # noqa: F401,F403; pylint: disable=redefined-builtin,unused-wildcard-import,useless-suppression,wildcard-import
# ---- Imports -----------------------------------------------------------
from future.moves.urllib.parse import urljoin
from future.utils import bytes_to_native_str
import hashlib
import hmac
import os
import sqlite3
import dropbox
import flask
# Used for the tutorial
import datetime # noqa: F401; pylint: disable=unused-import
import humanize # noqa: F401; pylint: disable=unused-import
# ---- Constants ---------------------------------------------------------
__all__ = ()
_SESSION_USER_ID = 'user-id'
_SESSION_DBX_AUTH_STATE = 'dbx-auth-state'
_APP = flask.Flask(__name__)
# ---- Functions ---------------------------------------------------------
# ========================================================================
@_APP.route('/', methods=( 'GET', 'POST' ))
def route_():
db = get_db()
user_id = flask.session.get(_SESSION_USER_ID)
user_dbx_acct_entry = None
if user_id is not None:
user_dbx_acct_entry = db_user_dbx_acct_select_one_by_user_id(db, user_id)
if user_dbx_acct_entry is None:
# They have a stale user ID, but we don't know why, so just
# treat them as a new browser
user_id = None
flask.session.pop(_SESSION_USER_ID, None)
if flask.request.method == 'GET':
# This displays the main page, which changes based on whether
# the session contains a valid user ID
template_vars = {
'title': _APP.config['SITE_TITLE'],
}
if user_dbx_acct_entry is not None:
user_name = user_dbx_acct_entry[bytes_to_native_str(b'user_name')]
user_email = user_dbx_acct_entry[bytes_to_native_str(b'user_email')]
template_vars['user_name'] = user_name
if user_email is not None:
template_vars['user_email'] = user_email
# TODO: Maybe we should do something fun here?
return flask.render_template('settings.html', **template_vars)
elif flask.request.method == 'POST':
action = flask.request.form.get('action')
if action == 'enable':
# Start the auth flow
return flask.redirect(flask.url_for('route_start'))
elif action == 'disable':
# We need to try to revoke all the tokens we have and clear
# this session. See WARNING comment in ``route_finish``.
if user_dbx_acct_entry is not None:
dbx_acct_id = user_dbx_acct_entry[bytes_to_native_str(b'dbx_acct_id')]
db_user_update_for_delete_by_dbx_acct_id(db, dbx_acct_id)
for user_entry in db_user_select_all_deleted_by_dbx_acct_id(db, dbx_acct_id):
dbx_auth_token = user_entry[bytes_to_native_str(b'dbx_auth_token')]
dbx = dropbox.Dropbox(dbx_auth_token)
try:
dbx.auth_token_revoke()
except dropbox.exceptions.AuthError:
# Token is already revoked
_APP.logger.info('token "%s" already revoked', dbx_auth_token)
user_id = user_entry[bytes_to_native_str(b'user_id')]
db_user_delete(db, user_id)
db.commit()
flask.session.pop(_SESSION_USER_ID, None)
return flask.redirect(flask.url_for('route_'))
else:
flask.abort(400) # bad request
# ========================================================================
@_APP.route('/finish')
def route_finish():
# This is basically modified from the example code at
# <http://dropbox-sdk-python.readthedocs.io/en/master/moduledoc.html#dropbox.oauth.DropboxOAuth2Flow>
auth_flow = _new_dbx_auth_flow(flask.session)
try:
auth_res = auth_flow.finish(flask.request.args)
except dropbox.oauth.BadRequestException:
flask.abort(400)
except dropbox.oauth.BadStateException:
# Start the auth flow again
return flask.redirect(flask.url_for('route_start'))
except dropbox.oauth.CsrfException:
flask.abort(403)
except dropbox.oauth.NotApprovedException:
flask.abort(401)
except dropbox.oauth.ProviderException as exc:
_APP.logger.info('auth error: %s', exc)
flask.abort(403)
# Compare our saved random state with what comes back from Dropbox
dbx_auth_state = flask.session.pop(_SESSION_DBX_AUTH_STATE, None)
if dbx_auth_state is None \
or auth_res.url_state != dbx_auth_state:
_APP.logger.info('browser state (%s) does not equal returned state (%s)', dbx_auth_state, auth_res.url_state)
flask.abort(403)
# Brilliant! Now we can DO stuff!
dbx_auth_token = auth_res.access_token
dbx_acct_id = auth_res.account_id
# TODO: Maybe now that we have an auth token, we can retrieve the
# user's Dropbox account name and e-mail using the API?
user_name = '<USE THE API TO RETRIEVE ME!>'
user_email = None
# Fake a secure-ish user ID and save the new user. See warning below.
user_id_seed = bytes(dbx_acct_id, encoding='utf-8') + os.urandom(24)
user_id = hashlib.sha256(user_id_seed).hexdigest()
db = get_db()
try:
db_dbx_acct_insert(db, dbx_acct_id, user_name, user_email)
except sqlite3.IntegrityError:
# The user's account record is already there, so we update the
# name and e-mail to the latest
db_dbx_acct_update(db, dbx_acct_id, user_name, user_email)
db_user_insert(db, user_id, dbx_acct_id, dbx_auth_token)
db.commit()
# WARNING: This is just to make our demo simpler. Don't ever use Flask
# sessions this way if your want to be #WorthyOfTrust. See
# <https://blog.miguelgrinberg.com/post/how-secure-is-the-flask-user-session>.
#
# Further, even if this WERE secure (which it isn't), this effectively
# treats Dropbox as an identity provider, which we shouldn't do. From
# <https://www.dropbox.com/developers/documentation/http/documentation#authorization>:
#
# Note: OAuth is an authorization protocol, not an authentication
# protocol. Dropbox should not be used as an identity provider.
#
# What we should be doing instead is having logins of our own that
# refer to at most one auth token. Ye have been warned.
flask.session[_SESSION_USER_ID] = user_id
return flask.redirect(flask.url_for('route_'))
# ========================================================================
@_APP.route('/start')
def route_start():
# This is basically modified from the example code at
# <http://dropbox-sdk-python.readthedocs.io/en/master/moduledoc.html#dropbox.oauth.DropboxOAuth2Flow>
dbx_auth_state = hashlib.sha256(os.urandom(24)).hexdigest()
# Save our random state in the browser so we can compare it with what
# comes back from Dropbox later
flask.session[_SESSION_DBX_AUTH_STATE] = dbx_auth_state
auth_url = _new_dbx_auth_flow(flask.session).start(dbx_auth_state)
return flask.redirect(auth_url)
# ========================================================================
@_APP.route('/webhook', methods=( 'GET', 'POST' ))
def route_webhook():
if flask.request.method == 'GET':
return flask.request.args.get('challenge', '')
elif flask.request.method == 'POST':
# Make sure we have a valid request. See
# <https://www.dropbox.com/developers/reference/webhooks#notifications>.
signature = flask.request.headers.get('X-Dropbox-Signature')
expected = hmac.new(_APP.config['DBX_APP_SECRET'], flask.request.data, hashlib.sha256).hexdigest()
if not hmac.compare_digest(signature, expected):
flask.abort(403)
# This is just to make our demo simpler. We shouldn't normally do
# any processing here. From
# <https://www.dropbox.com/developers/reference/webhooks#best-practices>:
#
# Your app only has ten seconds to respond to webhook requests.
# ... To make sure you can always respond within ten seconds,
# you should always do your work on a separate thread ... or
# asynchronously using a queue.
# TODO: What fun things can we do here?
# ========================================================================
def _new_dbx_auth_flow(session):
return dropbox.DropboxOAuth2Flow(
_APP.config['DBX_APP_KEY'],
_APP.config['DBX_APP_SECRET'],
urljoin(_APP.config['BASE_URL'], flask.url_for('route_finish')),
session,
'dbx-auth-csrf-token',
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Pretty much everything below this point is unrelated to using the
# Dropbox API
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# ========================================================================
@_APP.teardown_appcontext
def close_db(_):
if hasattr(flask.g, 'sqlite_db'):
flask.g.sqlite_db.close()
# ========================================================================
@_APP.cli.command('initdb')
def initdb_command():
init_db()
print('initialized database')
# ========================================================================
def connect_db():
rv = sqlite3.connect(_APP.config['DATABASE'])
rv.row_factory = sqlite3.Row
return rv
# ========================================================================
def db_dbx_acct_insert(db, dbx_acct_id, user_name, user_email):
db.execute(
"""
INSERT INTO dbx_accts ( dbx_acct_id, user_name, user_email )
VALUES ( ?, ?, ? )
""",
( dbx_acct_id, user_name, user_email ),
)
# ========================================================================
def db_dbx_acct_update(db, dbx_acct_id, user_name, user_email):
db.execute(
"""
UPDATE dbx_accts SET user_name = ?, user_email = ?
WHERE dbx_acct_id = ?
""",
( user_name, user_email, dbx_acct_id ),
)
# ========================================================================
def db_dbx_acct_select(db, dbx_acct_id):
cur = db.execute(
"""
SELECT dbx_acct_id, user_name, user_email
FROM dbx_accts
WHERE dbx_acct_id = ?
""",
( dbx_acct_id, ),
)
return cur.fetchone()
# ========================================================================
def db_user_dbx_acct_select_one_by_user_id(db, user_id):
cur = db.execute(
"""
SELECT
u.user_id AS user_id,
u.dbx_acct_id AS dbx_acct_id,
u.dbx_auth_token AS dbx_auth_token,
da.user_name AS user_name,
da.user_email AS user_email
FROM users AS u
JOIN dbx_accts AS da
ON da.dbx_acct_id = u.dbx_acct_id
WHERE u.user_id = ?
""",
( user_id, ),
)
return cur.fetchone()
# ========================================================================
def db_user_delete(db, user_id):
db.execute(
"""
DELETE FROM users
WHERE user_id = ?
""",
( user_id, ),
)
# ========================================================================
def db_user_insert(db, user_id, dbx_acct_id, dbx_auth_token=None):
db.execute(
"""
INSERT INTO users ( user_id, dbx_acct_id, dbx_auth_token )
VALUES ( ?, ?, ? )
""",
( user_id, dbx_acct_id, dbx_auth_token ),
)
# ========================================================================
def db_user_select_all_deleted_by_dbx_acct_id(db, dbx_acct_id):
cur = db.execute(
"""
SELECT user_id, dbx_acct_id, dbx_auth_token
FROM users
WHERE dbx_acct_id = ?
AND user_id LIKE '%-deleted'
""",
( dbx_acct_id, ),
)
return cur.fetchall()
# ========================================================================
def db_user_update_for_delete_by_dbx_acct_id(db, dbx_acct_id):
db.execute(
"""
UPDATE users
SET user_id = user_id || '-deleted'
WHERE dbx_acct_id = ?
""",
( dbx_acct_id, ),
)
# ========================================================================
def get_db():
if not hasattr(flask.g, 'sqlite_db'):
flask.g.sqlite_db = connect_db()
return flask.g.sqlite_db
# ========================================================================
def init_db():
db = get_db()
with _APP.open_resource('schema.sql', mode='r') as f:
db.cursor().executescript(f.read())
db.commit()
# ---- Initialization ----------------------------------------------------
_APP.config.from_object(__name__)
_APP.config.update(dict(
DATABASE=os.path.join(_APP.root_path, 'primer.db'),
SECRET_KEY=bytes_to_native_str(b'__SET_ME__'),
DBX_APP_KEY=bytes_to_native_str(b'__SET_ME__'),
DBX_APP_SECRET=bytes_to_native_str(b'__SET_ME__'),
SITE_TITLE=bytes_to_native_str(b'__SET_ME__'),
BASE_URL=b'http://localhost:5000/',
))
_APP.config.from_envvar('DBX_API_PRIMER_SETTINGS', silent=True)
if _APP.config['SECRET_KEY'] == bytes_to_native_str(b'__SET_ME__'):
_APP.logger.critical('SECRET_KEY must be set')
|
[
"flask.request.form.get",
"future.utils.bytes_to_native_str",
"hmac.compare_digest",
"flask.url_for",
"os.path.join",
"flask.redirect",
"flask.request.args.get",
"flask.request.headers.get",
"flask.abort",
"hashlib.sha256",
"flask.render_template",
"os.urandom",
"hmac.new",
"sqlite3.connect",
"flask.session.pop",
"dropbox.Dropbox",
"flask.Flask",
"flask.session.get",
"flask.g.sqlite_db.close"
] |
[((1619, 1640), 'flask.Flask', 'flask.Flask', (['__name__'], {}), '(__name__)\n', (1630, 1640), False, 'import flask\n'), ((1883, 1918), 'flask.session.get', 'flask.session.get', (['_SESSION_USER_ID'], {}), '(_SESSION_USER_ID)\n', (1900, 1918), False, 'import flask\n'), ((5429, 5477), 'flask.session.pop', 'flask.session.pop', (['_SESSION_DBX_AUTH_STATE', 'None'], {}), '(_SESSION_DBX_AUTH_STATE, None)\n', (5446, 5477), False, 'import flask\n'), ((8067, 8091), 'flask.redirect', 'flask.redirect', (['auth_url'], {}), '(auth_url)\n', (8081, 8091), False, 'import flask\n'), ((10326, 10366), 'sqlite3.connect', 'sqlite3.connect', (["_APP.config['DATABASE']"], {}), "(_APP.config['DATABASE'])\n", (10341, 10366), False, 'import sqlite3\n'), ((14202, 14236), 'future.utils.bytes_to_native_str', 'bytes_to_native_str', (["b'__SET_ME__'"], {}), "(b'__SET_ME__')\n", (14221, 14236), False, 'from future.utils import bytes_to_native_str\n'), ((2966, 3021), 'flask.render_template', 'flask.render_template', (['"""settings.html"""'], {}), "('settings.html', **template_vars)\n", (2987, 3021), False, 'import flask\n'), ((5690, 5706), 'flask.abort', 'flask.abort', (['(403)'], {}), '(403)\n', (5701, 5706), False, 'import flask\n'), ((6161, 6175), 'os.urandom', 'os.urandom', (['(24)'], {}), '(24)\n', (6171, 6175), False, 'import os\n'), ((7444, 7467), 'flask.url_for', 'flask.url_for', (['"""route_"""'], {}), "('route_')\n", (7457, 7467), False, 'import flask\n'), ((8293, 8332), 'flask.request.args.get', 'flask.request.args.get', (['"""challenge"""', '""""""'], {}), "('challenge', '')\n", (8315, 8332), False, 'import flask\n'), ((10023, 10048), 'flask.g.sqlite_db.close', 'flask.g.sqlite_db.close', ([], {}), '()\n', (10046, 10048), False, 'import flask\n'), ((2255, 2296), 'flask.session.pop', 'flask.session.pop', (['_SESSION_USER_ID', 'None'], {}), '(_SESSION_USER_ID, None)\n', (2272, 2296), False, 'import flask\n'), ((3080, 3112), 'flask.request.form.get', 'flask.request.form.get', (['"""action"""'], {}), "('action')\n", (3102, 3112), False, 'import flask\n'), ((4918, 4934), 'flask.abort', 'flask.abort', (['(400)'], {}), '(400)\n', (4929, 4934), False, 'import flask\n'), ((5123, 5139), 'flask.abort', 'flask.abort', (['(403)'], {}), '(403)\n', (5134, 5139), False, 'import flask\n'), ((5195, 5211), 'flask.abort', 'flask.abort', (['(401)'], {}), '(401)\n', (5206, 5211), False, 'import flask\n'), ((5319, 5335), 'flask.abort', 'flask.abort', (['(403)'], {}), '(403)\n', (5330, 5335), False, 'import flask\n'), ((6190, 6218), 'hashlib.sha256', 'hashlib.sha256', (['user_id_seed'], {}), '(user_id_seed)\n', (6204, 6218), False, 'import hashlib\n'), ((8524, 8572), 'flask.request.headers.get', 'flask.request.headers.get', (['"""X-Dropbox-Signature"""'], {}), "('X-Dropbox-Signature')\n", (8549, 8572), False, 'import flask\n'), ((9541, 9570), 'flask.url_for', 'flask.url_for', (['"""route_finish"""'], {}), "('route_finish')\n", (9554, 9570), False, 'import flask\n'), ((13809, 13850), 'os.path.join', 'os.path.join', (['_APP.root_path', '"""primer.db"""'], {}), "(_APP.root_path, 'primer.db')\n", (13821, 13850), False, 'import os\n'), ((13867, 13901), 'future.utils.bytes_to_native_str', 'bytes_to_native_str', (["b'__SET_ME__'"], {}), "(b'__SET_ME__')\n", (13886, 13901), False, 'from future.utils import bytes_to_native_str\n'), ((13919, 13953), 'future.utils.bytes_to_native_str', 'bytes_to_native_str', (["b'__SET_ME__'"], {}), "(b'__SET_ME__')\n", (13938, 13953), False, 'from future.utils import bytes_to_native_str\n'), ((13974, 14008), 'future.utils.bytes_to_native_str', 'bytes_to_native_str', (["b'__SET_ME__'"], {}), "(b'__SET_ME__')\n", (13993, 14008), False, 'from future.utils import bytes_to_native_str\n'), ((14025, 14059), 'future.utils.bytes_to_native_str', 'bytes_to_native_str', (["b'__SET_ME__'"], {}), "(b'__SET_ME__')\n", (14044, 14059), False, 'from future.utils import bytes_to_native_str\n'), ((2626, 2659), 'future.utils.bytes_to_native_str', 'bytes_to_native_str', (["b'user_name'"], {}), "(b'user_name')\n", (2645, 2659), False, 'from future.utils import bytes_to_native_str\n'), ((2706, 2740), 'future.utils.bytes_to_native_str', 'bytes_to_native_str', (["b'user_email'"], {}), "(b'user_email')\n", (2725, 2740), False, 'from future.utils import bytes_to_native_str\n'), ((5045, 5073), 'flask.url_for', 'flask.url_for', (['"""route_start"""'], {}), "('route_start')\n", (5058, 5073), False, 'import flask\n'), ((7786, 7800), 'os.urandom', 'os.urandom', (['(24)'], {}), '(24)\n', (7796, 7800), False, 'import os\n'), ((8696, 8736), 'hmac.compare_digest', 'hmac.compare_digest', (['signature', 'expected'], {}), '(signature, expected)\n', (8715, 8736), False, 'import hmac\n'), ((8750, 8766), 'flask.abort', 'flask.abort', (['(403)'], {}), '(403)\n', (8761, 8766), False, 'import flask\n'), ((3213, 3241), 'flask.url_for', 'flask.url_for', (['"""route_start"""'], {}), "('route_start')\n", (3226, 3241), False, 'import flask\n'), ((4433, 4449), 'flask.abort', 'flask.abort', (['(400)'], {}), '(400)\n', (4444, 4449), False, 'import flask\n'), ((8592, 8667), 'hmac.new', 'hmac.new', (["_APP.config['DXB_APP_SECRET']", 'flask.request.data', 'hashlib.sha256'], {}), "(_APP.config['DXB_APP_SECRET'], flask.request.data, hashlib.sha256)\n", (8600, 8667), False, 'import hmac\n'), ((4305, 4346), 'flask.session.pop', 'flask.session.pop', (['_SESSION_USER_ID', 'None'], {}), '(_SESSION_USER_ID, None)\n', (4322, 4346), False, 'import flask\n'), ((4382, 4405), 'flask.url_for', 'flask.url_for', (['"""route_"""'], {}), "('route_')\n", (4395, 4405), False, 'import flask\n'), ((3516, 3551), 'future.utils.bytes_to_native_str', 'bytes_to_native_str', (["b'dbx_acct_id'"], {}), "(b'dbx_acct_id')\n", (3535, 3551), False, 'from future.utils import bytes_to_native_str\n'), ((3836, 3867), 'dropbox.Dropbox', 'dropbox.Dropbox', (['dbx_auth_token'], {}), '(dbx_auth_token)\n', (3851, 3867), False, 'import dropbox\n'), ((3770, 3808), 'future.utils.bytes_to_native_str', 'bytes_to_native_str', (["b'dbx_auth_token'"], {}), "(b'dbx_auth_token')\n", (3789, 3808), False, 'from future.utils import bytes_to_native_str\n'), ((4179, 4210), 'future.utils.bytes_to_native_str', 'bytes_to_native_str', (["b'user_id'"], {}), "(b'user_id')\n", (4198, 4210), False, 'from future.utils import bytes_to_native_str\n')]
|
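The `/webhook` route above authenticates notifications by recomputing an HMAC-SHA256 over the raw request body with the app secret and comparing it to the `X-Dropbox-Signature` header in constant time. A framework-free sketch of the same check (the secret, body, and signature below are placeholders):

```python
import hashlib
import hmac

def is_valid_dropbox_signature(app_secret, body, signature):
    # Mirrors the check in route_webhook(): hex HMAC-SHA256 of the raw body,
    # compared with hmac.compare_digest to avoid timing side channels.
    expected = hmac.new(app_secret, body, hashlib.sha256).hexdigest()
    return hmac.compare_digest(signature, expected)

# Placeholder values, illustration only.
print(is_valid_dropbox_signature(b'__SET_ME__', b'{"list_folder": {"accounts": []}}',
                                  'not-a-real-signature'))  # -> False
```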
from PIL import ImageFont
class Letter:
""" letter class- each letter is one of these objects, and is rendered in order. """
def __init__(self,char,size,font,color = (255,255,255,255),b=False,i=False,u=False):
"""
char: character.
size: size of letter.
font: PIL truetype font object. TODO: add handling for other types
color: color of letter, RGBA tuple, range 0-255.
b: Bold flag.
i: Italics flag.
u: Underlined flag.
"""
self.char = char
self.size = size
self.font = ImageFont.truetype(font, size)
self.color = color
self.b = b
self.i = i
self.u = u
def get_kerning(self):
""" gets dimensions as tuple(w,h) that it will be when rendered. """
return self.font.getsize(self.char)
|
[
"PIL.ImageFont.truetype"
] |
[((572, 602), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['font', 'size'], {}), '(font, size)\n', (590, 602), False, 'from PIL import ImageFont\n')]
|
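A tiny usage sketch of the `Letter` class above; the font file name is an assumption and must resolve to a real TrueType font on your system:

```python
# 'DejaVuSans.ttf' is a placeholder; Pillow only finds it if the font is installed.
letter = Letter('A', 32, 'DejaVuSans.ttf', color=(255, 0, 0, 255), b=True)
print(letter.get_kerning())  # (width, height) the glyph will occupy when rendered
```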
'''
#############################################################################
#
# M A I N L O O P & P R I M A R Y F U N C T I O N S
#
#############################################################################
'''
import getopt
import os
import random
import signal
import sys
try:
import numpy as np # pylint: disable=import-error
except ImportError:
pass
# Check if SciPy is available
try:
import scipy.integrate # pylint: disable=import-error
import scipy.optimize # pylint: disable=import-error
except ModuleNotFoundError:
pass
# # Check if Matplotlib is available
# try:
# import matplotlib # pylint: disable=import-error
# except ModuleNotFoundError:
# pass
from rpn.debug import dbg, typename
from rpn.exception import *
import rpn.flag
import rpn.globl
import rpn.tvm
import rpn.type
import rpn.unit
import rpn.util
import rpn.word
disable_all_extensions = False
load_init_file = True
want_debug = False
def usage():
print("""\
Usage: rpn [-d] [-f FILE] [-i] [-l FILE] [-q] [-V] cmds...
-d Enable debugging
-f FILE Load FILE and exit
-i Force interactive mode
-l FILE Load FILE and continue
-q Do not load init file (~/.rpnrc)
-Q Disable all extensions (implies -q)
-V Display version information""")
sys.exit(64) # EX_USAGE
def initialize(rpndir, argv):
global disable_all_extensions # pylint: disable=global-statement
global load_init_file # pylint: disable=global-statement
# Set up low level stuff, stacks, variables
sys.setrecursionlimit(2000) # default is 10002
random.seed()
rpn.globl.push_scope(rpn.globl.root_scope, "Root scope")
rpn.globl.disp_stack.push(rpn.util.DisplayConfig())
rpn.word.w_std('std')
rpn.unit.define_units()
define_variables()
# Set up signal handling
signal.signal(signal.SIGINT, sigint_handler)
signal.signal(signal.SIGQUIT, sigquit_handler)
signal.signal(signal.SIGWINCH, sigwinch_handler)
sigwinch_handler(0, 0) # Read & define ROWS and COLS via stty(1)
# Set initial conditions
rpn.globl.eval_string("clreg clflag clfin")
rpn.flag.set_flag(rpn.flag.F_SHOW_PROMPT)
# Define built-in secondary (protected) words
if not disable_all_extensions:
try:
load_file(os.path.join(rpndir, "secondary.rpn"))
except RuntimeErr as err_f_opt:
rpn.globl.lnwriteln(str(err_f_opt))
sys.exit(1)
# Switch to user mode, where words and variables are no longer
# protected, and define built-in tertiary (non-protected) words
rpn.globl.default_protected = False
if not disable_all_extensions:
try:
load_file(os.path.join(rpndir, "tertiary.rpn"))
except RuntimeErr as err_f_opt:
rpn.globl.lnwriteln(str(err_f_opt))
sys.exit(1)
# Parse command line
argv = parse_args(argv)
# Hopefully load the user's init file
if load_init_file:
init_file = os.path.expanduser("~/.rpnrc")
if os.path.isfile(init_file):
(rpnrc, _) = rpn.globl.lookup_variable('RPNRC')
rpnrc.obj = rpn.type.String(init_file)
load_file(init_file)
# rpn.globl.lnwriteln("--------------------------------")
if len(argv) > 0:
s = " ".join(argv)
rpn.globl.eval_string(s)
if rpn.globl.interactive is None:
rpn.globl.interactive = False
else:
# No command args, so maybe go interactive
if rpn.globl.interactive is None:
rpn.globl.interactive = True
return rpn.globl.interactive
def define_variables():
# Variables defined here are all protected=True by default
rpn.globl.sharpout = rpn.globl.defvar('#OUT', rpn.type.Integer(0),
readonly=True, noshadow=True)
rpn.tvm.CF = rpn.globl.defvar('CF', rpn.type.Integer(1),
noshadow=True,
pre_hooks=[pre_require_int, pre_require_positive],
post_hooks=[post_label_with_identifier],
doc="Compounding Frequency")
rpn.globl.scr_cols = rpn.globl.defvar('COLS', rpn.type.Integer(0),
pre_hooks=[pre_require_int, pre_require_positive])
rpn.tvm.FV = rpn.globl.defvar('FV', None,
noshadow=True,
pre_hooks=[pre_require_int_or_float],
post_hooks=[post_label_with_identifier],
doc="Future Value")
rpn.tvm.INT = rpn.globl.defvar('INT', None,
noshadow=True,
pre_hooks=[pre_require_int_or_float, pre_require_non_negative],
post_hooks=[post_label_with_identifier],
doc="Interest rate")
rpn.tvm.N = rpn.globl.defvar('N', None,
noshadow=True,
pre_hooks=[pre_require_int_or_float, pre_require_positive],
post_hooks=[post_label_with_identifier],
doc="Number of payments")
rpn.globl.defvar('NUMPY', rpn.type.Integer(rpn.globl.bool_to_int(rpn.globl.have_module('numpy'))),
readonly=True, noshadow=True)
if rpn.globl.have_module('numpy'):
rpn.globl.defvar('NUMPY_VER', rpn.type.String(np.__version__),
readonly=True)
rpn.tvm.PF = rpn.globl.defvar('PF', rpn.type.Integer(1),
noshadow=True,
pre_hooks=[pre_require_int, pre_require_positive],
post_hooks=[post_label_with_identifier],
doc="Payment Frequency")
rpn.tvm.PMT = rpn.globl.defvar('PMT', None,
noshadow=True,
pre_hooks=[pre_require_int_or_float],
post_hooks=[post_label_with_identifier],
doc="Payment amount")
rpn.tvm.PV = rpn.globl.defvar('PV', None,
noshadow=True,
pre_hooks=[pre_require_int_or_float],
post_hooks=[post_label_with_identifier],
doc="Present Value")
rpn.globl.scr_rows = rpn.globl.defvar('ROWS', rpn.type.Integer(0),
pre_hooks=[pre_require_int, pre_require_positive])
rpn.globl.defvar('RPNRC', rpn.type.String(""),
readonly=True, hidden=True)
rpn.globl.defvar('SCIPY', rpn.type.Integer(rpn.globl.bool_to_int(rpn.globl.have_module('scipy'))),
readonly=True, noshadow=True)
if rpn.globl.have_module('scipy'):
rpn.globl.defvar('SCIPY_VER', rpn.type.String(scipy.__version__),
readonly=True)
rpn.globl.defvar('SIZE', rpn.type.Integer(20),
noshadow=True,
pre_hooks=[pre_validate_size_arg],
post_hooks=[post_clear_newly_unveiled_registers])
rpn.globl.defvar('Sreg', rpn.type.Integer(11),
pre_hooks=[pre_validate_Sreg_arg])
rpn.globl.defvar('VER', rpn.type.Float(rpn.globl.RPN_VERSION),
readonly=True, noshadow=True)
def parse_args(argv):
global want_debug # pylint: disable=global-statement
global load_init_file # pylint: disable=global-statement
global disable_all_extensions # pylint: disable=global-statement
try:
opts, argv = getopt.getopt(argv, "dDf:il:qQV")
except getopt.GetoptError as e:
print(str(e)) # OK
usage()
for opt, arg in opts:
if opt == "-d": # Sets debug only when main_loop is ready
want_debug = True
elif opt == "-D":
rpn.flag.set_flag(rpn.flag.F_DEBUG_ENABLED) # Debug immediately, useful for built-in words
elif opt == "-f":
if rpn.globl.interactive is None:
rpn.globl.interactive = False
try:
load_file(arg)
except RuntimeErr as err_f_opt:
rpn.globl.lnwriteln(str(err_f_opt))
elif opt == "-i":
rpn.globl.interactive = True
elif opt == "-l":
try:
load_file(arg)
except RuntimeErr as err_l_opt:
rpn.globl.lnwriteln(str(err_l_opt))
elif opt == "-q":
load_init_file = False
elif opt == "-Q":
load_init_file = False
disable_all_extensions = True
elif opt == "-V":
rpn.globl.show_version_info()
if rpn.globl.interactive is None:
rpn.globl.interactive = False
else:
print("Unhandled option {}".format(opt)) # OK
sys.exit(1)
return argv
def load_file(filename):
fn = filename
if not os.path.isfile(fn):
fn += ".rpn"
if not os.path.isfile(fn):
throw(X_NON_EXISTENT_FILE, "load", filename)
try:
with open(fn, "r") as file:
contents = file.read()
except PermissionError as e:
throw(X_FILE_IO, "load", "Cannot open file '{}'".format(fn))
else:
dbg("load_file", 3, "load_file({})='{}'".format(fn, contents))
rpn.globl.eval_string(contents)
def main_loop():
global want_debug # pylint: disable=global-statement
global disable_all_extensions # pylint: disable=global-statement
rpn.flag.clear_flag(rpn.flag.F_SHOW_X) # Reset, because some argv may have set it to True
# Non-existence of ~/.rpnrc is indicator of novice mode
(rpnrc, _) = rpn.globl.lookup_variable("RPNRC")
if len(rpnrc.obj.value) == 0 and not disable_all_extensions:
rpn.globl.lnwriteln("Type ? for information, help <word> for help on a specific word.")
rpn.globl.lnwriteln("Type vlist for a list of all words, vars to see your variables.")
rpn.globl.lnwriteln("Type .s to display the stack non-destructively, and bye to exit.")
if not rpn.globl.param_stack.empty():
if rpn.globl.param_stack.size() == 1:
rpn.globl.eval_string("dup . cr")
else:
rpn.word.w_dot_s('.s')
if want_debug:
rpn.flag.set_flag(rpn.flag.F_DEBUG_ENABLED)
while True:
try:
(error, tok_list) = generate_token_list()
except StopIteration:
return
except TopLevel:
continue
if error is True:
rpn.globl.lnwriteln("main_loop: Parse error: Could not get next token")
s = " ".join([t.value for t in tok_list])
dbg("parse", 1, "s='{}'".format(s))
rpn.globl.eval_string(s)
def end_program():
if rpn.globl.sharpout.obj.value != 0:
rpn.globl.writeln()
if not rpn.globl.string_stack.empty():
if rpn.globl.string_stack.size() == 1:
rpn.word.w_dollar_dot('$.')
rpn.word.w_cr('cr')
else:
rpn.globl.lnwriteln("Strings:")
rpn.word.w_dollar_dot_s('$.s')
if not rpn.globl.param_stack.empty():
if rpn.globl.param_stack.size() == 1:
rpn.word.w_dot('.')
rpn.word.w_cr('cr')
else:
rpn.globl.lnwriteln("Stack:")
rpn.word.w_dot_s('.s')
def generate_token_list():
'''Returns a tuple (flag, list)
flag is True if initial parse error, False if no error'''
initial_parse_error = False
rpn.globl.parse_stack.clear()
tok_list = []
depth = {
'BRACKET' : 0,
'PAREN' : 0
}
while True:
# Get next token
tok = next(rpn.util.TokenMgr.next_token())
dbg("token", 1, "token({},{})".format(tok.type, repr(tok.value)))
# See if it's an immediate word; if so, call it
if tok.type == 'IDENTIFIER':
(word, _) = rpn.globl.lookup_word(tok.value)
if word is not None and word.immediate():
dbg("token", 3, "Word {} is immediate, calling...".format(word))
word.__call__(word.name)
continue
tok_list.append(tok)
# These need a second token or they will be very angry
elif tok.type in ['AT_SIGN', 'CATCH', 'CONSTANT', 'EXCLAM', 'FORGET',
'HELP', 'HIDE', 'SHOW', 'UNDEF', 'VARIABLE' ]:
rpn.globl.parse_stack.push(tok.type)
try:
tok2 = next(rpn.util.TokenMgr.next_token())
dbg("token", 1, "token({},{})".format(tok2.type, repr(tok2.value)))
except StopIteration:
initial_parse_error = True
dbg("token", 1, "{}: No more tokens, exiting".format(tok.type))
break
finally:
tok_list.append(tok)
rpn.globl.parse_stack.pop()
tok_list.append(tok2)
elif tok.type in ['OPEN_BRACKET', 'CLOSE_BRACKET',
'OPEN_PAREN', 'CLOSE_PAREN']:
tok_list.append(tok)
# borp == "bracket or paren"
(open_close, borp) = tok.type.split("_")
#print("borp={}".format(borp))
if borp == 'PAREN':
c = '('
elif borp == 'BRACKET':
c = '['
if open_close == 'OPEN':
if borp == 'PAREN' and depth[borp] > 0:
rpn.globl.lnwriteln("{}: Embedded {} not allowed".format(tok.type, c))
initial_parse_error = True
else:
rpn.globl.parse_stack.push(c)
depth[borp] += 1
if open_close == 'CLOSE':
if rpn.globl.parse_stack.empty() or \
borp == 'BRACKET' and rpn.globl.parse_stack.top() != c or \
borp == 'PAREN' and rpn.globl.parse_stack.top() != '(,':
rpn.globl.lnwriteln("{}: {} lost".format(tok.type, c))
initial_parse_error = True
else:
rpn.globl.parse_stack.pop()
depth[borp] -= 1
elif tok.type == 'COMMA':
if rpn.globl.parse_stack.empty() or rpn.globl.parse_stack.top() != '(':
rpn.globl.lnwriteln("{}: no matching (".format(tok.type))
initial_parse_error = True
else:
tok_list.append(tok)
rpn.globl.parse_stack.pop()
rpn.globl.parse_stack.push('(,')
elif tok.type in ['BEGIN', 'CASE', 'COLON', 'DO', 'IF']:
tok_list.append(tok)
rpn.globl.parse_stack.push(tok.type)
elif tok.type in ['AGAIN', 'UNTIL']:
if rpn.globl.parse_stack.empty() or rpn.globl.parse_stack.top() != 'BEGIN':
rpn.globl.lnwriteln("{}: no matching BEGIN".format(tok.type))
initial_parse_error = True
else:
tok_list.append(tok)
rpn.globl.parse_stack.pop()
elif tok.type == 'ELSE':
if rpn.globl.parse_stack.empty() or rpn.globl.parse_stack.top() != 'IF':
rpn.globl.lnwriteln("ELSE: no matching IF")
initial_parse_error = True
else:
tok_list.append(tok)
rpn.globl.parse_stack.pop()
rpn.globl.parse_stack.push(tok.type)
elif tok.type == 'ENDCASE':
if rpn.globl.parse_stack.empty() or rpn.globl.parse_stack.top() not in ['CASE', 'OTHERWISE']:
rpn.globl.lnwriteln("ENDCASE: no matching CASE")
initial_parse_error = True
else:
tok_list.append(tok)
rpn.globl.parse_stack.pop()
elif tok.type == 'ENDOF':
if rpn.globl.parse_stack.empty() or rpn.globl.parse_stack.top() != 'OF':
rpn.globl.lnwriteln("ENDOF: no matching OF")
initial_parse_error = True
else:
tok_list.append(tok)
rpn.globl.parse_stack.pop()
rpn.globl.parse_stack.push('CASE')
elif tok.type == 'ERROR':
rpn.globl.lnwriteln("ERROR {}".format(tok))
initial_parse_error = True
elif tok.type in ['LOOP', 'PLUS_LOOP']:
if rpn.globl.parse_stack.empty() or rpn.globl.parse_stack.top() != 'DO':
rpn.globl.lnwriteln("{}: no matching DO".format(tok.type))
initial_parse_error = True
else:
tok_list.append(tok)
rpn.globl.parse_stack.pop()
elif tok.type in ['OF', 'OTHERWISE']:
if rpn.globl.parse_stack.empty() or rpn.globl.parse_stack.top() != 'CASE':
rpn.globl.lnwriteln("{}: no matching CASE".format(tok.type))
initial_parse_error = True
else:
tok_list.append(tok)
rpn.globl.parse_stack.pop()
rpn.globl.parse_stack.push(tok.type)
elif tok.type == 'REPEAT':
if rpn.globl.parse_stack.empty() or rpn.globl.parse_stack.top() != 'WHILE':
rpn.globl.lnwriteln("REPEAT: no matching WHILE")
initial_parse_error = True
else:
tok_list.append(tok)
rpn.globl.parse_stack.pop()
elif tok.type == 'SEMICOLON':
if rpn.globl.parse_stack.empty() or rpn.globl.parse_stack.top() != 'COLON':
rpn.globl.lnwriteln("SEMICOLON: no matching COLON")
initial_parse_error = True
else:
tok_list.append(tok)
rpn.globl.parse_stack.pop()
elif tok.type == 'THEN':
if rpn.globl.parse_stack.empty() or rpn.globl.parse_stack.top() not in ['IF', 'ELSE']:
rpn.globl.lnwriteln("THEN: no matching IF")
initial_parse_error = True
else:
tok_list.append(tok)
rpn.globl.parse_stack.pop()
elif tok.type == 'WHILE':
if rpn.globl.parse_stack.empty() or rpn.globl.parse_stack.top() != 'BEGIN':
rpn.globl.lnwriteln("WHILE: no matching BEGIN")
initial_parse_error = True
else:
tok_list.append(tok)
rpn.globl.parse_stack.pop()
rpn.globl.parse_stack.push(tok.type)
else:
# 'ABORT_QUOTE',
# 'DOC_STR',
# 'DOT_QUOTE',
# 'VBAR',
# 'WS',
tok_list.append(tok)
# Here's what breaks the while True loop, sauf StopIteration
if rpn.globl.parse_stack.empty() and depth['PAREN'] == 0 and depth['BRACKET'] == 0:
break
return (initial_parse_error, tok_list)
# A simple SIGWINCH handler can become overwhelmed and crash if window-size
# changes arrive too quickly.  Consider using shutil.get_terminal_size()
# instead; a commented sketch follows the handler below.
def sigwinch_handler(_signum, _frame):
rpn.globl.update_screen_size()
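# A commented sketch of the shutil-based alternative suggested above.  It avoids
# shelling out to stty(1); how the values end up in ROWS/COLS is left to
# rpn.globl, so the body below is illustrative only.
#
#   import shutil
#   def sigwinch_handler_shutil(_signum, _frame):
#       size = shutil.get_terminal_size(fallback=(80, 24))   # (columns, lines)
#       rows, cols = size.lines, size.columns
#       # hand rows/cols to the globals layer, e.g. inside update_screen_size()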
def sigint_handler(_signam, _frame):
rpn.globl.sigint_detected = True
# It is NOT safe to do I/O inside a signal handler.
# Can crash with error:
# RuntimeError: reentrant call inside <_io.BufferedWriter name='<stdout>'>
# sys.stderr.write("^C")
# sys.stderr.flush()
# rpn.globl.eval_string("?cr")
throw(X_INTERRUPT)
def sigquit_handler(_signum, _frame):
rpn.globl.lnwriteln("[Quit]")
raise EndProgram()
# def example_pre_hook_func(ident, cur_obj, new_obj):
# print("example_pre_hook_func:")
# print("ident ={}".format(ident))
# print("cur_obj={}".format(repr(cur_obj)))
# print("new_obj={}".format(repr(new_obj)))
# # Check against None first due to undef case
# if new_obj is not None and new_obj.value < 0:
# throw(X_INVALID_ARG, "!{}".format(identifier), "Must be positive")
#
# def example_post_hook_func(ident, old_obj, cur_obj):
# print("example_post_hook_func:")
# print("ident ={}".format(ident))
# print("old_obj={}".format(repr(old_obj)))
# print("cur_obj={}".format(repr(cur_obj)))
def pre_require_int(identifier, _cur, new):
if type(new) is not rpn.type.Integer:
throw(X_ARG_TYPE_MISMATCH, "!{}".format(identifier), "({})".format(typename(new)))
def pre_require_int_or_float(identifier, _cur, new):
if type(new) not in [rpn.type.Integer, rpn.type.Float]:
throw(X_ARG_TYPE_MISMATCH, "!{}".format(identifier), "({})".format(typename(new)))
def pre_require_positive(identifier, _cur, new):
if new.value <= 0:
throw(X_INVALID_ARG, "!{}".format(identifier), "Must be positive")
def pre_require_non_negative(identifier, _cur, new):
if new.value < 0:
throw(X_INVALID_ARG, "!{}".format(identifier), "Must be non-negative")
def pre_validate_Sreg_arg(identifier, _cur, new):
if type(new) is not rpn.type.Integer:
throw(X_ARG_TYPE_MISMATCH, "!{}".format(identifier), "({})".format(typename(new)))
new_Sreg = new.value
(reg_size, _) = rpn.globl.lookup_variable("SIZE")
if new_Sreg < 0 or new_Sreg > reg_size.obj.value - 6:
throw(X_INVALID_ARG, "!{}".format(identifier), "Sreg {} out of range (0..{} expected); check SIZE".format(new_Sreg, reg_size.obj.value - 6))
def pre_validate_size_arg(identifier, _cur, new):
if type(new) is not rpn.type.Integer:
throw(X_ARG_TYPE_MISMATCH, "!{}".format(identifier), "({})".format(typename(new)))
new_size = new.value
if new_size < rpn.globl.REG_SIZE_MIN or new_size > rpn.globl.REG_SIZE_MAX:
throw(X_INVALID_ARG, "!{}".format(identifier), "Size {} out of range ({}..{} expected)".format(new_size, rpn.globl.REG_SIZE_MIN, rpn.globl.REG_SIZE_MAX))
(reg_Sreg, _) = rpn.globl.lookup_variable("Sreg")
if new_size < reg_Sreg.obj.value + 6:
throw(X_INVALID_ARG, "!{}".format(identifier), "Size {} too small for Sreg ({})".format(new_size, reg_Sreg.obj.value))
def post_clear_newly_unveiled_registers(_identifier, old, cur):
old_size = old.value
cur_size = cur.value
# If we're increasing the number of registers, zero out the newly
# available ones. It is not really necessary to do this when
# decreasing, because those registers will no longer be accessible.
if cur_size > old_size:
for r in range(cur_size - old_size):
rpn.globl.register[old_size + r] = rpn.type.Float(0.0)
def post_label_with_identifier(identifier, _old, cur):
cur.label = identifier
|
[
"getopt.getopt",
"os.path.join",
"rpn.debug.typename",
"os.path.isfile",
"random.seed",
"signal.signal",
"sys.setrecursionlimit",
"os.path.expanduser",
"sys.exit"
] |
[((1384, 1396), 'sys.exit', 'sys.exit', (['(64)'], {}), '(64)\n', (1392, 1396), False, 'import sys\n'), ((1646, 1673), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(2000)'], {}), '(2000)\n', (1667, 1673), False, 'import sys\n'), ((1697, 1710), 'random.seed', 'random.seed', ([], {}), '()\n', (1708, 1710), False, 'import random\n'), ((1939, 1983), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'sigint_handler'], {}), '(signal.SIGINT, sigint_handler)\n', (1952, 1983), False, 'import signal\n'), ((1990, 2036), 'signal.signal', 'signal.signal', (['signal.SIGQUIT', 'sigquit_handler'], {}), '(signal.SIGQUIT, sigquit_handler)\n', (2003, 2036), False, 'import signal\n'), ((2042, 2090), 'signal.signal', 'signal.signal', (['signal.SIGWINCH', 'sigwinch_handler'], {}), '(signal.SIGWINCH, sigwinch_handler)\n', (2055, 2090), False, 'import signal\n'), ((3096, 3126), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.rpnrc"""'], {}), "('~/.rpnrc')\n", (3114, 3126), False, 'import os\n'), ((3138, 3163), 'os.path.isfile', 'os.path.isfile', (['init_file'], {}), '(init_file)\n', (3152, 3163), False, 'import os\n'), ((7894, 7927), 'getopt.getopt', 'getopt.getopt', (['argv', '"""dDf:il:qQV"""'], {}), "(argv, 'dDf:il:qQV')\n", (7907, 7927), False, 'import getopt\n'), ((9269, 9287), 'os.path.isfile', 'os.path.isfile', (['fn'], {}), '(fn)\n', (9283, 9287), False, 'import os\n'), ((9325, 9343), 'os.path.isfile', 'os.path.isfile', (['fn'], {}), '(fn)\n', (9339, 9343), False, 'import os\n'), ((2409, 2446), 'os.path.join', 'os.path.join', (['rpndir', '"""secondary.rpn"""'], {}), "(rpndir, 'secondary.rpn')\n", (2421, 2446), False, 'import os\n'), ((2548, 2559), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2556, 2559), False, 'import sys\n'), ((2806, 2842), 'os.path.join', 'os.path.join', (['rpndir', '"""tertiary.rpn"""'], {}), "(rpndir, 'tertiary.rpn')\n", (2818, 2842), False, 'import os\n'), ((2944, 2955), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2952, 2955), False, 'import sys\n'), ((20589, 20602), 'rpn.debug.typename', 'typename', (['new'], {}), '(new)\n', (20597, 20602), False, 'from rpn.debug import dbg, typename\n'), ((20794, 20807), 'rpn.debug.typename', 'typename', (['new'], {}), '(new)\n', (20802, 20807), False, 'from rpn.debug import dbg, typename\n'), ((21281, 21294), 'rpn.debug.typename', 'typename', (['new'], {}), '(new)\n', (21289, 21294), False, 'from rpn.debug import dbg, typename\n'), ((21751, 21764), 'rpn.debug.typename', 'typename', (['new'], {}), '(new)\n', (21759, 21764), False, 'from rpn.debug import dbg, typename\n'), ((9184, 9195), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (9192, 9195), False, 'import sys\n')]
|
"""Main application initilization."""
import os.path
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
BOOK_PATH = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', 'books'))
# Make sure to add your own secret key in config.py
SECRET_KEY = "<KEY>"
# Environment variable for config file name
ENV_KEY = 'LINGA_CONFIG_FILE'
CONFIG_FILE = os.environ[ENV_KEY] if os.environ.get(ENV_KEY) else '../config.py'
SQLALCHEMY_TRACK_MODIFICATIONS = False
app = Flask(__name__) #pylint: disable=invalid-name
app.config.from_object(__name__)
app.config.from_pyfile(CONFIG_FILE, silent=True)
db = SQLAlchemy(app) #pylint: disable=invalid-name
login_manager = LoginManager() #pylint: disable=invalid-name
# Try to accommodate old versions of flask-login
try:
login_manager.init_app(app)
except Exception as ex: #pylint: disable=broad-except
login_manager.setup_app(app)
def get_config(key=''):
"""Get a key value from the app config, or the entire config if no key given."""
if key:
return app.config[key]
return app.config
@app.after_request
def after_request(req):
return req
|
[
"flask_sqlalchemy.SQLAlchemy",
"flask_login.LoginManager",
"flask.Flask"
] |
[((530, 545), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (535, 545), False, 'from flask import Flask\n'), ((668, 683), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (678, 683), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((733, 747), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (745, 747), False, 'from flask_login import LoginManager\n')]
|
# Copyright 2021 Arm Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
import numpy as np
from models.quantize_utils import fake_quant_with_min_max_vars_per_channel, fake_quant_with_min_max_vars, compute_ranges
##############################
## LINEAR BLOCK DEFINITIONS ##
##############################
#EXPANDED Linear block
class LinearBlock_e(tf.keras.layers.Layer):
def __init__(self,
in_filters: int,
num_inner_layers: int,
kernel_size: int,
padding: str,
out_filters: int,
feature_size: int,
quant_W: bool,
mode: str):
super().__init__()
"""
Expanded linear block. Input --> 3x3 Conv to expand number of channels
to 'feature_size' --> 1x1 Conv to project channels into 'out_filters'.
At inference time, this can be analytically collapsed into a single,
small 3x3 Conv layer. See also the LinearBlock_c class which is a
very efficient method to train linear blocks without any loss in
image quality.
"""
assert not quant_W, 'expanded linear block not compatible with w quant'
def conv2d(filters: int, kernel_size_: int) -> tf.keras.layers.Layer:
return tf.keras.layers.Conv2D(filters=filters, kernel_size=kernel_size_, padding=padding)
layers = []
for _ in range(num_inner_layers):
layers.extend([conv2d(filters=feature_size, kernel_size_=kernel_size)])
layers.append(conv2d(filters=out_filters, kernel_size_=1))
self.block = tf.keras.Sequential(layers)
self.mode = mode
def call(self, inputs, training=None, mask=None):
return self.block(inputs, training=training)
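# Illustrative numerical check of the collapse claim in the docstring above (not
# part of the original model code; assumes TF2-style eager execution): a kxk conv
# followed by a 1x1 conv, with no bias or activation, is equivalent to a single
# kxk conv whose kernel contracts the two weight tensors over the shared channel axis.
#
#   x  = tf.random.normal([1, 8, 8, 4])
#   w1 = tf.random.normal([3, 3, 4, 16])             # expansion kernel
#   w2 = tf.random.normal([1, 1, 16, 5])             # 1x1 projection kernel
#   y_two  = tf.nn.conv2d(tf.nn.conv2d(x, w1, 1, "SAME"), w2, 1, "SAME")
#   w_coll = tf.einsum("abif,xyfo->abio", w1, w2)    # collapsed 3x3 kernel
#   y_one  = tf.nn.conv2d(x, w_coll, 1, "SAME")
#   np.testing.assert_allclose(y_two.numpy(), y_one.numpy(), atol=1e-3)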
#COLLAPSED Linear block
class LinearBlock_c(tf.keras.layers.Layer):
def __init__(self,
in_filters: int,
num_inner_layers: int,
kernel_size: int,
padding: str,
out_filters: int,
feature_size: int,
quant_W: bool,
mode: str):
tf.keras.layers.Layer.__init__(self)
"""
This is a simulated linear block in the train path. The idea is to collapse
linear block at each training step to speed up the forward pass. The backward
pass still updates all the expanded weights.
After training is completed, the weight generation ops are replaced by
a tf.constant at pb/tflite generation time.
----------------------------------------------------------------
| padded_identity |
| | |
| conv1x1(inCh, r*inCh) [optional] |
| | |
| convkxk(r*inCh, r*inCh) |
| | |
| conv1x1(r*inCh, outCh) |
| | |
| simulating residual: identity -> + |
| (or) padded_conv1x1_wt | (weight_tensor generated)|
----------------------------------------------------------------
|
input_tensor -> Actual convkxk(inCh, outCh)
|
Final output
"""
def conv2d(filters: int, kernel_size_: int, padding_: str) -> tf.keras.layers.Layer:
return tf.keras.layers.Conv2D(filters=filters, kernel_size=kernel_size_, padding=padding_)
# Params
self.in_filters = in_filters
self.out_filters = out_filters
self.feature_size = feature_size
self.quant_W = quant_W
self.mode = mode
# If num_inner_layers > 1, then use another conv1x1 at the beginning
onebyone = True if num_inner_layers > 1 else False
# expansion with kx,ky kernel and then project to out_filters using 1x1
kernel_size = [kernel_size, kernel_size]
self.kx, self.ky = kernel_size
# Learnable Collapse Conv's
conv1 = conv2d(feature_size, [1, 1], "valid")
conv2 = conv2d(feature_size, kernel_size, "valid")
conv3 = conv2d(out_filters, [1, 1], "valid")
self.collapsed_weights = None
# Define Collapse Block
if onebyone:
self.collapse = tf.keras.Sequential([conv1, conv2, conv3])
else:
self.collapse = tf.keras.Sequential([conv2, conv3])
if self.mode == 'train':
self.fake_quant_with_min_max_vars_per_channel_fn = \
fake_quant_with_min_max_vars_per_channel
elif self.mode == 'infer':
self.fake_quant_with_min_max_vars_per_channel_fn = \
tf.quantization.fake_quant_with_min_max_vars_per_channel
def build(self, input_shapes):
# shape: (in_filters,in_filters)
delta = tf.eye(self.in_filters)
# expanded shape:(in_filters, 1, 1, in_filters)
delta = tf.expand_dims(tf.expand_dims(delta, 1), 1)
# padded shape: (in_filters, kx, ky, in_filters)
delta = tf.pad(delta, paddings=[[0, 0], [self.kx - 1, self.kx - 1], [self.ky - 1, self.ky - 1], [0, 0]])
# Ensure the Value isn't trainable
self.delta = tf.Variable(initial_value=delta, trainable=False, dtype=tf.float32)
if self.quant_W:
self.wt_quant_min = self.add_weight(
name='wt_quant_min',
shape=(self.out_filters,),
trainable=True)
self.wt_quant_max = self.add_weight(
name='wt_quant_max',
shape=(self.out_filters,),
trainable=True)
if self.mode == "train":
self.wt_quant_initialized = tf.Variable(False, trainable=False)
# Calculate Residual
kernel_dim = [self.kx, self.ky, self.in_filters, self.out_filters]
residual = np.zeros(kernel_dim, dtype=np.float32)
if self.in_filters == self.out_filters:
mid_kx = int(self.kx / 2)
mid_ky = int(self.ky / 2)
for out_ch in range(self.out_filters):
residual[mid_kx, mid_ky, out_ch, out_ch] = 1.0
# Ensure the Value isn't trainable
self.residual = tf.Variable(initial_value=residual, trainable=False, dtype=tf.float32)
def init_wt_quant_ranges(self, kernel: tf.Tensor) -> None:
quant_max, quant_min = compute_ranges(kernel, per_channel=True, symmetric=True)
self.wt_quant_max.assign(quant_max)
self.wt_quant_min.assign(quant_min)
self.wt_quant_initialized.assign(True)
def call(self, inputs):
if self.mode == "train" or (self.collapsed_weights is None):
# Run Through Conv2D's - online linear collapse
wt_tensor = self.collapse(self.delta)
# reverse order of elements in 1,2 axes
wt_tensor = tf.reverse(wt_tensor, tf.constant([1, 2]))
# (in_filters, kx, ky, out_filters) -> (kx, ky, in_filters, out_filters)
wt_tensor = tf.transpose(wt_tensor, [1, 2, 0, 3])
# Direct-residual addition
# when in_filters != self.out_filters, this is just zeros
wt_tensor += self.residual
if self.mode == "infer":
# store collapsed weights in the first inferece, won't need to collapse again
self.collapsed_weights = tf.Variable(
initial_value=wt_tensor,
trainable=False,
dtype=tf.float32)
# remove references to uncollapsed variables
self.collapse = None
else:
# use pre-collapsed weights
wt_tensor = self.collapsed_weights
if self.mode == "train":
if self.quant_W:
if not self.wt_quant_initialized:
self.init_wt_quant_ranges(wt_tensor)
elif self.mode == "infer":
pass
else:
assert False, self.mode
if self.quant_W:
wt_tensor = self.fake_quant_with_min_max_vars_per_channel_fn(
wt_tensor,
min=self.wt_quant_min,
max=self.wt_quant_max,
num_bits=8,
narrow_range=True)
# Output - the actual conv2d
out = tf.nn.conv2d(inputs, wt_tensor, strides=[1, 1, 1, 1], padding="SAME")
return out
|
[
"tensorflow.keras.layers.Conv2D",
"tensorflow.pad",
"tensorflow.eye",
"numpy.zeros",
"models.quantize_utils.compute_ranges",
"tensorflow.transpose",
"tensorflow.constant",
"tensorflow.Variable",
"tensorflow.nn.conv2d",
"tensorflow.keras.Sequential",
"tensorflow.keras.layers.Layer.__init__",
"tensorflow.expand_dims"
] |
[((2255, 2282), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', (['layers'], {}), '(layers)\n', (2274, 2282), True, 'import tensorflow as tf\n'), ((2789, 2825), 'tensorflow.keras.layers.Layer.__init__', 'tf.keras.layers.Layer.__init__', (['self'], {}), '(self)\n', (2819, 2825), True, 'import tensorflow as tf\n'), ((5867, 5890), 'tensorflow.eye', 'tf.eye', (['self.in_filters'], {}), '(self.in_filters)\n', (5873, 5890), True, 'import tensorflow as tf\n'), ((6082, 6183), 'tensorflow.pad', 'tf.pad', (['delta'], {'paddings': '[[0, 0], [self.kx - 1, self.kx - 1], [self.ky - 1, self.ky - 1], [0, 0]]'}), '(delta, paddings=[[0, 0], [self.kx - 1, self.kx - 1], [self.ky - 1, \n self.ky - 1], [0, 0]])\n', (6088, 6183), True, 'import tensorflow as tf\n'), ((6244, 6311), 'tensorflow.Variable', 'tf.Variable', ([], {'initial_value': 'delta', 'trainable': '(False)', 'dtype': 'tf.float32'}), '(initial_value=delta, trainable=False, dtype=tf.float32)\n', (6255, 6311), True, 'import tensorflow as tf\n'), ((6916, 6954), 'numpy.zeros', 'np.zeros', (['kernel_dim'], {'dtype': 'np.float32'}), '(kernel_dim, dtype=np.float32)\n', (6924, 6954), True, 'import numpy as np\n'), ((7263, 7333), 'tensorflow.Variable', 'tf.Variable', ([], {'initial_value': 'residual', 'trainable': '(False)', 'dtype': 'tf.float32'}), '(initial_value=residual, trainable=False, dtype=tf.float32)\n', (7274, 7333), True, 'import tensorflow as tf\n'), ((7429, 7485), 'models.quantize_utils.compute_ranges', 'compute_ranges', (['kernel'], {'per_channel': '(True)', 'symmetric': '(True)'}), '(kernel, per_channel=True, symmetric=True)\n', (7443, 7485), False, 'from models.quantize_utils import fake_quant_with_min_max_vars_per_channel, fake_quant_with_min_max_vars, compute_ranges\n'), ((9403, 9472), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['inputs', 'wt_tensor'], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(inputs, wt_tensor, strides=[1, 1, 1, 1], padding='SAME')\n", (9415, 9472), True, 'import tensorflow as tf\n'), ((1937, 2024), 'tensorflow.keras.layers.Conv2D', 'tf.keras.layers.Conv2D', ([], {'filters': 'filters', 'kernel_size': 'kernel_size_', 'padding': 'padding'}), '(filters=filters, kernel_size=kernel_size_, padding=\n padding)\n', (1959, 2024), True, 'import tensorflow as tf\n'), ((4407, 4495), 'tensorflow.keras.layers.Conv2D', 'tf.keras.layers.Conv2D', ([], {'filters': 'filters', 'kernel_size': 'kernel_size_', 'padding': 'padding_'}), '(filters=filters, kernel_size=kernel_size_, padding=\n padding_)\n', (4429, 4495), True, 'import tensorflow as tf\n'), ((5314, 5356), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', (['[conv1, conv2, conv3]'], {}), '([conv1, conv2, conv3])\n', (5333, 5356), True, 'import tensorflow as tf\n'), ((5399, 5434), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', (['[conv2, conv3]'], {}), '([conv2, conv3])\n', (5418, 5434), True, 'import tensorflow as tf\n'), ((5979, 6003), 'tensorflow.expand_dims', 'tf.expand_dims', (['delta', '(1)'], {}), '(delta, 1)\n', (5993, 6003), True, 'import tensorflow as tf\n'), ((8079, 8116), 'tensorflow.transpose', 'tf.transpose', (['wt_tensor', '[1, 2, 0, 3]'], {}), '(wt_tensor, [1, 2, 0, 3])\n', (8091, 8116), True, 'import tensorflow as tf\n'), ((6749, 6784), 'tensorflow.Variable', 'tf.Variable', (['(False)'], {'trainable': '(False)'}), '(False, trainable=False)\n', (6760, 6784), True, 'import tensorflow as tf\n'), ((7948, 7967), 'tensorflow.constant', 'tf.constant', (['[1, 2]'], {}), '([1, 2])\n', (7959, 7967), True, 'import tensorflow as tf\n'), ((8451, 8522), 
'tensorflow.Variable', 'tf.Variable', ([], {'initial_value': 'wt_tensor', 'trainable': '(False)', 'dtype': 'tf.float32'}), '(initial_value=wt_tensor, trainable=False, dtype=tf.float32)\n', (8462, 8522), True, 'import tensorflow as tf\n')]
|
import random
from LogicClass.MonteCarloClass.ArborescenteTree import ArborescenteTree
class MonteCarloMove:
def __init__(self, isInspector, isPhantom, numberOfRoom):
self.tree = ArborescenteTree()
self.isPhantom = isPhantom
self.isInspector = isInspector
self.numberOfRoom = numberOfRoom
def wiseMovePhantom(self, player, roomList):
roomPossibilities = random.sample(range(0, self.numberOfRoom), self.numberOfRoom - (int(self.numberOfRoom / 2)))
for roomIndex in roomPossibilities:
if (roomList[roomIndex].isOn() == False and len(roomList[roomIndex].getPlayers()) > 1):
self.tree.addPossibilities(roomList[roomIndex].id, value=30)
elif (roomList[roomIndex].isOn() == False):
self.tree.addPossibilities(roomList[roomIndex].id, value=15)
elif (len(roomList[roomIndex].getPlayers()) == 0):
self.tree.addPossibilities(roomList[roomIndex].id, value=10)
else:
self.tree.addPossibilities(roomList[roomIndex].id)
roomId = self.tree.chooseLeafMove(self.tree.headCell)
self.tree.headCell.childCell = []
if (roomId != None):
player.playerMove(roomList[roomId])
return
player.playerMove(roomList[random.randint(0, self.numberOfRoom - 1)])
def wiseMoveInspector(self, player, roomList):
if (player.monteCarloInspector == None):
value = random.randint(0, self.numberOfRoom - 1)
player.playerMove(roomList[value])
return
roomPossibilities = random.sample(range(0, self.numberOfRoom), self.numberOfRoom - (int(self.numberOfRoom / 2)))
for roomIndex in roomPossibilities:
for playerInTheRoom in roomList[roomIndex].getPlayers():
value = player.monteCarloInspector.tree.checkPresenceInTheNodeMove(playerInTheRoom.id, player.monteCarloInspector.tree.headCell.childCell, value=0)
if (value[0] == True):
self.tree.addPossibilities(roomIndex, value[1])
else:
self.tree.addPossibilities(roomIndex, value=1)
roomId = self.tree.chooseLeafMove(self.tree.headCell)
self.tree.headCell.childCell = []
if (roomId != None):
roomList[roomId].switchOnTheLight()
player.playerMove(roomList[roomId])
return
value = random.randint(0, self.numberOfRoom - 1)
roomList[value].switchOnTheLight()
player.playerMove(roomList[value])
def wiseMoveCharacter(self, player, roomList):
roomPossibilities = random.sample(range(0, self.numberOfRoom), int(self.numberOfRoom / 2))
for roomIndex in roomPossibilities:
if (roomList[roomIndex].isRunningJob() == True):
self.tree.addPossibilities(roomList[roomIndex].id, value=10)
else:
self.tree.addPossibilities(roomList[roomIndex].id)
roomId = self.tree.chooseLeafMove(self.tree.headCell)
self.tree.headCell.childCell = []
if (roomId != None):
roomList[roomId].switchOnTheLight()
player.playerMove(roomList[roomId])
return
value = random.randint(0, self.numberOfRoom - 1)
roomList[value].switchOnTheLight()
player.playerMove(roomList[value])
def wiseMove(self, player, roomList, specialTurn):
if (self.isPhantom == True and specialTurn == True):
self.wiseMovePhantom(player, roomList)
return
if (self.isInspector == True and specialTurn == True):
self.wiseMoveInspector(player, roomList)
return
self.wiseMoveCharacter(player, roomList)
|
[
"random.randint",
"LogicClass.MonteCarloClass.ArborescenteTree.ArborescenteTree"
] |
[((193, 211), 'LogicClass.MonteCarloClass.ArborescenteTree.ArborescenteTree', 'ArborescenteTree', ([], {}), '()\n', (209, 211), False, 'from LogicClass.MonteCarloClass.ArborescenteTree import ArborescenteTree\n'), ((2441, 2481), 'random.randint', 'random.randint', (['(0)', '(self.numberOfRoom - 1)'], {}), '(0, self.numberOfRoom - 1)\n', (2455, 2481), False, 'import random\n'), ((3250, 3290), 'random.randint', 'random.randint', (['(0)', '(self.numberOfRoom - 1)'], {}), '(0, self.numberOfRoom - 1)\n', (3264, 3290), False, 'import random\n'), ((1476, 1516), 'random.randint', 'random.randint', (['(0)', '(self.numberOfRoom - 1)'], {}), '(0, self.numberOfRoom - 1)\n', (1490, 1516), False, 'import random\n'), ((1312, 1352), 'random.randint', 'random.randint', (['(0)', '(self.numberOfRoom - 1)'], {}), '(0, self.numberOfRoom - 1)\n', (1326, 1352), False, 'import random\n')]
|
"""
Tests for the Paver commands for updating test databases and its utility methods
"""
import os
import shutil
import tarfile
from tempfile import mkdtemp
from unittest import TestCase
from unittest.mock import call, patch, Mock
import boto
from pavelib import database
from pavelib.utils import db_utils
from pavelib.utils.db_utils import extract_files_from_zip
from pavelib.utils.envs import Env
from .utils import PaverTestCase
class TestPaverDbUtils(TestCase):
""" Tests for paver bokchoy database utils """
@patch('pavelib.utils.db_utils.verify_files_exist')
def test_extract_files_from_zip(self, _mock_verify):
test_dir = mkdtemp()
output_dir = mkdtemp()
self.addCleanup(shutil.rmtree, test_dir)
self.addCleanup(shutil.rmtree, output_dir)
tmp_file_name = os.path.join(test_dir, 'test.txt')
with open(tmp_file_name, 'w') as tmp_file:
tmp_file.write('Test file content')
tmp_tarfile = os.path.join(test_dir, 'test.tar.gz')
with tarfile.open(name=tmp_tarfile, mode='w:gz') as tar_file:
tar_file.add(tmp_file_name, arcname='test.txt')
extract_files_from_zip(['test.txt'], tmp_tarfile, output_dir)
extracted_file = os.path.join(output_dir, 'test.txt')
assert os.path.isfile(extracted_file)
with open(extracted_file) as test_file:
data = test_file.read()
assert data == 'Test file content'
def _write_temporary_db_cache_files(path, files):
"""
create some temporary files to act as the local db cache files so that
we can compute a fingerprint
"""
for index, filename in enumerate(files):
filepath = os.path.join(path, filename)
with open(filepath, 'w') as cache_file:
cache_file.write(str(index))
class TestPaverDatabaseTasks(PaverTestCase):
"""
Tests for the high level database tasks
"""
def setUp(self):
super().setUp()
# This value is the actual sha1 fingerprint calculated for the dummy
# files used in these tests
self.expected_fingerprint = '<PASSWORD>'
self.fingerprint_filename = f'{self.expected_fingerprint}.tar.gz'
self.bucket = Mock(name='test_bucket')
@patch.object(db_utils, 'CACHE_FOLDER', mkdtemp())
@patch.object(db_utils, 'FINGERPRINT_FILEPATH', os.path.join(mkdtemp(), 'fingerprint'))
@patch.object(db_utils, 'sh')
def test_load_data_from_local_cache(self, _mock_sh):
"""
Assuming that the computed db cache file fingerprint is the same as
the stored fingerprint, verify that we make a call to load data into
the database without running migrations
"""
self.addCleanup(shutil.rmtree, db_utils.CACHE_FOLDER)
self.addCleanup(os.remove, db_utils.FINGERPRINT_FILEPATH)
_write_temporary_db_cache_files(db_utils.CACHE_FOLDER, database.ALL_DB_FILES)
# write the local fingerprint file with the same value than the
# computed fingerprint
with open(db_utils.FINGERPRINT_FILEPATH, 'w') as fingerprint_file:
fingerprint_file.write(self.expected_fingerprint)
with patch.object(db_utils, 'get_file_from_s3', wraps=db_utils.get_file_from_s3) as _mock_get_file:
database.update_local_bokchoy_db_from_s3() # pylint: disable=no-value-for-parameter
# Make sure that the local cache files are used - NOT downloaded from s3
assert not _mock_get_file.called
calls = [
call(f'{Env.REPO_ROOT}/scripts/reset-test-db.sh --calculate_migrations'),
call(f'{Env.REPO_ROOT}/scripts/reset-test-db.sh --use-existing-db')
]
_mock_sh.assert_has_calls(calls)
@patch.object(database, 'CACHE_BUCKET_NAME', 'test_bucket')
@patch.object(db_utils, 'CACHE_FOLDER', mkdtemp())
@patch.object(db_utils, 'FINGERPRINT_FILEPATH', os.path.join(mkdtemp(), 'fingerprint'))
@patch.object(db_utils, 'sh')
def test_load_data_from_s3_fingerprint(self, _mock_sh):
"""
Assuming that the computed db cache file fingerprint is different
than the stored fingerprint AND there is a matching fingerprint file
in s3, verify that we make a call to load data into the database
without running migrations
"""
self.addCleanup(shutil.rmtree, db_utils.CACHE_FOLDER)
self.addCleanup(os.remove, db_utils.FINGERPRINT_FILEPATH)
_write_temporary_db_cache_files(db_utils.CACHE_FOLDER, database.ALL_DB_FILES)
# zip the temporary files and push them to s3 bucket
zipfile_path = os.path.join(db_utils.CACHE_FOLDER, self.fingerprint_filename)
with tarfile.open(name=zipfile_path, mode='w:gz') as tar_file:
for name in database.ALL_DB_FILES:
tar_file.add(os.path.join(db_utils.CACHE_FOLDER, name), arcname=name)
key = boto.s3.key.Key(bucket=self.bucket, name=self.fingerprint_filename)
key.set_contents_from_filename(zipfile_path, replace=False)
# write the local fingerprint file with a different value than
# the computed fingerprint
local_fingerprint = '1<PASSWORD>'
with open(db_utils.FINGERPRINT_FILEPATH, 'w') as fingerprint_file:
fingerprint_file.write(local_fingerprint)
with patch('boto.connect_s3', Mock(return_value=Mock())):
with patch.object(db_utils, 'get_file_from_s3') as _mock_get_file:
database.update_local_bokchoy_db_from_s3() # pylint: disable=no-value-for-parameter
# Make sure that the fingerprint file is downloaded from s3
_mock_get_file.assert_called_once_with(
'test_bucket', self.fingerprint_filename, db_utils.CACHE_FOLDER
)
calls = [
call(f'{Env.REPO_ROOT}/scripts/reset-test-db.sh --calculate_migrations'),
call(f'{Env.REPO_ROOT}/scripts/reset-test-db.sh --use-existing-db')
]
_mock_sh.assert_has_calls(calls)
@patch.object(database, 'CACHE_BUCKET_NAME', 'test_bucket')
@patch.object(db_utils, 'CACHE_FOLDER', mkdtemp())
@patch.object(db_utils, 'FINGERPRINT_FILEPATH', os.path.join(mkdtemp(), 'fingerprint'))
@patch.object(db_utils, 'sh')
def test_load_data_and_run_migrations(self, _mock_sh):
"""
Assuming that the computed db cache file fingerprint is different
than the stored fingerprint AND there is NO matching fingerprint file
in s3, verify that we make a call to load data into the database, run
migrations and update the local db cache files
"""
self.addCleanup(shutil.rmtree, db_utils.CACHE_FOLDER)
self.addCleanup(os.remove, db_utils.FINGERPRINT_FILEPATH)
_write_temporary_db_cache_files(db_utils.CACHE_FOLDER, database.ALL_DB_FILES)
# write the local fingerprint file with a different value than
# the computed fingerprint
local_fingerprint = '12<PASSWORD>6<PASSWORD>'
with open(db_utils.FINGERPRINT_FILEPATH, 'w') as fingerprint_file:
fingerprint_file.write(local_fingerprint)
database.update_local_bokchoy_db_from_s3() # pylint: disable=no-value-for-parameter
calls = [
call(f'{Env.REPO_ROOT}/scripts/reset-test-db.sh --calculate_migrations'),
call(f'{Env.REPO_ROOT}/scripts/reset-test-db.sh --rebuild_cache --use-existing-db')
]
_mock_sh.assert_has_calls(calls)
@patch.object(database, 'CACHE_BUCKET_NAME', 'test_bucket')
@patch.object(db_utils, 'CACHE_FOLDER', mkdtemp())
@patch.object(db_utils, 'FINGERPRINT_FILEPATH', os.path.join(mkdtemp(), 'fingerprint'))
@patch.object(db_utils, 'sh')
def test_updated_db_cache_pushed_to_s3(self, _mock_sh):
"""
Assuming that the computed db cache file fingerprint is different
than the stored fingerprint AND there is NO matching fingerprint file
in s3, verify that an updated fingeprint file is pushed to s3
"""
self.addCleanup(shutil.rmtree, db_utils.CACHE_FOLDER)
self.addCleanup(os.remove, db_utils.FINGERPRINT_FILEPATH)
_write_temporary_db_cache_files(db_utils.CACHE_FOLDER, database.ALL_DB_FILES)
# write the local fingerprint file with a different value than
# the computed fingerprint
local_fingerprint = '<PASSWORD>'
with open(db_utils.FINGERPRINT_FILEPATH, 'w') as fingerprint_file:
fingerprint_file.write(local_fingerprint)
database.update_local_bokchoy_db_from_s3() # pylint: disable=no-value-for-parameter
assert self.bucket.get_key(self.fingerprint_filename)
|
[
"unittest.mock.patch.object",
"pavelib.utils.db_utils.extract_files_from_zip",
"unittest.mock.Mock",
"pavelib.database.update_local_bokchoy_db_from_s3",
"unittest.mock.patch",
"unittest.mock.call",
"os.path.isfile",
"tempfile.mkdtemp",
"tarfile.open",
"os.path.join",
"boto.s3.key.Key"
] |
[((530, 580), 'unittest.mock.patch', 'patch', (['"""pavelib.utils.db_utils.verify_files_exist"""'], {}), "('pavelib.utils.db_utils.verify_files_exist')\n", (535, 580), False, 'from unittest.mock import call, patch, Mock\n'), ((2404, 2432), 'unittest.mock.patch.object', 'patch.object', (['db_utils', '"""sh"""'], {}), "(db_utils, 'sh')\n", (2416, 2432), False, 'from unittest.mock import call, patch, Mock\n'), ((3746, 3804), 'unittest.mock.patch.object', 'patch.object', (['database', '"""CACHE_BUCKET_NAME"""', '"""test_bucket"""'], {}), "(database, 'CACHE_BUCKET_NAME', 'test_bucket')\n", (3758, 3804), False, 'from unittest.mock import call, patch, Mock\n'), ((3957, 3985), 'unittest.mock.patch.object', 'patch.object', (['db_utils', '"""sh"""'], {}), "(db_utils, 'sh')\n", (3969, 3985), False, 'from unittest.mock import call, patch, Mock\n'), ((6046, 6104), 'unittest.mock.patch.object', 'patch.object', (['database', '"""CACHE_BUCKET_NAME"""', '"""test_bucket"""'], {}), "(database, 'CACHE_BUCKET_NAME', 'test_bucket')\n", (6058, 6104), False, 'from unittest.mock import call, patch, Mock\n'), ((6257, 6285), 'unittest.mock.patch.object', 'patch.object', (['db_utils', '"""sh"""'], {}), "(db_utils, 'sh')\n", (6269, 6285), False, 'from unittest.mock import call, patch, Mock\n'), ((7509, 7567), 'unittest.mock.patch.object', 'patch.object', (['database', '"""CACHE_BUCKET_NAME"""', '"""test_bucket"""'], {}), "(database, 'CACHE_BUCKET_NAME', 'test_bucket')\n", (7521, 7567), False, 'from unittest.mock import call, patch, Mock\n'), ((7720, 7748), 'unittest.mock.patch.object', 'patch.object', (['db_utils', '"""sh"""'], {}), "(db_utils, 'sh')\n", (7732, 7748), False, 'from unittest.mock import call, patch, Mock\n'), ((657, 666), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (664, 666), False, 'from tempfile import mkdtemp\n'), ((688, 697), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (695, 697), False, 'from tempfile import mkdtemp\n'), ((823, 857), 'os.path.join', 'os.path.join', (['test_dir', '"""test.txt"""'], {}), "(test_dir, 'test.txt')\n", (835, 857), False, 'import os\n'), ((980, 1017), 'os.path.join', 'os.path.join', (['test_dir', '"""test.tar.gz"""'], {}), "(test_dir, 'test.tar.gz')\n", (992, 1017), False, 'import os\n'), ((1158, 1219), 'pavelib.utils.db_utils.extract_files_from_zip', 'extract_files_from_zip', (["['test.txt']", 'tmp_tarfile', 'output_dir'], {}), "(['test.txt'], tmp_tarfile, output_dir)\n", (1180, 1219), False, 'from pavelib.utils.db_utils import extract_files_from_zip\n'), ((1246, 1282), 'os.path.join', 'os.path.join', (['output_dir', '"""test.txt"""'], {}), "(output_dir, 'test.txt')\n", (1258, 1282), False, 'import os\n'), ((1298, 1328), 'os.path.isfile', 'os.path.isfile', (['extracted_file'], {}), '(extracted_file)\n', (1312, 1328), False, 'import os\n'), ((1697, 1725), 'os.path.join', 'os.path.join', (['path', 'filename'], {}), '(path, filename)\n', (1709, 1725), False, 'import os\n'), ((2226, 2250), 'unittest.mock.Mock', 'Mock', ([], {'name': '"""test_bucket"""'}), "(name='test_bucket')\n", (2230, 2250), False, 'from unittest.mock import call, patch, Mock\n'), ((2296, 2305), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (2303, 2305), False, 'from tempfile import mkdtemp\n'), ((4628, 4690), 'os.path.join', 'os.path.join', (['db_utils.CACHE_FOLDER', 'self.fingerprint_filename'], {}), '(db_utils.CACHE_FOLDER, self.fingerprint_filename)\n', (4640, 4690), False, 'import os\n'), ((4909, 4976), 'boto.s3.key.Key', 'boto.s3.key.Key', ([], {'bucket': 'self.bucket', 'name': 
'self.fingerprint_filename'}), '(bucket=self.bucket, name=self.fingerprint_filename)\n', (4924, 4976), False, 'import boto\n'), ((3849, 3858), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (3856, 3858), False, 'from tempfile import mkdtemp\n'), ((7167, 7209), 'pavelib.database.update_local_bokchoy_db_from_s3', 'database.update_local_bokchoy_db_from_s3', ([], {}), '()\n', (7207, 7209), False, 'from pavelib import database\n'), ((6149, 6158), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (6156, 6158), False, 'from tempfile import mkdtemp\n'), ((8555, 8597), 'pavelib.database.update_local_bokchoy_db_from_s3', 'database.update_local_bokchoy_db_from_s3', ([], {}), '()\n', (8595, 8597), False, 'from pavelib import database\n'), ((7612, 7621), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (7619, 7621), False, 'from tempfile import mkdtemp\n'), ((1032, 1075), 'tarfile.open', 'tarfile.open', ([], {'name': 'tmp_tarfile', 'mode': '"""w:gz"""'}), "(name=tmp_tarfile, mode='w:gz')\n", (1044, 1075), False, 'import tarfile\n'), ((3183, 3258), 'unittest.mock.patch.object', 'patch.object', (['db_utils', '"""get_file_from_s3"""'], {'wraps': 'db_utils.get_file_from_s3'}), "(db_utils, 'get_file_from_s3', wraps=db_utils.get_file_from_s3)\n", (3195, 3258), False, 'from unittest.mock import call, patch, Mock\n'), ((3290, 3332), 'pavelib.database.update_local_bokchoy_db_from_s3', 'database.update_local_bokchoy_db_from_s3', ([], {}), '()\n', (3330, 3332), False, 'from pavelib import database\n'), ((3535, 3607), 'unittest.mock.call', 'call', (['f"""{Env.REPO_ROOT}/scripts/reset-test-db.sh --calculate_migrations"""'], {}), "(f'{Env.REPO_ROOT}/scripts/reset-test-db.sh --calculate_migrations')\n", (3539, 3607), False, 'from unittest.mock import call, patch, Mock\n'), ((3621, 3688), 'unittest.mock.call', 'call', (['f"""{Env.REPO_ROOT}/scripts/reset-test-db.sh --use-existing-db"""'], {}), "(f'{Env.REPO_ROOT}/scripts/reset-test-db.sh --use-existing-db')\n", (3625, 3688), False, 'from unittest.mock import call, patch, Mock\n'), ((2372, 2381), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (2379, 2381), False, 'from tempfile import mkdtemp\n'), ((4704, 4748), 'tarfile.open', 'tarfile.open', ([], {'name': 'zipfile_path', 'mode': '"""w:gz"""'}), "(name=zipfile_path, mode='w:gz')\n", (4716, 4748), False, 'import tarfile\n'), ((5835, 5907), 'unittest.mock.call', 'call', (['f"""{Env.REPO_ROOT}/scripts/reset-test-db.sh --calculate_migrations"""'], {}), "(f'{Env.REPO_ROOT}/scripts/reset-test-db.sh --calculate_migrations')\n", (5839, 5907), False, 'from unittest.mock import call, patch, Mock\n'), ((5921, 5988), 'unittest.mock.call', 'call', (['f"""{Env.REPO_ROOT}/scripts/reset-test-db.sh --use-existing-db"""'], {}), "(f'{Env.REPO_ROOT}/scripts/reset-test-db.sh --use-existing-db')\n", (5925, 5988), False, 'from unittest.mock import call, patch, Mock\n'), ((3925, 3934), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (3932, 3934), False, 'from tempfile import mkdtemp\n'), ((7282, 7354), 'unittest.mock.call', 'call', (['f"""{Env.REPO_ROOT}/scripts/reset-test-db.sh --calculate_migrations"""'], {}), "(f'{Env.REPO_ROOT}/scripts/reset-test-db.sh --calculate_migrations')\n", (7286, 7354), False, 'from unittest.mock import call, patch, Mock\n'), ((7368, 7461), 'unittest.mock.call', 'call', (['f"""{Env.REPO_ROOT}/scripts/reset-test-db.sh --rebuild_cache --use-existing-db"""'], {}), "(\n f'{Env.REPO_ROOT}/scripts/reset-test-db.sh --rebuild_cache --use-existing-db'\n )\n", (7372, 7461), False, 'from unittest.mock import call, 
patch, Mock\n'), ((6225, 6234), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (6232, 6234), False, 'from tempfile import mkdtemp\n'), ((7688, 7697), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (7695, 7697), False, 'from tempfile import mkdtemp\n'), ((5407, 5449), 'unittest.mock.patch.object', 'patch.object', (['db_utils', '"""get_file_from_s3"""'], {}), "(db_utils, 'get_file_from_s3')\n", (5419, 5449), False, 'from unittest.mock import call, patch, Mock\n'), ((5485, 5527), 'pavelib.database.update_local_bokchoy_db_from_s3', 'database.update_local_bokchoy_db_from_s3', ([], {}), '()\n', (5525, 5527), False, 'from pavelib import database\n'), ((4838, 4879), 'os.path.join', 'os.path.join', (['db_utils.CACHE_FOLDER', 'name'], {}), '(db_utils.CACHE_FOLDER, name)\n', (4850, 4879), False, 'import os\n'), ((5380, 5386), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (5384, 5386), False, 'from unittest.mock import call, patch, Mock\n')]
|
from functools import lru_cache
from typing import Optional
from pygame.freetype import get_default_font, SysFont
font_cache = {}
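# NOTE: unused in this module; caching is handled by functools.lru_cache below.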
@lru_cache(100)
def get_font(fontname: Optional[str] = None, size: int = 12, bold: bool = False, italic: bool = False):
if fontname is None:
fontname = get_default_font()
return SysFont(fontname, size, bold=bold, italic=italic)
|
[
"pygame.freetype.get_default_font",
"functools.lru_cache",
"pygame.freetype.SysFont"
] |
[((136, 150), 'functools.lru_cache', 'lru_cache', (['(100)'], {}), '(100)\n', (145, 150), False, 'from functools import lru_cache\n'), ((329, 378), 'pygame.freetype.SysFont', 'SysFont', (['fontname', 'size'], {'bold': 'bold', 'italic': 'italic'}), '(fontname, size, bold=bold, italic=italic)\n', (336, 378), False, 'from pygame.freetype import get_default_font, SysFont\n'), ((299, 317), 'pygame.freetype.get_default_font', 'get_default_font', ([], {}), '()\n', (315, 317), False, 'from pygame.freetype import get_default_font, SysFont\n')]
|
import requests
def get_event(user_key, latitude, longitude):
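    """Search Eventful for popular upcoming music events near (latitude, longitude).

    Returns the list of events from the JSON response, or the string "404" if
    no events were found.
    """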
url = "http://api.eventful.com/json/events/search?"
url += "&app_key=" + user_key
url += "&date=Future" #+ date
url += "&page_size=100"
url += "&sort_order=popularity"
url += "&sort_direction=descending"
url += "&q=music"
url += "&c=music"
url += "&where=" + latitude + "," + longitude + "&within=10&km"
data = requests.get(url).json()
if int(data["total_items"]) > 0:
return data["events"]["event"]
else:
return "404"
|
[
"requests.get"
] |
[((417, 434), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (429, 434), False, 'import requests\n')]
|
from typing import List, Text, Tuple
import logging
import re
import lxml
from .. import utils
MAIN_PAGE = "Wikiquote:Accueil"
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
def extract_quotes(tree: lxml.html.HtmlElement, max_quotes: int) -> List[Text]:
# French wiki uses a "citation" HTML class
nodes = tree.xpath('//div[@class="citation"]')
quotes = [utils.clean_txt(node.text_content()) for node in nodes]
return quotes[:max_quotes]
def qotd_old_method(html_tree: lxml.html.HtmlElement) -> Tuple[Text, Text]:
tree = html_tree.get_element_by_id("mf-cdj")
tree = tree.xpath("div/div")[1].xpath("table/tbody/tr/td")[1]
quote = tree.xpath("div/i")[0].text_content()
author = tree.xpath("div/a")[0].text_content()
return quote, author
def qotd_new_method(html_tree: lxml.html.HtmlElement) -> Tuple[Text, Text]:
tree = html_tree.get_element_by_id("mf-cdj")
lines = [
line.strip().replace(u"\xa0", " ") for line in tree.text_content().splitlines()
]
for line in lines:
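        # Expected line shape: «quote» - author; group 1 captures the quote, group 2 the author (leading dashes are stripped).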
matches = re.search(r"«(.+?)»(.+)", line)
if not matches:
continue
quote = matches.group(1).strip()
author = matches.group(2).strip("-—– \n")
return quote, author
raise Exception("Could not parse quote of the day from page contents.")
def qotd(html_tree: lxml.html.HtmlElement) -> Tuple[Text, Text]:
try:
return qotd_new_method(html_tree)
except Exception as e:
logger.warning("Could not extract French QOTD using new method due to: %s", e)
return qotd_old_method(html_tree)
|
[
"re.search",
"logging.getLogger",
"logging.NullHandler"
] |
[((141, 168), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (158, 168), False, 'import logging\n'), ((187, 208), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (206, 208), False, 'import logging\n'), ((1088, 1118), 're.search', 're.search', (['"""«(.+?)»(.+)"""', 'line'], {}), "('«(.+?)»(.+)', line)\n", (1097, 1118), False, 'import re\n')]
|
# Generated by Django 3.2.6 on 2022-02-14 13:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("database", "0060_set_ordering_on_tablewebhook_models"),
]
operations = [
migrations.AlterField(
model_name="numberfield",
name="number_decimal_places",
field=models.IntegerField(
choices=[
(0, "1"),
(1, "1.0"),
(2, "1.00"),
(3, "1.000"),
(4, "1.0000"),
(5, "1.00000"),
],
default=0,
help_text="The amount of digits allowed after the point.",
),
),
]
|
[
"django.db.models.IntegerField"
] |
[((374, 563), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(0, '1'), (1, '1.0'), (2, '1.00'), (3, '1.000'), (4, '1.0000'), (5, '1.00000')\n ]", 'default': '(0)', 'help_text': '"""The amount of digits allowed after the point."""'}), "(choices=[(0, '1'), (1, '1.0'), (2, '1.00'), (3, '1.000'\n ), (4, '1.0000'), (5, '1.00000')], default=0, help_text=\n 'The amount of digits allowed after the point.')\n", (393, 563), False, 'from django.db import migrations, models\n')]
|
from IPython import display
from torch.utils.data import DataLoader
from torchvision import transforms, datasets
import os
import tensorflow as tf
from tensorflow import nn, layers
from tensorflow.contrib import layers as clayers
import numpy as np
import errno
import torchvision.utils as vutils
from tensorboardX import SummaryWriter
from matplotlib import pyplot as plt
import torch
# Output Directory
OUTPUT_PATH = os.path.join(os.path.abspath(__file__+ "../../"), '.output/')
DATASET_PATH = os.path.join(os.path.abspath(__file__+ "../../"), '.dataset/CIFAR/')
if not os.path.exists(OUTPUT_PATH): os.makedirs(OUTPUT_PATH)
if not os.path.exists(DATASET_PATH): os.makedirs(DATASET_PATH)
def cifar_data():
compose = transforms.Compose([transforms.Resize(64),transforms.ToTensor(),transforms.Normalize((.5, .5, .5), (.5, .5, .5)),])
return datasets.CIFAR10(root=DATASET_PATH, train=True, download=True, transform=compose)
dataset = cifar_data()
batch_size = 100
dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True)
num_batches = len(dataloader)
IMAGES_SHAPE = (64, 64, 3)
NOISE_SIZE = 100
def default_conv2d(inputs, filters):
return layers.conv2d(
inputs,
filters=filters,
kernel_size=4,
strides=(2, 2),
padding='same',
data_format='channels_last',
use_bias=False,
)
def default_conv2d_transpose(inputs, filters):
return layers.conv2d_transpose(
inputs,
filters=filters,
kernel_size=4,
strides=(2, 2),
padding='same',
data_format='channels_last',
use_bias=False,
)
def noise(n_rows, n_cols):
return np.random.normal(size=(n_rows, n_cols))
def discriminator(x):
with tf.variable_scope("discriminator", reuse=tf.AUTO_REUSE):
with tf.variable_scope("conv1"):
conv1 = default_conv2d(x, 128)
conv1 = nn.leaky_relu(conv1,alpha=0.2)
with tf.variable_scope("conv2"):
conv2 = default_conv2d(conv1, 256)
conv2 = layers.batch_normalization(conv2)
conv2 = nn.leaky_relu(conv2,alpha=0.2)
with tf.variable_scope("conv3"):
conv3 = default_conv2d(conv2, 512)
conv3 = layers.batch_normalization(conv3)
conv3 = nn.leaky_relu(conv3,alpha=0.2)
with tf.variable_scope("conv4"):
conv4 = default_conv2d(conv3, 1024)
            conv4 = layers.batch_normalization(conv4)
            conv4 = nn.leaky_relu(conv4,alpha=0.2)
with tf.variable_scope("linear"):
linear = clayers.flatten(conv4)
linear = clayers.fully_connected(linear, 1)
with tf.variable_scope("out"):
out = nn.sigmoid(linear)
return out
def generator(z):
with tf.variable_scope("generator", reuse=tf.AUTO_REUSE):
with tf.variable_scope("linear"):
linear = clayers.fully_connected(z, 1024 * 4 * 4)
with tf.variable_scope("conv1_transp"):
# Reshape as 4x4 images
conv1 = tf.reshape(linear, (-1, 4, 4, 1024))
conv1 = default_conv2d_transpose(conv1, 512)
conv1 = layers.batch_normalization(conv1)
conv1 = nn.relu(conv1)
with tf.variable_scope("conv2_transp"):
conv2 = default_conv2d_transpose(conv1, 256)
conv2 = layers.batch_normalization(conv2)
conv2 = nn.relu(conv2)
with tf.variable_scope("conv3_transp"):
conv3 = default_conv2d_transpose(conv2, 128)
conv3 = layers.batch_normalization(conv3)
conv3 = nn.relu(conv3)
with tf.variable_scope("conv4_transp"):
conv4 = default_conv2d_transpose(conv3, 3)
with tf.variable_scope("out"):
out = tf.tanh(conv4)
return out
## Real Input
real_sample = tf.placeholder(tf.float32, shape=(None, )+IMAGES_SHAPE)
## Latent Variables / Noise
noise_sample = tf.placeholder(tf.float32, shape=(None, NOISE_SIZE))
# Generator
G_sample = generator(noise_sample)
# Discriminator
D_real = discriminator(real_sample)
D_fake = discriminator(G_sample)
# Generator
G_loss = tf.reduce_mean(
tf.nn.sigmoid_cross_entropy_with_logits(
logits=D_fake, labels=tf.ones_like(D_fake)
)
)
# Discriminator
D_loss_real = tf.reduce_mean(
tf.nn.sigmoid_cross_entropy_with_logits(
logits=D_real, labels=tf.ones_like(D_real)
)
)
D_loss_fake = tf.reduce_mean(
tf.nn.sigmoid_cross_entropy_with_logits(
logits=D_fake, labels=tf.zeros_like(D_fake)
)
)
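# Total discriminator loss: penalize mistakes on both real and generated batches.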
D_loss = D_loss_real + D_loss_fake
# Obtain trainable variables for both networks
train_vars = tf.trainable_variables()
G_vars = [var for var in train_vars if 'generator' in var.name]
D_vars = [var for var in train_vars if 'discriminator' in var.name]
num_epochs = 200
G_opt = tf.train.AdamOptimizer(2e-4).minimize(G_loss, var_list=G_vars,)
D_opt = tf.train.AdamOptimizer(2e-4).minimize(D_loss, var_list=D_vars,)
num_test_samples = 16
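# Use a fixed batch of noise so logged sample images are comparable across epochs.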
test_noise = noise(num_test_samples, NOISE_SIZE)
BATCH_SIZE = 100
NUM_EPOCHS = 200
# session = tf.InteractiveSession()
# tf.global_variables_initializer().run(session=session)
sess = tf.Session()
tf.global_variables_initializer().run(session=sess)
logger = Logger(model_name='DCGAN1', data_name='CIFAR10', root_path=OUTPUT_PATH)
# Iterate through epochs
for epoch in range(NUM_EPOCHS):
for n_batch, (batch,_) in enumerate(dataloader):
# 1. Train Discriminator
X_batch = batch.permute(0, 2, 3, 1).numpy()
feed_dict = {real_sample: X_batch, noise_sample: noise(BATCH_SIZE, NOISE_SIZE)}
_, d_error, d_pred_real, d_pred_fake = sess.run([D_opt, D_loss, D_real, D_fake], feed_dict=feed_dict)
# 2. Train Generator
feed_dict = {noise_sample: noise(BATCH_SIZE, NOISE_SIZE)}
_, g_error = sess.run([G_opt, G_loss], feed_dict=feed_dict)
# if n_batch % 10 == 0:
logger.display_status(epoch, num_epochs, n_batch, num_batches,d_error, g_error, d_pred_real, d_pred_fake)
if n_batch % 100 == 0:
display.clear_output(True)
# Generate images from test noise
test_images = sess.run(G_sample, feed_dict={noise_sample: test_noise})
# Log Images
logger.log_images(test_images, num_test_samples, epoch, n_batch, num_batches, format='NHWC');
# Log Status
logger.display_status(epoch, num_epochs, n_batch, num_batches,d_error, g_error, d_pred_real, d_pred_fake)
class Logger:
def __init__(self, model_name, data_name, root_path):
self.model_name = model_name
self.data_name = data_name
self.comment = '{}_{}'.format(model_name, data_name)
self.data_subdir = '{}/{}'.format(model_name, data_name)
# TensorBoard
self.writer = SummaryWriter(comment=self.comment)
self.rootPath = root_path
def log(self, d_error, g_error, epoch, n_batch, num_batches):
# var_class = torch.autograd.variable.Variable
if isinstance(d_error, torch.autograd.Variable):
d_error = d_error.data.cpu().numpy()
if isinstance(g_error, torch.autograd.Variable):
g_error = g_error.data.cpu().numpy()
step = Logger._step(epoch, n_batch, num_batches)
self.writer.add_scalar(
'{}/D_error'.format(self.comment), d_error, step)
self.writer.add_scalar(
'{}/G_error'.format(self.comment), g_error, step)
def log_images(self, images, num_images, epoch, n_batch, num_batches, format='NCHW', normalize=True):
'''
input images are expected in format (NCHW)
'''
if type(images) == np.ndarray:
images = torch.from_numpy(images)
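        # Images coming from TensorFlow are NHWC; move channels forward for torchvision's grid utilities.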
if format=='NHWC':
images = images.transpose(1,3)
step = Logger._step(epoch, n_batch, num_batches)
img_name = '{}/images{}'.format(self.comment, '')
# Make horizontal grid from image tensor
horizontal_grid = vutils.make_grid(images, normalize=normalize, scale_each=True)
# Make vertical grid from image tensor
nrows = int(np.sqrt(num_images))
grid = vutils.make_grid(images, nrow=nrows, normalize=True, scale_each=True)
# Add horizontal images to tensorboard
self.writer.add_image(img_name, horizontal_grid, step)
# Save plots
self.save_torch_images(horizontal_grid, grid, epoch, n_batch)
print("Save Log Image")
def save_torch_images(self, horizontal_grid, grid, epoch, n_batch, plot_horizontal=True):
out_dir = (self.rootPath+'/images/{}').format(self.data_subdir)
Logger._make_dir(out_dir)
# Plot and save horizontal
fig = plt.figure(figsize=(16, 16))
plt.imshow(np.moveaxis(horizontal_grid.numpy(), 0, -1))
plt.axis('off')
if plot_horizontal:
display.display(plt.gcf())
self._save_images(fig, epoch, n_batch, 'hori')
plt.close()
# Save squared
fig = plt.figure()
plt.imshow(np.moveaxis(grid.numpy(), 0, -1))
plt.axis('off')
self._save_images(fig, epoch, n_batch)
plt.close()
def _save_images(self, fig, epoch, n_batch, comment=''):
out_dir = (self.rootPath+'/images/{}').format(self.data_subdir)
Logger._make_dir(out_dir)
fig.savefig('{}/{}_epoch_{}_batch_{}.png'.format(out_dir,comment, epoch, n_batch))
def display_status(self, epoch, num_epochs, n_batch, num_batches, d_error, g_error, d_pred_real, d_pred_fake):
# var_class = torch.autograd.variable.Variable
if isinstance(d_error, torch.autograd.Variable):
d_error = d_error.data.cpu().numpy()
if isinstance(g_error, torch.autograd.Variable):
g_error = g_error.data.cpu().numpy()
if isinstance(d_pred_real, torch.autograd.Variable):
d_pred_real = d_pred_real.data
if isinstance(d_pred_fake, torch.autograd.Variable):
d_pred_fake = d_pred_fake.data
print('Epoch: [{}/{}], Batch Num: [{}/{}]'.format(
epoch,num_epochs, n_batch, num_batches)
)
print('Discriminator Loss: {:.4f}, Generator Loss: {:.4f}'.format(d_error, g_error))
print('D(x): {:.4f}, D(G(z)): {:.4f}'.format(d_pred_real.mean(), d_pred_fake.mean()))
def save_models(self, generator, discriminator, epoch):
out_dir = (self.rootPath+'/models/{}').format(self.data_subdir)
Logger._make_dir(out_dir)
torch.save(generator.state_dict(),
'{}/G_epoch_{}'.format(out_dir, epoch))
torch.save(discriminator.state_dict(),
'{}/D_epoch_{}'.format(out_dir, epoch))
def close(self):
self.writer.close()
# Private Functionality
@staticmethod
def _step(epoch, n_batch, num_batches):
return epoch * num_batches + n_batch
@staticmethod
def _make_dir(directory):
try:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise
|
[
"tensorflow.trainable_variables",
"tensorflow.contrib.layers.flatten",
"tensorflow.reshape",
"tensorflow.zeros_like",
"torchvision.datasets.CIFAR10",
"matplotlib.pyplot.figure",
"numpy.random.normal",
"tensorflow.layers.conv2d_transpose",
"torchvision.transforms.Normalize",
"tensorflow.nn.leaky_relu",
"tensorflow.layers.batch_normalization",
"numpy.sqrt",
"os.path.abspath",
"tensorflow.nn.relu",
"torch.utils.data.DataLoader",
"matplotlib.pyplot.close",
"os.path.exists",
"tensorflow.variable_scope",
"tensorflow.placeholder",
"tensorflow.tanh",
"tensorflow.contrib.layers.fully_connected",
"tensorflow.global_variables_initializer",
"tensorflow.Session",
"tensorflow.ones_like",
"tensorflow.layers.conv2d",
"IPython.display.clear_output",
"matplotlib.pyplot.gcf",
"torchvision.transforms.Resize",
"torch.from_numpy",
"tensorboardX.SummaryWriter",
"os.makedirs",
"matplotlib.pyplot.axis",
"tensorflow.nn.sigmoid",
"torchvision.utils.make_grid",
"tensorflow.train.AdamOptimizer",
"torchvision.transforms.ToTensor"
] |
[((989, 1045), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'batch_size', 'shuffle': '(True)'}), '(dataset, batch_size=batch_size, shuffle=True)\n', (999, 1045), False, 'from torch.utils.data import DataLoader\n'), ((3937, 3993), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '((None,) + IMAGES_SHAPE)'}), '(tf.float32, shape=(None,) + IMAGES_SHAPE)\n', (3951, 3993), True, 'import tensorflow as tf\n'), ((4036, 4088), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '(None, NOISE_SIZE)'}), '(tf.float32, shape=(None, NOISE_SIZE))\n', (4050, 4088), True, 'import tensorflow as tf\n'), ((4748, 4772), 'tensorflow.trainable_variables', 'tf.trainable_variables', ([], {}), '()\n', (4770, 4772), True, 'import tensorflow as tf\n'), ((5278, 5290), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (5288, 5290), True, 'import tensorflow as tf\n'), ((436, 472), 'os.path.abspath', 'os.path.abspath', (["(__file__ + '../../')"], {}), "(__file__ + '../../')\n", (451, 472), False, 'import os\n'), ((513, 549), 'os.path.abspath', 'os.path.abspath', (["(__file__ + '../../')"], {}), "(__file__ + '../../')\n", (528, 549), False, 'import os\n'), ((576, 603), 'os.path.exists', 'os.path.exists', (['OUTPUT_PATH'], {}), '(OUTPUT_PATH)\n', (590, 603), False, 'import os\n'), ((605, 629), 'os.makedirs', 'os.makedirs', (['OUTPUT_PATH'], {}), '(OUTPUT_PATH)\n', (616, 629), False, 'import os\n'), ((637, 665), 'os.path.exists', 'os.path.exists', (['DATASET_PATH'], {}), '(DATASET_PATH)\n', (651, 665), False, 'import os\n'), ((667, 692), 'os.makedirs', 'os.makedirs', (['DATASET_PATH'], {}), '(DATASET_PATH)\n', (678, 692), False, 'import os\n'), ((853, 939), 'torchvision.datasets.CIFAR10', 'datasets.CIFAR10', ([], {'root': 'DATASET_PATH', 'train': '(True)', 'download': '(True)', 'transform': 'compose'}), '(root=DATASET_PATH, train=True, download=True, transform=\n compose)\n', (869, 939), False, 'from torchvision import transforms, datasets\n'), ((1170, 1304), 'tensorflow.layers.conv2d', 'layers.conv2d', (['inputs'], {'filters': 'filters', 'kernel_size': '(4)', 'strides': '(2, 2)', 'padding': '"""same"""', 'data_format': '"""channels_last"""', 'use_bias': '(False)'}), "(inputs, filters=filters, kernel_size=4, strides=(2, 2),\n padding='same', data_format='channels_last', use_bias=False)\n", (1183, 1304), False, 'from tensorflow import nn, layers\n'), ((1423, 1567), 'tensorflow.layers.conv2d_transpose', 'layers.conv2d_transpose', (['inputs'], {'filters': 'filters', 'kernel_size': '(4)', 'strides': '(2, 2)', 'padding': '"""same"""', 'data_format': '"""channels_last"""', 'use_bias': '(False)'}), "(inputs, filters=filters, kernel_size=4, strides=(2,\n 2), padding='same', data_format='channels_last', use_bias=False)\n", (1446, 1567), False, 'from tensorflow import nn, layers\n'), ((1666, 1705), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(n_rows, n_cols)'}), '(size=(n_rows, n_cols))\n', (1682, 1705), True, 'import numpy as np\n'), ((1738, 1793), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""discriminator"""'], {'reuse': 'tf.AUTO_REUSE'}), "('discriminator', reuse=tf.AUTO_REUSE)\n", (1755, 1793), True, 'import tensorflow as tf\n'), ((2824, 2875), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""generator"""'], {'reuse': 'tf.AUTO_REUSE'}), "('generator', reuse=tf.AUTO_REUSE)\n", (2841, 2875), True, 'import tensorflow as tf\n'), ((4933, 4963), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['(0.0002)'], {}), 
'(0.0002)\n', (4955, 4963), True, 'import tensorflow as tf\n'), ((5005, 5035), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['(0.0002)'], {}), '(0.0002)\n', (5027, 5035), True, 'import tensorflow as tf\n'), ((5291, 5324), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (5322, 5324), True, 'import tensorflow as tf\n'), ((6938, 6973), 'tensorboardX.SummaryWriter', 'SummaryWriter', ([], {'comment': 'self.comment'}), '(comment=self.comment)\n', (6951, 6973), False, 'from tensorboardX import SummaryWriter\n'), ((8136, 8198), 'torchvision.utils.make_grid', 'vutils.make_grid', (['images'], {'normalize': 'normalize', 'scale_each': '(True)'}), '(images, normalize=normalize, scale_each=True)\n', (8152, 8198), True, 'import torchvision.utils as vutils\n'), ((8302, 8371), 'torchvision.utils.make_grid', 'vutils.make_grid', (['images'], {'nrow': 'nrows', 'normalize': '(True)', 'scale_each': '(True)'}), '(images, nrow=nrows, normalize=True, scale_each=True)\n', (8318, 8371), True, 'import torchvision.utils as vutils\n'), ((8858, 8886), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 16)'}), '(figsize=(16, 16))\n', (8868, 8886), True, 'from matplotlib import pyplot as plt\n'), ((8959, 8974), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (8967, 8974), True, 'from matplotlib import pyplot as plt\n'), ((9105, 9116), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (9114, 9116), True, 'from matplotlib import pyplot as plt\n'), ((9155, 9167), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (9165, 9167), True, 'from matplotlib import pyplot as plt\n'), ((9229, 9244), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (9237, 9244), True, 'from matplotlib import pyplot as plt\n'), ((9300, 9311), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (9309, 9311), True, 'from matplotlib import pyplot as plt\n'), ((746, 767), 'torchvision.transforms.Resize', 'transforms.Resize', (['(64)'], {}), '(64)\n', (763, 767), False, 'from torchvision import transforms, datasets\n'), ((768, 789), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (787, 789), False, 'from torchvision import transforms, datasets\n'), ((790, 844), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['(0.5, 0.5, 0.5)', '(0.5, 0.5, 0.5)'], {}), '((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))\n', (810, 844), False, 'from torchvision import transforms, datasets\n'), ((1808, 1834), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""conv1"""'], {}), "('conv1')\n", (1825, 1834), True, 'import tensorflow as tf\n'), ((1899, 1930), 'tensorflow.nn.leaky_relu', 'nn.leaky_relu', (['conv1'], {'alpha': '(0.2)'}), '(conv1, alpha=0.2)\n', (1912, 1930), False, 'from tensorflow import nn, layers\n'), ((1952, 1978), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""conv2"""'], {}), "('conv2')\n", (1969, 1978), True, 'import tensorflow as tf\n'), ((2047, 2080), 'tensorflow.layers.batch_normalization', 'layers.batch_normalization', (['conv2'], {}), '(conv2)\n', (2073, 2080), False, 'from tensorflow import nn, layers\n'), ((2101, 2132), 'tensorflow.nn.leaky_relu', 'nn.leaky_relu', (['conv2'], {'alpha': '(0.2)'}), '(conv2, alpha=0.2)\n', (2114, 2132), False, 'from tensorflow import nn, layers\n'), ((2158, 2184), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""conv3"""'], {}), "('conv3')\n", (2175, 2184), True, 'import tensorflow as tf\n'), ((2253, 2286), 
'tensorflow.layers.batch_normalization', 'layers.batch_normalization', (['conv3'], {}), '(conv3)\n', (2279, 2286), False, 'from tensorflow import nn, layers\n'), ((2307, 2338), 'tensorflow.nn.leaky_relu', 'nn.leaky_relu', (['conv3'], {'alpha': '(0.2)'}), '(conv3, alpha=0.2)\n', (2320, 2338), False, 'from tensorflow import nn, layers\n'), ((2364, 2390), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""conv4"""'], {}), "('conv4')\n", (2381, 2390), True, 'import tensorflow as tf\n'), ((2460, 2493), 'tensorflow.layers.batch_normalization', 'layers.batch_normalization', (['conv3'], {}), '(conv3)\n', (2486, 2493), False, 'from tensorflow import nn, layers\n'), ((2514, 2545), 'tensorflow.nn.leaky_relu', 'nn.leaky_relu', (['conv3'], {'alpha': '(0.2)'}), '(conv3, alpha=0.2)\n', (2527, 2545), False, 'from tensorflow import nn, layers\n'), ((2567, 2594), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""linear"""'], {}), "('linear')\n", (2584, 2594), True, 'import tensorflow as tf\n'), ((2617, 2639), 'tensorflow.contrib.layers.flatten', 'clayers.flatten', (['conv4'], {}), '(conv4)\n', (2632, 2639), True, 'from tensorflow.contrib import layers as clayers\n'), ((2661, 2695), 'tensorflow.contrib.layers.fully_connected', 'clayers.fully_connected', (['linear', '(1)'], {}), '(linear, 1)\n', (2684, 2695), True, 'from tensorflow.contrib import layers as clayers\n'), ((2718, 2742), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""out"""'], {}), "('out')\n", (2735, 2742), True, 'import tensorflow as tf\n'), ((2762, 2780), 'tensorflow.nn.sigmoid', 'nn.sigmoid', (['linear'], {}), '(linear)\n', (2772, 2780), False, 'from tensorflow import nn, layers\n'), ((2895, 2922), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""linear"""'], {}), "('linear')\n", (2912, 2922), True, 'import tensorflow as tf\n'), ((2945, 2985), 'tensorflow.contrib.layers.fully_connected', 'clayers.fully_connected', (['z', '(1024 * 4 * 4)'], {}), '(z, 1024 * 4 * 4)\n', (2968, 2985), True, 'from tensorflow.contrib import layers as clayers\n'), ((3012, 3045), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""conv1_transp"""'], {}), "('conv1_transp')\n", (3029, 3045), True, 'import tensorflow as tf\n'), ((3103, 3139), 'tensorflow.reshape', 'tf.reshape', (['linear', '(-1, 4, 4, 1024)'], {}), '(linear, (-1, 4, 4, 1024))\n', (3113, 3139), True, 'import tensorflow as tf\n'), ((3217, 3250), 'tensorflow.layers.batch_normalization', 'layers.batch_normalization', (['conv1'], {}), '(conv1)\n', (3243, 3250), False, 'from tensorflow import nn, layers\n'), ((3271, 3285), 'tensorflow.nn.relu', 'nn.relu', (['conv1'], {}), '(conv1)\n', (3278, 3285), False, 'from tensorflow import nn, layers\n'), ((3308, 3341), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""conv2_transp"""'], {}), "('conv2_transp')\n", (3325, 3341), True, 'import tensorflow as tf\n'), ((3420, 3453), 'tensorflow.layers.batch_normalization', 'layers.batch_normalization', (['conv2'], {}), '(conv2)\n', (3446, 3453), False, 'from tensorflow import nn, layers\n'), ((3474, 3488), 'tensorflow.nn.relu', 'nn.relu', (['conv2'], {}), '(conv2)\n', (3481, 3488), False, 'from tensorflow import nn, layers\n'), ((3515, 3548), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""conv3_transp"""'], {}), "('conv3_transp')\n", (3532, 3548), True, 'import tensorflow as tf\n'), ((3627, 3660), 'tensorflow.layers.batch_normalization', 'layers.batch_normalization', (['conv3'], {}), '(conv3)\n', (3653, 3660), False, 'from tensorflow import nn, layers\n'), ((3681, 3695), 
'tensorflow.nn.relu', 'nn.relu', (['conv3'], {}), '(conv3)\n', (3688, 3695), False, 'from tensorflow import nn, layers\n'), ((3722, 3755), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""conv4_transp"""'], {}), "('conv4_transp')\n", (3739, 3755), True, 'import tensorflow as tf\n'), ((3834, 3858), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""out"""'], {}), "('out')\n", (3851, 3858), True, 'import tensorflow as tf\n'), ((3878, 3892), 'tensorflow.tanh', 'tf.tanh', (['conv4'], {}), '(conv4)\n', (3885, 3892), True, 'import tensorflow as tf\n'), ((4335, 4355), 'tensorflow.ones_like', 'tf.ones_like', (['D_fake'], {}), '(D_fake)\n', (4347, 4355), True, 'import tensorflow as tf\n'), ((4486, 4506), 'tensorflow.ones_like', 'tf.ones_like', (['D_real'], {}), '(D_real)\n', (4498, 4506), True, 'import tensorflow as tf\n'), ((4621, 4642), 'tensorflow.zeros_like', 'tf.zeros_like', (['D_fake'], {}), '(D_fake)\n', (4634, 4642), True, 'import tensorflow as tf\n'), ((6190, 6216), 'IPython.display.clear_output', 'display.clear_output', (['(True)'], {}), '(True)\n', (6210, 6216), False, 'from IPython import display\n'), ((7831, 7855), 'torch.from_numpy', 'torch.from_numpy', (['images'], {}), '(images)\n', (7847, 7855), False, 'import torch\n'), ((8266, 8285), 'numpy.sqrt', 'np.sqrt', (['num_images'], {}), '(num_images)\n', (8273, 8285), True, 'import numpy as np\n'), ((11120, 11142), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (11131, 11142), False, 'import os\n'), ((9031, 9040), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (9038, 9040), True, 'from matplotlib import pyplot as plt\n')]
|
# -*- coding: UTF-8 -*-
# Copyright 2013-2017 <NAME>
# License: BSD (see file COPYING for details)
"""See :doc:`/specs/vat`.
.. autosummary::
:toctree:
utils
.. fixtures.novat fixtures.euvatrates
"""
from django.utils.translation import ugettext_lazy as _
from lino.api import ad
import six
class Plugin(ad.Plugin):
"""The :class:`Plugin <lino.core.plugin.Plugin>` object for this
plugin.
"""
verbose_name = _("VAT")
needs_plugins = ['lino_xl.lib.countries', 'lino_xl.lib.ledger']
default_vat_regime = 'normal'
"""The default VAT regime. If this is specified as a string, Lino will
resolve it at startup into an item of :class:`VatRegimes
<lino_xl.lib.vat.VatRegimes>`.
"""
default_vat_class = 'normal'
"""The default VAT class. If this is specified as a string, Lino will
resolve it at startup into an item of :class:`VatClasses
<lino_xl.lib.vat.VatClasses>`.
"""
def get_vat_class(self, tt, item):
"""Return the VAT class to be used for given trade type and given
invoice item. Return value must be an item of
:class:`lino_xl.lib.vat.VatClasses`.
"""
return self.default_vat_class
def on_site_startup(self, site):
vat = site.modules.vat
if isinstance(self.default_vat_regime, six.string_types):
self.default_vat_regime = vat.VatRegimes.get_by_name(
self.default_vat_regime)
if isinstance(self.default_vat_class, six.string_types):
self.default_vat_class = vat.VatClasses.get_by_name(
self.default_vat_class)
def setup_reports_menu(self, site, user_type, m):
mg = site.plugins.accounts
# mg = site.plugins.vat
# mg = self
m = m.add_menu(mg.app_label, mg.verbose_name)
m.add_action('vat.PrintableInvoicesByJournal')
m.add_action('vat.IntracomPurchases')
m.add_action('vat.IntracomSales')
def setup_explorer_menu(self, site, user_type, m):
m = m.add_menu(self.app_label, self.verbose_name)
m.add_action('vat.VatAreas')
m.add_action('vat.VatRegimes')
m.add_action('vat.VatClasses')
m.add_action('vat.VatColumns')
m.add_action('vat.Invoices')
m.add_action('vat.VatRules')
# m.add_action('vat.InvoiceItems')
|
[
"django.utils.translation.ugettext_lazy"
] |
[((440, 448), 'django.utils.translation.ugettext_lazy', '_', (['"""VAT"""'], {}), "('VAT')\n", (441, 448), True, 'from django.utils.translation import ugettext_lazy as _\n')]
|
from flask import Blueprint
admin = Blueprint('admin', __name__, url_prefix='/admin',
template_folder='templates')
|
[
"flask.Blueprint"
] |
[((38, 116), 'flask.Blueprint', 'Blueprint', (['"""admin"""', '__name__'], {'url_prefix': '"""/admin"""', 'template_folder': '"""templates"""'}), "('admin', __name__, url_prefix='/admin', template_folder='templates')\n", (47, 116), False, 'from flask import Blueprint\n')]
|
# Generated by Django 2.1.5 on 2019-01-29 23:48
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Call',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('call_time', models.IntegerField(verbose_name='Call duration in seconds')),
('call_type', models.CharField(choices=[('IN', 'International'), ('NL', 'National'), ('DO', 'Domestic')], default='DO', max_length=2)),
],
),
]
|
[
"django.db.models.CharField",
"django.db.models.IntegerField",
"django.db.models.AutoField"
] |
[((300, 393), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (316, 393), False, 'from django.db import migrations, models\n'), ((422, 482), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'verbose_name': '"""Call duration in seconds"""'}), "(verbose_name='Call duration in seconds')\n", (441, 482), False, 'from django.db import migrations, models\n'), ((515, 639), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('IN', 'International'), ('NL', 'National'), ('DO', 'Domestic')]", 'default': '"""DO"""', 'max_length': '(2)'}), "(choices=[('IN', 'International'), ('NL', 'National'), (\n 'DO', 'Domestic')], default='DO', max_length=2)\n", (531, 639), False, 'from django.db import migrations, models\n')]
|
import csv
import logging
import numpy as np
import datajoint as dj
import pathlib
import scipy.io as scio
from tifffile import imread
from . import InsertBuffer
from .reference import ccf_ontology
from . import get_schema_name
schema = dj.schema(get_schema_name('ccf'))
log = logging.getLogger(__name__)
@schema
class CCFLabel(dj.Lookup):
definition = """
# CCF Dataset Information
ccf_label_id: int # Local CCF ID
---
ccf_version: int # Allen CCF Version
ccf_resolution: int # Voxel Resolution (uM)
ccf_description: varchar(255) # CCFLabel Description
"""
CCF_R3_20UM_ID = 0
CCF_R3_20UM_DESC = 'Allen Institute Mouse CCF, Rev. 3, 20uM Resolution'
CCF_R3_20UM_TYPE = 'CCF_R3_20UM'
contents = [(CCF_R3_20UM_ID, 3, 20, CCF_R3_20UM_DESC)]
@schema
class CCF(dj.Lookup):
definition = """
# Common Coordinate Framework
-> CCFLabel
ccf_x : int # (um)
ccf_y : int # (um)
ccf_z : int # (um)
"""
@schema
class AnnotationType(dj.Lookup):
definition = """
annotation_type : varchar(16)
"""
contents = ((CCFLabel.CCF_R3_20UM_TYPE,),)
@schema
class CCFAnnotation(dj.Manual):
definition = """
-> CCF
-> AnnotationType
---
annotation : varchar(1024)
index (annotation)
"""
@classmethod
def get_ccf_r3_20um_ontology_regions(cls):
return [c for c in csv.reader(ccf_ontology.splitlines())
if len(c) == 2]
@classmethod
def load_ccf_r3_20um(cls):
"""
Load the CCF r3 20 uM Dataset.
Requires that dj.config['ccf.r3_20um_path'] be set to the location
of the CCF Annotation tif stack.
"""
# TODO: scaling
log.info('CCFAnnotation.load_ccf_r3_20um(): start')
self = cls() # Instantiate self,
stack_path = dj.config['custom']['ccf.r3_20um_path']
stack = imread(stack_path) # load reference stack,
log.info('.. loaded stack of shape {} from {}'
.format(stack.shape, stack_path))
# iterate over ccf ontology region id/name records,
regions = self.get_ccf_r3_20um_ontology_regions()
region, nregions = 0, len(regions)
chunksz, ib_args = 50000, {'skip_duplicates': True,
'allow_direct_insert': True}
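        # Buffered inserts in 50k chunks; skipping duplicates keeps reloads idempotent.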
for num, txt in regions:
region += 1
num = int(num)
log.info('.. loading region {} ({}/{}) ({})'
.format(num, region, nregions, txt))
# extracting filled volumes from stack in scaled [[x,y,z]] shape,
vol = np.array(np.where(stack == num)).T[:, [2, 1, 0]] * 20
if not vol.shape[0]:
log.info('.. region {} volume: shape {} - skipping'
.format(num, vol.shape))
continue
log.info('.. region {} volume: shape {}'.format(num, vol.shape))
with dj.conn().transaction:
with InsertBuffer(CCF, chunksz, **ib_args) as buf:
for vox in vol:
buf.insert1((CCFLabel.CCF_R3_20UM_ID, *vox))
buf.flush()
with InsertBuffer(cls, chunksz, **ib_args) as buf:
for vox in vol:
buf.insert1((CCFLabel.CCF_R3_20UM_ID, *vox,
CCFLabel.CCF_R3_20UM_TYPE, txt))
buf.flush()
log.info('.. done.')
@schema
class AnnotatedBrainSurface(dj.Manual):
definition = """ # iso-surface of annotated brain in CCF coordinate frame
annotated_brain_name: varchar(100) # e.g. Annotation_new_10_ds222_16bit
---
vertices: longblob # (px)
faces: longblob
"""
@classmethod
def load_matlab_mesh(self, mesh_fp):
mesh_fp = pathlib.Path(mesh_fp).resolve()
assert mesh_fp.exists()
mesh = scio.loadmat(mesh_fp, struct_as_record = False, squeeze_me = True)['mesh']
self.insert1(dict(annotated_brain_name=mesh_fp.stem,
vertices=mesh.vertices,
faces=mesh.faces - 1), # 0-base index
allow_direct_insert=True)
|
[
"scipy.io.loadmat",
"pathlib.Path",
"numpy.where",
"tifffile.imread",
"datajoint.conn",
"logging.getLogger"
] |
[((284, 311), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (301, 311), False, 'import logging\n'), ((1964, 1982), 'tifffile.imread', 'imread', (['stack_path'], {}), '(stack_path)\n', (1970, 1982), False, 'from tifffile import imread\n'), ((3995, 4057), 'scipy.io.loadmat', 'scio.loadmat', (['mesh_fp'], {'struct_as_record': '(False)', 'squeeze_me': '(True)'}), '(mesh_fp, struct_as_record=False, squeeze_me=True)\n', (4007, 4057), True, 'import scipy.io as scio\n'), ((3916, 3937), 'pathlib.Path', 'pathlib.Path', (['mesh_fp'], {}), '(mesh_fp)\n', (3928, 3937), False, 'import pathlib\n'), ((3027, 3036), 'datajoint.conn', 'dj.conn', ([], {}), '()\n', (3034, 3036), True, 'import datajoint as dj\n'), ((2709, 2731), 'numpy.where', 'np.where', (['(stack == num)'], {}), '(stack == num)\n', (2717, 2731), True, 'import numpy as np\n')]
|
import os
import discord
import requests
import json
from dotenv import load_dotenv
from discord.ext import commands
from datetime import datetime
load_dotenv()
TOKEN = os.getenv('DISCORD_TOKEN')
API_KEY = os.getenv('API_KEY')
HEADERS = {
"x-api-key" : API_KEY
}
bot = commands.Bot(command_prefix = "-")
ROOT_URL = "https://esports-api.lolesports.com/persisted/gw/"
# Getting leagues from API data
response = requests.get(ROOT_URL + "getLeagues?hl=en-US", headers=HEADERS)
response_info = response.json()
leagues = response_info["data"]["leagues"]
# Organizing data and adding it to region-league hashmap, and adding IDs to list
regions = {}
ids = []
imgs = []
for league in leagues:
region = league["region"]
league_name = league["name"]
league_id = league["id"]
league_image = league["image"]
if region not in regions:
regions[region] = []
regions[region].append(league_name)
ids.append(league_id)
imgs.append(league_image)
# Connection
@bot.event
async def on_ready():
print(f'{bot.user.name} has connected to Discord')
# Ping command
@bot.command(help='Returns connection time')
async def ping(ctx):
msg = await ctx.channel.send("Pong")
now = datetime.now().timestamp()
ping = round(bot.latency * 1000)
edit_to = f"Pong, {ping} ms"
await msg.edit(content=edit_to)
# List command
# Outputs all the supported leagues, organized by region in embed menu
@bot.command(help="Returns supported leagues")
async def list(ctx):
# Creating embed
embed = discord.Embed(title="Supported leagues", color=discord.Color.blurple())
embed.set_thumbnail(url="https://pbs.twimg.com/profile_images/1407732613171793925/pQZWynfn_400x400.jpg")
for region in regions:
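        # EUROPE starts a new row (inline=False); other regions flow three fields per row.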
formatted_str = ", ".join(regions[region])
if region == "COMMONWEALTH OF INDEPENDENT STATES":
embed.add_field(name="CONTINENTAL", value=formatted_str, inline=True)
else:
embed.add_field(name=region, value=formatted_str, inline=(region!="EUROPE"))
# Adding empty character to fix column alignment in embed
v = 3 - ((len(regions) - 1) % 3)
for _ in range(v):
embed.add_field(name = "\u200b", value= "\u200b")
await ctx.channel.send(embed=embed)
bot.run(TOKEN)
|
[
"discord.Color.blurple",
"dotenv.load_dotenv",
"requests.get",
"discord.ext.commands.Bot",
"datetime.datetime.now",
"os.getenv"
] |
[((148, 161), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (159, 161), False, 'from dotenv import load_dotenv\n'), ((170, 196), 'os.getenv', 'os.getenv', (['"""DISCORD_TOKEN"""'], {}), "('DISCORD_TOKEN')\n", (179, 196), False, 'import os\n'), ((207, 227), 'os.getenv', 'os.getenv', (['"""API_KEY"""'], {}), "('API_KEY')\n", (216, 227), False, 'import os\n'), ((276, 308), 'discord.ext.commands.Bot', 'commands.Bot', ([], {'command_prefix': '"""-"""'}), "(command_prefix='-')\n", (288, 308), False, 'from discord.ext import commands\n'), ((418, 481), 'requests.get', 'requests.get', (["(ROOT_URL + 'getLeagues?hl=en-US')"], {'headers': 'HEADERS'}), "(ROOT_URL + 'getLeagues?hl=en-US', headers=HEADERS)\n", (430, 481), False, 'import requests\n'), ((1234, 1248), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1246, 1248), False, 'from datetime import datetime\n'), ((1607, 1630), 'discord.Color.blurple', 'discord.Color.blurple', ([], {}), '()\n', (1628, 1630), False, 'import discord\n')]
|
import logging
from common import settings
from elasticapm.contrib.flask import ElasticAPM
log = logging.getLogger(__name__)
def configure_app(flask_app):
flask_app.config.SWAGGER_UI_DOC_EXPANSION = settings.RESTPLUS_SWAGGER_UI_DOC_EXPANSION
flask_app.config.RESTPLUS_VALIDATE = settings.RESTPLUS_VALIDATE
flask_app.config.RESTPLUS_MASK_SWAGGER = settings.RESTPLUS_MASK_SWAGGER
flask_app.config.ERROR_404_HELP = settings.RESTPLUS_ERROR_404_HELP
if settings.APM_SERVICE_NAME and settings.APM_SERVICE_URL and settings.APM_SECRET:
flask_app.config['ELASTIC_APM'] = {
'SERVICE_NAME': settings.APM_SERVICE_NAME,
'SERVER_URL': settings.APM_SERVICE_URL,
'SECRET_TOKEN': settings.APM_SECRET,
'COLLECT_LOCAL_VARIABLES': 'off',
# regex to ignore specific routes
            'TRANSACTIONS_IGNORE_PATTERNS': ['^OPTIONS ', '^HEAD ', r'^.*/\s*$', '.*swagger'],
'CAPTURE_BODY': 'errors',
'CAPTURE_HEADERS': 'false'
}
apm = ElasticAPM(flask_app, logging=settings.APM_LOG_LEVEL)
apm.capture_message('hello, apm!')
log.info("ElasticAPM enabled")
log.debug(f"APM details. Name: {settings.APM_SERVICE_NAME}, log level: {settings.APM_LOG_LEVEL}")
else:
log.info("ElasticAPM is disabled")
def initialize_app(flask_app, api):
configure_app(flask_app)
api.init_app(flask_app)
|
[
"elasticapm.contrib.flask.ElasticAPM",
"logging.getLogger"
] |
[((99, 126), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (116, 126), False, 'import logging\n'), ((1040, 1093), 'elasticapm.contrib.flask.ElasticAPM', 'ElasticAPM', (['flask_app'], {'logging': 'settings.APM_LOG_LEVEL'}), '(flask_app, logging=settings.APM_LOG_LEVEL)\n', (1050, 1093), False, 'from elasticapm.contrib.flask import ElasticAPM\n')]
|
"""CLI for data preparation and processing."""
import argparse
from utils import data_prep
from utils import read_one_row
from utils import save_input
parser = argparse.ArgumentParser()
parser.add_argument(
"--save_row",
type=int,
default="0",
help="Saves a single row to a file defaults to row 0",
)
parser.add_argument(
"--input_file",
type=str,
default="final_data_with_feature_engineered.csv",
help=(
"File to read the row from defaults to "
"final_data_with_feature_engineered.csv"
),
)
parser.add_argument(
"--output_file",
type=str,
default="input.npy",
help=("Output file with the input row defaults to " "input.npy"),
)
config = parser.parse_args()
input_file = config.input_file
output_file = config.output_file
save_row = config.save_row
train_x_df, _ = data_prep(input_file)
out = read_one_row(save_row, train_x_df)
save_input(output_file, out)
|
[
"utils.save_input",
"utils.data_prep",
"argparse.ArgumentParser",
"utils.read_one_row"
] |
[((162, 187), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (185, 187), False, 'import argparse\n'), ((837, 858), 'utils.data_prep', 'data_prep', (['input_file'], {}), '(input_file)\n', (846, 858), False, 'from utils import data_prep\n'), ((865, 899), 'utils.read_one_row', 'read_one_row', (['save_row', 'train_x_df'], {}), '(save_row, train_x_df)\n', (877, 899), False, 'from utils import read_one_row\n'), ((900, 928), 'utils.save_input', 'save_input', (['output_file', 'out'], {}), '(output_file, out)\n', (910, 928), False, 'from utils import save_input\n')]
|
import os
import sys
import unittest
from io import StringIO
import bpy
import tests.test_utils as tutils
from blendernc.preferences import get_addon_preference
@tutils.refresh_state
def create_nodes(file, var):
node_groups = bpy.data.node_groups
if tutils.is_blendernc_in_nodetree(node_groups):
node_groups.remove(node_groups["BlenderNC"])
bpy.data.node_groups.new("BlenderNC", "BlenderNC")
# Create nodes
nodes = ["datacubePath", "datacubeNode", "datacubeResolution", "datacubeOutput"]
node_names = tutils.create_nodes(nodes)
node_tree = bpy.data.node_groups["BlenderNC"]
existing_nodes = [node_tree.nodes[node] for node in node_names]
# Now let's change properties.
props = tutils.build_dict_blendernc_prop(existing_nodes)
props["datacube Path"]["blendernc_file"] = file
props["datacube Input"]["blendernc_datacube_vars"] = var
props["Resolution"]["bendernc_resolution"] = 80
props["Output"]["update_on_frame_change"] = True
tutils.join_nodes(node_tree, existing_nodes, props)
# Create new image
bpy.ops.image.new(
name="BlenderNC_default",
width=1024,
height=1024,
color=(0.0, 0.0, 0.0, 1.0),
alpha=True,
generated_type="BLANK",
float=True,
)
# Assign new image to node
existing_nodes[-1].image = bpy.data.images.get("BlenderNC_default")
class Capturing(list):
def __enter__(self):
self._stdout = sys.stdout
sys.stdout = self._stringio = StringIO()
return self
def __exit__(self, *args):
self.extend(self._stringio.getvalue().splitlines())
del self._stringio # free up some memory
sys.stdout = self._stdout
class Test_settings(unittest.TestCase):
def setUp(self) -> None:
file = os.path.abspath("./dataset/ssh_1995-01.nc")
var = "adt"
create_nodes(file, var)
return super().setUp()
def test_animation_setting_extend(self):
bpy.context.scene.blendernc_animation_type = "EXTEND"
frames = [3, 4, 5]
loaded_frames = []
for frame in frames:
with Capturing() as output:
bpy.context.scene.frame_set(frame)
if output:
loaded_frames.append(int(output[-3].split(" ")[-2]))
print("\n".join(output))
self.assertEqual(loaded_frames, [3, 4, 4])
def test_animation_setting_none(self):
loaded_frames = []
bpy.context.scene.blendernc_animation_type = "NONE"
frames = [3, 4, 5]
for frame in frames:
with Capturing() as output:
bpy.context.scene.frame_set(frame)
if output:
loaded_frames.append(int(output[-3].split(" ")[-2]))
print("\n".join(output))
print(loaded_frames)
# self.assertEqual(loaded_frames, [3,4])
def test_animation_setting_loop(self):
loaded_frames = []
bpy.context.scene.blendernc_animation_type = "LOOP"
frames = [3, 4, 5]
for frame in frames:
with Capturing() as output:
bpy.context.scene.frame_set(frame)
if output:
loaded_frames.append(int(output[-3].split(" ")[-2]))
print("\n".join(output))
self.assertEqual(loaded_frames, [3, 4, 0])
def test_memory_frames(self):
bpy.context.scene.blendernc_memory_handle = "FRAMES"
bpy.context.scene.blendernc_frames = 1
frames = [2, 3, 4]
removed_frames = []
for frame in frames:
with Capturing() as output:
bpy.context.scene.frame_set(frame)
if "Removed" in output[-1]:
removed_frames.append(int(output[-1].split(": ")[-1]))
print("\n".join(output))
self.assertEqual(removed_frames, [2, 3])
def test_memory_dynamic(self):
bpy.context.scene.blendernc_memory_handle = "DYNAMIC"
bpy.context.scene.blendernc_avail_mem_purge = 100
frames = [2, 3, 4]
removed_frames = []
for frame in frames:
with Capturing() as output:
bpy.context.scene.frame_set(frame)
if "Removed" in output[-1]:
removed_frames.append(int(output[-1].split(": ")[-1]))
print("\n".join(output))
self.assertEqual(removed_frames, [2, 3])
def test_dask(self):
pref = get_addon_preference()
pref.blendernc_use_dask = "True"
pref.blendernc_use_dask = "True"
pref.blendernc_use_dask = "False"
pref.blendernc_use_dask = "False"
pref.blendernc_use_dask = "True"
pref.blendernc_use_dask = "False"
suite = unittest.defaultTestLoader.loadTestsFromTestCase(Test_settings)
test = unittest.TextTestRunner().run(suite)
ret = not test.wasSuccessful()
sys.exit(ret)
|
[
"io.StringIO",
"bpy.ops.image.new",
"os.path.abspath",
"tests.test_utils.is_blendernc_in_nodetree",
"unittest.TextTestRunner",
"tests.test_utils.build_dict_blendernc_prop",
"bpy.context.scene.frame_set",
"bpy.data.node_groups.new",
"unittest.defaultTestLoader.loadTestsFromTestCase",
"tests.test_utils.join_nodes",
"blendernc.preferences.get_addon_preference",
"bpy.data.images.get",
"sys.exit",
"tests.test_utils.create_nodes"
] |
[((4711, 4774), 'unittest.defaultTestLoader.loadTestsFromTestCase', 'unittest.defaultTestLoader.loadTestsFromTestCase', (['Test_settings'], {}), '(Test_settings)\n', (4759, 4774), False, 'import unittest\n'), ((4851, 4864), 'sys.exit', 'sys.exit', (['ret'], {}), '(ret)\n', (4859, 4864), False, 'import sys\n'), ((262, 306), 'tests.test_utils.is_blendernc_in_nodetree', 'tutils.is_blendernc_in_nodetree', (['node_groups'], {}), '(node_groups)\n', (293, 306), True, 'import tests.test_utils as tutils\n'), ((366, 416), 'bpy.data.node_groups.new', 'bpy.data.node_groups.new', (['"""BlenderNC"""', '"""BlenderNC"""'], {}), "('BlenderNC', 'BlenderNC')\n", (390, 416), False, 'import bpy\n'), ((540, 566), 'tests.test_utils.create_nodes', 'tutils.create_nodes', (['nodes'], {}), '(nodes)\n', (559, 566), True, 'import tests.test_utils as tutils\n'), ((733, 781), 'tests.test_utils.build_dict_blendernc_prop', 'tutils.build_dict_blendernc_prop', (['existing_nodes'], {}), '(existing_nodes)\n', (765, 781), True, 'import tests.test_utils as tutils\n'), ((1006, 1057), 'tests.test_utils.join_nodes', 'tutils.join_nodes', (['node_tree', 'existing_nodes', 'props'], {}), '(node_tree, existing_nodes, props)\n', (1023, 1057), True, 'import tests.test_utils as tutils\n'), ((1086, 1235), 'bpy.ops.image.new', 'bpy.ops.image.new', ([], {'name': '"""BlenderNC_default"""', 'width': '(1024)', 'height': '(1024)', 'color': '(0.0, 0.0, 0.0, 1.0)', 'alpha': '(True)', 'generated_type': '"""BLANK"""', 'float': '(True)'}), "(name='BlenderNC_default', width=1024, height=1024, color=\n (0.0, 0.0, 0.0, 1.0), alpha=True, generated_type='BLANK', float=True)\n", (1103, 1235), False, 'import bpy\n'), ((1357, 1397), 'bpy.data.images.get', 'bpy.data.images.get', (['"""BlenderNC_default"""'], {}), "('BlenderNC_default')\n", (1376, 1397), False, 'import bpy\n'), ((1520, 1530), 'io.StringIO', 'StringIO', ([], {}), '()\n', (1528, 1530), False, 'from io import StringIO\n'), ((1813, 1856), 'os.path.abspath', 'os.path.abspath', (['"""./dataset/ssh_1995-01.nc"""'], {}), "('./dataset/ssh_1995-01.nc')\n", (1828, 1856), False, 'import os\n'), ((4429, 4451), 'blendernc.preferences.get_addon_preference', 'get_addon_preference', ([], {}), '()\n', (4449, 4451), False, 'from blendernc.preferences import get_addon_preference\n'), ((4782, 4807), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {}), '()\n', (4805, 4807), False, 'import unittest\n'), ((2187, 2221), 'bpy.context.scene.frame_set', 'bpy.context.scene.frame_set', (['frame'], {}), '(frame)\n', (2214, 2221), False, 'import bpy\n'), ((2645, 2679), 'bpy.context.scene.frame_set', 'bpy.context.scene.frame_set', (['frame'], {}), '(frame)\n', (2672, 2679), False, 'import bpy\n'), ((3130, 3164), 'bpy.context.scene.frame_set', 'bpy.context.scene.frame_set', (['frame'], {}), '(frame)\n', (3157, 3164), False, 'import bpy\n'), ((3628, 3662), 'bpy.context.scene.frame_set', 'bpy.context.scene.frame_set', (['frame'], {}), '(frame)\n', (3655, 3662), False, 'import bpy\n'), ((4156, 4190), 'bpy.context.scene.frame_set', 'bpy.context.scene.frame_set', (['frame'], {}), '(frame)\n', (4183, 4190), False, 'import bpy\n')]
|
from __future__ import print_function
import os
import warnings
import pytest
from graphene_django.utils.testing import graphql_query
from graphql_jwt.settings import jwt_settings
from graphql_jwt.shortcuts import get_token
from neo4j.exceptions import ClientError as CypherError
from neobolt.exceptions import ClientError
from neomodel import change_neo4j_password, clear_neo4j_database, config, db
from kaffepause.accounts.models import Account
from kaffepause.accounts.test.factories import AccountFactory
from kaffepause.users.models import User
from kaffepause.users.test.factories import UserFactory
@pytest.fixture(autouse=True)
def setup_and_teardown():
"""Fixture to clear database in between each test function."""
clear_neo4j_database(db)
yield
clear_neo4j_database(db)
def pytest_addoption(parser):
"""
Adds the command line option --resetdb.
:param parser: The parser object. Please see <https://docs.pytest.org/en/latest/reference.html#_pytest.hookspec.pytest_addoption>`_
:type Parser object: For more information please see <https://docs.pytest.org/en/latest/reference.html#_pytest.config.Parser>`_
"""
parser.addoption(
"--resetdb",
action="store_true",
help="Ensures that the database is clear prior to running tests for neomodel",
default=False,
)
def pytest_sessionstart(session):
"""
Provides initial connection to the database and sets up the rest of the test suite
:param session: The session object. Please see <https://docs.pytest.org/en/latest/reference.html#_pytest.hookspec.pytest_sessionstart>`_
:type Session object: For more information please see <https://docs.pytest.org/en/latest/reference.html#session>`_
"""
warnings.simplefilter("default")
config.DATABASE_URL = os.environ.get(
"NEO4J_BOLT_URL", "bolt://neo4j:foobar@localhost:7687"
)
config.AUTO_INSTALL_LABELS = True
try:
# Clear the database if required
database_is_populated, _ = db.cypher_query(
"MATCH (a) return count(a)>0 as database_is_populated"
)
if database_is_populated[0][0] and not session.config.getoption("resetdb"):
raise SystemError(
"Please note: The database seems to be populated.\n\tEither delete all nodes and edges manually, "
"or set the --resetdb parameter when calling pytest\n\n\tpytest --resetdb."
)
else:
clear_neo4j_database(db)
except (CypherError, ClientError) as ce:
# Handle instance without password being changed
if (
"The credentials you provided were valid, but must be changed before you can use this instance"
in str(ce)
):
warnings.warn(
"New database with no password set, setting password to '<PASSWORD>'"
)
try:
change_neo4j_password(db, "test")
# Ensures that multiprocessing tests can use the new password
config.DATABASE_URL = "bolt://neo4j:test@localhost:7687"
db.set_connection("bolt://neo4j:test@localhost:7687")
warnings.warn(
"Please 'export NEO4J_BOLT_URL=bolt://neo4j:test@localhost:7687' for subsequent test runs"
)
except (CypherError, ClientError) as e:
if (
"The credentials you provided were valid, but must be changed before you can use this instance"
in str(e)
):
warnings.warn(
"You appear to be running on version 4.0+ of Neo4j, without having changed the password."
"Please manually log in, change your password, then update the config.DATABASE_URL call at "
"line 32 in this file"
)
else:
raise e
else:
raise ce
@pytest.fixture(autouse=True)
def account() -> Account:
account = AccountFactory()
account.status.verified = True
account.status.save()
return account
@pytest.fixture(autouse=True)
def user(account) -> User:
return UserFactory(uuid=account.id)
@pytest.fixture
def friend(user) -> User:
friend = UserFactory()
user.add_friend(friend)
return friend
@pytest.fixture(autouse=True)
def token(account):
return f"{jwt_settings.JWT_AUTH_HEADER_PREFIX} {get_token(account)}"
@pytest.fixture(autouse=True)
def auth_headers(token):
return {jwt_settings.JWT_AUTH_HEADER_NAME: token}
@pytest.fixture
def client_query(client):
def func(*args, **kwargs):
return graphql_query(*args, **kwargs, client=client)
return func
|
[
"warnings.simplefilter",
"graphene_django.utils.testing.graphql_query",
"neomodel.db.set_connection",
"graphql_jwt.shortcuts.get_token",
"pytest.fixture",
"kaffepause.users.test.factories.UserFactory",
"neomodel.clear_neo4j_database",
"os.environ.get",
"warnings.warn",
"kaffepause.accounts.test.factories.AccountFactory",
"neomodel.db.cypher_query",
"neomodel.change_neo4j_password"
] |
[((612, 640), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (626, 640), False, 'import pytest\n'), ((3998, 4026), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (4012, 4026), False, 'import pytest\n'), ((4167, 4195), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (4181, 4195), False, 'import pytest\n'), ((4383, 4411), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (4397, 4411), False, 'import pytest\n'), ((4508, 4536), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (4522, 4536), False, 'import pytest\n'), ((738, 762), 'neomodel.clear_neo4j_database', 'clear_neo4j_database', (['db'], {}), '(db)\n', (758, 762), False, 'from neomodel import change_neo4j_password, clear_neo4j_database, config, db\n'), ((777, 801), 'neomodel.clear_neo4j_database', 'clear_neo4j_database', (['db'], {}), '(db)\n', (797, 801), False, 'from neomodel import change_neo4j_password, clear_neo4j_database, config, db\n'), ((1755, 1787), 'warnings.simplefilter', 'warnings.simplefilter', (['"""default"""'], {}), "('default')\n", (1776, 1787), False, 'import warnings\n'), ((1815, 1885), 'os.environ.get', 'os.environ.get', (['"""NEO4J_BOLT_URL"""', '"""bolt://neo4j:foobar@localhost:7687"""'], {}), "('NEO4J_BOLT_URL', 'bolt://neo4j:foobar@localhost:7687')\n", (1829, 1885), False, 'import os\n'), ((4067, 4083), 'kaffepause.accounts.test.factories.AccountFactory', 'AccountFactory', ([], {}), '()\n', (4081, 4083), False, 'from kaffepause.accounts.test.factories import AccountFactory\n'), ((4234, 4262), 'kaffepause.users.test.factories.UserFactory', 'UserFactory', ([], {'uuid': 'account.id'}), '(uuid=account.id)\n', (4245, 4262), False, 'from kaffepause.users.test.factories import UserFactory\n'), ((4320, 4333), 'kaffepause.users.test.factories.UserFactory', 'UserFactory', ([], {}), '()\n', (4331, 4333), False, 'from kaffepause.users.test.factories import UserFactory\n'), ((2024, 2095), 'neomodel.db.cypher_query', 'db.cypher_query', (['"""MATCH (a) return count(a)>0 as database_is_populated"""'], {}), "('MATCH (a) return count(a)>0 as database_is_populated')\n", (2039, 2095), False, 'from neomodel import change_neo4j_password, clear_neo4j_database, config, db\n'), ((4706, 4751), 'graphene_django.utils.testing.graphql_query', 'graphql_query', (['*args'], {'client': 'client'}), '(*args, **kwargs, client=client)\n', (4719, 4751), False, 'from graphene_django.utils.testing import graphql_query\n'), ((2480, 2504), 'neomodel.clear_neo4j_database', 'clear_neo4j_database', (['db'], {}), '(db)\n', (2500, 2504), False, 'from neomodel import change_neo4j_password, clear_neo4j_database, config, db\n'), ((4484, 4502), 'graphql_jwt.shortcuts.get_token', 'get_token', (['account'], {}), '(account)\n', (4493, 4502), False, 'from graphql_jwt.shortcuts import get_token\n'), ((2774, 2863), 'warnings.warn', 'warnings.warn', (['"""New database with no password set, setting password to \'<PASSWORD>\'"""'], {}), '(\n "New database with no password set, setting password to \'<PASSWORD>\'")\n', (2787, 2863), False, 'import warnings\n'), ((2922, 2955), 'neomodel.change_neo4j_password', 'change_neo4j_password', (['db', '"""test"""'], {}), "(db, 'test')\n", (2943, 2955), False, 'from neomodel import change_neo4j_password, clear_neo4j_database, config, db\n'), ((3123, 3176), 'neomodel.db.set_connection', 'db.set_connection', (['"""bolt://neo4j:test@localhost:7687"""'], 
{}), "('bolt://neo4j:test@localhost:7687')\n", (3140, 3176), False, 'from neomodel import change_neo4j_password, clear_neo4j_database, config, db\n'), ((3193, 3308), 'warnings.warn', 'warnings.warn', (['"""Please \'export NEO4J_BOLT_URL=bolt://neo4j:test@localhost:7687\' for subsequent test runs"""'], {}), '(\n "Please \'export NEO4J_BOLT_URL=bolt://neo4j:test@localhost:7687\' for subsequent test runs"\n )\n', (3206, 3308), False, 'import warnings\n'), ((3595, 3819), 'warnings.warn', 'warnings.warn', (['"""You appear to be running on version 4.0+ of Neo4j, without having changed the password.Please manually log in, change your password, then update the config.DATABASE_URL call at line 32 in this file"""'], {}), "(\n 'You appear to be running on version 4.0+ of Neo4j, without having changed the password.Please manually log in, change your password, then update the config.DATABASE_URL call at line 32 in this file'\n )\n", (3608, 3819), False, 'import warnings\n')]
|
# Copyright (C) 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import proto
from google.protobuf.json_format import MessageToJson, Parse
def test_message_to_json():
class Squid(proto.Message):
mass_kg = proto.Field(proto.INT32, number=1)
s = Squid(mass_kg=100)
json = Squid.to_json(s)
json = json.replace(" ", "").replace("\n", "")
assert json == '{"massKg":100}'
def test_message_from_json():
class Squid(proto.Message):
mass_kg = proto.Field(proto.INT32, number=1)
json = """{
"massKg": 100
}
"""
s = Squid.from_json(json)
assert s == Squid(mass_kg=100)
def test_message_json_round_trip():
class Squid(proto.Message):
mass_kg = proto.Field(proto.INT32, number=1)
s = Squid(mass_kg=100)
json = Squid.to_json(s)
s2 = Squid.from_json(json)
assert s == s2
|
[
"proto.Field"
] |
[((749, 783), 'proto.Field', 'proto.Field', (['proto.INT32'], {'number': '(1)'}), '(proto.INT32, number=1)\n', (760, 783), False, 'import proto\n'), ((1009, 1043), 'proto.Field', 'proto.Field', (['proto.INT32'], {'number': '(1)'}), '(proto.INT32, number=1)\n', (1020, 1043), False, 'import proto\n'), ((1251, 1285), 'proto.Field', 'proto.Field', (['proto.INT32'], {'number': '(1)'}), '(proto.INT32, number=1)\n', (1262, 1285), False, 'import proto\n')]
|
import numpy as np
import pandas as pd
import sys
import os
from utils import DATA_DIR
class Dataset(object):
def __init__(self, DATA_NAME):
self.DATA_NAME = DATA_NAME
print("Initializing dataset:", DATA_NAME)
sys.stdout.flush()
data = pd.read_csv(os.path.join(DATA_DIR, "df_"+DATA_NAME+".csv"))
data['item_id'].loc[data['item_id'].isna()] = ''
data['user_id'].loc[data['user_id'].isna()] = ''
item_id_vals, item_ids = pd.factorize(data['item_id'].values)
user_id_vals, user_ids = pd.factorize(data['user_id'].values)
item_attr_vals, item_attr_ids = pd.factorize(data['model_attr'].values)
user_attr_vals, user_attr_ids = pd.factorize(data['user_attr'].values)
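        # Factorize string IDs into dense integer codes and keep the index arrays for lookups.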
tmp = dict(zip(data['item_id'].values, item_attr_vals))
self.item_attr = np.array([tmp[_i] for _i in item_ids], dtype=int)
tmp = dict(zip(data['user_id'].values, user_attr_vals))
self.user_attr = np.array([tmp[_i] for _i in user_ids], dtype=int)
data['item_id'] = item_id_vals
data['user_id'] = user_id_vals
self.item_ids = item_ids
self.user_ids = user_ids
self.item_attr_ids = item_attr_ids
self.user_attr_ids = user_attr_ids
self.n_item = data['item_id'].max()+1
self.n_user = data['user_id'].max()+1
self.data = data[['user_id','item_id','rating','split','model_attr','user_attr']]
print("Successfully initialized!")
print(self.data.shape[0], "training records")
print("about", self.n_user, "users and", self.n_item, "items are loaded!")
sys.stdout.flush()
def get_user_item_train_map(self):
data = self.data
user_item_train_map = (self.data.loc[(self.data['rating']>=4) & (self.data['split'] == 0)]).groupby(
['user_id'])['item_id'].apply(list).to_dict()
return user_item_train_map
def get_neg_samples(self, N_NEG=10):
user_item_map = (self.data.loc[self.data['rating']>=4]).groupby(['user_id'])['item_id'].apply(list).to_dict()
print("Start sampling negative examples ...")
neg_samples = []
count = 0
print("current progress for", self.n_user, "users: ", end="")
sys.stdout.flush()
for u in range(self.n_user):
if count % 5000 == 0:
print(count, end=", ")
sys.stdout.flush()
count += 1
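            # Sample N_NEG items uniformly from those the user has not rated 4 or higher.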
p = np.ones(self.n_item)
if u in user_item_map:
pos_items = np.array(user_item_map[u], dtype=int)
p[pos_items] = 0
p /= np.sum(p)
neg_items = np.random.choice(self.n_item, size=N_NEG, p=p)
neg_samples.append(neg_items)
print("done!")
sys.stdout.flush()
return np.array(neg_samples, dtype=int)
|
[
"numpy.sum",
"numpy.ones",
"numpy.array",
"sys.stdout.flush",
"numpy.random.choice",
"pandas.factorize",
"os.path.join"
] |
[((249, 267), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (265, 267), False, 'import sys\n'), ((500, 536), 'pandas.factorize', 'pd.factorize', (["data['item_id'].values"], {}), "(data['item_id'].values)\n", (512, 536), True, 'import pandas as pd\n'), ((570, 606), 'pandas.factorize', 'pd.factorize', (["data['user_id'].values"], {}), "(data['user_id'].values)\n", (582, 606), True, 'import pandas as pd\n'), ((647, 686), 'pandas.factorize', 'pd.factorize', (["data['model_attr'].values"], {}), "(data['model_attr'].values)\n", (659, 686), True, 'import pandas as pd\n'), ((727, 765), 'pandas.factorize', 'pd.factorize', (["data['user_attr'].values"], {}), "(data['user_attr'].values)\n", (739, 765), True, 'import pandas as pd\n'), ((864, 913), 'numpy.array', 'np.array', (['[tmp[_i] for _i in item_ids]'], {'dtype': 'int'}), '([tmp[_i] for _i in item_ids], dtype=int)\n', (872, 913), True, 'import numpy as np\n'), ((1003, 1052), 'numpy.array', 'np.array', (['[tmp[_i] for _i in user_ids]'], {'dtype': 'int'}), '([tmp[_i] for _i in user_ids], dtype=int)\n', (1011, 1052), True, 'import numpy as np\n'), ((1690, 1708), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1706, 1708), False, 'import sys\n'), ((2365, 2383), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2381, 2383), False, 'import sys\n'), ((2901, 2919), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2917, 2919), False, 'import sys\n'), ((2944, 2976), 'numpy.array', 'np.array', (['neg_samples'], {'dtype': 'int'}), '(neg_samples, dtype=int)\n', (2952, 2976), True, 'import numpy as np\n'), ((296, 346), 'os.path.join', 'os.path.join', (['DATA_DIR', "('df_' + DATA_NAME + '.csv')"], {}), "(DATA_DIR, 'df_' + DATA_NAME + '.csv')\n", (308, 346), False, 'import os\n'), ((2575, 2595), 'numpy.ones', 'np.ones', (['self.n_item'], {}), '(self.n_item)\n', (2582, 2595), True, 'import numpy as np\n'), ((2747, 2756), 'numpy.sum', 'np.sum', (['p'], {}), '(p)\n', (2753, 2756), True, 'import numpy as np\n'), ((2781, 2827), 'numpy.random.choice', 'np.random.choice', (['self.n_item'], {'size': 'N_NEG', 'p': 'p'}), '(self.n_item, size=N_NEG, p=p)\n', (2797, 2827), True, 'import numpy as np\n'), ((2517, 2535), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2533, 2535), False, 'import sys\n'), ((2659, 2696), 'numpy.array', 'np.array', (['user_item_map[u]'], {'dtype': 'int'}), '(user_item_map[u], dtype=int)\n', (2667, 2696), True, 'import numpy as np\n')]
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name='HPexome',
version='1.2.1',
author="<NAME>",
author_email="<EMAIL>",
description="An automated tool for processing whole-exome sequencing data",
long_description=long_description,
long_description_content_type="text/markdown",
url="http://bcblab.org/hpexome",
packages=setuptools.find_packages(),
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3"
],
install_requires=[
'Click'
],
entry_points='''
[console_scripts]
hpexome=hpexome.hpexome:hpexome
''',
project_urls={
"Source Code": "https://github.com/labbcb/hpexome",
"Bug Tracker": "https://github.com/labbcb/hpexome/issues"
}
)
|
[
"setuptools.find_packages"
] |
[((416, 442), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (440, 442), False, 'import setuptools\n')]
|
import os
import random
import syft as sy
import pandas as pd
import numpy as np
from PIL import Image
from tqdm import tqdm
from torch import ( # pylint:disable=no-name-in-module
manual_seed,
stack,
cat,
std_mean,
save,
is_tensor,
from_numpy,
randperm,
default_generator,
)
from torch._utils import _accumulate
import albumentations as a
from copy import deepcopy
from torch.utils import data as torchdata
from torchvision.datasets import MNIST
from torchvision import transforms
from torchvision.datasets.folder import default_loader
from os.path import splitext
from typing import Dict, Union, Set, Callable
from pathlib import Path
from .dicomtools import DicomLoader
class AlbumentationsTorchTransform:
def __init__(self, transform, **kwargs):
# print("init albu transform wrapper")
self.transform = transform
self.kwargs = kwargs
def __call__(self, img):
# print("call albu transform wrapper")
if Image.isImageType(img):
img = np.array(img)
elif is_tensor(img):
img = img.cpu().numpy()
img = self.transform(image=img, **self.kwargs)["image"]
# if img.max() > 1:
# img = a.augmentations.functional.to_float(img, max_value=255)
img = from_numpy(img)
if img.shape[-1] < img.shape[0]:
img = img.permute(2, 0, 1)
return img
class CombinedLoader:
"""Class that combines several data loaders and their extensions.
Args:
mapping (Dict): Dictionary that maps loader names to tuples
consisting of (corresponding extensions, loader method)
"""
def __init__(
self,
mapping: Dict[str, Dict[str, Union[Set[str], Callable]]] = {
"default": {
"extensions": {
".jpg",
".jpeg",
".png",
".ppm",
".bmp",
".pgm",
".tif",
".tiff",
".webp",
},
"loader": default_loader,
},
"dicom": {"extensions": {".dcm", ".dicom"}, "loader": DicomLoader(3)},
},
):
self.extensions = set()
self.mapping = mapping
self.ext_to_loader_name = dict()
for loader_name, defining_dict in mapping.items():
self.extensions |= defining_dict["extensions"]
for ext in defining_dict["extensions"]:
if ext in self.ext_to_loader_name:
raise RuntimeError(
"Extension {:s} was passed for multiple loaders".format(ext)
)
self.ext_to_loader_name[ext] = loader_name
def __call__(self, path: Path, **kwargs):
"""Apply loader to path
Args:
path (Path): path to file.
kwargs: kwargs passed to load methods
Returns:
Image: a PIL image of the given path
Raises:
RuntimeError: If loader for path extension not specified.
"""
file_ending = splitext(path)[1].lower()
if file_ending in self.extensions:
return self.mapping[self.ext_to_loader_name[file_ending]]["loader"](
path, **kwargs
)
else:
raise RuntimeError(
"file extension does not match specified supported extensions. "
"Please provide the matching loader for the {:s} extension.".format(
file_ending
)
)
def change_channels(self, num_channels: int):
"""Change the number of channels that are loaded (Default: 3)
Args:
num_channels (int): Number of channels. Currently only 1 and 3 supported
Raises:
RuntimeError: if num_channels is not 1 or 3
"""
if num_channels not in [1, 3]:
raise RuntimeError("Only 1 or 3 channels supported yet.")
self.mapping["default"]["loader"] = (
single_channel_loader if num_channels == 1 else default_loader
)
self.mapping["dicom"]["loader"] = DicomLoader(num_channels)
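# A minimal usage sketch for CombinedLoader, kept as comments so the module is
# unchanged (the file names below are hypothetical):
#   loader = CombinedLoader()
#   xray = loader(Path("chest_xray.png"))   # dispatched to default_loader
#   scan = loader(Path("ct_slice.dcm"))     # dispatched to DicomLoader
#   loader.change_channels(1)               # single-channel loading from here on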
def create_albu_transform(args, mean, std):
train_tf = transforms.RandomAffine(
degrees=args.rotation,
translate=(args.translate, args.translate),
scale=(1.0 - args.scale, 1.0 + args.scale),
shear=args.shear,
# fillcolor=0,
)
start_transformations = [
a.Resize(args.inference_resolution, args.inference_resolution),
a.RandomCrop(args.train_resolution, args.train_resolution),
]
if args.clahe:
start_transformations.extend(
[
a.FromFloat(dtype="uint8", max_value=1.0),
a.CLAHE(always_apply=True, clip_limit=(1, 1)),
]
)
train_tf_albu = [
a.VerticalFlip(p=args.individual_albu_probs),
]
if args.randomgamma:
train_tf_albu.append(a.RandomGamma(p=args.individual_albu_probs))
if args.randombrightness:
train_tf_albu.append(a.RandomBrightness(p=args.individual_albu_probs))
if args.blur:
train_tf_albu.append(a.Blur(p=args.individual_albu_probs))
if args.elastic:
train_tf_albu.append(a.ElasticTransform(p=args.individual_albu_probs))
if args.optical_distortion:
train_tf_albu.append(a.OpticalDistortion(p=args.individual_albu_probs))
if args.grid_distortion:
train_tf_albu.append(a.GridDistortion(p=args.individual_albu_probs))
if args.grid_shuffle:
train_tf_albu.append(a.RandomGridShuffle(p=args.individual_albu_probs))
if args.hsv:
train_tf_albu.append(a.HueSaturationValue(p=args.individual_albu_probs))
if args.invert:
train_tf_albu.append(a.InvertImg(p=args.individual_albu_probs))
if args.cutout:
train_tf_albu.append(
a.Cutout(
num_holes=5, max_h_size=80, max_w_size=80, p=args.individual_albu_probs
)
)
if args.shadow:
assert args.pretrained, "RandomShadows needs 3 channels"
train_tf_albu.append(a.RandomShadow(p=args.individual_albu_probs))
if args.fog:
assert args.pretrained, "RandomFog needs 3 channels"
train_tf_albu.append(a.RandomFog(p=args.individual_albu_probs))
if args.sun_flare:
assert args.pretrained, "RandomSunFlare needs 3 channels"
train_tf_albu.append(a.RandomSunFlare(p=args.individual_albu_probs))
if args.solarize:
train_tf_albu.append(a.Solarize(p=args.individual_albu_probs))
if args.equalize:
train_tf_albu.append(a.Equalize(p=args.individual_albu_probs))
if args.grid_dropout:
train_tf_albu.append(a.GridDropout(p=args.individual_albu_probs))
train_tf_albu.append(a.GaussNoise(var_limit=args.noise_std ** 2, p=args.noise_prob))
end_transformations = [
a.ToFloat(max_value=255.0),
a.Normalize(mean, std, max_pixel_value=1.0),
]
if not args.pretrained:
end_transformations.append(
a.Lambda(image=lambda x, **kwargs: x[:, :, np.newaxis])
)
train_tf_albu = AlbumentationsTorchTransform(
a.Compose(
[
a.Compose(start_transformations),
a.Compose(train_tf_albu, p=args.albu_prob),
a.Compose(end_transformations),
]
)
)
return transforms.Compose([train_tf, train_tf_albu,])
def calc_mean_std(dataset, save_folder=None):
"""
Calculates the mean and standard deviation of `dataset` and
saves them to `save_folder`.
Needs a dataset where all images have the same size
"""
accumulated_data = []
for d in tqdm(
dataset, total=len(dataset), leave=False, desc="accumulate data in dataset"
):
if type(d) is tuple or type(d) is list:
d = d[0]
accumulated_data.append(d)
if isinstance(dataset, torchdata.Dataset):
accumulated_data = stack(accumulated_data)
elif isinstance(dataset, torchdata.DataLoader):
accumulated_data = cat(accumulated_data)
else:
raise NotImplementedError("don't know how to process this data input class")
if accumulated_data.shape[1] in [1, 3]: # ugly hack
dims = (0, *range(2, len(accumulated_data.shape)))
else:
dims = (*range(len(accumulated_data.shape)),)
std, mean = std_mean(accumulated_data, dim=dims)
if save_folder:
save(stack([mean, std]), os.path.join(save_folder, "mean_std.pt"))
return mean, std
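# Hedged usage sketch for calc_mean_std, assuming a dataset of equally sized
# tensors (kept as comments; mirrors PPPP.__compute_mean_std__ below):
#   ds = PPPP(train=True, transform=transforms.ToTensor())
#   mean, std = calc_mean_std(ds, save_folder="data")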
def single_channel_loader(filename):
"""Converts `filename` to a grayscale PIL Image
"""
with open(filename, "rb") as f:
img = Image.open(f).convert("L")
return img.copy()
class LabelMNIST(MNIST):
def __init__(self, labels, *args, **kwargs):
super().__init__(*args, **kwargs)
indices = np.isin(self.targets, labels).astype("bool")
self.data = self.data[indices]
self.targets = self.targets[indices]
class PathDataset(torchdata.Dataset):
def __init__(
self,
root,
transform=None,
loader=CombinedLoader(),
extensions=[
".jpg",
".jpeg",
".png",
".ppm",
".bmp",
".pgm",
".tif",
".tiff",
".webp",
".dcm",
".dicom",
],
):
super(PathDataset, self).__init__()
self.root = root
self.transform = transform
self.loader = loader
self.imgs = [
f
for f in os.listdir(root)
if os.path.splitext(f)[1].lower() in extensions
and not os.path.split(f)[1].lower().startswith("._")
]
def __len__(self):
return len(self.imgs)
def __getitem__(self, idx):
img_path = self.imgs[idx]
img = self.loader(os.path.join(self.root, img_path))
if self.transform:
img = self.transform(img)
return img
class RemoteTensorDataset(torchdata.Dataset):
def __init__(self, tensor):
self.tensor = tensor
def __len__(self):
return self.tensor.shape[0]
def __getitem__(self, idx):
return self.tensor[idx].copy()
class ImageFolderFromCSV(torchdata.Dataset):
def __init__(
self, csv_path, img_folder_path, transform=None, target_transform=None
):
super().__init__()
self.transform = transform
self.target_transform = target_transform
self.img_folder_path = img_folder_path
self.img_files = [
i for i in os.listdir(img_folder_path) if not i.startswith(".")
]
metastats = pd.read_csv(csv_path)
metastats["class_label"] = metastats.apply(
ImageFolderFromCSV.__meta_to_class__, axis=1
)
self.categorize_dict = dict(
zip(metastats.X_ray_image_name, metastats.class_label)
)
        for img in list(self.img_files):
            assert (
                img in self.categorize_dict.keys()
            ), "img label not known {:s}".format(str(img))
            if self.categorize_dict[img] == -1:
                self.img_files.remove(img)
                print("Ignore image {:s} because its category is uncertain".format(img))
@staticmethod
def __meta_to_class__(row):
if row["Label"] == "Normal":
return 0
if row["Label"] == "Pnemonia": # i know this is a typo but was in original csv
if row["Label_1_Virus_category"] == "bacteria":
return 1
if row["Label_1_Virus_category"] == "Virus":
return 2
return -1
def __getitem__(self, i):
img_path = self.img_files[i]
label = self.categorize_dict[img_path]
img = single_channel_loader(os.path.join(self.img_folder_path, img_path))
if self.transform:
img = self.transform(img)
if self.target_transform:
label = self.target_transform(label)
return img, label
def __len__(self):
return len(self.img_files)
class PPPP(torchdata.Dataset):
def __init__(
self, label_path="data/Labels.csv", train=False, transform=None, seed=1,
):
super().__init__()
random.seed(seed)
manual_seed(seed)
self.train = train
self.labels = pd.read_csv(label_path)
self.labels = self.labels[
self.labels["Dataset_type"] == ("TRAIN" if train else "TEST")
]
self.transform = transform
"""
Split into train and validation set
if self.train:
indices = [
i
for i in range(len(self.labels))
if ((i % self.val_split) != 0 and self.val)
or (not self.val and (i % self.val_split) == 0)
]
self.labels = self.labels.drop(index=indices)
"""
def __len__(self):
return len(self.labels)
def __getitem__(self, index):
row = self.labels.iloc[index]
label = row["Numeric_Label"]
path = "train" if self.train else "test"
path = os.path.join("data", path, row["X_ray_image_name"])
img = single_channel_loader(path)
if self.transform:
img = self.transform(img)
return img, label
# def get_class_name(self, numeric_label):
# return self.class_names[numeric_label]
"""
    Works only if torchdata.random_split (torch.utils.data.random_split) has not been applied
"""
def get_class_occurances(self):
return dict(self.labels["Numeric_Label"].value_counts())
def __compute_mean_std__(self):
calc_mean_std(
self, save_folder="data",
)
##This is from torch.data.utils and adapted for our purposes
class Subset(torchdata.Dataset):
def __init__(self, dataset, indices):
self.dataset = deepcopy(dataset)
self.indices = indices
def __getitem__(self, idx):
return self.dataset[self.indices[idx]]
def __len__(self):
return len(self.indices)
def random_split(dataset, lengths, generator=default_generator):
if sum(lengths) != len(dataset):
raise ValueError(
"Sum of input lengths does not equal the length of the input dataset!"
)
indices = randperm(sum(lengths), generator=generator).tolist()
return [
Subset(dataset, indices[offset - length : offset])
for offset, length in zip(_accumulate(lengths), lengths)
]
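# Hedged usage sketch for the adapted random_split/Subset pair (the split sizes
# are assumptions, kept as comments):
#   ds = PPPP(train=True, transform=transforms.ToTensor())
#   train_ds, val_ds = random_split(ds, [len(ds) - 500, 500])
# Note that Subset deep-copies the wrapped dataset, so the two splits do not
# share state.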
if __name__ == "__main__":
# import matplotlib.pyplot as plt
import sys
from tqdm import tqdm
import numpy as np
sys.path.append(
os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
)
from torchlib.utils import AddGaussianNoise
ds = PPPP(train=True, transform=transforms.ToTensor())
print("Class distribution")
print(ds.get_class_occurances())
sizes = []
for data, _ in tqdm(ds, total=len(ds), leave=False):
sizes.append(data.size()[1:])
sizes = np.array(sizes)
print(
"data resolution stats: \n\tmin: {:s}\n\tmax: {:s}\n\tmean: {:s}\n\tmedian: {:s}".format(
str(np.min(sizes, axis=0)),
str(np.max(sizes, axis=0)),
str(np.mean(sizes, axis=0)),
str(np.median(sizes, axis=0)),
)
)
ds = PPPP(train=False)
L = len(ds)
print("length test set: {:d}".format(L))
img, label = ds[1]
img.show()
tf = transforms.Compose(
[transforms.Resize(224), transforms.CenterCrop(224), transforms.ToTensor(),]
)
ds = PPPP(train=True, transform=tf)
ds.__compute_mean_std__()
L = len(ds)
print("length train set: {:d}".format(L))
from matplotlib import pyplot as plt
ds = PPPP()
hist = ds.labels.hist(bins=3, column="Numeric_Label")
plt.show()
|
[
"numpy.isin",
"albumentations.Lambda",
"albumentations.RandomSunFlare",
"albumentations.GaussNoise",
"albumentations.Resize",
"pandas.read_csv",
"albumentations.RandomShadow",
"torch.cat",
"albumentations.RandomFog",
"numpy.mean",
"albumentations.Normalize",
"torch.std_mean",
"os.path.join",
"PIL.Image.isImageType",
"albumentations.Blur",
"albumentations.Cutout",
"os.path.dirname",
"albumentations.OpticalDistortion",
"numpy.max",
"torchvision.transforms.Compose",
"random.seed",
"torchvision.transforms.CenterCrop",
"albumentations.CLAHE",
"torch.is_tensor",
"copy.deepcopy",
"matplotlib.pyplot.show",
"albumentations.GridDistortion",
"albumentations.GridDropout",
"torch.manual_seed",
"numpy.median",
"albumentations.RandomGridShuffle",
"numpy.min",
"albumentations.RandomBrightness",
"albumentations.RandomGamma",
"albumentations.VerticalFlip",
"os.listdir",
"torch.from_numpy",
"albumentations.ToFloat",
"torchvision.transforms.Resize",
"torch._utils._accumulate",
"torchvision.transforms.RandomAffine",
"albumentations.Compose",
"torch.stack",
"albumentations.InvertImg",
"albumentations.Solarize",
"albumentations.HueSaturationValue",
"PIL.Image.open",
"numpy.array",
"albumentations.FromFloat",
"os.path.splitext",
"albumentations.ElasticTransform",
"albumentations.RandomCrop",
"os.path.split",
"albumentations.Equalize",
"torchvision.transforms.ToTensor"
] |
[((4313, 4474), 'torchvision.transforms.RandomAffine', 'transforms.RandomAffine', ([], {'degrees': 'args.rotation', 'translate': '(args.translate, args.translate)', 'scale': '(1.0 - args.scale, 1.0 + args.scale)', 'shear': 'args.shear'}), '(degrees=args.rotation, translate=(args.translate,\n args.translate), scale=(1.0 - args.scale, 1.0 + args.scale), shear=args\n .shear)\n', (4336, 4474), False, 'from torchvision import transforms\n'), ((7495, 7540), 'torchvision.transforms.Compose', 'transforms.Compose', (['[train_tf, train_tf_albu]'], {}), '([train_tf, train_tf_albu])\n', (7513, 7540), False, 'from torchvision import transforms\n'), ((8490, 8526), 'torch.std_mean', 'std_mean', (['accumulated_data'], {'dim': 'dims'}), '(accumulated_data, dim=dims)\n', (8498, 8526), False, 'from torch import manual_seed, stack, cat, std_mean, save, is_tensor, from_numpy, randperm, default_generator\n'), ((15169, 15184), 'numpy.array', 'np.array', (['sizes'], {}), '(sizes)\n', (15177, 15184), True, 'import numpy as np\n'), ((15977, 15987), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (15985, 15987), True, 'from matplotlib import pyplot as plt\n'), ((993, 1015), 'PIL.Image.isImageType', 'Image.isImageType', (['img'], {}), '(img)\n', (1010, 1015), False, 'from PIL import Image\n'), ((1296, 1311), 'torch.from_numpy', 'from_numpy', (['img'], {}), '(img)\n', (1306, 1311), False, 'from torch import manual_seed, stack, cat, std_mean, save, is_tensor, from_numpy, randperm, default_generator\n'), ((4569, 4631), 'albumentations.Resize', 'a.Resize', (['args.inference_resolution', 'args.inference_resolution'], {}), '(args.inference_resolution, args.inference_resolution)\n', (4577, 4631), True, 'import albumentations as a\n'), ((4641, 4699), 'albumentations.RandomCrop', 'a.RandomCrop', (['args.train_resolution', 'args.train_resolution'], {}), '(args.train_resolution, args.train_resolution)\n', (4653, 4699), True, 'import albumentations as a\n'), ((4954, 4998), 'albumentations.VerticalFlip', 'a.VerticalFlip', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (4968, 4998), True, 'import albumentations as a\n'), ((6884, 6946), 'albumentations.GaussNoise', 'a.GaussNoise', ([], {'var_limit': '(args.noise_std ** 2)', 'p': 'args.noise_prob'}), '(var_limit=args.noise_std ** 2, p=args.noise_prob)\n', (6896, 6946), True, 'import albumentations as a\n'), ((6984, 7010), 'albumentations.ToFloat', 'a.ToFloat', ([], {'max_value': '(255.0)'}), '(max_value=255.0)\n', (6993, 7010), True, 'import albumentations as a\n'), ((7020, 7063), 'albumentations.Normalize', 'a.Normalize', (['mean', 'std'], {'max_pixel_value': '(1.0)'}), '(mean, std, max_pixel_value=1.0)\n', (7031, 7063), True, 'import albumentations as a\n'), ((8074, 8097), 'torch.stack', 'stack', (['accumulated_data'], {}), '(accumulated_data)\n', (8079, 8097), False, 'from torch import manual_seed, stack, cat, std_mean, save, is_tensor, from_numpy, randperm, default_generator\n'), ((10809, 10830), 'pandas.read_csv', 'pd.read_csv', (['csv_path'], {}), '(csv_path)\n', (10820, 10830), True, 'import pandas as pd\n'), ((12391, 12408), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (12402, 12408), False, 'import random\n'), ((12417, 12434), 'torch.manual_seed', 'manual_seed', (['seed'], {}), '(seed)\n', (12428, 12434), False, 'from torch import manual_seed, stack, cat, std_mean, save, is_tensor, from_numpy, randperm, default_generator\n'), ((12484, 12507), 'pandas.read_csv', 'pd.read_csv', (['label_path'], {}), '(label_path)\n', 
(12495, 12507), True, 'import pandas as pd\n'), ((13270, 13321), 'os.path.join', 'os.path.join', (['"""data"""', 'path', "row['X_ray_image_name']"], {}), "('data', path, row['X_ray_image_name'])\n", (13282, 13321), False, 'import os\n'), ((14008, 14025), 'copy.deepcopy', 'deepcopy', (['dataset'], {}), '(dataset)\n', (14016, 14025), False, 'from copy import deepcopy\n'), ((1035, 1048), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (1043, 1048), True, 'import numpy as np\n'), ((1062, 1076), 'torch.is_tensor', 'is_tensor', (['img'], {}), '(img)\n', (1071, 1076), False, 'from torch import manual_seed, stack, cat, std_mean, save, is_tensor, from_numpy, randperm, default_generator\n'), ((5060, 5103), 'albumentations.RandomGamma', 'a.RandomGamma', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (5073, 5103), True, 'import albumentations as a\n'), ((5164, 5212), 'albumentations.RandomBrightness', 'a.RandomBrightness', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (5182, 5212), True, 'import albumentations as a\n'), ((5261, 5297), 'albumentations.Blur', 'a.Blur', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (5267, 5297), True, 'import albumentations as a\n'), ((5349, 5397), 'albumentations.ElasticTransform', 'a.ElasticTransform', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (5367, 5397), True, 'import albumentations as a\n'), ((5460, 5509), 'albumentations.OpticalDistortion', 'a.OpticalDistortion', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (5479, 5509), True, 'import albumentations as a\n'), ((5569, 5615), 'albumentations.GridDistortion', 'a.GridDistortion', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (5585, 5615), True, 'import albumentations as a\n'), ((5672, 5721), 'albumentations.RandomGridShuffle', 'a.RandomGridShuffle', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (5691, 5721), True, 'import albumentations as a\n'), ((5769, 5819), 'albumentations.HueSaturationValue', 'a.HueSaturationValue', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (5789, 5819), True, 'import albumentations as a\n'), ((5870, 5911), 'albumentations.InvertImg', 'a.InvertImg', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (5881, 5911), True, 'import albumentations as a\n'), ((5975, 6061), 'albumentations.Cutout', 'a.Cutout', ([], {'num_holes': '(5)', 'max_h_size': '(80)', 'max_w_size': '(80)', 'p': 'args.individual_albu_probs'}), '(num_holes=5, max_h_size=80, max_w_size=80, p=args.\n individual_albu_probs)\n', (5983, 6061), True, 'import albumentations as a\n'), ((6211, 6255), 'albumentations.RandomShadow', 'a.RandomShadow', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (6225, 6255), True, 'import albumentations as a\n'), ((6364, 6405), 'albumentations.RandomFog', 'a.RandomFog', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (6375, 6405), True, 'import albumentations as a\n'), ((6525, 6571), 'albumentations.RandomSunFlare', 'a.RandomSunFlare', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (6541, 6571), True, 'import albumentations as a\n'), ((6624, 6664), 'albumentations.Solarize', 'a.Solarize', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (6634, 6664), True, 'import albumentations as 
a\n'), ((6717, 6757), 'albumentations.Equalize', 'a.Equalize', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (6727, 6757), True, 'import albumentations as a\n'), ((6814, 6857), 'albumentations.GridDropout', 'a.GridDropout', ([], {'p': 'args.individual_albu_probs'}), '(p=args.individual_albu_probs)\n', (6827, 6857), True, 'import albumentations as a\n'), ((7147, 7202), 'albumentations.Lambda', 'a.Lambda', ([], {'image': '(lambda x, **kwargs: x[:, :, np.newaxis])'}), '(image=lambda x, **kwargs: x[:, :, np.newaxis])\n', (7155, 7202), True, 'import albumentations as a\n'), ((8177, 8198), 'torch.cat', 'cat', (['accumulated_data'], {}), '(accumulated_data)\n', (8180, 8198), False, 'from torch import manual_seed, stack, cat, std_mean, save, is_tensor, from_numpy, randperm, default_generator\n'), ((8560, 8578), 'torch.stack', 'stack', (['[mean, std]'], {}), '([mean, std])\n', (8565, 8578), False, 'from torch import manual_seed, stack, cat, std_mean, save, is_tensor, from_numpy, randperm, default_generator\n'), ((8580, 8620), 'os.path.join', 'os.path.join', (['save_folder', '"""mean_std.pt"""'], {}), "(save_folder, 'mean_std.pt')\n", (8592, 8620), False, 'import os\n'), ((10006, 10039), 'os.path.join', 'os.path.join', (['self.root', 'img_path'], {}), '(self.root, img_path)\n', (10018, 10039), False, 'import os\n'), ((11938, 11982), 'os.path.join', 'os.path.join', (['self.img_folder_path', 'img_path'], {}), '(self.img_folder_path, img_path)\n', (11950, 11982), False, 'import os\n'), ((14953, 14974), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (14972, 14974), False, 'from torchvision import transforms\n'), ((15641, 15663), 'torchvision.transforms.Resize', 'transforms.Resize', (['(224)'], {}), '(224)\n', (15658, 15663), False, 'from torchvision import transforms\n'), ((15665, 15691), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['(224)'], {}), '(224)\n', (15686, 15691), False, 'from torchvision import transforms\n'), ((15693, 15714), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (15712, 15714), False, 'from torchvision import transforms\n'), ((4794, 4835), 'albumentations.FromFloat', 'a.FromFloat', ([], {'dtype': '"""uint8"""', 'max_value': '(1.0)'}), "(dtype='uint8', max_value=1.0)\n", (4805, 4835), True, 'import albumentations as a\n'), ((4853, 4898), 'albumentations.CLAHE', 'a.CLAHE', ([], {'always_apply': '(True)', 'clip_limit': '(1, 1)'}), '(always_apply=True, clip_limit=(1, 1))\n', (4860, 4898), True, 'import albumentations as a\n'), ((7312, 7344), 'albumentations.Compose', 'a.Compose', (['start_transformations'], {}), '(start_transformations)\n', (7321, 7344), True, 'import albumentations as a\n'), ((7362, 7404), 'albumentations.Compose', 'a.Compose', (['train_tf_albu'], {'p': 'args.albu_prob'}), '(train_tf_albu, p=args.albu_prob)\n', (7371, 7404), True, 'import albumentations as a\n'), ((7422, 7452), 'albumentations.Compose', 'a.Compose', (['end_transformations'], {}), '(end_transformations)\n', (7431, 7452), True, 'import albumentations as a\n'), ((8792, 8805), 'PIL.Image.open', 'Image.open', (['f'], {}), '(f)\n', (8802, 8805), False, 'from PIL import Image\n'), ((8981, 9010), 'numpy.isin', 'np.isin', (['self.targets', 'labels'], {}), '(self.targets, labels)\n', (8988, 9010), True, 'import numpy as np\n'), ((9707, 9723), 'os.listdir', 'os.listdir', (['root'], {}), '(root)\n', (9717, 9723), False, 'import os\n'), ((10725, 10752), 'os.listdir', 'os.listdir', (['img_folder_path'], 
{}), '(img_folder_path)\n', (10735, 10752), False, 'import os\n'), ((14591, 14611), 'torch._utils._accumulate', '_accumulate', (['lengths'], {}), '(lengths)\n', (14602, 14611), False, 'from torch._utils import _accumulate\n'), ((14818, 14843), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (14833, 14843), False, 'import os\n'), ((15310, 15331), 'numpy.min', 'np.min', (['sizes'], {'axis': '(0)'}), '(sizes, axis=0)\n', (15316, 15331), True, 'import numpy as np\n'), ((15350, 15371), 'numpy.max', 'np.max', (['sizes'], {'axis': '(0)'}), '(sizes, axis=0)\n', (15356, 15371), True, 'import numpy as np\n'), ((15390, 15412), 'numpy.mean', 'np.mean', (['sizes'], {'axis': '(0)'}), '(sizes, axis=0)\n', (15397, 15412), True, 'import numpy as np\n'), ((15431, 15455), 'numpy.median', 'np.median', (['sizes'], {'axis': '(0)'}), '(sizes, axis=0)\n', (15440, 15455), True, 'import numpy as np\n'), ((3167, 3181), 'os.path.splitext', 'splitext', (['path'], {}), '(path)\n', (3175, 3181), False, 'from os.path import splitext\n'), ((9739, 9758), 'os.path.splitext', 'os.path.splitext', (['f'], {}), '(f)\n', (9755, 9758), False, 'import os\n'), ((9804, 9820), 'os.path.split', 'os.path.split', (['f'], {}), '(f)\n', (9817, 9820), False, 'import os\n')]
|
from math import trunc
def two_decimal_places(number):
factor = float(10 ** 2)
return trunc(number * factor) / factor
|
[
"math.trunc"
] |
[((96, 118), 'math.trunc', 'trunc', (['(number * factor)'], {}), '(number * factor)\n', (101, 118), False, 'from math import trunc\n')]
|
import warnings
warnings.simplefilter("ignore", UserWarning)
warnings.simplefilter("ignore", FutureWarning)
import argparse
import os
import pandas as pd
import numpy as np
import torch
from torch import nn
from torch.utils.data import DataLoader
from tqdm import tqdm
from collections import defaultdict
from catalyst.utils import any2device
from pytorch_toolbelt.utils import to_numpy, fs
from pytorch_toolbelt.utils.catalyst import report_checkpoint
from alaska2 import *
from alaska2.dataset import get_train_except_holdout
@torch.no_grad()
def compute_trn_predictions(model, dataset, fp16=False, batch_size=1, workers=0) -> pd.DataFrame:
df = defaultdict(list)
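    # single pass over the dataset: accumulate image ids, ground truth and model
    # outputs (plus any *_tta variants) into per-column lists keyed by the shared
    # INPUT_/OUTPUT_ constants, then convert them to a DataFrame at the end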
for batch in tqdm(
DataLoader(
dataset, batch_size=batch_size, num_workers=workers, shuffle=False, drop_last=False, pin_memory=True
)
):
batch = any2device(batch, device="cuda")
if fp16 and INPUT_FEATURES_JPEG_FLOAT in batch:
batch[INPUT_FEATURES_JPEG_FLOAT] = batch[INPUT_FEATURES_JPEG_FLOAT].half()
if INPUT_TRUE_MODIFICATION_FLAG in batch:
y_trues = to_numpy(batch[INPUT_TRUE_MODIFICATION_FLAG]).flatten()
df[INPUT_TRUE_MODIFICATION_FLAG].extend(y_trues)
if INPUT_TRUE_MODIFICATION_TYPE in batch:
y_labels = to_numpy(batch[INPUT_TRUE_MODIFICATION_TYPE]).flatten()
df[INPUT_TRUE_MODIFICATION_TYPE].extend(y_labels)
image_ids = batch[INPUT_IMAGE_ID_KEY]
df[INPUT_IMAGE_ID_KEY].extend(image_ids)
outputs = model(**batch)
if OUTPUT_PRED_MODIFICATION_FLAG in outputs:
df[OUTPUT_PRED_MODIFICATION_FLAG].extend(to_numpy(outputs[OUTPUT_PRED_MODIFICATION_FLAG]).flatten())
if OUTPUT_PRED_MODIFICATION_TYPE in outputs:
df[OUTPUT_PRED_MODIFICATION_TYPE].extend(outputs[OUTPUT_PRED_MODIFICATION_TYPE].tolist())
if OUTPUT_PRED_EMBEDDING in outputs:
df[OUTPUT_PRED_EMBEDDING].extend(outputs[OUTPUT_PRED_EMBEDDING].tolist())
# Save also TTA predictions for future use
if OUTPUT_PRED_MODIFICATION_FLAG + "_tta" in outputs:
df[OUTPUT_PRED_MODIFICATION_FLAG + "_tta"].extend(
to_numpy(outputs[OUTPUT_PRED_MODIFICATION_FLAG + "_tta"]).tolist()
)
if OUTPUT_PRED_MODIFICATION_TYPE + "_tta" in outputs:
df[OUTPUT_PRED_MODIFICATION_TYPE + "_tta"].extend(
to_numpy(outputs[OUTPUT_PRED_MODIFICATION_TYPE + "_tta"]).tolist()
)
df = pd.DataFrame.from_dict(df)
return df
@torch.no_grad()
def main():
# Give no chance to randomness
torch.manual_seed(0)
np.random.seed(0)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
parser = argparse.ArgumentParser()
parser.add_argument("checkpoint", type=str, nargs="+")
parser.add_argument("-dd", "--data-dir", type=str, default=os.environ.get("KAGGLE_2020_ALASKA2"))
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-w", "--workers", type=int, default=0)
parser.add_argument("-d4", "--d4-tta", action="store_true")
parser.add_argument("-hv", "--hv-tta", action="store_true")
parser.add_argument("-f", "--force-recompute", action="store_true")
parser.add_argument("-fp16", "--fp16", action="store_true")
args = parser.parse_args()
checkpoint_fnames = args.checkpoint
data_dir = args.data_dir
batch_size = args.batch_size
workers = args.workers
fp16 = args.fp16
d4_tta = args.d4_tta
force_recompute = args.force_recompute
need_embedding = True
outputs = [OUTPUT_PRED_MODIFICATION_FLAG, OUTPUT_PRED_MODIFICATION_TYPE, OUTPUT_PRED_EMBEDDING]
embedding_suffix = "_w_emb" if need_embedding else ""
for checkpoint_fname in checkpoint_fnames:
model, checkpoints, required_features = ensemble_from_checkpoints(
[checkpoint_fname], strict=True, outputs=outputs, activation=None, tta=None, need_embedding=need_embedding
)
report_checkpoint(checkpoints[0])
model = model.cuda()
if torch.cuda.device_count() > 1:
model = nn.DataParallel(model)
model = model.eval()
if fp16:
model = model.half()
train_ds = get_train_except_holdout(data_dir, features=required_features)
holdout_ds = get_holdout(data_dir, features=required_features)
test_ds = get_test_dataset(data_dir, features=required_features)
if d4_tta:
model = wrap_model_with_tta(model, "d4", inputs=required_features, outputs=outputs).eval()
tta_suffix = "_d4_tta"
else:
tta_suffix = ""
# Train
trn_predictions_csv = fs.change_extension(
checkpoint_fname, f"_train_predictions{embedding_suffix}{tta_suffix}.pkl"
)
if force_recompute or not os.path.exists(trn_predictions_csv):
trn_predictions = compute_trn_predictions(
model, train_ds, fp16=fp16, batch_size=batch_size, workers=workers
)
trn_predictions.to_pickle(trn_predictions_csv)
# Holdout
hld_predictions_csv = fs.change_extension(
checkpoint_fname, f"_holdout_predictions{embedding_suffix}{tta_suffix}.pkl"
)
if force_recompute or not os.path.exists(hld_predictions_csv):
hld_predictions = compute_trn_predictions(
model, holdout_ds, fp16=fp16, batch_size=batch_size, workers=workers
)
hld_predictions.to_pickle(hld_predictions_csv)
# Test
tst_predictions_csv = fs.change_extension(
checkpoint_fname, f"_test_predictions{embedding_suffix}{tta_suffix}.pkl"
)
if force_recompute or not os.path.exists(tst_predictions_csv):
tst_predictions = compute_trn_predictions(
model, test_ds, fp16=fp16, batch_size=batch_size, workers=workers
)
tst_predictions.to_pickle(tst_predictions_csv)
if __name__ == "__main__":
main()
|
[
"numpy.random.seed",
"pandas.DataFrame.from_dict",
"warnings.simplefilter",
"argparse.ArgumentParser",
"torch.utils.data.DataLoader",
"catalyst.utils.any2device",
"os.path.exists",
"collections.defaultdict",
"pytorch_toolbelt.utils.fs.change_extension",
"os.environ.get",
"pytorch_toolbelt.utils.to_numpy",
"torch.nn.DataParallel",
"pytorch_toolbelt.utils.catalyst.report_checkpoint",
"alaska2.dataset.get_train_except_holdout"
] |
[((18, 62), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""', 'UserWarning'], {}), "('ignore', UserWarning)\n", (39, 62), False, 'import warnings\n'), ((63, 109), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""', 'FutureWarning'], {}), "('ignore', FutureWarning)\n", (84, 109), False, 'import warnings\n'), ((646, 663), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (657, 663), False, 'from collections import defaultdict\n'), ((2504, 2530), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['df'], {}), '(df)\n', (2526, 2530), True, 'import pandas as pd\n'), ((2640, 2657), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (2654, 2657), True, 'import numpy as np\n'), ((2761, 2786), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2784, 2786), False, 'import argparse\n'), ((695, 812), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'batch_size', 'num_workers': 'workers', 'shuffle': '(False)', 'drop_last': '(False)', 'pin_memory': '(True)'}), '(dataset, batch_size=batch_size, num_workers=workers, shuffle=\n False, drop_last=False, pin_memory=True)\n', (705, 812), False, 'from torch.utils.data import DataLoader\n'), ((853, 885), 'catalyst.utils.any2device', 'any2device', (['batch'], {'device': '"""cuda"""'}), "(batch, device='cuda')\n", (863, 885), False, 'from catalyst.utils import any2device\n'), ((4040, 4073), 'pytorch_toolbelt.utils.catalyst.report_checkpoint', 'report_checkpoint', (['checkpoints[0]'], {}), '(checkpoints[0])\n', (4057, 4073), False, 'from pytorch_toolbelt.utils.catalyst import report_checkpoint\n'), ((4289, 4351), 'alaska2.dataset.get_train_except_holdout', 'get_train_except_holdout', (['data_dir'], {'features': 'required_features'}), '(data_dir, features=required_features)\n', (4313, 4351), False, 'from alaska2.dataset import get_train_except_holdout\n'), ((4743, 4841), 'pytorch_toolbelt.utils.fs.change_extension', 'fs.change_extension', (['checkpoint_fname', 'f"""_train_predictions{embedding_suffix}{tta_suffix}.pkl"""'], {}), "(checkpoint_fname,\n f'_train_predictions{embedding_suffix}{tta_suffix}.pkl')\n", (4762, 4841), False, 'from pytorch_toolbelt.utils import to_numpy, fs\n'), ((5191, 5291), 'pytorch_toolbelt.utils.fs.change_extension', 'fs.change_extension', (['checkpoint_fname', 'f"""_holdout_predictions{embedding_suffix}{tta_suffix}.pkl"""'], {}), "(checkpoint_fname,\n f'_holdout_predictions{embedding_suffix}{tta_suffix}.pkl')\n", (5210, 5291), False, 'from pytorch_toolbelt.utils import to_numpy, fs\n'), ((5640, 5737), 'pytorch_toolbelt.utils.fs.change_extension', 'fs.change_extension', (['checkpoint_fname', 'f"""_test_predictions{embedding_suffix}{tta_suffix}.pkl"""'], {}), "(checkpoint_fname,\n f'_test_predictions{embedding_suffix}{tta_suffix}.pkl')\n", (5659, 5737), False, 'from pytorch_toolbelt.utils import to_numpy, fs\n'), ((2909, 2946), 'os.environ.get', 'os.environ.get', (['"""KAGGLE_2020_ALASKA2"""'], {}), "('KAGGLE_2020_ALASKA2')\n", (2923, 2946), False, 'import os\n'), ((4166, 4188), 'torch.nn.DataParallel', 'nn.DataParallel', (['model'], {}), '(model)\n', (4181, 4188), False, 'from torch import nn\n'), ((4894, 4929), 'os.path.exists', 'os.path.exists', (['trn_predictions_csv'], {}), '(trn_predictions_csv)\n', (4908, 4929), False, 'import os\n'), ((5344, 5379), 'os.path.exists', 'os.path.exists', (['hld_predictions_csv'], {}), '(hld_predictions_csv)\n', (5358, 5379), False, 'import os\n'), ((5790, 5825), 'os.path.exists', 
'os.path.exists', (['tst_predictions_csv'], {}), '(tst_predictions_csv)\n', (5804, 5825), False, 'import os\n'), ((1103, 1148), 'pytorch_toolbelt.utils.to_numpy', 'to_numpy', (['batch[INPUT_TRUE_MODIFICATION_FLAG]'], {}), '(batch[INPUT_TRUE_MODIFICATION_FLAG])\n', (1111, 1148), False, 'from pytorch_toolbelt.utils import to_numpy, fs\n'), ((1294, 1339), 'pytorch_toolbelt.utils.to_numpy', 'to_numpy', (['batch[INPUT_TRUE_MODIFICATION_TYPE]'], {}), '(batch[INPUT_TRUE_MODIFICATION_TYPE])\n', (1302, 1339), False, 'from pytorch_toolbelt.utils import to_numpy, fs\n'), ((1649, 1697), 'pytorch_toolbelt.utils.to_numpy', 'to_numpy', (['outputs[OUTPUT_PRED_MODIFICATION_FLAG]'], {}), '(outputs[OUTPUT_PRED_MODIFICATION_FLAG])\n', (1657, 1697), False, 'from pytorch_toolbelt.utils import to_numpy, fs\n'), ((2190, 2247), 'pytorch_toolbelt.utils.to_numpy', 'to_numpy', (["outputs[OUTPUT_PRED_MODIFICATION_FLAG + '_tta']"], {}), "(outputs[OUTPUT_PRED_MODIFICATION_FLAG + '_tta'])\n", (2198, 2247), False, 'from pytorch_toolbelt.utils import to_numpy, fs\n'), ((2413, 2470), 'pytorch_toolbelt.utils.to_numpy', 'to_numpy', (["outputs[OUTPUT_PRED_MODIFICATION_TYPE + '_tta']"], {}), "(outputs[OUTPUT_PRED_MODIFICATION_TYPE + '_tta'])\n", (2421, 2470), False, 'from pytorch_toolbelt.utils import to_numpy, fs\n')]
|
from django.db import models
class Course(models.Model):
created = models.DateTimeField(auto_now_add=True)
title = models.CharField(max_length=100, blank=True, default='')
content = models.TextField()
owner = models.ForeignKey('auth.User', related_name='Course', on_delete=models.CASCADE)
class Meta:
ordering = ['created']
|
[
"django.db.models.CharField",
"django.db.models.DateTimeField",
"django.db.models.TextField",
"django.db.models.ForeignKey"
] |
[((73, 112), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (93, 112), False, 'from django.db import models\n'), ((125, 181), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)', 'default': '""""""'}), "(max_length=100, blank=True, default='')\n", (141, 181), False, 'from django.db import models\n'), ((196, 214), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (212, 214), False, 'from django.db import models\n'), ((227, 306), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""auth.User"""'], {'related_name': '"""Course"""', 'on_delete': 'models.CASCADE'}), "('auth.User', related_name='Course', on_delete=models.CASCADE)\n", (244, 306), False, 'from django.db import models\n')]
|
#! /usr/bin/env python3
import sys
import csv
import argparse
import numpy as np
import pandas as pd
from mll_calc.all_jobs import parent_jobs, kid_jobs
def row_calcs(ext_test):
if 'no' in ext_test:
#db_rows = 450240
#max_jobs = 9750
db_rows = 90048 * 4
max_jobs = 978 * 4
else:
db_rows = 505
max_jobs = 10
n_rows = db_rows // max_jobs
init_rows = np.arange(0, db_rows, n_rows).tolist()
end_rows = init_rows[1:]
    # TODO: removed the +1 below because the index ended up 1 too high last time
end_rows.append(db_rows)
################################################
################ In-script test ################
################################################
if db_rows % n_rows == 0:
total_jobs = db_rows // n_rows
else:
total_jobs = db_rows // n_rows + 1
if len(init_rows) != total_jobs or len(end_rows) != total_jobs:
print(total_jobs, len(init_rows), len(end_rows))
sys.exit('total expected jobs does not equal one of db_row lists')
################################################
return init_rows, end_rows
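# Worked example for the "no" branch above: db_rows = 90048 * 4 = 360192 and
# max_jobs = 978 * 4 = 3912, so n_rows = 360192 // 3912 = 92. np.arange(0, 360192, 92)
# yields 3916 start rows, and because 360192 % 92 != 0 the in-script test also
# expects 360192 // 92 + 1 = 3916 jobs, so the check passes.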
def make_paramstxt(parent_job, kid_jobs):
parent_dir = parent_job['parent_dir']
fname = parent_dir + '_params.txt'
init_rows, end_rows = row_calcs(parent_job['ext_test'])
for unc_num, (kid_dir, unc) in enumerate(zip(kid_jobs['job_dirs'], kid_jobs['uncs'])):
if parent_dir == 'train_nuc29':
fname = parent_dir + '_' + str(unc_num) + '_params.txt'
#with open(fname, 'w') as f:
with open(fname, 'a') as f:
w = csv.writer(f)
job_dir = parent_dir + '/' + kid_dir
for i in range(0, len(init_rows)):
job = [job_dir, unc,
parent_job['train_pkl'], parent_job['test_pkl'],
str(i).zfill(4), init_rows[i], end_rows[i],
parent_job['ext_test'], parent_job['ratios']
]
w.writerow(job)
return
def main():
"""
Reads all the job descriptions from all_jobs.py and populates the necessary
params_mll_calc.txt files
"""
for parent_job in parent_jobs:
make_paramstxt(parent_job, kid_jobs)
return
if __name__ == "__main__":
main()
|
[
"numpy.arange",
"csv.writer",
"sys.exit"
] |
[((993, 1059), 'sys.exit', 'sys.exit', (['"""total expected jobs does not equal one of db_row lists"""'], {}), "('total expected jobs does not equal one of db_row lists')\n", (1001, 1059), False, 'import sys\n'), ((414, 443), 'numpy.arange', 'np.arange', (['(0)', 'db_rows', 'n_rows'], {}), '(0, db_rows, n_rows)\n', (423, 443), True, 'import numpy as np\n'), ((1617, 1630), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (1627, 1630), False, 'import csv\n')]
|
#!/usr/bin/env python3
import os, sys
import setuptools
# Get text from README.txt
with open("README.md", "r") as fp:
readme_text = fp.read()
# Get __version__ without importing
with open(os.path.join(os.path.dirname(__file__),"ndsb", "__init__.py"), "r") as f:
for line in f:
if line.startswith("__version__ = "):
exec(line.strip())
break
setuptools.setup(
name="ndsb",
version=__version__,
description="Collect data, turn it into static artifacts and beam it to a vault.",
license="MIT",
author="<NAME>",
author_email="<EMAIL>",
url="https://github.com/dbbs-lab/ndsb",
long_description=readme_text,
long_description_content_type="text/markdown",
packages=["ndsb"],
classifiers=[
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
install_requires=["portalocker", "requests", "requests-toolbelt"],
extras_require={"dev": ["sphinx", "sphinx_rtd_theme>=0.4.3", "pre-commit", "black"],},
)
|
[
"os.path.dirname",
"setuptools.setup"
] |
[((383, 1072), 'setuptools.setup', 'setuptools.setup', ([], {'name': '"""ndsb"""', 'version': '__version__', 'description': '"""Collect data, turn it into static artifacts and beam it to a vault."""', 'license': '"""MIT"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""https://github.com/dbbs-lab/ndsb"""', 'long_description': 'readme_text', 'long_description_content_type': '"""text/markdown"""', 'packages': "['ndsb']", 'classifiers': "['Intended Audience :: Science/Research',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent', 'Programming Language :: Python']", 'install_requires': "['portalocker', 'requests', 'requests-toolbelt']", 'extras_require': "{'dev': ['sphinx', 'sphinx_rtd_theme>=0.4.3', 'pre-commit', 'black']}"}), "(name='ndsb', version=__version__, description=\n 'Collect data, turn it into static artifacts and beam it to a vault.',\n license='MIT', author='<NAME>', author_email='<EMAIL>', url=\n 'https://github.com/dbbs-lab/ndsb', long_description=readme_text,\n long_description_content_type='text/markdown', packages=['ndsb'],\n classifiers=['Intended Audience :: Science/Research',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent', 'Programming Language :: Python'],\n install_requires=['portalocker', 'requests', 'requests-toolbelt'],\n extras_require={'dev': ['sphinx', 'sphinx_rtd_theme>=0.4.3',\n 'pre-commit', 'black']})\n", (399, 1072), False, 'import setuptools\n'), ((207, 232), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (222, 232), False, 'import os, sys\n')]
|
# import the needed packages
import pickle
from sklearn import preprocessing
import time
from os import listdir
from os.path import isfile, join
from random import randint, uniform
import numpy as np
from matplotlib import pyplot as plt
import cv2 as cv
from scipy import ndimage
from skimage import morphology
from skimage import exposure
import os
from math import pi
from math import isnan
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix, precision_score, recall_score
from skimage.filters import sobel
# set random seed
np.random.seed(26)
# the NaiveBayes classifier I wrote for assignment 6 in BSYSE_530, modified a little for this purpose
class NaiveBayes:
# P(c|x) = P(x|c) * P(c) / P(x)
    # P(c|x) is the posterior probability
    # P(x|c) is the likelihood
    # P(c) is the class prior probability, or the prob of c occurring independently.
    # P(x) is the predictor prior probability, or the prob of x occurring independently
def fit(self, features, target):
# define class variables
self.classes = np.unique(target)
self.count = len(self.classes)
self.feature_nums = features.shape[1]
self.rows = features.shape[0]
# calculate statistics for all those features
self.calc_statistics(features, target)
# prior is the random chance of drawing a particular class based on its proportion in the dataset
self.prior = self.calc_prior(features, target)
def get_predictions(self, input_vector):
predictions = []
for i in range(len(input_vector)):
result = self.calc_posterior((input_vector.iloc[i,:]))
predictions.append(result)
return predictions
def predict(self, observation):
#call the calc_posterior function on the observation
pred_class = self.calc_posterior(observation)
return pred_class
def calc_statistics(self, features, target):
# calculate mean, variance for each column and convert to numpy array
self.mean = features.groupby(target).apply(np.mean).to_numpy()
self.var = features.groupby(target).apply(np.var).to_numpy()
return self.mean, self.var
def calc_prior(self, features, target):
# this is the probability of picking one of a class at random from the dataset
self.prior = (features.groupby(target).apply(lambda x: len(x)/self.rows).to_numpy())
return self.prior
def calc_posterior(self, x):
# this is the probability, post evidence
# x is a numpy array
# x is feature vector for one observation
# make a list that we will add each classes posterior prob to
posteriors = []
# iterate through the classes
for i in range(0, self.count):
# for each class look at the prior probability for the class
prior = self.prior[i]
            # calculate the conditional (log-)likelihood of the feature vector for this class;
            # working in log space lets the prior and per-feature likelihoods be summed
            conditional = np.sum(np.log(self.gaussian_density(i, x)))
            posterior = np.log(prior) + conditional
# print(f"i = {i}, prior = {prior}, conditional = {conditional}, posterior = {posterior}")
posteriors.append(posterior)
return self.classes[np.argmax(posteriors)]
def gaussian_density(self, class_idx, x):
        # calc probability from gaussian density function (normal dist)
mean = self.mean[class_idx]
var = self.var[class_idx]
# this part sucked and I had a typo that cost me hours
numerator = np.exp(-((x-mean)**2 / (2 * var)))
denominator = np.sqrt(2 * np.pi * var)
return numerator / denominator
def pdf(self, x, mean, stdev):
# calculate probability density function
exponent = np.exp(-((x-mean)**2 / (2*stdev**2)))
return exponent * (1/(np.sqrt(2*np.pi)*stdev))
def get_accuracy(self, test, predictions):
correct = 0
for i in range(len(test)):
if test.iloc[i] == predictions[i]:
correct += 1
return (correct / float(len(test)))
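# A minimal usage sketch for the NaiveBayes class above (toy data and column
# names are assumptions, kept as comments so this file still runs unchanged):
#   import pandas as pd
#   toy = pd.DataFrame({"r": [0.9, 0.8, 0.1, 0.2],
#                       "g": [0.1, 0.2, 0.8, 0.9],
#                       "color": [0, 0, 1, 1]})
#   X, y = toy[["r", "g"]], toy["color"]
#   nb = NaiveBayes()
#   nb.fit(X, y)
#   preds = nb.get_predictions(X)
#   print(nb.get_accuracy(y, preds))   # expected 1.0 on this separable toy set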
# TODO: read these and see how it works
# https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html
# https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html
# this exists only for my testing purposes
class MatlabSurrogate():
def __init__(self):
self.state_of_mind = "Badass."
def acquire_kinect_image(self, filename):
# give this function a filename, and it will load that image with opencv
# this will be a BGR format, because that is how opencv rolls
kinect_image = cv.imread(filename)
print(f"kinect has acquired the image with shape = {kinect_image.shape}")
return kinect_image
# function to display images resized, using opencv
def imshow(self, image, imdiv = 4):
imdiv = int(imdiv)
w, h = int(image.shape[1]/imdiv), int(image.shape[0]/imdiv)
cv.namedWindow("output", cv.WINDOW_NORMAL)
cv.resizeWindow("output", (w, h))
cv.imshow("output", image)
cv.waitKey(0)
cv.destroyAllWindows()
# I should probably have one image processing class that takes in a single image and then spits out a dataframe that could be used for prediction
# replaces ImageSegmenter
class ImageProcess():
def __init__(self):
print("image processor activated! use 'process_image_to_df()' to get back a pandas df")
self.black_lower = (0, 0, 0)
self.black_upper = (179, 255, 30)
self.hsv_lower = (0, 0, 0)
self.hsv_upper = (179, 255, 90)
# self.black_lower = (0, 0, 203)
# self.black_upper = (43, 255, 255)
# self.hsv_lower = (0, 0, 70)
# self.hsv_upper = (179, 34, 255)
# NOT mask for lego_imgs[14]
# hsv_lower = (0,0,0)
# hsv_upper = (179,234,77)
def dummy_method(self, a):
if type(a) is np.ndarray:
result = "object is a numpy.ndarray, this is perfect. Is the image RGB order or BGR?"
return result
else:
result = "object is a " + str(type(a)) + "and I'm gonna have a hard time with that"
return result
def bg_segmentation(self, image, mode="hsv", show_img=False):
# create an hsv mask for red colors
hsv_mask = cv.inRange(cv.cvtColor(image, cv.COLOR_BGR2HSV),
self.hsv_lower,
self.hsv_upper).astype(np.uint8)
# use this as a NOT mask
hsv_mask = np.where(hsv_mask > 1, 0, 1).astype(np.uint8)
hsv_mask = ndimage.gaussian_filter(hsv_mask, sigma=1)
# erode the mask
hsv_mask = morphology.erosion(hsv_mask, morphology.disk(3))
# # median filter to despeckle
# hsv_mask = ndimage.median_filter(hsv_mask, size=(3, 3)).astype(np.uint8)
# binary dilation
hsv_mask = morphology.binary_dilation(hsv_mask, np.ones((20, 20))).astype(np.uint8)
# fill the holes
hsv_mask = ndimage.binary_fill_holes(hsv_mask).astype(np.uint8)
# erode the mask
hsv_mask = morphology.erosion(hsv_mask, morphology.disk(5))
# TODO: remove this it is for testing purposes to show the segmentation
if (show_img == True):
m = MatlabSurrogate()
m.imshow(cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8))
# apply the mask and return the result
return cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8)
def process_image_to_df(self, input_image, area_th):
seg_img = self.bg_segmentation(input_image, show_img=False)
# # make the mask a binary thresholded image
mask = cv.cvtColor(seg_img, cv.COLOR_BGR2GRAY)
mask = cv.GaussianBlur(mask,(5,5),0)
ret3, mask = cv.threshold(mask,0,255,cv.THRESH_BINARY)
# output image with contours drawn on the original image
output_image = input_image.copy()
# find the contours of the detected objects in the image
contours, hier = cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
# create the df that we'll return for this image
df = pd.DataFrame(columns=['color'])
# # reset the object num
object_num = 0
for cnt in contours:
# draw contours on the output image for our personal enjoyment
cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5)
# CALCULATE ALL THE CONTOUR SHAPE FEATURES
# get the x, y, w, h of the bounding rect for the contour
x, y, w, h = cv.boundingRect(cnt)
# contour features
area = cv.contourArea(cnt)
rect_area = w * h
fullosity = area / rect_area
aspect_ratio = float(w)/h
extent = float(area/ rect_area)
hull = cv.convexHull(cnt)
hull_area = cv.contourArea(hull)
solidity = float(area)/hull_area
eq_diameter = np.sqrt(4*area/np.pi)
M= cv.moments(cnt)
cx= int(M['m10']/M['m00'])
cy= int(M['m01']/M['m00'])
# take this rectangle as a subset of the input_image, and calculate things within it
img_subset = input_image[y:y+h, x:x+w, :]
# convert to hsv for extracting those values
img_subset_hsv = cv.cvtColor(img_subset, cv.COLOR_BGR2HSV)
# FILTER OUT THE WEIRD ONES
            # get rid of tiny objects that are probably noise
if area > area_th:
# draw a blank canvas to put the contour onto, JUST THIS ONE not the others
# this is a mask
cimg_justthiscontour = np.zeros_like(input_image)
# draw the contours on the blank canvas which is original sized
cv.drawContours(cimg_justthiscontour, [cnt], 0, color=(255, 255, 255), thickness=-1)
# now take the subset of just the area around the contour of interest
cimg_subset = cimg_justthiscontour[y:y+h, x:x+w, :]
# make a binary mask
cimg_mask = cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY)
ret2, mask = cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY)
# draw contours on the output image for our personal enjoyment
cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5)
img_subset = cv.bitwise_and(img_subset, img_subset, mask=mask).astype(np.uint8)
# calculate where the object is
pts = np.where(cimg_subset == 255)
hue = img_subset_hsv[pts[0], pts[1], 0]
sat = img_subset_hsv[pts[0], pts[1], 1]
val = img_subset_hsv[pts[0], pts[1], 2]
r = img_subset[pts[0], pts[1], 0]
g = img_subset[pts[0], pts[1], 1]
b = img_subset[pts[0], pts[1], 2]
# and export the image for later analysis with something else like a neural network
cv.imwrite(f"images/train/XX_{object_num}_{randint(10000,99999)}.png", img_subset)
# add the object labels to the cimg for identification
cv.putText(output_image, text= str(object_num),
org=(cx - 5,cy - 5),
fontFace= cv.FONT_HERSHEY_SIMPLEX,
fontScale=3,
color=(255,255,255),
thickness=5,
lineType=cv.LINE_AA)
# print(r.mean(), g.mean(), b.mean(), gli.mean())
df = df.append({'color' : 0,
'x': x,
'y': y,
'object_num': object_num,
'r': r.mean(),
'g': g.mean(),
'b': b.mean(),
'hue': hue.mean(),
'sat': sat.mean(),
'val': val.mean()
}, ignore_index=True)
# last thing we do on this loop is increment the object_num
object_num += 1
#
# end result should be a pandas dataframe and the contour image with numbers
return df.sort_values(by='object_num', axis=0, ascending=True), output_image
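    # Hedged usage sketch for process_image_to_df (the file name and labels are
    # hypothetical, kept as comments). Note that it also writes per-object crops
    # under images/train/, so that directory must exist:
    #   proc = ImageProcess()
    #   img = cv.imread("lego_scene.png")
    #   df, annotated = proc.process_image_to_df(img, area_th=400)
    #   df = proc.label_dataframe(df, class_list=[0, 1, 2])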
def hsv_slide_tool(self, image):
def empty(a):
pass
h, w = int(image.shape[1]/2), int(image.shape[0]/2)
cv.namedWindow('masked_image', cv.WINDOW_NORMAL)
cv.resizeWindow('masked_image', h, w)
cv.namedWindow("trackbars")
cv.resizeWindow("trackbars", 800, 300)
# color mask trackbars
cv.createTrackbar("hue_min", "trackbars", 0, 179, empty)
cv.createTrackbar('hue_max', 'trackbars', 179, 179, empty)
cv.createTrackbar('sat_min', 'trackbars', 0, 255, empty)
cv.createTrackbar('sat_max', 'trackbars', 255, 255, empty)
cv.createTrackbar('val_min', 'trackbars', 0, 255, empty)
cv.createTrackbar('val_max', 'trackbars', 255, 255, empty)
while True:
# get image
img_hsv = cv.cvtColor(image, cv.COLOR_BGR2HSV)
# get trackbar positions
h_min = cv.getTrackbarPos("hue_min", "trackbars")
h_max = cv.getTrackbarPos('hue_max', 'trackbars')
s_min = cv.getTrackbarPos('sat_min', 'trackbars')
s_max = cv.getTrackbarPos('sat_max', 'trackbars')
v_min = cv.getTrackbarPos('val_min', 'trackbars')
v_max = cv.getTrackbarPos('val_max', 'trackbars')
# self.black_lower = (0, 0, 0)
# self.black_upper = (179, 255, 30)
# self.hsv_lower = (0, 0, 100)
# self.hsv_upper = (179, 255, 255)
# create mask
hsv_lower = np.array([h_min, s_min, v_min])
hsv_upper = np.array([h_max, s_max, v_max])
black_lower = np.array([0, 0, 0])
black_upper = np.array([179, 255, 30])
color_mask = cv.inRange(img_hsv, hsv_lower, hsv_upper)
black_mask = cv.inRange(img_hsv, black_lower, black_upper)
mask = color_mask + black_mask
masked_image = cv.bitwise_and(img_hsv, img_hsv, mask=mask)
cv.imshow('masked_image', masked_image)
k = cv.waitKey(1000) & 0xFF # large wait time
if k == 113 or k == 27:
break
cv.destroyAllWindows()
print(f'hsv_lower is {hsv_lower}, hsv_upper = {hsv_upper}')
def label_dataframe(self, image_df, class_list):
for i, row in image_df.iterrows():
image_df.loc[i, 'color'] = class_list[i]
print(type(image_df))
return image_df
# def fake_df(self, input_df, reps = 3):
# # creates a bunch of fake adjustments to the dataframe so my train set is bigger
# output_df = input_df.copy()
# for rep in range(0, reps):
# fake_df = input_df.copy()
# for i, row in fake_df.iterrows():
# fake_df.loc[i, 'r'] = fake_df.loc[i, 'r'] + uniform(-.1, .1)
# fake_df.loc[i, 'g'] = fake_df.loc[i, 'g'] + uniform(-.1, .1)
# fake_df.loc[i, 'b'] = fake_df.loc[i, 'b'] + uniform(-.1, .1)
# output_df = pd.concat(output_df, fake_df)
# return output_df
def otsu_threshold(self, image):
blur = cv.GaussianBlur(image,(5,5),0)
ret3,th3 = cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU)
return ret3, th3
def process_image_make_predictions(self, input_image, model):
predictive_model = model
area_th = 400
seg_img = self.bg_segmentation(input_image, show_img=False)
# # make the mask a binary thresholded image
mask = cv.cvtColor(seg_img, cv.COLOR_BGR2GRAY)
mask = cv.GaussianBlur(mask,(5,5),0)
ret3, mask = cv.threshold(mask,0,255,cv.THRESH_BINARY)
# output image with contours drawn on the original image
output_image = input_image.copy()
# find the contours of the detected objects in the image
contours, hier = cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
# create the df that we'll return for this image
df = pd.DataFrame(columns=['color'])
        # reset the object num
object_num = 0
for cnt in contours:
# draw contours on the output image for our personal enjoyment
cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5)
# CALCULATE ALL THE CONTOUR SHAPE FEATURES
# get the x, y, w, h of the bounding rect for the contour
x, y, w, h = cv.boundingRect(cnt)
# contour features
area = cv.contourArea(cnt)
rect_area = w * h
fullosity = area / rect_area
aspect_ratio = float(w)/h
extent = float(area/ rect_area)
hull = cv.convexHull(cnt)
hull_area = cv.contourArea(hull)
solidity = float(area)/hull_area
eq_diameter = np.sqrt(4*area/np.pi)
M= cv.moments(cnt)
cx= int(M['m10']/M['m00'])
cy= int(M['m01']/M['m00'])
# take this rectangle as a subset of the input_image, and calculate things within it
img_subset = input_image[y:y+h, x:x+w, :]
# convert to hsv for extracting those values
img_subset_hsv = cv.cvtColor(img_subset, cv.COLOR_BGR2HSV)
# FILTER OUT THE WEIRD ONES
        # get rid of tiny objects that are probably noise
if area > area_th:
# draw a blank canvas to put the contour onto, JUST THIS ONE not the others
# this is a mask
cimg_justthiscontour = np.zeros_like(input_image)
# draw the contours on the blank canvas which is original sized
cv.drawContours(cimg_justthiscontour, [cnt], 0, color=(255, 255, 255), thickness=-1)
# now take the subset of just the area around the contour of interest
cimg_subset = cimg_justthiscontour[y:y+h, x:x+w, :]
# make a binary mask
cimg_mask = cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY)
ret2, mask = cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY)
# draw contours on the output image for our personal enjoyment
cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5)
img_subset = cv.bitwise_and(img_subset, img_subset, mask=mask).astype(np.uint8)
# calculate where the object is
pts = np.where(cimg_subset == 255)
hue = img_subset_hsv[pts[0], pts[1], 0]
sat = img_subset_hsv[pts[0], pts[1], 1]
val = img_subset_hsv[pts[0], pts[1], 2]
r = img_subset[pts[0], pts[1], 0]
g = img_subset[pts[0], pts[1], 1]
b = img_subset[pts[0], pts[1], 2]
df = [{'r': (r.mean() / 255),
'g': (g.mean() / 255),
'b': (b.mean() / 255),
'hue': (hue.mean() / 255),
'sat': (sat.mean() / 255),
'val': (val.mean() / 255)}]
df = pd.DataFrame.from_dict(df)
pred = predictive_model.get_predictions(df)
class_dict = {0:"medium_blue",
1:"black",
2:"darK_stone_gray",
3:"bright_green",
4:"light_green",
5:"bright_orange",
6:"bright_red",
7:"bright_blue",
8:"white",
9:"bright_yellow"}
color_text = class_dict[pred[0]]
object_label = "obj" + str(object_num) + "_pred" + str(pred[0])
print(object_label)
# add the object labels to the cimg for identification
cv.putText(output_image, text= str(object_label),
org=(cx - 5,cy - 5),
fontFace= cv.FONT_HERSHEY_SIMPLEX,
fontScale=1,
color=(0,255,0),
thickness=3,
lineType=cv.LINE_AA)
# last thing we do on this loop is increment the object_num
object_num += 1
# AFTER ALL CONTOURS HAVE BEEN DONE submit the df to the model for predictions
# results = predictive_model.blind_predictions()
# result = loaded_model.get_predictions(X_test, Y_test)
# print(result)
# # use the test set to see how we do
# y_test_predictions = nb.get_predictions(X_test)
# # scores
# acc = nb.get_accuracy(y_test, y_test_predictions)
# prec = precision_score(y_test, y_test_predictions, average="micro")
# rec = recall_score(y_test, y_test_predictions, average="micro")
# print(f"precision is {prec}, recall is {rec}, accuracy = {acc}")
# # confusion matrix
# labels = [(i, c) for i, c in class_dict.items()]
# cm = confusion_matrix(y_test, y_test_predictions)
# fig = plt.figure()
# ax = fig.add_subplot(111)
# cax = ax.matshow(cm)
# plt.title('confusion matrix of the classifier')
# fig.colorbar(cax)
# plt.xlabel('Predicted')
# plt.ylabel('True')
# plt.show()
# print(labels)
# take the row
# end result should be a pandas dataframe and the contour image with numbers
return output_image
|
[
"cv2.GaussianBlur",
"numpy.random.seed",
"cv2.bitwise_and",
"numpy.argmax",
"scipy.ndimage.binary_fill_holes",
"numpy.ones",
"numpy.exp",
"cv2.imshow",
"cv2.inRange",
"numpy.unique",
"pandas.DataFrame",
"cv2.contourArea",
"numpy.zeros_like",
"random.randint",
"cv2.cvtColor",
"scipy.ndimage.gaussian_filter",
"cv2.namedWindow",
"cv2.getTrackbarPos",
"cv2.drawContours",
"cv2.destroyAllWindows",
"cv2.boundingRect",
"cv2.createTrackbar",
"pandas.DataFrame.from_dict",
"cv2.waitKey",
"cv2.convexHull",
"cv2.resizeWindow",
"cv2.threshold",
"cv2.moments",
"skimage.morphology.disk",
"cv2.imread",
"numpy.where",
"numpy.array",
"cv2.findContours",
"numpy.sqrt"
] |
[((622, 640), 'numpy.random.seed', 'np.random.seed', (['(26)'], {}), '(26)\n', (636, 640), True, 'import numpy as np\n'), ((1152, 1169), 'numpy.unique', 'np.unique', (['target'], {}), '(target)\n', (1161, 1169), True, 'import numpy as np\n'), ((3827, 3865), 'numpy.exp', 'np.exp', (['(-((x - mean) ** 2 / (2 * var)))'], {}), '(-((x - mean) ** 2 / (2 * var)))\n', (3833, 3865), True, 'import numpy as np\n'), ((3885, 3909), 'numpy.sqrt', 'np.sqrt', (['(2 * np.pi * var)'], {}), '(2 * np.pi * var)\n', (3892, 3909), True, 'import numpy as np\n'), ((4072, 4117), 'numpy.exp', 'np.exp', (['(-((x - mean) ** 2 / (2 * stdev ** 2)))'], {}), '(-((x - mean) ** 2 / (2 * stdev ** 2)))\n', (4078, 4117), True, 'import numpy as np\n'), ((5055, 5074), 'cv2.imread', 'cv.imread', (['filename'], {}), '(filename)\n', (5064, 5074), True, 'import cv2 as cv\n'), ((5402, 5444), 'cv2.namedWindow', 'cv.namedWindow', (['"""output"""', 'cv.WINDOW_NORMAL'], {}), "('output', cv.WINDOW_NORMAL)\n", (5416, 5444), True, 'import cv2 as cv\n'), ((5454, 5487), 'cv2.resizeWindow', 'cv.resizeWindow', (['"""output"""', '(w, h)'], {}), "('output', (w, h))\n", (5469, 5487), True, 'import cv2 as cv\n'), ((5497, 5523), 'cv2.imshow', 'cv.imshow', (['"""output"""', 'image'], {}), "('output', image)\n", (5506, 5523), True, 'import cv2 as cv\n'), ((5533, 5546), 'cv2.waitKey', 'cv.waitKey', (['(0)'], {}), '(0)\n', (5543, 5546), True, 'import cv2 as cv\n'), ((5556, 5578), 'cv2.destroyAllWindows', 'cv.destroyAllWindows', ([], {}), '()\n', (5576, 5578), True, 'import cv2 as cv\n'), ((7160, 7202), 'scipy.ndimage.gaussian_filter', 'ndimage.gaussian_filter', (['hsv_mask'], {'sigma': '(1)'}), '(hsv_mask, sigma=1)\n', (7183, 7202), False, 'from scipy import ndimage\n'), ((8439, 8478), 'cv2.cvtColor', 'cv.cvtColor', (['seg_img', 'cv.COLOR_BGR2GRAY'], {}), '(seg_img, cv.COLOR_BGR2GRAY)\n', (8450, 8478), True, 'import cv2 as cv\n'), ((8495, 8527), 'cv2.GaussianBlur', 'cv.GaussianBlur', (['mask', '(5, 5)', '(0)'], {}), '(mask, (5, 5), 0)\n', (8510, 8527), True, 'import cv2 as cv\n'), ((8547, 8591), 'cv2.threshold', 'cv.threshold', (['mask', '(0)', '(255)', 'cv.THRESH_BINARY'], {}), '(mask, 0, 255, cv.THRESH_BINARY)\n', (8559, 8591), True, 'import cv2 as cv\n'), ((8798, 8857), 'cv2.findContours', 'cv.findContours', (['mask', 'cv.RETR_TREE', 'cv.CHAIN_APPROX_SIMPLE'], {}), '(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)\n', (8813, 8857), True, 'import cv2 as cv\n'), ((8934, 8965), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['color']"}), "(columns=['color'])\n", (8946, 8965), True, 'import pandas as pd\n'), ((13522, 13570), 'cv2.namedWindow', 'cv.namedWindow', (['"""masked_image"""', 'cv.WINDOW_NORMAL'], {}), "('masked_image', cv.WINDOW_NORMAL)\n", (13536, 13570), True, 'import cv2 as cv\n'), ((13580, 13617), 'cv2.resizeWindow', 'cv.resizeWindow', (['"""masked_image"""', 'h', 'w'], {}), "('masked_image', h, w)\n", (13595, 13617), True, 'import cv2 as cv\n'), ((13637, 13664), 'cv2.namedWindow', 'cv.namedWindow', (['"""trackbars"""'], {}), "('trackbars')\n", (13651, 13664), True, 'import cv2 as cv\n'), ((13674, 13712), 'cv2.resizeWindow', 'cv.resizeWindow', (['"""trackbars"""', '(800)', '(300)'], {}), "('trackbars', 800, 300)\n", (13689, 13712), True, 'import cv2 as cv\n'), ((13772, 13828), 'cv2.createTrackbar', 'cv.createTrackbar', (['"""hue_min"""', '"""trackbars"""', '(0)', '(179)', 'empty'], {}), "('hue_min', 'trackbars', 0, 179, empty)\n", (13789, 13828), True, 'import cv2 as cv\n'), ((13838, 13896), 'cv2.createTrackbar', 'cv.createTrackbar', 
(['"""hue_max"""', '"""trackbars"""', '(179)', '(179)', 'empty'], {}), "('hue_max', 'trackbars', 179, 179, empty)\n", (13855, 13896), True, 'import cv2 as cv\n'), ((13906, 13962), 'cv2.createTrackbar', 'cv.createTrackbar', (['"""sat_min"""', '"""trackbars"""', '(0)', '(255)', 'empty'], {}), "('sat_min', 'trackbars', 0, 255, empty)\n", (13923, 13962), True, 'import cv2 as cv\n'), ((13972, 14030), 'cv2.createTrackbar', 'cv.createTrackbar', (['"""sat_max"""', '"""trackbars"""', '(255)', '(255)', 'empty'], {}), "('sat_max', 'trackbars', 255, 255, empty)\n", (13989, 14030), True, 'import cv2 as cv\n'), ((14040, 14096), 'cv2.createTrackbar', 'cv.createTrackbar', (['"""val_min"""', '"""trackbars"""', '(0)', '(255)', 'empty'], {}), "('val_min', 'trackbars', 0, 255, empty)\n", (14057, 14096), True, 'import cv2 as cv\n'), ((14106, 14164), 'cv2.createTrackbar', 'cv.createTrackbar', (['"""val_max"""', '"""trackbars"""', '(255)', '(255)', 'empty'], {}), "('val_max', 'trackbars', 255, 255, empty)\n", (14123, 14164), True, 'import cv2 as cv\n'), ((15636, 15658), 'cv2.destroyAllWindows', 'cv.destroyAllWindows', ([], {}), '()\n', (15656, 15658), True, 'import cv2 as cv\n'), ((16693, 16726), 'cv2.GaussianBlur', 'cv.GaussianBlur', (['image', '(5, 5)', '(0)'], {}), '(image, (5, 5), 0)\n', (16708, 16726), True, 'import cv2 as cv\n'), ((16744, 16805), 'cv2.threshold', 'cv.threshold', (['blur', '(0)', '(255)', '(cv.THRESH_BINARY + cv.THRESH_OTSU)'], {}), '(blur, 0, 255, cv.THRESH_BINARY + cv.THRESH_OTSU)\n', (16756, 16805), True, 'import cv2 as cv\n'), ((17134, 17173), 'cv2.cvtColor', 'cv.cvtColor', (['seg_img', 'cv.COLOR_BGR2GRAY'], {}), '(seg_img, cv.COLOR_BGR2GRAY)\n', (17145, 17173), True, 'import cv2 as cv\n'), ((17190, 17222), 'cv2.GaussianBlur', 'cv.GaussianBlur', (['mask', '(5, 5)', '(0)'], {}), '(mask, (5, 5), 0)\n', (17205, 17222), True, 'import cv2 as cv\n'), ((17242, 17286), 'cv2.threshold', 'cv.threshold', (['mask', '(0)', '(255)', 'cv.THRESH_BINARY'], {}), '(mask, 0, 255, cv.THRESH_BINARY)\n', (17254, 17286), True, 'import cv2 as cv\n'), ((17489, 17548), 'cv2.findContours', 'cv.findContours', (['mask', 'cv.RETR_TREE', 'cv.CHAIN_APPROX_SIMPLE'], {}), '(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)\n', (17504, 17548), True, 'import cv2 as cv\n'), ((17623, 17654), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['color']"}), "(columns=['color'])\n", (17635, 17654), True, 'import pandas as pd\n'), ((3505, 3526), 'numpy.argmax', 'np.argmax', (['posteriors'], {}), '(posteriors)\n', (3514, 3526), True, 'import numpy as np\n'), ((7288, 7306), 'skimage.morphology.disk', 'morphology.disk', (['(3)'], {}), '(3)\n', (7303, 7306), False, 'from skimage import morphology\n'), ((7767, 7785), 'skimage.morphology.disk', 'morphology.disk', (['(5)'], {}), '(5)\n', (7782, 7785), False, 'from skimage import morphology\n'), ((9155, 9230), 'cv2.drawContours', 'cv.drawContours', (['output_image', '[cnt]', '(0)'], {'color': '(255, 255, 255)', 'thickness': '(5)'}), '(output_image, [cnt], 0, color=(255, 255, 255), thickness=5)\n', (9170, 9230), True, 'import cv2 as cv\n'), ((9386, 9406), 'cv2.boundingRect', 'cv.boundingRect', (['cnt'], {}), '(cnt)\n', (9401, 9406), True, 'import cv2 as cv\n'), ((9461, 9480), 'cv2.contourArea', 'cv.contourArea', (['cnt'], {}), '(cnt)\n', (9475, 9480), True, 'import cv2 as cv\n'), ((9660, 9678), 'cv2.convexHull', 'cv.convexHull', (['cnt'], {}), '(cnt)\n', (9673, 9678), True, 'import cv2 as cv\n'), ((9704, 9724), 'cv2.contourArea', 'cv.contourArea', (['hull'], {}), '(hull)\n', (9718, 9724), 
True, 'import cv2 as cv\n'), ((9800, 9825), 'numpy.sqrt', 'np.sqrt', (['(4 * area / np.pi)'], {}), '(4 * area / np.pi)\n', (9807, 9825), True, 'import numpy as np\n'), ((9840, 9855), 'cv2.moments', 'cv.moments', (['cnt'], {}), '(cnt)\n', (9850, 9855), True, 'import cv2 as cv\n'), ((10181, 10222), 'cv2.cvtColor', 'cv.cvtColor', (['img_subset', 'cv.COLOR_BGR2HSV'], {}), '(img_subset, cv.COLOR_BGR2HSV)\n', (10192, 10222), True, 'import cv2 as cv\n'), ((14245, 14281), 'cv2.cvtColor', 'cv.cvtColor', (['image', 'cv.COLOR_BGR2HSV'], {}), '(image, cv.COLOR_BGR2HSV)\n', (14256, 14281), True, 'import cv2 as cv\n'), ((14355, 14396), 'cv2.getTrackbarPos', 'cv.getTrackbarPos', (['"""hue_min"""', '"""trackbars"""'], {}), "('hue_min', 'trackbars')\n", (14372, 14396), True, 'import cv2 as cv\n'), ((14418, 14459), 'cv2.getTrackbarPos', 'cv.getTrackbarPos', (['"""hue_max"""', '"""trackbars"""'], {}), "('hue_max', 'trackbars')\n", (14435, 14459), True, 'import cv2 as cv\n'), ((14481, 14522), 'cv2.getTrackbarPos', 'cv.getTrackbarPos', (['"""sat_min"""', '"""trackbars"""'], {}), "('sat_min', 'trackbars')\n", (14498, 14522), True, 'import cv2 as cv\n'), ((14544, 14585), 'cv2.getTrackbarPos', 'cv.getTrackbarPos', (['"""sat_max"""', '"""trackbars"""'], {}), "('sat_max', 'trackbars')\n", (14561, 14585), True, 'import cv2 as cv\n'), ((14607, 14648), 'cv2.getTrackbarPos', 'cv.getTrackbarPos', (['"""val_min"""', '"""trackbars"""'], {}), "('val_min', 'trackbars')\n", (14624, 14648), True, 'import cv2 as cv\n'), ((14670, 14711), 'cv2.getTrackbarPos', 'cv.getTrackbarPos', (['"""val_max"""', '"""trackbars"""'], {}), "('val_max', 'trackbars')\n", (14687, 14711), True, 'import cv2 as cv\n'), ((14973, 15004), 'numpy.array', 'np.array', (['[h_min, s_min, v_min]'], {}), '([h_min, s_min, v_min])\n', (14981, 15004), True, 'import numpy as np\n'), ((15030, 15061), 'numpy.array', 'np.array', (['[h_max, s_max, v_max]'], {}), '([h_max, s_max, v_max])\n', (15038, 15061), True, 'import numpy as np\n'), ((15089, 15108), 'numpy.array', 'np.array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (15097, 15108), True, 'import numpy as np\n'), ((15136, 15160), 'numpy.array', 'np.array', (['[179, 255, 30]'], {}), '([179, 255, 30])\n', (15144, 15160), True, 'import numpy as np\n'), ((15201, 15242), 'cv2.inRange', 'cv.inRange', (['img_hsv', 'hsv_lower', 'hsv_upper'], {}), '(img_hsv, hsv_lower, hsv_upper)\n', (15211, 15242), True, 'import cv2 as cv\n'), ((15269, 15314), 'cv2.inRange', 'cv.inRange', (['img_hsv', 'black_lower', 'black_upper'], {}), '(img_hsv, black_lower, black_upper)\n', (15279, 15314), True, 'import cv2 as cv\n'), ((15387, 15430), 'cv2.bitwise_and', 'cv.bitwise_and', (['img_hsv', 'img_hsv'], {'mask': 'mask'}), '(img_hsv, img_hsv, mask=mask)\n', (15401, 15430), True, 'import cv2 as cv\n'), ((15458, 15497), 'cv2.imshow', 'cv.imshow', (['"""masked_image"""', 'masked_image'], {}), "('masked_image', masked_image)\n", (15467, 15497), True, 'import cv2 as cv\n'), ((17844, 17919), 'cv2.drawContours', 'cv.drawContours', (['output_image', '[cnt]', '(0)'], {'color': '(255, 255, 255)', 'thickness': '(5)'}), '(output_image, [cnt], 0, color=(255, 255, 255), thickness=5)\n', (17859, 17919), True, 'import cv2 as cv\n'), ((18075, 18095), 'cv2.boundingRect', 'cv.boundingRect', (['cnt'], {}), '(cnt)\n', (18090, 18095), True, 'import cv2 as cv\n'), ((18150, 18169), 'cv2.contourArea', 'cv.contourArea', (['cnt'], {}), '(cnt)\n', (18164, 18169), True, 'import cv2 as cv\n'), ((18349, 18367), 'cv2.convexHull', 'cv.convexHull', (['cnt'], {}), '(cnt)\n', (18362, 
18367), True, 'import cv2 as cv\n'), ((18393, 18413), 'cv2.contourArea', 'cv.contourArea', (['hull'], {}), '(hull)\n', (18407, 18413), True, 'import cv2 as cv\n'), ((18489, 18514), 'numpy.sqrt', 'np.sqrt', (['(4 * area / np.pi)'], {}), '(4 * area / np.pi)\n', (18496, 18514), True, 'import numpy as np\n'), ((18529, 18544), 'cv2.moments', 'cv.moments', (['cnt'], {}), '(cnt)\n', (18539, 18544), True, 'import cv2 as cv\n'), ((18870, 18911), 'cv2.cvtColor', 'cv.cvtColor', (['img_subset', 'cv.COLOR_BGR2HSV'], {}), '(img_subset, cv.COLOR_BGR2HSV)\n', (18881, 18911), True, 'import cv2 as cv\n'), ((7086, 7114), 'numpy.where', 'np.where', (['(hsv_mask > 1)', '(0)', '(1)'], {}), '(hsv_mask > 1, 0, 1)\n', (7094, 7114), True, 'import numpy as np\n'), ((7629, 7664), 'scipy.ndimage.binary_fill_holes', 'ndimage.binary_fill_holes', (['hsv_mask'], {}), '(hsv_mask)\n', (7654, 7664), False, 'from scipy import ndimage\n'), ((8159, 8202), 'cv2.bitwise_and', 'cv.bitwise_and', (['image', 'image'], {'mask': 'hsv_mask'}), '(image, image, mask=hsv_mask)\n', (8173, 8202), True, 'import cv2 as cv\n'), ((10529, 10555), 'numpy.zeros_like', 'np.zeros_like', (['input_image'], {}), '(input_image)\n', (10542, 10555), True, 'import numpy as np\n'), ((10656, 10744), 'cv2.drawContours', 'cv.drawContours', (['cimg_justthiscontour', '[cnt]', '(0)'], {'color': '(255, 255, 255)', 'thickness': '(-1)'}), '(cimg_justthiscontour, [cnt], 0, color=(255, 255, 255),\n thickness=-1)\n', (10671, 10744), True, 'import cv2 as cv\n'), ((10968, 11011), 'cv2.cvtColor', 'cv.cvtColor', (['cimg_subset', 'cv.COLOR_BGR2GRAY'], {}), '(cimg_subset, cv.COLOR_BGR2GRAY)\n', (10979, 11011), True, 'import cv2 as cv\n'), ((11046, 11095), 'cv2.threshold', 'cv.threshold', (['cimg_mask', '(0)', '(255)', 'cv.THRESH_BINARY'], {}), '(cimg_mask, 0, 255, cv.THRESH_BINARY)\n', (11058, 11095), True, 'import cv2 as cv\n'), ((11192, 11267), 'cv2.drawContours', 'cv.drawContours', (['output_image', '[cnt]', '(0)'], {'color': '(255, 255, 255)', 'thickness': '(5)'}), '(output_image, [cnt], 0, color=(255, 255, 255), thickness=5)\n', (11207, 11267), True, 'import cv2 as cv\n'), ((11441, 11469), 'numpy.where', 'np.where', (['(cimg_subset == 255)'], {}), '(cimg_subset == 255)\n', (11449, 11469), True, 'import numpy as np\n'), ((15515, 15531), 'cv2.waitKey', 'cv.waitKey', (['(1000)'], {}), '(1000)\n', (15525, 15531), True, 'import cv2 as cv\n'), ((19220, 19246), 'numpy.zeros_like', 'np.zeros_like', (['input_image'], {}), '(input_image)\n', (19233, 19246), True, 'import numpy as np\n'), ((19347, 19435), 'cv2.drawContours', 'cv.drawContours', (['cimg_justthiscontour', '[cnt]', '(0)'], {'color': '(255, 255, 255)', 'thickness': '(-1)'}), '(cimg_justthiscontour, [cnt], 0, color=(255, 255, 255),\n thickness=-1)\n', (19362, 19435), True, 'import cv2 as cv\n'), ((19659, 19702), 'cv2.cvtColor', 'cv.cvtColor', (['cimg_subset', 'cv.COLOR_BGR2GRAY'], {}), '(cimg_subset, cv.COLOR_BGR2GRAY)\n', (19670, 19702), True, 'import cv2 as cv\n'), ((19737, 19786), 'cv2.threshold', 'cv.threshold', (['cimg_mask', '(0)', '(255)', 'cv.THRESH_BINARY'], {}), '(cimg_mask, 0, 255, cv.THRESH_BINARY)\n', (19749, 19786), True, 'import cv2 as cv\n'), ((19883, 19958), 'cv2.drawContours', 'cv.drawContours', (['output_image', '[cnt]', '(0)'], {'color': '(255, 255, 255)', 'thickness': '(5)'}), '(output_image, [cnt], 0, color=(255, 255, 255), thickness=5)\n', (19898, 19958), True, 'import cv2 as cv\n'), ((20132, 20160), 'numpy.where', 'np.where', (['(cimg_subset == 255)'], {}), '(cimg_subset == 255)\n', (20140, 
20160), True, 'import numpy as np\n'), ((20869, 20895), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['df'], {}), '(df)\n', (20891, 20895), True, 'import pandas as pd\n'), ((4141, 4159), 'numpy.sqrt', 'np.sqrt', (['(2 * np.pi)'], {}), '(2 * np.pi)\n', (4148, 4159), True, 'import numpy as np\n'), ((6853, 6889), 'cv2.cvtColor', 'cv.cvtColor', (['image', 'cv.COLOR_BGR2HSV'], {}), '(image, cv.COLOR_BGR2HSV)\n', (6864, 6889), True, 'import cv2 as cv\n'), ((7537, 7554), 'numpy.ones', 'np.ones', (['(20, 20)'], {}), '((20, 20))\n', (7544, 7554), True, 'import numpy as np\n'), ((8003, 8046), 'cv2.bitwise_and', 'cv.bitwise_and', (['image', 'image'], {'mask': 'hsv_mask'}), '(image, image, mask=hsv_mask)\n', (8017, 8046), True, 'import cv2 as cv\n'), ((11300, 11349), 'cv2.bitwise_and', 'cv.bitwise_and', (['img_subset', 'img_subset'], {'mask': 'mask'}), '(img_subset, img_subset, mask=mask)\n', (11314, 11349), True, 'import cv2 as cv\n'), ((19991, 20040), 'cv2.bitwise_and', 'cv.bitwise_and', (['img_subset', 'img_subset'], {'mask': 'mask'}), '(img_subset, img_subset, mask=mask)\n', (20005, 20040), True, 'import cv2 as cv\n'), ((11957, 11978), 'random.randint', 'randint', (['(10000)', '(99999)'], {}), '(10000, 99999)\n', (11964, 11978), False, 'from random import randint, uniform\n')]
|
############################################################
# -*- coding: utf-8 -*-
#
# # # # # # ####
# ## ## # ## # #
# # # # # # # # # ###
# # ## # ## ## #
# # # # # # ####
#
# Python-based Tool for interaction with the 10micron mounts
# GUI with PyQT5 for python
# Python v3.6.4
#
# <NAME>
# (c) 2016, 2017, 2018
#
# Licence APL2.0
#
###########################################################
import logging
import os
import PyQt5
import time
import copy
import operator
import numpy
from astrometry import transform
class ModelPoints:
logger = logging.getLogger(__name__)
def __init__(self, app):
self.app = app
self.transform = transform.Transform(self.app)
self.horizonPoints = list()
self.modelPoints = list()
self.celestialEquator = list()
# signal slot
self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName)
self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints)
self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs)
self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName)
self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints)
self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs)
self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName)
self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask)
self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs)
self.app.signalMountSiteData.connect(self.generateCelestialEquator)
def initConfig(self):
try:
if 'HorizonPointsFileName' in self.app.config:
self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName'])
if 'CheckUseMinimumHorizonLine' in self.app.config:
self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine'])
if 'CheckUseFileHorizonLine' in self.app.config:
self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine'])
if 'AltitudeMinimumHorizon' in self.app.config:
self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon'])
if 'ModelInitialPointsFileName' in self.app.config:
self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName'])
if 'ModelFullPointsFileName' in self.app.config:
self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName'])
if 'HorizonPointsFileName' in self.app.config and 'CheckUseMinimumHorizonLine' in self.app.config and 'CheckUseFileHorizonLine' in self.app.config and 'AltitudeMinimumHorizon' in self.app.config:
self.loadHorizonPoints(self.app.config['HorizonPointsFileName'],
self.app.config['CheckUseFileHorizonLine'],
self.app.config['CheckUseMinimumHorizonLine'],
self.app.config['AltitudeMinimumHorizon'])
except Exception as e:
self.logger.error('item in config.cfg could not be initialize, error:{0}'.format(e))
finally:
pass
def storeConfig(self):
self.app.config['HorizonPointsFileName'] = self.app.ui.le_horizonPointsFileName.text()
self.app.config['CheckUseMinimumHorizonLine'] = self.app.ui.checkUseMinimumHorizonLine.isChecked()
self.app.config['CheckUseFileHorizonLine'] = self.app.ui.checkUseFileHorizonLine.isChecked()
self.app.config['AltitudeMinimumHorizon'] = self.app.ui.altitudeMinimumHorizon.value()
self.app.config['ModelInitialPointsFileName'] = self.app.ui.le_modelInitialPointsFileName.text()
self.app.config['ModelFullPointsFileName'] = self.app.ui.le_modelFullPointsFileName.text()
def saveHorizonMask(self):
filepath = os.getcwd() + '/config/' + self.app.ui.le_horizonPointsFileName.text()
self.saveHorizonPoints(filepath)
def saveHorizonMaskAs(self):
value, ext = self.app.selectFile(self.app, 'Save horizon mask points file', '/config', 'Model point files (*.txt)', False)
if value != '':
self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value))
self.saveHorizonPoints(value)
else:
self.logger.warning('No model points file selected')
def selectHorizonPointsFileName(self):
value, ext = self.app.selectFile(self.app, 'Open horizon mask file', '/config', 'Horizon mask files (*.txt)', True)
if value != '':
self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value))
self.app.hemisphereWindow.selectHorizonPointsMode()
self.app.hemisphereWindow.drawHemisphere()
def saveModelPoints(self, modelPointsFileName):
msg = None
fileHandle = None
if modelPointsFileName.strip() == '':
msg = 'No Model Points Filename given!'
self.logger.warning('No Model Points Filename given!')
return msg
try:
fileHandle = open(modelPointsFileName + '.txt', 'w')
for i in range(0, len(self.modelPoints)):
fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\n'.format(self.modelPoints[i][0], self.modelPoints[i][1]))
fileHandle.close()
except Exception as e:
msg = 'Error saving modeling points to file [{0}] error: {1}!'.format(modelPointsFileName, e)
            self.logger.warning('Error saving modeling points to file [{0}] error: {1}!'.format(modelPointsFileName, e))
finally:
if fileHandle:
fileHandle.close()
return msg
def saveInitialModelPoints(self):
filepath = os.getcwd() + '/config/' + self.app.ui.le_modelInitialPointsFileName.text()
self.saveModelPoints(filepath)
def saveInitialModelPointsAs(self):
value, ext = self.app.selectFile(self.app, 'Save initial model points file', '/config', 'Model point files (*.txt)', False)
if value != '':
self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value))
self.saveModelPoints(value)
else:
self.logger.warning('No model points file selected')
def selectInitialModelPointsFileName(self):
value, ext = self.app.selectFile(self.app, 'Open initial model points file', '/config', 'Model points files (*.txt)', True)
if value != '':
value = os.path.basename(value)
self.app.ui.le_modelInitialPointsFileName.setText(value)
self.showInitialPoints(value)
else:
self.logger.warning('No file selected')
def saveFullModelPoints(self):
filepath = os.getcwd() + '/config/' + self.app.ui.le_modelFullPointsFileName.text()
self.saveModelPoints(filepath)
def saveFullModelPointsAs(self):
value, ext = self.app.selectFile(self.app, 'Save full model points file', '/config', 'Model point files (*.txt)', False)
if value != '':
self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value))
self.saveModelPoints(value)
else:
self.logger.warning('No model points file selected')
def selectFullModelPointsFileName(self):
value, ext = self.app.selectFile(self.app, 'Open full model points file', '/config', 'Model points files (*.txt)', True)
if value != '':
value = os.path.basename(value)
self.app.ui.le_modelFullPointsFileName.setText(value)
self.showFullPoints(value, self.app.ui.checkDeletePointsHorizonMask.isChecked(), self.app.ui.checkSortPoints.isChecked())
else:
self.logger.warning('No file selected')
def loadModelPoints(self, modelPointsFileName, modeltype):
p = []
number = 0
msg = None
if modelPointsFileName.strip() == '':
msg = 'No model points filename given!'
self.logger.warning('No model points filename given!')
return p, msg
try:
with open('config/' + modelPointsFileName + '.txt', 'r') as fileHandle:
for line in fileHandle:
if line.startswith('GRID'):
                        # if GRID, then it's a TSX (The Sky X) file
convertedLine = line.rstrip('\n').split()
point = (float(convertedLine[2]), float(convertedLine[3]))
number += 1
if modeltype == 'Refinement' and number > 3:
p.append(point)
elif modeltype == 'Base' and number <= 3:
p.append(point)
elif line.startswith('MW-3'):
                        # if MW-3, it's the native MountWizzard3 format
convertedLine = line.rstrip('\n').split(':')
p.append((float(convertedLine[1]), float(convertedLine[2])))
else:
# format is same as Per's Model Maker
convertedLine = line.rstrip('\n').split(':')
point = (int(convertedLine[0]), int(convertedLine[1]))
if len(convertedLine) == 2 and modeltype == 'Full':
p.append(point)
elif len(convertedLine) != 2 and modeltype == 'Initial':
p.append(point)
except Exception as e:
msg = 'Error loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e)
self.logger.warning('Error loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e))
finally:
return p, msg
def sortPoints(self):
if len(self.modelPoints) == 0:
self.logger.warning('There are no points to sort')
return
westSide = []
eastSide = []
a = sorted(self.modelPoints, key=operator.itemgetter(0))
for i in range(0, len(a)):
if a[i][0] >= 180:
westSide.append((a[i][0], a[i][1]))
else:
eastSide.append((a[i][0], a[i][1]))
westSide = sorted(westSide, key=operator.itemgetter(1))
eastSide = sorted(eastSide, key=operator.itemgetter(1))
self.modelPoints = westSide + eastSide
def loadHorizonPoints(self, horizonPointsFileName, horizonByFile, horizonByAltitude, altitudeMinimumHorizon):
self.horizonPoints = []
if not (horizonByFile or horizonByAltitude):
return
hp = []
msg = None
if horizonByFile:
if horizonPointsFileName == '':
msg = 'No horizon points filename given !'
return msg
if not os.path.isfile(os.getcwd() + '/config/' + horizonPointsFileName + '.txt'):
msg = 'Horizon points file does not exist !'
self.logger.warning('Horizon points file does not exist')
else:
try:
with open(os.getcwd() + '/config/' + horizonPointsFileName + '.txt') as f:
for line in f:
if ':' in line:
# model maker format
m = line.rstrip('\n').split(':')
else:
# carte du ciel / skychart format
m = line.rstrip('\n').split(' ')
point = (int(m[0]), int(m[1]))
hp.append(point)
f.close()
except Exception as e:
msg = 'Error loading horizon points: {0}'.format(e)
self.logger.error('Error loading horizon points: {0}'.format(e))
return msg
hp = sorted(hp, key=operator.itemgetter(0))
if len(hp) == 0:
hp = ((0, 0), (360, 0))
x = [i[0] for i in hp]
y = [i[1] for i in hp]
if horizonByAltitude:
y = numpy.clip(y, altitudeMinimumHorizon, None)
self.horizonPoints = [list(a) for a in zip(x, y)]
return msg
def saveHorizonPoints(self, horizonPointsFileName):
msg = None
fileHandle = None
if horizonPointsFileName.strip() == '':
msg = 'No horizon points filename given!'
self.logger.warning('No Model Points Filename given!')
return msg
try:
fileHandle = open(horizonPointsFileName + '.txt', 'w')
for i in range(0, len(self.horizonPoints)):
# saving in model maker format
fileHandle.write('{0:03d}:{1:03d}\n'.format(int(self.horizonPoints[i][0]), int(int(self.horizonPoints[i][1]))))
fileHandle.close()
except Exception as e:
msg = 'Error saving horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e)
            self.logger.warning('Error saving horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e))
finally:
if fileHandle:
fileHandle.close()
return msg
def isAboveHorizonLine(self, point):
x = range(0, 361)
y = numpy.interp(x, [i[0] for i in self.horizonPoints], [i[1] for i in self.horizonPoints], left=None, right=None, period=None)
if point[1] > y[int(point[0])]:
return True
else:
return False
def deleteBelowHorizonLine(self):
i = 0
while i < len(self.modelPoints):
if self.isAboveHorizonLine(self.modelPoints[i]):
i += 1
else:
del self.modelPoints[i]
def deletePoints(self):
self.modelPoints = list()
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()
def showInitialPoints(self, filename):
self.modelPoints, msg = self.loadModelPoints(filename, 'Initial')
self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints)))
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()
def showFullPoints(self, filename, limitByHorizonMask, doSortingPoints):
self.modelPoints, msg = self.loadModelPoints(filename, 'Full')
if limitByHorizonMask:
self.deleteBelowHorizonLine()
if doSortingPoints:
self.sortPoints()
self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints)))
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()
def generateDSOPoints(self, limitByHorizonMask, hoursPathLength, numberOfPathPoints, hoursPathLengthPreview):
# we have no position of the mount -> therefore we can't calculate the path
if 'RaJNow' not in self.app.workerMountDispatcher.data:
return
self.modelPoints = list()
ra = copy.copy(self.app.workerMountDispatcher.data['RaJNow'])
dec = copy.copy(self.app.workerMountDispatcher.data['DecJNow'])
for i in range(0, numberOfPathPoints):
ra = ra - float(i) * hoursPathLength / numberOfPathPoints - hoursPathLengthPreview
az, alt = self.transform.transformERFA(ra, dec, 1)
if alt > 0:
self.modelPoints.append((az, alt))
if limitByHorizonMask:
self.deleteBelowHorizonLine()
self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints)))
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()
def generateMaxPoints(self, limitByHorizonMask, doSortingPoints):
west = []
east = []
off = -5
i = 0
for dec in range(-15, 90, 10):
if dec < 30:
step = 10
elif dec < 70:
step = 10
else:
step = 30
if i % 2:
for ha in range(120 + off, -120 + off, -step):
az, alt = self.transform.topocentricToAzAlt(ha / 10, dec)
if alt > 0:
if az > 180:
east.insert(0, (az, alt))
else:
west.append((az, alt))
else:
for ha in range(-120 + off, 120 + off, step):
az, alt = self.transform.topocentricToAzAlt(ha / 10, dec)
if alt > 0:
if az > 180:
east.insert(0, (az, alt))
else:
west.append((az, alt))
i += 1
self.modelPoints = west + east
if limitByHorizonMask:
self.deleteBelowHorizonLine()
if doSortingPoints:
self.sortPoints()
self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints)))
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()
def generateNormalPoints(self, limitByHorizonMask, doSortingPoints):
west = []
east = []
off = -5
i = 0
for dec in range(-15, 90, 15):
if dec < 60:
step = 10
else:
step = 20
if i % 2:
for ha in range(120 + off, -120 + off, -step):
az, alt = self.transform.topocentricToAzAlt(ha / 10, dec)
if alt > 0:
if az > 180:
east.insert(0, (az, alt))
else:
west.append((az, alt))
else:
for ha in range(-120 + off, 120 + off, step):
az, alt = self.transform.topocentricToAzAlt(ha / 10, dec)
if alt > 0:
if az > 180:
east.insert(0, (az, alt))
else:
west.append((az, alt))
i += 1
self.modelPoints = west + east
if limitByHorizonMask:
self.deleteBelowHorizonLine()
if doSortingPoints:
self.sortPoints()
self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints)))
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()
def generateMinPoints(self, limitByHorizonMask, doSortingPoints):
west = list()
east = list()
off = -5
i = 0
for dec in range(-15, 90, 15):
if dec < 60:
step = 15
else:
step = 30
if i % 2:
for ha in range(120 + off, -120 + off, -step):
az, alt = self.transform.topocentricToAzAlt(ha / 10, dec)
if alt > 0:
if az > 180:
east.insert(0, (az, alt))
else:
west.append((az, alt))
else:
for ha in range(-120 + off, 120 + off, step):
az, alt = self.transform.topocentricToAzAlt(ha / 10, dec)
if alt > 0:
if az > 180:
east.insert(0, (az, alt))
else:
west.append((az, alt))
i += 1
self.modelPoints = west + east
if limitByHorizonMask:
self.deleteBelowHorizonLine()
if doSortingPoints:
self.sortPoints()
self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints)))
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()
def generateGridPoints(self, limitByHorizonMask, doSortingPoints, numberOfRows, numberOfColumns, altitudeMin, altitudeMax):
west = list()
east = list()
i = 0
for alt in range(altitudeMin, altitudeMax + 1, int((altitudeMax - altitudeMin) / (numberOfRows - 1))):
if i % 2:
for az in range(365 - int(360 / numberOfColumns), 0, -int(360 / numberOfColumns)):
if alt > 0:
if az > 180:
east.insert(0, (az, alt))
else:
west.append((az, alt))
else:
for az in range(5, 360, int(360 / numberOfColumns)):
if alt > 0:
if az > 180:
east.insert(0, (az, alt))
else:
west.append((az, alt))
i += 1
self.modelPoints = west + east
if limitByHorizonMask:
self.deleteBelowHorizonLine()
if doSortingPoints:
self.sortPoints()
self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints)))
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()
def generateInitialPoints(self, azimuth, altitude, numberOfPoints):
self.modelPoints = list()
for i in range(0, numberOfPoints):
azp = i * 360 / numberOfPoints + azimuth
if azp > 360:
azp -= 360
azp = int(azp)
point = (azp, altitude)
self.modelPoints.append(point)
self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints)))
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()
def generateCelestialEquator(self):
self.celestialEquator = list()
off = -5
for dec in range(-15, 90, 15):
for ha in range(120 + off, -120 + off, -2):
az, alt = self.transform.topocentricToAzAlt(ha / 10, dec)
if alt > 0:
self.celestialEquator.append((az, alt))
|
[
"os.path.basename",
"os.getcwd",
"copy.copy",
"numpy.clip",
"astrometry.transform.Transform",
"numpy.interp",
"operator.itemgetter",
"logging.getLogger"
] |
[((621, 648), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (638, 648), False, 'import logging\n'), ((727, 756), 'astrometry.transform.Transform', 'transform.Transform', (['self.app'], {}), '(self.app)\n', (746, 756), False, 'from astrometry import transform\n'), ((13679, 13807), 'numpy.interp', 'numpy.interp', (['x', '[i[0] for i in self.horizonPoints]', '[i[1] for i in self.horizonPoints]'], {'left': 'None', 'right': 'None', 'period': 'None'}), '(x, [i[0] for i in self.horizonPoints], [i[1] for i in self.\n horizonPoints], left=None, right=None, period=None)\n', (13691, 13807), False, 'import numpy\n'), ((15317, 15373), 'copy.copy', 'copy.copy', (["self.app.workerMountDispatcher.data['RaJNow']"], {}), "(self.app.workerMountDispatcher.data['RaJNow'])\n", (15326, 15373), False, 'import copy\n'), ((15388, 15445), 'copy.copy', 'copy.copy', (["self.app.workerMountDispatcher.data['DecJNow']"], {}), "(self.app.workerMountDispatcher.data['DecJNow'])\n", (15397, 15445), False, 'import copy\n'), ((6841, 6864), 'os.path.basename', 'os.path.basename', (['value'], {}), '(value)\n', (6857, 6864), False, 'import os\n'), ((7822, 7845), 'os.path.basename', 'os.path.basename', (['value'], {}), '(value)\n', (7838, 7845), False, 'import os\n'), ((12483, 12526), 'numpy.clip', 'numpy.clip', (['y', 'altitudeMinimumHorizon', 'None'], {}), '(y, altitudeMinimumHorizon, None)\n', (12493, 12526), False, 'import numpy\n'), ((4215, 4226), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4224, 4226), False, 'import os\n'), ((4573, 4596), 'os.path.basename', 'os.path.basename', (['value'], {}), '(value)\n', (4589, 4596), False, 'import os\n'), ((4968, 4991), 'os.path.basename', 'os.path.basename', (['value'], {}), '(value)\n', (4984, 4991), False, 'import os\n'), ((6098, 6109), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (6107, 6109), False, 'import os\n'), ((6472, 6495), 'os.path.basename', 'os.path.basename', (['value'], {}), '(value)\n', (6488, 6495), False, 'import os\n'), ((7097, 7108), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (7106, 7108), False, 'import os\n'), ((7459, 7482), 'os.path.basename', 'os.path.basename', (['value'], {}), '(value)\n', (7475, 7482), False, 'import os\n'), ((10371, 10393), 'operator.itemgetter', 'operator.itemgetter', (['(0)'], {}), '(0)\n', (10390, 10393), False, 'import operator\n'), ((10623, 10645), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (10642, 10645), False, 'import operator\n'), ((10687, 10709), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (10706, 10709), False, 'import operator\n'), ((12290, 12312), 'operator.itemgetter', 'operator.itemgetter', (['(0)'], {}), '(0)\n', (12309, 12312), False, 'import operator\n'), ((11202, 11213), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (11211, 11213), False, 'import os\n'), ((11466, 11477), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (11475, 11477), False, 'import os\n')]
|
# Project: hardInfo
# Author: <NAME>
# Date Started: March 18, 2022
# Copyright: (c) Copyright 2022 <NAME>
# Module: model/LsBlk.py
# Date Started: March 23, 2022
# Purpose: Store and provide API for Linux lsblk command.
# Development:
# Arguments to include in the command line:
# lsblk --json --all --zoned --output-all --paths
#
from enum import Enum
from subprocess import Popen, PIPE
from sys import stderr
from json import loads
from tkinter import Tk, messagebox, LabelFrame, BOTH, RAISED
from model.Installation import INSTALLATION_FOLDER
from view.Components import JsonTreeView
PROGRAM_TITLE = "lsblk API"
LSBLK_JSON_FILE = 'lsblk.json'
class Action(Enum):
Generate = 'Generate'
Help = "Help"
Load = 'Load'
Store = 'Store'
Search = 'Search'
Update = 'Update'
Log = 'Log'
Exit = 'Exit'
def __str__(self):
return self.value
class Dispatcher:
def __init__(self):
print("Lshw.Dispatcher does not instantiate")
@staticmethod
def do( action: Action):
if action == Action.Generate:
return Dispatcher.__generateLsBlkJsonFile()
@staticmethod
def __generateLsBlkJsonFile():
# lsblk --json --all --zoned --output-all --paths
proc = Popen(['lsblk', '--json', '--all', '--zoned', '--output-all', '--paths'],
stdin=PIPE, stdout=PIPE, stderr=PIPE).communicate()
jsonText = proc[0].decode('utf-8')
errors = proc[1].decode('utf-8')
if len(errors) > 0:
print(errors, file=stderr)
print("Saving output to:\t" + LSBLK_JSON_FILE)
file = open(LSBLK_JSON_FILE, "w")
file.write(jsonText)
file.close()
return jsonText
def ExitProgram():
answer = messagebox.askyesno('Exit program ', "Exit the " + PROGRAM_TITLE + " program?")
if answer:
mainView.destroy()
if __name__ == '__main__':
mainView = Tk()
mainView.protocol('WM_DELETE_WINDOW', ExitProgram)
mainView.geometry("700x450+250+50")
mainView.title(PROGRAM_TITLE)
jsonText = Dispatcher.do(Action.Generate)
lsblkJson = loads(jsonText)
borderFrame = LabelFrame(mainView, text="Block Devices", border=5, relief=RAISED)
jsonTreeView = JsonTreeView(borderFrame, lsblkJson, {"openBranches": True, "mode": "strict"})
jsonTreeView.pack(expand=True, fill=BOTH)
borderFrame.pack(expand=True, fill=BOTH)
mainView.mainloop()
|
[
"subprocess.Popen",
"json.loads",
"view.Components.JsonTreeView",
"tkinter.messagebox.askyesno",
"tkinter.LabelFrame",
"tkinter.Tk"
] |
[((1875, 1954), 'tkinter.messagebox.askyesno', 'messagebox.askyesno', (['"""Exit program """', "('Exit the ' + PROGRAM_TITLE + ' program?')"], {}), "('Exit program ', 'Exit the ' + PROGRAM_TITLE + ' program?')\n", (1894, 1954), False, 'from tkinter import Tk, messagebox, LabelFrame, BOTH, RAISED\n'), ((2041, 2045), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (2043, 2045), False, 'from tkinter import Tk, messagebox, LabelFrame, BOTH, RAISED\n'), ((2244, 2259), 'json.loads', 'loads', (['jsonText'], {}), '(jsonText)\n', (2249, 2259), False, 'from json import loads\n'), ((2278, 2345), 'tkinter.LabelFrame', 'LabelFrame', (['mainView'], {'text': '"""Block Devices"""', 'border': '(5)', 'relief': 'RAISED'}), "(mainView, text='Block Devices', border=5, relief=RAISED)\n", (2288, 2345), False, 'from tkinter import Tk, messagebox, LabelFrame, BOTH, RAISED\n'), ((2365, 2443), 'view.Components.JsonTreeView', 'JsonTreeView', (['borderFrame', 'lsblkJson', "{'openBranches': True, 'mode': 'strict'}"], {}), "(borderFrame, lsblkJson, {'openBranches': True, 'mode': 'strict'})\n", (2377, 2443), False, 'from view.Components import JsonTreeView\n'), ((1372, 1487), 'subprocess.Popen', 'Popen', (["['lsblk', '--json', '--all', '--zoned', '--output-all', '--paths']"], {'stdin': 'PIPE', 'stdout': 'PIPE', 'stderr': 'PIPE'}), "(['lsblk', '--json', '--all', '--zoned', '--output-all', '--paths'],\n stdin=PIPE, stdout=PIPE, stderr=PIPE)\n", (1377, 1487), False, 'from subprocess import Popen, PIPE\n')]
|
import argparse
from typing import Iterator
def encrypt(message: bytes, key: dict[int, int]) -> Iterator[int]:
    # substitute each plaintext byte according to the key mapping
    encrypted = map(lambda char: key[char], message)
    return encrypted
def decrypt(message: bytes, key: dict[int, int]) -> Iterator[int]:
    # reverse the substitution using the inverted key
    decrypted = map(lambda char: key[char], message)
    return decrypted
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--file')
parser.add_argument('--keyFile')
parser.add_argument('--output')
parser.add_argument('--encrypt', action='store_true')
parser.add_argument('--decrypt', action='store_true')
args = parser.parse_args()
key = None
with open(args.keyFile) as file:
key = file.readline()
encrypting_key = {idx: int(value) for idx, value in enumerate(key.split(' '))}
decrypting_key = {int(value): idx for idx, value in enumerate(key.split(' '))}
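    # (inferred from the code above, not stated in the source) the key file is
    # assumed to hold space-separated integers, one per byte value 0-255, i.e. a
    # substitution-cipher permutation; encrypting maps position -> value and
    # decrypting inverts that mapping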
file_contents = None
encrypted = None
with open(args.file, 'rb') as file:
file_contents = file.read()
output = args.output
if args.encrypt is True:
encrypted = encrypt(file_contents, encrypting_key)
with open(output, "wb") as out:
out.write(bytearray(encrypted))
elif args.decrypt is True:
decrypted = decrypt(file_contents, decrypting_key)
with open(output, "wb") as out:
out.write(bytearray(decrypted))
else:
print('No action type was given')
if __name__ == '__main__':
main()
|
[
"argparse.ArgumentParser"
] |
[((293, 318), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (316, 318), False, 'import argparse\n')]
|
"""
Obit Plotting class
Create a plot object using newOPlot which allows specifying the output
and background color. If no output is specified this information
will be prompted.
Next, the plotting region must be specified using either PSetPlot,
one of the XY plotting routines (PXYPlot, PXYOver, or PXYErr)
PGrayScale, or PContour. Then additional lines, curves, text or symbols may be added.
When all has been added to the plot, use PShow to finalize it.
Notes: on text strings in PLPlot installations
If the Obit installation uses PLPlot for plotting the following
can be used in text strings:
- Greek letters, A #g immediately prior to a Latin character will cause
the Greek equivalent to be used, e.g. #ga will be a lower case alpha.
- Subscripts: Characters between a #d and #u will be written as subscripts
- Superscripts: Characters between a #u and #d will be written as
superscripts
"""
# $Id$
#-----------------------------------------------------------------------
# Copyright (C) 2006,2016,2019
# Associated Universities, Inc. Washington DC, USA.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free
# Software Foundation, Inc., 675 Massachusetts Ave, Cambridge,
# MA 02139, USA.
#
# Correspondence concerning this software should be addressed as follows:
# Internet email: <EMAIL>.
# Postal address: <NAME>
# National Radio Astronomy Observatory
# 520 Edgemont Road
# Charlottesville, VA 22903-2475 USA
#-----------------------------------------------------------------------
# Python shadow class to ObitPlot class
from __future__ import absolute_import
from __future__ import print_function
import Obit, _Obit, InfoList, Image
import math
class OPlot(Obit.OPlot):
"""
Python Obit interface to display server
This class is for creating and using the interface to a plot
Image Members with python interfaces:
======== =======================================
InfoList used to pass instructions to processing
Member List
======== =======================================
"""
def __init__(self, name):
super(OPlot, self).__init__()
Obit.CreateOPlot(self.this, name)
def __del__(self, DeleteOPlot=_Obit.DeleteOPlot):
if _Obit!=None:
DeleteOPlot(self.this)
def __setattr__(self,name,value):
if name == "me" :
# Out with the old
if self.this!=None:
Obit.OPlotUnref(Obit.OPlot_Get_me(self.this))
# In with the new
Obit.OPlot_Set_me(self.this,value)
return
self.__dict__[name] = value
def __getattr__(self,name):
if not isinstance(self, OPlot):
return "Bogus Dude"+str(self.__class__)
if name == "me" :
return Obit.OPlot_Get_me(self.this)
# Functions to return members
if name=="List":
return PGetList(self)
raise AttributeError(name)
def __repr__(self):
if not isinstance(self, OPlot):
return "Bogus Dude"+str(self.__class__)
return "<C OPlot instance> " + Obit.OPlotGetName(self.me)
# Foreground Colors
unBLACK = 0
RED = 1
YELLOW = 2
GREEN = 3
AQUAMARINE = 4
BLACK = 5
WHEAT = 6
GRAY = 7
BROWN = 8
BLUE = 9
BLUEVIOLET = 10
CYAN = 11
TURQUOISE = 12
MAGENTA = 13
SALMON = 14
WHITE = 15
def newOPlot(name, err, output="None", bgcolor=BLACK, nx=1, ny=1 ):
"""
Create and initialize an ObitPlot
* name = name desired for object (labeling purposes)
* err = Python Obit Error/message stack
* output = name and type of output device:
====== ==========================
"None" interactive prompt
"xwin" X-Window (Xlib)
"gcw" Gnome Canvas Widget (interacts with ObitTalk)
"ps" PostScript File (monochrome)
"psc" PostScript File (color)
"xfig" Fig file
"png" PNG file
"jpeg" JPEG file
"gif" GIF file
"null" Null device
====== ==========================
    * bgcolor = background color index (1-15, default BLACK), symbolic names:
      BLACK, RED, YELLOW, GREEN, AQUAMARINE, PINK, WHEAT, GRAY, BROWN,
      BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE
* nx = Number of horizontal subpages
* ny = Number of vertical subpages
"""
################################################################
out = OPlot(name)
Obit.PlotInitPlot(out.me, output, bgcolor, nx, ny, err.me)
return out
# end newOPlot
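# Sketch of typical use (illustrative, assuming an OErr error/message stack
# "err" already exists; the names are not from the source):
#   plot = newOPlot("MyPlot", err, output="None", bgcolor=WHITE, nx=2, ny=1)
# output="None" prompts interactively for the device; nx=2, ny=1 lays out two
# plot subpages side by side.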
def PXYPlot (plot, symbol, x, y, err):
"""
Simple XY Plot
Plot X vs Y using symbol.
Plot should be finalized and displayed with PShow
This routine draws the frame and adds labels, to only overplot data
on the same frame, use ObitPlotXYOver
* plot = plot
* symbol = Symbol index to use for plotting
values in the range [1,12] are usable if negative, use abs value and
connect points
== =================
0 line only
1 dot
2 plus
3 \*
4 open circle
5 x
6 open square
7 open triangle
8 open star
9 filled triangle
10 filled square
11 filled circle
12 filled star
== =================
* x = Independent variable, if None use index
* y = Dependent variable
* err = ObitErr error stack
Optional parameters on plot InfoList
====== ======== ===============================================
XMAX (float) maximum X value (defaults to actual value)
XMIN (float) minimum X value (defaults to actual value)
YMAX (float) maximum Y value (defaults to actual value)
YMIN (float) minimum Y value (defaults to actual value)
TITLE (string) Label for the plot (defaults to none), max 120
XLABEL (string) Label for horizontal axis (defaults to none)
XOPT (string) Options for horizontal axis (default "BCNTS")
See PDrawAxes for details.
YLABEL (string) Label for vertical axis (defaults to none)
YOPT (string) Options for vertical axis (default "BCNTS")
See PDrawAxes for details.
XTICK (float) world coordinate interval between major tick marks
on X axis. If xtick=0.0 [def], the interval is chosen.
NXSUB (long) the number of subintervals to divide the major
coordinate interval into. If xtick=0.0 or nxsub=0,
the number is chosen. [def 0]
YTICK (float) like xtick for the Y axis.
NYSUB (int) like nxsub for the Y axis
CSIZE (int) Scaling factor for characters(default = 1)
SSIZE (int) Scaling factor for symbols(default = 1)
LWIDTH (int) Line width (default = 1)
JUST (int) If !=0 then force X and Y axis scaling to be the same
====== ======== ===============================================
"""
################################################################
# Checks
if not PIsA(plot):
print("Actually ",plot.__class__)
raise TypeError("plot MUST be a Python Obit Plot")
n = len(y) # How many points?
Obit.PlotXYPlot (plot.me, symbol, n, x, y, err.me)
# end PXYPlot
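# Sketch of a complete XY plot (illustrative; assumes "plot" was created with
# newOPlot above and "err" is an OErr stack; PShow is the finalizing call
# referenced in the docstring and its exact form may vary by Obit version):
#   x = [0.1 * i for i in range(100)]
#   y = [math.sin(v) for v in x]       # math is imported at the module top
#   PXYPlot(plot, -4, x, y, err)       # open circles; negative symbol connects points
#   PShow(plot, err)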
def PXYOver (plot, symbol, x, y, err):
"""
Overplot X vs Y
Overplot X vs Y using symbol.
Plot should be finalized and displayed with PShow
* plot = plot
* symbol = Symbol index to use for plotting. Values in the range [1,12]
are usable. If negative, use abs value and connect points.
== ===============
0 line only
1 dot
2 plus
3 \*
4 open circle
5 x
6 open square
7 open triangle
8 open star
9 filled triangle
10 filled square
11 filled circle
12 filled star
== ===============
* x = Independent variable, if None use index
* y = Dependent variable
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
print("Actually ",plot.__class__)
raise TypeError("plot MUST be a Python Obit Plot")
n = len(y) # How many points?
Obit.PlotXYOver (plot.me, symbol, n, x, y, err.me)
# end PXYOver
def PXYErr (plot, symbol, x, y, e, err):
"""
Simple XY Plot with error bars
Plot X vs Y using symbol and error bars.
Plot should be finalized and displayed with PShow
This routine draws the frame and adds labels, to only overplot data
on the same frame, use ObitPlotXYOver
* plot = plot
* symbol = Symbol index to use for plotting. Values in the range [1,12]
are usable. If negative, use abs value and connect points.
== ===============
0 line only
1 dot
2 plus
3 \*
4 open circle
5 x
6 open square
7 open triangle
8 open star
9 filled triangle
10 filled square
11 filled circle
12 filled star
== ===============
* x = Independent variable, if None use index
* y = Dependent variable
* e = if nonNone, error in y
* err = ObitErr error stack
Optional parameters on plot InfoList:
====== ======== ==================================================
XMAX (float) maximum X value (defaults to actual value)
XMIN (float) minimum X value (defaults to actual value)
YMAX (float) maximum Y value (defaults to actual value)
YMIN (float) minimum Y value (defaults to actual value)
TITLE (string) Label for the plot (defaults to none), max 120
XLABEL (string) Label for horizontal axis (defaults to none)
XOPT (string) Options for horizontal axis (default "BCNTS")
See PDrawAxes for details.
YLABEL (string) Label for vertical axis (defaults to none)
YOPT (string) Options for vertical axis (default "BCNTS")
See PDrawAxes for details.
XTICK (float) world coordinate interval between major tick marks
on X axis. If xtick=0.0 [def], the interval is chosen.
NXSUB (int) the number of subintervals to divide the major
coordinate interval into. If xtick=0.0 or nxsub=0,
the number is chosen. [def 0]
YTICK (float) like xtick for the Y axis.
NYSUB (int) like nxsub for the Y axis
CSIZE (int) Scaling factor for characters(default = 1)
SSIZE (int) Scaling factor for symbols(default = 1)
LWIDTH (int) Line width (default = 1)
JUST (int) If !=0 then force X and Y axis scaling to be the same
====== ======== ==================================================
"""
################################################################
# Checks
if not PIsA(plot):
print("Actually ",plot.__class__)
raise TypeError("plot MUST be a Python Obit Plot")
n = len(y) # How many points?
Obit.PlotXYErr (plot.me, symbol, n, x, y, e, err.me)
# end PXYErr
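# Illustrative sketch (documentation only): plot one data set with error bars,
# then overlay a second one on the same frame.  `plot` and `err` are assumed
# to be an initialized Python Obit Plot and an ObitErr; data are placeholders.
def _examplePXYErr(plot, err):
    x = [1.0, 2.0, 3.0, 4.0]
    y = [1.1, 1.9, 3.2, 3.9]
    e = [0.1, 0.2, 0.15, 0.1]
    PXYErr(plot, 4, x, y, e, err)                     # open circles + error bars
    PXYOver(plot, -5, x, [v + 1.0 for v in y], err)   # overplot: x symbols, connected
    PShow(plot, err)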
def PContour (plot, label, image, lev, cntfac, err):
"""
Contour plot of image
Contours at lev times powers of cntfac
Plot should be finalized and displayed with PShow
* plot = plot
* label = Label for plot
* image = ObitImage to plot, BLC, TRC on info member honored
* lev = basic contour level (def 0.1 peak)
    * cntfac = factor for spacing between contours (def sqrt(2))
* err = ObitErr error stack
Optional parameters on plot InfoList:
====== ======= ==================================================
XTICK (float) world coordinate interval between major tick marks
on X axis. If xtick=0.0 [def], the interval is chosen.
NXSUB (int) the number of subintervals to divide the major
coordinate interval into. If xtick=0.0 or nxsub=0,
the number is chosen. [def 0]
YTICK (float) like xtick for the Y axis.
NYSUB (int) like nxsub for the Y axis
CSIZE (int) Scaling factor for characters(default = 1)
LWIDTH (int) Line width (default = 1)
====== ======= ==================================================
"""
################################################################
# Checks
if not PIsA(plot):
print("Actually ",plot.__class__)
raise TypeError("plot MUST be a Python Obit Plot")
if not Image.PIsA(image):
print("Actually ",image.__class__)
raise TypeError("image MUST be a Python Obit Image")
Obit.PlotContour (plot.me, label, image.me, lev, cntfac, err.me)
# end PContour
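# Illustrative sketch (documentation only): contour an image at 10% of the
# peak with sqrt(2) spacing between levels.  `plot`, `image` and `err` are
# assumed to be an initialized Python Obit Plot, a Python Obit Image and an
# ObitErr.
def _examplePContour(plot, image, err):
    PContour(plot, "Example contour plot", image, 0.1, 2.0 ** 0.5, err)
    PShow(plot, err)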
def PGrayScale (plot, label, image, err):
"""
    Gray Scale plot of image
    Display an image as a gray scale (intensity) raster.
Plot should be finalized and displayed with PShow
* plot = plot
* label = Label for plot
* image = ObitImage to plot, BLC, TRC on info member honored
* err = ObitErr error stack
Optional parameters on plot InfoList:
======= ======== =================================================
XTICK (float) world coordinate interval between major tick marks
on X axis. If xtick=0.0 [def], the interval is chosen.
NXSUB (int) the number of subintervals to divide the major
coordinate interval into. If xtick=0.0 or nxsub=0,
the number is chosen. [def 0]
YTICK (float) like xtick for the Y axis.
NYSUB (int) like nxsub for the Y axis
CSIZE (int) Scaling factor for characters(default = 1)
SQRT (bool) If present and true plot sqrt (pixel_value)
    INVERT  (bool)   If present and true invert colors
COLOR (string) Color scheme 'GRAY', 'CONTOUR', 'PHLAME'
default 'GRAY'
PIX_MAX (float) maximum pixel value [def min in image]
PIX_MIN (float) minimum pixel value [def max in image]
======= ======== =================================================
"""
################################################################
# Checks
if not PIsA(plot):
print("Actually ",plot.__class__)
raise TypeError("plot MUST be a Python Obit Plot")
if not Image.PIsA(image):
print("Actually ",image.__class__)
raise TypeError("image MUST be a Python Obit Image")
Obit.PlotGrayScale (plot.me, label, image.me, err.me)
# end PGrayScale
def PMarkCross (plot, image, ra, dec, err, size=5.0):
"""
Mark positions on Contour plot of image
Place cross at positions.
Plot should be finalized and displayed with PShow
* plot = plot
* image = ObitImage to plot
* ra = list of RAs (deg)
* dec = list of Declinations (deg)
* err = ObitErr error stack
* size = size of cross in pixels
Optional parameters on plot InfoList
====== ===== ============================================
CSIZE (int) Scaling factor for characters(default = 1)
LWIDTH (int) Line width (default = 1)
====== ===== ============================================
"""
################################################################
# Checks
if not PIsA(plot):
print("Actually ",plot.__class__)
raise TypeError("plot MUST be a Python Obit Plot")
if not Image.PIsA(image):
print("Actually ",image.__class__)
raise TypeError("image MUST be a Python Obit Image")
n = len(ra)
Obit.PlotMarkCross (plot.me, image.me, n, ra, dec, size, err.me)
# end PMarkCross
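# Illustrative sketch (documentation only): gray-scale display of an image
# with two positions marked.  `plot`, `image` and `err` are assumed to be
# initialized; the coordinates are placeholder values in degrees.
def _examplePGrayScale(plot, image, err):
    PGrayScale(plot, "Example gray scale", image, err)
    ra = [180.00, 180.01]
    dec = [45.00, 45.02]
    PMarkCross(plot, image, ra, dec, err, size=8.0)
    PShow(plot, err)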
def PShow (plot, err):
"""
Display plot
* plot = Python Plot object
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
Obit.PlotFinishPlot(plot.me, err.me)
# end PShow
def PSetPlot (plot, xmin, xmax, ymin, ymax, just, axis, err):
"""
Define plotting area
* plot = Python Plot object
* xmin = the world x-coordinate at the bottom left corner of the viewport.
* xmax = the world x-coordinate at the top right corner of the viewport
(note XMAX may be less than XMIN).
* ymin = the world y-coordinate at the bottom left corner
of the viewport.
* ymax = the world y-coordinate at the top right corner
of the viewport (note YMAX may be less than YMIN)
* just = if JUST=1, the scales of the x and y axes (in
world coordinates per inch) will be equal,
otherwise they will be scaled independently.
* axis = controls the plotting of axes, tick marks, etc:
== ===========================================
-2 draw no box, axes or labels;
-1 draw box only;
0 draw box and label it with coordinates;
1 same as axis=0, but also draw the
coordinate axes (X=0, Y=0);
2 same as axis=1, but also draw grid lines
at major increments of the coordinates;
10 draw box and label X-axis logarithmically;
20 draw box and label Y-axis logarithmically;
30 draw box and label both axes logarithmically.
== ===========================================
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
Obit.PlotSetPlot(plot.me, xmin, xmax, ymin, ymax, just, axis, err.me)
# end PSetPlot
def PLabel (plot, xlabel, ylabel, title, err):
"""
Display plot
* plot = Python Plot object
* xlabel = a label for the x-axis (centered below the viewport).
* ylabel = a label for the y-axis (centered to the left
of the viewport, drawn vertically)
* title = a label for the entire plot (centered above the viewport)
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
Obit.PlotLabel(plot.me, xlabel, ylabel, title, err.me)
# end PLabel
def PDrawAxes(plot, xopt, xtick, nxsub, yopt, ytick, nysub, err):
"""
Draw axes for a plot, label
* plot = Python Plot object
* xopt = string of options for X (horizontal) axis of plot.
Options are single letters, and may be in any order:
= ======================================================================
A draw Axis (X axis is horizontal line Y=0, Y axis is vertical line X=0).
B draw bottom (X) or left (Y) edge of frame.
C draw top (X) or right (Y) edge of frame.
G draw Grid of vertical (X) or horizontal (Y) lines
I Invert the tick marks; ie draw them outside the viewport instead of inside.
L label axis Logarithmically
N write Numeric labels in the conventional location below the
viewport (X) or to the left of the viewport (Y).
M write numeric labels in the unconventional location above the
viewport (X) or to the right of the viewport (Y).
P extend ("Project") major tick marks outside the box (ignored if
option I is specified)
T draw major Tick marks at the major coordinate interval.
S draw minor tick marks (Subticks).
= ======================================================================
* xtick = World coordinate interval between major tick marks
on X axis. If xtick=0.0, the interval is chosen.
* nxsub = The number of subintervals to divide the major coordinate interval
into. If xtick=0.0 or nxsub=0, the number is chosen.
* yopt = string of options for Y (vertical) axis of plot.
Coding is the same as for xopt.
* ytick = like xtick for the Y axis.
* nysub = like nxsub for the Y axis
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
Obit.PlotDrawAxes(plot.me, xopt, xtick, nxsub, yopt, ytick, nysub, err.me)
# end DrawAxes
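# Illustrative sketch (documentation only): build a frame "by hand" with
# PSetPlot/PDrawAxes/PLabel instead of letting PXYPlot draw it.  Assumes an
# initialized `plot` and `err`; ranges and labels are placeholders.
def _exampleCustomFrame(plot, err):
    PSetPlot(plot, 0.0, 10.0, 0.0, 100.0, 0, -2, err)        # no box/labels yet
    PDrawAxes(plot, "BCNTS", 0.0, 0, "BCNTS", 0.0, 0, err)   # frame + ticks + labels
    PLabel(plot, "x axis", "y axis", "Hand-drawn frame", err)
    PShow(plot, err)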
def PSetCharSize (plot,cscale, err):
"""
Set scaling for characters
* plot = Python Plot object
* cscale = new character size (integer multiple of the default size).
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
Obit.PlotSetCharSize (plot.me, cscale, err.me)
# end PSetCharSize
def PSetLineWidth (plot, lwidth, err):
"""
Set line width
* plot = Python Plot object
* lwidth = Width of line (integer multiple of the default size).
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
Obit.PlotSetLineWidth(plot.me, lwidth, err.me)
# end PSetLineWidth
def PSetLineStyle (plot, lstyle, err):
"""
Set line style
* plot = Python Plot object
    * lstyle = Line style index:
               1 = continuous, 2 = dashed, 3 = dot-dash, 4 = dotted,
               5 = dash dot dot dot
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
Obit.PlotSetLineStyle(plot.me, lstyle, err.me)
# end PSetLineStyle
def PSetColor (plot, color, err):
"""
Set foreground color
* plot = Python Plot object
* color = color index (1-15), symbolic names:
      BLACK (not really), RED (default), YELLOW, GREEN, AQUAMARINE, BLACK, WHEAT,
GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
Obit.PlotSetColor(plot.me, color, err.me)
# end PSetColor
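# Illustrative sketch (documentation only): set drawing attributes before
# plotting; the particular values are arbitrary placeholder choices.
def _exampleAttributes(plot, err):
    PSetCharSize(plot, 2, err)    # double-size characters
    PSetLineWidth(plot, 3, err)   # heavier lines
    PSetLineStyle(plot, 2, err)   # dashed
    PSetColor(plot, 2, err)       # color index from the table above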
def PSetPage (plot, sub, err):
"""
Set or advance sub page
Note: some functions such as PContour advance the page
* plot = Python Plot object
* sub = if <=0 advance page, if >0 set current subpage to sub
numbering starts at the top left at 1 and increases along
rows and columns
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
Obit.PlotSetPage(plot.me, sub, err.me)
# end PSetPage
def PText (plot, x, y, angle, just, text, err):
"""
Write text on plot
* plot = Python Plot object
* x = Plot x in world coordinates
* y = Plot y in world coordinates
* angle = Orientation of the text in deg, 0=horizontal
    * just   = Controls justification of the string parallel to
      the specified edge of the viewport. If
      JUST = 0.0, the left-hand end of the string will
      be placed at (x,y); if JUST = 0.5, the center of
      the string will be placed at (x,y); if JUST = 1.0,
      the right-hand end of the string will be placed
      at (x,y). Other values between 0 and 1 give intermediate
      placing, but they are not very useful.
* text = The text string to be plotted. Trailing spaces are
ignored when justifying the string, but leading spaces are
significant.
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
dx = math.cos(angle/57.296)
dy = math.sin(angle/57.296)
Obit.PlotText(plot.me, x, y, dx, dy, just, text, err.me)
# end PText
def PRelText (plot, side, disp, coord, fjust, text, err):
"""
Write text on plot relative to port
* plot = Python Plot object
* side = Must include one of the characters 'B', 'L', 'T',
or 'R' signifying the Bottom, Left, Top, or Right
margin of the viewport. If it includes 'LV' or
'RV', the string is written perpendicular to the
frame rather than parallel to it.
* disp = The displacement of the character string from the
specified edge of the viewport, measured outwards
from the viewport in units of the character
height. Use a negative value to write inside the
viewport, a positive value to write outside.
* coord = The location of the character string along the
specified edge of the viewport, as a fraction of
the length of the edge.
    * fjust  = Controls justification of the string parallel to
      the specified edge of the viewport. If
      FJUST = 0.0, the left-hand end of the string will
      be placed at COORD; if FJUST = 0.5, the center of
      the string will be placed at COORD; if FJUST = 1.0,
      the right-hand end of the string will be placed
      at COORD. Other values between 0 and 1 give
      intermediate placing, but they are not very useful.
* text = The text string to be plotted. Trailing spaces are
ignored when justifying the string, but leading
spaces are significant.
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
Obit.PlotRelText(plot.me, side, disp, coord, fjust, text, err.me)
# end PRelText
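# Illustrative sketch (documentation only): annotate a plot with text at a
# world-coordinate position and with a string centered above the viewport.
# Assumes the plot area has already been defined (e.g. by PSetPlot or PXYPlot).
def _exampleAnnotate(plot, err):
    PText(plot, 1.0, 2.0, 0.0, 0.0, "left-justified, horizontal text", err)
    PRelText(plot, "T", 2.0, 0.5, 0.5, "centered above the frame", err)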
def PDrawLine (plot, x1, y1, x2, y2, err):
"""
Draw a line.
* plot = Python Plot object
* x1 = world x-coordinate of the new pen position.
* y1 = world y-coordinate of the new pen position.
* x2 = world x-coordinate of the new pen position.
* y2 = world y-coordinate of the new pen position.
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
Obit.PlotDrawLine(plot.me, x1, y1, x2, y2, err.me)
# end PDrawLine
def PDrawCurve (plot, x, y, err):
"""
Draw a curve.
* plot = Python Plot object
* x = Array of world x-coordinates of points
* y = Array of world y-coordinates of points
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
n = len(x)
Obit.PlotDrawCurve (plot.me, n, x, y, err.me)
# end PDrawCurve
def PDrawCircle (plot, x, y,radius, err):
"""
Draw a circle.
* plot = Python Plot object
* x = World x-coordinate of center
* y = World y-coordinate of center
* radius = World coordinate radius
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
Obit.PlotDrawCircle (plot.me, x, y, radius, err.me)
# end PDrawCircle
def PDrawSymbol (plot, x, y, symbol, err):
"""
Draw a Symbol
* plot = Python Plot object
* x = world x-coordinate of the center of the symbol
* y = world y-coordinate of the center of the symbol
* symbol = Symbol index to use for plotting. Values in the range [1,12]
are usable. If negative, use abs value and connect points.
== ===============
0 line only
1 dot
2 plus
3 \*
4 open circle
5 x
6 open square
7 open triangle
8 open star
9 filled triangle
10 filled square
11 filled circle
12 filled star
== ===============
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
Obit.PlotDrawSymbol(plot.me, x, y, symbol, err.me)
# end PDrawSymbol
def PDrawPoly (plot, x, y, fill, err):
"""
Draw a Polygon, possibly filled
* plot = Python Plot object
    * x      = array of world x-coordinates of the vertices
               (the number of vertices is taken from len(x))
    * y      = array of world y-coordinates of the vertices
    * fill   = Fill pattern, plot package dependent;
               values in the range [0,8] are usable
== ===============
0 no fill
1 hatched
2 crosshatched
3 plplot:lines 45 deg downwards
4 plplot:lines 30 deg upwards
5 plplot:lines 30 deg downwards
6 plplot:horizontal/vertical lines crossed
7 plplot:horizontal lines
8 plplot:vertical lines
== ===============
* err = ObitErr error stack
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
scale = 1.0
Obit.PlotDrawPoly(plot.me, len(x), x, y, fill, scale, err.me)
# end PDrawPoly
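# Illustrative sketch (documentation only): a few of the low-level drawing
# primitives, assuming the plot area has been defined (e.g. with PSetPlot)
# and `plot`/`err` are initialized; all coordinates are placeholders.
def _examplePrimitives(plot, err):
    PDrawLine(plot, 0.0, 0.0, 1.0, 1.0, err)                   # diagonal line
    PDrawCurve(plot, [0.0, 0.5, 1.0], [0.0, 0.8, 0.2], err)    # connected curve
    PDrawCircle(plot, 0.5, 0.5, 0.25, err)
    PDrawSymbol(plot, 0.5, 0.5, 12, err)                       # filled star
    PDrawPoly(plot, [0.1, 0.9, 0.5], [0.1, 0.1, 0.9], 1, err)  # hatched triangle
    PShow(plot, err)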
def PGetList (plot):
"""
Return the member InfoList
returns InfoList
* plot = Python Obit Plot object
"""
################################################################
# Checks
if not PIsA(plot):
raise TypeError("plot MUST be a Python Obit plot")
#
out = InfoList.InfoList()
out.me = Obit.PlotGetList(plot.me)
return out
# end PGetList
def PIsA (plot):
"""
Tells if the input is a Python ObitPlot
returns true Or false
* Plot = Python Obit Plot to test
"""
################################################################
# Checks
if not isinstance(plot, OPlot):
return False
return Obit.OPlotIsA(plot.me)!=0
# end PIsA
|
[
"Obit.PlotInitPlot",
"Obit.PlotXYPlot",
"Obit.CreateOPlot",
"Obit.PlotSetPage",
"Obit.PlotXYErr",
"Obit.PlotDrawSymbol",
"Obit.PlotSetColor",
"Obit.PlotText",
"InfoList.InfoList",
"Obit.PlotSetLineStyle",
"Obit.PlotContour",
"math.cos",
"Obit.OPlot_Get_me",
"Obit.PlotGetList",
"Obit.OPlot_Set_me",
"Obit.OPlotGetName",
"Obit.PlotXYOver",
"Obit.PlotDrawCurve",
"Obit.PlotDrawAxes",
"Obit.PlotGrayScale",
"math.sin",
"Obit.PlotDrawCircle",
"Obit.PlotSetCharSize",
"Obit.PlotDrawLine",
"Obit.PlotLabel",
"Obit.OPlotIsA",
"Obit.PlotMarkCross",
"Obit.PlotRelText",
"Obit.PlotFinishPlot",
"Image.PIsA",
"Obit.PlotSetLineWidth",
"Obit.PlotSetPlot"
] |
[((5153, 5211), 'Obit.PlotInitPlot', 'Obit.PlotInitPlot', (['out.me', 'output', 'bgcolor', 'nx', 'ny', 'err.me'], {}), '(out.me, output, bgcolor, nx, ny, err.me)\n', (5170, 5211), False, 'import Obit, _Obit, InfoList, Image\n'), ((7952, 8001), 'Obit.PlotXYPlot', 'Obit.PlotXYPlot', (['plot.me', 'symbol', 'n', 'x', 'y', 'err.me'], {}), '(plot.me, symbol, n, x, y, err.me)\n', (7967, 8001), False, 'import Obit, _Obit, InfoList, Image\n'), ((9054, 9103), 'Obit.PlotXYOver', 'Obit.PlotXYOver', (['plot.me', 'symbol', 'n', 'x', 'y', 'err.me'], {}), '(plot.me, symbol, n, x, y, err.me)\n', (9069, 9103), False, 'import Obit, _Obit, InfoList, Image\n'), ((11914, 11965), 'Obit.PlotXYErr', 'Obit.PlotXYErr', (['plot.me', 'symbol', 'n', 'x', 'y', 'e', 'err.me'], {}), '(plot.me, symbol, n, x, y, e, err.me)\n', (11928, 11965), False, 'import Obit, _Obit, InfoList, Image\n'), ((13529, 13592), 'Obit.PlotContour', 'Obit.PlotContour', (['plot.me', 'label', 'image.me', 'lev', 'cntfac', 'err.me'], {}), '(plot.me, label, image.me, lev, cntfac, err.me)\n', (13545, 13592), False, 'import Obit, _Obit, InfoList, Image\n'), ((15357, 15409), 'Obit.PlotGrayScale', 'Obit.PlotGrayScale', (['plot.me', 'label', 'image.me', 'err.me'], {}), '(plot.me, label, image.me, err.me)\n', (15375, 15409), False, 'import Obit, _Obit, InfoList, Image\n'), ((16493, 16556), 'Obit.PlotMarkCross', 'Obit.PlotMarkCross', (['plot.me', 'image.me', 'n', 'ra', 'dec', 'size', 'err.me'], {}), '(plot.me, image.me, n, ra, dec, size, err.me)\n', (16511, 16556), False, 'import Obit, _Obit, InfoList, Image\n'), ((16881, 16917), 'Obit.PlotFinishPlot', 'Obit.PlotFinishPlot', (['plot.me', 'err.me'], {}), '(plot.me, err.me)\n', (16900, 16917), False, 'import Obit, _Obit, InfoList, Image\n'), ((18526, 18595), 'Obit.PlotSetPlot', 'Obit.PlotSetPlot', (['plot.me', 'xmin', 'xmax', 'ymin', 'ymax', 'just', 'axis', 'err.me'], {}), '(plot.me, xmin, xmax, ymin, ymax, just, axis, err.me)\n', (18542, 18595), False, 'import Obit, _Obit, InfoList, Image\n'), ((19194, 19248), 'Obit.PlotLabel', 'Obit.PlotLabel', (['plot.me', 'xlabel', 'ylabel', 'title', 'err.me'], {}), '(plot.me, xlabel, ylabel, title, err.me)\n', (19208, 19248), False, 'import Obit, _Obit, InfoList, Image\n'), ((21208, 21282), 'Obit.PlotDrawAxes', 'Obit.PlotDrawAxes', (['plot.me', 'xopt', 'xtick', 'nxsub', 'yopt', 'ytick', 'nysub', 'err.me'], {}), '(plot.me, xopt, xtick, nxsub, yopt, ytick, nysub, err.me)\n', (21225, 21282), False, 'import Obit, _Obit, InfoList, Image\n'), ((21709, 21754), 'Obit.PlotSetCharSize', 'Obit.PlotSetCharSize', (['plot.me', 'cscale', 'err.me'], {}), '(plot.me, cscale, err.me)\n', (21729, 21754), False, 'import Obit, _Obit, InfoList, Image\n'), ((22170, 22216), 'Obit.PlotSetLineWidth', 'Obit.PlotSetLineWidth', (['plot.me', 'lwidth', 'err.me'], {}), '(plot.me, lwidth, err.me)\n', (22191, 22216), False, 'import Obit, _Obit, InfoList, Image\n'), ((22715, 22761), 'Obit.PlotSetLineStyle', 'Obit.PlotSetLineStyle', (['plot.me', 'lstyle', 'err.me'], {}), '(plot.me, lstyle, err.me)\n', (22736, 22761), False, 'import Obit, _Obit, InfoList, Image\n'), ((23318, 23359), 'Obit.PlotSetColor', 'Obit.PlotSetColor', (['plot.me', 'color', 'err.me'], {}), '(plot.me, color, err.me)\n', (23335, 23359), False, 'import Obit, _Obit, InfoList, Image\n'), ((23922, 23960), 'Obit.PlotSetPage', 'Obit.PlotSetPage', (['plot.me', 'sub', 'err.me'], {}), '(plot.me, sub, err.me)\n', (23938, 23960), False, 'import Obit, _Obit, InfoList, Image\n'), ((25063, 25087), 'math.cos', 'math.cos', (['(angle / 57.296)'], {}), 
'(angle / 57.296)\n', (25071, 25087), False, 'import math\n'), ((25095, 25119), 'math.sin', 'math.sin', (['(angle / 57.296)'], {}), '(angle / 57.296)\n', (25103, 25119), False, 'import math\n'), ((25122, 25178), 'Obit.PlotText', 'Obit.PlotText', (['plot.me', 'x', 'y', 'dx', 'dy', 'just', 'text', 'err.me'], {}), '(plot.me, x, y, dx, dy, just, text, err.me)\n', (25135, 25178), False, 'import Obit, _Obit, InfoList, Image\n'), ((26846, 26911), 'Obit.PlotRelText', 'Obit.PlotRelText', (['plot.me', 'side', 'disp', 'coord', 'fjust', 'text', 'err.me'], {}), '(plot.me, side, disp, coord, fjust, text, err.me)\n', (26862, 26911), False, 'import Obit, _Obit, InfoList, Image\n'), ((27495, 27545), 'Obit.PlotDrawLine', 'Obit.PlotDrawLine', (['plot.me', 'x1', 'y1', 'x2', 'y2', 'err.me'], {}), '(plot.me, x1, y1, x2, y2, err.me)\n', (27512, 27545), False, 'import Obit, _Obit, InfoList, Image\n'), ((28007, 28051), 'Obit.PlotDrawCurve', 'Obit.PlotDrawCurve', (['plot.me', 'n', 'x', 'y', 'err.me'], {}), '(plot.me, n, x, y, err.me)\n', (28025, 28051), False, 'import Obit, _Obit, InfoList, Image\n'), ((28531, 28581), 'Obit.PlotDrawCircle', 'Obit.PlotDrawCircle', (['plot.me', 'x', 'y', 'radius', 'err.me'], {}), '(plot.me, x, y, radius, err.me)\n', (28550, 28581), False, 'import Obit, _Obit, InfoList, Image\n'), ((29527, 29577), 'Obit.PlotDrawSymbol', 'Obit.PlotDrawSymbol', (['plot.me', 'x', 'y', 'symbol', 'err.me'], {}), '(plot.me, x, y, symbol, err.me)\n', (29546, 29577), False, 'import Obit, _Obit, InfoList, Image\n'), ((30977, 30996), 'InfoList.InfoList', 'InfoList.InfoList', ([], {}), '()\n', (30994, 30996), False, 'import Obit, _Obit, InfoList, Image\n'), ((31010, 31035), 'Obit.PlotGetList', 'Obit.PlotGetList', (['plot.me'], {}), '(plot.me)\n', (31026, 31035), False, 'import Obit, _Obit, InfoList, Image\n'), ((2803, 2836), 'Obit.CreateOPlot', 'Obit.CreateOPlot', (['self.this', 'name'], {}), '(self.this, name)\n', (2819, 2836), False, 'import Obit, _Obit, InfoList, Image\n'), ((13402, 13419), 'Image.PIsA', 'Image.PIsA', (['image'], {}), '(image)\n', (13412, 13419), False, 'import Obit, _Obit, InfoList, Image\n'), ((15230, 15247), 'Image.PIsA', 'Image.PIsA', (['image'], {}), '(image)\n', (15240, 15247), False, 'import Obit, _Obit, InfoList, Image\n'), ((16350, 16367), 'Image.PIsA', 'Image.PIsA', (['image'], {}), '(image)\n', (16360, 16367), False, 'import Obit, _Obit, InfoList, Image\n'), ((31370, 31392), 'Obit.OPlotIsA', 'Obit.OPlotIsA', (['plot.me'], {}), '(plot.me)\n', (31383, 31392), False, 'import Obit, _Obit, InfoList, Image\n'), ((3181, 3216), 'Obit.OPlot_Set_me', 'Obit.OPlot_Set_me', (['self.this', 'value'], {}), '(self.this, value)\n', (3198, 3216), False, 'import Obit, _Obit, InfoList, Image\n'), ((3441, 3469), 'Obit.OPlot_Get_me', 'Obit.OPlot_Get_me', (['self.this'], {}), '(self.this)\n', (3458, 3469), False, 'import Obit, _Obit, InfoList, Image\n'), ((3757, 3783), 'Obit.OPlotGetName', 'Obit.OPlotGetName', (['self.me'], {}), '(self.me)\n', (3774, 3783), False, 'import Obit, _Obit, InfoList, Image\n'), ((3109, 3137), 'Obit.OPlot_Get_me', 'Obit.OPlot_Get_me', (['self.this'], {}), '(self.this)\n', (3126, 3137), False, 'import Obit, _Obit, InfoList, Image\n')]
|
# --------------
#Code starts here
import sys
def palindrome(num):
    # Return the smallest palindromic integer strictly greater than num.
    for i in range(num + 1, sys.maxsize):
        if str(i) == str(i)[::-1]:
            return i
palindrome(123)
# --------------
#Code starts here
from collections import Counter
def a_scramble(str_1, str_2):
    # True if the letters of str_1 can be rearranged to spell str_2,
    # i.e. str_2's letter counts are contained in str_1's (case-insensitive).
    list_str1 = Counter(str_1.lower())
    list_str2 = Counter(str_2.lower())
    if not list_str2 - list_str1:
        return True
    else:
        return False
a_scramble("<NAME>","Voldemort")
# --------------
#Code starts here
import math
def isPerfectSquare(x):
    s = int(math.sqrt(x))
    return s * s == x
def check_fib(num):
    # num is a Fibonacci number iff 5*num^2 + 4 or 5*num^2 - 4 is a perfect square.
    return isPerfectSquare(5 * num * num + 4) or isPerfectSquare(5 * num * num - 4)
check_fib(377)
# --------------
#Code starts here
def compress(word):
    # Run-length encode the lowercased word, e.g. "abbs" -> "a1b2s1".
    string = word.lower()
    res = ""
    count = 1
    res += string[0]
    for i in range(len(string) - 1):
        if string[i] == string[i + 1]:
            count += 1
        else:
            res += str(count)
            res += string[i + 1]
            count = 1
    res += str(count)
    return res
compress("abbs")
# --------------
#Code starts here
#Code starts here
from collections import Counter
def k_distinct(string, k):
    # True if the string contains exactly k distinct characters (case-insensitive).
    c = Counter(string.lower())
    return k == len(c.keys())
k_distinct('Messoptamia',8)
k_distinct('SUBBOOKKEEPER',7)
|
[
"math.sqrt"
] |
[((630, 642), 'math.sqrt', 'math.sqrt', (['x'], {}), '(x)\n', (639, 642), False, 'import math\n')]
|
from utils import open_image, open_greyscale_bmp
from workspace_calc import WorkspaceCalculator
from workspace_view import WorkspaceView
class Workspace:
def __init__(self, app_page, room_name, robot_name):
room_bmp = open_greyscale_bmp(room_name)
robot_bmp = open_greyscale_bmp(robot_name)
robot_png = open_image(robot_name, 'png')
self.__calculator = WorkspaceCalculator(room_bmp, robot_bmp)
self.__view = WorkspaceView(app_page, room_bmp, robot_png)
self.__init_config_xy = [] # point -> [0] = x , [1] = y
self.__goal_config_xy = [] # point -> [0] = x , [1] = y
self.current_position_xy = [] # point -> [0] = x , [1] = y
def bind_click_callback(self, action_ref) -> None:
self.__view.set_click_callback(action_ref)
def is_in_collision(self, x, y) -> bool:
return self.__calculator.is_robot_in_collision(x, y)
def reset(self) -> None:
self.__init_config_xy = []
self.__goal_config_xy = []
self.current_position_xy = []
self.__view.reset()
def set_init_config(self, x, y) -> None:
self.__init_config_xy = [x, y]
self.draw_robot_state(x, y)
def set_goal_config(self, x, y) -> None:
self.__goal_config_xy = [x, y]
self.draw_robot_state(x, y)
def draw_robot_state(self, x, y) -> None:
self.__view.reset()
if self.__init_config_xy:
self.__view.draw_robot(self.__init_config_xy[0], self.__init_config_xy[1])
if self.__goal_config_xy:
self.__view.draw_robot(self.__goal_config_xy[0], self.__goal_config_xy[1])
self.__view.draw_robot(x, y)
|
[
"utils.open_greyscale_bmp",
"workspace_view.WorkspaceView",
"workspace_calc.WorkspaceCalculator",
"utils.open_image"
] |
[((232, 261), 'utils.open_greyscale_bmp', 'open_greyscale_bmp', (['room_name'], {}), '(room_name)\n', (250, 261), False, 'from utils import open_image, open_greyscale_bmp\n'), ((282, 312), 'utils.open_greyscale_bmp', 'open_greyscale_bmp', (['robot_name'], {}), '(robot_name)\n', (300, 312), False, 'from utils import open_image, open_greyscale_bmp\n'), ((333, 362), 'utils.open_image', 'open_image', (['robot_name', '"""png"""'], {}), "(robot_name, 'png')\n", (343, 362), False, 'from utils import open_image, open_greyscale_bmp\n'), ((391, 431), 'workspace_calc.WorkspaceCalculator', 'WorkspaceCalculator', (['room_bmp', 'robot_bmp'], {}), '(room_bmp, robot_bmp)\n', (410, 431), False, 'from workspace_calc import WorkspaceCalculator\n'), ((454, 498), 'workspace_view.WorkspaceView', 'WorkspaceView', (['app_page', 'room_bmp', 'robot_png'], {}), '(app_page, room_bmp, robot_png)\n', (467, 498), False, 'from workspace_view import WorkspaceView\n')]
|
# Generated by Django 2.2.13 on 2021-06-08 10:08
import os
from django.db import migrations
def create_premier_tenant(apps, schema_editor):
    # We can't import the Client/Domain models directly as they may be newer
    # versions than this migration expects. We use the historical versions.
Client = apps.get_model('Customers', 'Client')
Domain = apps.get_model('Customers', 'Domain')
DNS = os.getenv('DOMAIN')
tenant_public = Client.objects.get_or_create(schema_name='public',
name='Tibillet Public',
paid_until='2200-12-05',
on_trial=False)[0]
# Add one or more domains for the tenant
domaine_seul = Domain.objects.get_or_create(domain=DNS,
tenant=tenant_public,
is_primary=True,
)
domaine_www = Domain.objects.get_or_create(domain=f'www.{DNS}',
tenant=tenant_public,
is_primary=False,
)
return tenant_public, domaine_seul[0], domaine_www[0]
def reverse(apps, schema_editor):
tenant_public, domaine_seul, domaine_www = create_premier_tenant(apps, schema_editor)
tenant_public.delete()
domaine_seul.delete()
domaine_www.delete()
class Migration(migrations.Migration):
dependencies = [
('Customers', '0001_initial'),
]
operations = [
migrations.RunPython(create_premier_tenant, reverse),
]
|
[
"django.db.migrations.RunPython",
"os.getenv"
] |
[((397, 416), 'os.getenv', 'os.getenv', (['"""DOMAIN"""'], {}), "('DOMAIN')\n", (406, 416), False, 'import os\n'), ((1645, 1697), 'django.db.migrations.RunPython', 'migrations.RunPython', (['create_premier_tenant', 'reverse'], {}), '(create_premier_tenant, reverse)\n', (1665, 1697), False, 'from django.db import migrations\n')]
|
from common.input_validation import (
extract_phone_number,
)
def test_extract_phone_number():
assert extract_phone_number('510501622') == None
assert extract_phone_number('5105016227') == '15105016227'
assert extract_phone_number('15105016227') == '15105016227'
assert extract_phone_number('+15105016227') == '15105016227'
assert extract_phone_number('My number is 510 501 6227') == '15105016227'
assert extract_phone_number('My number is (510) 501-6227.') == '15105016227'
|
[
"common.input_validation.extract_phone_number"
] |
[((112, 145), 'common.input_validation.extract_phone_number', 'extract_phone_number', (['"""510501622"""'], {}), "('510501622')\n", (132, 145), False, 'from common.input_validation import extract_phone_number\n'), ((165, 199), 'common.input_validation.extract_phone_number', 'extract_phone_number', (['"""5105016227"""'], {}), "('5105016227')\n", (185, 199), False, 'from common.input_validation import extract_phone_number\n'), ((228, 263), 'common.input_validation.extract_phone_number', 'extract_phone_number', (['"""15105016227"""'], {}), "('15105016227')\n", (248, 263), False, 'from common.input_validation import extract_phone_number\n'), ((292, 328), 'common.input_validation.extract_phone_number', 'extract_phone_number', (['"""+15105016227"""'], {}), "('+15105016227')\n", (312, 328), False, 'from common.input_validation import extract_phone_number\n'), ((357, 406), 'common.input_validation.extract_phone_number', 'extract_phone_number', (['"""My number is 510 501 6227"""'], {}), "('My number is 510 501 6227')\n", (377, 406), False, 'from common.input_validation import extract_phone_number\n'), ((435, 487), 'common.input_validation.extract_phone_number', 'extract_phone_number', (['"""My number is (510) 501-6227."""'], {}), "('My number is (510) 501-6227.')\n", (455, 487), False, 'from common.input_validation import extract_phone_number\n')]
|
import unittest
import math
import datasets
from pdffigures_utils import get_num_pages_in_pdf
class TestDataset(unittest.TestCase):
def test_pages_annotated_consistency(self):
for dataset in datasets.DATASETS.values():
dataset = dataset()
pages_annotated = dataset.get_annotated_pages_map()
if pages_annotated is None:
continue
pdf_file_map = dataset.get_pdf_file_map()
annotations = dataset.get_annotations("all")
docs = dataset.get_doc_ids("all")
self.assertEqual(set(docs), pages_annotated.keys())
for doc, pages in pages_annotated.items():
filename = pdf_file_map[doc]
self.assertTrue(len(pages) <= dataset.MAX_PAGES_TO_ANNOTATE)
num_pages = get_num_pages_in_pdf(filename)
self.assertTrue(num_pages >= max(pages) - 1)
expected_pages = math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT)
expected_pages = min(expected_pages, dataset.MAX_PAGES_TO_ANNOTATE)
self.assertTrue(len(pages) == expected_pages)
if doc in annotations:
ann = annotations[doc]
self.assertEqual(set(ann["annotated_pages"]), set(pages))
for fig in ann["figures"]:
self.assertTrue(fig.page in pages)
def test_consistency(self):
for dataset in datasets.DATASETS.values():
dataset = dataset()
all_docs = set(dataset.get_doc_ids(datasets.DatasetPartition("all")))
doc_map = dataset.get_pdf_file_map()
self.assertEqual(len(all_docs - doc_map.keys()), 0)
doc_map = dataset.get_color_image_file_map()
if doc_map is not None:
self.assertEqual(len(all_docs - doc_map.keys()), 0)
doc_map = dataset.get_gray_image_file_map()
if doc_map is not None:
self.assertEqual(len(all_docs - doc_map.keys()), 0)
documents = dataset.load_doc_ids(all_docs)
self.assertEqual(all_docs, set([x.doc_id for x in documents]))
for doc in documents:
if doc.color_images is not None and doc.gray_images is not None:
self.assertEqual(doc.gray_images.keys(), doc.color_images.keys())
pages_annotated = doc.pages_annotated
for fig in doc.figures:
self.assertTrue(fig.page in pages_annotated)
self.assertEqual(doc.pdffile.split("/")[-1][:-4], doc.doc_id)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"datasets.DATASETS.values",
"pdffigures_utils.get_num_pages_in_pdf",
"math.ceil",
"datasets.DatasetPartition"
] |
[((2637, 2652), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2650, 2652), False, 'import unittest\n'), ((206, 232), 'datasets.DATASETS.values', 'datasets.DATASETS.values', ([], {}), '()\n', (230, 232), False, 'import datasets\n'), ((1464, 1490), 'datasets.DATASETS.values', 'datasets.DATASETS.values', ([], {}), '()\n', (1488, 1490), False, 'import datasets\n'), ((821, 851), 'pdffigures_utils.get_num_pages_in_pdf', 'get_num_pages_in_pdf', (['filename'], {}), '(filename)\n', (841, 851), False, 'from pdffigures_utils import get_num_pages_in_pdf\n'), ((946, 996), 'math.ceil', 'math.ceil', (['(num_pages * dataset.PAGE_SAMPLE_PERCENT)'], {}), '(num_pages * dataset.PAGE_SAMPLE_PERCENT)\n', (955, 996), False, 'import math\n'), ((1571, 1603), 'datasets.DatasetPartition', 'datasets.DatasetPartition', (['"""all"""'], {}), "('all')\n", (1596, 1603), False, 'import datasets\n')]
|
# -*- coding: utf-8 -*-
import botocore
import boto3
import io
from datetime import datetime
import s3Uploader
# Refs : https://boto3.readthedocs.io/en/latest/reference/services/s3.html
s3 = boto3.client('s3')
def main():
    # [When adding]
    # Create the bucket if it does not exist,
    # otherwise use the existing one.
    # Check whether the file already exists:
    # if it does, delete it and upload the new version,
    # otherwise just add it.
    # [When reading]
    # Read the file.
# response = s3.delete_bucket( Bucket='bun-chan-bot-images')
# print(response)
# response = s3.create_bucket(
# Bucket='bun-chan-bot-images',
# CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'}
# )
# print(response)
# response = None
# response = s3.list_buckets()
# # 指定したBucketが存在しなければ例外発生する。確認用に使える。
# try:
# response = s3.head_bucket(Bucket='bun-chan-bot-images')
# # response = s3.head_bucket(Bucket='test-lambda-on-java')
# print(response)
# except botocore.exceptions.ClientError as e:
# print('The bucket does not found')
# print(e)
# response = s3.head_bucket(Bucket='bun-chan-bot-images')
# print(response)
# for bucket in response['Buckets']:
# print(bucket.get('Name'))
# if bucket.get('Name') != 'bun-chan-bot-images':
# print('Not Found')
# if isExistBucketFor(bucketName):
# else:
# print('Delet bucket...')
# response = s3.delete_bucket( Bucket='bun-chan-bot-images')
# print(response)
# print('Create bucket...')
# response = s3.create_bucket(
# Bucket='bun-chan-bot-images',
# CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'}
# )
bucketName = 'bun-chan-bot-images'
objectName = "image_{name}.jpg".format(name=datetime.now().strftime("%Y%m%d_%H%M%S"))
uploader = s3Uploader.s3Uploader(bucketName, objectName, './image.jpg')
uploader.upload()
if __name__ == '__main__':
main()
|
[
"s3Uploader.s3Uploader",
"datetime.datetime.now",
"boto3.client"
] |
[((194, 212), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (206, 212), False, 'import boto3\n'), ((1821, 1881), 's3Uploader.s3Uploader', 's3Uploader.s3Uploader', (['bucketName', 'objectName', '"""./image.jpg"""'], {}), "(bucketName, objectName, './image.jpg')\n", (1842, 1881), False, 'import s3Uploader\n'), ((1763, 1777), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1775, 1777), False, 'from datetime import datetime\n')]
|
from django.test import TestCase
from majority_judgment.tools import get_ranking, get_ratings, majority_grade
class MajorityJudgmentTestCase(TestCase):
fixtures = ['election.json']
# def setUp(self):
def test_ranking(self):
election_id = 2
ranking = get_ranking(election_id)
ranking = [candidate.pk for candidate in ranking]
ground_truth = [ 2, 3, 4, 13, 6, 7, 15, 14, 8, 12, 16, 5, 11, 17, 10, 1, 9]
self.assertEqual(ranking, ground_truth)
def test_majority_grade(self):
election_id = 2
ranking = get_ranking(election_id)
# ratings = get_ratings(election_id)
majority_grades = [majority_grade(candidate.ratings) for candidate in ranking]
ground_truth = [0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]
self.assertEqual(majority_grades, ground_truth)
|
[
"majority_judgment.tools.majority_grade",
"majority_judgment.tools.get_ranking"
] |
[((282, 306), 'majority_judgment.tools.get_ranking', 'get_ranking', (['election_id'], {}), '(election_id)\n', (293, 306), False, 'from majority_judgment.tools import get_ranking, get_ratings, majority_grade\n'), ((584, 608), 'majority_judgment.tools.get_ranking', 'get_ranking', (['election_id'], {}), '(election_id)\n', (595, 608), False, 'from majority_judgment.tools import get_ranking, get_ratings, majority_grade\n'), ((681, 714), 'majority_judgment.tools.majority_grade', 'majority_grade', (['candidate.ratings'], {}), '(candidate.ratings)\n', (695, 714), False, 'from majority_judgment.tools import get_ranking, get_ratings, majority_grade\n')]
|
from typing import List, Union
import pandas as pd
from zvdata import IntervalLevel
from zvt.api.common import get_kdata_schema
from zvt.factors.algorithm import MacdTransformer, MaTransformer
from zvt.factors.factor import Factor, Transformer, Accumulator
class TechnicalFactor(Factor):
def __init__(self,
entity_ids: List[str] = None,
entity_type: str = 'stock',
exchanges: List[str] = ['sh', 'sz'],
codes: List[str] = None,
the_timestamp: Union[str, pd.Timestamp] = None,
start_timestamp: Union[str, pd.Timestamp] = None,
end_timestamp: Union[str, pd.Timestamp] = None,
columns: List = None,
filters: List = None,
order: object = None,
limit: int = None,
provider: str = 'joinquant',
level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY,
category_field: str = 'entity_id',
time_field: str = 'timestamp',
computing_window: int = None,
keep_all_timestamp: bool = False,
fill_method: str = 'ffill',
effective_number: int = 10,
transformer: Transformer = MacdTransformer(),
accumulator: Accumulator = None,
persist_factor: bool = False,
dry_run: bool = True) -> None:
self.data_schema = get_kdata_schema(entity_type, level=level)
if transformer:
self.indicator_cols = transformer.indicator_cols
if not columns:
columns = ['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low']
super().__init__(self.data_schema, entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp,
end_timestamp, columns, filters, order, limit, provider, level, category_field, time_field,
computing_window, keep_all_timestamp, fill_method, effective_number,
transformer, accumulator, persist_factor, dry_run)
def __json__(self):
result = super().__json__()
result['indicator_cols'] = self.indicator_cols
return result
for_json = __json__ # supported by simplejson
class BullFactor(TechnicalFactor):
def __init__(self,
entity_ids: List[str] = None,
entity_type: str = 'stock',
exchanges: List[str] = ['sh', 'sz'],
codes: List[str] = None,
the_timestamp: Union[str, pd.Timestamp] = None,
start_timestamp: Union[str, pd.Timestamp] = None,
end_timestamp: Union[str, pd.Timestamp] = None,
columns: List = None,
filters: List = None,
order: object = None,
limit: int = None,
provider: str = 'joinquant',
level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY,
category_field: str = 'entity_id',
time_field: str = 'timestamp',
persist_factor: bool = False, dry_run: bool = False) -> None:
transformer = MacdTransformer()
super().__init__(entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp,
columns, filters, order, limit, provider, level, category_field, time_field, 26,
False, None, None, transformer, None, persist_factor, dry_run)
def do_compute(self):
super().do_compute()
s = (self.factor_df['diff'] > 0) & (self.factor_df['dea'] > 0)
self.result_df = s.to_frame(name='score')
if __name__ == '__main__':
factor = TechnicalFactor(entity_type='stock',
codes=['000338'],
start_timestamp='2019-01-01',
end_timestamp='2019-06-10',
level=IntervalLevel.LEVEL_1DAY,
provider='joinquant',
computing_window=26,
transformer=MacdTransformer())
print(factor.get_factor_df().tail())
factor.move_on(to_timestamp='2019-06-17')
diff = factor.get_factor_df()['diff']
dea = factor.get_factor_df()['dea']
macd = factor.get_factor_df()['macd']
assert round(diff.loc[('stock_sz_000338', '2019-06-17')], 2) == 0.06
assert round(dea.loc[('stock_sz_000338', '2019-06-17')], 2) == -0.03
assert round(macd.loc[('stock_sz_000338', '2019-06-17')], 2) == 0.19
|
[
"zvt.factors.algorithm.MacdTransformer",
"zvt.api.common.get_kdata_schema"
] |
[((1309, 1326), 'zvt.factors.algorithm.MacdTransformer', 'MacdTransformer', ([], {}), '()\n', (1324, 1326), False, 'from zvt.factors.algorithm import MacdTransformer, MaTransformer\n'), ((1500, 1542), 'zvt.api.common.get_kdata_schema', 'get_kdata_schema', (['entity_type'], {'level': 'level'}), '(entity_type, level=level)\n', (1516, 1542), False, 'from zvt.api.common import get_kdata_schema\n'), ((3268, 3285), 'zvt.factors.algorithm.MacdTransformer', 'MacdTransformer', ([], {}), '()\n', (3283, 3285), False, 'from zvt.factors.algorithm import MacdTransformer, MaTransformer\n'), ((4218, 4235), 'zvt.factors.algorithm.MacdTransformer', 'MacdTransformer', ([], {}), '()\n', (4233, 4235), False, 'from zvt.factors.algorithm import MacdTransformer, MaTransformer\n')]
|
# Generated by Django 2.2.12 on 2020-06-16 13:14
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('memes', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='task',
name='name',
),
migrations.AddField(
model_name='task',
name='result_url',
field=models.URLField(null=True),
),
migrations.AlterField(
model_name='task',
name='status',
field=models.IntegerField(choices=[(10, '서버 작업 대기중'), (20, '서버 작업 시작'), (30, '서버 리소스 다운로드'), (0, '작업 실패')], default=10),
),
migrations.AlterField(
model_name='taskresource',
name='task',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_resources', to='memes.Task'),
),
]
|
[
"django.db.migrations.RemoveField",
"django.db.models.URLField",
"django.db.models.IntegerField",
"django.db.models.ForeignKey"
] |
[((256, 310), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""task"""', 'name': '"""name"""'}), "(model_name='task', name='name')\n", (278, 310), False, 'from django.db import migrations, models\n'), ((456, 482), 'django.db.models.URLField', 'models.URLField', ([], {'null': '(True)'}), '(null=True)\n', (471, 482), False, 'from django.db import migrations, models\n'), ((602, 719), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(10, '서버 작업 대기중'), (20, '서버 작업 시작'), (30, '서버 리소스 다운로드'), (0, '작업 실패')]", 'default': '(10)'}), "(choices=[(10, '서버 작업 대기중'), (20, '서버 작업 시작'), (30,\n '서버 리소스 다운로드'), (0, '작업 실패')], default=10)\n", (621, 719), False, 'from django.db import migrations, models\n'), ((841, 956), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""task_resources"""', 'to': '"""memes.Task"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='task_resources', to='memes.Task')\n", (858, 956), False, 'from django.db import migrations, models\n')]
|
from entities.remote_radio_head import RemoteRadioHead
from entities.hypervisor import Hypervisor
from entities.baseband_unit import BasebandUnit
from entities.switch import Switch
from forwarding.forwarding import Forwarding
class StatHistory(object):
history = {}
    @staticmethod
    def get(key, current):
if (key in StatHistory.history):
value = current - StatHistory.history[key]
StatHistory.history[key] = current
return value
StatHistory.history[key] = current
return current
class Topology(object):
def __init__(self, env, configuration):
self.env = env
self.forwarding = Forwarding(self.env, self)
self.rrhs = []
self.hypervisors = []
self.external_switch = None
self.stat_history = {}
self.total_migrations = 0
self.setup(configuration)
def update_load(self, rrh_id, arrival_rate = None, packet_mean = None, packet_dev = None):
for rrh in self.rrhs:
if (rrh.id == rrh_id):
if arrival_rate is not None:
rrh.set_arrival_rate(arrival_rate)
if packet_mean is not None:
rrh.set_packet_mean(packet_mean)
if packet_dev is not None:
rrh.set_packet_dev(packet_dev)
def migrate(self, bbu_id, target_hypervisor_id):
target_hypervisor = [hv for hv in self.hypervisors if hv.id == target_hypervisor_id][0]
if (target_hypervisor is None):
raise Exception("Target hypervisor not found with the given id")
for hypervisor in self.hypervisors:
subject_bbu = hypervisor.find_baseband_unit(bbu_id)
if (subject_bbu is not None and hypervisor.id != target_hypervisor.id):
hypervisor.remove_baseband_unit(subject_bbu)
target_hypervisor.add_baseband_unit(subject_bbu)
self.total_migrations += 1
def get_cluster_load(self, cluster):
load = 0
for rrh in self.rrhs:
mapping = self.forwarding.get_mapping(rrh.id)
for bbu in cluster.baseband_units:
if (bbu.id in mapping):
load += (rrh.arrival_rate * rrh.packet_mean)
# break the loop once we found a single transmission
break
return load
def get_common_load(self, bbu_x, bbu_y):
if (bbu_x.id == bbu_y.id): return 0
load = 0
for rrh in self.rrhs:
mapping = self.forwarding.get_mapping(rrh.id)
if (bbu_x.id in mapping and bbu_y.id in mapping):
load += (rrh.arrival_rate * rrh.packet_mean)
return load
def get_transmission_cost(self):
return StatHistory.get('transmission_cost', self.forwarding.get_transmission_cost())
def get_migration_count(self):
return StatHistory.get('migration_count', self.total_migrations)
def get_current_load(self):
total = 0
for rrh in self.rrhs:
total += (rrh.arrival_rate * rrh.packet_mean * len(self.forwarding.get_mapping(rrh.id)))
return total
def get_lifetime_replication_factor(self):
total_received = 0
for hypervisor in self.hypervisors:
total_received += hypervisor.switch.packets_rec
return total_received / self.external_switch.packets_rec
def get_current_replication_factor(self):
total_received = 0
for hypervisor in self.hypervisors:
total_received += StatHistory.get('hypervisor.%d.switch.packets_rec' % hypervisor.id, hypervisor.switch.packets_rec)
if (total_received == 0): return 0.0
return total_received / StatHistory.get('extswitch.packets_rec', self.external_switch.packets_rec)
def get_current_wait(self):
total = 0
bbu_count = 0
for hypervisor in self.hypervisors:
for bbu in hypervisor.bbus:
total += bbu.get_current_wait()
bbu_count += 1
return total / bbu_count
def get_lifetime_wait(self):
total = 0
bbu_count = 0
for hypervisor in self.hypervisors:
for bbu in hypervisor.bbus:
total += bbu.get_lifetime_wait()
bbu_count += 1
return total / bbu_count
def get_current_delay(self):
total = 0
bbu_count = 0
for hypervisor in self.hypervisors:
for bbu in hypervisor.bbus:
total += bbu.get_current_delay()
bbu_count += 1
return total / bbu_count
def get_lifetime_delay(self):
total = 0
bbu_count = 0
for hypervisor in self.hypervisors:
for bbu in hypervisor.bbus:
total += bbu.get_lifetime_delay()
bbu_count += 1
return total / bbu_count
def get_lifetime_drop_rate(self):
total = 0
total_drop = 0
for hypervisor in self.hypervisors:
stats = hypervisor.switch.get_lifetime_stats()
total += (stats['rec'] + stats['drop'])
total_drop += stats['drop']
if (total == 0): return 0.0
return total_drop / total
def get_current_drop_rate(self):
total = 0
total_drop = 0
for hypervisor in self.hypervisors:
stats = hypervisor.switch.get_current_stats()
total += (stats['rec'] + stats['drop'])
total_drop += stats['drop']
if (total == 0): return 0.0
return total_drop / total
def get_current_utilization(self, hypervisor):
load = 0
for rrh in self.rrhs:
mapping = self.forwarding.get_mapping(rrh.id)
for bbu in hypervisor.bbus:
if (bbu.id in mapping):
load += (rrh.arrival_rate * rrh.packet_mean)
# break the loop once we found a single transmission
break
return load / hypervisor.switch.rate
def get_utilization_gain(self):
stopped_hypervisors = 0
for hypervisor in self.hypervisors:
if (len(hypervisor.bbus) == 0):
stopped_hypervisors += 1
return stopped_hypervisors / len(self.hypervisors)
def setup(self, configuration):
self.external_switch = Switch(self.env, 'physical', 'external')
self.external_switch.set_forwarding_function(self.forwarding.forwarding_function)
for remote_radio_head in configuration['remote_radio_heads']:
rrh_object = RemoteRadioHead(self.env, remote_radio_head['id'])
rrh_object.set_arrival_rate(remote_radio_head['arrival_rate'])
rrh_object.set_packet_mean(remote_radio_head['packet_mean'])
rrh_object.set_packet_dev(remote_radio_head['packet_dev'])
rrh_object.out = self.external_switch
self.rrhs.append(rrh_object)
self.forwarding.add_mapping(remote_radio_head['id'], remote_radio_head['baseband_units'])
for hypervisor in configuration['hypervisors']:
hypervisor_object = Hypervisor(self.env, hypervisor['id'])
for baseband_unit in hypervisor['baseband_units']:
bbu_object = BasebandUnit(self.env, baseband_unit['id'])
hypervisor_object.add_baseband_unit(bbu_object)
hypervisor_object.switch.set_forwarding_function(self.forwarding.forwarding_function)
self.hypervisors.append(hypervisor_object)
|
[
"forwarding.forwarding.Forwarding",
"entities.remote_radio_head.RemoteRadioHead",
"entities.baseband_unit.BasebandUnit",
"entities.switch.Switch",
"entities.hypervisor.Hypervisor"
] |
[((651, 677), 'forwarding.forwarding.Forwarding', 'Forwarding', (['self.env', 'self'], {}), '(self.env, self)\n', (661, 677), False, 'from forwarding.forwarding import Forwarding\n'), ((6337, 6377), 'entities.switch.Switch', 'Switch', (['self.env', '"""physical"""', '"""external"""'], {}), "(self.env, 'physical', 'external')\n", (6343, 6377), False, 'from entities.switch import Switch\n'), ((6564, 6614), 'entities.remote_radio_head.RemoteRadioHead', 'RemoteRadioHead', (['self.env', "remote_radio_head['id']"], {}), "(self.env, remote_radio_head['id'])\n", (6579, 6614), False, 'from entities.remote_radio_head import RemoteRadioHead\n'), ((7116, 7154), 'entities.hypervisor.Hypervisor', 'Hypervisor', (['self.env', "hypervisor['id']"], {}), "(self.env, hypervisor['id'])\n", (7126, 7154), False, 'from entities.hypervisor import Hypervisor\n'), ((7247, 7290), 'entities.baseband_unit.BasebandUnit', 'BasebandUnit', (['self.env', "baseband_unit['id']"], {}), "(self.env, baseband_unit['id'])\n", (7259, 7290), False, 'from entities.baseband_unit import BasebandUnit\n')]
|
# coding=utf-8
# Author: <NAME> & <NAME>
# Date: Jan 06, 2021
#
# Description: Utility functions
#
import os
import re
import functools
import pickle
import numpy as np
#
# Functions to handle Twitter text
#
re_all_after_retweet = re.compile(r"rt @[a-zA-Z0-9_]+.+", re.IGNORECASE | re.UNICODE)
def removeAllAfterRetweet(text):
""" Remove everything after a retweet is seen."""
    return re_all_after_retweet.sub('', text)
#
# Functions to handle Instagram Caption/Hashtag
#
re_repostapp = re.compile(r"(#Repost @\w+ with @repostapp)|(#EzRepost @\w+ with @ezrepostapp)|(Regrann from @\w+ -)")
def addSpacesBetweenHashTags(text):
""" Add spaces between hastags: #i#love#newyork -> #i #love #newyork """
if len(text) == 0:
return ''
    # Add spaces if hashtags are written together
new_text = ''
for i, c in enumerate(text, start=0):
if (c in ['#', '@']) and (i > 0):
if text[i - 1] != ' ':
new_text += ' '
new_text += c
return new_text
def combineTagsAndText(text, tags):
""" Combine Both Tags and Text Fields."""
text = addSpacesBetweenHashTags(text)
tags = [tag for tag in tags if tag not in text]
if len(tags):
new_tags = '. '.join(['#' + w for w in tags])
tagsandtext = text + '. ' + new_tags + '.'
else:
tagsandtext = text
return tagsandtext
def removeNewLines(sentence):
""" Remove new lines """
sentence = sentence.replace('\r\n', ' ').replace('\n', ' ').replace('\r', ' ')
return sentence
def removeRepostApp(caption):
""" Remove content that was posted by another person using the @repostapp """
m = re_repostapp.search(caption)
if m:
start, finish = m.span()
return caption[:start]
else:
return caption
#
# Functions to handle general social media text
#
re_atmention = re.compile(r"@[a-zA-Z0-9_]+")
re_hashtagsymbol = re.compile(r"#([a-zA-Z0-9_]+)")
re_links = re.compile(r"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+")
def removeAtMention(text):
""" Remove @mentions"""
return re_atmention.sub('', text)
def removeHashtagSymbol(text):
""" # - remove # symbol """
return re_hashtagsymbol.sub(r'\1', text)
def removeLinks(text):
""" remove links from text """
return re_links.sub('', text)
#
# File handling functions
#
def ensurePathExists(path):
""" Ensure path exists."""
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
print('-- Creating Folders: %s --' % (dirname))
os.makedirs(dirname)
def load_save_return(dbname):
""" What does this do? """
def LS_decorator(func):
@functools.wraps(func)
def LS_wrapper(*args, **kwargs):
# dbpath = os.path.join(godbpath, dbname)
dbpath = dbname
if os.path.isfile(dbpath):
with open(dbpath, 'rb') as db_fp:
return pickle.load(db_fp)
else:
result = func(*args, **kwargs)
with open(dbpath, 'wb') as db_fp:
pickle.dump(result, db_fp)
return result
return LS_wrapper
return LS_decorator
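# Illustrative usage of load_save_return (added as documentation; the file
# name and function are placeholders): the first call computes the result and
# pickles it, later calls load it straight from disk.
@load_save_return('word_counts.pkl')
def _example_cached_word_count(texts):
    return {t: len(t.split()) for t in texts}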
#
# Network functions
#
def prox2dist(p):
"""Transforms a non-negative ``[0,1]`` proximity to distance in the ``[0,inf]`` interval:
Args:
p (float): proximity value
Returns:
d (float): distance value
"""
if (p == 0):
return np.inf
else:
return (1 / float(p)) - 1
|
[
"pickle.dump",
"os.makedirs",
"os.path.dirname",
"os.path.exists",
"os.path.isfile",
"pickle.load",
"functools.wraps",
"re.compile"
] |
[((232, 293), 're.compile', 're.compile', (['"""rt @[a-zA-Z0-9_]+.+"""', '(re.IGNORECASE | re.UNICODE)'], {}), "('rt @[a-zA-Z0-9_]+.+', re.IGNORECASE | re.UNICODE)\n", (242, 293), False, 'import re\n'), ((499, 613), 're.compile', 're.compile', (['"""(#Repost @\\\\w+ with @repostapp)|(#EzRepost @\\\\w+ with @ezrepostapp)|(Regrann from @\\\\w+ -)"""'], {}), "(\n '(#Repost @\\\\w+ with @repostapp)|(#EzRepost @\\\\w+ with @ezrepostapp)|(Regrann from @\\\\w+ -)'\n )\n", (509, 613), False, 'import re\n'), ((1865, 1893), 're.compile', 're.compile', (['"""@[a-zA-Z0-9_]+"""'], {}), "('@[a-zA-Z0-9_]+')\n", (1875, 1893), False, 'import re\n'), ((1914, 1944), 're.compile', 're.compile', (['"""#([a-zA-Z0-9_]+)"""'], {}), "('#([a-zA-Z0-9_]+)')\n", (1924, 1944), False, 'import re\n'), ((1957, 2061), 're.compile', 're.compile', (['"""http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\\\(\\\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+"""'], {}), "(\n 'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\\\(\\\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'\n )\n", (1967, 2061), False, 'import re\n'), ((2455, 2476), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (2470, 2476), False, 'import os\n'), ((2488, 2511), 'os.path.exists', 'os.path.exists', (['dirname'], {}), '(dirname)\n', (2502, 2511), False, 'import os\n'), ((2577, 2597), 'os.makedirs', 'os.makedirs', (['dirname'], {}), '(dirname)\n', (2588, 2597), False, 'import os\n'), ((2698, 2719), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (2713, 2719), False, 'import functools\n'), ((2858, 2880), 'os.path.isfile', 'os.path.isfile', (['dbpath'], {}), '(dbpath)\n', (2872, 2880), False, 'import os\n'), ((2959, 2977), 'pickle.load', 'pickle.load', (['db_fp'], {}), '(db_fp)\n', (2970, 2977), False, 'import pickle\n'), ((3113, 3139), 'pickle.dump', 'pickle.dump', (['result', 'db_fp'], {}), '(result, db_fp)\n', (3124, 3139), False, 'import pickle\n')]
|
import hashlib
import sys
if sys.version_info[0] < 3:
import urllib
else:
import urllib.parse as urllib
class Gravtr(object):
GRAVATAR_URL = 'https://www.gravatar.com/avatar/'
GRAVATAR_URL_UNSECURE = 'http://www.gravatar.com/avatar/'
class ratingType(object):
G = 'g'
PG = 'pg'
R = 'r'
X = 'x'
def __init__(self, email):
self.email = email.encode('utf-8')
def generate(self, unsecure=False, size=None, typed=False, default=None, force_default=False, rating_type=None):
gravatar_url = self.GRAVATAR_URL if not unsecure else self.GRAVATAR_URL_UNSECURE
self.url = gravatar_url + hashlib.md5(self.email).hexdigest()
params = dict()
if size:
params['s'] = str(size)
if typed:
self.url = self.url + '.jpg'
if default:
params['d'] = str(default)
if force_default:
params['f'] = 'y'
if rating_type:
params['r'] = str(rating_type)
return self.url + '?' + urllib.urlencode(params)
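# Illustrative usage (the e-mail address is hypothetical; the hash shown is a
# placeholder for the real MD5 digest):
#
#   >>> Gravtr('someone@example.com').generate(size=128, rating_type=Gravtr.ratingType.PG)
#   'https://www.gravatar.com/avatar/<md5-of-email>?s=128&r=pg'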
|
[
"hashlib.md5",
"urllib.parse.urlencode"
] |
[((1050, 1074), 'urllib.parse.urlencode', 'urllib.urlencode', (['params'], {}), '(params)\n', (1066, 1074), True, 'import urllib.parse as urllib\n'), ((664, 687), 'hashlib.md5', 'hashlib.md5', (['self.email'], {}), '(self.email)\n', (675, 687), False, 'import hashlib\n')]
|
import os
import unittest
from MuseParse.tests.testUsingXML.xmlSet import xmlSet, parsePiece
from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex
from MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import NoteNode
from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import MeasureNode
from MuseParse.classes.ObjectHierarchy.ItemClasses import Note
from MuseParse.SampleMusicXML import testcases
partname = "arpeggiosAndGlissandos.xml"
directory = testcases.__path__._path[0]
piece = parsePiece(os.path.join(directory, partname))
class testArpeg(xmlSet):
def setUp(self):
xmlSet.setUp(self)
self.m_num = 32
self.p_id = "P1"
self.p_name = "Piccolo"
self.note_num = {1: 4, 2: 4, 3: 1, 4: 1, 5: 1, 6: 1, 7: 1, 8: 1, 9: 1, 10: 1,
11: 1, 12: 1, 13: 1, 14: 1, 15: 1, 16: 1, 17: 1, 18: 1, 19: 1,
20: 1, 21: 1, 22: 1, 23: 1, 24: 1, 25: 1, 26: 1, 27: 1, 28: 1,
29: 1, 30: 1, 31: 1, 32: 1}
def testParts(self):
global piece
self.assertTrue(piece.getPart(self.p_id) is not None)
self.assertEqual(self.p_name, piece.getPart(self.p_id).GetItem().name)
def testMeasures(self):
self.assertIsInstance(
FindByIndex(
piece.getPart(
self.p_id),
self.m_num),
MeasureNode)
def testNotes(self):
part = piece.getPart(self.p_id)
staff = part.getStaff(1)
keys = staff.GetChildrenIndexes()
for measure in keys:
if measure in self.note_num:
measure_obj = part.getMeasure(measure=measure, staff=1)
self.assertIsInstance(
Search(
NoteNode,
measure_obj.getVoice(1),
self.note_num[measure]),
NoteNode)
class testBar(unittest.TestCase):
def testInstance(self):
if hasattr(self, "instance_type"):
self.assertIsInstance(
self.item.wrap_notation[0],
self.instance_type)
def testEquality(self):
if hasattr(self, "value"):
self.assertEqual(self.item, self.value)
class Note1Measure1(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=1, staff=1)
self.item = Search(NoteNode, measure, 1).GetItem()
self.instance_type = Note.Arpeggiate
class Note2Measure1(testBar):
def setUp(self):
part = piece.getPart("P1")
measure = part.getMeasure(measure=1, staff=1)
self.item = Search(NoteNode, measure, 2).GetItem()
self.instance_type = Note.Arpeggiate
class Note2Measure1DirectionValue(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=1, staff=1)
note = Search(NoteNode, measure, 2).GetItem()
self.item = note.wrap_notation[0].direction
self.value = "up"
class Note3Measure1(testBar):
def setUp(self):
part = piece.getPart("P1")
measure = part.getMeasure(measure=1, staff=1)
self.item = Search(NoteNode, measure, 3).GetItem()
self.instance_type = Note.Arpeggiate
class Note3Measure1DirectionValue(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=1, staff=1)
note = Search(NoteNode, measure, 3).GetItem()
self.item = note.wrap_notation[0].direction
self.value = "down"
class Note4Measure1FirstNotation(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=1, staff=1)
self.item = Search(NoteNode, measure, 4).GetItem()
self.instance_type = Note.NonArpeggiate
class Note4Measure1SecondNotation(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=1, staff=1)
self.item = Search(NoteNode, measure, 4).GetItem()
self.instance_type = Note.NonArpeggiate
# TODO: fix this
# class Note4Measure1Notation1Type(testBar):
# def setUp(self):
# self.p_id = "P1"
# part = piece.getPart(self.p_id)
# measure = part.getMeasure(measure=1,staff=1)
# self.item = Search(NoteNode, measure, 4).GetItem().wrap_notation[0].type
# self.value = "bottom"
class Note4Measure1Notation2Type(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=1, staff=1)
self.item = Search(
NoteNode,
measure,
4).GetItem().wrap_notation[1].type
self.value = "top"
class Note1Measure2(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(NoteNode, measure, 1).GetItem()
self.instance_type = Note.Slide
class Note1Measure2Type(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(
NoteNode,
measure,
1).GetItem().wrap_notation[0].type
self.value = "start"
class Note1Measure2Number(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(
NoteNode,
measure,
1).GetItem().wrap_notation[0].number
self.value = 1
class Note1Measure2LineType(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(
NoteNode,
measure,
1).GetItem().wrap_notation[0].lineType
self.value = "solid"
class Note2Measure2(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(NoteNode, measure, 2).GetItem()
self.instance_type = Note.Slide
class Note2Measure2Type(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(
NoteNode,
measure,
2).GetItem().wrap_notation[0].type
self.value = "stop"
class Note2Measure2Number(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(
NoteNode,
measure,
2).GetItem().wrap_notation[0].number
self.value = 1
class Note2Measure2LineType(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(
NoteNode,
measure,
2).GetItem().wrap_notation[0].lineType
self.value = "solid"
class Note3Measure2(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(NoteNode, measure, 3).GetItem()
self.instance_type = Note.Glissando
class Note3Measure2Type(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(
NoteNode,
measure,
3).GetItem().wrap_notation[0].type
self.value = "start"
class Note3Measure2Number(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(
NoteNode,
measure,
3).GetItem().wrap_notation[0].number
self.value = 1
class Note3Measure2LineType(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(
NoteNode,
measure,
3).GetItem().wrap_notation[0].lineType
self.value = "wavy"
class Note4Measure2(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(NoteNode, measure, 4).GetItem()
self.instance_type = Note.Glissando
class Note4Measure2Type(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(
NoteNode,
measure,
4).GetItem().wrap_notation[0].type
self.value = "stop"
class Note4Measure2Number(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(
NoteNode,
measure,
4).GetItem().wrap_notation[0].number
self.value = 1
class Note4Measure2LineType(testBar):
def setUp(self):
self.p_id = "P1"
part = piece.getPart(self.p_id)
measure = part.getMeasure(measure=2, staff=1)
self.item = Search(
NoteNode,
measure,
4).GetItem().wrap_notation[0].lineType
self.value = "wavy"
|
[
"MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search",
"os.path.join",
"MuseParse.tests.testUsingXML.xmlSet.xmlSet.setUp"
] |
[((550, 583), 'os.path.join', 'os.path.join', (['directory', 'partname'], {}), '(directory, partname)\n', (562, 583), False, 'import os\n'), ((642, 660), 'MuseParse.tests.testUsingXML.xmlSet.xmlSet.setUp', 'xmlSet.setUp', (['self'], {}), '(self)\n', (654, 660), False, 'from MuseParse.tests.testUsingXML.xmlSet import xmlSet, parsePiece\n'), ((2491, 2519), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(1)'], {}), '(NoteNode, measure, 1)\n', (2497, 2519), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((2738, 2766), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(2)'], {}), '(NoteNode, measure, 2)\n', (2744, 2766), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((3024, 3052), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(2)'], {}), '(NoteNode, measure, 2)\n', (3030, 3052), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((3304, 3332), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(3)'], {}), '(NoteNode, measure, 3)\n', (3310, 3332), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((3590, 3618), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(3)'], {}), '(NoteNode, measure, 3)\n', (3596, 3618), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((3915, 3943), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(4)'], {}), '(NoteNode, measure, 4)\n', (3921, 3943), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((4209, 4237), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(4)'], {}), '(NoteNode, measure, 4)\n', (4215, 4237), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((5144, 5172), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(1)'], {}), '(NoteNode, measure, 1)\n', (5150, 5172), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((6394, 6422), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(2)'], {}), '(NoteNode, measure, 2)\n', (6400, 6422), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((7643, 7671), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(3)'], {}), '(NoteNode, measure, 3)\n', (7649, 7671), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((8896, 8924), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(4)'], {}), '(NoteNode, measure, 4)\n', (8902, 8924), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((4826, 4854), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(4)'], {}), '(NoteNode, measure, 4)\n', (4832, 4854), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree 
import Search, FindByIndex\n'), ((5420, 5448), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(1)'], {}), '(NoteNode, measure, 1)\n', (5426, 5448), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((5746, 5774), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(1)'], {}), '(NoteNode, measure, 1)\n', (5752, 5774), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((6070, 6098), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(1)'], {}), '(NoteNode, measure, 1)\n', (6076, 6098), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((6670, 6698), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(2)'], {}), '(NoteNode, measure, 2)\n', (6676, 6698), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((6995, 7023), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(2)'], {}), '(NoteNode, measure, 2)\n', (7001, 7023), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((7319, 7347), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(2)'], {}), '(NoteNode, measure, 2)\n', (7325, 7347), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((7923, 7951), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(3)'], {}), '(NoteNode, measure, 3)\n', (7929, 7951), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((8249, 8277), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(3)'], {}), '(NoteNode, measure, 3)\n', (8255, 8277), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((8573, 8601), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(3)'], {}), '(NoteNode, measure, 3)\n', (8579, 8601), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((9176, 9204), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(4)'], {}), '(NoteNode, measure, 4)\n', (9182, 9204), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((9501, 9529), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(4)'], {}), '(NoteNode, measure, 4)\n', (9507, 9529), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n'), ((9825, 9853), 'MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree.Search', 'Search', (['NoteNode', 'measure', '(4)'], {}), '(NoteNode, measure, 4)\n', (9831, 9853), False, 'from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex\n')]
|
import os
import pathlib
from ctypes import *
from ctypes import _SimpleCData
from ctypes import _Pointer
from .common import CEnum
class SNResult(CEnum):
SN_S_OK = (0)
SN_S_PENDING = (1)
SN_S_NO_MSG = (3)
SN_S_TM_VERSION = (4)
SN_S_REPLACED = (5)
SN_S_NO_ACTION = (6)
SN_S_CONNECTED = SN_S_NO_ACTION
SN_S_TARGET_STILL_REGISTERED = (7)
SN_E_NOT_IMPL = (-1)
SN_E_TM_NOT_RUNNING = (-2)
SN_E_BAD_TARGET = (-3)
SN_E_NOT_CONNECTED = (-4)
SN_E_COMMS_ERR = (-5)
SN_E_TM_COMMS_ERR = (-6)
SN_E_TIMEOUT = (-7)
SN_E_HOST_NOT_FOUND = (-8)
SN_E_TARGET_IN_USE = (-9)
SN_E_LOAD_ELF_FAILED = (-10)
SN_E_BAD_UNIT = (-11)
SN_E_OUT_OF_MEM = (-12)
SN_E_NOT_LISTED = (-13)
SN_E_TM_VERSION = (-14)
SN_E_DLL_NOT_INITIALISED = (-15)
SN_E_TARGET_RUNNING = (-17)
SN_E_BAD_MEMSPACE = (-18)
SN_E_NO_TARGETS = (-19)
SN_E_NO_SEL = (-20)
SN_E_BAD_PARAM = (-21)
SN_E_BUSY = (-22)
SN_E_DECI_ERROR = (-23)
SN_E_INSUFFICIENT_DATA = (-25)
SN_E_DATA_TOO_LONG = (-26)
SN_E_DEPRECATED = (-27)
SN_E_BAD_ALIGN = (-28)
SN_E_FILE_ERROR = (-29)
SN_E_NOT_SUPPORTED_IN_SDK_VERSION = (-30)
SN_E_LOAD_MODULE_FAILED = (-31)
SN_E_CHECK_TARGET_CONFIGURATION = (-33)
SN_E_MODULE_NOT_FOUND = (-34)
SN_E_CONNECT_TO_GAMEPORT_FAILED = (-35)
SN_E_COMMAND_CANCELLED = (-36)
SN_E_PROTOCOL_ALREADY_REGISTERED = (-37)
SN_E_CONNECTED = (-38)
SN_E_COMMS_EVENT_MISMATCHED_ERR = (-39)
SN_E_TARGET_IS_POWERED_OFF = (-40)
class SNTargetInfoFlags(CEnum):
SN_TI_TARGETID = (0x00000001)
SN_TI_NAME = (0x00000002)
SN_TI_INFO = (0x00000004)
SN_TI_HOMEDIR = (0x00000008)
SN_TI_FILESERVEDIR = (0x00000010)
SN_TI_BOOT = (0x00000020)
class SNPS3TargetInfo(Structure):
_fields_ = [
("nFlags", c_uint32 ),
("hTarget", c_uint32 ),
("pszName", c_char_p ),
("pszType", c_char_p ),
("pszInfo", c_char_p ),
("pszHomeDir", c_char_p ),
("pszFSDir", c_char_p ),
("boot", c_uint64 ),
]
class TMAPIExports:
def __init__(self):
os.add_dll_directory(os.getcwd())
os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'), "bin"))
self.TMAPI_DLL = CDLL("ps3tmapi.dll")
'''
SNAPI SNRESULT SNPS3InitTargetComms(void);
Initialises target communications and launches Target Manager.
'''
self.SNPS3InitTargetComms = self.TMAPI_DLL.SNPS3InitTargetComms
self.SNPS3InitTargetComms.argtypes = []
self.SNPS3InitTargetComms.restype = SNResult
'''
SNAPI SNRESULT SNPS3CloseTargetComms(void);
Shuts down internal communications (but does not close the Target Manager) and frees resources.
'''
self.SNPS3CloseTargetComms = self.TMAPI_DLL.SNPS3CloseTargetComms
self.SNPS3CloseTargetComms.argtypes = []
self.SNPS3CloseTargetComms.restype = SNResult
'''
SNAPI SNRESULT SNPS3IsScanning();
Returns SN_E_BUSY if a search is already in progress.
'''
self.SNPS3IsScanning = self.TMAPI_DLL.SNPS3IsScanning
self.SNPS3IsScanning.argtypes = []
self.SNPS3IsScanning.restype = SNResult
'''
SNAPI SNRESULT SNPS3Connect(
HTARGET hTarget,
const char *pszApplication
);
Connect to specified target.
'''
self.SNPS3Connect = self.TMAPI_DLL.SNPS3Connect
self.SNPS3Connect.argtypes = [ c_uint32, c_char_p ]
self.SNPS3Connect.restype = SNResult
'''
SNAPI SNRESULT SNPS3ConnectEx(
HTARGET hTarget,
const char *pszApplication,
BOOL bForceFlag
);
Connect to specified target.
'''
self.SNPS3ConnectEx = self.TMAPI_DLL.SNPS3ConnectEx
self.SNPS3ConnectEx.argtypes = [ c_uint32, c_char_p, c_bool ]
self.SNPS3ConnectEx.restype = SNResult
'''
SNAPI SNRESULT SNPS3GetTargetInfo(
SNPS3TargetInfo *pTargetInfo
);
Retrieves information for a target specified by hTarget member of SNPS3TargetInfo() structure.
'''
self.SNPS3GetTargetInfo = self.TMAPI_DLL.SNPS3GetTargetInfo
self.SNPS3GetTargetInfo.argtypes = [ POINTER(SNPS3TargetInfo) ]
self.SNPS3GetTargetInfo.restype = SNResult
'''
SNAPI SNRESULT SNPS3GetDefaultTarget(
HTARGET *pTarget
);
Gets the default target.
'''
self.SNPS3GetDefaultTarget = self.TMAPI_DLL.SNPS3GetDefaultTarget
self.SNPS3GetDefaultTarget.argtypes = [ POINTER(c_uint32) ]
self.SNPS3GetDefaultTarget.restype = SNResult
'''
SNAPI SNRESULT SNPS3SetDefaultTarget(
HTARGET hTarget
);
        Sets the default target.
'''
self.SNPS3SetDefaultTarget = self.TMAPI_DLL.SNPS3SetDefaultTarget
self.SNPS3SetDefaultTarget.argtypes = [ c_uint32 ]
self.SNPS3SetDefaultTarget.restype = SNResult
'''
SNAPI SNRESULT SNPS3ProcessList(
HTARGET hTarget,
UINT32 *puCount,
UINT32 *puBuffer
);
Fetches a list of processes running on the specified target.
'''
self.SNPS3ProcessList = self.TMAPI_DLL.SNPS3ProcessList
self.SNPS3ProcessList.argtypes = [ c_uint32, POINTER(c_uint32), POINTER(c_uint32) ]
self.SNPS3ProcessList.restype = SNResult
'''
SNAPI SNRESULT SNPS3ProcessAttach(
HTARGET hTarget,
UINT32 uUnitID,
UINT32 uProcessID
);
Attach to a process.
'''
self.SNPS3ProcessAttach = self.TMAPI_DLL.SNPS3ProcessAttach
self.SNPS3ProcessAttach.argtypes = [ c_uint32, c_uint32, c_uint32 ]
self.SNPS3ProcessAttach.restype = SNResult
'''
SNAPI SNRESULT SNPS3ProcessContinue(
HTARGET hTarget,
UINT32 uProcessID
);
Continues all threads from a specified process.
'''
self.SNPS3ProcessContinue = self.TMAPI_DLL.SNPS3ProcessContinue
self.SNPS3ProcessContinue.argtypes = [ c_uint32, c_uint32 ]
self.SNPS3ProcessContinue.restype = SNResult
'''
SNAPI SNRESULT SNPS3ProcessStop(
HTARGET hTarget,
UINT32 uProcessID
);
Stops all threads from a specified process.
'''
self.SNPS3ProcessStop = self.TMAPI_DLL.SNPS3ProcessStop
self.SNPS3ProcessStop.argtypes = [ c_uint32, c_uint32 ]
self.SNPS3ProcessStop.restype = SNResult
'''
SNAPI SNRESULT SNPS3ProcessGetMemory(
HTARGET hTarget,
UINT32 uUnit,
UINT32 uProcessID,
UINT64 uThreadID,
UINT64 uAddress,
int nCount,
BYTE *pBuffer
);
'''
self.SNPS3ProcessGetMemory = self.TMAPI_DLL.SNPS3ProcessGetMemory
self.SNPS3ProcessGetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ]
self.SNPS3ProcessGetMemory.restype = SNResult
'''
SNAPI SNRESULT SNPS3ProcessSetMemory(
HTARGET hTarget,
UINT32 uUnit,
UINT32 uProcessID,
UINT64 uThreadID,
UINT64 uAddress,
int nCount,
const BYTE *pBuffer
);
'''
self.SNPS3ProcessSetMemory = self.TMAPI_DLL.SNPS3ProcessSetMemory
self.SNPS3ProcessSetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ]
self.SNPS3ProcessSetMemory.restype = SNResult
class TMAPI:
def __init__(self):
self.NativeAPI = TMAPIExports()
self.PS3TargetIndex = -1
self.IsConnected = False
if self.NativeAPI.SNPS3InitTargetComms() != SNResult.SN_S_OK:
raise Exception("SNPS3InitTargetComms() Failed")
def ThrowIfNotConnected(self):
if self.IsConnected == False:
raise Exception("Error: Not Connected to PS3")
def GetDefaultTarget(self):
DefaultTargetIndex = pointer(c_uint32(0))
if self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex) != SNResult.SN_S_OK:
            raise Exception("SNPS3GetDefaultTarget() Failed")
return DefaultTargetIndex[0]
def ConnectTarget(self, TargetIndex=-1):
self.IsConnected = False
if TargetIndex == -1:
TargetIndex = self.GetDefaultTarget()
if self.NativeAPI.SNPS3ConnectEx(TargetIndex, None, True) not in [ SNResult.SN_S_OK, SNResult.SN_S_CONNECTED ]:
return False
self.PS3TargetIndex = TargetIndex
self.IsConnected = True
return True
def GetProcessList(self):
self.ThrowIfNotConnected()
NumProcessesPtr = pointer(c_uint32(0))
if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, None) != SNResult.SN_S_OK:
raise Exception("SNPS3ProcessList(): GetNumProcesses Failed")
NumProcesses = NumProcessesPtr.contents.value
if NumProcesses == 0:
raise Exception("No process running")
ProcessList = (c_uint32*NumProcesses)()
if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, ProcessList) != SNResult.SN_S_OK:
raise Exception("SNPS3ProcessList(): GetProcessInfos Failed")
return list(ProcessList)
def AttachProcess(self, ProcessID=-1):
self.ThrowIfNotConnected()
if ProcessID == -1:
ProcessList = self.GetProcessList()
if len(ProcessList) == 0:
return False
ProcessID = ProcessList[0]
if self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex, 0, ProcessID) != SNResult.SN_S_OK:
return False
if self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex, ProcessID) != SNResult.SN_S_OK:
raise Exception("SNPS3ProcessContinue() Failed")
self.ProcessID = ProcessID
return True
def ReadMemory(self, Address, Size):
self.ThrowIfNotConnected()
MemoryBuffer = (c_char * Size)()
self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address, Size, MemoryBuffer)
return bytes(MemoryBuffer)
def WriteMemory(self, Address, Bytes):
self.ThrowIfNotConnected()
WriteBuffer = (c_char * len(Bytes)).from_buffer(bytearray(Bytes))
return self.NativeAPI.SNPS3ProcessSetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address, len(Bytes), WriteBuffer)
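# Illustrative usage sketch (assumes a PS3 devkit is reachable through the SN
# Target Manager; the address and size below are arbitrary examples):
#
#   api = TMAPI()
#   if api.ConnectTarget() and api.AttachProcess():
#       data = api.ReadMemory(0x10000, 16)   # read 16 bytes
#       api.WriteMemory(0x10000, data)       # write the same bytes back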
|
[
"os.getcwd",
"os.getenv"
] |
[((2153, 2164), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2162, 2164), False, 'import os\n'), ((2208, 2232), 'os.getenv', 'os.getenv', (['"""SN_PS3_PATH"""'], {}), "('SN_PS3_PATH')\n", (2217, 2232), False, 'import os\n')]
|
import sys
import unittest
import os
script_dir = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(1, os.path.abspath(
os.path.join(script_dir, os.path.join('..', '..'))))
from pake import process
import pake.program
import pake
class ProcessTest(unittest.TestCase):
def test_call(self):
cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')]
with self.assertRaises(process.TimeoutExpired) as exc:
process.call(*cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL)
self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd)
self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'throw.py'),
stderr=process.DEVNULL, stdout=process.DEVNULL), 0)
self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'killself.py'),
stderr=process.DEVNULL, stdout=process.DEVNULL), 0)
def test_check_call(self):
cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')]
with self.assertRaises(process.TimeoutExpired) as exc:
process.check_call(cmd, timeout=0.1,
stderr=process.DEVNULL, stdout=process.DEVNULL)
self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd)
_ = str(exc.exception) # just test for serialization exceptions
cmd = [sys.executable, os.path.join(script_dir, 'throw.py')]
with self.assertRaises(process.CalledProcessException) as exc:
process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL)
self.assertListEqual(cmd, exc.exception.cmd)
_ = str(exc.exception) # just test for serialization exceptions
# Check pake propagates the exception correctly
pake.de_init(clear_conf=False)
pk = pake.init()
@pk.task
def dummy(ctx):
process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL)
with self.assertRaises(pake.TaskException) as exc:
pk.run(tasks=dummy)
self.assertEqual(type(exc.exception.exception), process.CalledProcessException)
def test_check_output(self):
cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')]
with self.assertRaises(process.TimeoutExpired) as exc:
process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL)
_ = str(exc.exception) # just test for serialization exceptions
cmd = [sys.executable, os.path.join(script_dir, 'throw.py')]
with self.assertRaises(process.CalledProcessException) as exc:
process.check_output(cmd, stderr=process.DEVNULL)
_ = str(exc.exception) # just test for serialization exceptions
# Check pake propagates the exception correctly
pake.de_init(clear_conf=False)
pk = pake.init()
@pk.task
def dummy(ctx):
process.check_output(cmd, stderr=process.DEVNULL)
with self.assertRaises(pake.TaskException) as exc:
pk.run(tasks=dummy)
self.assertEqual(type(exc.exception.exception), process.CalledProcessException)
|
[
"pake.de_init",
"pake.process.check_output",
"os.path.realpath",
"pake.process.call",
"pake.init",
"pake.process.check_call",
"os.path.join"
] |
[((68, 94), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (84, 94), False, 'import os\n'), ((1840, 1870), 'pake.de_init', 'pake.de_init', ([], {'clear_conf': '(False)'}), '(clear_conf=False)\n', (1852, 1870), False, 'import pake\n'), ((1885, 1896), 'pake.init', 'pake.init', ([], {}), '()\n', (1894, 1896), False, 'import pake\n'), ((2868, 2898), 'pake.de_init', 'pake.de_init', ([], {'clear_conf': '(False)'}), '(clear_conf=False)\n', (2880, 2898), False, 'import pake\n'), ((2913, 2924), 'pake.init', 'pake.init', ([], {}), '()\n', (2922, 2924), False, 'import pake\n'), ((162, 186), 'os.path.join', 'os.path.join', (['""".."""', '""".."""'], {}), "('..', '..')\n", (174, 186), False, 'import os\n'), ((344, 382), 'os.path.join', 'os.path.join', (['script_dir', '"""timeout.py"""'], {}), "(script_dir, 'timeout.py')\n", (356, 382), False, 'import os\n'), ((460, 539), 'pake.process.call', 'process.call', (['*cmd'], {'timeout': '(0.1)', 'stderr': 'process.DEVNULL', 'stdout': 'process.DEVNULL'}), '(*cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL)\n', (472, 539), False, 'from pake import process\n'), ((1049, 1087), 'os.path.join', 'os.path.join', (['script_dir', '"""timeout.py"""'], {}), "(script_dir, 'timeout.py')\n", (1061, 1087), False, 'import os\n'), ((1165, 1254), 'pake.process.check_call', 'process.check_call', (['cmd'], {'timeout': '(0.1)', 'stderr': 'process.DEVNULL', 'stdout': 'process.DEVNULL'}), '(cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process\n .DEVNULL)\n', (1183, 1254), False, 'from pake import process\n'), ((1452, 1488), 'os.path.join', 'os.path.join', (['script_dir', '"""throw.py"""'], {}), "(script_dir, 'throw.py')\n", (1464, 1488), False, 'import os\n'), ((1574, 1645), 'pake.process.check_call', 'process.check_call', (['cmd'], {'stderr': 'process.DEVNULL', 'stdout': 'process.DEVNULL'}), '(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL)\n', (1592, 1645), False, 'from pake import process\n'), ((1951, 2022), 'pake.process.check_call', 'process.check_call', (['cmd'], {'stderr': 'process.DEVNULL', 'stdout': 'process.DEVNULL'}), '(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL)\n', (1969, 2022), False, 'from pake import process\n'), ((2270, 2308), 'os.path.join', 'os.path.join', (['script_dir', '"""timeout.py"""'], {}), "(script_dir, 'timeout.py')\n", (2282, 2308), False, 'import os\n'), ((2386, 2449), 'pake.process.check_output', 'process.check_output', (['*cmd'], {'timeout': '(0.1)', 'stderr': 'process.DEVNULL'}), '(*cmd, timeout=0.1, stderr=process.DEVNULL)\n', (2406, 2449), False, 'from pake import process\n'), ((2556, 2592), 'os.path.join', 'os.path.join', (['script_dir', '"""throw.py"""'], {}), "(script_dir, 'throw.py')\n", (2568, 2592), False, 'import os\n'), ((2678, 2727), 'pake.process.check_output', 'process.check_output', (['cmd'], {'stderr': 'process.DEVNULL'}), '(cmd, stderr=process.DEVNULL)\n', (2698, 2727), False, 'from pake import process\n'), ((2979, 3028), 'pake.process.check_output', 'process.check_output', (['cmd'], {'stderr': 'process.DEVNULL'}), '(cmd, stderr=process.DEVNULL)\n', (2999, 3028), False, 'from pake import process\n'), ((663, 699), 'os.path.join', 'os.path.join', (['script_dir', '"""throw.py"""'], {}), "(script_dir, 'throw.py')\n", (675, 699), False, 'import os\n'), ((852, 891), 'os.path.join', 'os.path.join', (['script_dir', '"""killself.py"""'], {}), "(script_dir, 'killself.py')\n", (864, 891), False, 'import os\n')]
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""train resnet."""
import os
import time
import argparse
import ast
import numpy as np
from mindspore import context
from mindspore import Tensor
from mindspore.nn.optim.momentum import Momentum
from mindspore.train.model import Model
from mindspore.context import ParallelMode
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig
from mindspore.train.loss_scale_manager import FixedLossScaleManager
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from mindspore.common import set_seed
from mindspore.communication.management import init
from mindspore.train.callback import Callback
from src.loss import Softmaxloss
from src.loss import Tripletloss
from src.loss import Quadrupletloss
from src.lr_generator import get_lr
from src.resnet import resnet50
from src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet
set_seed(1)
parser = argparse.ArgumentParser(description='Image classification')
# modelarts parameter
parser.add_argument('--train_url', type=str, default=None, help='Train output path')
parser.add_argument('--data_url', type=str, default=None, help='Dataset path')
parser.add_argument('--ckpt_url', type=str, default=None, help='Pretrained ckpt path')
parser.add_argument('--checkpoint_name', type=str, default='resnet-120_625.ckpt', help='Checkpoint file')
parser.add_argument('--loss_name', type=str, default='softmax',
help='loss name: softmax(pretrained) triplet quadruplet')
# Ascend parameter
parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path')
parser.add_argument('--ckpt_path', type=str, default=None, help='ckpt path name')
parser.add_argument('--run_distribute', type=ast.literal_eval, default=False, help='Run distribute')
parser.add_argument('--device_id', type=int, default=0, help='Device id')
parser.add_argument('--run_modelarts', type=ast.literal_eval, default=False, help='Run distribute')
args_opt = parser.parse_args()
class Monitor(Callback):
"""Monitor"""
def __init__(self, lr_init=None):
super(Monitor, self).__init__()
self.lr_init = lr_init
self.lr_init_len = len(lr_init)
def epoch_begin(self, run_context):
self.losses = []
self.epoch_time = time.time()
dataset_generator.__init__(data_dir=DATA_DIR, train_list=TRAIN_LIST)
def epoch_end(self, run_context):
cb_params = run_context.original_args()
epoch_mseconds = (time.time() - self.epoch_time) * 1000
per_step_mseconds = epoch_mseconds / cb_params.batch_num
print("epoch time: {:5.3f}, per step time: {:5.3f}, avg loss: {:8.5f}"
.format(epoch_mseconds, per_step_mseconds, np.mean(self.losses)))
print('batch_size:', config.batch_size, 'epochs_size:', config.epoch_size,
'lr_model:', config.lr_decay_mode, 'lr:', config.lr_max, 'step_size:', step_size)
def step_begin(self, run_context):
self.step_time = time.time()
def step_end(self, run_context):
"""step_end"""
cb_params = run_context.original_args()
step_mseconds = (time.time() - self.step_time) * 1000
step_loss = cb_params.net_outputs
if isinstance(step_loss, (tuple, list)) and isinstance(step_loss[0], Tensor):
step_loss = step_loss[0]
if isinstance(step_loss, Tensor):
step_loss = np.mean(step_loss.asnumpy())
self.losses.append(step_loss)
cur_step_in_epoch = (cb_params.cur_step_num - 1) % cb_params.batch_num
print("epochs: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:8.5f}/{:8.5f}], time:[{:5.3f}], lr:[{:8.5f}]".format(
cb_params.cur_epoch_num, config.epoch_size, cur_step_in_epoch, cb_params.batch_num, step_loss,
np.mean(self.losses), step_mseconds, self.lr_init[cb_params.cur_step_num - 1]))
if __name__ == '__main__':
if args_opt.loss_name == 'softmax':
from src.config import config0 as config
from src.dataset import create_dataset0 as create_dataset
elif args_opt.loss_name == 'triplet':
from src.config import config1 as config
from src.dataset import create_dataset1 as create_dataset
elif args_opt.loss_name == 'quadruplet':
from src.config import config2 as config
from src.dataset import create_dataset1 as create_dataset
else:
print('loss no')
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", save_graphs=False)
# init distributed
if args_opt.run_modelarts:
import moxing as mox
device_id = int(os.getenv('DEVICE_ID'))
device_num = int(os.getenv('RANK_SIZE'))
context.set_context(device_id=device_id)
local_data_url = '/cache/data'
local_ckpt_url = '/cache/ckpt'
local_train_url = '/cache/train'
if device_num > 1:
init()
context.set_auto_parallel_context(device_num=device_num,
parallel_mode=ParallelMode.DATA_PARALLEL,
gradients_mean=True)
local_data_url = os.path.join(local_data_url, str(device_id))
local_ckpt_url = os.path.join(local_ckpt_url, str(device_id))
mox.file.copy_parallel(args_opt.data_url, local_data_url)
mox.file.copy_parallel(args_opt.ckpt_url, local_ckpt_url)
DATA_DIR = local_data_url + '/'
else:
if args_opt.run_distribute:
device_id = int(os.getenv('DEVICE_ID'))
device_num = int(os.getenv('RANK_SIZE'))
context.set_context(device_id=device_id)
init()
context.reset_auto_parallel_context()
context.set_auto_parallel_context(device_num=device_num,
parallel_mode=ParallelMode.DATA_PARALLEL,
gradients_mean=True)
else:
context.set_context(device_id=args_opt.device_id)
device_num = 1
device_id = args_opt.device_id
DATA_DIR = args_opt.dataset_path + '/'
# create dataset
TRAIN_LIST = DATA_DIR + 'train_half.txt'
if args_opt.loss_name == 'softmax':
dataset_generator = GetDatasetGenerator_softmax(data_dir=DATA_DIR,
train_list=TRAIN_LIST)
elif args_opt.loss_name == 'triplet':
dataset_generator = GetDatasetGenerator_triplet(data_dir=DATA_DIR,
train_list=TRAIN_LIST)
elif args_opt.loss_name == 'quadruplet':
dataset_generator = GetDatasetGenerator_quadruplet(data_dir=DATA_DIR,
train_list=TRAIN_LIST)
else:
print('loss no')
dataset = create_dataset(dataset_generator, do_train=True, batch_size=config.batch_size,
device_num=device_num, rank_id=device_id)
step_size = dataset.get_dataset_size()
# define net
net = resnet50(class_num=config.class_num)
# init weight
if args_opt.run_modelarts:
checkpoint_path = os.path.join(local_ckpt_url, args_opt.checkpoint_name)
else:
checkpoint_path = args_opt.ckpt_path
param_dict = load_checkpoint(checkpoint_path)
load_param_into_net(net.backbone, param_dict)
# init lr
lr = Tensor(get_lr(lr_init=config.lr_init,
lr_end=config.lr_end,
lr_max=config.lr_max,
warmup_epochs=config.warmup_epochs,
total_epochs=config.epoch_size,
steps_per_epoch=step_size,
lr_decay_mode=config.lr_decay_mode))
# define opt
opt = Momentum(params=net.trainable_params(),
learning_rate=lr,
momentum=config.momentum,
weight_decay=config.weight_decay,
loss_scale=config.loss_scale)
# define loss, model
if args_opt.loss_name == 'softmax':
loss = Softmaxloss(sparse=True, smooth_factor=0.1, num_classes=config.class_num)
elif args_opt.loss_name == 'triplet':
loss = Tripletloss(margin=0.1)
elif args_opt.loss_name == 'quadruplet':
loss = Quadrupletloss(train_batch_size=config.batch_size, samples_each_class=2, margin=0.1)
else:
print('loss no')
loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False)
if args_opt.loss_name == 'softmax':
model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None,
amp_level='O3', keep_batchnorm_fp32=False)
else:
model = Model(net.backbone, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None,
amp_level='O3', keep_batchnorm_fp32=False)
#define callback
cb = []
if config.save_checkpoint and (device_num == 1 or device_id == 0):
config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size,
keep_checkpoint_max=config.keep_checkpoint_max)
check_name = 'ResNet50_' + args_opt.loss_name
if args_opt.run_modelarts:
ckpt_cb = ModelCheckpoint(prefix=check_name, directory=local_train_url, config=config_ck)
else:
save_ckpt_path = os.path.join(config.save_checkpoint_path, 'model_'+ str(device_id) +'/')
ckpt_cb = ModelCheckpoint(prefix=check_name, directory=save_ckpt_path, config=config_ck)
cb += [ckpt_cb]
cb += [Monitor(lr_init=lr.asnumpy())]
# train model
model.train(config.epoch_size - config.pretrain_epoch_size, dataset, callbacks=cb, dataset_sink_mode=True)
if args_opt.run_modelarts and config.save_checkpoint and (device_num == 1 or device_id == 0):
mox.file.copy_parallel(src_url=local_train_url, dst_url=args_opt.train_url)
|
[
"src.loss.Quadrupletloss",
"argparse.ArgumentParser",
"mindspore.train.callback.ModelCheckpoint",
"src.dataset.create_dataset1",
"moxing.file.copy_parallel",
"numpy.mean",
"mindspore.train.serialization.load_checkpoint",
"os.path.join",
"mindspore.train.serialization.load_param_into_net",
"mindspore.context.set_context",
"src.utility.GetDatasetGenerator_softmax",
"mindspore.context.set_auto_parallel_context",
"mindspore.common.set_seed",
"src.utility.GetDatasetGenerator_triplet",
"src.resnet.resnet50",
"mindspore.train.loss_scale_manager.FixedLossScaleManager",
"src.loss.Softmaxloss",
"src.utility.GetDatasetGenerator_quadruplet",
"mindspore.train.callback.CheckpointConfig",
"mindspore.communication.management.init",
"src.loss.Tripletloss",
"mindspore.train.model.Model",
"os.getenv",
"mindspore.context.reset_auto_parallel_context",
"time.time",
"src.lr_generator.get_lr"
] |
[((1586, 1597), 'mindspore.common.set_seed', 'set_seed', (['(1)'], {}), '(1)\n', (1594, 1597), False, 'from mindspore.common import set_seed\n'), ((1608, 1667), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Image classification"""'}), "(description='Image classification')\n", (1631, 1667), False, 'import argparse\n'), ((5087, 5178), 'mindspore.context.set_context', 'context.set_context', ([], {'mode': 'context.GRAPH_MODE', 'device_target': '"""Ascend"""', 'save_graphs': '(False)'}), "(mode=context.GRAPH_MODE, device_target='Ascend',\n save_graphs=False)\n", (5106, 5178), False, 'from mindspore import context\n'), ((7514, 7639), 'src.dataset.create_dataset1', 'create_dataset', (['dataset_generator'], {'do_train': '(True)', 'batch_size': 'config.batch_size', 'device_num': 'device_num', 'rank_id': 'device_id'}), '(dataset_generator, do_train=True, batch_size=config.\n batch_size, device_num=device_num, rank_id=device_id)\n', (7528, 7639), True, 'from src.dataset import create_dataset1 as create_dataset\n'), ((7735, 7771), 'src.resnet.resnet50', 'resnet50', ([], {'class_num': 'config.class_num'}), '(class_num=config.class_num)\n', (7743, 7771), False, 'from src.resnet import resnet50\n'), ((7975, 8007), 'mindspore.train.serialization.load_checkpoint', 'load_checkpoint', (['checkpoint_path'], {}), '(checkpoint_path)\n', (7990, 8007), False, 'from mindspore.train.serialization import load_checkpoint, load_param_into_net\n'), ((8012, 8057), 'mindspore.train.serialization.load_param_into_net', 'load_param_into_net', (['net.backbone', 'param_dict'], {}), '(net.backbone, param_dict)\n', (8031, 8057), False, 'from mindspore.train.serialization import load_checkpoint, load_param_into_net\n'), ((9120, 9188), 'mindspore.train.loss_scale_manager.FixedLossScaleManager', 'FixedLossScaleManager', (['config.loss_scale'], {'drop_overflow_update': '(False)'}), '(config.loss_scale, drop_overflow_update=False)\n', (9141, 9188), False, 'from mindspore.train.loss_scale_manager import FixedLossScaleManager\n'), ((2963, 2974), 'time.time', 'time.time', ([], {}), '()\n', (2972, 2974), False, 'import time\n'), ((3669, 3680), 'time.time', 'time.time', ([], {}), '()\n', (3678, 3680), False, 'import time\n'), ((5363, 5403), 'mindspore.context.set_context', 'context.set_context', ([], {'device_id': 'device_id'}), '(device_id=device_id)\n', (5382, 5403), False, 'from mindspore import context\n'), ((5949, 6006), 'moxing.file.copy_parallel', 'mox.file.copy_parallel', (['args_opt.data_url', 'local_data_url'], {}), '(args_opt.data_url, local_data_url)\n', (5971, 6006), True, 'import moxing as mox\n'), ((6015, 6072), 'moxing.file.copy_parallel', 'mox.file.copy_parallel', (['args_opt.ckpt_url', 'local_ckpt_url'], {}), '(args_opt.ckpt_url, local_ckpt_url)\n', (6037, 6072), True, 'import moxing as mox\n'), ((6938, 7007), 'src.utility.GetDatasetGenerator_softmax', 'GetDatasetGenerator_softmax', ([], {'data_dir': 'DATA_DIR', 'train_list': 'TRAIN_LIST'}), '(data_dir=DATA_DIR, train_list=TRAIN_LIST)\n', (6965, 7007), False, 'from src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet\n'), ((7848, 7902), 'os.path.join', 'os.path.join', (['local_ckpt_url', 'args_opt.checkpoint_name'], {}), '(local_ckpt_url, args_opt.checkpoint_name)\n', (7860, 7902), False, 'import os\n'), ((8089, 8302), 'src.lr_generator.get_lr', 'get_lr', ([], {'lr_init': 'config.lr_init', 'lr_end': 'config.lr_end', 'lr_max': 'config.lr_max', 'warmup_epochs': 'config.warmup_epochs', 
'total_epochs': 'config.epoch_size', 'steps_per_epoch': 'step_size', 'lr_decay_mode': 'config.lr_decay_mode'}), '(lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max,\n warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size,\n steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode)\n', (8095, 8302), False, 'from src.lr_generator import get_lr\n'), ((8767, 8840), 'src.loss.Softmaxloss', 'Softmaxloss', ([], {'sparse': '(True)', 'smooth_factor': '(0.1)', 'num_classes': 'config.class_num'}), '(sparse=True, smooth_factor=0.1, num_classes=config.class_num)\n', (8778, 8840), False, 'from src.loss import Softmaxloss\n'), ((9246, 9377), 'mindspore.train.model.Model', 'Model', (['net'], {'loss_fn': 'loss', 'optimizer': 'opt', 'loss_scale_manager': 'loss_scale', 'metrics': 'None', 'amp_level': '"""O3"""', 'keep_batchnorm_fp32': '(False)'}), "(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale,\n metrics=None, amp_level='O3', keep_batchnorm_fp32=False)\n", (9251, 9377), False, 'from mindspore.train.model import Model\n'), ((9422, 9563), 'mindspore.train.model.Model', 'Model', (['net.backbone'], {'loss_fn': 'loss', 'optimizer': 'opt', 'loss_scale_manager': 'loss_scale', 'metrics': 'None', 'amp_level': '"""O3"""', 'keep_batchnorm_fp32': '(False)'}), "(net.backbone, loss_fn=loss, optimizer=opt, loss_scale_manager=\n loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False)\n", (9427, 9563), False, 'from mindspore.train.model import Model\n'), ((9706, 9839), 'mindspore.train.callback.CheckpointConfig', 'CheckpointConfig', ([], {'save_checkpoint_steps': '(config.save_checkpoint_epochs * step_size)', 'keep_checkpoint_max': 'config.keep_checkpoint_max'}), '(save_checkpoint_steps=config.save_checkpoint_epochs *\n step_size, keep_checkpoint_max=config.keep_checkpoint_max)\n', (9722, 9839), False, 'from mindspore.train.callback import ModelCheckpoint, CheckpointConfig\n'), ((10585, 10660), 'moxing.file.copy_parallel', 'mox.file.copy_parallel', ([], {'src_url': 'local_train_url', 'dst_url': 'args_opt.train_url'}), '(src_url=local_train_url, dst_url=args_opt.train_url)\n', (10607, 10660), True, 'import moxing as mox\n'), ((5282, 5304), 'os.getenv', 'os.getenv', (['"""DEVICE_ID"""'], {}), "('DEVICE_ID')\n", (5291, 5304), False, 'import os\n'), ((5331, 5353), 'os.getenv', 'os.getenv', (['"""RANK_SIZE"""'], {}), "('RANK_SIZE')\n", (5340, 5353), False, 'import os\n'), ((5562, 5568), 'mindspore.communication.management.init', 'init', ([], {}), '()\n', (5566, 5568), False, 'from mindspore.communication.management import init\n'), ((5581, 5705), 'mindspore.context.set_auto_parallel_context', 'context.set_auto_parallel_context', ([], {'device_num': 'device_num', 'parallel_mode': 'ParallelMode.DATA_PARALLEL', 'gradients_mean': '(True)'}), '(device_num=device_num, parallel_mode=\n ParallelMode.DATA_PARALLEL, gradients_mean=True)\n', (5614, 5705), False, 'from mindspore import context\n'), ((6276, 6316), 'mindspore.context.set_context', 'context.set_context', ([], {'device_id': 'device_id'}), '(device_id=device_id)\n', (6295, 6316), False, 'from mindspore import context\n'), ((6329, 6335), 'mindspore.communication.management.init', 'init', ([], {}), '()\n', (6333, 6335), False, 'from mindspore.communication.management import init\n'), ((6348, 6385), 'mindspore.context.reset_auto_parallel_context', 'context.reset_auto_parallel_context', ([], {}), '()\n', (6383, 6385), False, 'from mindspore import context\n'), ((6398, 6522), 
'mindspore.context.set_auto_parallel_context', 'context.set_auto_parallel_context', ([], {'device_num': 'device_num', 'parallel_mode': 'ParallelMode.DATA_PARALLEL', 'gradients_mean': '(True)'}), '(device_num=device_num, parallel_mode=\n ParallelMode.DATA_PARALLEL, gradients_mean=True)\n', (6431, 6522), False, 'from mindspore import context\n'), ((6636, 6685), 'mindspore.context.set_context', 'context.set_context', ([], {'device_id': 'args_opt.device_id'}), '(device_id=args_opt.device_id)\n', (6655, 6685), False, 'from mindspore import context\n'), ((7134, 7203), 'src.utility.GetDatasetGenerator_triplet', 'GetDatasetGenerator_triplet', ([], {'data_dir': 'DATA_DIR', 'train_list': 'TRAIN_LIST'}), '(data_dir=DATA_DIR, train_list=TRAIN_LIST)\n', (7161, 7203), False, 'from src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet\n'), ((8898, 8921), 'src.loss.Tripletloss', 'Tripletloss', ([], {'margin': '(0.1)'}), '(margin=0.1)\n', (8909, 8921), False, 'from src.loss import Tripletloss\n'), ((9985, 10064), 'mindspore.train.callback.ModelCheckpoint', 'ModelCheckpoint', ([], {'prefix': 'check_name', 'directory': 'local_train_url', 'config': 'config_ck'}), '(prefix=check_name, directory=local_train_url, config=config_ck)\n', (10000, 10064), False, 'from mindspore.train.callback import ModelCheckpoint, CheckpointConfig\n'), ((10203, 10281), 'mindspore.train.callback.ModelCheckpoint', 'ModelCheckpoint', ([], {'prefix': 'check_name', 'directory': 'save_ckpt_path', 'config': 'config_ck'}), '(prefix=check_name, directory=save_ckpt_path, config=config_ck)\n', (10218, 10281), False, 'from mindspore.train.callback import ModelCheckpoint, CheckpointConfig\n'), ((3164, 3175), 'time.time', 'time.time', ([], {}), '()\n', (3173, 3175), False, 'import time\n'), ((3403, 3423), 'numpy.mean', 'np.mean', (['self.losses'], {}), '(self.losses)\n', (3410, 3423), True, 'import numpy as np\n'), ((3814, 3825), 'time.time', 'time.time', ([], {}), '()\n', (3823, 3825), False, 'import time\n'), ((4468, 4488), 'numpy.mean', 'np.mean', (['self.losses'], {}), '(self.losses)\n', (4475, 4488), True, 'import numpy as np\n'), ((6187, 6209), 'os.getenv', 'os.getenv', (['"""DEVICE_ID"""'], {}), "('DEVICE_ID')\n", (6196, 6209), False, 'import os\n'), ((6240, 6262), 'os.getenv', 'os.getenv', (['"""RANK_SIZE"""'], {}), "('RANK_SIZE')\n", (6249, 6262), False, 'import os\n'), ((7333, 7405), 'src.utility.GetDatasetGenerator_quadruplet', 'GetDatasetGenerator_quadruplet', ([], {'data_dir': 'DATA_DIR', 'train_list': 'TRAIN_LIST'}), '(data_dir=DATA_DIR, train_list=TRAIN_LIST)\n', (7363, 7405), False, 'from src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet\n'), ((8982, 9070), 'src.loss.Quadrupletloss', 'Quadrupletloss', ([], {'train_batch_size': 'config.batch_size', 'samples_each_class': '(2)', 'margin': '(0.1)'}), '(train_batch_size=config.batch_size, samples_each_class=2,\n margin=0.1)\n', (8996, 9070), False, 'from src.loss import Quadrupletloss\n')]
|
# -*- coding: utf-8 -*-
"""testing script"""
import os
import sys
from functools import reduce
import numpy as np
import pandas as pd
import nltk # Natural Language Tool Kit
from fuzzywuzzy import fuzz, process # Fuzzy String Matching
import jellyfish # Distance metrics
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import linear_kernel
def get_cosine_distance(doc1, doc2):
"""
"""
tfidf = TfidfVectorizer().fit_transform([doc1, doc2]) # Vectorize the bag of words
cos_dist = linear_kernel(tfidf[0:1], tfidf[1:2]).flatten()[0] # Compute cosine distance
return cos_dist
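# Sanity check (illustrative only): identical documents should score 1.0.
# Note that despite the name, get_cosine_distance returns a cosine *similarity*,
# so identical texts give 1.0 rather than 0.0.
#
#   >>> round(get_cosine_distance('shield extender', 'shield extender'), 6)
#   1.0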
# killmail_id, killmail_time, solar_system_id, character_id, ship_type_id
# 46643819, 2015-05-15 19:02:00, 30000157, 90000814, 630
l1 = [('Large Shield Extender II', 'Shield Extender'),
('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'),
('Caldari Navy Mjolnir Light Missile', 'Light Missile'),
('Damage Control II', 'Damage Control'),
('50MN Cold-Gas Enduring Microwarpdrive', 'Propulsion Module'),
('Large Shield Extender II', 'Shield Extender'),
('Caldari Navy Scourge Light Missile', 'Light Missile'),
('Caldari Navy Inferno Light Missile', 'Light Missile'),
('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'),
('Phased Scoped Target Painter', 'Target Painter'),
('Caldari Navy Inferno Light Missile', 'Light Missile'),
('Medium Polycarbon Engine Housing I', 'Rig Navigation'),
('Nanofiber Internal Structure II', 'Nanofiber Internal Structure'),
('Ballistic Control System II', 'Ballistic Control system'),
('Ballistic Control System II', 'Ballistic Control system'),
('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'),
('Caldari Navy Inferno Light Missile', 'Light Missile'),
('Caldari Navy Inferno Light Missile', 'Light Missile'),
('Caldari Navy Nova Light Missile', 'Light Missile'),
('Medium Core Defense Field Extender I', 'Rig Shield'),
('Caldari Navy Inferno Light Missile', 'Light Missile'),
('Warp Disruptor II', 'Warp Scrambler'),
('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'),
('Medium Core Defense Field Extender I', 'Rig Shield')]
# killmail_id, killmail_time, solar_system_id, character_id, ship_type_id
# 46643869, 2015-05-15 19:05:00, 30000157, 90000814, 32872
l2 = [('Caldari Navy Antimatter Charge S', 'Hybrid Charge'),
('Caldari Navy Antimatter Charge S', 'Hybrid Charge'),
('Drone Damage Amplifier II', 'Drone Damage Modules'),
('F85 Peripheral Damage System I', 'Damage Control'),
('Null S', 'Advanced Blaster Charge'),
('Caldari Navy Antimatter Charge S', 'Hybrid Charge'),
('Light Ion Blaster II', 'Hybrid Weapon'),
('J5b Enduring Warp Scrambler', 'Warp Scrambler'),
('Light Ion Blaster II', 'Hybrid Weapon'),
('Caldari Navy Antimatter Charge S', 'Hybrid Charge'),
('Drone Damage Amplifier II', 'Drone Damage Modules'),
('Small Transverse Bulkhead I', 'Rig Armor'),
('5MN Y-T8 Compact Microwarpdrive', 'Propulsion Module'),
('Light Ion Blaster II', 'Hybrid Weapon'),
('X5 Enduring Stasis Webifier', 'Stasis Web'),
('Small Transverse Bulkhead I', 'Rig Armor'),
('Warrior II', 'Combat Drone'),
('Small Transverse Bulkhead I', 'Rig Armor'),
('Light Ion Blaster II', 'Hybrid Weapon'),
('Light Ion Blaster II', 'Hybrid Weapon'),
('Caldari Navy Antimatter Charge S', 'Hybrid Charge'),
('Caldari Navy Antimatter Charge S', 'Hybrid Charge')]
# [TEST] Long Text Vectorizers
# The same document should have cosine distance of 1
doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in l1]) # Create bag of words
doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in l1]) # Create bag of words
cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt)
print(f"Document 1: {doc1_lt}")
print(f"Document 2: {doc2_lt}")
print(f"Cosine Distance:\n {cos_dist_lt}")
print("==========")
# Long Text Vectorizers
# Let's see how close the long texts are
doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in l1])
doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in l2])
cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt)
print(f"Document 1: {doc1_lt}")
print(f"Document 2: {doc2_lt}")
print(f"Cosine Distance:\n {cos_dist_lt}")
print("==========")
# [TEST] Short Text Vectorizers
# Again same texts should have cosine distance of 1
doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l2])
doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l2])
cos_dist_st = get_cosine_distance(doc1_st, doc2_st)
print(f"Document 1: {doc1_st}")
print(f"Document 2: {doc2_st}")
print(f"Cosine Distance:\n {cos_dist_st}")
print("==========")
# Short Text Vectorizers
# Let's see how close the short texts are
doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l1])
doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l2])
cos_dist_st = get_cosine_distance(doc1_st, doc2_st)
print(f"Document 1: {doc1_st}")
print(f"Document 2: {doc2_st}")
print(f"Cosine Distance:\n {cos_dist_st}")
print("==========")
# Short Text Vectorizers
# Cosine distance should be commutable
doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l2])
doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l1])
cos_dist_st = get_cosine_distance(doc1_st, doc2_st)
print(f"Document 1: {doc1_st}")
print(f"Document 2: {doc2_st}")
print(f"Cosine Distance:\n {cos_dist_st}")
|
[
"functools.reduce",
"sklearn.feature_extraction.text.TfidfVectorizer",
"sklearn.metrics.pairwise.linear_kernel"
] |
[((3797, 3848), 'functools.reduce', 'reduce', (["(lambda x, y: f'{x} {y}')", '[x[0] for x in l1]'], {}), "(lambda x, y: f'{x} {y}', [x[0] for x in l1])\n", (3803, 3848), False, 'from functools import reduce\n'), ((3882, 3933), 'functools.reduce', 'reduce', (["(lambda x, y: f'{x} {y}')", '[x[0] for x in l1]'], {}), "(lambda x, y: f'{x} {y}', [x[0] for x in l1])\n", (3888, 3933), False, 'from functools import reduce\n'), ((4213, 4264), 'functools.reduce', 'reduce', (["(lambda x, y: f'{x} {y}')", '[x[0] for x in l1]'], {}), "(lambda x, y: f'{x} {y}', [x[0] for x in l1])\n", (4219, 4264), False, 'from functools import reduce\n'), ((4275, 4326), 'functools.reduce', 'reduce', (["(lambda x, y: f'{x} {y}')", '[x[0] for x in l2]'], {}), "(lambda x, y: f'{x} {y}', [x[0] for x in l2])\n", (4281, 4326), False, 'from functools import reduce\n'), ((4602, 4653), 'functools.reduce', 'reduce', (["(lambda x, y: f'{x} {y}')", '[x[1] for x in l2]'], {}), "(lambda x, y: f'{x} {y}', [x[1] for x in l2])\n", (4608, 4653), False, 'from functools import reduce\n'), ((4664, 4715), 'functools.reduce', 'reduce', (["(lambda x, y: f'{x} {y}')", '[x[1] for x in l2]'], {}), "(lambda x, y: f'{x} {y}', [x[1] for x in l2])\n", (4670, 4715), False, 'from functools import reduce\n'), ((4974, 5025), 'functools.reduce', 'reduce', (["(lambda x, y: f'{x} {y}')", '[x[1] for x in l1]'], {}), "(lambda x, y: f'{x} {y}', [x[1] for x in l1])\n", (4980, 5025), False, 'from functools import reduce\n'), ((5036, 5087), 'functools.reduce', 'reduce', (["(lambda x, y: f'{x} {y}')", '[x[1] for x in l2]'], {}), "(lambda x, y: f'{x} {y}', [x[1] for x in l2])\n", (5042, 5087), False, 'from functools import reduce\n'), ((5343, 5394), 'functools.reduce', 'reduce', (["(lambda x, y: f'{x} {y}')", '[x[1] for x in l2]'], {}), "(lambda x, y: f'{x} {y}', [x[1] for x in l2])\n", (5349, 5394), False, 'from functools import reduce\n'), ((5405, 5456), 'functools.reduce', 'reduce', (["(lambda x, y: f'{x} {y}')", '[x[1] for x in l1]'], {}), "(lambda x, y: f'{x} {y}', [x[1] for x in l1])\n", (5411, 5456), False, 'from functools import reduce\n'), ((454, 471), 'sklearn.feature_extraction.text.TfidfVectorizer', 'TfidfVectorizer', ([], {}), '()\n', (469, 471), False, 'from sklearn.feature_extraction.text import TfidfVectorizer\n'), ((545, 582), 'sklearn.metrics.pairwise.linear_kernel', 'linear_kernel', (['tfidf[0:1]', 'tfidf[1:2]'], {}), '(tfidf[0:1], tfidf[1:2])\n', (558, 582), False, 'from sklearn.metrics.pairwise import linear_kernel\n')]
|
# Generated by Django 2.2 on 2019-05-23 12:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dashboard', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='entrylist',
name='awards',
field=models.CharField(blank=True, default='未设置', max_length=200, null=True),
),
migrations.AlterField(
model_name='entrylist',
name='score_kind',
field=models.CharField(blank=True, default='未设置', max_length=100, null=True),
),
]
|
[
"django.db.models.CharField"
] |
[((328, 398), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '"""未设置"""', 'max_length': '(200)', 'null': '(True)'}), "(blank=True, default='未设置', max_length=200, null=True)\n", (344, 398), False, 'from django.db import migrations, models\n'), ((527, 597), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '"""未设置"""', 'max_length': '(100)', 'null': '(True)'}), "(blank=True, default='未设置', max_length=100, null=True)\n", (543, 597), False, 'from django.db import migrations, models\n')]
|
from aparse import click
from viewformer.utils.click import LazyGroup
@click.group(cls=LazyGroup)
def main():
pass
@main.group(cls=LazyGroup)
def dataset():
pass
@main.group(cls=LazyGroup)
def visualize():
pass
@main.group(cls=LazyGroup)
def model():
pass
@main.group(cls=LazyGroup)
def evaluate():
pass
dataset.add_command('viewformer.data.commands.visualize', 'visualize')
dataset.add_command('viewformer.data.commands.generate', 'generate')
dataset.add_command('viewformer.data.commands.shuffle', 'shuffle')
visualize.add_command('viewformer.commands.visualize_codebook', 'codebook')
model.add_command('viewformer.commands.model_info', 'info')
evaluate.add_command("viewformer.evaluate.evaluate_transformer", "transformer")
evaluate.add_command("viewformer.evaluate.evaluate_transformer_multictx", "transformer-multictx")
evaluate.add_command("viewformer.evaluate.evaluate_transformer_multictx_allimg", "transformer-multictx-allimg")
evaluate.add_command("viewformer.evaluate.evaluate_codebook", "codebook")
evaluate.add_command("viewformer.evaluate.evaluate_sevenscenes", "7scenes")
evaluate.add_command("viewformer.evaluate.evaluate_sevenscenes_baseline", "7scenes-baseline")
evaluate.add_command("viewformer.evaluate.evaluate_sevenscenes_multictx", "7scenes-multictx")
evaluate.add_command("viewformer.evaluate.evaluate_co3d", "co3d")
evaluate.add_command("viewformer.evaluate.generate_gqn_images", "generate-gqn-images")
main.add_command("viewformer.train", "train")
main.add_command("viewformer.commands.generate_codes", 'generate-codes')
main.add_command("viewformer.commands.download_model", 'download-model')
if __name__ == '__main__':
main()
|
[
"aparse.click.group"
] |
[((73, 99), 'aparse.click.group', 'click.group', ([], {'cls': 'LazyGroup'}), '(cls=LazyGroup)\n', (84, 99), False, 'from aparse import click\n')]
|
"""
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type   : Functional test
Case Name   : Setting the client encoding with the gs_guc tool does not take effect
Description :
    1. gs_guc set the client encoding to SQL_ASCII
    2. gs_guc reload the client encoding to GBK
Expect      :
    1. The setting does not take effect
    2. The setting does not take effect
History :
"""
import unittest
from yat.test import macro
from yat.test import Node
from testcase.utils.Logger import Logger
from testcase.utils.CommonSH import CommonSH
class Function(unittest.TestCase):
def setUp(self):
self.commonsh = CommonSH('dbuser')
self.user_node = Node('dbuser')
self.log = Logger()
self.cluster_path = macro.DB_INSTANCE_PATH
self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''')
def test_encode(self):
cmd0 = "show client_encoding;"
msg0 = self.commonsh.execut_db_sql(cmd0)
self.log.info(msg0)
init = msg0.splitlines()[2].strip()
client = ['SQL_ASCII', 'UTF8']
self.var = client if init == 'UTF8' else list(reversed(client))
def restart_check():
self.commonsh.restart_db_cluster()
status = self.commonsh.get_db_cluster_status()
self.assertTrue("Normal" in status or 'Degraded' in status)
            # Check that the setting did not take effect; the client encoding is still the initial value (utf8)
cmd = 'show client_encoding;'
msg = self.commonsh.execut_db_sql(cmd)
self.log.info(msg)
return msg
        # gs_guc set: set the client encoding to SQL_ASCII
cmd1 = f'''source {macro.DB_ENV_PATH}
gs_guc set -N all -I all -c "client_encoding='{self.var[0]}'"'''
self.log.info(cmd1)
msg1 = self.user_node.sh(cmd1).result()
self.log.info(msg1)
res = restart_check()
self.assertTrue(self.var[1] in res)
        # gs_guc reload: set the client encoding to GBK
cmd2 = f'''source {macro.DB_ENV_PATH}
gs_guc reload -D {self.cluster_path} -c "client_encoding = 'GBK'"'''
self.log.info(cmd2)
msg2 = self.user_node.sh(cmd2).result()
self.log.info(msg2)
res = restart_check()
self.assertTrue(self.var[1] in res)
def tearDown(self):
self.log.info('''---Opengauss_Function_DML_Set_Case0032结束---''')
|
[
"yat.test.Node",
"testcase.utils.CommonSH.CommonSH",
"testcase.utils.Logger.Logger"
] |
[((956, 974), 'testcase.utils.CommonSH.CommonSH', 'CommonSH', (['"""dbuser"""'], {}), "('dbuser')\n", (964, 974), False, 'from testcase.utils.CommonSH import CommonSH\n'), ((1000, 1014), 'yat.test.Node', 'Node', (['"""dbuser"""'], {}), "('dbuser')\n", (1004, 1014), False, 'from yat.test import Node\n'), ((1034, 1042), 'testcase.utils.Logger.Logger', 'Logger', ([], {}), '()\n', (1040, 1042), False, 'from testcase.utils.Logger import Logger\n')]
|
from GitLabApi import objects
from core.tests.test_view import LoginMethods
from core.tests.test_view import SimpleUrlsTestsCases
from django.db.models import QuerySet
from django.urls import reverse
from groups import models
from groups.sidebar import GroupSidebar, FutureGroupSidebar
from groups.tests import test_forms
from groups.tests import models as test_models
class GitlabWrapperAppNameCase:
class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests):
app_name = 'groups'
class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'init_sidebar'
args = {'group_id': '1'}
@LoginMethods.login_wrapper
def test_page_found(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'sidebar.html')
self.assertIn('group', response.context)
self.assertIn('sidebar', response.context)
self.assertIsInstance(response.context['group'], objects.Group)
self.assertIsInstance(response.context['sidebar'], GroupSidebar)
class IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'index'
@LoginMethods.login_wrapper
def test_page_found(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/index.html')
self.assertIn('group_list', response.context)
all(self.assertIsInstance(group, objects.Group) for group in response.context['group_list'])
class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'detail'
args = {'group_id': '1'}
@LoginMethods.login_wrapper
def test_page_found(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/detail.html')
self.assertIn('group', response.context)
self.assertIn('sidebar', response.context)
self.assertIn('unfinished_add_subgroup_list', response.context)
self.assertIn('unfinished_add_project_list', response.context)
self.assertIsInstance(response.context['group'], objects.Group)
self.assertIsInstance(response.context['sidebar'], GroupSidebar)
self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet)
self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet)
class MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'members'
args = {'group_id': '1'}
@LoginMethods.login_wrapper
def test_page_found(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/members.html')
self.assertIn('group', response.context)
self.assertIn('sidebar', response.context)
self.assertIn('unfinished_task_list', response.context)
self.assertIsInstance(response.context['group'], objects.Group)
self.assertIsInstance(response.context['sidebar'], GroupSidebar)
self.assertIsInstance(response.context['unfinished_task_list'], QuerySet)
class TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'tasks'
args = {'group_id': '1'}
@LoginMethods.login_wrapper
def test_page_found(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/tasks.html')
self.assertIn('group', response.context)
self.assertIn('sidebar', response.context)
self.assertIn('unfinished_task_list', response.context)
self.assertIn('finished_task_list', response.context)
self.assertIn('new_group_links', response.context)
self.assertIsInstance(response.context['group'], objects.Group)
self.assertIsInstance(response.context['sidebar'], GroupSidebar)
self.assertIsInstance(response.context['unfinished_task_list'], list)
self.assertIsInstance(response.context['finished_task_list'], list)
self.assertIsInstance(response.context['new_group_links'], list)
new_group_links = [
('New Task Group', reverse('groups:new_task_group', kwargs=self.args)),
('New Subgroup', reverse('groups:new_subgroup_task', kwargs=self.args)),
('New Project', reverse('groups:new_project_task', kwargs=self.args)),
('New Member', reverse('groups:new_member_task', kwargs=self.args))
]
for group_link in response.context['new_group_links']:
self.assertIn(group_link, new_group_links)
class NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'new_group'
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_not_valid_data(self):
response = self.client.post(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_valid_data(self):
response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, reverse('groups:index'))
class NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'new_subgroup'
args = {'group_id': '1'}
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_not_valid_data(self):
response = self.client.post(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_valid_data(self):
response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args))
class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'new_task_group'
args = {'group_id': '1'}
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_not_valid_data(self):
response = self.client.post(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_valid_data(self):
response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args))
model = models.TaskGroup.objects.get(
gitlab_group=models.GitlabGroup.objects.get(
gitlab_id=self.args['group_id']))
for key, value in test_forms.TaskGroupFormTests.valid_form_data.items():
self.assertEqual(getattr(model, key), value)
class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'new_task_group'
args = {'task_id': None}
def setUp(self):
super().setUp()
self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()
self.args['task_id'] = self.parent_task.id
@LoginMethods.login_wrapper
def test_page_not_found(self):
self.args['task_id'] += 1
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 404)
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_not_valid_data(self):
response = self.client.post(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_valid_data(self):
response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url,
reverse('groups:future_group_tasks', kwargs=self.args))
model = models.TaskGroup.objects.get(parent_task=self.parent_task)
for key, value in test_forms.TaskGroupFormTests.valid_form_data.items():
self.assertEqual(getattr(model, key), value)
class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'edit_task_group'
args = {'task_group_id': 1}
def setUp(self):
super().setUp()
self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()
self.task_group = test_models.AddSubgroupCreateMethods().create_task_group(
parent_task=self.parent_task
)
self.args['task_group_id'] = self.task_group.id
@LoginMethods.login_wrapper
def test_page_not_found(self):
self.args['task_group_id'] += 1
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 404)
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
def _test_page_post_valid_data(self):
data = self.get_initial_form_data()
self.assertEqual(data['name'], self.task_group.name)
data['name'] = 'Another Name'
response = self.client.post(self.get_url(), data)
self.assertEqual(response.status_code, 302)
self.task_group.refresh_from_db()
self.assertEqual(self.task_group.name, data['name'])
return response
@LoginMethods.login_wrapper
def test_page_post_valid_data_redirect_to_tasks(self):
self.task_group.gitlab_group.gitlab_id = 42
self.task_group.gitlab_group.save()
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id}))
@LoginMethods.login_wrapper
def test_page_post_valid_data_redirect_to_future_tasks(self):
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))
class NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'new_subgroup_task'
args = {'task_group_id': 1}
def setUp(self):
super().setUp()
self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()
self.task_group = test_models.AddSubgroupCreateMethods().create_task_group(
parent_task=self.parent_task
)
self.args['task_group_id'] = self.task_group.id
@LoginMethods.login_wrapper
def test_page_not_found(self):
self.args['task_group_id'] += 1
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 404)
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_not_valid_data(self):
response = self.client.post(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
def _test_page_post_valid_data(self):
response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data)
self.assertEqual(response.status_code, 302)
model = models.AddSubgroup.objects.get(task_group=self.task_group)
for key, value in test_forms.AddSubgroupFormTests.valid_form_data.items():
self.assertEqual(getattr(model, key), value)
return response
@LoginMethods.login_wrapper
def test_page_post_valid_data_redirect_to_tasks(self):
self.task_group.gitlab_group.gitlab_id = 42
self.task_group.gitlab_group.save()
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id}))
@LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_future_tasks(self):
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))
class EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'edit_subgroup_task'
args = {'task_id': 1}
def setUp(self):
super().setUp()
self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()
self.task = test_models.AddSubgroupCreateMethods().create_task(
parent_task=self.parent_task)
self.args['task_id'] = self.task.id
@LoginMethods.login_wrapper
def test_page_not_found(self):
self.args['task_id'] += 1
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 404)
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
def _test_page_post_valid_data(self):
data = self.get_initial_form_data()
self.assertEqual(data['name'], self.task.name)
data['name'] = 'Another Name'
data['description'] = 'Description'
response = self.client.post(self.get_url(), data)
self.assertEqual(response.status_code, 302)
self.task.refresh_from_db()
self.assertEqual(self.task.name, data['name'])
self.assertEqual(self.task.description, data['description'])
return response
@LoginMethods.login_wrapper
def test_page_post_valid_data_redirect_to_tasks(self):
self.task.gitlab_group.gitlab_id = 42
self.task.gitlab_group.save()
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id}))
@LoginMethods.login_wrapper
def test_page_post_valid_data_redirect_to_future_tasks(self):
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))
class NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'new_project'
args = {'group_id': '1'}
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_not_valid_data(self):
response = self.client.post(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_valid_data(self):
response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args))
class NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'new_project_task'
args = {'task_group_id': 1}
def setUp(self):
super().setUp()
self.parent_task = test_models.AddProjectCreateMethods().create_parent_task()
self.task_group = test_models.AddProjectCreateMethods().create_task_group(
parent_task=self.parent_task
)
self.args['task_group_id'] = self.task_group.id
@LoginMethods.login_wrapper
def test_page_not_found(self):
self.args['task_group_id'] += 1
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 404)
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_not_valid_data(self):
response = self.client.post(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
def _test_page_post_valid_data(self):
response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data)
self.assertEqual(response.status_code, 302)
model = models.AddProject.objects.get(task_group=self.task_group)
for key, value in test_forms.AddProjectFormTests.valid_form_data.items():
self.assertEqual(getattr(model, key), value)
return response
@LoginMethods.login_wrapper
def test_page_post_valid_data_redirect_to_tasks(self):
self.task_group.gitlab_group.gitlab_id = 42
self.task_group.gitlab_group.save()
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id}))
@LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_future_tasks(self):
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))
class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'edit_project_task'
args = {'task_id': 1}
def setUp(self):
super().setUp()
self.parent_task = test_models.AddProjectCreateMethods().create_parent_task()
self.task = test_models.AddProjectCreateMethods().create_task(
parent_task=self.parent_task)
self.args['task_id'] = self.task.id
@LoginMethods.login_wrapper
def test_page_not_found(self):
self.args['task_id'] += 1
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 404)
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
def _test_page_post_valid_data(self):
data = self.get_initial_form_data()
self.assertEqual(data['name'], self.task.name)
data['name'] = 'Another Name'
data['description'] = 'Description'
response = self.client.post(self.get_url(), data)
self.assertEqual(response.status_code, 302)
self.task.refresh_from_db()
self.assertEqual(self.task.name, data['name'])
self.assertEqual(self.task.description, data['description'])
return response
@LoginMethods.login_wrapper
def test_page_post_valid_data_redirect_to_tasks(self):
self.task.gitlab_group.gitlab_id = 42
self.task.gitlab_group.save()
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id}))
@LoginMethods.login_wrapper
def test_page_post_valid_data_redirect_to_future_tasks(self):
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))
class NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'new_member'
args = {'group_id': '1'}
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_not_valid_data(self):
response = self.client.post(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_valid_data(self):
response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, reverse('groups:members', kwargs=self.args))
class NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'new_member_task'
args = {'task_group_id': 1}
def setUp(self):
super().setUp()
self.parent_task = test_models.AddMemberCreateMethods().create_parent_task()
self.task_group = test_models.AddMemberCreateMethods().create_task_group(
parent_task=self.parent_task
)
self.args['task_group_id'] = self.task_group.id
@LoginMethods.login_wrapper
def test_page_not_found(self):
self.args['task_group_id'] += 1
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 404)
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_not_valid_data(self):
response = self.client.post(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
def _test_page_post_valid_data(self):
response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data)
self.assertEqual(response.status_code, 302)
model = models.AddMember.objects.get(task_group=self.task_group)
for key, value in test_forms.AddMemberFormTests.valid_form_data.items():
self.assertEqual(getattr(model, key), value)
return response
@LoginMethods.login_wrapper
def test_page_post_valid_data_redirect_to_tasks(self):
self.task_group.gitlab_group.gitlab_id = 42
self.task_group.gitlab_group.save()
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id}))
@LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_future_tasks(self):
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))
class EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'edit_member_task'
args = {'task_id': 1}
def setUp(self):
super().setUp()
self.parent_task = test_models.AddMemberCreateMethods().create_parent_task()
self.task = test_models.AddMemberCreateMethods().create_task(
parent_task=self.parent_task)
self.args['task_id'] = self.task.id
@LoginMethods.login_wrapper
def test_page_not_found(self):
self.args['task_id'] += 1
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 404)
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
def _test_page_post_valid_data(self):
data = self.get_initial_form_data()
self.assertEqual(data['username'], self.task.username)
data['username'] = 'Another username'
response = self.client.post(self.get_url(), data)
self.assertEqual(response.status_code, 302)
self.task.refresh_from_db()
self.assertEqual(self.task.username, data['username'])
return response
@LoginMethods.login_wrapper
def test_page_post_valid_data_redirect_to_tasks(self):
self.task.gitlab_group.gitlab_id = 42
self.task.gitlab_group.save()
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id}))
@LoginMethods.login_wrapper
def test_page_post_valid_data_redirect_to_future_tasks(self):
response = self._test_page_post_valid_data()
self.assertEqual(response.url,
reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))
class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'new_members_from_file'
args = {'group_id': '1'}
def setUp(self):
super().setUp()
for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items():
value.file.seek(0)
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_not_valid_data(self):
response = self.client.post(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_valid_data(self):
response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data,
**test_forms.MembersFromFileFormTests.valid_file_data})
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args))
class FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'new_members_from_file'
args = {'task_id': None}
def setUp(self):
super().setUp()
self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()
self.args['task_id'] = self.parent_task.id
for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items():
value.file.seek(0)
@LoginMethods.login_wrapper
def test_page_not_found(self):
self.args['task_id'] += 1
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 404)
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_not_valid_data(self):
response = self.client.post(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_valid_data(self):
response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data,
**test_forms.MembersFromFileFormTests.valid_file_data})
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url,
reverse('groups:future_group_tasks', kwargs=self.args))
class NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'new_subgroup_and_members_from_file'
args = {'group_id': '1'}
def setUp(self):
super().setUp()
for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items():
value.file.seek(0)
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_not_valid_data(self):
response = self.client.post(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_valid_data(self):
response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data,
**test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data})
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args))
class FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'new_subgroup_and_members_from_file'
args = {'task_id': None}
def setUp(self):
super().setUp()
self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()
self.args['task_id'] = self.parent_task.id
for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items():
value.file.seek(0)
@LoginMethods.login_wrapper
def test_page_not_found(self):
self.args['task_id'] += 1
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 404)
@LoginMethods.login_wrapper
def test_page_get(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_not_valid_data(self):
response = self.client.post(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/form_base_site.html')
@LoginMethods.login_wrapper
def test_page_post_valid_data(self):
response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data,
**test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data})
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url,
reverse('groups:future_group_tasks', kwargs=self.args))
class FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'future_group_detail'
args = {'task_id': None}
def setUp(self):
super().setUp()
self.task = test_models.AddSubgroupCreateMethods().create_task()
self.args['task_id'] = self.task.id
@LoginMethods.login_wrapper
def test_page_not_found(self):
self.args['task_id'] += 1
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 404)
@LoginMethods.login_wrapper
def test_page_found(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/tasks/detail.html')
self.assertIn('task', response.context)
self.assertIn('sidebar', response.context)
self.assertIn('unfinished_add_subgroup_list', response.context)
self.assertIn('unfinished_add_project_list', response.context)
self.assertIsInstance(response.context['task'], models.AddSubgroup)
self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar)
self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet)
self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet)
class FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'future_group_members'
args = {'task_id': None}
def setUp(self):
super().setUp()
self.task = test_models.AddSubgroupCreateMethods().create_task()
self.args['task_id'] = self.task.id
@LoginMethods.login_wrapper
def test_page_not_found(self):
self.args['task_id'] += 1
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 404)
@LoginMethods.login_wrapper
def test_page_found(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/tasks/members.html')
self.assertIn('task', response.context)
self.assertIn('sidebar', response.context)
self.assertIn('unfinished_task_list', response.context)
self.assertIsInstance(response.context['task'], models.AddSubgroup)
self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar)
self.assertIsInstance(response.context['unfinished_task_list'], QuerySet)
class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'future_group_tasks'
args = {'task_id': None}
def setUp(self):
super().setUp()
self.task = test_models.AddSubgroupCreateMethods().create_task()
self.args['task_id'] = self.task.id
@LoginMethods.login_wrapper
def test_page_not_found(self):
self.args['task_id'] += 1
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 404)
@LoginMethods.login_wrapper
def test_page_found(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/tasks/tasks.html')
self.assertIn('task', response.context)
self.assertIn('sidebar', response.context)
self.assertIn('unfinished_task_list', response.context)
self.assertIn('finished_task_list', response.context)
self.assertIn('new_group_links', response.context)
self.assertIsInstance(response.context['task'], models.AddSubgroup)
self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar)
self.assertIsInstance(response.context['unfinished_task_list'], list)
self.assertIsInstance(response.context['finished_task_list'], list)
self.assertIsInstance(response.context['new_group_links'], list)
new_group_links = [
('New Task Group', reverse('groups:new_task_group', kwargs=self.args)),
('New Subgroup', reverse('groups:new_subgroup_task', kwargs=self.args)),
('New Project', reverse('groups:new_project_task', kwargs=self.args)),
('New Member', reverse('groups:new_member_task', kwargs=self.args))
]
for group_link in response.context['new_group_links']:
self.assertIn(group_link, new_group_links)
class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'ajax_load_subgroups'
args = {'group_id': '1'}
@LoginMethods.login_wrapper
def test_page_found(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html')
self.assertIn('group_list', response.context)
self.assertIsInstance(response.context['group_list'], list)
all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list'])
self.assertIn('project_list', response.context)
self.assertEqual(response.context['project_list'], [])
class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
name = 'ajax_load_subgroups_and_projects'
args = {'group_id': '1'}
@LoginMethods.login_wrapper
def test_page_found(self):
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html')
self.assertIn('group_list', response.context)
self.assertIsInstance(response.context['group_list'], list)
all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list'])
self.assertIn('project_list', response.context)
all(self.assertIsInstance(project, objects.GroupProject) for project in response.context['project_list'])
|
[
"groups.models.AddSubgroup.objects.get",
"groups.tests.test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items",
"groups.tests.test_forms.TaskGroupFormTests.valid_form_data.items",
"groups.tests.models.AddProjectCreateMethods",
"groups.models.AddMember.objects.get",
"groups.tests.test_forms.AddProjectFormTests.valid_form_data.items",
"groups.tests.models.AddMemberCreateMethods",
"groups.models.AddProject.objects.get",
"groups.tests.test_forms.AddMemberFormTests.valid_form_data.items",
"django.urls.reverse",
"groups.models.GitlabGroup.objects.get",
"groups.tests.models.AddSubgroupCreateMethods",
"groups.tests.test_forms.MembersFromFileFormTests.valid_file_data.items",
"groups.models.TaskGroup.objects.get",
"groups.tests.test_forms.AddSubgroupFormTests.valid_form_data.items"
] |
[((7695, 7748), 'groups.tests.test_forms.TaskGroupFormTests.valid_form_data.items', 'test_forms.TaskGroupFormTests.valid_form_data.items', ([], {}), '()\n', (7746, 7748), False, 'from groups.tests import test_forms\n'), ((9189, 9247), 'groups.models.TaskGroup.objects.get', 'models.TaskGroup.objects.get', ([], {'parent_task': 'self.parent_task'}), '(parent_task=self.parent_task)\n', (9217, 9247), False, 'from groups import models\n'), ((9274, 9327), 'groups.tests.test_forms.TaskGroupFormTests.valid_form_data.items', 'test_forms.TaskGroupFormTests.valid_form_data.items', ([], {}), '()\n', (9325, 9327), False, 'from groups.tests import test_forms\n'), ((12802, 12860), 'groups.models.AddSubgroup.objects.get', 'models.AddSubgroup.objects.get', ([], {'task_group': 'self.task_group'}), '(task_group=self.task_group)\n', (12832, 12860), False, 'from groups import models\n'), ((12887, 12942), 'groups.tests.test_forms.AddSubgroupFormTests.valid_form_data.items', 'test_forms.AddSubgroupFormTests.valid_form_data.items', ([], {}), '()\n', (12940, 12942), False, 'from groups.tests import test_forms\n'), ((18090, 18147), 'groups.models.AddProject.objects.get', 'models.AddProject.objects.get', ([], {'task_group': 'self.task_group'}), '(task_group=self.task_group)\n', (18119, 18147), False, 'from groups import models\n'), ((18174, 18228), 'groups.tests.test_forms.AddProjectFormTests.valid_form_data.items', 'test_forms.AddProjectFormTests.valid_form_data.items', ([], {}), '()\n', (18226, 18228), False, 'from groups.tests import test_forms\n'), ((23365, 23421), 'groups.models.AddMember.objects.get', 'models.AddMember.objects.get', ([], {'task_group': 'self.task_group'}), '(task_group=self.task_group)\n', (23393, 23421), False, 'from groups import models\n'), ((23448, 23501), 'groups.tests.test_forms.AddMemberFormTests.valid_form_data.items', 'test_forms.AddMemberFormTests.valid_form_data.items', ([], {}), '()\n', (23499, 23501), False, 'from groups.tests import test_forms\n'), ((26461, 26520), 'groups.tests.test_forms.MembersFromFileFormTests.valid_file_data.items', 'test_forms.MembersFromFileFormTests.valid_file_data.items', ([], {}), '()\n', (26518, 26520), False, 'from groups.tests import test_forms\n'), ((27837, 27896), 'groups.tests.test_forms.MembersFromFileFormTests.valid_file_data.items', 'test_forms.MembersFromFileFormTests.valid_file_data.items', ([], {}), '()\n', (27894, 27896), False, 'from groups.tests import test_forms\n'), ((29336, 29406), 'groups.tests.test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items', 'test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items', ([], {}), '()\n', (29404, 29406), False, 'from groups.tests import test_forms\n'), ((30770, 30840), 'groups.tests.test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items', 'test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items', ([], {}), '()\n', (30838, 30840), False, 'from groups.tests import test_forms\n'), ((5619, 5642), 'django.urls.reverse', 'reverse', (['"""groups:index"""'], {}), "('groups:index')\n", (5626, 5642), False, 'from django.urls import reverse\n'), ((6536, 6578), 'django.urls.reverse', 'reverse', (['"""groups:detail"""'], {'kwargs': 'self.args'}), "('groups:detail', kwargs=self.args)\n", (6543, 6578), False, 'from django.urls import reverse\n'), ((7473, 7514), 'django.urls.reverse', 'reverse', (['"""groups:tasks"""'], {'kwargs': 'self.args'}), "('groups:tasks', kwargs=self.args)\n", (7480, 7514), False, 'from django.urls import reverse\n'), ((9117, 9171), 
'django.urls.reverse', 'reverse', (['"""groups:future_group_tasks"""'], {'kwargs': 'self.args'}), "('groups:future_group_tasks', kwargs=self.args)\n", (9124, 9171), False, 'from django.urls import reverse\n'), ((11034, 11123), 'django.urls.reverse', 'reverse', (['"""groups:tasks"""'], {'kwargs': "{'group_id': self.task_group.gitlab_group.gitlab_id}"}), "('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.\n gitlab_id})\n", (11041, 11123), False, 'from django.urls import reverse\n'), ((11337, 11414), 'django.urls.reverse', 'reverse', (['"""groups:future_group_tasks"""'], {'kwargs': "{'task_id': self.parent_task.id}"}), "('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})\n", (11344, 11414), False, 'from django.urls import reverse\n'), ((13333, 13422), 'django.urls.reverse', 'reverse', (['"""groups:tasks"""'], {'kwargs': "{'group_id': self.task_group.gitlab_group.gitlab_id}"}), "('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.\n gitlab_id})\n", (13340, 13422), False, 'from django.urls import reverse\n'), ((13629, 13706), 'django.urls.reverse', 'reverse', (['"""groups:future_group_tasks"""'], {'kwargs': "{'task_id': self.parent_task.id}"}), "('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})\n", (13636, 13706), False, 'from django.urls import reverse\n'), ((15400, 15478), 'django.urls.reverse', 'reverse', (['"""groups:tasks"""'], {'kwargs': "{'group_id': self.task.gitlab_group.gitlab_id}"}), "('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})\n", (15407, 15478), False, 'from django.urls import reverse\n'), ((15697, 15774), 'django.urls.reverse', 'reverse', (['"""groups:future_group_tasks"""'], {'kwargs': "{'task_id': self.parent_task.id}"}), "('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})\n", (15704, 15774), False, 'from django.urls import reverse\n'), ((16665, 16707), 'django.urls.reverse', 'reverse', (['"""groups:detail"""'], {'kwargs': 'self.args'}), "('groups:detail', kwargs=self.args)\n", (16672, 16707), False, 'from django.urls import reverse\n'), ((18619, 18708), 'django.urls.reverse', 'reverse', (['"""groups:tasks"""'], {'kwargs': "{'group_id': self.task_group.gitlab_group.gitlab_id}"}), "('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.\n gitlab_id})\n", (18626, 18708), False, 'from django.urls import reverse\n'), ((18915, 18992), 'django.urls.reverse', 'reverse', (['"""groups:future_group_tasks"""'], {'kwargs': "{'task_id': self.parent_task.id}"}), "('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})\n", (18922, 18992), False, 'from django.urls import reverse\n'), ((20682, 20760), 'django.urls.reverse', 'reverse', (['"""groups:tasks"""'], {'kwargs': "{'group_id': self.task.gitlab_group.gitlab_id}"}), "('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})\n", (20689, 20760), False, 'from django.urls import reverse\n'), ((20979, 21056), 'django.urls.reverse', 'reverse', (['"""groups:future_group_tasks"""'], {'kwargs': "{'task_id': self.parent_task.id}"}), "('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})\n", (20986, 21056), False, 'from django.urls import reverse\n'), ((21944, 21987), 'django.urls.reverse', 'reverse', (['"""groups:members"""'], {'kwargs': 'self.args'}), "('groups:members', kwargs=self.args)\n", (21951, 21987), False, 'from django.urls import reverse\n'), ((23892, 23981), 'django.urls.reverse', 'reverse', (['"""groups:tasks"""'], {'kwargs': "{'group_id': 
self.task_group.gitlab_group.gitlab_id}"}), "('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.\n gitlab_id})\n", (23899, 23981), False, 'from django.urls import reverse\n'), ((24188, 24265), 'django.urls.reverse', 'reverse', (['"""groups:future_group_tasks"""'], {'kwargs': "{'task_id': self.parent_task.id}"}), "('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})\n", (24195, 24265), False, 'from django.urls import reverse\n'), ((25862, 25940), 'django.urls.reverse', 'reverse', (['"""groups:tasks"""'], {'kwargs': "{'group_id': self.task.gitlab_group.gitlab_id}"}), "('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})\n", (25869, 25940), False, 'from django.urls import reverse\n'), ((26159, 26236), 'django.urls.reverse', 'reverse', (['"""groups:future_group_tasks"""'], {'kwargs': "{'task_id': self.parent_task.id}"}), "('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})\n", (26166, 26236), False, 'from django.urls import reverse\n'), ((27426, 27467), 'django.urls.reverse', 'reverse', (['"""groups:tasks"""'], {'kwargs': 'self.args'}), "('groups:tasks', kwargs=self.args)\n", (27433, 27467), False, 'from django.urls import reverse\n'), ((29032, 29086), 'django.urls.reverse', 'reverse', (['"""groups:future_group_tasks"""'], {'kwargs': 'self.args'}), "('groups:future_group_tasks', kwargs=self.args)\n", (29039, 29086), False, 'from django.urls import reverse\n'), ((30334, 30375), 'django.urls.reverse', 'reverse', (['"""groups:tasks"""'], {'kwargs': 'self.args'}), "('groups:tasks', kwargs=self.args)\n", (30341, 30375), False, 'from django.urls import reverse\n'), ((31998, 32052), 'django.urls.reverse', 'reverse', (['"""groups:future_group_tasks"""'], {'kwargs': 'self.args'}), "('groups:future_group_tasks', kwargs=self.args)\n", (32005, 32052), False, 'from django.urls import reverse\n'), ((4333, 4383), 'django.urls.reverse', 'reverse', (['"""groups:new_task_group"""'], {'kwargs': 'self.args'}), "('groups:new_task_group', kwargs=self.args)\n", (4340, 4383), False, 'from django.urls import reverse\n'), ((4415, 4468), 'django.urls.reverse', 'reverse', (['"""groups:new_subgroup_task"""'], {'kwargs': 'self.args'}), "('groups:new_subgroup_task', kwargs=self.args)\n", (4422, 4468), False, 'from django.urls import reverse\n'), ((4499, 4551), 'django.urls.reverse', 'reverse', (['"""groups:new_project_task"""'], {'kwargs': 'self.args'}), "('groups:new_project_task', kwargs=self.args)\n", (4506, 4551), False, 'from django.urls import reverse\n'), ((4581, 4632), 'django.urls.reverse', 'reverse', (['"""groups:new_member_task"""'], {'kwargs': 'self.args'}), "('groups:new_member_task', kwargs=self.args)\n", (4588, 4632), False, 'from django.urls import reverse\n'), ((7587, 7650), 'groups.models.GitlabGroup.objects.get', 'models.GitlabGroup.objects.get', ([], {'gitlab_id': "self.args['group_id']"}), "(gitlab_id=self.args['group_id'])\n", (7617, 7650), False, 'from groups import models\n'), ((8021, 8059), 'groups.tests.models.AddSubgroupCreateMethods', 'test_models.AddSubgroupCreateMethods', ([], {}), '()\n', (8057, 8059), True, 'from groups.tests import models as test_models\n'), ((9602, 9640), 'groups.tests.models.AddSubgroupCreateMethods', 'test_models.AddSubgroupCreateMethods', ([], {}), '()\n', (9638, 9640), True, 'from groups.tests import models as test_models\n'), ((9688, 9726), 'groups.tests.models.AddSubgroupCreateMethods', 'test_models.AddSubgroupCreateMethods', ([], {}), '()\n', (9724, 9726), True, 'from groups.tests import models as 
test_models\n'), ((11636, 11674), 'groups.tests.models.AddSubgroupCreateMethods', 'test_models.AddSubgroupCreateMethods', ([], {}), '()\n', (11672, 11674), True, 'from groups.tests import models as test_models\n'), ((11722, 11760), 'groups.tests.models.AddSubgroupCreateMethods', 'test_models.AddSubgroupCreateMethods', ([], {}), '()\n', (11758, 11760), True, 'from groups.tests import models as test_models\n'), ((13924, 13962), 'groups.tests.models.AddSubgroupCreateMethods', 'test_models.AddSubgroupCreateMethods', ([], {}), '()\n', (13960, 13962), True, 'from groups.tests import models as test_models\n'), ((14004, 14042), 'groups.tests.models.AddSubgroupCreateMethods', 'test_models.AddSubgroupCreateMethods', ([], {}), '()\n', (14040, 14042), True, 'from groups.tests import models as test_models\n'), ((16927, 16964), 'groups.tests.models.AddProjectCreateMethods', 'test_models.AddProjectCreateMethods', ([], {}), '()\n', (16962, 16964), True, 'from groups.tests import models as test_models\n'), ((17012, 17049), 'groups.tests.models.AddProjectCreateMethods', 'test_models.AddProjectCreateMethods', ([], {}), '()\n', (17047, 17049), True, 'from groups.tests import models as test_models\n'), ((19208, 19245), 'groups.tests.models.AddProjectCreateMethods', 'test_models.AddProjectCreateMethods', ([], {}), '()\n', (19243, 19245), True, 'from groups.tests import models as test_models\n'), ((19287, 19324), 'groups.tests.models.AddProjectCreateMethods', 'test_models.AddProjectCreateMethods', ([], {}), '()\n', (19322, 19324), True, 'from groups.tests import models as test_models\n'), ((22205, 22241), 'groups.tests.models.AddMemberCreateMethods', 'test_models.AddMemberCreateMethods', ([], {}), '()\n', (22239, 22241), True, 'from groups.tests import models as test_models\n'), ((22289, 22325), 'groups.tests.models.AddMemberCreateMethods', 'test_models.AddMemberCreateMethods', ([], {}), '()\n', (22323, 22325), True, 'from groups.tests import models as test_models\n'), ((24479, 24515), 'groups.tests.models.AddMemberCreateMethods', 'test_models.AddMemberCreateMethods', ([], {}), '()\n', (24513, 24515), True, 'from groups.tests import models as test_models\n'), ((24557, 24593), 'groups.tests.models.AddMemberCreateMethods', 'test_models.AddMemberCreateMethods', ([], {}), '()\n', (24591, 24593), True, 'from groups.tests import models as test_models\n'), ((27699, 27737), 'groups.tests.models.AddSubgroupCreateMethods', 'test_models.AddSubgroupCreateMethods', ([], {}), '()\n', (27735, 27737), True, 'from groups.tests import models as test_models\n'), ((30632, 30670), 'groups.tests.models.AddSubgroupCreateMethods', 'test_models.AddSubgroupCreateMethods', ([], {}), '()\n', (30668, 30670), True, 'from groups.tests import models as test_models\n'), ((32268, 32306), 'groups.tests.models.AddSubgroupCreateMethods', 'test_models.AddSubgroupCreateMethods', ([], {}), '()\n', (32304, 32306), True, 'from groups.tests import models as test_models\n'), ((33600, 33638), 'groups.tests.models.AddSubgroupCreateMethods', 'test_models.AddSubgroupCreateMethods', ([], {}), '()\n', (33636, 33638), True, 'from groups.tests import models as test_models\n'), ((34753, 34791), 'groups.tests.models.AddSubgroupCreateMethods', 'test_models.AddSubgroupCreateMethods', ([], {}), '()\n', (34789, 34791), True, 'from groups.tests import models as test_models\n'), ((36018, 36068), 'django.urls.reverse', 'reverse', (['"""groups:new_task_group"""'], {'kwargs': 'self.args'}), "('groups:new_task_group', kwargs=self.args)\n", (36025, 36068), False, 'from 
django.urls import reverse\n'), ((36100, 36153), 'django.urls.reverse', 'reverse', (['"""groups:new_subgroup_task"""'], {'kwargs': 'self.args'}), "('groups:new_subgroup_task', kwargs=self.args)\n", (36107, 36153), False, 'from django.urls import reverse\n'), ((36184, 36236), 'django.urls.reverse', 'reverse', (['"""groups:new_project_task"""'], {'kwargs': 'self.args'}), "('groups:new_project_task', kwargs=self.args)\n", (36191, 36236), False, 'from django.urls import reverse\n'), ((36266, 36317), 'django.urls.reverse', 'reverse', (['"""groups:new_member_task"""'], {'kwargs': 'self.args'}), "('groups:new_member_task', kwargs=self.args)\n", (36273, 36317), False, 'from django.urls import reverse\n')]
|
#!/usr/bin/env python3
from api import DB
from api.models.listing import Listing
def get_all_queries():
"""
Returns all stored listing queries.
"""
return list(Listing.query.all())
|
[
"api.models.listing.Listing.query.all"
] |
[((183, 202), 'api.models.listing.Listing.query.all', 'Listing.query.all', ([], {}), '()\n', (200, 202), False, 'from api.models.listing import Listing\n')]
|
import asyncio
import json
from datetime import date
from decimal import Decimal
from typing import Dict
from aiohttp import ClientResponse
from dynaconf import settings
_PRICE_HISTORY_API = f"{settings.BENZAK_API_URL}/price-history/"
async def load_price(logger, session, price: Dict):
logger.debug(
f"calling Benzak price history API:"
f' POST "{_PRICE_HISTORY_API}"'
f" json={json.dumps(price, indent=2, sort_keys=True)}"
)
response: ClientResponse = await session.post(
_PRICE_HISTORY_API,
json=price,
headers={"AUTHORIZATION": settings.BENZAK_API_TOKEN},
)
logger.debug(f"got response: [{response.status} {response.reason}]")
if settings.DEBUG and response.status != 201:
payload = json.dumps(await response.json(), indent=2, sort_keys=True)
logger.debug(f"API response: {payload}")
async def load_prices(
logger, session, prices: Dict[date, Decimal], currency: int, fuel: int
):
logger.debug(
f"loading prices"
f" for currency={currency}, fuel={fuel}:"
f" {len(prices)} prices"
)
logger.debug("creating tasks: load price")
tasks = []
for actual_at, price in prices.items():
payload = {
"at": actual_at.strftime("%Y-%m-%d"),
"price": str(price),
"currency": currency,
"fuel": fuel,
}
task = asyncio.create_task(load_price(logger, session, payload))
tasks.append(task)
logger.debug(f"created {len(tasks)} tasks")
logger.debug("awaiting tasks: load price")
for task in tasks:
await task
logger.debug(f"loaded {len(prices)} prices")
|
[
"json.dumps"
] |
[((411, 454), 'json.dumps', 'json.dumps', (['price'], {'indent': '(2)', 'sort_keys': '(True)'}), '(price, indent=2, sort_keys=True)\n', (421, 454), False, 'import json\n')]
|
"""playnetmano_rm common internal object model"""
from oslo_utils import versionutils
from oslo_versionedobjects import base
from playnetmano_rm import objects
VersionedObjectDictCompat = base.VersionedObjectDictCompat
class Playnetmano_rmObject(base.VersionedObject):
"""Base class for playnetmano_rm objects.
This is the base class for all objects that can be remoted or instantiated
via RPC. Simply defining a sub-class of this class would make it remotely
instantiatable. Objects should implement the "get" class method and the
"save" object method.
"""
OBJ_PROJECT_NAMESPACE = 'playnetmano_rm'
VERSION = '1.0'
@staticmethod
def _from_db_object(context, obj, db_obj):
if db_obj is None:
return None
for field in obj.fields:
if field == 'metadata':
obj['metadata'] = db_obj['meta_data']
else:
obj[field] = db_obj[field]
obj._context = context
obj.obj_reset_changes()
return obj
class Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry):
def registration_hook(self, cls, index):
"""Callback for object registration.
When an object is registered, this function will be called for
maintaining playnetmano_rm.objects.$OBJECT as the highest-versioned
implementation of a given object.
"""
version = versionutils.convert_version_to_tuple(cls.VERSION)
if not hasattr(objects, cls.obj_name()):
setattr(objects, cls.obj_name(), cls)
else:
curr_version = versionutils.convert_version_to_tuple(
getattr(objects, cls.obj_name()).VERSION)
if version >= curr_version:
setattr(objects, cls.obj_name(), cls)
|
[
"oslo_utils.versionutils.convert_version_to_tuple"
] |
[((1417, 1467), 'oslo_utils.versionutils.convert_version_to_tuple', 'versionutils.convert_version_to_tuple', (['cls.VERSION'], {}), '(cls.VERSION)\n', (1454, 1467), False, 'from oslo_utils import versionutils\n')]
|
import random
import datetime
import dateparser
from faker import Faker
from .base import Filth
class DateOfBirthFilth(Filth):
type = 'date_of_birth'
min_age_years = 18
max_age_years = 100
@staticmethod
def generate(faker: Faker) -> str:
"""Generates an example of this ``Filth`` type, usually using the faker python library.
:param faker: The ``Faker`` class from the ``faker`` library
:type faker: Faker
:return: An example of this ``Filth``
:rtype: str
"""
formats = [
'%c', # Tue Aug 16 21:30:00 1988 (en_US); locale dependant
'%x', # 08/16/1988 (en_US); locale dependant
'%a %d %b %Y', # Sun 19 Jan 1999
'%A %d %B %Y', # Sunday 19 January 1999
'%d-%m-%Y', # 15-01-1999
'%A %dth, %B, %Y', # Monday 08th, January, 1973
]
return faker.date_of_birth().strftime(random.choice(formats))
def is_valid(self) -> bool:
"""Check to see if the found filth is valid."""
found_date = dateparser.parse(self.text)
if found_date is None:
return False
years_since_identified_date = datetime.date.today().year - found_date.year
return DateOfBirthFilth.min_age_years <= years_since_identified_date <= DateOfBirthFilth.max_age_years
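# Usage sketch (not part of the original module): generate a fake
# date-of-birth string with the faker library, as DateOfBirthFilth.generate
# expects.
if __name__ == "__main__":
    fake = Faker()
    print(DateOfBirthFilth.generate(fake))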
|
[
"dateparser.parse",
"datetime.date.today",
"random.choice"
] |
[((1071, 1098), 'dateparser.parse', 'dateparser.parse', (['self.text'], {}), '(self.text)\n', (1087, 1098), False, 'import dateparser\n'), ((937, 959), 'random.choice', 'random.choice', (['formats'], {}), '(formats)\n', (950, 959), False, 'import random\n'), ((1193, 1214), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (1212, 1214), False, 'import datetime\n')]
|
import psycopg2
from psycopg2.extensions import connection, cursor
from psycopg2.extras import DictCursor
from typing import Dict
from src.log.logger import logger
from contextlib import contextmanager
@contextmanager
def get_connection(params: Dict[str, str]) -> connection:
"""
Get a connection using a context manager.
:param params: database connection parameters dictionary
:return: psycopg2 connection object
"""
try:
conn = psycopg2.connect(**params)
yield conn
except Exception as e:
logger.error(f"{str(type(e))} during database operation: {e}")
raise e
finally:
# Close database connection if defined.
logger.debug("Closing database connection")
try:
conn.close()
except UnboundLocalError:
pass
@contextmanager
def get_cursor(params: Dict[str, str], commit: bool = True) -> cursor:
"""
Get a connection cursor using a context manager.
:param params: database connection parameters dictionary
:param commit: boolean determining whether changes should be committed
:return: psycopg2 cursor object
"""
with get_connection(params) as conn:
# Acquire cursor from connection
logger.debug("Obtaining database cursor.")
cur = conn.cursor(cursor_factory=DictCursor)
try:
yield cur
if commit:
conn.commit()
finally:
# Close cursor
logger.debug("Closing database cursor.")
cur.close()
def get_sensors_data(cur: psycopg2.extensions.cursor):
"""
Fetches data from sensors' tables
TODO -> parallelize queries
:param cur: database cursor
    :return: dict mapping each sensor table to its fetched rows
    """
    # cursor.execute() returns None in psycopg2, so run each query and then
    # fetch its results before moving on to the next table.
    data = {}
    cur.execute("SELECT * FROM sensors.temperature")
    data["temperature"] = cur.fetchall()
    cur.execute("SELECT * FROM sensors.humidity")
    data["humidity"] = cur.fetchall()
    cur.execute("SELECT * FROM sensors.light")
    data["light"] = cur.fetchall()
return data
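# Usage sketch (not part of the original module): the connection parameters
# below are placeholders; get_cursor commits and closes everything on exit.
if __name__ == "__main__":
    params = {"host": "localhost", "dbname": "sensors", "user": "user", "password": "secret"}
    with get_cursor(params) as cur:
        cur.execute("SELECT 1")
        print(cur.fetchone())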
|
[
"src.log.logger.logger.debug",
"psycopg2.connect"
] |
[((467, 493), 'psycopg2.connect', 'psycopg2.connect', ([], {}), '(**params)\n', (483, 493), False, 'import psycopg2\n'), ((696, 739), 'src.log.logger.logger.debug', 'logger.debug', (['"""Closing database connection"""'], {}), "('Closing database connection')\n", (708, 739), False, 'from src.log.logger import logger\n'), ((1251, 1293), 'src.log.logger.logger.debug', 'logger.debug', (['"""Obtaining database cursor."""'], {}), "('Obtaining database cursor.')\n", (1263, 1293), False, 'from src.log.logger import logger\n'), ((1491, 1531), 'src.log.logger.logger.debug', 'logger.debug', (['"""Closing database cursor."""'], {}), "('Closing database cursor.')\n", (1503, 1531), False, 'from src.log.logger import logger\n')]
|
import unittest
from test.test01 import soma
class TesteSoma(unittest.TestCase):
def test_retorno_soma_10_10(self):
        self.assertEqual(soma(10, 10), 20)
|
[
"test.test01.soma"
] |
[((147, 159), 'test.test01.soma', 'soma', (['(10)', '(10)'], {}), '(10, 10)\n', (151, 159), False, 'from test.test01 import soma\n')]
|
import pytest
from data_structures.heap import Heap
@pytest.fixture
def base_heap():
heap = Heap()
heap.push(1)
heap.push(2)
heap.push(3)
heap.push(4)
heap.push(5)
return heap
def test_heap_init():
basic_heap = Heap()
init_list_heap = Heap([9, 8, 7, 5, 1, 2])
assert isinstance(basic_heap, Heap)
assert isinstance(init_list_heap, Heap)
def test_heap_push():
heap = Heap()
heap.push(2)
heap.push(3)
heap.push(1)
def test_heap_pop(base_heap):
assert base_heap.pop() == 1
assert base_heap.pop() == 2
def test_heap_peek(base_heap):
assert base_heap.peek() == 1
def test_heap_empty():
heap = Heap()
assert heap.empty()
heap.push(1)
assert not heap.empty()
def test_heapify_up_and_down(base_heap):
base_heap.pop()
base_heap.pop()
base_heap.push(8)
base_heap.push(1)
base_heap.push(0)
base_heap.push(9)
assert base_heap.get_heap() == [0, 3, 1, 8, 4, 5, 9]
def test_heapify():
heap = Heap([8, 9, 5, 1, 3, 2, 0, 6])
assert heap.get_heap() == [0, 1, 2, 6, 3, 8, 5, 9]
|
[
"data_structures.heap.Heap"
] |
[((98, 104), 'data_structures.heap.Heap', 'Heap', ([], {}), '()\n', (102, 104), False, 'from data_structures.heap import Heap\n'), ((247, 253), 'data_structures.heap.Heap', 'Heap', ([], {}), '()\n', (251, 253), False, 'from data_structures.heap import Heap\n'), ((275, 299), 'data_structures.heap.Heap', 'Heap', (['[9, 8, 7, 5, 1, 2]'], {}), '([9, 8, 7, 5, 1, 2])\n', (279, 299), False, 'from data_structures.heap import Heap\n'), ((419, 425), 'data_structures.heap.Heap', 'Heap', ([], {}), '()\n', (423, 425), False, 'from data_structures.heap import Heap\n'), ((675, 681), 'data_structures.heap.Heap', 'Heap', ([], {}), '()\n', (679, 681), False, 'from data_structures.heap import Heap\n'), ((1012, 1042), 'data_structures.heap.Heap', 'Heap', (['[8, 9, 5, 1, 3, 2, 0, 6]'], {}), '([8, 9, 5, 1, 3, 2, 0, 6])\n', (1016, 1042), False, 'from data_structures.heap import Heap\n')]
|
import os
import json
import boto3
from botocore.exceptions import ClientError
from cryptography.fernet import Fernet
dynamodb = boto3.resource('dynamodb')
s3 = boto3.resource('s3')
vault_table = dynamodb.Table(os.environ.get('VAULT_TABLE_NAME'))
vault_table_partition_key = os.environ.get('VAULT_TABLE_KEY')
vault_table_sort_key = os.environ.get('VAULT_SORT_KEY')
bucket_name = os.environ.get('S3_BUCKET_NAME')
key_file_name = os.environ.get('ENCRYPTION_KEY')
key_file_destination = "/tmp/" + key_file_name
s3.meta.client.download_file(Bucket=bucket_name, Key=key_file_name, Filename=key_file_destination)
key = open(key_file_destination, "rb").read()
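# Decrypt a Fernet-encrypted attribute value and return it as a UTF-8 string.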
def _decrypt_item_value(value):
f = Fernet(key)
decrypted_value = f.decrypt(value)
return decrypted_value.decode("utf-8")
def _get_vault_item(email, name):
try:
response = vault_table.get_item(
Key={
vault_table_partition_key: email,
vault_table_sort_key: name
}
)
except Exception as e:
print(e)
raise
else:
return response['Item']
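# Lambda entry point: read email/name from the request path, fetch the vault
# item, decrypt its value, and return it as a JSON response.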
def lambda_handler(event, context):
email = event['pathParameters']['email']
name = event['pathParameters']['name']
try:
response = _get_vault_item(email, name)
del response['email']
print(f"RESPONSE: {response}")
response['value'] = json.loads(_decrypt_item_value(response['value'].value))
return {
"statusCode": 200,
"headers": {
"Content-Type": "application/json"
},
"body": json.dumps(response)
}
except Exception as e:
return {
"statusCode": 500,
"headers": {
"Content-Type": "application/json"
},
"body": str(e)
}
|
[
"os.environ.get",
"cryptography.fernet.Fernet",
"boto3.resource",
"json.dumps"
] |
[((130, 156), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {}), "('dynamodb')\n", (144, 156), False, 'import boto3\n'), ((162, 182), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {}), "('s3')\n", (176, 182), False, 'import boto3\n'), ((276, 309), 'os.environ.get', 'os.environ.get', (['"""VAULT_TABLE_KEY"""'], {}), "('VAULT_TABLE_KEY')\n", (290, 309), False, 'import os\n'), ((333, 365), 'os.environ.get', 'os.environ.get', (['"""VAULT_SORT_KEY"""'], {}), "('VAULT_SORT_KEY')\n", (347, 365), False, 'import os\n'), ((380, 412), 'os.environ.get', 'os.environ.get', (['"""S3_BUCKET_NAME"""'], {}), "('S3_BUCKET_NAME')\n", (394, 412), False, 'import os\n'), ((429, 461), 'os.environ.get', 'os.environ.get', (['"""ENCRYPTION_KEY"""'], {}), "('ENCRYPTION_KEY')\n", (443, 461), False, 'import os\n'), ((212, 246), 'os.environ.get', 'os.environ.get', (['"""VAULT_TABLE_NAME"""'], {}), "('VAULT_TABLE_NAME')\n", (226, 246), False, 'import os\n'), ((695, 706), 'cryptography.fernet.Fernet', 'Fernet', (['key'], {}), '(key)\n', (701, 706), False, 'from cryptography.fernet import Fernet\n'), ((1616, 1636), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (1626, 1636), False, 'import json\n')]
|
import errno
import glob
import os
import re
import shutil
from pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage, PythonFile, GenericCall, \
GitIdentifier, RepositoryIdentifier, PipIdentifier, Function
from pypadre.pod.backend.i_padre_backend import IPadreBackend
from pypadre.pod.repository.i_repository import ICodeRepository
from pypadre.pod.repository.local.file.generic.i_file_repository import File
from pypadre.pod.repository.local.file.generic.i_git_repository import IGitRepository
from pypadre.pod.repository.local.file.project_code_repository import CODE_FILE
from pypadre.pod.repository.serializer.serialiser import JSonSerializer
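# Copy a directory tree; if src is a single file rather than a directory, fall
# back to a plain file copy.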
def copy(src, dest):
try:
shutil.copytree(src, dest)
except OSError as e:
# If the error was caused because the source wasn't a directory
if e.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
print('Directory not copied. Error: %s' % e)
NAME = "code"
META_FILE = File("metadata.json", JSonSerializer)
# CODE_FILE = File("code.bin", DillSerializer)
class CodeFileRepository(IGitRepository, ICodeRepository):
@staticmethod
def placeholder():
return '{CODE_ID}'
def __init__(self, backend: IPadreBackend):
super().__init__(root_dir=os.path.join(backend.root_dir, NAME), backend=backend)
def _get_by_dir(self, directory):
path = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), directory))[0]
metadata = self.get_file(path, META_FILE)
return self._create_object(metadata, directory)
def _create_object(self, metadata, directory, root_dir=None):
identifier_type = metadata.get(CodeMixin.REPOSITORY_TYPE)
identifier_data = metadata.get(CodeMixin.IDENTIFIER)
identifier = None
if identifier_type == RepositoryIdentifier._RepositoryType.pip:
version = identifier_data.get(PipIdentifier.VERSION)
pip_package = identifier_data.get(PipIdentifier.PIP_PACKAGE)
identifier = PipIdentifier(version=version, pip_package=pip_package)
if identifier_type == RepositoryIdentifier._RepositoryType.git:
path = identifier_data.get(GitIdentifier.PATH)
git_hash = identifier_data.get(GitIdentifier.GIT_HASH)
identifier = GitIdentifier(path=path, git_hash=git_hash)
if identifier is None:
raise ValueError(
"Identifier is not present in the meta information of code object in directory " + directory)
if metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.function):
if root_dir is not None:
fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir),
os.path.abspath(os.path.join(directory, '..', 'function'))))[0]
else:
fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir),
os.path.abspath(os.path.join(directory, '..', 'function'))))[0]
fn = self.get_file(fn_dir, CODE_FILE)
code = Function(fn=fn, metadata=metadata, repository_identifier=identifier)
elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.package):
code = PythonPackage(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonPackage.PACKAGE), variable=metadata.get(PythonPackage.VARIABLE), repository_identifier=identifier)
elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.python_file):
code = PythonFile(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonFile.PACKAGE), variable=metadata.get(PythonFile.VARIABLE), repository_identifier=identifier)
elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.file):
code = GenericCall(metadata=metadata, cmd=metadata.get(GenericCall.CMD), repository_identifier=identifier)
else:
raise NotImplementedError(metadata.get(CodeMixin.CODE_TYPE) + " couldn't load from type.")
return code
def to_folder_name(self, code):
# TODO only name for folder okay? (maybe a uuid, a digest of a config or similar?)
return str(code.id)
def list(self, search, offset=0, size=100):
if search is not None and "name" in search:
# Shortcut because we know name is the folder name. We don't have to search in metadata.json
name = search.pop("name")
search[self.FOLDER_SEARCH] = re.escape(name)
return super().list(search, offset, size)
def _put(self, obj, *args, directory: str, **kwargs):
code = obj
if isinstance(code, Function):
# TODO fn repository
if not os.path.exists(os.path.abspath(os.path.join(directory, '..', 'function'))):
os.mkdir(os.path.abspath(os.path.join(directory, '..', 'function')))
self.write_file(os.path.abspath(os.path.join(directory, '..', 'function')), CODE_FILE, code.fn, mode="wb")
self.write_file(directory, META_FILE, code.metadata)
# if store_code:
# if isinstance(code, CodeFile):
# code_dir = os.path.join(directory, "code")
# if code.file is not None:
# if not os.path.exists(code_dir):
# os.mkdir(code_dir)
# copy(os.path.join(code.path, code.file), os.path.join(directory, "code", code.file))
# else:
# copy(code.path, code_dir)
# def get_code_hash(self, obj=None, path=None, init_repo=False, **kwargs):
#
# code_hash = git_hash(path=path)
# if code_hash is None and init_repo is True:
# # if there is no repository present in the path, but the user wants to create a repo then
# # Create a repo
# # Add any untracked files and commit those files
# # Get the code_hash of the repo
# # TODO give git an id and hold some reference in workspace???
# dir_path = os.path.dirname(path)
# create_repo(dir_path)
# add_and_commit(dir_path)
# code_hash = git_hash(path=dir_path)
#
# if obj is not None:
# obj.set_hash(code_hash)
|
[
"re.escape",
"pypadre.core.model.code.code_mixin.GitIdentifier",
"pypadre.pod.repository.local.file.generic.i_file_repository.File",
"shutil.copy",
"shutil.copytree",
"pypadre.core.model.code.code_mixin.Function",
"os.path.join",
"pypadre.core.model.code.code_mixin.PipIdentifier"
] |
[((1002, 1039), 'pypadre.pod.repository.local.file.generic.i_file_repository.File', 'File', (['"""metadata.json"""', 'JSonSerializer'], {}), "('metadata.json', JSonSerializer)\n", (1006, 1039), False, 'from pypadre.pod.repository.local.file.generic.i_file_repository import File\n'), ((706, 732), 'shutil.copytree', 'shutil.copytree', (['src', 'dest'], {}), '(src, dest)\n', (721, 732), False, 'import shutil\n'), ((2069, 2124), 'pypadre.core.model.code.code_mixin.PipIdentifier', 'PipIdentifier', ([], {'version': 'version', 'pip_package': 'pip_package'}), '(version=version, pip_package=pip_package)\n', (2082, 2124), False, 'from pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage, PythonFile, GenericCall, GitIdentifier, RepositoryIdentifier, PipIdentifier, Function\n'), ((2349, 2392), 'pypadre.core.model.code.code_mixin.GitIdentifier', 'GitIdentifier', ([], {'path': 'path', 'git_hash': 'git_hash'}), '(path=path, git_hash=git_hash)\n', (2362, 2392), False, 'from pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage, PythonFile, GenericCall, GitIdentifier, RepositoryIdentifier, PipIdentifier, Function\n'), ((3180, 3248), 'pypadre.core.model.code.code_mixin.Function', 'Function', ([], {'fn': 'fn', 'metadata': 'metadata', 'repository_identifier': 'identifier'}), '(fn=fn, metadata=metadata, repository_identifier=identifier)\n', (3188, 3248), False, 'from pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage, PythonFile, GenericCall, GitIdentifier, RepositoryIdentifier, PipIdentifier, Function\n'), ((4622, 4637), 're.escape', 're.escape', (['name'], {}), '(name)\n', (4631, 4637), False, 'import re\n'), ((879, 901), 'shutil.copy', 'shutil.copy', (['src', 'dest'], {}), '(src, dest)\n', (890, 901), False, 'import shutil\n'), ((1300, 1336), 'os.path.join', 'os.path.join', (['backend.root_dir', 'NAME'], {}), '(backend.root_dir, NAME)\n', (1312, 1336), False, 'import os\n'), ((5063, 5104), 'os.path.join', 'os.path.join', (['directory', '""".."""', '"""function"""'], {}), "(directory, '..', 'function')\n", (5075, 5104), False, 'import os\n'), ((4889, 4930), 'os.path.join', 'os.path.join', (['directory', '""".."""', '"""function"""'], {}), "(directory, '..', 'function')\n", (4901, 4930), False, 'import os\n'), ((4975, 5016), 'os.path.join', 'os.path.join', (['directory', '""".."""', '"""function"""'], {}), "(directory, '..', 'function')\n", (4987, 5016), False, 'import os\n'), ((2837, 2878), 'os.path.join', 'os.path.join', (['directory', '""".."""', '"""function"""'], {}), "(directory, '..', 'function')\n", (2849, 2878), False, 'import os\n'), ((3063, 3104), 'os.path.join', 'os.path.join', (['directory', '""".."""', '"""function"""'], {}), "(directory, '..', 'function')\n", (3075, 3104), False, 'import os\n')]
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 8 20:54:26 2018
@author: andrea
"""
import sys
import json
from xml.sax import make_parser
from urllib.request import urlretrieve
from smallsmilhandler import SmallSMILHandler
class KaraokeLocal(SmallSMILHandler):
    def __init__(self, fichero):
        # Initialize the parser and build the tag list
        parser = make_parser()  # create the SAX parser
        cHandler = SmallSMILHandler()  # create the content handler
        parser.setContentHandler(cHandler)  # hand the handler to the parser
        parser.parse(open(fichero))
        self.lista = cHandler.get_tags()
def __str__(self):
""" Método para crear la lista de etiquetas """
linea = " "
for elem in self.lista:
linea = linea + elem[0]
atributos = elem[1].items()
for nombre, valor in atributos:
if valor != '':
linea = linea + '\t' + nombre + '=' + '"' + valor + '"'
print(linea)
    def to_json(self, fich, fich_json=None):
        # Write the tag list to a file in JSON format; derive the output
        # name from the input file when none is given.
        if fich_json is None:
            fich_json = fich.split('.')[0] + '.json'
        with open(fich_json, 'w') as out_file:
            out_file.write(json.dumps(self.lista))
    def do_local(self):
        # Walk the tag list and download remote resources locally
        for diccs in self.lista:
            atrib = diccs[1]
            for atributos, posi in atrib.items():
                if atributos == "src" and posi[0:7] == "http://":
                    atrib_Nuevo = posi.split('/')[-1]
                    urlretrieve(posi, atrib_Nuevo)
                    print("Downloading %s ..." % posi)
if __name__ == "__main__":
if len(sys.argv) != 2:
sys.exit("Usage: python3 karaoke.py file.smil")
try:
obj = open(sys.argv[1])
except (ValueError, IndexError, FileNotFoundError):
sys.exit("Usage: python3 karaoke.py file.smil")
fichero = sys.argv[1]
fich_json = sys.argv[1].replace(".smil", ".json")
    obj = KaraokeLocal(fichero)
    obj.__str__()
obj.to_json(fich_json)
obj.do_local()
obj.to_json(fich_json, 'local.json')
obj.__str__()
|
[
"json.dumps",
"urllib.request.urlretrieve",
"smallsmilhandler.SmallSMILHandler",
"sys.exit",
"xml.sax.make_parser"
] |
[((377, 390), 'xml.sax.make_parser', 'make_parser', ([], {}), '()\n', (388, 390), False, 'from xml.sax import make_parser\n'), ((425, 443), 'smallsmilhandler.SmallSMILHandler', 'SmallSMILHandler', ([], {}), '()\n', (441, 443), False, 'from smallsmilhandler import SmallSMILHandler\n'), ((1112, 1134), 'json.dumps', 'json.dumps', (['self.lista'], {}), '(self.lista)\n', (1122, 1134), False, 'import json\n'), ((1772, 1819), 'sys.exit', 'sys.exit', (['"""Usage: python3 karaoke.py file.smil"""'], {}), "('Usage: python3 karaoke.py file.smil')\n", (1780, 1819), False, 'import sys\n'), ((1925, 1972), 'sys.exit', 'sys.exit', (['"""Usage: python3 karaoke.py file.smil"""'], {}), "('Usage: python3 karaoke.py file.smil')\n", (1933, 1972), False, 'import sys\n'), ((1618, 1648), 'urllib.request.urlretrieve', 'urlretrieve', (['posi', 'atrib_Nuevo'], {}), '(posi, atrib_Nuevo)\n', (1629, 1648), False, 'from urllib.request import urlretrieve\n')]
|
import os
import sys
import tempfile
from datetime import datetime
from pprint import pprint
import ray
from ray import tune
from ray.rllib.agents import Trainer
from ray.tune.logger import UnifiedLogger
from ray.tune.result import DEFAULT_RESULTS_DIR
# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
from command_line_tools.run_tools import setup_run
from scenario.trajectory_tracking.experiment.experiment_common import setup_environment
from trainer.coordinated_dps_trainer import CoordinatedDPSTrainer
from trainer.es_actual import ESActualTrainer
from trainer.es_co_trainer import ESCOTrainer
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
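# Tune trainable: build the environment/controller pair from the rllib config,
# then train for up to max_iters iterations, reporting results and saving a
# checkpoint every checkpoint_frequency iterations.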
def train(rllib_config, reporter):
ego_starting_distance = 600.0
environment, trainer = make_environment_and_controller(None, rllib_config)
# trainer = make_trainer(config)
checkpoint_frequency = 1
max_iters = int(100e3)
# def set_starting_distance(ego_starting_distance):
# trainer.workers.foreach_worker(
# lambda ev: ev.foreach_env(
# lambda env: env.process.set_starting_distance(ego_starting_distance)))
#
# # def set_starting_distance(ego_starting_distance):
# # for worker in trainer._workers:
# # print(worker)
# # worker.env.process.set_starting_distance(ego_starting_distance)
#
# set_starting_distance(ego_starting_distance)
for i in range(max_iters):
result = trainer.train()
reporter(**result)
if i % checkpoint_frequency == 0:
# checkpoint_path = trainer.logdir
# checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'experiment'))
# checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'name'))
# print('ld:', trainer.logdir, 'n:', get_setting(config, 'name'), 'c', get_setting(config, 'checkpoint'),
# 'p',
# checkpoint_path)
# trainer.save(checkpoint_path)
checkpoint_path = trainer.save()
print('saved to checkpoint ', checkpoint_path)
def on_episode_end(info):
# print(info)
episode = info['episode']
# print(info)
# trainer = info['trainer']
base_env = info['env']
# episode.custom_metrics['ego_starting_distance'] = base_env.get_unwrapped()[0].process.ego_starting_distance
print('begin trainer')
default_config = common_default_config
ray_num_cpus = None
if len(sys.argv) >= 4 and sys.argv[-3] == 'ray':
redis_password = sys.argv[-2]
ray_num_cpus = int(sys.argv[-1])
ray.init(address=os.environ["ip_head"], _redis_password=redis_password)
sys.argv = sys.argv[0:-3]
# del sys.argv[-1:-4]
print('ray configuration: ', redis_password, ray_num_cpus, 'argv: ', sys.argv)
else:
if not ray.is_initialized():
ray.init()
print('setup config')
config, run_prefix = setup_run(default_config)
# config, this_env = setup_environment_config(config)
print("Nodes in the Ray cluster:")
pprint(ray.nodes())
pprint(ray.cluster_resources())
if ray_num_cpus is not None:
config['rllib']['num_workers'] = ray_num_cpus - 1
rllib_config = make_rllib_config(config)
print('running tune')
tune.run(
train,
name=config['name'],
trial_name_creator=lambda trial: config['name'],
config=rllib_config,
# local_dir='~/ray_results'
# resources_per_trial={'gpu':1},
)
print('shutting down')
ray.shutdown()
print('done')
|
[
"ray.init",
"command_line_tools.run_tools.setup_run",
"ray.nodes",
"ray.tune.run",
"ray.is_initialized",
"ray.shutdown",
"ray.cluster_resources"
] |
[((2877, 2902), 'command_line_tools.run_tools.setup_run', 'setup_run', (['default_config'], {}), '(default_config)\n', (2886, 2902), False, 'from command_line_tools.run_tools import setup_run\n'), ((3195, 3305), 'ray.tune.run', 'tune.run', (['train'], {'name': "config['name']", 'trial_name_creator': "(lambda trial: config['name'])", 'config': 'rllib_config'}), "(train, name=config['name'], trial_name_creator=lambda trial:\n config['name'], config=rllib_config)\n", (3203, 3305), False, 'from ray import tune\n'), ((3418, 3432), 'ray.shutdown', 'ray.shutdown', ([], {}), '()\n', (3430, 3432), False, 'import ray\n'), ((2563, 2634), 'ray.init', 'ray.init', ([], {'address': "os.environ['ip_head']", '_redis_password': 'redis_password'}), "(address=os.environ['ip_head'], _redis_password=redis_password)\n", (2571, 2634), False, 'import ray\n'), ((3000, 3011), 'ray.nodes', 'ray.nodes', ([], {}), '()\n', (3009, 3011), False, 'import ray\n'), ((3020, 3043), 'ray.cluster_resources', 'ray.cluster_resources', ([], {}), '()\n', (3041, 3043), False, 'import ray\n'), ((2791, 2811), 'ray.is_initialized', 'ray.is_initialized', ([], {}), '()\n', (2809, 2811), False, 'import ray\n'), ((2821, 2831), 'ray.init', 'ray.init', ([], {}), '()\n', (2829, 2831), False, 'import ray\n')]
|
"""Protocol-related functions."""
# Copyright 2020-2021 Blue Brain Project / EPFL
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
from bluepyopt import ephys
from emodelrunner.protocols import (
RampProtocol,
RampThresholdProtocol,
StepProtocol,
StepThresholdProtocol,
RatSSCxThresholdDetectionProtocol,
RatSSCxRinHoldcurrentProtocol,
RatSSCxMainProtocol,
SweepProtocolCustom,
)
from emodelrunner.recordings import RecordingCustom
from emodelrunner.features import define_efeatures
from emodelrunner.synapses.stimuli import (
NrnNetStimStimulusCustom,
NrnVecStimStimulusCustom,
)
logger = logging.getLogger(__name__)
soma_loc = ephys.locations.NrnSeclistCompLocation(
name="soma", seclist_name="somatic", sec_index=0, comp_x=0.5
)
seclist_to_sec = {
"somatic": "soma",
"apical": "apic",
"axonal": "axon",
"myelinated": "myelin",
}
def read_ramp_threshold_protocol(protocol_name, protocol_definition, recordings):
"""Read ramp threshold protocol from definition.
Args:
protocol_name (str): name of the protocol
protocol_definition (dict): contains the protocol configuration data
recordings (bluepyopt.ephys.recordings.CompRecording):
recordings to use with this protocol
Returns:
RampThresholdProtocol: Ramp Protocol depending on cell's threshold current
"""
ramp_definition = protocol_definition["stimuli"]["ramp"]
ramp_stimulus = ephys.stimuli.NrnRampPulse(
ramp_delay=ramp_definition["ramp_delay"],
ramp_duration=ramp_definition["ramp_duration"],
location=soma_loc,
total_duration=ramp_definition["totduration"],
)
holding_stimulus = ephys.stimuli.NrnSquarePulse(
step_delay=0.0,
step_duration=ramp_definition["totduration"],
location=soma_loc,
total_duration=ramp_definition["totduration"],
)
return RampThresholdProtocol(
name=protocol_name,
ramp_stimulus=ramp_stimulus,
holding_stimulus=holding_stimulus,
thresh_perc_start=ramp_definition["thresh_perc_start"],
thresh_perc_end=ramp_definition["thresh_perc_end"],
recordings=recordings,
)
def read_ramp_protocol(protocol_name, protocol_definition, recordings):
"""Read ramp protocol from definition.
Args:
protocol_name (str): name of the protocol
protocol_definition (dict): contains the protocol configuration data
recordings (bluepyopt.ephys.recordings.CompRecording):
recordings to use with this protocol
Returns:
RampProtocol: Ramp Protocol
"""
ramp_definition = protocol_definition["stimuli"]["ramp"]
ramp_stimulus = ephys.stimuli.NrnRampPulse(
ramp_amplitude_start=ramp_definition["ramp_amplitude_start"],
ramp_amplitude_end=ramp_definition["ramp_amplitude_end"],
ramp_delay=ramp_definition["ramp_delay"],
ramp_duration=ramp_definition["ramp_duration"],
location=soma_loc,
total_duration=ramp_definition["totduration"],
)
if "holding" in protocol_definition["stimuli"]:
holding_definition = protocol_definition["stimuli"]["holding"]
holding_stimulus = ephys.stimuli.NrnSquarePulse(
step_amplitude=holding_definition["amp"],
step_delay=holding_definition["delay"],
step_duration=holding_definition["duration"],
location=soma_loc,
total_duration=holding_definition["totduration"],
)
else:
holding_stimulus = None
return RampProtocol(
name=protocol_name,
ramp_stimulus=ramp_stimulus,
holding_stimulus=holding_stimulus,
recordings=recordings,
)
def read_step_protocol(
protocol_name, protocol_definition, recordings, stochkv_det=None
):
"""Read step protocol from definition.
Args:
protocol_name (str): name of the protocol
protocol_definition (dict): contains the protocol configuration data
recordings (bluepyopt.ephys.recordings.CompRecording):
recordings to use with this protocol
stochkv_det (bool): set if stochastic or deterministic
Returns:
StepProtocol: Step Protocol
"""
# pylint: disable=undefined-loop-variable
step_definitions = protocol_definition["stimuli"]["step"]
if isinstance(step_definitions, dict):
step_definitions = [step_definitions]
step_stimuli = []
for step_definition in step_definitions:
step_stim = ephys.stimuli.NrnSquarePulse(
step_amplitude=step_definition["amp"],
step_delay=step_definition["delay"],
step_duration=step_definition["duration"],
location=soma_loc,
total_duration=step_definition["totduration"],
)
step_stimuli.append(step_stim)
if "holding" in protocol_definition["stimuli"]:
holding_definition = protocol_definition["stimuli"]["holding"]
holding_stimulus = ephys.stimuli.NrnSquarePulse(
step_amplitude=holding_definition["amp"],
step_delay=holding_definition["delay"],
step_duration=holding_definition["duration"],
location=soma_loc,
total_duration=holding_definition["totduration"],
)
else:
holding_stimulus = None
if stochkv_det is None:
stochkv_det = (
step_definition["stochkv_det"] if "stochkv_det" in step_definition else None
)
return StepProtocol(
name=protocol_name,
step_stimuli=step_stimuli,
holding_stimulus=holding_stimulus,
recordings=recordings,
stochkv_det=stochkv_det,
)
def read_step_threshold_protocol(
protocol_name, protocol_definition, recordings, stochkv_det=None
):
"""Read step threshold protocol from definition.
Args:
protocol_name (str): name of the protocol
protocol_definition (dict): contains the protocol configuration data
recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol
stochkv_det (bool): set if stochastic or deterministic
Returns:
        StepThresholdProtocol: Step Protocol depending on cell's threshold current
"""
# pylint: disable=undefined-loop-variable
step_definitions = protocol_definition["stimuli"]["step"]
if isinstance(step_definitions, dict):
step_definitions = [step_definitions]
step_stimuli = []
for step_definition in step_definitions:
step_stim = ephys.stimuli.NrnSquarePulse(
step_delay=step_definition["delay"],
step_duration=step_definition["duration"],
location=soma_loc,
total_duration=step_definition["totduration"],
)
step_stimuli.append(step_stim)
holding_stimulus = ephys.stimuli.NrnSquarePulse(
step_delay=0.0,
step_duration=step_definition["totduration"],
location=soma_loc,
total_duration=step_definition["totduration"],
)
if stochkv_det is None:
stochkv_det = (
step_definition["stochkv_det"] if "stochkv_det" in step_definition else None
)
return StepThresholdProtocol(
name=protocol_name,
step_stimuli=step_stimuli,
holding_stimulus=holding_stimulus,
thresh_perc=step_definition["thresh_perc"],
recordings=recordings,
stochkv_det=stochkv_det,
)
def read_vecstim_protocol(protocol_name, protocol_definition, recordings, syn_locs):
"""Read Vecstim protocol from definitions.
Args:
protocol_name (str): name of the protocol
protocol_definition (dict): dict containing the protocol data
recordings (bluepyopt.ephys.recordings.CompRecording):
recordings to use with this protocol
syn_locs (list of ephys.locations.NrnPointProcessLocation):
locations of the synapses
Returns:
emodelrunner.protocols.SweepProtocolCustom:
a protocol containing Vecstim stimulus activating synapses
"""
stim_definition = protocol_definition["stimuli"]
if stim_definition["vecstim_random"] not in [
"python",
"neuron",
]:
logger.warning(
"vecstim random not set to 'python' nor to 'neuron' in config file."
"vecstim random will be re-set to 'python'."
)
stim_definition["vecstim_random"] = "python"
stim = NrnVecStimStimulusCustom(
syn_locs,
stim_definition["syn_start"],
stim_definition["syn_stop"],
stim_definition["syn_stim_seed"],
stim_definition["vecstim_random"],
)
return SweepProtocolCustom(protocol_name, [stim], recordings)
def read_netstim_protocol(protocol_name, protocol_definition, recordings, syn_locs):
"""Read Netstim protocol from definitions.
Args:
protocol_name (str): name of the protocol
protocol_definition (dict): dict containing the protocol data
recordings (bluepyopt.ephys.recordings.CompRecording):
recordings to use with this protocol
syn_locs (list of ephys.locations.NrnPointProcessLocation):
locations of the synapses
Returns:
emodelrunner.protocols.SweepProtocolCustom:
a protocol containing Netstim stimulus activating synapses
"""
stim_definition = protocol_definition["stimuli"]
stim = NrnNetStimStimulusCustom(
syn_locs,
stim_definition["syn_stop"],
stim_definition["syn_nmb_of_spikes"],
stim_definition["syn_interval"],
stim_definition["syn_start"],
stim_definition["syn_noise"],
)
return SweepProtocolCustom(protocol_name, [stim], recordings)
def get_extra_recording_location(recording_definition, apical_point_isec=-1):
"""Get the location for the extra recording.
Args:
recording_definition (dict): contains the extra recording configuration data
apical_point_isec (int): apical point section index.
Should be given if the recording definition "type" is "somadistanceapic"
Raises:
Exception: if the recording definition "type" is "somadistanceapic" and
apical_point_isec is -1.
Exception: if the 'type' in the recording definition is neither
"somadistance", nor "somadistanceapic", nor "nrnseclistcomp"
Returns:
location of the extra recording
"""
if recording_definition["type"] == "somadistance":
location = ephys.locations.NrnSomaDistanceCompLocation(
name=recording_definition["name"],
soma_distance=recording_definition["somadistance"],
seclist_name=recording_definition["seclist_name"],
)
elif recording_definition["type"] == "somadistanceapic":
if apical_point_isec == -1:
            raise Exception(
                "Cannot record at a given distance from apical point "
                f"if apical_point_isec is {apical_point_isec}."
            )
location = ephys.locations.NrnSecSomaDistanceCompLocation(
name=recording_definition["name"],
soma_distance=recording_definition["somadistance"],
sec_name=seclist_to_sec[recording_definition["seclist_name"]],
sec_index=apical_point_isec,
)
elif recording_definition["type"] == "nrnseclistcomp":
location = ephys.locations.NrnSeclistCompLocation(
name=recording_definition["name"],
comp_x=recording_definition["comp_x"],
sec_index=recording_definition["sec_index"],
seclist_name=recording_definition["seclist_name"],
)
else:
raise Exception(f"Recording type {recording_definition['type']} not supported")
return location
def get_recordings(protocol_name, protocol_definition, prefix, apical_point_isec=-1):
"""Get recordings from protocol definition.
Args:
protocol_name (str): name of the protocol
protocol_definition (dict): dict containing the protocol data
prefix (str): prefix used in naming responses, features, recordings, etc.
apical_point_isec (int): apical point section index
Should be given if there is "somadistanceapic" in "type"
of at least one of the extra recording definition
Returns:
list of RecordingCustom
"""
recordings = []
recordings.append(
RecordingCustom(
name=f"{prefix}.{protocol_name}.soma.v",
location=soma_loc,
variable="v",
)
)
if "extra_recordings" in protocol_definition:
for recording_definition in protocol_definition["extra_recordings"]:
location = get_extra_recording_location(
recording_definition, apical_point_isec
)
var = recording_definition["var"]
recording = RecordingCustom(
name=f"{prefix}.{protocol_name}.{location.name}.{var}",
location=location,
variable=var,
)
recordings.append(recording)
return recordings
def add_protocol(
protocols_dict,
protocol_name,
protocol_definition,
recordings,
stochkv_det,
prefix,
syn_locs=None,
):
"""Add protocol from protocol definition to protocols dict.
Args:
protocols_dict (dict): the dict to which to append the protocol
protocol_name (str): name of the protocol
protocol_definition (dict): dict containing the protocol data
recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol
stochkv_det (bool): set if stochastic or deterministic
prefix (str): prefix used in naming responses, features, recordings, etc.
syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses
(if any, else None)
"""
if "type" in protocol_definition and protocol_definition["type"] == "StepProtocol":
protocols_dict[protocol_name] = read_step_protocol(
protocol_name, protocol_definition, recordings, stochkv_det
)
elif (
"type" in protocol_definition
and protocol_definition["type"] == "StepThresholdProtocol"
):
protocols_dict[protocol_name] = read_step_threshold_protocol(
protocol_name, protocol_definition, recordings, stochkv_det
)
elif (
"type" in protocol_definition
and protocol_definition["type"] == "RampThresholdProtocol"
):
protocols_dict[protocol_name] = read_ramp_threshold_protocol(
protocol_name, protocol_definition, recordings
)
elif (
"type" in protocol_definition and protocol_definition["type"] == "RampProtocol"
):
protocols_dict[protocol_name] = read_ramp_protocol(
protocol_name, protocol_definition, recordings
)
elif (
"type" in protocol_definition
and protocol_definition["type"] == "RatSSCxThresholdDetectionProtocol"
):
protocols_dict["ThresholdDetection"] = RatSSCxThresholdDetectionProtocol(
"IDRest",
step_protocol_template=read_step_protocol(
"Threshold", protocol_definition["step_template"], recordings
),
prefix=prefix,
)
elif "type" in protocol_definition and protocol_definition["type"] == "Vecstim":
protocols_dict[protocol_name] = read_vecstim_protocol(
protocol_name, protocol_definition, recordings, syn_locs
)
elif "type" in protocol_definition and protocol_definition["type"] == "Netstim":
protocols_dict[protocol_name] = read_netstim_protocol(
protocol_name, protocol_definition, recordings, syn_locs
)
else:
stimuli = []
for stimulus_definition in protocol_definition["stimuli"]:
stimuli.append(
ephys.stimuli.NrnSquarePulse(
step_amplitude=stimulus_definition["amp"],
step_delay=stimulus_definition["delay"],
step_duration=stimulus_definition["duration"],
location=soma_loc,
total_duration=stimulus_definition["totduration"],
)
)
protocols_dict[protocol_name] = ephys.protocols.SweepProtocol(
name=protocol_name, stimuli=stimuli, recordings=recordings
)
def check_for_forbidden_protocol(protocols_dict):
"""Check for unsupported protocol.
Args:
protocols_dict (dict): contains all protocols to be run
Raises:
Exception: If a protocol that should only be used with MainProtocol is present
in protocols_dict
"""
# Those protocols cannot be used if they are not in MainProtocol
forbidden_prots = [
"RatSSCxRinHoldcurrentProtocol",
"RatSSCxThresholdDetectionProtocol",
"StepThresholdProtocol",
"RampThresholdProtocol",
]
# check the class name of each protocol
for prot in protocols_dict.values():
if type(prot).__name__ in forbidden_prots:
prot_name = type(prot).__name__
            raise Exception(
                f"No MainProtocol found, but {prot_name} was found. "
                f"To use {prot_name}, please set MainProtocol."
            )
def define_protocols(
protocols_filepath,
stochkv_det=None,
prefix="",
apical_point_isec=-1,
syn_locs=None,
):
"""Define protocols.
Args:
        protocols_filepath (str): path to the protocols file
stochkv_det (bool): set if stochastic or deterministic
prefix (str): prefix used in naming responses, features, recordings, etc.
apical_point_isec (int): apical point section index
Should be given if there is "somadistanceapic" in "type"
of at least one of the extra recordings
syn_locs (list of ephys.locations.NrnPointProcessLocation):
locations of the synapses (if any, else None)
Returns:
dict containing the protocols
"""
with open(protocols_filepath, "r", encoding="utf-8") as protocol_file:
protocol_definitions = json.load(protocol_file)
if "__comment" in protocol_definitions:
del protocol_definitions["__comment"]
protocols_dict = {}
for protocol_name, protocol_definition in protocol_definitions.items():
if protocol_name not in ["Main", "RinHoldcurrent"]:
recordings = get_recordings(
protocol_name, protocol_definition, prefix, apical_point_isec
)
# add protocol to protocol dict
add_protocol(
protocols_dict,
protocol_name,
protocol_definition,
recordings,
stochkv_det,
prefix,
syn_locs,
)
if "Main" in protocol_definitions.keys():
protocols_dict["RinHoldcurrent"] = RatSSCxRinHoldcurrentProtocol(
"RinHoldCurrent",
rin_protocol_template=protocols_dict["Rin"],
holdi_precision=protocol_definitions["RinHoldcurrent"]["holdi_precision"],
holdi_max_depth=protocol_definitions["RinHoldcurrent"]["holdi_max_depth"],
prefix=prefix,
)
other_protocols = []
for protocol_name in protocol_definitions["Main"]["other_protocols"]:
if protocol_name in protocols_dict:
other_protocols.append(protocols_dict[protocol_name])
pre_protocols = []
if "pre_protocols" in protocol_definitions["Main"]:
for protocol_name in protocol_definitions["Main"]["pre_protocols"]:
pre_protocols.append(protocols_dict[protocol_name])
protocols_dict["Main"] = RatSSCxMainProtocol(
"Main",
rmp_protocol=protocols_dict["RMP"],
rinhold_protocol=protocols_dict["RinHoldcurrent"],
thdetect_protocol=protocols_dict["ThresholdDetection"],
other_protocols=other_protocols,
pre_protocols=pre_protocols,
)
else:
check_for_forbidden_protocol(protocols_dict)
return protocols_dict
def set_main_protocol_efeatures(protocols_dict, efeatures, prefix):
"""Set the efeatures of the main protocol.
Args:
protocols_dict (dict): contains all protocols to be run
If this function is called, should contain the MainProtocol
and the associated protocols (RinHoldCurrent, ThresholdDetection)
efeatures (dict): contains the efeatures
prefix (str): prefix used in naming responses, features, recordings, etc.
"""
protocols_dict["Main"].rmp_efeature = efeatures[f"{prefix}.RMP.soma.v.voltage_base"]
protocols_dict["Main"].rin_efeature = efeatures[
f"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse"
]
protocols_dict["Main"].rin_efeature.stimulus_current = protocols_dict[
"Main"
].rinhold_protocol.rin_protocol_template.step_amplitude
protocols_dict["RinHoldcurrent"].voltagebase_efeature = efeatures[
f"{prefix}.Rin.soma.v.voltage_base"
]
protocols_dict["ThresholdDetection"].holding_voltage = efeatures[
f"{prefix}.Rin.soma.v.voltage_base"
].exp_mean
def create_protocols(
apical_point_isec,
prot_path,
features_path="",
mtype="",
syn_locs=None,
stochkv_det=None,
):
"""Return a dict containing protocols.
Args:
apical_point_isec (int): section index of the apical point
Set to -1 no apical point is used in any extra recordings
prot_path (str): path to the protocols file
features_path (str): path to the features file
mtype (str): morphology name to be used as prefix in output filenames
syn_locs (list): list of synapse locations
stochkv_det (bool): set if stochastic or deterministic
Returns:
ephys.protocols.SequenceProtocol: sequence protocol containing all the protocols
"""
# pylint: disable=unbalanced-tuple-unpacking, too-many-locals
protocols_dict = define_protocols(
prot_path,
stochkv_det,
mtype,
apical_point_isec,
syn_locs,
)
if "Main" in protocols_dict:
efeatures = define_efeatures(
protocols_dict["Main"],
features_path,
mtype,
)
set_main_protocol_efeatures(protocols_dict, efeatures, mtype)
protocols = [protocols_dict["Main"]]
else:
protocols = list(protocols_dict.values())
return ephys.protocols.SequenceProtocol(
"all protocols",
protocols=protocols,
)
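# Usage sketch (not part of the original module): the config paths and the
# morphology name below are hypothetical placeholders for real protocol and
# feature files.
if __name__ == "__main__":
    all_protocols = create_protocols(
        apical_point_isec=-1,
        prot_path="config/protocols.json",
        features_path="config/features.json",
        mtype="L5TPC",
    )
    print(all_protocols)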
|
[
"bluepyopt.ephys.locations.NrnSomaDistanceCompLocation",
"bluepyopt.ephys.protocols.SweepProtocol",
"bluepyopt.ephys.protocols.SequenceProtocol",
"emodelrunner.features.define_efeatures",
"emodelrunner.recordings.RecordingCustom",
"emodelrunner.protocols.StepProtocol",
"emodelrunner.protocols.SweepProtocolCustom",
"emodelrunner.synapses.stimuli.NrnNetStimStimulusCustom",
"bluepyopt.ephys.locations.NrnSecSomaDistanceCompLocation",
"emodelrunner.protocols.RatSSCxRinHoldcurrentProtocol",
"bluepyopt.ephys.locations.NrnSeclistCompLocation",
"bluepyopt.ephys.stimuli.NrnRampPulse",
"bluepyopt.ephys.stimuli.NrnSquarePulse",
"emodelrunner.protocols.RatSSCxMainProtocol",
"json.load",
"emodelrunner.protocols.StepThresholdProtocol",
"emodelrunner.synapses.stimuli.NrnVecStimStimulusCustom",
"emodelrunner.protocols.RampProtocol",
"emodelrunner.protocols.RampThresholdProtocol",
"logging.getLogger"
] |
[((1156, 1183), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1173, 1183), False, 'import logging\n'), ((1196, 1300), 'bluepyopt.ephys.locations.NrnSeclistCompLocation', 'ephys.locations.NrnSeclistCompLocation', ([], {'name': '"""soma"""', 'seclist_name': '"""somatic"""', 'sec_index': '(0)', 'comp_x': '(0.5)'}), "(name='soma', seclist_name='somatic',\n sec_index=0, comp_x=0.5)\n", (1234, 1300), False, 'from bluepyopt import ephys\n'), ((1992, 2182), 'bluepyopt.ephys.stimuli.NrnRampPulse', 'ephys.stimuli.NrnRampPulse', ([], {'ramp_delay': "ramp_definition['ramp_delay']", 'ramp_duration': "ramp_definition['ramp_duration']", 'location': 'soma_loc', 'total_duration': "ramp_definition['totduration']"}), "(ramp_delay=ramp_definition['ramp_delay'],\n ramp_duration=ramp_definition['ramp_duration'], location=soma_loc,\n total_duration=ramp_definition['totduration'])\n", (2018, 2182), False, 'from bluepyopt import ephys\n'), ((2238, 2404), 'bluepyopt.ephys.stimuli.NrnSquarePulse', 'ephys.stimuli.NrnSquarePulse', ([], {'step_delay': '(0.0)', 'step_duration': "ramp_definition['totduration']", 'location': 'soma_loc', 'total_duration': "ramp_definition['totduration']"}), "(step_delay=0.0, step_duration=ramp_definition[\n 'totduration'], location=soma_loc, total_duration=ramp_definition[\n 'totduration'])\n", (2266, 2404), False, 'from bluepyopt import ephys\n'), ((2446, 2696), 'emodelrunner.protocols.RampThresholdProtocol', 'RampThresholdProtocol', ([], {'name': 'protocol_name', 'ramp_stimulus': 'ramp_stimulus', 'holding_stimulus': 'holding_stimulus', 'thresh_perc_start': "ramp_definition['thresh_perc_start']", 'thresh_perc_end': "ramp_definition['thresh_perc_end']", 'recordings': 'recordings'}), "(name=protocol_name, ramp_stimulus=ramp_stimulus,\n holding_stimulus=holding_stimulus, thresh_perc_start=ramp_definition[\n 'thresh_perc_start'], thresh_perc_end=ramp_definition['thresh_perc_end'\n ], recordings=recordings)\n", (2467, 2696), False, 'from emodelrunner.protocols import RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom\n'), ((3244, 3564), 'bluepyopt.ephys.stimuli.NrnRampPulse', 'ephys.stimuli.NrnRampPulse', ([], {'ramp_amplitude_start': "ramp_definition['ramp_amplitude_start']", 'ramp_amplitude_end': "ramp_definition['ramp_amplitude_end']", 'ramp_delay': "ramp_definition['ramp_delay']", 'ramp_duration': "ramp_definition['ramp_duration']", 'location': 'soma_loc', 'total_duration': "ramp_definition['totduration']"}), "(ramp_amplitude_start=ramp_definition[\n 'ramp_amplitude_start'], ramp_amplitude_end=ramp_definition[\n 'ramp_amplitude_end'], ramp_delay=ramp_definition['ramp_delay'],\n ramp_duration=ramp_definition['ramp_duration'], location=soma_loc,\n total_duration=ramp_definition['totduration'])\n", (3270, 3564), False, 'from bluepyopt import ephys\n'), ((4104, 4227), 'emodelrunner.protocols.RampProtocol', 'RampProtocol', ([], {'name': 'protocol_name', 'ramp_stimulus': 'ramp_stimulus', 'holding_stimulus': 'holding_stimulus', 'recordings': 'recordings'}), '(name=protocol_name, ramp_stimulus=ramp_stimulus,\n holding_stimulus=holding_stimulus, recordings=recordings)\n', (4116, 4227), False, 'from emodelrunner.protocols import RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom\n'), ((6038, 6189), 
'emodelrunner.protocols.StepProtocol', 'StepProtocol', ([], {'name': 'protocol_name', 'step_stimuli': 'step_stimuli', 'holding_stimulus': 'holding_stimulus', 'recordings': 'recordings', 'stochkv_det': 'stochkv_det'}), '(name=protocol_name, step_stimuli=step_stimuli,\n holding_stimulus=holding_stimulus, recordings=recordings, stochkv_det=\n stochkv_det)\n', (6050, 6189), False, 'from emodelrunner.protocols import RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom\n'), ((7378, 7544), 'bluepyopt.ephys.stimuli.NrnSquarePulse', 'ephys.stimuli.NrnSquarePulse', ([], {'step_delay': '(0.0)', 'step_duration': "step_definition['totduration']", 'location': 'soma_loc', 'total_duration': "step_definition['totduration']"}), "(step_delay=0.0, step_duration=step_definition[\n 'totduration'], location=soma_loc, total_duration=step_definition[\n 'totduration'])\n", (7406, 7544), False, 'from bluepyopt import ephys\n'), ((7738, 7942), 'emodelrunner.protocols.StepThresholdProtocol', 'StepThresholdProtocol', ([], {'name': 'protocol_name', 'step_stimuli': 'step_stimuli', 'holding_stimulus': 'holding_stimulus', 'thresh_perc': "step_definition['thresh_perc']", 'recordings': 'recordings', 'stochkv_det': 'stochkv_det'}), "(name=protocol_name, step_stimuli=step_stimuli,\n holding_stimulus=holding_stimulus, thresh_perc=step_definition[\n 'thresh_perc'], recordings=recordings, stochkv_det=stochkv_det)\n", (7759, 7942), False, 'from emodelrunner.protocols import RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom\n'), ((9000, 9170), 'emodelrunner.synapses.stimuli.NrnVecStimStimulusCustom', 'NrnVecStimStimulusCustom', (['syn_locs', "stim_definition['syn_start']", "stim_definition['syn_stop']", "stim_definition['syn_stim_seed']", "stim_definition['vecstim_random']"], {}), "(syn_locs, stim_definition['syn_start'],\n stim_definition['syn_stop'], stim_definition['syn_stim_seed'],\n stim_definition['vecstim_random'])\n", (9024, 9170), False, 'from emodelrunner.synapses.stimuli import NrnNetStimStimulusCustom, NrnVecStimStimulusCustom\n'), ((9222, 9276), 'emodelrunner.protocols.SweepProtocolCustom', 'SweepProtocolCustom', (['protocol_name', '[stim]', 'recordings'], {}), '(protocol_name, [stim], recordings)\n', (9241, 9276), False, 'from emodelrunner.protocols import RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom\n'), ((9970, 10172), 'emodelrunner.synapses.stimuli.NrnNetStimStimulusCustom', 'NrnNetStimStimulusCustom', (['syn_locs', "stim_definition['syn_stop']", "stim_definition['syn_nmb_of_spikes']", "stim_definition['syn_interval']", "stim_definition['syn_start']", "stim_definition['syn_noise']"], {}), "(syn_locs, stim_definition['syn_stop'],\n stim_definition['syn_nmb_of_spikes'], stim_definition['syn_interval'],\n stim_definition['syn_start'], stim_definition['syn_noise'])\n", (9994, 10172), False, 'from emodelrunner.synapses.stimuli import NrnNetStimStimulusCustom, NrnVecStimStimulusCustom\n'), ((10232, 10286), 'emodelrunner.protocols.SweepProtocolCustom', 'SweepProtocolCustom', (['protocol_name', '[stim]', 'recordings'], {}), '(protocol_name, [stim], recordings)\n', (10251, 10286), False, 'from emodelrunner.protocols import 
RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom\n'), ((23203, 23273), 'bluepyopt.ephys.protocols.SequenceProtocol', 'ephys.protocols.SequenceProtocol', (['"""all protocols"""'], {'protocols': 'protocols'}), "('all protocols', protocols=protocols)\n", (23235, 23273), False, 'from bluepyopt import ephys\n'), ((3753, 3992), 'bluepyopt.ephys.stimuli.NrnSquarePulse', 'ephys.stimuli.NrnSquarePulse', ([], {'step_amplitude': "holding_definition['amp']", 'step_delay': "holding_definition['delay']", 'step_duration': "holding_definition['duration']", 'location': 'soma_loc', 'total_duration': "holding_definition['totduration']"}), "(step_amplitude=holding_definition['amp'],\n step_delay=holding_definition['delay'], step_duration=\n holding_definition['duration'], location=soma_loc, total_duration=\n holding_definition['totduration'])\n", (3781, 3992), False, 'from bluepyopt import ephys\n'), ((5060, 5287), 'bluepyopt.ephys.stimuli.NrnSquarePulse', 'ephys.stimuli.NrnSquarePulse', ([], {'step_amplitude': "step_definition['amp']", 'step_delay': "step_definition['delay']", 'step_duration': "step_definition['duration']", 'location': 'soma_loc', 'total_duration': "step_definition['totduration']"}), "(step_amplitude=step_definition['amp'],\n step_delay=step_definition['delay'], step_duration=step_definition[\n 'duration'], location=soma_loc, total_duration=step_definition[\n 'totduration'])\n", (5088, 5287), False, 'from bluepyopt import ephys\n'), ((5535, 5774), 'bluepyopt.ephys.stimuli.NrnSquarePulse', 'ephys.stimuli.NrnSquarePulse', ([], {'step_amplitude': "holding_definition['amp']", 'step_delay': "holding_definition['delay']", 'step_duration': "holding_definition['duration']", 'location': 'soma_loc', 'total_duration': "holding_definition['totduration']"}), "(step_amplitude=holding_definition['amp'],\n step_delay=holding_definition['delay'], step_duration=\n holding_definition['duration'], location=soma_loc, total_duration=\n holding_definition['totduration'])\n", (5563, 5774), False, 'from bluepyopt import ephys\n'), ((7081, 7263), 'bluepyopt.ephys.stimuli.NrnSquarePulse', 'ephys.stimuli.NrnSquarePulse', ([], {'step_delay': "step_definition['delay']", 'step_duration': "step_definition['duration']", 'location': 'soma_loc', 'total_duration': "step_definition['totduration']"}), "(step_delay=step_definition['delay'],\n step_duration=step_definition['duration'], location=soma_loc,\n total_duration=step_definition['totduration'])\n", (7109, 7263), False, 'from bluepyopt import ephys\n'), ((11069, 11259), 'bluepyopt.ephys.locations.NrnSomaDistanceCompLocation', 'ephys.locations.NrnSomaDistanceCompLocation', ([], {'name': "recording_definition['name']", 'soma_distance': "recording_definition['somadistance']", 'seclist_name': "recording_definition['seclist_name']"}), "(name=recording_definition[\n 'name'], soma_distance=recording_definition['somadistance'],\n seclist_name=recording_definition['seclist_name'])\n", (11112, 11259), False, 'from bluepyopt import ephys\n'), ((12989, 13082), 'emodelrunner.recordings.RecordingCustom', 'RecordingCustom', ([], {'name': 'f"""{prefix}.{protocol_name}.soma.v"""', 'location': 'soma_loc', 'variable': '"""v"""'}), "(name=f'{prefix}.{protocol_name}.soma.v', location=soma_loc,\n variable='v')\n", (13004, 13082), False, 'from emodelrunner.recordings import RecordingCustom\n'), ((18775, 18799), 'json.load', 'json.load', (['protocol_file'], {}), 
'(protocol_file)\n', (18784, 18799), False, 'import json\n'), ((19569, 19841), 'emodelrunner.protocols.RatSSCxRinHoldcurrentProtocol', 'RatSSCxRinHoldcurrentProtocol', (['"""RinHoldCurrent"""'], {'rin_protocol_template': "protocols_dict['Rin']", 'holdi_precision': "protocol_definitions['RinHoldcurrent']['holdi_precision']", 'holdi_max_depth': "protocol_definitions['RinHoldcurrent']['holdi_max_depth']", 'prefix': 'prefix'}), "('RinHoldCurrent', rin_protocol_template=\n protocols_dict['Rin'], holdi_precision=protocol_definitions[\n 'RinHoldcurrent']['holdi_precision'], holdi_max_depth=\n protocol_definitions['RinHoldcurrent']['holdi_max_depth'], prefix=prefix)\n", (19598, 19841), False, 'from emodelrunner.protocols import RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom\n'), ((20396, 20641), 'emodelrunner.protocols.RatSSCxMainProtocol', 'RatSSCxMainProtocol', (['"""Main"""'], {'rmp_protocol': "protocols_dict['RMP']", 'rinhold_protocol': "protocols_dict['RinHoldcurrent']", 'thdetect_protocol': "protocols_dict['ThresholdDetection']", 'other_protocols': 'other_protocols', 'pre_protocols': 'pre_protocols'}), "('Main', rmp_protocol=protocols_dict['RMP'],\n rinhold_protocol=protocols_dict['RinHoldcurrent'], thdetect_protocol=\n protocols_dict['ThresholdDetection'], other_protocols=other_protocols,\n pre_protocols=pre_protocols)\n", (20415, 20641), False, 'from emodelrunner.protocols import RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom\n'), ((22904, 22966), 'emodelrunner.features.define_efeatures', 'define_efeatures', (["protocols_dict['Main']", 'features_path', 'mtype'], {}), "(protocols_dict['Main'], features_path, mtype)\n", (22920, 22966), False, 'from emodelrunner.features import define_efeatures\n'), ((11592, 11832), 'bluepyopt.ephys.locations.NrnSecSomaDistanceCompLocation', 'ephys.locations.NrnSecSomaDistanceCompLocation', ([], {'name': "recording_definition['name']", 'soma_distance': "recording_definition['somadistance']", 'sec_name': "seclist_to_sec[recording_definition['seclist_name']]", 'sec_index': 'apical_point_isec'}), "(name=recording_definition[\n 'name'], soma_distance=recording_definition['somadistance'], sec_name=\n seclist_to_sec[recording_definition['seclist_name']], sec_index=\n apical_point_isec)\n", (11638, 11832), False, 'from bluepyopt import ephys\n'), ((13455, 13563), 'emodelrunner.recordings.RecordingCustom', 'RecordingCustom', ([], {'name': 'f"""{prefix}.{protocol_name}.{location.name}.{var}"""', 'location': 'location', 'variable': 'var'}), "(name=f'{prefix}.{protocol_name}.{location.name}.{var}',\n location=location, variable=var)\n", (13470, 13563), False, 'from emodelrunner.recordings import RecordingCustom\n'), ((11956, 12173), 'bluepyopt.ephys.locations.NrnSeclistCompLocation', 'ephys.locations.NrnSeclistCompLocation', ([], {'name': "recording_definition['name']", 'comp_x': "recording_definition['comp_x']", 'sec_index': "recording_definition['sec_index']", 'seclist_name': "recording_definition['seclist_name']"}), "(name=recording_definition['name'],\n comp_x=recording_definition['comp_x'], sec_index=recording_definition[\n 'sec_index'], seclist_name=recording_definition['seclist_name'])\n", (11994, 12173), False, 'from bluepyopt import ephys\n'), ((16910, 17003), 
'bluepyopt.ephys.protocols.SweepProtocol', 'ephys.protocols.SweepProtocol', ([], {'name': 'protocol_name', 'stimuli': 'stimuli', 'recordings': 'recordings'}), '(name=protocol_name, stimuli=stimuli,\n recordings=recordings)\n', (16939, 17003), False, 'from bluepyopt import ephys\n'), ((16506, 16749), 'bluepyopt.ephys.stimuli.NrnSquarePulse', 'ephys.stimuli.NrnSquarePulse', ([], {'step_amplitude': "stimulus_definition['amp']", 'step_delay': "stimulus_definition['delay']", 'step_duration': "stimulus_definition['duration']", 'location': 'soma_loc', 'total_duration': "stimulus_definition['totduration']"}), "(step_amplitude=stimulus_definition['amp'],\n step_delay=stimulus_definition['delay'], step_duration=\n stimulus_definition['duration'], location=soma_loc, total_duration=\n stimulus_definition['totduration'])\n", (16534, 16749), False, 'from bluepyopt import ephys\n')]
|
import os
from flask import Flask, redirect
from flask import request
from flask import jsonify
import hashlib
app = Flask(__name__)
c = 0
clients = []
chat = []
# each chat request is stored as [from, to, status]; status: 0 = sent, 1 = accepted, 2 = rejected
requests = {}
requests_sent = {}
version = 5
additive = 0
def getUID(ip):
return hashlib.sha256(str(ip).encode("utf8")).hexdigest()
def getUN(ip):
return int(str(ip).replace(".", ""))
def addChat(toAdd, limit = True):
global chat, additive
if limit:
additive = additive + 1
print("new chat: " + toAdd)
toAdd = toAdd.replace("<script>", "").replace("</script>", "")
if(additive > 50):
chat.pop(0)
chat.append(toAdd)
def addClient(uID):
if uID not in clients:
clients.append(uID)
addChat("--- " + uID + " Joined the Chat ---")
print("connection from " + str(request.remote_addr))
def removeClient(uID):
if uID in clients:
clients.remove(uID)
addChat("--- " + uID + " Left the Chat ---")
@app.route('/')
def hello():
global chat, version
uIp = request.access_route[0]
uID = getUID(uIp)
addClient(uID)
view = "<title>A+</title>"
global c
c = c + 1
view = view + "<h3> Public Chat </h3>"
view = view + "Connected as: " + uID + " (" + uIp + ")<br \\>"
view = view + "Refresh the page to access the latest messages."
view = view + "<br \\>-----------------------------------------------------------------------<br \\>"
for i in chat:
view = view + i.replace("<", "").replace(">", "") + "<br \\>"
view = view + "<br \\>-----------------------------------------------------------------------<br \\>"
view = view + "note that only the latest 50 messages are stored and displayed. <br \\><br \\>"
view = view + "<form action=\" " + "/post" + "\" method=\"post\">"
view = view + "<input type=\"text\" name=\"msg\">"
view = view + "<input type=\"submit\">"
view = view + "</form>"
view = view + "<br \\><hr \\>"
view = view + "A+ v. " + str(version) + " | <a href=\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\">LICENSE</a>"
return(view)
@app.route('/post', methods=['POST'])
def handle_data():
uIp = request.access_route[0]
uID = getUID(uIp)
msg = request.form['msg']
addChat(uID + ": " + msg)
return redirect("/", code=302)
@app.route("/get_my_ip", methods=["GET"])
def get_my_ip():
return jsonify({'ip': request.access_route[0], 'id' : getUID(request.access_route[0])}), 200
@app.route("/announce", methods=["GET"])
def announceThem():
global chat
uIp = request.access_route[0]
uID = getUID(uIp)
addClient(uID)
return jsonify({'you': uID}), 200
@app.route("/unannounce", methods=["GET"])
def unannounceThem():
global chat
uIp = request.access_route[0]
uID = getUID(uIp)
removeClient(uID)
return jsonify({'you': uID}), 200
@app.route("/list", methods=["GET"])
def listAnnounced():
return jsonify({'clients': clients}), 200
@app.route("/req", methods=['POST'])
def requestCH():
global requests, requests_sent
uIp = request.access_route[0]
uID = getUID(uIp)
if "to" in request.form:
to = request.form['to']
# [from, to, status[0sent, 1accepted, 2rejected]]
req = [uID, to, 0]
if not (to in requests):
requests[to] = []
requests[to].append(req)
if not (uID in requests_sent):
requests_sent[uID] = []
requests_sent[uID].append(req)
return redirect("/", code=302)
else:
return jsonify({'error': "400: POST Request expected"}), 400
@app.route("/status", methods=["GET"])
def sendStatus():
global requests, requests_sent
uIp = request.access_route[0]
uID = getUID(uIp)
lis = []
if not (uID in requests_sent):
requests_sent[uID] = []
if not (uID in requests):
requests[uID] = []
return jsonify({'sent': requests_sent[uID], 'received': requests[uID]}), 200
@app.route("/send", methods=["GET"])
def sendView():
view = ""
view = view + "<h3> Send a Chat Request </h3>"
view = view + "<hr \\>"
view = view + "<form action=\" " + "/req" + "\" method=\"post\">"
view = view + "<h4> To: </h4>"
view = view + "<input type=\"text\" name=\"to\"><br \\>"
view = view + "<input type=\"submit\">"
view = view + "</form>"
view = view + "<hr \\>"
return view, 200
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
[
"os.environ.get",
"flask.jsonify",
"flask.Flask",
"flask.redirect"
] |
[((118, 133), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (123, 133), False, 'from flask import Flask, redirect\n'), ((2333, 2356), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (2341, 2356), False, 'from flask import Flask, redirect\n'), ((2676, 2697), 'flask.jsonify', 'jsonify', (["{'you': uID}"], {}), "({'you': uID})\n", (2683, 2697), False, 'from flask import jsonify\n'), ((2873, 2894), 'flask.jsonify', 'jsonify', (["{'you': uID}"], {}), "({'you': uID})\n", (2880, 2894), False, 'from flask import jsonify\n'), ((2969, 2998), 'flask.jsonify', 'jsonify', (["{'clients': clients}"], {}), "({'clients': clients})\n", (2976, 2998), False, 'from flask import jsonify\n'), ((3512, 3535), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (3520, 3535), False, 'from flask import Flask, redirect\n'), ((3911, 3975), 'flask.jsonify', 'jsonify', (["{'sent': requests_sent[uID], 'received': requests[uID]}"], {}), "({'sent': requests_sent[uID], 'received': requests[uID]})\n", (3918, 3975), False, 'from flask import jsonify\n'), ((4516, 4544), 'os.environ.get', 'os.environ.get', (['"""PORT"""', '(5000)'], {}), "('PORT', 5000)\n", (4530, 4544), False, 'import os\n'), ((3561, 3609), 'flask.jsonify', 'jsonify', (["{'error': '400: POST Request expected'}"], {}), "({'error': '400: POST Request expected'})\n", (3568, 3609), False, 'from flask import jsonify\n')]
|
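The A+ chat server above only speaks plain HTTP form posts, so any client can drive it. A minimal client-side sketch, assuming a local instance on port 5000 (the Flask default) and the third-party requests library; the user-ID placeholder is hypothetical:

import requests

BASE = "http://localhost:5000"  # assumed local instance of the server above

# the server derives our user ID from the requesting IP address
print(requests.get(BASE + "/get_my_ip").json())

# post a public chat message through the same form field the HTML page uses
requests.post(BASE + "/post", data={"msg": "hello from a script"})

# send a chat request to another user (the /req endpoint), then check its status
requests.post(BASE + "/req", data={"to": "<some-user-id>"})
print(requests.get(BASE + "/status").json())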
# Copyright (c) 2020, Huawei Technologies.All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
import copy
from common_utils import TestCase, run_tests
from common_device_type import dtypes, instantiate_device_type_tests
from util_test import create_common_tensor
class TestNotEqual(TestCase):
def cpu_op_exec(self, input1, input2):
output = torch.ne(input1, input2)
output = output.numpy().astype(np.int32)
return output
def npu_op_exec(self, input1, input2):
output = torch.ne(input1, input2)
output = output.to("cpu")
output = output.numpy().astype(np.int32)
return output
def cpu_op_inplace_exec(self, input1, input2):
input1.ne_(input2)
output = input1.numpy().astype(np.int32)
return output
def npu_op_inplace_exec(self, input1, input2):
input1.ne_(input2)
output = input1.to("cpu")
output = output.numpy().astype(np.int32)
return output
def npu_op_exec_out(self, input1, input2, out):
torch.ne(input1, input2, out=out)
output = out.to("cpu")
output = output.numpy().astype(np.int32)
return output
def not_equal_scalar_result(self, shape_format):
for item in shape_format:
scalar = np.random.uniform(0, 100)
cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100)
npu_input3 = copy.deepcopy(cpu_input1).to("npu").to(torch.bool)
if cpu_input1.dtype == torch.float16:
cpu_input1 = cpu_input1.to(torch.float32)
cpu_output = self.cpu_op_exec(cpu_input1, scalar)
npu_output = self.npu_op_exec(npu_input1, scalar)
npu_output_out = self.npu_op_exec_out(npu_input1, scalar, npu_input3)
cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, scalar)
npu_output_inp = self.npu_op_inplace_exec(npu_input1, scalar)
self.assertRtolEqual(cpu_output, npu_output)
self.assertRtolEqual(cpu_output, npu_output_out)
self.assertRtolEqual(cpu_output_inp, npu_output_inp)
def not_equal_result(self, shape_format):
for item in shape_format:
cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100)
cpu_input2, npu_input2 = create_common_tensor(item[1], 0, 100)
npu_input3 = copy.deepcopy(cpu_input1).to("npu").to(torch.bool)
if cpu_input1.dtype == torch.float16:
cpu_input1 = cpu_input1.to(torch.float32)
cpu_input2 = cpu_input2.to(torch.float32)
cpu_output = self.cpu_op_exec(cpu_input1, cpu_input2)
npu_output = self.npu_op_exec(npu_input1, npu_input2)
npu_output_out = self.npu_op_exec_out(npu_input1, npu_input2, npu_input3)
cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, cpu_input2)
npu_output_inp = self.npu_op_inplace_exec(npu_input1, npu_input2)
self.assertRtolEqual(cpu_output, npu_output)
self.assertRtolEqual(cpu_output, npu_output_out)
self.assertRtolEqual(cpu_output_inp, npu_output_inp)
def test_not_equal_shape_format_fp16_1d(self, device):
format_list = [-1, 0]
shape_format = [[[np.float16, i, [16]], [np.float16, i, [16]]] for i in format_list]
self.not_equal_result(shape_format)
def test_not_equal_shape_format_fp32_1d(self, device):
format_list = [-1, 0]
shape_format = [[[np.float32, i, [16]], [np.float32, i, [16]]] for i in format_list]
self.not_equal_result(shape_format)
def test_not_equal_shape_format_fp16_2d(self, device):
format_list = [-1, 0]
shape_format = [[[np.float16, i, [448, 1]], [np.float16, i, [448, 1]]] for i in format_list]
self.not_equal_result(shape_format)
def test_not_equal_shape_format_fp32_2d(self, device):
format_list = [-1, 0]
shape_format = [[[np.float32, i, [448, 1]], [np.float32, i, [448, 1]]] for i in format_list]
self.not_equal_result(shape_format)
def test_not_equal_shape_format_fp16_3d(self, device):
format_list = [-1, 0]
shape_format = [[[np.float16, i, [16, 640, 640]], [np.float16, i, [16, 640, 640]]] for i in format_list]
self.not_equal_result(shape_format)
def test_not_equal_shape_format_fp32_3d(self, device):
format_list = [-1, 0, 3]
shape_format = [[[np.float32, i, [16, 640, 640]], [np.float32, i, [16, 640, 640]]] for i in format_list]
self.not_equal_result(shape_format)
def test_not_equal_shape_format_fp16_4d(self, device):
format_list = [-1, 0, 3]
shape_format = [[[np.float16, i, [32, 3, 3, 3]], [np.float16, i, [32, 3, 3, 3]]] for i in format_list]
self.not_equal_result(shape_format)
def test_not_equal_shape_format_fp32_4d(self, device):
format_list = [-1, 0, 3]
shape_format = [[[np.float32, i, [32, 3, 3, 3]], [np.float32, i, [32, 3, 3, 3]]] for i in format_list]
self.not_equal_result(shape_format)
    # scalar -----------------------------------------------------------------
def test_not_equal_scalar_shape_format_fp16_1d(self, device):
format_list = [-1, 0, 3]
shape_format = [[[np.float16, i, 18]] for i in format_list]
self.not_equal_scalar_result(shape_format)
def test_not_equal_scalar_shape_format_fp32_1d(self, device):
format_list = [-1, 0, 3]
shape_format = [[[np.float32, i, [18]]] for i in format_list]
self.not_equal_scalar_result(shape_format)
def test_not_equal_scalar_shape_format_fp16_2d(self, device):
format_list = [-1, 0]
shape_format = [[[np.float16, i, [64, 7]]] for i in format_list]
self.not_equal_scalar_result(shape_format)
def test_not_equal_scalar_shape_format_fp32_2d(self, device):
format_list = [-1, 0]
shape_format = [[[np.float32, i, [64, 7]]] for i in format_list]
self.not_equal_scalar_result(shape_format)
def test_not_equal_scalar_shape_format_fp32_3d(self, device):
format_list = [-1, 0]
shape_format = [[[np.float32, i, [64, 24, 38]]] for i in format_list]
self.not_equal_scalar_result(shape_format)
def test_not_equal_scalar_shape_format_fp16_4d(self, device):
format_list = [-1, 0]
shape_format = [[[np.float16, i, [32, 3, 3, 3]]] for i in format_list]
self.not_equal_scalar_result(shape_format)
def test_not_equal_scalar_shape_format_fp32_4d(self, device):
format_list = [-1, 0]
shape_format = [[[np.float32, i, [32, 3, 3, 3]]] for i in format_list]
self.not_equal_scalar_result(shape_format)
def test_not_equal_shape_format_int32_1d(self, device):
format_list = [-1, 0]
shape_format = [[[np.int32, i, [16]], [np.int32, i, [16]]] for i in format_list]
self.not_equal_result(shape_format)
def test_not_equal_shape_format_int32_2d(self, device):
format_list = [-1, 0]
shape_format = [[[np.int32, i, [448, 1]], [np.int32, i, [448, 1]]] for i in format_list]
self.not_equal_result(shape_format)
def test_not_equal_shape_format_int32_3d(self, device):
format_list = [-1, 0]
shape_format = [[[np.int32, i, [16, 640, 640]], [np.int32, i, [16, 640, 640]]] for i in format_list]
self.not_equal_result(shape_format)
def test_not_equal_shape_format_int32_4d(self, device):
format_list = [-1, 0]
shape_format = [[[np.int32, i, [32, 3, 3, 3]], [np.int32, i, [32, 3, 3, 3]]] for i in format_list]
self.not_equal_result(shape_format)
instantiate_device_type_tests(TestNotEqual, globals(), except_for="cpu")
if __name__ == "__main__":
run_tests()
|
[
"numpy.random.uniform",
"torch.ne",
"copy.deepcopy",
"common_utils.run_tests",
"util_test.create_common_tensor"
] |
[((8284, 8295), 'common_utils.run_tests', 'run_tests', ([], {}), '()\n', (8293, 8295), False, 'from common_utils import TestCase, run_tests\n'), ((894, 918), 'torch.ne', 'torch.ne', (['input1', 'input2'], {}), '(input1, input2)\n', (902, 918), False, 'import torch\n'), ((1051, 1075), 'torch.ne', 'torch.ne', (['input1', 'input2'], {}), '(input1, input2)\n', (1059, 1075), False, 'import torch\n'), ((1576, 1609), 'torch.ne', 'torch.ne', (['input1', 'input2'], {'out': 'out'}), '(input1, input2, out=out)\n', (1584, 1609), False, 'import torch\n'), ((1821, 1846), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(100)'], {}), '(0, 100)\n', (1838, 1846), True, 'import numpy as np\n'), ((1884, 1921), 'util_test.create_common_tensor', 'create_common_tensor', (['item[0]', '(0)', '(100)'], {}), '(item[0], 0, 100)\n', (1904, 1921), False, 'from util_test import create_common_tensor\n'), ((2764, 2801), 'util_test.create_common_tensor', 'create_common_tensor', (['item[0]', '(0)', '(100)'], {}), '(item[0], 0, 100)\n', (2784, 2801), False, 'from util_test import create_common_tensor\n'), ((2839, 2876), 'util_test.create_common_tensor', 'create_common_tensor', (['item[1]', '(0)', '(100)'], {}), '(item[1], 0, 100)\n', (2859, 2876), False, 'from util_test import create_common_tensor\n'), ((1947, 1972), 'copy.deepcopy', 'copy.deepcopy', (['cpu_input1'], {}), '(cpu_input1)\n', (1960, 1972), False, 'import copy\n'), ((2902, 2927), 'copy.deepcopy', 'copy.deepcopy', (['cpu_input1'], {}), '(cpu_input1)\n', (2915, 2927), False, 'import copy\n')]
|
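For reference, the operator exercised by these CPU/NPU comparison tests has simple elementwise semantics; a minimal CPU-only sketch of the three call styles the test helpers use:

import torch

a = torch.tensor([1.0, 2.0, 3.0])
b = torch.tensor([1.0, 0.0, 3.0])

print(torch.ne(a, b))      # tensor([False,  True, False])
print(torch.ne(a, 2.0))     # scalar comparison, as in the *_scalar_* tests

out = torch.empty(3, dtype=torch.bool)
torch.ne(a, b, out=out)      # out= variant, as in npu_op_exec_out
print(out)

a.ne_(b)                     # in-place variant, as in cpu_op_inplace_exec;
print(a)                     # a now holds the 0/1 result cast to its original dtype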
from elasticsearch.helpers import scan
import utils.helpers as hp
valueField = {
'ps_packetloss': 'packet_loss',
'ps_owd': 'delay_mean',
'ps_retransmits': 'retransmits',
'ps_throughput': 'throughput'
}
def query4Avg(idx, dateFrom, dateTo):
val_fld = valueField[idx]
query = {
"size" : 0,
"query" : {
"bool" : {
"must" : [
{
"range" : {
"timestamp" : {
"gt" : dateFrom,
"lte": dateTo
}
}
},
{
"term" : {
"src_production" : True
}
},
{
"term" : {
"dest_production" : True
}
}
]
}
},
"aggregations" : {
"groupby" : {
"composite" : {
"size" : 9999,
"sources" : [
{
"src" : {
"terms" : {
"field" : "src"
}
}
},
{
"dest" : {
"terms" : {
"field" : "dest"
}
}
},
{
"src_host" : {
"terms" : {
"field" : "src_host"
}
}
},
{
"dest_host" : {
"terms" : {
"field" : "dest_host"
}
}
},
{
"src_site" : {
"terms" : {
"field" : "src_site"
}
}
},
{
"dest_site" : {
"terms" : {
"field" : "dest_site"
}
}
}
]
},
"aggs": {
val_fld: {
"avg": {
"field": val_fld
}
}
}
}
}
}
# print(idx, str(query).replace("\'", "\""))
aggrs = []
aggdata = hp.es.search(index=idx, body=query)
for item in aggdata['aggregations']['groupby']['buckets']:
aggrs.append({'hash': str(item['key']['src']+'-'+item['key']['dest']),
'from':dateFrom, 'to':dateTo,
'src': item['key']['src'], 'dest': item['key']['dest'],
'src_host': item['key']['src_host'], 'dest_host': item['key']['dest_host'],
'src_site': item['key']['src_site'], 'dest_site': item['key']['dest_site'],
'value': item[val_fld]['value'],
'doc_count': item['doc_count']
})
return aggrs
def get_ip_host(idx, dateFrom, dateTo):
def q_ip_host (fld):
return {
"size" : 0,
"query" : {
"bool" : {
"must" : [
{
"range" : {
"timestamp" : {
"from" : dateFrom,
"to" : dateTo
}
}
},
{
"term" : {
"src_production" : True
}
},
{
"term" : {
"dest_production" : True
}
}
]
}
},
"_source" : False,
"stored_fields" : "_none_",
"aggregations" : {
"groupby" : {
"composite" : {
"size" : 9999,
"sources" : [
{
fld : {
"terms" : {
"field" : fld,
"missing_bucket" : True,
"order" : "asc"
}
}
},
{
str(fld+"_host") : {
"terms" : {
"field" : str(fld+"_host"),
"missing_bucket" : True,
"order" : "asc"
}
}
}
]
}
}
}
}
res_ip_host = {}
for field in ['src', 'dest']:
results = hp.es.search(index=idx, body=q_ip_host(field))
for item in results["aggregations"]["groupby"]["buckets"]:
ip = item['key'][field]
host = item['key'][str(field+'_host')]
if ((ip in res_ip_host.keys()) and (host is not None) and (host != ip)) or (ip not in res_ip_host.keys()):
res_ip_host[ip] = host
return res_ip_host
def get_ip_site(idx, dateFrom, dateTo):
def q_ip_site (fld):
return {
"size" : 0,
"query" : {
"bool" : {
"must" : [
{
"range" : {
"timestamp" : {
"from" : dateFrom,
"to" : dateTo
}
}
},
{
"term" : {
"src_production" : True
}
},
{
"term" : {
"dest_production" : True
}
}
]
}
},
"_source" : False,
"stored_fields" : "_none_",
"aggregations" : {
"groupby" : {
"composite" : {
"size" : 9999,
"sources" : [
{
fld : {
"terms" : {
"field" : fld,
"missing_bucket" : True,
"order" : "asc"
}
}
},
{
str(fld+"_site") : {
"terms" : {
"field" : str(fld+"_site"),
"missing_bucket" : True,
"order" : "asc"
}
}
},
{
"ipv6" : {
"terms" : {
"field" : "ipv6",
"missing_bucket" : True,
"order" : "asc"
}
}
}
]
}
}
}
}
res_ip_site = {}
for field in ['src', 'dest']:
results = hp.es.search(index=idx, body=q_ip_site(field))
for item in results["aggregations"]["groupby"]["buckets"]:
ip = item['key'][field]
site = item['key'][str(field+'_site')]
ipv6 = item['key']['ipv6']
if ((ip in res_ip_site.keys()) and (site is not None)) or (ip not in res_ip_site.keys()):
res_ip_site[ip] = [site, ipv6]
return res_ip_site
def get_host_site(idx, dateFrom, dateTo):
def q_host_site (fld):
return {
"size" : 0,
"query" : {
"bool" : {
"must" : [
{
"range" : {
"timestamp" : {
"from" : dateFrom,
"to" : dateTo
}
}
},
{
"term" : {
"src_production" : True
}
},
{
"term" : {
"dest_production" : True
}
}
]
}
},
"_source" : False,
"stored_fields" : "_none_",
"aggregations" : {
"groupby" : {
"composite" : {
"size" : 9999,
"sources" : [
{
str(fld+"_site") : {
"terms" : {
"field" : str(fld+"_site"),
"missing_bucket" : True,
"order" : "asc"
}
}
},
{
str(fld+"_host") : {
"terms" : {
"field" : str(fld+"_host"),
"missing_bucket" : True,
"order" : "asc"
}
}
}
]
}
}
}
}
res_host_site = {}
for field in ['src', 'dest']:
results = hp.es.search(index=idx, body=q_host_site(field))
for item in results["aggregations"]["groupby"]["buckets"]:
site = item['key'][str(field+"_site")]
host = item['key'][str(field+'_host')]
if ((host in res_host_site.keys()) and (site is not None)) or (host not in res_host_site.keys()):
res_host_site[host] = site
return res_host_site
def get_metadata(dateFrom, dateTo):
def q_metadata():
return {
"size" : 0,
"query" : {
"range" : {
"timestamp" : {
"from" : dateFrom,
"to" : dateTo
}
}
},
"_source" : False,
"aggregations" : {
"groupby" : {
"composite" : {
"size" : 9999,
"sources" : [
{
"site" : {
"terms" : {
"field" : "config.site_name.keyword",
"missing_bucket" : True,
"order" : "asc"
}
}
},
{
"admin_email" : {
"terms" : {
"field" : "administrator.email",
"missing_bucket" : True,
"order" : "asc"
}
}
},
{
"admin_name" : {
"terms" : {
"field" : "administrator.name",
"missing_bucket" : True,
"order" : "asc"
}
}
},
{
"ipv6" : {
"terms" : {
"field" : "external_address.ipv6_address",
"missing_bucket" : True,
"order" : "asc"
}
}
},
{
"ipv4" : {
"terms" : {
"field" : "external_address.ipv4_address",
"missing_bucket" : True,
"order" : "asc"
}
}
},
{
"host" : {
"terms" : {
"field" : "host.keyword",
"missing_bucket" : True,
"order" : "asc"
}
}
}
]
}
}
}
}
results = hp.es.search(index='ps_meta', body=q_metadata())
res_meta = {}
for item in results["aggregations"]["groupby"]["buckets"]:
host = item['key']['host']
if ((host in res_meta.keys()) and (item['key']['site'] is not None)) or (host not in res_meta.keys()):
res_meta[host] = {'site': item['key']['site'], 'admin_name': item['key']['admin_name'],
'admin_email': item['key']['admin_email'], 'ipv6': item['key']['ipv6'],
'ipv4': item['key']['ipv4']}
return res_meta
|
[
"utils.helpers.es.search"
] |
[((2956, 2991), 'utils.helpers.es.search', 'hp.es.search', ([], {'index': 'idx', 'body': 'query'}), '(index=idx, body=query)\n', (2968, 2991), True, 'import utils.helpers as hp\n')]
|
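A sketch of how the aggregation helpers above are typically driven, assuming utils.helpers exposes an already-initialised Elasticsearch client as hp.es (as the import suggests), that the file is importable under the hypothetical module name queries, and that the timestamp field is stored in epoch milliseconds (the unit depends on the index mapping):

import time
import queries as qrs  # hypothetical module name for the file above

now_ms = int(time.time() * 1000)
day_ms = 24 * 60 * 60 * 1000

rows = qrs.query4Avg('ps_packetloss', now_ms - day_ms, now_ms)
for r in rows[:5]:
    print(r['src_site'], '->', r['dest_site'], r['value'], r['doc_count'])

ip_to_host = qrs.get_ip_host('ps_packetloss', now_ms - day_ms, now_ms)
host_to_site = qrs.get_host_site('ps_packetloss', now_ms - day_ms, now_ms)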
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import pytest
from mock import patch
from night_scheduler.framework.sun.sun import Sun
class TestSun(object):
FAKE_LATITUDE = "00"
FAKE_LONGITUDE = "11"
FAKE_DATE = "YYYY-MM-DD"
FAKE_SUNSET = "99:88:77 PM"
FAKE_SUNRISE_SUNSERT_ORG_ANSWER = {
"results": {
"sunrise": "4:26:42 AM",
"sunset": "99:88:77 PM",
"solar_noon": "11:50:51 AM",
"day_length": "14:48:18",
"civil_twilight_begin": "3:54:08 AM",
"civil_twilight_end": "7:47:34 PM",
"nautical_twilight_begin": "3:12:59 AM",
"nautical_twilight_end": "8:28:43 PM",
"astronomical_twilight_begin": "2:25:39 AM",
"astronomical_twilight_end": "9:16:04 PM"
},
"status": "OK"
}
@classmethod
def setup_method(self, method):
self.patcher_requests_get = patch('requests.get')
self.mock_requests_get = self.patcher_requests_get.start()
self.mock_requests_get.return_value = TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER
self.sun = Sun(latitude=TestSun.FAKE_LATITUDE,
longitude=TestSun.FAKE_LONGITUDE,
date=TestSun.FAKE_DATE)
@classmethod
def teardown_method(self, method):
self.mock_requests_get = self.patcher_requests_get.stop()
# ##############################################################################################
def test__get_sunset__no_params__calou_and_today_called(self):
self.sun.get_sunset()
self.mock_requests_get.assert_called_once_with(url="{}/json?lat={}&lng={}&date={}".format(
Sun.URL,
TestSun.FAKE_LATITUDE,
TestSun.FAKE_LONGITUDE,
TestSun.FAKE_DATE
))
def test__get_sunset__no_params__retuns_sunset_hour(self):
sunset = self.sun.get_sunset()
assert sunset == TestSun.FAKE_SUNSET
|
[
"mock.patch",
"night_scheduler.framework.sun.sun.Sun"
] |
[((1033, 1054), 'mock.patch', 'patch', (['"""requests.get"""'], {}), "('requests.get')\n", (1038, 1054), False, 'from mock import patch\n'), ((1229, 1327), 'night_scheduler.framework.sun.sun.Sun', 'Sun', ([], {'latitude': 'TestSun.FAKE_LATITUDE', 'longitude': 'TestSun.FAKE_LONGITUDE', 'date': 'TestSun.FAKE_DATE'}), '(latitude=TestSun.FAKE_LATITUDE, longitude=TestSun.FAKE_LONGITUDE, date=\n TestSun.FAKE_DATE)\n', (1232, 1327), False, 'from night_scheduler.framework.sun.sun import Sun\n')]
|
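Outside the mocked tests, the class under test is used roughly as follows; this is a sketch based only on the constructor arguments and the get_sunset() call the tests exercise, and it performs a real request to the sunrise-sunset web service:

from night_scheduler.framework.sun.sun import Sun

sun = Sun(latitude="52.5200", longitude="13.4050", date="2024-06-21")
print(sun.get_sunset())  # a time string such as "7:32:01 PM" (illustrative value)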
#!/usr/bin/env python3
import argparse
import sys
import psutil
from wrap_scriptlet import wrap_scriptlet
def run():
parser = argparse.ArgumentParser()
parser.add_argument('pid')
args = parser.parse_args(sys.argv[1:])
process = psutil.Process(int(args.pid))
return process.cmdline()
sys.exit(wrap_scriptlet(run))
|
[
"wrap_scriptlet.wrap_scriptlet",
"argparse.ArgumentParser"
] |
[((133, 158), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (156, 158), False, 'import argparse\n'), ((317, 336), 'wrap_scriptlet.wrap_scriptlet', 'wrap_scriptlet', (['run'], {}), '(run)\n', (331, 336), False, 'from wrap_scriptlet import wrap_scriptlet\n')]
|
'''
By Zhenghang(<NAME>
############################################################################################
This is a lightweight server built on the Flask micro framework.
1. Requirements: Python 3, Flask and the related packages.
2. How does it work?
(1) First, change the host IP address to match your own environment.
(2) Then run this Python file.
A temporary file called 'tempList.csv' is initialized with default data
(e.g. -110 for the RSS signal level, 'none' for the magnetic field values),
ordered according to the unchanged file 'APs.csv' (which stores the AP info in a fixed order).
Each time the complete info of one AP arrives (assume 60 APs are detected in a scan: the
transmission is then repeated 60 times, and a flag called "Done" is set to '1' on the last
transmission, meaning all info of that scan has been sent), 'tempList.csv' is refreshed
with that AP's line of info. After 60 transmissions (the AP count), refreshCSV() is called:
the scan's info is copied from 'tempList.csv', appended to 'xxx.csv' (which stores all
info, similar to a database) and written to 'oneTime.csv' (to check the latest scan's info).
Finally, 'tempList.csv' is reset to its default values for the next transmission.
############################################################################################
'''
# coding: utf-8
from flask import Flask, request
from app import db, models
import csv
import os #to get current path
import importlib
from model import *
#algorithm part
import pandas as pdb
import numpy as np
import tensorflow as tf
from sklearn.preprocessing import scale
import matplotlib.pyplot as plt
PYTHONIOENCODING="UTF-8" #set the utf-8 encode mode
# create the application object
app = Flask(__name__)
#edition
# Write all info in DB into a csv file, without SSID stored, encode mode is UTF-8 (as some SSID contains chinese characters)
#edition
def addAllCSV(): #whole database
with open('APs.csv', 'w', newline='') as csvfile:
if not os.path.getsize('./APs.csv'):
spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
spamwriter.writerow([ 'BSSID','SSID','Building', 'Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ'])
users = models.User.query.all()
for u in users:
data = ([u.BSSID, u.SSID, u.Buidling, u.Floor, u.Location_x, u.Location_y, u.Frequency, u.AccX, u.AccY, u.AccZ, u.ORIx, u.ORIy, u.ORIz, u.Level, u.GeoX, u.GeoY, u.GeoZ])
spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
spamwriter.writerow(data)
#add one time's scanner result
def addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time):
with open('userinput.csv', 'a', newline='') as csvfile:
if not os.path.getsize('./userinput.csv'):
spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY', 'AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time'])
data = ([
BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time
])
spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
spamwriter.writerow(data)
def initializeTempList():
with open('mapping.csv', 'r', newline='') as csvfile:
reader = csv.reader(csvfile)
APs = [row[0] for row in reader]
APlength = len(APs)
lists = [[0 for col in range(19)] for row in range(APlength)]
row = 0
for AP in APs:
lists[row][0] = AP
lists[row][1] = 'none'
lists[row][2] = 'none'
lists[row][3] = 'none'
lists[row][4] = 'none'
lists[row][5] = 'none'
lists[row][6] = 'none'
lists[row][7] = 'none'
lists[row][8] = 'none'
lists[row][9] = 'none'
lists[row][10] = 'none'
lists[row][11] = 'none'
lists[row][13] = 'none'
lists[row][14] = '-110'
lists[row][15] = 'none'
lists[row][16] = 'none'
lists[row][17] = 'none'
lists[row][18] = 'none'
row += 1
with open('tempList.csv', 'w', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time'])
for i in range(0,517):
data = ([
lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6], lists[i][7], lists[i][8], lists[i][9], lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18]
])
spamwriter.writerow(data)
#Check whether the input AP's BSSID is listed in mapping.csv (the table of known APs)
def checkAP(list, AP):
row = 0
for row in range(0,517):
if AP == list[row][0]:
return row
return 'none'
def tempList(BSSID,SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time):
with open('tempList.csv', 'r', newline='') as csvfile:
reader = csv.reader(csvfile)
RSS = [row for row in reader]
#print(RSS,RSS[0][0])
for row in range(1,517):
if RSS[row][0] == BSSID:
RSS[row][1] = SSID
RSS[row][2] = Building
RSS[row][3] = Floor
RSS[row][4] = Location_x
RSS[row][5] = Location_y
RSS[row][6] = Frequency
RSS[row][7] = AccX
RSS[row][8] = AccY
RSS[row][9] = AccZ
RSS[row][10] = ORIx
RSS[row][11] = ORIy
RSS[row][12] = ORIz
RSS[row][13] = Level
RSS[row][14] = GeoX
RSS[row][15] = GeoY
RSS[row][16] = GeoZ
RSS[row][17] = Model
RSS[row][18] = Time
with open('tempList.csv', 'w', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
spamwriter.writerow(['BSSID', 'SSID', 'Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time'])
for i in range(1,517):
data = ([
RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[i][9], RSS[i][10], RSS[i][11], RSS[i][12], RSS[i][13], RSS[i][14], RSS[i][15], RSS[i][16], RSS[i][17], RSS[i][18]
])
spamwriter.writerow(data)
break
def isEmpty():
    with open('xxx.csv', 'a+', newline='') as csvfile: # create xxx.csv with a header row if it does not exist yet
if not os.path.getsize('./xxx.csv'): #file not established
spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time'])
with open('mapping.csv', 'r', newline='') as csvfile:
reader = csv.reader(csvfile)
APs = [row[0] for row in reader]
APlength = len(APs)
lists = [[0 for col in range(19)] for row in range(APlength)]
row = 0
for AP in APs:
lists[row][0] = AP
lists[row][1] = 'none'
lists[row][2] = 'none'
lists[row][3] = 'none'
lists[row][4] = 'none'
lists[row][5] = 'none'
lists[row][6] = 'none'
lists[row][7] = 'none'
lists[row][8] = 'none'
lists[row][9] = 'none'
lists[row][10] = 'none'
lists[row][11] = 'none'
lists[row][12] = 'none'
lists[row][13] = '-110'
lists[row][14] = 'none'
lists[row][15] = 'none'
lists[row][16] = 'none'
lists[row][17] = 'none'
lists[row][18] = 'none'
row += 1
#edition2
    with open('tempList.csv', 'a+', newline='') as csvfile: # create tempList.csv with default rows if it is empty
if not os.path.getsize('./tempList.csv'): #file is empty
spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time'])
for i in range(1,517):
data = ([
lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6], lists[i][7], lists[i][8], lists[i][9], lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18]
])
print(i)
spamwriter.writerow(data)
def refreshCSV(Building, Floor, Location_x, Location_y, Model):
with open('tempList.csv', 'r', newline='') as csvfile:
reader = csv.reader(csvfile)
RSS = [row for row in reader]
with open('tempList.csv', 'w', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time'])
for row in range(1,517):
RSS[row][2] = Building
RSS[row][3] = Floor
RSS[row][4] = Location_x
RSS[row][5] = Location_y
RSS[row][17] = Model
x = ([
RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18]
])
spamwriter.writerow(x)
with open('xxx.csv', 'a', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
#edition3
for row in range(1,517):
data = ([
RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18]
])
spamwriter.writerow(data)
with open('oneTime.csv', 'a', newline='') as csvfile:
if not os.path.getsize('./oneTime.csv'): #file is empty
spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY', 'GeoZ', 'Model', 'Time'])
#edition4
for i in range(1,517):
data = ([
RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[i][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18]
])
spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)
spamwriter.writerow(data)
@app.route('/', methods=['POST'])
def post():
#isEmpty()
#edition5
isEmpty()
BSSID = request.form['BSSID']
Building = request.form['Building']
Floor = request.form['Floor']
Location_x = request.form['Location_x']
Location_y = request.form['Location_y']
Frequency = request.form['Frequency']
Level = request.form['Level']
AccX = request.form['AccX']
AccY = request.form['AccY']
GeoX = request.form['GeoX']
GeoY = request.form['GeoY']
GeoZ = request.form['GeoZ']
Model = request.form['Model']
Time = request.form['Time']
SSID = request.form['SSID']
AccX = request.form['AccX']
AccY = request.form['AccY']
AccZ = request.form['AccZ']
ORIx = request.form['ORIx']
ORIy = request.form['ORIy']
ORIz = request.form['ORIz']
Done = request.form['Done']
#addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time)
tempList(BSSID, SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time)
#refreshCSV(SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time)
#addAPs(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time)
#addCSV(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time)
#addAPs(list)
#addAllCSV()
#addAPs(Building, Room, Location_x, Location_y, SSID,BSSID, Frequency, Level)
#addCSV(Building, Room, Location_x, Location_y, BSSID, Frequency, Level)
if Done == '1':
refreshCSV(Building, Floor, Location_x, Location_y, Model)
initializeTempList()
print('1')
else:
print('0')
return 'OK.'
if __name__ == "__main__":
#Use local host IP for local server
#Or IPV4 address
#app.run(host='192.168.xxx.xxx', debug=True)
app.run(host='192.168.xxx.xxx', debug=True)
'''
#Add RSS info into database whose name is app.db
def addAPs(list):
for row in range(0,517):
u = models.User(BSSID = list[row][0], SSID = list[row][1], Building = list[row][2], Floor = list[row][3], Location_x = list[row][4], Location_y = list[row][5], Frequency = list[row][6], AccX = list[row][7], AccY = list[row][8], AccZ = list[row][9], ORIx = list[row][10], ORIy = list[row][11], ORIz = list[row][12], Level = list[row][13], GeoX=list[row][14], GeoY=list[row][15], GeoZ=list[row][16])
db.session.add(u)
db.session.commit()
#Show all RSS info from database
def showAPs(num):
ap = models.User.query.get(num)
print(ap.BSSID, ap.SSID, ap.Building, ap.Floor,ap.Location_x, ap.Location_y, ap.Frequency, ap.AccX, ap.AccY, ap.AccZ, ap.ORIx, ap.ORIy, ap.ORIz, ap.Level, ap.GeoX, ap.GeoY, ap.GeoZ)
def deleteDB():
users = models.User.query.all()
for u in users:
db.session.delete(u)
db.session.commit()
'''
|
[
"csv.reader",
"csv.writer",
"os.path.getsize",
"flask.Flask",
"app.models.User.query.all"
] |
[((1817, 1832), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (1822, 1832), False, 'from flask import Flask, request\n'), ((2366, 2389), 'app.models.User.query.all', 'models.User.query.all', ([], {}), '()\n', (2387, 2389), False, 'from app import db, models\n'), ((3430, 3488), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_NONE'}), "(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)\n", (3440, 3488), False, 'import csv\n'), ((3613, 3632), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (3623, 3632), False, 'import csv\n'), ((4338, 4396), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_NONE'}), "(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)\n", (4348, 4396), False, 'import csv\n'), ((5352, 5371), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (5362, 5371), False, 'import csv\n'), ((7108, 7127), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (7118, 7127), False, 'import csv\n'), ((8700, 8719), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (8710, 8719), False, 'import csv\n'), ((2075, 2103), 'os.path.getsize', 'os.path.getsize', (['"""./APs.csv"""'], {}), "('./APs.csv')\n", (2090, 2103), False, 'import os\n'), ((2122, 2180), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_NONE'}), "(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)\n", (2132, 2180), False, 'import csv\n'), ((2600, 2658), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_NONE'}), "(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)\n", (2610, 2658), False, 'import csv\n'), ((2950, 2984), 'os.path.getsize', 'os.path.getsize', (['"""./userinput.csv"""'], {}), "('./userinput.csv')\n", (2965, 2984), False, 'import os\n'), ((3003, 3061), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_NONE'}), "(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)\n", (3013, 3061), False, 'import csv\n'), ((6715, 6743), 'os.path.getsize', 'os.path.getsize', (['"""./xxx.csv"""'], {}), "('./xxx.csv')\n", (6730, 6743), False, 'import os\n'), ((6790, 6848), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_NONE'}), "(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)\n", (6800, 6848), False, 'import csv\n'), ((7897, 7930), 'os.path.getsize', 'os.path.getsize', (['"""./tempList.csv"""'], {}), "('./tempList.csv')\n", (7912, 7930), False, 'import os\n'), ((7969, 8027), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_NONE'}), "(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)\n", (7979, 8027), False, 'import csv\n'), ((8829, 8887), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_NONE'}), "(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)\n", (8839, 8887), False, 'import csv\n'), ((9636, 9694), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_NONE'}), "(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)\n", (9646, 9694), False, 'import csv\n'), ((10126, 10158), 'os.path.getsize', 'os.path.getsize', (['"""./oneTime.csv"""'], {}), "('./oneTime.csv')\n", (10141, 10158), False, 'import os\n'), ((10199, 10257), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_NONE'}), "(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)\n", (10209, 10257), False, 'import csv\n'), ((10768, 10826), 'csv.writer', 'csv.writer', 
(['csvfile'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_NONE'}), "(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)\n", (10778, 10826), False, 'import csv\n'), ((6018, 6076), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_NONE'}), "(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)\n", (6028, 6076), False, 'import csv\n')]
|
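A client-side sketch of the per-AP transmission protocol described in the docstring above, assuming the server address has been filled in and using the requests library; every field read by the /post handler must be present in the form, and Done is '1' only for the last AP of a scan (which triggers refreshCSV()). All values below are illustrative:

import requests

SERVER = "http://192.168.0.10:5000"  # assumed address of the Flask server

scan = [  # one entry per AP detected in a single scan
    {"BSSID": "aa:bb:cc:dd:ee:01", "SSID": "eduroam", "Frequency": "2437", "Level": "-61"},
    {"BSSID": "aa:bb:cc:dd:ee:02", "SSID": "campus", "Frequency": "5180", "Level": "-73"},
]

for i, ap in enumerate(scan):
    form = {
        "BSSID": ap["BSSID"], "SSID": ap["SSID"],
        "Building": "B1", "Floor": "2",
        "Location_x": "3.5", "Location_y": "7.0",
        "Frequency": ap["Frequency"], "Level": ap["Level"],
        "AccX": "0", "AccY": "0", "AccZ": "9.8",
        "ORIx": "0", "ORIy": "0", "ORIz": "0",
        "GeoX": "20.1", "GeoY": "-5.3", "GeoZ": "42.0",
        "Model": "PhoneModel", "Time": "2019-06-10 19:12:24",
        "Done": "1" if i == len(scan) - 1 else "0",
    }
    requests.post(SERVER, data=form)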
from rx.core import Observable
from rx.internal import extensionmethod
@extensionmethod(Observable)
def do_while(self, condition):
"""Repeats source as long as condition holds emulating a do while loop.
Keyword arguments:
condition -- {Function} The condition which determines if the source
will be repeated.
Returns an observable {Observable} sequence which is repeated as long
as the condition holds.
"""
return Observable.concat([self, Observable.while_do(condition, self)])
|
[
"rx.internal.extensionmethod",
"rx.core.Observable.while_do"
] |
[((74, 101), 'rx.internal.extensionmethod', 'extensionmethod', (['Observable'], {}), '(Observable)\n', (89, 101), False, 'from rx.internal import extensionmethod\n'), ((480, 516), 'rx.core.Observable.while_do', 'Observable.while_do', (['condition', 'self'], {}), '(condition, self)\n', (499, 516), False, 'from rx.core import Observable\n')]
|
import os
import networkx as nx
import scipy.sparse as sp
def search_cycle(dir_adj):
dir_adj = nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph)
cycles = list(nx.algorithms.cycles.simple_cycles(dir_adj))
num_cycle = len(cycles)
q = []
for i in range(num_cycle):
q.append(len(cycles[i]))
q = set(q)
return q
dataset_path = './data'
dataset_name = 'cornell'
dataset_path = os.path.join(dataset_path, dataset_name)
dir_adj = sp.load_npz(os.path.join(dataset_path, 'adj.npz'))
dir_adj = dir_adj.tocsc()
q = search_cycle(dir_adj)
print(q)
|
[
"networkx.from_scipy_sparse_matrix",
"networkx.algorithms.cycles.simple_cycles",
"os.path.join"
] |
[((424, 464), 'os.path.join', 'os.path.join', (['dataset_path', 'dataset_name'], {}), '(dataset_path, dataset_name)\n', (436, 464), False, 'import os\n'), ((100, 163), 'networkx.from_scipy_sparse_matrix', 'nx.from_scipy_sparse_matrix', ([], {'A': 'dir_adj', 'create_using': 'nx.DiGraph'}), '(A=dir_adj, create_using=nx.DiGraph)\n', (127, 163), True, 'import networkx as nx\n'), ((487, 524), 'os.path.join', 'os.path.join', (['dataset_path', '"""adj.npz"""'], {}), "(dataset_path, 'adj.npz')\n", (499, 524), False, 'import os\n'), ((182, 225), 'networkx.algorithms.cycles.simple_cycles', 'nx.algorithms.cycles.simple_cycles', (['dir_adj'], {}), '(dir_adj)\n', (216, 225), True, 'import networkx as nx\n')]
|
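search_cycle() above simply collects the distinct cycle lengths of a directed graph; the underlying networkx call behaves like this:

import networkx as nx

G = nx.DiGraph([(0, 1), (1, 2), (2, 0), (1, 0)])  # one 2-cycle and one 3-cycle
cycles = list(nx.algorithms.cycles.simple_cycles(G))
print(cycles)                    # e.g. [[0, 1], [0, 1, 2]] (ordering may vary)
print({len(c) for c in cycles})  # {2, 3}, i.e. the set search_cycle() returns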
from asitiger.status import (
AxisEnabledStatus,
AxisStatus,
JoystickStatus,
LimitStatus,
MotorStatus,
RampingDirection,
RampingStatus,
Status,
status_from_decimal,
statuses_for_rdstat,
)
RDSTAT_RESPONSE = ":A 10N 138"
def test_status_from_decimal_types():
axis = status_from_decimal(210)
assert isinstance(axis.status, Status)
assert isinstance(axis.enabled, AxisEnabledStatus)
assert isinstance(axis.motor, MotorStatus)
assert isinstance(axis.joystick, JoystickStatus)
assert isinstance(axis.ramping, RampingStatus)
assert isinstance(axis.ramping_direction, RampingDirection)
assert isinstance(axis.upper_limit, LimitStatus)
assert isinstance(axis.lower_limit, LimitStatus)
def test_status_from_decimal_values():
axis = status_from_decimal(210)
assert axis.status == Status.IDLE
assert axis.enabled == AxisEnabledStatus.ENABLED
assert axis.motor == MotorStatus.INACTIVE
assert axis.joystick == JoystickStatus.DISABLED
assert axis.ramping == RampingStatus.RAMPING
assert axis.ramping_direction == RampingDirection.DOWN
assert axis.upper_limit == LimitStatus.CLOSED
assert axis.lower_limit == LimitStatus.CLOSED
def test_statuses_for_rdstat_split():
axes = statuses_for_rdstat(RDSTAT_RESPONSE)
assert len(axes) == 3
def test_statuses_for_rdstat_types():
axes = statuses_for_rdstat(RDSTAT_RESPONSE)
assert isinstance(axes[0], AxisStatus)
assert isinstance(axes[1], Status)
assert isinstance(axes[2], AxisStatus)
def test_from_flag_str():
assert Status.from_flag("N") == Status.IDLE
assert Status.from_flag("B") == Status.BUSY
|
[
"asitiger.status.Status.from_flag",
"asitiger.status.status_from_decimal",
"asitiger.status.statuses_for_rdstat"
] |
[((312, 336), 'asitiger.status.status_from_decimal', 'status_from_decimal', (['(210)'], {}), '(210)\n', (331, 336), False, 'from asitiger.status import AxisEnabledStatus, AxisStatus, JoystickStatus, LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat\n'), ((809, 833), 'asitiger.status.status_from_decimal', 'status_from_decimal', (['(210)'], {}), '(210)\n', (828, 833), False, 'from asitiger.status import AxisEnabledStatus, AxisStatus, JoystickStatus, LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat\n'), ((1283, 1319), 'asitiger.status.statuses_for_rdstat', 'statuses_for_rdstat', (['RDSTAT_RESPONSE'], {}), '(RDSTAT_RESPONSE)\n', (1302, 1319), False, 'from asitiger.status import AxisEnabledStatus, AxisStatus, JoystickStatus, LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat\n'), ((1398, 1434), 'asitiger.status.statuses_for_rdstat', 'statuses_for_rdstat', (['RDSTAT_RESPONSE'], {}), '(RDSTAT_RESPONSE)\n', (1417, 1434), False, 'from asitiger.status import AxisEnabledStatus, AxisStatus, JoystickStatus, LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat\n'), ((1600, 1621), 'asitiger.status.Status.from_flag', 'Status.from_flag', (['"""N"""'], {}), "('N')\n", (1616, 1621), False, 'from asitiger.status import AxisEnabledStatus, AxisStatus, JoystickStatus, LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat\n'), ((1648, 1669), 'asitiger.status.Status.from_flag', 'Status.from_flag', (['"""B"""'], {}), "('B')\n", (1664, 1669), False, 'from asitiger.status import AxisEnabledStatus, AxisStatus, JoystickStatus, LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat\n')]
|
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Film, ExtraInfo, Recenzja, Aktor
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ['id', 'username', 'email','password']
extra_kwargs = {'password': {'required': True, 'write_only': True}}
def create(self, validated_data):
user = User.objects.create_user(**validated_data)
return user
class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ExtraInfo
fields = ['czas_trwania','rodzaj']
class RecenzjaSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Recenzja
fields = ('id','opis','gwiazdki','film')
    def update(self, instance, validated_data):
        # DRF passes the existing instance only to update(); create() receives validated_data alone
        instance.opis = validated_data.get('opis', instance.opis)
        instance.gwiazdki = validated_data.get('gwiazdki', instance.gwiazdki)
        instance.save()
        return instance
class FilmSerializer(serializers.HyperlinkedModelSerializer):
extra_info = ExtraInfoSerializer(many=False)
recenzje = RecenzjaSerializer(many=True)
class Meta:
model = Film
fields = ['id','tytul', 'opis', 'po_premierze', 'premiera','rok','imdb_rating','extra_info','recenzje']
read_only_fields = ('extra_info','recenzje')
class AktorSerializer(serializers.HyperlinkedModelSerializer):
filmy = FilmSerializer(many=True, read_only=True)
class Meta:
model = Aktor
fields = ['id','imie','nazwisko','filmy']
# def create(self, validated_data):
# filmy = validated_data['filmy']
# del validated_data['filmy']
# aktor = Aktor.objects.create(**validated_data)
# for film in filmy:
# f = Film.objects.create(**film)
# aktor.filmy.add(f)
# aktor.save()
# return aktor
|
[
"django.contrib.auth.models.User.objects.create_user"
] |
[((423, 465), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {}), '(**validated_data)\n', (447, 465), False, 'from django.contrib.auth.models import User\n')]
|
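A sketch of how the user serializer above is typically driven from a view or the Django shell; it requires a configured Django project and database, and the field values are illustrative:

serializer = UserSerializer(data={
    "username": "jan",
    "email": "jan@example.com",
    "password": "s3cret-password",
})
serializer.is_valid(raise_exception=True)
user = serializer.save()  # dispatches to UserSerializer.create(), i.e. User.objects.create_user()
print(user.id, user.username)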
"""
Module containing the definitions and methods to compute
a variety of indices used to study ENSO
"""
from typing import List, Optional, Tuple
import numpy as np
import xarray as xr
from eofs.xarray import Eof
from .core import compute_anomaly, compute_climatology, xconvolve
class ECindex:
"""
Computes the E and C index according to Takahashi
"""
def __init__(
self,
sst_data: xr.DataArray,
isanomaly: bool = False,
climatology: Optional[xr.DataArray] = None,
base_period: Tuple[str, str] = ("1979-01-01", "2009-12-30"),
corr_factor: Optional[List[int]] = None,
smooth_kernel: List[int] = [1, 2, 1],
):
self.sst_data = sst_data
self.base_period = base_period
if climatology is None:
climatology = compute_climatology(self.sst_data, base_period)
self.climatology = climatology
if not isanomaly:
self.sst_data = compute_anomaly(self.sst_data, self.climatology)
self._compute_pcs()
self.smooth_kernel = smooth_kernel
if corr_factor is None:
self._auto_corr_factor()
else:
self.corr_factor = corr_factor
def _compute_pcs(self) -> None:
"""
Compute the principal components
"""
_subset = self.sst_data.sortby("lat").sel(lat=slice(-10, 10))
coslat = np.cos(np.deg2rad(_subset.lat.data))
wgts = np.sqrt(coslat)[..., np.newaxis]
self.solver = Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts)
clim_std = self.solver.eigenvalues(neigs=2) ** (1 / 2)
self.anom_pcs = (
self.solver.projectField(
_subset.drop("month"),
neofs=2,
)
/ clim_std
)
self.anom_smooth_pcs = None
def _corrected_pcs(self) -> xr.DataArray:
"""
Return the pcs with the correction factor applied
"""
return self.anom_pcs * self.corr_factor
def _auto_corr_factor(self) -> None:
"""
Automatically determine the correction factor by estimating
the sign of known events for the E and C index.
"""
_eofs = self.solver.eofs(neofs=2)
_subset = dict(lat=slice(-2, 2), lon=slice(210, 250))
new_corr_factor = np.zeros(2)
new_corr_factor[0] = 1 if _eofs.sel(mode=0, **_subset).mean() > 0 else -1
new_corr_factor[1] = 1 if _eofs.sel(mode=1, **_subset).mean() < 0 else -1
self.corr_factor = new_corr_factor
def _compute_index(self, smooth: bool = False) -> xr.Dataset:
"""
Compute the E and C index
"""
_pcs = self._corrected_pcs()
if smooth is True:
_pcs = xconvolve(_pcs, self._smooth_kernel, dim="time")
pc1 = _pcs.sel(mode=0)
pc2 = _pcs.sel(mode=1)
eindex = (pc1 - pc2) / (2 ** (1 / 2))
eindex.name = "E_index"
cindex = (pc1 + pc2) / (2 ** (1 / 2))
cindex.name = "C_index"
return xr.merge([eindex, cindex])
@property
def corr_factor(self) -> xr.DataArray:
"""
Return the correction factor applied to the first two pcs
"""
return self._corr_factor
@corr_factor.setter
def corr_factor(self, corr_factor: List[int]) -> None:
"""
Set a new correction factor to be applied to the first two pcs
"""
self._corr_factor = xr.DataArray(
np.array(corr_factor),
coords=[("mode", [0, 1])],
)
@property
def smooth_kernel(self) -> xr.DataArray:
"""
Return the smooth kernel used in the first two pcs
"""
return self._smooth_kernel
@smooth_kernel.setter
def smooth_kernel(self, smooth_kernel: List) -> None:
"""
Set a new smooth kernel to be applied to the first two pcs
"""
kernel = np.array(smooth_kernel)
self._smooth_kernel = xr.DataArray(kernel / kernel.sum(), dims=["time"])
@property
def pcs(self) -> xr.DataArray:
"""
Return the first two principal components used
in the computation of the E and C index
"""
return self._corrected_pcs()
@property
def pcs_smooth(self) -> xr.DataArray:
"""
Return the first two principal components smoothed
with the specified smooth_kernel
"""
if self.anom_smooth_pcs is None:
self.anom_smooth_pcs = xconvolve(
self._corrected_pcs(),
self._smooth_kernel,
dim="time",
)
return self.anom_smooth_pcs
@property
def ecindex(self) -> xr.Dataset:
"""
Return the first two principal components rotated,
also known as the E and C index
"""
return self._compute_index()
@property
def ecindex_smooth(self) -> xr.Dataset:
"""
Return the first two principal components smoothed and
rotated, also known as the E and C index
"""
return self._compute_index(smooth=True)
def enzones(data: xr.DataArray, zone: str = "34") -> xr.DataArray:
"""
Computes the mean from the selected El Niño zone, also
    known as the El Niño Index, for each of the zones.
"""
zones = {
"12": {"lat": slice(-10, 0), "lon": slice(270, 280)},
"3": {"lat": slice(-5, 5), "lon": slice(210, 270)},
"34": {"lat": slice(-5, 5), "lon": slice(190, 240)},
"4": {"lat": slice(-5, 5), "lon": slice(160, 210)},
}
return data.sel(**zones[zone]).mean(dim=["lat", "lon"])
|
[
"numpy.deg2rad",
"numpy.zeros",
"xarray.merge",
"numpy.array",
"numpy.sqrt"
] |
[((2338, 2349), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (2346, 2349), True, 'import numpy as np\n'), ((3047, 3073), 'xarray.merge', 'xr.merge', (['[eindex, cindex]'], {}), '([eindex, cindex])\n', (3055, 3073), True, 'import xarray as xr\n'), ((3931, 3954), 'numpy.array', 'np.array', (['smooth_kernel'], {}), '(smooth_kernel)\n', (3939, 3954), True, 'import numpy as np\n'), ((1404, 1432), 'numpy.deg2rad', 'np.deg2rad', (['_subset.lat.data'], {}), '(_subset.lat.data)\n', (1414, 1432), True, 'import numpy as np\n'), ((1449, 1464), 'numpy.sqrt', 'np.sqrt', (['coslat'], {}), '(coslat)\n', (1456, 1464), True, 'import numpy as np\n'), ((3488, 3509), 'numpy.array', 'np.array', (['corr_factor'], {}), '(corr_factor)\n', (3496, 3509), True, 'import numpy as np\n')]
|
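A usage sketch for the module above, with ECindex and enzones imported from this module, assuming a monthly SST field stored as an xarray DataArray with time, lat and lon coordinates and longitudes in the 0-360 convention (the zone slices and the correction-factor box use that range); the file name is hypothetical:

import xarray as xr

sst = xr.open_dataarray("ersst_v5_monthly.nc")  # hypothetical input file

ec = ECindex(sst, base_period=("1979-01-01", "2009-12-30"))
indices = ec.ecindex_smooth              # xr.Dataset with E_index and C_index
nino34 = enzones(ec.sst_data, zone="34")  # Niño 3.4 index from the same anomalies

print(indices)
print(nino34)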
# -*- coding: utf-8 -*-
"""
Copyright (c) 2020, University of Southampton
All rights reserved.
Licensed under the BSD 3-Clause License.
See LICENSE.md file in the project root for full license information.
"""
import pynmea2
from auv_nav.sensors import Category, Usbl
from auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone
from oplab import get_file_list, get_raw_folder
def parse_NOC_nmea(mission, vehicle, category, ftype, outpath):
# parser meta data
sensor_string = "autosub"
category = category
output_format = ftype
if category == Category.USBL:
filepath = mission.usbl.filepath
timezone = mission.usbl.timezone
beacon_id = mission.usbl.label
timeoffset = mission.usbl.timeoffset
timezone_offset = read_timezone(timezone)
latitude_reference = mission.origin.latitude
longitude_reference = mission.origin.longitude
usbl = Usbl(
mission.usbl.std_factor,
mission.usbl.std_offset,
latitude_reference,
longitude_reference,
)
usbl.sensor_string = sensor_string
path = get_raw_folder(outpath / ".." / filepath)
file_list = get_file_list(path)
data_list = []
for file in file_list:
with file.open("r", errors="ignore") as nmea_file:
for line in nmea_file.readlines():
parts = line.split("\t")
if len(parts) < 2:
continue
msg = pynmea2.parse(parts[1])
if int(msg.ref_station_id) != beacon_id:
continue
date_str = line.split(" ")[0]
hour_str = str(parts[1]).split(",")[1]
yyyy = int(date_str[6:10])
mm = int(date_str[3:5])
dd = int(date_str[0:2])
hour = int(hour_str[0:2])
mins = int(hour_str[2:4])
secs = int(hour_str[4:6])
msec = int(hour_str[7:10])
epoch_time = date_time_to_epoch(
yyyy, mm, dd, hour, mins, secs, timezone_offset
)
epoch_timestamp = epoch_time + msec / 1000 + timeoffset
msg.timestamp = epoch_timestamp
usbl.from_nmea(msg)
data = usbl.export(output_format)
data_list.append(data)
return data_list
|
[
"oplab.get_raw_folder",
"pynmea2.parse",
"auv_nav.tools.time_conversions.date_time_to_epoch",
"auv_nav.tools.time_conversions.read_timezone",
"oplab.get_file_list",
"auv_nav.sensors.Usbl"
] |
[((791, 814), 'auv_nav.tools.time_conversions.read_timezone', 'read_timezone', (['timezone'], {}), '(timezone)\n', (804, 814), False, 'from auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone\n'), ((939, 1038), 'auv_nav.sensors.Usbl', 'Usbl', (['mission.usbl.std_factor', 'mission.usbl.std_offset', 'latitude_reference', 'longitude_reference'], {}), '(mission.usbl.std_factor, mission.usbl.std_offset, latitude_reference,\n longitude_reference)\n', (943, 1038), False, 'from auv_nav.sensors import Category, Usbl\n'), ((1153, 1194), 'oplab.get_raw_folder', 'get_raw_folder', (["(outpath / '..' / filepath)"], {}), "(outpath / '..' / filepath)\n", (1167, 1194), False, 'from oplab import get_file_list, get_raw_folder\n'), ((1216, 1235), 'oplab.get_file_list', 'get_file_list', (['path'], {}), '(path)\n', (1229, 1235), False, 'from oplab import get_file_list, get_raw_folder\n'), ((1548, 1571), 'pynmea2.parse', 'pynmea2.parse', (['parts[1]'], {}), '(parts[1])\n', (1561, 1571), False, 'import pynmea2\n'), ((2131, 2198), 'auv_nav.tools.time_conversions.date_time_to_epoch', 'date_time_to_epoch', (['yyyy', 'mm', 'dd', 'hour', 'mins', 'secs', 'timezone_offset'], {}), '(yyyy, mm, dd, hour, mins, secs, timezone_offset)\n', (2149, 2198), False, 'from auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone\n')]
|
import unittest
from hupun.page.hupun_goods.goods_information import GoodsInformation
from hupun.page.hupun_goods.goods_information_sku import GoodsInformationsku
from hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq
from hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery
from hupun.page.in_sale_store_table.table_export import StatementExport
from hupun.page.order import Order
from hupun.page.order_goods import OrderGoods
from hupun.page.purchase_order import PurchaseOrder
from hupun.page.purchase_order_goods import PurchaseOrderGoods
from hupun.page.purchase_store_order import PurchaseStoreOrder
from hupun.page.purchase_store_order_goods import PurchaseStoreOrderGoods
from hupun_slow_crawl.model.es.store_house import StoreHouse
from hupun.page.sync_module.choose_purchase_bill import ChoosePurBill
from hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku
from hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock
from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken
from hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock
from pyspider.helper.date import Date
class Test(unittest.TestCase):
def _test_order(self):
"""
        Test section for orders
:return:
"""
Order(True) \
.set_start_time(Date.now().plus_days(-1).to_day_start().format()) \
.set_end_time(Date.now().plus_days(-1).to_day_end().format()).test()
Order(True) \
.set_start_time(Date.now().plus_days(-120).to_day_start().format()) \
.set_end_time(Date.now().plus_days(-120).to_day_end().format()).test()
def _test_order_goods(self):
"""
        Test section for order goods details
:return:
"""
assert OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD',
'TB328906912208400576', '2019-01-17T02:49:20Z').test()
def _test_purchase_order(self):
"""
        Test section for purchase orders
:return:
"""
assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test()
def _test_purchase_order_goods(self):
"""
        Test section for purchase order detail view
:return:
"""
assert PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test()
def _test_purchase_store_order(self):
"""
        Test section for purchase stock-in orders
:return:
"""
assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test()
def _test_purchase_store_order_goods(self):
"""
        Test section for purchase stock-in order detail data
:return:
"""
assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test()
def _test_statement_export(self):
"""
        Unit test for inventory report export
:return:
"""
storage_ids = StoreHouse().get_storage_ids()
storage_uids = ','.join(storage_ids) + ','
StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time(
Date.now().plus_days(-1).format()).test()
def _test_statement_task_query(self):
"""
        Unit test for querying inventory report export records
:return:
"""
compare_date = Date.now()
ExportTaskQuery(compare_date, 1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time(
Date.now().format()).set_delay_seconds(1).test()
def _test_statement_file_download(self):
"""
        Unit test for inventory report download
:return:
"""
data = {
"task_id": 3686347,
"oper_uid": "9459514BF68F3C0A84343938A2CD7D75",
"status": 2,
"export_type": 7,
"exportCaption": "进销存报表",
"create_time": "2019-06-10T19:12:24Z",
"download_time": "2019-06-11T12:02:50Z",
"count": 1462,
"download_times": 4,
"oper_nick": None,
"file_path": "export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx",
'$dataType': 'dtExportTask',
'$entityId': '0',
}
ExportFileDownloadReq(data).test()
def _test_choose_purchase_bill(self):
"""
        Unit test for the purchase order details in the 'choose purchase order' part of a purchase stock-in order
:return:
"""
bill_code = 'CD201905300017'
storage_uid = 'FBA807A72474376E8CFBBE9848F271B2'
storage_name = '研发测试仓'
supplier_uid = 'EDF923722E993179829C929468693160'
supplier_name = '测试777777'
ChoosePurBill(bill_code, storage_uid, storage_name, supplier_uid, supplier_name) \
.set_start_time(Date.now().plus_days(-60).format()) \
.set_end_time(Date.now().format()) \
.test()
def _test_choose_purchase_bill_sku(self):
"""
        Unit test for the purchase order goods details in the 'choose purchase order' part of a purchase stock-in order
:return:
"""
bill_uid = '4E914B16058C3D02A42CE6479666A913'
ChoosePurBillSku(bill_uid).test()
def _test_submit_purchase_stock(self):
"""
        Unit test for submitting stock changes of a purchase stock-in order
:return:
"""
data = [
{
"goodsUid": "4AFB3148514C3FA99F332B05AAEC0A92",
"goodsName": "测试--想念",
"specUid": "1000577C001E3D14A8041BC5FD4CCDCE",
"pic1": "http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png",
"specCode": "1919N00002W404",
"specName": None,
"unit_size": 1,
"pchs_unit": None,
"unit": None,
"shouldNums": 87,
"nums": 1,
"discount_rate": 100,
"price": 188,
"pivtLast": 188,
"primePrice": 188,
"base_price": 188,
"tax_rate": 0,
"pchs_bill_uid": "483FAB78DF98341C8A7E0F16577E4F21",
"pchs_bill_code": "CD201905300017",
"appointBillType": 0,
"pchs_detail_uid": "9DC3D695B16A3160BAEDD6E249B01C25",
"pchs_detail_index": "10000",
"remark": None,
"openSN": 0,
"expiration": None,
"total_money": 188,
"pay_type": None,
"pchs_advance_balance": 18128,
"stock_advance_balance": None,
"settle_advance_balance": None,
"tax": 0,
"net_price": 188,
"sn": None,
"$dataType": "v:purchase.stock$dtStockBillDetail"
},
{
"$dataType": "v:purchase.stock$dtStockBillDetail"
}
]
SubmitPurBillStock(data).test()
def _test_confirm_purchase_bill_sku(self):
"""
        Unit test for the purchase order goods details in the 'choose purchase order' part of a purchase stock-in order
:return:
"""
token = PurchaseStockToken().get_result()
ConfirmPurBillStock(token).test()
def _test_get_purchase_stock_token(self):
"""
        Unit test for getting the token in the 'choose purchase order' part of a purchase stock-in order
:return:
"""
PurchaseStockToken().test()
def _test_get_goods_information(self):
"""
        Unit test for goods information
:return:
"""
GoodsInformation().test()
def test_get_goods_information_sku(self):
"""
        Unit test for goods information SKU
:return:
"""
goods_uid = 'C59933D09A893FDBB2FE8BB9BDD5E726'
GoodsInformationsku(goods_uid).test()
if __name__ == '__main__':
unittest.main()
|
[
"hupun.page.order_goods.OrderGoods",
"hupun.page.purchase_order.PurchaseOrder",
"hupun.page.sync_module.choose_purchase_bill_sku.ChoosePurBillSku",
"pyspider.helper.date.Date.now",
"hupun_slow_crawl.model.es.store_house.StoreHouse",
"hupun.page.sync_module.confirm_purchase_stock.ConfirmPurBillStock",
"hupun.page.purchase_store_order_goods.PurchaseStoreOrderGoods",
"unittest.main",
"hupun.page.hupun_goods.goods_information.GoodsInformation",
"hupun.page.purchase_order_goods.PurchaseOrderGoods",
"hupun.page.sync_module.get_purchase_stock_token.PurchaseStockToken",
"hupun.page.sync_module.choose_purchase_bill.ChoosePurBill",
"hupun.page.sync_module.submit_purchase_stock.SubmitPurBillStock",
"hupun.page.purchase_store_order.PurchaseStoreOrder",
"hupun.page.in_sale_store_table.table_export.StatementExport",
"hupun.page.in_sale_store_table.export_file_download_req.ExportFileDownloadReq",
"hupun.page.hupun_goods.goods_information_sku.GoodsInformationsku",
"hupun.page.in_sale_store_table.export_task_query.ExportTaskQuery",
"hupun.page.order.Order"
] |
[((7478, 7493), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7491, 7493), False, 'import unittest\n'), ((3246, 3256), 'pyspider.helper.date.Date.now', 'Date.now', ([], {}), '()\n', (3254, 3256), False, 'from pyspider.helper.date import Date\n'), ((1814, 1952), 'hupun.page.order_goods.OrderGoods', 'OrderGoods', (['"""A4380F4D6D153825AB891D632C341A45"""', '"""D1E338D6015630E3AFF2440F3CBBAFAD"""', '"""TB328906912208400576"""', '"""2019-01-17T02:49:20Z"""'], {}), "('A4380F4D6D153825AB891D632C341A45',\n 'D1E338D6015630E3AFF2440F3CBBAFAD', 'TB328906912208400576',\n '2019-01-17T02:49:20Z')\n", (1824, 1952), False, 'from hupun.page.order_goods import OrderGoods\n'), ((2289, 2343), 'hupun.page.purchase_order_goods.PurchaseOrderGoods', 'PurchaseOrderGoods', (['"""189C28D94B3D390191F1DD1723F9544E"""'], {}), "('189C28D94B3D390191F1DD1723F9544E')\n", (2307, 2343), False, 'from hupun.page.purchase_order_goods import PurchaseOrderGoods\n'), ((2684, 2743), 'hupun.page.purchase_store_order_goods.PurchaseStoreOrderGoods', 'PurchaseStoreOrderGoods', (['"""35414A5328FD3F66B3279E1ACC1E5E47"""'], {}), "('35414A5328FD3F66B3279E1ACC1E5E47')\n", (2707, 2743), False, 'from hupun.page.purchase_store_order_goods import PurchaseStoreOrderGoods\n'), ((2875, 2887), 'hupun_slow_crawl.model.es.store_house.StoreHouse', 'StoreHouse', ([], {}), '()\n', (2885, 2887), False, 'from hupun_slow_crawl.model.es.store_house import StoreHouse\n'), ((4131, 4158), 'hupun.page.in_sale_store_table.export_file_download_req.ExportFileDownloadReq', 'ExportFileDownloadReq', (['data'], {}), '(data)\n', (4152, 4158), False, 'from hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq\n'), ((4921, 4947), 'hupun.page.sync_module.choose_purchase_bill_sku.ChoosePurBillSku', 'ChoosePurBillSku', (['bill_uid'], {}), '(bill_uid)\n', (4937, 4947), False, 'from hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku\n'), ((6676, 6700), 'hupun.page.sync_module.submit_purchase_stock.SubmitPurBillStock', 'SubmitPurBillStock', (['data'], {}), '(data)\n', (6694, 6700), False, 'from hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock\n'), ((6853, 6873), 'hupun.page.sync_module.get_purchase_stock_token.PurchaseStockToken', 'PurchaseStockToken', ([], {}), '()\n', (6871, 6873), False, 'from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken\n'), ((6895, 6921), 'hupun.page.sync_module.confirm_purchase_stock.ConfirmPurBillStock', 'ConfirmPurBillStock', (['token'], {}), '(token)\n', (6914, 6921), False, 'from hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock\n'), ((7068, 7088), 'hupun.page.sync_module.get_purchase_stock_token.PurchaseStockToken', 'PurchaseStockToken', ([], {}), '()\n', (7086, 7088), False, 'from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken\n'), ((7208, 7226), 'hupun.page.hupun_goods.goods_information.GoodsInformation', 'GoodsInformation', ([], {}), '()\n', (7224, 7226), False, 'from hupun.page.hupun_goods.goods_information import GoodsInformation\n'), ((7407, 7437), 'hupun.page.hupun_goods.goods_information_sku.GoodsInformationsku', 'GoodsInformationsku', (['goods_uid'], {}), '(goods_uid)\n', (7426, 7437), False, 'from hupun.page.hupun_goods.goods_information_sku import GoodsInformationsku\n'), ((2090, 2109), 'hupun.page.purchase_order.PurchaseOrder', 'PurchaseOrder', (['(True)'], {}), '(True)\n', (2103, 2109), False, 'from hupun.page.purchase_order import PurchaseOrder\n'), ((2470, 2494), 
'hupun.page.purchase_store_order.PurchaseStoreOrder', 'PurchaseStoreOrder', (['(True)'], {}), '(True)\n', (2488, 2494), False, 'from hupun.page.purchase_store_order import PurchaseStoreOrder\n'), ((4688, 4698), 'pyspider.helper.date.Date.now', 'Date.now', ([], {}), '()\n', (4696, 4698), False, 'from pyspider.helper.date import Date\n'), ((1341, 1352), 'hupun.page.order.Order', 'Order', (['(True)'], {}), '(True)\n', (1346, 1352), False, 'from hupun.page.order import Order\n'), ((1524, 1535), 'hupun.page.order.Order', 'Order', (['(True)'], {}), '(True)\n', (1529, 1535), False, 'from hupun.page.order import Order\n'), ((2965, 2994), 'hupun.page.in_sale_store_table.table_export.StatementExport', 'StatementExport', (['storage_uids'], {}), '(storage_uids)\n', (2980, 2994), False, 'from hupun.page.in_sale_store_table.table_export import StatementExport\n'), ((4513, 4598), 'hupun.page.sync_module.choose_purchase_bill.ChoosePurBill', 'ChoosePurBill', (['bill_code', 'storage_uid', 'storage_name', 'supplier_uid', 'supplier_name'], {}), '(bill_code, storage_uid, storage_name, supplier_uid, supplier_name\n )\n', (4526, 4598), False, 'from hupun.page.sync_module.choose_purchase_bill import ChoosePurBill\n'), ((2125, 2135), 'pyspider.helper.date.Date.now', 'Date.now', ([], {}), '()\n', (2133, 2135), False, 'from pyspider.helper.date import Date\n'), ((2510, 2520), 'pyspider.helper.date.Date.now', 'Date.now', ([], {}), '()\n', (2518, 2520), False, 'from pyspider.helper.date import Date\n'), ((3071, 3081), 'pyspider.helper.date.Date.now', 'Date.now', ([], {}), '()\n', (3079, 3081), False, 'from pyspider.helper.date import Date\n'), ((3377, 3387), 'pyspider.helper.date.Date.now', 'Date.now', ([], {}), '()\n', (3385, 3387), False, 'from pyspider.helper.date import Date\n'), ((3265, 3300), 'hupun.page.in_sale_store_table.export_task_query.ExportTaskQuery', 'ExportTaskQuery', (['compare_date', '(1462)'], {}), '(compare_date, 1462)\n', (3280, 3300), False, 'from hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery\n'), ((1461, 1471), 'pyspider.helper.date.Date.now', 'Date.now', ([], {}), '()\n', (1469, 1471), False, 'from pyspider.helper.date import Date\n'), ((1646, 1656), 'pyspider.helper.date.Date.now', 'Date.now', ([], {}), '()\n', (1654, 1656), False, 'from pyspider.helper.date import Date\n'), ((3010, 3020), 'pyspider.helper.date.Date.now', 'Date.now', ([], {}), '()\n', (3018, 3020), False, 'from pyspider.helper.date import Date\n'), ((4624, 4634), 'pyspider.helper.date.Date.now', 'Date.now', ([], {}), '()\n', (4632, 4634), False, 'from pyspider.helper.date import Date\n'), ((1383, 1393), 'pyspider.helper.date.Date.now', 'Date.now', ([], {}), '()\n', (1391, 1393), False, 'from pyspider.helper.date import Date\n'), ((1566, 1576), 'pyspider.helper.date.Date.now', 'Date.now', ([], {}), '()\n', (1574, 1576), False, 'from pyspider.helper.date import Date\n'), ((3316, 3326), 'pyspider.helper.date.Date.now', 'Date.now', ([], {}), '()\n', (3324, 3326), False, 'from pyspider.helper.date import Date\n')]
|
# Generated by Django 3.0.8 on 2020-07-29 00:27
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('log_api', '0008_auto_20200728_2126'),
]
operations = [
migrations.AlterField(
model_name='execution',
name='archived',
field=models.BooleanField(default=False, verbose_name='Archived'),
),
]
|
[
"django.db.models.BooleanField"
] |
[((341, 400), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Archived"""'}), "(default=False, verbose_name='Archived')\n", (360, 400), False, 'from django.db import migrations, models\n')]
|
import asyncio, random
import nacre
class EightBallSession:
answers = [
"It is certain",
"It is decidedly so",
"Without a doubt",
"Yes definitely",
"You may rely on it",
"As I see it, yes",
"Most likely",
"Outlook good",
"Yes",
"Signs point to yes",
"Reply hazy try again",
"Ask again later",
"Better not tell you now",
"Cannot predict now",
"Concentrate and ask again",
"Don't count on it",
"My reply is no",
"My sources say no",
"Outlook not so good",
"Very doubtful"
]
def __init__(self, pearl, config):
self.pearl = pearl
self.hangouts = self.pearl.hangouts
self.config = config
self.buildHandle()
def build(self):
pass
def buildHandle(self):
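    # Only react to messages of the form "<format> 8ball <question>", where <format> is the bot's configured command prefix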
messageFilter = nacre.handle.newMessageFilter('^{}\s+8ball(\s.*)?$'.format(self.pearl.config['format']))
async def handle(update):
if nacre.handle.isMessageEvent(update):
event = update.event_notification.event
if messageFilter(event):
await self.respond(event)
self.pearl.updateEvent.addListener(handle)
async def respond(self, event):
message = random.choice(self.answers)
conversation = self.hangouts.getConversation(event=event)
await self.hangouts.send(message, conversation)
def load(pearl, config):
return EightBallSession(pearl, config)
|
[
"nacre.handle.isMessageEvent",
"random.choice"
] |
[((1081, 1108), 'random.choice', 'random.choice', (['self.answers'], {}), '(self.answers)\n', (1094, 1108), False, 'import asyncio, random\n'), ((849, 884), 'nacre.handle.isMessageEvent', 'nacre.handle.isMessageEvent', (['update'], {}), '(update)\n', (876, 884), False, 'import nacre\n')]
|
# Copyright (c) 2013, GreyCube Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import cint
import shutil, os
from frappe.modules import scrub, get_module_path
def copy_report(
module="NPro",
src="Interviews",
tgt="Interview Results",
):
"""usage: copy_report("NPro", "src", "tgt")"""
doc = frappe.copy_doc(frappe.get_doc("Report", src))
doc.report_name = tgt
doc.insert()
frappe.db.commit()
    print('Copying "' + src + '" to "' + tgt + '"')
module_path = get_module_path(module)
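    # Both the .py and .js files of a report live under <module>/report/<scrubbed report name>/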
src_folder = module_path and os.path.join(module_path, "report", scrub(src))
src_path = os.path.join(src_folder, scrub(src) + ".py")
src_script_path = src_folder and os.path.join(src_folder, scrub(src) + ".js")
tgt_folder = module_path and os.path.join(module_path, "report", scrub(tgt))
tgt_path = os.path.join(tgt_folder, scrub(tgt) + ".py")
tgt_script_path = tgt_folder and os.path.join(tgt_folder, scrub(tgt) + ".js")
shutil.copyfile(src_path, tgt_path)
shutil.copyfile(src_script_path, tgt_script_path)
print(src_path, tgt_path)
print(src_script_path, tgt_script_path)
|
[
"frappe.modules.scrub",
"frappe.get_doc",
"frappe.db.commit",
"shutil.copyfile",
"frappe.modules.get_module_path"
] |
[((533, 551), 'frappe.db.commit', 'frappe.db.commit', ([], {}), '()\n', (549, 551), False, 'import frappe\n'), ((622, 645), 'frappe.modules.get_module_path', 'get_module_path', (['module'], {}), '(module)\n', (637, 645), False, 'from frappe.modules import scrub, get_module_path\n'), ((1098, 1133), 'shutil.copyfile', 'shutil.copyfile', (['src_path', 'tgt_path'], {}), '(src_path, tgt_path)\n', (1113, 1133), False, 'import shutil, os\n'), ((1138, 1187), 'shutil.copyfile', 'shutil.copyfile', (['src_script_path', 'tgt_script_path'], {}), '(src_script_path, tgt_script_path)\n', (1153, 1187), False, 'import shutil, os\n'), ((455, 484), 'frappe.get_doc', 'frappe.get_doc', (['"""Report"""', 'src'], {}), "('Report', src)\n", (469, 484), False, 'import frappe\n'), ((715, 725), 'frappe.modules.scrub', 'scrub', (['src'], {}), '(src)\n', (720, 725), False, 'from frappe.modules import scrub, get_module_path\n'), ((767, 777), 'frappe.modules.scrub', 'scrub', (['src'], {}), '(src)\n', (772, 777), False, 'from frappe.modules import scrub, get_module_path\n'), ((939, 949), 'frappe.modules.scrub', 'scrub', (['tgt'], {}), '(tgt)\n', (944, 949), False, 'from frappe.modules import scrub, get_module_path\n'), ((991, 1001), 'frappe.modules.scrub', 'scrub', (['tgt'], {}), '(tgt)\n', (996, 1001), False, 'from frappe.modules import scrub, get_module_path\n'), ((849, 859), 'frappe.modules.scrub', 'scrub', (['src'], {}), '(src)\n', (854, 859), False, 'from frappe.modules import scrub, get_module_path\n'), ((1073, 1083), 'frappe.modules.scrub', 'scrub', (['tgt'], {}), '(tgt)\n', (1078, 1083), False, 'from frappe.modules import scrub, get_module_path\n')]
|
from mazikeen.MakedirsBlock import MakedirsBlock
from mazikeen.GeneratorException import GeneratorException
def generateMakedirs(data):
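    # A 'makedirs' block is specified as a single string (presumably the directory path to create)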
if not isinstance(data, str):
raise GeneratorException("'makedirs' block not recognized")
return MakedirsBlock(data)
|
[
"mazikeen.GeneratorException.GeneratorException",
"mazikeen.MakedirsBlock.MakedirsBlock"
] |
[((250, 269), 'mazikeen.MakedirsBlock.MakedirsBlock', 'MakedirsBlock', (['data'], {}), '(data)\n', (263, 269), False, 'from mazikeen.MakedirsBlock import MakedirsBlock\n'), ((185, 238), 'mazikeen.GeneratorException.GeneratorException', 'GeneratorException', (['"""\'makedirs\' block not recognized"""'], {}), '("\'makedirs\' block not recognized")\n', (203, 238), False, 'from mazikeen.GeneratorException import GeneratorException\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('idc', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Firewall',
fields=[
('id', models.AutoField(serialize=False, primary_key=True)),
('name', models.CharField(max_length=128, verbose_name='Firewall Name')),
('firewall_id', models.CharField(max_length=128, null=True, verbose_name='OS Firewall UUID', blank=True)),
('desc', models.CharField(max_length=50, null=True, verbose_name='Firewall desc', blank=True)),
('is_default', models.BooleanField(default=False, verbose_name='Default')),
('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')),
('deleted', models.BooleanField(default=False, verbose_name='Deleted')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
('user_data_center', models.ForeignKey(to='idc.UserDataCenter')),
],
options={
'db_table': 'firewall',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='FirewallRules',
fields=[
('id', models.AutoField(serialize=False, primary_key=True)),
('firewall_rules_id', models.CharField(max_length=40, null=True, verbose_name='OS Firewall Rules UUID', blank=True)),
('direction', models.CharField(default=b'ingress', choices=[(b'ingress', 'Ingress'), (b'egress', 'Egress')], max_length=10, blank=True, null=True, verbose_name='Direction')),
('ether_type', models.CharField(default=b'IPv4', choices=[(b'IPv4', 'IPv4'), (b'IPv6', 'IPv6')], max_length=40, blank=True, null=True, verbose_name='Ether type')),
('port_range_min', models.IntegerField(default=0, null=True, verbose_name='Port range min', blank=True)),
('port_range_max', models.IntegerField(default=0, null=True, verbose_name='Port range max', blank=True)),
('protocol', models.CharField(max_length=40, null=True, verbose_name='Protocol', blank=True)),
('remote_group_id', models.CharField(max_length=40, null=True, verbose_name='remote group id UUID', blank=True)),
('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0', max_length=255, null=True, verbose_name='remote ip prefix', blank=True)),
('is_default', models.BooleanField(default=False, verbose_name='Default')),
('deleted', models.BooleanField(default=False, verbose_name='Deleted')),
('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')),
('firewall', models.ForeignKey(to='firewall.Firewall')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
('user_data_center', models.ForeignKey(to='idc.UserDataCenter')),
],
options={
'db_table': 'firewall_rules',
},
bases=(models.Model,),
),
]
|
[
"django.db.migrations.swappable_dependency",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField"
] |
[((243, 300), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (274, 300), False, 'from django.db import models, migrations\n'), ((433, 484), 'django.db.models.AutoField', 'models.AutoField', ([], {'serialize': '(False)', 'primary_key': '(True)'}), '(serialize=False, primary_key=True)\n', (449, 484), False, 'from django.db import models, migrations\n'), ((512, 574), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'verbose_name': '"""Firewall Name"""'}), "(max_length=128, verbose_name='Firewall Name')\n", (528, 574), False, 'from django.db import models, migrations\n'), ((609, 701), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'null': '(True)', 'verbose_name': '"""OS Firewall UUID"""', 'blank': '(True)'}), "(max_length=128, null=True, verbose_name='OS Firewall UUID',\n blank=True)\n", (625, 701), False, 'from django.db import models, migrations\n'), ((725, 813), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)', 'verbose_name': '"""Firewall desc"""', 'blank': '(True)'}), "(max_length=50, null=True, verbose_name='Firewall desc',\n blank=True)\n", (741, 813), False, 'from django.db import models, migrations\n'), ((843, 901), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Default"""'}), "(default=False, verbose_name='Default')\n", (862, 901), False, 'from django.db import models, migrations\n'), ((936, 1003), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""Create Date"""'}), "(auto_now_add=True, verbose_name='Create Date')\n", (956, 1003), False, 'from django.db import models, migrations\n'), ((1034, 1092), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Deleted"""'}), "(default=False, verbose_name='Deleted')\n", (1053, 1092), False, 'from django.db import models, migrations\n'), ((1120, 1166), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': 'settings.AUTH_USER_MODEL'}), '(to=settings.AUTH_USER_MODEL)\n', (1137, 1166), False, 'from django.db import models, migrations\n'), ((1206, 1248), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""idc.UserDataCenter"""'}), "(to='idc.UserDataCenter')\n", (1223, 1248), False, 'from django.db import models, migrations\n'), ((1499, 1550), 'django.db.models.AutoField', 'models.AutoField', ([], {'serialize': '(False)', 'primary_key': '(True)'}), '(serialize=False, primary_key=True)\n', (1515, 1550), False, 'from django.db import models, migrations\n'), ((1591, 1689), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'null': '(True)', 'verbose_name': '"""OS Firewall Rules UUID"""', 'blank': '(True)'}), "(max_length=40, null=True, verbose_name=\n 'OS Firewall Rules UUID', blank=True)\n", (1607, 1689), False, 'from django.db import models, migrations\n'), ((1717, 1884), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b'ingress'", 'choices': "[(b'ingress', 'Ingress'), (b'egress', 'Egress')]", 'max_length': '(10)', 'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Direction"""'}), "(default=b'ingress', choices=[(b'ingress', 'Ingress'), (\n b'egress', 'Egress')], max_length=10, blank=True, null=True,\n verbose_name='Direction')\n", (1733, 1884), False, 'from django.db import models, migrations\n'), 
((1909, 2059), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b'IPv4'", 'choices': "[(b'IPv4', 'IPv4'), (b'IPv6', 'IPv6')]", 'max_length': '(40)', 'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Ether type"""'}), "(default=b'IPv4', choices=[(b'IPv4', 'IPv4'), (b'IPv6',\n 'IPv6')], max_length=40, blank=True, null=True, verbose_name='Ether type')\n", (1925, 2059), False, 'from django.db import models, migrations\n'), ((2093, 2181), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'null': '(True)', 'verbose_name': '"""Port range min"""', 'blank': '(True)'}), "(default=0, null=True, verbose_name='Port range min',\n blank=True)\n", (2112, 2181), False, 'from django.db import models, migrations\n'), ((2215, 2303), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'null': '(True)', 'verbose_name': '"""Port range max"""', 'blank': '(True)'}), "(default=0, null=True, verbose_name='Port range max',\n blank=True)\n", (2234, 2303), False, 'from django.db import models, migrations\n'), ((2331, 2410), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'null': '(True)', 'verbose_name': '"""Protocol"""', 'blank': '(True)'}), "(max_length=40, null=True, verbose_name='Protocol', blank=True)\n", (2347, 2410), False, 'from django.db import models, migrations\n'), ((2449, 2545), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'null': '(True)', 'verbose_name': '"""remote group id UUID"""', 'blank': '(True)'}), "(max_length=40, null=True, verbose_name=\n 'remote group id UUID', blank=True)\n", (2465, 2545), False, 'from django.db import models, migrations\n'), ((2580, 2694), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b'0.0.0.0/0'", 'max_length': '(255)', 'null': '(True)', 'verbose_name': '"""remote ip prefix"""', 'blank': '(True)'}), "(default=b'0.0.0.0/0', max_length=255, null=True,\n verbose_name='remote ip prefix', blank=True)\n", (2596, 2694), False, 'from django.db import models, migrations\n'), ((2724, 2782), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Default"""'}), "(default=False, verbose_name='Default')\n", (2743, 2782), False, 'from django.db import models, migrations\n'), ((2813, 2871), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Deleted"""'}), "(default=False, verbose_name='Deleted')\n", (2832, 2871), False, 'from django.db import models, migrations\n'), ((2906, 2973), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""Create Date"""'}), "(auto_now_add=True, verbose_name='Create Date')\n", (2926, 2973), False, 'from django.db import models, migrations\n'), ((3005, 3046), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""firewall.Firewall"""'}), "(to='firewall.Firewall')\n", (3022, 3046), False, 'from django.db import models, migrations\n'), ((3074, 3120), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': 'settings.AUTH_USER_MODEL'}), '(to=settings.AUTH_USER_MODEL)\n', (3091, 3120), False, 'from django.db import models, migrations\n'), ((3160, 3202), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""idc.UserDataCenter"""'}), "(to='idc.UserDataCenter')\n", (3177, 3202), False, 'from django.db import models, migrations\n')]
|
import unittest
from solver import buddy_strings
class TestSolver(unittest.TestCase):
def test_buddy_strings(self):
self.assertEqual(buddy_strings("ab" , "ba" ), True )
self.assertEqual(buddy_strings("ab" , "ab" ), False)
self.assertEqual(buddy_strings("aa" , "aa" ), True )
self.assertEqual(buddy_strings("aaaaaaabc", "aaaaaaacb"), True )
self.assertEqual(buddy_strings("" , "aa" ), False)
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"solver.buddy_strings"
] |
[((496, 511), 'unittest.main', 'unittest.main', ([], {}), '()\n', (509, 511), False, 'import unittest\n'), ((140, 165), 'solver.buddy_strings', 'buddy_strings', (['"""ab"""', '"""ba"""'], {}), "('ab', 'ba')\n", (153, 165), False, 'from solver import buddy_strings\n'), ((209, 234), 'solver.buddy_strings', 'buddy_strings', (['"""ab"""', '"""ab"""'], {}), "('ab', 'ab')\n", (222, 234), False, 'from solver import buddy_strings\n'), ((278, 303), 'solver.buddy_strings', 'buddy_strings', (['"""aa"""', '"""aa"""'], {}), "('aa', 'aa')\n", (291, 303), False, 'from solver import buddy_strings\n'), ((347, 386), 'solver.buddy_strings', 'buddy_strings', (['"""aaaaaaabc"""', '"""aaaaaaacb"""'], {}), "('aaaaaaabc', 'aaaaaaacb')\n", (360, 386), False, 'from solver import buddy_strings\n'), ((416, 439), 'solver.buddy_strings', 'buddy_strings', (['""""""', '"""aa"""'], {}), "('', 'aa')\n", (429, 439), False, 'from solver import buddy_strings\n')]
|
#!/usr/bin/env python3
import unittest
from src.executor.Printer import Printer
from src.data.VideoItem import VideoItem
class TestIExecutor(unittest.TestCase):
def test_compiles(self):
self.assertEqual(True, True)
def test_printer(self):
printer = Printer()
printer.run(VideoItem(metadata=None, filepath=None))
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"src.executor.Printer.Printer",
"src.data.VideoItem.VideoItem"
] |
[((381, 396), 'unittest.main', 'unittest.main', ([], {}), '()\n', (394, 396), False, 'import unittest\n'), ((277, 286), 'src.executor.Printer.Printer', 'Printer', ([], {}), '()\n', (284, 286), False, 'from src.executor.Printer import Printer\n'), ((307, 346), 'src.data.VideoItem.VideoItem', 'VideoItem', ([], {'metadata': 'None', 'filepath': 'None'}), '(metadata=None, filepath=None)\n', (316, 346), False, 'from src.data.VideoItem import VideoItem\n')]
|
"""Main execution body for program. Contains GUI interface and exporting class that creates files instead
of generating HTML Reports
Author: <NAME>
Last Updated: 28/02/2017
"""
import argparse
import webbrowser
import textwrap
import xlrd
from tkinter import *
from tkinter import filedialog, ttk
from threading import Thread
try:
from .data import *
from .report import *
from .template_reader import *
except:
from data import *
from report import *
from template_reader import *
terminal = False
"""
Global Variables:
terminal -- boolean value whether program is running through terminal or through GUI
progress -- Progress bar showing progress through program
"""
class DisplayWindow:
"""GUI for application allowing users to interact with program in simpler and more explanatory way
Methods:
dataaskopenfile -- Asks for files to process and displays them in the output window
dataaskopenfolder -- Asks for folder to process and displays the contained files in the output window
filetext -- Fills output box given a list of files
maketemplate -- Links to Create template web page of Data-oracle website
process_report -- Runs program and generates report for all files processed
process_export -- Runs program and creates a file containing analysis of all files processed
removefile -- Removes file from being processed after being selected in output window
reset -- Resets the program removing all files from the process queue and sets progress bar back to the start
templateaskopenfile -- Asks for a template to use during processing and displays it in the output window
Variables:
datafiles -- list of datafiles to be processed
display -- output window Frame object
template -- template to use in process if applicable
"""
def __init__(self):
root = Tk()
root.wm_title("UWA Data-oracle")
self.datafiles = []
self.template = None
# Main Window
mainwindow = Frame(root)
self.display = Frame(mainwindow)
Label(mainwindow, text="Select File(s) or Folder(s) to process: ").grid(row=0, sticky=E, pady=10)
Label(mainwindow, text="Select template file(optional): ").grid(row=1, sticky=E, pady=10)
label3 = Label(mainwindow, text="> Create Template", fg="blue")
label3.bind("<Button-1>", self.maketemplate)
label3.grid(row=2)
Button(mainwindow, text="Browse Files...", command= self.dataaskopenfile).grid(row=0, column=1, padx=5, sticky='ew')
Button(mainwindow, text='Browse Folders...', command= self.dataaskopenfolder).grid(row=0, column=2, padx=5)
Button(mainwindow, text="Browse Templates...", command=self.templateaskopenfile).grid(row=1, column=1, padx=5)
Button(mainwindow, text="View Report", command=self.process_report).grid(row=4, column=1,sticky='ew', padx=5)
Button(mainwindow, text="Export", command=self.process_export).grid(row=4, column=2, sticky='ew')
Button(mainwindow, text="Reset", command=self.reset).grid(row=6, column=1, sticky='ew')
Button(mainwindow, text="Exit", command=mainwindow.quit).grid(row=6, column=2, sticky='ew', pady=5)
self.progress = ttk.Progressbar(mainwindow, orient="horizontal", mode="determinate")
self.progress.grid(row=5, columnspan=3, sticky='ew', padx=10, pady=5)
mainwindow.pack()
# Output Window
self.display.grid(row=0, column=3, rowspan=7, sticky=N)
# Status Bar
self.statusText = StringVar()
self.statusText.set("Waiting for File...")
status = Label(root, textvariable=self.statusText, bd=1, relief=SUNKEN, anchor=W)
status.pack(side=BOTTOM, fill=X)
root.mainloop()
def dataaskopenfile(self):
""" Asks for files to process and displays them in the output window"""
self.reset()
if self.template:
Label(self.display, text=str("Template Selected: " + self.template[0]), anchor='w').pack(fill=X)
self.datafiles = filedialog.askopenfiles(mode='r', filetypes=[('All Files', '.*'),('Csv Files','*.csv'),
('Excel Workbook', '*.xlsx'), ('Excel 97-2003 Workbook', '.xls')],
defaultextension="*.csv")
if self.datafiles is not None:
self.datafiles = [file.name for file in self.datafiles]
Label(self.display, text="Selected Files: ", anchor='w').pack(fill=X)
self.filetext(self.datafiles)
self.statusText.set("Ready to Process Files...")
return self.datafiles
def dataaskopenfolder(self):
"""Asks for folder to process and displays the contained files in the output window"""
self.reset()
if self.template is not None:
Label(self.display, text=str("Template Selected: " + self.template.name), anchor='w').pack(fill=X)
folder = filedialog.askdirectory()
if folder != '':
self.datafiles = []
for file in os.listdir(folder):
self.datafiles.append(os.path.join(folder,file))
Label(self.display, text=str("Selected Folder: " + folder), anchor='w').pack(fill=X)
self.filetext(self.datafiles)
return folder
def filetext(self, files):
"""Provides text for output box given a list of files"""
remove_file = lambda x, m: (lambda p: self.removefile(x, m))
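        # Right-click (Button-2 on posix, Button-3 elsewhere) on a listed file removes it from the queue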
for file in files:
label = Label(self.display, text=str("\t" + file), anchor='w')
if os.name == 'posix':
label.bind("<Button-2>", remove_file(file, label))
else:
label.bind("<Button-3>", remove_file(file, label))
label.pack(fill=X)
def maketemplate(self, event):
"""Opens webbrowser to create template page on Data-oracle website"""
webbrowser.open_new("http://www.data-oracle.com/upload/createTemplate/")
def process_report(self):
"""Runs program and generates report at the end"""
self.progress["value"] = 0
self.setstatus("Processing Files...")
Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'window':self}).start()
def process_export(self):
"""Runs program and exports results to file"""
self.progress["value"] = 0
self.setstatus("Processing Files...")
exportfile = ''
try:
exportfile = filedialog.asksaveasfile(mode='w', defaultextension='*.csv', filetypes=[('Csv Files', '*.csv'),
('All Files', '.*')])
exportfile.close()
Thread(target=process_files, args=(self.datafiles, self.template),
kwargs={'exportfile': exportfile.name, 'window': self}).start()
except PermissionError:
# Occurs if export file is open
self.setstatus("ERROR: Permission Denied, ensure export file is not open in another program")
def removefile(self, file, label):
"""Removes file from process list and removes label"""
print("Removing: ", file)
self.datafiles.remove(file)
label.destroy()
def reset(self):
"""Resets all files"""
mainwindow = self.display.winfo_parent()
mainwindow = self.display._nametowidget(mainwindow)
self.display.destroy()
self.display = Frame(mainwindow)
self.display.grid(row=0, column=3, rowspan=7, sticky=N)
self.setstatus("Waiting for File...")
self.progress["value"] = 0
def templateaskopenfile(self):
"""Asks for template to use in processing"""
self.template = []
template = filedialog.askopenfile(mode='r', filetypes=[('All Files', '.*'), ('Csv Files', '*.csv')],
defaultextension="*.csv")
if template is not None:
self.template.append(template.name)
if hasattr(self, 'templateLabel'):
self.templateLabel.destroy()
self.templateLabel = Label(self.display, text=str("Template Selected: " + self.template[0]), anchor='w')
self.templateLabel.pack(fill=X)
self.setstatus("Ready to Process Folder...")
return self.template
def setmaxprogress(self, max):
self.progress["maximum"] = max
def step_progress(self):
self.progress.step()
def setstatus(self, msg):
self.statusText.set(msg)
class Exporter(object):
"""Class that creates a file containing analysis of all files run in program
Methods:
write_stats -- writes summary of a single data object
write_summary -- writes summary of all files to be run after processing all files
Variables:
filename -- file name to save export file as
total_files -- total number of files processed
total_invalid -- total number of invalid rows
total_empty -- total number of empty columns
    total_errors -- total number of errors throughout files
"""
def __init__(self, filename, offline=True):
self.filename = filename
self.total_files = 0
self.total_invalid = 0
self.total_empty = 0
self.total_errors = 0
self.total_col = 0
if not offline:
with open(self.filename, 'w') as fp:
pass
def write_stats(self, data):
"""Writes statistics of a single data object"""
with open(self.filename, 'r+') as fp:
fp.seek(0,2)
fp.write("Analysis of " + os.path.split(data.filename)[1] + '\n')
self.total_files += 1
fp.write("Number of Invalid rows: " + str(len(data.invalid_rows)) + '\n')
self.total_invalid += len(data.invalid_rows)
empty_columns = [column.header for column in data.columns if column.empty]
fp.write("Number of Empty Columns: " + str(len(empty_columns)) + '\n')
            self.total_empty += len(empty_columns)
            fp.write("Number of Error Cells: " + str(len(data.errors)) + '\n')
            self.total_errors += len(data.errors)
            fp.write("Number of Valid Columns: " + str(len(data.columns)) + '\n')
            self.total_col += len(data.columns)
if data.delimiter_type == ',':
fp.write("Delimiter: comma\n")
else:
fp.write("Delimiter: " + data.delimiter_type + '\n')
fp.write("\n")
def write_summary(self):
"""Writes summary of all files processed"""
temp_file = os.path.join(os.path.split(self.filename)[0],"Tempfile")
with open( temp_file, 'w') as fp:
fp.write("Error Report " + os.path.split(self.filename)[1] + "\n\n")
fp.write("Total Files Analysed: " + str(self.total_files) + "\n")
fp.write("Total Invalid Rows: " + str(self.total_invalid) + "\n")
fp.write("Total Empty Columns: " + str(self.total_empty) + "\n")
fp.write("Total Valid Columns: " + str(self.total_col) + "\n")
fp.write("Total Errors: " + str(self.total_errors) + "\n\n")
with open(self.filename, 'r') as fd:
for line in fd:
fp.write(line)
os.remove(self.filename)
os.rename(temp_file, self.filename)
def write_error(self, data):
"""Writes error message for files not processed fully"""
with open(self.filename, 'r+') as fp:
fp.seek(0,2)
fp.write("Analysis of " + os.path.split(data.filename)[1] + '\n')
fp.write("ERROR: Unable to read file, no readable data detected.\n\n")
def main(*args, **kwargs):
"""
Create Data and Report objects, providing necessary information for them
to run analysis and create desired outputs (i.e. HTML report or writing to exported file).
Keyword Arguments:
args -- Arguments provided to the program at runtime.
exporter -- Exporter object if applicable
"""
exporter = kwargs.pop('exporter', None)
window = kwargs.pop('window', None)
filename = args[0]
print("[Step 1/7] Processing file: ",filename)
print("[Step 2/7] Reading data")
if window is not None:
window.step_progress()
window.setstatus("Processing " + filename + "...")
if len(args) > 1:
temp = Template(args[1])
data = Data(filename, temp)
else:
data = Data(filename)
if not data.raw_data:
print("ERROR: Unable to read file: " + filename)
window.setstatus("ERROR: Unable to read file: " + filename)
if exporter is not None:
exporter.write_error(data)
return None
data.remove_invalid()
data.create_columns()
data.clean()
print("[Step 3/7] Running pre-analysis")
if window is not None:
window.step_progress()
data.pre_analysis()
print("[Step 4/7] Finding Errors")
if window is not None:
window.step_progress()
data.find_errors()
print("[Step 5/7] Running Analysis")
if window is not None:
window.step_progress()
window.setstatus("Running Analysis on " + filename + "...")
data.analysis()
if exporter is None:
print("[Step 6/7] Generating report")
report = Report(data)
str_report = report.html_report()
html = report.gen_html(str_report)
# returns string of html, also generates html report for debugging purposes
print("[Step 7/7] Report Successfully Generated")
print("Completed analysis for: ",filename)
if window is not None:
window.step_progress()
webbrowser.open("file://"+html,new=2)
else:
print("[Step 6/7] Generating report")
exporter.write_stats(data)
print("[Step 7/7] Report Successfully Generated")
if window is not None:
window.step_progress()
print("Completed analysis for: ", filename)
if window is not None:
window.setstatus("Completed Analysis for " + filename)
def get_file_dir(location):
"""Returns the directory of the file with the file name
Keyword arguments:
location -- A file path.
"""
return location.rpartition('\\')
def process_files(files, templates, exportfile='', window=None):
"""Process files and templates and runs the program over them. Converts excel files
and applies template to each file
Keyword arguments:
files -- files to be processed
templates -- files to use as templates in processing
exportfile -- file to export analysis to if applicable
"""
filenames = []
excel = []
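    # Excel workbooks are converted to temporary csv files (one per sheet); these are removed again after processing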
for file in files:
name_ext = os.path.splitext(file)
# TODO handle empty sheets
if name_ext[1] == '.xls' or name_ext[1] == '.xlsx':
print("[Step 0/7] Converting to csv file")
wb = xlrd.open_workbook(file)
sheet_names = wb.sheet_names()
if len(sheet_names) == 1:
sh = wb.sheet_by_name(sheet_names[0])
new_name = os.path.splitext(file)[0] + ".csv"
with open(new_name, 'w', newline='') as fp:
wr = csv.writer(fp)
for rownum in range(sh.nrows):
wr.writerow(sh.row_values(rownum))
filenames.append(new_name)
excel.append(new_name)
else:
for sheet in sheet_names:
sh = wb.sheet_by_name(sheet)
new_name = os.path.join(os.path.splitext(file)[0] + "_" + sheet + ".csv")
try:
with open(new_name, 'w', newline='') as fp:
wr = csv.writer(fp)
for rownum in range(sh.nrows):
wr.writerow(sh.row_values(rownum))
except PermissionError:
# If created csv file already exists and is open
window.setstatus("ERROR: Permission Denied, ensure " + new_name + " is not open in another program")
return None
filenames.append(new_name)
excel.append(new_name)
elif name_ext[1] == '.csv':
filenames.append(file)
else:
print("ERROR: Unsupported file type: " + file)
if window is not None:
window.setstatus("WARNING: Unsupported file type " + file)
if exportfile != '':
export = Exporter(exportfile)
else:
export = None
if window is not None:
window.setmaxprogress(len(filenames) * 5.0 + 0.01)
    if templates:
if len(templates) == 1:
for name in filenames:
main(name, templates[0], exporter=export, window=window)
else:
num_templates = len(templates)
print(num_templates)
num_files = len(filenames)
if num_templates == num_files:
for i in range(0, num_files):
main(filenames[i], templates[i], exporter=export, window=window)
else:
# TODO keep functionality when excel files have multiple sheets
print("Error, different number of files and templates")
else:
for name in filenames:
main(name, exporter=export, window=window)
if export != None:
export.write_summary()
if excel:
for file in excel:
os.remove(file)
if __name__ == '__main__':
"""If the program is run with application.py as the argument to the command line
execution begins here. This will process all the command line arguments before
proceeding.
"""
files = []
templates = []
if len(sys.argv) > 1:
terminal = True
pathname = os.path.dirname(sys.argv[0])
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\
description=textwrap.dedent('''\
Processes Csv files.
----------------------------------
Can process one or more csv files. Can specify template to describe
data further. Templates can be used to describe one or more csv files.
If using multiple templates for multiple files list templates in the
same order as the files they correspond to.
'''))
parser.add_argument('filenames', nargs='+',\
help='one or more filenames for the processor to analyse')
parser.add_argument('-t', nargs='+', metavar='template', help='a template for the given files')
args = parser.parse_args()
process_files(args.filenames, args.t)
else:
DisplayWindow()
|
[
"textwrap.dedent",
"threading.Thread",
"webbrowser.open",
"webbrowser.open_new",
"xlrd.open_workbook",
"tkinter.ttk.Progressbar",
"tkinter.filedialog.askdirectory",
"tkinter.filedialog.askopenfiles",
"tkinter.filedialog.asksaveasfile",
"tkinter.filedialog.askopenfile"
] |
[((3301, 3369), 'tkinter.ttk.Progressbar', 'ttk.Progressbar', (['mainwindow'], {'orient': '"""horizontal"""', 'mode': '"""determinate"""'}), "(mainwindow, orient='horizontal', mode='determinate')\n", (3316, 3369), False, 'from tkinter import filedialog, ttk\n'), ((4123, 4315), 'tkinter.filedialog.askopenfiles', 'filedialog.askopenfiles', ([], {'mode': '"""r"""', 'filetypes': "[('All Files', '.*'), ('Csv Files', '*.csv'), ('Excel Workbook', '*.xlsx'),\n ('Excel 97-2003 Workbook', '.xls')]", 'defaultextension': '"""*.csv"""'}), "(mode='r', filetypes=[('All Files', '.*'), (\n 'Csv Files', '*.csv'), ('Excel Workbook', '*.xlsx'), (\n 'Excel 97-2003 Workbook', '.xls')], defaultextension='*.csv')\n", (4146, 4315), False, 'from tkinter import filedialog, ttk\n'), ((5044, 5069), 'tkinter.filedialog.askdirectory', 'filedialog.askdirectory', ([], {}), '()\n', (5067, 5069), False, 'from tkinter import filedialog, ttk\n'), ((6009, 6081), 'webbrowser.open_new', 'webbrowser.open_new', (['"""http://www.data-oracle.com/upload/createTemplate/"""'], {}), "('http://www.data-oracle.com/upload/createTemplate/')\n", (6028, 6081), False, 'import webbrowser\n'), ((7891, 8011), 'tkinter.filedialog.askopenfile', 'filedialog.askopenfile', ([], {'mode': '"""r"""', 'filetypes': "[('All Files', '.*'), ('Csv Files', '*.csv')]", 'defaultextension': '"""*.csv"""'}), "(mode='r', filetypes=[('All Files', '.*'), (\n 'Csv Files', '*.csv')], defaultextension='*.csv')\n", (7913, 8011), False, 'from tkinter import filedialog, ttk\n'), ((13878, 13918), 'webbrowser.open', 'webbrowser.open', (["('file://' + html)"], {'new': '(2)'}), "('file://' + html, new=2)\n", (13893, 13918), False, 'import webbrowser\n'), ((6589, 6711), 'tkinter.filedialog.asksaveasfile', 'filedialog.asksaveasfile', ([], {'mode': '"""w"""', 'defaultextension': '"""*.csv"""', 'filetypes': "[('Csv Files', '*.csv'), ('All Files', '.*')]"}), "(mode='w', defaultextension='*.csv', filetypes=[(\n 'Csv Files', '*.csv'), ('All Files', '.*')])\n", (6613, 6711), False, 'from tkinter import filedialog, ttk\n'), ((15123, 15147), 'xlrd.open_workbook', 'xlrd.open_workbook', (['file'], {}), '(file)\n', (15141, 15147), False, 'import xlrd\n'), ((6261, 6357), 'threading.Thread', 'Thread', ([], {'target': 'process_files', 'args': '(self.datafiles, self.template)', 'kwargs': "{'window': self}"}), "(target=process_files, args=(self.datafiles, self.template), kwargs={\n 'window': self})\n", (6267, 6357), False, 'from threading import Thread\n'), ((18259, 18688), 'textwrap.dedent', 'textwrap.dedent', (['""" Processes Csv files.\n ----------------------------------\n Can process one or more csv files. Can specify template to describe\n data further. Templates can be used to describe one or more csv files.\n If using multiple templates for multiple files list templates in the\n same order as the files they correspond to.\n """'], {}), '(\n """ Processes Csv files.\n ----------------------------------\n Can process one or more csv files. Can specify template to describe\n data further. 
Templates can be used to describe one or more csv files.\n If using multiple templates for multiple files list templates in the\n same order as the files they correspond to.\n """\n )\n', (18274, 18688), False, 'import textwrap\n'), ((6847, 6974), 'threading.Thread', 'Thread', ([], {'target': 'process_files', 'args': '(self.datafiles, self.template)', 'kwargs': "{'exportfile': exportfile.name, 'window': self}"}), "(target=process_files, args=(self.datafiles, self.template), kwargs={\n 'exportfile': exportfile.name, 'window': self})\n", (6853, 6974), False, 'from threading import Thread\n')]
|
from django.test import TestCase, override_settings
from social_django.compat import reverse
@override_settings(SOCIAL_AUTH_GITHUB_KEY = '1', SOCIAL_AUTH_GITHUB_SECRET='2')
class AuthTestcase(TestCase):
def setUp(self):
session = self.client.session
session["github_status"] = "1"
session.save()
def test_begin_view(self):
response = self.client.get(reverse('social:begin', kwargs={'backend': 'github'}))
self.assertEqual(response.status_code, 302)
|
[
"social_django.compat.reverse",
"django.test.override_settings"
] |
[((95, 171), 'django.test.override_settings', 'override_settings', ([], {'SOCIAL_AUTH_GITHUB_KEY': '"""1"""', 'SOCIAL_AUTH_GITHUB_SECRET': '"""2"""'}), "(SOCIAL_AUTH_GITHUB_KEY='1', SOCIAL_AUTH_GITHUB_SECRET='2')\n", (112, 171), False, 'from django.test import TestCase, override_settings\n'), ((393, 446), 'social_django.compat.reverse', 'reverse', (['"""social:begin"""'], {'kwargs': "{'backend': 'github'}"}), "('social:begin', kwargs={'backend': 'github'})\n", (400, 446), False, 'from social_django.compat import reverse\n')]
|
import datetime
import json
import logging
import os
import threading
import time
from abc import ABC, abstractmethod
import pika
from tools.mongo_dao import MongoDB
class StopCondition(ABC):
def __init__(self, stop_condition_parameters: dict, experiment_description: dict, experiment_id: str):
self.event_host = os.getenv("BRISE_EVENT_SERVICE_HOST")
self.event_port = os.getenv("BRISE_EVENT_SERVICE_AMQP_PORT")
self.database = MongoDB(os.getenv("BRISE_DATABASE_HOST"),
os.getenv("BRISE_DATABASE_PORT"),
os.getenv("BRISE_DATABASE_NAME"),
os.getenv("BRISE_DATABASE_USER"),
os.getenv("BRISE_DATABASE_PASS"))
self.experiment_id = experiment_id
self.stop_condition_type = stop_condition_parameters["Name"]
self.decision = False
self.logger = logging.getLogger(stop_condition_parameters["Name"])
self.repetition_interval = datetime.timedelta(**{
experiment_description["StopConditionTriggerLogic"]["InspectionParameters"]["TimeUnit"]:
experiment_description["StopConditionTriggerLogic"]["InspectionParameters"]["RepetitionPeriod"]}).total_seconds()
def start_threads(self):
"""
Start 2 threads.
One thread listens event to shut down Stop Condition.
Second thread run the functionality of Stop Condition (`self_evaluation` method).
"""
self.listen_thread = EventServiceConnection(self)
self.listen_thread.start()
self.thread_is_active = True
self.thread = threading.Thread(target=self.self_evaluation, args=())
self.thread.start()
def stop_threads(self, ch, method, properties, body):
"""
This function stops Stop Condition microservice.
:param ch: pika.Channel
:param method: pika.spec.Basic.GetOk
:param properties: pika.spec.BasicProperties
:param body: empty
"""
self.listen_thread.stop()
self.thread_is_active = False
@abstractmethod
def is_finish(self):
"""
Main logic of Stop Condition should be overridden in this method.
        Later, this method will be called in the `self_evaluation` method with the period defined in the Experiment Description.
When the Stop Condition is triggered to stop BRISE,
it changes internal state of variable 'self.decision' to True.
:return: None
"""
def update_expression(self, stop_condition_type: str, decision: bool) -> None:
"""
This function sends event to Stop Condition Validator with command to check StopConditionTriggerLogic expression,
since this particular Stop Condition was triggered.
        :param stop_condition_type: Stop Condition identifier
:param decision: Stop Condition decision (boolean)
"""
dictionary_dump = {"experiment_id": self.experiment_id,
"stop_condition_type": stop_condition_type,
"decision": decision
}
body = json.dumps(dictionary_dump)
with pika.BlockingConnection(
pika.ConnectionParameters(host=self.event_host,
port=self.event_port)) as connection:
with connection.channel() as channel:
channel.basic_publish(exchange='',
routing_key='check_stop_condition_expression_queue',
body=body)
def self_evaluation(self):
"""
This function performs self-evaluation of Stop Condition periodically according to user-defined repetition interval.
"""
counter = 0
listen_interval = self.repetition_interval/10
previous_decision = self.decision # for sending the update only when decision changes
while self.thread_is_active:
# time.sleep blocks thread execution for whole time specified in function argument
# and stop message from main-node could be delivered only after this timer ends.
# This code decision is designed to accelerate stopping process.
time.sleep(listen_interval)
counter = counter + 1
if counter % 10 == 0:
counter = 0
numb_of_measured_configurations = 0
try:
numb_of_measured_configurations = \
self.database.get_last_record_by_experiment_id("Experiment_state", self.experiment_id)["Number_of_measured_configs"]
except TypeError:
self.logger.warning(f"No Experiment state is yet available for the experiment {self.experiment_id}")
if numb_of_measured_configurations > 0:
search_space_size = \
self.database.get_last_record_by_experiment_id("Search_space", self.experiment_id)["Search_space_size"]
if numb_of_measured_configurations >= search_space_size:
break
self.is_finish()
if previous_decision != self.decision:
msg = f"{self.__class__.__name__} Stop Condition decision: " \
f"{ 'stop' if self.decision else 'continue'} running Experiment."
self.logger.info(msg)
previous_decision = self.decision
self.update_expression(self.stop_condition_type, self.decision)
def stop_experiment_due_to_failed_sc_creation(self):
"""
This function sends stop_experiment message to main node. It could be triggered only if
Stop Condition initialization fails.
"""
with pika.BlockingConnection(
pika.ConnectionParameters(host=self.event_host,
port=self.event_port)) as connection:
with connection.channel() as channel:
channel.basic_publish(exchange='',
routing_key='stop_experiment_queue',
body="Stop condition is not able to initialize.")
class EventServiceConnection(threading.Thread):
"""
This class is responsible for listening `stop_brise_components` queue
for shutting down Stop Condition (in case of BRISE Experiment termination).
"""

    def __init__(self, stop_condition: StopCondition):
        """
        The function for initializing the consumer thread.
        :param stop_condition: an instance of the Stop Condition object
"""
super(EventServiceConnection, self).__init__()
self.stop_condition: StopCondition = stop_condition
self.connection = pika.BlockingConnection(pika.ConnectionParameters(host=self.stop_condition.event_host,
port=self.stop_condition.event_port))
self.consume_channel = self.connection.channel()
self.termination_result = self.consume_channel.queue_declare(queue='', exclusive=True)
self.termination_queue_name = self.termination_result.method.queue
self.consume_channel.queue_bind(exchange='brise_termination_sender', queue=self.termination_queue_name)
self._is_interrupted = False
self.consume_channel.basic_consume(queue=self.termination_queue_name, auto_ack=True,
on_message_callback=self.stop_condition.stop_threads)

    def stop(self):
        """
        The function for stopping the thread.
"""
self._is_interrupted = True

    def run(self):
        """
        Entry point of the consumer functionality:
        listens to the termination queue until the thread is interrupted.
"""
try:
while not self._is_interrupted:
self.consume_channel.connection.process_data_events(time_limit=1) # 1 second
finally:
if self.connection.is_open:
self.connection.close()
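

# ---------------------------------------------------------------------------
# Hedged example (not part of the original module): a minimal sketch of a
# concrete Stop Condition overriding `is_finish`. It assumes the StopCondition
# base class defined above together with its `database`, `experiment_id` and
# `decision` attributes, and reuses the "Experiment_state" record already
# queried in `self_evaluation`. The class name and the `max_configs` attribute
# are hypothetical and shown only for illustration.
class QuantityBasedStopConditionSketch(StopCondition):

    max_configs: int = 10  # hypothetical limit; normally derived from the Experiment Description

    def is_finish(self):
        record = self.database.get_last_record_by_experiment_id(
            "Experiment_state", self.experiment_id)
        # The record may be absent early in the Experiment (cf. the TypeError
        # handling in `self_evaluation`), so guard against None.
        if record is not None and record["Number_of_measured_configs"] >= self.max_configs:
            self.decision = True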
|
[
"threading.Thread",
"pika.ConnectionParameters",
"json.dumps",
"time.sleep",
"datetime.timedelta",
"os.getenv",
"logging.getLogger"
] |
[((329, 366), 'os.getenv', 'os.getenv', (['"""BRISE_EVENT_SERVICE_HOST"""'], {}), "('BRISE_EVENT_SERVICE_HOST')\n", (338, 366), False, 'import os\n'), ((393, 435), 'os.getenv', 'os.getenv', (['"""BRISE_EVENT_SERVICE_AMQP_PORT"""'], {}), "('BRISE_EVENT_SERVICE_AMQP_PORT')\n", (402, 435), False, 'import os\n'), ((931, 983), 'logging.getLogger', 'logging.getLogger', (["stop_condition_parameters['Name']"], {}), "(stop_condition_parameters['Name'])\n", (948, 983), False, 'import logging\n'), ((1652, 1706), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.self_evaluation', 'args': '()'}), '(target=self.self_evaluation, args=())\n', (1668, 1706), False, 'import threading\n'), ((3161, 3188), 'json.dumps', 'json.dumps', (['dictionary_dump'], {}), '(dictionary_dump)\n', (3171, 3188), False, 'import json\n'), ((468, 500), 'os.getenv', 'os.getenv', (['"""BRISE_DATABASE_HOST"""'], {}), "('BRISE_DATABASE_HOST')\n", (477, 500), False, 'import os\n'), ((534, 566), 'os.getenv', 'os.getenv', (['"""BRISE_DATABASE_PORT"""'], {}), "('BRISE_DATABASE_PORT')\n", (543, 566), False, 'import os\n'), ((600, 632), 'os.getenv', 'os.getenv', (['"""BRISE_DATABASE_NAME"""'], {}), "('BRISE_DATABASE_NAME')\n", (609, 632), False, 'import os\n'), ((666, 698), 'os.getenv', 'os.getenv', (['"""BRISE_DATABASE_USER"""'], {}), "('BRISE_DATABASE_USER')\n", (675, 698), False, 'import os\n'), ((732, 764), 'os.getenv', 'os.getenv', (['"""BRISE_DATABASE_PASS"""'], {}), "('BRISE_DATABASE_PASS')\n", (741, 764), False, 'import os\n'), ((4277, 4304), 'time.sleep', 'time.sleep', (['listen_interval'], {}), '(listen_interval)\n', (4287, 4304), False, 'import time\n'), ((6886, 6990), 'pika.ConnectionParameters', 'pika.ConnectionParameters', ([], {'host': 'self.stop_condition.event_host', 'port': 'self.stop_condition.event_port'}), '(host=self.stop_condition.event_host, port=self.\n stop_condition.event_port)\n', (6911, 6990), False, 'import pika\n'), ((1019, 1237), 'datetime.timedelta', 'datetime.timedelta', ([], {}), "(**{experiment_description['StopConditionTriggerLogic'][\n 'InspectionParameters']['TimeUnit']: experiment_description[\n 'StopConditionTriggerLogic']['InspectionParameters']['RepetitionPeriod']})\n", (1037, 1237), False, 'import datetime\n'), ((3243, 3312), 'pika.ConnectionParameters', 'pika.ConnectionParameters', ([], {'host': 'self.event_host', 'port': 'self.event_port'}), '(host=self.event_host, port=self.event_port)\n', (3268, 3312), False, 'import pika\n'), ((5907, 5976), 'pika.ConnectionParameters', 'pika.ConnectionParameters', ([], {'host': 'self.event_host', 'port': 'self.event_port'}), '(host=self.event_host, port=self.event_port)\n', (5932, 5976), False, 'import pika\n')]
|