code | apis | extract_api
---|---|---|
from __future__ import unicode_literals
from django.core import mail
from django.core.management import call_command
from django.test import SimpleTestCase
class SendTestEmailManagementCommand(SimpleTestCase):
"""
Test the sending of a test email using the `sendtestemail` command.
"""
def test_send_test_email(self):
"""
The mail is sent with the correct subject and recipient.
"""
recipient = "<EMAIL>"
call_command("sendtestemail", recipient)
self.assertEqual(len(mail.outbox), 1)
mail_message = mail.outbox[0]
self.assertEqual(mail_message.subject[0:15], 'Test email from')
self.assertEqual(mail_message.recipients(), [recipient])
def test_send_test_email_with_multiple_addresses(self):
"""
The mail may be sent with multiple recipients.
"""
recipients = ["<EMAIL>", "<EMAIL>"]
call_command("sendtestemail", recipients[0], recipients[1])
self.assertEqual(len(mail.outbox), 1)
mail_message = mail.outbox[0]
self.assertEqual(mail_message.subject[0:15], 'Test email from')
self.assertEqual(mail_message.recipients(), recipients)
|
[
"django.core.management.call_command"
] |
[((465, 505), 'django.core.management.call_command', 'call_command', (['"""sendtestemail"""', 'recipient'], {}), "('sendtestemail', recipient)\n", (477, 505), False, 'from django.core.management import call_command\n'), ((919, 978), 'django.core.management.call_command', 'call_command', (['"""sendtestemail"""', 'recipients[0]', 'recipients[1]'], {}), "('sendtestemail', recipients[0], recipients[1])\n", (931, 978), False, 'from django.core.management import call_command\n')]
|
#!/usr/bin/env python3
# encoding: utf-8
import pprint
from enum import Enum
from engine.datastore.models.paper_structure import PaperStructure
from engine.datastore.models.text import Text
from engine.preprocessing.text_processor import TextProcessor
from engine.utils.objects.word_hist import WordHist
class Section(PaperStructure):
def __init__(self, data):
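        # Rebuild a Section from its dictionary form; optional fields fall back to
        # defaults or are derived from the raw heading via TextProcessor.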
self.heading_raw = data.get('heading_raw')
self.heading_proceed = data.get('heading_proceed') if 'heading_proceed' in data else \
TextProcessor.proceed_string(data.get('heading_raw'))
self.section_type = SectionType[data.get('section_type')]
self.imrad_types = [IMRaDType[imrad_type] for imrad_type in data.get('imrad_types')] if 'imrad_types' in data else []
self.text = [Text(text) for text in data.get('text')] if 'text' in data else []
self.subsections = [Section(subsection) for subsection in data.get('subsections')] if 'subsections' in data else []
self.word_hist = WordHist(data.get('word_hist')) if "word_hist" in data else WordHist()
def __str__(self):
pp = pprint.PrettyPrinter(indent=4)
return pp.pformat(self.to_dict())
def to_dict(self):
data = {'section_type': self.section_type.name, 'heading_raw': self.heading_raw, 'heading_proceed': self.heading_proceed,
'text': [], 'subsections': [], 'imrad_types': [], 'word_hist': self.word_hist}
for text in self.text:
data['text'].append(text.to_dict())
for subsection in self.subsections:
data['subsections'].append(subsection.to_dict())
for imrad_type in self.imrad_types:
data['imrad_types'].append(imrad_type.name)
return data
def get_combined_word_hist(self):
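        # Lazily build this section's histogram from the processed heading and text,
        # then merge in the histograms of all subsections.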
if not self.word_hist:
for word in self.heading_proceed.split():
self.word_hist[word] = self.word_hist[word] + 1 if word in self.word_hist else 1
for text in self.text:
for word in text.text_proceed.split():
self.word_hist[word] = self.word_hist[word] + 1 if word in self.word_hist else 1
ret = WordHist(self.word_hist.copy())
for subsection in self.subsections:
ret.append(subsection.get_combined_word_hist())
return ret
def add_text_object(self, text_type, text_raw):
if len(self.subsections):
self.subsections[-1].add_text_object(text_type, text_raw)
else:
self.text.append(Text({"text_type": text_type.name, "text_raw": text_raw}))
def add_subsection(self, section_type, heading):
self.subsections.append(Section({'section_type': section_type.name, 'heading_raw': heading}))
def add_to_imrad(self, imrad_type):
if not any(imrad_type is x for x in self.imrad_types) and \
                (not (self.heading_raw.isspace() or self.heading_raw == '')):
self.imrad_types.append(imrad_type)
for subsection in self.subsections:
subsection.add_to_imrad(imrad_type)
def title_exist(self):
return bool(self.heading_proceed)
def text_exist(self):
        return any(text.text_proceed for text in self.text)
class SectionType(Enum):
ABSTRACT = 1
SECTION = 2
SUBSECTION = 3
SUBSUBSECTION = 4
class IMRaDType(Enum):
ABSTRACT = 0
INTRODUCTION = 1
BACKGROUND = 2
METHODS = 3
RESULTS = 4
DISCUSSION = 5
ACKNOWLEDGE = 6
|
[
"engine.utils.objects.word_hist.WordHist",
"pprint.PrettyPrinter",
"engine.datastore.models.text.Text"
] |
[((1121, 1151), 'pprint.PrettyPrinter', 'pprint.PrettyPrinter', ([], {'indent': '(4)'}), '(indent=4)\n', (1141, 1151), False, 'import pprint\n'), ((1072, 1082), 'engine.utils.objects.word_hist.WordHist', 'WordHist', ([], {}), '()\n', (1080, 1082), False, 'from engine.utils.objects.word_hist import WordHist\n'), ((795, 805), 'engine.datastore.models.text.Text', 'Text', (['text'], {}), '(text)\n', (799, 805), False, 'from engine.datastore.models.text import Text\n'), ((2537, 2594), 'engine.datastore.models.text.Text', 'Text', (["{'text_type': text_type.name, 'text_raw': text_raw}"], {}), "({'text_type': text_type.name, 'text_raw': text_raw})\n", (2541, 2594), False, 'from engine.datastore.models.text import Text\n')]
|
from django.db import models
# Create your models here.
class Contact(models.Model):
street_address = models.CharField(
max_length=100,
null=True,
blank=True
)
city = models.CharField(
max_length=30,
null=True,
blank=True
)
state = models.CharField(
max_length=30,
null=True,
blank=True
)
postal_code = models.CharField(
max_length=20,
null=True,
blank=True
)
country = models.CharField(
max_length=30,
null=True,
blank=True
)
website = models.URLField(null=True, blank=True)
class Meta():
abstract = True
class Person(Contact):
first_name = models.CharField(max_length=50, null=True, blank=True)
last_name = models.CharField(max_length=50, null=True, blank=True)
organization = models.ForeignKey(
'envcontacts.Organization',
on_delete=models.SET_NULL,
null=True,
blank=True
)
MOBILE = 'M'
HOME = 'H'
WORK = 'W'
OTHER = 'O'
    PHONE_TYPE_CHOICES = [
        (MOBILE, 'Mobile'),
        (HOME, 'Home'),
        (WORK, 'Work'),
        (OTHER, 'Other'),
    ]
phone1 = models.CharField(max_length=15, null=True, blank=True)
phone1_type = models.CharField(
max_length=1,
choices=PHONE_TYPE_CHOICES,
default=MOBILE,
)
phone2 = models.CharField(max_length=15, null=True, blank=True)
phone2_type = models.CharField(
max_length=1,
choices=PHONE_TYPE_CHOICES,
default=MOBILE,
)
    EMAIL_TYPE_CHOICES = [
        (HOME, 'Home'),
        (WORK, 'Work'),
        (OTHER, 'Other'),
    ]
email1 = models.EmailField(null=True, blank=True)
email1_type = models.CharField(
max_length=1,
choices=EMAIL_TYPE_CHOICES,
default=WORK,
)
email2 = models.EmailField(null=True, blank=True)
email2_type = models.CharField(
max_length=1,
choices=EMAIL_TYPE_CHOICES,
default=WORK,
)
class Meta():
verbose_name_plural='People'
def __str__(self):
name = ''
if self.last_name is not None:
name = self.last_name
if self.first_name is not None:
name += ', ' + self.first_name
elif self.first_name is not None:
name = self.first_name
return name
class Organization(Contact):
name = models.CharField(
max_length=50,
null=True,
blank=True,
help_text='Enter short name for labels and ID',
)
long_name = models.CharField(
max_length=100,
null=True,
blank=True,
help_text='Enter full name of organization',
)
parent_org = models.ForeignKey(
'self',
on_delete=models.SET_NULL,
null=True,
blank=True
)
MOBILE = 'M'
HOME = 'H'
WORK = 'W'
OTHER = 'O'
    PHONE_TYPE_CHOICES = [
        (MOBILE, 'Mobile'),
        (HOME, 'Home'),
        (WORK, 'Work'),
        (OTHER, 'Other'),
    ]
phone = models.CharField(max_length=15, null=True, blank=True)
email = models.EmailField(null=True, blank=True)
def __str__(self):
if self.name is not None:
return self.name
elif self.long_name is not None:
return self.long_name
return 'empty'
# class Organization(models.Model):
#
# name = models.CharField(
# max_length=50,
# )
#
# address = models.CharField(
# max_length=100,
# null=True,
# blank=True,
# )
#
# website = models.URLField(null=True, blank=True)
#
# phone = models.CharField(max_length=20, null=True, blank=True)
#
# def __str__(self):
# '''String representation of Organization object. '''
# return self.name
#
#
# # class Manufacturer(Organization):
# # pass
# # # contacts from Person
# #
# # # def __str__(self):
# # # '''String representation of Manufacturer object. '''
# # # return self.name
#
#
# # can we attach this to User?
# class Person(models.Model):
#
# first_name = models.CharField(max_length=20)
# last_name = models.CharField(max_length=20)
#
# email = models.EmailField(null=True, blank=True)
# phone = models.CharField(max_length=20)
#
# class Meta:
# verbose_name_plural = "People"
#
# affiliation = models.ForeignKey(
# 'Organization',
# on_delete=models.SET_NULL,
# null=True,
# blank=True
# )
#
# def __str__(self):
# '''String representation of Person object. '''
# return f'{self.last_name},{self.first_name}'
|
[
"django.db.models.CharField",
"django.db.models.URLField",
"django.db.models.ForeignKey",
"django.db.models.EmailField"
] |
[((109, 164), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'null': '(True)', 'blank': '(True)'}), '(max_length=100, null=True, blank=True)\n', (125, 164), False, 'from django.db import models\n'), ((206, 260), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'null': '(True)', 'blank': '(True)'}), '(max_length=30, null=True, blank=True)\n', (222, 260), False, 'from django.db import models\n'), ((303, 357), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'null': '(True)', 'blank': '(True)'}), '(max_length=30, null=True, blank=True)\n', (319, 357), False, 'from django.db import models\n'), ((406, 460), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'null': '(True)', 'blank': '(True)'}), '(max_length=20, null=True, blank=True)\n', (422, 460), False, 'from django.db import models\n'), ((505, 559), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'null': '(True)', 'blank': '(True)'}), '(max_length=30, null=True, blank=True)\n', (521, 559), False, 'from django.db import models\n'), ((605, 643), 'django.db.models.URLField', 'models.URLField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (620, 643), False, 'from django.db import models\n'), ((730, 784), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)', 'blank': '(True)'}), '(max_length=50, null=True, blank=True)\n', (746, 784), False, 'from django.db import models\n'), ((801, 855), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)', 'blank': '(True)'}), '(max_length=50, null=True, blank=True)\n', (817, 855), False, 'from django.db import models\n'), ((876, 975), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""envcontacts.Organization"""'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'blank': '(True)'}), "('envcontacts.Organization', on_delete=models.SET_NULL,\n null=True, blank=True)\n", (893, 975), False, 'from django.db import models\n'), ((1222, 1276), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(15)', 'null': '(True)', 'blank': '(True)'}), '(max_length=15, null=True, blank=True)\n', (1238, 1276), False, 'from django.db import models\n'), ((1295, 1369), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1)', 'choices': 'PHONE_TYPE_CHOICES', 'default': 'MOBILE'}), '(max_length=1, choices=PHONE_TYPE_CHOICES, default=MOBILE)\n', (1311, 1369), False, 'from django.db import models\n'), ((1414, 1468), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(15)', 'null': '(True)', 'blank': '(True)'}), '(max_length=15, null=True, blank=True)\n', (1430, 1468), False, 'from django.db import models\n'), ((1487, 1561), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1)', 'choices': 'PHONE_TYPE_CHOICES', 'default': 'MOBILE'}), '(max_length=1, choices=PHONE_TYPE_CHOICES, default=MOBILE)\n', (1503, 1561), False, 'from django.db import models\n'), ((1714, 1754), 'django.db.models.EmailField', 'models.EmailField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1731, 1754), False, 'from django.db import models\n'), ((1773, 1845), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1)', 'choices': 'EMAIL_TYPE_CHOICES', 'default': 'WORK'}), '(max_length=1, choices=EMAIL_TYPE_CHOICES, default=WORK)\n', (1789, 1845), False, 'from django.db import 
models\n'), ((1890, 1930), 'django.db.models.EmailField', 'models.EmailField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1907, 1930), False, 'from django.db import models\n'), ((1949, 2021), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1)', 'choices': 'EMAIL_TYPE_CHOICES', 'default': 'WORK'}), '(max_length=1, choices=EMAIL_TYPE_CHOICES, default=WORK)\n', (1965, 2021), False, 'from django.db import models\n'), ((2455, 2562), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)', 'blank': '(True)', 'help_text': '"""Enter short name for labels and ID"""'}), "(max_length=50, null=True, blank=True, help_text=\n 'Enter short name for labels and ID')\n", (2471, 2562), False, 'from django.db import models\n'), ((2617, 2722), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'null': '(True)', 'blank': '(True)', 'help_text': '"""Enter full name of organization"""'}), "(max_length=100, null=True, blank=True, help_text=\n 'Enter full name of organization')\n", (2633, 2722), False, 'from django.db import models\n'), ((2775, 2850), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""self"""'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'blank': '(True)'}), "('self', on_delete=models.SET_NULL, null=True, blank=True)\n", (2792, 2850), False, 'from django.db import models\n'), ((3100, 3154), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(15)', 'null': '(True)', 'blank': '(True)'}), '(max_length=15, null=True, blank=True)\n', (3116, 3154), False, 'from django.db import models\n'), ((3168, 3208), 'django.db.models.EmailField', 'models.EmailField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (3185, 3208), False, 'from django.db import models\n')]
|
import os
import pandas as pd
import sys
sys.path.insert(0, '../')
from LGAIresult import LGAIresult
from utils.common_utils import save_split_txt, load_split_txt, write_excel
def out_excel(ann_list, time_list, pid_list, save_dir):
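    # Build one spreadsheet row per annotated study: look up patient id and creation time,
    # fill the finding columns from the bounds info, then write everything to njjz.xlsx.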
time_dict = get_dict(time_list)
pid_dict = get_dict(pid_list, '\t')
cols = ['patient_id', 'create_time', '气胸', '气胸位置', '胸腔积液', '积液位置','肋骨骨折', '骨折位置', '结节',\
'条索影', '网格影', '实变影', '磨玻璃密度影', '肺大疱', '肺气肿', '胸膜增厚']
excel = []
for ann_dict in ann_list:
cols_dict = get_excel_info_for_bounds(ann_dict['bounds_info'], cols)
cols_dict['patient_id'] = pid_dict[ann_dict['sub_dir'].split('/')[0]]
cols_dict['create_time'] = time_dict[ann_dict['sub_dir']]
current = []
for key, value in cols_dict.items():
current.append(value)
excel.append(current)
out_df = pd.DataFrame(excel, columns=cols)
write_excel(out_df, save_dir, file_name='njjz.xlsx')
def get_dict(list_path, parse_str=' '):
with open(list_path) as f:
lines = f.readlines()
time_dict = {}
for line in lines:
time_dict[line.split(parse_str)[0]] = line.strip().split(parse_str)[1]
return time_dict
def get_excel_info_for_bounds(bounds_info, cols):
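    # Mark each finding category present in bounds_info with 1 and collect the locations
    # of the three localized findings (pleural effusion, pneumothorax, rib fracture).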
cols_dict = {item: 0 for item in cols}
mapping = {"right": "R", "left": "L"}
location = {'胸腔积液': [], '气胸': [], '肋骨骨折': []}
for bound in bounds_info:
category = bound[0]['category']
cols_dict[category] = 1
if category in ['胸腔积液', '气胸', '肋骨骨折']:
location[category].append(bound[0]['location'])
cols_dict['积液位置'] = ''.join(set(location['胸腔积液']))
cols_dict['气胸位置'] = ''.join(set(location['气胸']))
cols_dict['骨折位置'] = ', '.join(set(location['肋骨骨折']))
return cols_dict
if __name__ == "__main__":
root_dir = '/data/shuzhang/tmp_data/njjz_nm/'
sub_dir_list = '/data/shuzhang/tmp_data/njjz_sub_dirs.txt'
time_list = '/data/shuzhang/tmp_data/njjz_sub_dirs_w_dates.txt'
pid_list = '/data/shuzhang/tmp_data/pid.txt'
save_dir = '/data/shuzhang/tmp_data/'
lg_ai = LGAIresult()
lg_ai.init(root_dir, sub_dir_list)
lg_ai.get_ann_list()
#print(lg_ai.ann_list[0]['bounds_info'][0])
out_excel(lg_ai.ann_list, time_list, pid_list, save_dir)
|
[
"pandas.DataFrame",
"utils.common_utils.write_excel",
"LGAIresult.LGAIresult",
"sys.path.insert"
] |
[((41, 66), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""../"""'], {}), "(0, '../')\n", (56, 66), False, 'import sys\n'), ((882, 915), 'pandas.DataFrame', 'pd.DataFrame', (['excel'], {'columns': 'cols'}), '(excel, columns=cols)\n', (894, 915), True, 'import pandas as pd\n'), ((920, 972), 'utils.common_utils.write_excel', 'write_excel', (['out_df', 'save_dir'], {'file_name': '"""njjz.xlsx"""'}), "(out_df, save_dir, file_name='njjz.xlsx')\n", (931, 972), False, 'from utils.common_utils import save_split_txt, load_split_txt, write_excel\n'), ((2111, 2123), 'LGAIresult.LGAIresult', 'LGAIresult', ([], {}), '()\n', (2121, 2123), False, 'from LGAIresult import LGAIresult\n')]
|
from flask import Flask, render_template,request,session,redirect,url_for,flash
from flask_wtf import FlaskForm
from wtforms import (StringField,SubmitField,BooleanField,DateTimeField,
RadioField,SelectField,TextField,TextAreaField)
from wtforms.validators import DataRequired
app = Flask(__name__)
# TODO improve secret_key
app.config['SECRET_KEY'] = 'MY_SECRET_KEY'
# TODO move to separate file
class InfoForm(FlaskForm):
breed = StringField('What breed are you?', validators=[DataRequired()])
neutered = BooleanField('Have you been neutered?')
mood = RadioField('Choose a mood:', choices=[('mood_one', 'Happy'), ('mood_two', 'Excited')])
food_choice = SelectField(u'Favorite food:', choices=[('chi', 'Chicken'), ('bf', 'Beef'), ('fish', 'Fish')])
feedback = TextAreaField('Feedback')
submit = SubmitField('Submit')
@app.route('/', methods=['GET', 'POST'])
def index():
form = InfoForm()
if form.validate_on_submit():
        # using session only because we still haven't learned databases
session['breed'] = form.breed.data
session['neutered'] = form.neutered.data
session['mood'] = form.mood.data
session['food'] = form.food_choice.data
session['feedback'] = form.feedback.data
flash('Thanks for answering the form.')
return redirect(url_for('thank_you'))
return render_template('home.html', form = form)
@app.route('/thank_you')
def thank_you():
return render_template('thankyou.html')
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
if __name__ == '__main__':
app.run(debug=True)
|
[
"flask.flash",
"wtforms.SelectField",
"wtforms.BooleanField",
"wtforms.RadioField",
"flask.Flask",
"wtforms.TextAreaField",
"wtforms.SubmitField",
"flask.url_for",
"flask.render_template",
"wtforms.validators.DataRequired"
] |
[((304, 319), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (309, 319), False, 'from flask import Flask, render_template, request, session, redirect, url_for, flash\n'), ((538, 577), 'wtforms.BooleanField', 'BooleanField', (['"""Have you been neutered?"""'], {}), "('Have you been neutered?')\n", (550, 577), False, 'from wtforms import StringField, SubmitField, BooleanField, DateTimeField, RadioField, SelectField, TextField, TextAreaField\n'), ((589, 679), 'wtforms.RadioField', 'RadioField', (['"""Choose a mood:"""'], {'choices': "[('mood_one', 'Happy'), ('mood_two', 'Excited')]"}), "('Choose a mood:', choices=[('mood_one', 'Happy'), ('mood_two',\n 'Excited')])\n", (599, 679), False, 'from wtforms import StringField, SubmitField, BooleanField, DateTimeField, RadioField, SelectField, TextField, TextAreaField\n'), ((694, 792), 'wtforms.SelectField', 'SelectField', (['u"""Favorite food:"""'], {'choices': "[('chi', 'Chicken'), ('bf', 'Beef'), ('fish', 'Fish')]"}), "(u'Favorite food:', choices=[('chi', 'Chicken'), ('bf', 'Beef'),\n ('fish', 'Fish')])\n", (705, 792), False, 'from wtforms import StringField, SubmitField, BooleanField, DateTimeField, RadioField, SelectField, TextField, TextAreaField\n'), ((804, 829), 'wtforms.TextAreaField', 'TextAreaField', (['"""Feedback"""'], {}), "('Feedback')\n", (817, 829), False, 'from wtforms import StringField, SubmitField, BooleanField, DateTimeField, RadioField, SelectField, TextField, TextAreaField\n'), ((843, 864), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (854, 864), False, 'from wtforms import StringField, SubmitField, BooleanField, DateTimeField, RadioField, SelectField, TextField, TextAreaField\n'), ((1382, 1421), 'flask.render_template', 'render_template', (['"""home.html"""'], {'form': 'form'}), "('home.html', form=form)\n", (1397, 1421), False, 'from flask import Flask, render_template, request, session, redirect, url_for, flash\n'), ((1478, 1510), 'flask.render_template', 'render_template', (['"""thankyou.html"""'], {}), "('thankyou.html')\n", (1493, 1510), False, 'from flask import Flask, render_template, request, session, redirect, url_for, flash\n'), ((1284, 1323), 'flask.flash', 'flash', (['"""Thanks for answering the form."""'], {}), "('Thanks for answering the form.')\n", (1289, 1323), False, 'from flask import Flask, render_template, request, session, redirect, url_for, flash\n'), ((1569, 1596), 'flask.render_template', 'render_template', (['"""404.html"""'], {}), "('404.html')\n", (1584, 1596), False, 'from flask import Flask, render_template, request, session, redirect, url_for, flash\n'), ((1348, 1368), 'flask.url_for', 'url_for', (['"""thank_you"""'], {}), "('thank_you')\n", (1355, 1368), False, 'from flask import Flask, render_template, request, session, redirect, url_for, flash\n'), ((506, 520), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (518, 520), False, 'from wtforms.validators import DataRequired\n')]
|
import cv2
import glob
import os
# Fill in the output with upscaled frames.
# Make sure to match scale and interpolation mode.
SCALE = 2
if __name__ == "__main__":
frames = sorted(glob.glob("frames/*.jpg"))
for frame_index, frame in enumerate(frames):
output_frame = "output/{:05d}.png".format(frame_index)
if os.path.exists(output_frame):
continue
img = cv2.imread(frame, cv2.IMREAD_COLOR)
simg = cv2.resize(img, (img.shape[1] * SCALE, img.shape[0] * SCALE), interpolation=cv2.INTER_LANCZOS4)
cv2.imwrite(output_frame, simg)
|
[
"cv2.imwrite",
"os.path.exists",
"cv2.imread",
"glob.glob",
"cv2.resize"
] |
[((185, 210), 'glob.glob', 'glob.glob', (['"""frames/*.jpg"""'], {}), "('frames/*.jpg')\n", (194, 210), False, 'import glob\n'), ((326, 354), 'os.path.exists', 'os.path.exists', (['output_frame'], {}), '(output_frame)\n', (340, 354), False, 'import os\n'), ((382, 417), 'cv2.imread', 'cv2.imread', (['frame', 'cv2.IMREAD_COLOR'], {}), '(frame, cv2.IMREAD_COLOR)\n', (392, 417), False, 'import cv2\n'), ((429, 529), 'cv2.resize', 'cv2.resize', (['img', '(img.shape[1] * SCALE, img.shape[0] * SCALE)'], {'interpolation': 'cv2.INTER_LANCZOS4'}), '(img, (img.shape[1] * SCALE, img.shape[0] * SCALE), interpolation\n =cv2.INTER_LANCZOS4)\n', (439, 529), False, 'import cv2\n'), ((530, 561), 'cv2.imwrite', 'cv2.imwrite', (['output_frame', 'simg'], {}), '(output_frame, simg)\n', (541, 561), False, 'import cv2\n')]
|
import flask
from flask import Flask, session, render_template,redirect, url_for
from run import server
@server.route('/')
def index():
return render_template("tbases/t_index.html", startpage=True)
#@server.route('/dashboard/')
#def dashboard():
# return render_template("tbases/t_index.html", startpage=True)
|
[
"run.server.route",
"flask.render_template"
] |
[((110, 127), 'run.server.route', 'server.route', (['"""/"""'], {}), "('/')\n", (122, 127), False, 'from run import server\n'), ((154, 208), 'flask.render_template', 'render_template', (['"""tbases/t_index.html"""'], {'startpage': '(True)'}), "('tbases/t_index.html', startpage=True)\n", (169, 208), False, 'from flask import Flask, session, render_template, redirect, url_for\n')]
|
# Copyright 2020 University of Illinois Board of Trustees. All Rights Reserved.
# Author: <NAME>, DPRG (https://dprg.cs.uiuc.edu)
# This file is part of Baechi, which is released under specific terms. See the License.txt file for full license details.
# ==============================================================================
import argparse
import json
import time
import numpy as np
import tensorflow as tf
from sklearn.linear_model import LinearRegression
from tensorflow.python.client import timeline
from utils import logger
_LOGGER = logger.get_logger(__file__)
def run_benchmark(tensor_size, from_gpu_id, to_gpu_id,
warmup_count=5, num_measurements=20):
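    # Build a minimal two-GPU graph (a matmul on the source GPU feeding a matmul on the
    # destination GPU) and run it repeatedly, collecting a RunMetadata trace per run.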
with tf.Graph().as_default():
with tf.device('/device:GPU:%d' % from_gpu_id):
x1 = tf.get_variable("var1a", [tensor_size, 1])
x2 = tf.get_variable("var1b", [1, 1])
from_op = tf.matmul(x1, x2, name='from_op')
with tf.device('/device:GPU:%d' % to_gpu_id):
y = tf.get_variable("var2", [1, 1])
out = tf.matmul(from_op, y, name='to_op')
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
# warm up
for _ in range(warmup_count):
sess.run(out)
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata_list = []
for _ in range(num_measurements):
run_metadata = tf.RunMetadata()
sess.run(out, options=run_options, run_metadata=run_metadata)
run_metadata_list.append(run_metadata)
return run_metadata_list
def get_transfer_time(timeline_json, from_op_name='from_op',
to_op_name='to_op'):
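    # Scan the Chrome trace for the latest completion of from_op and the earliest start
    # of to_op; the gap between the two is taken as the device-to-device transfer time.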
data = timeline_json['traceEvents']
end_ts = start_ts = None
for dic in data:
for key, value in dic.items():
if key == 'cat' and value == 'Op':
for key, value in dic.items():
if key == "args" and value['name'] == from_op_name:
new_end_ts = dic['ts'] + dic['dur']
end_ts = max(end_ts or new_end_ts, new_end_ts)
if key == "args" and value['name'] == to_op_name:
new_start_ts = dic['ts']
start_ts = min(start_ts or new_start_ts, new_start_ts)
transfer_time = start_ts - end_ts
assert transfer_time > 0
return transfer_time
def generate_dataset(results):
transfer_times_by_size = []
for tensor_size, run_metadata_list in results:
transfer_times = []
for run_metadata in run_metadata_list:
chrome_trace_str = timeline.Timeline(
run_metadata.step_stats).generate_chrome_trace_format()
timeline_json = json.loads(chrome_trace_str)
transfer_times.append(get_transfer_time(timeline_json))
transfer_times_by_size.append((tensor_size, transfer_times))
X = []
Y = []
for x, ys in transfer_times_by_size:
for y in ys:
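            # Tensor sizes are element counts; x * 4 converts them to bytes, assuming the
            # benchmark variables use the default 4-byte float32 dtype.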
X.append(x * 4)
Y.append(y)
return X, Y
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--from_gpu_id', type=int, default=0,
help='From GPU ID')
parser.add_argument('--to_gpu_id', type=int, default=1,
help='To GPU ID')
parser.add_argument('--exponent', type=int, default=30,
help='Max tensor size. 2^(exponent).')
args = parser.parse_args()
tensor_sizes = [2 ** i for i in range(0, args.exponent)]
_LOGGER.info('Running benchmark to measure communication costs')
results = []
for tensor_size in tensor_sizes:
run_metadata_list = run_benchmark(
tensor_size, args.from_gpu_id, args.to_gpu_id)
results.append((tensor_size, run_metadata_list))
X, Y = generate_dataset(results)
reg = LinearRegression().fit([[x] for x in X], [[y] for y in Y])
print('Communication cost function: {} x + {}'.format(
reg.coef_[0][0], reg.intercept_[0]))
if __name__ == "__main__":
main()
|
[
"argparse.ArgumentParser",
"json.loads",
"utils.logger.get_logger",
"tensorflow.global_variables_initializer",
"tensorflow.device",
"tensorflow.Session",
"tensorflow.python.client.timeline.Timeline",
"sklearn.linear_model.LinearRegression",
"tensorflow.matmul",
"tensorflow.RunMetadata",
"tensorflow.Graph",
"tensorflow.RunOptions",
"tensorflow.get_variable"
] |
[((552, 579), 'utils.logger.get_logger', 'logger.get_logger', (['__file__'], {}), '(__file__)\n', (569, 579), False, 'from utils import logger\n'), ((3181, 3206), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3204, 3206), False, 'import argparse\n'), ((740, 781), 'tensorflow.device', 'tf.device', (["('/device:GPU:%d' % from_gpu_id)"], {}), "('/device:GPU:%d' % from_gpu_id)\n", (749, 781), True, 'import tensorflow as tf\n'), ((800, 842), 'tensorflow.get_variable', 'tf.get_variable', (['"""var1a"""', '[tensor_size, 1]'], {}), "('var1a', [tensor_size, 1])\n", (815, 842), True, 'import tensorflow as tf\n'), ((860, 892), 'tensorflow.get_variable', 'tf.get_variable', (['"""var1b"""', '[1, 1]'], {}), "('var1b', [1, 1])\n", (875, 892), True, 'import tensorflow as tf\n'), ((915, 948), 'tensorflow.matmul', 'tf.matmul', (['x1', 'x2'], {'name': '"""from_op"""'}), "(x1, x2, name='from_op')\n", (924, 948), True, 'import tensorflow as tf\n'), ((962, 1001), 'tensorflow.device', 'tf.device', (["('/device:GPU:%d' % to_gpu_id)"], {}), "('/device:GPU:%d' % to_gpu_id)\n", (971, 1001), True, 'import tensorflow as tf\n'), ((1019, 1050), 'tensorflow.get_variable', 'tf.get_variable', (['"""var2"""', '[1, 1]'], {}), "('var2', [1, 1])\n", (1034, 1050), True, 'import tensorflow as tf\n'), ((1069, 1104), 'tensorflow.matmul', 'tf.matmul', (['from_op', 'y'], {'name': '"""to_op"""'}), "(from_op, y, name='to_op')\n", (1078, 1104), True, 'import tensorflow as tf\n'), ((1119, 1131), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (1129, 1131), True, 'import tensorflow as tf\n'), ((1318, 1369), 'tensorflow.RunOptions', 'tf.RunOptions', ([], {'trace_level': 'tf.RunOptions.FULL_TRACE'}), '(trace_level=tf.RunOptions.FULL_TRACE)\n', (1331, 1369), True, 'import tensorflow as tf\n'), ((2834, 2862), 'json.loads', 'json.loads', (['chrome_trace_str'], {}), '(chrome_trace_str)\n', (2844, 2862), False, 'import json\n'), ((3965, 3983), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (3981, 3983), False, 'from sklearn.linear_model import LinearRegression\n'), ((702, 712), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (710, 712), True, 'import tensorflow as tf\n'), ((1162, 1195), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (1193, 1195), True, 'import tensorflow as tf\n'), ((1482, 1498), 'tensorflow.RunMetadata', 'tf.RunMetadata', ([], {}), '()\n', (1496, 1498), True, 'import tensorflow as tf\n'), ((2715, 2757), 'tensorflow.python.client.timeline.Timeline', 'timeline.Timeline', (['run_metadata.step_stats'], {}), '(run_metadata.step_stats)\n', (2732, 2757), False, 'from tensorflow.python.client import timeline\n')]
|
# coding:utf-8
import os
from pathlib import Path
from io import StringIO, BytesIO, IOBase
from typing import Union
FileTypes = (IOBase,)
FileType = Union[FileTypes]
DEFAULT_FILENAME_DATE_FMT = (
"%Y-%m-%d_%H:%M:%S"
) # Format for dates appended to files or dirs.
# This will lexsort in temporal order.
DEFAULT_FILENAME_N_DIGITS = 6
def ensure_dir(dir_):
try:
if not os.path.exists(dir_):
os.mkdir(dir_)
return dir_
except FileExistsError:
if os.path.isfile(dir_):
raise NotADirectoryError("{} exists but is not a directory".format(dir_))
return None
def get_file(file_or_path, mode=None, allow_dir=False):
"""if a file object is passed, return it unaltered, with a flag indicating that the file should not be closed
by the caller (the opener may have other uses for it). In this case, if mode is also passed, it is checked against
the existing file's mode and a ValueError is thrown if they disagree.
If a string is passed, it is treated as a path and a file at that location is opened and returned with a flag
indicating that the file should be closed by the caller.
"""
if not isinstance(file_or_path, FileTypes):
if isinstance(file_or_path, (str, Path)) and os.path.isdir(file_or_path):
if not allow_dir:
raise IsADirectoryError(
"allow_dir=False but {} is a directory".format(file_or_path)
)
else:
close = False
file = Path(file_or_path)
else:
close = True
if mode:
file = open(file_or_path, mode)
else:
file = open(file_or_path)
else:
close = False
file = file_or_path
if mode is not None:
if hasattr(file, "mode") and mode != file.mode:
raise ValueError(
"mode {} was requested, but the given file has mode {}".format(
mode, file.mode
)
)
elif isinstance(file, StringIO) and "b" in mode:
raise ValueError(
"mode {} was requested, but the given file is a {}, which supports only text IO".format(
mode, type(file)
)
)
elif isinstance(file, BytesIO) and "b" not in mode:
raise ValueError(
"mode {} was requested, but the given file is a {}, which supports only binary IO".format(
mode, type(file)
)
)
return file, close
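# Illustrative usage sketch for get_file (the path "config.txt" is only an example):
# the caller closes the handle only when get_file opened it itself.
#   f, should_close = get_file("config.txt", mode="r")
#   try:
#       data = f.read()
#   finally:
#       if should_close:
#           f.close()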
def is_newer(file1, file2):
if file2 is None:
return True
mtime1 = os.stat(file1).st_mtime
if isinstance(file2, (str, bytes, Path)):
mtime2 = os.stat(file2).st_mtime
elif isinstance(file2, (float, int)):
mtime2 = file2
else:
raise TypeError(
"file2 must be str, pathlib.Path, None, float, or int; got {}".format(
type(file2)
)
)
return mtime1 > mtime2
def dir_prefix_and_ext(prefix, ext=None):
dir_ = os.path.dirname(prefix)
if ext is None:
prefix, ext = os.path.splitext(os.path.basename(prefix))
if prefix.startswith(".") and ext == "":
prefix, ext = ext, prefix
else:
prefix = os.path.basename(prefix)
return dir_, prefix, ext
def path_with_ext(file_path, ext=None, disambiguate=False):
file_path, ext_ = _path_with_ext(file_path, ext)
if not ext_ and not ext:
if disambiguate:
file_path, ext_ = _path_with_ext(disambiguate_path(file_path), ext)
else:
raise ValueError(
"no extension specified for file path {}; try passing one manually via the "
"`ext` arg or specify `disambiguate=True`".format(file_path)
)
else:
ext_ = ext or ext_
return file_path, ext_
def _path_with_ext(path, ext=None):
name, ext_ = os.path.splitext(path)
if ext_:
if ext is not None and ext_ != ext:
raise ValueError(
"ambiguous extension; config_file has extension {} while ext is {}".format(
ext_, ext
)
)
ext_ = ext or ext_
p, e = name + ext_, ext_
return p, e
def disambiguate_path(file_path):
"""Find the unique file with path `file_path`, excluding extensions. If there is no such file, raise
FileNotFoundError"""
dir_, name, ext = dir_prefix_and_ext(file_path)
dir_ = dir_ or None # don't allow empty string for dir
paths = [path for path in os.listdir(dir_) if os.path.splitext(path)[0] == name]
if len(paths) == 0:
raise FileNotFoundError(
"No file with any extension found at {}".format(file_path)
)
elif len(paths) != 1:
raise FileNotFoundError(
"Amiguous config path {}; multiple matches found: {}".format(
file_path, paths
)
)
p = os.path.join(dir_ or "", paths[0])
return p
|
[
"os.mkdir",
"os.stat",
"os.path.basename",
"os.path.isdir",
"os.path.dirname",
"os.path.exists",
"os.path.isfile",
"pathlib.Path",
"os.path.splitext",
"os.path.join",
"os.listdir"
] |
[((3191, 3214), 'os.path.dirname', 'os.path.dirname', (['prefix'], {}), '(prefix)\n', (3206, 3214), False, 'import os\n'), ((4066, 4088), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (4082, 4088), False, 'import os\n'), ((5099, 5133), 'os.path.join', 'os.path.join', (["(dir_ or '')", 'paths[0]'], {}), "(dir_ or '', paths[0])\n", (5111, 5133), False, 'import os\n'), ((2763, 2777), 'os.stat', 'os.stat', (['file1'], {}), '(file1)\n', (2770, 2777), False, 'import os\n'), ((3414, 3438), 'os.path.basename', 'os.path.basename', (['prefix'], {}), '(prefix)\n', (3430, 3438), False, 'import os\n'), ((388, 408), 'os.path.exists', 'os.path.exists', (['dir_'], {}), '(dir_)\n', (402, 408), False, 'import os\n'), ((422, 436), 'os.mkdir', 'os.mkdir', (['dir_'], {}), '(dir_)\n', (430, 436), False, 'import os\n'), ((496, 516), 'os.path.isfile', 'os.path.isfile', (['dir_'], {}), '(dir_)\n', (510, 516), False, 'import os\n'), ((1274, 1301), 'os.path.isdir', 'os.path.isdir', (['file_or_path'], {}), '(file_or_path)\n', (1287, 1301), False, 'import os\n'), ((2850, 2864), 'os.stat', 'os.stat', (['file2'], {}), '(file2)\n', (2857, 2864), False, 'import os\n'), ((3274, 3298), 'os.path.basename', 'os.path.basename', (['prefix'], {}), '(prefix)\n', (3290, 3298), False, 'import os\n'), ((4707, 4723), 'os.listdir', 'os.listdir', (['dir_'], {}), '(dir_)\n', (4717, 4723), False, 'import os\n'), ((1544, 1562), 'pathlib.Path', 'Path', (['file_or_path'], {}), '(file_or_path)\n', (1548, 1562), False, 'from pathlib import Path\n'), ((4727, 4749), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (4743, 4749), False, 'import os\n')]
|
import http
from socket import timeout
from goprocam import GoProCamera
from .conftest import GoProCameraTest
class WhichCamTest(GoProCameraTest):
def setUp(self):
super().setUp()
# disable this so we can test it separately
self.monkeypatch.setattr(GoProCamera.GoPro, '_prepare_gpcontrol',
lambda self: self)
    def test_already_detected(self):
assert self.goprocam.whichCam() == 'gpcontrol'
def test_detection_exceptions(self):
# this would, of course, be better as a parametrised test
for firmware_version in ['HX', 'FS', 'HD3.02', 'H18', 'HD3.22.01.50']:
self.goprocam._camera = ''
self.responses['/gp/gpControl']['info']['firmware_version'] = \
firmware_version
assert self.goprocam.whichCam() == 'gpcontrol'
def test_auth_detection(self):
self.goprocam._camera = ''
self.responses['/camera/cv'] = 'Hero3'
for firmware_version in ['HD2', '4', 'HD3.1']:
self.responses['/gp/gpControl']['info']['firmware_version'] = \
firmware_version
assert self.goprocam.whichCam() == 'auth'
def test_auth_detection_not_hero3(self):
self.goprocam._camera = ''
self.responses['/camera/cv'] = 'Hero2'
for firmware_version in ['HD2', '4', 'HD3.1']:
self.responses['/gp/gpControl']['info']['firmware_version'] = \
firmware_version
assert self.goprocam.whichCam() == ''
def test_auth_detection_without_gpcontrol(self):
self.goprocam._camera = ''
self.responses = {'/camera/cv': 'Hero3'}
assert self.goprocam.whichCam() == 'auth'
def test_gpcontrol_detection_without_gpcontrol_not_hero3(self):
self.goprocam._camera = ''
del(self.responses['/gp/gpControl'])
self.responses['/camera/cv'] = 'Hero2'
assert self.goprocam.whichCam() == ''
def test_gpcontrol_detection_without_gpcontrol(self):
# this will attempt to power on the camera - which we have intercepted
self.goprocam._camera = ''
self.responses = {}
assert self.goprocam.whichCam() == ''
def test_cv_timeout_while_detecting(self):
self.goprocam._camera = ''
self.responses = {'/camera/cv': timeout()}
assert self.goprocam.whichCam() == ''
def test_gpcontrol_timeout_while_detecting_hero3(self):
self.goprocam._camera = ''
self.responses['/gp/gpControl'] = timeout()
# this copes poorly with errors, so help it along
self.responses['/camera/cv'] = 'Hero3'
assert self.goprocam.whichCam() == 'auth'
def test_gpcontrol_timeout_while_detecting_hero2(self):
self.goprocam._camera = ''
self.responses['/gp/gpControl'] = timeout()
# this copes poorly with errors, so help it along
self.responses['/camera/cv'] = 'Hero2'
assert self.goprocam.whichCam() == ''
def test_gpcontrol_exception_while_detecting(self):
self.goprocam._camera = ''
self.responses['/gp/gpControl'] = http.client.HTTPException()
# this copes poorly with errors, so help it along
self.responses['/camera/cv'] = 'Hero3'
# different power-on!
with self.monkeypatch.context() as m:
def print_verify(args):
assert isinstance(args, http.client.HTTPException) or \
args == 'HERO3/3+'
m.setattr('builtins.print', print_verify)
m.setattr(GoProCamera.GoPro, 'power_on_auth', lambda self: self)
assert self.goprocam.whichCam() == 'auth'
def test_gpcontrol_exception_while_detecting_not_hero3(self):
self.goprocam._camera = ''
self.responses['/gp/gpControl'] = http.client.HTTPException()
# this copes poorly with errors, so help it along
self.responses['/camera/cv'] = 'Hero2'
# different power-on!
with self.monkeypatch.context() as m:
def print_verify(args):
assert isinstance(args, http.client.HTTPException)
m.setattr('builtins.print', print_verify)
m.setattr(GoProCamera.GoPro, 'power_on_auth', lambda self: self)
assert self.goprocam.whichCam() == 'auth'
|
[
"socket.timeout",
"http.client.HTTPException"
] |
[((2535, 2544), 'socket.timeout', 'timeout', ([], {}), '()\n', (2542, 2544), False, 'from socket import timeout\n'), ((2838, 2847), 'socket.timeout', 'timeout', ([], {}), '()\n', (2845, 2847), False, 'from socket import timeout\n'), ((3133, 3160), 'http.client.HTTPException', 'http.client.HTTPException', ([], {}), '()\n', (3158, 3160), False, 'import http\n'), ((3818, 3845), 'http.client.HTTPException', 'http.client.HTTPException', ([], {}), '()\n', (3843, 3845), False, 'import http\n'), ((2340, 2349), 'socket.timeout', 'timeout', ([], {}), '()\n', (2347, 2349), False, 'from socket import timeout\n')]
|
# -*- coding: utf-8 -*-
"""
@File : yolo_label.py
@Author : Jackie
@Description :
"""
import json
import os
import sys  # needed for sys.exc_info() in the error handler below
from shutil import copyfile
from sys import exit
sets = ['train', 'valid']
classes = ["nie es8","maybach s650","toyota gt8","tesla modelx"] #
def load_vim_label(labelfile):
with open(labelfile, "r") as f:
annotations = json.load(f, encoding='unicode-escape')
image_list = annotations['_via_image_id_list']
print ("<<<image ls: ", image_list)
print (annotations)
def preprocess(imgfolder,targetfolder):
image_list = os.listdir(imgfolder)
print ('total number:', len(image_list))
if not os.path.isdir(targetfolder):
os.makedirs(targetfolder)
for i in range(len(image_list)):
#print(image_list[i])
        # iterate over all files
source = os.path.join(imgfolder, image_list[i])
target = os.path.join(targetfolder, str(i)+'.jpg')
# adding exception handling
try:
copyfile(source, target)
except IOError as e:
print("Unable to copy file. %s" % e)
exit(1)
except:
print("Unexpected error:", sys.exc_info())
exit(1)
print ("<<<< finish rename imgs!")
if __name__ == "__main__":
# first make sure your images is preprocessed before labeling!
imgfolder = '/Users/liujunyi/Desktop/spottag/summit-training/道路/pics/imgs'
preprocess(imgfolder,'../data/custom/images')
#load_vim_label('../data/custom/labels/car-type.json')
'''
# sets generation
image_list = os.listdir('../data/custom/images')
label_list = os.listdir('../data/custom/labels')
images = [i[:-4] for i in image_list]
print ("<<<<< length before", len(images))
xml_images = [i[:-4] for i in label_list]
print ("<<< images: ", images)
print ("<<< xml_images: ", xml_images)
images = [val for val in images if val in xml_images]
print ("<<<<< length after", len(images))
image_len = len(images)
num_train = image_len - int(image_len * 0.2)
num_test = int(image_len * 0.2)
print ("<<<< NUM TRAIN: ", num_train)
print ("<<<< NUM TEST: ", num_test)
print ("<<<< check if exisits")
if not os.path.exists('./data/custom'):
os.makedirs('./data/custom')
train_file = open('../data/custom/train.txt', 'w')
test_file = open('../data/custom/valid.txt', 'w')
i = 0
for image_id in image_list:
if i < num_train:
# print (">>> images for train: ",image_id)
train_file.write('%s\n' % ('data/custom/images/' + image_id))
else:
# print (">>> images for valid: ",image_id)
test_file.write('%s\n' % ('data/custom/images/' + image_id))
i = i + 1
train_file.close()
test_file.close()
'''
|
[
"json.load",
"os.makedirs",
"os.path.isdir",
"shutil.copyfile",
"os.path.join",
"os.listdir",
"sys.exit"
] |
[((568, 589), 'os.listdir', 'os.listdir', (['imgfolder'], {}), '(imgfolder)\n', (578, 589), False, 'import os\n'), ((354, 393), 'json.load', 'json.load', (['f'], {'encoding': '"""unicode-escape"""'}), "(f, encoding='unicode-escape')\n", (363, 393), False, 'import json\n'), ((647, 674), 'os.path.isdir', 'os.path.isdir', (['targetfolder'], {}), '(targetfolder)\n', (660, 674), False, 'import os\n'), ((684, 709), 'os.makedirs', 'os.makedirs', (['targetfolder'], {}), '(targetfolder)\n', (695, 709), False, 'import os\n'), ((812, 850), 'os.path.join', 'os.path.join', (['imgfolder', 'image_list[i]'], {}), '(imgfolder, image_list[i])\n', (824, 850), False, 'import os\n'), ((972, 996), 'shutil.copyfile', 'copyfile', (['source', 'target'], {}), '(source, target)\n', (980, 996), False, 'from shutil import copyfile\n'), ((1087, 1094), 'sys.exit', 'exit', (['(1)'], {}), '(1)\n', (1091, 1094), False, 'from sys import exit\n'), ((1178, 1185), 'sys.exit', 'exit', (['(1)'], {}), '(1)\n', (1182, 1185), False, 'from sys import exit\n')]
|
# models.py
from app import db
class Passenger(db.Model):
__tablename__ = "passengers"
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(30), nullable=False)
dob = db.Column(db.Date, nullable=False)
email = db.Column(db.String(30), unique=True, nullable=False)
address = db.Column(db.Text, nullable=False)
def __repr__(self):
return '<name {}>'.format(self.name)
class Plane(db.Model):
__tablename__ = "planes"
id = db.Column(db.Integer, primary_key=True)
model = db.Column(db.String(30), unique=True, nullable=False)
capacity = db.Column(db.Integer, nullable=False)
flight_number = db.Column(db.String(30), unique=True, nullable=False)
def __repr__(self):
return '<model = {} --- capacity = {} --- flight number = {}>'.format(self.model, self.capacity, self.flight_number)
class Flight(db.Model):
__tablename__ = 'flights'
id = db.Column(db.Integer, primary_key=True)
source = db.Column(db.String(200), nullable=False)
destination = db.Column(db.String(200), nullable=False)
plane_id = db.Column(db.Integer, db.ForeignKey('planes.id'))
plane = db.relationship("Plane", backref=db.backref('flights', lazy=True))
departure_time = db.Column(db.DateTime, nullable=False)
departure_zip_code = db.Column(db.Integer, nullable=False)
arrival_time = db.Column(db.DateTime, nullable=False)
arrival_zip_code = db.Column(db.Integer, nullable=False)
locale = db.Column(db.String(50), nullable=False)
tickets = db.relationship("Ticket", backref="flights", lazy="dynamic", order_by='Ticket.id')
class Ticket(db.Model):
__tablename__ = 'tickets'
id = db.Column(db.Integer, primary_key=True)
seat_number = db.Column(db.String(4), nullable=False)
price = db.Column(db.Float, nullable=False)
available = db.Column(db.Boolean, nullable=False)
flight_id = db.Column(db.Integer, db.ForeignKey('flights.id'), nullable=False)
flight = db.relationship("Flight")
def __repr__(self):
return '<seat_number = {} --- available = {} ----'.format(self.seat_number, self.available)
class AirFare(db.Model):
__tablename__ = 'airfares'
id = db.Column(db.Integer, primary_key=True)
amount = db.Column(db.Numeric, nullable=False)
description = db.Column(db.String(100), nullable=True)
class Transaction(db.Model):
__tablename__ = 'transactions'
id = db.Column(db.Integer, primary_key=True)
# booking_date_time = db.Column(db.DateTime, nullable=False)
passenger_id = db.Column(db.Integer, db.ForeignKey('passengers.id'))
passenger = db.relationship('Passenger', backref='transactions', cascade='save-update')
ticket_id = db.Column(db.Integer, db.ForeignKey('tickets.id'))
ticket = db.relationship('Ticket', backref='transactions', cascade='save-update')
# airfare_id = db.Column(db.Integer, db.ForeignKey('airfares.id'))
# airfare = db.relationship('AirFare', backref='transactions')
|
[
"app.db.backref",
"app.db.relationship",
"app.db.Column",
"app.db.String",
"app.db.ForeignKey"
] |
[((103, 142), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (112, 142), False, 'from app import db\n'), ((205, 239), 'app.db.Column', 'db.Column', (['db.Date'], {'nullable': '(False)'}), '(db.Date, nullable=False)\n', (214, 239), False, 'from app import db\n'), ((320, 354), 'app.db.Column', 'db.Column', (['db.Text'], {'nullable': '(False)'}), '(db.Text, nullable=False)\n', (329, 354), False, 'from app import db\n'), ((489, 528), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (498, 528), False, 'from app import db\n'), ((610, 647), 'app.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(False)'}), '(db.Integer, nullable=False)\n', (619, 647), False, 'from app import db\n'), ((938, 977), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (947, 977), False, 'from app import db\n'), ((1258, 1296), 'app.db.Column', 'db.Column', (['db.DateTime'], {'nullable': '(False)'}), '(db.DateTime, nullable=False)\n', (1267, 1296), False, 'from app import db\n'), ((1322, 1359), 'app.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(False)'}), '(db.Integer, nullable=False)\n', (1331, 1359), False, 'from app import db\n'), ((1379, 1417), 'app.db.Column', 'db.Column', (['db.DateTime'], {'nullable': '(False)'}), '(db.DateTime, nullable=False)\n', (1388, 1417), False, 'from app import db\n'), ((1441, 1478), 'app.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(False)'}), '(db.Integer, nullable=False)\n', (1450, 1478), False, 'from app import db\n'), ((1547, 1634), 'app.db.relationship', 'db.relationship', (['"""Ticket"""'], {'backref': '"""flights"""', 'lazy': '"""dynamic"""', 'order_by': '"""Ticket.id"""'}), "('Ticket', backref='flights', lazy='dynamic', order_by=\n 'Ticket.id')\n", (1562, 1634), False, 'from app import db\n'), ((1696, 1735), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (1705, 1735), False, 'from app import db\n'), ((1806, 1841), 'app.db.Column', 'db.Column', (['db.Float'], {'nullable': '(False)'}), '(db.Float, nullable=False)\n', (1815, 1841), False, 'from app import db\n'), ((1858, 1895), 'app.db.Column', 'db.Column', (['db.Boolean'], {'nullable': '(False)'}), '(db.Boolean, nullable=False)\n', (1867, 1895), False, 'from app import db\n'), ((1992, 2017), 'app.db.relationship', 'db.relationship', (['"""Flight"""'], {}), "('Flight')\n", (2007, 2017), False, 'from app import db\n'), ((2211, 2250), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (2220, 2250), False, 'from app import db\n'), ((2264, 2301), 'app.db.Column', 'db.Column', (['db.Numeric'], {'nullable': '(False)'}), '(db.Numeric, nullable=False)\n', (2273, 2301), False, 'from app import db\n'), ((2437, 2476), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (2446, 2476), False, 'from app import db\n'), ((2632, 2707), 'app.db.relationship', 'db.relationship', (['"""Passenger"""'], {'backref': '"""transactions"""', 'cascade': '"""save-update"""'}), "('Passenger', backref='transactions', cascade='save-update')\n", (2647, 2707), False, 'from app import db\n'), ((2789, 2861), 'app.db.relationship', 'db.relationship', (['"""Ticket"""'], {'backref': '"""transactions"""', 'cascade': '"""save-update"""'}), "('Ticket', backref='transactions', 
cascade='save-update')\n", (2804, 2861), False, 'from app import db\n'), ((164, 177), 'app.db.String', 'db.String', (['(30)'], {}), '(30)\n', (173, 177), False, 'from app import db\n'), ((262, 275), 'app.db.String', 'db.String', (['(30)'], {}), '(30)\n', (271, 275), False, 'from app import db\n'), ((551, 564), 'app.db.String', 'db.String', (['(30)'], {}), '(30)\n', (560, 564), False, 'from app import db\n'), ((678, 691), 'app.db.String', 'db.String', (['(30)'], {}), '(30)\n', (687, 691), False, 'from app import db\n'), ((1001, 1015), 'app.db.String', 'db.String', (['(200)'], {}), '(200)\n', (1010, 1015), False, 'from app import db\n'), ((1061, 1075), 'app.db.String', 'db.String', (['(200)'], {}), '(200)\n', (1070, 1075), False, 'from app import db\n'), ((1130, 1156), 'app.db.ForeignKey', 'db.ForeignKey', (['"""planes.id"""'], {}), "('planes.id')\n", (1143, 1156), False, 'from app import db\n'), ((1502, 1515), 'app.db.String', 'db.String', (['(50)'], {}), '(50)\n', (1511, 1515), False, 'from app import db\n'), ((1764, 1776), 'app.db.String', 'db.String', (['(4)'], {}), '(4)\n', (1773, 1776), False, 'from app import db\n'), ((1934, 1961), 'app.db.ForeignKey', 'db.ForeignKey', (['"""flights.id"""'], {}), "('flights.id')\n", (1947, 1961), False, 'from app import db\n'), ((2330, 2344), 'app.db.String', 'db.String', (['(100)'], {}), '(100)\n', (2339, 2344), False, 'from app import db\n'), ((2584, 2614), 'app.db.ForeignKey', 'db.ForeignKey', (['"""passengers.id"""'], {}), "('passengers.id')\n", (2597, 2614), False, 'from app import db\n'), ((2747, 2774), 'app.db.ForeignKey', 'db.ForeignKey', (['"""tickets.id"""'], {}), "('tickets.id')\n", (2760, 2774), False, 'from app import db\n'), ((1203, 1235), 'app.db.backref', 'db.backref', (['"""flights"""'], {'lazy': '(True)'}), "('flights', lazy=True)\n", (1213, 1235), False, 'from app import db\n')]
|
import json
import logging
import traceback
import sys
from mqtt_performance_tester.mqtt_utils import *
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
### CLASS TO STORE AN MQTT MESSAGE
class packet():
counter = 0
def __init__(self):
self.protocol = None
self.frame_id = None
self.type = None
self.size = -1
self.payload_size = 0
self.delta_time = -1
self.epoc_time = -1
self.mid = -1
def __repr__(self):
return "--- mid:%s \t%s \tType:%s \tSize:%s \tTime:%s \tEpoc:%s" \
%(self.mid, self.protocol, self.type, self.size, self.delta_time, self.epoc_time)
### CLASS TO COMPUTE ALL THE PERFORMANCE PARAMETERS
class mqtt_performance():
def __init__(self, data, num_request, qos=1):
self.num_request = num_request
self.qos = qos
self.data = data
self.packets = []
self.size_tcp = 0
self.size_mqtt = 0
self.size_udp = 0
self.size_others = 0
self.counter = 0
self.num_mqtt = 0
self.num_tcp = 0
self.num_upd = 0
self.num_others = 0
self.mqtt_types = []
self.mqtt_ids = []
self._parse_json()
def _parse_json(self):
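        # Walk the exported Wireshark JSON: classify every frame as MQTT, UDP, TCP or
        # other, accumulate per-protocol packet counts and byte totals, and keep the
        # MQTT packets (with type and message id) for later matching.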
self.counter = 0
index = 0 # start_counter
msg = None
for pkt in self.data:
msg = packet()
try:
msg.frame_id = extract_field(pkt, 'frame_id')
msg.size = int(extract_field(pkt, "frame_size"))
# Read TIME
msg.delta_time = extract_field(pkt, "time_delta")
msg.epoc_time = extract_field(pkt, "time_epoch")
for layer in pkt["_source"]['layers']:
if layer == 'mqtt':
logger.debug ("---- Packet: {0}".format(pkt["_source"]['layers'][layer]))
if 'mqtt' in pkt["_source"]['layers']:
self.counter += 1
msg.type = extract_field(pkt, "mqtt_type")
msg.payload_size = extract_field(pkt, "mqtt_size", msg.type)
msg.mid = extract_field(pkt, "mqtt_id", msg.type)
msg.protocol = "mqtt"
logger.debug("MQTT Message Type {0} - ID:{1}".format(msg.type, msg.mid))
logger.debug("Numero di messaggi MQTT: {0}".format(len(pkt["_source"]['layers']['mqtt'])))
if msg.type not in self.mqtt_types:
self.mqtt_types.append(msg.type)
if msg.mid not in self.mqtt_ids or msg.mid == 'NA':
if msg.mid != 'NA':
self.mqtt_ids.append(msg.mid)
self.mqtt_ids.append(msg.mid)
else:
logger.debug("DUP packet %s" %repr(msg))
self.num_mqtt += 1
self.size_mqtt += msg.size
self.packets.append(msg)
elif 'udp' in pkt["_source"]['layers']:
msg.protocol = "udp"
                    msg.size = int(extract_field(pkt, "udp_size"))
                    self.size_udp += msg.size
self.num_upd += 1
elif 'tcp' in pkt["_source"]['layers']:
msg.protocol = "tcp"
                    msg.payload_size = int(extract_field(pkt, "tcp_size"))
self.size_tcp += msg.size
self.num_tcp += 1
else:
msg.protocol = extract_field(pkt, "protocols")
self.size_others += msg.size
self.num_others += 1
except Exception as error:
logger.debug(" >>>>> ERROR PARSING Packets %s " %pkt, error)
traceback.print_exc(file=sys.stdout)
        ## PRINT RESULTS
total = 0
logger.debug("Detected %d MQTT packets" %len(self.packets))
for t in self.mqtt_types:
num = len(self.filter_by(t))
logger.debug("--- %d %s " % (num, t))
total += num
logger.debug("--- TOTAL %d" % (total))
logger.debug('#######################################')
logger.debug('--- Total Message: %d' % self.counter)
logger.debug("--- TCP Message: %s " % self.num_tcp)
logger.debug('--- MQTT Message: %d' % self.num_mqtt)
logger.debug('--- UDP Message: %d' % self.num_upd)
logger.debug('--- OTHER Message: %d' % self.num_others)
logger.debug('#######################################')
logger.debug('--- TCP packets size: %d' % self.size_tcp)
logger.debug('--- MQTT packets size: %d' % self.size_mqtt)
logger.debug('--- UPD packets size: %d' % self.size_udp)
logger.debug('--- OTHERS packets size: %d' % self.size_others)
logger.debug('--- TOTAL packets size: %d' % (self.size_mqtt + self.size_tcp+ self.size_udp + self.size_others))
logger.debug('#######################################')
def get_num(self, msg_type):
return len(self.filter_by(msg_type))
def filter_by(self, filter):
output = []
for pkt in self.packets:
if pkt.type == filter:
output.append(pkt)
return output
def find_msg_with_id(self, mid, msg_type):
data = self.filter_by(msg_type)
for msg in data:
if msg.mid == mid:
return msg
return -1
def get_e2e(self):
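        # End-to-end delay: match every acknowledgement (MQTT_PUB_ACK for QoS 1,
        # MQTT_PUB_COM for QoS 2) to the publish with the same message id and average
        # the elapsed time, tracking the min and max along the way.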
min = 100000
max = -1
msg_type = MQTT_PUB_ACK
if self.qos == 2:
msg_type = MQTT_PUB_COM
avg_time = 0
counter = 0
data = self.filter_by(msg_type)
for msg in data:
msg_pub = self.find_msg_with_id(msg.mid, MQTT_PUB)
mqtt_time = (float(msg.epoc_time) - float(msg_pub.epoc_time))
if mqtt_time > max:
max = mqtt_time
if mqtt_time < min:
min = mqtt_time
avg_time += mqtt_time
# logger.debug ("%s -- %s " % (repr(msg), repr(msg_pub)))
counter += 1
logger.debug("[E2E] TOTAL TIME: %s " % avg_time)
if counter == 0:
avg_time = 0
else:
avg_time /= counter
logger.debug("[E2E] MIN TIME: %s - MAX TIME: %s" % (min, max))
logger.debug("[E2E] The E2E delay for %s is :%f [N. Pkt=%d]" %(msg_type, avg_time, counter))
return avg_time
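    # Packet Delivery Ratio: percentage of the publish requests whose final
    # acknowledgement (PUBACK for QoS 1, PUBCOMP for QoS 2) was captured.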
def get_pdr(self, num):
filter = MQTT_PUB_ACK
if self.qos == 2:
filter = MQTT_PUB_COM
data = self.filter_by(filter)
counter = len(data)
pdr = (counter *1.0 / self.num_request) * 100
logger.debug("[PDR] The PDR for is %f [n. %d %s Pkt / Pkt sent %d] - REQUEST: %d" % (pdr, counter, filter, self.num_request, num))
return pdr
def get_size(self, protocol):
if protocol == TCP:
return self.size_tcp
elif protocol == MQTT:
return self.size_mqtt
else:
return 0
    def get_packet_drop(self, payload_size):
if self.qos == 1:
num_ack = self.get_num(MQTT_PUB_ACK)
ack_type = MQTT_PUB_ACK
else:
num_ack = self.get_num(MQTT_PUB_COM)
ack_type = MQTT_PUB_COM
size = self.size_tcp + self.size_mqtt
if float(size) == 0:
return 0
        pdrop = (num_ack * payload_size * 1.0) / float(size)
logger.debug("[PDROP] The Packet Drop is %f [n. %s: %d dim: %d] " % (pdrop, ack_type, num_ack, size))
return pdrop
def get_tcp_overhead(self):
size = self.size_tcp + self.size_mqtt
if float(size) == 0:
return 0
overhead = (self.size_tcp*1.0)/size
logger.debug("[TCP_OVERHEAD] TCP[%d] /TOTAL[%d] = %f " % (self.size_tcp, size, overhead))
return overhead
def computeTime(json_file, num_test, qos):
with open(json_file) as file:
pkts = json.load(file)
file.close()
return mqtt_performance(pkts, num_test, qos)
if __name__ == '__main__':
logger.debug('#######################################################')
logger.debug("#")
logger.debug("# Analyze Wireshark data for MQTT Performance analysis")
logger.debug("#")
logger.debug('#######################################################')
sh = logging.StreamHandler()
sh.setLevel(logging.DEBUG)
LOG_FORMAT = '%(levelname)-7s | %(asctime)s | %(name)40s:%(lineno)-3d| %(message)s'
formatter = logging.Formatter(LOG_FORMAT)
sh.setFormatter(formatter)
logger.addHandler(sh)
json_file = "backup/data_1507099161.54/mqtt_qos_1_payload_128_num_req_500.json"
with open(json_file) as file:
pkts = json.load(file)
file.close()
demo = mqtt_performance(pkts, 500, 1)
demo.get_e2e()
demo.get_pdr(500)
demo.get_packet_drop(256)
demo.get_tcp_overhead()
|
[
"json.load",
"traceback.print_exc"
] |
[((8261, 8276), 'json.load', 'json.load', (['file'], {}), '(file)\n', (8270, 8276), False, 'import json\n'), ((9039, 9054), 'json.load', 'json.load', (['file'], {}), '(file)\n', (9048, 9054), False, 'import json\n'), ((4029, 4065), 'traceback.print_exc', 'traceback.print_exc', ([], {'file': 'sys.stdout'}), '(file=sys.stdout)\n', (4048, 4065), False, 'import traceback\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Build an XGBoost model of arrests in the Chicago crime data.
"""
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__copyright__ = "Copyright 2019, <NAME>"
__license__ = "Creative Commons Attribution-ShareAlike 4.0 International License"
__version__ = "1.0"
import xgboost as xgb
from skopt.space import Integer, Real
from sklearn.externals import joblib
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import MinMaxScaler, StandardScaler
from sklearn_pandas import DataFrameMapper
from utility import HyperParameters, Runner
from model import load_clean_data_frame, ordinal_data_mapper
sample = None
iterations = 24
hyper_parameters = HyperParameters(search_space={
'xgb__n_estimators': Integer(100, 500),
'xgb__learning_rate': Real(0.1, 0.3),
'xgb__gamma': Real(0.0001, 100.0, prior='log-uniform'),
'xgb__max_depth': Integer(3, 7),
'xgb__colsample_bytree': Real(0.4, 0.8),
'xgb__colsample_bylevel': Real(0.4, 0.8),
'xgb__colsample_bynode': Real(0.4, 0.8)
})
# Features were selected based on feature importance from experiments.
data_mapper = DataFrameMapper([
(['iucr'], [MinMaxScaler()]),
(['location'], [MinMaxScaler()]),
(['latitude'], [StandardScaler()]),
(['hour'], [MinMaxScaler()]),
(['longitude'], [StandardScaler()]),
(['type'], [MinMaxScaler()]),
(['month'], [MinMaxScaler()]),
(['fbi_code'], [MinMaxScaler()])
])
xgboost_pipeline = Pipeline([
('mapper', ordinal_data_mapper),
('xgb', xgb.XGBClassifier(tree_method='hist'))
])
xgboost_pipeline_fs = Pipeline([
('mapper', data_mapper),
('xgb', xgb.XGBClassifier(tree_method='hist'))
])
def build_xgboost_model():
runner = Runner(
'model/output/xgboost_basic',
load_clean_data_frame(),
'arrest',
xgboost_pipeline,
hyper_parameters
)
runner.run_classification_search_experiment(
'roc_auc',
sample=sample,
n_iter=iterations,
record_predict_proba=True
)
joblib.dump(
runner.trained_estimator,
'model/output/xgboost_basic.joblib'
)
runner = Runner(
'model/output/xgboost_basic_fs',
load_clean_data_frame(),
'arrest',
xgboost_pipeline_fs,
hyper_parameters
)
runner.run_classification_search_experiment(
'roc_auc',
sample=sample,
n_iter=iterations,
record_predict_proba=True
)
joblib.dump(
runner.trained_estimator,
'model/output/xgboost_basic_fs.joblib'
)
if __name__ == '__main__':
build_xgboost_model()
|
[
"sklearn.externals.joblib.dump",
"model.load_clean_data_frame",
"sklearn.preprocessing.StandardScaler",
"skopt.space.Integer",
"sklearn.preprocessing.MinMaxScaler",
"skopt.space.Real",
"xgboost.XGBClassifier"
] |
[((2059, 2133), 'sklearn.externals.joblib.dump', 'joblib.dump', (['runner.trained_estimator', '"""model/output/xgboost_basic.joblib"""'], {}), "(runner.trained_estimator, 'model/output/xgboost_basic.joblib')\n", (2070, 2133), False, 'from sklearn.externals import joblib\n'), ((2492, 2569), 'sklearn.externals.joblib.dump', 'joblib.dump', (['runner.trained_estimator', '"""model/output/xgboost_basic_fs.joblib"""'], {}), "(runner.trained_estimator, 'model/output/xgboost_basic_fs.joblib')\n", (2503, 2569), False, 'from sklearn.externals import joblib\n'), ((1797, 1820), 'model.load_clean_data_frame', 'load_clean_data_frame', ([], {}), '()\n', (1818, 1820), False, 'from model import load_clean_data_frame, ordinal_data_mapper\n'), ((2227, 2250), 'model.load_clean_data_frame', 'load_clean_data_frame', ([], {}), '()\n', (2248, 2250), False, 'from model import load_clean_data_frame, ordinal_data_mapper\n'), ((766, 783), 'skopt.space.Integer', 'Integer', (['(100)', '(500)'], {}), '(100, 500)\n', (773, 783), False, 'from skopt.space import Integer, Real\n'), ((811, 825), 'skopt.space.Real', 'Real', (['(0.1)', '(0.3)'], {}), '(0.1, 0.3)\n', (815, 825), False, 'from skopt.space import Integer, Real\n'), ((845, 885), 'skopt.space.Real', 'Real', (['(0.0001)', '(100.0)'], {'prior': '"""log-uniform"""'}), "(0.0001, 100.0, prior='log-uniform')\n", (849, 885), False, 'from skopt.space import Integer, Real\n'), ((909, 922), 'skopt.space.Integer', 'Integer', (['(3)', '(7)'], {}), '(3, 7)\n', (916, 922), False, 'from skopt.space import Integer, Real\n'), ((953, 967), 'skopt.space.Real', 'Real', (['(0.4)', '(0.8)'], {}), '(0.4, 0.8)\n', (957, 967), False, 'from skopt.space import Integer, Real\n'), ((999, 1013), 'skopt.space.Real', 'Real', (['(0.4)', '(0.8)'], {}), '(0.4, 0.8)\n', (1003, 1013), False, 'from skopt.space import Integer, Real\n'), ((1044, 1058), 'skopt.space.Real', 'Real', (['(0.4)', '(0.8)'], {}), '(0.4, 0.8)\n', (1048, 1058), False, 'from skopt.space import Integer, Real\n'), ((1542, 1579), 'xgboost.XGBClassifier', 'xgb.XGBClassifier', ([], {'tree_method': '"""hist"""'}), "(tree_method='hist')\n", (1559, 1579), True, 'import xgboost as xgb\n'), ((1659, 1696), 'xgboost.XGBClassifier', 'xgb.XGBClassifier', ([], {'tree_method': '"""hist"""'}), "(tree_method='hist')\n", (1676, 1696), True, 'import xgboost as xgb\n'), ((1182, 1196), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {}), '()\n', (1194, 1196), False, 'from sklearn.preprocessing import MinMaxScaler, StandardScaler\n'), ((1220, 1234), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {}), '()\n', (1232, 1234), False, 'from sklearn.preprocessing import MinMaxScaler, StandardScaler\n'), ((1258, 1274), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (1272, 1274), False, 'from sklearn.preprocessing import MinMaxScaler, StandardScaler\n'), ((1294, 1308), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {}), '()\n', (1306, 1308), False, 'from sklearn.preprocessing import MinMaxScaler, StandardScaler\n'), ((1333, 1349), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (1347, 1349), False, 'from sklearn.preprocessing import MinMaxScaler, StandardScaler\n'), ((1369, 1383), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {}), '()\n', (1381, 1383), False, 'from sklearn.preprocessing import MinMaxScaler, StandardScaler\n'), ((1404, 1418), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {}), '()\n', (1416, 1418), False, 'from sklearn.preprocessing 
import MinMaxScaler, StandardScaler\n'), ((1442, 1456), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {}), '()\n', (1454, 1456), False, 'from sklearn.preprocessing import MinMaxScaler, StandardScaler\n')]
|
import random
import time
from collections import Counter
done = 'false'
#here is the animation
def animate():
Count=0
global done
print('loading… |',end="")
while done == 'false':
time.sleep(0.1)
print('/',end="")
time.sleep(0.1)
print('-',end="")
time.sleep(0.1)
print('\\',end="")
time.sleep(0.1)
Count+=1
if Count==10:
done='true'
print()
print('Done!')
animate()
done = 'false'
Card_Deck=[2,3,4,5,6,7,8,9,10,'J','Q','K','A']
Suits=['♠','♣︎','♥︎','♦']
Deck=['2 ♠','3 ♠','4 ♠','5 ♠','6 ♠','7 ♠','8 ♠','9 ♠','10 ♠','J ♠','Q ♠','K ♠','A ♠',
'2 ♣︎','3 ♣︎','4 ♣︎','5 ♣︎','6 ♣︎','7 ♣︎','8 ♣︎','9 ♣︎','10 ♣︎','J ♣︎','Q ♣︎','K ♣︎','A ♣︎',
'2 ♥︎','3 ♥︎','4 ♥︎︎','5 ♥︎','6 ♥︎','7 ♥︎︎','8 ︎♥︎','9 ♥︎︎','10 ♥︎','J ♥︎','Q ♥︎','K ♥︎','A ♥︎',
'2 ♦︎','3 ♦︎','4 ♦︎︎','5 ♦︎','6 ♦︎','7 ♦︎︎','8 ︎♦','9 ♦','10 ♦︎','J ♦︎','Q ♦','K ♦','A ♦']
Deck_Value=[1,2,3,4,5,6,7,8,9,10,11,12,13,
1,2,3,4,5,6,7,8,9,10,11,12,13,
1,2,3,4,5,6,7,8,9,10,11,12,13,
1,2,3,4,5,6,7,8,9,10,11,12,13]
Spades=[0,1,2,3,4,5,6,7,8,9,10,11,12]
Clubs=[13,14,15,16,17,18,19,20,21,22,23,24,25]
Hearts=[26,27,28,29,30,31,32,33,34,35,36,37,38]
Diamonds=[39,40,41,42,43,44,45,46,47,48,49,50,51]
Aces=[12,25,38,51]
Used_Cards=[]
Stats={}
def deal():
A=random.randint(0,51)
if A not in Used_Cards:
Used_Cards.append(A)
return A
else:
return deal()
def Draw_Five():
A=deal()
B=deal()
C=deal()
D=deal()
E=deal()
Cards_in_Hand=[A,B,C,D,E]
return Cards_in_Hand
def Compare(A,B):
if Deck_Value[A]>Deck_Value[B]:
return 1
elif Deck_Value[A]<Deck_Value[B]:
return -1
else:
return 0
def Is_Straight(Cards):
Card_Value=[]
for i in Cards:
Card_Value.append(Deck_Value[i])
Card_Value.sort()
if Card_Value[0]+1==Card_Value[1] and Card_Value[1]+1==Card_Value[2] and Card_Value[2]+1==Card_Value[3] and Card_Value[3]+1==Card_Value[4]:
return True
    elif Card_Value[4] == 13:  # highest card is an Ace: check for an Ace-low straight
if Card_Value[4]-12==Card_Value[0] and Card_Value[0]+1==Card_Value[1] and Card_Value[1]+1==Card_Value[2] and Card_Value[2]+1==Card_Value[3]:
return True
else:
return False
else:
return False
def Print_Cards(Cards):
Card_Value=[]
for i in Cards:
Card_Value.append(Deck[i])
print(Card_Value)
def Is_Flush(Cards):
return all(item in Spades for item in Cards) or all(item in Clubs for item in Cards) or all(item in Hearts for item in Cards) or all(item in Diamonds for item in Cards)
def Is_Straight_Flush(Cards):
return Is_Straight(Cards) and Is_Flush(Cards)
def Is_Royal_Flush(Cards):
Cards.sort(reverse=1)
return Cards[0] in Aces and Is_Straight_Flush(Cards)
def OAK(Cards):
Card_Value=[]
for i in Cards:
Card_Value.append(Deck_Value[i])
return max(Counter(Card_Value).values())
def Get_MRC(Cards):
Card_Value=[]
for i in Cards:
Card_Value.append(Deck_Value[i])
Values=list(Counter(Card_Value).values())
Keys=list(Counter(Card_Value).keys())
Max_Value_Index=Values.index(max(Values))
return Keys[Max_Value_Index]
#GET Top Two Repeat Cards
def Get_TTRC(Cards):
Card_Value=[]
for i in Cards:
Card_Value.append(Deck_Value[i])
Values=list(Counter(Card_Value).values())
Keys=list(Counter(Card_Value).keys())
if 1 in Values:
Min_Value_Index=Values.index(1)
Keys.pop(Min_Value_Index)
return Keys
def Is_Four_of_a_Kind(Cards):
return OAK(Cards)==4
def Is_Three_of_a_Kind(Cards):
return OAK(Cards)==3
def Is_One_Pair(Cards):
return OAK(Cards)==2
def Is_Two_Pair(Cards):
Card_Value=[]
for i in Cards:
Card_Value.append(Deck_Value[i])
return not Is_Three_of_a_Kind(Cards) and len(Counter(Card_Value).keys())==3
def Is_Full_House(Cards):
Card_Value=[]
for i in Cards:
Card_Value.append(Deck_Value[i])
return len(Counter(Card_Value).keys())==2 and Is_Three_of_a_Kind(Cards)
def Get_High_Card(Cards):
Card_Value=[]
for i in Cards:
Card_Value.append(Deck_Value[i])
Card_Value.sort(reverse=1)
return Card_Value[0]
def Get_2nd_High_Card(Cards):
Card_Value=[]
for i in Cards:
Card_Value.append(Deck_Value[i])
Card_Value.sort(reverse=1)
return Card_Value[1]
def Get_3rd_High_Card(Cards):
Card_Value=[]
for i in Cards:
Card_Value.append(Deck_Value[i])
Card_Value.sort(reverse=1)
return Card_Value[2]
def Get_4th_High_Card(Cards):
Card_Value=[]
for i in Cards:
Card_Value.append(Deck_Value[i])
Card_Value.sort(reverse=1)
return Card_Value[3]
def Get_5th_High_Card(Cards):
Card_Value=[]
for i in Cards:
Card_Value.append(Deck_Value[i])
Card_Value.sort(reverse=1)
return Card_Value[4]
def Play(Name):
Result=10
Cards=Draw_Five()
#Cards=[0,13,2,15,25]
print("Drawing Cards for",Name+"…")
time.sleep(2.5)
Print_Cards(Cards)
if Is_Royal_Flush(Cards):
Result=1
print("You got a Royal Flush and your Highest Card is",Card_Deck[Get_High_Card(Cards)-1])
elif Is_Straight_Flush(Cards):
Result=2
print("You got a Straight Flush and your Highest Card is",Card_Deck[Get_High_Card(Cards)-1])
elif Is_Four_of_a_Kind(Cards):
Result=3
print("You got a Four of a Kind of",Card_Deck[Get_MRC(Cards)-1],"and your Highest Card is",Card_Deck[Get_High_Card(Cards)-1])
elif Is_Full_House(Cards):
Result=4
RepeatCards=[]
for dv in Get_TTRC(Cards):
RepeatCards.append(Card_Deck[dv-1])
print("You got a Full House",RepeatCards,"and your Highest Card is",Card_Deck[Get_High_Card(Cards)-1])
elif Is_Flush(Cards):
Result=5
print("You got a Flush and your Highest Card is",Card_Deck[Get_High_Card(Cards)-1])
elif Is_Straight(Cards):
Result=6
print("You got a Straight and your Highest Card is",Card_Deck[Get_High_Card(Cards)-1])
elif Is_Three_of_a_Kind(Cards):
Result=7
print("You got a Three of a Kind of",Card_Deck[Get_MRC(Cards)-1],"and your Highest Card is",Card_Deck[Get_High_Card(Cards)-1])
elif Is_Two_Pair(Cards):
Result=8
RepeatCards=[]
for dv in Get_TTRC(Cards):
RepeatCards.append(Card_Deck[dv-1])
print("You got Two Pairs",RepeatCards,"and your Highest Card is",Card_Deck[Get_High_Card(Cards)-1])
elif Is_One_Pair(Cards):
Result=9
print("You got a Pair of",Card_Deck[Get_MRC(Cards)-1],"and your Highest Card is",Card_Deck[Get_High_Card(Cards)-1])
else:
print("You got a High Card!", Card_Deck[Get_High_Card(Cards)-1])
#print("Your Highest Card is",Card_Deck[Get_High_Card(Cards)-1])
Result_Array=[Get_High_Card(Cards),Get_2nd_High_Card(Cards),Get_3rd_High_Card(Cards),Get_4th_High_Card(Cards),Get_5th_High_Card(Cards)]
return Cards,Result,Result_Array,Get_MRC(Cards)
def declare_winner(P1_Name,P1_Score,P2_Name,P2_Score):
if P1_Score>P2_Score:
Stats[P1_Name]+=1
print(P1_Name,"Wins!")
elif P1_Score<P2_Score:
Stats[P2_Name]+=1
print(P2_Name,"Wins!")
def breaktie(P1_Name,P1_Result_Array,P2_Name,P2_Result_Array,idx):
if P1_Result_Array[idx]==P2_Result_Array[idx]:
if idx==4:
            Stats[P2_Name]+=0.5
            Stats[P1_Name]+=0.5
print(P1_Name,"and",P2_Name,"have tied. It's a draw!")
else:
breaktie(P1_Name,P1_Result_Array,P2_Name,P2_Result_Array,idx+1)
else:
declare_winner(P1_Name,P1_Result_Array[idx],P2_Name,P2_Result_Array[idx])
def Check_High_Card(P1,P1_Result_Array,P2,P2_Result_Array):
if P1_Result_Array[0]==P2_Result_Array[0]:
breaktie(P1,P1_Result_Array,P2,P2_Result_Array,1)
else:
declare_winner(P1,P1_Result_Array[0],P2,P2_Result_Array[0])
def Start_Game(P1,P2,Game_Number):
print("______________________________________________")
input(P1 + ", Hit Enter when Ready ")
(P1_Cards,P1_Result,P1_Result_Array,P1_MRC)=Play(P1)
for i in range(1,3,1):
print()
input(P2 + ", Hit Enter when Ready ")
(P2_Cards,P2_Result,P2_Result_Array,P2_MRC)=Play(P2)
for i in range(1,3,1):
print()
#comparing results to find a winner
if P1_Result==P2_Result:
if P1_Result in [3,4,7,9]:
if P1_MRC>P2_MRC:
Stats[P1]+=1
print(P1,"Wins!")
elif P1_MRC<P2_MRC:
Stats[P2]+=1
print(P2,"Wins!")
else:
Check_High_Card(P1,P1_Result_Array,P2,P2_Result_Array)
elif P1_Result==8:
#both players have 2 pairs
P1_TTRC=Get_TTRC(P1_Cards)
P2_TTRC=Get_TTRC(P2_Cards)
if P1_TTRC[0]>P2_TTRC[0] and P1_TTRC[0]>P2_TTRC[1]:
Stats[P1]+=1
print(P1,"Wins!")
elif P1_TTRC[1]>P2_TTRC[0] and P1_TTRC[0]>P2_TTRC[1]:
Stats[P1]+=1
print(P1,"Wins!")
elif P2_TTRC[0]>P1_TTRC[0] and P2_TTRC[0]>P1_TTRC[1]:
Stats[P2]+=1
print(P2,"Wins!")
elif P2_TTRC[1]>P1_TTRC[0] and P2_TTRC[0]>P1_TTRC[1]:
Stats[P2]+=1
print(P2,"Wins!")
else:
Check_High_Card(P1,P1_Result_Array,P2,P2_Result_Array)
else:
Check_High_Card(P1,P1_Result_Array,P2,P2_Result_Array)
elif P1_Result>P2_Result:
Stats[P2]+=1
print(P2,"Wins!")
elif P1_Result<P2_Result:
Stats[P1]+=1
print(P1,"Wins!")
print("Current Stats:",Stats)
print("______________________________________________")
Continue=input("Would You Like to Play Again? ")
if "n" not in Continue and "N" not in Continue:
print("Ok, Starting Game",Game_Number+1)
if len(Used_Cards)>42:
            print("Our Virtual Deck has run out of cards. Shuffling…")
time.sleep(1.5)
print("Deck Incoming!")
Used_Cards.clear()
Start_Game(P1,P2,Game_Number+1)
else:
print("Thank You for Playing Poker Online: Multiplayer (Single Deck Edition)!")
print("Welcome To Poker Online: Multiplayer (Single Deck Edition)!")
print()
P1=input("Player 1, Please Enter Your Name: ")
P2=input("Player 2, Please Enter Your Name: ")
Stats[P1]=0
Stats[P2]=0
Start_Game(P1,P2,1)
|
[
"collections.Counter",
"random.randint",
"time.sleep"
] |
[((1371, 1392), 'random.randint', 'random.randint', (['(0)', '(51)'], {}), '(0, 51)\n', (1385, 1392), False, 'import random\n'), ((4837, 4852), 'time.sleep', 'time.sleep', (['(2.5)'], {}), '(2.5)\n', (4847, 4852), False, 'import time\n'), ((206, 221), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (216, 221), False, 'import time\n'), ((256, 271), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (266, 271), False, 'import time\n'), ((306, 321), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (316, 321), False, 'import time\n'), ((357, 372), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (367, 372), False, 'import time\n'), ((9514, 9529), 'time.sleep', 'time.sleep', (['(1.5)'], {}), '(1.5)\n', (9524, 9529), False, 'import time\n'), ((2867, 2886), 'collections.Counter', 'Counter', (['Card_Value'], {}), '(Card_Value)\n', (2874, 2886), False, 'from collections import Counter\n'), ((3003, 3022), 'collections.Counter', 'Counter', (['Card_Value'], {}), '(Card_Value)\n', (3010, 3022), False, 'from collections import Counter\n'), ((3045, 3064), 'collections.Counter', 'Counter', (['Card_Value'], {}), '(Card_Value)\n', (3052, 3064), False, 'from collections import Counter\n'), ((3281, 3300), 'collections.Counter', 'Counter', (['Card_Value'], {}), '(Card_Value)\n', (3288, 3300), False, 'from collections import Counter\n'), ((3323, 3342), 'collections.Counter', 'Counter', (['Card_Value'], {}), '(Card_Value)\n', (3330, 3342), False, 'from collections import Counter\n'), ((3753, 3772), 'collections.Counter', 'Counter', (['Card_Value'], {}), '(Card_Value)\n', (3760, 3772), False, 'from collections import Counter\n'), ((3895, 3914), 'collections.Counter', 'Counter', (['Card_Value'], {}), '(Card_Value)\n', (3902, 3914), False, 'from collections import Counter\n')]
|
from netfilterqueue import NetfilterQueue
from scapy.all import *
import socket
import re
def print_and_accept(pkt):
ip = IP(pkt.get_payload())
if ip.haslayer("Raw"):
print("IP packet received")
payload = ip["Raw"].load
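        # Heuristic match for a TLS handshake record (0x16) carrying a ClientHello
        # (0x01); the bytes rewritten below presumably force the 0x002f cipher suite
        # (TLS_RSA_WITH_AES_128_CBC_SHA).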
if payload[0] == 0x16 and payload[5] == 0x01:
new_payload = [x for x in payload]
            new_payload[112] = 0x00
            new_payload[113] = 0x2f
print("Downgraded AES")
pkt.set_payload(bytes(new_payload))
pkt.accept()
nfqueue = NetfilterQueue()
nfqueue.bind(1, print_and_accept)
s = socket.fromfd(nfqueue.get_fd(), socket.AF_UNIX, socket.SOCK_STREAM)
try:
nfqueue.run_socket(s)
except KeyboardInterrupt:
print('')
s.close()
nfqueue.unbind()
|
[
"netfilterqueue.NetfilterQueue"
] |
[((545, 561), 'netfilterqueue.NetfilterQueue', 'NetfilterQueue', ([], {}), '()\n', (559, 561), False, 'from netfilterqueue import NetfilterQueue\n')]
|
from netCDF4 import Dataset
from dataclasses import dataclass, field
import os
import pickle
import sys
import shutil
import numpy as np
from variables import modelvar
@dataclass
class VariableInfo():
nickname: str = ""
dimensions: tuple = field(default_factory=lambda: ())
name: str = ""
units: str = ""
dtype: str = "d"
class NetCDF_tools():
"""
Basic class to create and write NetCDF files
Parameters
----------
filename : str
The file name to be created.
attrs : dict
The global attributes.
dimensions : list[(name, size), ...]
The list of dimensions.
size==None -> unlimited
variables : list[VariableInfo, ...]
The name of variable.dimensions should match one of dimensions.
"""
def __init__(self, filename, attrs, dimensions, variables):
self.filename = filename
self.attrs = attrs
self.dimensions = {dim[0]: dim[1] for dim in dimensions}
self.variables = {var.nickname: var for var in variables}
def create(self):
"""
Create the empty NetCDF file with
- attributes
- dimensions
- variables
"""
with Dataset(self.filename, "w", format='NETCDF4') as nc:
nc.setncatts(self.attrs)
for dim, size in self.dimensions.items():
nc.createDimension(dim, size)
for infos in self.variables.values():
assert isinstance(infos.dimensions, tuple)
v = nc.createVariable(infos.nickname,
infos.dtype,
infos.dimensions)
v.standard_name = infos.name
v.units = infos.units
def write(self, variables, nc_start={}, data_start={}):
"""
Write variables
Parameters
----------
variables : list[(nickname, data), ...]
where data is an ndarray
nc_start : dict{name: (offset, size)}
name : the dimension name
offset : the offset of that dimension in the NetCDF file
size : the size of data in that dimension
If a dimension is not in nc_start it is assumed that
the data has a size that matches the size defined in
the NetCDF.
data_start : dict{name: (offset, size)}
same that nc_start but for the data in variables
"""
with Dataset(self.filename, "r+") as nc:
for nickname, data in variables.items():
ncidx = self._get_idx(nickname, nc_start)
if isinstance(data, np.ndarray):
dataidx = self._get_idx(nickname, data_start)
nc.variables[nickname][ncidx] = data[dataidx]
else:
nc.variables[nickname][ncidx] = data
def _get_idx(self, nickname, nc_start):
"""
Return the tuple of slices
to either slice through nc.variables or through data
"""
infos = self.variables[nickname]
ncidx = []
for dim in infos.dimensions:
if dim in nc_start:
istart, size = nc_start[dim]
else:
istart, size = 0, self.dimensions[dim]
if size is not None:
ncidx += [slice(istart, istart+size)]
return tuple(ncidx)
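# Minimal usage sketch for NetCDF_tools (illustrative only; "demo.nc" and the
# single variable below are made up, not part of pyRSW):
#
#   t = VariableInfo("time", ("time",), "time", "s")
#   nc = NetCDF_tools("demo.nc", {"model": "demo"}, [("time", None)], [t])
#   nc.create()
#   nc.write({"time": 0.0}, nc_start={"time": (0, 1)})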
class Ncio():
"""
Class that handles all the IO for pyRSW
which includes
- creating and writing model snapshots in the history.nc
- creating and writing model bulk diagnostics in the diags.nc
- saving the param.pkl file
- saving the Python experiment script
"""
def __init__(self, param, grid, batchindex=0):
self.param = param
self.grid = grid
self.batchindex = batchindex
self.nprocs = np.prod(grid.procs)
if self.nprocs > 1:
from mpi4py import MPI
self.MPI = MPI
self._create_output_directory()
self.backup_config()
hist_infos = get_hist_infos(param, grid)
self.hist = NetCDF_tools(self.history_file, *hist_infos)
if not self.singlefile or self.master:
self.hist.create()
self.hist_index = 0
self.write_grid()
diag_infos = get_diag_infos(param, grid)
self.diag = NetCDF_tools(self.diag_file, *diag_infos)
self.diag_index = 0
if self.master:
self.diag.create()
def _create_output_directory(self):
if self.master and not os.path.isdir(self.output_directory):
os.makedirs(self.output_directory)
@property
def myrank(self):
return self.grid.myrank
@property
def master(self):
return self.myrank == 0
@property
def expname(self):
return self.param["expname"]
@property
def singlefile(self):
return self.param["singlefile"]
@property
def output_directory(self):
datadir = os.path.expanduser(self.param["datadir"])
return os.path.join(datadir, self.expname)
@property
def history_file(self):
"""
Full path to the NetCDF history file
"""
his = self._add_batchindex("history")
basicname = f"{his}.nc"
mpiname = f"{his}_{self.myrank:02}.nc"
hisname = basicname if self.singlefile else mpiname
return os.path.join(self.output_directory, hisname)
@property
def diag_file(self):
"""
Full path to the NetCDF diagnostic file
"""
diag = self._add_batchindex("diag")
diagname = f"{diag}.nc"
return os.path.join(self.output_directory, diagname)
def _add_batchindex(self, filename):
if self.param.restart:
return filename + f"_{self.batchindex:02}"
else:
return filename
def backup_config(self):
"""
Backup the experiment configuration into the output directory
- save param in the param.pkl
- save the experiment Python script
"""
if self.master and self.batchindex == 0:
dest = f"{self.output_directory}/param.pkl"
with open(dest, "wb") as fid:
pickle.dump(self.param, fid)
python_launch_script = sys.argv[0]
dest = os.path.join(self.output_directory, f"{self.expname}.py")
shutil.copyfile(python_launch_script, dest)
def write_grid(self):
"""
Write the model grid arrays into the NetCDF file (just once)
"""
xc = self.grid.coord.x(0, self.grid.ic)[0]
yc = self.grid.coord.y(self.grid.jc, 0)[:, 0]
xe = self.grid.coord.x(0, self.grid.ie)[0]
ye = self.grid.coord.y(self.grid.je, 0)[:, 0]
layer = np.arange(self.grid.nz)
msk = self.grid.arrays.msk.view("i")
datagrid = {
"x": xc,
"y": yc,
"xe": xe,
"ye": ye,
"layer": layer,
"msk": msk
}
self._history_write_halo_mpi(datagrid)
def write_hist(self, state, time, kt):
"""
Write a model snapshot into the NetCDF file
"""
datahist = {
"time": time,
"iteration": kt,
}
for name in self.param["var_to_save"]:
vartype = modelvar[name]["type"]
if vartype == "vector":
for axis in "xy":
compname = name+axis
var = state.get(compname)
datahist[compname] = var.getproperunits(self.grid)
else:
var = state.get(name)
datahist[name] = var.getproperunits(self.grid)
nc_start = {"time": (self.hist_index, 1)}
self._history_write_halo_mpi(datahist, nc_start=nc_start)
self.hist_index += 1
def _history_write_halo_mpi(self, data, nc_start={}):
"""
Generic function to write data into the history NetCDF file
handle the following special cases
- write the arrays without the halo
- write in a single history file, even if several MPI ranks
"""
data_start = {}
if not self.param.halo_included:
j0, j1, i0, i1 = self.grid.arrays.hb.domainindices
nx = self.param.nx
ny = self.param.ny
data_start["x"] = (i0, nx)
data_start["y"] = (j0, ny)
data_start["xe"] = (i0, nx+1)
data_start["ye"] = (j0, ny+1)
if self.singlefile:
i0 = self.grid.loc[2]*self.param.nx
j0 = self.grid.loc[1]*self.param.ny
nc_start["x"] = (i0, nx)
nc_start["y"] = (j0, ny)
nc_start["xe"] = (i0, nx+1)
nc_start["ye"] = (j0, ny+1)
if self.singlefile and (self.nprocs > 1):
# all MPI ranks write in the same file
for rank in range(self.nprocs):
if rank == self.myrank:
self.hist.write(data,
nc_start=nc_start,
data_start=data_start)
self.MPI.COMM_WORLD.Barrier()
else:
# each rank writes in its own history file
self.hist.write(data, nc_start=nc_start, data_start=data_start)
def write_diags(self, diags, time, kt):
"""
Write the domain integrated diagnostics into the NetCDF file
"""
datadiag = {
"time": time,
"iteration": kt,
"ke": diags["ke"],
"pe": diags["pe"],
"me": diags["me"],
"enstrophy": diags["potenstrophy"],
}
start = {"time": (self.diag_index, 1)}
if self.master:
self.diag.write(datadiag, nc_start=start)
self.diag_index += 1
def get_hist_infos(param, grid):
attrs = {"model": "pyrsw",
"author": "someone"}
if param.halo_included:
ny, nx = grid.xc.shape
else:
ny, nx = param.ny, param.nx
if param.singlefile:
nx *= param.npx
ny *= param.npy
nz = param.nz
dims = [("time", None), ("layer", nz),
("x", nx), ("y", ny),
("xe", nx+1), ("ye", ny+1)]
infos = [
("time", ("time",), "time", "s"),
("iteration", ("time",), "model iteration", "", "i4"),
("x", ("x",), "x coord at center", "m"),
("y", ("y",), "y coord at center", "m"),
("xe", ("xe",), "x coord at edge", "m"),
("ye", ("ye",), "y coord at edge", "m"),
("layer", ("layer",), "layer index", "", "i1"),
("msk", ("y", "x"), "mask at cell centers", "", "i1"),
]
vardims = {
"scalar": ("time", "layer", "y", "x"),
"u": ("time", "layer", "y", "xe"),
"v": ("time", "layer", "ye", "x"),
"vorticity": ("time", "layer", "ye", "xe")
}
for name in param["var_to_save"]:
longname = modelvar[name]["name"]
units = modelvar[name]["unit"]
vartype = modelvar[name]["type"]
if vartype == "vector":
infos += [(name+"x", vardims["u"], longname+" x-component", units)]
infos += [(name+"y", vardims["v"], longname+" y-component", units)]
else:
infos += [(name, vardims[vartype], longname, units)]
varinfos = [VariableInfo(*info) for info in infos]
hist_infos = (attrs, dims, varinfos)
return hist_infos
def get_diag_infos(param, grid):
attrs = {"model": "pyrsw",
"author": "someone"}
dims = [("time", None)]
infos = [
("time", ("time",), "time", "s"),
("iteration", ("time",), "model iteration", "", "i4"),
("ke", ("time",), "kinetic energy", "m^2 s^-2"),
("pe", ("time",), "mean available potential energy", "m^2 s^-2"),
("me", ("time",), "kinetic + potential energy", "m^2 s^-2"),
("enstrophy", ("time",), "mean enstrophy", "s^-2 m^-2"),
]
varinfos = [VariableInfo(*info) for info in infos]
diag_infos = (attrs, dims, varinfos)
return diag_infos
|
[
"netCDF4.Dataset",
"pickle.dump",
"os.path.join",
"os.makedirs",
"os.path.isdir",
"dataclasses.field",
"numpy.arange",
"shutil.copyfile",
"os.path.expanduser",
"numpy.prod"
] |
[((250, 284), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : ())'}), '(default_factory=lambda : ())\n', (255, 284), False, 'from dataclasses import dataclass, field\n'), ((3871, 3890), 'numpy.prod', 'np.prod', (['grid.procs'], {}), '(grid.procs)\n', (3878, 3890), True, 'import numpy as np\n'), ((5009, 5050), 'os.path.expanduser', 'os.path.expanduser', (["self.param['datadir']"], {}), "(self.param['datadir'])\n", (5027, 5050), False, 'import os\n'), ((5066, 5101), 'os.path.join', 'os.path.join', (['datadir', 'self.expname'], {}), '(datadir, self.expname)\n', (5078, 5101), False, 'import os\n'), ((5415, 5459), 'os.path.join', 'os.path.join', (['self.output_directory', 'hisname'], {}), '(self.output_directory, hisname)\n', (5427, 5459), False, 'import os\n'), ((5663, 5708), 'os.path.join', 'os.path.join', (['self.output_directory', 'diagname'], {}), '(self.output_directory, diagname)\n', (5675, 5708), False, 'import os\n'), ((6805, 6828), 'numpy.arange', 'np.arange', (['self.grid.nz'], {}), '(self.grid.nz)\n', (6814, 6828), True, 'import numpy as np\n'), ((1205, 1250), 'netCDF4.Dataset', 'Dataset', (['self.filename', '"""w"""'], {'format': '"""NETCDF4"""'}), "(self.filename, 'w', format='NETCDF4')\n", (1212, 1250), False, 'from netCDF4 import Dataset\n'), ((2475, 2503), 'netCDF4.Dataset', 'Dataset', (['self.filename', '"""r+"""'], {}), "(self.filename, 'r+')\n", (2482, 2503), False, 'from netCDF4 import Dataset\n'), ((4615, 4649), 'os.makedirs', 'os.makedirs', (['self.output_directory'], {}), '(self.output_directory)\n', (4626, 4649), False, 'import os\n'), ((6345, 6402), 'os.path.join', 'os.path.join', (['self.output_directory', 'f"""{self.expname}.py"""'], {}), "(self.output_directory, f'{self.expname}.py')\n", (6357, 6402), False, 'import os\n'), ((6415, 6458), 'shutil.copyfile', 'shutil.copyfile', (['python_launch_script', 'dest'], {}), '(python_launch_script, dest)\n', (6430, 6458), False, 'import shutil\n'), ((4565, 4601), 'os.path.isdir', 'os.path.isdir', (['self.output_directory'], {}), '(self.output_directory)\n', (4578, 4601), False, 'import os\n'), ((6249, 6277), 'pickle.dump', 'pickle.dump', (['self.param', 'fid'], {}), '(self.param, fid)\n', (6260, 6277), False, 'import pickle\n')]
|
# Title: 소가 정보섬에 올라온 이유 (Why the cow came up to Information Island)
# Link: https://www.acmicpc.net/problem/17128
import sys
sys.setrecursionlimit(10 ** 6)
read_list_int = lambda: list(map(int, sys.stdin.readline().strip().split(' ')))
def solution(n: int, q: int, cows: list, qs: list):
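    # parts[i] is the product of four consecutive cows i..i+3 (circular, handled by
    # doubling the list). Flipping cow q toggles the sign of the four windows that
    # contain it, so the total changes by -2 * (sum of those window products); the
    # affected window products are then negated in place.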
cows = cows + cows
parts = []
for start in range(n):
parts.append(cows[start]*cows[start+1]*cows[start+2]*cows[start+3])
s = sum(parts)
for q in qs:
s -= 2*(parts[q-1]+parts[q-2]+parts[q-3]+parts[q-4])
parts[q-1] *= -1
parts[q-2] *= -1
parts[q-3] *= -1
parts[q-4] *= -1
print(s)
def main():
n, q = read_list_int()
cows = read_list_int()
qs = read_list_int()
solution(n, q, cows, qs)
if __name__ == '__main__':
main()
|
[
"sys.setrecursionlimit",
"sys.stdin.readline"
] |
[((84, 114), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(10 ** 6)'], {}), '(10 ** 6)\n', (105, 114), False, 'import sys\n'), ((154, 174), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (172, 174), False, 'import sys\n')]
|
"""Module containing the logic for querying dictionary or list object."""
import re
import operator
from dlapp import utils
from dlapp.argumenthelper import validate_argument_type
from dlapp.argumenthelper import validate_argument_is_not_empty
from dlapp.collection import Element
class DLQueryError(Exception):
"""Use to capture error for DLQuery instance"""
class DLQueryDataTypeError(DLQueryError):
"""Use to capture error of unsupported query data type."""
class DLQuery:
"""This is a class for querying dictionary or list object.
Attributes
__________
data (list, tuple, or dict): list or dictionary instance.
Properties
----------
is_dict -> bool
is_list -> bool
Methods
-------
keys() -> dict_keys or odict_keys
values() -> dict_values or odict_values
items() -> dict_items or odict_items
get(index, default=None) -> Any
find(node=None, lookup='', select='') -> List
Raise
-----
TypeError: if failed to invoke ``iter`` built-in function.
"""
def __init__(self, data):
validate_argument_type(list, tuple, dict, data=data)
self.data = data
self._is_dict = None
self._is_list = None
############################################################################
# Special methods
############################################################################
def __len__(self):
return len(self.data)
def __getitem__(self, item):
return self.data[item]
def __iter__(self):
if self.is_dict:
return iter(self.data.keys())
elif self.is_list:
return iter(self.data)
else:
fmt = '{!r} object is not iterable.'
msg = fmt.format(type(self).__name__)
raise TypeError(msg)
def __bool__(self):
return bool(self.data)
def __eq__(self, other):
if isinstance(other, self.__class__):
result = operator.eq(self.data, other.data)
else:
result = operator.eq(self.data, other)
return result
def __ne__(self, other):
if isinstance(other, self.__class__):
result = operator.ne(self.data, other.data)
else:
result = operator.ne(self.data, other)
return result
############################################################################
# properties
############################################################################
@property
def is_dict(self):
"""Check if data of DLQuery is a dictionary data."""
if self._is_dict is None:
self._is_dict = isinstance(self.data, dict)
return self._is_dict
@property
def is_list(self):
"""Check if data of DLQuery is a list or tuple data."""
if self._is_list is None:
self._is_list = isinstance(self.data, (list, tuple))
return self._is_list
############################################################################
# public methods
############################################################################
def keys(self):
"""a set-like object providing a view on D's keys"""
result = utils.foreach(self.data, choice='keys')
return result
def values(self):
"""a set-like object providing a view on D's values"""
result = utils.foreach(self.data, choice='values')
return result
def items(self):
"""a set-like object providing a view on D's items"""
result = utils.foreach(self.data, choice='items')
return result
def get(self, index, default=None, on_exception=False):
"""if DLQuery is a list, then return the value for index if
index is in the list, else default.
if DLQuery is a dictionary, then return the value for key (i.e index)
if key is in the dictionary, else default.
Parameters
----------
index (int, str): a index of list or a key of dictionary.
default (Any): a default value if no element in list or
in dictionary is found.
on_exception (bool): raise Exception if it is True. Default is False.
Returns
-------
Any: any value from DLQuery.data
"""
try:
if self.is_list:
if isinstance(index, int):
return self.data[index]
elif isinstance(index, str):
pattern = r'-?[0-9]+$'
if re.match(pattern, index.strip()):
return self.data[int(index)]
else:
count = index.count(':')
if count == 1:
i, j = [x.strip() for x in index.split(':')]
chks = [
re.match(pattern, i.strip()) or i == '',
re.match(pattern, j.strip()) or j == ''
]
if any(chks):
i = int(i) if i else None
j = int(j) if j else None
slice_obj = slice(i, j)
return self.data[slice_obj]
else:
if on_exception:
return self.data[index]
else:
return default
elif count == 2:
i, j, k = [x.strip() for x in index.split(':')]
chks = [
re.match(pattern, i.strip()) or i == '',
re.match(pattern, j.strip()) or j == '',
re.match(pattern, k.strip()) or k == ''
]
if any(chks):
i = int(i) if i else None
j = int(j) if j else None
k = int(k) if k else None
slice_obj = slice(i, j, k)
return self.data[slice_obj]
else:
if on_exception:
return self.data[index]
else:
return default
else:
if on_exception:
return self.data[index]
else:
return default
else:
return default
else:
key = index
return self.data.get(key, default)
except Exception as ex: # noqa
if on_exception:
raise ex
else:
return default
def find(self, node=None, lookup='', select='', on_exception=False):
"""recursively search a lookup.
Parameters
----------
node (dict, list): a dict, dict-like, list, or list-like instance.
lookup (str): a search pattern.
select (str): a select statement.
on_exception (bool): raise `Exception` if set True, otherwise, return False.
Returns
-------
List: list of Any.
"""
node = node or self.data
lookup = str(lookup).strip()
if lookup == '' and select == '':
return node
validate_argument_is_not_empty(lookup=lookup)
validate_argument_type(list, tuple, dict, node=node)
elm_obj = Element(node, on_exception=on_exception)
records = elm_obj.find(lookup, select=select)
return records
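# Minimal usage sketch (hypothetical data, not part of the module):
#
#   query = DLQuery({"a": {"b": 1}, "c": [2, 3]})
#   query.get("a")          # -> {"b": 1}
#   query.find(lookup="b")  # recursive search; matching semantics are provided
#                           # by dlapp.collection.Element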
|
[
"operator.ne",
"dlapp.collection.Element",
"operator.eq",
"dlapp.argumenthelper.validate_argument_type",
"dlapp.utils.foreach",
"dlapp.argumenthelper.validate_argument_is_not_empty"
] |
[((1082, 1134), 'dlapp.argumenthelper.validate_argument_type', 'validate_argument_type', (['list', 'tuple', 'dict'], {'data': 'data'}), '(list, tuple, dict, data=data)\n', (1104, 1134), False, 'from dlapp.argumenthelper import validate_argument_type\n'), ((3224, 3263), 'dlapp.utils.foreach', 'utils.foreach', (['self.data'], {'choice': '"""keys"""'}), "(self.data, choice='keys')\n", (3237, 3263), False, 'from dlapp import utils\n'), ((3389, 3430), 'dlapp.utils.foreach', 'utils.foreach', (['self.data'], {'choice': '"""values"""'}), "(self.data, choice='values')\n", (3402, 3430), False, 'from dlapp import utils\n'), ((3554, 3594), 'dlapp.utils.foreach', 'utils.foreach', (['self.data'], {'choice': '"""items"""'}), "(self.data, choice='items')\n", (3567, 3594), False, 'from dlapp import utils\n'), ((7618, 7663), 'dlapp.argumenthelper.validate_argument_is_not_empty', 'validate_argument_is_not_empty', ([], {'lookup': 'lookup'}), '(lookup=lookup)\n', (7648, 7663), False, 'from dlapp.argumenthelper import validate_argument_is_not_empty\n'), ((7672, 7724), 'dlapp.argumenthelper.validate_argument_type', 'validate_argument_type', (['list', 'tuple', 'dict'], {'node': 'node'}), '(list, tuple, dict, node=node)\n', (7694, 7724), False, 'from dlapp.argumenthelper import validate_argument_type\n'), ((7744, 7784), 'dlapp.collection.Element', 'Element', (['node'], {'on_exception': 'on_exception'}), '(node, on_exception=on_exception)\n', (7751, 7784), False, 'from dlapp.collection import Element\n'), ((1974, 2008), 'operator.eq', 'operator.eq', (['self.data', 'other.data'], {}), '(self.data, other.data)\n', (1985, 2008), False, 'import operator\n'), ((2044, 2073), 'operator.eq', 'operator.eq', (['self.data', 'other'], {}), '(self.data, other)\n', (2055, 2073), False, 'import operator\n'), ((2193, 2227), 'operator.ne', 'operator.ne', (['self.data', 'other.data'], {}), '(self.data, other.data)\n', (2204, 2227), False, 'import operator\n'), ((2263, 2292), 'operator.ne', 'operator.ne', (['self.data', 'other'], {}), '(self.data, other)\n', (2274, 2292), False, 'import operator\n')]
|
# from __future__ import print_function # In python 2.7
from flask import render_template
from flask_login import current_user
import datetime
#import forms
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField
from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, NumberRange
#import models
from .models.sellerreview import SellerReview
from .models.seller import Seller
from flask import current_app as app
from flask import Blueprint
bp = Blueprint('sellerreviews', __name__)
#routes to reviews page for certain seller
@bp.route('/sellerreviews/<int:seller_id>/<int:number>', methods=['GET', 'POST'])
def SellerReviews(seller_id, number):
s_reviews = SellerReview.get_all_seller_reviews_for_seller(seller_id, number)
seller_review_stats = SellerReview.get_stats(seller_id)
seller_name = Seller.get_seller_info(seller_id)
SR_check = True
if current_user.is_authenticated:
SR_check = SellerReview.review_check(seller_id, current_user.uid)
total_reviews = SellerReview.get_total_number_seller_reviews_for_seller(seller_id)
return render_template('sellerreviews.html',
sellerreviews = s_reviews,
sellerreviewstats = seller_review_stats,
SRcheck = SR_check,
sellername = seller_name,
number = number,
total = total_reviews)
|
[
"flask.Blueprint",
"flask.render_template"
] |
[((553, 589), 'flask.Blueprint', 'Blueprint', (['"""sellerreviews"""', '__name__'], {}), "('sellerreviews', __name__)\n", (562, 589), False, 'from flask import Blueprint\n'), ((1190, 1378), 'flask.render_template', 'render_template', (['"""sellerreviews.html"""'], {'sellerreviews': 's_reviews', 'sellerreviewstats': 'seller_review_stats', 'SRcheck': 'SR_check', 'sellername': 'seller_name', 'number': 'number', 'total': 'total_reviews'}), "('sellerreviews.html', sellerreviews=s_reviews,\n sellerreviewstats=seller_review_stats, SRcheck=SR_check, sellername=\n seller_name, number=number, total=total_reviews)\n", (1205, 1378), False, 'from flask import render_template\n')]
|
from django.shortcuts import render
from crawler import Crawler
from django.http import HttpResponse
crawlers = {}
def index(request, params=''):
post_data = dict(request.POST)
post_data['urls'] = post_data['url[]']
for link in post_data['urls']:
crawlers[link] = Crawler()
crawlers[link].setUrl(link)
crawlers[link].start()
return render(request, 'crawl/index.html', {'urls': post_data['urls']})
def status(request):
response_text = "Total documents collected: <span class='count'>" + str(Crawler.crawledUrls['size']) + "</span>"
return HttpResponse(str(response_text))
def stop(request):
for key in crawlers.keys():
crawlers[key].stop()
response_text = "Stopped"
return HttpResponse(response_text)
|
[
"django.shortcuts.render",
"crawler.Crawler",
"django.http.HttpResponse"
] |
[((366, 430), 'django.shortcuts.render', 'render', (['request', '"""crawl/index.html"""', "{'urls': post_data['urls']}"], {}), "(request, 'crawl/index.html', {'urls': post_data['urls']})\n", (372, 430), False, 'from django.shortcuts import render\n'), ((719, 746), 'django.http.HttpResponse', 'HttpResponse', (['response_text'], {}), '(response_text)\n', (731, 746), False, 'from django.http import HttpResponse\n'), ((274, 283), 'crawler.Crawler', 'Crawler', ([], {}), '()\n', (281, 283), False, 'from crawler import Crawler\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""This file contains tests for the data_classes.py file.
For specifics on each test, see the docstrings under each function.
"""
__authors__ = ["<NAME>, <NAME>"]
__credits__ = ["<NAME>, <NAME>"]
__license__ = "BSD"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
import pytest
from unittest.mock import patch
from bs4 import BeautifulSoup as Soup
from ..data_classes import Data, Hijack, Leak, Outage
from ..tables import Hijacks_Table, Leaks_Table, Outages_Table
from ..event_types import BGPStream_Website_Event_Types
from itertools import combinations
from .create_HTML import HTML_Creator
# Importing actually runs the tests
#from .test_tables import Test_Hijacks_Table, Test_Leaks_Table, Test_Outages_Table
class Test_Data:
"""Tests all functions within the Data class.
NOTE: You may want to make this not a test class
and simply have all other classes inherit it."""
@staticmethod
def init(event):
type_ = event['event_type']
if type_ == BGPStream_Website_Event_Types.HIJACK.value:
return Hijack('/tmp/')
if type_ == BGPStream_Website_Event_Types.LEAK.value:
return Leak('/tmp/')
if type_ == BGPStream_Website_Event_Types.OUTAGE.value:
return Outage('/tmp/')
@staticmethod
def uncommon_info(event):
type_ = event['event_type']
if type_ == BGPStream_Website_Event_Types.HIJACK.value:
return ['expected_origin_name', 'expected_origin_number',
'detected_origin_name', 'detected_origin_number',
'expected_prefix', 'more_specific_prefix',
'detected_as_path', 'detected_by_bgpmon_peers']
if type_ == BGPStream_Website_Event_Types.LEAK.value:
return ['origin_as_name', 'origin_as_number',
'leaker_as_name', 'leaker_as_number',
'leaked_prefix', 'leaked_to_number', 'leaked_to_name',
'example_as_path', 'detected_by_bgpmon_peers']
if type_ == BGPStream_Website_Event_Types.OUTAGE.value:
return ['as_name', 'as_number',
'number_prefixes_affected', 'percent_prefixes_affected']
def test_append(self, setup):
"""Tests the append function
Should have input for every combo of:
-hijack, leak, outage
-country info vs non country info
And check expected output.
"""
for event in setup.events:
data = Test_Data.init(event)
with patch('lib_bgp_data.utils.utils.get_tags') as mock:
mock.side_effect = setup.open_custom_HTML
data.append(event['row'])
# Columns are retrieved from the Postgres table columns
# which has an 'id' column used as the primary key.
# Not part of row data, so must be removed
cols = data._columns
cols.remove('id')
for i, c in enumerate(cols):
assert data.data[0][i] == event[c]
def test_db_insert(self, setup):
"""Tests the db_insert function
Should have input with the powerset of all the combinations of:
-hijack, leak, outage
-country info vs non country info
And check expected output.
"""
for event in setup.events:
data = Test_Data.init(event)
# need something to insert
with patch('lib_bgp_data.utils.utils.get_tags') as mock:
mock.side_effect = setup.open_custom_HTML
data.append(event['row'])
with data.table() as t:
for IPV4, IPV6 in combinations([True, False], 2):
data.db_insert(IPV4, IPV6)
# db_insert creates indexes
sql = f"""SELECT * FROM pg_indexes
WHERE indexname = '{t.name}_index'"""
assert len(t.execute(sql)) == 1
# db_insert deletes duplicates
sql = f"SELECT DISTINCT * FROM {t.name}"
assert t.get_all() == t.execute(sql)
# check IPV filtering was successful
for IPV, num in zip([IPV4, IPV6], [4, 6]):
if not IPV and event['event_type'] != BGPStream_Website_Event_Types.OUTAGE.value:
sql = f"""SELECT COUNT({t.prefix_column})
FROM {t.name}
WHERE family({t.prefix_column}) = {num}"""
assert t.get_count(sql) == 0
def test_parse_common_elements(self, setup):
"""Tests the parse_common_elements function
Should have input for every combo of:
-hijack, leak, outage
-country info vs non country info
And check expected output.
"""
for event in setup.events:
data = Test_Data.init(event)
with patch('lib_bgp_data.utils.utils.get_tags') as mock:
mock.side_effect = setup.open_custom_HTML
as_info, extended_children = data._parse_common_elements(event['row'])
assert event['as_info'] == as_info
assert event['extended_children'] == extended_children
def test_parse_as_info(self, setup):
"""Tests the parse_as_info function
Should have input for every combo of:
-hijack, leak, outage
-country info vs non country info
-every possible combo if as info formatting
And check expected output.
"""
for event in setup.events:
d = Test_Data.init(event)
as_info = event['as_info']
# the AS info for outages will be a a single string
if isinstance(as_info, str):
assert event['parsed_as_info1'] == d._parse_as_info(as_info)
# for hijacks and leaks, there are 2 pieces of AS info in a list
elif isinstance(as_info, list):
assert event['parsed_as_info1'] == d._parse_as_info(as_info[1])
assert event['parsed_as_info2'] == d._parse_as_info(as_info[3])
    def test_format_temp_row(self, setup):
        """Tests the _format_temp_row function
Make sure list exists with all columns but ID.
"""
# test by putting the same string for every column except ID
# what should returned is just the a list of the same string
# the string that's put for ID should not be found
for event in setup.events:
data = Test_Data.init(event)
# usually initialized in append
data._temp_row = {}
for col in data._columns:
# id columns is ignored
if col == 'id':
data._temp_row[col] = 'should not be here'
# quotes should be removed
else:
data._temp_row[col] = 'no quotes"'
expected = ['no quotes' for i in range(len(data._columns)-1)]
assert data._format_temp_row() == expected
def test_parse_uncommon_info(self, setup):
"""Tests the parse_uncommon_elements function
input all kinds of rows and check expected output.
"""
for event in setup.events:
data = Test_Data.init(event)
# initialize temp row. it's usually initialized in append()
data._temp_row = {}
data._parse_uncommon_info(event['as_info'], event['extended_children'])
for info in Test_Data.uncommon_info(event):
assert data._temp_row[info] == event[info]
|
[
"unittest.mock.patch",
"itertools.combinations"
] |
[((2601, 2643), 'unittest.mock.patch', 'patch', (['"""lib_bgp_data.utils.utils.get_tags"""'], {}), "('lib_bgp_data.utils.utils.get_tags')\n", (2606, 2643), False, 'from unittest.mock import patch\n'), ((3509, 3551), 'unittest.mock.patch', 'patch', (['"""lib_bgp_data.utils.utils.get_tags"""'], {}), "('lib_bgp_data.utils.utils.get_tags')\n", (3514, 3551), False, 'from unittest.mock import patch\n'), ((3764, 3794), 'itertools.combinations', 'combinations', (['[True, False]', '(2)'], {}), '([True, False], 2)\n', (3776, 3794), False, 'from itertools import combinations\n'), ((5115, 5157), 'unittest.mock.patch', 'patch', (['"""lib_bgp_data.utils.utils.get_tags"""'], {}), "('lib_bgp_data.utils.utils.get_tags')\n", (5120, 5157), False, 'from unittest.mock import patch\n')]
|
"""
The goal of this module is to ease the creation of static maps
from this package.
Ideally, this is done headlessly (i.e., no running browser)
and quickly. Given that deck.gl requires WebGL, there aren't
lot of alternatives to using a browser.
Not yet implemented.
"""
from selenium import webdriver
# from selenium.webdriver.chrome.options import Options
# TODO this should be determined programmatically
CHROME_PATH = '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome'
CHROMEDRIVER_PATH = '/usr/local/bin/chromedriver'
WINDOW_SIZE = "1920,1080"
def make_screenshot(url, output):
# options = webdriver.ChromeOptions()
driver = webdriver.Chrome(
executable_path=CHROMEDRIVER_PATH
)
driver.get(url)
driver.save_screenshot(output)
driver.close()
# This may be of interest
# https://github.com/stackgl/headless-gl
raise NotImplementedError(
'This part of the library is not complete')
|
[
"selenium.webdriver.Chrome"
] |
[((656, 707), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'executable_path': 'CHROMEDRIVER_PATH'}), '(executable_path=CHROMEDRIVER_PATH)\n', (672, 707), False, 'from selenium import webdriver\n')]
|
import logging as log
import os
import base64
import json
import numpy as np
from paprika.restraints import DAT_restraint
from parmed.amber import AmberParm
from parmed import Structure
# https://stackoverflow.com/questions/27909658/json-encoder-and-decoder-for-complex-numpy-arrays
# https://stackoverflow.com/a/24375113/901925
# https://stackoverflow.com/questions/3488934/simplejson-and-numpy-array/24375113#24375113
class NumpyEncoder(json.JSONEncoder):
"""Save DAT_restraints as JSON by re-encoding `numpy` arrays."""
def default(self, obj):
"""If input object is an ndarray it will be converted into a dict
holding dtype, shape and the data, base64 encoded.
"""
if isinstance(obj, AmberParm):
log.info("Encountered AmberParm, returning name.")
return obj.name
if isinstance(obj, Structure):
log.warning("Encountered Structure, which does not store filename.")
return ""
if isinstance(obj, np.ndarray):
if obj.flags["C_CONTIGUOUS"]:
obj_data = obj.data
else:
cont_obj = np.ascontiguousarray(obj)
assert cont_obj.flags["C_CONTIGUOUS"]
obj_data = cont_obj.data
data_b64 = base64.b64encode(obj_data)
# obj_data = obj.tolist()
return dict(
__ndarray__=data_b64.decode("utf-8"),
dtype=str(obj.dtype),
shape=obj.shape,
)
elif isinstance(
obj,
(
np.int_,
np.intc,
np.intp,
np.int8,
np.int16,
np.int32,
np.int64,
np.uint8,
np.uint16,
np.uint32,
np.uint64,
),
):
return int(obj)
elif isinstance(obj, (np.float_, np.float16, np.float32, np.float64)):
return float(obj)
elif isinstance(obj, (np.ndarray,)):
return obj.tolist()
# Let the base class default method raise the TypeError
# return json.JSONEncoder(self, obj)
return super(NumpyEncoder, self).default(obj)
def json_numpy_obj_hook(dct):
"""Decodes a previously encoded numpy ndarray with proper shape and dtype.
:param dct: (dict) json encoded ndarray
:return: (ndarray) if input was an encoded ndarray
"""
if isinstance(dct, dict) and "__ndarray__" in dct:
data = base64.b64decode(dct["__ndarray__"])
return np.frombuffer(data, dct["dtype"]).reshape(dct["shape"])
# return dct['__ndarray__']
return dct
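# Round-trip sketch for the encoder/decoder pair (illustrative only):
#
#   blob = json.dumps({"x": np.arange(3)}, cls=NumpyEncoder)
#   data = json.loads(blob, object_hook=json_numpy_obj_hook)
#   # data["x"] comes back as an ndarray with the original dtype and shape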
def save_restraints(restraint_list, filepath="restraints.json"):
log.debug("Saving restraint information as JSON.")
with open(os.path.join(filepath), "w") as f:
for restraint in restraint_list:
dumped = json.dumps(restraint.__dict__, cls=NumpyEncoder)
f.write(dumped)
f.write("\n")
def load_restraints(filepath="restraints.json"):
log.debug("Loading restraint information from JSON.")
with open(os.path.join(filepath), "r") as f:
json_data = f.read()
restraint_json = json_data.split("\n")
restraints = []
for restraint in restraint_json:
if restraint == "":
continue
loaded = json.loads(restraint, object_hook=json_numpy_obj_hook)
tmp = DAT_restraint()
tmp.__dict__ = loaded
properties = ["mask1", "mask2", "mask3", "mask4", "topology", "instances", "custom_restraint_values",
"auto_apr", "continuous_apr", "attach", "pull", "release", "amber_index"]
for class_property in properties:
if f"_{class_property}" in tmp.__dict__.keys():
tmp.__dict__[class_property] = tmp.__dict__[f"_{class_property}"]
restraints.append(tmp)
return restraints
|
[
"paprika.restraints.DAT_restraint",
"logging.debug",
"json.loads",
"logging.warning",
"numpy.frombuffer",
"numpy.ascontiguousarray",
"base64.b64decode",
"json.dumps",
"logging.info",
"base64.b64encode",
"os.path.join"
] |
[((2779, 2829), 'logging.debug', 'log.debug', (['"""Saving restraint information as JSON."""'], {}), "('Saving restraint information as JSON.')\n", (2788, 2829), True, 'import logging as log\n'), ((3099, 3152), 'logging.debug', 'log.debug', (['"""Loading restraint information from JSON."""'], {}), "('Loading restraint information from JSON.')\n", (3108, 3152), True, 'import logging as log\n'), ((2549, 2585), 'base64.b64decode', 'base64.b64decode', (["dct['__ndarray__']"], {}), "(dct['__ndarray__'])\n", (2565, 2585), False, 'import base64\n'), ((3397, 3451), 'json.loads', 'json.loads', (['restraint'], {'object_hook': 'json_numpy_obj_hook'}), '(restraint, object_hook=json_numpy_obj_hook)\n', (3407, 3451), False, 'import json\n'), ((3466, 3481), 'paprika.restraints.DAT_restraint', 'DAT_restraint', ([], {}), '()\n', (3479, 3481), False, 'from paprika.restraints import DAT_restraint\n'), ((756, 806), 'logging.info', 'log.info', (['"""Encountered AmberParm, returning name."""'], {}), "('Encountered AmberParm, returning name.')\n", (764, 806), True, 'import logging as log\n'), ((886, 954), 'logging.warning', 'log.warning', (['"""Encountered Structure, which does not store filename."""'], {}), "('Encountered Structure, which does not store filename.')\n", (897, 954), True, 'import logging as log\n'), ((1285, 1311), 'base64.b64encode', 'base64.b64encode', (['obj_data'], {}), '(obj_data)\n', (1301, 1311), False, 'import base64\n'), ((2844, 2866), 'os.path.join', 'os.path.join', (['filepath'], {}), '(filepath)\n', (2856, 2866), False, 'import os\n'), ((2941, 2989), 'json.dumps', 'json.dumps', (['restraint.__dict__'], {'cls': 'NumpyEncoder'}), '(restraint.__dict__, cls=NumpyEncoder)\n', (2951, 2989), False, 'import json\n'), ((3167, 3189), 'os.path.join', 'os.path.join', (['filepath'], {}), '(filepath)\n', (3179, 3189), False, 'import os\n'), ((1141, 1166), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['obj'], {}), '(obj)\n', (1161, 1166), True, 'import numpy as np\n'), ((2601, 2634), 'numpy.frombuffer', 'np.frombuffer', (['data', "dct['dtype']"], {}), "(data, dct['dtype'])\n", (2614, 2634), True, 'import numpy as np\n')]
|
import PINN_Base.base_v1 as base_v1
import tensorflow as tf
'''
This is an implementation of the (unnamed)
"Improved fully-connected neural architecture" from
UNDERSTANDING AND MITIGATING GRADIENT PATHOLOGIES IN
PHYSICS-INFORMED NEURAL NETWORKS (Wang, 2020).
I have taken the liberty of naming it based on the authors'
likening it to the Transformer. Unlike the Transformer, it doesn't
transform the outputs, Z, but rather the inputs, X.
'''
class Domain_Transformer(base_v1.PINN_Base):
def __init__(self,
lower_bound,
upper_bound,
input_dim,
output_dim,
width,
depth,
**kwargs):
self.input_dim = input_dim
self.output_dim = output_dim
self.width = width
self.depth = depth
layers = [self.input_dim] + [self.width] * depth + [self.output_dim]
super().__init__(lower_bound, upper_bound, layers, **kwargs)
def _init_params(self):
# Two layer neural network
transformer_shape = [self.input_dim, self.width, self.width]
# The first encoder network
self.weights_T1, self.biases_T1 = self._init_NN(transformer_shape)
# The second encoder network
self.weights_T2, self.biases_T2 = self._init_NN(transformer_shape)
# The normal "forward" network is initialized by parent
super()._init_params()
def _domain_transformer_forward(self, X, T1, T2, weights, biases):
activations = []
H = 2.0 * (X - self.lower_bound) / \
(self.upper_bound - self.lower_bound) - 1.0
activations.append(H)
for l in range(len(weights) - 1):
W = weights[l]
b = biases[l]
Z = tf.tanh(tf.add(tf.matmul(H, W), b))
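            # Z acts as a per-unit gate that blends the two encoder streams T1 and T2 at every hidden layer.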
H = (1 - Z) * T1 + Z * T2
activations.append(H)
W = weights[-1]
b = biases[-1]
Y = tf.add(tf.matmul(H, W), b)
return Y, activations
def _forward(self, X):
        T1, activations_T1 = self._NN(X, self.weights_T1, self.biases_T1)
T2, activations_T2 = self._NN(X, self.weights_T2, self.biases_T2)
U, activations = self._domain_transformer_forward(
X, T1, T2, self.weights, self.biases)
if X == self.X:
self.T1 = T1
self.T2 = T2
self.activations = activations
self.Activations_T1 = activations_T1
self.Activations_T2 = activations_T2
return U
def get_T1(self, X):
return self.sess.run(self.T1, {self.X: X})
def get_T2(self, X):
return self.sess.run(self.T2, {self.X: X})
def get_all_weights(self):
return self.sess.run(self.get_all_weight_variables())
def get_all_weight_variables(self):
return [
self.weights, self.biases,
self.weights_T1, self.biases_T1,
self.weights_T2, self.biases_T2
]
def _count_params(self):
params_main = super()._count_params()
params_T1_weights = self._size_of_variable_list(self.weights_T1)
params_T1_biases = self._size_of_variable_list(self.biases_T1)
params_T2_weights = self._size_of_variable_list(self.weights_T2)
params_T2_biases = self._size_of_variable_list(self.biases_T2)
return params_main + params_T1_weights + params_T1_biases + params_T2_weights + params_T2_biases
def get_architecture_description(self):
params = self._count_params()
return {
"arch_name": "domain_transformer",
"n_params": params,
"shape_main": self.layers[:],
"shape_T1": [self.input_dim, self.width, self.output_dim],
"shape_T2": [self.input_dim, self.width, self.output_dim],
"dtype": "float32" if self.dtype == tf.float32 else "float64"
}
|
[
"tensorflow.matmul"
] |
[((1955, 1970), 'tensorflow.matmul', 'tf.matmul', (['H', 'W'], {}), '(H, W)\n', (1964, 1970), True, 'import tensorflow as tf\n'), ((1795, 1810), 'tensorflow.matmul', 'tf.matmul', (['H', 'W'], {}), '(H, W)\n', (1804, 1810), True, 'import tensorflow as tf\n')]
|
import numpy
from scipy.interpolate import InterpolatedUnivariateSpline as interpolate
from scipy.interpolate import interp1d
from cosmo4d.lab import (UseComplexSpaceOptimizer,
NBodyModel, LPTModel, ZAModel,
LBFGS, ParticleMesh)
#from cosmo4d.lab import mapbias as map
from cosmo4d import lab
from cosmo4d.lab import report, dg, objectives
from abopt.algs.lbfgs import scalar as scalar_diag
from nbodykit.cosmology import Planck15, EHPower, Cosmology
from nbodykit.algorithms.fof import FOF
from nbodykit.lab import KDDensity, BigFileMesh, BigFileCatalog, ArrayCatalog
import sys, os, json, yaml
from solve import solve
from getbiasparams import getbias, eval_bfit
sys.path.append('../')
sys.path.append('../utils/')
import HImodels
#########################################
#Set parameters here
##
cfname = sys.argv[1]
with open(cfname, 'r') as ymlfile: cfg = yaml.load(ymlfile)
for i in cfg['basep'].keys(): locals()[i] = cfg['basep'][i]
h1model = HImodels.ModelA(aa)
truth_pm = ParticleMesh(BoxSize=bs, Nmesh=(nc, nc, nc), dtype='f4')
comm = truth_pm.comm
rank = comm.rank
if numd <= 0: num = -1
else: num = int(bs**3 * numd)
if rank == 0: print('Number of objects : ', num)
objfunc = getattr(objectives, cfg['mods']['objective'])
map = getattr(lab, cfg['mods']['map'])
#
proj = '/project/projectdirs/cosmosim/lbl/chmodi/cosmo4d/'
dfolder = '/global/cscratch1/sd/chmodi/m3127/cm_lowres/%dstepT-B%d/%d-%d-9100-fixed/'%(nsteps, B, bs, nc)
#ofolder = '/global/cscratch1/sd/chmodi/m3127/21cm_cleaning/recon/bias/L%04d-N%04d-T%02d-B%01d/'%(bs, nc, nsteps, B)
ofolder = '/global/cscratch1/sd/chmodi/m3127/21cm_cleaning/recon/L%04d-N%04d/'%(bs, nc)
if pmdisp:
ofolder += 'T%02d-B%01d'%(nsteps, B)
else: ofolder += 'ZA/'
prefix = '_fourier'
if rsdpos: prefix += "_rsdpos"
if masswt:
if h1masswt : fname = 's999_h1massA%s'%prefix
else: fname = 's999_mass%s'%prefix
else: fname = 's999_pos%s'%prefix
optfolder = ofolder + 'opt_%s/'%fname
if truth_pm.comm.rank == 0: print('Output Folder is %s'%optfolder)
for folder in [ofolder, optfolder]:
try: os.makedirs(folder)
except:pass
#########################################
#initiate
klin, plin = numpy.loadtxt('../../data/pklin_1.0000.txt', unpack = True)
ipk = interpolate(klin, plin)
#cosmo = Planck15.clone(Omega_cdm = 0.2685, h = 0.6711, Omega_b = 0.049)
cosmodef = {'omegam':0.309167, 'h':0.677, 'omegab':0.048}
cosmo = Cosmology.from_dict(cosmodef)
data = BigFileCatalog('/global/cscratch1/sd/chmodi/m3127/H1mass/highres/2560-9100-fixed/fastpm_%0.4f/LL-0.200/'%aa)
data = data.gslice(start = 0, stop = num)
data['Mass'] = data['Length']*data.attrs['M0']*1e10
if masswt :
masswt = data['Mass'].copy()
if h1masswt : masswt = h1model.assignhalo(masswt)
else: masswt = data['Mass'].copy()*0 + 1.
hpos, hmass = data['Position'], masswt
rsdfac = 0
if rsdpos:
with open('/global/cscratch1/sd/chmodi/m3127/H1mass/highres/2560-9100-fixed/fastpm_%0.4f/Header/attr-v2'%aa) as ff:
for line in ff.readlines():
if 'RSDFactor' in line: rsdfac = float(line.split()[-2])
hpos = data['Position'] + rsdfac*data['Velocity']*numpy.array([0, 0, 1]).reshape(1, -1)
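# Paint the (mass-weighted) halo field onto the mesh and convert it to an overdensity, delta = rho/rho_mean - 1.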
hlayout = truth_pm.decompose(hpos)
hmesh = truth_pm.paint(hpos, layout=hlayout, mass=hmass)
hmesh /= hmesh.cmean()
hmesh -= 1.
rankweight = sum(masswt.compute())
totweight = comm.allreduce(rankweight)
rankweight = sum((masswt**2).compute())
totweight2 = comm.allreduce(rankweight)
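# Shot noise of the weighted field: volume / n_eff, with n_eff = (sum w)^2 / sum w^2.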
noise = bs**3 / (totweight**2/totweight2)
if rank == 0 : print('Noise : ', noise)
#########################################
#dynamics
stages = numpy.linspace(0.1, aa, nsteps, endpoint=True)
if pmdisp: dynamic_model = NBodyModel(cosmo, truth_pm, B=B, steps=stages)
else: dynamic_model = ZAModel(cosmo, truth_pm, B=B, steps=stages)
if rank == 0: print(dynamic_model)
#noise
#Artificially low noise since the data is constructed from the model
truth_noise_model = map.NoiseModel(truth_pm, None, noisevar*(truth_pm.BoxSize/truth_pm.Nmesh).prod(), 1234)
truth_noise_model = None
#Create and save data if not found
dyn = BigFileCatalog(dfolder + 'fastpm_%0.4f/1'%aa)
s_truth = BigFileMesh(dfolder + 'linear', 'LinearDensityK').paint()
mock_model_setup = map.MockModel(dynamic_model, rsdpos=rsdpos, rsdfac=rsdfac)
fpos, linear, linearsq, shear = mock_model_setup.get_code().compute(['x', 'linear', 'linearsq', 'shear'], init={'parameters': s_truth})
grid = truth_pm.generate_uniform_particle_grid(shift=0.0, dtype='f4')
params, bmod = getbias(truth_pm, hmesh, [linear, linearsq, shear], fpos, grid)
title = ['%0.3f'%i for i in params]
kerror, perror = eval_bfit(hmesh, bmod, optfolder, noise=noise, title=title, fsize=15)
ipkerror = interp1d(kerror, perror, bounds_error=False, fill_value=(perror[0], perror[-1]))
mock_model = map.MockModel(dynamic_model, params=params, rsdpos=rsdpos, rsdfac=rsdfac)
data_p = mock_model.make_observable(s_truth)
data_p.mapp = hmesh.copy()
data_p.save(optfolder+'datap/')
if rank == 0: print('datap saved')
#data_n = truth_noise_model.add_noise(data_p)
#data_n.save(optfolder+'datan/')
#if rank == 0: print('datan saved')
fit_p = mock_model.make_observable(s_truth)
fit_p.save(optfolder+'fitp/')
if rank == 0: print('fitp saved')
##
if rank == 0: print('data_p, data_n created')
################################################
#Optimizer
if cfg['init']['sinit'] is None:
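    # Start from a low-amplitude Gaussian realization drawn from the linear power spectrum.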
s_init = truth_pm.generate_whitenoise(777, mode='complex')\
.apply(lambda k, v: v * (ipk(sum(ki **2 for ki in k) **0.5) / v.BoxSize.prod()) ** 0.5)\
.c2r()*0.001
sms = [4.0, 2.0, 1.0, 0.5, 0.0]
else:
s_init = BigFileMesh(cfg['init']['sinit'], 's').paint()
sms = cfg['init']['sms']
    if sms is None: sms = [4.0, 2.0, 1.0, 0.5, 0.0]
x0 = s_init
N0 = nc
C = x0.BoxSize[0] / x0.Nmesh[0]
for Ns in sms:
if truth_pm.comm.rank == 0: print('\nDo for cell smoothing of %0.2f\n'%(Ns))
sml = C * Ns
rtol = 0.005
run = '%d-%0.2f'%(N0, Ns)
if Ns == sms[0]:
if cfg['init']['sinit'] is not None: run += '-nit_%d-sm_%.2f'%(cfg['init']['nit'], cfg['init']['sml'])
obj = objfunc(mock_model, truth_noise_model, data_p, prior_ps=ipk, error_ps=ipkerror, sml=sml)
x0 = solve(N0, x0, rtol, run, Ns, prefix, mock_model, obj, data_p, truth_pm, optfolder, saveit=20, showit=5, title=None)
#########################################
##def gaussian_smoothing(sm):
## def kernel(k, v):
## return numpy.exp(- 0.5 * sm ** 2 * sum(ki ** 2 for ki in k)) * v
## return kernel
##
#########################################
#optimizer
##
##def solve(Nmesh, x0, rtol, run, Nsm):
##
## pm = truth_pm.resize(Nmesh=(Nmesh, Nmesh, Nmesh))
## atol = pm.Nmesh.prod() * rtol
## x0 = pm.upsample(x0, keep_mean=True)
## #data = data_n.downsample(pm)
## #IDEAL no noise limit
## data = data_p.downsample(pm)
##
## # smooth the data. This breaks the noise model but we don't need it
## # for lower resolution anyways.
## sml = pm.BoxSize[0] / Nmesh * Nsm
##
## #dynamic_model = ZAModel(cosmo, truth_pm, B=B, steps=stages)
## #mock_model = map.MockModel(dynamic_model)
##
## # an approximate noise model, due to smoothing this is correct only at large scale.
## noise_model = truth_noise_model #.downsample(pm)
##
## obj = map.SmoothedObjective(mock_model, noise_model, data, prior_ps=pk, sml=sml)#, noised=noised)
##
## prior, chi2 = obj.get_code().compute(['prior', 'chi2'], init={'parameters': data.s})
## if pm.comm.rank == 0: print('Prior, chi2 : ', prior, chi2) # for 2d chi2 is close to total pixels.
##
## fit_p = mock_model.make_observable(data.s)
## #r = obj.evaluate(fit_p, data)
## r = dg.evaluate(fit_p, data)
##
## try:
## os.makedirs(optfolder + '%s' % run)
## except:
## pass
## try:
## os.makedirs(optfolder + '%s/2pt' % run)
## except:
## pass
## dg.save_report(r, optfolder + "%s/truth.png" % run, pm)
## dg.save_2ptreport(r, optfolder + "%s/2pt/truth.png" % run, pm)
##
##
## optimizer = LBFGS(m=10, diag_update=scalar_diag)
##
## prob = obj.get_problem(atol=atol, precond=UseComplexSpaceOptimizer)
##
## def monitor(state):
## if pm.comm.rank == 0:
## print(state)
## if state.nit % 5 == 0:
## fit_p = mock_model.make_observable(state['x'])
## if state.nit % 20 == 0:
## fit_p.save(optfolder + '%s/%04d/fit_p' % (run, state['nit']))
## r = obj.evaluate(fit_p, data)
## #obj.save_report(r, optfolder + "%s/%s%02d-%04d.png"% (run, prefix, int(Nsm*10), state['nit']))
## dg.save_report(r, optfolder + "%s/%s_N%02d-%04d.png"% (run, prefix, int(Nsm*10), state['nit']), pm)
## dg.save_2ptreport(r, optfolder + "%s/2pt/%s_N%02d-%04d.png"% (run, prefix, int(Nsm*10), state['nit']), pm)
## if pm.comm.rank == 0:
## print('saved')
##
## state = optimizer.minimize(prob, x0=x0, monitor=monitor)
## fit_p = mock_model.make_observable(state['x'])
## fit_p.save(optfolder + '%s/best-fit' % run)
## r = dg.evaluate(fit_p, data)
## dg.save_report(r, optfolder + "%s/%s%02d-best-fit.png" % (run, prefix, int(Nsm*10)), pm)
## dg.save_2ptreport(r, optfolder + "%s/2pt/%s_N%02d-best-fit.png" % (run, prefix, int(Nsm*10)), pm)
## return state.x
##
|
[
"sys.path.append",
"cosmo4d.lab.ParticleMesh",
"nbodykit.lab.BigFileCatalog",
"nbodykit.cosmology.Cosmology.from_dict",
"yaml.load",
"scipy.interpolate.InterpolatedUnivariateSpline",
"solve.solve",
"os.makedirs",
"getbiasparams.eval_bfit",
"nbodykit.lab.BigFileMesh",
"cosmo4d.lab.NBodyModel",
"HImodels.ModelA",
"numpy.loadtxt",
"numpy.linspace",
"cosmo4d.lab.ZAModel",
"getbiasparams.getbias",
"scipy.interpolate.interp1d",
"numpy.array"
] |
[((716, 738), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (731, 738), False, 'import sys, os, json, yaml\n'), ((739, 767), 'sys.path.append', 'sys.path.append', (['"""../utils/"""'], {}), "('../utils/')\n", (754, 767), False, 'import sys, os, json, yaml\n'), ((1003, 1022), 'HImodels.ModelA', 'HImodels.ModelA', (['aa'], {}), '(aa)\n', (1018, 1022), False, 'import HImodels\n'), ((1035, 1091), 'cosmo4d.lab.ParticleMesh', 'ParticleMesh', ([], {'BoxSize': 'bs', 'Nmesh': '(nc, nc, nc)', 'dtype': '"""f4"""'}), "(BoxSize=bs, Nmesh=(nc, nc, nc), dtype='f4')\n", (1047, 1091), False, 'from cosmo4d.lab import UseComplexSpaceOptimizer, NBodyModel, LPTModel, ZAModel, LBFGS, ParticleMesh\n'), ((2221, 2278), 'numpy.loadtxt', 'numpy.loadtxt', (['"""../../data/pklin_1.0000.txt"""'], {'unpack': '(True)'}), "('../../data/pklin_1.0000.txt', unpack=True)\n", (2234, 2278), False, 'import numpy\n'), ((2287, 2310), 'scipy.interpolate.InterpolatedUnivariateSpline', 'interpolate', (['klin', 'plin'], {}), '(klin, plin)\n', (2298, 2310), True, 'from scipy.interpolate import InterpolatedUnivariateSpline as interpolate\n'), ((2450, 2479), 'nbodykit.cosmology.Cosmology.from_dict', 'Cosmology.from_dict', (['cosmodef'], {}), '(cosmodef)\n', (2469, 2479), False, 'from nbodykit.cosmology import Planck15, EHPower, Cosmology\n'), ((2488, 2608), 'nbodykit.lab.BigFileCatalog', 'BigFileCatalog', (["('/global/cscratch1/sd/chmodi/m3127/H1mass/highres/2560-9100-fixed/fastpm_%0.4f/LL-0.200/'\n % aa)"], {}), "(\n '/global/cscratch1/sd/chmodi/m3127/H1mass/highres/2560-9100-fixed/fastpm_%0.4f/LL-0.200/'\n % aa)\n", (2502, 2608), False, 'from nbodykit.lab import KDDensity, BigFileMesh, BigFileCatalog, ArrayCatalog\n'), ((3669, 3715), 'numpy.linspace', 'numpy.linspace', (['(0.1)', 'aa', 'nsteps'], {'endpoint': '(True)'}), '(0.1, aa, nsteps, endpoint=True)\n', (3683, 3715), False, 'import numpy\n'), ((4143, 4190), 'nbodykit.lab.BigFileCatalog', 'BigFileCatalog', (["(dfolder + 'fastpm_%0.4f/1' % aa)"], {}), "(dfolder + 'fastpm_%0.4f/1' % aa)\n", (4157, 4190), False, 'from nbodykit.lab import KDDensity, BigFileMesh, BigFileCatalog, ArrayCatalog\n'), ((4556, 4619), 'getbiasparams.getbias', 'getbias', (['truth_pm', 'hmesh', '[linear, linearsq, shear]', 'fpos', 'grid'], {}), '(truth_pm, hmesh, [linear, linearsq, shear], fpos, grid)\n', (4563, 4619), False, 'from getbiasparams import getbias, eval_bfit\n'), ((4673, 4742), 'getbiasparams.eval_bfit', 'eval_bfit', (['hmesh', 'bmod', 'optfolder'], {'noise': 'noise', 'title': 'title', 'fsize': '(15)'}), '(hmesh, bmod, optfolder, noise=noise, title=title, fsize=15)\n', (4682, 4742), False, 'from getbiasparams import getbias, eval_bfit\n'), ((4754, 4839), 'scipy.interpolate.interp1d', 'interp1d', (['kerror', 'perror'], {'bounds_error': '(False)', 'fill_value': '(perror[0], perror[-1])'}), '(kerror, perror, bounds_error=False, fill_value=(perror[0], perror[-1])\n )\n', (4762, 4839), False, 'from scipy.interpolate import interp1d\n'), ((914, 932), 'yaml.load', 'yaml.load', (['ymlfile'], {}), '(ymlfile)\n', (923, 932), False, 'import sys, os, json, yaml\n'), ((3743, 3789), 'cosmo4d.lab.NBodyModel', 'NBodyModel', (['cosmo', 'truth_pm'], {'B': 'B', 'steps': 'stages'}), '(cosmo, truth_pm, B=B, steps=stages)\n', (3753, 3789), False, 'from cosmo4d.lab import UseComplexSpaceOptimizer, NBodyModel, LPTModel, ZAModel, LBFGS, ParticleMesh\n'), ((3812, 3855), 'cosmo4d.lab.ZAModel', 'ZAModel', (['cosmo', 'truth_pm'], {'B': 'B', 'steps': 'stages'}), '(cosmo, truth_pm, B=B, steps=stages)\n', 
(3819, 3855), False, 'from cosmo4d.lab import UseComplexSpaceOptimizer, NBodyModel, LPTModel, ZAModel, LBFGS, ParticleMesh\n'), ((6249, 6368), 'solve.solve', 'solve', (['N0', 'x0', 'rtol', 'run', 'Ns', 'prefix', 'mock_model', 'obj', 'data_p', 'truth_pm', 'optfolder'], {'saveit': '(20)', 'showit': '(5)', 'title': 'None'}), '(N0, x0, rtol, run, Ns, prefix, mock_model, obj, data_p, truth_pm,\n optfolder, saveit=20, showit=5, title=None)\n', (6254, 6368), False, 'from solve import solve\n'), ((2118, 2137), 'os.makedirs', 'os.makedirs', (['folder'], {}), '(folder)\n', (2129, 2137), False, 'import sys, os, json, yaml\n'), ((4199, 4248), 'nbodykit.lab.BigFileMesh', 'BigFileMesh', (["(dfolder + 'linear')", '"""LinearDensityK"""'], {}), "(dfolder + 'linear', 'LinearDensityK')\n", (4210, 4248), False, 'from nbodykit.lab import KDDensity, BigFileMesh, BigFileCatalog, ArrayCatalog\n'), ((5673, 5711), 'nbodykit.lab.BigFileMesh', 'BigFileMesh', (["cfg['init']['sinit']", '"""s"""'], {}), "(cfg['init']['sinit'], 's')\n", (5684, 5711), False, 'from nbodykit.lab import KDDensity, BigFileMesh, BigFileCatalog, ArrayCatalog\n'), ((3178, 3200), 'numpy.array', 'numpy.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (3189, 3200), False, 'import numpy\n')]
|
#!/usr/bin/env python3
from __future__ import annotations
from typing import List, Dict, Any, ValuesView, TypeVar, Iterable, ItemsView
from processor.setting import Setting
import threading
T = TypeVar("T", bound="LocalRotary")
class LocalRotary:
def __init__(self, config: Dict[str, Setting]):
self.config = config
self._alarms: Dict[str, Any] = {}
# Will be set by changing a value, unset by access (to_dict)
self._changed = threading.Event()
# Cached for simplicity (dicts are ordered)
self._items: List[str] = list(self.config.keys())
def changed(self):
"""
This should always be called when an item in the rotary is changed
"""
self._changed.set()
def to_dict(self) -> Dict[str, Any]:
"Convert config to dict"
return {
k: v.to_dict()
for k, v in self.config.items()
if k not in ["C02 Setting", "Current Setting", "Reset Setting"]
}
@property
def alarms(self) -> Dict[str, Dict[str, float]]:
return self._alarms
@alarms.setter
def alarms(self, item: Dict[str, Dict[str, float]]):
self._alarms = item
def __getitem__(self, item: str):
return self.config[item]
def values(self) -> ValuesView[Setting]:
return self.config.values()
def items(self) -> ItemsView[str, Setting]:
return self.config.items()
def __repr__(self) -> str:
out = f"{self.__class__.__name__}(\n"
for key, value in self.config.items():
out += f" {key} : {value}\n"
return out + "\n)"
def __enter__(self: T) -> T:
return self
def __exit__(self, *args) -> None:
return None
def __contains__(self, key: str):
return key in self.config
def __iter__(self) -> Iterable[str]:
return iter(self.config)
def external_update(self) -> None:
"Update the display after a live setting (CurrentSetting) is changed externally"
pass
def time_left(self) -> float:
"Amount of time left on timer"
raise NotImplementedError()
def last_interaction(self) -> float:
"Timestamp of last interaction"
raise NotImplementedError()
|
[
"typing.TypeVar",
"threading.Event"
] |
[((198, 231), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {'bound': '"""LocalRotary"""'}), "('T', bound='LocalRotary')\n", (205, 231), False, 'from typing import List, Dict, Any, ValuesView, TypeVar, Iterable, ItemsView\n'), ((470, 487), 'threading.Event', 'threading.Event', ([], {}), '()\n', (485, 487), False, 'import threading\n')]
|
from sys import stderr
CONFIG_PATH = '/odin.cfg'
CONFIG_FILE_DOCS = """The configuration file should contain these settings:
ODIN_API_ROOT=https://example.com/odin_api
ODIN_SECRET=<secret encryption key>
JOB_API_ROOT=https://example.com/job_api
JOB_API_USERNAME=<username>
JOB_API_PASSWORD=<password>
It may contain:
JOB_API_VERSION=v4
"""
def validate_config(config):
"""Return True if ok, else False"""
def error(msg):
stderr.write(msg + '\n')
error.ok = False
error.ok = True
required = ['ODIN_API_ROOT', 'ODIN_SECRET',
'JOB_API_ROOT', 'JOB_API_USERNAME',
'JOB_API_PASSWORD']
for key in required:
if key not in config or not config[key]:
error('Missing in config: %s' % key)
if not error.ok:
return False
for api_root in ('ODIN_API_ROOT', 'JOB_API_ROOT'):
url = config[api_root]
if not url.startswith('http'):
error('%s does not look like an url: %s' % (api_root, url))
if url.endswith('/'):
error('%s must not end with /' % api_root)
optional = ['JOB_API_VERSION']
if not set(config.keys()).issubset(required + optional):
error("Config contains too invalid settings: {}".format(
set(config.keys()).difference(required + optional)
))
return error.ok
def load_config(config_file=None):
if config_file is None:
config_file = CONFIG_PATH
with open(config_file) as inp:
conf = dict(row.strip().split('=', 1) for row in inp if row.strip())
for k, v in conf.items():
conf[k] = v.strip('"')
return conf
def validate_project_name(project_name):
"""Must be ascii alnum and start with letter"""
if not project_name or not isinstance(project_name, str):
return False
if not project_name[0].isalpha():
return False
if not project_name.isalnum():
return False
return True
|
[
"sys.stderr.write"
] |
[((442, 466), 'sys.stderr.write', 'stderr.write', (["(msg + '\\n')"], {}), "(msg + '\\n')\n", (454, 466), False, 'from sys import stderr\n')]
|
from re import L
import sys
from typing import List
from tensorflow.python.ops.gen_array_ops import gather
sys.path.append('.')
import json
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
from random import randint, randrange
from environment.base.base import BaseEnvironment
from environment.custom.resource_v3.reward import RewardFactory, ReducedNodeUsage
from environment.custom.resource_v3.misc.utils import compute_remaining_resources, round_half_up
from environment.custom.resource_v3.node import Node as History
from environment.custom.resource_v3.resource import Resource as Request
class ResourceEnvironmentV3(BaseEnvironment):
def __init__(self, name: str, opts: dict):
super(ResourceEnvironmentV3, self).__init__(name)
###########################################
##### PROBLEM CONFIGS FROM JSON FILE ######
###########################################
self.gather_stats: bool = False
self.generate_request_on_the_fly: bool = opts['generate_request_on_the_fly']
self.mask_nodes_in_mha: bool = opts['mask_nodes_in_mha']
self.seed_value: int = opts['seed_value']
self.normalization_factor: int = opts['normalization_factor']
self.decimal_precision: int = opts['decimal_precision']
self.batch_size: int = opts['batch_size']
self.num_features: int = opts['num_features']
self.num_profiles: int = opts['num_profiles']
self.profiles_sample_size: int = opts['profiles_sample_size']
assert self.num_profiles >= self.profiles_sample_size, 'Resource sample size should be less than total number of resources'
self.EOS_CODE: int = opts['EOS_CODE']
self.EOS_BIN = np.full((1, self.num_features), self.EOS_CODE, dtype='float32')
self.node_sample_size: int = opts['node_sample_size'] + 1 # + 1 because of the EOS bin
self.req_min_val: int = opts['req_min_val']
self.req_max_val: int = opts['req_max_val']
self.node_min_val: int = opts['node_min_val']
self.node_max_val: int = opts['node_max_val']
################################################
##### MATERIALIZED VARIABLES FROM CONFIGS ######
################################################
self.decoding_step = self.node_sample_size
self.rewarder = RewardFactory(
opts['reward'],
self.EOS_BIN
)
if isinstance(self.rewarder, ReducedNodeUsage):
self.is_empty = np.zeros((self.batch_size, self.node_sample_size + self.profiles_sample_size, 1), dtype='float32')
# First position is EOS
self.is_empty[:, 0, 0] = self.EOS_BIN[0][0]
else:
self.is_empty = None
# Generate req profiles
self.total_profiles = self.generate_dataset()
# Problem batch
self.batch, self.history = self.generate_batch()
# Default masks
# Will be updated during at each step() call
self.bin_net_mask,\
self.resource_net_mask,\
self.mha_used_mask = self.generate_masks()
def reset(self):
# Reset decoding step
self.decoding_step = self.node_sample_size
if isinstance(self.rewarder, ReducedNodeUsage):
self.is_empty = np.zeros(
(self.batch_size, self.node_sample_size + self.profiles_sample_size, 1), dtype='float32')
# First position is EOS
self.is_empty[:, 0, 0] = self.EOS_BIN[0][0]
self.batch, self.history = self.generate_batch()
self.bin_net_mask,\
self.resource_net_mask,\
self.mha_used_mask = self.generate_masks()
return self.state()
def state(self):
decoder_input = self.batch[:, self.decoding_step]
decoder_input = np.expand_dims(decoder_input, axis=1)
batch = self.batch.copy()
if isinstance(self.rewarder, ReducedNodeUsage):
batch = self.add_is_empty_dim(batch, self.is_empty)
return batch,\
decoder_input,\
self.bin_net_mask.copy(),\
self.mha_used_mask.copy()
def step(self, bin_ids: List[int], feasible_bin_mask):
# Default is not done
isDone = False
req_ids = tf.fill(self.batch_size, self.decoding_step)
batch_size = self.batch.shape[0]
num_elems = self.batch.shape[1]
batch_indices = tf.range(batch_size, dtype='int32')
# Copy the state before updating the values
copy_batch = self.batch.copy()
# Grab the selected nodes and resources
nodes: np.ndarray = self.batch[batch_indices, bin_ids]
reqs: np.ndarray = self.batch[batch_indices, req_ids]
# Compute remaining resources after placing reqs at nodes
remaining_resources = compute_remaining_resources(
nodes, reqs, self.decimal_precision)
# Update the batch state
self.batch[batch_indices, bin_ids] = remaining_resources
# Keep EOS node intact
self.batch[batch_indices, 0] = self.EOS_BIN
# Item taken mask it
self.resource_net_mask[batch_indices, req_ids] = 1
# Update node masks
dominant_resource = tf.reduce_min(remaining_resources, axis=-1)
is_full = tf.cast(tf.equal(dominant_resource, 0), dtype='float32')
# Mask full nodes/bins
self.bin_net_mask[batch_indices, bin_ids] = is_full
self.bin_net_mask[:, 0] = 0 # EOS is always available
# Update the MHA masks
self.mha_used_mask[batch_indices, :, :, req_ids] = 1
if self.mask_nodes_in_mha:
self.mha_used_mask[batch_indices, :, :, bin_ids] = tf.reshape(
is_full, (self.batch_size, 1, 1)
)
# EOS is always available
self.mha_used_mask[batch_indices, :, :, 0] = 0
if np.all(self.resource_net_mask == 1):
isDone = True
# Compute rewards
rewards = self.rewarder.compute_reward(
self.batch, # Already updated values of nodes, i.e., after insertion
copy_batch, # Original values of nodes, i.e., before insertion
self.node_sample_size,
nodes,
reqs,
feasible_bin_mask,
bin_ids,
self.is_empty
)
rewards = tf.reshape(rewards, (batch_size, 1))
#else:
# rewards = tf.zeros((batch_size, 1), dtype='float32')
info = {
'bin_net_mask': self.bin_net_mask.copy(),
'resource_net_mask': self.resource_net_mask.copy(),
'mha_used_mask': self.mha_used_mask.copy(),
# 'num_resource_to_place': self.num_profiles
}
if self.gather_stats:
self.place_reqs(bin_ids, req_ids, reqs)
# Pick next decoder_input
self.decoding_step += 1
if self.decoding_step < self.node_sample_size + self.profiles_sample_size:
decoder_input = self.batch[:, self.decoding_step]
decoder_input = np.expand_dims(decoder_input, axis=1)
else:
# We are done. No need to generate decoder input
decoder_input = np.array([None])
batch = self.batch.copy()
if isinstance(self.rewarder, ReducedNodeUsage):
batch = self.add_is_empty_dim(batch, self.is_empty)
return batch, decoder_input, rewards, isDone, info
def generate_dataset(self):
profiles = tf.random.uniform(
(self.num_profiles, self.num_features),
minval=self.req_min_val,
maxval=self.req_max_val,
dtype='int32',
seed=self.seed_value
) / self.normalization_factor
return tf.cast(profiles, dtype="float32")
def generate_batch(self):
history = []
elem_size = self.node_sample_size + self.profiles_sample_size
batch: np.ndarray = np.zeros(
(self.batch_size, elem_size, self.num_features),
dtype="float32"
)
# Generate nodes states
nodes = tf.random.uniform(
(self.batch_size, self.node_sample_size, self.num_features),
minval=self.node_min_val,
maxval=self.node_max_val,
dtype="int32",
seed=self.seed_value
) / self.normalization_factor
batch[:, :self.node_sample_size, :] = tf.cast(nodes, dtype="float32")
# Replace first position with EOS node
batch[:, 0, :] = self.EOS_BIN
if self.generate_request_on_the_fly:
# Generate reqs
reqs = tf.random.uniform(
(self.batch_size, self.profiles_sample_size, self.num_features),
minval=self.req_min_val,
maxval=self.req_max_val,
dtype="int32",
seed=self.seed_value
) / self.normalization_factor
batch[:, self.node_sample_size:, :] = tf.cast(reqs, dtype="float32")
else:
# Sample profiles and add them to batch instances
for index in range(self.batch_size):
shuffled_profiles = tf.random.shuffle(self.total_profiles)
batch[index, self.node_sample_size:, :] = shuffled_profiles[:self.profiles_sample_size]
# Create node instances that will gather stats
if self.gather_stats:
history = self.build_history(batch)
return batch, history
def generate_masks(self):
elem_size = self.node_sample_size + self.profiles_sample_size
# Represents positions marked as "0" where resource Ptr Net can point
profiles_net_mask = np.zeros((self.batch_size, elem_size), dtype='float32')
# Represents positions marked as "0" where bin Ptr Net can point
nodes_net_mask = np.ones(
(self.batch_size, elem_size), dtype='float32')
# Default mask for resources
#for batch_id in range(self.batch_size):
# for i in range(self.node_sample_size):
# profiles_net_mask[batch_id, i] = 1
profiles_net_mask[:, :self.node_sample_size] = 1
# Default mask for bin
nodes_net_mask = nodes_net_mask - profiles_net_mask
# For Transformer's multi head attention
mha_used_mask = np.zeros_like(profiles_net_mask)
mha_used_mask = mha_used_mask[:, np.newaxis, np.newaxis, :]
return nodes_net_mask, profiles_net_mask, mha_used_mask
def sample_action(self):
batch_indices = tf.range(self.batch.shape[0], dtype='int32')
resource_ids = tf.fill(self.batch_size, self.decoding_step)
# Decode the resources
decoded_resources = self.batch[batch_indices, resource_ids]
decoded_resources = np.expand_dims(decoded_resources, axis=1)
bins_mask = self.build_feasible_mask(self.batch,
decoded_resources,
self.bin_net_mask
)
bins_probs = np.random.uniform(size=self.bin_net_mask.shape)
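        # Subtract a large penalty from masked (infeasible) bins so softmax assigns them ~zero probability.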
bins_probs = tf.nn.softmax(bins_probs - (bins_mask*10e6), axis=-1)
dist_bin = tfp.distributions.Categorical(probs = bins_probs)
bin_ids = dist_bin.sample()
return bin_ids, bins_mask
def add_stats_to_agent_config(self, agent_config: dict):
agent_config['num_resources'] = self.profiles_sample_size
agent_config['num_bins'] = self.node_sample_size
agent_config['tensor_size'] = self.node_sample_size + self.profiles_sample_size
agent_config['batch_size'] = self.batch_size
# Init the object
agent_config["encoder_embedding"] = {}
if isinstance(self.rewarder, ReducedNodeUsage):
agent_config["encoder_embedding"]["common"] = False
agent_config["encoder_embedding"]["num_bin_features"] = 4
agent_config["encoder_embedding"]["num_resource_features"] = 3
else:
agent_config["encoder_embedding"]["common"] = True
# If using the same embedding layer these vars are unused
agent_config["encoder_embedding"]["num_bin_features"] = None
agent_config["encoder_embedding"]["num_resource_features"] = None
return agent_config
def set_testing_mode(self,
batch_size,
node_sample_size,
profiles_sample_size,
node_min_val,
node_max_val
) -> None:
self.gather_stats = True
self.batch_size = batch_size
self.node_min_val = node_min_val
self.node_max_val = node_max_val
self.node_sample_size = node_sample_size + 1 # +1 For EOS node
self.profiles_sample_size = profiles_sample_size
def build_history(self, batch):
history = []
for batch_id, instance in enumerate(batch):
nodes = []
for id, bin in enumerate(instance[:self.node_sample_size]):
nodes.append(
History(
batch_id,
id,
bin
)
)
history.append(nodes)
return history
def place_reqs(self, bin_ids: List[int], req_ids: List[int], reqs: np.ndarray):
for batch_index, bin_id in enumerate(bin_ids):
node: History = self.history[batch_index][bin_id]
req_id = req_ids[batch_index]
req = Request(
batch_index,
req_id,
reqs[batch_index]
)
node.insert_req(req)
def build_feasible_mask(self, state, resources, bin_net_mask):
if isinstance(self.rewarder, ReducedNodeUsage):
state = self.remove_is_empty_dim(state)
batch = state.shape[0]
num_elems = state.shape[1]
# Add batch dim to resources
# resource_demands = np.reshape(resources, (batch, 1, self.num_features))
# Tile to match the num elems
resource_demands = tf.tile(resources, [1, num_elems, 1])
# Compute remaining resources after placement
# remaining_resources = state - resource_demands
remaining_resources = compute_remaining_resources(
state, resource_demands, self.decimal_precision
)
dominant_resource = tf.reduce_min(remaining_resources, axis=-1)
# Ensure that it's greater that 0
# i.e., that node is not overloaded
after_place = tf.less(dominant_resource, 0)
after_place = tf.cast(after_place, dtype='float32')
# Can't point to resources positions
feasible_mask = tf.maximum(after_place, bin_net_mask)
feasible_mask = feasible_mask.numpy()
assert np.all(dominant_resource*(1-feasible_mask) >= 0), 'Masking Scheme Is Wrong!'
# EOS is always available for pointing
feasible_mask[:, 0] = 0
# Return as is. At this moment node can be overloaded
return feasible_mask
def add_is_empty_dim(self, batch, is_empty):
batch = np.concatenate([batch, is_empty], axis=-1)
return round_half_up(batch, 2)
def remove_is_empty_dim(self, batch):
batch = batch[:, :, :self.num_features]
return round_half_up(batch, 2)
def print_history(self, print_details = False) -> None: # pragma: no cover
for batch_id in range(self.batch_size):
print('_________________________________')
node: History
for node in self.history[batch_id]:
node.print(print_details)
print('_________________________________')
return
def store_dataset(self, location) -> None:
np.savetxt(location, self.total_profiles)
def load_dataset(self, location):
self.total_profiles = np.loadtxt(location)
if __name__ == "__main__": # pragma: no cover
env_name = 'ResourceEnvironmentV3'
with open(f"configs/ResourceV3.json") as json_file:
params = json.load(json_file)
env_configs = params['env_config']
env_configs['batch_size'] = 2
env = ResourceEnvironmentV3(env_name, env_configs)
state, dec, bin_net_mask, mha_mask = env.state()
# env.print_history()
feasible_net_mask = env.build_feasible_mask(state, dec, bin_net_mask)
bin_ids = [0,1]
resource_ids = None
next, decoder_input, rewards, isDone, info = env.step(bin_ids, feasible_net_mask)
next, decoder_input, rewards, isDone, info = env.step(bin_ids, feasible_net_mask)
env.reset()
next, decoder_input, rewards, isDone, info = env.step(bin_ids, feasible_net_mask)
next, decoder_input, rewards, isDone, info = env.step(bin_ids, feasible_net_mask)
a = 1
|
[
"tensorflow.maximum",
"tensorflow.reshape",
"numpy.ones",
"sys.path.append",
"numpy.full",
"tensorflow.nn.softmax",
"numpy.zeros_like",
"tensorflow.random.uniform",
"environment.custom.resource_v3.resource.Resource",
"tensorflow_probability.distributions.Categorical",
"environment.custom.resource_v3.misc.utils.compute_remaining_resources",
"tensorflow.less",
"numpy.savetxt",
"tensorflow.cast",
"numpy.loadtxt",
"tensorflow.reduce_min",
"tensorflow.equal",
"environment.custom.resource_v3.node.Node",
"tensorflow.range",
"tensorflow.tile",
"numpy.all",
"numpy.concatenate",
"tensorflow.random.shuffle",
"numpy.random.uniform",
"environment.custom.resource_v3.misc.utils.round_half_up",
"json.load",
"environment.custom.resource_v3.reward.RewardFactory",
"numpy.expand_dims",
"numpy.zeros",
"tensorflow.fill",
"numpy.array"
] |
[((109, 129), 'sys.path.append', 'sys.path.append', (['"""."""'], {}), "('.')\n", (124, 129), False, 'import sys\n'), ((1760, 1823), 'numpy.full', 'np.full', (['(1, self.num_features)', 'self.EOS_CODE'], {'dtype': '"""float32"""'}), "((1, self.num_features), self.EOS_CODE, dtype='float32')\n", (1767, 1823), True, 'import numpy as np\n'), ((2381, 2424), 'environment.custom.resource_v3.reward.RewardFactory', 'RewardFactory', (["opts['reward']", 'self.EOS_BIN'], {}), "(opts['reward'], self.EOS_BIN)\n", (2394, 2424), False, 'from environment.custom.resource_v3.reward import RewardFactory, ReducedNodeUsage\n'), ((3857, 3894), 'numpy.expand_dims', 'np.expand_dims', (['decoder_input'], {'axis': '(1)'}), '(decoder_input, axis=1)\n', (3871, 3894), True, 'import numpy as np\n'), ((4312, 4356), 'tensorflow.fill', 'tf.fill', (['self.batch_size', 'self.decoding_step'], {}), '(self.batch_size, self.decoding_step)\n', (4319, 4356), True, 'import tensorflow as tf\n'), ((4463, 4498), 'tensorflow.range', 'tf.range', (['batch_size'], {'dtype': '"""int32"""'}), "(batch_size, dtype='int32')\n", (4471, 4498), True, 'import tensorflow as tf\n'), ((4870, 4934), 'environment.custom.resource_v3.misc.utils.compute_remaining_resources', 'compute_remaining_resources', (['nodes', 'reqs', 'self.decimal_precision'], {}), '(nodes, reqs, self.decimal_precision)\n', (4897, 4934), False, 'from environment.custom.resource_v3.misc.utils import compute_remaining_resources, round_half_up\n'), ((5296, 5339), 'tensorflow.reduce_min', 'tf.reduce_min', (['remaining_resources'], {'axis': '(-1)'}), '(remaining_resources, axis=-1)\n', (5309, 5339), True, 'import tensorflow as tf\n'), ((5944, 5979), 'numpy.all', 'np.all', (['(self.resource_net_mask == 1)'], {}), '(self.resource_net_mask == 1)\n', (5950, 5979), True, 'import numpy as np\n'), ((6417, 6453), 'tensorflow.reshape', 'tf.reshape', (['rewards', '(batch_size, 1)'], {}), '(rewards, (batch_size, 1))\n', (6427, 6453), True, 'import tensorflow as tf\n'), ((7830, 7864), 'tensorflow.cast', 'tf.cast', (['profiles'], {'dtype': '"""float32"""'}), "(profiles, dtype='float32')\n", (7837, 7864), True, 'import tensorflow as tf\n'), ((8017, 8091), 'numpy.zeros', 'np.zeros', (['(self.batch_size, elem_size, self.num_features)'], {'dtype': '"""float32"""'}), "((self.batch_size, elem_size, self.num_features), dtype='float32')\n", (8025, 8091), True, 'import numpy as np\n'), ((8488, 8519), 'tensorflow.cast', 'tf.cast', (['nodes'], {'dtype': '"""float32"""'}), "(nodes, dtype='float32')\n", (8495, 8519), True, 'import tensorflow as tf\n'), ((9787, 9842), 'numpy.zeros', 'np.zeros', (['(self.batch_size, elem_size)'], {'dtype': '"""float32"""'}), "((self.batch_size, elem_size), dtype='float32')\n", (9795, 9842), True, 'import numpy as np\n'), ((9941, 9995), 'numpy.ones', 'np.ones', (['(self.batch_size, elem_size)'], {'dtype': '"""float32"""'}), "((self.batch_size, elem_size), dtype='float32')\n", (9948, 9995), True, 'import numpy as np\n'), ((10432, 10464), 'numpy.zeros_like', 'np.zeros_like', (['profiles_net_mask'], {}), '(profiles_net_mask)\n', (10445, 10464), True, 'import numpy as np\n'), ((10657, 10701), 'tensorflow.range', 'tf.range', (['self.batch.shape[0]'], {'dtype': '"""int32"""'}), "(self.batch.shape[0], dtype='int32')\n", (10665, 10701), True, 'import tensorflow as tf\n'), ((10726, 10770), 'tensorflow.fill', 'tf.fill', (['self.batch_size', 'self.decoding_step'], {}), '(self.batch_size, self.decoding_step)\n', (10733, 10770), True, 'import tensorflow as tf\n'), ((10910, 10951), 
'numpy.expand_dims', 'np.expand_dims', (['decoded_resources'], {'axis': '(1)'}), '(decoded_resources, axis=1)\n', (10924, 10951), True, 'import numpy as np\n'), ((11206, 11253), 'numpy.random.uniform', 'np.random.uniform', ([], {'size': 'self.bin_net_mask.shape'}), '(size=self.bin_net_mask.shape)\n', (11223, 11253), True, 'import numpy as np\n'), ((11275, 11334), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['(bins_probs - bins_mask * 10000000.0)'], {'axis': '(-1)'}), '(bins_probs - bins_mask * 10000000.0, axis=-1)\n', (11288, 11334), True, 'import tensorflow as tf\n'), ((11349, 11396), 'tensorflow_probability.distributions.Categorical', 'tfp.distributions.Categorical', ([], {'probs': 'bins_probs'}), '(probs=bins_probs)\n', (11378, 11396), True, 'import tensorflow_probability as tfp\n'), ((14290, 14327), 'tensorflow.tile', 'tf.tile', (['resources', '[1, num_elems, 1]'], {}), '(resources, [1, num_elems, 1])\n', (14297, 14327), True, 'import tensorflow as tf\n'), ((14470, 14546), 'environment.custom.resource_v3.misc.utils.compute_remaining_resources', 'compute_remaining_resources', (['state', 'resource_demands', 'self.decimal_precision'], {}), '(state, resource_demands, self.decimal_precision)\n', (14497, 14546), False, 'from environment.custom.resource_v3.misc.utils import compute_remaining_resources, round_half_up\n'), ((14602, 14645), 'tensorflow.reduce_min', 'tf.reduce_min', (['remaining_resources'], {'axis': '(-1)'}), '(remaining_resources, axis=-1)\n', (14615, 14645), True, 'import tensorflow as tf\n'), ((14763, 14792), 'tensorflow.less', 'tf.less', (['dominant_resource', '(0)'], {}), '(dominant_resource, 0)\n', (14770, 14792), True, 'import tensorflow as tf\n'), ((14815, 14852), 'tensorflow.cast', 'tf.cast', (['after_place'], {'dtype': '"""float32"""'}), "(after_place, dtype='float32')\n", (14822, 14852), True, 'import tensorflow as tf\n'), ((14923, 14960), 'tensorflow.maximum', 'tf.maximum', (['after_place', 'bin_net_mask'], {}), '(after_place, bin_net_mask)\n', (14933, 14960), True, 'import tensorflow as tf\n'), ((15031, 15083), 'numpy.all', 'np.all', (['(dominant_resource * (1 - feasible_mask) >= 0)'], {}), '(dominant_resource * (1 - feasible_mask) >= 0)\n', (15037, 15083), True, 'import numpy as np\n'), ((15346, 15388), 'numpy.concatenate', 'np.concatenate', (['[batch, is_empty]'], {'axis': '(-1)'}), '([batch, is_empty], axis=-1)\n', (15360, 15388), True, 'import numpy as np\n'), ((15404, 15427), 'environment.custom.resource_v3.misc.utils.round_half_up', 'round_half_up', (['batch', '(2)'], {}), '(batch, 2)\n', (15417, 15427), False, 'from environment.custom.resource_v3.misc.utils import compute_remaining_resources, round_half_up\n'), ((15538, 15561), 'environment.custom.resource_v3.misc.utils.round_half_up', 'round_half_up', (['batch', '(2)'], {}), '(batch, 2)\n', (15551, 15561), False, 'from environment.custom.resource_v3.misc.utils import compute_remaining_resources, round_half_up\n'), ((15989, 16030), 'numpy.savetxt', 'np.savetxt', (['location', 'self.total_profiles'], {}), '(location, self.total_profiles)\n', (15999, 16030), True, 'import numpy as np\n'), ((16108, 16128), 'numpy.loadtxt', 'np.loadtxt', (['location'], {}), '(location)\n', (16118, 16128), True, 'import numpy as np\n'), ((16294, 16314), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (16303, 16314), False, 'import json\n'), ((2544, 2647), 'numpy.zeros', 'np.zeros', (['(self.batch_size, self.node_sample_size + self.profiles_sample_size, 1)'], {'dtype': '"""float32"""'}), "((self.batch_size, 
self.node_sample_size + self.\n profiles_sample_size, 1), dtype='float32')\n", (2552, 2647), True, 'import numpy as np\n'), ((3337, 3440), 'numpy.zeros', 'np.zeros', (['(self.batch_size, self.node_sample_size + self.profiles_sample_size, 1)'], {'dtype': '"""float32"""'}), "((self.batch_size, self.node_sample_size + self.\n profiles_sample_size, 1), dtype='float32')\n", (3345, 3440), True, 'import numpy as np\n'), ((5366, 5396), 'tensorflow.equal', 'tf.equal', (['dominant_resource', '(0)'], {}), '(dominant_resource, 0)\n', (5374, 5396), True, 'import tensorflow as tf\n'), ((5759, 5803), 'tensorflow.reshape', 'tf.reshape', (['is_full', '(self.batch_size, 1, 1)'], {}), '(is_full, (self.batch_size, 1, 1))\n', (5769, 5803), True, 'import tensorflow as tf\n'), ((7137, 7174), 'numpy.expand_dims', 'np.expand_dims', (['decoder_input'], {'axis': '(1)'}), '(decoder_input, axis=1)\n', (7151, 7174), True, 'import numpy as np\n'), ((7278, 7294), 'numpy.array', 'np.array', (['[None]'], {}), '([None])\n', (7286, 7294), True, 'import numpy as np\n'), ((7571, 7720), 'tensorflow.random.uniform', 'tf.random.uniform', (['(self.num_profiles, self.num_features)'], {'minval': 'self.req_min_val', 'maxval': 'self.req_max_val', 'dtype': '"""int32"""', 'seed': 'self.seed_value'}), "((self.num_profiles, self.num_features), minval=self.\n req_min_val, maxval=self.req_max_val, dtype='int32', seed=self.seed_value)\n", (7588, 7720), True, 'import tensorflow as tf\n'), ((8175, 8351), 'tensorflow.random.uniform', 'tf.random.uniform', (['(self.batch_size, self.node_sample_size, self.num_features)'], {'minval': 'self.node_min_val', 'maxval': 'self.node_max_val', 'dtype': '"""int32"""', 'seed': 'self.seed_value'}), "((self.batch_size, self.node_sample_size, self.\n num_features), minval=self.node_min_val, maxval=self.node_max_val,\n dtype='int32', seed=self.seed_value)\n", (8192, 8351), True, 'import tensorflow as tf\n'), ((9062, 9092), 'tensorflow.cast', 'tf.cast', (['reqs'], {'dtype': '"""float32"""'}), "(reqs, dtype='float32')\n", (9069, 9092), True, 'import tensorflow as tf\n'), ((13709, 13756), 'environment.custom.resource_v3.resource.Resource', 'Request', (['batch_index', 'req_id', 'reqs[batch_index]'], {}), '(batch_index, req_id, reqs[batch_index])\n', (13716, 13756), True, 'from environment.custom.resource_v3.resource import Resource as Request\n'), ((8707, 8886), 'tensorflow.random.uniform', 'tf.random.uniform', (['(self.batch_size, self.profiles_sample_size, self.num_features)'], {'minval': 'self.req_min_val', 'maxval': 'self.req_max_val', 'dtype': '"""int32"""', 'seed': 'self.seed_value'}), "((self.batch_size, self.profiles_sample_size, self.\n num_features), minval=self.req_min_val, maxval=self.req_max_val, dtype=\n 'int32', seed=self.seed_value)\n", (8724, 8886), True, 'import tensorflow as tf\n'), ((9254, 9292), 'tensorflow.random.shuffle', 'tf.random.shuffle', (['self.total_profiles'], {}), '(self.total_profiles)\n', (9271, 9292), True, 'import tensorflow as tf\n'), ((13215, 13241), 'environment.custom.resource_v3.node.Node', 'History', (['batch_id', 'id', 'bin'], {}), '(batch_id, id, bin)\n', (13222, 13241), True, 'from environment.custom.resource_v3.node import Node as History\n')]
|
# Copyright 2021 Rosalind Franklin Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import subprocess
import itertools
import pandas as pd
import yaml
from tqdm import tqdm
from icecream import ic # for debugging
from . import metadata as mdMod
class Motioncorr:
"""
Class encapsulating a Motioncorr object
"""
def __init__(self, project_name, mc2_params, md_in, logger):
"""
Initialise Motioncorr object
ARGS:
project_name (str) :: Name of current project
mc2_params (Params) :: Parameters read in from yaml file
md_in (Metadata) :: Metadata containing information of images
logger (Logger) :: Logger for recording events
"""
self.proj_name = project_name
self.logObj = logger
self.log = []
self.prmObj = mc2_params
self.params = self.prmObj.params
self._process_list = self.params['System']['process_list']
self.meta = pd.DataFrame(md_in.metadata)
self.meta = self.meta[self.meta['ts'].isin(self._process_list)]
self._set_output_path()
# Get index of available GPU
self.use_gpu = self._get_gpu_nvidia_smi()
# Set GPU index as new column in metadata
self.meta = self.meta.assign(gpu=self.use_gpu[0])
self.no_processes = False
self._check_processed_images()
# Check if output folder exists, create if not
if not os.path.isdir(self.params['System']['output_path']):
subprocess.run(['mkdir', self.params['System']['output_path']],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding='ascii')
def _check_processed_images(self):
"""
Method to check images which have already been processed before
"""
# Create new empty internal output metadata if no record exists
if not os.path.isfile(self.proj_name + '_mc2_mdout.yaml'):
self.meta_out = pd.DataFrame(columns=self.meta.columns)
# Read in serialised metadata and turn into DataFrame if record exists
else:
_meta_record = mdMod.read_md_yaml(project_name=self.proj_name,
job_type='motioncorr',
filename=self.proj_name + '_mc2_mdout.yaml')
self.meta_out = pd.DataFrame(_meta_record.metadata)
# Compare output metadata and output folder
# If a file (in specified TS) is in record but missing, remove from record
if len(self.meta_out) > 0:
self._missing = self.meta_out.loc[~self.meta_out['output'].apply(lambda x: os.path.isfile(x))]
self._missing_specified = pd.DataFrame(columns=self.meta.columns)
for curr_ts in self.params['System']['process_list']:
self._missing_specified = self._missing_specified.append(self._missing[self._missing['ts']==curr_ts],
ignore_index=True,
)
self._merged = self.meta_out.merge(self._missing_specified, how='left', indicator=True)
self.meta_out = self.meta_out[self._merged['_merge']=='left_only']
if len(self._missing_specified) > 0:
self.logObj(f"Info: {len(self._missing_specified)} images in record missing in folder. Will be added back for processing.")
# Drop the items in input metadata if they are in the output record
_ignored = self.meta[self.meta.output.isin(self.meta_out.output)]
if len(_ignored) > 0 and len(_ignored) < len(self.meta):
self.logObj(f"Info: {len(_ignored)} images had been processed and will be omitted.")
elif len(_ignored) == len(self.meta):
self.logObj(f"Info: All specified images had been processed. Nothing will be done.")
self.no_processes = True
self.meta = self.meta[~self.meta.output.isin(self.meta_out.output)]
@staticmethod
def _get_gpu_nvidia_smi():
"""
Subroutine to get visible GPU ID(s) from nvidia-smi
"""
nv_uuid = subprocess.run(['nvidia-smi', '--list-gpus'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding='ascii')
nv_processes = subprocess.run(['nvidia-smi', '--query-compute-apps=gpu_uuid', '--format=csv'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding='ascii')
# catch the visible GPUs
if nv_uuid.returncode != 0 or nv_processes.returncode != 0:
raise AssertionError(f"Error in Ot2Rec.Motioncorr._get_gpu_from_nvidia_smi: "
f"nvidia-smi returned an error: {nv_uuid.stderr}")
else:
nv_uuid = nv_uuid.stdout.strip('\n').split('\n')
nv_processes = subprocess.run(['nvidia-smi', '--query-compute-apps=gpu_uuid', '--format=csv'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding='ascii')
visible_gpu = []
for gpu in nv_uuid:
id_idx = gpu.find('GPU ')
uuid_idx = gpu.find('UUID')
gpu_id = gpu[id_idx + 4:id_idx + 6].strip(' ').strip(':')
gpu_uuid = gpu[uuid_idx + 5:-1].strip(' ')
# discard the GPU hosting a process
if gpu_uuid not in nv_processes.stdout.split('\n'):
visible_gpu.append(gpu_id)
if visible_gpu:
return visible_gpu
else:
raise ValueError(f'Error in metadata._get_gpu_from_nvidia_smi: {len(nv_uuid)} GPU detected, but none of them is free.')
def _set_output_path(self):
"""
Subroutine to set output path for motioncorr'd images
"""
self.meta['output'] = self.meta.apply(
lambda row: f"{self.params['System']['output_path']}"
f"{self.params['System']['output_prefix']}_{row['ts']:03}_{row['angles']}.mrc", axis=1)
def _get_command(self, image):
"""
Subroutine to get commands for running MotionCor2
ARGS:
image (tuple): metadata for current image (in_path, out_path, #GPU)
RETURNS:
list
"""
in_path, out_path, gpu_number = image
if self.params['System']['source_TIFF']:
image_type = 'InTiff'
else:
image_type = 'InMrc'
# Set FtBin parameter for MC2
ftbin = self.params['MC2']['desired_pixel_size'] / self.params['MC2']['pixel_size']
return [self.params['MC2']['MC2_path'],
f'-{image_type}', in_path,
'-OutMrc', out_path,
'-Gpu', gpu_number,
'-GpuMemUsage', str(self.params['System']['gpu_memory_usage']),
'-Gain', self.params['MC2']['gain_reference'],
'-Tol', str(self.params['MC2']['tolerance']),
'-Patch', ','.join(str(i) for i in self.params['MC2']['patch_size']),
'-Iter', str(self.params['MC2']['max_iterations']),
'-Group', '1' if self.params['MC2']['use_subgroups'] else '0',
'-FtBin', str(ftbin),
'-PixSize', str(self.params['MC2']['pixel_size']),
'-Throw', str(self.params['MC2']['discard_frames_top']),
'-Trunc', str(self.params['MC2']['discard_frames_bottom']),
]
@staticmethod
def _yield_chunks(iterable, size):
"""
Subroutine to get chunks for GPU processing
"""
iterator = iter(iterable)
for first in iterator:
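            # Lazily group the iterator into chunks of `size` items without materialising the full list.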
yield itertools.chain([first], itertools.islice(iterator, size - 1))
def run_mc2(self):
"""
Subroutine to run MotionCor2
"""
# Process tilt-series one at a time
ts_list = self.params['System']['process_list']
tqdm_iter = tqdm(ts_list, ncols=100)
for curr_ts in tqdm_iter:
tqdm_iter.set_description(f"Processing TS {curr_ts}...")
self._curr_meta = self.meta.loc[self.meta.ts==curr_ts]
while len(self._curr_meta) > 0:
# Get commands to run MC2
mc_commands = [self._get_command((_in, _out, _gpu))
for _in, _out, _gpu in zip(self._curr_meta.file_paths, self._curr_meta.output, self._curr_meta.gpu)]
jobs = (subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for cmd in mc_commands)
# run subprocess by chunks of GPU
chunks = self._yield_chunks(jobs, len(self.use_gpu) * self.params['System']['jobs_per_gpu'])
for job in chunks:
# from the moment the next line is read, every process in job are spawned
for process in [i for i in job]:
self.log.append(process.communicate()[0].decode('UTF-8'))
self.update_mc2_metadata()
self.export_metadata()
def update_mc2_metadata(self):
"""
Subroutine to update metadata after one set of runs
"""
# Search for files with output paths specified in the metadata
# If the files don't exist, keep the line in the input metadata
# If they do, move them to the output metadata
self.meta_out = self.meta_out.append(self.meta.loc[self.meta['output'].apply(lambda x: os.path.isfile(x))],
ignore_index=True)
self.meta = self.meta.loc[~self.meta['output'].apply(lambda x: os.path.isfile(x))]
self._curr_meta = self._curr_meta.loc[~self._curr_meta['output'].apply(lambda x: os.path.isfile(x))]
def export_metadata(self):
"""
Method to serialise output metadata, export as yaml
"""
yaml_file = self.proj_name + '_mc2_mdout.yaml'
with open(yaml_file, 'w') as f:
yaml.dump(self.meta_out.to_dict(), f, indent=4, sort_keys=False)
|
[
"pandas.DataFrame",
"subprocess.run",
"tqdm.tqdm",
"subprocess.Popen",
"os.path.isdir",
"os.path.isfile",
"itertools.islice"
] |
[((1506, 1534), 'pandas.DataFrame', 'pd.DataFrame', (['md_in.metadata'], {}), '(md_in.metadata)\n', (1518, 1534), True, 'import pandas as pd\n'), ((4814, 4929), 'subprocess.run', 'subprocess.run', (["['nvidia-smi', '--list-gpus']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'encoding': '"""ascii"""'}), "(['nvidia-smi', '--list-gpus'], stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, encoding='ascii')\n", (4828, 4929), False, 'import subprocess\n'), ((5048, 5201), 'subprocess.run', 'subprocess.run', (["['nvidia-smi', '--query-compute-apps=gpu_uuid', '--format=csv']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'encoding': '"""ascii"""'}), "(['nvidia-smi', '--query-compute-apps=gpu_uuid',\n '--format=csv'], stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n encoding='ascii')\n", (5062, 5201), False, 'import subprocess\n'), ((8868, 8892), 'tqdm.tqdm', 'tqdm', (['ts_list'], {'ncols': '(100)'}), '(ts_list, ncols=100)\n', (8872, 8892), False, 'from tqdm import tqdm\n'), ((1988, 2039), 'os.path.isdir', 'os.path.isdir', (["self.params['System']['output_path']"], {}), "(self.params['System']['output_path'])\n", (2001, 2039), False, 'import os\n'), ((2053, 2187), 'subprocess.run', 'subprocess.run', (["['mkdir', self.params['System']['output_path']]"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'encoding': '"""ascii"""'}), "(['mkdir', self.params['System']['output_path']], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, encoding='ascii')\n", (2067, 2187), False, 'import subprocess\n'), ((2488, 2538), 'os.path.isfile', 'os.path.isfile', (["(self.proj_name + '_mc2_mdout.yaml')"], {}), "(self.proj_name + '_mc2_mdout.yaml')\n", (2502, 2538), False, 'import os\n'), ((2568, 2607), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'self.meta.columns'}), '(columns=self.meta.columns)\n', (2580, 2607), True, 'import pandas as pd\n'), ((2977, 3012), 'pandas.DataFrame', 'pd.DataFrame', (['_meta_record.metadata'], {}), '(_meta_record.metadata)\n', (2989, 3012), True, 'import pandas as pd\n'), ((3329, 3368), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'self.meta.columns'}), '(columns=self.meta.columns)\n', (3341, 3368), True, 'import pandas as pd\n'), ((5686, 5839), 'subprocess.run', 'subprocess.run', (["['nvidia-smi', '--query-compute-apps=gpu_uuid', '--format=csv']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'encoding': '"""ascii"""'}), "(['nvidia-smi', '--query-compute-apps=gpu_uuid',\n '--format=csv'], stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n encoding='ascii')\n", (5700, 5839), False, 'import subprocess\n'), ((8623, 8659), 'itertools.islice', 'itertools.islice', (['iterator', '(size - 1)'], {}), '(iterator, size - 1)\n', (8639, 8659), False, 'import itertools\n'), ((9382, 9453), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT'}), '(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n', (9398, 9453), False, 'import subprocess\n'), ((10437, 10454), 'os.path.isfile', 'os.path.isfile', (['x'], {}), '(x)\n', (10451, 10454), False, 'import os\n'), ((10593, 10610), 'os.path.isfile', 'os.path.isfile', (['x'], {}), '(x)\n', (10607, 10610), False, 'import os\n'), ((10702, 10719), 'os.path.isfile', 'os.path.isfile', (['x'], {}), '(x)\n', (10716, 10719), False, 'import os\n'), ((3271, 3288), 'os.path.isfile', 'os.path.isfile', (['x'], {}), '(x)\n', (3285, 3288), False, 'import os\n')]
|
import tensorflow as tf
from Globals import *
from BaseNet import *
class GlimpseNet(BaseNet):
def __init__(self):
self.imageSize = constants['imageSize']
self.imageChannel = constants['imageChannel']
self.numGlimpseResolution = constants['numGlimpseResolution']
self.glimpseOutputSize = constants['glimpseOutputSize']
self.glimpseDim = self.imageChannel * self.glimpseOutputSize * \
self.glimpseOutputSize * self.numGlimpseResolution
# linear layer processing retina encoding
with tf.variable_scope('g0') as scope:
self.wg0 = self.variableWithWeightDecay(
'weights', [self.glimpseDim, 128], 1e-4, 0.0)
self.bg0 = self.variableOnGpu(
'biases', [128], tf.constant_initializer(0.0))
# linear layer processing location
with tf.variable_scope('g1') as scope:
self.wg1 = self.variableWithWeightDecay(
'weights', [2, 128], 1e-4, 0.0)
self.bg1 = self.variableOnGpu(
'biases', [128], tf.constant_initializer(0.0))
# linear layer processing previouse two linear layers
with tf.variable_scope('g2') as scope:
self.wg2 = self.variableWithWeightDecay(
'weights', [256, 256], 1e-4, 0.0)
self.bg2 = self.variableOnGpu(
'biases', [256], tf.constant_initializer(0.0))
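    # forward() below encodes the glimpse patches and the location separately,
    # concatenates the two 128-d ReLU features, and maps the result through the
    # final linear+ReLU layer to the 256-d glimpse feature g2.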
def forward(self, glimpses, locations):
glimpses = tf.reshape(glimpses, [-1, self.glimpseDim])
out = tf.matmul(glimpses, self.wg0)
bias = tf.nn.bias_add(out, self.bg0)
self.g0 = tf.nn.relu(bias)
# self.activationSummary(self.g0)
out = tf.matmul(locations, self.wg1)
bias = tf.nn.bias_add(out, self.bg1)
self.g1 = tf.nn.relu(bias)
# self.activationSummary(self.g1)
combined = tf.concat([self.g0, self.g1], axis=1)
out = tf.matmul(combined, self.wg2)
bias = tf.nn.bias_add(out, self.bg2)
self.g2 = tf.nn.relu(bias)
# self.activationSummary(self.g2)
return self.g2
|
[
"tensorflow.nn.relu",
"tensorflow.constant_initializer",
"tensorflow.reshape",
"tensorflow.concat",
"tensorflow.variable_scope",
"tensorflow.matmul",
"tensorflow.nn.bias_add"
] |
[((1494, 1537), 'tensorflow.reshape', 'tf.reshape', (['glimpses', '[-1, self.glimpseDim]'], {}), '(glimpses, [-1, self.glimpseDim])\n', (1504, 1537), True, 'import tensorflow as tf\n'), ((1552, 1581), 'tensorflow.matmul', 'tf.matmul', (['glimpses', 'self.wg0'], {}), '(glimpses, self.wg0)\n', (1561, 1581), True, 'import tensorflow as tf\n'), ((1597, 1626), 'tensorflow.nn.bias_add', 'tf.nn.bias_add', (['out', 'self.bg0'], {}), '(out, self.bg0)\n', (1611, 1626), True, 'import tensorflow as tf\n'), ((1645, 1661), 'tensorflow.nn.relu', 'tf.nn.relu', (['bias'], {}), '(bias)\n', (1655, 1661), True, 'import tensorflow as tf\n'), ((1719, 1749), 'tensorflow.matmul', 'tf.matmul', (['locations', 'self.wg1'], {}), '(locations, self.wg1)\n', (1728, 1749), True, 'import tensorflow as tf\n'), ((1765, 1794), 'tensorflow.nn.bias_add', 'tf.nn.bias_add', (['out', 'self.bg1'], {}), '(out, self.bg1)\n', (1779, 1794), True, 'import tensorflow as tf\n'), ((1813, 1829), 'tensorflow.nn.relu', 'tf.nn.relu', (['bias'], {}), '(bias)\n', (1823, 1829), True, 'import tensorflow as tf\n'), ((1892, 1929), 'tensorflow.concat', 'tf.concat', (['[self.g0, self.g1]'], {'axis': '(1)'}), '([self.g0, self.g1], axis=1)\n', (1901, 1929), True, 'import tensorflow as tf\n'), ((1944, 1973), 'tensorflow.matmul', 'tf.matmul', (['combined', 'self.wg2'], {}), '(combined, self.wg2)\n', (1953, 1973), True, 'import tensorflow as tf\n'), ((1989, 2018), 'tensorflow.nn.bias_add', 'tf.nn.bias_add', (['out', 'self.bg2'], {}), '(out, self.bg2)\n', (2003, 2018), True, 'import tensorflow as tf\n'), ((2037, 2053), 'tensorflow.nn.relu', 'tf.nn.relu', (['bias'], {}), '(bias)\n', (2047, 2053), True, 'import tensorflow as tf\n'), ((558, 581), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""g0"""'], {}), "('g0')\n", (575, 581), True, 'import tensorflow as tf\n'), ((870, 893), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""g1"""'], {}), "('g1')\n", (887, 893), True, 'import tensorflow as tf\n'), ((1187, 1210), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""g2"""'], {}), "('g2')\n", (1204, 1210), True, 'import tensorflow as tf\n'), ((783, 811), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['(0.0)'], {}), '(0.0)\n', (806, 811), True, 'import tensorflow as tf\n'), ((1081, 1109), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['(0.0)'], {}), '(0.0)\n', (1104, 1109), True, 'import tensorflow as tf\n'), ((1400, 1428), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['(0.0)'], {}), '(0.0)\n', (1423, 1428), True, 'import tensorflow as tf\n')]
|
# Generated by Django 4.0.3 on 2022-03-20 10:33
import cloudinary.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('app', '0004_post_slug'),
]
operations = [
migrations.AlterModelOptions(
name='business',
options={'ordering': ['-created_at']},
),
migrations.AlterModelOptions(
name='neighbourhood',
options={'ordering': ['-created_at']},
),
migrations.AlterModelOptions(
name='post',
options={'ordering': ['-created_at']},
),
migrations.AddField(
model_name='business',
name='image',
field=cloudinary.models.CloudinaryField(blank=True, max_length=255, null=True, verbose_name='image'),
),
migrations.AlterField(
model_name='business',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='businesses', to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='post',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to=settings.AUTH_USER_MODEL),
),
]
|
[
"django.db.models.ForeignKey",
"django.db.migrations.AlterModelOptions"
] |
[((313, 402), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""business"""', 'options': "{'ordering': ['-created_at']}"}), "(name='business', options={'ordering': [\n '-created_at']})\n", (341, 402), False, 'from django.db import migrations, models\n'), ((442, 536), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""neighbourhood"""', 'options': "{'ordering': ['-created_at']}"}), "(name='neighbourhood', options={'ordering': [\n '-created_at']})\n", (470, 536), False, 'from django.db import migrations, models\n'), ((576, 661), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""post"""', 'options': "{'ordering': ['-created_at']}"}), "(name='post', options={'ordering': ['-created_at']}\n )\n", (604, 661), False, 'from django.db import migrations, models\n'), ((1017, 1140), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""businesses"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='businesses', to=settings.AUTH_USER_MODEL)\n", (1034, 1140), False, 'from django.db import migrations, models\n'), ((1253, 1371), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""posts"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='posts', to=settings.AUTH_USER_MODEL)\n", (1270, 1371), False, 'from django.db import migrations, models\n')]
|
"""
Module for data exploration for ARIMA modeling.
This module contains the back-end exploration of river run flow rate
data and exogenous predictors to determine the best way to create a
time-series model of the data. Note that since this module was only used
once (i.e. is not called in order to create ongoing predictions),
it is not accompanied by any unit testing.
Functions:
daily_avg: takes time series with measurements on different
timeframes and creates a dataframe with daily averages for flow
rate and exogenous predictors
test_stationarity: implements Dickey-Fuller test and rolling average
plots to check for stationarity of the time series
plot_autocorrs: creates plots of autocorrelation function and partial
autocorrelation function to help determine p and q parameters for ARIMA
model
test_model: runs stationarity tests and acf/pcf tests and then
creates ARIMA model for one run and plots results
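
    Typical usage (run id is hypothetical):
        test_model('some-run-id')  # fits an ARIMA model for one run and plots the fit and forecast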
"""
import datetime
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from statsmodels.tsa.arima_model import ARIMA
from statsmodels.tsa.stattools import acf, pacf
from statsmodels.tsa.stattools import adfuller
from statsmodels.tsa.stattools import arma_order_select_ic
from riverrunner.repository import Repository
REPO = Repository()
def daily_avg(time_series):
"""Creates dataframe needed for modelling
Takes time series with measurements on different timeframes and creates a
dataframe with daily averages for flow rate and exogenous predictors.
Args:
time_series: dataframe with metrics for one run_id, assumes output
from get_measurements function
Returns:
DataFrame: containing daily measurements
"""
precip = time_series[time_series.metric_id == '00003']
precip['date_time'] = pd.to_datetime(precip['date_time'], utc=True)
precip.index = precip['date_time']
precip_daily = precip.resample('D').sum()
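    # Precipitation ('00003') is accumulated per day, while flow ('00060') and
    # temperature ('00001') below are averaged per day.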
flow = time_series[time_series.metric_id == '00060']
flow['date_time'] = pd.to_datetime(flow['date_time'], utc=True)
flow.index = flow['date_time']
flow_daily = flow.resample('D').mean()
temp = time_series[time_series.metric_id == '00001']
temp['date_time'] = pd.to_datetime(temp['date_time'], utc=True)
temp.index = temp['date_time']
temp_daily = temp.resample('D').mean()
time_series_daily = temp_daily.merge(flow_daily, how='inner',
left_index=True, right_index=True)\
.merge(precip_daily, how='inner', left_index=True, right_index=True)
time_series_daily.columns = ['temp', 'flow', 'precip']
return time_series_daily
def test_stationarity(time_series):
"""Visual and statistical tests to test for stationarity of flow rate.
Performs Dickey-Fuller statistical test for stationarity at 0.05 level of
significance and plots 12-month rolling mean and standard deviation
against raw data for visual review of stationarity.
Args:
        time_series: dataframe containing flow rate and exogenous predictor data
for one river run (assumes output of daily_avg function).
Returns:
bool: True if data is stationary according to Dickey-Fuller test at
0.05 level of significance, False otherwise.
plot: containing rolling mean and standard deviation against raw data
time series.
"""
# Determine rolling statistics
rollmean = time_series.rolling(window=365, center=False).mean()
rollstd = time_series.rolling(window=365, center=False).std()
# Plot rolling statistics
plt.plot(time_series, color='blue', label='Raw Data')
plt.plot(rollmean, color='red', label='Rolling Mean')
plt.plot(rollstd, color='orange', label='Rolling Standard Deviation')
plt.title('Rolling Statistics')
plt.legend()
plt.show()
# Dickey-Fuller test
dftest = adfuller(time_series, autolag='BIC')
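    # adfuller returns (test statistic, p-value, n lags, n obs, critical values, icbest);
    # a statistic below the 1% critical value rejects the unit-root null, i.e. the series is stationary.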
return bool(dftest[0] < dftest[4]['1%'])
def plot_autocorrs(time_series):
"""
Creates plots of auto-correlation function (acf) and partial
    auto-correlation function (pacf) to help determine p and q parameters
for ARIMA model.
Args:
        time_series: dataframe containing flow rate and exogenous predictor
data for one river run (assumes output of daily_avg function).
Returns:
plots: containing acf and pacf of flow rate against number of lags.
"""
lag_acf = acf(time_series['flow'], nlags=400)
lag_pacf = pacf(time_series['flow'], method='ols')
plt.subplot(121)
plt.plot(lag_acf)
plt.axhline(y=0, linestyle='--', color='gray')
plt.axhline(y=-1.96 / np.sqrt(len(time_series)),
linestyle='--', color='gray')
plt.axhline(y=1.96 / np.sqrt(len(time_series)),
linestyle='--', color='gray')
plt.title('ACF')
plt.subplot(122)
plt.plot(lag_pacf)
plt.axhline(y=0, linestyle='--', color='gray')
plt.axhline(y=-1.96 / np.sqrt(len(time_series)),
linestyle='--', color='gray')
plt.axhline(y=1.96 / np.sqrt(len(time_series)),
linestyle='--', color='gray')
plt.title('PACF')
plt.tight_layout()
def test_model(run_id):
"""Function to test model for one run
Args:
run_id: run for which to test model
Returns: plots showing model results
"""
# Retrieve data for one run to model
start = datetime.datetime(2014, 5, 18)
end = datetime.datetime(2018, 5, 17)
test_measures = REPO.get_measurements(run_id=run_id,
start_date=start,
end_date=end)
# Average data and create train/test split
measures_daily = daily_avg(test_measures)
train_measures_daily = measures_daily[:-6]
test_measures_daily = measures_daily[-7:]
train_measures_daily = train_measures_daily.dropna()
# Check if data is stationary
test_stationarity(train_measures_daily['flow'])
# Determine p and q parameters for ARIMA model
params = arma_order_select_ic(train_measures_daily['flow'], ic='aic')
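    # aic_min_order is the (p, q) pair that minimises AIC; the differencing order d is kept at 0 below.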
# Build and fit model
mod = ARIMA(train_measures_daily['flow'],
order=(params.aic_min_order[0], 0, params.aic_min_order[1]),
exog=train_measures_daily[['temp', 'precip']]).fit()
test_measures_daily.loc[:, 'prediction'] = \
mod.forecast(steps=7, exog=test_measures_daily[['temp', 'precip']])[0]
train_measures_daily.loc[:, 'model'] = mod.predict()
# Plot results
plt.plot(test_measures_daily[['flow', 'prediction']])
plt.plot(train_measures_daily[['flow', 'model']]['2015-07':])
plt.legend(['Test values', 'Prediction', 'Train values', 'Model'])
|
[
"matplotlib.pyplot.title",
"riverrunner.repository.Repository",
"matplotlib.pyplot.subplot",
"statsmodels.tsa.stattools.adfuller",
"matplotlib.pyplot.show",
"matplotlib.pyplot.axhline",
"matplotlib.pyplot.plot",
"statsmodels.tsa.arima_model.ARIMA",
"statsmodels.tsa.stattools.pacf",
"matplotlib.pyplot.legend",
"statsmodels.tsa.stattools.arma_order_select_ic",
"datetime.datetime",
"pandas.to_datetime",
"statsmodels.tsa.stattools.acf",
"matplotlib.pyplot.tight_layout"
] |
[((1308, 1320), 'riverrunner.repository.Repository', 'Repository', ([], {}), '()\n', (1318, 1320), False, 'from riverrunner.repository import Repository\n'), ((1831, 1876), 'pandas.to_datetime', 'pd.to_datetime', (["precip['date_time']"], {'utc': '(True)'}), "(precip['date_time'], utc=True)\n", (1845, 1876), True, 'import pandas as pd\n'), ((2044, 2087), 'pandas.to_datetime', 'pd.to_datetime', (["flow['date_time']"], {'utc': '(True)'}), "(flow['date_time'], utc=True)\n", (2058, 2087), True, 'import pandas as pd\n'), ((2248, 2291), 'pandas.to_datetime', 'pd.to_datetime', (["temp['date_time']"], {'utc': '(True)'}), "(temp['date_time'], utc=True)\n", (2262, 2291), True, 'import pandas as pd\n'), ((3611, 3664), 'matplotlib.pyplot.plot', 'plt.plot', (['time_series'], {'color': '"""blue"""', 'label': '"""Raw Data"""'}), "(time_series, color='blue', label='Raw Data')\n", (3619, 3664), True, 'import matplotlib.pyplot as plt\n'), ((3669, 3722), 'matplotlib.pyplot.plot', 'plt.plot', (['rollmean'], {'color': '"""red"""', 'label': '"""Rolling Mean"""'}), "(rollmean, color='red', label='Rolling Mean')\n", (3677, 3722), True, 'import matplotlib.pyplot as plt\n'), ((3727, 3796), 'matplotlib.pyplot.plot', 'plt.plot', (['rollstd'], {'color': '"""orange"""', 'label': '"""Rolling Standard Deviation"""'}), "(rollstd, color='orange', label='Rolling Standard Deviation')\n", (3735, 3796), True, 'import matplotlib.pyplot as plt\n'), ((3801, 3832), 'matplotlib.pyplot.title', 'plt.title', (['"""Rolling Statistics"""'], {}), "('Rolling Statistics')\n", (3810, 3832), True, 'import matplotlib.pyplot as plt\n'), ((3837, 3849), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3847, 3849), True, 'import matplotlib.pyplot as plt\n'), ((3854, 3864), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3862, 3864), True, 'import matplotlib.pyplot as plt\n'), ((3904, 3940), 'statsmodels.tsa.stattools.adfuller', 'adfuller', (['time_series'], {'autolag': '"""BIC"""'}), "(time_series, autolag='BIC')\n", (3912, 3940), False, 'from statsmodels.tsa.stattools import adfuller\n'), ((4459, 4494), 'statsmodels.tsa.stattools.acf', 'acf', (["time_series['flow']"], {'nlags': '(400)'}), "(time_series['flow'], nlags=400)\n", (4462, 4494), False, 'from statsmodels.tsa.stattools import acf, pacf\n'), ((4510, 4549), 'statsmodels.tsa.stattools.pacf', 'pacf', (["time_series['flow']"], {'method': '"""ols"""'}), "(time_series['flow'], method='ols')\n", (4514, 4549), False, 'from statsmodels.tsa.stattools import acf, pacf\n'), ((4555, 4571), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(121)'], {}), '(121)\n', (4566, 4571), True, 'import matplotlib.pyplot as plt\n'), ((4576, 4593), 'matplotlib.pyplot.plot', 'plt.plot', (['lag_acf'], {}), '(lag_acf)\n', (4584, 4593), True, 'import matplotlib.pyplot as plt\n'), ((4598, 4644), 'matplotlib.pyplot.axhline', 'plt.axhline', ([], {'y': '(0)', 'linestyle': '"""--"""', 'color': '"""gray"""'}), "(y=0, linestyle='--', color='gray')\n", (4609, 4644), True, 'import matplotlib.pyplot as plt\n'), ((4846, 4862), 'matplotlib.pyplot.title', 'plt.title', (['"""ACF"""'], {}), "('ACF')\n", (4855, 4862), True, 'import matplotlib.pyplot as plt\n'), ((4867, 4883), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(122)'], {}), '(122)\n', (4878, 4883), True, 'import matplotlib.pyplot as plt\n'), ((4888, 4906), 'matplotlib.pyplot.plot', 'plt.plot', (['lag_pacf'], {}), '(lag_pacf)\n', (4896, 4906), True, 'import matplotlib.pyplot as plt\n'), ((4911, 4957), 'matplotlib.pyplot.axhline', 'plt.axhline', ([], {'y': 
'(0)', 'linestyle': '"""--"""', 'color': '"""gray"""'}), "(y=0, linestyle='--', color='gray')\n", (4922, 4957), True, 'import matplotlib.pyplot as plt\n'), ((5159, 5176), 'matplotlib.pyplot.title', 'plt.title', (['"""PACF"""'], {}), "('PACF')\n", (5168, 5176), True, 'import matplotlib.pyplot as plt\n'), ((5181, 5199), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (5197, 5199), True, 'import matplotlib.pyplot as plt\n'), ((5426, 5456), 'datetime.datetime', 'datetime.datetime', (['(2014)', '(5)', '(18)'], {}), '(2014, 5, 18)\n', (5443, 5456), False, 'import datetime\n'), ((5467, 5497), 'datetime.datetime', 'datetime.datetime', (['(2018)', '(5)', '(17)'], {}), '(2018, 5, 17)\n', (5484, 5497), False, 'import datetime\n'), ((6067, 6127), 'statsmodels.tsa.stattools.arma_order_select_ic', 'arma_order_select_ic', (["train_measures_daily['flow']"], {'ic': '"""aic"""'}), "(train_measures_daily['flow'], ic='aic')\n", (6087, 6127), False, 'from statsmodels.tsa.stattools import arma_order_select_ic\n'), ((6556, 6609), 'matplotlib.pyplot.plot', 'plt.plot', (["test_measures_daily[['flow', 'prediction']]"], {}), "(test_measures_daily[['flow', 'prediction']])\n", (6564, 6609), True, 'import matplotlib.pyplot as plt\n'), ((6614, 6675), 'matplotlib.pyplot.plot', 'plt.plot', (["train_measures_daily[['flow', 'model']]['2015-07':]"], {}), "(train_measures_daily[['flow', 'model']]['2015-07':])\n", (6622, 6675), True, 'import matplotlib.pyplot as plt\n'), ((6680, 6746), 'matplotlib.pyplot.legend', 'plt.legend', (["['Test values', 'Prediction', 'Train values', 'Model']"], {}), "(['Test values', 'Prediction', 'Train values', 'Model'])\n", (6690, 6746), True, 'import matplotlib.pyplot as plt\n'), ((6165, 6312), 'statsmodels.tsa.arima_model.ARIMA', 'ARIMA', (["train_measures_daily['flow']"], {'order': '(params.aic_min_order[0], 0, params.aic_min_order[1])', 'exog': "train_measures_daily[['temp', 'precip']]"}), "(train_measures_daily['flow'], order=(params.aic_min_order[0], 0,\n params.aic_min_order[1]), exog=train_measures_daily[['temp', 'precip']])\n", (6170, 6312), False, 'from statsmodels.tsa.arima_model import ARIMA\n')]
|
import pytest
import json
@pytest.mark.usefixtures('cleanup_db')
async def test_todo_api(app, test_cli):
"""
testing todo api
"""
# GET
resp = await test_cli.get('/api/todo')
assert resp.status == 200
resp_json = await resp.json()
assert len(resp_json['todo_list']) == 0
# POST
resp = await test_cli.post(
'/api/todo',
data=json.dumps({
'name': 'new_todo',
}),
headers={'Content-Type': 'application/json'}
)
assert resp.status == 201
# GET
resp = await test_cli.get('/api/todo')
assert resp.status == 200
resp_json = await resp.json()
assert len(resp_json['todo_list']) == 1
assert resp_json['todo_list'][0]['name'] == 'new_todo'
# DELETE
resp = await test_cli.delete(
'/api/todo/1',
)
assert resp.status == 200
# GET
resp = await test_cli.get('/api/todo')
assert resp.status == 200
resp_json = await resp.json()
assert len(resp_json['todo_list']) == 0
|
[
"pytest.mark.usefixtures",
"json.dumps"
] |
[((29, 66), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""cleanup_db"""'], {}), "('cleanup_db')\n", (52, 66), False, 'import pytest\n'), ((384, 416), 'json.dumps', 'json.dumps', (["{'name': 'new_todo'}"], {}), "({'name': 'new_todo'})\n", (394, 416), False, 'import json\n')]
|
import os
import numpy as np
import matplotlib.pyplot as plt
try:
import python_scripts.nalu.io as nalu
except ImportError:
raise ImportError('Download https://github.com/lawsonro3/python_scripts/blob/master/python_scripts/nalu/nalu_functions.py')
if __name__ == '__main__':
root_dir = '/Users/mlawson/GoogleDrive/Work/NREL/Projects/HFM-ECP/nrel_5mw/results/cori_data/'
if os.path.isdir(root_dir) is False:
raise Exception('root_dir does not exist')
####################################
# Load gC data
####################################
file_gC_13 = root_dir+'gCoarse.13/nrel_5mw_gCoarse.log'
th_gC_13,t_gC_13 = nalu.read_log(file_gC_13)
t_gC_13_avg = np.mean(t_gC_13[375:425,:],axis=0)
file_gC_26 = root_dir+'gCoarse.26/nrel_5mw_gCoarse.log'
th_gC_26,t_gC_26 = nalu.read_log(file_gC_26)
t_gC_26_avg = np.mean(t_gC_26[300:350,:],axis=0)
file_gC_52 = root_dir+'gCoarse.52/nrel_5mw_gCoarse.log'
th_gC_52,t_gC_52 = nalu.read_log(file_gC_52)
t_gC_52_avg = np.mean(t_gC_52[500:550,:],axis=0)
file_gC_104 = root_dir+'gCoarse.104/nrel_5mw_gCoarse.log'
th_gC_104,t_gC_104 = nalu.read_log(file_gC_104)
t_gC_104_avg = np.mean(t_gC_104[200:250,:],axis=0)
dofs_gC = 24846302 # num_nodes_gC
nodes_gC = np.array([[13],[26],[52],[104]])
cores_gC = nodes_gC*32
dof_per_core_gC = dofs_gC/cores_gC
t_avg_gC = np.array([t_gC_13_avg,t_gC_26_avg,t_gC_52_avg,t_gC_104_avg])
t_avg_gC = np.append(nodes_gC,t_avg_gC,axis=1)
t_avg_gC = np.append(cores_gC,t_avg_gC,axis=1)
t_avg_gC = np.append(dof_per_core_gC,t_avg_gC,axis=1)
t_avg_headers_gC = ['dof_per_core_gC','cores_gC','nodes_gC']
t_avg_headers_gC = t_avg_headers_gC + th_gC_13
linear_time_gC = t_avg_gC[0,-1]*(cores_gC[0]/cores_gC) # linear scaling
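    # Reference line assuming perfect strong scaling: runtime shrinks in proportion
    # to the added cores, anchored at the smallest job.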
    ####################################
# Load g1 data
####################################
file_g1_512 = root_dir+'g1.512/nrel_5mw_g1.log'
th_g1_512,t_g1_512 = nalu.read_log(file_g1_512)
t_g1_512_avg = np.mean(t_g1_512[-50:,:],axis=0)
file_g1_1024 = root_dir+'g1.1024/nrel_5mw_g1.log'
th_g1_1024,t_g1_1024 = nalu.read_log(file_g1_1024)
t_g1_1024_avg = np.mean(t_g1_1024[-50:,:],axis=0)
# file_g1_1536 = root_dir+'g1oarse.52/nrel_5mw_g1oarse.log'
# th_g1_1536,t_g1_1536 = nalu.read_log(file_g1_1536)
# t_g1_1536_avg = np.mean(t_g1_1536[500:550,:],axis=0)
dofs_g1 = 761112205 # num_nodes_g1
nodes_g1 = np.array([[512],[1024]])#,[1536]])
cores_g1 = nodes_g1*32
dof_per_core_g1 = dofs_g1/cores_g1
t_avg_g1 = np.array([t_g1_512_avg,t_g1_1024_avg])#,t_g1_1536_avg])
t_avg_g1 = np.append(nodes_g1,t_avg_g1,axis=1)
t_avg_g1 = np.append(cores_g1,t_avg_g1,axis=1)
t_avg_g1 = np.append(dof_per_core_g1,t_avg_g1,axis=1)
t_avg_headers_g1 = ['dof_per_core_g1','cores_g1','nodes_g1']
t_avg_headers_g1 = t_avg_headers_g1 + th_g1_512
linear_time_g1 = t_avg_g1[0,-1]*(cores_g1[0]/cores_g1) # linear scaling
####################################
## Plots
####################################
fig1 = '24.8 M Nodes (gCoarse) Timing'
fig2 = '761.1 M Nodes (g1) Timing'
fig3 = 'Nalu Scaling on Cori - Cores'
fig4 = 'Nalu Scaling on Cori - DOFs per Core'
####################################
# gC plotting
####################################
    caption_text_gC = '* NREL 5 MW on Cori Haswell nodes\n* 32 MPI ranks/node 1 OMP thread\n* Muelu solver stack with the v27.xml settings\n * 24.8 M DOF'
plt.figure(fig1,figsize=[10,10])
plt.title(fig1)
for i in np.arange(1,5,1):
plt.plot(t_gC_13[:,i],label=th_gC_13[i]+' 416 cores_gC')
plt.plot(t_gC_26[:,i],label=th_gC_26[i]+' 832 cores_gC')
plt.plot(t_gC_52[:,i],label=th_gC_52[i]+' 1664 cores_gC')
plt.plot(t_gC_104[:,i],label=th_gC_104[i]+'3328 cores_gC')
plt.legend()
plt.xlabel('Timestep')
plt.ylabel('Time (s)')
plt.text(0, 100,caption_text_gC, fontsize=12)
label = '24.8 M DOF, 32 MPI/node, 1 OMP thread, muelu v27.xml'
plt.figure(fig3,figsize=[10,10])
plt.title(fig3)
plt.loglog(t_avg_gC[:,1],t_avg_gC[:,-1],'ks-',label=label)
plt.loglog(cores_gC,linear_time_gC,'k--',label='Linear')
plt.xlabel('Cores')
plt.ylabel('Mean Time per Timestep (s)')
plt.legend()
plt.figure(fig4,figsize=[10,10])
plt.title(fig4)
plt.loglog(t_avg_gC[:,0],t_avg_gC[:,-1],'ks-',label=label)
plt.loglog(dof_per_core_gC,linear_time_gC,'k--',label='linear')
plt.xlabel('DOFs per Core')
plt.ylabel('Mean Time per Timestep (s)')
plt.legend()
####################################
# g1 plotting
####################################
    caption_text_g1 = '* NREL 5 MW on Cori Haswell nodes\n* 32 MPI ranks/node 1 OMP thread\n* Muelu solver stack with the v27.xml settings\n 761.1 M DOF'
color = 'tab:red'
plt.figure(fig2,figsize=[10,10])
plt.title(fig2)
for i in np.arange(1,5,1):
plt.plot(t_g1_512[:,i],label=th_g1_512[i]+' 16,384 cores_g1')
plt.plot(t_g1_1024[:,i],label=th_g1_1024[i]+' 32,768 cores_g1')
#plt.plot(t_g1_1536[:,i],label=th_g1_1536[i]+'49,152 cores_g1')
plt.legend()
plt.xlabel('Timestep')
plt.ylabel('Time (s)')
plt.text(0, 100,caption_text_g1, fontsize=12)
label = '761.1 M DOFs, 32 MPI/node, 1 OMP thread, muelu v27.xml'
plt.figure(fig3,figsize=[10,10])
plt.loglog(t_avg_g1[:,1],t_avg_g1[:,-1],'s-',label=label,color=color)
plt.loglog(cores_g1,linear_time_g1,'--',label='Linear',color=color)
plt.xlabel('Cores')
plt.ylabel('Mean Time per Timestep (s)')
plt.legend()
plt.figure(fig4,figsize=[10,10])
plt.loglog(t_avg_g1[:,0],t_avg_g1[:,-1],'s-',label=label,color=color)
plt.loglog(dof_per_core_g1,linear_time_g1,'--',label='linear',color=color)
plt.xlabel('DOFs per Core')
plt.ylabel('Mean Time per Timestep (s)')
plt.legend()
####################################
# Save plots
####################################
plt.figure(fig1); plt.savefig(root_dir+fig1+'.png',dpi=400)
plt.figure(fig2); plt.savefig(root_dir+fig2+'.png',dpi=400)
plt.figure(fig3); plt.savefig(root_dir+fig3+'.png',dpi=400)
plt.figure(fig4); plt.savefig(root_dir+fig4+'.png',dpi=400)
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.loglog",
"matplotlib.pyplot.plot",
"os.path.isdir",
"python_scripts.nalu.io.read_log",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.text",
"numpy.append",
"matplotlib.pyplot.figure",
"numpy.mean",
"numpy.array",
"numpy.arange",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig"
] |
[((662, 687), 'python_scripts.nalu.io.read_log', 'nalu.read_log', (['file_gC_13'], {}), '(file_gC_13)\n', (675, 687), True, 'import python_scripts.nalu.io as nalu\n'), ((706, 742), 'numpy.mean', 'np.mean', (['t_gC_13[375:425, :]'], {'axis': '(0)'}), '(t_gC_13[375:425, :], axis=0)\n', (713, 742), True, 'import numpy as np\n'), ((825, 850), 'python_scripts.nalu.io.read_log', 'nalu.read_log', (['file_gC_26'], {}), '(file_gC_26)\n', (838, 850), True, 'import python_scripts.nalu.io as nalu\n'), ((869, 905), 'numpy.mean', 'np.mean', (['t_gC_26[300:350, :]'], {'axis': '(0)'}), '(t_gC_26[300:350, :], axis=0)\n', (876, 905), True, 'import numpy as np\n'), ((988, 1013), 'python_scripts.nalu.io.read_log', 'nalu.read_log', (['file_gC_52'], {}), '(file_gC_52)\n', (1001, 1013), True, 'import python_scripts.nalu.io as nalu\n'), ((1032, 1068), 'numpy.mean', 'np.mean', (['t_gC_52[500:550, :]'], {'axis': '(0)'}), '(t_gC_52[500:550, :], axis=0)\n', (1039, 1068), True, 'import numpy as np\n'), ((1155, 1181), 'python_scripts.nalu.io.read_log', 'nalu.read_log', (['file_gC_104'], {}), '(file_gC_104)\n', (1168, 1181), True, 'import python_scripts.nalu.io as nalu\n'), ((1201, 1238), 'numpy.mean', 'np.mean', (['t_gC_104[200:250, :]'], {'axis': '(0)'}), '(t_gC_104[200:250, :], axis=0)\n', (1208, 1238), True, 'import numpy as np\n'), ((1291, 1326), 'numpy.array', 'np.array', (['[[13], [26], [52], [104]]'], {}), '([[13], [26], [52], [104]])\n', (1299, 1326), True, 'import numpy as np\n'), ((1406, 1469), 'numpy.array', 'np.array', (['[t_gC_13_avg, t_gC_26_avg, t_gC_52_avg, t_gC_104_avg]'], {}), '([t_gC_13_avg, t_gC_26_avg, t_gC_52_avg, t_gC_104_avg])\n', (1414, 1469), True, 'import numpy as np\n'), ((1482, 1519), 'numpy.append', 'np.append', (['nodes_gC', 't_avg_gC'], {'axis': '(1)'}), '(nodes_gC, t_avg_gC, axis=1)\n', (1491, 1519), True, 'import numpy as np\n'), ((1533, 1570), 'numpy.append', 'np.append', (['cores_gC', 't_avg_gC'], {'axis': '(1)'}), '(cores_gC, t_avg_gC, axis=1)\n', (1542, 1570), True, 'import numpy as np\n'), ((1584, 1628), 'numpy.append', 'np.append', (['dof_per_core_gC', 't_avg_gC'], {'axis': '(1)'}), '(dof_per_core_gC, t_avg_gC, axis=1)\n', (1593, 1628), True, 'import numpy as np\n'), ((2000, 2026), 'python_scripts.nalu.io.read_log', 'nalu.read_log', (['file_g1_512'], {}), '(file_g1_512)\n', (2013, 2026), True, 'import python_scripts.nalu.io as nalu\n'), ((2046, 2080), 'numpy.mean', 'np.mean', (['t_g1_512[-50:, :]'], {'axis': '(0)'}), '(t_g1_512[-50:, :], axis=0)\n', (2053, 2080), True, 'import numpy as np\n'), ((2161, 2188), 'python_scripts.nalu.io.read_log', 'nalu.read_log', (['file_g1_1024'], {}), '(file_g1_1024)\n', (2174, 2188), True, 'import python_scripts.nalu.io as nalu\n'), ((2209, 2244), 'numpy.mean', 'np.mean', (['t_g1_1024[-50:, :]'], {'axis': '(0)'}), '(t_g1_1024[-50:, :], axis=0)\n', (2216, 2244), True, 'import numpy as np\n'), ((2480, 2505), 'numpy.array', 'np.array', (['[[512], [1024]]'], {}), '([[512], [1024]])\n', (2488, 2505), True, 'import numpy as np\n'), ((2597, 2636), 'numpy.array', 'np.array', (['[t_g1_512_avg, t_g1_1024_avg]'], {}), '([t_g1_512_avg, t_g1_1024_avg])\n', (2605, 2636), True, 'import numpy as np\n'), ((2668, 2705), 'numpy.append', 'np.append', (['nodes_g1', 't_avg_g1'], {'axis': '(1)'}), '(nodes_g1, t_avg_g1, axis=1)\n', (2677, 2705), True, 'import numpy as np\n'), ((2719, 2756), 'numpy.append', 'np.append', (['cores_g1', 't_avg_g1'], {'axis': '(1)'}), '(cores_g1, t_avg_g1, axis=1)\n', (2728, 2756), True, 'import numpy as np\n'), ((2770, 2814), 'numpy.append', 
'np.append', (['dof_per_core_g1', 't_avg_g1'], {'axis': '(1)'}), '(dof_per_core_g1, t_avg_g1, axis=1)\n', (2779, 2814), True, 'import numpy as np\n'), ((3539, 3573), 'matplotlib.pyplot.figure', 'plt.figure', (['fig1'], {'figsize': '[10, 10]'}), '(fig1, figsize=[10, 10])\n', (3549, 3573), True, 'import matplotlib.pyplot as plt\n'), ((3576, 3591), 'matplotlib.pyplot.title', 'plt.title', (['fig1'], {}), '(fig1)\n', (3585, 3591), True, 'import matplotlib.pyplot as plt\n'), ((3605, 3623), 'numpy.arange', 'np.arange', (['(1)', '(5)', '(1)'], {}), '(1, 5, 1)\n', (3614, 3623), True, 'import numpy as np\n'), ((3890, 3902), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3900, 3902), True, 'import matplotlib.pyplot as plt\n'), ((3907, 3929), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Timestep"""'], {}), "('Timestep')\n", (3917, 3929), True, 'import matplotlib.pyplot as plt\n'), ((3934, 3956), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (3944, 3956), True, 'import matplotlib.pyplot as plt\n'), ((3961, 4007), 'matplotlib.pyplot.text', 'plt.text', (['(0)', '(100)', 'caption_text_gC'], {'fontsize': '(12)'}), '(0, 100, caption_text_gC, fontsize=12)\n', (3969, 4007), True, 'import matplotlib.pyplot as plt\n'), ((4079, 4113), 'matplotlib.pyplot.figure', 'plt.figure', (['fig3'], {'figsize': '[10, 10]'}), '(fig3, figsize=[10, 10])\n', (4089, 4113), True, 'import matplotlib.pyplot as plt\n'), ((4116, 4131), 'matplotlib.pyplot.title', 'plt.title', (['fig3'], {}), '(fig3)\n', (4125, 4131), True, 'import matplotlib.pyplot as plt\n'), ((4136, 4199), 'matplotlib.pyplot.loglog', 'plt.loglog', (['t_avg_gC[:, 1]', 't_avg_gC[:, -1]', '"""ks-"""'], {'label': 'label'}), "(t_avg_gC[:, 1], t_avg_gC[:, -1], 'ks-', label=label)\n", (4146, 4199), True, 'import matplotlib.pyplot as plt\n'), ((4199, 4258), 'matplotlib.pyplot.loglog', 'plt.loglog', (['cores_gC', 'linear_time_gC', '"""k--"""'], {'label': '"""Linear"""'}), "(cores_gC, linear_time_gC, 'k--', label='Linear')\n", (4209, 4258), True, 'import matplotlib.pyplot as plt\n'), ((4260, 4279), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Cores"""'], {}), "('Cores')\n", (4270, 4279), True, 'import matplotlib.pyplot as plt\n'), ((4284, 4324), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Mean Time per Timestep (s)"""'], {}), "('Mean Time per Timestep (s)')\n", (4294, 4324), True, 'import matplotlib.pyplot as plt\n'), ((4329, 4341), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4339, 4341), True, 'import matplotlib.pyplot as plt\n'), ((4347, 4381), 'matplotlib.pyplot.figure', 'plt.figure', (['fig4'], {'figsize': '[10, 10]'}), '(fig4, figsize=[10, 10])\n', (4357, 4381), True, 'import matplotlib.pyplot as plt\n'), ((4384, 4399), 'matplotlib.pyplot.title', 'plt.title', (['fig4'], {}), '(fig4)\n', (4393, 4399), True, 'import matplotlib.pyplot as plt\n'), ((4404, 4467), 'matplotlib.pyplot.loglog', 'plt.loglog', (['t_avg_gC[:, 0]', 't_avg_gC[:, -1]', '"""ks-"""'], {'label': 'label'}), "(t_avg_gC[:, 0], t_avg_gC[:, -1], 'ks-', label=label)\n", (4414, 4467), True, 'import matplotlib.pyplot as plt\n'), ((4467, 4533), 'matplotlib.pyplot.loglog', 'plt.loglog', (['dof_per_core_gC', 'linear_time_gC', '"""k--"""'], {'label': '"""linear"""'}), "(dof_per_core_gC, linear_time_gC, 'k--', label='linear')\n", (4477, 4533), True, 'import matplotlib.pyplot as plt\n'), ((4535, 4562), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""DOFs per Core"""'], {}), "('DOFs per Core')\n", (4545, 4562), True, 'import 
matplotlib.pyplot as plt\n'), ((4567, 4607), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Mean Time per Timestep (s)"""'], {}), "('Mean Time per Timestep (s)')\n", (4577, 4607), True, 'import matplotlib.pyplot as plt\n'), ((4612, 4624), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4622, 4624), True, 'import matplotlib.pyplot as plt\n'), ((4908, 4942), 'matplotlib.pyplot.figure', 'plt.figure', (['fig2'], {'figsize': '[10, 10]'}), '(fig2, figsize=[10, 10])\n', (4918, 4942), True, 'import matplotlib.pyplot as plt\n'), ((4945, 4960), 'matplotlib.pyplot.title', 'plt.title', (['fig2'], {}), '(fig2)\n', (4954, 4960), True, 'import matplotlib.pyplot as plt\n'), ((4974, 4992), 'numpy.arange', 'np.arange', (['(1)', '(5)', '(1)'], {}), '(1, 5, 1)\n', (4983, 4992), True, 'import numpy as np\n'), ((5211, 5223), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5221, 5223), True, 'import matplotlib.pyplot as plt\n'), ((5228, 5250), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Timestep"""'], {}), "('Timestep')\n", (5238, 5250), True, 'import matplotlib.pyplot as plt\n'), ((5255, 5277), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (5265, 5277), True, 'import matplotlib.pyplot as plt\n'), ((5282, 5328), 'matplotlib.pyplot.text', 'plt.text', (['(0)', '(100)', 'caption_text_g1'], {'fontsize': '(12)'}), '(0, 100, caption_text_g1, fontsize=12)\n', (5290, 5328), True, 'import matplotlib.pyplot as plt\n'), ((5402, 5436), 'matplotlib.pyplot.figure', 'plt.figure', (['fig3'], {'figsize': '[10, 10]'}), '(fig3, figsize=[10, 10])\n', (5412, 5436), True, 'import matplotlib.pyplot as plt\n'), ((5439, 5514), 'matplotlib.pyplot.loglog', 'plt.loglog', (['t_avg_g1[:, 1]', 't_avg_g1[:, -1]', '"""s-"""'], {'label': 'label', 'color': 'color'}), "(t_avg_g1[:, 1], t_avg_g1[:, -1], 's-', label=label, color=color)\n", (5449, 5514), True, 'import matplotlib.pyplot as plt\n'), ((5513, 5584), 'matplotlib.pyplot.loglog', 'plt.loglog', (['cores_g1', 'linear_time_g1', '"""--"""'], {'label': '"""Linear"""', 'color': 'color'}), "(cores_g1, linear_time_g1, '--', label='Linear', color=color)\n", (5523, 5584), True, 'import matplotlib.pyplot as plt\n'), ((5585, 5604), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Cores"""'], {}), "('Cores')\n", (5595, 5604), True, 'import matplotlib.pyplot as plt\n'), ((5609, 5649), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Mean Time per Timestep (s)"""'], {}), "('Mean Time per Timestep (s)')\n", (5619, 5649), True, 'import matplotlib.pyplot as plt\n'), ((5654, 5666), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5664, 5666), True, 'import matplotlib.pyplot as plt\n'), ((5672, 5706), 'matplotlib.pyplot.figure', 'plt.figure', (['fig4'], {'figsize': '[10, 10]'}), '(fig4, figsize=[10, 10])\n', (5682, 5706), True, 'import matplotlib.pyplot as plt\n'), ((5709, 5784), 'matplotlib.pyplot.loglog', 'plt.loglog', (['t_avg_g1[:, 0]', 't_avg_g1[:, -1]', '"""s-"""'], {'label': 'label', 'color': 'color'}), "(t_avg_g1[:, 0], t_avg_g1[:, -1], 's-', label=label, color=color)\n", (5719, 5784), True, 'import matplotlib.pyplot as plt\n'), ((5783, 5861), 'matplotlib.pyplot.loglog', 'plt.loglog', (['dof_per_core_g1', 'linear_time_g1', '"""--"""'], {'label': '"""linear"""', 'color': 'color'}), "(dof_per_core_g1, linear_time_g1, '--', label='linear', color=color)\n", (5793, 5861), True, 'import matplotlib.pyplot as plt\n'), ((5862, 5889), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""DOFs per Core"""'], {}), "('DOFs per Core')\n", 
(5872, 5889), True, 'import matplotlib.pyplot as plt\n'), ((5894, 5934), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Mean Time per Timestep (s)"""'], {}), "('Mean Time per Timestep (s)')\n", (5904, 5934), True, 'import matplotlib.pyplot as plt\n'), ((5939, 5951), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5949, 5951), True, 'import matplotlib.pyplot as plt\n'), ((6056, 6072), 'matplotlib.pyplot.figure', 'plt.figure', (['fig1'], {}), '(fig1)\n', (6066, 6072), True, 'import matplotlib.pyplot as plt\n'), ((6074, 6120), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(root_dir + fig1 + '.png')"], {'dpi': '(400)'}), "(root_dir + fig1 + '.png', dpi=400)\n", (6085, 6120), True, 'import matplotlib.pyplot as plt\n'), ((6120, 6136), 'matplotlib.pyplot.figure', 'plt.figure', (['fig2'], {}), '(fig2)\n', (6130, 6136), True, 'import matplotlib.pyplot as plt\n'), ((6138, 6184), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(root_dir + fig2 + '.png')"], {'dpi': '(400)'}), "(root_dir + fig2 + '.png', dpi=400)\n", (6149, 6184), True, 'import matplotlib.pyplot as plt\n'), ((6184, 6200), 'matplotlib.pyplot.figure', 'plt.figure', (['fig3'], {}), '(fig3)\n', (6194, 6200), True, 'import matplotlib.pyplot as plt\n'), ((6202, 6248), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(root_dir + fig3 + '.png')"], {'dpi': '(400)'}), "(root_dir + fig3 + '.png', dpi=400)\n", (6213, 6248), True, 'import matplotlib.pyplot as plt\n'), ((6248, 6264), 'matplotlib.pyplot.figure', 'plt.figure', (['fig4'], {}), '(fig4)\n', (6258, 6264), True, 'import matplotlib.pyplot as plt\n'), ((6266, 6312), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(root_dir + fig4 + '.png')"], {'dpi': '(400)'}), "(root_dir + fig4 + '.png', dpi=400)\n", (6277, 6312), True, 'import matplotlib.pyplot as plt\n'), ((392, 415), 'os.path.isdir', 'os.path.isdir', (['root_dir'], {}), '(root_dir)\n', (405, 415), False, 'import os\n'), ((3631, 3691), 'matplotlib.pyplot.plot', 'plt.plot', (['t_gC_13[:, i]'], {'label': "(th_gC_13[i] + ' 416 cores_gC')"}), "(t_gC_13[:, i], label=th_gC_13[i] + ' 416 cores_gC')\n", (3639, 3691), True, 'import matplotlib.pyplot as plt\n'), ((3696, 3756), 'matplotlib.pyplot.plot', 'plt.plot', (['t_gC_26[:, i]'], {'label': "(th_gC_26[i] + ' 832 cores_gC')"}), "(t_gC_26[:, i], label=th_gC_26[i] + ' 832 cores_gC')\n", (3704, 3756), True, 'import matplotlib.pyplot as plt\n'), ((3761, 3822), 'matplotlib.pyplot.plot', 'plt.plot', (['t_gC_52[:, i]'], {'label': "(th_gC_52[i] + ' 1664 cores_gC')"}), "(t_gC_52[:, i], label=th_gC_52[i] + ' 1664 cores_gC')\n", (3769, 3822), True, 'import matplotlib.pyplot as plt\n'), ((3827, 3889), 'matplotlib.pyplot.plot', 'plt.plot', (['t_gC_104[:, i]'], {'label': "(th_gC_104[i] + '3328 cores_gC')"}), "(t_gC_104[:, i], label=th_gC_104[i] + '3328 cores_gC')\n", (3835, 3889), True, 'import matplotlib.pyplot as plt\n'), ((5000, 5065), 'matplotlib.pyplot.plot', 'plt.plot', (['t_g1_512[:, i]'], {'label': "(th_g1_512[i] + ' 16,384 cores_g1')"}), "(t_g1_512[:, i], label=th_g1_512[i] + ' 16,384 cores_g1')\n", (5008, 5065), True, 'import matplotlib.pyplot as plt\n'), ((5070, 5137), 'matplotlib.pyplot.plot', 'plt.plot', (['t_g1_1024[:, i]'], {'label': "(th_g1_1024[i] + ' 32,768 cores_g1')"}), "(t_g1_1024[:, i], label=th_g1_1024[i] + ' 32,768 cores_g1')\n", (5078, 5137), True, 'import matplotlib.pyplot as plt\n')]
|
""" Test module for barbante.recommendation.RecommenderHRChunks class.
"""
import nose.tools
import barbante.tests as tests
from barbante.recommendation.tests.fixtures.HybridRecommenderFixture import HybridRecommenderFixture
class TestRecommenderHRChunks(HybridRecommenderFixture):
""" Class for testing barbante.recommendation.RecommenderHRChunks.
"""
def __init__(self):
super().__init__()
self.set_algorithm('HRChunks')
def test_merge_algorithm_contributions(self):
""" Tests the merge based on fixed slices.
"""
recommendations = {"UBCF": [[[50], "UBCF_1"],
[[30], "UBCF_2"],
[[10], "UBCF_3"],
[[5], "UBCF_4"],
[[2], "UBCF_5"]],
"PBCF": [[[50], "PBCF_1"],
[[30], "PBCF_2"],
[[10], "PBCF_3"],
[[5], "PBCF_4"]],
"CB": [[[50], "CB_1"],
[[40], "CB_2"],
[[30], "CB_3"],
[[20], "CB_4"],
[[10], "CB_5"],
[[9], "CB_6"],
[[8], "CB_7"],
[[7], "CB_8"],
[[4], "CB_9"]],
"POP": [[[50], "POP_1"],
[[30], "POP_2"],
[[10], "POP_3"],
[[5], "POP_4"],
[[4], "POP_5"],
[[3], "POP_6"],
[[4], "POP_7"]]}
session = tests.init_session(user_id="u_eco_1", algorithm=self.algorithm)
recommender = session.get_recommender()
merged_recommendations = recommender.merge_algorithm_contributions(recommendations, 20)
products_rank = [rec[1] for rec in merged_recommendations]
nose.tools.eq_(products_rank,
['UBCF_1', 'PBCF_1', 'CB_1', 'CB_2',
'UBCF_2', 'PBCF_2', 'CB_3', 'CB_4',
'UBCF_3', 'PBCF_3', 'CB_5', 'CB_6',
'UBCF_4', 'PBCF_4', 'CB_7', 'CB_8',
'UBCF_5', 'CB_9'],
"Wrong rank after merge")
|
[
"barbante.tests.init_session"
] |
[((1885, 1948), 'barbante.tests.init_session', 'tests.init_session', ([], {'user_id': '"""u_eco_1"""', 'algorithm': 'self.algorithm'}), "(user_id='u_eco_1', algorithm=self.algorithm)\n", (1903, 1948), True, 'import barbante.tests as tests\n')]
|
from collections import defaultdict
with open('day10/input.txt', 'r') as file:
data = sorted([int(x.strip()) for x in file.readlines()])
data = [0] + data
data.append(data[-1] + 3)
jolt_1, jolt_3 = 0, 0
for i in range(len(data)):
current = data[i - 1]
if (data[i] - current) == 1:
jolt_1 += 1
elif (data[i] - current) == 3:
jolt_3 += 1
jumps = [1, 2, 3]
routes = defaultdict(int) # default value is 0
routes[0] = 1
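# Dynamic programming: the number of adapter chains reaching joltage i is the sum of
# the counts for the three reachable predecessors i-1, i-2 and i-3.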
for i in data[1:]:
routes[i] = sum([routes[i - j] for j in jumps])
print(f"Result 1: {jolt_1 * jolt_3}\nResult 2: {routes[data[-1]]}")
|
[
"collections.defaultdict"
] |
[((410, 426), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (421, 426), False, 'from collections import defaultdict\n')]
|
from fastai.vision.all import *
from fastai.basics import *
from upit.models.cyclegan import *
from upit.train.cyclegan import *
from upit.data.unpaired import *
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--experiment_name', type=str, default='horse2zebra')
parser.add_argument('--model_name', type=str, default='cyclegan', choices=['cyclegan', 'dualgan','ganilla'])
parser.add_argument('--batch_size', type=int, default=1, help='Batch size')
parser.add_argument('--epochs_flat', type=int, default=100, help='Number of epochs with flat LR')
parser.add_argument('--epochs_decay', type=int, default=100, help='Number of epochs with linear decay of LR')
parser.add_argument('--lr', type=float, default=0.0002, help='Learning rate')
parser.add_argument('--gpu', type=int, default=0, help='GPU ID')
args = parser.parse_args()
torch.cuda.set_device(args.gpu)
dls = get_dls_from_hf("huggan/horse2zebra", load_size=286, bs=args.batch_size)
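    # Instantiate the generator and the matching upit learner for the requested architecture.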
if args.model_name == 'cyclegan':
cycle_gan = CycleGAN()
learn = cycle_learner(dls, cycle_gan, opt_func=partial(Adam,mom=0.5,sqr_mom=0.999))
elif args.model_name == 'dualgan':
dual_gan = DualGAN()
learn = dual_learner(dls, dual_gan, opt_func=partial(Adam,mom=0.5,sqr_mom=0.999))
elif args.model_name == 'ganilla':
ganilla = GANILLA()
learn = cycle_learner(dls, ganilla, opt_func=partial(Adam,mom=0.5,sqr_mom=0.999))
learn.fit_flat_lin(args.epochs_flat, args.epochs_decay, args.lr)
learn.save(args.experiment_name+'_'+args.model_name+'_'+str(args.batch_size)+'_'+str(args.epochs_flat)+'_'+str(args.epochs_decay)+'_'+str(args.lr))
learn.model.push_to_hub(args.experiment_name+'_'+args.model_name)
|
[
"argparse.ArgumentParser"
] |
[((229, 254), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (252, 254), False, 'import argparse\n')]
|
# Generated by Django 3.1.8 on 2021-05-04 15:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0002_auto_20210504_1433'),
]
operations = [
migrations.RemoveField(
model_name='user',
name='first_name',
),
migrations.RemoveField(
model_name='user',
name='last_name',
),
migrations.AddField(
model_name='user',
name='name',
field=models.CharField(blank=True, max_length=255, verbose_name='Name of User'),
),
]
|
[
"django.db.migrations.RemoveField",
"django.db.models.CharField"
] |
[((233, 293), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""user"""', 'name': '"""first_name"""'}), "(model_name='user', name='first_name')\n", (255, 293), False, 'from django.db import migrations, models\n'), ((338, 397), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""user"""', 'name': '"""last_name"""'}), "(model_name='user', name='last_name')\n", (360, 397), False, 'from django.db import migrations, models\n'), ((537, 610), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(255)', 'verbose_name': '"""Name of User"""'}), "(blank=True, max_length=255, verbose_name='Name of User')\n", (553, 610), False, 'from django.db import migrations, models\n')]
|
from tkinter import *
from datetime import datetime
# Colors
black: str = "#3d3d3d"  # Black
white: str = "#fafcff"  # White
green: str = "#21c25c"  # Green
red: str = "#eb463b"  # Red
grey: str = "#dedcdc"  # Grey
blue: str = "#3080f0"  # Blue
wallpeper: str = white
color = black
window = Tk()
window.title("Digital Clock")
window.geometry("380x150")
window.resizable(width=FALSE, height=FALSE)
window.configure(bg=white)
bt = Button(window, width=20, text='OK')
bt.place(x=100, y=150)
def clock():
time = datetime.now()
hour = time.strftime("%H:%M:%S")
week_day = time.strftime("%A")
day = time.day
mont = time.strftime("%B")
year = time.strftime("%Y")
text.config(text=hour)
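    # Schedule clock() to run again in 1000 ms so the display refreshes every second.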
text.after(1000, clock)
text1.config(text=week_day + " " + str(day) + "/" + str(mont) + "/" + str(year))
text = Label(window, text="", font='Arial 70 ', bg=wallpeper, fg=color)
text.grid(row=0, column=0, stick=NW, padx=5)
text1 = Label(window, text="", font="Arial 20 italic", bg=wallpeper, fg=color)
text1.grid(row=1, column=0, stick=NW, padx=5)
clock()
window.mainloop()
|
[
"datetime.datetime.now"
] |
[((525, 539), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (537, 539), False, 'from datetime import datetime\n')]
|
import re
from ._abstract import AbstractScraper
from ._utils import get_minutes
class CookingCircle(AbstractScraper):
@classmethod
def host(cls):
return "cookingcircle.com"
def author(self):
return (
self.soup.find("div", {"class": "recipe-author"})
.findChild("span", {"class": "text-uppercase"})
.get_text()
)
def title(self):
return self.schema.title()
def category(self):
return self.schema.category()
def total_time(self):
ul = self.soup.find("ul", {"class": "single-method-overview__times"})
totalTime = None
for li in ul.find_all("li"):
if li.span.get_text().lower() == "total time:":
totalTime = li.span.find_next().get_text()
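        # Keep only the first run of digits (e.g. "45 Minutes" -> "45") before converting to minutes.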
if totalTime is not None:
totalTime = re.findall("[0-9]+", totalTime)[0]
return get_minutes(totalTime)
def yields(self):
return self.schema.yields()
def image(self):
return self.schema.image()
def ingredients(self):
ulList = (
self.soup.find(
"div", {"class": "single-ingredients__group", "data-unit": "metric"}
)
.findChild("ul", {"class": "single-ingredients__list"})
.findChildren("li")
)
ingredients = []
for li in ulList:
ingredients.append(
li.get_text().replace("\t", "").replace("\n\n", " ").replace("\n", "")
)
return ingredients
def instructions(self):
ulList = self.soup.find("ul", {"class": "single-method__method"}).findChildren(
"li"
)
instructions = []
for li in ulList:
instructions.append(li.get_text().strip().replace("\n", " "))
return "\n".join(instructions)
|
[
"re.findall"
] |
[((854, 885), 're.findall', 're.findall', (['"""[0-9]+"""', 'totalTime'], {}), "('[0-9]+', totalTime)\n", (864, 885), False, 'import re\n')]
|
import codecs
import collections
import sys
import csv
import os
from os.path import basename, dirname
import pandas as pd
import magic
import mimetypes
from cchardet import UniversalDetector
from validator.logger import get_logger
tmp_dir = None
logger = get_logger(__name__)
def extract_data(path, standard):
if looks_like_csv(path):
media_type = 'text/csv'
else:
path, media_type = convert_to_csv(path)
return csv_to_dict(path, media_type, standard)
def convert_to_csv(path):
media_type = magic.from_file(path, mime=True)
tmp_path = csv_path(tmp_dir, path)
try:
excel = pd.read_excel(path)
except:
excel = None
if excel is not None:
excel.to_csv(tmp_path, index=None, header=True)
return tmp_path, media_type
logger.info(f"Unable to convert {path} from {media_type} to CSV")
with open(tmp_path, 'w') as out:
pass
return tmp_path, media_type
def csv_to_dict(csv_file, media_type, standard):
result = {
'meta_data': {
'headers_found': [],
'additional_headers': [],
'missing_headers': [],
'media_type': media_type,
'suffix': suffix_for_media_type(media_type),
},
'rows': [],
'data': [],
}
encoding = detect_encoding(csv_file)
with codecs.open(csv_file, encoding=encoding['encoding']) as f:
reader = csv.DictReader(f)
if reader.fieldnames:
result['meta_data']['headers_found'] = reader.fieldnames
result['meta_data']['additional_headers'] = list(set(result['meta_data']['headers_found']) - set(standard.current_standard_headers()))
result['meta_data']['missing_headers'] = list(set(standard.current_standard_headers()) - set(result['meta_data']['headers_found']))
for row in reader:
to_check = collections.OrderedDict()
for column in standard.current_standard_headers():
value = row.get(column, None)
if value is not None:
to_check[column] = row.get(column)
result['rows'].append(to_check)
result['data'].append(row)
return result
def detect_encoding(file):
detector = UniversalDetector()
detector.reset()
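    # Feed the file line by line; the detector sets done once it is confident about the encoding.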
with open(file, 'rb') as f:
for row in f:
detector.feed(row)
if detector.done:
break
detector.close()
return detector.result
def suffix_for_media_type(media_type):
suffix = {
'application/vnd.ms-excel': '.xls',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': '.xlsx',
}
return suffix.get(media_type, mimetypes.guess_extension(media_type))
def get_markdown_for_field(field_name):
from pathlib import Path
current_directory = Path(__file__).parent.resolve()
markdown_file = Path(current_directory, 'markdown', f'{field_name}.md')
with open(markdown_file) as f:
content = f.read()
return content
def looks_like_csv(file):
try:
encoding = detect_encoding(file)
with open(file, encoding=encoding['encoding']) as f:
content = f.read()
if content.lower().startswith('<!doctype html'):
return False
csv.Sniffer().sniff(content)
return True
except Exception as e: # noqa
return False
def csv_path(_dir, path):
path = os.path.join(_dir, basename(path)) if _dir else path
return path + ".csv"
def save_csv(data, file):
if data:
fieldnames = data[0].keys()
if fieldnames:
writer = csv.DictWriter(file, fieldnames=fieldnames)
writer.writeheader()
for row in data:
writer.writerow(row)
|
[
"magic.from_file",
"cchardet.UniversalDetector",
"validator.logger.get_logger",
"codecs.open",
"os.path.basename",
"csv.DictReader",
"csv.Sniffer",
"pandas.read_excel",
"pathlib.Path",
"collections.OrderedDict",
"mimetypes.guess_extension",
"csv.DictWriter"
] |
[((259, 279), 'validator.logger.get_logger', 'get_logger', (['__name__'], {}), '(__name__)\n', (269, 279), False, 'from validator.logger import get_logger\n'), ((532, 564), 'magic.from_file', 'magic.from_file', (['path'], {'mime': '(True)'}), '(path, mime=True)\n', (547, 564), False, 'import magic\n'), ((2278, 2297), 'cchardet.UniversalDetector', 'UniversalDetector', ([], {}), '()\n', (2295, 2297), False, 'from cchardet import UniversalDetector\n'), ((2916, 2971), 'pathlib.Path', 'Path', (['current_directory', '"""markdown"""', 'f"""{field_name}.md"""'], {}), "(current_directory, 'markdown', f'{field_name}.md')\n", (2920, 2971), False, 'from pathlib import Path\n'), ((630, 649), 'pandas.read_excel', 'pd.read_excel', (['path'], {}), '(path)\n', (643, 649), True, 'import pandas as pd\n'), ((1373, 1425), 'codecs.open', 'codecs.open', (['csv_file'], {'encoding': "encoding['encoding']"}), "(csv_file, encoding=encoding['encoding'])\n", (1384, 1425), False, 'import codecs\n'), ((1449, 1466), 'csv.DictReader', 'csv.DictReader', (['f'], {}), '(f)\n', (1463, 1466), False, 'import csv\n'), ((2730, 2767), 'mimetypes.guess_extension', 'mimetypes.guess_extension', (['media_type'], {}), '(media_type)\n', (2755, 2767), False, 'import mimetypes\n'), ((1902, 1927), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (1925, 1927), False, 'import collections\n'), ((3492, 3506), 'os.path.basename', 'basename', (['path'], {}), '(path)\n', (3500, 3506), False, 'from os.path import basename, dirname\n'), ((3672, 3715), 'csv.DictWriter', 'csv.DictWriter', (['file'], {'fieldnames': 'fieldnames'}), '(file, fieldnames=fieldnames)\n', (3686, 3715), False, 'import csv\n'), ((2864, 2878), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2868, 2878), False, 'from pathlib import Path\n'), ((3325, 3338), 'csv.Sniffer', 'csv.Sniffer', ([], {}), '()\n', (3336, 3338), False, 'import csv\n')]
|
# -*- coding: utf-8 -*-
__author__ = """<NAME>"""
__email__ = "<EMAIL>"
import tensorflow as tf
import tf_quat2rot
class TestGraphMode(tf.test.TestCase):
@tf.function
def _run_in_graph(self, batch_shape=(2, 1, 3)):
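        # Because of @tf.function the body is traced and executed as a graph, so eager execution is off here.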
self.assertTrue(not tf.executing_eagerly())
random_quats = tf_quat2rot.random_uniform_quaternion(batch_dim=batch_shape)
random_rotations = tf_quat2rot.quaternion_to_rotation_matrix(random_quats)
random_quats_restored = tf_quat2rot.rotation_matrix_to_quaternion(
random_rotations
)
return random_quats, random_quats_restored
def test_graph_mode(self):
with self.session(use_gpu=False):
# single random quaternion
batch_shape = (2, 1, 3)
random_quats, random_quats_restored = self._run_in_graph(batch_shape)
self.assertEqual(batch_shape + (4,), random_quats_restored.shape)
self.assertAllClose(random_quats, random_quats_restored)
|
[
"tensorflow.executing_eagerly",
"tf_quat2rot.rotation_matrix_to_quaternion",
"tf_quat2rot.quaternion_to_rotation_matrix",
"tf_quat2rot.random_uniform_quaternion"
] |
[((302, 362), 'tf_quat2rot.random_uniform_quaternion', 'tf_quat2rot.random_uniform_quaternion', ([], {'batch_dim': 'batch_shape'}), '(batch_dim=batch_shape)\n', (339, 362), False, 'import tf_quat2rot\n'), ((390, 445), 'tf_quat2rot.quaternion_to_rotation_matrix', 'tf_quat2rot.quaternion_to_rotation_matrix', (['random_quats'], {}), '(random_quats)\n', (431, 445), False, 'import tf_quat2rot\n'), ((478, 537), 'tf_quat2rot.rotation_matrix_to_quaternion', 'tf_quat2rot.rotation_matrix_to_quaternion', (['random_rotations'], {}), '(random_rotations)\n', (519, 537), False, 'import tf_quat2rot\n'), ((255, 277), 'tensorflow.executing_eagerly', 'tf.executing_eagerly', ([], {}), '()\n', (275, 277), True, 'import tensorflow as tf\n')]
|
from flask_wtf.form import FlaskForm
from wtforms.fields.core import StringField
from wtforms.fields.simple import SubmitField
from wtforms.validators import DataRequired
class SearchBox(FlaskForm):
"""Placeholder for a future implementation"""
string = StringField('Search for a post, user or project', validators=[
DataRequired()])
submit = SubmitField('Search')
|
[
"wtforms.validators.DataRequired",
"wtforms.fields.simple.SubmitField"
] |
[((351, 372), 'wtforms.fields.simple.SubmitField', 'SubmitField', (['"""Search"""'], {}), "('Search')\n", (362, 372), False, 'from wtforms.fields.simple import SubmitField\n'), ((324, 338), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (336, 338), False, 'from wtforms.validators import DataRequired\n')]
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import os
import compas_rhino
from compas.utilities import geometric_key
from fofin.shell import Shell
from fofin.shellartist import ShellArtist
from compas_rhino.selectors import VertexSelector
from compas_rhino.selectors import EdgeSelector
from compas_rhino.modifiers import VertexModifier
from compas_rhino.modifiers import EdgeModifier
# ==============================================================================
# I/O
# ==============================================================================
HERE = os.path.dirname(__file__)
DATA = os.path.join(HERE, 'data')
FILE = os.path.join(DATA, 'fofin.json')
# ==============================================================================
# Shell
# ==============================================================================
shell = Shell.from_json(FILE)
# ==============================================================================
# Visualization helpers
# ==============================================================================
artist = ShellArtist(shell, layer="Mesh")
artist.clear_layer()
artist.draw_vertices()
artist.draw_edges()
artist.redraw()
def redraw():
artist.clear_layer()
artist.draw_vertices()
artist.draw_edges()
artist.redraw()
# ==============================================================================
# Vertex attributes
# ==============================================================================
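# Interactively select vertices, edit their attributes, and re-run form finding
# (shell.fofin) until no vertices are selected.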
while True:
keys = VertexSelector.select_vertices(shell)
if not keys:
break
if VertexModifier.update_vertex_attributes(shell, keys):
shell.fofin()
redraw()
# ==============================================================================
# Export result
# ==============================================================================
shell.to_json(FILE)
# ==============================================================================
# Visualize result
# ==============================================================================
artist.clear_layer()
artist.draw_vertices()
artist.draw_edges()
artist.draw_faces()
artist.draw_forces(scale=0.01)
artist.draw_reactions(scale=0.1)
|
[
"fofin.shell.Shell.from_json",
"fofin.shellartist.ShellArtist",
"os.path.dirname",
"compas_rhino.modifiers.VertexModifier.update_vertex_attributes",
"compas_rhino.selectors.VertexSelector.select_vertices",
"os.path.join"
] |
[((631, 656), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (646, 656), False, 'import os\n'), ((664, 690), 'os.path.join', 'os.path.join', (['HERE', '"""data"""'], {}), "(HERE, 'data')\n", (676, 690), False, 'import os\n'), ((698, 730), 'os.path.join', 'os.path.join', (['DATA', '"""fofin.json"""'], {}), "(DATA, 'fofin.json')\n", (710, 730), False, 'import os\n'), ((911, 932), 'fofin.shell.Shell.from_json', 'Shell.from_json', (['FILE'], {}), '(FILE)\n', (926, 932), False, 'from fofin.shell import Shell\n'), ((1130, 1162), 'fofin.shellartist.ShellArtist', 'ShellArtist', (['shell'], {'layer': '"""Mesh"""'}), "(shell, layer='Mesh')\n", (1141, 1162), False, 'from fofin.shellartist import ShellArtist\n'), ((1561, 1598), 'compas_rhino.selectors.VertexSelector.select_vertices', 'VertexSelector.select_vertices', (['shell'], {}), '(shell)\n', (1591, 1598), False, 'from compas_rhino.selectors import VertexSelector\n'), ((1637, 1689), 'compas_rhino.modifiers.VertexModifier.update_vertex_attributes', 'VertexModifier.update_vertex_attributes', (['shell', 'keys'], {}), '(shell, keys)\n', (1676, 1689), False, 'from compas_rhino.modifiers import VertexModifier\n')]
|
# coding: utf8
import locale
import logging
from dialog import Dialog
from kalliope.core import OrderListener
from kalliope.core.ConfigurationManager import SettingLoader
from kalliope.core.SynapseLauncher import SynapseLauncher
from kalliope.neurons.say.say import Say
logging.basicConfig()
logger = logging.getLogger("kalliope")
class ShellGui:
def __init__(self, brain=None):
"""
Load a GUI in a shell console for testing TTS, STT and brain configuration
:param brain: The Brain object provided by the brain.yml
:type brain: Brain
.. seealso:: Brain
"""
# override brain
self.brain = brain
# get settings
sl = SettingLoader()
self.settings = sl.settings
locale.setlocale(locale.LC_ALL, '')
self.d = Dialog(dialog="dialog")
self.d.set_background_title("Kalliope shell UI")
self.show_main_menu()
def show_main_menu(self):
"""
Main menu of the shell UI.
        Provide a list of actions the user can select to test their settings
"""
code, tag = self.d.menu("Test your Kalliope settings from this menu",
choices=[("TTS", "Text to Speech"),
("STT", "Speech to text"),
("Synapses", "Run a synapse")])
if code == self.d.OK:
if tag == "STT":
self.show_stt_test_menu()
if tag == "TTS":
self.show_tts_test_menu()
if tag == "Synapses":
self.show_synapses_test_menu()
def show_stt_test_menu(self):
"""
        Show the list of available STT engines.
        Selecting an STT will load the engine to capture the user's audio and return it as text
"""
# we get STT from settings
stt_list = self.settings.stts
logger.debug("Loaded stt list: %s" % str(stt_list))
choices = self._get_choices_tuple_from_list(stt_list)
code, tag = self.d.menu("Select the STT to test:",
choices=choices)
# go back to the main menu if we choose "cancel"
if code == self.d.CANCEL:
self.show_main_menu()
# if ok, call the target TTS engine and catch audio
if code == self.d.OK:
self.d.infobox("Please talk now")
            # the callback function will print the translated audio as text on the screen
order_listener = OrderListener(callback=self.callback_stt, stt=str(tag))
order_listener.load_stt_plugin()
def show_tts_test_menu(self, sentence_to_test=None):
"""
A menu for testing text to speech
- select a TTS engine to test
- type a sentence
        - press ok and listen to the audio generated from the typed text
:param sentence_to_test: the screen written sentence to test
"""
continue_bool = True
# if we don't have yet a sentence to test, we ask the user to type one
if sentence_to_test is None:
# First, we ask the user to type a sentence that will be passed in the TTS
code, sentence_to_test = self.d.inputbox("Please type the sentence you want to test", height=20, width=50)
if code == self.d.CANCEL:
self.show_main_menu()
continue_bool = False
if code == self.d.OK:
continue_bool = True
if continue_bool:
# we get TTS from settings
tts_list = self.settings.ttss
# create a list of tuple that can be used by the dialog menu
choices = self._get_choices_tuple_from_list(tts_list)
code, tag = self.d.menu("Sentence to test: %s" % sentence_to_test,
choices=choices)
if code == self.d.CANCEL:
self.show_tts_test_menu()
if code == self.d.OK:
self._run_tts_test(tag, sentence_to_test)
# then go back to this menu with the same sentence
            # if the user wants to test the same text with another TTS
self.show_tts_test_menu(sentence_to_test=sentence_to_test)
@staticmethod
def _run_tts_test(tts_name, sentence_to_test):
"""
Call the TTS
:param tts_name: Name of the TTS module to launch
:param sentence_to_test: String text to send to the TTS engine
"""
sentence_to_test = sentence_to_test.encode('utf-8')
tts_name = tts_name.encode('utf-8')
Say(message=sentence_to_test, tts=tts_name)
@staticmethod
def _get_choices_tuple_from_list(list_to_convert):
"""
        Return a list of tuples that can be used in the Dialog menu
:param list_to_convert: List of object to convert into tuple
:return: List of choices
:rtype: List
"""
# create a list of tuple that can be used by the dialog menu
choices = list()
for el in list_to_convert:
tup = (str(el.name), str(el.parameters))
choices.append(tup)
logger.debug("Add el to the list: %s with parameters: %s" % (str(el.name), str(el.parameters)))
return choices
def callback_stt(self, audio):
"""
        Callback function called after the STT has finished its job
        Print the text of what the STT engine thinks we said on the screen
:param audio: Text from the translated audio
"""
        code = self.d.msgbox("The STT engine thinks you said:\n %s" % audio, width=50)
if code == self.d.OK:
self.show_stt_test_menu()
def show_synapses_test_menu(self):
"""
Show a list of available synapse in the brain to run it directly
"""
# create a tuple for the list menu
choices = list()
x = 0
for el in self.brain.synapses:
tup = (str(el.name), str(x))
choices.append(tup)
x += 1
code, tag = self.d.menu("Select a synapse to run",
choices=choices)
if code == self.d.CANCEL:
self.show_main_menu()
if code == self.d.OK:
logger.debug("Run synapse from GUI: %s" % tag)
SynapseLauncher.start_synapse_by_name(tag, brain=self.brain)
self.show_synapses_test_menu()
|
[
"kalliope.neurons.say.say.Say",
"kalliope.core.ConfigurationManager.SettingLoader",
"logging.basicConfig",
"kalliope.core.SynapseLauncher.SynapseLauncher.start_synapse_by_name",
"dialog.Dialog",
"locale.setlocale",
"logging.getLogger"
] |
[((274, 295), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (293, 295), False, 'import logging\n'), ((305, 334), 'logging.getLogger', 'logging.getLogger', (['"""kalliope"""'], {}), "('kalliope')\n", (322, 334), False, 'import logging\n'), ((705, 720), 'kalliope.core.ConfigurationManager.SettingLoader', 'SettingLoader', ([], {}), '()\n', (718, 720), False, 'from kalliope.core.ConfigurationManager import SettingLoader\n'), ((765, 800), 'locale.setlocale', 'locale.setlocale', (['locale.LC_ALL', '""""""'], {}), "(locale.LC_ALL, '')\n", (781, 800), False, 'import locale\n'), ((819, 842), 'dialog.Dialog', 'Dialog', ([], {'dialog': '"""dialog"""'}), "(dialog='dialog')\n", (825, 842), False, 'from dialog import Dialog\n'), ((4605, 4648), 'kalliope.neurons.say.say.Say', 'Say', ([], {'message': 'sentence_to_test', 'tts': 'tts_name'}), '(message=sentence_to_test, tts=tts_name)\n', (4608, 4648), False, 'from kalliope.neurons.say.say import Say\n'), ((6330, 6390), 'kalliope.core.SynapseLauncher.SynapseLauncher.start_synapse_by_name', 'SynapseLauncher.start_synapse_by_name', (['tag'], {'brain': 'self.brain'}), '(tag, brain=self.brain)\n', (6367, 6390), False, 'from kalliope.core.SynapseLauncher import SynapseLauncher\n')]
|
"""CLI functions for edges-cal."""
import click
import papermill as pm
import yaml
from datetime import datetime
from nbconvert import PDFExporter
from pathlib import Path
from rich.console import Console
from traitlets.config import Config
from edges_cal import cal_coefficients as cc
console = Console()
main = click.Group()
@main.command()
@click.argument("config", type=click.Path(dir_okay=False, file_okay=True, exists=True))
@click.argument("path", type=click.Path(dir_okay=True, file_okay=False, exists=True))
@click.option(
"-o",
"--out",
type=click.Path(dir_okay=True, file_okay=False, exists=True),
default=".",
help="output directory",
)
@click.option(
"-c",
"--cache-dir",
type=click.Path(dir_okay=True, file_okay=False),
default=".",
help="directory in which to keep/search for the cache",
)
@click.option(
"-p/-P",
"--plot/--no-plot",
default=True,
help="whether to make diagnostic plots of calibration solutions.",
)
@click.option(
"-s",
"--simulators",
multiple=True,
default=[],
help="antenna simulators to create diagnostic plots for.",
)
def run(config, path, out, cache_dir, plot, simulators):
"""Calibrate using lab measurements in PATH, and make all relevant plots."""
out = Path(out)
with open(config, "r") as fl:
settings = yaml.load(fl, Loader=yaml.FullLoader)
if cache_dir != ".":
settings.update(load_kwargs={"cache_dir": cache_dir})
obs = cc.CalibrationObservation(path=path, **settings)
if plot:
# Plot Calibrator properties
fig = obs.plot_raw_spectra()
fig.savefig(out / "raw_spectra.png")
figs = obs.plot_s11_models()
for kind, fig in figs.items():
fig.savefig(out / f"{kind}_s11_model.png")
fig = obs.plot_calibrated_temps(bins=256)
fig.savefig(out / "calibrated_temps.png")
fig = obs.plot_coefficients()
fig.savefig(out / "calibration_coefficients.png")
# Calibrate and plot antsim
for name in simulators:
antsim = obs.new_load(load_name=name)
fig = obs.plot_calibrated_temp(antsim, bins=256)
fig.savefig(out / f"{name}_calibrated_temp.png")
# Write out data
obs.write(out / obs.path.parent.name)
@main.command()
@click.argument("config", type=click.Path(dir_okay=False, file_okay=True, exists=True))
@click.argument("path", type=click.Path(dir_okay=True, file_okay=False, exists=True))
@click.option(
"-c", "--max-cterms", type=int, default=20, help="maximum number of cterms"
)
@click.option(
"-w", "--max-wterms", type=int, default=20, help="maximum number of wterms"
)
@click.option(
"-r/-R",
"--repeats/--no-repeats",
default=False,
help="explore repeats of switch and receiver s11",
)
@click.option(
"-n/-N", "--runs/--no-runs", default=False, help="explore runs of s11 measurements"
)
@click.option(
"-t",
"--delta-rms-thresh",
type=float,
default=0,
help="threshold marking rms convergence",
)
@click.option(
"-o",
"--out",
type=click.Path(dir_okay=True, file_okay=False, exists=True),
default=".",
help="output directory",
)
@click.option(
"-c",
"--cache-dir",
type=click.Path(dir_okay=True, file_okay=False),
default=".",
help="directory in which to keep/search for the cache",
)
def sweep(
config,
path,
max_cterms,
max_wterms,
repeats,
runs,
delta_rms_thresh,
out,
cache_dir,
):
"""Perform a sweep of number of terms to obtain the best parameter set."""
with open(config, "r") as fl:
settings = yaml.load(fl, Loader=yaml.FullLoader)
if cache_dir != ".":
settings.update(cache_dir=cache_dir)
obs = cc.CalibrationObservation(path=path, **settings)
cc.perform_term_sweep(
obs,
direc=out,
verbose=True,
max_cterms=max_cterms,
max_wterms=max_wterms,
explore_repeat_nums=repeats,
explore_run_nums=runs,
delta_rms_thresh=delta_rms_thresh,
)
@main.command()
@click.argument("path", type=click.Path(dir_okay=True, file_okay=False, exists=True))
@click.option(
"-c",
"--config",
default=None,
type=click.Path(dir_okay=False, file_okay=True, exists=True),
help="a YAML config file specifying parameters of the calibration",
)
@click.option(
"-o",
"--out",
type=click.Path(dir_okay=True, file_okay=False, exists=True),
default=None,
help="output directory",
)
@click.option(
"-d",
"--cache-dir",
type=click.Path(dir_okay=True, file_okay=False),
default=".",
help="directory in which to keep/search for the cache",
)
@click.option("-r/-R", "--report/--no-report", default=True)
@click.option("-u/-U", "--upload/--no-upload", default=False, help="auto-upload file")
@click.option("-t", "--title", type=str, help="title of the memo", default=None)
@click.option(
"-a",
"--author",
type=str,
help="adds an author to the author list",
default=None,
multiple=True,
)
@click.option("-n", "--memo", type=int, help="which memo number to use", default=None)
@click.option("-q/-Q", "--quiet/--loud", default=False)
@click.option("-p/-P", "--pdf/--no-pdf", default=True)
@click.option("--cterms", type=int, default=8)
@click.option("--wterms", type=int, default=10)
def report(
config,
path,
out,
cache_dir,
report,
upload,
title,
author,
memo,
quiet,
pdf,
cterms,
wterms,
):
"""Make a full notebook report on a given calibration."""
single_notebook = Path(__file__).parent / "notebooks/calibrate-observation.ipynb"
console.print(f"Creating report for '{path}'...")
path = Path(path)
if out is None:
out = path / "outputs"
else:
out = Path(out)
if not out.exists():
out.mkdir()
# Describe the filename...
fname = Path(f"calibration_{datetime.now().strftime('%Y-%m-%d-%H.%M.%S')}.ipynb")
if config is not None:
with open(config, "r") as fl:
settings = yaml.load(fl, Loader=yaml.FullLoader)
else:
settings = {}
if "cterms" not in settings:
settings["cterms"] = cterms
if "wterms" not in settings:
settings["wterms"] = wterms
console.print("Settings:")
for k, v in settings.items():
console.print(f"\t{k}: {v}")
settings.update(observation=str(path))
if cache_dir != ".":
settings.update(cache_dir=cache_dir)
# This actually runs the notebook itself.
pm.execute_notebook(
str(single_notebook),
out / fname,
parameters=settings,
kernel_name="edges",
)
console.print(f"Saved interactive notebook to '{out/fname}'")
if pdf: # pragma: nocover
make_pdf(out, fname)
if upload:
upload_memo(out / fname.with_suffix(".pdf"), title, memo, quiet)
@main.command()
@click.argument("path", type=click.Path(dir_okay=True, file_okay=False, exists=True))
@click.argument("cmppath", type=click.Path(dir_okay=True, file_okay=False, exists=True))
@click.option(
"-c",
"--config",
default=None,
type=click.Path(dir_okay=False, file_okay=True, exists=True),
help="a YAML config file specifying parameters of the calibration",
)
@click.option(
"-C",
"--config-cmp",
default=None,
type=click.Path(dir_okay=False, file_okay=True, exists=True),
help="a YAML config file specifying parameters of the comparison calibration",
)
@click.option(
"-o",
"--out",
type=click.Path(dir_okay=True, file_okay=False, exists=True),
default=None,
help="output directory",
)
@click.option(
"-d",
"--cache-dir",
type=click.Path(dir_okay=True, file_okay=False),
default=".",
help="directory in which to keep/search for the cache",
)
@click.option("-r/-R", "--report/--no-report", default=True)
@click.option("-u/-U", "--upload/--no-upload", default=False, help="auto-upload file")
@click.option("-t", "--title", type=str, help="title of the memo", default=None)
@click.option(
"-a",
"--author",
type=str,
help="adds an author to the author list",
default=None,
multiple=True,
)
@click.option("-n", "--memo", type=int, help="which memo number to use", default=None)
@click.option("-q/-Q", "--quiet/--loud", default=False)
@click.option("-p/-P", "--pdf/--no-pdf", default=True)
@click.option("--cterms", type=int, default=8)
@click.option("--wterms", type=int, default=10)
@click.option("--cterms-comp", type=int, default=8)
@click.option("--wterms-comp", type=int, default=10)
def compare(
path,
cmppath,
config,
config_cmp,
out,
cache_dir,
report,
upload,
title,
author,
memo,
quiet,
pdf,
cterms,
wterms,
cterms_comp,
wterms_comp,
):
"""Make a full notebook comparison report between two observations."""
single_notebook = Path(__file__).parent / "notebooks/compare-observation.ipynb"
console.print(f"Creating comparison report for '{path}' compared to '{cmppath}'")
path = Path(path)
cmppath = Path(cmppath)
if out is None:
out = path / "outputs"
else:
out = Path(out)
if not out.exists():
out.mkdir()
# Describe the filename...
fname = Path(
f"calibration-compare-{cmppath.name}_"
f"{datetime.now().strftime('%Y-%m-%d-%H.%M.%S')}.ipynb"
)
if config is not None:
with open(config, "r") as fl:
settings = yaml.load(fl, Loader=yaml.FullLoader)
else:
settings = {}
if "cterms" not in settings:
settings["cterms"] = cterms
if "wterms" not in settings:
settings["wterms"] = wterms
if config_cmp is not None:
with open(config_cmp, "r") as fl:
settings_cmp = yaml.load(fl, Loader=yaml.FullLoader)
else:
settings_cmp = {}
if "cterms" not in settings_cmp:
settings_cmp["cterms"] = cterms_comp
if "wterms" not in settings_cmp:
settings_cmp["wterms"] = wterms_comp
console.print("Settings for Primary:")
for k, v in settings.items():
console.print(f"\t{k}: {v}")
console.print("Settings for Comparison:")
for k, v in settings_cmp.items():
console.print(f"\t{k}: {v}")
if cache_dir != ".":
lk = settings.get("load_kwargs", {})
lk.update(cache_dir=cache_dir)
settings.update(load_kwargs=lk)
lk = settings_cmp.get("load_kwargs", {})
lk.update(cache_dir=cache_dir)
settings_cmp.update(load_kwargs=lk)
# This actually runs the notebook itself.
pm.execute_notebook(
str(single_notebook),
out / fname,
parameters={
"observation": str(path),
"cmp_observation": str(cmppath),
"obs_config_": settings,
"cmp_config_": settings_cmp,
},
kernel_name="edges",
)
console.print(f"Saved interactive notebook to '{out/fname}'")
# Now output the notebook to pdf
if pdf: # pragma: nocover
make_pdf(out, fname)
if upload:
upload_memo(out / fname.with_suffix(".pdf"), title, memo, quiet)
def make_pdf(out, fname):
"""Make a PDF out of an ipynb."""
# Now output the notebook to pdf
if report:
c = Config()
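        # Strip code cells and their prompts so the exported PDF shows only rendered output.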
c.TemplateExporter.exclude_input_prompt = True
c.TemplateExporter.exclude_output_prompt = True
c.TemplateExporter.exclude_input = True
exporter = PDFExporter(config=c)
body, resources = exporter.from_filename(out / fname)
with open(out / fname.with_suffix(".pdf"), "wb") as fl:
fl.write(body)
console.print(f"Saved PDF to '{out / fname.with_suffix('.pdf')}'")
def upload_memo(fname, title, memo, quiet): # pragma: nocover
"""Upload as memo to loco.lab.asu.edu."""
try:
import upload_memo # noqa
except ImportError:
raise ImportError(
"You need to manually install upload-memo to use this option."
)
opts = ["memo", "upload", "-f", str(fname)]
if title:
opts.extend(["-t", title])
if memo:
opts.extend(["-n", memo])
if quiet:
opts.append("-q")
run(opts)
|
[
"yaml.load",
"nbconvert.PDFExporter",
"click.Group",
"edges_cal.cal_coefficients.CalibrationObservation",
"click.option",
"traitlets.config.Config",
"pathlib.Path",
"edges_cal.cal_coefficients.perform_term_sweep",
"click.Path",
"rich.console.Console",
"datetime.datetime.now"
] |
[((298, 307), 'rich.console.Console', 'Console', ([], {}), '()\n', (305, 307), False, 'from rich.console import Console\n'), ((316, 329), 'click.Group', 'click.Group', ([], {}), '()\n', (327, 329), False, 'import click\n'), ((851, 978), 'click.option', 'click.option', (['"""-p/-P"""', '"""--plot/--no-plot"""'], {'default': '(True)', 'help': '"""whether to make diagnostic plots of calibration solutions."""'}), "('-p/-P', '--plot/--no-plot', default=True, help=\n 'whether to make diagnostic plots of calibration solutions.')\n", (863, 978), False, 'import click\n'), ((994, 1119), 'click.option', 'click.option', (['"""-s"""', '"""--simulators"""'], {'multiple': '(True)', 'default': '[]', 'help': '"""antenna simulators to create diagnostic plots for."""'}), "('-s', '--simulators', multiple=True, default=[], help=\n 'antenna simulators to create diagnostic plots for.')\n", (1006, 1119), False, 'import click\n'), ((2496, 2590), 'click.option', 'click.option', (['"""-w"""', '"""--max-wterms"""'], {'type': 'int', 'default': '(20)', 'help': '"""maximum number of wterms"""'}), "('-w', '--max-wterms', type=int, default=20, help=\n 'maximum number of wterms')\n", (2508, 2590), False, 'import click\n'), ((2593, 2711), 'click.option', 'click.option', (['"""-r/-R"""', '"""--repeats/--no-repeats"""'], {'default': '(False)', 'help': '"""explore repeats of switch and receiver s11"""'}), "('-r/-R', '--repeats/--no-repeats', default=False, help=\n 'explore repeats of switch and receiver s11')\n", (2605, 2711), False, 'import click\n'), ((2727, 2829), 'click.option', 'click.option', (['"""-n/-N"""', '"""--runs/--no-runs"""'], {'default': '(False)', 'help': '"""explore runs of s11 measurements"""'}), "('-n/-N', '--runs/--no-runs', default=False, help=\n 'explore runs of s11 measurements')\n", (2739, 2829), False, 'import click\n'), ((2832, 2926), 'click.option', 'click.option', (['"""-c"""', '"""--max-cterms"""'], {'type': 'int', 'default': '(20)', 'help': '"""maximum number of cterms"""'}), "('-c', '--max-cterms', type=int, default=20, help=\n 'maximum number of cterms')\n", (2844, 2926), False, 'import click\n'), ((2929, 3023), 'click.option', 'click.option', (['"""-w"""', '"""--max-wterms"""'], {'type': 'int', 'default': '(20)', 'help': '"""maximum number of wterms"""'}), "('-w', '--max-wterms', type=int, default=20, help=\n 'maximum number of wterms')\n", (2941, 3023), False, 'import click\n'), ((3026, 3144), 'click.option', 'click.option', (['"""-r/-R"""', '"""--repeats/--no-repeats"""'], {'default': '(False)', 'help': '"""explore repeats of switch and receiver s11"""'}), "('-r/-R', '--repeats/--no-repeats', default=False, help=\n 'explore repeats of switch and receiver s11')\n", (3038, 3144), False, 'import click\n'), ((3160, 3262), 'click.option', 'click.option', (['"""-n/-N"""', '"""--runs/--no-runs"""'], {'default': '(False)', 'help': '"""explore runs of s11 measurements"""'}), "('-n/-N', '--runs/--no-runs', default=False, help=\n 'explore runs of s11 measurements')\n", (3172, 3262), False, 'import click\n'), ((3265, 3375), 'click.option', 'click.option', (['"""-t"""', '"""--delta-rms-thresh"""'], {'type': 'float', 'default': '(0)', 'help': '"""threshold marking rms convergence"""'}), "('-t', '--delta-rms-thresh', type=float, default=0, help=\n 'threshold marking rms convergence')\n", (3277, 3375), False, 'import click\n'), ((5054, 5113), 'click.option', 'click.option', (['"""-r/-R"""', '"""--report/--no-report"""'], {'default': '(True)'}), "('-r/-R', '--report/--no-report', default=True)\n", (5066, 
5113), False, 'import click\n'), ((5115, 5205), 'click.option', 'click.option', (['"""-u/-U"""', '"""--upload/--no-upload"""'], {'default': '(False)', 'help': '"""auto-upload file"""'}), "('-u/-U', '--upload/--no-upload', default=False, help=\n 'auto-upload file')\n", (5127, 5205), False, 'import click\n'), ((5202, 5281), 'click.option', 'click.option', (['"""-t"""', '"""--title"""'], {'type': 'str', 'help': '"""title of the memo"""', 'default': 'None'}), "('-t', '--title', type=str, help='title of the memo', default=None)\n", (5214, 5281), False, 'import click\n'), ((5283, 5399), 'click.option', 'click.option', (['"""-a"""', '"""--author"""'], {'type': 'str', 'help': '"""adds an author to the author list"""', 'default': 'None', 'multiple': '(True)'}), "('-a', '--author', type=str, help=\n 'adds an author to the author list', default=None, multiple=True)\n", (5295, 5399), False, 'import click\n'), ((5423, 5512), 'click.option', 'click.option', (['"""-n"""', '"""--memo"""'], {'type': 'int', 'help': '"""which memo number to use"""', 'default': 'None'}), "('-n', '--memo', type=int, help='which memo number to use',\n default=None)\n", (5435, 5512), False, 'import click\n'), ((5510, 5564), 'click.option', 'click.option', (['"""-q/-Q"""', '"""--quiet/--loud"""'], {'default': '(False)'}), "('-q/-Q', '--quiet/--loud', default=False)\n", (5522, 5564), False, 'import click\n'), ((5566, 5619), 'click.option', 'click.option', (['"""-p/-P"""', '"""--pdf/--no-pdf"""'], {'default': '(True)'}), "('-p/-P', '--pdf/--no-pdf', default=True)\n", (5578, 5619), False, 'import click\n'), ((5621, 5666), 'click.option', 'click.option', (['"""--cterms"""'], {'type': 'int', 'default': '(8)'}), "('--cterms', type=int, default=8)\n", (5633, 5666), False, 'import click\n'), ((5668, 5714), 'click.option', 'click.option', (['"""--wterms"""'], {'type': 'int', 'default': '(10)'}), "('--wterms', type=int, default=10)\n", (5680, 5714), False, 'import click\n'), ((8215, 8274), 'click.option', 'click.option', (['"""-r/-R"""', '"""--report/--no-report"""'], {'default': '(True)'}), "('-r/-R', '--report/--no-report', default=True)\n", (8227, 8274), False, 'import click\n'), ((8276, 8366), 'click.option', 'click.option', (['"""-u/-U"""', '"""--upload/--no-upload"""'], {'default': '(False)', 'help': '"""auto-upload file"""'}), "('-u/-U', '--upload/--no-upload', default=False, help=\n 'auto-upload file')\n", (8288, 8366), False, 'import click\n'), ((8363, 8442), 'click.option', 'click.option', (['"""-t"""', '"""--title"""'], {'type': 'str', 'help': '"""title of the memo"""', 'default': 'None'}), "('-t', '--title', type=str, help='title of the memo', default=None)\n", (8375, 8442), False, 'import click\n'), ((8444, 8560), 'click.option', 'click.option', (['"""-a"""', '"""--author"""'], {'type': 'str', 'help': '"""adds an author to the author list"""', 'default': 'None', 'multiple': '(True)'}), "('-a', '--author', type=str, help=\n 'adds an author to the author list', default=None, multiple=True)\n", (8456, 8560), False, 'import click\n'), ((8584, 8673), 'click.option', 'click.option', (['"""-n"""', '"""--memo"""'], {'type': 'int', 'help': '"""which memo number to use"""', 'default': 'None'}), "('-n', '--memo', type=int, help='which memo number to use',\n default=None)\n", (8596, 8673), False, 'import click\n'), ((8671, 8725), 'click.option', 'click.option', (['"""-q/-Q"""', '"""--quiet/--loud"""'], {'default': '(False)'}), "('-q/-Q', '--quiet/--loud', default=False)\n", (8683, 8725), False, 'import click\n'), ((8727, 8780), 
'click.option', 'click.option', (['"""-p/-P"""', '"""--pdf/--no-pdf"""'], {'default': '(True)'}), "('-p/-P', '--pdf/--no-pdf', default=True)\n", (8739, 8780), False, 'import click\n'), ((8782, 8827), 'click.option', 'click.option', (['"""--cterms"""'], {'type': 'int', 'default': '(8)'}), "('--cterms', type=int, default=8)\n", (8794, 8827), False, 'import click\n'), ((8829, 8875), 'click.option', 'click.option', (['"""--wterms"""'], {'type': 'int', 'default': '(10)'}), "('--wterms', type=int, default=10)\n", (8841, 8875), False, 'import click\n'), ((8877, 8927), 'click.option', 'click.option', (['"""--cterms-comp"""'], {'type': 'int', 'default': '(8)'}), "('--cterms-comp', type=int, default=8)\n", (8889, 8927), False, 'import click\n'), ((8929, 8980), 'click.option', 'click.option', (['"""--wterms-comp"""'], {'type': 'int', 'default': '(10)'}), "('--wterms-comp', type=int, default=10)\n", (8941, 8980), False, 'import click\n'), ((1286, 1295), 'pathlib.Path', 'Path', (['out'], {}), '(out)\n', (1290, 1295), False, 'from pathlib import Path\n'), ((1486, 1534), 'edges_cal.cal_coefficients.CalibrationObservation', 'cc.CalibrationObservation', ([], {'path': 'path'}), '(path=path, **settings)\n', (1511, 1534), True, 'from edges_cal import cal_coefficients as cc\n'), ((4111, 4159), 'edges_cal.cal_coefficients.CalibrationObservation', 'cc.CalibrationObservation', ([], {'path': 'path'}), '(path=path, **settings)\n', (4136, 4159), True, 'from edges_cal import cal_coefficients as cc\n'), ((4165, 4358), 'edges_cal.cal_coefficients.perform_term_sweep', 'cc.perform_term_sweep', (['obs'], {'direc': 'out', 'verbose': '(True)', 'max_cterms': 'max_cterms', 'max_wterms': 'max_wterms', 'explore_repeat_nums': 'repeats', 'explore_run_nums': 'runs', 'delta_rms_thresh': 'delta_rms_thresh'}), '(obs, direc=out, verbose=True, max_cterms=max_cterms,\n max_wterms=max_wterms, explore_repeat_nums=repeats, explore_run_nums=\n runs, delta_rms_thresh=delta_rms_thresh)\n', (4186, 4358), True, 'from edges_cal import cal_coefficients as cc\n'), ((6092, 6102), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (6096, 6102), False, 'from pathlib import Path\n'), ((9465, 9475), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (9469, 9475), False, 'from pathlib import Path\n'), ((9490, 9503), 'pathlib.Path', 'Path', (['cmppath'], {}), '(cmppath)\n', (9494, 9503), False, 'from pathlib import Path\n'), ((1349, 1386), 'yaml.load', 'yaml.load', (['fl'], {'Loader': 'yaml.FullLoader'}), '(fl, Loader=yaml.FullLoader)\n', (1358, 1386), False, 'import yaml\n'), ((379, 434), 'click.Path', 'click.Path', ([], {'dir_okay': '(False)', 'file_okay': '(True)', 'exists': '(True)'}), '(dir_okay=False, file_okay=True, exists=True)\n', (389, 434), False, 'import click\n'), ((465, 520), 'click.Path', 'click.Path', ([], {'dir_okay': '(True)', 'file_okay': '(False)', 'exists': '(True)'}), '(dir_okay=True, file_okay=False, exists=True)\n', (475, 520), False, 'import click\n'), ((569, 624), 'click.Path', 'click.Path', ([], {'dir_okay': '(True)', 'file_okay': '(False)', 'exists': '(True)'}), '(dir_okay=True, file_okay=False, exists=True)\n', (579, 624), False, 'import click\n'), ((727, 769), 'click.Path', 'click.Path', ([], {'dir_okay': '(True)', 'file_okay': '(False)'}), '(dir_okay=True, file_okay=False)\n', (737, 769), False, 'import click\n'), ((3991, 4028), 'yaml.load', 'yaml.load', (['fl'], {'Loader': 'yaml.FullLoader'}), '(fl, Loader=yaml.FullLoader)\n', (4000, 4028), False, 'import yaml\n'), ((2352, 2407), 'click.Path', 'click.Path', ([], 
{'dir_okay': '(False)', 'file_okay': '(True)', 'exists': '(True)'}), '(dir_okay=False, file_okay=True, exists=True)\n', (2362, 2407), False, 'import click\n'), ((2438, 2493), 'click.Path', 'click.Path', ([], {'dir_okay': '(True)', 'file_okay': '(False)', 'exists': '(True)'}), '(dir_okay=True, file_okay=False, exists=True)\n', (2448, 2493), False, 'import click\n'), ((3441, 3496), 'click.Path', 'click.Path', ([], {'dir_okay': '(True)', 'file_okay': '(False)', 'exists': '(True)'}), '(dir_okay=True, file_okay=False, exists=True)\n', (3451, 3496), False, 'import click\n'), ((3599, 3641), 'click.Path', 'click.Path', ([], {'dir_okay': '(True)', 'file_okay': '(False)'}), '(dir_okay=True, file_okay=False)\n', (3609, 3641), False, 'import click\n'), ((6179, 6188), 'pathlib.Path', 'Path', (['out'], {}), '(out)\n', (6183, 6188), False, 'from pathlib import Path\n'), ((4468, 4523), 'click.Path', 'click.Path', ([], {'dir_okay': '(True)', 'file_okay': '(False)', 'exists': '(True)'}), '(dir_okay=True, file_okay=False, exists=True)\n', (4478, 4523), False, 'import click\n'), ((4593, 4648), 'click.Path', 'click.Path', ([], {'dir_okay': '(False)', 'file_okay': '(True)', 'exists': '(True)'}), '(dir_okay=False, file_okay=True, exists=True)\n', (4603, 4648), False, 'import click\n'), ((4771, 4826), 'click.Path', 'click.Path', ([], {'dir_okay': '(True)', 'file_okay': '(False)', 'exists': '(True)'}), '(dir_okay=True, file_okay=False, exists=True)\n', (4781, 4826), False, 'import click\n'), ((4930, 4972), 'click.Path', 'click.Path', ([], {'dir_okay': '(True)', 'file_okay': '(False)'}), '(dir_okay=True, file_okay=False)\n', (4940, 4972), False, 'import click\n'), ((9580, 9589), 'pathlib.Path', 'Path', (['out'], {}), '(out)\n', (9584, 9589), False, 'from pathlib import Path\n'), ((7326, 7381), 'click.Path', 'click.Path', ([], {'dir_okay': '(True)', 'file_okay': '(False)', 'exists': '(True)'}), '(dir_okay=True, file_okay=False, exists=True)\n', (7336, 7381), False, 'import click\n'), ((7415, 7470), 'click.Path', 'click.Path', ([], {'dir_okay': '(True)', 'file_okay': '(False)', 'exists': '(True)'}), '(dir_okay=True, file_okay=False, exists=True)\n', (7425, 7470), False, 'import click\n'), ((7540, 7595), 'click.Path', 'click.Path', ([], {'dir_okay': '(False)', 'file_okay': '(True)', 'exists': '(True)'}), '(dir_okay=False, file_okay=True, exists=True)\n', (7550, 7595), False, 'import click\n'), ((7743, 7798), 'click.Path', 'click.Path', ([], {'dir_okay': '(False)', 'file_okay': '(True)', 'exists': '(True)'}), '(dir_okay=False, file_okay=True, exists=True)\n', (7753, 7798), False, 'import click\n'), ((7932, 7987), 'click.Path', 'click.Path', ([], {'dir_okay': '(True)', 'file_okay': '(False)', 'exists': '(True)'}), '(dir_okay=True, file_okay=False, exists=True)\n', (7942, 7987), False, 'import click\n'), ((8091, 8133), 'click.Path', 'click.Path', ([], {'dir_okay': '(True)', 'file_okay': '(False)'}), '(dir_okay=True, file_okay=False)\n', (8101, 8133), False, 'import click\n'), ((11703, 11711), 'traitlets.config.Config', 'Config', ([], {}), '()\n', (11709, 11711), False, 'from traitlets.config import Config\n'), ((11891, 11912), 'nbconvert.PDFExporter', 'PDFExporter', ([], {'config': 'c'}), '(config=c)\n', (11902, 11912), False, 'from nbconvert import PDFExporter\n'), ((5961, 5975), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (5965, 5975), False, 'from pathlib import Path\n'), ((6442, 6479), 'yaml.load', 'yaml.load', (['fl'], {'Loader': 'yaml.FullLoader'}), '(fl, Loader=yaml.FullLoader)\n', (6451, 6479), 
False, 'import yaml\n'), ((9304, 9318), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (9308, 9318), False, 'from pathlib import Path\n'), ((9892, 9929), 'yaml.load', 'yaml.load', (['fl'], {'Loader': 'yaml.FullLoader'}), '(fl, Loader=yaml.FullLoader)\n', (9901, 9929), False, 'import yaml\n'), ((10202, 10239), 'yaml.load', 'yaml.load', (['fl'], {'Loader': 'yaml.FullLoader'}), '(fl, Loader=yaml.FullLoader)\n', (10211, 10239), False, 'import yaml\n'), ((6299, 6313), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6311, 6313), False, 'from datetime import datetime\n'), ((9744, 9758), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (9756, 9758), False, 'from datetime import datetime\n')]
|
#
# SPDX-Copyright: Copyright 2018 Capital One Services, LLC
# SPDX-License-Identifier: MIT
# Copyright 2018 Capital One Services, LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial
# portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT
# OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
import moneymovement, unittest
from models import TransferType, TransferRequestStatus, TransferRequest
class MoneyMovementTest(unittest.TestCase):
def test_moneymovement(self):
base_url = 'https://api-sandbox.capitalone.com'
# need OAuth2
client_id = '83c59ee7d6a4479c8e142422cbe9022a'
client_secret = '<KEY>'
moneymovement.setup_oauth(client_id, client_secret, base_url)
accounts = moneymovement.get_eligible_accounts()
# self.assertEqual(4, len(accounts["accounts"]))
capitalone_savings = accounts["accounts"][0]
capitalone_checking = accounts["accounts"][1]
external_checking = accounts["accounts"][2]
external_checking_2 = accounts["accounts"][3]
print(accounts)
print(capitalone_savings["availableBalance"])
print()
#print(capitalone_checking)
print()
# print(external_checking)
print(external_checking_2)
# POST /money-movement/transfer-requests ACH
transfer_request = TransferRequest()
transfer_request.originMoneyMovementAccountReferenceId = external_checking["moneyMovementAccountReferenceId"]
transfer_request.destinationMoneyMovementAccountReferenceId = capitalone_savings["moneyMovementAccountReferenceId"]
        transfer_request.transferAmount = 10.45 # Up to 2 decimal places
transfer_request.currencyCode = "USD" # optional Default: USD
transfer_request.transferDate = "2018-11-17"
transfer_request.memo = "dream car" # optional
transfer_request.transferType = TransferType.ACH.value
transfer_request.frequency = "OneTime" # optional Default: OneTime
transfer_response_ach = moneymovement.initiate_transfer(transfer_request)
print(transfer_response_ach)
#self.assertEqual(TransferRequestStatus.SCHEDULED.value, transfer_response_ach["transferRequestStatus"])
print(capitalone_savings["availableBalance"])
print()
'''
# POST /money-movement/transfer-requests Internal
transfer_request.originMoneyMovementAccountReferenceId = capitalone_checking["moneyMovementAccountReferenceId"]
transfer_request.transferType = TransferType.INTERNAL.value
transfer_response_internal = moneymovement.initiate_transfer(transfer_request)
self.assertEqual(TransferRequestStatus.SCHEDULED.value, transfer_response_internal["transferRequestStatus"])
# GET /money-movement/transfer-requests/{transferRequestId}
transfer_request_id = transfer_response_ach["transferRequestId"]
transfer_request_ach = moneymovement.get_transfer_request(transfer_request_id)
self.assertEqual(transfer_request_id, transfer_request_ach["transferRequestId"])
'''
# GET /money-movement/transfer-requests
filters = {
"fromDate": "2018-11-16",
"toDate": "2018-11-18",
"transferType": None,
"transferRequestStatus": None
}
transfer_requests = moneymovement.get_transfer_requests(capitalone_savings["moneyMovementAccountReferenceId"], filters)
transfers = transfer_requests['transferRequests']
for transfer in transfers:
print(transfer['transferRequestId'] + transfer['memo'])
print(transfer_requests)
#self.assertEqual(transfer_requests["transferRequests"][0]["transferType"], TransferType.ACH.value);
'''
# PATCH /money-movement/transfer-requests/{transferRequestId}
moneymovement.update_transfer_request(transfer_request_id, TransferRequestStatus.CANCELLED.value)
'''
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"moneymovement.setup_oauth",
"models.TransferRequest",
"moneymovement.get_eligible_accounts",
"moneymovement.initiate_transfer",
"moneymovement.get_transfer_requests"
] |
[((5003, 5018), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5016, 5018), False, 'import moneymovement, unittest\n'), ((1548, 1609), 'moneymovement.setup_oauth', 'moneymovement.setup_oauth', (['client_id', 'client_secret', 'base_url'], {}), '(client_id, client_secret, base_url)\n', (1573, 1609), False, 'import moneymovement, unittest\n'), ((1630, 1667), 'moneymovement.get_eligible_accounts', 'moneymovement.get_eligible_accounts', ([], {}), '()\n', (1665, 1667), False, 'import moneymovement, unittest\n'), ((2246, 2263), 'models.TransferRequest', 'TransferRequest', ([], {}), '()\n', (2261, 2263), False, 'from models import TransferType, TransferRequestStatus, TransferRequest\n'), ((2966, 3015), 'moneymovement.initiate_transfer', 'moneymovement.initiate_transfer', (['transfer_request'], {}), '(transfer_request)\n', (2997, 3015), False, 'import moneymovement, unittest\n'), ((4346, 4450), 'moneymovement.get_transfer_requests', 'moneymovement.get_transfer_requests', (["capitalone_savings['moneyMovementAccountReferenceId']", 'filters'], {}), "(capitalone_savings[\n 'moneyMovementAccountReferenceId'], filters)\n", (4381, 4450), False, 'import moneymovement, unittest\n')]
|
import jax.numpy as np
from tfc import mtfc
from tfc.utils import egrad, NLLS
from tfc.utils.PlotlyMakePlot import MakePlot
# Constants:
n = [40,40]
nC = [2,[1,2]]
m = 40
r0 = 2.
rf = 4.
th0 = 0.
thf = 2.*np.pi
realSoln = lambda r,th: 4.*(-1024.+r**10)*np.sin(5.*th)/(1023.*r**5)
# Create TFC class:
myTfc = mtfc(n,nC,m,x0=[r0,th0],xf=[rf,thf])
H = myTfc.H
x = myTfc.x
# Create constrained expression:
g = lambda xi,*x: np.dot(H(*x),xi)
u1 = lambda xi,*x: g(xi,*x)+\
(x[0]-rf)/(r0-rf)*(0.-g(xi,r0*np.ones_like(x[0]),x[1]))+\
(x[0]-r0)/(rf-r0)*(4.*np.sin(5.*x[1])-g(xi,rf*np.ones_like(x[0]),x[1]))
u = lambda xi,*x: u1(xi,*x)+\
-x[1]/(2.*np.pi)*(u1(xi,x[0],thf*np.ones_like(x[1]))-u1(xi,x[0],th0*np.ones_like(x[1])))+\
(-x[1]**2+2.*np.pi*x[1])/(4.*np.pi)*(egrad(u1,2)(xi,x[0],thf*np.ones_like(x[1]))-egrad(u1,2)(xi,x[0],th0*np.ones_like(x[1])))
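# u1 pins the radial boundary values (0 at r0, 4*sin(5*theta) at rf); u additionally
# enforces matching value and theta-derivative at theta = 0 and 2*pi (periodicity).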
# Create the loss function:
ur = egrad(u,1)
u2r = egrad(ur,1)
u2th = egrad(egrad(u,2),2)
L = lambda xi: u2r(xi,*x)+1./x[0]*ur(xi,*x)+1./x[0]**2*u2th(xi,*x)
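# L is the residual of Laplace's equation in polar coordinates: u_rr + (1/r) u_r + (1/r^2) u_thth = 0.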
# Solve the problem:
xi = np.zeros(H(*x).shape[1])
xi,it,time = NLLS(xi,L,timer=True)
# Print out statistics:
print("Solution time: {0} seconds".format(time))
# Plot the solution:
R,Th = np.meshgrid(np.linspace(r0,rf,50),np.linspace(th0,thf,200))
dark = (R.flatten(),Th.flatten())
X = R*np.cos(Th)
Y = R*np.sin(Th)
U = u(xi,*dark).reshape((200,50))
p = MakePlot("x","y",zlabs="u(x,y,g(x,y))")
p.Surface(x=X,y=Y,z=U,showscale=False)
p.show()
# Plot the error
err = np.abs(realSoln(R,Th)-U)
p = MakePlot("x","y",zlabs="Error")
p.Surface(x=X,y=Y,z=err,showscale=False)
p.show()
|
[
"tfc.utils.egrad",
"jax.numpy.linspace",
"tfc.utils.NLLS",
"jax.numpy.cos",
"tfc.utils.PlotlyMakePlot.MakePlot",
"tfc.mtfc",
"jax.numpy.ones_like",
"jax.numpy.sin"
] |
[((312, 354), 'tfc.mtfc', 'mtfc', (['n', 'nC', 'm'], {'x0': '[r0, th0]', 'xf': '[rf, thf]'}), '(n, nC, m, x0=[r0, th0], xf=[rf, thf])\n', (316, 354), False, 'from tfc import mtfc\n'), ((976, 987), 'tfc.utils.egrad', 'egrad', (['u', '(1)'], {}), '(u, 1)\n', (981, 987), False, 'from tfc.utils import egrad, NLLS\n'), ((993, 1005), 'tfc.utils.egrad', 'egrad', (['ur', '(1)'], {}), '(ur, 1)\n', (998, 1005), False, 'from tfc.utils import egrad, NLLS\n'), ((1164, 1187), 'tfc.utils.NLLS', 'NLLS', (['xi', 'L'], {'timer': '(True)'}), '(xi, L, timer=True)\n', (1168, 1187), False, 'from tfc.utils import egrad, NLLS\n'), ((1456, 1497), 'tfc.utils.PlotlyMakePlot.MakePlot', 'MakePlot', (['"""x"""', '"""y"""'], {'zlabs': '"""u(x,y,g(x,y))"""'}), "('x', 'y', zlabs='u(x,y,g(x,y))')\n", (1464, 1497), False, 'from tfc.utils.PlotlyMakePlot import MakePlot\n'), ((1597, 1630), 'tfc.utils.PlotlyMakePlot.MakePlot', 'MakePlot', (['"""x"""', '"""y"""'], {'zlabs': '"""Error"""'}), "('x', 'y', zlabs='Error')\n", (1605, 1630), False, 'from tfc.utils.PlotlyMakePlot import MakePlot\n'), ((1018, 1029), 'tfc.utils.egrad', 'egrad', (['u', '(2)'], {}), '(u, 2)\n', (1023, 1029), False, 'from tfc.utils import egrad, NLLS\n'), ((1301, 1324), 'jax.numpy.linspace', 'np.linspace', (['r0', 'rf', '(50)'], {}), '(r0, rf, 50)\n', (1312, 1324), True, 'import jax.numpy as np\n'), ((1323, 1349), 'jax.numpy.linspace', 'np.linspace', (['th0', 'thf', '(200)'], {}), '(th0, thf, 200)\n', (1334, 1349), True, 'import jax.numpy as np\n'), ((1390, 1400), 'jax.numpy.cos', 'np.cos', (['Th'], {}), '(Th)\n', (1396, 1400), True, 'import jax.numpy as np\n'), ((1407, 1417), 'jax.numpy.sin', 'np.sin', (['Th'], {}), '(Th)\n', (1413, 1417), True, 'import jax.numpy as np\n'), ((256, 272), 'jax.numpy.sin', 'np.sin', (['(5.0 * th)'], {}), '(5.0 * th)\n', (262, 272), True, 'import jax.numpy as np\n'), ((590, 608), 'jax.numpy.sin', 'np.sin', (['(5.0 * x[1])'], {}), '(5.0 * x[1])\n', (596, 608), True, 'import jax.numpy as np\n'), ((834, 846), 'tfc.utils.egrad', 'egrad', (['u1', '(2)'], {}), '(u1, 2)\n', (839, 846), False, 'from tfc.utils import egrad, NLLS\n'), ((878, 890), 'tfc.utils.egrad', 'egrad', (['u1', '(2)'], {}), '(u1, 2)\n', (883, 890), False, 'from tfc.utils import egrad, NLLS\n'), ((614, 632), 'jax.numpy.ones_like', 'np.ones_like', (['x[0]'], {}), '(x[0])\n', (626, 632), True, 'import jax.numpy as np\n'), ((858, 876), 'jax.numpy.ones_like', 'np.ones_like', (['x[1]'], {}), '(x[1])\n', (870, 876), True, 'import jax.numpy as np\n'), ((902, 920), 'jax.numpy.ones_like', 'np.ones_like', (['x[1]'], {}), '(x[1])\n', (914, 920), True, 'import jax.numpy as np\n'), ((521, 539), 'jax.numpy.ones_like', 'np.ones_like', (['x[0]'], {}), '(x[0])\n', (533, 539), True, 'import jax.numpy as np\n'), ((721, 739), 'jax.numpy.ones_like', 'np.ones_like', (['x[1]'], {}), '(x[1])\n', (733, 739), True, 'import jax.numpy as np\n'), ((756, 774), 'jax.numpy.ones_like', 'np.ones_like', (['x[1]'], {}), '(x[1])\n', (768, 774), True, 'import jax.numpy as np\n')]
|
# -*- coding: utf-8 -*-
# COPYRIGHT 2017 <NAME>
# Truth network model analysis
from __future__ import print_function
import numpy as np
import tellurium as te
import antimony
import generate
import util
import clustering
def classify(setup, s_arr, c_arr):
"""
Ground truth classification. Returns initial perturbation response,
perturbation response, classification, and reaction index
:param g_truth: ground truth network matrix
:param s_truth: ground truth species concentrations
:param k_truth: ground truth rate constants
:param num_node: ground truth numbder of nodes
:param num_bound: ground truth numbder of boundary species
:param k_pert: perturbation amount
:param Thres: classification threshold
:rtype: list
"""
antimony.clearPreviousLoads()
# Strip and translate to string
t_s = setup.t_s.astype('str')
t_k = setup.t_k[setup.t_k != np.array(0)].astype('str')
#t_k_count = np.count_nonzero(setup.t_net)
t_ant = generate.generateAntimonyNew(setup.t_net, t_s, t_k, s_arr, c_arr)
#r_ind = np.array(np.where(setup.t_net != np.array(0))).T
r_ind = util.getPersistantOrder(setup.t_net, setup.p_net)
rr = te.loada(t_ant)
rr.reset() # ALWAYS RESET
rr.conservedMoietyAnalysis = True
pert_i = rr.steadyStateNamedArray() # Initial steady state
r_comb = clustering.getListOfCombinations(r_ind)
    # Perturbation for rate constants
k_pert_output_i = np.empty([len(r_comb), setup.num_float])
for i in range(len(r_comb)):
k_pert_output_i[i] = util.perturbRate(rr, r_comb[i], setup.k_pert)
# Classification for rate constants
k_class_i = np.empty([len(r_comb), setup.num_float], dtype=int)
for i in range(len(r_comb)):
for j in range(setup.num_float):
k_diff = (k_pert_output_i[i][j] - pert_i[0][j])
if (np.abs(k_diff) > setup.Thres):
if k_diff < 0.:
k_class_i[i][j] = 1
else:
k_class_i[i][j] = 2
else:
k_class_i[i][j] = 0
antimony.clearPreviousLoads()
return pert_i[0], k_pert_output_i, k_class_i
def compareClass(t_analysis, k_class):
"""
    Return indices of network matrices that fall into the same category and
    those that do not fall into the same category as the result from the true
    network
:param t_analysis:
:param k_class:
"""
t_net_ind = []
nt_net_ind = []
for i in range(len(k_class)):
if np.array_equal(t_analysis[2], k_class[i]):
t_net_ind.append(i)
else:
nt_net_ind.append(i)
return t_net_ind, nt_net_ind
#@profile
def compareIndClass(t_analysis, k_class_i):
"""
Checks a single instance of classification against the true result. Returns
    True if the classification is identical and False otherwise
:param t_analysis:
:param k_class:
"""
partial = False
if np.array_equal(t_analysis, k_class_i):
partial = True
return partial
#def compareClass(p_r_ind, t_analysis, k_class, net_ind_group):
# """
# Return indices for network matrices that fall into the same category and
# those that does not fall into the same category as the output of ground
# truth model
#
# :param p_r_ind: persistant index
# :param t_analysis: output of ground truth classification
# :param k_class: classification output resulting from perturbing reaction
# :param net_ind_group: grouped reaction index
# :rtype: list
# """
#
# t_net_ind = []
# nt_net_ind = []
#
# for i in range(len(p_r_ind)):
# row = p_r_ind[i][0]
# col = p_r_ind[i][1]
#
# # Get generated classification from target indices
# t_k_class = sorted_k_class[row][col]
#
# # Get truth classification from target indices
# comp1 = np.array([np.in1d(t_analysis[3].T[0], row),
# np.in1d(t_analysis[3].T[1], col)])
#
# truth_k_class = t_analysis[2][comp1.all(axis=0)]
#
# # Get indices where generated classification and truth
# # classification is the same
# # TODO: Currently this matches all binary values
# ind_id = np.where((t_k_class == truth_k_class).all(axis=1))[0]
#
# # Network matrix indices that match with truth classification
# t_net_ind_i = net_ind_group[row][col][ind_id]
# # Network matrix indices that does not match with truth classification
# nt_net_ind_i = np.setdiff1d(net_ind_group[row][col], t_net_ind_i)
# t_net_ind.append(t_net_ind_i)
# nt_net_ind.append(nt_net_ind_i)
#
# return t_net_ind, nt_net_ind
|
[
"numpy.array_equal",
"numpy.abs",
"util.perturbRate",
"generate.generateAntimonyNew",
"numpy.array",
"antimony.clearPreviousLoads",
"tellurium.loada",
"util.getPersistantOrder",
"clustering.getListOfCombinations"
] |
[((785, 814), 'antimony.clearPreviousLoads', 'antimony.clearPreviousLoads', ([], {}), '()\n', (812, 814), False, 'import antimony\n'), ((1014, 1079), 'generate.generateAntimonyNew', 'generate.generateAntimonyNew', (['setup.t_net', 't_s', 't_k', 's_arr', 'c_arr'], {}), '(setup.t_net, t_s, t_k, s_arr, c_arr)\n', (1042, 1079), False, 'import generate\n'), ((1159, 1208), 'util.getPersistantOrder', 'util.getPersistantOrder', (['setup.t_net', 'setup.p_net'], {}), '(setup.t_net, setup.p_net)\n', (1182, 1208), False, 'import util\n'), ((1218, 1233), 'tellurium.loada', 'te.loada', (['t_ant'], {}), '(t_ant)\n', (1226, 1233), True, 'import tellurium as te\n'), ((1383, 1422), 'clustering.getListOfCombinations', 'clustering.getListOfCombinations', (['r_ind'], {}), '(r_ind)\n', (1415, 1422), False, 'import clustering\n'), ((2133, 2162), 'antimony.clearPreviousLoads', 'antimony.clearPreviousLoads', ([], {}), '()\n', (2160, 2162), False, 'import antimony\n'), ((3034, 3071), 'numpy.array_equal', 'np.array_equal', (['t_analysis', 'k_class_i'], {}), '(t_analysis, k_class_i)\n', (3048, 3071), True, 'import numpy as np\n'), ((1591, 1636), 'util.perturbRate', 'util.perturbRate', (['rr', 'r_comb[i]', 'setup.k_pert'], {}), '(rr, r_comb[i], setup.k_pert)\n', (1607, 1636), False, 'import util\n'), ((2578, 2619), 'numpy.array_equal', 'np.array_equal', (['t_analysis[2]', 'k_class[i]'], {}), '(t_analysis[2], k_class[i])\n', (2592, 2619), True, 'import numpy as np\n'), ((1901, 1915), 'numpy.abs', 'np.abs', (['k_diff'], {}), '(k_diff)\n', (1907, 1915), True, 'import numpy as np\n'), ((923, 934), 'numpy.array', 'np.array', (['(0)'], {}), '(0)\n', (931, 934), True, 'import numpy as np\n')]
|
import sys
from jinja2 import Environment, FileSystemLoader
from os import path, makedirs, getcwd
curves = []
curves_string = ""
PQ_L1_CURVES = ["bike1l1cpa", "bike1l1fo",
"frodo640aes", "frodo640shake",
"hqc128_1_cca2",
"kyber512", "kyber90s512",
"ntru_hps2048509",
"lightsaber",
"sidhp434", "sidhp503", "sikep434", "sikep503"]
PQ_L3_CURVES = ["bike1l3cpa", "bike1l3fo",
"frodo976aes", "frodo976shake",
"hqc192",
"kyber768", "kyber90s768",
"ntru_hps2048677", "ntru_hrss701",
"saber",
"sidhp610", "sikep610",
"ntrulpr761", "sntrup761",]
PQ_L5_CURVES = ["frodo1344aes", "frodo1344shake",
"hqc256_1_cca2", "hqc256_2_cca2", "hqc256_3_cca2",
"kyber1024", "kyber90s1024",
"ntru_hps4096821",
"firesaber",
"sidhp751", "sikep751"]
ECDH_L1_CURVES = ['p256']
ECDH_L3_CURVES = ['p384']
ECDH_L5_CURVES = ['p521']
for pq_curve in PQ_L1_CURVES:
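    # NOTE: the 'continue' below skips the loop body, so no L1 curves are generated.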
continue
curves.append(pq_curve)
for ecdh_curve in ECDH_L1_CURVES:
c = f"{ecdh_curve}_{pq_curve}"
curves.append(c)
for pq_curve in PQ_L3_CURVES:
curves.append(pq_curve)
for ecdh_curve in ECDH_L3_CURVES:
c = f"{ecdh_curve}_{pq_curve}"
curves.append(c)
for pq_curve in PQ_L5_CURVES:
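    # NOTE: the 'continue' below skips the loop body, so no L5 curves are generated.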
continue
curves.append(pq_curve)
for ecdh_curve in ECDH_L5_CURVES:
c = f"{ecdh_curve}_{pq_curve}"
curves.append(c)
#curves = curves + ['P-256', 'P-384', 'P-521']
curves = curves + ['P-384']
file_loader = FileSystemLoader('.') # directory of template file
env = Environment(loader=file_loader)
template = 'eap_tls_kem_template.conf.j2'
template = env.get_template(template) # load template file
BASE_DIR = '../confs'
CONF_DIR = f'{BASE_DIR}/kem'
if not path.exists(BASE_DIR):
makedirs(BASE_DIR)
if not path.exists(CONF_DIR):
makedirs(CONF_DIR)
for curve in curves:
curves_string += f"{curve}:"
filename = f"{CONF_DIR}/eap_tls_{curve}.conf"
f = open(filename, 'w')
output = template.render(curve=curve)
f.write(output)
f.close()
print(curves_string)
|
[
"jinja2.FileSystemLoader",
"os.path.exists",
"os.makedirs",
"jinja2.Environment"
] |
[((1682, 1703), 'jinja2.FileSystemLoader', 'FileSystemLoader', (['"""."""'], {}), "('.')\n", (1698, 1703), False, 'from jinja2 import Environment, FileSystemLoader\n'), ((1739, 1770), 'jinja2.Environment', 'Environment', ([], {'loader': 'file_loader'}), '(loader=file_loader)\n', (1750, 1770), False, 'from jinja2 import Environment, FileSystemLoader\n'), ((1931, 1952), 'os.path.exists', 'path.exists', (['BASE_DIR'], {}), '(BASE_DIR)\n', (1942, 1952), False, 'from os import path, makedirs, getcwd\n'), ((1958, 1976), 'os.makedirs', 'makedirs', (['BASE_DIR'], {}), '(BASE_DIR)\n', (1966, 1976), False, 'from os import path, makedirs, getcwd\n'), ((1985, 2006), 'os.path.exists', 'path.exists', (['CONF_DIR'], {}), '(CONF_DIR)\n', (1996, 2006), False, 'from os import path, makedirs, getcwd\n'), ((2012, 2030), 'os.makedirs', 'makedirs', (['CONF_DIR'], {}), '(CONF_DIR)\n', (2020, 2030), False, 'from os import path, makedirs, getcwd\n')]
|
"""
test_const_ionization.py
Author: <NAME>
Affiliation: University of Colorado at Boulder
Created on: Thu Oct 16 14:46:48 MDT 2014
Description:
"""
import ares
import numpy as np
import matplotlib.pyplot as pl
from ares.physics.CrossSections import PhotoIonizationCrossSection as sigma
s_per_yr = ares.physics.Constants.s_per_yr
pars = \
{
'problem_type': 0,
'grid_cells': 1,
'initial_ionization': [1.-1e-6, 1e-6],
#'initial_temperature': 1e4,# make cold so collisional ionization is negligible
'isothermal': False,
'stop_time': 10.0,
'plane_parallel': True,
'recombination': False, # To match analytical solution
'source_type': 'toy',
'source_qdot': 1e4, # solver fails when this is large (like 1e10)
'source_lifetime': 1e10,
'source_E': [13.60000001],
'source_LE': [1.0],
'secondary_ionization': 0,
'collisional_ionization': 0,
'logdtDataDump': 0.5,
'initial_timestep': 1e-15,
}
def test(rtol=1e-2):
# Numerical solution
sim = ares.simulations.RaySegment(**pars)
sim.run()
t, xHII = sim.CellEvolution(field='h_2')
fig = pl.figure(1, figsize=(8, 12))
ax1 = fig.add_subplot(211); ax2 = fig.add_subplot(212)
ax1.loglog(t / s_per_yr, xHII, color='k', label='numerical')
ax1.set_ylim(1e-8, 5)
ax1.set_ylabel(r'$x_{\mathrm{HII}}$')
# Analytic solution: exponential time evolution
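    # x_HII(t) = 1 - (1 - xi0) * exp(-Gamma * t), with Gamma = qdot * sigma0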
sigma0 = sigma(pars['source_E'][0])
qdot = pars['source_qdot']
Gamma = qdot * sigma0
xi0 = pars['initial_ionization'][1]
C = 1. - xi0
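    # With recombination and collisional ionization switched off, dx/dt = Gamma * (1 - x),
    # so x(t) = 1 - (1 - x0) * exp(-Gamma * t), which is what xi() below evaluates.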
def xi(t, Gamma=Gamma):
return 1. - C * np.exp(-Gamma * t)
xHII_anyl = np.array(list(map(xi, t)))
ax1.scatter(t / s_per_yr, xHII_anyl, color='b', facecolors='none', s=100,
label='analytic')
ax1.legend(loc='upper left', fontsize=14)
# Only test accuracy at somewhat later times
mask = t > 0
err = np.abs(xHII[mask] - xHII_anyl[mask]) / xHII_anyl[mask]
ax2.loglog(t / s_per_yr, err)
ax2.set_xlabel(r'$t \ (\mathrm{yr})$')
ax2.set_ylabel(r'rel. error')
pl.draw()
pl.savefig('{!s}.png'.format(__file__[0:__file__.rfind('.')]))
pl.close()
assert np.allclose(xHII[mask], xHII_anyl[mask], rtol=rtol, atol=0)
if __name__ == '__main__':
test()
|
[
"numpy.abs",
"matplotlib.pyplot.close",
"numpy.allclose",
"matplotlib.pyplot.draw",
"matplotlib.pyplot.figure",
"ares.physics.CrossSections.PhotoIonizationCrossSection",
"numpy.exp",
"ares.simulations.RaySegment"
] |
[((976, 1011), 'ares.simulations.RaySegment', 'ares.simulations.RaySegment', ([], {}), '(**pars)\n', (1003, 1011), False, 'import ares\n'), ((1095, 1124), 'matplotlib.pyplot.figure', 'pl.figure', (['(1)'], {'figsize': '(8, 12)'}), '(1, figsize=(8, 12))\n', (1104, 1124), True, 'import matplotlib.pyplot as pl\n'), ((1392, 1418), 'ares.physics.CrossSections.PhotoIonizationCrossSection', 'sigma', (["pars['source_E'][0]"], {}), "(pars['source_E'][0])\n", (1397, 1418), True, 'from ares.physics.CrossSections import PhotoIonizationCrossSection as sigma\n'), ((2073, 2082), 'matplotlib.pyplot.draw', 'pl.draw', ([], {}), '()\n', (2080, 2082), True, 'import matplotlib.pyplot as pl\n'), ((2154, 2164), 'matplotlib.pyplot.close', 'pl.close', ([], {}), '()\n', (2162, 2164), True, 'import matplotlib.pyplot as pl\n'), ((2185, 2244), 'numpy.allclose', 'np.allclose', (['xHII[mask]', 'xHII_anyl[mask]'], {'rtol': 'rtol', 'atol': '(0)'}), '(xHII[mask], xHII_anyl[mask], rtol=rtol, atol=0)\n', (2196, 2244), True, 'import numpy as np\n'), ((1893, 1929), 'numpy.abs', 'np.abs', (['(xHII[mask] - xHII_anyl[mask])'], {}), '(xHII[mask] - xHII_anyl[mask])\n', (1899, 1929), True, 'import numpy as np\n'), ((1590, 1608), 'numpy.exp', 'np.exp', (['(-Gamma * t)'], {}), '(-Gamma * t)\n', (1596, 1608), True, 'import numpy as np\n')]
|
import scipy.io
import numpy as np
import sys
import os.path
import matplotlib.pyplot as plt
trans = [139.62,119.43,36.48,14.5]
mdata = []
def avgWaveSpeed(data,ampStart,ampEnd,freq,transducers,index1,index2):
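    # Estimate the average wave speed between transducers index1 and index2: transducer
    # spacing divided by the difference in threshold-crossing sample indices, averaged
    # over thresholds ampStart..ampEnd and returned as an absolute value scaled by 1000.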
total = 0
count = 0
print(data)
zer = highestPoint(data,ampStart,0)[0]
tz = np.arange(ampStart,ampEnd,(1/freq))
for i in tz:
tmp = highestPoint(data,i,zer)
#print(tmp)
print(tmp, " " , index1 , " ", index2)
total = total + (transducers[index2]-transducers[index1])/(tmp[index2+1] -tmp[index1+1])
count = count +1
total = total/count
return abs(total*1000)
def highestPoint(data,val,start):
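    # For each channel (from index `start`), record the first sample index whose value
    # exceeds `val`; element 0 (seeded with 0) is overwritten with the minimum of the
    # collected values and the maximum is appended at the end.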
x = []
x.append(0)
for b in range(start,len(data)):
count = 0
i = data[b]
#print(i," ",count)
for z in i :
if(z[0] > val):
x.append(count)
break
count = count + 1
lowest = 10000
highest = 0
for v in x:
if(v <= lowest):
lowest = v
if(v>= highest):
highest = v
x[0] = lowest
x.append(highest)
return x
def cailbration(data):
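    # Convert raw transducer voltages to pressure head; the >2 V check picks which of
    # the two calibration curves to apply.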
high = False
for x in data:
if(x[0]>2):
high = True
break
if(high):
for z in range(0,len(data)):
data[z] = ((data[z]*0.5001 + 1.0032 - 1.01325)*10.1974)+10
else:
for z in range(0,len(data)):
data[z] = ((data[z]*3.1277 - 0.263 - 1.01325)*10.1974)+10
return data
def onclick(event):
print('%s click: button=%d, x=%d, y=%d, xdata=%f, ydata=%f' %
('double' if event.dblclick else 'single', event.button,
event.x, event.y, event.xdata, event.ydata))
text = ""
if(os.path.isfile('testfile.txt')):
file2 = open('testfile.txt')
text = file2.read()
file2.close()
file = open('testfile.txt',"w")
file.write(text + str(event.ydata)+'\n')
file.close()
mdata = []
x = open('testfile.txt').read().split('\n')
if(len(x) >2):
#print(x)
mdata.append(float(x[0]))
mdata.append(float(x[1]))
file = open('testfile.txt',"w")
file.write("")
file.close()
#print(avgWaveSpeed(data,mdata[0],mdata[1],10,trans,2,0))
def main(file,idx,x):
fig = plt.figure(x)
gid = 200+(x*10)+idx
#if(x==1):
#fig = plt.figure(3)
#else:
#fig = plt.figure(4)
#location = input('MatLabFile Location\n')
location = file
mat = scipy.io.loadmat(location)
data = []
x = mat.get('VoltageAI0')[0][0][1][0][0]
time = []
for i in range(0,x):
time.append(i/1000)
#print(time)
for i in range(0,10):
tmp = 'VoltageAI'+str(i)
if(mat.get(tmp)==None):
break
else:
data.append(cailbration(mat.get(tmp)[0][0][0]))
colors = ['b','y','m','k','r']
count = 0
#zxcv = avgWaveSpeed(data,29.5,31,10,trans,2,0)
pltinone = True
#zxc = input("All in one? y/n?\n")
zxc = "y"
if(zxc =="n"):
pltinone = False
fig = plt.figure(2)
for i in data:
if(pltinone):
plt.subplot(gid)
line = plt.plot(time,i)
import random
r = lambda: random.randint(0,255)
colorz = ('#%02X%02X%02X' % (r(),r(),r()))
plt.setp(line,'color',colorz,'antialiased',True)
else:
cur = 221 + count
plt.subplot(cur)
plt.ylabel('Bar ( gauge )')
plt.xlabel('Time ( s )')
line = plt.plot(time,i)
plt.setp(line,'color',colors[count],'antialiased',True)
count = count+1
plt.ylabel('Meters of water( m )')
plt.xlabel('Time ( s )')
cid = fig.canvas.mpl_connect('button_press_event', onclick)
plt.axis([0,8, 25, 35])
fig.canvas.mpl_disconnect(cid)
#return zxcv
return 1
sumx = 0
vals = []
main("D:\\Files\\Documents\\Programming\\PythonFiles\\SUWSS\\TDMS\\24July2018_Intact_1.mat",1,1)
#for i in range(1,2):
# print(i)
# sumx = sumx+(main('\TDMS\24July2018_Intact_'+str(i)+'.mat',i,2))
#print(sumx)
'''
sumy= 0
i = 6
for i in range(6,11):
print(i)
sumy = sumy+(main('LL Pipe Case\\24July2018_LL_'+str(i)+'.mat',240+i-5,2))
sumy = (sumy/5)
'''
#print(abs(sumx-sumy))
plt.show()
|
[
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.show",
"random.randint",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.setp",
"matplotlib.pyplot.axis",
"matplotlib.pyplot.figure",
"numpy.arange",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel"
] |
[((4374, 4384), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4382, 4384), True, 'import matplotlib.pyplot as plt\n'), ((309, 346), 'numpy.arange', 'np.arange', (['ampStart', 'ampEnd', '(1 / freq)'], {}), '(ampStart, ampEnd, 1 / freq)\n', (318, 346), True, 'import numpy as np\n'), ((2359, 2372), 'matplotlib.pyplot.figure', 'plt.figure', (['x'], {}), '(x)\n', (2369, 2372), True, 'import matplotlib.pyplot as plt\n'), ((3735, 3769), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Meters of water( m )"""'], {}), "('Meters of water( m )')\n", (3745, 3769), True, 'import matplotlib.pyplot as plt\n'), ((3774, 3798), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time ( s )"""'], {}), "('Time ( s )')\n", (3784, 3798), True, 'import matplotlib.pyplot as plt\n'), ((3867, 3891), 'matplotlib.pyplot.axis', 'plt.axis', (['[0, 8, 25, 35]'], {}), '([0, 8, 25, 35])\n', (3875, 3891), True, 'import matplotlib.pyplot as plt\n'), ((3144, 3157), 'matplotlib.pyplot.figure', 'plt.figure', (['(2)'], {}), '(2)\n', (3154, 3157), True, 'import matplotlib.pyplot as plt\n'), ((3211, 3227), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gid'], {}), '(gid)\n', (3222, 3227), True, 'import matplotlib.pyplot as plt\n'), ((3247, 3264), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'i'], {}), '(time, i)\n', (3255, 3264), True, 'import matplotlib.pyplot as plt\n'), ((3403, 3455), 'matplotlib.pyplot.setp', 'plt.setp', (['line', '"""color"""', 'colorz', '"""antialiased"""', '(True)'], {}), "(line, 'color', colorz, 'antialiased', True)\n", (3411, 3455), True, 'import matplotlib.pyplot as plt\n'), ((3508, 3524), 'matplotlib.pyplot.subplot', 'plt.subplot', (['cur'], {}), '(cur)\n', (3519, 3524), True, 'import matplotlib.pyplot as plt\n'), ((3537, 3564), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Bar ( gauge )"""'], {}), "('Bar ( gauge )')\n", (3547, 3564), True, 'import matplotlib.pyplot as plt\n'), ((3577, 3601), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time ( s )"""'], {}), "('Time ( s )')\n", (3587, 3601), True, 'import matplotlib.pyplot as plt\n'), ((3621, 3638), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'i'], {}), '(time, i)\n', (3629, 3638), True, 'import matplotlib.pyplot as plt\n'), ((3650, 3709), 'matplotlib.pyplot.setp', 'plt.setp', (['line', '"""color"""', 'colors[count]', '"""antialiased"""', '(True)'], {}), "(line, 'color', colors[count], 'antialiased', True)\n", (3658, 3709), True, 'import matplotlib.pyplot as plt\n'), ((3314, 3336), 'random.randint', 'random.randint', (['(0)', '(255)'], {}), '(0, 255)\n', (3328, 3336), False, 'import random\n')]
|
#!/usr/bin/env python
import os
import getpass
import requests
import json
import base64
import socket
from smtplib import SMTP
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email.header import Header
from email.utils import parseaddr, formataddr, COMMASPACE
from email import encoders
from hysds.celery import app
from hysds.es_util import get_mozart_es, get_grq_es
from hysds_commons.net_utils import get_container_host_ip
def read_context():
with open("_context.json", "r") as f:
cxt = json.load(f)
return cxt
def get_hostname():
"""Get hostname."""
try:
return socket.getfqdn()
except Exception as e:
print(e)
print("socket.getfqdn() failed, passing...")
pass
try:
return socket.gethostbyname(socket.gethostname())
except Exception as e:
print(e)
raise RuntimeError(
"Failed to resolve hostname for full email address. Check system."
)
def send_email(sender, cc, bcc, subject, body, attachments=None):
"""
Send an email.
All arguments should be Unicode strings (plain ASCII works as well).
Only the real name part of sender and recipient addresses may contain
non-ASCII characters.
    The email will be properly MIME encoded and delivered through SMTP to
172.17.0.1. This is easy to change if you want something different.
The charset of the email will be the first one out of US-ASCII, ISO-8859-1
and UTF-8 that can represent all the characters occurring in the email.
"""
recipients = cc + bcc # combined recipients
# Header class is smart enough to try US-ASCII, then the charset we
# provide, then fall back to UTF-8.
header_charset = "ISO-8859-1"
# We must choose the body charset manually
for body_charset in "US-ASCII", "ISO-8859-1", "UTF-8":
try:
body.encode(body_charset)
except UnicodeError:
pass
else:
break
# Split real name (which is optional) and email address parts
sender_name, sender_addr = parseaddr(sender)
parsed_cc = [parseaddr(rec) for rec in cc]
parsed_bcc = [parseaddr(rec) for rec in bcc]
# We must always pass Unicode strings to Header, otherwise it will
# use RFC 2047 encoding even on plain ASCII strings.
unicode_parsed_cc = []
for recipient_name, recipient_addr in parsed_cc:
recipient_name = str(Header(str(recipient_name), header_charset))
# Make sure email addresses do not contain non-ASCII characters
recipient_addr = recipient_addr.encode("ascii")
recipient_addr = recipient_addr.decode()
unicode_parsed_cc.append((recipient_name, recipient_addr))
unicode_parsed_bcc = []
for recipient_name, recipient_addr in parsed_bcc:
recipient_name = str(Header(str(recipient_name), header_charset))
# Make sure email addresses do not contain non-ASCII characters
recipient_addr = recipient_addr.encode("ascii")
recipient_addr = recipient_addr.decode()
unicode_parsed_bcc.append((recipient_name, recipient_addr))
# Create the message ('plain' stands for Content-Type: text/plain)
msg = MIMEMultipart()
msg["CC"] = COMMASPACE.join(
[
formataddr((recipient_name, recipient_addr))
for recipient_name, recipient_addr in unicode_parsed_cc
]
)
msg["BCC"] = COMMASPACE.join(
[
formataddr((recipient_name, recipient_addr))
for recipient_name, recipient_addr in unicode_parsed_bcc
]
)
msg["Subject"] = Header(str(subject), header_charset)
msg["FROM"] = "<EMAIL>"
msg.attach(MIMEText(body.encode(body_charset), "plain", body_charset))
# Add attachments
if isinstance(attachments, dict):
for fname in attachments:
part = MIMEBase("application", "octet-stream")
part.set_payload(attachments[fname])
encoders.encode_base64(part)
part.add_header("Content-Disposition", 'attachment; filename="%s"' % fname)
msg.attach(part)
# Send the message via SMTP to docker host
smtp_url = "smtp://%s:25" % get_container_host_ip()
print("smtp_url : %s", smtp_url)
smtp = SMTP(get_container_host_ip())
smtp.sendmail(sender, recipients, msg.as_string())
smtp.quit()
def get_cities(src):
"""Return list of cities."""
cities = []
for city in src.get("city", []):
cities.append("%s, %s" % (city.get("name", ""), city.get("admin1_name", "")))
return cities
def get_value(d, key):
"""Return value from source based on key."""
for k in key.split("."):
if k in d:
d = d[k]
else:
return None
if isinstance(d, list):
return ", ".join([str(i) for i in d])
else:
return d
def get_metadata_snippet(src, snippet_cfg):
"""Return body text for metadata snippet."""
body = ""
for k, label in snippet_cfg:
val = get_value(src, k)
if val is not None:
body += "%s: %s\n" % (label, val)
body += "location type: %s\n" % src.get("location", {}).get("type", None)
body += "location coordinates: %s\n" % src.get("location", {}).get(
"coordinates", []
)
cities = get_cities(src)
body += "Closest cities: %s" % "\n\t\t".join(cities)
return body
def get_facetview_link(link, _id, version=None):
"""
Return link to object_id in FacetView interface.
:param link: str
:param _id: str, _id for elasticsearch document
:param version: str
:return: constructed URL for facetview
"""
if link.endswith("/"):
link = link[:-1]
origin = link.split("/")[-1:]
print(origin)
if "figaro" in origin:
term = "job_id"
else:
term = "_id"
if version is None:
query_string = 'query_string="' + term + '%3A%5C"' + _id + '%5C""'
else:
query_string = 'query_string="' + term + '%3A%5C"' + _id + '%5C""&system_version="' + version + '"'
print(_id)
return "%s/?%s" % (link, query_string)
if __name__ == "__main__":
path = "/".join(__file__.split("/")[0:-1])
settings_file = os.path.join(path, "settings.json")
settings_file = os.path.normpath(settings_file) # normalizing the path
settings = json.load(open(settings_file))
context = read_context()
object_id = context["id"]
url = context["url"]
emails = context["emails"]
rule_name = context["name"]
component = context["component"]
if component == "mozart" or component == "figaro":
es = get_mozart_es()
index = app.conf["STATUS_ALIAS"]
facetview_url = app.conf["MOZART_URL"]
facetview_url = "/".join(facetview_url.split("/")[0:-2]) + "/hysds_ui/figaro"
else: # "tosca"
es = get_grq_es()
index = app.conf["DATASET_ALIAS"]
facetview_url = app.conf["MOZART_URL"]
facetview_url = "/".join(facetview_url.split("/")[0:-2]) + "/hysds_ui/tosca"
cc_recipients = [i.strip() for i in emails.split(",")]
bcc_recipients = []
email_subject = "[monitor] (notify_by_email:%s) %s" % (rule_name, object_id)
email_body = "Product with id %s was ingested." % object_id
email_attachments = None
query = {
"query": {
"term": {
"_id": object_id
}
}
}
result = es.search(index=index, body=query) # can't use get_by_id on alias
if result["hits"]["total"]["value"] > 0:
doc = result["hits"]["hits"][0]
email_body += "\n\n%s" % get_metadata_snippet(doc, settings["SNIPPET_CFG"])
email_body += "\n\nThe entire metadata json for this product has been attached for your convenience.\n\n"
email_attachments = {
"metadata.json": json.dumps(doc, indent=2) # attach metadata json
}
# attach browse images
doc_source = doc["_source"]
if len(doc_source["browse_urls"]) > 0:
browse_url = doc_source["browse_urls"][0]
if len(doc_source["images"]) > 0:
email_body += "Browse images have been attached as well.\n\n"
for i in doc_source["images"]:
small_img = i["small_img"]
small_img_url = os.path.join(browse_url, small_img)
r = requests.get(small_img_url)
if r.status_code != 200:
continue
email_attachments[small_img] = r.content
else:
doc = None
email_body += "\n\n"
email_body += "You may access the product here:\n\n%s" % url
system_version = None if doc is None else doc.get("system_version")
facet_url = get_facetview_link(facetview_url, object_id, system_version)
if facet_url is not None:
email_body += (
"\n\nYou may view this product in FacetView here:\n\n%s" % facet_url
)
email_body += (
"\n\nNOTE: You may have to cut and paste the FacetView link into your "
)
email_body += "browser's address bar to prevent your email client from escaping the curly brackets."
username_email = "%s@%s" % (getpass.getuser(), get_hostname())
send_email(
username_email,
cc_recipients,
bcc_recipients,
email_subject,
email_body,
attachments=email_attachments,
)
|
[
"hysds.es_util.get_grq_es",
"json.load",
"getpass.getuser",
"os.path.join",
"email.mime.base.MIMEBase",
"email.encoders.encode_base64",
"json.dumps",
"email.mime.multipart.MIMEMultipart",
"socket.gethostname",
"socket.getfqdn",
"os.path.normpath",
"hysds.es_util.get_mozart_es",
"hysds_commons.net_utils.get_container_host_ip",
"requests.get",
"email.utils.formataddr",
"email.utils.parseaddr"
] |
[((2154, 2171), 'email.utils.parseaddr', 'parseaddr', (['sender'], {}), '(sender)\n', (2163, 2171), False, 'from email.utils import parseaddr, formataddr, COMMASPACE\n'), ((3281, 3296), 'email.mime.multipart.MIMEMultipart', 'MIMEMultipart', ([], {}), '()\n', (3294, 3296), False, 'from email.mime.multipart import MIMEMultipart\n'), ((6289, 6324), 'os.path.join', 'os.path.join', (['path', '"""settings.json"""'], {}), "(path, 'settings.json')\n", (6301, 6324), False, 'import os\n'), ((6345, 6376), 'os.path.normpath', 'os.path.normpath', (['settings_file'], {}), '(settings_file)\n', (6361, 6376), False, 'import os\n'), ((585, 597), 'json.load', 'json.load', (['f'], {}), '(f)\n', (594, 597), False, 'import json\n'), ((687, 703), 'socket.getfqdn', 'socket.getfqdn', ([], {}), '()\n', (701, 703), False, 'import socket\n'), ((2189, 2203), 'email.utils.parseaddr', 'parseaddr', (['rec'], {}), '(rec)\n', (2198, 2203), False, 'from email.utils import parseaddr, formataddr, COMMASPACE\n'), ((2237, 2251), 'email.utils.parseaddr', 'parseaddr', (['rec'], {}), '(rec)\n', (2246, 2251), False, 'from email.utils import parseaddr, formataddr, COMMASPACE\n'), ((4269, 4292), 'hysds_commons.net_utils.get_container_host_ip', 'get_container_host_ip', ([], {}), '()\n', (4290, 4292), False, 'from hysds_commons.net_utils import get_container_host_ip\n'), ((4346, 4369), 'hysds_commons.net_utils.get_container_host_ip', 'get_container_host_ip', ([], {}), '()\n', (4367, 4369), False, 'from hysds_commons.net_utils import get_container_host_ip\n'), ((6702, 6717), 'hysds.es_util.get_mozart_es', 'get_mozart_es', ([], {}), '()\n', (6715, 6717), False, 'from hysds.es_util import get_mozart_es, get_grq_es\n'), ((6926, 6938), 'hysds.es_util.get_grq_es', 'get_grq_es', ([], {}), '()\n', (6936, 6938), False, 'from hysds.es_util import get_mozart_es, get_grq_es\n'), ((859, 879), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (877, 879), False, 'import socket\n'), ((3352, 3396), 'email.utils.formataddr', 'formataddr', (['(recipient_name, recipient_addr)'], {}), '((recipient_name, recipient_addr))\n', (3362, 3396), False, 'from email.utils import parseaddr, formataddr, COMMASPACE\n'), ((3537, 3581), 'email.utils.formataddr', 'formataddr', (['(recipient_name, recipient_addr)'], {}), '((recipient_name, recipient_addr))\n', (3547, 3581), False, 'from email.utils import parseaddr, formataddr, COMMASPACE\n'), ((3942, 3981), 'email.mime.base.MIMEBase', 'MIMEBase', (['"""application"""', '"""octet-stream"""'], {}), "('application', 'octet-stream')\n", (3950, 3981), False, 'from email.mime.base import MIMEBase\n'), ((4043, 4071), 'email.encoders.encode_base64', 'encoders.encode_base64', (['part'], {}), '(part)\n', (4065, 4071), False, 'from email import encoders\n'), ((7913, 7938), 'json.dumps', 'json.dumps', (['doc'], {'indent': '(2)'}), '(doc, indent=2)\n', (7923, 7938), False, 'import json\n'), ((9303, 9320), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (9318, 9320), False, 'import getpass\n'), ((8396, 8431), 'os.path.join', 'os.path.join', (['browse_url', 'small_img'], {}), '(browse_url, small_img)\n', (8408, 8431), False, 'import os\n'), ((8456, 8483), 'requests.get', 'requests.get', (['small_img_url'], {}), '(small_img_url)\n', (8468, 8483), False, 'import requests\n')]
|
import torch
from torch.distributions import Normal, Categorical, kl_divergence as kl
from scvi.models.classifier import Classifier
from scvi.models.modules import Encoder, DecoderSCVI
from scvi.models.utils import broadcast_labels
from scvi.models.vae import VAE
class VAEC(VAE):
r"""A semi-supervised Variational auto-encoder model - inspired from M2 model,
as described in (https://arxiv.org/pdf/1406.5298.pdf)
Args:
:n_input: Number of input genes.
:n_batch: Default: ``0``.
:n_labels: Default: ``0``.
:n_hidden: Number of hidden. Default: ``128``.
    :n_latent: Default: ``10``.
:n_layers: Number of layers. Default: ``1``.
:dropout_rate: Default: ``0.1``.
:dispersion: Default: ``"gene"``.
:log_variational: Default: ``True``.
:reconstruction_loss: Default: ``"zinb"``.
:y_prior: Default: None, but will be initialized to uniform probability over the cell types if not specified
Examples:
>>> gene_dataset = CortexDataset()
>>> vaec = VAEC(gene_dataset.nb_genes, n_batch=gene_dataset.n_batches * False,
... n_labels=gene_dataset.n_labels)
>>> gene_dataset = SyntheticDataset(n_labels=3)
>>> vaec = VAEC(gene_dataset.nb_genes, n_batch=gene_dataset.n_batches * False,
... n_labels=3, y_prior=torch.tensor([[0.1,0.5,0.4]]))
"""
def __init__(self, n_input, n_batch, n_labels, n_hidden=128, n_latent=10, n_layers=1, dropout_rate=0.1,
y_prior=None, dispersion="gene", log_variational=True, reconstruction_loss="zinb"):
super(VAEC, self).__init__(n_input, n_batch, n_labels, n_hidden=n_hidden, n_latent=n_latent, n_layers=n_layers,
dropout_rate=dropout_rate, dispersion=dispersion, log_variational=log_variational,
reconstruction_loss=reconstruction_loss)
self.z_encoder = Encoder(n_input, n_latent, n_cat_list=[n_labels], n_hidden=n_hidden, n_layers=n_layers,
dropout_rate=dropout_rate)
self.decoder = DecoderSCVI(n_latent, n_input, n_cat_list=[n_batch, n_labels], n_layers=n_layers,
n_hidden=n_hidden, dropout_rate=dropout_rate)
self.y_prior = torch.nn.Parameter(
y_prior if y_prior is not None else (1 / n_labels) * torch.ones(1, n_labels), requires_grad=False
)
self.classifier = Classifier(n_input, n_hidden, n_labels, n_layers=n_layers, dropout_rate=dropout_rate)
def classify(self, x):
x = torch.log(1 + x)
return self.classifier(x)
def forward(self, x, local_l_mean, local_l_var, batch_index=None, y=None):
is_labelled = False if y is None else True
# Prepare for sampling
x_ = torch.log(1 + x)
ql_m, ql_v, library = self.l_encoder(x_)
# Enumerate choices of label
ys, xs, library_s, batch_index_s = (
broadcast_labels(
y, x, library, batch_index, n_broadcast=self.n_labels
)
)
        xs_ = xs  # keep raw counts so xs_ is defined even when log_variational is False
        if self.log_variational:
            xs_ = torch.log(1 + xs)
# Sampling
qz_m, qz_v, zs = self.z_encoder(xs_, ys)
px_scale, px_r, px_rate, px_dropout = self.decoder(self.dispersion, zs, library_s, batch_index_s, ys)
reconst_loss = self._reconstruction_loss(xs, px_rate, px_r, px_dropout, batch_index_s, ys)
# KL Divergence
mean = torch.zeros_like(qz_m)
scale = torch.ones_like(qz_v)
kl_divergence_z = kl(Normal(qz_m, torch.sqrt(qz_v)), Normal(mean, scale)).sum(dim=1)
kl_divergence_l = kl(Normal(ql_m, torch.sqrt(ql_v)), Normal(local_l_mean, torch.sqrt(local_l_var))).sum(dim=1)
if is_labelled:
return reconst_loss, kl_divergence_z + kl_divergence_l
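        # Unlabelled case: the losses above were computed for every candidate label via
        # broadcast_labels, so marginalize them over the classifier posterior q(y|x).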
reconst_loss = reconst_loss.view(self.n_labels, -1)
probs = self.classifier(x_)
reconst_loss = (reconst_loss.t() * probs).sum(dim=1)
kl_divergence = (kl_divergence_z.view(self.n_labels, -1).t() * probs).sum(dim=1)
kl_divergence += kl(Categorical(probs=probs),
Categorical(probs=self.y_prior.repeat(probs.size(0), 1)))
kl_divergence += kl_divergence_l
return reconst_loss, kl_divergence
|
[
"torch.ones_like",
"scvi.models.classifier.Classifier",
"scvi.models.modules.Encoder",
"torch.ones",
"torch.distributions.Categorical",
"torch.zeros_like",
"torch.sqrt",
"scvi.models.utils.broadcast_labels",
"scvi.models.modules.DecoderSCVI",
"torch.distributions.Normal",
"torch.log"
] |
[((1939, 2057), 'scvi.models.modules.Encoder', 'Encoder', (['n_input', 'n_latent'], {'n_cat_list': '[n_labels]', 'n_hidden': 'n_hidden', 'n_layers': 'n_layers', 'dropout_rate': 'dropout_rate'}), '(n_input, n_latent, n_cat_list=[n_labels], n_hidden=n_hidden,\n n_layers=n_layers, dropout_rate=dropout_rate)\n', (1946, 2057), False, 'from scvi.models.modules import Encoder, DecoderSCVI\n'), ((2110, 2242), 'scvi.models.modules.DecoderSCVI', 'DecoderSCVI', (['n_latent', 'n_input'], {'n_cat_list': '[n_batch, n_labels]', 'n_layers': 'n_layers', 'n_hidden': 'n_hidden', 'dropout_rate': 'dropout_rate'}), '(n_latent, n_input, n_cat_list=[n_batch, n_labels], n_layers=\n n_layers, n_hidden=n_hidden, dropout_rate=dropout_rate)\n', (2121, 2242), False, 'from scvi.models.modules import Encoder, DecoderSCVI\n'), ((2464, 2554), 'scvi.models.classifier.Classifier', 'Classifier', (['n_input', 'n_hidden', 'n_labels'], {'n_layers': 'n_layers', 'dropout_rate': 'dropout_rate'}), '(n_input, n_hidden, n_labels, n_layers=n_layers, dropout_rate=\n dropout_rate)\n', (2474, 2554), False, 'from scvi.models.classifier import Classifier\n'), ((2590, 2606), 'torch.log', 'torch.log', (['(1 + x)'], {}), '(1 + x)\n', (2599, 2606), False, 'import torch\n'), ((2817, 2833), 'torch.log', 'torch.log', (['(1 + x)'], {}), '(1 + x)\n', (2826, 2833), False, 'import torch\n'), ((2978, 3049), 'scvi.models.utils.broadcast_labels', 'broadcast_labels', (['y', 'x', 'library', 'batch_index'], {'n_broadcast': 'self.n_labels'}), '(y, x, library, batch_index, n_broadcast=self.n_labels)\n', (2994, 3049), False, 'from scvi.models.utils import broadcast_labels\n'), ((3480, 3502), 'torch.zeros_like', 'torch.zeros_like', (['qz_m'], {}), '(qz_m)\n', (3496, 3502), False, 'import torch\n'), ((3519, 3540), 'torch.ones_like', 'torch.ones_like', (['qz_v'], {}), '(qz_v)\n', (3534, 3540), False, 'import torch\n'), ((3142, 3159), 'torch.log', 'torch.log', (['(1 + xs)'], {}), '(1 + xs)\n', (3151, 3159), False, 'import torch\n'), ((4123, 4147), 'torch.distributions.Categorical', 'Categorical', ([], {'probs': 'probs'}), '(probs=probs)\n', (4134, 4147), False, 'from torch.distributions import Normal, Categorical, kl_divergence as kl\n'), ((2382, 2405), 'torch.ones', 'torch.ones', (['(1)', 'n_labels'], {}), '(1, n_labels)\n', (2392, 2405), False, 'import torch\n'), ((3603, 3622), 'torch.distributions.Normal', 'Normal', (['mean', 'scale'], {}), '(mean, scale)\n', (3609, 3622), False, 'from torch.distributions import Normal, Categorical, kl_divergence as kl\n'), ((3584, 3600), 'torch.sqrt', 'torch.sqrt', (['qz_v'], {}), '(qz_v)\n', (3594, 3600), False, 'import torch\n'), ((3677, 3693), 'torch.sqrt', 'torch.sqrt', (['ql_v'], {}), '(ql_v)\n', (3687, 3693), False, 'import torch\n'), ((3717, 3740), 'torch.sqrt', 'torch.sqrt', (['local_l_var'], {}), '(local_l_var)\n', (3727, 3740), False, 'import torch\n')]
|
"""
===============
Demo Gridspec02
===============
"""
import matplotlib.pyplot as plt
from matplotlib.gridspec import GridSpec
def make_ticklabels_invisible(fig):
for i, ax in enumerate(fig.axes):
ax.text(0.5, 0.5, "ax%d" % (i+1), va="center", ha="center")
ax.tick_params(labelbottom=False, labelleft=False)
fig = plt.figure()
gs = GridSpec(3, 3)
ax1 = plt.subplot(gs[0, :])
# identical to ax1 = plt.subplot(gs.new_subplotspec((0, 0), colspan=3))
ax2 = plt.subplot(gs[1, :-1])
ax3 = plt.subplot(gs[1:, -1])
ax4 = plt.subplot(gs[-1, 0])
ax5 = plt.subplot(gs[-1, -2])
fig.suptitle("GridSpec")
make_ticklabels_invisible(fig)
plt.show()
|
[
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.show",
"matplotlib.gridspec.GridSpec"
] |
[((341, 353), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (351, 353), True, 'import matplotlib.pyplot as plt\n'), ((360, 374), 'matplotlib.gridspec.GridSpec', 'GridSpec', (['(3)', '(3)'], {}), '(3, 3)\n', (368, 374), False, 'from matplotlib.gridspec import GridSpec\n'), ((381, 402), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[0, :]'], {}), '(gs[0, :])\n', (392, 402), True, 'import matplotlib.pyplot as plt\n'), ((481, 504), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[1, :-1]'], {}), '(gs[1, :-1])\n', (492, 504), True, 'import matplotlib.pyplot as plt\n'), ((511, 534), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[1:, -1]'], {}), '(gs[1:, -1])\n', (522, 534), True, 'import matplotlib.pyplot as plt\n'), ((541, 563), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[-1, 0]'], {}), '(gs[-1, 0])\n', (552, 563), True, 'import matplotlib.pyplot as plt\n'), ((570, 593), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[-1, -2]'], {}), '(gs[-1, -2])\n', (581, 593), True, 'import matplotlib.pyplot as plt\n'), ((652, 662), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (660, 662), True, 'import matplotlib.pyplot as plt\n')]
|
from django import template
register = template.Library()
@register.inclusion_tag('sortable_column_snippet.html')
def sortable_column(request, pretty_name, identifier, default=False):
current = request.GET.get('sort', identifier if default else None)
return {
'pretty_name': pretty_name,
'identifier': identifier,
'request': request,
'selected': identifier == current,
}
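# Template usage (positional arguments; the {% load %} name depends on this file's
# module name, which is not shown here):
#   {% sortable_column request "Name" "name" True %}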
|
[
"django.template.Library"
] |
[((41, 59), 'django.template.Library', 'template.Library', ([], {}), '()\n', (57, 59), False, 'from django import template\n')]
|
# -*- coding: utf-8 -*-
"""
SkyAlchemy
Copyright ©2016 <NAME>
Licensed under the terms of the MIT License.
See LICENSE for details.
@author: <NAME>
"""
from __future__ import unicode_literals
import struct
from collections import OrderedDict
from io import BytesIO
import os
import os.path as osp
import ctypes
import ctypes.wintypes
#%% unpack and data
from skyrimtypes import unpack, RefID
import skyrimdata
#%%
class Savegame(object):
"""This class loads a The Elder Scrolls V: Skyrim savegame file and parses
useful information.
"""
def __init__(self, filename, load_now=True):
self.filename = filename
if load_now:
for i in self.loadGame():
pass
def loadGame(self):
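        # Generator: parse the savegame section by section, yielding the current file
        # offset after each stage (useful for progress reporting).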
filename = self.filename
#%%
d = OrderedDict() # Data storage
f = open(filename, 'rb') # TODO: replace
f.seek(0)
if True:
# with open(filename, 'rb') as f:
# File
d['magic'] = f.read(13)
yield f.tell()
if d['magic'] != b'TESV_SAVEGAME':
raise AssertionError("Incorrect magic in file header")
d['headerSize'] = unpack("uint32", f)
# Header
header = BytesIO(f.read(d['headerSize']))
d['version'] = unpack("uint32", header)
if not 7 <= d['version'] <= 9:
raise AssertionError("Only versions 7 to 9 are supported")
d['saveNumber'] = unpack("uint32", header)
d['playerName'] = unpack("wstring", header)
d['playerLevel'] = unpack("uint32", header)
d['playerLocation'] = unpack("wstring", header)
d['gameDate'] = unpack("wstring", header)
d['playerRaceEditorId'] = unpack("wstring", header)
d['playerSex'] = {0: "male", 1: "female"}[unpack("uint16", header)]
d['playerCurExp'] = unpack("float32", header)
d['playerLvlUpExp'] = unpack("float32", header)
d['filetime'] = unpack("filetime", header)
d['shotWidth'] = unpack("uint32", header)
d['shotHeight'] = unpack("uint32", header)
yield f.tell()
# Back to file
d['screenshotData'] = f.read(3*d['shotWidth']*d['shotHeight'])
from PIL import Image
d['screenshotImage'] = Image.frombytes("RGB",
(d['shotWidth'], d['shotHeight']),
d['screenshotData'])
yield f.tell()
d['formVersion'] = unpack("uint8", f)
d['pluginInfoSize'] = unpack("uint32", f)
# Plugin
plugin = BytesIO(f.read(d['pluginInfoSize']))
d['pluginCount'] = unpack("uint8", plugin)
d['plugins'] = [unpack("wstring", plugin)
for i in range(d['pluginCount'])]
yield f.tell()
# File Location Table
formIDArrayCountOffset = unpack("uint32", f)
unknownTable3Offset = unpack("uint32", f)
globalDataTable1Offset = unpack("uint32", f)
globalDataTable2Offset = unpack("uint32", f)
changeFormsOffset = unpack("uint32", f)
globalDataTable3Offset = unpack("uint32", f)
globalDataTable1Count = unpack("uint32", f)
globalDataTable2Count = unpack("uint32", f)
globalDataTable3Count = unpack("uint32", f)
changeFormCount = unpack("uint32", f)
f.read(4*15) # unused
yield f.tell()
# Global Data 1
f.seek(globalDataTable1Offset)
gdata1 = []
for i in range(globalDataTable1Count):
gdata1.append(unpack("globalData", f))
yield f.tell()
# Global Data 2
f.seek(globalDataTable2Offset)
gdata2 = []
for i in range(globalDataTable2Count):
gdata2.append(unpack("globalData", f))
yield f.tell()
# changeForms
f.seek(changeFormsOffset)
d_changeforms = []
for i in range(changeFormCount):
d_changeforms.append(unpack("ChangeForm", f))
yield f.tell()
d['changeforms'] = d_changeforms
# Global Data 3
yield f.tell()
f.seek(globalDataTable3Offset)
gdata3 = []
for i in range(globalDataTable3Count):
gdata3.append(unpack("globalData", f))
yield f.tell()
d['gdata'] = {v[1]:v[2] for v in (gdata1 + gdata2 + gdata3)}
# formID
f.seek(formIDArrayCountOffset)
formIDArrayCount = unpack("uint32", f)
d['formid'] = struct.Struct('{}I'.format(formIDArrayCount)).unpack(
f.read(formIDArrayCount*4))
yield f.tell()
# Visited Worldspace
visitedWorldspaceArrayCount = unpack("uint32", f)
d['visitedWorldspaceArray'] = struct.Struct('{}I'.format(
visitedWorldspaceArrayCount)).unpack(f.read(
visitedWorldspaceArrayCount*4))
yield f.tell()
# unknownTable3
f.seek(unknownTable3Offset)
ukt3count = unpack("uint32", f)
assert(len(f.read()) == ukt3count)
# EOF
assert(f.read() == b"")
yield f.tell()
# Inventory
for cf in d['changeforms']:
if cf.type == 1 and cf.formid.value == 0x14:
break
d['inventory'] = cf.d['inventory']
self.d = d
def populate_ids(self):
for k, created in self.d['gdata']['Created Objects'].items():
for item in created:
# print "Created", k, hex(item.refID.value)
RefID.createdid[item.refID.value] = item
for i, formid in enumerate(self.d['formid']):
if formid in RefID.defaultid:
RefID.formid[i+1] = RefID.defaultid[formid]
elif formid in RefID.createdid:
RefID.formid[i+1] = RefID.createdid[formid]
def player_ingrs(self):
for inv_item in self.d['inventory']:
if (inv_item.item.type not in {'C', 'F'} and
inv_item.item.name.type == "INGR"):
yield (inv_item.itemcount, inv_item.item.value)
#%%
def getSaveGames():
"""Get list of savegame files"""
dll = ctypes.windll.shell32
buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH + 1)
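    # CSIDL 0x0005 = CSIDL_PERSONAL ("My Documents"); look for Documents/My Games/Skyrim/Saves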
try:
if dll.SHGetSpecialFolderPathW(None, buf, 0x0005, False):
savedir = osp.join(buf.value, "My Games", "Skyrim", "Saves")
if not osp.exists(savedir) or not osp.isdir(savedir):
raise RuntimeError("Could not find savegame directory.")
else:
raise RuntimeError("Could not find savegame directory.")
except:
raise RuntimeError("Could not find savegame directory.")
savegames = [osp.join(savedir, f) for f in os.listdir(savedir) if f.endswith(".ess")]
return savegames
#%%
def test_savegame():
#%%
savegames = getSaveGames()
for filename in savegames:
sg = Savegame(filename)
        sg.populate_ids()
|
[
"os.path.join",
"os.path.isdir",
"os.path.exists",
"ctypes.create_unicode_buffer",
"collections.OrderedDict",
"PIL.Image.frombytes",
"os.listdir",
"skyrimtypes.unpack"
] |
[((6534, 6592), 'ctypes.create_unicode_buffer', 'ctypes.create_unicode_buffer', (['(ctypes.wintypes.MAX_PATH + 1)'], {}), '(ctypes.wintypes.MAX_PATH + 1)\n', (6562, 6592), False, 'import ctypes\n'), ((800, 813), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (811, 813), False, 'from collections import OrderedDict\n'), ((7057, 7077), 'os.path.join', 'osp.join', (['savedir', 'f'], {}), '(savedir, f)\n', (7065, 7077), True, 'import os.path as osp\n'), ((1186, 1205), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (1192, 1205), False, 'from skyrimtypes import unpack, RefID\n'), ((1308, 1332), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'header'], {}), "('uint32', header)\n", (1314, 1332), False, 'from skyrimtypes import unpack, RefID\n'), ((1481, 1505), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'header'], {}), "('uint32', header)\n", (1487, 1505), False, 'from skyrimtypes import unpack, RefID\n'), ((1536, 1561), 'skyrimtypes.unpack', 'unpack', (['"""wstring"""', 'header'], {}), "('wstring', header)\n", (1542, 1561), False, 'from skyrimtypes import unpack, RefID\n'), ((1593, 1617), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'header'], {}), "('uint32', header)\n", (1599, 1617), False, 'from skyrimtypes import unpack, RefID\n'), ((1652, 1677), 'skyrimtypes.unpack', 'unpack', (['"""wstring"""', 'header'], {}), "('wstring', header)\n", (1658, 1677), False, 'from skyrimtypes import unpack, RefID\n'), ((1706, 1731), 'skyrimtypes.unpack', 'unpack', (['"""wstring"""', 'header'], {}), "('wstring', header)\n", (1712, 1731), False, 'from skyrimtypes import unpack, RefID\n'), ((1770, 1795), 'skyrimtypes.unpack', 'unpack', (['"""wstring"""', 'header'], {}), "('wstring', header)\n", (1776, 1795), False, 'from skyrimtypes import unpack, RefID\n'), ((1908, 1933), 'skyrimtypes.unpack', 'unpack', (['"""float32"""', 'header'], {}), "('float32', header)\n", (1914, 1933), False, 'from skyrimtypes import unpack, RefID\n'), ((1968, 1993), 'skyrimtypes.unpack', 'unpack', (['"""float32"""', 'header'], {}), "('float32', header)\n", (1974, 1993), False, 'from skyrimtypes import unpack, RefID\n'), ((2022, 2048), 'skyrimtypes.unpack', 'unpack', (['"""filetime"""', 'header'], {}), "('filetime', header)\n", (2028, 2048), False, 'from skyrimtypes import unpack, RefID\n'), ((2078, 2102), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'header'], {}), "('uint32', header)\n", (2084, 2102), False, 'from skyrimtypes import unpack, RefID\n'), ((2133, 2157), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'header'], {}), "('uint32', header)\n", (2139, 2157), False, 'from skyrimtypes import unpack, RefID\n'), ((2356, 2434), 'PIL.Image.frombytes', 'Image.frombytes', (['"""RGB"""', "(d['shotWidth'], d['shotHeight'])", "d['screenshotData']"], {}), "('RGB', (d['shotWidth'], d['shotHeight']), d['screenshotData'])\n", (2371, 2434), False, 'from PIL import Image\n'), ((2575, 2593), 'skyrimtypes.unpack', 'unpack', (['"""uint8"""', 'f'], {}), "('uint8', f)\n", (2581, 2593), False, 'from skyrimtypes import unpack, RefID\n'), ((2628, 2647), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (2634, 2647), False, 'from skyrimtypes import unpack, RefID\n'), ((2758, 2781), 'skyrimtypes.unpack', 'unpack', (['"""uint8"""', 'plugin'], {}), "('uint8', plugin)\n", (2764, 2781), False, 'from skyrimtypes import unpack, RefID\n'), ((2996, 3015), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (3002, 3015), False, 'from 
skyrimtypes import unpack, RefID\n'), ((3050, 3069), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (3056, 3069), False, 'from skyrimtypes import unpack, RefID\n'), ((3107, 3126), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (3113, 3126), False, 'from skyrimtypes import unpack, RefID\n'), ((3164, 3183), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (3170, 3183), False, 'from skyrimtypes import unpack, RefID\n'), ((3216, 3235), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (3222, 3235), False, 'from skyrimtypes import unpack, RefID\n'), ((3273, 3292), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (3279, 3292), False, 'from skyrimtypes import unpack, RefID\n'), ((3329, 3348), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (3335, 3348), False, 'from skyrimtypes import unpack, RefID\n'), ((3385, 3404), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (3391, 3404), False, 'from skyrimtypes import unpack, RefID\n'), ((3441, 3460), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (3447, 3460), False, 'from skyrimtypes import unpack, RefID\n'), ((3491, 3510), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (3497, 3510), False, 'from skyrimtypes import unpack, RefID\n'), ((4742, 4761), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (4748, 4761), False, 'from skyrimtypes import unpack, RefID\n'), ((4988, 5007), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (4994, 5007), False, 'from skyrimtypes import unpack, RefID\n'), ((5306, 5325), 'skyrimtypes.unpack', 'unpack', (['"""uint32"""', 'f'], {}), "('uint32', f)\n", (5312, 5325), False, 'from skyrimtypes import unpack, RefID\n'), ((6690, 6740), 'os.path.join', 'osp.join', (['buf.value', '"""My Games"""', '"""Skyrim"""', '"""Saves"""'], {}), "(buf.value, 'My Games', 'Skyrim', 'Saves')\n", (6698, 6740), True, 'import os.path as osp\n'), ((7087, 7106), 'os.listdir', 'os.listdir', (['savedir'], {}), '(savedir)\n', (7097, 7106), False, 'import os\n'), ((1850, 1874), 'skyrimtypes.unpack', 'unpack', (['"""uint16"""', 'header'], {}), "('uint16', header)\n", (1856, 1874), False, 'from skyrimtypes import unpack, RefID\n'), ((2810, 2835), 'skyrimtypes.unpack', 'unpack', (['"""wstring"""', 'plugin'], {}), "('wstring', plugin)\n", (2816, 2835), False, 'from skyrimtypes import unpack, RefID\n'), ((3749, 3772), 'skyrimtypes.unpack', 'unpack', (['"""globalData"""', 'f'], {}), "('globalData', f)\n", (3755, 3772), False, 'from skyrimtypes import unpack, RefID\n'), ((3981, 4004), 'skyrimtypes.unpack', 'unpack', (['"""globalData"""', 'f'], {}), "('globalData', f)\n", (3987, 4004), False, 'from skyrimtypes import unpack, RefID\n'), ((4214, 4237), 'skyrimtypes.unpack', 'unpack', (['"""ChangeForm"""', 'f'], {}), "('ChangeForm', f)\n", (4220, 4237), False, 'from skyrimtypes import unpack, RefID\n'), ((4518, 4541), 'skyrimtypes.unpack', 'unpack', (['"""globalData"""', 'f'], {}), "('globalData', f)\n", (4524, 4541), False, 'from skyrimtypes import unpack, RefID\n'), ((6760, 6779), 'os.path.exists', 'osp.exists', (['savedir'], {}), '(savedir)\n', (6770, 6779), True, 'import os.path as osp\n'), ((6787, 6805), 'os.path.isdir', 'osp.isdir', (['savedir'], {}), '(savedir)\n', (6796, 6805), True, 'import os.path as osp\n')]
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2021, <NAME>
# Copyright (c) 2020, <NAME>
# All rights reserved.
#
# Licensed under the BSD 3-Clause License:
# http://opensource.org/licenses/BSD-3-Clause
#
import os
import pathlib
import shutil
import time
import tkinter as tk
import tkinter.filedialog
import webbrowser
from ..config import Config
from ..err import Text2qtiError
from ..qti import QTI
from ..quiz import Quiz
from .. import version
#from tkinter.ttk import * #possibly not needed
def main():
config = Config()
config.load()
file_name = ''
window = tk.Tk()
window.title('text2CC')
# Bring window to front and put in focus
window.iconify()
window.update()
window.deiconify()
# Window grid setup
current_row = 0
column_count = 4
header_label = tk.Label(
window,
text='text2cc – Create quizzes in Common Cartridge format from Markdown-based plain text',
font=(None, 16),
)
header_label.grid(
row=current_row, column=0, columnspan=column_count, padx=(30, 30),
sticky='nsew',
)
#current_row += 1
# header_link_label = tk.Label(
# window,
# text='github.com/gpoore/text2qti',
# font=(None, 14), fg='blue', cursor='hand2',
# )
# header_link_label.bind('<Button-1>', lambda x: webbrowser.open_new('https://github.com/gpoore/text2qti'))
# header_link_label.grid(
# row=current_row, column=0, columnspan=column_count, padx=(30, 30),
# sticky='nsew',
# )
current_row += 1
version_label = tk.Label(
window,
text=f'Version: {version.__version__}',
)
version_label.grid(
row=current_row, column=0, columnspan=column_count, padx=(30, 30), pady=(0, 30),
sticky='nsew',
)
current_row += 1
file_browser_label = tk.Label(
window,
text='Quiz file:\n(plain text file)',
justify='right',
)
file_browser_label.grid(
row=current_row, column=0, padx=(30, 5), pady=(5, 25),
sticky='nse',
)
last_dir = None
def browse_files():
nonlocal file_name
nonlocal last_dir
if last_dir is None:
initialdir = pathlib.Path('~').expanduser()
else:
initialdir = last_dir
file_name = tkinter.filedialog.askopenfilename(
initialdir=initialdir,
title='Select a quiz file',
filetypes=[('Quiz files', '*.md;*.txt')],
)
if file_name:
if last_dir is None:
last_dir = pathlib.Path(file_name).parent
file_browser_button.config(text=f'"{file_name}"', fg='green')
else:
file_browser_button.config(text=f'<none selected>', fg='red')
file_browser_button = tk.Button(
window,
text='<none selected>',
fg='red',
command=browse_files,
)
file_browser_button.grid(
row=current_row, column=1, columnspan=column_count-1, padx=(0, 30), pady=(5, 25),
sticky='nsew',
)
file_browser_button.bind('<Return>', lambda e: browse_files())
current_row += 1
#Above here set good
# advanced_options_label = tk.Label(
# window,
# text='Advanced options – LaTeX math & executable code',
# justify='right',
# )
# advanced_options_label.grid(
# row=current_row, column=1, columnspan=2, padx=(0, 0), pady=(5, 5),
# sticky='nsw',
# )
# current_row += 1
# latex_url_label = tk.Label(
# window,
# text='LaTeX math rendering URL:\n(for Canvas and similar systems)',
# justify='right',
# )
# latex_url_label.grid(
# row=current_row, column=0, padx=(30, 5), pady=(5, 5),
# sticky='nse',
# )
# latex_url_entry = tk.Entry(window, width=100)
# latex_url_entry.grid(
# row=current_row, column=1, columnspan=column_count-1, padx=(0, 30), pady=(5, 5),
# sticky='nsew',
# )
# if 'latex_render_url' in config:
# latex_url_entry.insert(1, f"{config['latex_render_url']}")
# current_row += 1
# pandoc_exists = bool(shutil.which('pandoc'))
# pandoc_mathml_label = tk.Label(
# window,
# text='Convert LaTeX math to MathML:\n(requires Pandoc; ignores rendering URL)',
# justify='right',
# )
# if not pandoc_exists:
# pandoc_mathml_label['fg'] = 'gray'
# pandoc_mathml_label.grid(
# row=current_row, column=0, padx=(30, 5), pady=(5, 5),
# sticky='nse',
# )
# pandoc_mathml_bool = tk.BooleanVar()
# def pandoc_mathml_command():
# if pandoc_mathml_bool.get():
# latex_url_label['fg'] = 'gray'
# latex_url_entry['fg'] = 'gray'
# else:
# latex_url_label['fg'] = 'black'
# latex_url_entry['fg'] = 'black'
# if pandoc_exists:
# pandoc_mathml_button = tk.Checkbutton(
# window,
# variable=pandoc_mathml_bool,
# command=pandoc_mathml_command,
# )
# pandoc_mathml_bool.set(config['pandoc_mathml'])
# else:
# pandoc_mathml_button = tk.Checkbutton(
# window,
# state=tk.DISABLED,
# )
# pandoc_mathml_button.grid(
# row=current_row, column=1, sticky='w',
# )
# current_row += 1
def run():
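        # Read the selected quiz file, build the Common Cartridge .zip next to it,
        # and report success or failure in the run-summary box below.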
run_message_text.delete(1.0, tk.END)
run_message_text['fg'] = 'gray'
run_message_text.insert(tk.INSERT, 'Starting...')
run_message_text.update()
error_message = None
if not file_name:
error_message = 'Must select a quiz file'
run_message_text.delete(1.0, tk.END)
run_message_text.insert(tk.INSERT, error_message)
run_message_text['fg'] = 'red'
return
#if latex_url_entry.get():
#config['latex_render_url'] = latex_url_entry.get()
#config['pandoc_mathml'] = pandoc_mathml_bool.get()
file_path = pathlib.Path(file_name)
try:
text = file_path.read_text(encoding='utf-8-sig') # Handle BOM for Windows
except FileNotFoundError:
error_message = f'File "{file_path}" does not exist.'
except PermissionError as e:
error_message = f'File "{file_path}" cannot be read due to permission error. Technical details:\n\n{e}'
except UnicodeDecodeError as e:
error_message = f'File "{file_path}" is not encoded in valid UTF-8. Technical details:\n\n{e}'
except Exception as e:
error_message = f'An error occurred in reading the quiz file. Technical details:\n\n{e}'
if error_message:
run_message_text.delete(1.0, tk.END)
run_message_text.insert(tk.INSERT, error_message)
run_message_text['fg'] = 'red'
return
cwd = pathlib.Path.cwd()
os.chdir(file_path.parent)
try:
quiz = Quiz(text, config=config, source_name=file_path.as_posix())
qti = QTI(quiz)
qti.save(f'{file_path.stem}.zip')
except Text2qtiError as e:
error_message = f'Quiz creation failed:\n\n{e}'
except Exception as e:
error_message = f'Quiz creation failed unexpectedly. Technical details:\n\n{e}'
finally:
os.chdir(cwd)
if error_message:
run_message_text.delete(1.0, tk.END)
run_message_text.insert(tk.INSERT, error_message)
run_message_text['fg'] = 'red'
else:
run_message_text.delete(1.0, tk.END)
run_message_text.insert(tk.INSERT, f'Created quiz "{file_path.parent.as_posix()}/{file_path.stem}.zip"')
run_message_text['fg'] = 'green'
run_button = tk.Button(
window,
text='RUN',
font=(None, 14),
command=run,
)
run_button.grid(
row=current_row, column=1, columnspan=2, padx=(0, 0), pady=(30, 30),
sticky='nsew',
)
run_button.bind('<Return>', lambda e: run())
current_row += 1
run_message_label = tk.Label(
window,
text='\nRun Summary:\n',
relief='ridge',
width=120,
)
run_message_label.grid(
row=current_row, column=0, columnspan=column_count, padx=(30, 30), pady=(0, 0),
sticky='nsew',
)
current_row += 1
run_message_frame = tk.Frame(
window,
width=120, height=40,
borderwidth=1, relief='sunken', bg='white',
)
run_message_frame.grid(
row=current_row, column=0, columnspan=column_count, padx=(30, 30), pady=(0, 30),
sticky='nsew',
)
run_message_scrollbar = tk.Scrollbar(run_message_frame)
run_message_scrollbar.pack(
side='right', fill='y',
)
run_message_text = tk.Text(
run_message_frame,
width=10, height=10, borderwidth=0, highlightthickness=0,
wrap='word',
yscrollcommand=run_message_scrollbar.set,
)
run_message_text.insert(tk.INSERT, 'Waiting...')
run_message_text['fg'] = 'gray'
run_message_scrollbar.config(command=run_message_text.yview)
run_message_text.pack(
side='left', fill='both', expand=True,
padx=(5, 5), pady=(5, 5),
)
def About_page():
abt = tk.Toplevel(window)
def close_about():
abt.destroy()
abt.grab_set()
abt.title("About")
lbl=tk.Label(
abt,
text ='Text2CC \n \n Copyright © 2021, <NAME> \n Copyright © 2020, <NAME> \n ',
#anchor='e',
justify='center',
)
lbl.grid(
# column=0,
# columnspan=4,
# row=1,
# rowspan=4,
pady=3,
padx=30,
# sticky="NW"
)
vlbl=tk.Label(
abt,
text =f'Version: {version.__version__} \n',
#bg = 'red',
#anchor="CENTER",
#width=max((len(text)))
)
vlbl.grid(
# column=0,
# columnspan=4,
# row=2,
# rowspan=4,
pady=3,
# padx=30,
# sticky="NW"
)
liclbl = tk.Label(
abt,
text='License: BSD 3-Clause',
#font=(None, 14),
fg='blue', cursor='hand2',
)
liclbl.bind('<Button-1>', lambda x: webbrowser.open_new('https://opensource.org/licenses/BSD-3-Clause'))
liclbl.grid(
row=current_row, column=0, columnspan=column_count, padx=(30, 30),
#sticky='nsew',
)
OK_BTN=tk.Button(
abt,
text="OK",
command=close_about,
anchor= 'center',
)
OK_BTN.bind('<Return>', lambda e: close_about())
OK_BTN.grid(
# column=2,
# columnspan=1,
# row=5,
# padx=30,
pady=30,
# sticky="se"
)
OK_BTN.focus_set()
# Gets the requested values of the height and widht.
windowWidth = abt.winfo_reqwidth()
windowHeight = abt.winfo_reqheight()
# Gets both half the screen width/height and window width/height
positionRight = int(abt.winfo_screenwidth()/2 - windowWidth/2)
positionDown = int(abt.winfo_screenheight()/2 - windowHeight/2)
# Positions the window in the center of the page.
abt.geometry("+{}+{}".format(positionRight, positionDown))
#abt.geometry("225x100+{}+{}".format(positionRight, positionDown))
abt.resizable(height= None, width= None)
abt.focus()
#abt.bind('<Return>', close_about)
def open_help():
webbrowser.open_new('https://github.com/dlehman83/text2cc/blob/master/README.md')
menubar = tk.Menu(window)
filemenu = tk.Menu(menubar, tearoff=False)
filemenu.add_command(label="Open",command=browse_files,underline=0)
filemenu.add_command(label="Run",command=run,underline=0)
filemenu.add_command(label="Exit",command=window.quit,underline=1)
menubar.add_cascade(label="File", menu=filemenu,underline=0)
helpmenu = tk.Menu(menubar, tearoff=False)
helpmenu.add_command(label="Help",underline=0,command=open_help)
helpmenu.add_command(label="About",command=About_page,underline=0)
menubar.add_cascade(label="Help", menu=helpmenu,underline=0)
# Gets the requested values of the height and widht.
windowWidth = window.winfo_reqwidth()
windowHeight = window.winfo_reqheight()
# Gets both half the screen width/height and window width/height
positionRight = int(window.winfo_screenwidth()/2 - windowWidth/2)
positionDown = int(window.winfo_screenheight()/2 - windowHeight/2)
# Positions the window in the center of the page.
window.geometry("+{}+{}".format(positionRight, positionDown))
window.config(menu=menubar)
    window.bind('<Return>', lambda e: run())  # bind passes an event argument; run() takes none
window.mainloop()
|
[
"tkinter.Text",
"webbrowser.open_new",
"tkinter.Menu",
"tkinter.Button",
"tkinter.Scrollbar",
"pathlib.Path",
"tkinter.Toplevel",
"os.chdir",
"tkinter.Frame",
"pathlib.Path.cwd",
"tkinter.Label",
"tkinter.Tk"
] |
[((583, 590), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (588, 590), True, 'import tkinter as tk\n'), ((815, 949), 'tkinter.Label', 'tk.Label', (['window'], {'text': '"""text2cc – Create quizzes in Common Cartridge format from Markdown-based plain text"""', 'font': '(None, 16)'}), "(window, text=\n 'text2cc – Create quizzes in Common Cartridge format from Markdown-based plain text'\n , font=(None, 16))\n", (823, 949), True, 'import tkinter as tk\n'), ((1574, 1632), 'tkinter.Label', 'tk.Label', (['window'], {'text': 'f"""Version: {version.__version__}"""'}), "(window, text=f'Version: {version.__version__}')\n", (1582, 1632), True, 'import tkinter as tk\n'), ((1846, 1920), 'tkinter.Label', 'tk.Label', (['window'], {'text': '"""Quiz file:\n(plain text file)"""', 'justify': '"""right"""'}), '(window, text="""Quiz file:\n(plain text file)""", justify=\'right\')\n', (1854, 1920), True, 'import tkinter as tk\n'), ((2796, 2869), 'tkinter.Button', 'tk.Button', (['window'], {'text': '"""<none selected>"""', 'fg': '"""red"""', 'command': 'browse_files'}), "(window, text='<none selected>', fg='red', command=browse_files)\n", (2805, 2869), True, 'import tkinter as tk\n'), ((7803, 7862), 'tkinter.Button', 'tk.Button', (['window'], {'text': '"""RUN"""', 'font': '(None, 14)', 'command': 'run'}), "(window, text='RUN', font=(None, 14), command=run)\n", (7812, 7862), True, 'import tkinter as tk\n'), ((8125, 8193), 'tkinter.Label', 'tk.Label', (['window'], {'text': '"""\nRun Summary:\n"""', 'relief': '"""ridge"""', 'width': '(120)'}), "(window, text='\\nRun Summary:\\n', relief='ridge', width=120)\n", (8133, 8193), True, 'import tkinter as tk\n'), ((8425, 8512), 'tkinter.Frame', 'tk.Frame', (['window'], {'width': '(120)', 'height': '(40)', 'borderwidth': '(1)', 'relief': '"""sunken"""', 'bg': '"""white"""'}), "(window, width=120, height=40, borderwidth=1, relief='sunken', bg=\n 'white')\n", (8433, 8512), True, 'import tkinter as tk\n'), ((8713, 8744), 'tkinter.Scrollbar', 'tk.Scrollbar', (['run_message_frame'], {}), '(run_message_frame)\n', (8725, 8744), True, 'import tkinter as tk\n'), ((8838, 8986), 'tkinter.Text', 'tk.Text', (['run_message_frame'], {'width': '(10)', 'height': '(10)', 'borderwidth': '(0)', 'highlightthickness': '(0)', 'wrap': '"""word"""', 'yscrollcommand': 'run_message_scrollbar.set'}), "(run_message_frame, width=10, height=10, borderwidth=0,\n highlightthickness=0, wrap='word', yscrollcommand=run_message_scrollbar.set\n )\n", (8845, 8986), True, 'import tkinter as tk\n'), ((12063, 12078), 'tkinter.Menu', 'tk.Menu', (['window'], {}), '(window)\n', (12070, 12078), True, 'import tkinter as tk\n'), ((12094, 12125), 'tkinter.Menu', 'tk.Menu', (['menubar'], {'tearoff': '(False)'}), '(menubar, tearoff=False)\n', (12101, 12125), True, 'import tkinter as tk\n'), ((12411, 12442), 'tkinter.Menu', 'tk.Menu', (['menubar'], {'tearoff': '(False)'}), '(menubar, tearoff=False)\n', (12418, 12442), True, 'import tkinter as tk\n'), ((6031, 6054), 'pathlib.Path', 'pathlib.Path', (['file_name'], {}), '(file_name)\n', (6043, 6054), False, 'import pathlib\n'), ((6900, 6918), 'pathlib.Path.cwd', 'pathlib.Path.cwd', ([], {}), '()\n', (6916, 6918), False, 'import pathlib\n'), ((6927, 6953), 'os.chdir', 'os.chdir', (['file_path.parent'], {}), '(file_path.parent)\n', (6935, 6953), False, 'import os\n'), ((9340, 9359), 'tkinter.Toplevel', 'tk.Toplevel', (['window'], {}), '(window)\n', (9351, 9359), True, 'import tkinter as tk\n'), ((9501, 9621), 'tkinter.Label', 'tk.Label', (['abt'], {'text': '"""Text2CC \n \n Copyright 
© 2021, <NAME> \n Copyright © 2020, <NAME> \n """', 'justify': '"""center"""'}), '(abt, text=\n """Text2CC \n \n Copyright © 2021, <NAME> \n Copyright © 2020, <NAME> \n """,\n justify=\'center\')\n', (9509, 9621), True, 'import tkinter as tk\n'), ((9932, 9990), 'tkinter.Label', 'tk.Label', (['abt'], {'text': 'f"""Version: {version.__version__} \n"""'}), "(abt, text=f'Version: {version.__version__} \\n')\n", (9940, 9990), True, 'import tkinter as tk\n'), ((10358, 10428), 'tkinter.Label', 'tk.Label', (['abt'], {'text': '"""License: BSD 3-Clause"""', 'fg': '"""blue"""', 'cursor': '"""hand2"""'}), "(abt, text='License: BSD 3-Clause', fg='blue', cursor='hand2')\n", (10366, 10428), True, 'import tkinter as tk\n'), ((10816, 10879), 'tkinter.Button', 'tk.Button', (['abt'], {'text': '"""OK"""', 'command': 'close_about', 'anchor': '"""center"""'}), "(abt, text='OK', command=close_about, anchor='center')\n", (10825, 10879), True, 'import tkinter as tk\n'), ((11966, 12052), 'webbrowser.open_new', 'webbrowser.open_new', (['"""https://github.com/dlehman83/text2cc/blob/master/README.md"""'], {}), "(\n 'https://github.com/dlehman83/text2cc/blob/master/README.md')\n", (11985, 12052), False, 'import webbrowser\n'), ((7367, 7380), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (7375, 7380), False, 'import os\n'), ((10577, 10644), 'webbrowser.open_new', 'webbrowser.open_new', (['"""https://opensource.org/licenses/BSD-3-Clause"""'], {}), "('https://opensource.org/licenses/BSD-3-Clause')\n", (10596, 10644), False, 'import webbrowser\n'), ((2220, 2237), 'pathlib.Path', 'pathlib.Path', (['"""~"""'], {}), "('~')\n", (2232, 2237), False, 'import pathlib\n'), ((2576, 2599), 'pathlib.Path', 'pathlib.Path', (['file_name'], {}), '(file_name)\n', (2588, 2599), False, 'import pathlib\n')]
|
from flask import Flask, request, jsonify
from services import MongoDBService
app = Flask(__name__)
@app.route("/")
def root():
return "Welcome to Storage Manager!"
@app.route("/health")
def health():
return "ok"
@app.route("/databases", methods=["GET"])
def all_databases():
databases = MongoDBService().list_databases()
return jsonify(databases)
@app.route("/databases", methods=["POST"])
def create_database():
database = request.json.get("database", None)
if database is None:
return {
"status": "error",
"message": "Database name is required!"
}, 400
mongodb = MongoDBService().connect()
try:
mongodb.create_database(database)
except Exception as e:
return {
"status": "error",
"message": "%s" % (e)
}, 500
user = request.json.get("user", None)
if user is None:
return {
"status": "ok",
"database": database
}
username = user.get("username", None)
password = user.get("password", None)
permission = user.get("permission", "readWrite")
if (username is None) or (password is None):
return {
"status": "error",
"message": "Username and password are required to create database with a user!"
}, 400
try:
mongodb.create_user(
database,
{
"username": username,
"password": password,
"permission": permission
}
)
except Exception as e:
return {
"status": "error",
"message": "%s" % (e)
}, 500
return {
"status": "ok",
"database": database,
"username": username
}
@app.route("/databases/<database>", methods=["DELETE"])
def drop_database(database):
try:
MongoDBService().drop_database(database)
except Exception as e:
return {
"status": "error",
"message": "%s" % (e)
}, 500
return {
"status": "ok"
}
@app.route("/users", methods=["POST"])
def create_user():
database = request.json.get("database", None)
user = request.json.get("user", {})
username = user.get("username", None)
password = user.get("password", None)
permission = user.get("permission", "readWrite")
if (database is None) or (username is None) or (password is None):
return {
"status": "error",
"message": "Username, password, database are required to create a user!"
}, 400
try:
MongoDBService().create_user(
database,
{
"username": username,
"password": password,
"permission": permission
}
)
except Exception as e:
return {
"status": "error",
"message": "%s" % (e)
}, 500
return {
"status": "ok",
"database": database,
"username": username
}
@app.route("/users/<username>", methods=["PUT", "PATCH"])
def update_user(username):
database = request.json.get("database", None)
user = request.json.get("user", {})
password = user.get("password", None)
permission = user.get("permission", None)
if database is None:
return {
"status": "error",
"message": "Database name is required to update a user!"
}, 400
if (password is None) and (permission is None):
return {
"status": "error",
"message": "Password or permission is required to update a user!"
}, 400
try:
MongoDBService().update_user(
database,
{
"username": username,
"password": password,
"permission": permission
}
)
except Exception as e:
return {
"status": "error",
"message": "%s" % (e)
}, 500
return {
"status": "ok",
"database": database,
"username": username
}
@app.route("/users/<username>", methods=["DELETE"])
def drop_user(username):
database = request.json.get("database", None)
if database is None:
return {
"status": "error",
"message": "Database name is required to drop a user!"
}, 400
try:
MongoDBService().drop_user(database, username)
except Exception as e:
return {
"status": "error",
"message": "%s" % (e)
}, 500
return {
"status": "ok"
}
if __name__ == "__main__":
app.run()
|
[
"flask.request.json.get",
"flask.jsonify",
"flask.Flask",
"services.MongoDBService"
] |
[((86, 101), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (91, 101), False, 'from flask import Flask, request, jsonify\n'), ((350, 368), 'flask.jsonify', 'jsonify', (['databases'], {}), '(databases)\n', (357, 368), False, 'from flask import Flask, request, jsonify\n'), ((451, 485), 'flask.request.json.get', 'request.json.get', (['"""database"""', 'None'], {}), "('database', None)\n", (467, 485), False, 'from flask import Flask, request, jsonify\n'), ((857, 887), 'flask.request.json.get', 'request.json.get', (['"""user"""', 'None'], {}), "('user', None)\n", (873, 887), False, 'from flask import Flask, request, jsonify\n'), ((2169, 2203), 'flask.request.json.get', 'request.json.get', (['"""database"""', 'None'], {}), "('database', None)\n", (2185, 2203), False, 'from flask import Flask, request, jsonify\n'), ((2215, 2243), 'flask.request.json.get', 'request.json.get', (['"""user"""', '{}'], {}), "('user', {})\n", (2231, 2243), False, 'from flask import Flask, request, jsonify\n'), ((3154, 3188), 'flask.request.json.get', 'request.json.get', (['"""database"""', 'None'], {}), "('database', None)\n", (3170, 3188), False, 'from flask import Flask, request, jsonify\n'), ((3200, 3228), 'flask.request.json.get', 'request.json.get', (['"""user"""', '{}'], {}), "('user', {})\n", (3216, 3228), False, 'from flask import Flask, request, jsonify\n'), ((4215, 4249), 'flask.request.json.get', 'request.json.get', (['"""database"""', 'None'], {}), "('database', None)\n", (4231, 4249), False, 'from flask import Flask, request, jsonify\n'), ((305, 321), 'services.MongoDBService', 'MongoDBService', ([], {}), '()\n', (319, 321), False, 'from services import MongoDBService\n'), ((642, 658), 'services.MongoDBService', 'MongoDBService', ([], {}), '()\n', (656, 658), False, 'from services import MongoDBService\n'), ((1887, 1903), 'services.MongoDBService', 'MongoDBService', ([], {}), '()\n', (1901, 1903), False, 'from services import MongoDBService\n'), ((2619, 2635), 'services.MongoDBService', 'MongoDBService', ([], {}), '()\n', (2633, 2635), False, 'from services import MongoDBService\n'), ((3687, 3703), 'services.MongoDBService', 'MongoDBService', ([], {}), '()\n', (3701, 3703), False, 'from services import MongoDBService\n'), ((4424, 4440), 'services.MongoDBService', 'MongoDBService', ([], {}), '()\n', (4438, 4440), False, 'from services import MongoDBService\n')]
|
#!/usr/bin/env python
# coding: utf-8
# # Registration 101
#
# Image registration is a critical tool in longitudinal monitoring:
#
# - Estimation of local changes
# - Comparison to same animal (less variance)
# - [3R's](https://www.nc3rs.org.uk/the-3rs)
#
#
#
# ## Goal of tutorial:
# - Introduce the concept of aligning images
# - Demonstrate the use of numerical comparison between images
# - Introduce concept of optimisation
# - Registering images within framework (Thesis: <NAME> & <NAME>)
# ## Setting up a responsive environment
# In[1]:
# Please do not edit this code; it is important for choosing a compatible renderer for images
# libraries for viewing images
import sys
sys.path.append("reg101_files")
from image_viewing import horizontal_pane, overlay_RGB, overlay_RGB_cost
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
# numpy as always
import numpy as np
# In[2]:
# this is a way of finding the fastest image display on your platform
print("Using:", matplotlib.get_backend())
gui_env = ["WXAgg", "TKAgg", "QT5Agg", "GTKAgg", "Qt4Agg"]
for gui in gui_env:
try:
print("testing", gui)
matplotlib.use(gui, warn=False, force=True)
from matplotlib import pyplot as plt
break
except:
continue
print("Using:", matplotlib.get_backend())
# ## What does registration actually do?
#
# In the example below there are images from 2 weeks.
#
# The position of the animal in the scanner was different each week.
#
# In[3]:
get_ipython().run_line_magic("matplotlib", "notebook")
# this allows the image to be viewed in the notebook with edge
matplotlib.rcParams["figure.figsize"] = (8, 7)
images = [
mpimg.imread("reg101_files/week2.tiff"),
mpimg.imread("reg101_files/week3.tiff"),
]
horizontal_pane(images) # shows images side by side
# How can we interpret these images?
#
# In in vivo studies, it is still common to use *dynamic histomorphometry*, where markers are given to the mice at different intervals over the course of a study. These are built into the bone matrix and can afterwards be labelled and visualised in histological sections.
#
# 
#
# In the image above, the green stain was used to mark the surface where bone formed, while the red is marking osteoid (newly formed, unmineralised bone). The method is limited purely to the observation of anabolic events: resorption of bone removes material, and therefore there is nothing which can be stained.
#
# Inspired by these histological images we can create a virtual histology image. In these images we will "stain" the change of mineralisation between the two weeks. Both images are grey-scale (single channel), so we can emulate the histology by using colour channels. Here we put the *later* week in *green* and the *former* week in *red*, and both weeks are averaged in the blue channel.
#
# When the images are aligned we see white (R+G+B); when the *former* week is brighter we see *red*, and when the *later* week is brighter we see *green*. This is essentially formation == green and resorption == red. The function below does this for us automatically.
#
# In[4]:
# puts the images into the red (former week) and green (later week) channels of an image
overlay_RGB(images)
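# `overlay_RGB` comes from the accompanying `image_viewing` helper module, which is not
# shown here. Purely as a minimal sketch of the idea (assuming two grayscale arrays of
# equal shape scaled to the range 0-1; the function name is only for illustration):
#
# ```python
# def overlay_rgb_sketch(former, later):
#     # red = former week, green = later week, blue = average of both weeks
#     rgb = np.dstack([former, later, 0.5 * (former + later)])
#     plt.imshow(rgb)
#     plt.show()
# ```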
# These images are clearly not well aligned. We will now discuss how to align the images.
# ## Overlaying an image
#
# Registration involves finding the best set of transformation parameters for overlaying an image.
#
# Run the following cell and try to find the best dx, dy for aligning the images.
# In[5]:
# manual transform
from ipywidgets import interact, interactive, fixed, interact_manual
import ipywidgets as widgets
from IPython.display import clear_output, display
from scipy.ndimage import affine_transform
def move_overlay(image1, image2, dx, dy):
T = np.identity(3)
T[0, 2] = dy
T[1, 2] = dx
images = [image1, affine_transform(image2, T)]
overlay_RGB(images)
clear_output(True)
# In[6]:
interactive(
move_overlay,
image1=fixed(images[0]),
image2=fixed(images[1]),
dx=(-50, 50, 0.25),
dy=(-50, 50, 0.25),
)
# In[7]:
class_x = [-4.5, -6.5, -5.0, -4.25, -4, -5, -4.7]
class_y = [-18.5, -22, -20.5, -20, -19, -19, -19.5]
print("Class variance in X: ", np.mean(class_x))
print("Class variance in Y: ", np.mean(class_y))
# ## Cost functions
#
# We have now demonstrated the use of a powerful neural network (you) for finding the best fit. In reality the computer is not aware of what the images look like. The registration algorithm needs to numerically determine the goodness of fit; it can then optimize until the correct parameters are found.
#
# Imagine we have two images X and Y. The least squared difference would be as follows:
#
# \begin{equation*}
# C^{LS} = \sum \left( X - Y \right)^2
# \end{equation*}
#
# Where this is the pixel-wise sum of both images. In Python it looks like this:
#
#
# ```python
# def least_squared(X,Y):
# delta = X-Y
# square_delta = delta**2
# return np.sum(square_delta)
#
# ```
#
# Another measure of similarity is the correlation function:
# \begin{equation*}
# C^{cor} = \frac{\sum \left( X \cdot Y \right) }{\sqrt{\sum X^2}.\sqrt{\sum Y^2}}
# \end{equation*}
#
# In python it looks like this:
# ```python
# def correlation(Image1,Image2):
# corr = np.corrcoef(Image1,y=Image2)
# return(corr[0,1])
# ```
#
# ### Exercise 2:
# Align the images again, this time try and use the cost function to determine your next move.
#
# In[8]:
# least squared difference cost
def least_squared(Image1, Image2):
delta = Image1 - Image2
return np.sum(delta ** 2)
# In[9]:
def correlation(Image1, Image2):
return -1 * np.corrcoef(Image1.flatten(), y=Image2.flatten())[0, 1]
# In[10]:
c_funs = {"correlation": correlation, "least_squared": least_squared}
# In[11]:
cost = {c: [] for c in c_funs}
cost_function_names = [n for n in c_funs]
def move_overlay(image1, image2, dx, dy, cost_history, cost_function, cfuncs):
T = np.identity(3)
T[0, 2] = dy
T[1, 2] = dx
images = [image1, affine_transform(image2, T)]
overlay_RGB_cost(images, cost_history, cost_function, cfuncs, dx, dy)
clear_output(True)
# In[12]:
interactive(
move_overlay,
image1=fixed(images[0]),
image2=fixed(images[1]),
dx=(-60, 60, 0.5),
dy=(-60, 60, 0.5),
cost_history=fixed(cost),
cost_function=cost_function_names,
cfuncs=fixed(c_funs),
)
# In[13]:
comutational_cost = {"correlation": 0, "least_squared": 0, "abs_squared_difference": 0}
for function, v in cost.items():
for pix in v:
comutational_cost[function] += pix[3]
print("The total pixel cost was:", comutational_cost)
# This is quite an intensive task. For each movement you made, every pixel was evaluated to determine the cost function. Ideally we should align the images closely so that they need fewer transformations. This can be done in two ways:
#
# 1. An initial guess
#
# 2. A multi-resolution scheme
#
# A good initial guess is a holy grail in image registration; it could involve calculating principal axes and centres of mass. However, during fracture healing the changes in the bone are large, and the formation of new material can cause principal axes to flip or swap.
#
# A multi-resolution scheme on the other hand reduces the problem size, progressively increasing it until the images are compared at the native resolution. This scheme is "easy" to implement and has an inherent strength over the naive approach.
#
# Images contain different frequencies. In general, flat areas are low frequency while edges (and noise) are high frequency. The lower resolution images are predominantly low frequency information and have comparatively less noise. A pyramid approach is effectively using a smoothed cost function, avoiding local minima, while the progressive increase of resolution adds high frequency corrections to the solution.
#
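# A resolution pyramid itself is just a stack of progressively downsampled copies of the
# image. As a rough sketch (using `scipy.ndimage.zoom`, which is also used in the
# interactive example below; the helper name is only for illustration):
#
# ```python
# from scipy.ndimage import zoom
#
# def build_pyramid(image, levels=4):
#     # level 0 is the native resolution; each further level halves the size
#     return [zoom(image, 1 / 2 ** k, order=0) for k in range(levels)]
# ```
#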
# In[14]:
def split_frequency(frequency, image):
f = np.fft.fft2(image) # forward fft
fshift = np.fft.fftshift(f) # shift to center frequencies
hff = np.copy(fshift)
origin = np.array(hff.shape) * 0.5
y, x = np.ogrid[-origin[0] : origin[0], -origin[1] : origin[1]]
mask = (
x * x + y * y <= frequency * frequency
) # mask for high and low pass filtering
hff[mask] = 0 # high pass filter
lff = np.copy(fshift)
lff[mask != 1] = 0 # low pass filter
hff_ishift = np.fft.ifftshift(hff) # inverse shift
lff_ishift = np.fft.ifftshift(lff) # inverse shift
lff_back = np.fft.ifft2(lff_ishift) # inverse fft
hff_back = np.fft.ifft2(hff_ishift) # inverse fft
hff_back = np.abs(hff_back)
lff_back = np.abs(lff_back)
# contrast adjustment for viewing image
hff_back /= np.percentile(hff_back, 99)
hff_back[hff_back > 1] = 1.0
horizontal_pane([image, (lff_back), hff_back])
# In[15]:
interactive(split_frequency, frequency=(0, 204, 1), image=fixed(images[0]))
# ## Pyramid registration
#
# - Frequencies are dependent on the image resolution
# - The easiest way to create a pyramid is to build a stack of rescaled images
# In[16]:
# manual transform
from scipy.ndimage.interpolation import zoom
cost_pyramid = {c: [] for c in c_funs}
def move_overlay_pyramid(
image1, image2, dx, dy, cost_history, cost_function, cfuncs, level, history
):
level = int(level)
level = 2 ** (level - 1)
if level != 1:
i1 = zoom(image1, 1 / level, order=0)
i2 = zoom(image2, 1 / level, order=0)
else:
i1 = image1
i2 = image2
T = np.identity(3)
T[0, 2] = dy * 1 / level
T[1, 2] = dx * 1 / level
if level != 1:
images = [i1, affine_transform(i2, T, order=0)]
else:
images = [i1, affine_transform(i2, T)]
if len(cost_history) > 0:
overlay_RGB_cost(images, cost_history, cost_function, cfuncs, dx, dy, history)
else:
print("Move around to make some history")
clear_output(True)
# In[17]:
interactive(
move_overlay_pyramid,
image1=fixed(images[0]),
image2=fixed(images[1]),
dx=(-60, 60, 0.5),
dy=(-60, 60, 0.5),
cost_history=fixed(cost_pyramid),
cost_function=cost_function_names,
cfuncs=fixed(c_funs),
level=[5, 4, 3, 2, 1],
history=[None, 10, 20, 100],
)
# In[18]:
comutational_cost_pyramid = {"correlation": 0, "least_squared": 0}
for function, v in cost_pyramid.items():
for pix in v:
comutational_cost_pyramid[function] += pix[3]
print("The total pixel cost was:", comutational_cost_pyramid)
# # Automated registration
#
# This is clearly a difficult problem to do by hand. As we have these cost functions, it should be easy to minimise them. Actually, there is a whole field of mathematics and computer science devoted to this: optimisation. It is not really "easy" to do.
#
# There are several courses at ETH which can give actually useful information on this:
# - 401-0647-00L Introduction to Mathematical Optimization
# - 227-0707-00L Optimization Methods for Engineers (good for Graeme)
# - 261-5110-00L Optimization for Data Science (Machine learning)
# - 401-3904-00L Convex Optimization (Applied to specific problems where the only minima is the global minimum)
#
# In the example below the registration is performed using an [Evolutionary Algorithm](https://en.wikipedia.org/wiki/Evolutionary_algorithm). Simply put, a random population of initial guesses is used, the cost function is evaluated for each, and only the fittest X% are mated to create a new set of guesses.
#
# To avoid being captured in local minima, *mutations* are introduced, which add new parameter values to the increasingly homogeneous population.
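#
# SciPy's `differential_evolution` (used in the next cell) handles the population
# bookkeeping internally. Purely to illustrate the concept, one generation of a toy
# evolutionary search could be sketched as follows (the helper name, the kept fraction
# and the mutation scale are made up for illustration):
#
# ```python
# def one_generation(population, cost, keep=0.25, mutation_scale=2.0):
#     # rank candidate parameter vectors by cost and keep the fittest fraction
#     ranked = sorted(population, key=cost)
#     parents = ranked[: max(2, int(len(ranked) * keep))]
#     children = []
#     while len(parents) + len(children) < len(population):
#         a, b = np.random.choice(len(parents), 2, replace=False)
#         child = 0.5 * (parents[a] + parents[b])                 # crossover
#         child += np.random.normal(0, mutation_scale, child.shape)   # mutation
#         children.append(child)
#     return parents + children
# ```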
# In[19]:
# optimizer with least squared
from scipy.optimize import minimize as ls
from scipy.optimize import differential_evolution
def correlation(x, i1, i2, path):
x = np.array(x)
T = np.identity(3)
T[0, 2] = x[1]
T[1, 2] = x[0]
images = [i1.flatten(), affine_transform(i2, T).flatten()]
delta = -1 * np.corrcoef(images[0], y=images[1])[0, 1]
path.append((x[0], x[1], delta))
return delta
def least_squared(x, i1, i2, path):
x = np.array(x)
T = np.identity(3)
T[0, 2] = x[1]
T[1, 2] = x[0]
images = [i1, affine_transform(i2, T)]
delta = np.sum((images[0] - images[1]) ** 2)
path.append((x[0], x[1], delta))
return delta
path_corralation = []
optimum_c = differential_evolution(
correlation,
[(-60, 30), (-60, 30)],
args=(images[0], images[1], path_corralation),
tol=0.00125,
) # ,method='Powell',options={'eps':0.5})
path_least_squared = []
optimum_ls = differential_evolution(
least_squared,
[(-60, 30), (-60, 30)],
args=(images[0], images[1], path_least_squared),
tol=0.00125,
) # ,method='Powell',options={'eps':0.5})
# We have now searched for the best transform using both cost functions. What do they look like?
# In[20]:
# Using the correlation cost function
x = optimum_c["x"]
T = np.identity(3)
T[0, 2] = x[1]
T[1, 2] = x[0]
overlay_RGB([images[0], affine_transform(images[1], T)])
# In[21]:
# Using the Least squared cost function
x = optimum_ls["x"]
T = np.identity(3)
T[0, 2] = x[1]
T[1, 2] = x[0]
overlay_RGB([images[0], affine_transform(images[1], T)])
# In[22]:
# difference in the images
diff = []
x = optimum_ls["x"]
T = np.identity(3)
T[0, 2] = x[1]
T[1, 2] = x[0]
diff.append(affine_transform(images[1], T))
x = optimum_c["x"]
T = np.identity(3)
T[0, 2] = x[1]
T[1, 2] = x[0]
diff.append(affine_transform(images[1], T))
overlay_RGB(diff)
# In[23]:
print("Difference in the transformation", optimum_ls["x"] - optimum_c["x"])
# In the cell below the cost functions are plotted. This can be done because the optimization we used searched an entire range of parameters.
#
# In[24]:
p_c = np.array(path_corralation)
p_ls = np.array(path_least_squared)
import matplotlib.tri as mtri
fig = plt.figure()
matplotlib.rcParams["figure.figsize"] = (9, 10)
"""
ax = fig.add_subplot(111, projection='3d')
ax.plot_trisurf(p_c[:,0],p_c[:,1],p_c[:,2],cmap=plt.cm.jet)
ax.set_title("Correlation")
ax.set_xlabel("dx")
ax.set_ylabel("dy")
ax.set_zlabel("cost (-)")
"""
ax = fig.add_subplot(111, projection="3d")
ax.set_title("Least Squared")
ax.set_xlabel("dx")
ax.set_ylabel("dy")
ax.set_zlabel("cost (-)")
ax.plot_trisurf(p_ls[:, 0], p_ls[:, 1], p_ls[:, 2], cmap=plt.cm.jet)
# # Conclusion to part 1
#
# - Image registration is not black magic
# - It is repetitive but simple maths
# - Algorithms are fallible:
# - Local minima in solution
# - Orientation of images
# -  Quality of images
# - Understanding your chosen cost function is crucial for:
# - Choosing an algorithm
# - Knowing if your data is sufficient
|
[
"numpy.sum",
"numpy.abs",
"matplotlib.pyplot.figure",
"numpy.mean",
"ipywidgets.fixed",
"numpy.fft.ifft2",
"matplotlib.get_backend",
"sys.path.append",
"image_viewing.overlay_RGB",
"numpy.fft.ifftshift",
"numpy.copy",
"numpy.identity",
"image_viewing.horizontal_pane",
"scipy.ndimage.interpolation.zoom",
"scipy.ndimage.affine_transform",
"matplotlib.image.imread",
"numpy.corrcoef",
"scipy.optimize.differential_evolution",
"numpy.percentile",
"numpy.fft.fftshift",
"matplotlib.use",
"numpy.fft.fft2",
"IPython.display.clear_output",
"numpy.array",
"image_viewing.overlay_RGB_cost"
] |
[((688, 719), 'sys.path.append', 'sys.path.append', (['"""reg101_files"""'], {}), "('reg101_files')\n", (703, 719), False, 'import sys\n'), ((1790, 1813), 'image_viewing.horizontal_pane', 'horizontal_pane', (['images'], {}), '(images)\n', (1805, 1813), False, 'from image_viewing import horizontal_pane, overlay_RGB, overlay_RGB_cost\n'), ((3229, 3248), 'image_viewing.overlay_RGB', 'overlay_RGB', (['images'], {}), '(images)\n', (3240, 3248), False, 'from image_viewing import horizontal_pane, overlay_RGB, overlay_RGB_cost\n'), ((12472, 12595), 'scipy.optimize.differential_evolution', 'differential_evolution', (['correlation', '[(-60, 30), (-60, 30)]'], {'args': '(images[0], images[1], path_corralation)', 'tol': '(0.00125)'}), '(correlation, [(-60, 30), (-60, 30)], args=(images[0],\n images[1], path_corralation), tol=0.00125)\n', (12494, 12595), False, 'from scipy.optimize import differential_evolution\n'), ((12690, 12818), 'scipy.optimize.differential_evolution', 'differential_evolution', (['least_squared', '[(-60, 30), (-60, 30)]'], {'args': '(images[0], images[1], path_least_squared)', 'tol': '(0.00125)'}), '(least_squared, [(-60, 30), (-60, 30)], args=(images[\n 0], images[1], path_least_squared), tol=0.00125)\n', (12712, 12818), False, 'from scipy.optimize import differential_evolution\n'), ((13048, 13062), 'numpy.identity', 'np.identity', (['(3)'], {}), '(3)\n', (13059, 13062), True, 'import numpy as np\n'), ((13228, 13242), 'numpy.identity', 'np.identity', (['(3)'], {}), '(3)\n', (13239, 13242), True, 'import numpy as np\n'), ((13405, 13419), 'numpy.identity', 'np.identity', (['(3)'], {}), '(3)\n', (13416, 13419), True, 'import numpy as np\n'), ((13517, 13531), 'numpy.identity', 'np.identity', (['(3)'], {}), '(3)\n', (13528, 13531), True, 'import numpy as np\n'), ((13606, 13623), 'image_viewing.overlay_RGB', 'overlay_RGB', (['diff'], {}), '(diff)\n', (13617, 13623), False, 'from image_viewing import horizontal_pane, overlay_RGB, overlay_RGB_cost\n'), ((13873, 13899), 'numpy.array', 'np.array', (['path_corralation'], {}), '(path_corralation)\n', (13881, 13899), True, 'import numpy as np\n'), ((13907, 13935), 'numpy.array', 'np.array', (['path_least_squared'], {}), '(path_least_squared)\n', (13915, 13935), True, 'import numpy as np\n'), ((13975, 13987), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (13985, 13987), True, 'from matplotlib import pyplot as plt\n'), ((1013, 1037), 'matplotlib.get_backend', 'matplotlib.get_backend', ([], {}), '()\n', (1035, 1037), False, 'import matplotlib\n'), ((1314, 1338), 'matplotlib.get_backend', 'matplotlib.get_backend', ([], {}), '()\n', (1336, 1338), False, 'import matplotlib\n'), ((1701, 1740), 'matplotlib.image.imread', 'mpimg.imread', (['"""reg101_files/week2.tiff"""'], {}), "('reg101_files/week2.tiff')\n", (1713, 1740), True, 'import matplotlib.image as mpimg\n'), ((1746, 1785), 'matplotlib.image.imread', 'mpimg.imread', (['"""reg101_files/week3.tiff"""'], {}), "('reg101_files/week3.tiff')\n", (1758, 1785), True, 'import matplotlib.image as mpimg\n'), ((3827, 3841), 'numpy.identity', 'np.identity', (['(3)'], {}), '(3)\n', (3838, 3841), True, 'import numpy as np\n'), ((3931, 3950), 'image_viewing.overlay_RGB', 'overlay_RGB', (['images'], {}), '(images)\n', (3942, 3950), False, 'from image_viewing import horizontal_pane, overlay_RGB, overlay_RGB_cost\n'), ((3955, 3973), 'IPython.display.clear_output', 'clear_output', (['(True)'], {}), '(True)\n', (3967, 3973), False, 'from IPython.display import clear_output, display\n'), ((4273, 4289), 
'numpy.mean', 'np.mean', (['class_x'], {}), '(class_x)\n', (4280, 4289), True, 'import numpy as np\n'), ((4322, 4338), 'numpy.mean', 'np.mean', (['class_y'], {}), '(class_y)\n', (4329, 4338), True, 'import numpy as np\n'), ((5618, 5636), 'numpy.sum', 'np.sum', (['(delta ** 2)'], {}), '(delta ** 2)\n', (5624, 5636), True, 'import numpy as np\n'), ((6015, 6029), 'numpy.identity', 'np.identity', (['(3)'], {}), '(3)\n', (6026, 6029), True, 'import numpy as np\n'), ((6119, 6188), 'image_viewing.overlay_RGB_cost', 'overlay_RGB_cost', (['images', 'cost_history', 'cost_function', 'cfuncs', 'dx', 'dy'], {}), '(images, cost_history, cost_function, cfuncs, dx, dy)\n', (6135, 6188), False, 'from image_viewing import horizontal_pane, overlay_RGB, overlay_RGB_cost\n'), ((6193, 6211), 'IPython.display.clear_output', 'clear_output', (['(True)'], {}), '(True)\n', (6205, 6211), False, 'from IPython.display import clear_output, display\n'), ((8007, 8025), 'numpy.fft.fft2', 'np.fft.fft2', (['image'], {}), '(image)\n', (8018, 8025), True, 'import numpy as np\n'), ((8054, 8072), 'numpy.fft.fftshift', 'np.fft.fftshift', (['f'], {}), '(f)\n', (8069, 8072), True, 'import numpy as np\n'), ((8114, 8129), 'numpy.copy', 'np.copy', (['fshift'], {}), '(fshift)\n', (8121, 8129), True, 'import numpy as np\n'), ((8391, 8406), 'numpy.copy', 'np.copy', (['fshift'], {}), '(fshift)\n', (8398, 8406), True, 'import numpy as np\n'), ((8466, 8487), 'numpy.fft.ifftshift', 'np.fft.ifftshift', (['hff'], {}), '(hff)\n', (8482, 8487), True, 'import numpy as np\n'), ((8522, 8543), 'numpy.fft.ifftshift', 'np.fft.ifftshift', (['lff'], {}), '(lff)\n', (8538, 8543), True, 'import numpy as np\n'), ((8576, 8600), 'numpy.fft.ifft2', 'np.fft.ifft2', (['lff_ishift'], {}), '(lff_ishift)\n', (8588, 8600), True, 'import numpy as np\n'), ((8631, 8655), 'numpy.fft.ifft2', 'np.fft.ifft2', (['hff_ishift'], {}), '(hff_ishift)\n', (8643, 8655), True, 'import numpy as np\n'), ((8686, 8702), 'numpy.abs', 'np.abs', (['hff_back'], {}), '(hff_back)\n', (8692, 8702), True, 'import numpy as np\n'), ((8718, 8734), 'numpy.abs', 'np.abs', (['lff_back'], {}), '(lff_back)\n', (8724, 8734), True, 'import numpy as np\n'), ((8795, 8822), 'numpy.percentile', 'np.percentile', (['hff_back', '(99)'], {}), '(hff_back, 99)\n', (8808, 8822), True, 'import numpy as np\n'), ((8861, 8905), 'image_viewing.horizontal_pane', 'horizontal_pane', (['[image, lff_back, hff_back]'], {}), '([image, lff_back, hff_back])\n', (8876, 8905), False, 'from image_viewing import horizontal_pane, overlay_RGB, overlay_RGB_cost\n'), ((9609, 9623), 'numpy.identity', 'np.identity', (['(3)'], {}), '(3)\n', (9620, 9623), True, 'import numpy as np\n'), ((9995, 10013), 'IPython.display.clear_output', 'clear_output', (['(True)'], {}), '(True)\n', (10007, 10013), False, 'from IPython.display import clear_output, display\n'), ((11922, 11933), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (11930, 11933), True, 'import numpy as np\n'), ((11942, 11956), 'numpy.identity', 'np.identity', (['(3)'], {}), '(3)\n', (11953, 11956), True, 'import numpy as np\n'), ((12217, 12228), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (12225, 12228), True, 'import numpy as np\n'), ((12237, 12251), 'numpy.identity', 'np.identity', (['(3)'], {}), '(3)\n', (12248, 12251), True, 'import numpy as np\n'), ((12345, 12381), 'numpy.sum', 'np.sum', (['((images[0] - images[1]) ** 2)'], {}), '((images[0] - images[1]) ** 2)\n', (12351, 12381), True, 'import numpy as np\n'), ((13462, 13492), 'scipy.ndimage.affine_transform', 
'affine_transform', (['images[1]', 'T'], {}), '(images[1], T)\n', (13478, 13492), False, 'from scipy.ndimage import affine_transform\n'), ((13574, 13604), 'scipy.ndimage.affine_transform', 'affine_transform', (['images[1]', 'T'], {}), '(images[1], T)\n', (13590, 13604), False, 'from scipy.ndimage import affine_transform\n'), ((1165, 1208), 'matplotlib.use', 'matplotlib.use', (['gui'], {'warn': '(False)', 'force': '(True)'}), '(gui, warn=False, force=True)\n', (1179, 1208), False, 'import matplotlib\n'), ((3898, 3925), 'scipy.ndimage.affine_transform', 'affine_transform', (['image2', 'T'], {}), '(image2, T)\n', (3914, 3925), False, 'from scipy.ndimage import affine_transform\n'), ((4029, 4045), 'ipywidgets.fixed', 'fixed', (['images[0]'], {}), '(images[0])\n', (4034, 4045), False, 'from ipywidgets import interact, interactive, fixed, interact_manual\n'), ((4058, 4074), 'ipywidgets.fixed', 'fixed', (['images[1]'], {}), '(images[1])\n', (4063, 4074), False, 'from ipywidgets import interact, interactive, fixed, interact_manual\n'), ((6086, 6113), 'scipy.ndimage.affine_transform', 'affine_transform', (['image2', 'T'], {}), '(image2, T)\n', (6102, 6113), False, 'from scipy.ndimage import affine_transform\n'), ((6268, 6284), 'ipywidgets.fixed', 'fixed', (['images[0]'], {}), '(images[0])\n', (6273, 6284), False, 'from ipywidgets import interact, interactive, fixed, interact_manual\n'), ((6297, 6313), 'ipywidgets.fixed', 'fixed', (['images[1]'], {}), '(images[1])\n', (6302, 6313), False, 'from ipywidgets import interact, interactive, fixed, interact_manual\n'), ((6378, 6389), 'ipywidgets.fixed', 'fixed', (['cost'], {}), '(cost)\n', (6383, 6389), False, 'from ipywidgets import interact, interactive, fixed, interact_manual\n'), ((6441, 6454), 'ipywidgets.fixed', 'fixed', (['c_funs'], {}), '(c_funs)\n', (6446, 6454), False, 'from ipywidgets import interact, interactive, fixed, interact_manual\n'), ((8143, 8162), 'numpy.array', 'np.array', (['hff.shape'], {}), '(hff.shape)\n', (8151, 8162), True, 'import numpy as np\n'), ((8980, 8996), 'ipywidgets.fixed', 'fixed', (['images[0]'], {}), '(images[0])\n', (8985, 8996), False, 'from ipywidgets import interact, interactive, fixed, interact_manual\n'), ((9472, 9504), 'scipy.ndimage.interpolation.zoom', 'zoom', (['image1', '(1 / level)'], {'order': '(0)'}), '(image1, 1 / level, order=0)\n', (9476, 9504), False, 'from scipy.ndimage.interpolation import zoom\n'), ((9518, 9550), 'scipy.ndimage.interpolation.zoom', 'zoom', (['image2', '(1 / level)'], {'order': '(0)'}), '(image2, 1 / level, order=0)\n', (9522, 9550), False, 'from scipy.ndimage.interpolation import zoom\n'), ((9852, 9930), 'image_viewing.overlay_RGB_cost', 'overlay_RGB_cost', (['images', 'cost_history', 'cost_function', 'cfuncs', 'dx', 'dy', 'history'], {}), '(images, cost_history, cost_function, cfuncs, dx, dy, history)\n', (9868, 9930), False, 'from image_viewing import horizontal_pane, overlay_RGB, overlay_RGB_cost\n'), ((10078, 10094), 'ipywidgets.fixed', 'fixed', (['images[0]'], {}), '(images[0])\n', (10083, 10094), False, 'from ipywidgets import interact, interactive, fixed, interact_manual\n'), ((10107, 10123), 'ipywidgets.fixed', 'fixed', (['images[1]'], {}), '(images[1])\n', (10112, 10123), False, 'from ipywidgets import interact, interactive, fixed, interact_manual\n'), ((10188, 10207), 'ipywidgets.fixed', 'fixed', (['cost_pyramid'], {}), '(cost_pyramid)\n', (10193, 10207), False, 'from ipywidgets import interact, interactive, fixed, interact_manual\n'), ((10259, 10272), 
'ipywidgets.fixed', 'fixed', (['c_funs'], {}), '(c_funs)\n', (10264, 10272), False, 'from ipywidgets import interact, interactive, fixed, interact_manual\n'), ((12308, 12331), 'scipy.ndimage.affine_transform', 'affine_transform', (['i2', 'T'], {}), '(i2, T)\n', (12324, 12331), False, 'from scipy.ndimage import affine_transform\n'), ((13117, 13147), 'scipy.ndimage.affine_transform', 'affine_transform', (['images[1]', 'T'], {}), '(images[1], T)\n', (13133, 13147), False, 'from scipy.ndimage import affine_transform\n'), ((13297, 13327), 'scipy.ndimage.affine_transform', 'affine_transform', (['images[1]', 'T'], {}), '(images[1], T)\n', (13313, 13327), False, 'from scipy.ndimage import affine_transform\n'), ((9723, 9755), 'scipy.ndimage.affine_transform', 'affine_transform', (['i2', 'T'], {'order': '(0)'}), '(i2, T, order=0)\n', (9739, 9755), False, 'from scipy.ndimage import affine_transform\n'), ((9789, 9812), 'scipy.ndimage.affine_transform', 'affine_transform', (['i2', 'T'], {}), '(i2, T)\n', (9805, 9812), False, 'from scipy.ndimage import affine_transform\n'), ((12075, 12110), 'numpy.corrcoef', 'np.corrcoef', (['images[0]'], {'y': 'images[1]'}), '(images[0], y=images[1])\n', (12086, 12110), True, 'import numpy as np\n'), ((12023, 12046), 'scipy.ndimage.affine_transform', 'affine_transform', (['i2', 'T'], {}), '(i2, T)\n', (12039, 12046), False, 'from scipy.ndimage import affine_transform\n')]
|
# Add your Python code here. E.g.
#radio 1
from microbit import *
import radio
radio.on()
# any channel from 0 to 83 can be used for privacy.
radio.config(channel=5)
while True:
if button_a.was_pressed():
radio.send('HAPPY')
sleep(200)
elif button_b.was_pressed():
radio.send('SAD')
sleep(200)
|
[
"radio.config",
"radio.send",
"radio.on"
] |
[((80, 90), 'radio.on', 'radio.on', ([], {}), '()\n', (88, 90), False, 'import radio\n'), ((145, 168), 'radio.config', 'radio.config', ([], {'channel': '(5)'}), '(channel=5)\n', (157, 168), False, 'import radio\n'), ((221, 240), 'radio.send', 'radio.send', (['"""HAPPY"""'], {}), "('HAPPY')\n", (231, 240), False, 'import radio\n'), ((301, 318), 'radio.send', 'radio.send', (['"""SAD"""'], {}), "('SAD')\n", (311, 318), False, 'import radio\n')]
|
from SciDataTool.Functions import AxisError
from SciDataTool.Classes.Norm_vector import Norm_vector
def get_axis_periodic(self, Nper, is_aper=False):
"""Returns the vector 'axis' taking symmetries into account.
Parameters
----------
self: DataLinspace
a DataLinspace object
Nper: int
number of periods
    is_aper: bool
return values on a semi period (only for antiperiodic signals)
Returns
-------
New_axis: DataLinspace
Axis with requested (anti-)periodicities
"""
# Dynamic import to avoid loop
module = __import__("SciDataTool.Classes.DataLinspace", fromlist=["DataLinspace"])
DataLinspace = getattr(module, "DataLinspace")
try:
# Reduce axis to the given periodicity
Nper = Nper * 2 if is_aper else Nper
values = self.get_values()
N = self.get_length()
if N % Nper != 0:
raise AxisError("length of axis is not divisible by the number of periods")
values_per = values[: int(N / Nper)]
for norm in self.normalizations.values():
if isinstance(norm, Norm_vector):
norm.vector = norm.vector[: int(N / Nper)]
if is_aper:
sym = "antiperiod"
else:
sym = "period"
if Nper == 1 and sym == "period":
symmetries = dict()
else:
symmetries = {sym: Nper}
New_axis = DataLinspace(
initial=self.initial,
final=values_per[-1],
number=int(N / Nper),
include_endpoint=True,
name=self.name,
unit=self.unit,
symmetries=symmetries,
normalizations=self.normalizations.copy(),
is_components=self.is_components,
symbol=self.symbol,
)
except AxisError:
# Periodicity cannot be applied, return full axis
New_axis = self.copy()
return New_axis
|
[
"SciDataTool.Functions.AxisError"
] |
[((931, 1000), 'SciDataTool.Functions.AxisError', 'AxisError', (['"""length of axis is not divisible by the number of periods"""'], {}), "('length of axis is not divisible by the number of periods')\n", (940, 1000), False, 'from SciDataTool.Functions import AxisError\n')]
|
# ---
# jupyter:
# jupytext:
# cell_metadata_filter: -all
# comment_magics: true
# formats: ipynb,py:percent
# text_representation:
# extension: .py
# format_name: percent
# format_version: '1.3'
# jupytext_version: 1.13.8
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# %%
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from datetime import datetime as dt
from tqdm.notebook import tqdm
import os, sys
from mapping_parenting_tech import PROJECT_DIR, logging
INPUT_DIR = PROJECT_DIR / "inputs/data/play_store"
OUTPUT_DIR = PROJECT_DIR / "outputs/data"
# %%
chunk_size = 10 ** 4
file = INPUT_DIR / "Google-Playstore.csv"
file_size = os.path.getsize(file)
start = dt.now()
print(f"Started at {start}")
data_types = {
"App Name": str,
"App Id": str,
"Category": str,
"Rating": float,
"Rating Count": str,
"Installs": str,
"Minimum Installs": str,
"Maximum Installs": str,
"Free": bool,
"Price": float,
"Currency": str,
"Size": str,
"Minimum Android": str,
"Developer Id": str,
"Developer Website": str,
"Developer Email": str,
"Released": str,
"Last Updated": str,
"Content Rating": str,
"Privacy Policy": str,
"Ad Supported": bool,
"In App Purchases": bool,
"Editors Choice": bool,
"Scraped Time": str,
}
date_cols = ["Released", "Last Updated", "Scraped Time"]
data_chunks = []
for data_chunk in pd.read_csv(
file, dtype=data_types, parse_dates=date_cols, chunksize=chunk_size
):
data_chunks.append(data_chunk)
sys.stdout.write(
f"Loaded {len(data_chunks)} chunks of (maybe) {int(file_size / chunk_size)}\r"
)
sys.stdout.flush()
print("\n")
print("Concatenating dataframe")
df = pd.concat(data_chunks, ignore_index=True)
end = dt.now()
duration = end - start
print(f"Completed at {end}\nStep took {duration}s")
# %%
df.shape
# %%
plot_df = df[["App Id", "Released"]]
plot_df["year_released"] = plot_df["Released"].dt.year
plot_df["Month released"] = plot_df["Released"].dt.month
plot_df = plot_df.groupby("year_released", as_index=False).agg(
app_count=("App Id", "count"),
months_in_year=("Month released", lambda x: x.nunique()),
)
plot_df["apps_per_month"] = plot_df["app_count"] / plot_df["months_in_year"]
plot_df["growth"] = plot_df["apps_per_month"].pct_change()
plot_df.plot.bar(x="year_released", y=["growth"], figsize=(10, 8), ylim=(0, 2.2))
print("Average growth: ", plot_df["apps_per_month"].mean())
# %%
plot_df.to_csv(OUTPUT_DIR / "play_store_growth.csv")
# %%
df["Minimum Installs"].fillna(0, inplace=True)
# %%
df = df.astype({"Minimum Installs": "int64"})
# %%
df.columns
# %%
cat_sizes = (
df.groupby("Category")
.agg(cat_size=("Category", "count"))
.sort_values("cat_size", ascending=False)
)
cat_sizes = cat_sizes.assign(
size_pc=(cat_sizes.cat_size / cat_sizes.cat_size.sum()) * 100
)
# %%
cat_sizes
# %%
import altair as alt
# %%
fig = (
alt.Chart(cat_sizes.reset_index(), width=700, height=550)
.mark_bar()
.encode(x="size_pc:Q", y=alt.Y("Category:N", sort="-x"), tooltip="size_pc")
)
fig
# %%
# cat_sizes.reset_index().sort_values("size_pc", ascending=False).to_csv("category_sizes.csv")
# %%
app_installs_df = df.groupby("Minimum Installs").agg(
installCount=("Minimum Installs", "count"), av_score=("Rating", "mean")
)
app_installs_df = app_installs_df[app_installs_df.index != 0]
# %%
base = alt.Chart(app_installs_df.reset_index(), width=700, height=700).encode(
x=alt.X("Minimum Installs", scale=alt.Scale(type="log"))
)
counts = base.mark_point(size=60, filled=True).encode(
alt.Y("installCount", axis=alt.Axis(title="Number of installs"))
)
scores = base.mark_line(stroke="red").encode(
alt.Y("av_score", axis=alt.Axis(title="Average score"))
)
alt.layer(counts, scores).resolve_scale(y="independent")
# %%
fig = (
alt.Chart(app_installs_df.reset_index(), width=700, height=500)
.mark_point()
.encode(
x=alt.X("Minimum Installs", scale=alt.Scale(type="log", base=10)),
y="installCount",
)
)
fig + fig.transform_loess("Minimum Installs", "installCount").mark_line()
# %%
# basic_app_details = df[
# [
# "appId",
# "cluster",
# "minInstalls",
# "score",
# "ratings",
# "reviews",
# "price",
# "free",
# "containsAds",
# "offersIAP",
# ]
# ]
basic_app_details = df[
[
"App Id",
"Category",
"Rating",
"Minimum Installs",
"Free",
"Price",
"Ad Supported",
"In App Purchases",
]
]
# %%
plotter = (
basic_app_details.groupby("Category")
.agg(
cluster_size=("Category", "count"),
free=("Free", "sum"),
IAPs=("In App Purchases", "sum"),
ads=("Ad Supported", "sum"),
)
.reset_index()
)
turn_to_pc = ["free", "ads", "IAPs"]
for i in turn_to_pc:
plotter[f"{i}_pc"] = plotter[i] / plotter.cluster_size
plotter
# %%
data_map = {
"free_pc": "Number of free apps",
"IAPs_pc": "Number of apps with in-app purchases",
"ads_pc": "Number of apps with ads",
}
# %%
mean_free = plotter.free_pc.mean()
mean_IAPs = plotter.IAPs_pc.mean()
mean_ads = plotter.ads_pc.mean()
print(
f" Mean number of free apps:\t{mean_free*100}%\n",
f"Mean number of apps with IAPs:\t{mean_IAPs*100}%\n",
f"Mean number of apps with Ads:\t{mean_ads*100}%",
)
# %%
df = plotter.sort_values("free_pc", ascending=False)
bar_width = round(1 / len(data_map), 2) - 0.1
fig, ax = plt.subplots(figsize=(18, 9))
plt.setp(
ax.get_xticklabels(), rotation=45, horizontalalignment="right", fontsize="medium"
)
plt.grid(visible=True, axis="y", which="major")
ax.set_ylabel("Percentage of apps")
x = np.arange(len(df.Category))
for i, (key, value) in enumerate(data_map.items()):
ax.bar(x + (i * bar_width), df[key], label=data_map[key], width=bar_width)
ax.set_xticks(x + (len(data_map) * bar_width) / len(data_map))
ax.set_xticklabels(df.Category.unique())
fig.legend(loc="upper left")
|
[
"altair.Y",
"pandas.read_csv",
"os.path.getsize",
"matplotlib.pyplot.subplots",
"altair.Axis",
"altair.layer",
"sys.stdout.flush",
"altair.Scale",
"datetime.datetime.now",
"pandas.concat",
"matplotlib.pyplot.grid"
] |
[((750, 771), 'os.path.getsize', 'os.path.getsize', (['file'], {}), '(file)\n', (765, 771), False, 'import os, sys\n'), ((781, 789), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (787, 789), True, 'from datetime import datetime as dt\n'), ((1513, 1598), 'pandas.read_csv', 'pd.read_csv', (['file'], {'dtype': 'data_types', 'parse_dates': 'date_cols', 'chunksize': 'chunk_size'}), '(file, dtype=data_types, parse_dates=date_cols, chunksize=chunk_size\n )\n', (1524, 1598), True, 'import pandas as pd\n'), ((1827, 1868), 'pandas.concat', 'pd.concat', (['data_chunks'], {'ignore_index': '(True)'}), '(data_chunks, ignore_index=True)\n', (1836, 1868), True, 'import pandas as pd\n'), ((1876, 1884), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (1882, 1884), True, 'from datetime import datetime as dt\n'), ((5640, 5669), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(18, 9)'}), '(figsize=(18, 9))\n', (5652, 5669), True, 'import matplotlib.pyplot as plt\n'), ((5768, 5815), 'matplotlib.pyplot.grid', 'plt.grid', ([], {'visible': '(True)', 'axis': '"""y"""', 'which': '"""major"""'}), "(visible=True, axis='y', which='major')\n", (5776, 5815), True, 'import matplotlib.pyplot as plt\n'), ((1756, 1774), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1772, 1774), False, 'import os, sys\n'), ((3151, 3181), 'altair.Y', 'alt.Y', (['"""Category:N"""'], {'sort': '"""-x"""'}), "('Category:N', sort='-x')\n", (3156, 3181), True, 'import altair as alt\n'), ((3894, 3919), 'altair.layer', 'alt.layer', (['counts', 'scores'], {}), '(counts, scores)\n', (3903, 3919), True, 'import altair as alt\n'), ((3744, 3780), 'altair.Axis', 'alt.Axis', ([], {'title': '"""Number of installs"""'}), "(title='Number of installs')\n", (3752, 3780), True, 'import altair as alt\n'), ((3858, 3889), 'altair.Axis', 'alt.Axis', ([], {'title': '"""Average score"""'}), "(title='Average score')\n", (3866, 3889), True, 'import altair as alt\n'), ((3632, 3653), 'altair.Scale', 'alt.Scale', ([], {'type': '"""log"""'}), "(type='log')\n", (3641, 3653), True, 'import altair as alt\n'), ((4106, 4136), 'altair.Scale', 'alt.Scale', ([], {'type': '"""log"""', 'base': '(10)'}), "(type='log', base=10)\n", (4115, 4136), True, 'import altair as alt\n')]
|
#!/usr/bin/env python3
import sys
from password.generate import generate
from password.validate import validate
from password.pwn_check import main
if __name__ == "__main__":
if not sys.argv[1:]:
while True:
try:
text = input(
"""Select the option you like to run:
1) Generate a password
2) Validate a password
3) Check if password is pwned\n"""
)
if text == "1":
length = input('How many characters? ')
uppercase = input('Should contain uppercase letters? ')
numbers = input('Should contain numbers? ')
symbols = input('Should contain special characters? ')
print(generate(
int(length),
include_uppercase=uppercase,
include_numbers=numbers,
include_symbols=symbols
))
break
elif text == "2":
length = input('Minimum length? ')
lowercase = input('How many lowercase letters? ')
uppercase = input('How many uppercase letters? ')
numbers = input('How many numbers? ')
symbols = input('How many symbols? ')
password = input('Enter the password: ')
validated = validate(
password,
lowercase=lowercase,
uppercase=uppercase,
numbers=numbers,
symbols=symbols,
length=length
)
print('Password valid!') if validated else print('Invalid password!')
break
elif text == "3":
main(input('Enter a password: '))
break
else:
print("Invalid option. Please try again.")
except RuntimeError:
print("There was an error with the API call, please fix it ASAP!")
raise
except KeyboardInterrupt:
print("\nGoodbye!")
break
|
[
"password.validate.validate"
] |
[((1504, 1618), 'password.validate.validate', 'validate', (['password'], {'lowercase': 'lowercase', 'uppercase': 'uppercase', 'numbers': 'numbers', 'symbols': 'symbols', 'length': 'length'}), '(password, lowercase=lowercase, uppercase=uppercase, numbers=\n numbers, symbols=symbols, length=length)\n', (1512, 1618), False, 'from password.validate import validate\n')]
|
"""A command line interface to processes files."""
import click
from file_processing_pipeline.process import process_end_of_day
from file_processing_pipeline.io import CSV
@click.command()
@click.option("-d", "--data-set",
help="The data set to import, e.g. end_of_day.",
default='end_of_day',
required=True)
@click.option("-i", "--input-root",
help="The directory containing the dataset, or the excel workbook (not just dir).",
required=True)
@click.option("-r", "--reference-root",
help="The directory containing the reference data,"
" or the excel workbook (not just dir).",
required=True)
@click.option("-o", "--output-root",
help="The directory where to output the data.",
required=True)
@click.option("-t", "--file-type",
help="The input file type (csv or excel).",
default=CSV,
required=True)
def cli(data_set,
input_root,
reference_root,
output_root,
file_type):
if data_set == 'end_of_day':
process_end_of_day(input_path_root=input_root,
ref_path_root=reference_root,
output_path_root=output_root,
file_type=file_type)
def main(): # pragma: nocover
cli(auto_envvar_prefix='FPP') # pylint: disable=unexpected-keyword-arg,no-value-for-parameter
|
[
"click.option",
"file_processing_pipeline.process.process_end_of_day",
"click.command"
] |
[((176, 191), 'click.command', 'click.command', ([], {}), '()\n', (189, 191), False, 'import click\n'), ((193, 320), 'click.option', 'click.option', (['"""-d"""', '"""--data-set"""'], {'help': '"""The data set to import, e.g. end_of_day."""', 'default': '"""end_of_day"""', 'required': '(True)'}), "('-d', '--data-set', help=\n 'The data set to import, e.g. end_of_day.', default='end_of_day',\n required=True)\n", (205, 320), False, 'import click\n'), ((355, 498), 'click.option', 'click.option', (['"""-i"""', '"""--input-root"""'], {'help': '"""The directory containing the dataset, or the excel workbook (not just dir)."""', 'required': '(True)'}), "('-i', '--input-root', help=\n 'The directory containing the dataset, or the excel workbook (not just dir).'\n , required=True)\n", (367, 498), False, 'import click\n'), ((518, 672), 'click.option', 'click.option', (['"""-r"""', '"""--reference-root"""'], {'help': '"""The directory containing the reference data, or the excel workbook (not just dir)."""', 'required': '(True)'}), "('-r', '--reference-root', help=\n 'The directory containing the reference data, or the excel workbook (not just dir).'\n , required=True)\n", (530, 672), False, 'import click\n'), ((714, 817), 'click.option', 'click.option', (['"""-o"""', '"""--output-root"""'], {'help': '"""The directory where to output the data."""', 'required': '(True)'}), "('-o', '--output-root', help=\n 'The directory where to output the data.', required=True)\n", (726, 817), False, 'import click\n'), ((842, 952), 'click.option', 'click.option', (['"""-t"""', '"""--file-type"""'], {'help': '"""The input file type (csv or excel)."""', 'default': 'CSV', 'required': '(True)'}), "('-t', '--file-type', help=\n 'The input file type (csv or excel).', default=CSV, required=True)\n", (854, 952), False, 'import click\n'), ((1134, 1265), 'file_processing_pipeline.process.process_end_of_day', 'process_end_of_day', ([], {'input_path_root': 'input_root', 'ref_path_root': 'reference_root', 'output_path_root': 'output_root', 'file_type': 'file_type'}), '(input_path_root=input_root, ref_path_root=reference_root,\n output_path_root=output_root, file_type=file_type)\n', (1152, 1265), False, 'from file_processing_pipeline.process import process_end_of_day\n')]
|
""" Official evaluation script for SQuAD version 2.0.
Modified by XLNet authors to update `find_best_threshold` scripts for SQuAD V2.0
"""
import collections
import json
import re
import string
def get_raw_scores(qa_ids, actuals, preds):
"""
Computes exact match and F1 scores without applying any unanswerable probability threshold.
Args:
qa_ids (list): Unique ids corresponding to the answers in `actuals`.
actuals (list): List of ground truth answers.
preds (dict): Dictionary with qa_id as keys and predicted answers as values.
Returns:
tuple: (exact_match, f1)
"""
# Helper functions
def _normalize_answer(s):
"""Lower text and remove punctuation, articles and extra whitespace."""
def remove_articles(text):
regex = re.compile(r"\b(a|an|the)\b", re.UNICODE)
return re.sub(regex, " ", text)
def white_space_fix(text):
return " ".join(text.split())
def remove_punc(text):
exclude = set(string.punctuation)
return "".join(ch for ch in text if ch not in exclude)
def lower(text):
return text.lower()
return white_space_fix(remove_articles(remove_punc(lower(s))))
def _get_tokens(s):
"""Normalizes text and returns white-space tokenized tokens. """
if not s:
return []
return _normalize_answer(s).split()
def _compute_exact(a_gold, a_pred):
"""Compute the exact match between two sentences after normalization.
Returns:
int: 1 if two sentences match exactly after normalization,
0 otherwise.
"""
return int(_normalize_answer(a_gold) == _normalize_answer(a_pred))
def _compute_f1(a_gold, a_pred):
"""
Compute F1 score based on token overlapping between two
sentences.
"""
gold_toks = _get_tokens(a_gold)
pred_toks = _get_tokens(a_pred)
common = collections.Counter(gold_toks) & collections.Counter(pred_toks)
num_same = sum(common.values())
if len(gold_toks) == 0 or len(pred_toks) == 0:
# If either is no-answer, then F1 is 1 if they agree, 0 otherwise
return int(gold_toks == pred_toks)
if num_same == 0:
return 0
precision = 1.0 * num_same / len(pred_toks)
recall = 1.0 * num_same / len(gold_toks)
f1 = (2 * precision * recall) / (precision + recall)
return f1
# Helper functions end
exact_scores = {}
f1_scores = {}
for qid, gold_answers in zip(qa_ids, actuals):
if not gold_answers:
# For unanswerable questions, only correct answer is empty string
gold_answers = [""]
if qid not in preds:
print("Missing prediction for %s" % qid)
continue
a_pred = preds[qid]
# Take max over all gold answers
if isinstance(gold_answers, str):
gold_answers = [gold_answers]
exact_scores[qid] = max(_compute_exact(a, a_pred) for a in gold_answers)
f1_scores[qid] = max(_compute_f1(a, a_pred) for a in gold_answers)
return exact_scores, f1_scores
def find_best_thresh(preds, scores, na_probs, qid_to_has_ans, unanswerable_exists=False):
"""
Find the best threshold to determine a question is impossible to answer.
Args:
preds (dict): Dictionary with qa_id as keys and predicted answers as values.
scores (dict): Dictionary with qa_id as keys and raw evaluation scores (exact_match or
f1) as values.
na_probs (dict): Dictionary with qa_id as keys and unanswerable probabilities as values.
qid_to_has_ans (dict): Dictionary with qa_id as keys boolean values indicating if the
question has answer as values.
unanswerable_exists (bool, optional): Whether there is unanswerable questions in the data.
Defaults to False.
Returns:
tuple: score after applying best threshold, best threshold, (score for answerable
questions after applying best threshold, if unanswerable_exists=True)
"""
num_no_ans = sum(1 for k in qid_to_has_ans if not qid_to_has_ans[k])
# If na_prob > threshold, the question is considered as unanswerable by the prediction.
# Initially, the threshold is 0. All questions are considered as unanswerable by the
# predictions. So cur_score is the number of actual unanswerable questions (i.e. correctly
# predicted as unanswerable in the data.
cur_score = num_no_ans
best_score = cur_score
best_thresh = 0.0
# Sorted in ascending order
qid_list = sorted(na_probs, key=lambda k: na_probs[k])
for i, qid in enumerate(qid_list):
# When using the cur_na_prob as threshold, all predictions with na_prob > na_prob_cur are
# considered as unanswerable. Current question is considered answerable.
if qid not in scores:
continue
if qid_to_has_ans[qid]:
# Current question has ground truth answer, the prediction is correct. The raw score
# is added to cur_score
diff = scores[qid]
else:
# Current question doesn't have ground truth answer.
if preds[qid]:
# Prediction is not empty, incorrect. cur_score -= 1
diff = -1
else:
# Prediction is empty, correct, the original score 1 from num_no_ans is preserved.
diff = 0
cur_score += diff
if cur_score > best_score:
# When cur_score > best_score, the threshold can increase so that more questions are
# considered as answerable and fewer questions are considered as unanswerable.
# Imagine a PDF with two humps with some overlapping, the x axis is the na_prob. The
# hump on the left is answerable questions and the hump on the right is unanswerable
# questions.
# At some point, the number of actual answerable questions decreases, and we got more
# penalty from considering unanswerable questions as answerable than the score added
# from actual answerable questions, we will not change the threshold anymore and the
# optimal threshold is found.
best_score = cur_score
best_thresh = na_probs[qid]
if not unanswerable_exists:
return 100.0 * best_score / len(scores), best_thresh
else:
has_ans_score, has_ans_cnt = 0, 0
for qid in qid_list:
if not qid_to_has_ans[qid]:
continue
has_ans_cnt += 1
if qid not in scores:
continue
has_ans_score += scores[qid]
return 100.0 * best_score / len(scores), best_thresh, 1.0 * has_ans_score / has_ans_cnt
def find_all_best_thresh(
main_eval, preds, exact_raw, f1_raw, na_probs, qid_to_has_ans, unanswerable_exists=False
):
"""
Update raw evaluation scores by finding the best threshold to determine a question is
impossible to answer.
Args:
main_eval (dict): Dictionary with raw evaluation scores without apply any threshold.
preds (dict): Dictionary with qa_id as keys and predicted answers as values.
exact_raw (dict): Dictionary with qa_id as keys and raw exact_match scores as values.
f1_raw (dict): Dictionary with qa_id as keys and raw f1 scores as values.
na_probs (dict): Dictionary with qa_id as keys and unanswerable probabilities as values.
qid_to_has_ans (dict): Dictionary with qa_id as keys boolean values indicating if the
question has answer as values.
unanswerable_exists (bool, optional): Whether there is unanswerable questions in the data.
Defaults to False.
Returns:
dict: Updated `main_eval` with scores after applying best threshold and best threshold
for each score.
"""
all_exact = find_best_thresh(preds, exact_raw, na_probs, qid_to_has_ans, unanswerable_exists)
all_f1 = find_best_thresh(preds, f1_raw, na_probs, qid_to_has_ans, unanswerable_exists)
main_eval["best_exact"] = all_exact[0]
main_eval["best_exact_thresh"] = all_exact[1]
main_eval["best_f1"] = all_f1[0]
main_eval["best_f1_thresh"] = all_f1[1]
if unanswerable_exists:
main_eval["has_ans_exact"] = all_exact[2]
main_eval["has_ans_f1"] = all_f1[2]
def evaluate_qa(
actual_dataset, preds, na_probs=None, na_prob_thresh=0, unanswerable_exists=False, out_file=None
):
"""
Evaluate question answering prediction results against ground truth answers.
    Args:
actual_dataset (:class:`utils_nlp.dataset.pytorch.QADataset`): Input question answering
dataset with ground truth answers.
preds (dict): The key of the dictionary is the qa_id in the original
:class:`utils_nlp.dataset.pytorch.QADataset`. The values of the dictionary are
the predicted answer texts in string type.
na_probs (dict, optional): Dictionary of qa_id and unanswerable probability pairs.
If None, unanswerable probabilities are all set to zero. Defaults to None.
na_prob_thresh (float, optional): Probability threshold to predict a question to be
unanswerable. For an unanswerable question, if `na_probs` > `na_prob_thresh`,
the prediction is considered as correct. Otherwise, the prediction is considered as
incorrect. Defaults to 0.
out_file (str, optional): Path of the file to save the evaluation results to.
Defaults to None.
Returns:
dict: A dictionary with exact_match and f1 values.
"""
# Helper functions
def _apply_no_ans_threshold(scores, na_probs, qid_to_has_ans, na_prob_thresh):
"""Update the input scores by applying unanswerable probability threshold."""
new_scores = {}
for qid, s in scores.items():
pred_na = na_probs[qid] > na_prob_thresh
if pred_na:
new_scores[qid] = float(not qid_to_has_ans[qid])
else:
new_scores[qid] = s
return new_scores
def _make_eval_dict(exact_scores, f1_scores, qid_list=None):
"""Create a dictionary of evaluation results."""
if not qid_list:
total = len(exact_scores)
return collections.OrderedDict(
[
("exact", 100.0 * sum(exact_scores.values()) / total),
("f1", 100.0 * sum(f1_scores.values()) / total),
("total", total),
]
)
else:
total = len(qid_list)
return collections.OrderedDict(
[
("exact", 100.0 * sum(exact_scores[k] for k in qid_list) / total),
("f1", 100.0 * sum(f1_scores[k] for k in qid_list) / total),
("total", total),
]
)
def _merge_eval(main_eval, new_eval, prefix):
"""Merge multiple evaluation result dictionaries."""
for k in new_eval:
main_eval["%s_%s" % (prefix, k)] = new_eval[k]
# Helper functions end
if na_probs is None:
na_probs_available = False
na_probs = {k: 0.0 for k in preds}
else:
na_probs_available = True
qa_ids = [item.qa_id for item in actual_dataset]
actuals = [item.answer_text for item in actual_dataset]
qid_to_has_ans = {qa_id: bool(ans) for (qa_id, ans) in zip(qa_ids, actuals)}
has_ans_qids = [k for k, v in qid_to_has_ans.items() if v]
no_ans_qids = [k for k, v in qid_to_has_ans.items() if not v]
exact_raw, f1_raw = get_raw_scores(qa_ids, actuals, preds)
exact_thresh = _apply_no_ans_threshold(exact_raw, na_probs, qid_to_has_ans, na_prob_thresh)
f1_thresh = _apply_no_ans_threshold(f1_raw, na_probs, qid_to_has_ans, na_prob_thresh)
out_eval = _make_eval_dict(exact_thresh, f1_thresh)
if has_ans_qids:
has_ans_eval = _make_eval_dict(exact_thresh, f1_thresh, qid_list=has_ans_qids)
_merge_eval(out_eval, has_ans_eval, "HasAns")
if no_ans_qids:
no_ans_eval = _make_eval_dict(exact_thresh, f1_thresh, qid_list=no_ans_qids)
_merge_eval(out_eval, no_ans_eval, "NoAns")
if na_probs_available:
find_all_best_thresh(
out_eval, preds, exact_raw, f1_raw, na_probs, qid_to_has_ans, unanswerable_exists
)
if out_file:
with open(out_file, "w") as f:
json.dump(out_eval, f)
else:
print(json.dumps(out_eval, indent=2))
return out_eval
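# Illustrative usage sketch (added for clarity, not part of the original module). `qa_dataset`
# and `predictions` are hypothetical placeholders for a QADataset and a {qa_id: answer} dict:
#
#   eval_results = evaluate_qa(qa_dataset, predictions, out_file="eval.json")
#   print(eval_results["exact"], eval_results["f1"], eval_results["total"])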
|
[
"json.dump",
"json.dumps",
"collections.Counter",
"re.sub",
"re.compile"
] |
[((849, 891), 're.compile', 're.compile', (['"""\\\\b(a|an|the)\\\\b"""', 're.UNICODE'], {}), "('\\\\b(a|an|the)\\\\b', re.UNICODE)\n", (859, 891), False, 'import re\n'), ((910, 934), 're.sub', 're.sub', (['regex', '""" """', 'text'], {}), "(regex, ' ', text)\n", (916, 934), False, 'import re\n'), ((2044, 2074), 'collections.Counter', 'collections.Counter', (['gold_toks'], {}), '(gold_toks)\n', (2063, 2074), False, 'import collections\n'), ((2077, 2107), 'collections.Counter', 'collections.Counter', (['pred_toks'], {}), '(pred_toks)\n', (2096, 2107), False, 'import collections\n'), ((12793, 12815), 'json.dump', 'json.dump', (['out_eval', 'f'], {}), '(out_eval, f)\n', (12802, 12815), False, 'import json\n'), ((12840, 12870), 'json.dumps', 'json.dumps', (['out_eval'], {'indent': '(2)'}), '(out_eval, indent=2)\n', (12850, 12870), False, 'import json\n')]
|
# Python-bioformats is distributed under the GNU General Public
# License, but this file is licensed under the more permissive BSD
# license. See the accompanying file LICENSE for details.
#
# Copyright (c) 2009-2014 Broad Institute
# All rights reserved.
'''formatwriter.py - mechanism to wrap a bioformats WriterWrapper and ImageWriter
The following file formats can be written using Bio-Formats:
- TIFF (uncompressed or LZW)
- OME-TIFF (uncompressed or LZW)
- JPEG
- PNG
- AVI (uncompressed)
- QuickTime (uncompressed is supported natively; additional codecs use QTJava)
- Encapsulated PostScript (EPS)
Support for OME-XML in the near future.
The writer API (see loci.formats.IFormatWriter) is very similar to the reader
API, in that files are written one plane at time (rather than all at once).
All writers allow the output file to be changed before the last plane has
been written. This allows you to write to any number of output files using
the same writer and output settings (compression, frames per second, etc.),
and is especially useful for formats that do not support multiple images per
file.
'''
from __future__ import absolute_import, print_function, unicode_literals
__version__ = "$Revision$"
import numpy as np
import os
import sys
import javabridge as jutil
from .. import bioformats
import javabridge
from ..bioformats import omexml as ome
def write_image(pathname, pixels, pixel_type,
c = 0, z = 0, t = 0,
size_c = 1, size_z = 1, size_t = 1,
channel_names = None):
"""Write the image using bioformats.
:param filename: save to this filename
:param pixels: the image to save
:param pixel_type: save using this pixel type
:param c: the image's channel index
:param z: the image's `z` index
:param t: the image's `t` index
:param size_c: # of channels in the stack
:param size_z: # of z stacks
:param size_t: # of timepoints in the stack
:param channel_names: names of the channels (make up names if not present).
"""
omexml = ome.OMEXML()
omexml.image(0).Name = os.path.split(pathname)[1]
p = omexml.image(0).Pixels
assert isinstance(p, ome.OMEXML.Pixels)
p.SizeX = pixels.shape[1]
p.SizeY = pixels.shape[0]
p.SizeC = size_c
p.SizeT = size_t
p.SizeZ = size_z
p.DimensionOrder = ome.DO_XYCZT
p.PixelType = pixel_type
index = c + size_c * z + size_c * size_z * t
if pixels.ndim == 3:
p.SizeC = pixels.shape[2]
p.Channel(0).SamplesPerPixel = pixels.shape[2]
omexml.structured_annotations.add_original_metadata(
ome.OM_SAMPLES_PER_PIXEL, str(pixels.shape[2]))
elif size_c > 1:
p.channel_count = size_c
pixel_buffer = convert_pixels_to_buffer(pixels, pixel_type)
xml = omexml.to_xml()
script = """
importClass(Packages.loci.formats.services.OMEXMLService,
Packages.loci.common.services.ServiceFactory,
Packages.loci.formats.ImageWriter);
var service = new ServiceFactory().getInstance(OMEXMLService);
var metadata = service.createOMEXMLMetadata(xml);
var writer = new ImageWriter();
writer.setMetadataRetrieve(metadata);
writer.setId(path);
writer.setInterleaved(true);
writer.saveBytes(index, buffer);
writer.close();
"""
jutil.run_script(script,
dict(path=pathname,
xml=xml,
index=index,
buffer=pixel_buffer))
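# Illustrative usage sketch (added for clarity, not part of the original module); assumes a
# running Java VM started through javabridge and a hypothetical 2-D uint8 array `img`:
#
#   write_image("/tmp/example.ome.tif", img, ome.PT_UINT8)
#   # writes a single-plane OME-TIFF; c/z/t and size_c/size_z/size_t default to a 1x1x1 stack.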
def convert_pixels_to_buffer(pixels, pixel_type):
'''Convert the pixels in the image into a buffer of the right pixel type
pixels - a 2d monochrome or color image
pixel_type - one of the OME pixel types
returns a 1-d byte array
'''
if pixel_type in (ome.PT_UINT8, ome.PT_INT8, ome.PT_BIT):
as_dtype = np.uint8
elif pixel_type in (ome.PT_UINT16, ome.PT_INT16):
as_dtype = "<u2"
elif pixel_type in (ome.PT_UINT32, ome.PT_INT32):
as_dtype = "<u4"
elif pixel_type == ome.PT_FLOAT:
as_dtype = "<f4"
elif pixel_type == ome.PT_DOUBLE:
as_dtype = "<f8"
else:
raise NotImplementedError("Unsupported pixel type: %d" % pixel_type)
buf = np.frombuffer(np.ascontiguousarray(pixels, as_dtype).data, np.uint8)
env = jutil.get_env()
return env.make_byte_array(buf)
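# Illustrative example (added for clarity, not part of the original module): with a hypothetical
# 4x4 uint8 array, convert_pixels_to_buffer(np.zeros((4, 4), np.uint8), ome.PT_UINT8) yields a
# Java byte array of length 16 (one byte per pixel), suitable for IFormatWriter.saveBytes.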
def make_iformat_writer_class(class_name):
'''Bind a Java class that implements IFormatWriter to a Python class
Returns a class that implements IFormatWriter through calls to the
implemented class passed in. The returned class can be subclassed to
provide additional bindings.
'''
class IFormatWriter(object):
'''A wrapper for loci.formats.IFormatWriter
See http://hudson.openmicroscopy.org.uk/job/LOCI/javadoc/loci/formats/ImageWriter.html
'''
canDoStacks = jutil.make_method('canDoStacks', '()Z',
'Reports whether the writer can save multiple images to a single file.')
getColorModel = jutil.make_method('getColorModel', '()Ljava/awt/image/ColorModel;',
'Gets the color model.')
getCompression = jutil.make_method('getCompression', '()Ljava/lang/String;',
'Gets the current compression type.')
getCompressionTypes = jutil.make_method('getCompressionTypes', '()[Ljava/lang/String;',
'Gets the available compression types.')
getFramesPerSecond = jutil.make_method('getFramesPerSecond', '()I',
'Gets the frames per second to use when writing.')
getMetadataRetrieve = jutil.make_method('getMetadataRetrieve', '()Lloci/formats/meta/MetadataRetrieve;',
'Retrieves the current metadata retrieval object for this writer.')
getPixelTypes = jutil.make_method('getPixelTypes', '()[I',
'Gets the supported pixel types.')
# getPixelTypes = jutil.make_method('getPixelTypes', '(Ljava/lang/String;)[I',
# 'Gets the supported pixel types for the given codec.')
isInterleaved = jutil.make_method('isInterleaved', '()Z',
'Gets whether or not the channels in an image are interleaved.')
isSupportedType = jutil.make_method('isSupportedType', '(I)Z',
'Checks if the given pixel type is supported.')
saveBytes = jutil.make_method('saveBytes', '([BZ)V',
'Saves the given byte array to the current file.')
saveBytesIB = jutil.make_method('saveBytes', '(I[B)V',
'Saves bytes, first arg is image #')
# saveBytes = jutil.make_method('saveBytes', '([BIZZ)V',
# 'Saves the given byte array to the given series in the current file.')
savePlane = jutil.make_method('savePlane', '(Ljava/lang/Object;Z)V',
'Saves the given image plane to the current file.')
# savePlane = jutil.make_method('savePlane', '(Ljava/lang/Object;IZZ)V',
# 'Saves the given image plane to the given series in the current file.')
setColorModel = jutil.make_method('setColorModel', '(Ljava/awt/image/ColorModel;)V',
'Sets the color model.')
setCompression = jutil.make_method('setCompression', '(Ljava/lang/String;)V',
'Sets the current compression type.')
setFramesPerSecond = jutil.make_method('setFramesPerSecond', '(I)V',
'Sets the frames per second to use when writing.')
setInterleaved = jutil.make_method('setInterleaved', '(Z)V',
'Sets whether or not the channels in an image are interleaved.')
setMetadataRetrieve = jutil.make_method('setMetadataRetrieve', '(Lloci/formats/meta/MetadataRetrieve;)V',
'Sets the metadata retrieval object from which to retrieve standardized metadata.')
setValidBitsPerPixel = jutil.make_method(
'setValidBitsPerPixel', '(I)V',
'Sets the number of valid bits per pixel')
setSeries = jutil.make_method(
'setSeries', '(I)V',
'''Set the series for the image file
series - the zero-based index of the image stack in the file,
for instance in a multi-image tif.''')
return IFormatWriter
def make_image_writer_class():
'''Return an image writer class for the given Java environment'''
env = jutil.get_env()
class_name = 'loci/formats/ImageWriter'
klass = env.find_class(class_name)
base_klass = env.find_class('loci/formats/IFormatWriter')
IFormatWriter = make_iformat_writer_class(class_name)
#
# This uses the writers.txt file from inside the loci_tools.jar
#
class_list = jutil.make_instance("loci/formats/ClassList",
"(Ljava/lang/String;"
"Ljava/lang/Class;" # base
"Ljava/lang/Class;)V", # location in jar
"writers.txt", base_klass, klass)
class ImageWriter(IFormatWriter):
new_fn = jutil.make_new(class_name, '(Lloci/formats/ClassList;)V')
def __init__(self):
self.new_fn(class_list)
setId = jutil.make_method('setId', '(Ljava/lang/String;)V',
'Sets the current file name.')
addStatusListener = jutil.make_method('addStatusListener', '()Lloci/formats/StatusListener;',
'Adds a listener for status update events.')
close = jutil.make_method('close','()V',
'Closes currently open file(s) and frees allocated memory.')
getFormat = jutil.make_method('getFormat', '()Ljava/lang/String;',
'Gets the name of this file format.')
getNativeDataType = jutil.make_method('getNativeDataType', '()Ljava/lang/Class;',
'Returns the native data type of image planes for this reader, as returned by IFormatReader.openPlane(int, int, int, int, int) or IFormatWriter#saveData.')
getStatusListeners = jutil.make_method('getStatusListeners', '()[Lloci/formats/StatusListener;',
'Gets a list of all registered status update listeners.')
getSuffixes = jutil.make_method('getSuffixes', '()Ljava/lang/String;',
'Gets the default file suffixes for this file format.')
getWriter = jutil.make_method('getWriter', '()Lloci/formats/IFormatWriter;',
'Gets the writer used to save the current file.')
# getWriter = jutil.make_method('getWriter', '(Ljava/lang/Class)Lloci/formats/IFormatWriter;',
# 'Gets the file format writer instance matching the given class.')
# getWriter = jutil.make_method('getWriter', '(Ljava/lang/String;)Lloci/formats/IFormatWriter;',
# 'Gets the writer used to save the given file.')
getWriters = jutil.make_method('getWriters', '()[Lloci/formats/IFormatWriter;',
'Gets all constituent file format writers.')
isThisType = jutil.make_method('isThisType', '(Ljava/lang/String;)Z',
'Checks if the given string is a valid filename for this file format.')
removeStatusListener = jutil.make_method('removeStatusListener', '(Lloci/formats/StatusListener;)V',
'Saves the given byte array to the current file.')
return ImageWriter
def make_ome_tiff_writer_class():
'''Return a class that wraps loci.formats.out.OMETiffWriter'''
class_name = 'loci/formats/out/OMETiffWriter'
IFormatWriter = make_iformat_writer_class(class_name)
class OMETiffWriter(IFormatWriter):
def __init__(self):
self.new_fn = jutil.make_new(self.class_name, '()V')
self.setId = jutil.make_method('setId', '(Ljava/lang/String;)V',
'Sets the current file name.')
self.close = jutil.make_method(
'close','()V',
'Closes currently open file(s) and frees allocated memory.')
self.saveBytesIFD = jutil.make_method(
'saveBytes', '(I[BLloci/formats/tiff/IFD;)V',
'''save a byte array to an image channel
index - image index
bytes - byte array to save
ifd - a loci.formats.tiff.IFD instance that gives all of the
IFD values associated with the channel''')
self.new_fn()
return OMETiffWriter
def make_writer_wrapper_class(class_name):
'''Make an ImageWriter wrapper class
class_name - the name of the wrapper class
You can instantiate an instance of the wrapper class like this:
writer = XXX(ImageWriter())
'''
IFormatWriter = make_iformat_writer_class(class_name)
class WriterWrapper(IFormatWriter):
__doc__ = '''A wrapper for %s
See http://hudson.openmicroscopy.org.uk/job/LOCI/javadoc/loci/formats/ImageWriter.html
'''%class_name
new_fn = jutil.make_new(class_name, '(Lloci/formats/IFormatWriter;)V')
def __init__(self, writer):
self.new_fn(writer)
setId = jutil.make_method('setId', '(Ljava/lang/String;)V',
'Sets the current file name.')
return WriterWrapper
def make_format_writer_class(class_name):
'''Make a FormatWriter wrapper class
class_name - the name of a class that implements loci.formats.FormatWriter
Known names in the loci.formats.out package:
APNGWriter, AVIWriter, EPSWriter, ICSWriter, ImageIOWriter,
JPEG2000Writer, JPEGWriter, LegacyQTWriter, OMETiffWriter,
OMEXMLWriter, QTWriter, TiffWriter
'''
new_fn = jutil.make_new(class_name,
'(Ljava/lang/String;Ljava/lang/String;)V')
class FormatWriter(object):
__doc__ = '''A wrapper for %s implementing loci.formats.FormatWriter
See http://hudson.openmicroscopy.org.uk/job/LOCI/javadoc/loci/formats/FormatWriter'''%class_name
def __init__(self):
self.new_fn()
canDoStacks = jutil.make_method('canDoStacks','()Z',
'Reports whether the writer can save multiple images to a single file')
getColorModel = jutil.make_method('getColorModel',
'()Ljava/awt/image/ColorModel;',
'Gets the color model')
getCompression = jutil.make_method('getCompression',
'()Ljava/lang/String;',
'Gets the current compression type')
getCompressionTypes = jutil.make_method('getCompressionTypes',
'()[Ljava/lang/String;',
'Gets the available compression types')
getFramesPerSecond = jutil.make_method('getFramesPerSecond',
'()I', "Gets the frames per second to use when writing")
getMetadataRetrieve = jutil.make_method('getMetadataRetrieve',
'()Lloci/formats/meta/MetadataRetrieve;',
'Retrieves the current metadata retrieval object for this writer.')
getPixelTypes = jutil.make_method('getPixelTypes',
'()[I')
isInterleaved = jutil.make_method('isInterleaved','()Z',
'Gets whether or not the channels in an image are interleaved')
isSupportedType = jutil.make_method('isSupportedType','(I)Z',
'Checks if the given pixel type is supported')
saveBytes = jutil.make_method('saveBytes', '([BZ)V',
'Saves the given byte array to the current file')
setColorModel = jutil.make_method('setColorModel',
'(Ljava/awt/image/ColorModel;)V',
'Sets the color model')
setCompression = jutil.make_method('setCompression',
'(Ljava/lang/String;)V',
'Sets the current compression type')
setFramesPerSecond = jutil.make_method('setFramesPerSecond',
'(I)V',
'Sets the frames per second to use when writing')
setId = jutil.make_method('setId','(Ljava/lang/String;)V',
'Sets the current file name')
setInterleaved = jutil.make_method('setInterleaved', '(Z)V',
'Sets whether or not the channels in an image are interleaved')
setMetadataRetrieve = jutil.make_method('setMetadataRetrieve',
'(Lloci/formats/meta/MetadataRetrieve;)V',
'Sets the metadata retrieval object from which to retrieve standardized metadata')
return FormatWriter
def getRGBColorSpace():
'''Get a Java object that represents an RGB color space
See java.awt.color.ColorSpace: this returns the linear RGB color space
'''
cs_linear_rgb = jutil.get_static_field('java/awt/color/ColorSpace',
'CS_LINEAR_RGB', 'I')
return jutil.static_call('java/awt/color/ColorSpace', 'getInstance',
'(I)Ljava/awt/color/ColorSpace;',
cs_linear_rgb)
def getGrayColorSpace():
'''Get a Java object that represents an RGB color space
See java.awt.color.ColorSpace: this returns the linear RGB color space
'''
cs_gray = jutil.get_static_field('java/awt/color/ColorSpace',
'CS_GRAY', 'I')
return jutil.static_call('java/awt/color/ColorSpace', 'getInstance',
'(I)Ljava/awt/color/ColorSpace;',
cs_gray)
'''Constant for color model transparency indicating bitmask transparency'''
BITMASK = 'BITMASK'
'''Constant for color model transparency indicting an opaque color model'''
OPAQUE = 'OPAQUE'
'''Constant for color model transparency indicating a transparent color model'''
TRANSPARENT = 'TRANSPARENT'
'''Constant for color model transfer type indicating byte per pixel'''
TYPE_BYTE = 'TYPE_BYTE'
'''Constant for color model transfer type indicating unsigned short per pixel'''
TYPE_USHORT = 'TYPE_USHORT'
'''Constant for color model transfer type indicating integer per pixel'''
TYPE_INT = 'TYPE_INT'
def getColorModel(color_space,
has_alpha=False,
is_alpha_premultiplied = False,
transparency = OPAQUE,
transfer_type = TYPE_BYTE):
'''Return a java.awt.image.ColorModel color model
color_space - a java.awt.color.ColorSpace such as returned by
getGrayColorSpace or getRGBColorSpace
has_alpha - True if alpha channel is specified
is_alpha_premultiplied - True if other channel values have already
been reduced by the alpha multiplier, False if the channel values are
independent of the multiplier.
transparency - one of BITMASK, OPAQUE or TRANSPARENT.
transfer_type - one of TYPE_BYTE, TYPE_USHORT, TYPE_INT
'''
jtransparency = jutil.get_static_field('java/awt/Transparency',
transparency,
'I')
jtransfer_type = jutil.get_static_field('java/awt/image/DataBuffer',
transfer_type, 'I')
return jutil.make_instance('java/awt/image/ComponentColorModel',
'(Ljava/awt/color/ColorSpace;ZZII)V',
color_space, has_alpha, is_alpha_premultiplied,
jtransparency, jtransfer_type)
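# Illustrative usage sketch (added for clarity, not part of the original module); requires an
# attached Java VM (javabridge.attach()):
#
#   rgb_model = getColorModel(getRGBColorSpace(), has_alpha=False)
#   gray_model = getColorModel(getGrayColorSpace(), transfer_type=TYPE_USHORT)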
if __name__ == "__main__":
import wx
import matplotlib.backends.backend_wxagg as mmmm
from .. import bioformats
from .formatreader import *
from .metadatatools import *
app = wx.PySimpleApp()
# dlg = wx.FileDialog(None)
# if dlg.ShowModal()==wx.ID_OK:
# filename = dlg.Path
# else:
# app.Exit()
# sys.exit()
filename = '/Users/afraser/Desktop/cpa_example/images/AS_09125_050116000001_A01f00d0.png'
filename = '/Users/afraser/Desktop/wedding/header.jpg'
out_file = '/Users/afraser/Desktop/test_output.avi'
try:
os.remove(out_file)
print('previous output file deleted')
except:
print('no output file to delete')
env = jutil.attach()
ImageReader = make_image_reader_class()
ChannelSeparator = make_reader_wrapper_class("loci/formats/ChannelSeparator")
FormatTools = make_format_tools_class()
# writer testing
ImageWriter = make_image_writer_class()
writer = ImageWriter()
w = 400
h = 400
c = 3
z = 1
t = 4
images = []
for tt in range(t):
images += [(np.random.rand(w, h, c) * 255).astype('uint8')]
imeta = createOMEXMLMetadata()
meta = wrap_imetadata_object(imeta)
meta.createRoot()
meta.setPixelsBigEndian(True, 0, 0)
meta.setPixelsDimensionOrder('XYCZT', 0, 0)
meta.setPixelsPixelType(FormatTools.getPixelTypeString(FormatTools.UINT8), 0, 0)
meta.setPixelsSizeX(w, 0, 0)
meta.setPixelsSizeY(h, 0, 0)
meta.setPixelsSizeC(c, 0, 0)
meta.setPixelsSizeZ(z, 0, 0)
meta.setPixelsSizeT(t, 0, 0)
meta.setLogicalChannelSamplesPerPixel(c, 0, 0)
print('big endian:', meta.getPixelsBigEndian(0, 0))
print('dim order:', meta.getPixelsDimensionOrder(0, 0))
print('pixel type:', meta.getPixelsPixelType(0, 0))
print('size x:', meta.getPixelsSizeX(0, 0))
print('size y:', meta.getPixelsSizeY(0, 0))
print('size c:', meta.getPixelsSizeC(0, 0))
print('size z:', meta.getPixelsSizeZ(0, 0))
print('size t:', meta.getPixelsSizeT(0, 0))
print('samples per pixel:', meta.getLogicalChannelSamplesPerPixel(0, 0))
writer.setMetadataRetrieve(meta)
writer.setId(out_file)
for image in images:
if len(image.shape)==3 and image.shape[2] == 3:
save_im = np.array([image[:,:,0], image[:,:,1], image[:,:,2]]).astype(np.uint8).flatten()
else:
save_im = image.astype(np.uint8).flatten()
writer.saveBytes(env.make_byte_array(save_im), (image is images[-1]))
writer.close()
print('Done writing image :)')
# import PIL.Image as Image
# im = Image.open(out_file, 'r')
# im.show()
jutil.detach()
app.MainLoop()
|
[
"os.remove",
"javabridge.static_call",
"javabridge.get_env",
"javabridge.get_static_field",
"numpy.random.rand",
"javabridge.make_new",
"javabridge.make_method",
"javabridge.make_instance",
"numpy.array",
"wx.PySimpleApp",
"os.path.split",
"numpy.ascontiguousarray",
"javabridge.detach",
"javabridge.attach"
] |
[((4342, 4357), 'javabridge.get_env', 'jutil.get_env', ([], {}), '()\n', (4355, 4357), True, 'import javabridge as jutil\n'), ((8923, 8938), 'javabridge.get_env', 'jutil.get_env', ([], {}), '()\n', (8936, 8938), True, 'import javabridge as jutil\n'), ((9239, 9385), 'javabridge.make_instance', 'jutil.make_instance', (['"""loci/formats/ClassList"""', '"""(Ljava/lang/String;Ljava/lang/Class;Ljava/lang/Class;)V"""', '"""writers.txt"""', 'base_klass', 'klass'], {}), "('loci/formats/ClassList',\n '(Ljava/lang/String;Ljava/lang/Class;Ljava/lang/Class;)V',\n 'writers.txt', base_klass, klass)\n", (9258, 9385), True, 'import javabridge as jutil\n'), ((14552, 14621), 'javabridge.make_new', 'jutil.make_new', (['class_name', '"""(Ljava/lang/String;Ljava/lang/String;)V"""'], {}), "(class_name, '(Ljava/lang/String;Ljava/lang/String;)V')\n", (14566, 14621), True, 'import javabridge as jutil\n'), ((18217, 18290), 'javabridge.get_static_field', 'jutil.get_static_field', (['"""java/awt/color/ColorSpace"""', '"""CS_LINEAR_RGB"""', '"""I"""'], {}), "('java/awt/color/ColorSpace', 'CS_LINEAR_RGB', 'I')\n", (18239, 18290), True, 'import javabridge as jutil\n'), ((18345, 18459), 'javabridge.static_call', 'jutil.static_call', (['"""java/awt/color/ColorSpace"""', '"""getInstance"""', '"""(I)Ljava/awt/color/ColorSpace;"""', 'cs_linear_rgb'], {}), "('java/awt/color/ColorSpace', 'getInstance',\n '(I)Ljava/awt/color/ColorSpace;', cs_linear_rgb)\n", (18362, 18459), True, 'import javabridge as jutil\n'), ((18698, 18765), 'javabridge.get_static_field', 'jutil.get_static_field', (['"""java/awt/color/ColorSpace"""', '"""CS_GRAY"""', '"""I"""'], {}), "('java/awt/color/ColorSpace', 'CS_GRAY', 'I')\n", (18720, 18765), True, 'import javabridge as jutil\n'), ((18820, 18928), 'javabridge.static_call', 'jutil.static_call', (['"""java/awt/color/ColorSpace"""', '"""getInstance"""', '"""(I)Ljava/awt/color/ColorSpace;"""', 'cs_gray'], {}), "('java/awt/color/ColorSpace', 'getInstance',\n '(I)Ljava/awt/color/ColorSpace;', cs_gray)\n", (18837, 18928), True, 'import javabridge as jutil\n'), ((20331, 20397), 'javabridge.get_static_field', 'jutil.get_static_field', (['"""java/awt/Transparency"""', 'transparency', '"""I"""'], {}), "('java/awt/Transparency', transparency, 'I')\n", (20353, 20397), True, 'import javabridge as jutil\n'), ((20505, 20576), 'javabridge.get_static_field', 'jutil.get_static_field', (['"""java/awt/image/DataBuffer"""', 'transfer_type', '"""I"""'], {}), "('java/awt/image/DataBuffer', transfer_type, 'I')\n", (20527, 20576), True, 'import javabridge as jutil\n'), ((20632, 20814), 'javabridge.make_instance', 'jutil.make_instance', (['"""java/awt/image/ComponentColorModel"""', '"""(Ljava/awt/color/ColorSpace;ZZII)V"""', 'color_space', 'has_alpha', 'is_alpha_premultiplied', 'jtransparency', 'jtransfer_type'], {}), "('java/awt/image/ComponentColorModel',\n '(Ljava/awt/color/ColorSpace;ZZII)V', color_space, has_alpha,\n is_alpha_premultiplied, jtransparency, jtransfer_type)\n", (20651, 20814), True, 'import javabridge as jutil\n'), ((21100, 21116), 'wx.PySimpleApp', 'wx.PySimpleApp', ([], {}), '()\n', (21114, 21116), False, 'import wx\n'), ((21623, 21637), 'javabridge.attach', 'jutil.attach', ([], {}), '()\n', (21635, 21637), True, 'import javabridge as jutil\n'), ((23578, 23592), 'javabridge.detach', 'jutil.detach', ([], {}), '()\n', (23590, 23592), True, 'import javabridge as jutil\n'), ((2112, 2135), 'os.path.split', 'os.path.split', (['pathname'], {}), '(pathname)\n', (2125, 2135), False, 'import os\n'), ((4912, 5028), 
'javabridge.make_method', 'jutil.make_method', (['"""canDoStacks"""', '"""()Z"""', '"""Reports whether the writer can save multiple images to a single file."""'], {}), "('canDoStacks', '()Z',\n 'Reports whether the writer can save multiple images to a single file.')\n", (4929, 5028), True, 'import javabridge as jutil\n'), ((5089, 5185), 'javabridge.make_method', 'jutil.make_method', (['"""getColorModel"""', '"""()Ljava/awt/image/ColorModel;"""', '"""Gets the color model."""'], {}), "('getColorModel', '()Ljava/awt/image/ColorModel;',\n 'Gets the color model.')\n", (5106, 5185), True, 'import javabridge as jutil\n'), ((5249, 5350), 'javabridge.make_method', 'jutil.make_method', (['"""getCompression"""', '"""()Ljava/lang/String;"""', '"""Gets the current compression type."""'], {}), "('getCompression', '()Ljava/lang/String;',\n 'Gets the current compression type.')\n", (5266, 5350), True, 'import javabridge as jutil\n'), ((5420, 5530), 'javabridge.make_method', 'jutil.make_method', (['"""getCompressionTypes"""', '"""()[Ljava/lang/String;"""', '"""Gets the available compression types."""'], {}), "('getCompressionTypes', '()[Ljava/lang/String;',\n 'Gets the available compression types.')\n", (5437, 5530), True, 'import javabridge as jutil\n'), ((5604, 5705), 'javabridge.make_method', 'jutil.make_method', (['"""getFramesPerSecond"""', '"""()I"""', '"""Gets the frames per second to use when writing."""'], {}), "('getFramesPerSecond', '()I',\n 'Gets the frames per second to use when writing.')\n", (5621, 5705), True, 'import javabridge as jutil\n'), ((5779, 5937), 'javabridge.make_method', 'jutil.make_method', (['"""getMetadataRetrieve"""', '"""()Lloci/formats/meta/MetadataRetrieve;"""', '"""Retrieves the current metadata retrieval object for this writer."""'], {}), "('getMetadataRetrieve',\n '()Lloci/formats/meta/MetadataRetrieve;',\n 'Retrieves the current metadata retrieval object for this writer.')\n", (5796, 5937), True, 'import javabridge as jutil\n'), ((6002, 6079), 'javabridge.make_method', 'jutil.make_method', (['"""getPixelTypes"""', '"""()[I"""', '"""Gets the supported pixel types."""'], {}), "('getPixelTypes', '()[I', 'Gets the supported pixel types.')\n", (6019, 6079), True, 'import javabridge as jutil\n'), ((6330, 6440), 'javabridge.make_method', 'jutil.make_method', (['"""isInterleaved"""', '"""()Z"""', '"""Gets whether or not the channels in an image are interleaved."""'], {}), "('isInterleaved', '()Z',\n 'Gets whether or not the channels in an image are interleaved.')\n", (6347, 6440), True, 'import javabridge as jutil\n'), ((6505, 6601), 'javabridge.make_method', 'jutil.make_method', (['"""isSupportedType"""', '"""(I)Z"""', '"""Checks if the given pixel type is supported."""'], {}), "('isSupportedType', '(I)Z',\n 'Checks if the given pixel type is supported.')\n", (6522, 6601), True, 'import javabridge as jutil\n'), ((6662, 6757), 'javabridge.make_method', 'jutil.make_method', (['"""saveBytes"""', '"""([BZ)V"""', '"""Saves the given byte array to the current file."""'], {}), "('saveBytes', '([BZ)V',\n 'Saves the given byte array to the current file.')\n", (6679, 6757), True, 'import javabridge as jutil\n'), ((6814, 6891), 'javabridge.make_method', 'jutil.make_method', (['"""saveBytes"""', '"""(I[B)V"""', '"""Saves bytes, first arg is image #"""'], {}), "('saveBytes', '(I[B)V', 'Saves bytes, first arg is image #')\n", (6831, 6891), True, 'import javabridge as jutil\n'), ((7126, 7238), 'javabridge.make_method', 'jutil.make_method', (['"""savePlane"""', 
'"""(Ljava/lang/Object;Z)V"""', '"""Saves the given image plane to the current file."""'], {}), "('savePlane', '(Ljava/lang/Object;Z)V',\n 'Saves the given image plane to the current file.')\n", (7143, 7238), True, 'import javabridge as jutil\n'), ((7488, 7585), 'javabridge.make_method', 'jutil.make_method', (['"""setColorModel"""', '"""(Ljava/awt/image/ColorModel;)V"""', '"""Sets the color model."""'], {}), "('setColorModel', '(Ljava/awt/image/ColorModel;)V',\n 'Sets the color model.')\n", (7505, 7585), True, 'import javabridge as jutil\n'), ((7649, 7751), 'javabridge.make_method', 'jutil.make_method', (['"""setCompression"""', '"""(Ljava/lang/String;)V"""', '"""Sets the current compression type."""'], {}), "('setCompression', '(Ljava/lang/String;)V',\n 'Sets the current compression type.')\n", (7666, 7751), True, 'import javabridge as jutil\n'), ((7820, 7922), 'javabridge.make_method', 'jutil.make_method', (['"""setFramesPerSecond"""', '"""(I)V"""', '"""Sets the frames per second to use when writing."""'], {}), "('setFramesPerSecond', '(I)V',\n 'Sets the frames per second to use when writing.')\n", (7837, 7922), True, 'import javabridge as jutil\n'), ((7991, 8103), 'javabridge.make_method', 'jutil.make_method', (['"""setInterleaved"""', '"""(Z)V"""', '"""Sets whether or not the channels in an image are interleaved."""'], {}), "('setInterleaved', '(Z)V',\n 'Sets whether or not the channels in an image are interleaved.')\n", (8008, 8103), True, 'import javabridge as jutil\n'), ((8173, 8353), 'javabridge.make_method', 'jutil.make_method', (['"""setMetadataRetrieve"""', '"""(Lloci/formats/meta/MetadataRetrieve;)V"""', '"""Sets the metadata retrieval object from which to retrieve standardized metadata."""'], {}), "('setMetadataRetrieve',\n '(Lloci/formats/meta/MetadataRetrieve;)V',\n 'Sets the metadata retrieval object from which to retrieve standardized metadata.'\n )\n", (8190, 8353), True, 'import javabridge as jutil\n'), ((8420, 8516), 'javabridge.make_method', 'jutil.make_method', (['"""setValidBitsPerPixel"""', '"""(I)V"""', '"""Sets the number of valid bits per pixel"""'], {}), "('setValidBitsPerPixel', '(I)V',\n 'Sets the number of valid bits per pixel')\n", (8437, 8516), True, 'import javabridge as jutil\n'), ((8558, 8768), 'javabridge.make_method', 'jutil.make_method', (['"""setSeries"""', '"""(I)V"""', '"""Set the series for the image file\n\n series - the zero-based index of the image stack in the file,\n for instance in a multi-image tif."""'], {}), '(\'setSeries\', \'(I)V\',\n """Set the series for the image file\n\n series - the zero-based index of the image stack in the file,\n for instance in a multi-image tif."""\n )\n', (8575, 8768), True, 'import javabridge as jutil\n'), ((9612, 9669), 'javabridge.make_new', 'jutil.make_new', (['class_name', '"""(Lloci/formats/ClassList;)V"""'], {}), "(class_name, '(Lloci/formats/ClassList;)V')\n", (9626, 9669), True, 'import javabridge as jutil\n'), ((9751, 9837), 'javabridge.make_method', 'jutil.make_method', (['"""setId"""', '"""(Ljava/lang/String;)V"""', '"""Sets the current file name."""'], {}), "('setId', '(Ljava/lang/String;)V',\n 'Sets the current file name.')\n", (9768, 9837), True, 'import javabridge as jutil\n'), ((9896, 10018), 'javabridge.make_method', 'jutil.make_method', (['"""addStatusListener"""', '"""()Lloci/formats/StatusListener;"""', '"""Adds a listener for status update events."""'], {}), "('addStatusListener', '()Lloci/formats/StatusListener;',\n 'Adds a listener for status update events.')\n", (9913, 10018), True, 
'import javabridge as jutil\n'), ((10077, 10175), 'javabridge.make_method', 'jutil.make_method', (['"""close"""', '"""()V"""', '"""Closes currently open file(s) and frees allocated memory."""'], {}), "('close', '()V',\n 'Closes currently open file(s) and frees allocated memory.')\n", (10094, 10175), True, 'import javabridge as jutil\n'), ((10225, 10321), 'javabridge.make_method', 'jutil.make_method', (['"""getFormat"""', '"""()Ljava/lang/String;"""', '"""Gets the name of this file format."""'], {}), "('getFormat', '()Ljava/lang/String;',\n 'Gets the name of this file format.')\n", (10242, 10321), True, 'import javabridge as jutil\n'), ((10384, 10610), 'javabridge.make_method', 'jutil.make_method', (['"""getNativeDataType"""', '"""()Ljava/lang/Class;"""', '"""Returns the native data type of image planes for this reader, as returned by IFormatReader.openPlane(int, int, int, int, int) or IFormatWriter#saveData."""'], {}), "('getNativeDataType', '()Ljava/lang/Class;',\n 'Returns the native data type of image planes for this reader, as returned by IFormatReader.openPlane(int, int, int, int, int) or IFormatWriter#saveData.'\n )\n", (10401, 10610), True, 'import javabridge as jutil\n'), ((10677, 10814), 'javabridge.make_method', 'jutil.make_method', (['"""getStatusListeners"""', '"""()[Lloci/formats/StatusListener;"""', '"""Gets a list of all registered status update listeners."""'], {}), "('getStatusListeners', '()[Lloci/formats/StatusListener;',\n 'Gets a list of all registered status update listeners.')\n", (10694, 10814), True, 'import javabridge as jutil\n'), ((10880, 10996), 'javabridge.make_method', 'jutil.make_method', (['"""getSuffixes"""', '"""()Ljava/lang/String;"""', '"""Gets the default file suffixes for this file format."""'], {}), "('getSuffixes', '()Ljava/lang/String;',\n 'Gets the default file suffixes for this file format.')\n", (10897, 10996), True, 'import javabridge as jutil\n'), ((11053, 11171), 'javabridge.make_method', 'jutil.make_method', (['"""getWriter"""', '"""()Lloci/formats/IFormatWriter;"""', '"""Gets the writer used to save the current file."""'], {}), "('getWriter', '()Lloci/formats/IFormatWriter;',\n 'Gets the writer used to save the current file.')\n", (11070, 11171), True, 'import javabridge as jutil\n'), ((11625, 11740), 'javabridge.make_method', 'jutil.make_method', (['"""getWriters"""', '"""()[Lloci/formats/IFormatWriter;"""', '"""Gets all constituent file format writers."""'], {}), "('getWriters', '()[Lloci/formats/IFormatWriter;',\n 'Gets all constituent file format writers.')\n", (11642, 11740), True, 'import javabridge as jutil\n'), ((11797, 11929), 'javabridge.make_method', 'jutil.make_method', (['"""isThisType"""', '"""(Ljava/lang/String;)Z"""', '"""Checks if the given string is a valid filename for this file format."""'], {}), "('isThisType', '(Ljava/lang/String;)Z',\n 'Checks if the given string is a valid filename for this file format.')\n", (11814, 11929), True, 'import javabridge as jutil\n'), ((11996, 12132), 'javabridge.make_method', 'jutil.make_method', (['"""removeStatusListener"""', '"""(Lloci/formats/StatusListener;)V"""', '"""Saves the given byte array to the current file."""'], {}), "('removeStatusListener',\n '(Lloci/formats/StatusListener;)V',\n 'Saves the given byte array to the current file.')\n", (12013, 12132), True, 'import javabridge as jutil\n'), ((13798, 13859), 'javabridge.make_new', 'jutil.make_new', (['class_name', '"""(Lloci/formats/IFormatWriter;)V"""'], {}), "(class_name, '(Lloci/formats/IFormatWriter;)V')\n", (13812, 
13859), True, 'import javabridge as jutil\n'), ((13945, 14031), 'javabridge.make_method', 'jutil.make_method', (['"""setId"""', '"""(Ljava/lang/String;)V"""', '"""Sets the current file name."""'], {}), "('setId', '(Ljava/lang/String;)V',\n 'Sets the current file name.')\n", (13962, 14031), True, 'import javabridge as jutil\n'), ((14941, 15056), 'javabridge.make_method', 'jutil.make_method', (['"""canDoStacks"""', '"""()Z"""', '"""Reports whether the writer can save multiple images to a single file"""'], {}), "('canDoStacks', '()Z',\n 'Reports whether the writer can save multiple images to a single file')\n", (14958, 15056), True, 'import javabridge as jutil\n'), ((15116, 15211), 'javabridge.make_method', 'jutil.make_method', (['"""getColorModel"""', '"""()Ljava/awt/image/ColorModel;"""', '"""Gets the color model"""'], {}), "('getColorModel', '()Ljava/awt/image/ColorModel;',\n 'Gets the color model')\n", (15133, 15211), True, 'import javabridge as jutil\n'), ((15317, 15417), 'javabridge.make_method', 'jutil.make_method', (['"""getCompression"""', '"""()Ljava/lang/String;"""', '"""Gets the current compression type"""'], {}), "('getCompression', '()Ljava/lang/String;',\n 'Gets the current compression type')\n", (15334, 15417), True, 'import javabridge as jutil\n'), ((15530, 15639), 'javabridge.make_method', 'jutil.make_method', (['"""getCompressionTypes"""', '"""()[Ljava/lang/String;"""', '"""Gets the available compression types"""'], {}), "('getCompressionTypes', '()[Ljava/lang/String;',\n 'Gets the available compression types')\n", (15547, 15639), True, 'import javabridge as jutil\n'), ((15761, 15861), 'javabridge.make_method', 'jutil.make_method', (['"""getFramesPerSecond"""', '"""()I"""', '"""Gets the frames per second to use when writing"""'], {}), "('getFramesPerSecond', '()I',\n 'Gets the frames per second to use when writing')\n", (15778, 15861), True, 'import javabridge as jutil\n'), ((15935, 16093), 'javabridge.make_method', 'jutil.make_method', (['"""getMetadataRetrieve"""', '"""()Lloci/formats/meta/MetadataRetrieve;"""', '"""Retrieves the current metadata retrieval object for this writer."""'], {}), "('getMetadataRetrieve',\n '()Lloci/formats/meta/MetadataRetrieve;',\n 'Retrieves the current metadata retrieval object for this writer.')\n", (15952, 16093), True, 'import javabridge as jutil\n'), ((16207, 16249), 'javabridge.make_method', 'jutil.make_method', (['"""getPixelTypes"""', '"""()[I"""'], {}), "('getPixelTypes', '()[I')\n", (16224, 16249), True, 'import javabridge as jutil\n'), ((16316, 16425), 'javabridge.make_method', 'jutil.make_method', (['"""isInterleaved"""', '"""()Z"""', '"""Gets whether or not the channels in an image are interleaved"""'], {}), "('isInterleaved', '()Z',\n 'Gets whether or not the channels in an image are interleaved')\n", (16333, 16425), True, 'import javabridge as jutil\n'), ((16489, 16584), 'javabridge.make_method', 'jutil.make_method', (['"""isSupportedType"""', '"""(I)Z"""', '"""Checks if the given pixel type is supported"""'], {}), "('isSupportedType', '(I)Z',\n 'Checks if the given pixel type is supported')\n", (16506, 16584), True, 'import javabridge as jutil\n'), ((16644, 16738), 'javabridge.make_method', 'jutil.make_method', (['"""saveBytes"""', '"""([BZ)V"""', '"""Saves the given byte array to the current file"""'], {}), "('saveBytes', '([BZ)V',\n 'Saves the given byte array to the current file')\n", (16661, 16738), True, 'import javabridge as jutil\n'), ((16797, 16893), 'javabridge.make_method', 'jutil.make_method', 
(['"""setColorModel"""', '"""(Ljava/awt/image/ColorModel;)V"""', '"""Sets the color model"""'], {}), "('setColorModel', '(Ljava/awt/image/ColorModel;)V',\n 'Sets the color model')\n", (16814, 16893), True, 'import javabridge as jutil\n'), ((16999, 17100), 'javabridge.make_method', 'jutil.make_method', (['"""setCompression"""', '"""(Ljava/lang/String;)V"""', '"""Sets the current compression type"""'], {}), "('setCompression', '(Ljava/lang/String;)V',\n 'Sets the current compression type')\n", (17016, 17100), True, 'import javabridge as jutil\n'), ((17212, 17313), 'javabridge.make_method', 'jutil.make_method', (['"""setFramesPerSecond"""', '"""(I)V"""', '"""Sets the frames per second to use when writing"""'], {}), "('setFramesPerSecond', '(I)V',\n 'Sets the frames per second to use when writing')\n", (17229, 17313), True, 'import javabridge as jutil\n'), ((17420, 17505), 'javabridge.make_method', 'jutil.make_method', (['"""setId"""', '"""(Ljava/lang/String;)V"""', '"""Sets the current file name"""'], {}), "('setId', '(Ljava/lang/String;)V',\n 'Sets the current file name')\n", (17437, 17505), True, 'import javabridge as jutil\n'), ((17560, 17671), 'javabridge.make_method', 'jutil.make_method', (['"""setInterleaved"""', '"""(Z)V"""', '"""Sets whether or not the channels in an image are interleaved"""'], {}), "('setInterleaved', '(Z)V',\n 'Sets whether or not the channels in an image are interleaved')\n", (17577, 17671), True, 'import javabridge as jutil\n'), ((17741, 17920), 'javabridge.make_method', 'jutil.make_method', (['"""setMetadataRetrieve"""', '"""(Lloci/formats/meta/MetadataRetrieve;)V"""', '"""Sets the metadata retrieval object from which to retrieve standardized metadata"""'], {}), "('setMetadataRetrieve',\n '(Lloci/formats/meta/MetadataRetrieve;)V',\n 'Sets the metadata retrieval object from which to retrieve standardized metadata'\n )\n", (17758, 17920), True, 'import javabridge as jutil\n'), ((21492, 21511), 'os.remove', 'os.remove', (['out_file'], {}), '(out_file)\n', (21501, 21511), False, 'import os\n'), ((4277, 4315), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['pixels', 'as_dtype'], {}), '(pixels, as_dtype)\n', (4297, 4315), True, 'import numpy as np\n'), ((12503, 12541), 'javabridge.make_new', 'jutil.make_new', (['self.class_name', '"""()V"""'], {}), "(self.class_name, '()V')\n", (12517, 12541), True, 'import javabridge as jutil\n'), ((12567, 12653), 'javabridge.make_method', 'jutil.make_method', (['"""setId"""', '"""(Ljava/lang/String;)V"""', '"""Sets the current file name."""'], {}), "('setId', '(Ljava/lang/String;)V',\n 'Sets the current file name.')\n", (12584, 12653), True, 'import javabridge as jutil\n'), ((12718, 12816), 'javabridge.make_method', 'jutil.make_method', (['"""close"""', '"""()V"""', '"""Closes currently open file(s) and frees allocated memory."""'], {}), "('close', '()V',\n 'Closes currently open file(s) and frees allocated memory.')\n", (12735, 12816), True, 'import javabridge as jutil\n'), ((12877, 13206), 'javabridge.make_method', 'jutil.make_method', (['"""saveBytes"""', '"""(I[BLloci/formats/tiff/IFD;)V"""', '"""save a byte array to an image channel\n\n index - image index\n bytes - byte array to save\n ifd - a loci.formats.tiff.IFD instance that gives all of the\n IFD values associated with the channel"""'], {}), '(\'saveBytes\', \'(I[BLloci/formats/tiff/IFD;)V\',\n """save a byte array to an image channel\n\n index - image index\n bytes - byte array to save\n ifd - a loci.formats.tiff.IFD instance that gives all of the\n IFD values 
associated with the channel"""\n )\n', (12894, 13206), True, 'import javabridge as jutil\n'), ((22016, 22039), 'numpy.random.rand', 'np.random.rand', (['w', 'h', 'c'], {}), '(w, h, c)\n', (22030, 22039), True, 'import numpy as np\n'), ((23209, 23267), 'numpy.array', 'np.array', (['[image[:, :, 0], image[:, :, 1], image[:, :, 2]]'], {}), '([image[:, :, 0], image[:, :, 1], image[:, :, 2]])\n', (23217, 23267), True, 'import numpy as np\n')]
|
from matplotlib import pyplot as plt
def imshow(img, **kwargs):
if len(img.shape) == 2 and 'cmap' not in kwargs:
return plt.imshow(img, cmap=plt.cm.gray, **kwargs)
if len(img.shape) == 3 and img.shape[2] == 3:
return plt.imshow(img[:, :, ::-1], **kwargs)
return plt.imshow(img, **kwargs)
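if __name__ == "__main__":
    # Illustrative demo (added for clarity, not part of the original snippet); the synthetic
    # array below is a stand-in for an OpenCV-style BGR image.
    import numpy as np
    demo_bgr = np.zeros((10, 10, 3), dtype=np.uint8)
    demo_bgr[:, :, 0] = 255   # blue channel in BGR order
    imshow(demo_bgr)          # displayed as blue after the BGR -> RGB reversal
    imshow(demo_bgr[:, :, 0]) # 2-D input falls back to the gray colormap
    plt.show()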
|
[
"matplotlib.pyplot.imshow"
] |
[((292, 317), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img, **kwargs)\n', (302, 317), True, 'from matplotlib import pyplot as plt\n'), ((134, 177), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {'cmap': 'plt.cm.gray'}), '(img, cmap=plt.cm.gray, **kwargs)\n', (144, 177), True, 'from matplotlib import pyplot as plt\n'), ((243, 280), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img[:, :, ::-1]'], {}), '(img[:, :, ::-1], **kwargs)\n', (253, 280), True, 'from matplotlib import pyplot as plt\n')]
|
# -*- coding: utf-8 -*-
"""
This file contains the graphical-interface implementation of the Game of Life logic
It does not contain the code of the Class Espace used for
Created on Wed Feb 17 14:36:56 2021
@author: <NAME>
"""
import os
import game_of_life_logique as gol
from tkinter import *
os.chdir(os.path.dirname(__file__))
def from_rgb(rgb):
"""prend un tuple rgb et le transforme en string héxadécimal de couleur tkinter
-----------------------------------
Entrées : tuple rgb de 0 à 255 (R,G,B)
Sortie : string d'héxadécimal
note : fonction prise d'internet.
"""
return "#%02x%02x%02x" % rgb
class Fenetre(Frame):
def __init__(self, espace, master=None):
"""initialisation de la fenetre
-----------------------------------
Entrées : espace enfant et master de la fenetre
Sortie : objet fenetre
"""
self.master = master
self.master.title("<NAME> Game of Life")
self.master.config(bg="grey")
self.espace = espace
self.col_vie = from_rgb((255, 255, 255))
self.col_mort = from_rgb((0, 0, 0))
def reset_espace(self):
"""retour à l'état initial de l'espace de l'utilisateur
-----------------------------------
Entrée : input du bouton de retour à zéro
Sortie : retour à zéro de l'espace et des variables affichées
"""
self.espace.retour_zero()
self.var_pop.set("Population=" + str(self.espace.pop_init))
self.var_str_iter.set("0 itérations ")
self.affiche_base()
def setup_canvas(self):
"""initialisation du canvas et de ses labels correspondants
-----------------------------------
Entrées : aucunes
Sortie : affichage du cadre du canvas et ce qu'il y a à l'intérieur
"""
# frame qui contient le canvas, ses informations, les outils à gauche,et les paramètres à droite, il se fait pack dans init_parametres()
self.frame_para_canvas = Frame(highlightbackground="black", bg="light grey")
        # frame that holds the canvas and its information; packed in init_parametres() so the espace-creation pane sits on the left
self.frame_canvas = Frame(master=self.frame_para_canvas, highlightbackground="black", highlightthickness=2, bg="light grey")
        # frame that holds the information about the current espace
self.frame_labels = Frame(master=self.frame_canvas, highlightbackground="black", highlightthickness=1, bg="light grey")
self.var_str_iter = StringVar()
self.var_str_iter.set(str(self.espace.n_iter) + " itérations ")
self.var_dim = StringVar()
self.var_dim.set("Dimensions : longueur=" + str(self.espace.dim[0]) + ", largeur=" + str(self.espace.dim[1]))
self.var_pop = StringVar()
self.var_pop.set("Population=" + str(self.espace.liste_pop[0]))
self.label_pop = Label(self.frame_labels, textvariable=self.var_pop)
self.label_dim = Label(self.frame_labels, textvariable=self.var_dim)
self.label_iter = Label(self.frame_labels, textvariable=self.var_str_iter)
self.label_iter.pack(side=LEFT)
self.label_dim.pack(side=LEFT)
self.label_pop.pack(side=LEFT)
self.frame_labels.pack(fill="x")
        # end of the initialization of the frame that holds the information about the current espace
self.canvas = Canvas(self.frame_canvas, bg="grey", width=self.espace.dim[0]*10, height=self.espace.dim[1]*10)
self.canvas.bind("<Button>", self.click_canvas) # bind pour les clics seuls
self.canvas.bind("<B1-Motion>", self.creer_xy) # bind pour les clics gauches maintenus en mouvement
self.canvas.bind("<B3-Motion>", self.delete_xy) # bind pour les clics droits maintenus en mouvement
self.affiche_base() # affiche_base() pour afficher l'état initial de l'espace courant
self.canvas.pack()
def iter_affiche(self):
"""effectue 1 itération sur tout l'espace et affiche les changements
-----------------------------------
Entrées : aucunes
Sortie : affichage sur le canvas Tkinter
"""
liste_chg = self.espace.iteration() # extraie la liste des changements de l'itération n à n+1
self.var_str_iter.set(str(self.espace.n_iter) + " itérations")
self.var_pop.set("Population=" + str(self.espace.liste_pop[-1]))
self.affichage_frame(liste_chg)
def n_iterations(self):
"""effectue n itération sur tout l'espace et affiche les changements
-----------------------------------
Entrées : nombre entier dans le widget Entry correspondant
Sortie : affichage sur le canvas Tkinter
"""
try:
n = int(self.entree_nb.get())
for i in range(n):
self.iter_affiche()
self.frame_canvas.update_idletasks()
except Exception:
self.entree_nb.delete(0, END)
self.entree_nb.insert(0, "PAS UN NOMBRE")
def regen_dens(self):
"""lance la fonction regen_espace et gère les erreurs
-----------------------------------
Entrées : bouton correspondant cliqué
Sortie : affichage de la nouvelle génération
"""
try:
self.espace.regen_espace(int(self.entree_regen.get()))
self.var_pop.set("Population=" + str(self.espace.liste_pop[-1]))
self.affiche_base()
except Exception:
self.entree_regen.delete(0, END)
self.entree_regen.insert(0, "PAS UN NOMBRE")
def pos_canvas(self, event):
"""retourne la position du curseur dans le canvas tkinter
-----------------------------------
Entrées : objet event tkinter
Sortie : position x, y du curseur par rapport au coin du canvas
"""
x = event.x_root - self.canvas.winfo_rootx()
y = event.y_root - self.canvas.winfo_rooty()
return x, y
def delete_xy(self, event):
"""supprime la cellule aux coordonées du curseur si il est dans le canvas
pour les clicks continus (EN MOUVEMENT)
-----------------------------------
Entrées : objet event tkinter
Sortie : actualisation du canvas et de l'espace avec la nouvelle cellule
"""
x, y = self.pos_canvas(event)
x_carte = x//10
y_carte = y//10
liste_chg = []
if -1 < x_carte < self.espace.dim[0] and -1 < y_carte < self.espace.dim[1] and self.espace.carte[x_carte][y_carte] == 1:
self.espace.carte[x_carte][y_carte] = 0
self.espace.liste_pop[-1] -= 1
liste_chg.append((0, x_carte, y_carte))
self.affichage_frame(liste_chg)
self.var_pop.set("Population=" + str(self.espace.liste_pop[-1]))
def creer_xy(self, event):
"""créé une cellule aux coordonées du curseur si il est dans le canvas
pour les clicks continus (EN MOUVEMENT)
-----------------------------------
Entrées : objet event tkinter
Sortie : actualisation du canvas et de l'espace avec la nouvelle cellule
"""
x, y = self.pos_canvas(event)
x_carte = x//10
y_carte = y//10
liste_chg = []
if -1 < x_carte < self.espace.dim[0] and -1 < y_carte < self.espace.dim[1] and self.espace.carte[x_carte][y_carte] == 0:
self.espace.carte[x_carte][y_carte] = 1
self.espace.liste_pop[-1] += 1
liste_chg.append((1, x_carte, y_carte))
self.affichage_frame(liste_chg)
self.var_pop.set("Population=" + str(self.espace.liste_pop[-1]))
def click_canvas(self, event):
"""créé ou détruit une cellule aux coordonées du curseur si il est dans le canvas
pour les clicks seuls (PAS DE MOUVEMENT)
-----------------------------------
Entrées : objet event tkinter
Sortie : actualisation du canvas et de l'espace avec la nouvelle cellule
"""
x, y = self.pos_canvas(event)
x_carte = x//10
y_carte = y//10
liste_chg = []
if event.num == 1 and self.espace.carte[x_carte][y_carte] == 0:
self.espace.carte[x_carte][y_carte] = 1
self.espace.liste_pop[-1] += 1
liste_chg.append((1, x_carte, y_carte))
elif event.num == 3 and self.espace.carte[x_carte][y_carte] == 1:
self.espace.carte[x_carte][y_carte] = 0
self.espace.liste_pop[-1] -= 1
liste_chg.append((0, x_carte, y_carte))
self.affichage_frame(liste_chg)
def init_interface(self):
"""initialisation de la barre d'action en dessous du canvas
-----------------------------------
Entrées : aucunes
Sortie : affichage de la barre d'action fonctionnelle
"""
# frame qui contient tous les élements du bas de la fenêtre
self.frame_interface = Frame(highlightbackground="black", highlightthickness=3, bg="light grey")
        # frame for the iteration buttons
self.frame_boutons_iter = Frame(master=self.frame_interface, highlightbackground="black", highlightthickness=1, bg="light grey")
self.bouton_iter = Button(self.frame_boutons_iter, text="1 itération", command=self.iter_affiche)
self.entree_nb = Entry(self.frame_boutons_iter)
self.bouton_start = Button(self.frame_boutons_iter, text="N itérations", command=self.n_iterations)
self.label_iter = Label(self.frame_boutons_iter, text="itération de l'espace")
self.label_iter.pack(fill="x")
self.bouton_iter.pack(fill="x")
self.entree_nb.pack(fill="x")
self.bouton_start.pack(fill="x")
self.frame_boutons_iter.pack(side=LEFT, fill="y")
        # end of the initialization of the iteration pane
        # frame of the 2 "special" actions: reset to zero and regeneration of the current espace with a new density
self.frame_special = Frame(master=self.frame_interface, highlightbackground="black", highlightthickness=1, bg="light grey")
self.label_special = Label(self.frame_special, text="reset et regénération")
self.bouton_reset = Button(self.frame_special, text="Redémarrer à l'état initial", command=self.reset_espace)
self.entree_regen = Entry(self.frame_special)
self.bouton_regen = Button(self.frame_special, text="regénérer avec la densité indiquée", command=self.regen_dens)
self.label_special.pack(fill="x")
self.bouton_reset.pack(fill="x")
self.entree_regen.pack(fill="x")
self.bouton_regen.pack(fill="x")
self.frame_special.pack(side=LEFT, fill="y")
        # end of the initialization of the special pane
        # frame for loading and saving
self.frame_sauv_charg = Frame(master=self.frame_interface, highlightbackground="black", highlightthickness=1, bg="light grey")
self.label_sauv_charg = Label(self.frame_sauv_charg, text="Sauvegarde et chargement")
self.entree_sauv_charg = Entry(self.frame_sauv_charg)
self.bouton_charg = Button(self.frame_sauv_charg, text="charger depuis", command=self.chargement_win)
self.bouton_sauv = Button(self.frame_sauv_charg, text="sauvegarder dans", command=self.sauvegarde_win)
self.label_sauv_charg.pack(fill="x")
self.entree_sauv_charg.pack(fill="x")
self.bouton_charg.pack(fill="x")
self.bouton_sauv.pack(fill="x")
self.frame_sauv_charg.pack(side=LEFT, fill="y")
        # end of the initialization of the load/save pane
        # frame of the button that displays the population graph
self.frame_graphique = Frame(master=self.frame_interface, highlightbackground="black", highlightthickness=1, bg="light grey")
self.label_graph = Label(self.frame_graphique, text="affichage du graphique\nde population")
self.bouton_graph = Button(self.frame_graphique, text="Afficher la courbe", command=self.espace.graphique)
self.label_graph.pack()
self.bouton_graph.pack(fill="both")
self.frame_graphique.pack(side=LEFT, fill="y")
        # end of the initialization of the graph display pane
        # frame of the bestiary
self.frame_formes = Frame(master=self.frame_interface, highlightbackground="black", highlightthickness=1, bg="light grey")
self.label_formes = Label(self.frame_formes, text="Bestiaire\n (x, y) puis nom de la forme")
self.label_formes.pack()
self.frame_pos = Frame(master=self.frame_formes, highlightbackground="black", bg="light grey")
self.entree_pos_x = Entry(self.frame_pos)
        self.entree_pos_x.pack(side=LEFT, fill="x") # entry for the x position
        self.entree_pos_y = Entry(self.frame_pos)
        self.entree_pos_y.pack(side=LEFT, fill="x") # entry for the y position
self.frame_pos.pack(fill="x")
self.entree_forme = Entry(self.frame_formes)
        self.entree_forme.pack(fill="x") # entry for the shape name
        self.bouton_forme = Button(self.frame_formes, text="afficher la forme", command=self.dessine_forme)
        self.bouton_forme.pack(fill="x") # button to add the given shape
self.frame_formes.pack(side=LEFT, fill="both")
        # end of the initialization of the bestiary pane
        self.frame_interface.pack(fill="x")
        # end of the initialization of the lower pane of the interface
def dessine_forme(self):
"""dessine une des formes du dictionaire de formes
-----------------------------------
Entrées : position x, y du coin supérieur gauche de la forme, et so nom
Sorties : forme dessinée sur l'écran ou message(s) d'erreur dans les entrées
"""
try:
self.espace.carte = self.espace.forme_donnee(int(self.entree_pos_x.get()), int(self.entree_pos_y.get()), self.espace.carte, self.entree_forme.get())
self.entree_pos_x.delete(0, END)
self.entree_pos_y.delete(0, END)
self.affiche_base()
self.var_pop.set("Population=" + str(self.espace.compte_pop()))
        except ValueError: # ValueError is only raised if x or y is not a number
self.entree_pos_x.delete(0, END)
self.entree_pos_x.insert(0, "POSITION INVALIDE")
self.entree_pos_y.delete(0, END)
self.entree_pos_y.insert(0, "POSITION INVALIDE")
except IndexError: # IndexError sort si x ou y ou un point de la forme rajoutée est en dehors de la carte
self.entree_pos_x.delete(0, END)
self.entree_pos_x.insert(0, "POSITION HORS-CARTE")
self.entree_pos_y.delete(0, END)
self.entree_pos_y.insert(0, "POSITION HORS-CARTE")
except KeyError: # KeyError ne sort que si la forme voulue n'existe pas
self.entree_forme.delete(0, END)
self.entree_forme.insert(0, "FORME INVALIDE")
def sauvegarde_win(self):
"""utilisation de la fonction de sauvegarde de l'espace depuis l'interface
-----------------------------------
Entrées : nom du fichier dans lequel sauvegarder donné par l'entrée correspondante
Sortie : fichier sauvegardé
"""
try:
gol.sauvegarde(self.entree_sauv_charg.get(), self.espace.carte)
self.entree_sauv_charg.delete(0, END)
self.entree_sauv_charg.insert(0, "Sauvegarde terminée")
except OSError:
self.entree_sauv_charg.delete(0, END)
self.entree_sauv_charg.insert(0, "NOM INVALIDE")
def changement_espace(self):
"""protocole à suivre pour changer les commandes que chaque bouton fait à un nouvel espace
-----------------------------------
Entrées : aucune
Sortie : affichage du nouvel espace sur un canvas de bonne taille
"""
self.canvas.delete('all')
self.canvas.config(width=self.espace.dim[0]*10, height=self.espace.dim[1]*10)
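        # each cell is drawn as a 10x10 px rectangle, so the canvas is resized to dim * 10 pixels per axis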
self.bouton_graph.config(command=self.espace.graphique)
# on change la commande du bouton de graphique car c'est le seul bouton qui execute directement une méthode de l'espace
self.var_str_iter.set(str(self.espace.n_iter) + " itérations")
self.var_pop.set("Population=" + str(self.espace.liste_pop[-1]))
self.affiche_base()
def chargement_win(self):
"""utilisation de la fonction de chargement de l'espace depuis l'interface
-----------------------------------
Entrées : nom du fichier depuis lequel charger donné par l'entrée correspondante
Sortie : espace changé au nouvel espace si tout a ben fonctionné
"""
try:
self.espace = gol.chargement(self.entree_sauv_charg.get())
self.changement_espace()
self.entree_sauv_charg.delete(0, END)
self.entree_sauv_charg.insert(0, "Chargement terminé")
except FileNotFoundError:
self.entree_sauv_charg.delete(0, END)
self.entree_sauv_charg.insert(0, "FICHIER INEXISTANT")
except IndexError:
self.entree_sauv_charg.delete(0, END)
self.entree_sauv_charg.insert(0, "MAUVAIS FORMATTAGE")
except OSError:
self.entree_sauv_charg.delete(0, END)
self.entree_sauv_charg.insert(0, "FICHIER INEXISTANT")
def introduction(self):
"""Fonction pour avoir une fenêtre apparaître afin d'expliquer le jeu de la vie et ses règles
---------------------
Entrées : aucunes
Sorties : fenêtre d'introduction au programme"""
fenetre_intro = Toplevel(self.master)
texte_intro = """Bienvenue,
ceci est une recréation en python du Jeu de la Vie créé par <NAME> en 1970
le jeu se déroule sur un quadrillage, ici torique, où chaque cellule est vivante ou morte
les règles sont simples:
- si une cellule morte a exactement trois voisins vivants, elle devient vivante
- si une cellule vivante a entre 2 et 3 voisins vivants inclu, elle reste vivante, sinon, elle meurt
pour commencer à jouer, fermez cette fenêtre !
"""
label_intro = Label(fenetre_intro, text=texte_intro)
label_intro.pack()
def affichage_frame(self, liste_chg):
"""affichage d'une image sur un canvas déjà rempli
-----------------------------------
Entrées : liste de changements opérés par une itération
Sortie : affichage mis à jour avec les bons changements
"""
for chg, x, y in liste_chg: # chg est 0 ou 1, x et y sont des coordonnées entières
self.canvas.delete(self.rects[x][y]) # on supprime le rectangle affiché de cette position pour ne pas en accumuler au fil du temps
if chg == 0:
self.rects[x][y] = self.canvas.create_rectangle(x*10, y*10, (x+1)*10, (y+1)*10, fill=self.col_mort, outline=from_rgb((128, 128, 128)))
else: # on modifie la valeur de cette position du quadrillage de rectangles avec un nouveau rectangle
self.rects[x][y] = self.canvas.create_rectangle(x*10, y*10, (x+1)*10, (y+1)*10, fill=self.col_vie, outline=from_rgb((128, 128, 128)))
def affiche_base(self):
"""affichage d'une image de la carte de l'espace de la fenetre,
lent mais essentiel pour l'initialisation du canvas, par exemple
-----------------------------------
Entrées : aucune
Sortie : mise à jour du canvas
"""
# création d'une liste à deux dimensions pour stocker les valeurs d'indentification des rectangles du canvas
self.rects = []
self.canvas.delete('all')
for x in range(self.espace.dim[0]):
self.rects.append([])
for y in range(self.espace.dim[1]):
if self.espace.carte[x][y] == 0:
self.rects[x].append(self.canvas.create_rectangle(x*10, y*10, (x+1)*10, (y+1)*10, fill=self.col_mort, outline=from_rgb((128, 128, 128))))
else:
self.rects[x].append(self.canvas.create_rectangle(x*10, y*10, (x+1)*10, (y+1)*10, fill=self.col_vie, outline=from_rgb((128, 128, 128))))
def creer_nouv_espace(self):
"""création d'un nouvel espace selon les tailles données
-----------------------------------
Entrées : dimensions du nouvel espace issues des paramètres
Sortie : mise à jour du canvas
"""
self.espace = gol.Espace(self.taille_x_para.get(), self.taille_y_para.get(), 5)
self.changement_espace()
self.affiche_base()
def applique_param(self):
"""applique les paramètres choisis dans le volet à droite du canvas
-----------------------------------
Entrées : couleurs RGB des cellules mortes et vivantes, et nombre de voisins pour les cellules
Sorties : changement des couleurs des cellules dans le Canvas et des règles du jeu
"""
self.col_vie = from_rgb((self.rouge_vie.get(), self.vert_vie.get(), self.bleu_vie.get()))
self.col_mort = from_rgb((self.rouge_morts.get(), self.vert_morts.get(), self.bleu_morts.get()))
self.espace.voisins_min = self.nb_min_vois.get()
self.espace.voisins_max = self.nb_max_vois.get()
self.espace.voisins_res = self.nb_res.get()
self.affiche_base()
def clique_bestiaire(self, event):
"""lorsque l'utilisateur clique sur la liste, met à jour la forme choisie
-----------------------------------
Entrées : aucunes
Sorties : changement de la forme choisie dans le boite de selection du bestiaire
"""
bete_choisie = self.liste_bestiaire.get("active")
self.entree_forme.delete(0, END)
self.entree_forme.insert("end", bete_choisie)
def init_parametres(self):
"""méthode pour initialiser le volet de paramétrage à côté du Canvas.
options : - changement de taille de carte
- changement de couleur d'affichages des cellules mortes et vivantes
- changement de nombre minimum et maximum d evoisins pour vivantes
- changement de nombre de voisins pour vivre
- bouton pour appliquer les changements
-----------------------------------
Entrées : aucune
Sortie : ajout de la liste du contenu du bestiaire,
de l'espace de création d'un nouvel espace, et des paramètres dans la fenêtre
"""
self.frame_espace_bestiaire = Frame(master=self.frame_para_canvas, highlightbackground="black", highlightthickness=3, bg="light grey")
# frame qui permet de changer la taille de la carte en en créant une nouvelle
self.frame_espace = Frame(master=self.frame_espace_bestiaire, highlightbackground="black", highlightthickness=1, bg="light grey")
self.label_espace = Label(master=self.frame_espace, text="volet de création d'un espace")
self.label_espace.pack(fill="x")
self.label_taille_x = Label(master=self.frame_espace, text="Longueur de l'espace voulu")
self.label_taille_x.pack()
self.taille_x_para = IntVar()
self.taille_x_para.set(self.espace.dim[0])
self.taille_x_scale = Scale(master=self.frame_espace, from_=5, to=100, variable=self.taille_x_para, orient=HORIZONTAL)
self.taille_x_scale.pack(fill="x")
self.label_taille_y = Label(master=self.frame_espace, text="Largeur de l'espace voulu")
self.label_taille_y.pack()
self.taille_y_para = IntVar()
self.taille_y_para.set(self.espace.dim[1])
self.taille_y_scale = Scale(master=self.frame_espace, from_=5, to=100, variable=self.taille_y_para, orient=HORIZONTAL)
self.taille_y_scale.pack(fill="x")
self.bouton_nouv_espace = Button(master=self.frame_espace, text="créer un nouvel espace", command=self.creer_nouv_espace)
self.bouton_nouv_espace.pack(fill="x")
self.frame_espace.pack(fill="x")
# fin de l'initialisation de ce premier volet
# initialisation de l'affichage du contenu du bestiaire actuel
self.frame_bestiaire = Frame(master=self.frame_espace_bestiaire, highlightbackground="black", highlightthickness=3, bg="light grey")
self.label_bestiaire = Label(master=self.frame_bestiaire, text="contenu du bestiaire")
self.label_bestiaire.pack()
self.liste_bestiaire = Listbox(master=self.frame_bestiaire)
for bete in gol.Espace.formes.keys():
self.liste_bestiaire.insert("end", bete)
self.liste_bestiaire.bind("<Button>", self.clique_bestiaire)
self.liste_bestiaire.pack(fill="x")
self.frame_bestiaire.pack(fill="x")
        # fin de l'initialisation de l'affichage du contenu du bestiaire actuel
# création de la frame pour les autres paramètres qui s'appliquent avec un autre bouton
self.frame_autres = Frame(master=self.frame_para_canvas, highlightbackground="black", highlightthickness=1, bg="light grey")
# frame des sliders de couleur RVB pour les cellules mortes
self.frame_col_morts = Frame(master=self.frame_autres, highlightbackground="black", highlightthickness=1, bg="light grey")
self.label_col_morts = Label(master=self.frame_col_morts, text="couleur des cellules mortes")
self.label_col_morts.pack()
self.rouge_morts = IntVar()
self.rouge_morts.set(0)
self.rouge_morts_scale = Scale(master=self.frame_col_morts, from_=0, to=255, variable=self.rouge_morts, orient=HORIZONTAL, bg="red")
self.rouge_morts_scale.pack(fill="x")
self.vert_morts = IntVar()
self.vert_morts.set(0)
self.vert_morts_scale = Scale(master=self.frame_col_morts, from_=0, to=255, variable=self.vert_morts, orient=HORIZONTAL, bg="green")
self.vert_morts_scale.pack(fill="x")
self.bleu_morts = IntVar()
self.bleu_morts.set(0)
self.bleu_morts_scale = Scale(master=self.frame_col_morts, from_=0, to=255, variable=self.bleu_morts, orient=HORIZONTAL, bg="blue")
self.bleu_morts_scale.pack(fill="x")
self.frame_col_morts.pack(fill="x")
# fin de l'initialisation des sliders de couleur RVB pour les cellules mortes
# frame des sliders de couleur RVB pour les cellules vivantes
self.frame_col_vie = Frame(master=self.frame_autres, highlightbackground="black", highlightthickness=1, bg="light grey")
self.label_col_vie = Label(master=self.frame_col_vie, text="couleur des cellules vivantes")
self.label_col_vie.pack()
self.rouge_vie = IntVar()
self.rouge_vie.set(255)
self.rouge_vie_scale = Scale(master=self.frame_col_vie, from_=0, to=255, variable=self.rouge_vie, orient=HORIZONTAL, bg="red")
self.rouge_vie_scale.pack(fill="x")
self.vert_vie = IntVar()
self.vert_vie.set(255)
self.vert_vie_scale = Scale(master=self.frame_col_vie, from_=0, to=255, variable=self.vert_vie, orient=HORIZONTAL, bg="green")
self.vert_vie_scale.pack(fill="x")
self.bleu_vie = IntVar()
self.bleu_vie.set(255)
self.bleu_vie_scale = Scale(master=self.frame_col_vie, from_=0, to=255, variable=self.bleu_vie, orient=HORIZONTAL, bg="blue")
self.bleu_vie_scale.pack(fill="x")
self.frame_col_vie.pack(fill="x")
# fin de l'initialisation des sliders de couleur RVB pour les cellules vivantes
# frame des sliders de règles de l'espace courant
self.frame_regles_espace = Frame(master=self.frame_autres, highlightbackground="black", highlightthickness=1, bg="light grey")
self.label_regles = Label(master=self.frame_regles_espace, text="règles de l'espace courant")
self.label_regles.pack()
        self.label_res = Label(master=self.frame_regles_espace, text="nombre de voisins pour ressusciter")
self.label_res.pack()
self.nb_res = IntVar()
self.nb_res.set(3)
self.scale_res = Scale(master=self.frame_regles_espace, from_=0, to=8, variable=self.nb_res, orient=HORIZONTAL)
self.scale_res.pack(fill="x")
self.label_min_vois = Label(master=self.frame_regles_espace, text="minimum de voisins pour survivre")
self.label_min_vois.pack()
self.nb_min_vois = IntVar()
self.nb_min_vois.set(2)
self.scale_min_vois = Scale(master=self.frame_regles_espace, from_=0, to=8, variable=self.nb_min_vois, orient=HORIZONTAL)
self.scale_min_vois.pack(fill="x")
self.label_max_vois = Label(master=self.frame_regles_espace, text="maximum de voisins pour survivre")
self.label_max_vois.pack()
self.nb_max_vois = IntVar()
self.nb_max_vois.set(3)
self.scale_max_vois = Scale(master=self.frame_regles_espace, from_=0, to=8, variable=self.nb_max_vois, orient=HORIZONTAL)
self.scale_max_vois.pack(fill="x")
self.frame_regles_espace.pack(fill="x")
# fin de l'initialisation des sliders de règles de l'espace courant
# bouton pour appliquer les paramètres de couleur et de règles
self.bouton_applique = Button(master=self.frame_autres, text="appliquer les paramètres", command=self.applique_param)
self.bouton_applique.pack(fill="x")
self.frame_espace_bestiaire.pack(side=LEFT)
self.frame_canvas.pack(side=LEFT)
self.frame_autres.pack(side=LEFT)
self.frame_para_canvas.pack()
def main():
"""Fonction Main qui assure le démarrage du programme
-----------------------------------
Entrées : aucunes
Sorties : fin de programme si il y a un crash
"""
espace_user = gol.Espace(50, 50, 300)
master = Tk()
wind = Fenetre(espace_user, master)
wind.setup_canvas()
wind.init_parametres()
wind.init_interface()
wind.introduction()
try:
master.mainloop()
except Exception:
master.destroy()
main()
# ce qui manque :
# plus de formes
|
[
"os.path.dirname",
"game_of_life_logique.Espace",
"game_of_life_logique.Espace.formes.keys"
] |
[((328, 353), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (343, 353), False, 'import os\n'), ((30227, 30250), 'game_of_life_logique.Espace', 'gol.Espace', (['(50)', '(50)', '(300)'], {}), '(50, 50, 300)\n', (30237, 30250), True, 'import game_of_life_logique as gol\n'), ((24937, 24961), 'game_of_life_logique.Espace.formes.keys', 'gol.Espace.formes.keys', ([], {}), '()\n', (24959, 24961), True, 'import game_of_life_logique as gol\n')]
|
"""
App main entry point
:author: <NAME>
:copyright: Copyright 2021, LINKS Foundation
:version: 1.0.0
..
Copyright 2021 LINKS Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Third Party
from fastapi import FastAPI
from fastapi.openapi.utils import get_openapi
from fastapi.openapi.docs import get_redoc_html
from fastapi.staticfiles import StaticFiles
# Internal
from .routers import galileo, ublox
from .db.postgresql import get_database
# --------------------------------------------------------------------------------------------
# Instantiate
database = get_database()
app = FastAPI(docs_url=None, redoc_url=None)
app.include_router(galileo.router)
app.include_router(ublox.router)
app.mount("/static", StaticFiles(directory="static"), name="static")
@app.on_event("startup")
async def startup():
await database.connect()
@app.on_event("shutdown")
async def shutdown():
await database.disconnect()
@app.get("/api/v1/galileo/docs", include_in_schema=False)
async def custom_redoc_ui_html():
return get_redoc_html(
openapi_url=app.openapi_url,
title="UbloxApi",
redoc_js_url="/static/redoc.standalone.js",
redoc_favicon_url="/static/satellite.png",
)
def custom_openapi():
if app.openapi_schema:
return app.openapi_schema
openapi_schema = get_openapi(
title="UbloxApi",
version="1.0.0",
routes=app.routes,
)
openapi_schema["info"]["x-logo"] = {"url": "/static/logo_full.png"}
app.openapi_schema = openapi_schema
return app.openapi_schema
app.openapi = custom_openapi
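# Launch sketch (the module path below is an assumption; adjust it to the actual package layout):
#   uvicorn <package>.main:app --host 0.0.0.0 --port 8000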
|
[
"fastapi.staticfiles.StaticFiles",
"fastapi.openapi.utils.get_openapi",
"fastapi.openapi.docs.get_redoc_html",
"fastapi.FastAPI"
] |
[((1122, 1160), 'fastapi.FastAPI', 'FastAPI', ([], {'docs_url': 'None', 'redoc_url': 'None'}), '(docs_url=None, redoc_url=None)\n', (1129, 1160), False, 'from fastapi import FastAPI\n'), ((1250, 1281), 'fastapi.staticfiles.StaticFiles', 'StaticFiles', ([], {'directory': '"""static"""'}), "(directory='static')\n", (1261, 1281), False, 'from fastapi.staticfiles import StaticFiles\n'), ((1562, 1715), 'fastapi.openapi.docs.get_redoc_html', 'get_redoc_html', ([], {'openapi_url': 'app.openapi_url', 'title': '"""UbloxApi"""', 'redoc_js_url': '"""/static/redoc.standalone.js"""', 'redoc_favicon_url': '"""/static/satellite.png"""'}), "(openapi_url=app.openapi_url, title='UbloxApi', redoc_js_url=\n '/static/redoc.standalone.js', redoc_favicon_url='/static/satellite.png')\n", (1576, 1715), False, 'from fastapi.openapi.docs import get_redoc_html\n'), ((1856, 1921), 'fastapi.openapi.utils.get_openapi', 'get_openapi', ([], {'title': '"""UbloxApi"""', 'version': '"""1.0.0"""', 'routes': 'app.routes'}), "(title='UbloxApi', version='1.0.0', routes=app.routes)\n", (1867, 1921), False, 'from fastapi.openapi.utils import get_openapi\n')]
|
import os, json
with open("PubChemElements_all.json") as fo:
data = json.load(fo)
all_elements = []
for j in data['Table']['Row']:
element_obj = {}
for element in list(zip(data['Table']['Columns']['Column'], j['Cell'])):
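        # zip pairs each column name with the matching cell value for this element row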
property = element[0]
value = element[1]
element_obj[property] = value
all_elements.append(element_obj)
with open("my_elements_data.json", 'w') as fo:
json.dump(all_elements, fo, indent=2)
|
[
"json.dump",
"json.load"
] |
[((73, 86), 'json.load', 'json.load', (['fo'], {}), '(fo)\n', (82, 86), False, 'import os, json\n'), ((456, 493), 'json.dump', 'json.dump', (['all_elements', 'fo'], {'indent': '(2)'}), '(all_elements, fo, indent=2)\n', (465, 493), False, 'import os, json\n')]
|
from a.filea import ClassA
from a.b.fileb import ClassB
# class_name: foo.bar.Bar
def import_class(class_name):
components = class_name.split('.')
module = __import__(components[0])
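    # __import__('a.b.fileb') returns the top-level package 'a'; the getattr loop below walks down to the target class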
for comp in components[1:]:
# print(repr(comp))
module = getattr(module, comp)
return module
if __name__ == '__main__':
a = ClassA()
print(repr(a))
a.hello()
b = ClassB()
print(repr(b))
b.hello()
reflection_clsa = import_class('a.filea.ClassA')
print(repr(reflection_clsa))
reflection_a = reflection_clsa()
print(repr(reflection_a))
reflection_a.hello()
reflection_clsb = import_class('a.b.fileb.ClassB')
print(repr(reflection_clsb))
reflection_b = reflection_clsb()
print(repr(reflection_b))
reflection_b.hello()
|
[
"a.b.fileb.ClassB",
"a.filea.ClassA"
] |
[((346, 354), 'a.filea.ClassA', 'ClassA', ([], {}), '()\n', (352, 354), False, 'from a.filea import ClassA\n'), ((397, 405), 'a.b.fileb.ClassB', 'ClassB', ([], {}), '()\n', (403, 405), False, 'from a.b.fileb import ClassB\n')]
|
# This module initiates the checkpoint
# processing of FTI files.
import os
import glob
import os.path
import time
from fnmatch import fnmatch
import configparser
import posix_read_ckpts
import subprocess
import sys
# variables used for input validation
fti_levels = (1, 2, 3, 4)
output_formats = ('CSV', 'HDF5', 'data')
# runtime variables of FTI (ckpt and meta)
config_file = ""
ckpt_dir = ""
meta_dir = ""
global_dir = ""
group_size = 0
nbHeads = 0
nodeSize = 0
totalRanks = 0
ioMode = 0
ckpt_abs_path = ""
meta_abs_path = ""
execution_id = ""
level_meta_dir = ""
level_dir = ""
# This function reads the config_file
# and sets FTI parameters
def init_config_params(config_file):
global execution_id
global ckpt_dir
global meta_dir
global global_dir
global group_size
global nbHeads
global nodeSize
global ioMode
if os.path.isfile(config_file) is False:
print("Configuration file not found")
sys.exit(2001)
else:
config = configparser.ConfigParser()
config.read(config_file)
execution_id = config['restart']['exec_id']
ckpt_dir = config['basic']['ckpt_dir']
meta_dir = config['basic']['meta_dir']
global_dir = config['basic']['glbl_dir']
group_size = config['basic']['group_size']
nbHeads = config['basic']['head']
nodeSize = config['basic']['node_size']
ioMode = config['basic']['ckpt_io']
# This function processes FTI's files
# given config_file and set the absolute
# paths of meta files and ckpt files
def process_fti_paths(config_file):
global ckpt_dir
global meta_dir
global ckpt_abs_path
global meta_abs_path
# ckpt dir
dir_path = os.path.dirname(os.path.realpath(config_file))
# concatenate paths
if level_dir == '/l4/':
# switch to global_dir
ckpt_dir = global_dir
if ckpt_dir.startswith('./') is True: # same directory as config
ckpt_abs_path = dir_path + ckpt_dir.replace('.', '')
elif "." not in ckpt_dir: # absolute path
# set dir
ckpt_abs_path = ckpt_dir
else: # relative path
# iterate over the number of '../' found in ckpt_path
os.chdir(dir_path)
dirs = ckpt_dir.count("..")
for i in range(dirs):
os.chdir("..")
# concatenate the remaining part
for i in range(dirs):
# remove ../
ckpt_dir = ckpt_dir.replace('../', '')
os.chdir(ckpt_dir)
ckpt_abs_path = os.getcwd()
print("ckpt_abs_path ", ckpt_abs_path)
# meta dir
dir_path = os.path.dirname(os.path.realpath(config_file))
print(dir_path)
# concatenate paths
if meta_dir.startswith('./') is True: # same directory as config
# omit dot + concatenate the rest of the path
meta_abs_path = dir_path + meta_dir.replace('.', '')
elif "." not in meta_dir: # absolute path
# set dir
meta_abs_path = meta_dir
else: # relative path
# iterate over the number of '../' found in ckpt_path
os.chdir(dir_path)
dirs = meta_dir.count("..")
for i in range(dirs):
os.chdir("..")
# concatenate the remaining part
for i in range(dirs):
# remove ../
meta_dir = meta_dir.replace('../', '')
os.chdir(meta_dir)
meta_abs_path = os.getcwd()
print("meta_abs_path ", meta_abs_path)
# This function returns the path of the
# ckpt corresponding to rank_id
def find_ckpt_file(rank_id):
pattern_ckpt_file = ""
pattern_ckpt_path = execution_id+level_dir
if level_dir == '/l1/' or level_dir == '/l4/': # local
pattern_ckpt_file = "*-Rank"+str(rank_id)+".fti"
if level_dir == '/l4/' and ioMode == "2": # global
pattern_ckpt_file = "-mpiio.fti"#Ckpt1-mpiio.fti
ckpt_file = ""
for root, dirs, files in os.walk(os.path.abspath(ckpt_abs_path)):
for file in files:
file = os.path.join(root, file)
if pattern_ckpt_path in file and pattern_ckpt_file in file:
ckpt_file = file
if level_dir == '/l4/' and ioMode == "2": # global
PFSfile = ckpt_file
# recover from L4 to tmp/
ckpt_file = recover_mpiio_l4(rank_id, PFSfile)
if ckpt_file == "":
print("Checkpoint file not found")
sys.exit(2002)
return ckpt_file
# This function is called if io=2 and level=4
# it recovers the file from l4 directory in mpiio format
# to tmp/file in posix format
def recover_mpiio_l4(rank_id, PFSfile):
# preparing input for mpiio recovery
global nodeSize
global nbApprocs
global nbNodes
global nbHeads
nodeSize = int(nodeSize)
nbHeads = int(nbHeads)
nbApprocs = nodeSize - nbHeads
nbNodes = totalRanks / nodeSize if nodeSize else 0
nbNodes = int(nbNodes)
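    # layout assumption: each node runs nodeSize MPI ranks, nbHeads of which are FTI head processes, the rest application ranks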
executable_path = "./mpiio/"
# get fileSize from metafile
# read ckpt_file_size entry of second section
fileSize = 0
meta_pattern = "sector"
meta_file = ""
for root, dirs, files in os.walk(os.path.abspath(meta_abs_path)):
for file in files:
if file.startswith(meta_pattern) is True:
file = os.path.join(root, file)
print(file)
meta_file = file
break
# processing the meta file for the size
config = configparser.ConfigParser()
config.read(meta_file)
fileSize = config['0']['ckpt_file_size']
os.chdir(executable_path)
cmd = "./mpiio_main "+str(rank_id)+" "+str(PFSfile)+" "+str(fileSize)+" "+str(nbApprocs)+" "+str(nbNodes)
subprocess.check_call(cmd, shell=True)
print("Rank ", str(rank_id), " is done copying...")
print(
"MPI-IO recovery finished successfully. "
"Now current dir is",
os.getcwd())
# look for what has been stored under /tmp
ckpt_path = os.getcwd()+"/tmp" # Ckpt1-mpiio.fti
pattern_ckpt_file = "*.fti"
ckpt_file = ""
# find file in this directory
for root, dirs, files in os.walk(os.path.abspath(ckpt_path)):
for file in files:
file = os.path.join(root, file)
if fnmatch(file, pattern_ckpt_file):
ckpt_file = file
if ckpt_path == "":
print("Could not recover from MPI-IO")
sys.exit()
return ckpt_file
# This function returns the path of the
# meta corresponding to the ckpt_file
# note: for now it works with level 1
def find_meta_file(ckpt_file):
meta_file = ""
if level_dir == '/l4/' and ioMode == "2":
print("should take any sector file")
for path, subdirs, files in os.walk(meta_abs_path):
for file in files:
file = meta_abs_path+'/'+execution_id+level_dir+file
meta_file = file
break
# traverse all meta files in the directory
else: # levels (1,2,3)
for path, subdirs, files in os.walk(meta_abs_path):
for file in files:
file = meta_abs_path+'/'+execution_id+level_dir+file
if os.path.isfile(file) is True:
config = configparser.ConfigParser()
config.read(file)
ckpt = ckpt_file.rsplit('/', 1)[1]
for section in config.sections():
if section.isdigit() is True:
if config[section]['ckpt_file_name'] == ckpt:
meta_file = file
break
if meta_file == "":
print("Metadata file not found")
sys.exit(2004)
return meta_file
# This function sets FTI's files paths
# depending on the level where the ckpt is stored
def process_level(level):
global level_dir
level_dir = '/l'+str(level)+'/'
# print("level dir : ", level_dir)
# This function compares ckpt directories
# and returns the level to which the last ckpt was stored
def get_latest_ckpt():
latest = max(glob.glob(
os.path.join(ckpt_abs_path, '*/')), key=os.path.getmtime)
latest = latest.rsplit('/', 1)[0]
latest = latest.rsplit('/', 1)[1]
level = latest[1]
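    # checkpoint subdirectories are named 'l1'..'l4', so the second character of the name is the level digit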
return level
# API to read the checkpoints given config and rank
# def read_checkpoints(config_file, rank_id, level=None, output=None):
def read_checkpoints(config_file, rank_id, ranks=None,
level=None, output=None):
init_config_params(config_file)
if level in fti_levels:
process_level(level)
elif level is None:
# check for latest ckpt
last_level = get_latest_ckpt()
        process_level(last_level)
else:
# invalid fti level
print("Invalid FTI level")
sys.exit(1001)
if output is not None and output not in output_formats:
print("Wrong output format. Choose one")
print("CSV (default):: Comma Separated Values file")
print("HDF5 :: Hierarchical Data Format file")
print("data :: numpy array")
sys.exit(1002)
elif output is None:
# default output format (CSV)
output = 'CSV'
    if level == 4 and ioMode == "2" and ranks is None:
print("Total # of ranks is required when reading MPI-IO"
" chekpoints from level 4")
sys.exit(1003)
global totalRanks
totalRanks = ranks
process_fti_paths(config_file)
ckpt_file = find_ckpt_file(rank_id)
meta_file = find_meta_file(ckpt_file)
print("Processing ", ckpt_file, " using meta ", meta_file)
# posix_read_ckpts.read_checkpoint(
# ckpt_file, meta_file, config_file, group_size, level, output)
if output == "data":
return posix_read_ckpts.read_checkpoint(
ckpt_file, meta_file, config_file, group_size, level, output)
else:
posix_read_ckpts.read_checkpoint(
ckpt_file, meta_file, config_file, group_size, level, output)
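# Minimal usage sketch (file name, rank and level below are hypothetical):
#   data = read_checkpoints("config.fti", rank_id=0, ranks=4, level=1, output="data")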
|
[
"os.path.abspath",
"subprocess.check_call",
"os.getcwd",
"os.path.realpath",
"os.walk",
"posix_read_ckpts.read_checkpoint",
"os.path.isfile",
"fnmatch.fnmatch",
"configparser.ConfigParser",
"os.path.join",
"os.chdir",
"sys.exit"
] |
[((5376, 5403), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (5401, 5403), False, 'import configparser\n'), ((5481, 5506), 'os.chdir', 'os.chdir', (['executable_path'], {}), '(executable_path)\n', (5489, 5506), False, 'import os\n'), ((5621, 5659), 'subprocess.check_call', 'subprocess.check_call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (5642, 5659), False, 'import subprocess\n'), ((861, 888), 'os.path.isfile', 'os.path.isfile', (['config_file'], {}), '(config_file)\n', (875, 888), False, 'import os\n'), ((953, 967), 'sys.exit', 'sys.exit', (['(2001)'], {}), '(2001)\n', (961, 967), False, 'import sys\n'), ((995, 1022), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (1020, 1022), False, 'import configparser\n'), ((1726, 1755), 'os.path.realpath', 'os.path.realpath', (['config_file'], {}), '(config_file)\n', (1742, 1755), False, 'import os\n'), ((2608, 2637), 'os.path.realpath', 'os.path.realpath', (['config_file'], {}), '(config_file)\n', (2624, 2637), False, 'import os\n'), ((3892, 3922), 'os.path.abspath', 'os.path.abspath', (['ckpt_abs_path'], {}), '(ckpt_abs_path)\n', (3907, 3922), False, 'import os\n'), ((4350, 4364), 'sys.exit', 'sys.exit', (['(2002)'], {}), '(2002)\n', (4358, 4364), False, 'import sys\n'), ((5073, 5103), 'os.path.abspath', 'os.path.abspath', (['meta_abs_path'], {}), '(meta_abs_path)\n', (5088, 5103), False, 'import os\n'), ((5803, 5814), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5812, 5814), False, 'import os\n'), ((5880, 5891), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5889, 5891), False, 'import os\n'), ((6040, 6066), 'os.path.abspath', 'os.path.abspath', (['ckpt_path'], {}), '(ckpt_path)\n', (6055, 6066), False, 'import os\n'), ((6301, 6311), 'sys.exit', 'sys.exit', ([], {}), '()\n', (6309, 6311), False, 'import sys\n'), ((6628, 6650), 'os.walk', 'os.walk', (['meta_abs_path'], {}), '(meta_abs_path)\n', (6635, 6650), False, 'import os\n'), ((6936, 6958), 'os.walk', 'os.walk', (['meta_abs_path'], {}), '(meta_abs_path)\n', (6943, 6958), False, 'import os\n'), ((7619, 7633), 'sys.exit', 'sys.exit', (['(2004)'], {}), '(2004)\n', (7627, 7633), False, 'import sys\n'), ((9032, 9046), 'sys.exit', 'sys.exit', (['(1002)'], {}), '(1002)\n', (9040, 9046), False, 'import sys\n'), ((9296, 9310), 'sys.exit', 'sys.exit', (['(1003)'], {}), '(1003)\n', (9304, 9310), False, 'import sys\n'), ((9691, 9789), 'posix_read_ckpts.read_checkpoint', 'posix_read_ckpts.read_checkpoint', (['ckpt_file', 'meta_file', 'config_file', 'group_size', 'level', 'output'], {}), '(ckpt_file, meta_file, config_file,\n group_size, level, output)\n', (9723, 9789), False, 'import posix_read_ckpts\n'), ((9813, 9911), 'posix_read_ckpts.read_checkpoint', 'posix_read_ckpts.read_checkpoint', (['ckpt_file', 'meta_file', 'config_file', 'group_size', 'level', 'output'], {}), '(ckpt_file, meta_file, config_file,\n group_size, level, output)\n', (9845, 9911), False, 'import posix_read_ckpts\n'), ((2196, 2214), 'os.chdir', 'os.chdir', (['dir_path'], {}), '(dir_path)\n', (2204, 2214), False, 'import os\n'), ((2463, 2481), 'os.chdir', 'os.chdir', (['ckpt_dir'], {}), '(ckpt_dir)\n', (2471, 2481), False, 'import os\n'), ((2506, 2517), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2515, 2517), False, 'import os\n'), ((3063, 3081), 'os.chdir', 'os.chdir', (['dir_path'], {}), '(dir_path)\n', (3071, 3081), False, 'import os\n'), ((3330, 3348), 'os.chdir', 'os.chdir', (['meta_dir'], {}), '(meta_dir)\n', (3338, 3348), False, 'import os\n'), ((3373, 
3384), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3382, 3384), False, 'import os\n'), ((3971, 3995), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (3983, 3995), False, 'import os\n'), ((6115, 6139), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (6127, 6139), False, 'import os\n'), ((6155, 6187), 'fnmatch.fnmatch', 'fnmatch', (['file', 'pattern_ckpt_file'], {}), '(file, pattern_ckpt_file)\n', (6162, 6187), False, 'from fnmatch import fnmatch\n'), ((8030, 8063), 'os.path.join', 'os.path.join', (['ckpt_abs_path', '"""*/"""'], {}), "(ckpt_abs_path, '*/')\n", (8042, 8063), False, 'import os\n'), ((8728, 8742), 'sys.exit', 'sys.exit', (['(1001)'], {}), '(1001)\n', (8736, 8742), False, 'import sys\n'), ((2293, 2307), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (2301, 2307), False, 'import os\n'), ((3160, 3174), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (3168, 3174), False, 'import os\n'), ((5210, 5234), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (5222, 5234), False, 'import os\n'), ((7096, 7116), 'os.path.isfile', 'os.path.isfile', (['file'], {}), '(file)\n', (7110, 7116), False, 'import os\n'), ((7155, 7182), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (7180, 7182), False, 'import configparser\n')]
|
import rec as rec
from django.core.management.base import BaseCommand
import pandas as pd
from sklearn.metrics.pairwise import cosine_similarity
import plotly.offline as py
import plotly.graph_objects as go
from django.db.models import Sum
import slug
import http.client
import json
from shop.models import Rec, Product, Category
class Command(BaseCommand):
help = "collect ft articles"
# define logic of command
def handle(self, *args, **options):
conn = http.client.HTTPSConnection("www.primemobileparts.com")
payload = ""
headers = {'authorization': "Bearer <KEY>"}
conn.request("GET", "/api/user-order-report",payload, headers)
res = conn.getresponse()
data = res.read()
data.decode("utf-8")
x = json.loads(data.decode("utf-8"))
df = pd.DataFrame(x['reportData'])
df = df.dropna(subset=['user_id', 'product_id', 'quantity'])
customer_item_matrix = df.pivot_table(
index='user_id',
columns='product_id',
values='quantity',
aggfunc='sum'
)
print(df.head())
customer_item_matrix = customer_item_matrix.applymap(lambda x: 1 if x > 0 else 0)
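        # binarise the matrix: any positive purchase quantity becomes 1, so cosine similarity compares co-purchase patterns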
user_user_sim_matrix = pd.DataFrame(
cosine_similarity(customer_item_matrix)
)
user_user_sim_matrix.columns = customer_item_matrix.index
user_user_sim_matrix['user_id'] = customer_item_matrix.index
user_user_sim_matrix = user_user_sim_matrix.set_index('user_id')
# user_user_sim_matrix.loc[737.0].sort_values(ascending=False) # Angelaya benzer kullaniclar
#items_bought_by_A = set(customer_item_matrix.loc[737.0].iloc[customer_item_matrix.loc[737.0].nonzero()].index)
#items_bought_by_B = set(customer_item_matrix.loc[685.0].iloc[customer_item_matrix.loc[685.0].nonzero()].index)
#items_to_recommend_to_B = items_bought_by_A - items_bought_by_B
#items_to_recommend_to_B
item_item_sim_matrix = pd.DataFrame(
cosine_similarity(customer_item_matrix.T)
)
item_item_sim_matrix.columns = customer_item_matrix.T.index
item_item_sim_matrix['product_id'] = customer_item_matrix.T.index
item_item_sim_matrix = item_item_sim_matrix.set_index('product_id')
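        # rows and columns are now product_ids, so .loc[product_id] yields that product's similarity to every other product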
for y in df['product_id']:
#Category.objects.get_or_create(name=z, slug = z.lower().replace(' ', '-'))
#f = Category.objects.get(name = z)
#Product.objects.get_or_create(name=y, price = p, category_id = f.id, slug=y.lower().replace(' ', '-'))
dict = {}
dict["products"] = {}
top_10_similar_items = list(
item_item_sim_matrix \
.loc[y] \
.sort_values(ascending=False) \
.iloc[1:13] \
.index
)
dict["products"][y] = [i for i in top_10_similar_items]
#print(y)
#print(top_10_similar_items)
#print(dict)
rec = json.dumps(dict)
#recs = json.loads(rec)
print(rec)
conn = http.client.HTTPSConnection("www.primemobileparts.com")
headers = {
'content-type': "application/json",
'authorization': "Bearer XgXLQTAvcOwn4Q4LycjR0W1hViX5ChenEepGTcyPo37C3TBCy6ubDxu1FiHt"
}
conn.request("POST", "/api/product-related", rec, headers)
res = conn.getresponse()
data = res.read()
#print(data.decode("utf-8"))
#Product.objects.get_or_create(name=y)
#print('%s added' % (top_10_similar_items,))
#Rec.product.add(d)
self.stdout.write('post complete')
|
[
"pandas.DataFrame",
"sklearn.metrics.pairwise.cosine_similarity",
"json.dumps"
] |
[((834, 863), 'pandas.DataFrame', 'pd.DataFrame', (["x['reportData']"], {}), "(x['reportData'])\n", (846, 863), True, 'import pandas as pd\n'), ((1285, 1324), 'sklearn.metrics.pairwise.cosine_similarity', 'cosine_similarity', (['customer_item_matrix'], {}), '(customer_item_matrix)\n', (1302, 1324), False, 'from sklearn.metrics.pairwise import cosine_similarity\n'), ((2048, 2089), 'sklearn.metrics.pairwise.cosine_similarity', 'cosine_similarity', (['customer_item_matrix.T'], {}), '(customer_item_matrix.T)\n', (2065, 2089), False, 'from sklearn.metrics.pairwise import cosine_similarity\n'), ((3112, 3128), 'json.dumps', 'json.dumps', (['dict'], {}), '(dict)\n', (3122, 3128), False, 'import json\n')]
|
import itertools
import math
from qiskit import QuantumRegister, QuantumCircuit, ClassicalRegister
from qiskit.circuit import Gate, InstructionSet
from qiskit.dagcircuit import DAGCircuit
from qiskit.extensions.standard import *
from qiskit.qasm import pi
def toffoli(number_qubits: int):
assert number_qubits >= 2
q = QuantumRegister(number_qubits)
qc = QuantumCircuit(q, name="toffoli")
# for i in range(number_qubits-1):
# qc.h(controls[i])
qc.ntoffoli(q[number_qubits-1], *q[0:number_qubits-1])
# qc.measure(controls, c_controls)
# qc.measure(target, c_target)
return qc
class NcrxGate(Gate):
"""n-controlled x rotation gate."""
def __init__(self, theta, tgt, *ctls, circ=None):
"""Create new Toffoli gate."""
assert len(ctls) >= 1
super().__init__(f"c^{len(ctls)}rx", [theta], [tgt] + list(ctls), circ)
def _define_decompositions(self):
decomposition = DAGCircuit()
nr_qubits = len(self.qargs)
q = QuantumRegister(nr_qubits)
last_control = q[1]
target = q[0]
decomposition.add_qreg(q)
if nr_qubits == 2:
# Equal to crx of theta
crx_theta = Cu3Gate(self.params[0], -pi/2, pi/2, last_control, target)
decomposition.apply_operation_back(crx_theta)
else:
# Recurse
rule = [
# C-sqrt(rx(theta)) gate
Cu3Gate(self.params[0]/2, -pi/2, pi/2, last_control, target),
NcrxGate(pi, last_control, *q[2:]), # toffoli
Cu3Gate(self.params[0]/2, -pi/2, pi/2, last_control, target).inverse(),
NcrxGate(pi, last_control, *q[2:]), # toffoli
NcrxGate(self.params[0]/2, target, *q[2:]) # c^nrx(theta/2) gate on n-1 qubits
]
for inst in rule:
decomposition.apply_operation_back(inst)
# decomposition.apply_operation_back(ToffoliGate(q[1], q[2], q[0]))
self._decompositions = [decomposition]
def inverse(self):
"""Invert this gate."""
return self # self-inverse
def reapply(self, circ):
"""Reapply this gate to corresponding qubits in circ."""
self._modifiers(circ.ncrx(self.params[0], self.qargs[0], *self.qargs[1:]))
def ncrx(self, theta, tgt, *ctls):
"""Apply n-controlled x-rotation(theta) to target from controls"""
if all(isinstance(ctl, QuantumRegister) for ctl in ctls) and \
isinstance(tgt, QuantumRegister) and \
all(len(ctl) == len(tgt) for ctl in ctls):
instructions = InstructionSet()
for i in range(ctls[0].size):
            instructions.add(self.ncrx(theta, (tgt, i), *zip(ctls, itertools.repeat(i))))
return instructions
for ctl in ctls:
self._check_qubit(ctl)
self._check_qubit(tgt)
self._check_dups(list(ctls) + [tgt])
return self._attach(NcrxGate(theta, tgt, *ctls, circ=self))
def ntoffoli(self, tgt, *ctls):
"""Apply n-controlled Toffoli to tgt with controls."""
if all(isinstance(ctl, QuantumRegister) for ctl in ctls) and \
isinstance(tgt, QuantumRegister) and \
all(len(ctl) == len(tgt) for ctl in ctls):
instructions = InstructionSet()
for i in range(ctls[0].size):
instructions.add(self.ntoffoli((tgt, i), *zip(ctls, itertools.repeat(i))))
return instructions
for ctl in ctls:
self._check_qubit(ctl)
self._check_qubit(tgt)
self._check_dups(list(ctls) + [tgt])
return self._attach(NcrxGate(pi, tgt, *ctls, circ=self))
QuantumCircuit.ncrx = ncrx
QuantumCircuit.ntoffoli = ntoffoli
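# Usage sketch for the monkey-patched methods above (register size is illustrative):
#   q = QuantumRegister(4)
#   qc = QuantumCircuit(q)
#   qc.ncrx(pi / 2, q[0], q[1], q[2], q[3])  # 3-controlled x-rotation onto target q[0]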
|
[
"qiskit.QuantumCircuit",
"qiskit.circuit.InstructionSet",
"qiskit.dagcircuit.DAGCircuit",
"itertools.repeat",
"qiskit.QuantumRegister"
] |
[((330, 360), 'qiskit.QuantumRegister', 'QuantumRegister', (['number_qubits'], {}), '(number_qubits)\n', (345, 360), False, 'from qiskit import QuantumRegister, QuantumCircuit, ClassicalRegister\n'), ((370, 403), 'qiskit.QuantumCircuit', 'QuantumCircuit', (['q'], {'name': '"""toffoli"""'}), "(q, name='toffoli')\n", (384, 403), False, 'from qiskit import QuantumRegister, QuantumCircuit, ClassicalRegister\n'), ((948, 960), 'qiskit.dagcircuit.DAGCircuit', 'DAGCircuit', ([], {}), '()\n', (958, 960), False, 'from qiskit.dagcircuit import DAGCircuit\n'), ((1009, 1035), 'qiskit.QuantumRegister', 'QuantumRegister', (['nr_qubits'], {}), '(nr_qubits)\n', (1024, 1035), False, 'from qiskit import QuantumRegister, QuantumCircuit, ClassicalRegister\n'), ((2608, 2624), 'qiskit.circuit.InstructionSet', 'InstructionSet', ([], {}), '()\n', (2622, 2624), False, 'from qiskit.circuit import Gate, InstructionSet\n'), ((3258, 3274), 'qiskit.circuit.InstructionSet', 'InstructionSet', ([], {}), '()\n', (3272, 3274), False, 'from qiskit.circuit import Gate, InstructionSet\n'), ((2734, 2753), 'itertools.repeat', 'itertools.repeat', (['i'], {}), '(i)\n', (2750, 2753), False, 'import itertools\n'), ((3377, 3396), 'itertools.repeat', 'itertools.repeat', (['i'], {}), '(i)\n', (3393, 3396), False, 'import itertools\n')]
|
#!/usr/bin/python
'''
Script to record from roku device via WinTV HVR-1950
'''
from __future__ import (absolute_import, division, print_function, unicode_literals)
from time import sleep
from roku_app.run_encoding import run_encoding
if __name__ == '__main__':
try:
run_encoding()
except Exception as exc:
print('Caught exception %s' % exc)
sleep(10)
|
[
"time.sleep",
"roku_app.run_encoding.run_encoding"
] |
[((285, 299), 'roku_app.run_encoding.run_encoding', 'run_encoding', ([], {}), '()\n', (297, 299), False, 'from roku_app.run_encoding import run_encoding\n'), ((380, 389), 'time.sleep', 'sleep', (['(10)'], {}), '(10)\n', (385, 389), False, 'from time import sleep\n')]
|
from gym.spaces import Discrete
from ray.rllib.models.action_dist import ActionDistribution
from ray.rllib.models.tf.tf_action_dist import Categorical
from ray.rllib.models.torch.torch_action_dist import TorchCategorical
from ray.rllib.utils.annotations import override
from ray.rllib.utils.exploration.stochastic_sampling import StochasticSampling
from ray.rllib.utils.framework import TensorType
from ray.rllib.utils.framework import get_variable
from ray.rllib.utils.from_config import from_config
from ray.rllib.utils.schedules import Schedule, PiecewiseSchedule
from typing import Union
class SoftQSchedule(StochasticSampling):
"""Special case of StochasticSampling w/ Categorical and temperature param.
Returns a stochastic sample from a Categorical parameterized by the model
output divided by the temperature. Returns the argmax iff explore=False.
"""
def __init__(self, action_space, *, framework,
initial_temperature=1.0, final_temperature=0.0,
temperature_timesteps=int(1e5),
temperature_schedule=None, **kwargs):
"""Initializes a SoftQ Exploration object.
Args:
action_space (Space): The gym action space used by the environment.
temperature (Schedule): The temperature to divide model outputs by
before creating the Categorical distribution to sample from.
framework (str): One of None, "tf", "torch".
temperature_schedule (Optional[Schedule]): An optional Schedule object
to use (instead of constructing one from the given parameters).
"""
assert isinstance(action_space, Discrete)
super().__init__(action_space, framework=framework, **kwargs)
self.temperature_schedule = \
from_config(Schedule, temperature_schedule, framework=framework) or \
PiecewiseSchedule(
endpoints=[
(0, initial_temperature), (temperature_timesteps, final_temperature)],
outside_value=final_temperature,
framework=self.framework)
# The current timestep value (tf-var or python int).
self.last_timestep = get_variable(
0, framework=framework, tf_name="timestep")
self.temperature = self.temperature_schedule(self.last_timestep)
@override(StochasticSampling)
def get_exploration_action(self,
action_distribution: ActionDistribution,
timestep: Union[int, TensorType],
explore: bool = True):
cls = type(action_distribution)
assert cls in [Categorical, TorchCategorical]
self.last_timestep = timestep
# TODO This step changes the Q value, even when we are not exploring, create an issue
# Quick correction
if explore:
self.temperature = self.temperature_schedule(timestep if timestep is not None else self.last_timestep)
else:
self.temperature = 1.0
# Re-create the action distribution with the correct temperature applied.
dist = cls(
action_distribution.inputs,
self.model,
temperature=self.temperature)
# Delegate to super method.
return super().get_exploration_action(action_distribution=dist, timestep=timestep, explore=explore)
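# Sketch of wiring this class into an RLlib trainer config (keys follow RLlib's exploration_config convention):
#   config["exploration_config"] = {
#       "type": SoftQSchedule,
#       "initial_temperature": 1.0,
#       "final_temperature": 0.05,
#       "temperature_timesteps": int(1e5),
#   }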
|
[
"ray.rllib.utils.framework.get_variable",
"ray.rllib.utils.annotations.override",
"ray.rllib.utils.from_config.from_config",
"ray.rllib.utils.schedules.PiecewiseSchedule"
] |
[((2356, 2384), 'ray.rllib.utils.annotations.override', 'override', (['StochasticSampling'], {}), '(StochasticSampling)\n', (2364, 2384), False, 'from ray.rllib.utils.annotations import override\n'), ((2207, 2263), 'ray.rllib.utils.framework.get_variable', 'get_variable', (['(0)'], {'framework': 'framework', 'tf_name': '"""timestep"""'}), "(0, framework=framework, tf_name='timestep')\n", (2219, 2263), False, 'from ray.rllib.utils.framework import get_variable\n'), ((1805, 1869), 'ray.rllib.utils.from_config.from_config', 'from_config', (['Schedule', 'temperature_schedule'], {'framework': 'framework'}), '(Schedule, temperature_schedule, framework=framework)\n', (1816, 1869), False, 'from ray.rllib.utils.from_config import from_config\n'), ((1887, 2055), 'ray.rllib.utils.schedules.PiecewiseSchedule', 'PiecewiseSchedule', ([], {'endpoints': '[(0, initial_temperature), (temperature_timesteps, final_temperature)]', 'outside_value': 'final_temperature', 'framework': 'self.framework'}), '(endpoints=[(0, initial_temperature), (\n temperature_timesteps, final_temperature)], outside_value=\n final_temperature, framework=self.framework)\n', (1904, 2055), False, 'from ray.rllib.utils.schedules import Schedule, PiecewiseSchedule\n')]
|
from flask import jsonify
from flask_playground.routes.exceps import ValidationError
from flask_playground.routes.v1 import api_v1_routes
@api_v1_routes.errorhandler(ValidationError)
def bad_request(e):
response = jsonify({"message": e.args[0]})
response.status_code = 400
return response
@api_v1_routes.app_errorhandler(404)
def not_found(e):
response = jsonify({"message": "Invalid resource URI"})
response.status_code = 404
return response
@api_v1_routes.errorhandler(405)
def method_not_supported(e):
response = jsonify({"message": "The method is not supported"})
response.status_code = 405
return response
@api_v1_routes.app_errorhandler(500)
def internal_server_error(e):
response = jsonify({"error": "Internal server error", "message": e.args[0]})
response.status_code = 500
return response
|
[
"flask_playground.routes.v1.api_v1_routes.app_errorhandler",
"flask.jsonify",
"flask_playground.routes.v1.api_v1_routes.errorhandler"
] |
[((142, 185), 'flask_playground.routes.v1.api_v1_routes.errorhandler', 'api_v1_routes.errorhandler', (['ValidationError'], {}), '(ValidationError)\n', (168, 185), False, 'from flask_playground.routes.v1 import api_v1_routes\n'), ((307, 342), 'flask_playground.routes.v1.api_v1_routes.app_errorhandler', 'api_v1_routes.app_errorhandler', (['(404)'], {}), '(404)\n', (337, 342), False, 'from flask_playground.routes.v1 import api_v1_routes\n'), ((475, 506), 'flask_playground.routes.v1.api_v1_routes.errorhandler', 'api_v1_routes.errorhandler', (['(405)'], {}), '(405)\n', (501, 506), False, 'from flask_playground.routes.v1 import api_v1_routes\n'), ((657, 692), 'flask_playground.routes.v1.api_v1_routes.app_errorhandler', 'api_v1_routes.app_errorhandler', (['(500)'], {}), '(500)\n', (687, 692), False, 'from flask_playground.routes.v1 import api_v1_routes\n'), ((221, 252), 'flask.jsonify', 'jsonify', (["{'message': e.args[0]}"], {}), "({'message': e.args[0]})\n", (228, 252), False, 'from flask import jsonify\n'), ((376, 420), 'flask.jsonify', 'jsonify', (["{'message': 'Invalid resource URI'}"], {}), "({'message': 'Invalid resource URI'})\n", (383, 420), False, 'from flask import jsonify\n'), ((551, 602), 'flask.jsonify', 'jsonify', (["{'message': 'The method is not supported'}"], {}), "({'message': 'The method is not supported'})\n", (558, 602), False, 'from flask import jsonify\n'), ((738, 803), 'flask.jsonify', 'jsonify', (["{'error': 'Internal server error', 'message': e.args[0]}"], {}), "({'error': 'Internal server error', 'message': e.args[0]})\n", (745, 803), False, 'from flask import jsonify\n')]
|
import logging
def get_logger(log_file=None, name='radiomics_logger'):
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
# stream handler will send message to stdout
formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
logger.addHandler(ch)
if log_file is not None:
fh = logging.FileHandler(log_file)
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
logger.addHandler(fh)
return logger
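# Example: logger = get_logger('radiomics.log'); logger.info('feature extraction started')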
|
[
"logging.Formatter",
"logging.StreamHandler",
"logging.FileHandler",
"logging.getLogger"
] |
[((86, 109), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (103, 109), False, 'import logging\n'), ((211, 305), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s [%(levelname)s] %(message)s"""'], {'datefmt': '"""%Y-%m-%d %H:%M:%S"""'}), "('%(asctime)s [%(levelname)s] %(message)s', datefmt=\n '%Y-%m-%d %H:%M:%S')\n", (228, 305), False, 'import logging\n'), ((310, 333), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (331, 333), False, 'import logging\n'), ((464, 493), 'logging.FileHandler', 'logging.FileHandler', (['log_file'], {}), '(log_file)\n', (483, 493), False, 'import logging\n')]
|
import numpy as nump
import math
import random
import folium
# import simplekml as simplekml
from models.Line import Line
from models.Pos import Pos
import time
from gedcomoptions import gvOptions
from folium.plugins import FloatImage, AntPath, MiniMap, HeatMapWithTime
legend_file = 'legend.png'
lgd_txt = '<span style="color: {col};">{txt}</span>'
def dift(l):
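    # jitter a coordinate by up to ±0.0005 degrees so coincident markers and lines do not overlap exactly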
d = ((random.random() * 0.001) - 0.0005)
#d = 0
if (l):
return (float(l)+d)
else:
return None
class MyMarkClusters:
def __init__(self, mymap, step):
self.pmarker = dict()
self.markercluster = dict()
self.mymap = mymap
self.step = step
def mark (self, spot, when=None):
if spot and spot.lat and spot.lon:
cnt = 1
if (when):
# TODO this is a range date hack
if type(when) == type (" "):
when = when[0:4]
when = int(when) - (int(when) % self.step)
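                # round the year down to the nearest multiple of self.step so nearby years share one heatmap frame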
markname = str(spot.lat)+ str(spot.lon) + str(when)
else:
markname = str(spot.lat)+","+ str(spot.lon)
if (markname in self.pmarker.keys()):
cnt = self.pmarker[markname][2]+1
self.pmarker[markname] = (spot.lat, spot.lon, cnt, when)
def checkmarker(self, lat, long, name):
if lat and long:
markname = str(lat)+","+ str(long)
            if markname not in self.pmarker or self.pmarker[markname][2] == 1:  # original read the undefined self.cmarker[mark]; assume the mark count was meant
                return None
if (markname in self.markercluster.keys()):
return self.markercluster[markname]
else:
                self.markercluster[markname] = folium.plugins.MarkerCluster(name).add_to(self.mymap)
return self.markercluster[markname]
class foliumExporter:
def __init__(self, gOptions : gvOptions):
self.file_name = gOptions.Result
self.max_line_weight = gOptions.MaxLineWeight
self.gOptions = gOptions
self.fm = folium.Map(location=[0, 0], zoom_start=2)
backTypes = ('Open Street Map', 'Stamen Terrain', 'CartoDB Positron', 'Stamen Toner', 'Stamen Watercolor', 'Cartodbdark_matter')
if (self.gOptions.MapStyle < 1 or self.gOptions.MapStyle > len(backTypes)):
self.gOptions.MapStyle = 3
for bt in range(0,4):
folium.raster_layers.TileLayer(backTypes[bt], name=backTypes[bt]).add_to(self.fm)
if (self.gOptions.mapMini):
folium.plugins.MiniMap(toggle_display=True).add_to(self.fm)
random.seed()
self.gOptions.step()
def setoptions(self):
return
def Done(self):
self.fm.save(self.file_name)
self.gOptions.stop()
# self.fm = None
def getFeatureGroup(self, thename, depth):
if not thename in self.fglastname:
self.fglastname[thename] = [folium.FeatureGroup(name= thename, show=False), 0, 0]
thefg = self.fglastname[thename][0]
self.fglastname[thename][1] += 1
self.fglastname[thename][2] = depth
return thefg
def export(self, main: Pos, lines: [Line], ntag =""):
SortByLast = (self.gOptions.GroupBy == 1)
SortByPerson = (self.gOptions.GroupBy == 2)
fm = self.fm
self.gOptions.step("Preparing")
self.fglastname = dict()
flr = folium.FeatureGroup(name= lgd_txt.format(txt= 'Relations', col='green'), show=False )
flp = folium.FeatureGroup(name= lgd_txt.format(txt= 'People', col='Black'), show=False )
mycluster = MyMarkClusters(fm, self.gOptions.HeatMapTimeStep)
""" *****************************
HEAT MAP Section
*****************************
"""
if self.gOptions.HeatMapTimeLine:
print("building clusters")
self.gOptions.step("Building Heatmap Clusters")
for line in lines:
if (self.gOptions.step()):
break
if (hasattr(line,'style') and line.style == 'Life'):
if line.human.birth and line.human.birth.pos:
mycluster.mark(line.human.birth.pos, line.human.birth.whenyear())
minyear = line.human.birth.whenyearnum()
else:
minyear = None
if line.human.death and line.human.death.when:
maxyear = line.human.death.whenyearnum(True)
else:
maxyear = None
for mids in (line.midpoints):
y = mids.whenyear()
if y:
if minyear:
minyear = min(int(y), minyear)
else:
minyear = int(y)
y = mids.whenyear(True)
if y:
if maxyear:
maxyear = max(int(y), maxyear)
else:
maxyear = int(y)
if minyear and maxyear:
activepos = Pos(None, None)
if line.human.birth and line.human.birth.pos:
(activepos.lat, activepos.lon) = (line.human.birth.pos.lat, line.human.birth.pos.lon)
for year in range(minyear,maxyear):
for mids in (line.midpoints):
if mids.whenyearnum() == year:
activepos = mids.pos
if activepos and activepos.lat and activepos.lon:
mycluster.mark(activepos, year)
if line.human.death and line.human.death.pos:
mycluster.mark(line.human.death.pos, line.human.death.whenyearnum())
years= []
for marker in mycluster.pmarker:
self.gOptions.step()
if type(mycluster.pmarker[marker][3]) == type(' '):
print (mycluster.pmarker[marker])
theyear = mycluster.pmarker[marker][3]
if theyear and not theyear in years:
years.append(theyear)
years.sort()
heat_data = [[] for _ in range (0,len(years))]
for mkyear in range(0,len(years)):
self.gOptions.step()
for markname in (mycluster.pmarker):
if years[mkyear] == mycluster.pmarker[markname][3]:
heat_data[mkyear].append([mycluster.pmarker[markname][0], mycluster.pmarker[markname][1], mycluster.pmarker[markname][2]])
#Normalize the data
mx=0
for i in range(len(heat_data)):
for j in range(len(heat_data[i])):
mx = max(mx, heat_data[i][j][2])
for i in range(len(heat_data)):
for j in range(len(heat_data[i])):
heat_data[i][j][2] = float(heat_data[i][j][2])/mx
hm = folium.plugins.HeatMapWithTime(heat_data,index = years , name= 'Heatmap', max_opacity=0.9, min_speed=1, speed_step=1, max_speed=25,
gradient={'0':'Navy', '0.25':'Blue','0.5':'Green', '0.75':'Yellow','1': 'Red'})
fm.add_child( hm)
else:
for line in lines:
self.gOptions.step()
mycluster.mark(line.a)
mycluster.mark(line.b)
if line.midpoints:
for mids in (line.midpoints):
mycluster.mark(mids.pos, None)
fg = folium.FeatureGroup(name= lgd_txt.format(txt= 'Heatmap', col='black'), show=(self.gOptions.HeatMap))
heat_data = []
for markname in (mycluster.pmarker):
self.gOptions.step()
heat_data.append([mycluster.pmarker[markname][0], mycluster.pmarker[markname][1], mycluster.pmarker[markname][2]])
hm = folium.plugins.HeatMap(heat_data,max_opacity=0.8, name= 'Heatmap')
fg.add_child(hm)
fm.add_child( fg)
#My to use the jquery hack to MAGIC HACK fix the Folium code to use Font Awesome!
# missing tag a the end on purpose
fm.default_js.append(['hack.js', 'https://use.fontawesome.com/releases/v5.15.4/js/all.js" data-auto-replace-svg="nest'])
""" *****************************
Line Drawing Section
*****************************
"""
i = 0
self.gOptions.step("Building lines")
for line in (list(filter (lambda line: hasattr(line,'style'), lines))):
self.gOptions.step()
i += 1
if ( line.style == 'Life'):
flc = flp
aicc = 'orange'
aici = 'child'
bicc = 'gray'
bici = 'cross'
lc = '#' + line.color.to_hexa()
da = []
ln = line.parentofhuman
g = ""
markertipname = "Life of " + line.name
fancyname = line.style + " of "+ line.parentofhuman
markhome = 'house'
else:
flc = flr
aicc = 'green'
aici = 'baby'
bicc = 'green'
lc = 'green'
da = [5,5]
if (line.style == 'father'):
lc = 'blue'
lc = '#2b8cbe'
bici = 'male'
bicc = 'blue'
if (line.style == 'mother'):
lc = 'pink'
bici = 'female'
bicc = 'pink'
ln = line.name
g = line.name.split(' ',2)[0]
markertipname = line.name + " " + line.style + " of "+ line.parentofhuman
fancyname = line.name + " " + line.style + " of "+ line.parentofhuman
fg = None
newfg = False
labelname = str(i) +' '+ ln
            if (len(labelname) > 25): labelname = labelname[:25] + "..."
gn = lgd_txt.format( txt=labelname, col= lc)
fm_line = []
bextra = "Born {}".format(line.human.birth.whenyear()) if line.human.birth and line.human.birth.when else ''
dextra = "Died {}".format(line.human.death.whenyear()) if line.human.death and line.human.death.when else ''
fancyname = fancyname + "<br>" + bextra +" "+ dextra if (bextra != '') or (dextra != '') else fancyname
if line.human.photo:
fancyname = fancyname + "<img src='{}' width='150'>".format(line.human.photo)
difta = diftb = None
if (line.a and line.a.lat and line.a.lon):
# color = father/mother, born = baby, male, female
difta = [dift(line.a.lat), dift(line.a.lon)]
if self.gOptions.MarksOn:
if self.gOptions.BornMark:
mk = folium.features.Marker(difta,tooltip=markertipname , popup=fancyname, opacity=.5, icon=folium.Icon(color=aicc,icon=aici, prefix='fa' ))
if SortByLast:
fg = self.getFeatureGroup(line.human.surname, line.prof)
if SortByPerson:
fg = self.getFeatureGroup(line.parentofhuman, line.prof)
if (not fg):
fg = folium.FeatureGroup(name= gn, show=False)
newfg = True
fg.add_child(mk)
# 'tombstone' or 'cross'
if (line.b and line.b.lat and line.b.lon):
diftb = [dift(line.b.lat), dift(line.b.lon)]
if self.gOptions.MarksOn:
mk = folium.features.Marker(diftb,tooltip =markertipname , popup=fancyname, opacity=.5,icon=folium.Icon(color=bicc,icon=bici, prefix='fa', extraClasses = 'fas'))
if SortByLast:
fg = self.getFeatureGroup(line.human.surname, line.prof)
if SortByPerson:
fg = self.getFeatureGroup(line.parentofhuman, line.prof)
if (not fg):
fg = folium.FeatureGroup(name= gn, show=False)
newfg = True
fg.add_child(mk)
if difta:
fm_line.append(tuple(difta))
if line.midpoints:
# Change line type
lc = "gray"
for mids in (line.midpoints):
midspot = tuple([dift(mids.pos.lat), dift(mids.pos.lon)])
fm_line.append(midspot)
if self.gOptions.HomeMarker and fg:
if mids.what == 'home':
mker = 'home'
mkcolor = bicc
tip = mids.where
else:
mker = 'shoe-prints'
mkcolor = 'lightgray'
if mids.what:
tip = mids.what + ' ' + mids.where
else:
tip = '?? ' + mids.where
mk = folium.features.Marker(midspot,tooltip =tip, opacity=.5, icon=folium.Icon(color=mkcolor,icon=mker, prefix='fa', extraClasses = 'fas'))
fg.add_child(mk)
if diftb:
fm_line.append(tuple(diftb))
if (len(fm_line) > 1):
lcolor = line.color.to_hexa()
lcolor = lc
if line.prof:
lwidth = max(int(self.max_line_weight/math.exp(0.5*line.prof)), 2)
else:
lwidth = 1
if self.gOptions.UseAntPath:
if line.style == 'Life':
pl = folium.plugins.AntPath(fm_line, weight=lwidth, opacity=.7, tooltip=ln, popup=fancyname, color=lcolor, lineJoin='arcs')
else:
pl = folium.features.PolyLine(fm_line, color=lcolor, weight=lwidth, opacity=1, tooltip=ln, popup=fancyname, dash_array = da, lineJoin='arcs' )
else:
pl = folium.features.PolyLine(fm_line, color=lcolor, weight=lwidth, opacity=1, tooltip=ln, popup=fancyname, dash_array = da, lineJoin='arcs')
if (pl):
if SortByLast:
fg = self.getFeatureGroup(line.human.surname, line.prof)
if SortByPerson:
fg = self.getFeatureGroup(line.parentofhuman, line.prof)
if (not fg):
fg = folium.FeatureGroup(name= gn, show=False)
newfg = True
fg.add_child(pl)
print(f"Name:{line.human.name:30};\tParent:{line.parentofhuman:30};\tStyle:{line.style};\tfrom:{line.a}; to:{line.b}")
# Did we just create a feature group for this person?
if newfg:
fg.layer_name = fg.layer_name + " ({})".format(len(fm_line) + 1 if diftb else 0 + 1 if diftb else 0)
fm.add_child(fg)
for fgn in sorted(self.fglastname.keys(), key=lambda x: self.fglastname[x][2], reverse = False ):
# print ("]]{} : {}".format(fgn, fglastname[fgn][1]))
self.fglastname[fgn][0].layer_name = "{} : {}".format(fgn, self.fglastname[fgn][1])
fm.add_child(self.fglastname[fgn][0])
sc = False if self.gOptions.showLayerControl else True
folium.map.LayerControl('topleft', collapsed= sc).add_to(fm)
if main and main.birth and main.birth.pos and main.birth.pos.lat:
#TODO Look at MarkerClusters
if self.gOptions.MarkStarOn:
folium.Marker([dift(main.birth.pos.lat), dift(main.birth.pos.lon)], tooltip = main.name, opacity=0.5, icon=folium.Icon(color='lightred',icon='star', prefix='fa', iconSize = ['50%', '50%'])).add_to(fm)
else:
print ("No GPS locations to generate a map.")
# TODO Add a legend
# FloatImage(image_file, bottom=0, left=86).add_to(fm)
if SortByLast:
print ("Number of FG lastName: {}".format(len(self.fglastname)))
self.Done()
return
|
[
"folium.features.PolyLine",
"folium.plugins.MiniMap",
"math.exp",
"folium.FeatureGroup",
"folium.MarkerCluster",
"models.Pos.Pos",
"folium.map.LayerControl",
"folium.plugins.HeatMap",
"folium.plugins.AntPath",
"folium.raster_layers.TileLayer",
"random.random",
"random.seed",
"folium.Map",
"folium.plugins.HeatMapWithTime",
"folium.Icon"
] |
[((2002, 2043), 'folium.Map', 'folium.Map', ([], {'location': '[0, 0]', 'zoom_start': '(2)'}), '(location=[0, 0], zoom_start=2)\n', (2012, 2043), False, 'import folium\n'), ((2572, 2585), 'random.seed', 'random.seed', ([], {}), '()\n', (2583, 2585), False, 'import random\n'), ((378, 393), 'random.random', 'random.random', ([], {}), '()\n', (391, 393), False, 'import random\n'), ((7402, 7624), 'folium.plugins.HeatMapWithTime', 'folium.plugins.HeatMapWithTime', (['heat_data'], {'index': 'years', 'name': '"""Heatmap"""', 'max_opacity': '(0.9)', 'min_speed': '(1)', 'speed_step': '(1)', 'max_speed': '(25)', 'gradient': "{'0': 'Navy', '0.25': 'Blue', '0.5': 'Green', '0.75': 'Yellow', '1': 'Red'}"}), "(heat_data, index=years, name='Heatmap',\n max_opacity=0.9, min_speed=1, speed_step=1, max_speed=25, gradient={'0':\n 'Navy', '0.25': 'Blue', '0.5': 'Green', '0.75': 'Yellow', '1': 'Red'})\n", (7432, 7624), False, 'import folium\n'), ((8405, 8471), 'folium.plugins.HeatMap', 'folium.plugins.HeatMap', (['heat_data'], {'max_opacity': '(0.8)', 'name': '"""Heatmap"""'}), "(heat_data, max_opacity=0.8, name='Heatmap')\n", (8427, 8471), False, 'import folium\n'), ((2901, 2946), 'folium.FeatureGroup', 'folium.FeatureGroup', ([], {'name': 'thename', 'show': '(False)'}), '(name=thename, show=False)\n', (2920, 2946), False, 'import folium\n'), ((16026, 16074), 'folium.map.LayerControl', 'folium.map.LayerControl', (['"""topleft"""'], {'collapsed': 'sc'}), "('topleft', collapsed=sc)\n", (16049, 16074), False, 'import folium\n'), ((1678, 1704), 'folium.MarkerCluster', 'folium.MarkerCluster', (['name'], {}), '(name)\n', (1698, 1704), False, 'import folium\n'), ((2356, 2421), 'folium.raster_layers.TileLayer', 'folium.raster_layers.TileLayer', (['backTypes[bt]'], {'name': 'backTypes[bt]'}), '(backTypes[bt], name=backTypes[bt])\n', (2386, 2421), False, 'import folium\n'), ((2495, 2538), 'folium.plugins.MiniMap', 'folium.plugins.MiniMap', ([], {'toggle_display': '(True)'}), '(toggle_display=True)\n', (2517, 2538), False, 'import folium\n'), ((14670, 14808), 'folium.features.PolyLine', 'folium.features.PolyLine', (['fm_line'], {'color': 'lcolor', 'weight': 'lwidth', 'opacity': '(1)', 'tooltip': 'ln', 'popup': 'fancyname', 'dash_array': 'da', 'lineJoin': '"""arcs"""'}), "(fm_line, color=lcolor, weight=lwidth, opacity=1,\n tooltip=ln, popup=fancyname, dash_array=da, lineJoin='arcs')\n", (14694, 14808), False, 'import folium\n'), ((5335, 5350), 'models.Pos.Pos', 'Pos', (['None', 'None'], {}), '(None, None)\n', (5338, 5350), False, 'from models.Pos import Pos\n'), ((12659, 12699), 'folium.FeatureGroup', 'folium.FeatureGroup', ([], {'name': 'gn', 'show': '(False)'}), '(name=gn, show=False)\n', (12678, 12699), False, 'import folium\n'), ((14327, 14450), 'folium.plugins.AntPath', 'folium.plugins.AntPath', (['fm_line'], {'weight': 'lwidth', 'opacity': '(0.7)', 'tooltip': 'ln', 'popup': 'fancyname', 'color': 'lcolor', 'lineJoin': '"""arcs"""'}), "(fm_line, weight=lwidth, opacity=0.7, tooltip=ln,\n popup=fancyname, color=lcolor, lineJoin='arcs')\n", (14349, 14450), False, 'import folium\n'), ((14493, 14631), 'folium.features.PolyLine', 'folium.features.PolyLine', (['fm_line'], {'color': 'lcolor', 'weight': 'lwidth', 'opacity': '(1)', 'tooltip': 'ln', 'popup': 'fancyname', 'dash_array': 'da', 'lineJoin': '"""arcs"""'}), "(fm_line, color=lcolor, weight=lwidth, opacity=1,\n tooltip=ln, popup=fancyname, dash_array=da, lineJoin='arcs')\n", (14517, 14631), False, 'import folium\n'), ((15131, 15171), 'folium.FeatureGroup', 
'folium.FeatureGroup', ([], {'name': 'gn', 'show': '(False)'}), '(name=gn, show=False)\n', (15150, 15171), False, 'import folium\n'), ((11827, 11867), 'folium.FeatureGroup', 'folium.FeatureGroup', ([], {'name': 'gn', 'show': '(False)'}), '(name=gn, show=False)\n', (11846, 11867), False, 'import folium\n'), ((12261, 12328), 'folium.Icon', 'folium.Icon', ([], {'color': 'bicc', 'icon': 'bici', 'prefix': '"""fa"""', 'extraClasses': '"""fas"""'}), "(color=bicc, icon=bici, prefix='fa', extraClasses='fas')\n", (12272, 12328), False, 'import folium\n'), ((11458, 11505), 'folium.Icon', 'folium.Icon', ([], {'color': 'aicc', 'icon': 'aici', 'prefix': '"""fa"""'}), "(color=aicc, icon=aici, prefix='fa')\n", (11469, 11505), False, 'import folium\n'), ((13749, 13819), 'folium.Icon', 'folium.Icon', ([], {'color': 'mkcolor', 'icon': 'mker', 'prefix': '"""fa"""', 'extraClasses': '"""fas"""'}), "(color=mkcolor, icon=mker, prefix='fa', extraClasses='fas')\n", (13760, 13819), False, 'import folium\n'), ((14146, 14171), 'math.exp', 'math.exp', (['(0.5 * line.prof)'], {}), '(0.5 * line.prof)\n', (14154, 14171), False, 'import math\n'), ((16387, 16472), 'folium.Icon', 'folium.Icon', ([], {'color': '"""lightred"""', 'icon': '"""star"""', 'prefix': '"""fa"""', 'iconSize': "['50%', '50%']"}), "(color='lightred', icon='star', prefix='fa', iconSize=['50%', '50%']\n )\n", (16398, 16472), False, 'import folium\n')]
|
import time
from datetime import datetime
def getGuestTime():
curr_datetime = datetime.now()
dt_string = curr_datetime.strftime("%d/%m/%Y")
ti_string = curr_datetime.strftime("%H:%M:%S")
return "{0} {1}".format(dt_string, ti_string)
def getGuestTimezone():
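    # Work out the client's UTC offset, taking daylight saving time into account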
is_dst = time.daylight and time.localtime().tm_isdst > 0
utc_offset = - (time.altzone if is_dst else time.timezone)/3600
    if utc_offset >= 0:
        marker = "+"
    else:
        marker = "-"
    # use abs() so a negative offset is not rendered with a double minus sign
    tzone = "UTC{0}{1}".format(marker, abs(utc_offset))
return tzone
|
[
"datetime.datetime.now",
"time.localtime"
] |
[((83, 97), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (95, 97), False, 'from datetime import datetime\n'), ((306, 322), 'time.localtime', 'time.localtime', ([], {}), '()\n', (320, 322), False, 'import time\n')]
|
import boto3
import io
import base64
import uuid
from datetime import datetime
from PIL import Image
from django.contrib.auth.models import User
from django.contrib.auth import get_user_model
from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile
from rest_framework import generics, permissions
from rest_framework_jwt.settings import api_settings
from rest_framework.response import Response
from rest_framework.views import status
from geophoto.settings import AWS_S3_BASE_URL, AWS_STORAGE_BUCKET_NAME
from .decorators import validate_request_data_photo
from .serializers import PhotoSerializer, UserSerializer
from .models import *
User = get_user_model()
# Get the JWT settings
jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
class ListUsers(generics.ListCreateAPIView):
"""
GET users/
"""
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = (permissions.AllowAny,)
def get(self, request, *args, **kwargs):
try:
users = self.queryset.filter(**kwargs)
users_serialized = self.serializer_class(users, many=True).data
return Response(users_serialized)
except User.DoesNotExist:
return Response(
data={
"message": "User does not exist"
},
status=status.HTTP_404_NOT_FOUND
)
class ListSearchAround(generics.ListCreateAPIView):
"""
POST search_around/
"""
serializer_class = PhotoSerializer
permission_classes = (permissions.AllowAny,)
def post(self, request, *args, **kwargs):
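        # Expects 'distance' (in metres, compared with ST_DWithin on geography types), 'location_lat' and 'location_lon' in the request body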
dist = request.data.get('distance')
loc_lat = request.data.get('location_lat')
loc_lon = request.data.get('location_lon')
data = {}
        if loc_lat and loc_lon and dist:
query = """
SELECT
uuid,
url,
title,
location AS point,
photo.user_id,
created_at,
prov.codiprov AS provincia,
mun.codimuni AS municipi,
comarca.codicomar AS comarca
FROM geophoto_api_photo photo
LEFT JOIN geophoto_api_provincia AS prov
ON ST_Contains(prov.geom, photo.location)
LEFT JOIN geophoto_api_municipi AS mun
ON ST_Contains(mun.geom, photo.location)
LEFT JOIN geophoto_api_comarca AS comarca
ON ST_Contains(comarca.geom, photo.location)
WHERE ST_DWithin(
ST_Transform(photo.location, 4326)::geography,
ST_SetSRID(ST_Makepoint({lon}, {lat}), 4326)::geography,
{dist}
)
ORDER BY ST_Distance(
ST_SetSRID(ST_MakePoint({lon}, {lat}), 4326),
ST_Transform(photo.location, 4326)
);
""".format(
lon=loc_lon,
lat=loc_lat,
dist=dist
)
rows = Photo.objects.raw(raw_query=query)
data = self.serializer_class(rows, many=True).data
return Response(data)
class ListWithinAround(generics.ListCreateAPIView):
"""
POST search_within/
"""
serializer_class = PhotoSerializer
permission_classes = (permissions.AllowAny,)
def get_photos_taken_in_provincia(self, name):
query = """
SELECT
uuid,
url,
title,
location AS point,
photo.user_id,
created_at,
prov.codiprov AS provincia,
mun.codimuni AS municipi,
comarca.codicomar AS comarca
FROM geophoto_api_photo AS photo
LEFT JOIN geophoto_api_provincia AS prov
ON ST_Contains(prov.geom, photo.location)
AND prov.nomprov ILIKE '%%{prov_name}%%'
LEFT JOIN geophoto_api_user u
ON u.id = photo.user_id
LEFT JOIN geophoto_api_municipi AS mun
ON ST_Contains(mun.geom, photo.location)
LEFT JOIN geophoto_api_comarca AS comarca
ON ST_Contains(comarca.geom, photo.location)
""".format(prov_name=name)
rows = Photo.objects.raw(raw_query=query)
response_data = self.serializer_class(rows, many=True).data
return response_data
def get_photos_taken_in_comarca(self, name):
query = """
SELECT
uuid,
url,
title,
location AS point,
photo.user_id,
created_at,
prov.codiprov AS provincia,
mun.codimuni AS municipi,
comarca.codicomar AS comarca
FROM geophoto_api_photo AS photo
JOIN geophoto_api_provincia AS prov
ON ST_Contains(prov.geom, photo.location)
JOIN geophoto_api_user u
ON u.id = photo.user_id
JOIN geophoto_api_municipi AS mun
ON ST_Contains(mun.geom, photo.location)
JOIN geophoto_api_comarca AS comarca
ON ST_Contains(comarca.geom, photo.location)
AND comarca.nomcomar ILIKE '%%{comarca_name}%%'
""".format(comarca_name=name)
rows = Photo.objects.raw(raw_query=query)
response_data = self.serializer_class(rows, many=True).data
return response_data
def get_photos_taken_in_municipi(self, name):
query = """
SELECT
uuid,
url,
title,
location AS point,
photo.user_id,
created_at,
prov.codiprov AS provincia,
mun.codimuni AS municipi,
comarca.codicomar AS comarca
FROM geophoto_api_photo AS photo
JOIN geophoto_api_provincia AS prov
ON ST_Contains(prov.geom, photo.location)
JOIN geophoto_api_user u
ON u.id = photo.user_id
JOIN geophoto_api_municipi AS mun
ON ST_Contains(mun.geom, photo.location)
AND mun.nommuni ILIKE '%%{mun_name}%%'
JOIN geophoto_api_comarca AS comarca
ON ST_Contains(comarca.geom, photo.location)
""".format(mun_name=name)
rows = Photo.objects.raw(raw_query=query)
response_data = self.serializer_class(rows, many=True).data
return response_data
def post(self, request, *args, **kwargs):
zone = request.data.get('zone')
zone_type = request.data.get('zone_type', '')
response_data = {}
if zone_type not in ('provincia', 'comarca', 'municipi'):
response_status = status.HTTP_400_BAD_REQUEST
else:
try:
if zone_type == 'provincia':
response_data = self.get_photos_taken_in_provincia(zone)
elif zone_type == 'comarca':
response_data = self.get_photos_taken_in_comarca(zone)
elif zone_type == 'municipi':
response_data = self.get_photos_taken_in_municipi(zone)
response_status = status.HTTP_200_OK
except Exception as e:
response_status = status.HTTP_500_INTERNAL_SERVER_ERROR
return Response(data=response_data, status=response_status)
class ListCreatePhotos(generics.ListCreateAPIView):
"""
GET photo/
POST photo/
"""
queryset = Photo.objects.all().order_by('-date_uploaded')
serializer_class = PhotoSerializer
permission_classes = (permissions.IsAuthenticated,)
@staticmethod
def get_bytesIO(data):
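        # Accept either an uploaded file object or a base64-encoded string and return a BytesIO buffer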
if isinstance(data, (InMemoryUploadedFile, TemporaryUploadedFile)):
photo_buf = io.BytesIO(data.file.read())
else:
b64_decoded = base64.b64decode(data)
photo_buf = io.BytesIO(b64_decoded)
return photo_buf
@staticmethod
def upload_s3_photo(photo_binary, key=None):
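        # Upload the image bytes to the configured S3 bucket and return the public URL of the object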
s3 = boto3.client('s3')
if key is None:
key = uuid.uuid4().hex[:6] + '.jpg'
s3.upload_fileobj(photo_binary, AWS_STORAGE_BUCKET_NAME, key)
url = "{aws_s3_url}{bucket_name}/{key}".format(
aws_s3_url=AWS_S3_BASE_URL,
bucket_name=AWS_STORAGE_BUCKET_NAME,
key=key
)
return url
@staticmethod
def generate_photo_name(photo_name):
return photo_name[:6] + '.jpg'
@validate_request_data_photo
def post(self, request, *args, **kwargs):
date_uploaded = datetime.today().strftime('%Y-%m-%d')
photo_file = request.data['photo']
bytes_data = self.get_bytesIO(photo_file)
exif_data = Photo.extract_exif_data(Image.open(bytes_data))
created_photo = None
try:
if not exif_data['location']:
lat = request.data.get('latitude', None)
lon = request.data.get('longitude', None)
exif_data['location'] = Photo.create_point(lat=lat, lon=lon)
if not exif_data['created_at']:
date = request.data.get('date', False)
exif_data['created_at'] = datetime.strptime(date, '%Y:%m:%d %H:%M:%S').strftime('%Y-%m-%d %H:%M:%S')
create_vals = {
'title': request.data["title"],
'date_uploaded': date_uploaded,
'user': request.user,
}
create_vals.update(exif_data)
created_photo = Photo.objects.create(**create_vals)
respose_data = {
'message': 'Photo posted successfully!'
}
response_status = status.HTTP_201_CREATED
except Exception as e:
respose_data = {
"message": "Internal server error."
}
response_status = status.HTTP_500_INTERNAL_SERVER_ERROR
print(e)
if created_photo is not None:
bytes_data = self.get_bytesIO(photo_file)
key = self.generate_photo_name(created_photo.uuid.hex)
url = self.upload_s3_photo(bytes_data, key=key)
created_photo.url = url
created_photo.save()
return Response(
data=respose_data,
status=response_status
)
def get(self, request, *args, **kwargs):
try:
            photos = self.queryset.get(**kwargs)
return Response(self.serializer_class(photos).data)
except Photo.DoesNotExist:
return Response(
data={
"message": "Photo does not exist"
},
status=status.HTTP_404_NOT_FOUND
)
class RegisterUsers(generics.CreateAPIView):
"""
POST auth/register/
"""
permission_classes = (permissions.AllowAny,)
def post(self, request, *args, **kwargs):
username = request.data.get("username", "")
password = request.data.get("password", "")
email = request.data.get("email", "")
if not username and not password and not email:
return Response(
data={
"message": "username, password and email is required to register a user"
},
status=status.HTTP_400_BAD_REQUEST
)
full_name = request.data.get('full_name', '')
new_user = User.objects.create_user(
username=username, password=password, email=email, full_name=full_name
)
return Response(
data=UserSerializer(new_user).data,
status=status.HTTP_201_CREATED
)
class ListUserPhotos(generics.ListAPIView):
"""
GET search_my_photos/
"""
queryset = Photo.objects.all().order_by('-date_uploaded')
serializer_class = PhotoSerializer
permission_classes = (permissions.IsAuthenticated,)
def post(self, request, *args, **kwargs):
try:
username = request.data.get('username', None)
query = """
SELECT
uuid,
url,
title,
location AS point,
photo.user_id,
created_at,
prov.codiprov AS provincia,
mun.codimuni AS municipi,
comarca.codicomar AS comarca
FROM geophoto_api_photo AS photo
JOIN geophoto_api_provincia AS prov
ON ST_Contains(prov.geom, photo.location)
LEFT JOIN geophoto_api_user u
ON u.id = photo.user_id
LEFT JOIN geophoto_api_municipi AS mun
ON ST_Contains(mun.geom, photo.location)
LEFT JOIN geophoto_api_comarca AS comarca
ON ST_Contains(comarca.geom, photo.location)
WHERE u.username ilike '{username}'
""".format(username=username)
rows = Photo.objects.raw(raw_query=query)
response_data = self.serializer_class(rows, many=True).data
return Response(response_data)
except Photo.DoesNotExist:
return Response(
data={
"message": "User does not exist"
},
status=status.HTTP_404_NOT_FOUND
)
|
[
"io.BytesIO",
"boto3.client",
"django.contrib.auth.get_user_model",
"django.contrib.auth.models.User.objects.create_user",
"base64.b64decode",
"PIL.Image.open",
"rest_framework.response.Response",
"django.contrib.auth.models.User.objects.all"
] |
[((637, 653), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (651, 653), False, 'from django.contrib.auth import get_user_model\n'), ((880, 898), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (896, 898), False, 'from django.contrib.auth.models import User\n'), ((3316, 3330), 'rest_framework.response.Response', 'Response', (['data'], {}), '(data)\n', (3324, 3330), False, 'from rest_framework.response import Response\n'), ((7606, 7658), 'rest_framework.response.Response', 'Response', ([], {'data': 'response_data', 'status': 'response_status'}), '(data=response_data, status=response_status)\n', (7614, 7658), False, 'from rest_framework.response import Response\n'), ((8310, 8328), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (8322, 8328), False, 'import boto3\n'), ((10522, 10573), 'rest_framework.response.Response', 'Response', ([], {'data': 'respose_data', 'status': 'response_status'}), '(data=respose_data, status=response_status)\n', (10530, 10573), False, 'from rest_framework.response import Response\n'), ((11700, 11800), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': 'username', 'password': 'password', 'email': 'email', 'full_name': 'full_name'}), '(username=username, password=password, email=email,\n full_name=full_name)\n', (11724, 11800), False, 'from django.contrib.auth.models import User\n'), ((1192, 1218), 'rest_framework.response.Response', 'Response', (['users_serialized'], {}), '(users_serialized)\n', (1200, 1218), False, 'from rest_framework.response import Response\n'), ((8133, 8155), 'base64.b64decode', 'base64.b64decode', (['data'], {}), '(data)\n', (8149, 8155), False, 'import base64\n'), ((8180, 8203), 'io.BytesIO', 'io.BytesIO', (['b64_decoded'], {}), '(b64_decoded)\n', (8190, 8203), False, 'import io\n'), ((9046, 9068), 'PIL.Image.open', 'Image.open', (['bytes_data'], {}), '(bytes_data)\n', (9056, 9068), False, 'from PIL import Image\n'), ((11416, 11550), 'rest_framework.response.Response', 'Response', ([], {'data': "{'message': 'username, password and email is required to register a user'}", 'status': 'status.HTTP_400_BAD_REQUEST'}), "(data={'message':\n 'username, password and email is required to register a user'}, status=\n status.HTTP_400_BAD_REQUEST)\n", (11424, 11550), False, 'from rest_framework.response import Response\n'), ((13420, 13443), 'rest_framework.response.Response', 'Response', (['response_data'], {}), '(response_data)\n', (13428, 13443), False, 'from rest_framework.response import Response\n'), ((1272, 1360), 'rest_framework.response.Response', 'Response', ([], {'data': "{'message': 'User does not exist'}", 'status': 'status.HTTP_404_NOT_FOUND'}), "(data={'message': 'User does not exist'}, status=status.\n HTTP_404_NOT_FOUND)\n", (1280, 1360), False, 'from rest_framework.response import Response\n'), ((10839, 10928), 'rest_framework.response.Response', 'Response', ([], {'data': "{'message': 'Photo does not exist'}", 'status': 'status.HTTP_404_NOT_FOUND'}), "(data={'message': 'Photo does not exist'}, status=status.\n HTTP_404_NOT_FOUND)\n", (10847, 10928), False, 'from rest_framework.response import Response\n'), ((13498, 13586), 'rest_framework.response.Response', 'Response', ([], {'data': "{'message': 'User does not exist'}", 'status': 'status.HTTP_404_NOT_FOUND'}), "(data={'message': 'User does not exist'}, status=status.\n HTTP_404_NOT_FOUND)\n", (13506, 13586), False, 'from rest_framework.response import 
Response\n')]
|
import logging
from functools import lru_cache
from urllib.parse import urlencode, quote_plus
from boto_utils import fetch_job_manifest, paginate
from botocore.exceptions import ClientError
from utils import remove_none, retry_wrapper
logger = logging.getLogger(__name__)
def save(s3, client, buf, bucket, key, metadata, source_version=None):
"""
Save a buffer to S3, preserving any existing properties on the object
"""
# Get Object Settings
request_payer_args, _ = get_requester_payment(client, bucket)
object_info_args, _ = get_object_info(client, bucket, key, source_version)
tagging_args, _ = get_object_tags(client, bucket, key, source_version)
acl_args, acl_resp = get_object_acl(client, bucket, key, source_version)
extra_args = {
**request_payer_args,
**object_info_args,
**tagging_args,
**acl_args,
**{"Metadata": metadata},
}
logger.info("Object settings: %s", extra_args)
# Write Object Back to S3
logger.info("Saving updated object to s3://%s/%s", bucket, key)
contents = buf.read()
with s3.open("s3://{}/{}".format(bucket, key), "wb", **extra_args) as f:
f.write(contents)
s3.invalidate_cache() # TODO: remove once https://github.com/dask/s3fs/issues/294 is resolved
new_version_id = f.version_id
logger.info("Object uploaded to S3")
# GrantWrite cannot be set whilst uploading therefore ACLs need to be restored separately
write_grantees = ",".join(get_grantees(acl_resp, "WRITE"))
if write_grantees:
logger.info("WRITE grant found. Restoring additional grantees for object")
client.put_object_acl(
Bucket=bucket,
Key=key,
VersionId=new_version_id,
**{**request_payer_args, **acl_args, "GrantWrite": write_grantees,}
)
logger.info("Processing of file s3://%s/%s complete", bucket, key)
return new_version_id
@lru_cache()
def get_requester_payment(client, bucket):
"""
Generates a dict containing the request payer args supported when calling S3.
GetBucketRequestPayment call will be cached
:returns tuple containing the info formatted for ExtraArgs and the raw response
"""
request_payer = client.get_bucket_request_payment(Bucket=bucket)
return (
remove_none(
{
"RequestPayer": "requester"
if request_payer["Payer"] == "Requester"
else None,
}
),
request_payer,
)
@lru_cache()
def get_object_info(client, bucket, key, version_id=None):
"""
Generates a dict containing the non-ACL/Tagging args supported when uploading to S3.
HeadObject call will be cached
:returns tuple containing the info formatted for ExtraArgs and the raw response
"""
kwargs = {"Bucket": bucket, "Key": key, **get_requester_payment(client, bucket)[0]}
if version_id:
kwargs["VersionId"] = version_id
object_info = client.head_object(**kwargs)
return (
remove_none(
{
"CacheControl": object_info.get("CacheControl"),
"ContentDisposition": object_info.get("ContentDisposition"),
"ContentEncoding": object_info.get("ContentEncoding"),
"ContentLanguage": object_info.get("ContentLanguage"),
"ContentType": object_info.get("ContentType"),
"Expires": object_info.get("Expires"),
"Metadata": object_info.get("Metadata"),
"ServerSideEncryption": object_info.get("ServerSideEncryption"),
"StorageClass": object_info.get("StorageClass"),
"SSECustomerAlgorithm": object_info.get("SSECustomerAlgorithm"),
"SSEKMSKeyId": object_info.get("SSEKMSKeyId"),
"WebsiteRedirectLocation": object_info.get("WebsiteRedirectLocation"),
}
),
object_info,
)
@lru_cache()
def get_object_tags(client, bucket, key, version_id=None):
"""
Generates a dict containing the Tagging args supported when uploading to S3
GetObjectTagging call will be cached
:returns tuple containing tagging formatted for ExtraArgs and the raw response
"""
kwargs = {"Bucket": bucket, "Key": key}
if version_id:
kwargs["VersionId"] = version_id
tagging = client.get_object_tagging(**kwargs)
return (
remove_none(
{
"Tagging": urlencode(
{tag["Key"]: tag["Value"] for tag in tagging["TagSet"]},
quote_via=quote_plus,
)
}
),
tagging,
)
@lru_cache()
def get_object_acl(client, bucket, key, version_id=None):
"""
Generates a dict containing the ACL args supported when uploading to S3
GetObjectAcl call will be cached
:returns tuple containing ACL formatted for ExtraArgs and the raw response
"""
kwargs = {"Bucket": bucket, "Key": key, **get_requester_payment(client, bucket)[0]}
if version_id:
kwargs["VersionId"] = version_id
acl = client.get_object_acl(**kwargs)
existing_owner = {"id={}".format(acl["Owner"]["ID"])}
return (
remove_none(
{
"GrantFullControl": ",".join(
existing_owner | get_grantees(acl, "FULL_CONTROL")
),
"GrantRead": ",".join(get_grantees(acl, "READ")),
"GrantReadACP": ",".join(get_grantees(acl, "READ_ACP")),
"GrantWriteACP": ",".join(get_grantees(acl, "WRITE_ACP")),
}
),
acl,
)
def get_grantees(acl, grant_type):
prop_map = {
"CanonicalUser": ("ID", "id"),
"AmazonCustomerByEmail": ("EmailAddress", "emailAddress"),
"Group": ("URI", "uri"),
}
filtered = [
grantee["Grantee"]
for grantee in acl.get("Grants")
if grantee["Permission"] == grant_type
]
grantees = set()
for grantee in filtered:
identifier_type = grantee["Type"]
identifier_prop = prop_map[identifier_type]
grantees.add("{}={}".format(identifier_prop[1], grantee[identifier_prop[0]]))
return grantees
@lru_cache()
def validate_bucket_versioning(client, bucket):
resp = client.get_bucket_versioning(Bucket=bucket)
versioning_enabled = resp.get("Status") == "Enabled"
mfa_delete_enabled = resp.get("MFADelete") == "Enabled"
if not versioning_enabled:
raise ValueError("Bucket {} does not have versioning enabled".format(bucket))
if mfa_delete_enabled:
raise ValueError("Bucket {} has MFA Delete enabled".format(bucket))
return True
@lru_cache()
def fetch_manifest(manifest_object):
return fetch_job_manifest(manifest_object)
def delete_old_versions(client, input_bucket, input_key, new_version):
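    # Delete every version and delete marker older than new_version, in batches of at most 1000 keys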
try:
resp = list(
paginate(
client,
client.list_object_versions,
["Versions", "DeleteMarkers"],
Bucket=input_bucket,
Prefix=input_key,
VersionIdMarker=new_version,
KeyMarker=input_key,
)
)
versions = [el[0] for el in resp if el[0] is not None]
delete_markers = [el[1] for el in resp if el[1] is not None]
versions.extend(delete_markers)
sorted_versions = sorted(versions, key=lambda x: x["LastModified"])
version_ids = [v["VersionId"] for v in sorted_versions]
errors = []
max_deletions = 1000
for i in range(0, len(version_ids), max_deletions):
resp = client.delete_objects(
Bucket=input_bucket,
Delete={
"Objects": [
{"Key": input_key, "VersionId": version_id}
for version_id in version_ids[i : i + max_deletions]
],
"Quiet": True,
},
)
errors.extend(resp.get("Errors", []))
if len(errors) > 0:
raise DeleteOldVersionsError(
errors=[
"Delete object {} version {} failed: {}".format(
e["Key"], e["VersionId"], e["Message"]
)
for e in errors
]
)
except ClientError as e:
raise DeleteOldVersionsError(errors=[str(e)])
def verify_object_versions_integrity(
client, bucket, key, from_version_id, to_version_id
):
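    # Check that no other version or delete marker was created between from_version_id and to_version_id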
def raise_exception(msg):
raise IntegrityCheckFailedError(msg, client, bucket, key, to_version_id)
conflict_error_template = "A {} ({}) was detected for the given object between read and write operations ({} and {})."
not_found_error_template = "Previous version ({}) has been deleted."
object_versions = retry_wrapper(client.list_object_versions)(
Bucket=bucket,
Prefix=key,
VersionIdMarker=to_version_id,
KeyMarker=key,
MaxKeys=1,
)
versions = object_versions.get("Versions", [])
delete_markers = object_versions.get("DeleteMarkers", [])
all_versions = versions + delete_markers
if not len(all_versions):
return raise_exception(not_found_error_template.format(from_version_id))
prev_version = all_versions[0]
prev_version_id = prev_version["VersionId"]
if prev_version_id != from_version_id:
conflicting_version_type = (
"delete marker" if "ETag" not in prev_version else "version"
)
return raise_exception(
conflict_error_template.format(
conflicting_version_type,
prev_version_id,
from_version_id,
to_version_id,
)
)
return True
def rollback_object_version(client, bucket, key, version, on_error):
""" Delete newly created object version as soon as integrity conflict is detected """
try:
return client.delete_object(Bucket=bucket, Key=key, VersionId=version)
except ClientError as e:
err_message = "ClientError: {}. Version rollback caused by version integrity conflict failed".format(
str(e)
)
on_error(err_message)
except Exception as e:
err_message = "Unknown error: {}. Version rollback caused by version integrity conflict failed".format(
str(e)
)
on_error(err_message)
class DeleteOldVersionsError(Exception):
def __init__(self, errors):
super().__init__("\n".join(errors))
self.errors = errors
class IntegrityCheckFailedError(Exception):
def __init__(self, message, client, bucket, key, version_id):
self.message = message
self.client = client
self.bucket = bucket
self.key = key
self.version_id = version_id
|
[
"boto_utils.fetch_job_manifest",
"urllib.parse.urlencode",
"utils.remove_none",
"boto_utils.paginate",
"functools.lru_cache",
"utils.retry_wrapper",
"logging.getLogger"
] |
[((247, 274), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (264, 274), False, 'import logging\n'), ((1944, 1955), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (1953, 1955), False, 'from functools import lru_cache\n'), ((2531, 2542), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (2540, 2542), False, 'from functools import lru_cache\n'), ((3960, 3971), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (3969, 3971), False, 'from functools import lru_cache\n'), ((4679, 4690), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (4688, 4690), False, 'from functools import lru_cache\n'), ((6238, 6249), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (6247, 6249), False, 'from functools import lru_cache\n'), ((6712, 6723), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (6721, 6723), False, 'from functools import lru_cache\n'), ((6772, 6807), 'boto_utils.fetch_job_manifest', 'fetch_job_manifest', (['manifest_object'], {}), '(manifest_object)\n', (6790, 6807), False, 'from boto_utils import fetch_job_manifest, paginate\n'), ((2319, 2416), 'utils.remove_none', 'remove_none', (["{'RequestPayer': 'requester' if request_payer['Payer'] == 'Requester' else None\n }"], {}), "({'RequestPayer': 'requester' if request_payer['Payer'] ==\n 'Requester' else None})\n", (2330, 2416), False, 'from utils import remove_none, retry_wrapper\n'), ((8900, 8942), 'utils.retry_wrapper', 'retry_wrapper', (['client.list_object_versions'], {}), '(client.list_object_versions)\n', (8913, 8942), False, 'from utils import remove_none, retry_wrapper\n'), ((6923, 7096), 'boto_utils.paginate', 'paginate', (['client', 'client.list_object_versions', "['Versions', 'DeleteMarkers']"], {'Bucket': 'input_bucket', 'Prefix': 'input_key', 'VersionIdMarker': 'new_version', 'KeyMarker': 'input_key'}), "(client, client.list_object_versions, ['Versions', 'DeleteMarkers'],\n Bucket=input_bucket, Prefix=input_key, VersionIdMarker=new_version,\n KeyMarker=input_key)\n", (6931, 7096), False, 'from boto_utils import fetch_job_manifest, paginate\n'), ((4480, 4572), 'urllib.parse.urlencode', 'urlencode', (["{tag['Key']: tag['Value'] for tag in tagging['TagSet']}"], {'quote_via': 'quote_plus'}), "({tag['Key']: tag['Value'] for tag in tagging['TagSet']},\n quote_via=quote_plus)\n", (4489, 4572), False, 'from urllib.parse import urlencode, quote_plus\n')]
|
from itertools import chain
import tensorflow as tf
from libspn.graph.node import OpNode, Input
from libspn import utils
from libspn.inference.type import InferenceType
from libspn.exceptions import StructureError
from libspn.utils.serialization import register_serializable
@register_serializable
class Concat(OpNode):
"""An op node that concatenates all inputs into a single output tensor.
Args:
*inputs: Inputs of this node. See :meth:`~libspn.Input.as_input` for
possible values.
name (str): Name of the node.
"""
def __init__(self, *inputs, name="Concat"):
super().__init__(inference_type=InferenceType.MARGINAL, name=name)
self.set_inputs(*inputs)
def serialize(self):
data = super().serialize()
data['inputs'] = [(i.node.name, i.indices) for i in self._inputs]
return data
def deserialize(self, data):
super().deserialize(data)
self.set_inputs()
def deserialize_inputs(self, data, nodes_by_name):
super().deserialize_inputs(data, nodes_by_name)
self._inputs = tuple(Input(nodes_by_name[nn], i)
for nn, i in data['inputs'])
@property
def inputs(self):
return self._inputs
def set_inputs(self, *inputs):
"""Set the inputs of this node. If no arguments are given, all existing
inputs get disconnected.
Args:
*inputs (input_like): Inputs of this node. See
:meth:`~libspn.Input.as_input` for possible inputs.
"""
self._inputs = self._parse_inputs(*inputs)
def add_inputs(self, *inputs):
"""Add more inputs to this node.
Args:
*inputs (input_like): Inputs of this node. See
:meth:`~libspn.Input.as_input` for possible inputs.
"""
self._inputs = self._inputs + self._parse_inputs(*inputs)
@property
def _const_out_size(self):
return False
@utils.docinherit(OpNode)
def _compute_out_size(self, *input_out_sizes):
if not self._inputs:
raise StructureError("%s is missing inputs." % self)
return sum(self._gather_input_sizes(*input_out_sizes))
@utils.docinherit(OpNode)
def _compute_scope(self, *input_scopes):
if not self._inputs:
raise StructureError("%s is missing inputs." % self)
input_scopes = self._gather_input_scopes(*input_scopes)
return list(chain.from_iterable(input_scopes))
@utils.docinherit(OpNode)
def _compute_valid(self, *input_scopes):
if not self._inputs:
raise StructureError("%s is missing inputs." % self)
_, *input_scopes_ = self._gather_input_scopes(*input_scopes)
# If already invalid, return None
if any(s is None for s in input_scopes_):
return None
else:
return self._compute_scope(*input_scopes)
@utils.docinherit(OpNode)
@utils.lru_cache
def _compute_log_value(self, *input_tensors):
# Check inputs
if not self._inputs:
raise StructureError("%s is missing inputs." % self)
gathered_inputs = self._gather_input_tensors(*input_tensors)
# Concatenate inputs
return tf.concat(gathered_inputs, axis=1)
@utils.docinherit(OpNode)
def _compute_log_mpe_value(self, *input_tensors):
return self._compute_log_value(*input_tensors)
@utils.lru_cache
def _compute_log_mpe_path(self, counts, *input_values, use_unweighted=False):
# Check inputs
if not self._inputs:
raise StructureError("%s is missing inputs." % self)
# Split counts for each input
input_sizes = self.get_input_sizes(*input_values)
split = tf.split(counts, num_or_size_splits=input_sizes, axis=1)
return self._scatter_to_input_tensors(*[(t, v) for t, v in
zip(split, input_values)])
|
[
"libspn.exceptions.StructureError",
"tensorflow.concat",
"libspn.graph.node.Input",
"libspn.utils.docinherit",
"tensorflow.split",
"itertools.chain.from_iterable"
] |
[((1984, 2008), 'libspn.utils.docinherit', 'utils.docinherit', (['OpNode'], {}), '(OpNode)\n', (2000, 2008), False, 'from libspn import utils\n'), ((2223, 2247), 'libspn.utils.docinherit', 'utils.docinherit', (['OpNode'], {}), '(OpNode)\n', (2239, 2247), False, 'from libspn import utils\n'), ((2512, 2536), 'libspn.utils.docinherit', 'utils.docinherit', (['OpNode'], {}), '(OpNode)\n', (2528, 2536), False, 'from libspn import utils\n'), ((2935, 2959), 'libspn.utils.docinherit', 'utils.docinherit', (['OpNode'], {}), '(OpNode)\n', (2951, 2959), False, 'from libspn import utils\n'), ((3303, 3327), 'libspn.utils.docinherit', 'utils.docinherit', (['OpNode'], {}), '(OpNode)\n', (3319, 3327), False, 'from libspn import utils\n'), ((3262, 3296), 'tensorflow.concat', 'tf.concat', (['gathered_inputs'], {'axis': '(1)'}), '(gathered_inputs, axis=1)\n', (3271, 3296), True, 'import tensorflow as tf\n'), ((3770, 3826), 'tensorflow.split', 'tf.split', (['counts'], {'num_or_size_splits': 'input_sizes', 'axis': '(1)'}), '(counts, num_or_size_splits=input_sizes, axis=1)\n', (3778, 3826), True, 'import tensorflow as tf\n'), ((2107, 2153), 'libspn.exceptions.StructureError', 'StructureError', (["('%s is missing inputs.' % self)"], {}), "('%s is missing inputs.' % self)\n", (2121, 2153), False, 'from libspn.exceptions import StructureError\n'), ((2340, 2386), 'libspn.exceptions.StructureError', 'StructureError', (["('%s is missing inputs.' % self)"], {}), "('%s is missing inputs.' % self)\n", (2354, 2386), False, 'from libspn.exceptions import StructureError\n'), ((2471, 2504), 'itertools.chain.from_iterable', 'chain.from_iterable', (['input_scopes'], {}), '(input_scopes)\n', (2490, 2504), False, 'from itertools import chain\n'), ((2629, 2675), 'libspn.exceptions.StructureError', 'StructureError', (["('%s is missing inputs.' % self)"], {}), "('%s is missing inputs.' % self)\n", (2643, 2675), False, 'from libspn.exceptions import StructureError\n'), ((3101, 3147), 'libspn.exceptions.StructureError', 'StructureError', (["('%s is missing inputs.' % self)"], {}), "('%s is missing inputs.' % self)\n", (3115, 3147), False, 'from libspn.exceptions import StructureError\n'), ((3611, 3657), 'libspn.exceptions.StructureError', 'StructureError', (["('%s is missing inputs.' % self)"], {}), "('%s is missing inputs.' % self)\n", (3625, 3657), False, 'from libspn.exceptions import StructureError\n'), ((1109, 1136), 'libspn.graph.node.Input', 'Input', (['nodes_by_name[nn]', 'i'], {}), '(nodes_by_name[nn], i)\n', (1114, 1136), False, 'from libspn.graph.node import OpNode, Input\n')]
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.modules.export_file import export_to_files
from frappe.model.document import Document
class DeskPage(Document):
def validate(self):
if (not (frappe.flags.in_install or frappe.flags.in_patch or frappe.flags.in_test or frappe.flags.in_fixtures)
and not frappe.conf.developer_mode):
frappe.throw(_("You need to be in developer mode to edit this document"))
def on_update(self):
export_to_files(record_list=[['Desk Page', self.name]], record_module=self.module)
|
[
"frappe.modules.export_file.export_to_files",
"frappe._"
] |
[((610, 697), 'frappe.modules.export_file.export_to_files', 'export_to_files', ([], {'record_list': "[['Desk Page', self.name]]", 'record_module': 'self.module'}), "(record_list=[['Desk Page', self.name]], record_module=self.\n module)\n", (625, 697), False, 'from frappe.modules.export_file import export_to_files\n'), ((524, 583), 'frappe._', '_', (['"""You need to be in developer mode to edit this document"""'], {}), "('You need to be in developer mode to edit this document')\n", (525, 583), False, 'from frappe import _\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import ephem
# See whether asking for the rising-time of Mars hangs indefinitely.
class Launchpad236872Tests(unittest.TestCase):
def runTest(self):
mars = ephem.Mars()
boston = ephem.city('Boston')
boston.date = ephem.Date('2008/5/29 15:59:16')
boston.next_rising(mars)
|
[
"ephem.city",
"ephem.Date",
"ephem.Mars"
] |
[((232, 244), 'ephem.Mars', 'ephem.Mars', ([], {}), '()\n', (242, 244), False, 'import ephem\n'), ((262, 282), 'ephem.city', 'ephem.city', (['"""Boston"""'], {}), "('Boston')\n", (272, 282), False, 'import ephem\n'), ((305, 337), 'ephem.Date', 'ephem.Date', (['"""2008/5/29 15:59:16"""'], {}), "('2008/5/29 15:59:16')\n", (315, 337), False, 'import ephem\n')]
|
# Copyright 2016 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# https://github.com/kmbnw/rank_metrics/blob/master/python/ndcg.py
from __future__ import absolute_import, annotations
import numpy as np
"""
Implementation of normalized discounted cumulative gain.
Handy for testing ranking algorithms.
https://en.wikipedia.org/wiki/Discounted_cumulative_gain
"""
def cum_gain(relevance):
"""
Calculate cumulative gain.
This ignores the position of a result, but may still be generally useful.
@param relevance: Graded relevances of the results.
@type relevance: C{seq} or C{numpy.array}
"""
if relevance is None or len(relevance) < 1:
return 0.0
return np.asarray(relevance).sum()
def dcg(relevance, alternate=True):
"""
Calculate discounted cumulative gain.
@param relevance: Graded and ordered relevances of the results.
@type relevance: C{seq} or C{numpy.array}
@param alternate: True to use the alternate scoring (intended to
place more emphasis on relevant results).
@type alternate: C{bool}
"""
if relevance is None or len(relevance) < 1:
return 0.0
rel = np.asarray(relevance)
p = len(rel)
if alternate:
# from wikipedia: "An alternative formulation of
# DCG[5] places stronger emphasis on retrieving relevant documents"
log2i = np.log2(np.asarray(range(1, p + 1)) + 1)
return ((np.power(2, rel) - 1) / log2i).sum()
else:
log2i = np.log2(range(2, p + 1))
return rel[0] + (rel[1:] / log2i).sum()
def idcg(relevance, alternate=True):
"""
Calculate ideal discounted cumulative gain (maximum possible DCG).
@param relevance: Graded and ordered relevances of the results.
@type relevance: C{seq} or C{numpy.array}
@param alternate: True to use the alternate scoring (intended to
place more emphasis on relevant results).
@type alternate: C{bool}
"""
if relevance is None or len(relevance) < 1:
return 0.0
# guard copy before sort
rel = np.asarray(relevance).copy()
rel.sort()
return dcg(rel[::-1], alternate)
def ndcg(relevance, nranks, alternate=True):
"""
Calculate normalized discounted cumulative gain.
@param relevance: Graded and ordered relevances of the results.
@type relevance: C{seq} or C{numpy.array}
@param nranks: Number of ranks to use when calculating NDCG.
Will be used to rightpad with zeros if len(relevance) is less
than nranks
@type nranks: C{int}
@param alternate: True to use the alternate scoring (intended to
place more emphasis on relevant results).
@type alternate: C{bool}
"""
if relevance is None or len(relevance) < 1:
return 0.0
if nranks < 1:
raise Exception("nranks < 1")
rel = np.asarray(relevance)
pad = max(0, nranks - len(rel))
# pad could be zero in which case this will no-op
rel = np.pad(rel, (0, pad), "constant")
# now slice downto nranks
rel = rel[0 : min(nranks, len(rel))]
ideal_dcg = idcg(rel, alternate)
if ideal_dcg == 0:
return 0.0
return dcg(rel, alternate) / ideal_dcg
|
[
"numpy.pad",
"numpy.power",
"numpy.asarray"
] |
[((1683, 1704), 'numpy.asarray', 'np.asarray', (['relevance'], {}), '(relevance)\n', (1693, 1704), True, 'import numpy as np\n'), ((3342, 3363), 'numpy.asarray', 'np.asarray', (['relevance'], {}), '(relevance)\n', (3352, 3363), True, 'import numpy as np\n'), ((3465, 3498), 'numpy.pad', 'np.pad', (['rel', '(0, pad)', '"""constant"""'], {}), "(rel, (0, pad), 'constant')\n", (3471, 3498), True, 'import numpy as np\n'), ((1221, 1242), 'numpy.asarray', 'np.asarray', (['relevance'], {}), '(relevance)\n', (1231, 1242), True, 'import numpy as np\n'), ((2578, 2599), 'numpy.asarray', 'np.asarray', (['relevance'], {}), '(relevance)\n', (2588, 2599), True, 'import numpy as np\n'), ((1949, 1965), 'numpy.power', 'np.power', (['(2)', 'rel'], {}), '(2, rel)\n', (1957, 1965), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
from isbndb import ISBNdbException
from isbndb.models import *
from isbndb.client import ISBNdbClient
from isbndb.catalog import *
from unittest import TestCase
ACCESS_KEY = "<KEY>"
class ISBNdbTest(TestCase):
    def setUp(self):
        # unittest only calls the camelCase setUp/tearDown hooks
        self.client = ISBNdbClient( access_key=ACCESS_KEY )
    def tearDown(self):
        pass
def test_connection(self):
catalog = BookCollection(self.client)
result = catalog.isbn('0210406240', results='authors')
if __name__ == "__main__":
from unittest import main
main( )
|
[
"unittest.main",
"isbndb.client.ISBNdbClient"
] |
[((567, 573), 'unittest.main', 'main', ([], {}), '()\n', (571, 573), False, 'from unittest import main\n'), ((285, 320), 'isbndb.client.ISBNdbClient', 'ISBNdbClient', ([], {'access_key': 'ACCESS_KEY'}), '(access_key=ACCESS_KEY)\n', (297, 320), False, 'from isbndb.client import ISBNdbClient\n')]
|
import nerdle_cfg
import re
import luigi
import d6tflow
import itertools
import pandas as pd
import numpy as np
#helper functions
def check_len_int(nerdle):
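    # An operand is valid only if it survives an int() round-trip unchanged in length (this rejects leading zeros)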
nerdle_str = ''.join(nerdle)
try:
return all(len(x)==len(str(int(x))) for x in re.split('\+|\-|\*|\/|==',nerdle_str))
except:
return False
def rt_is_num(nerdle):
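    # The tokens to the right of '==' must form a plain number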
rt_arr = nerdle[np.where(np.array(nerdle)=='==')[0][0]+1:]
test_str = ''.join(rt_arr)
return test_str.isnumeric()
def join_elems_of_tups(list_o_tups):
return list(map(lambda x: ''.join(x),list_o_tups))
def test_eval(nerdle):
test_str = ''.join(nerdle)
try:
return eval(test_str)
except:
return False
class buildNerdles(d6tflow.tasks.TaskPqPandas):
nerdle_len = luigi.IntParameter()
def run(self):
nerdle_len = self.nerdle_len
nerdles = list(itertools.combinations_with_replacement(nerdle_cfg.nerd_list,nerdle_len))
#TODO: Optimize second list comprehension using filter if possible
nerdles = list(filter(
lambda nerdle: ('==' in nerdle)&
bool(any(i in nerdle for i in [x for x in nerdle_cfg.nerd_op_list if x!="=="])),nerdles))
nerdle_ser = pd.Series(nerdles)
nerdle_df = pd.DataFrame(nerdle_ser)
nerdle_df.columns=['nerdle_combinations']
#for each nerdle combination create permutations
nerdle_df['perms'] = nerdle_df['nerdle_combinations'].apply(itertools.permutations,nerdle_len)
# can't start or end with an equals sign and turns permutation tuples into a list
nerdle_df['perm_red_stend_equal'] = nerdle_df['perms'].apply(lambda y: filter(lambda x:(list(x)[0]!='==')&(list(x)[-1]!='=='),y))
# equal sign appears only once
nerdle_df['perm_equal_once'] = nerdle_df['perm_red_stend_equal'].apply(lambda y: filter(lambda x: x.count('==')==1,y))
# elements to the right of the equal sign must be a number
nerdle_df['right_equal_must_be_number'] = nerdle_df['perm_equal_once'].apply(lambda y: filter(lambda x: rt_is_num(x),y))
#length of string has to be 9
nerdle_df['len_check'] = nerdle_df['right_equal_must_be_number'].apply(lambda y: filter(lambda x: len(x)==nerdle_len,y))
#check that non operater numbers are of proper length
nerdle_df['non_op_num_check'] = nerdle_df['len_check'].apply(lambda y: filter(lambda x: check_len_int(x),y))
#check that string evals properly
nerdle_df['eval_check'] = nerdle_df['non_op_num_check'].apply(lambda y: filter(lambda x: test_eval(x),y))
self.save(nerdle_df)
|
[
"pandas.DataFrame",
"re.split",
"itertools.combinations_with_replacement",
"numpy.array",
"pandas.Series",
"luigi.IntParameter"
] |
[((762, 782), 'luigi.IntParameter', 'luigi.IntParameter', ([], {}), '()\n', (780, 782), False, 'import luigi\n'), ((1223, 1241), 'pandas.Series', 'pd.Series', (['nerdles'], {}), '(nerdles)\n', (1232, 1241), True, 'import pandas as pd\n'), ((1262, 1286), 'pandas.DataFrame', 'pd.DataFrame', (['nerdle_ser'], {}), '(nerdle_ser)\n', (1274, 1286), True, 'import pandas as pd\n'), ((873, 946), 'itertools.combinations_with_replacement', 'itertools.combinations_with_replacement', (['nerdle_cfg.nerd_list', 'nerdle_len'], {}), '(nerdle_cfg.nerd_list, nerdle_len)\n', (912, 946), False, 'import itertools\n'), ((254, 296), 're.split', 're.split', (['"""\\\\+|\\\\-|\\\\*|\\\\/|=="""', 'nerdle_str'], {}), "('\\\\+|\\\\-|\\\\*|\\\\/|==', nerdle_str)\n", (262, 296), False, 'import re\n'), ((379, 395), 'numpy.array', 'np.array', (['nerdle'], {}), '(nerdle)\n', (387, 395), True, 'import numpy as np\n')]
|
__author__ = 'grahamhub'
import pygame
import random
import sys
import time
# colors
black = (0, 0, 0)
white = (255, 255, 255)
blue = (35, 25, 255)
green = (35, 255, 25)
red = (255, 35, 25)
count = 0
# width/height of snake segments
seg_width = 15
seg_height = 15
# space between each segment
seg_margin = 3
# set initial speed
x_change = seg_width + seg_margin
y_change = 0
def play():
while True:
font = pygame.font.Font(None, 60)
font.set_bold(True)
title = font.render("Press Enter to Play", True, white)
titlepos = title.get_rect()
titlepos.centerx = screen.get_rect().centerx
titlepos.centery = screen.get_rect().centery
screen.blit(title, titlepos)
pygame.display.flip()
for event in pygame.event.get():
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_RETURN:
return True
def score():
while True:
font = pygame.font.Font(None, 60)
font.set_bold(True)
title = font.render("Your score was " + str(count) + "!", True, white)
titlepos = title.get_rect()
titlepos.centerx = screen.get_rect().centerx
titlepos.centery = screen.get_rect().centery
screen.blit(title, titlepos)
pygame.display.flip()
time.sleep(3)
break
def replay():
while True:
font = pygame.font.Font(None, 60)
font.set_bold(True)
title = font.render("Press Enter to Replay", True, white)
titlepos = title.get_rect()
titlepos.centerx = screen.get_rect().centerx
titlepos.centery = screen.get_rect().centery
screen.blit(title, titlepos)
pygame.display.flip()
for event in pygame.event.get():
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_RETURN:
return True
if event.key == pygame.K_ESCAPE:
sys.exit()
class Segment(pygame.sprite.Sprite):
# class to represent one segment of the snake
def __init__(self, x, y):
super(Segment, self).__init__()
# set height/width
self.image = pygame.Surface([seg_width, seg_height])
self.image.fill(white)
# starting pos(top left corner)
self.rect = self.image.get_rect()
self.rect.x = x
self.rect.y = y
class Block(pygame.sprite.Sprite):
# class for blocks to collect
def __init__(self):
super(Block, self).__init__()
self.image = pygame.Surface([seg_width, seg_height])
self.image.fill(green)
self.rect = self.image.get_rect()
# spawning the block
def spawn(self):
self.rect.x = random.randrange(10, 790)
self.rect.y = random.randrange(10, 590)
class Enemy(pygame.sprite.Sprite):
def __init__(self):
super(Enemy, self).__init__()
self.image = pygame.Surface([seg_width, seg_height])
self.image.fill(red)
self.rect = self.image.get_rect()
def spawn(self):
self.rect.x = random.randrange(10, 790)
self.rect.y = random.randrange(10, 590)
pygame.init()
screen = pygame.display.set_mode([800, 600])
pygame.display.set_caption('Snake')
points = pygame.sprite.Group()
obstacles = pygame.sprite.Group()
allspriteslist = pygame.sprite.Group()
block = Block()
points.add(block)
# create the snake
snake_segs = []
for i in range(5):
x = 250 - (seg_width + seg_margin) * i
y = 30
segment = Segment(x, y)
snake_segs.append(segment)
allspriteslist.add(segment)
clock = pygame.time.Clock()
enemies = []
def addenemy():
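    # Spawn a red obstacle at a random position and register it with the obstacle sprite group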
enemy = Enemy()
enemy.spawn()
enemies.append(enemy)
obstacles.add(enemies)
obstacles.draw(screen)
# spawn the first block
block.spawn()
if play() is True:
while True:
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
# speed = a segment plus the margin
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT:
x_change = (seg_width + seg_margin) * -1
y_change = 0
if event.key == pygame.K_RIGHT:
x_change = (seg_width + seg_margin)
y_change = 0
if event.key == pygame.K_UP:
x_change = 0
y_change = (seg_height + seg_margin) * -1
if event.key == pygame.K_DOWN:
x_change = 0
y_change = (seg_height + seg_margin)
# so that the snake doesn't keep growing:
old_segment = snake_segs.pop()
allspriteslist.remove(old_segment)
# where the new segment will be:
x = snake_segs[0].rect.x + x_change
y = snake_segs[0].rect.y + y_change
segment = Segment(x, y)
# if out of bounds
if x > 800 or x < 0:
allspriteslist.empty()
screen.fill(black)
break
if y > 600 or y < 0:
allspriteslist.empty()
screen.fill(black)
break
# put new segment into list
snake_segs.insert(0, segment)
allspriteslist.add(segment)
screen.fill(blue)
points.draw(screen)
obstacles.draw(screen)
allspriteslist.draw(screen)
# check for collisions
blocks_hit = pygame.sprite.spritecollide(segment, points, False)
if blocks_hit:
snake_segs.append(segment)
allspriteslist.add(segment)
block.spawn()
points.add(block)
addenemy()
count += 1
endgame = pygame.sprite.spritecollide(segment, obstacles, True)
if endgame:
allspriteslist.empty()
screen.fill(black)
break
pygame.display.flip()
# set speed
clock.tick(10)
score()
screen.fill(black)
if replay() is True:
for i in range(count):
enemies.pop()
obstacles.empty()
snake_segs = []
for i in range(5):
x = 250 - (seg_width + seg_margin) * i
y = 30
segment = Segment(x, y)
snake_segs.append(segment)
allspriteslist.add(segment)
x_change = (seg_width + seg_margin)
y_change = 0
count -= count
|
[
"pygame.Surface",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.init",
"pygame.display.flip",
"pygame.sprite.Group",
"time.sleep",
"random.randrange",
"pygame.sprite.spritecollide",
"pygame.font.Font",
"pygame.display.set_caption",
"pygame.time.Clock",
"sys.exit"
] |
[((3140, 3153), 'pygame.init', 'pygame.init', ([], {}), '()\n', (3151, 3153), False, 'import pygame\n'), ((3164, 3199), 'pygame.display.set_mode', 'pygame.display.set_mode', (['[800, 600]'], {}), '([800, 600])\n', (3187, 3199), False, 'import pygame\n'), ((3201, 3236), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Snake"""'], {}), "('Snake')\n", (3227, 3236), False, 'import pygame\n'), ((3247, 3268), 'pygame.sprite.Group', 'pygame.sprite.Group', ([], {}), '()\n', (3266, 3268), False, 'import pygame\n'), ((3281, 3302), 'pygame.sprite.Group', 'pygame.sprite.Group', ([], {}), '()\n', (3300, 3302), False, 'import pygame\n'), ((3320, 3341), 'pygame.sprite.Group', 'pygame.sprite.Group', ([], {}), '()\n', (3339, 3341), False, 'import pygame\n'), ((3585, 3604), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (3602, 3604), False, 'import pygame\n'), ((425, 451), 'pygame.font.Font', 'pygame.font.Font', (['None', '(60)'], {}), '(None, 60)\n', (441, 451), False, 'import pygame\n'), ((731, 752), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (750, 752), False, 'import pygame\n'), ((774, 792), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (790, 792), False, 'import pygame\n'), ((966, 992), 'pygame.font.Font', 'pygame.font.Font', (['None', '(60)'], {}), '(None, 60)\n', (982, 992), False, 'import pygame\n'), ((1287, 1308), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (1306, 1308), False, 'import pygame\n'), ((1317, 1330), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1327, 1330), False, 'import time\n'), ((1392, 1418), 'pygame.font.Font', 'pygame.font.Font', (['None', '(60)'], {}), '(None, 60)\n', (1408, 1418), False, 'import pygame\n'), ((1700, 1721), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (1719, 1721), False, 'import pygame\n'), ((1743, 1761), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (1759, 1761), False, 'import pygame\n'), ((2177, 2216), 'pygame.Surface', 'pygame.Surface', (['[seg_width, seg_height]'], {}), '([seg_width, seg_height])\n', (2191, 2216), False, 'import pygame\n'), ((2533, 2572), 'pygame.Surface', 'pygame.Surface', (['[seg_width, seg_height]'], {}), '([seg_width, seg_height])\n', (2547, 2572), False, 'import pygame\n'), ((2715, 2740), 'random.randrange', 'random.randrange', (['(10)', '(790)'], {}), '(10, 790)\n', (2731, 2740), False, 'import random\n'), ((2763, 2788), 'random.randrange', 'random.randrange', (['(10)', '(590)'], {}), '(10, 590)\n', (2779, 2788), False, 'import random\n'), ((2909, 2948), 'pygame.Surface', 'pygame.Surface', (['[seg_width, seg_height]'], {}), '([seg_width, seg_height])\n', (2923, 2948), False, 'import pygame\n'), ((3064, 3089), 'random.randrange', 'random.randrange', (['(10)', '(790)'], {}), '(10, 790)\n', (3080, 3089), False, 'import random\n'), ((3112, 3137), 'random.randrange', 'random.randrange', (['(10)', '(590)'], {}), '(10, 590)\n', (3128, 3137), False, 'import random\n'), ((3877, 3895), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (3893, 3895), False, 'import pygame\n'), ((5614, 5665), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['segment', 'points', '(False)'], {}), '(segment, points, False)\n', (5641, 5665), False, 'import pygame\n'), ((5920, 5973), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['segment', 'obstacles', '(True)'], {}), '(segment, obstacles, True)\n', (5947, 5973), False, 'import pygame\n'), ((6107, 6128), 'pygame.display.flip', 'pygame.display.flip', ([], {}), 
'()\n', (6126, 6128), False, 'import pygame\n'), ((1958, 1968), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1966, 1968), False, 'import sys\n'), ((3963, 3973), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3971, 3973), False, 'import sys\n')]
|
from django.utils.translation import gettext as _
from rest_framework import serializers
from ...core.utils import format_plaintext_for_html
from ..models import Ban
__all__ = ["BanMessageSerializer", "BanDetailsSerializer"]
def serialize_message(message):
if message:
return {"plain": message, "html": format_plaintext_for_html(message)}
class BanMessageSerializer(serializers.ModelSerializer):
message = serializers.SerializerMethodField()
class Meta:
model = Ban
fields = ["message", "expires_on"]
def get_message(self, obj):
if obj.user_message:
message = obj.user_message
elif obj.check_type == Ban.IP:
message = _("Your IP address is banned.")
else:
message = _("You are banned.")
return serialize_message(message)
class BanDetailsSerializer(serializers.ModelSerializer):
user_message = serializers.SerializerMethodField()
staff_message = serializers.SerializerMethodField()
class Meta:
model = Ban
fields = ["user_message", "staff_message", "expires_on"]
def get_user_message(self, obj):
return serialize_message(obj.user_message)
def get_staff_message(self, obj):
return serialize_message(obj.staff_message)
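# Illustrative usage sketch (not part of the original module; `ban` stands for
# any Ban model instance):
#
#   BanMessageSerializer(ban).data
#   # -> {"message": {"plain": ..., "html": ...}, "expires_on": ...}
#   BanDetailsSerializer(ban).data
#   # -> {"user_message": ..., "staff_message": ..., "expires_on": ...}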
|
[
"django.utils.translation.gettext",
"rest_framework.serializers.SerializerMethodField"
] |
[((428, 463), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (461, 463), False, 'from rest_framework import serializers\n'), ((916, 951), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (949, 951), False, 'from rest_framework import serializers\n'), ((972, 1007), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (1005, 1007), False, 'from rest_framework import serializers\n'), ((706, 737), 'django.utils.translation.gettext', '_', (['"""Your IP address is banned."""'], {}), "('Your IP address is banned.')\n", (707, 737), True, 'from django.utils.translation import gettext as _\n'), ((774, 794), 'django.utils.translation.gettext', '_', (['"""You are banned."""'], {}), "('You are banned.')\n", (775, 794), True, 'from django.utils.translation import gettext as _\n')]
|
from ..helpers import eos
from ..helpers import alfaFunctions
from ..helpers.eosHelpers import A_fun, B_fun, getCubicCoefficients, getMixFugacity,getMixFugacityCoef, dAdT_fun
from ..solvers.cubicSolver import cubic_solver
from ..helpers import temperatureCorrelations as tempCorr
from ..helpers import mixing_rules
from numpy import log, exp, sqrt,absolute, array,sum
from scipy.optimize import fsolve, newton, root
from scipy.integrate import quad
def solve_eos(t,p,tc,pc,acentric,liq_compositions,vap_compositions,kij,method='pr',alfa_function='alfa_peng_robinson',mixing_rule='van_der_waals',diagram=False,properties=False,heat_capacities=None):
# Vectorization
tc = array(tc)
pc= array(pc)
acentric = array(acentric)
liq_compositions=array(liq_compositions)
vap_compositions = array(vap_compositions)
kij = array(kij)
# Method selection
eos_fun = eos.selector(method)
u,w,omega_a,omega_b,L = eos_fun()
# Alfa function selection
alfa_fun = alfaFunctions.selector(alfa_function)
alfa= alfa_fun(t,tc,acentric)
Ai = A_fun(t,p,tc,pc,acentric,omega_a,alfa)
Bi = B_fun(t,p,tc,pc,omega_b)
# Mixing rules
mixing_rule_used = mixing_rules.selector(mixing_rule)
A_liq,B_liq,A_i_liq,Aij_liq,dAdT_liq = mixing_rule_used(liq_compositions,tc,acentric,kij,Ai,Bi,alfa,alfa_fun,t)
A_vap,B_vap,A_i_vap,Aij_vap,dAdT_vap = mixing_rule_used(vap_compositions,tc,acentric,kij,Ai,Bi,alfa,alfa_fun,t)
coefficients_liq = getCubicCoefficients(A_liq,B_liq,u,w)
coefficients_vap = getCubicCoefficients(A_vap,B_vap,u,w)
z_liq= cubic_solver(coefficients_liq,diagram,B_liq)
z_vap = cubic_solver(coefficients_vap,diagram,B_vap)
z_liq = z_liq[0] if isinstance(z_liq,tuple) else z_liq
z_vap = z_vap[1] if isinstance(z_vap,tuple) else z_vap
liq_fugacity_coef = getMixFugacityCoef(z_liq,A_liq,B_liq,A_i_liq,Bi,L)
vap_fugacity_coef = getMixFugacityCoef(z_vap,A_vap,B_vap,A_i_vap,Bi,L)
if(properties):
liq_fugacity = getMixFugacity(z_liq,A_liq,B_liq,A_i_liq,B_liq,L,liq_compositions,p)
vap_fugacity = getMixFugacity(z_vap,A_vap,B_vap,A_i_vap,B_vap,L,vap_compositions,p)
heat_capacities = array(heat_capacities)
ideal_enthalpies = get_ideal_enthalpy(heat_capacities,t)
ideal_entropies = get_ideal_entropy(heat_capacities,t,p)
dAdt = dAdT_fun(t,p,tc,pc,acentric,omega_a,alfa_fun)
enthalpy_liq = get_real_enthalpy(ideal_enthalpies,t,z_liq,A_liq,dAdt,B_liq,L)
enthalpy_vap = get_real_enthalpy(ideal_enthalpies,t,z_vap,A_vap,dAdt,B_vap,L)
entropy_liq = get_real_entropy(ideal_entropies,z_liq,A_liq,dAdt,B_liq,L)
entropy_vap = get_real_entropy(ideal_entropies,z_vap,A_vap,dAdt,B_vap,L)
response = {
"liq_fugacity":liq_fugacity,
"vap_fugacity":vap_fugacity,
"enthalpy_liq":enthalpy_liq,
"enthalpy_vap":enthalpy_vap,
"entropy_liq":entropy_liq,
"entropy_vap":entropy_vap,
"z_liq":z_liq,
"z_vap":z_vap,
"liq_compositions":liq_compositions,
"vap_compositions":vap_compositions
}
return response
return (liq_fugacity_coef,vap_fugacity_coef)
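# Illustrative call (hypothetical binary mixture; the numbers are placeholders
# and units must match whatever the helper correlations expect):
#
#   phi_liq, phi_vap = solve_eos(
#       t=350.0, p=1.0,
#       tc=[507.6, 540.2], pc=[30.25, 27.4], acentric=[0.301, 0.350],
#       liq_compositions=[0.4, 0.6], vap_compositions=[0.5, 0.5],
#       kij=[[0.0, 0.0], [0.0, 0.0]])
#
# With properties=True (and heat_capacities supplied) the same call returns the
# response dictionary built above instead of the fugacity-coefficient pair.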
def bubble_temperature(t,p,tc,pc,acentric,liq_compositions,vap_compositions,kij,delta_t=0.1,method='pr',alfa_function='alfa_peng_robinson',mixing_rule='van_der_waals'):
liq_fugacity_coef,vap_fugacity_coef = solve_eos(t,p,tc,pc,acentric,liq_compositions,vap_compositions,kij,method,alfa_function,mixing_rule)
Ki = liq_fugacity_coef/vap_fugacity_coef
Sy = sum(Ki*liq_compositions)
E = log(Sy)
attempts=0
new_t=t
new_vap_compositions = vap_compositions
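    # Successive substitution on the K-values, combined with what is in effect
    # a secant step in 1/T on the objective E = log(sum(Ki*xi)); E -> 0 at the
    # bubble point.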
while(absolute(E) >= 1e-9):
if(attempts == 500):
return 'Problem can not be solved'
t0 = new_t + delta_t
liq_fugacity_coef0,vap_fugacity_coef0 = solve_eos(t0,p,tc,pc,acentric,liq_compositions,new_vap_compositions,kij,method,alfa_function,mixing_rule)
Ki0 = liq_fugacity_coef0/vap_fugacity_coef0
Sy0 = sum(Ki0*liq_compositions)
E0 = log(Sy0)
new_t = (new_t*t0*(E0-E))/(t0*E0-new_t*E)
Sy = sum(Ki*liq_compositions)
new_vap_compositions = (Ki*liq_compositions)/Sy
liq_fugacity_coef,vap_fugacity_coef = solve_eos(new_t,p,tc,pc,acentric,liq_compositions,new_vap_compositions,kij,method,alfa_function,mixing_rule)
Ki = liq_fugacity_coef/vap_fugacity_coef
Sy = sum(Ki*liq_compositions)
E=log(Sy)
attempts +=1
return(new_t,p,liq_compositions,new_vap_compositions)
def bubble_pressure(t,p,tc,pc,acentric,liq_compositions,vap_compositions,kij,delta_p=0.001,method='pr',alfa_function='alfa_peng_robinson',mixing_rule='van_der_waals'):
liq_fugacity_coef,vap_fugacity_coef = solve_eos(t,p,tc,pc,acentric,liq_compositions,vap_compositions,kij,method,alfa_function,mixing_rule)
Ki = liq_fugacity_coef/vap_fugacity_coef
Sy = sum(Ki*liq_compositions)
E = Sy -1
attempts=0
new_p=p
new_vap_compositions = vap_compositions
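    # Analogous secant-style update to bubble_temperature, now in pressure,
    # with the objective E = sum(Ki*xi) - 1 driven to zero.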
while(absolute(E) >= 1e-9):
if(attempts == 100):
            return 'Problem can not be solved'
p0=new_p*(1+delta_p)
liq_fugacity_coef0,vap_fugacity_coef0 = solve_eos(t,p0,tc,pc,acentric,liq_compositions,new_vap_compositions,kij,method,alfa_function,mixing_rule)
Ki0 = liq_fugacity_coef0/vap_fugacity_coef0
Sy0 = sum(Ki0*liq_compositions)
E0=Sy0-1
new_p = (new_p*p0*(E0-E))/(p0*E0-new_p*E)
Sy = sum(Ki*liq_compositions)
new_vap_compositions = (Ki*liq_compositions)/Sy
liq_fugacity_coef,vap_fugacity_coef = solve_eos(t,new_p,tc,pc,acentric,liq_compositions,new_vap_compositions,kij,method,alfa_function,mixing_rule)
Ki = liq_fugacity_coef/vap_fugacity_coef
Sy = sum(Ki*liq_compositions)
E = Sy -1
attempts +=1
return(t,new_p,liq_compositions,new_vap_compositions)
def dew_temperature(t,p,tc,pc,acentric,liq_compositions,vap_compositions,kij,delta_t=0.1,method='pr',alfa_function='alfa_peng_robinson',mixing_rule='van_der_waals'):
liq_fugacity_coef,vap_fugacity_coef = solve_eos(t,p,tc,pc,acentric,liq_compositions,vap_compositions,kij,method,alfa_function,mixing_rule)
Ki = liq_fugacity_coef/vap_fugacity_coef
Sx = sum(vap_compositions/Ki)
E = log(Sx)
attempts=0
new_t=t
new_liq_compositions = liq_compositions
while(absolute(E) >= 1e-9):
if(attempts == 500):
            return 'Problem can not be solved'
t0 = new_t + delta_t
liq_fugacity_coef0,vap_fugacity_coef0 = solve_eos(t0,p,tc,pc,acentric,new_liq_compositions,vap_compositions,kij,method,alfa_function,mixing_rule)
Ki0 = liq_fugacity_coef0/vap_fugacity_coef0
Sx0 = sum(vap_compositions/Ki0)
E0 = log(Sx0)
new_t = (new_t*t0*(E0-E))/(t0*E0-new_t*E)
Sx = sum(vap_compositions/Ki)
new_liq_compositions = vap_compositions/(Ki*Sx)
liq_fugacity_coef,vap_fugacity_coef = solve_eos(new_t,p,tc,pc,acentric,new_liq_compositions,vap_compositions,kij,method,alfa_function,mixing_rule)
Ki = liq_fugacity_coef/vap_fugacity_coef
Sx = sum(vap_compositions/Ki)
E = log(Sx)
attempts +=1
return(new_t,p,new_liq_compositions,vap_compositions)
def dew_pressure(t,p,tc,pc,acentric,liq_compositions,vap_compositions,kij,delta_p=0.001,method='pr',alfa_function='alfa_peng_robinson',mixing_rule='van_der_waals'):
liq_fugacity_coef,vap_fugacity_coef = solve_eos(t,p,tc,pc,acentric,liq_compositions,vap_compositions,kij,method,alfa_function,mixing_rule)
Ki = liq_fugacity_coef/vap_fugacity_coef
Sx = sum(vap_compositions/Ki)
E = Sx -1
attempts=0
new_p=p
new_liq_compositions = liq_compositions
while(absolute(E) >= 1e-9):
if(attempts == 100):
            return 'Problem can not be solved'
p0=new_p*(1+delta_p)
liq_fugacity_coef0,vap_fugacity_coef0 = solve_eos(t,p0,tc,pc,acentric,new_liq_compositions,vap_compositions,kij,method,alfa_function,mixing_rule)
Ki0 = liq_fugacity_coef0/vap_fugacity_coef0
Sx0 = sum(vap_compositions/Ki0)
E0=Sx0-1
new_p = (new_p*p0*(E0-E))/(p0*E0-new_p*E)
Sx = sum(vap_compositions/Ki)
new_liq_compositions = vap_compositions/(Ki*Sx)
liq_fugacity_coef,vap_fugacity_coef = solve_eos(t,new_p,tc,pc,acentric,new_liq_compositions,vap_compositions,kij,method,alfa_function,mixing_rule)
Ki = liq_fugacity_coef/vap_fugacity_coef
Sx = sum(vap_compositions/Ki)
E = Sx -1
attempts +=1
return(t,new_p,new_liq_compositions,vap_compositions)
def flash(t,p,tc,pc,acentric,feed_compositions,liq_compositions,vap_compositions,v_f,kij,delta_p=0.0001,method='pr',alfa_function='alfa_peng_robinson',mixing_rule='van_der_waals'):
tau=1
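    # Outer loop: iterate until the isofugacity residual tau is small.  Inside,
    # S is the Rachford-Rice function and S0 its derivative with respect to
    # v_f, so v_f = v_f - S/S0 is a Newton step on the vapour fraction.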
while(absolute(tau)> 1e-5):
liq_fugacity_coef,vap_fugacity_coef = solve_eos(t,p,tc,pc,acentric,liq_compositions,vap_compositions,kij,method,alfa_function,mixing_rule)
Ki = liq_fugacity_coef/vap_fugacity_coef
S = sum((feed_compositions*(Ki-1))/(1+(v_f*(Ki-1))))
S0 = sum((-feed_compositions*(Ki-1)**2)/(1+v_f*(Ki-1))**2)
v_f = v_f-(S/S0)
liq_compositions0 = feed_compositions/(1+v_f*(Ki-1))
Sx=sum(liq_compositions0)
liq_compositions = liq_compositions0/Sx
vap_compositions0=liq_compositions0*Ki
Sy=sum(vap_compositions0)
vap_compositions=vap_compositions0/Sy
tau=sum(absolute(liq_compositions*liq_fugacity_coef-vap_compositions*vap_fugacity_coef))
return (t,p,feed_compositions,liq_compositions,vap_compositions,v_f)
def get_ideal_enthalpy(heat_capacities,t):
ideal_enthalpies = []
for cp in heat_capacities:
number, constants = cp
heat_capacity_equation = tempCorr.selector(number)
enthalpy,_ = quad(heat_capacity_equation,298,t,args=(constants,))
ideal_enthalpies.append(enthalpy)
return array(ideal_enthalpies)
def get_ideal_entropy(heat_capacities,t,p):
R=8.314
ideal_entropies = []
for cp in heat_capacities:
number,constants = cp
heat_capacity_equation = lambda t,constants :tempCorr.selector(number)(t,constants)/t
I,_ = quad(heat_capacity_equation,298,t,args=(constants,))
entropy = I - R*log(p)
ideal_entropies.append(entropy)
return array(ideal_entropies)
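# The helpers below add the cubic-EOS departure terms to the ideal-gas
# enthalpy and entropy; L(z, B) is the EOS-specific logarithmic term obtained
# from the selected equation of state in solve_eos.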
def get_real_enthalpy(ideal_enthalpies,t,z,A,dAdt,B,L):
R=8.314
enthalpies = ideal_enthalpies + R*t*(z-1+((dAdt-A)/B)*L(z,B))
return enthalpies
def get_real_entropy(ideal_entropies,z,A,dAdt,B,L):
R=8.314
entropies = ideal_entropies + R*(log(z-B)+dAdt/B*L(z,B))
return entropies
|
[
"numpy.absolute",
"numpy.sum",
"numpy.log",
"scipy.integrate.quad",
"numpy.array"
] |
[((680, 689), 'numpy.array', 'array', (['tc'], {}), '(tc)\n', (685, 689), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((698, 707), 'numpy.array', 'array', (['pc'], {}), '(pc)\n', (703, 707), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((723, 738), 'numpy.array', 'array', (['acentric'], {}), '(acentric)\n', (728, 738), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((760, 783), 'numpy.array', 'array', (['liq_compositions'], {}), '(liq_compositions)\n', (765, 783), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((807, 830), 'numpy.array', 'array', (['vap_compositions'], {}), '(vap_compositions)\n', (812, 830), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((841, 851), 'numpy.array', 'array', (['kij'], {}), '(kij)\n', (846, 851), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((3638, 3664), 'numpy.sum', 'sum', (['(Ki * liq_compositions)'], {}), '(Ki * liq_compositions)\n', (3641, 3664), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((3671, 3678), 'numpy.log', 'log', (['Sy'], {}), '(Sy)\n', (3674, 3678), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((5039, 5065), 'numpy.sum', 'sum', (['(Ki * liq_compositions)'], {}), '(Ki * liq_compositions)\n', (5042, 5065), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((6406, 6432), 'numpy.sum', 'sum', (['(vap_compositions / Ki)'], {}), '(vap_compositions / Ki)\n', (6409, 6432), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((6439, 6446), 'numpy.log', 'log', (['Sx'], {}), '(Sx)\n', (6442, 6446), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((7779, 7805), 'numpy.sum', 'sum', (['(vap_compositions / Ki)'], {}), '(vap_compositions / Ki)\n', (7782, 7805), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((10130, 10153), 'numpy.array', 'array', (['ideal_enthalpies'], {}), '(ideal_enthalpies)\n', (10135, 10153), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((10545, 10567), 'numpy.array', 'array', (['ideal_entropies'], {}), '(ideal_entropies)\n', (10550, 10567), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((2222, 2244), 'numpy.array', 'array', (['heat_capacities'], {}), '(heat_capacities)\n', (2227, 2244), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((3761, 3772), 'numpy.absolute', 'absolute', (['E'], {}), '(E)\n', (3769, 3772), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((4129, 4156), 'numpy.sum', 'sum', (['(Ki0 * liq_compositions)'], {}), '(Ki0 * liq_compositions)\n', (4132, 4156), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((4168, 4176), 'numpy.log', 'log', (['Sy0'], {}), '(Sy0)\n', (4171, 4176), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((4240, 4266), 'numpy.sum', 'sum', (['(Ki * liq_compositions)'], {}), '(Ki * liq_compositions)\n', (4243, 4266), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((4538, 4564), 'numpy.sum', 'sum', (['(Ki * liq_compositions)'], {}), '(Ki * liq_compositions)\n', (4541, 4564), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((4573, 4580), 'numpy.log', 'log', (['Sy'], {}), '(Sy)\n', (4576, 4580), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((5162, 5173), 'numpy.absolute', 'absolute', (['E'], {}), '(E)\n', (5170, 5173), False, 'from numpy import log, exp, sqrt, 
absolute, array, sum\n'), ((5511, 5538), 'numpy.sum', 'sum', (['(Ki0 * liq_compositions)'], {}), '(Ki0 * liq_compositions)\n', (5514, 5538), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((5617, 5643), 'numpy.sum', 'sum', (['(Ki * liq_compositions)'], {}), '(Ki * liq_compositions)\n', (5620, 5643), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((5915, 5941), 'numpy.sum', 'sum', (['(Ki * liq_compositions)'], {}), '(Ki * liq_compositions)\n', (5918, 5941), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((6529, 6540), 'numpy.absolute', 'absolute', (['E'], {}), '(E)\n', (6537, 6540), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((6878, 6905), 'numpy.sum', 'sum', (['(vap_compositions / Ki0)'], {}), '(vap_compositions / Ki0)\n', (6881, 6905), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((6917, 6925), 'numpy.log', 'log', (['Sx0'], {}), '(Sx0)\n', (6920, 6925), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((6989, 7015), 'numpy.sum', 'sum', (['(vap_compositions / Ki)'], {}), '(vap_compositions / Ki)\n', (6992, 7015), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((7287, 7313), 'numpy.sum', 'sum', (['(vap_compositions / Ki)'], {}), '(vap_compositions / Ki)\n', (7290, 7313), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((7324, 7331), 'numpy.log', 'log', (['Sx'], {}), '(Sx)\n', (7327, 7331), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((7902, 7913), 'numpy.absolute', 'absolute', (['E'], {}), '(E)\n', (7910, 7913), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((8251, 8278), 'numpy.sum', 'sum', (['(vap_compositions / Ki0)'], {}), '(vap_compositions / Ki0)\n', (8254, 8278), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((8357, 8383), 'numpy.sum', 'sum', (['(vap_compositions / Ki)'], {}), '(vap_compositions / Ki)\n', (8360, 8383), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((8655, 8681), 'numpy.sum', 'sum', (['(vap_compositions / Ki)'], {}), '(vap_compositions / Ki)\n', (8658, 8681), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((8991, 9004), 'numpy.absolute', 'absolute', (['tau'], {}), '(tau)\n', (8999, 9004), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((9221, 9277), 'numpy.sum', 'sum', (['(feed_compositions * (Ki - 1) / (1 + v_f * (Ki - 1)))'], {}), '(feed_compositions * (Ki - 1) / (1 + v_f * (Ki - 1)))\n', (9224, 9277), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((9283, 9350), 'numpy.sum', 'sum', (['(-feed_compositions * (Ki - 1) ** 2 / (1 + v_f * (Ki - 1)) ** 2)'], {}), '(-feed_compositions * (Ki - 1) ** 2 / (1 + v_f * (Ki - 1)) ** 2)\n', (9286, 9350), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((9434, 9456), 'numpy.sum', 'sum', (['liq_compositions0'], {}), '(liq_compositions0)\n', (9437, 9456), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((9563, 9585), 'numpy.sum', 'sum', (['vap_compositions0'], {}), '(vap_compositions0)\n', (9566, 9585), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((10023, 10078), 'scipy.integrate.quad', 'quad', (['heat_capacity_equation', '(298)', 't'], {'args': '(constants,)'}), '(heat_capacity_equation, 298, t, args=(constants,))\n', (10027, 10078), False, 'from scipy.integrate import quad\n'), ((10405, 10460), 'scipy.integrate.quad', 'quad', 
(['heat_capacity_equation', '(298)', 't'], {'args': '(constants,)'}), '(heat_capacity_equation, 298, t, args=(constants,))\n', (10409, 10460), False, 'from scipy.integrate import quad\n'), ((9648, 9737), 'numpy.absolute', 'absolute', (['(liq_compositions * liq_fugacity_coef - vap_compositions * vap_fugacity_coef)'], {}), '(liq_compositions * liq_fugacity_coef - vap_compositions *\n vap_fugacity_coef)\n', (9656, 9737), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((10482, 10488), 'numpy.log', 'log', (['p'], {}), '(p)\n', (10485, 10488), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n'), ((10827, 10837), 'numpy.log', 'log', (['(z - B)'], {}), '(z - B)\n', (10830, 10837), False, 'from numpy import log, exp, sqrt, absolute, array, sum\n')]
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import json
import math
import numpy as np
import tokenization
import six
import tensorflow as tf
from tensorflow import logging
class EvalResults(object):
def __init__(self, capacity):
self.metrics = {}
self.capacity = capacity
def add_dict(self, indict):
        for key, value in six.iteritems(indict):
if key in self.metrics:
if len(self.metrics[key]) == self.capacity:
self.metrics[key].pop(0)
else:
self.metrics[key] = []
if isinstance(value, list):
self.metrics[key].append(value[-1])
else:
self.metrics[key].append(value)
def to_string(self):
res = ["%s:%.2f"%(key, np.mean(self.metrics[key]))
for key in self.metrics.keys()]
return " ".join(res)
class CQAExample(object):
"""A single training/test example."""
def __init__(self,
qas_id,
question_text,
doc_tokens,
orig_answer_text=None,
start_position=None,
end_position=None):
self.qas_id = qas_id
self.question_text = question_text
self.doc_tokens = doc_tokens
self.orig_answer_text = orig_answer_text
self.start_position = start_position
self.end_position = end_position
def __str__(self):
return self.__repr__()
def __repr__(self):
s = ""
s += "qas_id: %s" % (tokenization.printable_text(self.qas_id))
s += ", question_text: %s" % (
tokenization.printable_text(self.question_text))
s += ", doc_tokens: [%s]" % (" ".join(self.doc_tokens))
if self.start_position:
s += ", start_position: %d" % (self.start_position)
if self.start_position:
s += ", end_position: %d" % (self.end_position)
return s
class InputExample(object):
"""A single training/test example for simple sequence classification."""
def __init__(self, guid, text_a, text_b=None, label=None):
"""Constructs a InputExample.
Args:
guid: Unique id for the example.
text_a: string. The untokenized text of the first sequence. For single
sequence tasks, only this sequence must be specified.
text_b: (Optional) string. The untokenized text of the second sequence.
Only must be specified for sequence pair tasks.
label: (Optional) string. The label of the example. This should be
specified for train and dev examples, but not for test examples.
"""
self.guid = guid
self.text_a = text_a
self.text_b = text_b
self.label = label
class InputPretrainExample(object):
"""A single training/test example for pretrain task."""
def __init__(self, guid, input_ids, input_mask, segment_ids, masked_lm_positions,
masked_lm_ids, masked_lm_weights, next_sentence_labels):
"""Constructs a InputExample.
Args:
guid: Unique id for the example.
text_a: string. The untokenized text of the first sequence. For single
sequence tasks, only this sequence must be specified.
text_b: (Optional) string. The untokenized text of the second sequence.
Only must be specified for sequence pair tasks.
label: (Optional) string. The label of the example. This should be
specified for train and dev examples, but not for test examples.
"""
self.guid = guid
self.input_ids = input_ids
self.input_mask = input_mask
self.segment_ids = segment_ids
self.masked_lm_positions = masked_lm_positions
self.masked_lm_ids = masked_lm_ids
self.masked_lm_weights = masked_lm_weights
self.next_sentence_labels = next_sentence_labels
class InputCQAFeatures(object):
"""A single set of features of data."""
def __init__(self,
unique_id,
example_index,
doc_span_index,
tokens,
token_to_orig_map,
token_is_max_context,
input_ids,
input_mask,
segment_ids,
start_position=None,
end_position=None):
self.unique_id = unique_id
self.example_index = example_index
self.doc_span_index = doc_span_index
self.tokens = tokens
self.token_to_orig_map = token_to_orig_map
self.token_is_max_context = token_is_max_context
self.input_ids = input_ids
self.input_mask = input_mask
self.segment_ids = segment_ids
self.start_position = start_position
self.end_position = end_position
class InputFeatures(object):
"""A single set of features of data."""
def __init__(self, input_ids, input_mask, segment_ids, label_id):
self.input_ids = input_ids
self.input_mask = input_mask
self.segment_ids = segment_ids
self.label_id = label_id
def read_tsv(input_file, quotechar=None):
"""Reads a tab separated value file."""
import csv
with tf.gfile.Open(input_file, "r") as f:
reader = csv.reader(f, delimiter="\t", quotechar=quotechar)
lines = []
for line in reader:
lines.append(line)
return lines
def read_examples_do_nothing(input_file, is_training):
"""do nothing but just return input_file, reserved for tfrecord data"""
return input_file
def read_textmatch_examples(input_file, is_training):
"""Creates examples for the training and dev sets."""
if is_training:
set_type = 'train'
else:
set_type = 'dev'
examples = []
for (i, line) in enumerate(read_tsv(input_file)):
if i == 0:
continue
guid = "%s-%s" % (set_type, i)
text_a = tokenization.convert_to_unicode(line[3])
text_b = tokenization.convert_to_unicode(line[4])
label = tokenization.convert_to_unicode(line[0])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
def read_cikm_examples(input_file, is_training):
"""Creates examples for the training and dev sets."""
if is_training:
set_type = 'train'
else:
set_type = 'dev'
examples = []
lengths = []
for (i, line) in enumerate(read_tsv(input_file)):
if i == 0:
continue
guid = "%s-%s" % (set_type, i)
lengths.append(len(line[1].split()) + len(line[2].split()))
text_a = tokenization.convert_to_unicode(line[1])
text_b = tokenization.convert_to_unicode(line[2])
label = tokenization.convert_to_unicode(line[0])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
print('length', np.mean(lengths))
raise Exception
return examples
def read_review_examples(input_file, is_training):
"""Creates examples for the training and dev sets."""
fold_id = 9 # fold 9 for training, the rest for testing
if is_training:
set_type = 'train'
else:
set_type = 'dev'
examples = []
lengths = []
for (i, line) in enumerate(read_tsv(input_file)):
# if is_training:
# if int(line[1]) == fold_id:
# continue
# else:
# if int(line[1]) != fold_id:
# continue
if int(line[1]) != fold_id:
continue
lengths.append(len(line[2].split()))
# guid = "%s-%s" % (set_type, i)
# text_a = tokenization.convert_to_unicode(line[2])
# text_b = None
# label = tokenization.convert_to_unicode(line[0])
# examples.append(
# InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
print('length', np.mean(lengths))
raise Exception
return examples
def read_ae_examples(input_file, is_training):
"""Creates examples for the training and dev sets."""
if is_training:
set_type = 'train'
else:
set_type = 'dev'
examples = []
for (i, line) in enumerate(read_tsv(input_file)):
if i == 0:
continue
guid = "%s-%s" % (set_type, i)
text_a = ' '.join(tokenization.convert_to_unicode(line[0]).split('|'))
text_b = ' '.join(tokenization.convert_to_unicode(line[1]).split('|'))
if float(line[2]) > 0.5:
label = tokenization.convert_to_unicode('1')
else:
label = tokenization.convert_to_unicode('0')
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
def read_pretrain_examples(input_file, is_training):
"""Creates examples for the training and dev sets."""
fold_id = 9 # fold 9 for training, the rest for testing
if is_training:
set_type = 'train'
else:
set_type = 'dev'
examples = []
for (i, line) in enumerate(read_tsv(input_file)):
tokens = line
if i < 3:
print(i, line)
if len(tokens) != 7:
print(len(tokens))
for (i, token) in enumerate(tokens):
print(i, token)
raise Exception
guid = "%s-%s" % (set_type, i)
# print(len(tokens[0].split(',')), len(tokens[1].split(',')),
# len(tokens[2].split(',')), len(tokens[3].split(',')),
# len(tokens[4].split(',')), len(tokens[5].split(',')),
# len(tokens[6].split(',')))
examples.append(InputPretrainExample(
guid=guid,
input_ids=[int(idx) for idx in tokens[0].split(',')],
input_mask=[int(idx) for idx in tokens[1].split(',')],
segment_ids=[int(idx) for idx in tokens[2].split(',')],
masked_lm_positions=[int(idx) for idx in tokens[3].split(',')],
masked_lm_ids=[int(idx) for idx in tokens[4].split(',')],
masked_lm_weights=[float(idx) for idx in tokens[5].split(',')],
next_sentence_labels=int(tokens[6])))
return examples
# def read_coqa_examples(input_file, is_training):
# """Read a CoQA json file into a list of CQAExample."""
# with tf.gfile.Open(input_file, "r") as reader:
# input_data = json.load(reader)["data"]
#
# def is_whitespace(c):
# if c == " " or c == "\t" or c == "\r" or c == "\n" or ord(c) == 0x202F:
# return True
# return False
#
# examples = []
# for entry in input_data[:10]:
# paragraph_text = entry["story"]
# doc_tokens = []
# char_to_word_offset = []
# prev_is_whitespace = True
# for c in paragraph_text:
# if is_whitespace(c):
# prev_is_whitespace = True
# else:
# if prev_is_whitespace:
# doc_tokens.append(c)
# else:
# doc_tokens[-1] += c
# prev_is_whitespace = False
# char_to_word_offset.append(len(doc_tokens) - 1)
#
# ############################################################
# # convert the convasational QAs to squad format, with history
# ############################################################
#
# story_id = entry['id']
# questions = [(item['input_text'], story_id + str(item['turn_id'])) for item in entry['questions']] # [(question, question_id), ()]
# answers = [(item['span_text'], item['span_start']) for item in entry['answers']]
#
# qas = []
# for i, (question, answer) in enumerate(zip(questions, answers)):
# start_index = 0 if i - int(FLAGS.history) < 0 else i - int(FLAGS.history)
# end_index = i
# question_with_histories = ''
# # prepend historical questions and answers
# for each_question, each_answer in zip(questions[start_index: end_index], answers[start_index: end_index]):
# question_with_histories += each_question[0] + ' ' + each_answer[0] + ' '
# # add the current question
# question_with_histories += question[0]
# if answer[1] == -1:
# qas.append({'id': question[1], 'question': question_with_histories, 'answers': [{'answer_start': -1, 'text': "unknown"}]})
# else:
# qas.append({'id': question[1], 'question': question_with_histories, 'answers': [{'answer_start': answer[1], 'text': answer[0]}]})
#
# for qa in qas:
# qas_id = qa["id"]
# question_text = qa["question"]
# start_position = None
# end_position = None
# orig_answer_text = None
#
# # if is_training:
# # we read in the groundtruth answer bothing druing training and predicting, because we need to compute acc and f1 at predicting time.
# if len(qa["answers"]) != 1:
# raise ValueError(
# "For training, each question should have exactly 1 answer.")
# answer = qa["answers"][0]
# orig_answer_text = answer["text"]
# answer_offset = answer["answer_start"]
# answer_length = len(orig_answer_text)
# start_position = char_to_word_offset[answer_offset]
# end_position = char_to_word_offset[answer_offset + answer_length - 1]
# # Only add answers where the text can be exactly recovered from the
# # document. If this CAN'T happen it's likely due to weird Unicode
# # stuff so we will just skip the example.
# #
# # Note that this means for training mode, every example is NOT
# # guaranteed to be preserved.
# actual_text = " ".join(doc_tokens[start_position:(end_position + 1)])
# cleaned_answer_text = " ".join(
# tokenization.whitespace_tokenize(orig_answer_text))
# if actual_text.find(cleaned_answer_text) == -1:
# logging.warning("Could not find answer: '%s' vs. '%s'",
# actual_text, cleaned_answer_text)
# continue
#
# example = CQAExample(
# qas_id=qas_id,
# question_text=question_text,
# doc_tokens=doc_tokens,
# orig_answer_text=orig_answer_text,
# start_position=start_position,
# end_position=end_position)
# examples.append(example)
# return examples
def convert_examples_to_features_do_nothing(examples, tokenizer, max_seq_length,
doc_stride, max_query_length, is_training):
"""do nothing but just return examples, reserved for tfrecord data"""
return examples
def convert_examples_to_features_cqa(examples, tokenizer, max_seq_length,
doc_stride, max_query_length, is_training):
"""Loads a data file into a list of `InputBatch`s."""
unique_id = 1000000000
features = []
for (example_index, example) in enumerate(examples):
query_tokens = tokenizer.tokenize(example.question_text)
if len(query_tokens) > max_query_length:
query_tokens = query_tokens[0:max_query_length]
tok_to_orig_index = []
orig_to_tok_index = []
all_doc_tokens = []
for (i, token) in enumerate(example.doc_tokens):
orig_to_tok_index.append(len(all_doc_tokens))
sub_tokens = tokenizer.tokenize(token)
for sub_token in sub_tokens:
tok_to_orig_index.append(i)
all_doc_tokens.append(sub_token)
tok_start_position = None
tok_end_position = None
# if is_training:
# we do this for both training and predicting, because we need also start/end position at testing time to compute acc and f1
tok_start_position = orig_to_tok_index[example.start_position]
if example.end_position < len(example.doc_tokens) - 1:
tok_end_position = orig_to_tok_index[example.end_position + 1] - 1
else:
tok_end_position = len(all_doc_tokens) - 1
(tok_start_position, tok_end_position) = _improve_answer_span(
all_doc_tokens, tok_start_position, tok_end_position, tokenizer,
example.orig_answer_text)
# The -3 accounts for [CLS], [SEP] and [SEP]
max_tokens_for_doc = max_seq_length - len(query_tokens) - 3
# We can have documents that are longer than the maximum sequence length.
# To deal with this we do a sliding window approach, where we take chunks
# of the up to our max length with a stride of `doc_stride`.
_DocSpan = collections.namedtuple( # pylint: disable=invalid-name
"DocSpan", ["start", "length"])
doc_spans = []
start_offset = 0
while start_offset < len(all_doc_tokens):
length = len(all_doc_tokens) - start_offset
if length > max_tokens_for_doc:
length = max_tokens_for_doc
doc_spans.append(_DocSpan(start=start_offset, length=length))
if start_offset + length == len(all_doc_tokens):
break
start_offset += min(length, doc_stride)
for (doc_span_index, doc_span) in enumerate(doc_spans):
tokens = []
token_to_orig_map = {}
token_is_max_context = {}
segment_ids = []
tokens.append("[CLS]")
segment_ids.append(0)
for token in query_tokens:
tokens.append(token)
segment_ids.append(0)
tokens.append("[SEP]")
segment_ids.append(0)
for i in range(doc_span.length):
split_token_index = doc_span.start + i
token_to_orig_map[len(tokens)] = tok_to_orig_index[split_token_index]
is_max_context = _check_is_max_context(doc_spans, doc_span_index,
split_token_index)
token_is_max_context[len(tokens)] = is_max_context
tokens.append(all_doc_tokens[split_token_index])
segment_ids.append(1)
tokens.append("[SEP]")
segment_ids.append(1)
input_ids = tokenizer.convert_tokens_to_ids(tokens)
# The mask has 1 for real tokens and 0 for padding tokens. Only real
# tokens are attended to.
input_mask = [1] * len(input_ids)
# Zero-pad up to the sequence length.
while len(input_ids) < max_seq_length:
input_ids.append(0)
input_mask.append(0)
segment_ids.append(0)
assert len(input_ids) == max_seq_length
assert len(input_mask) == max_seq_length
assert len(segment_ids) == max_seq_length
start_position = None
end_position = None
if is_training:
# For training, if our document chunk does not contain an annotation
# we throw it out, since there is nothing to predict.
doc_start = doc_span.start
doc_end = doc_span.start + doc_span.length - 1
if (example.start_position < doc_start or
example.end_position < doc_start or
example.start_position > doc_end or example.end_position > doc_end):
continue
doc_offset = len(query_tokens) + 2
start_position = tok_start_position - doc_start + doc_offset
end_position = tok_end_position - doc_start + doc_offset
else:
                # when predicting, we do not throw out any doc span to prevent label leaking
doc_start = doc_span.start
doc_end = doc_span.start + doc_span.length - 1
doc_offset = len(query_tokens) + 2
start_position = tok_start_position - doc_start + doc_offset
end_position = tok_end_position - doc_start + doc_offset
if example_index < 20:
logging.info("*** Example ***")
logging.info("unique_id: %s" % (unique_id))
logging.info("example_index: %s" % (example_index))
logging.info("doc_span_index: %s" % (doc_span_index))
logging.info("tokens: %s" % " ".join(
[tokenization.printable_text(x) for x in tokens]))
logging.info("token_to_orig_map: %s" % " ".join(
["%d:%d" % (x, y) for (x, y) in six.iteritems(token_to_orig_map)]))
logging.info("token_is_max_context: %s" % " ".join([
"%d:%s" % (x, y) for (x, y) in six.iteritems(token_is_max_context)
]))
logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids]))
logging.info(
"input_mask: %s" % " ".join([str(x) for x in input_mask]))
logging.info(
"segment_ids: %s" % " ".join([str(x) for x in segment_ids]))
if is_training:
answer_text = " ".join(tokens[start_position:(end_position + 1)])
logging.info("start_position: %d" % (start_position))
logging.info("end_position: %d" % (end_position))
logging.info(
"answer: %s" % (tokenization.printable_text(answer_text)))
features.append(
                InputCQAFeatures(
unique_id=unique_id,
example_index=example_index,
doc_span_index=doc_span_index,
tokens=tokens,
token_to_orig_map=token_to_orig_map,
token_is_max_context=token_is_max_context,
input_ids=input_ids,
input_mask=input_mask,
segment_ids=segment_ids,
start_position=start_position,
end_position=end_position))
unique_id += 1
return features
def convert_examples_to_features(examples, label_list, max_seq_length,
tokenizer, model_type='classification'):
"""Loads a data file into a list of `InputBatch`s."""
label_map = {}
for (i, label) in enumerate(label_list):
label_map[label] = i
features = []
for (ex_index, example) in enumerate(examples):
tokens_a = tokenizer.tokenize(example.text_a)
tokens_b = None
if example.text_b:
tokens_b = tokenizer.tokenize(example.text_b)
if tokens_b:
# Modifies `tokens_a` and `tokens_b` in place so that the total
# length is less than the specified length.
# Account for [CLS], [SEP], [SEP] with "- 3"
_truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3)
else:
# Account for [CLS] and [SEP] with "- 2"
if len(tokens_a) > max_seq_length - 2:
tokens_a = tokens_a[0:(max_seq_length - 2)]
# The convention in BERT is:
# (a) For sequence pairs:
# tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]
# type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1
# (b) For single sequences:
# tokens: [CLS] the dog is hairy . [SEP]
# type_ids: 0 0 0 0 0 0 0
#
# Where "type_ids" are used to indicate whether this is the first
# sequence or the second sequence. The embedding vectors for `type=0` and
# `type=1` were learned during pre-training and are added to the wordpiece
# embedding vector (and position vector). This is not *strictly* necessary
        # since the [SEP] token unambiguously separates the sequences, but it makes
# it easier for the model to learn the concept of sequences.
#
# For classification tasks, the first vector (corresponding to [CLS]) is
# used as as the "sentence vector". Note that this only makes sense because
# the entire model is fine-tuned.
tokens = []
segment_ids = []
tokens.append("[CLS]")
segment_ids.append(0)
for token in tokens_a:
tokens.append(token)
segment_ids.append(0)
tokens.append("[SEP]")
segment_ids.append(0)
if tokens_b:
for token in tokens_b:
tokens.append(token)
segment_ids.append(1)
tokens.append("[SEP]")
segment_ids.append(1)
input_ids = tokenizer.convert_tokens_to_ids(tokens)
# The mask has 1 for real tokens and 0 for padding tokens. Only real
# tokens are attended to.
input_mask = [1] * len(input_ids)
# Zero-pad up to the sequence length.
while len(input_ids) < max_seq_length:
input_ids.append(0)
input_mask.append(0)
segment_ids.append(0)
assert len(input_ids) == max_seq_length
assert len(input_mask) == max_seq_length
assert len(segment_ids) == max_seq_length
if model_type == 'classification':
label_id = label_map[example.label]
else:
label_id = float(example.label)
if ex_index < 5:
logging.info("*** Example ***")
logging.info("guid: %s" % (example.guid))
logging.info("tokens: %s" % " ".join(
[tokenization.printable_text(x) for x in tokens]))
logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids]))
logging.info("input_mask: %s" % " ".join([str(x) for x in input_mask]))
logging.info(
"segment_ids: %s" % " ".join([str(x) for x in segment_ids]))
logging.info("label: %s (id = %d)" % (example.label, label_id))
features.append(
InputFeatures(
input_ids=input_ids,
input_mask=input_mask,
segment_ids=segment_ids,
label_id=label_id))
return features
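# Illustrative usage of the classification converter (file name and label set
# are placeholders; a FullTokenizer from the accompanying `tokenization`
# module is assumed):
#
#   tokenizer = tokenization.FullTokenizer(vocab_file="vocab.txt",
#                                          do_lower_case=True)
#   examples = read_textmatch_examples("train.tsv", is_training=True)
#   features = convert_examples_to_features(
#       examples, label_list=["0", "1"], max_seq_length=128, tokenizer=tokenizer)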
def _truncate_seq_pair(tokens_a, tokens_b, max_length):
"""Truncates a sequence pair in place to the maximum length."""
# This is a simple heuristic which will always truncate the longer sequence
# one token at a time. This makes more sense than truncating an equal percent
# of tokens from each, since if one sequence is very short then each token
# that's truncated likely contains more information than a longer sequence.
while True:
total_length = len(tokens_a) + len(tokens_b)
if total_length <= max_length:
break
if len(tokens_a) > len(tokens_b):
tokens_a.pop()
else:
tokens_b.pop()
def _improve_answer_span(doc_tokens, input_start, input_end, tokenizer,
orig_answer_text):
"""Returns tokenized answer spans that better match the annotated answer."""
# The SQuAD annotations are character based. We first project them to
# whitespace-tokenized words. But then after WordPiece tokenization, we can
# often find a "better match". For example:
#
# Question: What year was <NAME> born?
# Context: The leader was <NAME> (1895-1943).
# Answer: 1895
#
# The original whitespace-tokenized answer will be "(1895-1943).". However
# after tokenization, our tokens will be "( 1895 - 1943 ) .". So we can match
# the exact answer, 1895.
#
# However, this is not always possible. Consider the following:
#
    # Question: What country is the top exporter of electronics?
    # Context: The Japanese electronics industry is the largest in the world.
# Answer: Japan
#
# In this case, the annotator chose "Japan" as a character sub-span of
# the word "Japanese". Since our WordPiece tokenizer does not split
# "Japanese", we just use "Japanese" as the annotation. This is fairly rare
# in SQuAD, but does happen.
tok_answer_text = " ".join(tokenizer.tokenize(orig_answer_text))
for new_start in range(input_start, input_end + 1):
for new_end in range(input_end, new_start - 1, -1):
text_span = " ".join(doc_tokens[new_start:(new_end + 1)])
if text_span == tok_answer_text:
return (new_start, new_end)
return (input_start, input_end)
def _check_is_max_context(doc_spans, cur_span_index, position):
"""Check if this is the 'max context' doc span for the token."""
# Because of the sliding window approach taken to scoring documents, a single
# token can appear in multiple documents. E.g.
# Doc: the man went to the store and bought a gallon of milk
# Span A: the man went to the
# Span B: to the store and bought
# Span C: and bought a gallon of
# ...
#
# Now the word 'bought' will have two scores from spans B and C. We only
# want to consider the score with "maximum context", which we define as
# the *minimum* of its left and right context (the *sum* of left and
# right context will always be the same, of course).
#
# In the example the maximum context for 'bought' would be span C since
# it has 1 left context and 3 right context, while span B has 4 left context
# and 0 right context.
best_score = None
best_span_index = None
for (span_index, doc_span) in enumerate(doc_spans):
end = doc_span.start + doc_span.length - 1
if position < doc_span.start:
continue
if position > end:
continue
num_left_context = position - doc_span.start
num_right_context = end - position
score = min(num_left_context, num_right_context) + 0.01 * doc_span.length
if best_score is None or score > best_score:
best_score = score
best_span_index = span_index
return cur_span_index == best_span_index
def write_predictions(all_examples, all_features, all_results, n_best_size,
max_answer_length, do_lower_case, output_prediction_file,
output_nbest_file):
"""Write final predictions to the json file."""
logging.info("Writing predictions to: %s" % (output_prediction_file))
logging.info("Writing nbest to: %s" % (output_nbest_file))
example_index_to_features = collections.defaultdict(list)
for feature in all_features:
example_index_to_features[feature.example_index].append(feature)
unique_id_to_result = {}
for result in all_results:
unique_id_to_result[result.unique_id] = result
_PrelimPrediction = collections.namedtuple( # pylint: disable=invalid-name
"PrelimPrediction",
["feature_index", "start_index", "end_index", "start_logit", "end_logit"])
all_predictions = collections.OrderedDict()
all_nbest_json = collections.OrderedDict()
for (example_index, example) in enumerate(all_examples):
features = example_index_to_features[example_index]
prelim_predictions = []
for (feature_index, feature) in enumerate(features):
result = unique_id_to_result[feature.unique_id]
start_indexes = _get_best_indexes(result.start_logits, n_best_size)
end_indexes = _get_best_indexes(result.end_logits, n_best_size)
for start_index in start_indexes:
for end_index in end_indexes:
# We could hypothetically create invalid predictions, e.g., predict
# that the start of the span is in the question. We throw out all
# invalid predictions.
if start_index >= len(feature.tokens):
continue
if end_index >= len(feature.tokens):
continue
if start_index not in feature.token_to_orig_map:
continue
if end_index not in feature.token_to_orig_map:
continue
if not feature.token_is_max_context.get(start_index, False):
continue
if end_index < start_index:
continue
length = end_index - start_index + 1
if length > max_answer_length:
continue
prelim_predictions.append(
_PrelimPrediction(
feature_index=feature_index,
start_index=start_index,
end_index=end_index,
start_logit=result.start_logits[start_index],
end_logit=result.end_logits[end_index]))
prelim_predictions = sorted(
prelim_predictions,
key=lambda x: (x.start_logit + x.end_logit),
reverse=True)
_NbestPrediction = collections.namedtuple( # pylint: disable=invalid-name
"NbestPrediction", ["text", "start_logit", "end_logit"])
seen_predictions = {}
nbest = []
for pred in prelim_predictions:
if len(nbest) >= n_best_size:
break
feature = features[pred.feature_index]
tok_tokens = feature.tokens[pred.start_index:(pred.end_index + 1)]
orig_doc_start = feature.token_to_orig_map[pred.start_index]
orig_doc_end = feature.token_to_orig_map[pred.end_index]
orig_tokens = example.doc_tokens[orig_doc_start:(orig_doc_end + 1)]
tok_text = " ".join(tok_tokens)
# De-tokenize WordPieces that have been split off.
tok_text = tok_text.replace(" ##", "")
tok_text = tok_text.replace("##", "")
# Clean whitespace
tok_text = tok_text.strip()
tok_text = " ".join(tok_text.split())
orig_text = " ".join(orig_tokens)
final_text = get_final_text(tok_text, orig_text, do_lower_case)
if final_text in seen_predictions:
continue
seen_predictions[final_text] = True
nbest.append(
_NbestPrediction(
text=final_text,
start_logit=pred.start_logit,
end_logit=pred.end_logit))
# In very rare edge cases we could have no valid predictions. So we
# just create a nonce prediction in this case to avoid failure.
if not nbest:
nbest.append(
_NbestPrediction(text="empty", start_logit=0.0, end_logit=0.0))
assert len(nbest) >= 1
total_scores = []
for entry in nbest:
total_scores.append(entry.start_logit + entry.end_logit)
probs = _compute_softmax(total_scores)
nbest_json = []
for (i, entry) in enumerate(nbest):
output = collections.OrderedDict()
output["text"] = entry.text
output["probability"] = probs[i]
output["start_logit"] = entry.start_logit
output["end_logit"] = entry.end_logit
nbest_json.append(output)
assert len(nbest_json) >= 1
all_predictions[example.qas_id] = nbest_json[0]["text"]
all_nbest_json[example.qas_id] = nbest_json
with tf.gfile.GFile(output_prediction_file, "w") as writer:
writer.write(json.dumps(all_predictions, indent=4) + "\n")
with tf.gfile.GFile(output_nbest_file, "w") as writer:
writer.write(json.dumps(all_nbest_json, indent=4) + "\n")
def get_final_text(pred_text, orig_text, do_lower_case):
"""Project the tokenized prediction back to the original text."""
# When we created the data, we kept track of the alignment between original
# (whitespace tokenized) tokens and our WordPiece tokenized tokens. So
# now `orig_text` contains the span of our original text corresponding to the
# span that we predicted.
#
# However, `orig_text` may contain extra characters that we don't want in
# our prediction.
#
# For example, let's say:
# pred_text = <NAME>
# orig_text = <NAME>
#
# We don't want to return `orig_text` because it contains the extra "'s".
#
# We don't want to return `pred_text` because it's already been normalized
# (the SQuAD eval script also does punctuation stripping/lower casing but
# our tokenizer does additional normalization like stripping accent
# characters).
#
# What we really want to return is "<NAME>".
#
    # Therefore, we have to apply a semi-complicated alignment heuristic between
    # `pred_text` and `orig_text` to get a character-to-character alignment. This
# can fail in certain cases in which case we just return `orig_text`.
def _strip_spaces(text):
ns_chars = []
ns_to_s_map = collections.OrderedDict()
for (i, c) in enumerate(text):
if c == " ":
continue
ns_to_s_map[len(ns_chars)] = i
ns_chars.append(c)
ns_text = "".join(ns_chars)
return (ns_text, ns_to_s_map)
# We first tokenize `orig_text`, strip whitespace from the result
# and `pred_text`, and check if they are the same length. If they are
# NOT the same length, the heuristic has failed. If they are the same
# length, we assume the characters are one-to-one aligned.
tokenizer = tokenization.BasicTokenizer(do_lower_case=do_lower_case)
tok_text = " ".join(tokenizer.tokenize(orig_text))
start_position = tok_text.find(pred_text)
if start_position == -1:
if FLAGS.verbose_logging:
logging.info(
"Unable to find text: '%s' in '%s'" % (pred_text, orig_text))
return orig_text
end_position = start_position + len(pred_text) - 1
(orig_ns_text, orig_ns_to_s_map) = _strip_spaces(orig_text)
(tok_ns_text, tok_ns_to_s_map) = _strip_spaces(tok_text)
if len(orig_ns_text) != len(tok_ns_text):
if FLAGS.verbose_logging:
logging.info("Length not equal after stripping spaces: '%s' vs '%s'",
orig_ns_text, tok_ns_text)
return orig_text
# We then project the characters in `pred_text` back to `orig_text` using
# the character-to-character alignment.
tok_s_to_ns_map = {}
for (i, tok_index) in six.iteritems(tok_ns_to_s_map):
tok_s_to_ns_map[tok_index] = i
orig_start_position = None
if start_position in tok_s_to_ns_map:
ns_start_position = tok_s_to_ns_map[start_position]
if ns_start_position in orig_ns_to_s_map:
orig_start_position = orig_ns_to_s_map[ns_start_position]
if orig_start_position is None:
if FLAGS.verbose_logging:
logging.info("Couldn't map start position")
return orig_text
orig_end_position = None
if end_position in tok_s_to_ns_map:
ns_end_position = tok_s_to_ns_map[end_position]
if ns_end_position in orig_ns_to_s_map:
orig_end_position = orig_ns_to_s_map[ns_end_position]
if orig_end_position is None:
if FLAGS.verbose_logging:
logging.info("Couldn't map end position")
return orig_text
output_text = orig_text[orig_start_position:(orig_end_position + 1)]
return output_text
def _get_best_indexes(logits, n_best_size):
"""Get the n-best logits from a list."""
index_and_score = sorted(enumerate(logits), key=lambda x: x[1], reverse=True)
best_indexes = []
for i in range(len(index_and_score)):
if i >= n_best_size:
break
best_indexes.append(index_and_score[i][0])
return best_indexes
def _compute_softmax(scores):
"""Compute softmax probability over raw logits."""
if not scores:
return []
max_score = None
for score in scores:
if max_score is None or score > max_score:
max_score = score
exp_scores = []
total_sum = 0.0
for score in scores:
x = math.exp(score - max_score)
exp_scores.append(x)
total_sum += x
probs = []
for score in exp_scores:
probs.append(score / total_sum)
return probs
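# Quick illustration: _compute_softmax([1.0, 2.0, 3.0]) is roughly
# [0.09, 0.24, 0.67] (exponentials shifted by the max score, normalised to one).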
def get_dict(train_batch):
b_input_ids = train_batch['input_ids']
b_input_mask = train_batch['input_mask']
b_segment_ids = train_batch['segment_ids']
b_labels = train_batch['label_id']
return b_input_ids,b_input_mask,b_segment_ids,b_labels
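# get_dict simply unpacks one feature batch (for example an element yielded by
# a tf.data pipeline built from these InputFeatures) into its four tensors.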
|
[
"math.exp",
"tokenization.printable_text",
"csv.reader",
"tensorflow.logging.info",
"json.dumps",
"collections.defaultdict",
"numpy.mean",
"tensorflow.gfile.GFile",
"collections.namedtuple",
"tokenization.BasicTokenizer",
"collections.OrderedDict",
"tokenization.convert_to_unicode",
"six.iteritems",
"tensorflow.gfile.Open"
] |
[((30082, 30149), 'tensorflow.logging.info', 'logging.info', (["('Writing predictions to: %s' % output_prediction_file)"], {}), "('Writing predictions to: %s' % output_prediction_file)\n", (30094, 30149), False, 'from tensorflow import logging\n'), ((30156, 30212), 'tensorflow.logging.info', 'logging.info', (["('Writing nbest to: %s' % output_nbest_file)"], {}), "('Writing nbest to: %s' % output_nbest_file)\n", (30168, 30212), False, 'from tensorflow import logging\n'), ((30248, 30277), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (30271, 30277), False, 'import collections\n'), ((30525, 30646), 'collections.namedtuple', 'collections.namedtuple', (['"""PrelimPrediction"""', "['feature_index', 'start_index', 'end_index', 'start_logit', 'end_logit']"], {}), "('PrelimPrediction', ['feature_index', 'start_index',\n 'end_index', 'start_logit', 'end_logit'])\n", (30547, 30646), False, 'import collections\n'), ((30715, 30740), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (30738, 30740), False, 'import collections\n'), ((30762, 30787), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (30785, 30787), False, 'import collections\n'), ((37322, 37378), 'tokenization.BasicTokenizer', 'tokenization.BasicTokenizer', ([], {'do_lower_case': 'do_lower_case'}), '(do_lower_case=do_lower_case)\n', (37349, 37378), False, 'import tokenization\n'), ((38272, 38302), 'six.iteritems', 'six.iteritems', (['tok_ns_to_s_map'], {}), '(tok_ns_to_s_map)\n', (38285, 38302), False, 'import six\n'), ((5270, 5300), 'tensorflow.gfile.Open', 'tf.gfile.Open', (['input_file', '"""r"""'], {}), "(input_file, 'r')\n", (5283, 5300), True, 'import tensorflow as tf\n'), ((5322, 5372), 'csv.reader', 'csv.reader', (['f'], {'delimiter': '"""\t"""', 'quotechar': 'quotechar'}), "(f, delimiter='\\t', quotechar=quotechar)\n", (5332, 5372), False, 'import csv\n'), ((5981, 6021), 'tokenization.convert_to_unicode', 'tokenization.convert_to_unicode', (['line[3]'], {}), '(line[3])\n', (6012, 6021), False, 'import tokenization\n'), ((6039, 6079), 'tokenization.convert_to_unicode', 'tokenization.convert_to_unicode', (['line[4]'], {}), '(line[4])\n', (6070, 6079), False, 'import tokenization\n'), ((6096, 6136), 'tokenization.convert_to_unicode', 'tokenization.convert_to_unicode', (['line[0]'], {}), '(line[0])\n', (6127, 6136), False, 'import tokenization\n'), ((6707, 6747), 'tokenization.convert_to_unicode', 'tokenization.convert_to_unicode', (['line[1]'], {}), '(line[1])\n', (6738, 6747), False, 'import tokenization\n'), ((6765, 6805), 'tokenization.convert_to_unicode', 'tokenization.convert_to_unicode', (['line[2]'], {}), '(line[2])\n', (6796, 6805), False, 'import tokenization\n'), ((6822, 6862), 'tokenization.convert_to_unicode', 'tokenization.convert_to_unicode', (['line[0]'], {}), '(line[0])\n', (6853, 6862), False, 'import tokenization\n'), ((6989, 7005), 'numpy.mean', 'np.mean', (['lengths'], {}), '(lengths)\n', (6996, 7005), True, 'import numpy as np\n'), ((7989, 8005), 'numpy.mean', 'np.mean', (['lengths'], {}), '(lengths)\n', (7996, 8005), True, 'import numpy as np\n'), ((16921, 16975), 'collections.namedtuple', 'collections.namedtuple', (['"""DocSpan"""', "['start', 'length']"], {}), "('DocSpan', ['start', 'length'])\n", (16943, 16975), False, 'import collections\n'), ((32822, 32901), 'collections.namedtuple', 'collections.namedtuple', (['"""NbestPrediction"""', "['text', 'start_logit', 'end_logit']"], {}), "('NbestPrediction', ['text', 'start_logit', 
'end_logit'])\n", (32844, 32901), False, 'import collections\n'), ((35211, 35254), 'tensorflow.gfile.GFile', 'tf.gfile.GFile', (['output_prediction_file', '"""w"""'], {}), "(output_prediction_file, 'w')\n", (35225, 35254), True, 'import tensorflow as tf\n'), ((35343, 35381), 'tensorflow.gfile.GFile', 'tf.gfile.GFile', (['output_nbest_file', '"""w"""'], {}), "(output_nbest_file, 'w')\n", (35357, 35381), True, 'import tensorflow as tf\n'), ((36761, 36786), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (36784, 36786), False, 'import collections\n'), ((39924, 39951), 'math.exp', 'math.exp', (['(score - max_score)'], {}), '(score - max_score)\n', (39932, 39951), False, 'import math\n'), ((1639, 1679), 'tokenization.printable_text', 'tokenization.printable_text', (['self.qas_id'], {}), '(self.qas_id)\n', (1666, 1679), False, 'import tokenization\n'), ((1732, 1779), 'tokenization.printable_text', 'tokenization.printable_text', (['self.question_text'], {}), '(self.question_text)\n', (1759, 1779), False, 'import tokenization\n'), ((8599, 8635), 'tokenization.convert_to_unicode', 'tokenization.convert_to_unicode', (['"""1"""'], {}), "('1')\n", (8630, 8635), False, 'import tokenization\n'), ((8670, 8706), 'tokenization.convert_to_unicode', 'tokenization.convert_to_unicode', (['"""0"""'], {}), "('0')\n", (8701, 8706), False, 'import tokenization\n'), ((25320, 25351), 'tensorflow.logging.info', 'logging.info', (['"""*** Example ***"""'], {}), "('*** Example ***')\n", (25332, 25351), False, 'from tensorflow import logging\n'), ((25358, 25397), 'tensorflow.logging.info', 'logging.info', (["('guid: %s' % example.guid)"], {}), "('guid: %s' % example.guid)\n", (25370, 25397), False, 'from tensorflow import logging\n'), ((25756, 25819), 'tensorflow.logging.info', 'logging.info', (["('label: %s (id = %d)' % (example.label, label_id))"], {}), "('label: %s (id = %d)' % (example.label, label_id))\n", (25768, 25819), False, 'from tensorflow import logging\n'), ((34794, 34819), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (34817, 34819), False, 'import collections\n'), ((37557, 37631), 'tensorflow.logging.info', 'logging.info', (['("Unable to find text: \'%s\' in \'%s\'" % (pred_text, orig_text))'], {}), '("Unable to find text: \'%s\' in \'%s\'" % (pred_text, orig_text))\n', (37569, 37631), False, 'from tensorflow import logging\n'), ((37948, 38048), 'tensorflow.logging.info', 'logging.info', (['"""Length not equal after stripping spaces: \'%s\' vs \'%s\'"""', 'orig_ns_text', 'tok_ns_text'], {}), '("Length not equal after stripping spaces: \'%s\' vs \'%s\'",\n orig_ns_text, tok_ns_text)\n', (37960, 38048), False, 'from tensorflow import logging\n'), ((38680, 38723), 'tensorflow.logging.info', 'logging.info', (['"""Couldn\'t map start position"""'], {}), '("Couldn\'t map start position")\n', (38692, 38723), False, 'from tensorflow import logging\n'), ((39070, 39111), 'tensorflow.logging.info', 'logging.info', (['"""Couldn\'t map end position"""'], {}), '("Couldn\'t map end position")\n', (39082, 39111), False, 'from tensorflow import logging\n'), ((20361, 20392), 'tensorflow.logging.info', 'logging.info', (['"""*** Example ***"""'], {}), "('*** Example ***')\n", (20373, 20392), False, 'from tensorflow import logging\n'), ((20409, 20450), 'tensorflow.logging.info', 'logging.info', (["('unique_id: %s' % unique_id)"], {}), "('unique_id: %s' % unique_id)\n", (20421, 20450), False, 'from tensorflow import logging\n'), ((20469, 20518), 'tensorflow.logging.info', 
'logging.info', (["('example_index: %s' % example_index)"], {}), "('example_index: %s' % example_index)\n", (20481, 20518), False, 'from tensorflow import logging\n'), ((20537, 20588), 'tensorflow.logging.info', 'logging.info', (["('doc_span_index: %s' % doc_span_index)"], {}), "('doc_span_index: %s' % doc_span_index)\n", (20549, 20588), False, 'from tensorflow import logging\n'), ((35287, 35324), 'json.dumps', 'json.dumps', (['all_predictions'], {'indent': '(4)'}), '(all_predictions, indent=4)\n', (35297, 35324), False, 'import json\n'), ((35414, 35450), 'json.dumps', 'json.dumps', (['all_nbest_json'], {'indent': '(4)'}), '(all_nbest_json, indent=4)\n', (35424, 35450), False, 'import json\n'), ((872, 898), 'numpy.mean', 'np.mean', (['self.metrics[key]'], {}), '(self.metrics[key])\n', (879, 898), True, 'import numpy as np\n'), ((8414, 8454), 'tokenization.convert_to_unicode', 'tokenization.convert_to_unicode', (['line[0]'], {}), '(line[0])\n', (8445, 8454), False, 'import tokenization\n'), ((8493, 8533), 'tokenization.convert_to_unicode', 'tokenization.convert_to_unicode', (['line[1]'], {}), '(line[1])\n', (8524, 8533), False, 'import tokenization\n'), ((21489, 21540), 'tensorflow.logging.info', 'logging.info', (["('start_position: %d' % start_position)"], {}), "('start_position: %d' % start_position)\n", (21501, 21540), False, 'from tensorflow import logging\n'), ((21563, 21610), 'tensorflow.logging.info', 'logging.info', (["('end_position: %d' % end_position)"], {}), "('end_position: %d' % end_position)\n", (21575, 21610), False, 'from tensorflow import logging\n'), ((21687, 21727), 'tokenization.printable_text', 'tokenization.printable_text', (['answer_text'], {}), '(answer_text)\n', (21714, 21727), False, 'import tokenization\n'), ((25455, 25485), 'tokenization.printable_text', 'tokenization.printable_text', (['x'], {}), '(x)\n', (25482, 25485), False, 'import tokenization\n'), ((20666, 20696), 'tokenization.printable_text', 'tokenization.printable_text', (['x'], {}), '(x)\n', (20693, 20696), False, 'import tokenization\n'), ((20833, 20865), 'six.iteritems', 'six.iteritems', (['token_to_orig_map'], {}), '(token_to_orig_map)\n', (20846, 20865), False, 'import six\n'), ((20989, 21024), 'six.iteritems', 'six.iteritems', (['token_is_max_context'], {}), '(token_is_max_context)\n', (21002, 21024), False, 'import six\n')]
|
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import all_call.train
import numpy as np
import json
import sys
import pandas as pd
import re
import os
from glob import glob
from arguments import yaml_reader
# default parameters for inference
DEFAULT_MODEL_PARAMS = (-0.0107736, 0.00244419, 0.0, 0.00440608)
DEFAULT_READ_DROP = (479.596, -21.4382)
DEFAULT_READ_DROP_REL = (1.18332, -0.0475454)
DEFAULT_FIT_FUNCTION = "linear"
# functions for training
fit_functions = {"const": all_call.train.const_rate, "linear": all_call.train.linear_rate, "n2": all_call.train.n2_rate, "exp": all_call.train.exp_rate}
def load_arguments():
"""
Loads all arguments and sets default values.
:return: argparse arguments
"""
parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter)
parser.add_argument('dir_structure', type=path_exists, help='Directory with multiple Dante results directories. '
'Each Dante directory has filled "all_profiles.txt" and "all_profiles.true" files.')
# training.add_argument('--model-fig', type=str, default=None, help="File to write .png file with comparison of models and train data. Suffix determines the type of image file.")
# parser.add_argument('--profiles', type=str, required=True, help="TSV file or .npy file with one or more profiles. Required.")
parser.add_argument('--output-params', type=convert_to_absolute, default=None, help="File with parameters of the model to save to. Default: dir_structure/params.txt")
parser.add_argument('--output-profile', type=convert_to_absolute, default=None, help="File, where to collect all the profiles. Default: dir_structure/all_profiles.txt")
parser.add_argument('--output-true', type=convert_to_absolute, default=None, help="File, where to collect all the true values. Default: dir_structure/all_profiles.true")
parser.add_argument('--input-true', type=convert_to_absolute, default=None, help="File, with all the true values. Default: collect from Dante predictions")
parser.add_argument('--config-dir', type=path_exists, default=None, help="Directory, where to save new config files. Default: without saving")
parser.add_argument('--fit-function', choices=fit_functions.keys(), default="linear", help="Function to approximate deletion rate of STRs. Default: linear")
parser.add_argument('-v', '--verbosity-level', type=int, choices=range(3), default=1, help="Level of verbosity, default 1.")
parser.add_argument('-p', '--prepare', action='store_true', help="Only prepare files, do not run training.")
# input_args.add_argument('-l', '--len_repeating', type=int, default=3, help="Length of the STR. Used for read drop modelling.")
args = parser.parse_args()
# check
if args.output_profile is None:
args.output_profile = '%s/all_profiles.txt' % args.dir_structure
if args.output_true is None:
args.output_true = '%s/all_profiles.true' % args.dir_structure
if args.output_params is None:
args.output_params = '%s/params.txt' % args.dir_structure
return args
def convert_to_absolute(path):
"""
    Converts to an absolute path; does not check whether the path exists.
:param path: str - path
:return: str - absolute path
"""
return os.path.abspath(path)
def path_exists(path):
"""
Checks if the supplied path exists.
:param path: str - path to a file or dir
:return: str - absolute path to a file or dir
"""
    try:
        path = convert_to_absolute(path)
    except Exception:
        print('ERROR: %s is not a valid path' % path)
        exit(-1)
    if not os.path.exists(path):
        print('ERROR: %s does not exist' % path)
        exit(-1)
    return path
def crawl_dante(dir_structure):
"""
Crawl Dante dir and collect config, profile, and true_vals files
:param dir_structure: str - directory above the Dante directory structures, here we start the crawl
:return: list(str) x3 - list of paths to configs, profiles, and true values
"""
# read all configs
configs = glob('%s/*/config.yaml' % dir_structure)
good_configs = []
profiles = []
true_vals = []
# check if every config has its profiles and true_vals
for config in configs:
profile = '%s/all_profiles.txt' % os.path.dirname(config)
if not os.path.exists(profile):
print('WARNING: "%s" exists but "%s" does not!!' % (config, profile))
continue
true_val = '%s/all_profiles.true' % os.path.dirname(config)
if not os.path.exists(true_val):
print('WARNING: "%s" exists but "%s" does not!!' % (config, true_val))
continue
# all ok, write them:
good_configs.append(config)
profiles.append(profile)
true_vals.append(true_val)
return good_configs, profiles, true_vals
def get_name(path):
"""
Get directory name from path to config/profile/...
:param path: str - path
:return: str - directory name without blanks
"""
directory = path.split('/')[-2]
directory = directory.replace(' ', '_')
return directory
def update_config(config_path, save_dir, params_file):
"""
Create new config file with inputs from the outputs of Dante.
:param config_path: str - path to the config file
:param save_dir: str - directory where to save the new config
    :param params_file: str - file with parameters of the model to reference in the new config
:return: None
"""
# gather inputs:
directory = os.path.dirname(config_path)
inputs = glob('%s/*/annotations*' % directory)
inputs += glob('%s/*/filtered_primer*' % directory)
# read the old config:
config = yaml_reader.load_arguments(config_path)
# update the config with new inputs
config['inputs'] = []
for input in inputs:
config['inputs'].append({'path': input})
# update the config with new params
config['allcall']['param_file'] = params_file
# add "_retrained" to output dirs
config['general']['output_dir'] = '%s_retrained' % config['general']['output_dir']
# write it
name = get_name(config_path)
config_name = '%s/%s_config.yaml' % (save_dir, name)
yaml_reader.save_arguments(config, config_name)
def merge_profiles(profiles, output_file):
"""
Merge all profiles according to the name of dirs and output them.
:param profiles: list(str) - list of paths to profiles
:param output_file: str - output file for merged file
:return: pd.DataFrame - merged DataFrame with all data
"""
if len(profiles) == 0:
return None
# create empty dataframe
all_profiles = pd.DataFrame()
# and fill it
for profile in profiles:
name = get_name(profile)
# get the maximal number of columns:
max_cols = 0
with open(profile) as f:
for line in f:
max_cols = max(max_cols, line.count('\t'))
# write to aggregated file:
        current = pd.read_csv(profile, sep='\t', header=None, names=['index'] + list(range(max_cols)), index_col=0, parse_dates=True, engine='python')
current.index = list(map(lambda x: '%s_%s' % (name, x), current.index))
all_profiles = pd.concat([all_profiles, current])
# fill not available data:
all_profiles = all_profiles.fillna(0)
all_profiles = all_profiles.applymap(lambda x: x if type(x) is str else str(int(x)))
all_profiles.sort_index(inplace=True)
# save it:
all_profiles.to_csv(output_file, sep='\t')
# return it
return all_profiles
def read_dante(filename):
"""
Read profiles from CSV from Dante.
:param filename: str - filename to read
:return: Pandas.DataFrame with read profiles or None if no read occurred
"""
# now try to load tsv file:
name = filename.split('/')[-2]
try:
profiles = pd.read_csv(filename, sep="\t", header=None, index_col=None, parse_dates=True)
except Exception:
return None
new_profiles = pd.DataFrame()
max_str = max(profiles.max(0)[1:]) + 2
if profiles is not None:
for column in profiles.columns[1:]:
vals = np.zeros(max_str, dtype=int)
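            # build a histogram: for each row, add the count from the first column at the index given by this column's value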
for i, c in enumerate(profiles[column]):
vals[int(c)] += profiles.iloc[i][0]
new_profiles['%s_%d' % (name, column - 1)] = vals
if len(new_profiles.index) > 0:
profiles = new_profiles.transpose()
return profiles
def fix_profile_file(filename):
"""
    Fix a profile file so that it can be read as a TSV (pad short lines with zero columns).
:param filename: str - filename to fix
"""
# read the file
with open(filename) as f:
lines = f.readlines()
# find separator
sep = '\t' if len(lines[0].split('\t')) >= len(lines[0].split(None)) else None
# count the number of columns:
cols = np.zeros_like(lines, dtype=int)
for i, line in enumerate(lines):
cols[i] = len(line.split(sep))
# print with the highest number
max_cols = max(cols)
with open(filename, 'w') as f:
for i, line in enumerate(lines):
f.write(line.strip())
# append enough zeros
for _ in range(max_cols - cols[i]):
f.write('\t0')
f.write('\n')
def read_profiles(filename):
"""
Read profiles from CSV or from .npy file.
:param filename: str - filename to read
:return: Pandas.DataFrame with read profiles or None if no read occurred
"""
# first try to load numpy array
try:
profiles = np.load(filename)
except IOError:
profiles = None
if profiles is not None:
profiles = pd.DataFrame(data=profiles[np.newaxis], index=[int(filename.split('.')[0].split('/')[-1])])
# now try to load tsv file:
if profiles is None:
try:
fix_profile_file(filename)
profiles = pd.read_csv(filename, sep='\t', header=None, index_col=0, parse_dates=True)
except IOError:
profiles = None
return profiles
def read_true(filename):
"""
Read true values from json file or from .true file.
:param filename: str - json file to read
:return: dict - values read from the json file or None if no read occurred
"""
class WrongCountError(Exception):
pass
true_values = None
try:
with open(filename) as f:
true_values = json.load(f)
except Exception:
pass
if true_values is None:
try:
with open(filename) as f:
true_values = {}
for line in f:
split = line.split()
if len(split) == 3:
m = re.search(r'_\d+$', split[0])
name = split[0]
if m is None:
name += '_1'
true_values[name] = (int(split[1]), int(split[2]))
elif len(split) > 3:
raise WrongCountError("Wrong number of parsed elements (expected 3, got %d)" % len(split))
except Exception as e:
print('ERROR: ', e)
return None
return true_values
def read_params(filename):
"""
Reads all parameters written with write_params(print_all=True)
:param filename: str - filename to read parameters from, if None, load default params
:return: 4-tuple, 2-tuple, function - parameters for model, read count drop, and error function for model distributions
"""
if filename is None:
return DEFAULT_MODEL_PARAMS, DEFAULT_READ_DROP, DEFAULT_READ_DROP_REL, DEFAULT_FIT_FUNCTION
# read 2nd and last line of the file
with open(filename) as f:
lines = f.readlines()
fit_function = lines[1].strip().split()[1]
split = list(map(float, lines[-1].strip().split()))
if len(split) < 8:
print("ERROR: parameters were not read successfully, using defaults!", file=sys.stderr)
return DEFAULT_MODEL_PARAMS, DEFAULT_READ_DROP, DEFAULT_READ_DROP_REL, DEFAULT_FIT_FUNCTION
# extract parameters from last line of file
model_params = tuple(split[0:4])
read_drop_params = tuple(split[4:6])
read_drop_params_rel = tuple(split[6:8])
return model_params, read_drop_params, read_drop_params_rel, fit_function
|
[
"pandas.DataFrame",
"os.path.abspath",
"numpy.zeros_like",
"numpy.load",
"argparse.ArgumentParser",
"json.load",
"pandas.read_csv",
"os.path.dirname",
"arguments.yaml_reader.save_arguments",
"os.path.exists",
"numpy.zeros",
"glob.glob",
"arguments.yaml_reader.load_arguments",
"re.search",
"pandas.concat"
] |
[((757, 816), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'formatter_class': 'RawDescriptionHelpFormatter'}), '(formatter_class=RawDescriptionHelpFormatter)\n', (771, 816), False, 'from argparse import ArgumentParser, RawDescriptionHelpFormatter\n'), ((3315, 3336), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (3330, 3336), False, 'import os\n'), ((4019, 4059), 'glob.glob', 'glob', (["('%s/*/config.yaml' % dir_structure)"], {}), "('%s/*/config.yaml' % dir_structure)\n", (4023, 4059), False, 'from glob import glob\n'), ((5462, 5490), 'os.path.dirname', 'os.path.dirname', (['config_path'], {}), '(config_path)\n', (5477, 5490), False, 'import os\n'), ((5504, 5541), 'glob.glob', 'glob', (["('%s/*/annotations*' % directory)"], {}), "('%s/*/annotations*' % directory)\n", (5508, 5541), False, 'from glob import glob\n'), ((5556, 5597), 'glob.glob', 'glob', (["('%s/*/filtered_primer*' % directory)"], {}), "('%s/*/filtered_primer*' % directory)\n", (5560, 5597), False, 'from glob import glob\n'), ((5639, 5678), 'arguments.yaml_reader.load_arguments', 'yaml_reader.load_arguments', (['config_path'], {}), '(config_path)\n', (5665, 5678), False, 'from arguments import yaml_reader\n'), ((6147, 6194), 'arguments.yaml_reader.save_arguments', 'yaml_reader.save_arguments', (['config', 'config_name'], {}), '(config, config_name)\n', (6173, 6194), False, 'from arguments import yaml_reader\n'), ((6598, 6612), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (6610, 6612), True, 'import pandas as pd\n'), ((7950, 7964), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (7962, 7964), True, 'import pandas as pd\n'), ((8775, 8806), 'numpy.zeros_like', 'np.zeros_like', (['lines'], {'dtype': 'int'}), '(lines, dtype=int)\n', (8788, 8806), True, 'import numpy as np\n'), ((7165, 7199), 'pandas.concat', 'pd.concat', (['[all_profiles, current]'], {}), '([all_profiles, current])\n', (7174, 7199), True, 'import pandas as pd\n'), ((7809, 7887), 'pandas.read_csv', 'pd.read_csv', (['filename'], {'sep': '"""\t"""', 'header': 'None', 'index_col': 'None', 'parse_dates': '(True)'}), "(filename, sep='\\t', header=None, index_col=None, parse_dates=True)\n", (7820, 7887), True, 'import pandas as pd\n'), ((9472, 9489), 'numpy.load', 'np.load', (['filename'], {}), '(filename)\n', (9479, 9489), True, 'import numpy as np\n'), ((4249, 4272), 'os.path.dirname', 'os.path.dirname', (['config'], {}), '(config)\n', (4264, 4272), False, 'import os\n'), ((4288, 4311), 'os.path.exists', 'os.path.exists', (['profile'], {}), '(profile)\n', (4302, 4311), False, 'import os\n'), ((4460, 4483), 'os.path.dirname', 'os.path.dirname', (['config'], {}), '(config)\n', (4475, 4483), False, 'import os\n'), ((4499, 4523), 'os.path.exists', 'os.path.exists', (['true_val'], {}), '(true_val)\n', (4513, 4523), False, 'import os\n'), ((8102, 8130), 'numpy.zeros', 'np.zeros', (['max_str'], {'dtype': 'int'}), '(max_str, dtype=int)\n', (8110, 8130), True, 'import numpy as np\n'), ((9808, 9883), 'pandas.read_csv', 'pd.read_csv', (['filename'], {'sep': '"""\t"""', 'header': 'None', 'index_col': '(0)', 'parse_dates': '(True)'}), "(filename, sep='\\t', header=None, index_col=0, parse_dates=True)\n", (9819, 9883), True, 'import pandas as pd\n'), ((10325, 10337), 'json.load', 'json.load', (['f'], {}), '(f)\n', (10334, 10337), False, 'import json\n'), ((10626, 10655), 're.search', 're.search', (['"""_\\\\d+$"""', 'split[0]'], {}), "('_\\\\d+$', split[0])\n", (10635, 10655), False, 'import re\n')]
|
import os
import sys
import argparse
parse = argparse.ArgumentParser()
parse.add_argument("--type", type=str,choices=['origin', 'grist',], help="run initial file or grist file")
parse.add_argument("--times", type=int, help="time to run code")
flags, unparsed = parse.parse_known_args(sys.argv[1:])
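# each iteration launches a detached background run via nohup and redirects its output to a log file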
for i in range(flags.times):
command = f"nohup python -u scripts/one_time_runner.py --type {flags.type} > {flags.type}_{flags.times}.log 2>&1 &"
os.system(command)
|
[
"os.system",
"argparse.ArgumentParser"
] |
[((46, 71), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (69, 71), False, 'import argparse\n'), ((453, 471), 'os.system', 'os.system', (['command'], {}), '(command)\n', (462, 471), False, 'import os\n')]
|
import pandas as pd
import numpy as np
import geopandas as gpd
import glob
import rasterio
import rasterio.mask
from shapely.geometry import box
import os
shp_file_path = r'C:\Doutorado\BD\IBGE\IBGE_Estruturas_cartograficas_Brasil\2017\Unidades_Censitarias\Setores_Censitarios\*shp'
gdf = gpd.read_file(glob.glob(shp_file_path)[0])
gdf_origin_bounds = gpd.GeoSeries(box(*gdf.total_bounds), crs=gdf.crs)
Para = gdf[gdf['CD_GEOCODM'].str.startswith('15')]
def get_bounds_from_gdf(gdf_bounds, epsg):
return gdf_bounds.to_crs(epsg)
main_dir = r'C:\Users\<NAME>\Downloads\temp\Global Impervious Surfaces products\Global Impervious Surfaces products'
ending = '*.tif*'
tiff_files = glob.glob(os.path.join(main_dir, ending))
print('Total number of files in directory: ', len(tiff_files))
# Filtering files outside the Main GDF:
valid_tiffs = []
for tiff_file_path in tiff_files:
with rasterio.open(tiff_file_path) as src:
#out_image, out_transform = rasterio.mask.mask(src, shapes, crop=True)
out_meta = src.meta
Bounds = box(*src.bounds)
gdf_bounds = get_bounds_from_gdf(gdf_origin_bounds, out_meta['crs'].to_epsg()).values
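    # keep this raster if the shapefile bounds (reprojected to the raster CRS) overlap it in any way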
if (gdf_bounds.intersects(Bounds) or gdf_bounds.within(Bounds)
or gdf_bounds.contains(Bounds) or gdf_bounds.crosses(Bounds)
):
valid_tiffs.append(tiff_file_path)
print('Valid Total Files: ', len(valid_tiffs))
ref_dir = os.path.dirname(os.path.dirname(main_dir))
saving_paths = os.path.join(ref_dir, 'Valid_files.csv')
to_file = pd.Series(valid_tiffs, name='paths')
to_file.index.name = 'ID'
to_file.to_csv(saving_paths)
|
[
"rasterio.open",
"os.path.dirname",
"pandas.Series",
"glob.glob",
"os.path.join",
"shapely.geometry.box"
] |
[((1523, 1563), 'os.path.join', 'os.path.join', (['ref_dir', '"""Valid_files.csv"""'], {}), "(ref_dir, 'Valid_files.csv')\n", (1535, 1563), False, 'import os\n'), ((1575, 1611), 'pandas.Series', 'pd.Series', (['valid_tiffs'], {'name': '"""paths"""'}), "(valid_tiffs, name='paths')\n", (1584, 1611), True, 'import pandas as pd\n'), ((370, 392), 'shapely.geometry.box', 'box', (['*gdf.total_bounds'], {}), '(*gdf.total_bounds)\n', (373, 392), False, 'from shapely.geometry import box\n'), ((712, 742), 'os.path.join', 'os.path.join', (['main_dir', 'ending'], {}), '(main_dir, ending)\n', (724, 742), False, 'import os\n'), ((1480, 1505), 'os.path.dirname', 'os.path.dirname', (['main_dir'], {}), '(main_dir)\n', (1495, 1505), False, 'import os\n'), ((307, 331), 'glob.glob', 'glob.glob', (['shp_file_path'], {}), '(shp_file_path)\n', (316, 331), False, 'import glob\n'), ((924, 953), 'rasterio.open', 'rasterio.open', (['tiff_file_path'], {}), '(tiff_file_path)\n', (937, 953), False, 'import rasterio\n'), ((1086, 1102), 'shapely.geometry.box', 'box', (['*src.bounds'], {}), '(*src.bounds)\n', (1089, 1102), False, 'from shapely.geometry import box\n')]
|
"""
Color and Fill Scales
=====================
Scales control how a plot maps data values to the visual values of an
aesthetic.
"""
# sphinx_gallery_thumbnail_path = "gallery_py\_scales\_color_and_fill.png"
from datetime import datetime
import pandas as pd
from lets_plot import *
LetsPlot.setup_html()
# %%
mpg_df = pd.read_csv('https://raw.githubusercontent.com/JetBrains/lets-plot-docs/master/data/mpg.csv')
ec_df = pd.read_csv('https://raw.githubusercontent.com/JetBrains/lets-plot-docs/master/data/economics.csv', \
parse_dates=['date'])
ec_df = ec_df[ec_df.date > datetime(2000, 1, 1)]
# %%
# %% [markdown]
#
# Discrete
# ~~~~~~~~
# %%
p = ggplot(mpg_df, aes(x='fl')) + geom_bar(aes(color='fl', fill='fl'), alpha=.5)
p
# %%
p + scale_color_brewer(type='seq', palette='Blues') + \
scale_fill_brewer(type='seq', palette='Blues')
# %%
p + scale_color_grey(start=0, end=.7) + \
scale_fill_grey(start=0, end=.7)
# %%
# %% [markdown]
#
# Continuous
# ~~~~~~~~~~
# %%
p = ggplot(ec_df, aes(x='psavert')) + geom_histogram(aes(fill='psavert'))
p
# %%
p + scale_fill_gradient(low='#2c7fb8', high='#edf8b1')
# %%
p + scale_fill_gradient2(low='#1a9641', mid='#ffffbf', high='#d7191c')
# %%
p + scale_fill_hue(l=80, c=150)
|
[
"pandas.read_csv",
"datetime.datetime"
] |
[((653, 756), 'pandas.read_csv', 'pd.read_csv', (['"""https://raw.githubusercontent.com/JetBrains/lets-plot-docs/master/data/mpg.csv"""'], {}), "(\n 'https://raw.githubusercontent.com/JetBrains/lets-plot-docs/master/data/mpg.csv'\n )\n", (664, 756), True, 'import pandas as pd\n'), ((661, 792), 'pandas.read_csv', 'pd.read_csv', (['"""https://raw.githubusercontent.com/JetBrains/lets-plot-docs/master/data/economics.csv"""'], {'parse_dates': "['date']"}), "(\n 'https://raw.githubusercontent.com/JetBrains/lets-plot-docs/master/data/economics.csv'\n , parse_dates=['date'])\n", (672, 792), True, 'import pandas as pd\n'), ((681, 701), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)'], {}), '(2000, 1, 1)\n', (689, 701), False, 'from datetime import datetime\n')]
|
# -*- python -*-
# This software was produced by NIST, an agency of the U.S. government,
# and by statute is not subject to copyright in the United States.
# Recipients of this software assume all responsibilities associated
# with its operation, modification and maintenance. However, to
# facilitate maintenance we ask that before distributing modified
# versions of this software, you first contact the authors at
# <EMAIL>.
from ooflib.SWIG.common import config
from ooflib.SWIG.common import ooferror
from ooflib.SWIG.common import switchboard
from ooflib.SWIG.engine import elementshape
from ooflib.SWIG.engine import masterelement
from ooflib.common import debug
from ooflib.common import enum
from ooflib.common import labeltree
from ooflib.common import microstructure
from ooflib.common import parallel_enable
from ooflib.common import utils
from ooflib.common.IO import automatic
from ooflib.common.IO import datafile
from ooflib.common.IO import filenameparam
from ooflib.common.IO import mainmenu
from ooflib.common.IO import microstructureIO
from ooflib.common.IO import oofmenu
from ooflib.common.IO import parameter
from ooflib.common.IO import reporter
from ooflib.common.IO import whoville
from ooflib.common.IO import xmlmenudump
from ooflib.engine import bdycondition
from ooflib.engine import evolve
from ooflib.engine import fieldinit
from ooflib.engine import meshcrosssection
from ooflib.engine import meshmod
from ooflib.engine import meshstatus
from ooflib.engine import outputschedule
from ooflib.engine import skeletoncontext
from ooflib.engine import subproblemcontext
from ooflib.engine.IO import meshparameters
from ooflib.engine.IO import skeletonIO
from ooflib.engine.IO import subproblemmenu
if parallel_enable.enabled():
from ooflib.engine.IO import meshIPC
import ooflib.engine.mesh
import types
import string
SyncMeshParameter = ooflib.engine.mesh.SyncMeshParameter
OOF = mainmenu.OOF
meshmenu = mainmenu.OOF.addItem(oofmenu.OOFMenuItem(
'Mesh',
cli_only=1,
help='Tools for creating and manipulating Meshes.',
discussion="""<para>
The <command>Mesh</command> menu contains tools for creating and
manipulating finite element &meshes;, including methods for
defining &fields; and determining which &equations; to <link
linkend='MenuItem-OOF.Mesh.Solve'>solve</link>.
</para>"""))
settingsmenu = mainmenu.OOF.Settings.addItem(oofmenu.OOFMenuItem(
'Mesh_Defaults',
help='Default values for Mesh parameters'))
####################
# Look for an enclosing mesh parameter -- if not found, use the
# enclosing skeleton parameter. Mesh copying needs the first case,
# new mesh construction needs the second.
def meshNameResolver(param, startname):
if param.automatic():
basename = 'mesh'
else:
basename = startname
try:
meshname = param.group['mesh'].value
except IndexError:
skelname = param.group['skeleton'].value
skelpath = labeltree.makePath(skelname)
else:
skelpath = labeltree.makePath(meshname)[:-1]
return ooflib.engine.mesh.meshes.uniqueName(skelpath + [basename])
###################################
def newMesh(menuitem, name, skeleton, element_types):
# if parallel_enable.enabled():
# # skeleton is a string!
# # The following ASSUMES there are exactly three element_types:
# #(D_typename, T_typename and Q_typename, for edgement, Tri and Quad)
# meshIPC.parallel_newMesh(name,skeleton,
# element_types[0].name,
# element_types[1].name,
# element_types[2].name)
# else:
edict = {}
for eltype in element_types:
el = masterelement.getMasterElementFromEnum(eltype)
edict[el.shape().name()] = el
skelpath = labeltree.makePath(skeleton)
skelctxt = skeletoncontext.skeletonContexts[skelpath]
skelctxt.begin_reading()
try:
skel = skelctxt.getObject()
femesh = skel.femesh(edict)
if femesh is not None:
meshctxt = ooflib.engine.mesh.meshes.add(
skelpath+[name], femesh,
parent=skelctxt,
skeleton=skel,
elementdict=edict,
materialfactory=None)
meshctxt.createDefaultSubProblem()
meshctxt.setStatus(meshstatus.Unsolved("New mesh."))
finally:
skelctxt.end_reading()
switchboard.notify("redraw")
class MasterElementTypesParameter(enum.ListOfEnumsParameter):
def __init__(self, name, value=None, default=None, tip=None):
enum.ListOfEnumsParameter.__init__(
self, name,
elementshape.enumClasses.values(),
#masterelement.getMasterElementEnumClasses(),
value, default, tip)
def clone(self):
return self.__class__(self.name, self.value, self.default, self.tip)
def valueDesc(self):
return "A list of element types."
newmeshcmd = meshmenu.addItem(oofmenu.OOFMenuItem(
'New',
callback=newMesh,
params=parameter.ParameterGroup(
whoville.AutoWhoNameParameter('name', value=automatic.automatic,
resolver=meshNameResolver,
tip="Name of the new Mesh"),
whoville.WhoParameter('skeleton', skeletoncontext.skeletonContexts,
tip=parameter.emptyTipString),
MasterElementTypesParameter('element_types',
tip='A list of finite element types'),
## parameter.BooleanParameter('split_interface', value=0,
## tip='Split the mesh along interfaces?')
),
help='Create a new Mesh from a Skeleton.',
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/newmesh.xml')
))
# The element_types parameter in the New Mesh menu item needs to be
# recreated whenever new MasterElement types are defined.
def buildNewMeshCmd():
params = parameter.ParameterGroup(
newmeshcmd.get_arg('name'),
newmeshcmd.get_arg('skeleton'),
MasterElementTypesParameter('element_types'))
newmeshcmd.replace_args(params)
switchboard.requestCallback("new master element", buildNewMeshCmd)
#####################################
def renameMesh(menuitem, mesh, name):
if parallel_enable.enabled():
meshIPC.ipcmeshmenu.Rename(mesh=mesh,name=name)
return
oldmeshpath = labeltree.makePath(mesh)
themesh = ooflib.engine.mesh.meshes[oldmeshpath]
themesh.reserve()
themesh.begin_writing()
try:
themesh.rename(name, exclude=oldmeshpath[-1])
finally:
themesh.end_writing()
themesh.cancel_reservation()
meshmenu.addItem(oofmenu.OOFMenuItem(
'Rename',
callback=renameMesh,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
whoville.WhoNameParameter('name', value='',
tip='New name for the mesh.')
],
help="Rename a Mesh.",
discussion="<para> Assign a new name to a &mesh;. </para>"))
#######################################
def deleteMesh(menuitem, mesh):
if parallel_enable.enabled():
meshIPC.ipcmeshmenu.Delete(mesh=mesh)
return
meshctxt = ooflib.engine.mesh.meshes[mesh]
subproblems = meshctxt.subproblems()
for subproblem in subproblems:
subproblem.begin_writing()
try:
subproblem.destroy()
finally:
subproblem.end_writing()
meshctxt.reserve()
meshctxt.begin_writing()
try:
meshctxt.destroy() # removes mesh from ooflib.engine.mesh.meshes
finally:
meshctxt.end_writing()
meshctxt.cancel_reservation()
meshmenu.addItem(oofmenu.OOFMenuItem(
'Delete',
callback=deleteMesh,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString)
],
help="Delete a Mesh.",
discussion="""<para>
    Delete a &mesh;.  Its &skel; and &micro; are
<emphasis>not</emphasis> deleted.
</para>"""
))
#######################################
def copyMesh(menuitem, mesh, name, copy_field, copy_equation, copy_bc):
if parallel_enable.enabled():
meshIPC.ipcmeshmenu.Copy(mesh=mesh,name=name,
copy_field=copy_field,
copy_equation=copy_equation, copy_bc=copy_bc)
return
notifications = set()
basemesh = ooflib.engine.mesh.meshes[mesh]
basemesh.begin_reading()
try:
edict = basemesh.elementdict
copiedmeshname = name
skel = basemesh.getSkeleton()
skelpath = labeltree.makePath(basemesh.path())[:-1]
#Interface branch, pass skeleton path to femesh
copiedfemesh = skel.femesh(edict, basemesh.materialfactory)
newmesh = ooflib.engine.mesh.meshes.add(
skelpath+[copiedmeshname],
copiedfemesh,
parent=skeletoncontext.skeletonContexts[skelpath],
skeleton=skel, elementdict=edict,
materialfactory=basemesh.materialfactory)
newmesh.reserve()
newmesh.begin_writing()
try:
copiedmesh = skelpath+[copiedmeshname]
copiedmeshfullname = string.join(copiedmesh,":")
for subpctxt in basemesh.subproblems():
newsubpctxt = subpctxt.clone(newmesh, copy_field, copy_equation,
notifications)
if copy_field:
for field in subpctxt.all_compound_fields():
newsubpctxt.getObject().acquire_field_data(
field, subpctxt.getObject())
# end loop over subproblems
newmesh.getObject().setCurrentTime(
basemesh.getObject().getCurrentTime())
if copy_field:
for field in newmesh.all_subproblem_fields():
if (config.dimension() == 2 and
basemesh.femesh().in_plane(field)):
newmesh.set_in_plane_field(field, 1)
notifications.add(("field inplane",
copiedmeshfullname, field.name(), 1))
try:
initializer = basemesh.initializers[field]
except KeyError:
pass
else:
newmesh.set_field_initializer(field, initializer)
notifications.add(("field initialized"))
if copy_bc:
for (bcname, bc) in basemesh.allBoundaryConds():
#Interface branch
#Don't copy the invisible Float BCs associated with
#interfaces. (see femesh.spy)
if bcname.find('_cntnty_')==0:
continue
copied = bc.copy(bc.boundary)
copied.add_to_mesh(bcname, copiedmesh)
if copy_field and copy_bc and copy_equation:
newmesh.setStatus(basemesh.status)
else:
newmesh.setStatus(meshstatus.Unsolved("New copy"))
finally:
newmesh.end_writing()
newmesh.cancel_reservation()
finally:
basemesh.end_reading()
for n in notifications:
## TODO OPT: remove duplicate notifications
switchboard.notify(*n)
meshmenu.addItem(oofmenu.OOFMenuItem(
'Copy', callback=copyMesh,
params= parameter.ParameterGroup(
whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
whoville.AutoWhoNameParameter('name', value=automatic.automatic,
resolver=meshNameResolver,
tip="Name of the copied Mesh. Use automatic selection, or type in a name."),
parameter.BooleanParameter('copy_field', value=1, tip='Copy fields?'),
parameter.BooleanParameter('copy_equation', value=1, tip='Copy equation?'),
parameter.BooleanParameter('copy_bc', value=1,
tip='Copy boundary conditions?') ),
help="Copy a Mesh.",
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/copymesh.xml')
))
#=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=#
# Copy the field state (definitions, active-ness, planarity) of one
# mesh into another. This is the backwards-compatible deprecated
# version that uses the Mesh's default subproblem. The preferred
# version is in subproblemmenu.py.
def _copyFieldState(menuitem, source, target):
if source == target:
raise ooferror.ErrUserError('Source and target must differ!')
if parallel_enable.enabled():
meshIPC.ipcmeshmenu.Copy_Field_State(source=source,target=target)
return
notifications = []
source_mesh = ooflib.engine.mesh.meshes[source]
target_mesh = ooflib.engine.mesh.meshes[target]
source_subp = source_mesh.get_default_subproblem()
target_subp = target_mesh.get_default_subproblem()
source_subp.begin_reading()
target_subp.reserve()
target_subp.begin_writing()
try:
source_obj = source_subp.getObject()
target_obj = target_subp.getObject()
source_fields = source_subp.all_compound_fields()
target_fields = target_subp.all_compound_fields()
# Undefine all the fields in the target that are not in the source.
for f in target_fields:
if not source_obj.is_defined_field(f):
target_obj.undefine_field(f)
notifications.append(
("field defined", target_subp.path(), f.name(), 0))
for f in source_fields:
# Definition.
if not target_obj.is_defined_field(f):
target_obj.define_field(f)
notifications.append(
("field defined", target_subp.path(), f.name(), 1))
# Activation.
if source_obj.is_active_field(f):
if not target_obj.is_active_field(f):
target_obj.activate_field(f)
notifications.append(
("field activated", target_subp.path(), f.name(), 1))
else:
if target_obj.is_active_field(f):
target_obj.deactivate_field(f)
notifications.append(
("field activated", target_subp.path(), f.name(), 0))
# Planarity.
if config.dimension() == 2:
inplane = source_mesh.femesh().in_plane(f)
if target_mesh.femesh().in_plane(f) != inplane:
target_mesh.set_in_plane_field(f, inplane)
notifications.append(("field inplane", target, f.name(),
inplane))
try:
initializer = source_mesh.initializers[f]
except KeyError:
pass
else:
target_mesh.set_field_initializer(f, initializer)
notifications.append(("field initialized"))
finally:
source_subp.end_reading()
target_subp.end_writing()
target_subp.cancel_reservation()
# Make all the switchboard notifications outside the locked region.
for n in notifications:
switchboard.notify(*n)
# Update BCs
target_subp.autoenableBCs()
target_subp.changed("Field state changed.")
switchboard.notify("redraw")
target_mesh.setStatus(meshstatus.Unsolved("Copied fields"))
meshmenu.addItem(oofmenu.OOFMenuItem(
'Copy_Field_State',
callback=_copyFieldState,
params=[whoville.WhoParameter('source',ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
whoville.WhoParameter('target',ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString)],
help="Copy the Field state (defined, active, etc) from one Mesh to another.",
discussion="""<para>
This command copies the &field; state from the default
&subproblem; in one &mesh; to another, meaning that the same
&fields; will be defined, active, and in-plane in the
<varname>target</varname> &mesh; as in the
<varname>source</varname> &mesh;. If &fields; were explicitly
<link linkend='MenuItem-OOF.Mesh.Set_Field_Initializer'>initialized</link>
in the source &mesh;, the initializers will be copied, but the
command does <emphasis>not</emphasis> copy the &field; values.
(This is because the source and target meshes might have quite
different geometries.)</para>
<para>DEPRECATED. Use <xref
linkend='MenuItem-OOF.Subproblem.Copy_Field_State'/> instead.
</para>"""
) )
# Likewise for equation state. This is also deprecated. See
# subproblemmenu.py for the preferred version.
def _copyEquationState(menuitem, source, target):
if source == target:
raise ooferror.ErrUserError('Source and target must differ!')
if parallel_enable.enabled():
meshIPC.ipcmeshmenu.Copy_Equation_State(source=source,target=target)
return
notifications = []
source_subp = ooflib.engine.mesh.meshes[source].get_default_subproblem()
target_subp = ooflib.engine.mesh.meshes[target].get_default_subproblem()
source_subp.begin_reading()
target_subp.reserve()
target_subp.begin_writing()
try:
source_obj = source_subp.getObject()
target_obj = target_subp.getObject()
source_eqns = source_obj.all_equations()
target_eqns = target_obj.all_equations()
for e in target_eqns:
if not source_obj.is_active_equation(e):
target_obj.deactivate_equation(e)
notifications.append(
("equation activated", target_subp.path(), e.name(), 0))
if config.devel()>=1:
if not source_obj.is_kinetically_active_equation(e):
target_obj.kinetic_deactivate_equation(e)
notifications.append(
('kinetics activated', target_subp.path(), e.name(), 0))
if not source_obj.is_dynamically_active_equation(e):
target_obj.deactivate_dynamics(e)
notifications.append(
('dynamics activated', target_subp.path(), e.name(), 0))
for e in source_eqns:
if not target_obj.is_active_equation(e):
target_obj.activate_equation(e)
notifications.append(
("equation activated", target_subp.path(), e.name(), 1))
if config.devel()>=1:
if not target_obj.is_kinetically_active_equation(e):
target_obj.kinetic_activate_equation(e)
notifications.append(
('kinetics activated', target_subp.path(), e.name(), 1))
if not target_obj.is_dynamically_active_equation(e):
target_obj.activate_dynamics(e)
notifications.append(
('dynamics activated', target_subp.path(), e.name(), 1))
finally:
source_subp.end_reading()
target_subp.end_writing()
target_subp.cancel_reservation()
for n in notifications:
switchboard.notify(*n)
target_subp.autoenableBCs()
target_subp.changed("Equations changed.")
meshmenu.addItem(oofmenu.OOFMenuItem(
'Copy_Equation_State',
callback=_copyEquationState,
params=[whoville.WhoParameter('source',ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
whoville.WhoParameter('target',ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString)],
help="Copy the set of active Equations from one Mesh to another.",
discussion="""<para>
This command copies the &equation; state from the default
&subproblem; in one &mesh; to the default &subproblem; in another,
meaning that the same &equations; will be active in the
<varname>target</varname> &subproblem; as in the
<varname>source</varname> &subproblem;.
</para>"""
) )
#=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=#
# Field definition and activation
fieldmenu = meshmenu.addItem(oofmenu.OOFMenuItem(
'Field',
help='Define and activate Fields.',
discussion="""<para>
The <command>Field</command> menu contains the commands that
define and set the properties of &fields; on &meshes;.
</para>"""))
def _defineField(menuitem, mesh, field):
## This has been rewritten to use the default subproblem, for
## backwards compatibility. The menuitem is deprecated -- use
## Subproblem.Field.Define instead.
if parallel_enable.enabled():
meshIPC.ipcfieldmenu.Define(mesh=mesh,field=field)
else:
meshcontext = ooflib.engine.mesh.meshes[mesh]
subpcontext = meshcontext.get_default_subproblem()
subpcontext.reserve()
subpcontext.begin_writing()
try:
subpcontext.getObject().define_field(field)
finally:
subpcontext.end_writing()
subpcontext.cancel_reservation()
switchboard.notify("field defined", subpcontext.path(), field.name(), 1)
subpcontext.autoenableBCs()
subpcontext.changed("Field defined.")
meshcontext.setStatus(meshstatus.Unsolved("New fields defined"))
def _undefineField(menuitem, mesh, field):
## Also deprecated. Use Subproblem.Field.Undefine instead.
if parallel_enable.enabled():
meshIPC.ipcfieldmenu.Undefine(mesh=mesh,field=field)
else:
meshcontext = ooflib.engine.mesh.meshes[mesh]
subpcontext = meshcontext.get_default_subproblem()
subpcontext.reserve()
subpcontext.begin_writing()
try:
subpcontext.getObject().undefine_field(field)
# After undefining a Field, the data cache in the mesh has
# the wrong number of dofs in it. We could in principle
# delete the correct dofs from each cache entry, but it
# might be slow (especially for a disk cache). The
# simpler thing to do is to just delete the whole cache.
subpcontext.getParent().clearDataCache()
finally:
subpcontext.end_writing()
subpcontext.cancel_reservation()
subpcontext.autoenableBCs()
subpcontext.changed("Field undefined.")
switchboard.notify("field defined", subpcontext.path(), field.name(), 0)
meshcontext.setStatus(meshstatus.Unsolved("New fields defined"))
fieldmenu.addItem(oofmenu.OOFMenuItem(
'Define',
callback=_defineField,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
meshparameters.FieldParameter('field', tip=parameter.emptyTipString)
],
help="Define a Field on a Mesh. Only defined Fields may be given values.",
## TODO: Fix discussion
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/definefield.xml')
))
fieldmenu.addItem(oofmenu.OOFMenuItem(
'Undefine',
callback=_undefineField,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
meshparameters.FieldParameter('field', tip=parameter.emptyTipString)
],
help="Undefine a Field on a Mesh. Only defined Fields may be given values.",
discussion="""<para>
Undefine a &field; on a &mesh;'s default &subproblem;. This frees
the memory used to store the &field; components and destroys their
values, unless other &subproblems; are using the &field;. See <xref
linkend='MenuItem-OOF.Mesh.Field.Define'/>. DEPRECATED.
</para>"""
))
def _activateField(menuitem, mesh, field):
activation = False
if parallel_enable.enabled():
meshIPC.ipcfieldmenu.Activate(mesh=mesh,field=field)
else:
meshcontext = ooflib.engine.mesh.meshes[mesh]
subpcontext = meshcontext.get_default_subproblem()
subpcontext.reserve()
subpcontext.begin_writing()
try:
subp = subpcontext.getObject()
if subp.is_defined_field(field):
subp.activate_field(field)
activation = True
else:
reporter.report(
"You must define a Field before you can activate it.")
finally:
subpcontext.end_writing()
subpcontext.cancel_reservation()
if activation:
subpcontext.autoenableBCs()
switchboard.notify("field activated", subpcontext.path(),
field.name(), 1)
subpcontext.changed("Field activated.")
meshcontext.setStatus(meshstatus.Unsolved("Field activated"))
def _deactivateField(menuitem, mesh, field):
deactivation = False
if parallel_enable.enabled():
meshIPC.ipcfieldmenu.Deactivate(mesh=mesh,field=field)
else:
meshcontext = ooflib.engine.mesh.meshes[mesh]
subpcontext = meshcontext.get_default_subproblem()
subpcontext.reserve()
subpcontext.begin_writing()
try:
subp = subpcontext.getObject()
if subp.is_active_field(field):
subp.deactivate_field(field)
deactivation = True
else:
reporter.report(
"You must define and activate a Field before you can deactivate it.")
finally:
subpcontext.end_writing()
subpcontext.cancel_reservation()
if deactivation:
subpcontext.autoenableBCs()
switchboard.notify("field activated", subpcontext.path(),
field.name(), 0)
subpcontext.changed("Field deactivated.")
meshcontext.setStatus(meshstatus.Unsolved("Field deactivated"))
fieldmenu.addItem(oofmenu.OOFMenuItem(
"Activate",
callback=_activateField,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
meshparameters.FieldParameter('field', tip=parameter.emptyTipString)
],
help="Activate a Field. The solver finds the values of active Fields.",
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/activatefield.xml')
))
fieldmenu.addItem(oofmenu.OOFMenuItem(
'Deactivate',
callback=_deactivateField,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
meshparameters.FieldParameter('field', tip=parameter.emptyTipString)
],
help="Deactivate a Field. The solver finds the values of active Fields.",
discussion="""<para>
Deactivating a &field; means that its values will not be found
when the &mesh; is <link
linkend="MenuItem-OOF.Mesh.Solve">solved</link>. See <xref
linkend='MenuItem-OOF.Mesh.Field.Activate'/>.
</para>"""
))
def _inPlaneField(menuitem, mesh, field):
if parallel_enable.enabled():
meshIPC.ipcfieldmenu.In_Plane(mesh=mesh,field=field)
else:
meshcontext = ooflib.engine.mesh.meshes[mesh]
meshcontext.reserve()
meshcontext.begin_writing()
try:
meshcontext.set_in_plane_field(field, 1)
finally:
meshcontext.end_writing()
meshcontext.cancel_reservation()
switchboard.notify("field inplane", meshcontext.path(), field.name(), 1)
meshcontext.changed("Field planarity changed.")
# meshcontext.setStatus(meshstatus.Unsolved("Field planarity changed"))
def _outOfPlaneField(menuitem, mesh, field):
if parallel_enable.enabled():
meshIPC.ipcfieldmenu.Out_of_Plane(mesh=mesh,field=field)
else:
meshcontext = ooflib.engine.mesh.meshes[mesh]
meshcontext.reserve()
meshcontext.begin_writing()
try:
meshcontext.set_in_plane_field(field, 0)
finally:
meshcontext.end_writing()
meshcontext.cancel_reservation()
switchboard.notify("field inplane", meshcontext.path(),
field.name(), 0)
meshcontext.changed("Field planarity changed.")
# meshcontext.setStatus(meshstatus.Unsolved("Field planarity changed"))
if config.dimension() == 2:
fieldmenu.addItem(oofmenu.OOFMenuItem(
'In_Plane',
callback=_inPlaneField,
params=[
whoville.WhoParameter(
'mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
meshparameters.FieldParameter(
'field', tip=parameter.emptyTipString)
],
help="In-plane Fields are constrained to have no z-components.",
discussion="""<para>
This command invokes <link
linkend='Section-Concepts-Mesh-3D'>generalized plane-strain</link>
for the given &field; on all &subproblems; on the given &mesh;.
The out-of-plane derivatives of the &field; are taken to be zero.
        See <xref linkend='MenuItem-OOF.Mesh.Field.Out_of_Plane'/>.
</para>"""
))
fieldmenu.addItem(oofmenu.OOFMenuItem(
'Out_of_Plane',
callback=_outOfPlaneField,
params=[
whoville.WhoParameter(
'mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
meshparameters.FieldParameter(
'field', tip=parameter.emptyTipString)
],
help="Out-of-plane Fields are allowed to have z-components.",
discussion="""<para>
This command disables <link
linkend='Section-Concepts-Mesh-3D'>generalized plane-strain</link>
for the given &field; on all &subproblems; on the given &mesh;.
The out-of-plane derivatives of the &field; will be computed.
Generally, it's necessary to <link
linkend='MenuItem-OOF.Mesh.Equation.Activate'>activate</link> a
<link
linkend='Section-Concepts-Mesh-Equation-PlaneFlux'>plane-flux
equation</link> in order to solve for the out-of-plane derivatives
of a &field;.
</para>"""
))
#=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=#
# Field initialization
# Field initialization often involves FloatBC initialization, which is
# really just another type of Field initialization.
## Assign an initializer to a field. This doesn't actually *apply*
## the initializer, so field values at nodes aren't changed.
def initField(menuitem, mesh, field, initializer):
# This routine is repeated almost verbatim in meshIO.py, where
# it's used to initialize meshes loaded from files.
meshcontext = ooflib.engine.mesh.meshes[mesh]
meshcontext.reserve()
meshcontext.begin_writing()
try:
meshcontext.set_field_initializer(field, initializer)
finally:
meshcontext.end_writing()
meshcontext.cancel_reservation()
switchboard.notify("field initializer set")
# for subproblem in meshcontext.subproblems():
# if field in subproblem.all_fields():
# subproblem.changed()
# switchboard.notify("redraw")
meshmenu.addItem(oofmenu.OOFMenuItem(
'Set_Field_Initializer',
callback = initField,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
meshparameters.FieldParameter('field',
tip=parameter.emptyTipString,
outofplane=True),
fieldinit.FieldInitParameter('initializer',
tip=parameter.emptyTipString)
],
help="Determine how to assign values to a Field on a Mesh.",
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/initfield.xml')
))
# When using subproblems, the field initializers have to be copied
# separately from the Field state, because the initializers live
# in the mesh and the state flags live in the subproblems.
def _copyFieldInits(menuitem, source, target):
if source == target:
return
if parallel_enable.enabled():
        meshIPC.ipcmeshmenu.Copy_Field_Initializers(source=source,target=target)
return
notifications=[]
source_mesh = ooflib.engine.mesh.meshes[source]
target_mesh = ooflib.engine.mesh.meshes[target]
source_mesh.begin_reading()
target_mesh.reserve()
target_mesh.begin_writing()
try:
# Copy Field initializers
source_fields = source_mesh.all_subproblem_fields()
target_fields = target_mesh.all_subproblem_fields()
for f in source_fields:
if f in target_fields:
try:
initializer=source_mesh.initializers[f]
except KeyError:
pass
else:
target_mesh.set_field_initializer(f, initializer)
notifications.append(("field initialized"))
# Copy FloatBC inititalizers
for bcname in source_mesh.allBndyCondNames():
initializer = source_mesh.get_bc_initializer(bcname)
if initializer:
# Check that the target mesh has a FloatBC with this name
try:
targetbc = target_mesh.getBdyCondition(bcname)
except KeyError:
pass
else:
if isinstance(targetbc, bdycondition.FloatBC):
target_mesh.set_bc_initializer(bcname, initializer)
finally:
source_mesh.end_reading()
target_mesh.end_writing()
target_mesh.cancel_reservation()
for n in notifications:
switchboard.notify(*n)
meshmenu.addItem(oofmenu.OOFMenuItem(
'Copy_Field_Initializers',
callback=_copyFieldInits,
params=[whoville.WhoParameter('source', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
whoville.WhoParameter('target', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString)],
help="Copy all of the relevant Field initializers from one Mesh to another.",
discussion="""<para>
Copy all of the &field; initialization functions from the source
&mesh; to the target &mesh;. This does <emphasis>not</emphasis> actually
initialize the &fields; in the target &mesh;. If a &field; is not
defined in the target &mesh;, its initializer will not be copied.
</para>"""
))
def _clearFieldInit(menuitem, mesh, field):
themesh = ooflib.engine.mesh.meshes[mesh]
themesh.reserve()
themesh.begin_writing()
try:
themesh.remove_initializer(field)
finally:
themesh.end_writing()
themesh.cancel_reservation()
switchboard.notify("field initializer set")
meshmenu.addItem(oofmenu.OOFMenuItem(
'Clear_Field_Initializer',
callback=_clearFieldInit,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
meshparameters.FieldParameter('field',
outofplane=True,
tip=parameter.emptyTipString)],
help="Remove the initializer for the given Field.",
discussion="""<para>
Remove the initializer for the given &field; from the given
&mesh;. This does not change the values of the &field; itself,
but prevents it from being reinitialized later.
</para>"""
))
def _clearFieldInits(menuitem, mesh):
themesh = ooflib.engine.mesh.meshes[mesh]
themesh.reserve()
themesh.begin_writing()
try:
for fld in themesh.all_subproblem_fields():
themesh.remove_initializer(fld)
themesh.remove_all_bc_initializers()
finally:
themesh.end_writing()
themesh.cancel_reservation()
switchboard.notify("field initializer set")
meshmenu.addItem(oofmenu.OOFMenuItem(
'Clear_Field_Initializers',
callback=_clearFieldInits,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString)],
help="Remove all Field initializers from the current Mesh.",
discussion="""<para>
Remove all the &field; and boundary condition initializers from
the given &mesh;. This does not change the values of the &fields;
themselves, but prevents them from being reinitialized later.
</para>"""
))
def applyFieldInits(menuitem, mesh):
themesh = ooflib.engine.mesh.meshes[mesh]
themesh.reserve()
themesh.begin_writing()
try:
themesh.initialize_fields(themesh.getObject().getCurrentTime())
themesh.initialize_bcs(themesh.getObject().getCurrentTime())
finally:
themesh.end_writing()
themesh.cancel_reservation()
switchboard.notify("mesh data changed", themesh)
themesh.setStatus(meshstatus.Unsolved("Fields initialized."))
switchboard.notify("redraw")
meshmenu.addItem(oofmenu.OOFMenuItem(
'Apply_Field_Initializers',
callback=applyFieldInits,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString)],
help="Initialize all Fields.",
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/applyinit.xml')
))
def _applyFieldInitsAtTime(menuitem, mesh, time):
themesh = ooflib.engine.mesh.meshes[mesh]
themesh.reserve()
themesh.begin_writing()
try:
themesh.initialize_fields(time)
themesh.initialize_bcs(time)
finally:
themesh.end_writing()
themesh.cancel_reservation()
switchboard.notify("mesh data changed", themesh)
themesh.setStatus(meshstatus.Unsolved("Fields initialized."))
switchboard.notify("draw at time", time)
meshmenu.addItem(oofmenu.OOFMenuItem(
'Apply_Field_Initializers_at_Time',
callback=_applyFieldInitsAtTime,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
parameter.FloatParameter('time', 0.0,
tip=parameter.emptyTipString)],
help="Initialize all Fields and reset the Mesh's time.",
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/applyinittime.xml')
))
#=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=#
# Equations
eqnmenu = meshmenu.addItem(oofmenu.OOFMenuItem('Equation',
help='Activate equations.'))
def _activateEquation(menuitem, mesh, equation):
if parallel_enable.enabled():
meshIPC.ipceqnmenu.Activate(mesh=mesh,equation=equation)
else:
meshcontext = ooflib.engine.mesh.meshes[mesh]
subpcontext = meshcontext.get_default_subproblem()
subpcontext.reserve()
subpcontext.begin_writing()
try:
subpcontext.getObject().activate_equation(equation)
finally:
subpcontext.end_writing()
subpcontext.cancel_reservation()
subpcontext.autoenableBCs()
switchboard.notify('equation activated', subpcontext.path(),
equation.name(), 1)
subpcontext.changed("Equation activated.")
def _deactivateEquation(menuitem, mesh, equation):
if parallel_enable.enabled():
meshIPC.ipceqnmenu.Deactivate(mesh=mesh,equation=equation)
else:
meshcontext = ooflib.engine.mesh.meshes[mesh]
subpcontext = meshcontext.get_default_subproblem()
subpcontext.reserve()
subpcontext.begin_writing()
try:
subpcontext.getObject().deactivate_equation(equation)
finally:
subpcontext.end_writing()
subpcontext.cancel_reservation()
switchboard.notify('equation activated', subpcontext.path(),
equation.name(), 0)
subpcontext.autoenableBCs()
subpcontext.changed("Equation deactivated.")
eqnmenu.addItem(oofmenu.OOFMenuItem(
'Activate',
callback=_activateEquation,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
meshparameters.EquationParameter('equation',
tip=parameter.emptyTipString)
],
help="Activate an Equation. The Solver solves the active Equations.",
discussion="""<para>
Activate the given &equation; on the default &subproblem; on the
given &mesh;. Activated &equations; are the ones that will be
<link linkend='MenuItem-OOF.Mesh.Solve'>solved</link>. For a
solution to be possible, the active &equations; must involve
&fluxes; that are produced by &properties; in the &mesh;, and
those &properties; must couple to <link
linkend='MenuItem-OOF.Mesh.Field.Define'>defined</link> &fields;.
There must be as many active &equations; as there are <link
linkend='MenuItem-OOF.Mesh.Field.Activate'>active</link> &fields;</para>
<para> DEPRECATED. Use <xref
linkend="MenuItem-OOF.Subproblem.Equation.Activate"/> instead.
</para>"""
))
eqnmenu.addItem(oofmenu.OOFMenuItem(
'Deactivate',
callback=_deactivateEquation,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
meshparameters.EquationParameter('equation',
tip=parameter.emptyTipString)
],
help="Deactivate an Equation. The Solver solves the active Equations.",
discussion="""<para>
Deactivate the given &equation; on the default &subproblem; on the
given &mesh;. See <xref
linkend='MenuItem-OOF.Mesh.Equation.Deactivate'/>.</para>
<para> DEPRECATED. USE <xref
linkend="MenuItem-OOF.Subproblem.Equation.Deactivate"/> instead.
</para>"""
))
###########################################
# Cross sections
csmenu = meshmenu.addItem(oofmenu.OOFMenuItem(
'Cross_Section',
help="Create and manipulate Mesh cross sections for plotting.",
discussion=xmlmenudump.loadFile("DISCUSSIONS/engine/menu/cross_section.xml")
))
def csnameresolver(param, name):
if param.automatic():
basename = 'cs'
else:
basename = name
meshname = param.group['mesh'].value
if meshname is not None:
meshpath = labeltree.makePath(meshname)
meshctxt = ooflib.engine.mesh.meshes[meshpath]
return meshctxt.uniqueCSName(basename)
csnameparam = parameter.AutomaticNameParameter(
'name', value=automatic.automatic, tip="Name of the cross section.",
resolver=csnameresolver)
csmeshparam = whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString)
csparamgroup = parameter.ParameterGroup(csnameparam, csmeshparam)
def _newCS(menuitem, mesh, name, cross_section):
meshctxt = ooflib.engine.mesh.meshes[mesh]
meshctxt.reserve()
meshctxt.begin_writing()
try:
meshctxt.addCrossSection(name, cross_section)
finally:
meshctxt.end_writing()
meshctxt.cancel_reservation()
switchboard.notify("cross sections changed")
switchboard.notify("redraw")
csmenu.addItem(oofmenu.OOFMenuItem(
'New',
callback=_newCS,
params=csparamgroup + [
parameter.RegisteredParameter('cross_section',
meshcrosssection.MeshCrossSection,
tip="New cross section object.") ],
help="Create a new cross section on a Mesh.",
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/new_cross_section.xml')
))
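# Usage sketch (comment only, never executed): adding a named cross section
# from a script.  The mesh path, the automatic name, and the
# StraightCrossSection(...) constructor are illustrative assumptions; the
# concrete MeshCrossSection subclasses are registered elsewhere.
#
#   OOF.Mesh.Cross_Section.New(
#       mesh='micro:skel:mesh', name=automatic,
#       cross_section=StraightCrossSection(start=Point(0,0), end=Point(1,1)))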
def _delCS(menuitem, mesh, name):
meshctxt = ooflib.engine.mesh.meshes[mesh]
meshctxt.reserve()
meshctxt.begin_writing()
try:
meshctxt.removeCrossSection(name)
finally:
meshctxt.end_writing()
meshctxt.cancel_reservation()
switchboard.notify("cross sections changed")
switchboard.notify("redraw")
csmenu.addItem(oofmenu.OOFMenuItem(
'Remove',
callback=_delCS,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
parameter.StringParameter('name', tip='Cross section to remove.')],
help='Delete a cross section from a mesh.',
discussion="""<para>
Delete the cross section named <varname>name</varname> from the &mesh;
named <varname>mesh</varname>.
</para>"""))
def _selectCS(menuitem, mesh, cross_section):
meshctxt = ooflib.engine.mesh.meshes[mesh]
meshctxt.reserve()
meshctxt.begin_writing()
try:
meshctxt.selectCrossSection(cross_section)
finally:
meshctxt.end_writing()
meshctxt.cancel_reservation()
switchboard.notify("cross sections changed")
switchboard.notify("redraw")
csmenu.addItem(oofmenu.OOFMenuItem(
'Select',
callback=_selectCS,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
parameter.StringParameter('cross_section', tip='Cross section to select.')],
help="Select a cross section on a mesh.",
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/select_cs.xml')
))
def _deselectCS(menuitem, mesh):
meshctxt = ooflib.engine.mesh.meshes[mesh]
meshctxt.reserve()
meshctxt.begin_writing()
try:
meshctxt.deselectCrossSection()
finally:
meshctxt.end_writing()
meshctxt.cancel_reservation()
switchboard.notify("cross sections changed")
switchboard.notify("redraw")
csmenu.addItem(oofmenu.OOFMenuItem(
'Deselect',
callback=_deselectCS,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString)],
help="Deselect all cross sections on a mesh.",
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/deselect_cs.xml')
))
def _renameCS(menuitem, mesh, cross_section, name):
meshctxt = ooflib.engine.mesh.meshes[mesh]
meshctxt.reserve()
meshctxt.begin_writing()
try:
meshctxt.renameCrossSection(cross_section, name)
finally:
meshctxt.end_writing()
meshctxt.cancel_reservation()
switchboard.notify("cross sections changed")
csmenu.addItem(oofmenu.OOFMenuItem(
'Rename',
callback=_renameCS,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
parameter.StringParameter('cross_section',
tip='Cross section to rename.'),
parameter.StringParameter('name',
tip='New name for the cross section.')
],
help="Rename a cross section on a mesh.",
discussion="<para>Assign a new name to a cross section.</para>"))
def _editCS(menuitem, mesh, name, cross_section):
meshctxt = ooflib.engine.mesh.meshes[mesh]
meshctxt.reserve()
meshctxt.begin_writing()
try:
meshctxt.replaceCrossSection(name, cross_section)
finally:
meshctxt.end_writing()
meshctxt.cancel_reservation()
switchboard.notify("cross sections changed")
switchboard.notify("redraw")
csmenu.addItem(oofmenu.OOFMenuItem(
'Edit',
callback=_editCS,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
parameter.StringParameter('name', tip='Cross section to edit.'),
parameter.RegisteredParameter('cross_section',
meshcrosssection.MeshCrossSection,
tip='New value for the cross section.')
],
help="Reparametrize a cross section on a mesh.",
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/edit_cs.xml')
))
def _copyCS(menuitem, current, cross_section, mesh, name):
sourcemesh = ooflib.engine.mesh.meshes[current]
sourcemesh.begin_reading()
try:
cs = sourcemesh.getCrossSection(cross_section).clone()
finally:
sourcemesh.end_reading()
targetmesh = ooflib.engine.mesh.meshes[mesh]
targetmesh.reserve()
targetmesh.begin_writing()
try:
targetmesh.addCrossSection(name,cs)
finally:
targetmesh.end_writing()
targetmesh.cancel_reservation()
switchboard.notify("cross sections changed")
csmenu.addItem(oofmenu.OOFMenuItem(
'Copy',
callback=_copyCS,
params=[whoville.WhoParameter('current', ooflib.engine.mesh.meshes,
tip='Mesh to copy the cross section from.'),
parameter.StringParameter('cross_section',
tip='Cross section to copy.')
]
+ parameter.ParameterGroup(
whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip='Mesh to copy the cross section to.'),
parameter.AutomaticNameParameter('name',
value=automatic.automatic,
resolver=csnameresolver,
tip='Name of the copied cross section.')),
help="Copy a cross section, possibly to a different Mesh.",
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/copy_cs.xml')
))
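# Usage sketch (comment only): copying a cross section to another mesh follows
# the parameter group above; both mesh paths and the section name 'cs' are
# made-up examples.
#
#   OOF.Mesh.Cross_Section.Copy(current='micro:skel:meshA', cross_section='cs',
#                               mesh='micro:skel:meshB', name=automatic)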
#######################################
def saveMesh(menuitem, filename, mode, format, mesh):
from ooflib.engine.IO import meshIO # avoids import loop
meshcontext = ooflib.engine.mesh.meshes[mesh]
meshcontext.begin_reading()
try:
if meshcontext.outOfSync():
raise ooferror.ErrUserError(
"The Mesh must be rebuilt before it can be saved.")
meshpath = labeltree.makePath(mesh)
skelpath = meshpath[:2]
skelcontext = skeletoncontext.skeletonContexts[skelpath]
if format==datafile.ABAQUS:
meshIO.writeABAQUSfromMesh(filename, mode.string(), meshcontext)
else:
dfile = datafile.writeDataFile(filename, mode.string(), format)
microstructureIO.writeMicrostructure(dfile,
skelcontext.getParent())
skeletonIO.writeSkeleton(dfile, skelcontext)
meshIO.writeMesh(dfile, meshcontext)
dfile.close()
finally:
meshcontext.end_reading()
OOF.File.Save.addItem(oofmenu.OOFMenuItem(
'Mesh',
callback = saveMesh,
ordering=80,
params = [
filenameparam.WriteFileNameParameter('filename', tip="Name of the file."),
filenameparam.WriteModeParameter(
'mode', tip="'w' to (over)write and 'a' to append."),
enum.EnumParameter('format', datafile.DataFileFormatExt, datafile.ASCII,
tip="Format of the file."),
SyncMeshParameter('mesh', tip='Name of the Mesh.')],
help="Save a Mesh to a file.",
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/savemesh.xml')
))
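# Usage sketch (comment only): saving a mesh from a script.  The filename and
# mesh path are examples, and 'ascii' is assumed to be an acceptable spelling
# of one of the DataFileFormatExt values.
#
#   OOF.File.Save.Mesh(filename='mesh.dat', mode='w', format='ascii',
#                      mesh='micro:skel:mesh')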
def _fixmenu(*args):
if ooflib.engine.mesh.meshes.nActual() == 0:
OOF.File.Save.Mesh.disable()
else:
OOF.File.Save.Mesh.enable()
_fixmenu()
switchboard.requestCallback(('new who', 'Mesh'), _fixmenu)
switchboard.requestCallback(('remove who', 'Mesh'), _fixmenu)
##########################
def modifyMesh(menuitem, mesh, modifier):
    # The structure is the same as "skeletonmenu._modify()".
meshcontext = ooflib.engine.mesh.meshes[mesh]
meshcontext.reserve()
meshcontext.begin_writing()
try:
modifier.apply(meshcontext)
finally:
meshcontext.end_writing()
meshcontext.cancel_reservation()
modifier.signal(meshcontext)
modifier.setStatus(meshcontext)
switchboard.notify('Mesh modified', mesh, modifier) # caught by Mesh page
OOF.Mesh.addItem(oofmenu.OOFMenuItem(
'Modify',
callback=modifyMesh,
params=[
whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes, tip=parameter.emptyTipString),
parameter.RegisteredParameter('modifier', meshmod.MeshModification,
tip="Mesh modifier.")
],
help="Make changes to a Mesh.",
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/modify_mesh.xml')
))
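# Usage sketch (comment only): applying a modifier from a script.  The
# SomeMeshModification name is hypothetical -- the real MeshModification
# subclasses are registered in meshmod, not in this file.
#
#   OOF.Mesh.Modify(mesh='micro:skel:mesh',
#                   modifier=SomeMeshModification())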
#=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=#
# SC Patch Recovery
if config.devel()>=1:
def recoverFluxes(menuitem, mesh):
meshcontext = ooflib.engine.mesh.meshes[mesh]
skel = meshcontext.getSkeleton()
femesh = meshcontext.femesh()
femesh.create_scpatch(skel)
femesh.flux_recovery()
OOF.Mesh.addItem(oofmenu.OOFMenuItem(
'SCPRecovery',
callback=recoverFluxes,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString)],
help="Superconvergent Patch Recovery."))
#=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=#
# Putting this item in meshdatacache.spy causes a nasty import loop.
from ooflib.SWIG.engine import meshdatacache
def _dummy(*args, **kwargs): pass
settingsmenu.addItem(oofmenu.OOFMenuItem(
'Data_Cache_Type',
callback=_dummy, # Just setting the parameter is enough.
params = [meshdatacache.cacheTypeParam],
help="Set the storage method for time step data in new Meshes.",
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/datacachetype.xml')
))
def _consistencyTolerance(menuitem, tolerance, max_iterations):
subproblemcontext.consistencyTolerance = tolerance
evolve.maxconsistencysteps = max_iterations
settingsmenu.addItem(oofmenu.OOFMenuItem(
"SelfConsistency",
callback=_consistencyTolerance,
params=[
parameter.FloatParameter(
"tolerance",
subproblemcontext.consistencyTolerance,
tip="Relative tolerance for consistency."),
parameter.IntParameter(
"max_iterations",
evolve.maxconsistencysteps,
tip="Maximum number of iterations to perform.")
],
help="Set the tolerance and iteration limit used when self-consistently solving multiple subproblems simultaneously.",
discussion=xmlmenudump.loadFile(
'DISCUSSIONS/engine/menu/selfconsistency.xml')))
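# Usage sketch (comment only): the menu path below assumes settingsmenu is
# rooted at OOF.Settings.Mesh_Defaults, which is an assumption based on the
# menu construction earlier in this file; the values are examples.
#
#   OOF.Settings.Mesh_Defaults.SelfConsistency(tolerance=1.e-6,
#                                              max_iterations=50)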
#=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=#
from ooflib.SWIG.engine import properties
def _numericalDiff(menuitem, epsilon):
properties.cvar.deriv_eps = epsilon
settingsmenu.addItem(oofmenu.OOFMenuItem(
"Numerical_Differentiation",
callback=_numericalDiff,
params=[
parameter.FloatParameter(
"epsilon",
properties.cvar.deriv_eps,
tip="Increment for numerical differentiation")],
help="Set the increment used for approximate derivatives when exact derivatives are not available.",
discussion=xmlmenudump.loadFile(
'DISCUSSIONS/engine/menu/numericaldiff.xml')
))
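# Usage sketch (comment only): same menu-path assumption as for
# SelfConsistency above; the epsilon value is just an example.
#
#   OOF.Settings.Mesh_Defaults.Numerical_Differentiation(epsilon=1.e-8)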
#=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=#
def _removeAllSolvers(menuitem, mesh):
meshctxt = ooflib.engine.mesh.meshes[mesh]
for subprob in meshctxt.subproblems():
subprob.begin_writing()
try:
subprob.solver_mode = None
finally:
subprob.end_writing()
switchboard.notify("subproblem solvers changed")
OOF.Mesh.addItem(oofmenu.OOFMenuItem(
'Remove_All_Solvers',
callback=_removeAllSolvers,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString)],
help='Remove the Solvers from all Subproblems.',
discussion=xmlmenudump.loadFile('DISCUSSIONS/engine/menu/removesolvers.xml')
))
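# Usage sketch (comment only; the mesh path is a made-up example):
#
#   OOF.Mesh.Remove_All_Solvers(mesh='micro:skel:mesh')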
#=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=#
def _copyAllSolvers(menuitem, source, target):
sourceMesh = ooflib.engine.mesh.meshes[source]
targetMesh = ooflib.engine.mesh.meshes[target]
sourceMesh.begin_reading()
solvers = {}
try:
for subp in sourceMesh.subproblems():
if subp.solver_mode is not None:
solvers[subp.name()] = subp.solver_mode.clone()
finally:
sourceMesh.end_reading()
meshpath = targetMesh.path()
for name, solver in solvers.items():
subppath = meshpath + ":" + name
try:
targetsubp = ooflib.engine.subproblemcontext.subproblems[subppath]
except KeyError:
pass
else:
subproblemmenu.setSolver(menuitem, subppath, solver)
OOF.Mesh.addItem(oofmenu.OOFMenuItem(
'Copy_All_Solvers',
callback=_copyAllSolvers,
params=[
whoville.WhoParameter('source',
ooflib.engine.mesh.meshes,
tip="Mesh to copy the solvers from."),
whoville.WhoParameter('target',
ooflib.engine.mesh.meshes,
tip="Mesh to which to copy the solvers.")
],
help="Copy all solvers from one mesh to another.",
discussion=xmlmenudump.loadFile(
'DISCUSSIONS/engine/menu/copyallsolvers.xml')
))
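# Usage sketch (comment only; both mesh paths are made-up examples):
#
#   OOF.Mesh.Copy_All_Solvers(source='micro:skel:meshA',
#                             target='micro:skel:meshB')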
#=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=#
def _setSubproblemOrder(menuitem, mesh, subproblems):
meshctxt = ooflib.engine.mesh.meshes[mesh]
for order,subprobname in enumerate(subproblems):
subprob = meshctxt.get_subproblem(subprobname)
subprob.solveOrder = order
switchboard.notify("subproblems reordered", meshctxt)
OOF.Mesh.addItem(oofmenu.OOFMenuItem(
'ReorderSubproblems',
callback=_setSubproblemOrder,
params=[whoville.WhoParameter(
'mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
parameter.ListOfStringsParameter(
'subproblems',
tip='A list of Subproblem names in the order in which they should be solved.')
],
help="Set the order in which subproblems will be solved.",
discussion=xmlmenudump.loadFile(
'DISCUSSIONS/engine/menu/reordersubp.xml')
))
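# Usage sketch (comment only): the subproblem names below are assumptions;
# they are listed in the order in which they should be solved.
#
#   OOF.Mesh.ReorderSubproblems(mesh='micro:skel:mesh',
#                               subproblems=['thermal', 'elastic', 'default'])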
#=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=##=--=#
import time
def _solve(menuitem, mesh, endtime):
meshctxt = ooflib.engine.mesh.meshes[mesh]
meshctxt.reserve()
meshctxt.begin_writing()
try:
if not meshctxt.status.solvable:
raise ooferror.ErrUserError('Mesh is not solvable! '
+ meshctxt.status.getDetails())
t = time.clock()
evolve.evolve(meshctxt, endtime)
reporter.report("Elapsed time:", time.clock()-t, "seconds")
finally:
meshctxt.end_writing()
meshctxt.cancel_reservation()
switchboard.notify("mesh solved", meshctxt)
switchboard.notify("draw at time", meshctxt.getCurrentTime())
OOF.Mesh.addItem(oofmenu.OOFMenuItem(
'Solve',
callback=_solve,
params=[whoville.WhoParameter('mesh', ooflib.engine.mesh.meshes,
tip=parameter.emptyTipString),
parameter.FloatParameter('endtime', tip='Ending time.')
],
help='Solve or evolve the mesh.',
discussion=xmlmenudump.loadFile("DISCUSSIONS/engine/menu/solve.xml")
))
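# Usage sketch (comment only): evolving a mesh to a target time from a script,
# assuming solvers have already been assigned to its subproblems.  The mesh
# path and end time are illustrative.
#
#   OOF.Mesh.Solve(mesh='micro:skel:mesh', endtime=10.0)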
|
[
"ooflib.common.IO.whoville.AutoWhoNameParameter",
"ooflib.SWIG.common.switchboard.notify",
"ooflib.SWIG.common.config.devel",
"ooflib.common.IO.parameter.ListOfStringsParameter",
"ooflib.engine.IO.meshparameters.FieldParameter",
"ooflib.common.IO.parameter.StringParameter",
"ooflib.engine.IO.meshIPC.ipcmeshmenu.Rename",
"ooflib.engine.IO.subproblemmenu.setSolver",
"ooflib.engine.IO.skeletonIO.writeSkeleton",
"string.join",
"ooflib.engine.IO.meshIPC.ipcfieldmenu.Define",
"ooflib.engine.IO.meshIPC.ipcmeshmenu.Copy",
"ooflib.common.IO.parameter.BooleanParameter",
"ooflib.common.IO.parameter.IntParameter",
"ooflib.common.IO.parameter.ParameterGroup",
"time.clock",
"ooflib.common.IO.parameter.FloatParameter",
"ooflib.engine.IO.meshIO.writeMesh",
"ooflib.common.IO.filenameparam.WriteModeParameter",
"ooflib.engine.fieldinit.FieldInitParameter",
"ooflib.common.IO.parameter.RegisteredParameter",
"ooflib.engine.evolve.evolve",
"ooflib.common.IO.oofmenu.OOFMenuItem",
"ooflib.common.enum.EnumParameter",
"ooflib.engine.IO.meshIPC.ipcmeshmenu.Copy_Equation_State",
"ooflib.engine.IO.meshIPC.ipcfieldmenu.Activate",
"ooflib.common.IO.xmlmenudump.loadFile",
"ooflib.common.labeltree.makePath",
"ooflib.common.IO.parameter.AutomaticNameParameter",
"ooflib.engine.meshstatus.Unsolved",
"ooflib.common.IO.whoville.WhoParameter",
"ooflib.engine.IO.meshIPC.ipcfieldmenu.Deactivate",
"ooflib.common.IO.reporter.report",
"ooflib.engine.IO.meshIPC.ipcmeshmenu.Delete",
"ooflib.SWIG.common.switchboard.requestCallback",
"ooflib.SWIG.engine.masterelement.getMasterElementFromEnum",
"ooflib.SWIG.common.ooferror.ErrUserError",
"ooflib.engine.IO.meshIPC.ipcfieldmenu.Undefine",
"ooflib.common.parallel_enable.enabled",
"ooflib.engine.IO.meshIPC.ipcfieldmenu.In_Plane",
"ooflib.engine.IO.meshIPC.ipcmeshmenu.Copy_Field_State",
"ooflib.SWIG.common.config.dimension",
"ooflib.SWIG.engine.elementshape.enumClasses.values",
"ooflib.common.IO.whoville.WhoNameParameter",
"ooflib.common.IO.filenameparam.WriteFileNameParameter",
"ooflib.engine.IO.meshparameters.EquationParameter",
"ooflib.engine.IO.meshIPC.ipceqnmenu.Deactivate",
"ooflib.engine.IO.meshIPC.ipcfieldmenu.Out_of_Plane",
"ooflib.engine.IO.meshIPC.ipceqnmenu.Activate"
] |
[((1733, 1758), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (1756, 1758), False, 'from ooflib.common import parallel_enable\n'), ((6190, 6256), 'ooflib.SWIG.common.switchboard.requestCallback', 'switchboard.requestCallback', (['"""new master element"""', 'buildNewMeshCmd'], {}), "('new master element', buildNewMeshCmd)\n", (6217, 6256), False, 'from ooflib.SWIG.common import switchboard\n'), ((43154, 43285), 'ooflib.common.IO.parameter.AutomaticNameParameter', 'parameter.AutomaticNameParameter', (['"""name"""'], {'value': 'automatic.automatic', 'tip': '"""Name of the cross section."""', 'resolver': 'csnameresolver'}), "('name', value=automatic.automatic, tip=\n 'Name of the cross section.', resolver=csnameresolver)\n", (43186, 43285), False, 'from ooflib.common.IO import parameter\n'), ((43305, 43396), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (43326, 43396), False, 'from ooflib.common.IO import whoville\n'), ((43444, 43494), 'ooflib.common.IO.parameter.ParameterGroup', 'parameter.ParameterGroup', (['csnameparam', 'csmeshparam'], {}), '(csnameparam, csmeshparam)\n', (43468, 43494), False, 'from ooflib.common.IO import parameter\n'), ((51882, 51940), 'ooflib.SWIG.common.switchboard.requestCallback', 'switchboard.requestCallback', (["('new who', 'Mesh')", '_fixmenu'], {}), "(('new who', 'Mesh'), _fixmenu)\n", (51909, 51940), False, 'from ooflib.SWIG.common import switchboard\n'), ((51941, 52002), 'ooflib.SWIG.common.switchboard.requestCallback', 'switchboard.requestCallback', (["('remove who', 'Mesh')", '_fixmenu'], {}), "(('remove who', 'Mesh'), _fixmenu)\n", (51968, 52002), False, 'from ooflib.SWIG.common import switchboard\n'), ((1964, 2357), 'ooflib.common.IO.oofmenu.OOFMenuItem', 'oofmenu.OOFMenuItem', (['"""Mesh"""'], {'cli_only': '(1)', 'help': '"""Tools for creating and manipulating Meshes."""', 'discussion': '"""<para>\n The <command>Mesh</command> menu contains tools for creating and\n manipulating finite element &meshes;, including methods for\n defining &fields; and determining which &equations; to <link\n linkend=\'MenuItem-OOF.Mesh.Solve\'>solve</link>.\n </para>"""'}), '(\'Mesh\', cli_only=1, help=\n \'Tools for creating and manipulating Meshes.\', discussion=\n """<para>\n The <command>Mesh</command> menu contains tools for creating and\n manipulating finite element &meshes;, including methods for\n defining &fields; and determining which &equations; to <link\n linkend=\'MenuItem-OOF.Mesh.Solve\'>solve</link>.\n </para>"""\n )\n', (1983, 2357), False, 'from ooflib.common.IO import oofmenu\n'), ((2407, 2486), 'ooflib.common.IO.oofmenu.OOFMenuItem', 'oofmenu.OOFMenuItem', (['"""Mesh_Defaults"""'], {'help': '"""Default values for Mesh parameters"""'}), "('Mesh_Defaults', help='Default values for Mesh parameters')\n", (2426, 2486), False, 'from ooflib.common.IO import oofmenu\n'), ((3847, 3875), 'ooflib.common.labeltree.makePath', 'labeltree.makePath', (['skeleton'], {}), '(skeleton)\n', (3865, 3875), False, 'from ooflib.common import labeltree\n'), ((4468, 4496), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""redraw"""'], {}), "('redraw')\n", (4486, 4496), False, 'from ooflib.SWIG.common import switchboard\n'), ((6342, 6367), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (6365, 6367), False, 'from 
ooflib.common import parallel_enable\n'), ((6459, 6483), 'ooflib.common.labeltree.makePath', 'labeltree.makePath', (['mesh'], {}), '(mesh)\n', (6477, 6483), False, 'from ooflib.common import labeltree\n'), ((7254, 7279), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (7277, 7279), False, 'from ooflib.common import parallel_enable\n'), ((8327, 8352), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (8350, 8352), False, 'from ooflib.common import parallel_enable\n'), ((12897, 12922), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (12920, 12922), False, 'from ooflib.common import parallel_enable\n'), ((15678, 15706), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""redraw"""'], {}), "('redraw')\n", (15696, 15706), False, 'from ooflib.SWIG.common import switchboard\n'), ((17263, 17288), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (17286, 17288), False, 'from ooflib.common import parallel_enable\n'), ((20593, 20833), 'ooflib.common.IO.oofmenu.OOFMenuItem', 'oofmenu.OOFMenuItem', (['"""Field"""'], {'help': '"""Define and activate Fields."""', 'discussion': '"""<para>\n The <command>Field</command> menu contains the commands that\n define and set the properties of &fields; on &meshes;.\n </para>"""'}), '(\'Field\', help=\'Define and activate Fields.\', discussion\n =\n """<para>\n The <command>Field</command> menu contains the commands that\n define and set the properties of &fields; on &meshes;.\n </para>"""\n )\n', (20612, 20833), False, 'from ooflib.common.IO import oofmenu\n'), ((21056, 21081), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (21079, 21081), False, 'from ooflib.common import parallel_enable\n'), ((21852, 21877), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (21875, 21877), False, 'from ooflib.common import parallel_enable\n'), ((24225, 24250), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (24248, 24250), False, 'from ooflib.common import parallel_enable\n'), ((25292, 25317), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (25315, 25317), False, 'from ooflib.common import parallel_enable\n'), ((27486, 27511), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (27509, 27511), False, 'from ooflib.common import parallel_enable\n'), ((28140, 28165), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (28163, 28165), False, 'from ooflib.common import parallel_enable\n'), ((28776, 28794), 'ooflib.SWIG.common.config.dimension', 'config.dimension', ([], {}), '()\n', (28792, 28794), False, 'from ooflib.SWIG.common import config\n'), ((31454, 31497), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""field initializer set"""'], {}), "('field initializer set')\n", (31472, 31497), False, 'from ooflib.SWIG.common import switchboard\n'), ((32649, 32674), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (32672, 32674), False, 'from ooflib.common import parallel_enable\n'), ((35341, 35384), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""field initializer set"""'], {}), "('field initializer set')\n", (35359, 35384), False, 'from ooflib.SWIG.common import switchboard\n'), ((36459, 36502), 
'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""field initializer set"""'], {}), "('field initializer set')\n", (36477, 36502), False, 'from ooflib.SWIG.common import switchboard\n'), ((37427, 37475), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""mesh data changed"""', 'themesh'], {}), "('mesh data changed', themesh)\n", (37445, 37475), False, 'from ooflib.SWIG.common import switchboard\n'), ((37546, 37574), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""redraw"""'], {}), "('redraw')\n", (37564, 37574), False, 'from ooflib.SWIG.common import switchboard\n'), ((38243, 38291), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""mesh data changed"""', 'themesh'], {}), "('mesh data changed', themesh)\n", (38261, 38291), False, 'from ooflib.SWIG.common import switchboard\n'), ((38362, 38402), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""draw at time"""', 'time'], {}), "('draw at time', time)\n", (38380, 38402), False, 'from ooflib.SWIG.common import switchboard\n'), ((39032, 39091), 'ooflib.common.IO.oofmenu.OOFMenuItem', 'oofmenu.OOFMenuItem', (['"""Equation"""'], {'help': '"""Activate equations."""'}), "('Equation', help='Activate equations.')\n", (39051, 39091), False, 'from ooflib.common.IO import oofmenu\n'), ((39197, 39222), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (39220, 39222), False, 'from ooflib.common import parallel_enable\n'), ((39918, 39943), 'ooflib.common.parallel_enable.enabled', 'parallel_enable.enabled', ([], {}), '()\n', (39941, 39943), False, 'from ooflib.common import parallel_enable\n'), ((43793, 43837), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""cross sections changed"""'], {}), "('cross sections changed')\n", (43811, 43837), False, 'from ooflib.SWIG.common import switchboard\n'), ((43842, 43870), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""redraw"""'], {}), "('redraw')\n", (43860, 43870), False, 'from ooflib.SWIG.common import switchboard\n'), ((44571, 44615), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""cross sections changed"""'], {}), "('cross sections changed')\n", (44589, 44615), False, 'from ooflib.SWIG.common import switchboard\n'), ((44620, 44648), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""redraw"""'], {}), "('redraw')\n", (44638, 44648), False, 'from ooflib.SWIG.common import switchboard\n'), ((45428, 45472), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""cross sections changed"""'], {}), "('cross sections changed')\n", (45446, 45472), False, 'from ooflib.SWIG.common import switchboard\n'), ((45477, 45505), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""redraw"""'], {}), "('redraw')\n", (45495, 45505), False, 'from ooflib.SWIG.common import switchboard\n'), ((46194, 46238), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""cross sections changed"""'], {}), "('cross sections changed')\n", (46212, 46238), False, 'from ooflib.SWIG.common import switchboard\n'), ((46243, 46271), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""redraw"""'], {}), "('redraw')\n", (46261, 46271), False, 'from ooflib.SWIG.common import switchboard\n'), ((46927, 46971), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""cross sections changed"""'], {}), "('cross sections changed')\n", (46945, 46971), False, 'from ooflib.SWIG.common import 
switchboard\n'), ((47864, 47908), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""cross sections changed"""'], {}), "('cross sections changed')\n", (47882, 47908), False, 'from ooflib.SWIG.common import switchboard\n'), ((47913, 47941), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""redraw"""'], {}), "('redraw')\n", (47931, 47941), False, 'from ooflib.SWIG.common import switchboard\n'), ((49102, 49146), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""cross sections changed"""'], {}), "('cross sections changed')\n", (49120, 49146), False, 'from ooflib.SWIG.common import switchboard\n'), ((52444, 52495), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""Mesh modified"""', 'mesh', 'modifier'], {}), "('Mesh modified', mesh, modifier)\n", (52462, 52495), False, 'from ooflib.SWIG.common import switchboard\n'), ((53063, 53077), 'ooflib.SWIG.common.config.devel', 'config.devel', ([], {}), '()\n', (53075, 53077), False, 'from ooflib.SWIG.common import config\n'), ((56158, 56206), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""subproblem solvers changed"""'], {}), "('subproblem solvers changed')\n", (56176, 56206), False, 'from ooflib.SWIG.common import switchboard\n'), ((58386, 58439), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""subproblems reordered"""', 'meshctxt'], {}), "('subproblems reordered', meshctxt)\n", (58404, 58439), False, 'from ooflib.SWIG.common import switchboard\n'), ((59711, 59754), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['"""mesh solved"""', 'meshctxt'], {}), "('mesh solved', meshctxt)\n", (59729, 59754), False, 'from ooflib.SWIG.common import switchboard\n'), ((3747, 3793), 'ooflib.SWIG.engine.masterelement.getMasterElementFromEnum', 'masterelement.getMasterElementFromEnum', (['eltype'], {}), '(eltype)\n', (3785, 3793), False, 'from ooflib.SWIG.engine import masterelement\n'), ((6377, 6425), 'ooflib.engine.IO.meshIPC.ipcmeshmenu.Rename', 'meshIPC.ipcmeshmenu.Rename', ([], {'mesh': 'mesh', 'name': 'name'}), '(mesh=mesh, name=name)\n', (6403, 6425), False, 'from ooflib.engine.IO import meshIPC\n'), ((7289, 7326), 'ooflib.engine.IO.meshIPC.ipcmeshmenu.Delete', 'meshIPC.ipcmeshmenu.Delete', ([], {'mesh': 'mesh'}), '(mesh=mesh)\n', (7315, 7326), False, 'from ooflib.engine.IO import meshIPC\n'), ((8362, 8481), 'ooflib.engine.IO.meshIPC.ipcmeshmenu.Copy', 'meshIPC.ipcmeshmenu.Copy', ([], {'mesh': 'mesh', 'name': 'name', 'copy_field': 'copy_field', 'copy_equation': 'copy_equation', 'copy_bc': 'copy_bc'}), '(mesh=mesh, name=name, copy_field=copy_field,\n copy_equation=copy_equation, copy_bc=copy_bc)\n', (8386, 8481), False, 'from ooflib.engine.IO import meshIPC\n'), ((11565, 11587), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['*n'], {}), '(*n)\n', (11583, 11587), False, 'from ooflib.SWIG.common import switchboard\n'), ((12834, 12889), 'ooflib.SWIG.common.ooferror.ErrUserError', 'ooferror.ErrUserError', (['"""Source and target must differ!"""'], {}), "('Source and target must differ!')\n", (12855, 12889), False, 'from ooflib.SWIG.common import ooferror\n'), ((12932, 12998), 'ooflib.engine.IO.meshIPC.ipcmeshmenu.Copy_Field_State', 'meshIPC.ipcmeshmenu.Copy_Field_State', ([], {'source': 'source', 'target': 'target'}), '(source=source, target=target)\n', (12968, 12998), False, 'from ooflib.engine.IO import meshIPC\n'), ((15552, 15574), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['*n'], {}), 
'(*n)\n', (15570, 15574), False, 'from ooflib.SWIG.common import switchboard\n'), ((15734, 15770), 'ooflib.engine.meshstatus.Unsolved', 'meshstatus.Unsolved', (['"""Copied fields"""'], {}), "('Copied fields')\n", (15753, 15770), False, 'from ooflib.engine import meshstatus\n'), ((17200, 17255), 'ooflib.SWIG.common.ooferror.ErrUserError', 'ooferror.ErrUserError', (['"""Source and target must differ!"""'], {}), "('Source and target must differ!')\n", (17221, 17255), False, 'from ooflib.SWIG.common import ooferror\n'), ((17298, 17367), 'ooflib.engine.IO.meshIPC.ipcmeshmenu.Copy_Equation_State', 'meshIPC.ipcmeshmenu.Copy_Equation_State', ([], {'source': 'source', 'target': 'target'}), '(source=source, target=target)\n', (17337, 17367), False, 'from ooflib.engine.IO import meshIPC\n'), ((19569, 19591), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['*n'], {}), '(*n)\n', (19587, 19591), False, 'from ooflib.SWIG.common import switchboard\n'), ((21091, 21142), 'ooflib.engine.IO.meshIPC.ipcfieldmenu.Define', 'meshIPC.ipcfieldmenu.Define', ([], {'mesh': 'mesh', 'field': 'field'}), '(mesh=mesh, field=field)\n', (21118, 21142), False, 'from ooflib.engine.IO import meshIPC\n'), ((21887, 21940), 'ooflib.engine.IO.meshIPC.ipcfieldmenu.Undefine', 'meshIPC.ipcfieldmenu.Undefine', ([], {'mesh': 'mesh', 'field': 'field'}), '(mesh=mesh, field=field)\n', (21916, 21940), False, 'from ooflib.engine.IO import meshIPC\n'), ((24260, 24313), 'ooflib.engine.IO.meshIPC.ipcfieldmenu.Activate', 'meshIPC.ipcfieldmenu.Activate', ([], {'mesh': 'mesh', 'field': 'field'}), '(mesh=mesh, field=field)\n', (24289, 24313), False, 'from ooflib.engine.IO import meshIPC\n'), ((25327, 25382), 'ooflib.engine.IO.meshIPC.ipcfieldmenu.Deactivate', 'meshIPC.ipcfieldmenu.Deactivate', ([], {'mesh': 'mesh', 'field': 'field'}), '(mesh=mesh, field=field)\n', (25358, 25382), False, 'from ooflib.engine.IO import meshIPC\n'), ((27521, 27574), 'ooflib.engine.IO.meshIPC.ipcfieldmenu.In_Plane', 'meshIPC.ipcfieldmenu.In_Plane', ([], {'mesh': 'mesh', 'field': 'field'}), '(mesh=mesh, field=field)\n', (27550, 27574), False, 'from ooflib.engine.IO import meshIPC\n'), ((28175, 28232), 'ooflib.engine.IO.meshIPC.ipcfieldmenu.Out_of_Plane', 'meshIPC.ipcfieldmenu.Out_of_Plane', ([], {'mesh': 'mesh', 'field': 'field'}), '(mesh=mesh, field=field)\n', (28208, 28232), False, 'from ooflib.engine.IO import meshIPC\n'), ((34249, 34271), 'ooflib.SWIG.common.switchboard.notify', 'switchboard.notify', (['*n'], {}), '(*n)\n', (34267, 34271), False, 'from ooflib.SWIG.common import switchboard\n'), ((37498, 37540), 'ooflib.engine.meshstatus.Unsolved', 'meshstatus.Unsolved', (['"""Fields initialized."""'], {}), "('Fields initialized.')\n", (37517, 37540), False, 'from ooflib.engine import meshstatus\n'), ((38314, 38356), 'ooflib.engine.meshstatus.Unsolved', 'meshstatus.Unsolved', (['"""Fields initialized."""'], {}), "('Fields initialized.')\n", (38333, 38356), False, 'from ooflib.engine import meshstatus\n'), ((39232, 39289), 'ooflib.engine.IO.meshIPC.ipceqnmenu.Activate', 'meshIPC.ipceqnmenu.Activate', ([], {'mesh': 'mesh', 'equation': 'equation'}), '(mesh=mesh, equation=equation)\n', (39259, 39289), False, 'from ooflib.engine.IO import meshIPC\n'), ((39953, 40012), 'ooflib.engine.IO.meshIPC.ipceqnmenu.Deactivate', 'meshIPC.ipceqnmenu.Deactivate', ([], {'mesh': 'mesh', 'equation': 'equation'}), '(mesh=mesh, equation=equation)\n', (39982, 40012), False, 'from ooflib.engine.IO import meshIPC\n'), ((43008, 43036), 'ooflib.common.labeltree.makePath', 
'labeltree.makePath', (['meshname'], {}), '(meshname)\n', (43026, 43036), False, 'from ooflib.common import labeltree\n'), ((50478, 50502), 'ooflib.common.labeltree.makePath', 'labeltree.makePath', (['mesh'], {}), '(mesh)\n', (50496, 50502), False, 'from ooflib.common import labeltree\n'), ((59502, 59514), 'time.clock', 'time.clock', ([], {}), '()\n', (59512, 59514), False, 'import time\n'), ((59523, 59555), 'ooflib.engine.evolve.evolve', 'evolve.evolve', (['meshctxt', 'endtime'], {}), '(meshctxt, endtime)\n', (59536, 59555), False, 'from ooflib.engine import evolve\n'), ((2972, 3000), 'ooflib.common.labeltree.makePath', 'labeltree.makePath', (['skelname'], {}), '(skelname)\n', (2990, 3000), False, 'from ooflib.common import labeltree\n'), ((3030, 3058), 'ooflib.common.labeltree.makePath', 'labeltree.makePath', (['meshname'], {}), '(meshname)\n', (3048, 3058), False, 'from ooflib.common import labeltree\n'), ((4707, 4740), 'ooflib.SWIG.engine.elementshape.enumClasses.values', 'elementshape.enumClasses.values', ([], {}), '()\n', (4738, 4740), False, 'from ooflib.SWIG.engine import elementshape\n'), ((5766, 5825), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/newmesh.xml"""'], {}), "('DISCUSSIONS/engine/menu/newmesh.xml')\n", (5786, 5825), False, 'from ooflib.common.IO import xmlmenudump\n'), ((9393, 9421), 'string.join', 'string.join', (['copiedmesh', '""":"""'], {}), "(copiedmesh, ':')\n", (9404, 9421), False, 'import string\n'), ((12369, 12429), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/copymesh.xml"""'], {}), "('DISCUSSIONS/engine/menu/copymesh.xml')\n", (12389, 12429), False, 'from ooflib.common.IO import xmlmenudump\n'), ((21694, 21735), 'ooflib.engine.meshstatus.Unsolved', 'meshstatus.Unsolved', (['"""New fields defined"""'], {}), "('New fields defined')\n", (21713, 21735), False, 'from ooflib.engine import meshstatus\n'), ((22890, 22931), 'ooflib.engine.meshstatus.Unsolved', 'meshstatus.Unsolved', (['"""New fields defined"""'], {}), "('New fields defined')\n", (22909, 22931), False, 'from ooflib.engine import meshstatus\n'), ((23358, 23421), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/definefield.xml"""'], {}), "('DISCUSSIONS/engine/menu/definefield.xml')\n", (23378, 23421), False, 'from ooflib.common.IO import xmlmenudump\n'), ((26706, 26771), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/activatefield.xml"""'], {}), "('DISCUSSIONS/engine/menu/activatefield.xml')\n", (26726, 26771), False, 'from ooflib.common.IO import xmlmenudump\n'), ((32292, 32353), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/initfield.xml"""'], {}), "('DISCUSSIONS/engine/menu/initfield.xml')\n", (32312, 32353), False, 'from ooflib.common.IO import xmlmenudump\n'), ((37861, 37922), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/applyinit.xml"""'], {}), "('DISCUSSIONS/engine/menu/applyinit.xml')\n", (37881, 37922), False, 'from ooflib.common.IO import xmlmenudump\n'), ((38848, 38913), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/applyinittime.xml"""'], {}), "('DISCUSSIONS/engine/menu/applyinittime.xml')\n", (38868, 38913), False, 'from ooflib.common.IO import xmlmenudump\n'), ((42728, 42793), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', 
(['"""DISCUSSIONS/engine/menu/cross_section.xml"""'], {}), "('DISCUSSIONS/engine/menu/cross_section.xml')\n", (42748, 42793), False, 'from ooflib.common.IO import xmlmenudump\n'), ((44223, 44292), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/new_cross_section.xml"""'], {}), "('DISCUSSIONS/engine/menu/new_cross_section.xml')\n", (44243, 44292), False, 'from ooflib.common.IO import xmlmenudump\n'), ((45857, 45918), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/select_cs.xml"""'], {}), "('DISCUSSIONS/engine/menu/select_cs.xml')\n", (45877, 45918), False, 'from ooflib.common.IO import xmlmenudump\n'), ((46552, 46615), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/deselect_cs.xml"""'], {}), "('DISCUSSIONS/engine/menu/deselect_cs.xml')\n", (46572, 46615), False, 'from ooflib.common.IO import xmlmenudump\n'), ((48525, 48584), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/edit_cs.xml"""'], {}), "('DISCUSSIONS/engine/menu/edit_cs.xml')\n", (48545, 48584), False, 'from ooflib.common.IO import xmlmenudump\n'), ((49998, 50057), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/copy_cs.xml"""'], {}), "('DISCUSSIONS/engine/menu/copy_cs.xml')\n", (50018, 50057), False, 'from ooflib.common.IO import xmlmenudump\n'), ((50368, 50441), 'ooflib.SWIG.common.ooferror.ErrUserError', 'ooferror.ErrUserError', (['"""The Mesh must be rebuilt before it can be saved."""'], {}), "('The Mesh must be rebuilt before it can be saved.')\n", (50389, 50441), False, 'from ooflib.SWIG.common import ooferror\n'), ((50945, 50989), 'ooflib.engine.IO.skeletonIO.writeSkeleton', 'skeletonIO.writeSkeleton', (['dfile', 'skelcontext'], {}), '(dfile, skelcontext)\n', (50969, 50989), False, 'from ooflib.engine.IO import skeletonIO\n'), ((51002, 51038), 'ooflib.engine.IO.meshIO.writeMesh', 'meshIO.writeMesh', (['dfile', 'meshcontext'], {}), '(dfile, meshcontext)\n', (51018, 51038), False, 'from ooflib.engine.IO import meshIO\n'), ((51647, 51707), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/savemesh.xml"""'], {}), "('DISCUSSIONS/engine/menu/savemesh.xml')\n", (51667, 51707), False, 'from ooflib.common.IO import xmlmenudump\n'), ((52887, 52950), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/modify_mesh.xml"""'], {}), "('DISCUSSIONS/engine/menu/modify_mesh.xml')\n", (52907, 52950), False, 'from ooflib.common.IO import xmlmenudump\n'), ((54111, 54176), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/datacachetype.xml"""'], {}), "('DISCUSSIONS/engine/menu/datacachetype.xml')\n", (54131, 54176), False, 'from ooflib.common.IO import xmlmenudump\n'), ((55001, 55068), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/selfconsistency.xml"""'], {}), "('DISCUSSIONS/engine/menu/selfconsistency.xml')\n", (55021, 55068), False, 'from ooflib.common.IO import xmlmenudump\n'), ((55719, 55784), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/numericaldiff.xml"""'], {}), "('DISCUSSIONS/engine/menu/numericaldiff.xml')\n", (55739, 55784), False, 'from ooflib.common.IO import xmlmenudump\n'), ((56507, 56572), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', 
(['"""DISCUSSIONS/engine/menu/removesolvers.xml"""'], {}), "('DISCUSSIONS/engine/menu/removesolvers.xml')\n", (56527, 56572), False, 'from ooflib.common.IO import xmlmenudump\n'), ((57343, 57395), 'ooflib.engine.IO.subproblemmenu.setSolver', 'subproblemmenu.setSolver', (['menuitem', 'subppath', 'solver'], {}), '(menuitem, subppath, solver)\n', (57367, 57395), False, 'from ooflib.engine.IO import subproblemmenu\n'), ((57966, 58032), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/copyallsolvers.xml"""'], {}), "('DISCUSSIONS/engine/menu/copyallsolvers.xml')\n", (57986, 58032), False, 'from ooflib.common.IO import xmlmenudump\n'), ((58981, 59044), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/reordersubp.xml"""'], {}), "('DISCUSSIONS/engine/menu/reordersubp.xml')\n", (59001, 59044), False, 'from ooflib.common.IO import xmlmenudump\n'), ((60156, 60213), 'ooflib.common.IO.xmlmenudump.loadFile', 'xmlmenudump.loadFile', (['"""DISCUSSIONS/engine/menu/solve.xml"""'], {}), "('DISCUSSIONS/engine/menu/solve.xml')\n", (60176, 60213), False, 'from ooflib.common.IO import xmlmenudump\n'), ((4386, 4418), 'ooflib.engine.meshstatus.Unsolved', 'meshstatus.Unsolved', (['"""New mesh."""'], {}), "('New mesh.')\n", (4405, 4418), False, 'from ooflib.engine import meshstatus\n'), ((5125, 5249), 'ooflib.common.IO.whoville.AutoWhoNameParameter', 'whoville.AutoWhoNameParameter', (['"""name"""'], {'value': 'automatic.automatic', 'resolver': 'meshNameResolver', 'tip': '"""Name of the new Mesh"""'}), "('name', value=automatic.automatic, resolver=\n meshNameResolver, tip='Name of the new Mesh')\n", (5154, 5249), False, 'from ooflib.common.IO import whoville\n'), ((5318, 5420), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""skeleton"""', 'skeletoncontext.skeletonContexts'], {'tip': 'parameter.emptyTipString'}), "('skeleton', skeletoncontext.skeletonContexts, tip=\n parameter.emptyTipString)\n", (5339, 5420), False, 'from ooflib.common.IO import whoville\n'), ((6820, 6911), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (6841, 6911), False, 'from ooflib.common.IO import whoville\n'), ((6954, 7027), 'ooflib.common.IO.whoville.WhoNameParameter', 'whoville.WhoNameParameter', (['"""name"""'], {'value': '""""""', 'tip': '"""New name for the mesh."""'}), "('name', value='', tip='New name for the mesh.')\n", (6979, 7027), False, 'from ooflib.common.IO import whoville\n'), ((7917, 8008), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (7938, 8008), False, 'from ooflib.common.IO import whoville\n'), ((11701, 11792), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (11722, 11792), False, 'from ooflib.common.IO import whoville\n'), ((11819, 11996), 'ooflib.common.IO.whoville.AutoWhoNameParameter', 'whoville.AutoWhoNameParameter', (['"""name"""'], {'value': 'automatic.automatic', 'resolver': 'meshNameResolver', 'tip': '"""Name of the copied Mesh. 
Use automatic selection, or type in a name."""'}), "('name', value=automatic.automatic, resolver=\n meshNameResolver, tip=\n 'Name of the copied Mesh. Use automatic selection, or type in a name.')\n", (11848, 11996), False, 'from ooflib.common.IO import whoville\n'), ((12060, 12129), 'ooflib.common.IO.parameter.BooleanParameter', 'parameter.BooleanParameter', (['"""copy_field"""'], {'value': '(1)', 'tip': '"""Copy fields?"""'}), "('copy_field', value=1, tip='Copy fields?')\n", (12086, 12129), False, 'from ooflib.common.IO import parameter\n'), ((12135, 12209), 'ooflib.common.IO.parameter.BooleanParameter', 'parameter.BooleanParameter', (['"""copy_equation"""'], {'value': '(1)', 'tip': '"""Copy equation?"""'}), "('copy_equation', value=1, tip='Copy equation?')\n", (12161, 12209), False, 'from ooflib.common.IO import parameter\n'), ((12215, 12294), 'ooflib.common.IO.parameter.BooleanParameter', 'parameter.BooleanParameter', (['"""copy_bc"""'], {'value': '(1)', 'tip': '"""Copy boundary conditions?"""'}), "('copy_bc', value=1, tip='Copy boundary conditions?')\n", (12241, 12294), False, 'from ooflib.common.IO import parameter\n'), ((14712, 14730), 'ooflib.SWIG.common.config.dimension', 'config.dimension', ([], {}), '()\n', (14728, 14730), False, 'from ooflib.SWIG.common import config\n'), ((15878, 15971), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""source"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('source', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (15899, 15971), False, 'from ooflib.common.IO import whoville\n'), ((16013, 16106), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""target"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('target', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (16034, 16106), False, 'from ooflib.common.IO import whoville\n'), ((18111, 18125), 'ooflib.SWIG.common.config.devel', 'config.devel', ([], {}), '()\n', (18123, 18125), False, 'from ooflib.SWIG.common import config\n'), ((18895, 18909), 'ooflib.SWIG.common.config.devel', 'config.devel', ([], {}), '()\n', (18907, 18909), False, 'from ooflib.SWIG.common import config\n'), ((19783, 19876), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""source"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('source', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (19804, 19876), False, 'from ooflib.common.IO import whoville\n'), ((19918, 20011), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""target"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('target', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (19939, 20011), False, 'from ooflib.common.IO import whoville\n'), ((23026, 23117), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (23047, 23117), False, 'from ooflib.common.IO import whoville\n'), ((23160, 23228), 'ooflib.engine.IO.meshparameters.FieldParameter', 'meshparameters.FieldParameter', (['"""field"""'], {'tip': 'parameter.emptyTipString'}), "('field', tip=parameter.emptyTipString)\n", (23189, 23228), False, 'from ooflib.engine.IO import meshparameters\n'), ((23526, 23617), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 
'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (23547, 23617), False, 'from ooflib.common.IO import whoville\n'), ((23660, 23728), 'ooflib.engine.IO.meshparameters.FieldParameter', 'meshparameters.FieldParameter', (['"""field"""'], {'tip': 'parameter.emptyTipString'}), "('field', tip=parameter.emptyTipString)\n", (23689, 23728), False, 'from ooflib.engine.IO import meshparameters\n'), ((24714, 24784), 'ooflib.common.IO.reporter.report', 'reporter.report', (['"""You must define a Field before you can activate it."""'], {}), "('You must define a Field before you can activate it.')\n", (24729, 24784), False, 'from ooflib.common.IO import reporter\n'), ((25174, 25212), 'ooflib.engine.meshstatus.Unsolved', 'meshstatus.Unsolved', (['"""Field activated"""'], {}), "('Field activated')\n", (25193, 25212), False, 'from ooflib.engine import meshstatus\n'), ((25786, 25876), 'ooflib.common.IO.reporter.report', 'reporter.report', (['"""You must define and activate a Field before you can deactivate it."""'], {}), "(\n 'You must define and activate a Field before you can deactivate it.')\n", (25801, 25876), False, 'from ooflib.common.IO import reporter\n'), ((26265, 26305), 'ooflib.engine.meshstatus.Unsolved', 'meshstatus.Unsolved', (['"""Field deactivated"""'], {}), "('Field deactivated')\n", (26284, 26305), False, 'from ooflib.engine import meshstatus\n'), ((26404, 26495), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (26425, 26495), False, 'from ooflib.common.IO import whoville\n'), ((26538, 26606), 'ooflib.engine.IO.meshparameters.FieldParameter', 'meshparameters.FieldParameter', (['"""field"""'], {'tip': 'parameter.emptyTipString'}), "('field', tip=parameter.emptyTipString)\n", (26567, 26606), False, 'from ooflib.engine.IO import meshparameters\n'), ((26880, 26971), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (26901, 26971), False, 'from ooflib.common.IO import whoville\n'), ((27014, 27082), 'ooflib.engine.IO.meshparameters.FieldParameter', 'meshparameters.FieldParameter', (['"""field"""'], {'tip': 'parameter.emptyTipString'}), "('field', tip=parameter.emptyTipString)\n", (27043, 27082), False, 'from ooflib.engine.IO import meshparameters\n'), ((31773, 31864), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (31794, 31864), False, 'from ooflib.common.IO import whoville\n'), ((31907, 31996), 'ooflib.engine.IO.meshparameters.FieldParameter', 'meshparameters.FieldParameter', (['"""field"""'], {'tip': 'parameter.emptyTipString', 'outofplane': '(True)'}), "('field', tip=parameter.emptyTipString,\n outofplane=True)\n", (31936, 31996), False, 'from ooflib.engine.IO import meshparameters\n'), ((32090, 32163), 'ooflib.engine.fieldinit.FieldInitParameter', 'fieldinit.FieldInitParameter', (['"""initializer"""'], {'tip': 'parameter.emptyTipString'}), "('initializer', tip=parameter.emptyTipString)\n", (32118, 32163), False, 'from ooflib.engine import fieldinit\n'), ((34384, 34477), 
'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""source"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('source', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (34405, 34477), False, 'from ooflib.common.IO import whoville\n'), ((34520, 34613), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""target"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('target', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (34541, 34613), False, 'from ooflib.common.IO import whoville\n'), ((35497, 35588), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (35518, 35588), False, 'from ooflib.common.IO import whoville\n'), ((35631, 35721), 'ooflib.engine.IO.meshparameters.FieldParameter', 'meshparameters.FieldParameter', (['"""field"""'], {'outofplane': '(True)', 'tip': 'parameter.emptyTipString'}), "('field', outofplane=True, tip=parameter.\n emptyTipString)\n", (35660, 35721), False, 'from ooflib.engine.IO import meshparameters\n'), ((36617, 36708), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (36638, 36708), False, 'from ooflib.common.IO import whoville\n'), ((37688, 37779), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (37709, 37779), False, 'from ooflib.common.IO import whoville\n'), ((38531, 38622), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (38552, 38622), False, 'from ooflib.common.IO import whoville\n'), ((38665, 38732), 'ooflib.common.IO.parameter.FloatParameter', 'parameter.FloatParameter', (['"""time"""', '(0.0)'], {'tip': 'parameter.emptyTipString'}), "('time', 0.0, tip=parameter.emptyTipString)\n", (38689, 38732), False, 'from ooflib.common.IO import parameter\n'), ((40684, 40775), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (40705, 40775), False, 'from ooflib.common.IO import whoville\n'), ((40818, 40892), 'ooflib.engine.IO.meshparameters.EquationParameter', 'meshparameters.EquationParameter', (['"""equation"""'], {'tip': 'parameter.emptyTipString'}), "('equation', tip=parameter.emptyTipString)\n", (40850, 40892), False, 'from ooflib.engine.IO import meshparameters\n'), ((41859, 41950), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (41880, 41950), False, 'from ooflib.common.IO import whoville\n'), ((41993, 42067), 'ooflib.engine.IO.meshparameters.EquationParameter', 'meshparameters.EquationParameter', (['"""equation"""'], {'tip': 'parameter.emptyTipString'}), "('equation', tip=parameter.emptyTipString)\n", 
(42025, 42067), False, 'from ooflib.engine.IO import meshparameters\n'), ((44733, 44824), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (44754, 44824), False, 'from ooflib.common.IO import whoville\n'), ((44867, 44932), 'ooflib.common.IO.parameter.StringParameter', 'parameter.StringParameter', (['"""name"""'], {'tip': '"""Cross section to remove."""'}), "('name', tip='Cross section to remove.')\n", (44892, 44932), False, 'from ooflib.common.IO import parameter\n'), ((45593, 45684), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (45614, 45684), False, 'from ooflib.common.IO import whoville\n'), ((45719, 45793), 'ooflib.common.IO.parameter.StringParameter', 'parameter.StringParameter', (['"""cross_section"""'], {'tip': '"""Cross section to select."""'}), "('cross_section', tip='Cross section to select.')\n", (45744, 45793), False, 'from ooflib.common.IO import parameter\n'), ((46363, 46454), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (46384, 46454), False, 'from ooflib.common.IO import whoville\n'), ((47059, 47150), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (47080, 47150), False, 'from ooflib.common.IO import whoville\n'), ((47193, 47267), 'ooflib.common.IO.parameter.StringParameter', 'parameter.StringParameter', (['"""cross_section"""'], {'tip': '"""Cross section to rename."""'}), "('cross_section', tip='Cross section to rename.')\n", (47218, 47267), False, 'from ooflib.common.IO import parameter\n'), ((47319, 47391), 'ooflib.common.IO.parameter.StringParameter', 'parameter.StringParameter', (['"""name"""'], {'tip': '"""New name for the cross section."""'}), "('name', tip='New name for the cross section.')\n", (47344, 47391), False, 'from ooflib.common.IO import parameter\n'), ((48033, 48124), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (48054, 48124), False, 'from ooflib.common.IO import whoville\n'), ((48167, 48230), 'ooflib.common.IO.parameter.StringParameter', 'parameter.StringParameter', (['"""name"""'], {'tip': '"""Cross section to edit."""'}), "('name', tip='Cross section to edit.')\n", (48192, 48230), False, 'from ooflib.common.IO import parameter\n'), ((48244, 48370), 'ooflib.common.IO.parameter.RegisteredParameter', 'parameter.RegisteredParameter', (['"""cross_section"""', 'meshcrosssection.MeshCrossSection'], {'tip': '"""New value for the cross section."""'}), "('cross_section', meshcrosssection.\n MeshCrossSection, tip='New value for the cross section.')\n", (48273, 48370), False, 'from ooflib.common.IO import parameter\n'), ((51229, 51302), 'ooflib.common.IO.filenameparam.WriteFileNameParameter', 'filenameparam.WriteFileNameParameter', (['"""filename"""'], {'tip': '"""Name of the file."""'}), 
"('filename', tip='Name of the file.')\n", (51265, 51302), False, 'from ooflib.common.IO import filenameparam\n'), ((51308, 51398), 'ooflib.common.IO.filenameparam.WriteModeParameter', 'filenameparam.WriteModeParameter', (['"""mode"""'], {'tip': '"""\'w\' to (over)write and \'a\' to append."""'}), '(\'mode\', tip=\n "\'w\' to (over)write and \'a\' to append.")\n', (51340, 51398), False, 'from ooflib.common.IO import filenameparam\n'), ((51416, 51519), 'ooflib.common.enum.EnumParameter', 'enum.EnumParameter', (['"""format"""', 'datafile.DataFileFormatExt', 'datafile.ASCII'], {'tip': '"""Format of the file."""'}), "('format', datafile.DataFileFormatExt, datafile.ASCII,\n tip='Format of the file.')\n", (51434, 51519), False, 'from ooflib.common import enum\n'), ((52613, 52704), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (52634, 52704), False, 'from ooflib.common.IO import whoville\n'), ((52705, 52799), 'ooflib.common.IO.parameter.RegisteredParameter', 'parameter.RegisteredParameter', (['"""modifier"""', 'meshmod.MeshModification'], {'tip': '"""Mesh modifier."""'}), "('modifier', meshmod.MeshModification, tip=\n 'Mesh modifier.')\n", (52734, 52799), False, 'from ooflib.common.IO import parameter\n'), ((54487, 54612), 'ooflib.common.IO.parameter.FloatParameter', 'parameter.FloatParameter', (['"""tolerance"""', 'subproblemcontext.consistencyTolerance'], {'tip': '"""Relative tolerance for consistency."""'}), "('tolerance', subproblemcontext.\n consistencyTolerance, tip='Relative tolerance for consistency.')\n", (54511, 54612), False, 'from ooflib.common.IO import parameter\n'), ((54670, 54791), 'ooflib.common.IO.parameter.IntParameter', 'parameter.IntParameter', (['"""max_iterations"""', 'evolve.maxconsistencysteps'], {'tip': '"""Maximum number of iterations to perform."""'}), "('max_iterations', evolve.maxconsistencysteps, tip=\n 'Maximum number of iterations to perform.')\n", (54692, 54791), False, 'from ooflib.common.IO import parameter\n'), ((55430, 55544), 'ooflib.common.IO.parameter.FloatParameter', 'parameter.FloatParameter', (['"""epsilon"""', 'properties.cvar.deriv_eps'], {'tip': '"""Increment for numerical differentiation"""'}), "('epsilon', properties.cvar.deriv_eps, tip=\n 'Increment for numerical differentiation')\n", (55454, 55544), False, 'from ooflib.common.IO import parameter\n'), ((56316, 56407), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (56337, 56407), False, 'from ooflib.common.IO import whoville\n'), ((57526, 57627), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""source"""', 'ooflib.engine.mesh.meshes'], {'tip': '"""Mesh to copy the solvers from."""'}), "('source', ooflib.engine.mesh.meshes, tip=\n 'Mesh to copy the solvers from.')\n", (57547, 57627), False, 'from ooflib.common.IO import whoville\n'), ((57704, 57809), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""target"""', 'ooflib.engine.mesh.meshes'], {'tip': '"""Mesh to which to copy the solvers."""'}), "('target', ooflib.engine.mesh.meshes, tip=\n 'Mesh to which to copy the solvers.')\n", (57725, 57809), False, 'from ooflib.common.IO import whoville\n'), ((58563, 58654), 
'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (58584, 58654), False, 'from ooflib.common.IO import whoville\n'), ((58708, 58839), 'ooflib.common.IO.parameter.ListOfStringsParameter', 'parameter.ListOfStringsParameter', (['"""subproblems"""'], {'tip': '"""A list of Subproblem names in the order in which they should be solved."""'}), "('subproblems', tip=\n 'A list of Subproblem names in the order in which they should be solved.')\n", (58740, 58839), False, 'from ooflib.common.IO import parameter\n'), ((59597, 59609), 'time.clock', 'time.clock', ([], {}), '()\n', (59607, 59609), False, 'import time\n'), ((59903, 59994), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (59924, 59994), False, 'from ooflib.common.IO import whoville\n'), ((60035, 60090), 'ooflib.common.IO.parameter.FloatParameter', 'parameter.FloatParameter', (['"""endtime"""'], {'tip': '"""Ending time."""'}), "('endtime', tip='Ending time.')\n", (60059, 60090), False, 'from ooflib.common.IO import parameter\n'), ((11307, 11338), 'ooflib.engine.meshstatus.Unsolved', 'meshstatus.Unsolved', (['"""New copy"""'], {}), "('New copy')\n", (11326, 11338), False, 'from ooflib.engine import meshstatus\n'), ((28925, 29016), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (28946, 29016), False, 'from ooflib.common.IO import whoville\n'), ((29058, 29126), 'ooflib.engine.IO.meshparameters.FieldParameter', 'meshparameters.FieldParameter', (['"""field"""'], {'tip': 'parameter.emptyTipString'}), "('field', tip=parameter.emptyTipString)\n", (29087, 29126), False, 'from ooflib.engine.IO import meshparameters\n'), ((29746, 29837), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (29767, 29837), False, 'from ooflib.common.IO import whoville\n'), ((29879, 29947), 'ooflib.engine.IO.meshparameters.FieldParameter', 'meshparameters.FieldParameter', (['"""field"""'], {'tip': 'parameter.emptyTipString'}), "('field', tip=parameter.emptyTipString)\n", (29908, 29947), False, 'from ooflib.engine.IO import meshparameters\n'), ((43972, 44091), 'ooflib.common.IO.parameter.RegisteredParameter', 'parameter.RegisteredParameter', (['"""cross_section"""', 'meshcrosssection.MeshCrossSection'], {'tip': '"""New cross section object."""'}), "('cross_section', meshcrosssection.\n MeshCrossSection, tip='New cross section object.')\n", (44001, 44091), False, 'from ooflib.common.IO import parameter\n'), ((49230, 49338), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""current"""', 'ooflib.engine.mesh.meshes'], {'tip': '"""Mesh to copy the cross section from."""'}), "('current', ooflib.engine.mesh.meshes, tip=\n 'Mesh to copy the cross section from.')\n", (49251, 49338), False, 'from ooflib.common.IO import whoville\n'), ((49381, 49453), 'ooflib.common.IO.parameter.StringParameter', 'parameter.StringParameter', (['"""cross_section"""'], {'tip': '"""Cross 
section to copy."""'}), "('cross_section', tip='Cross section to copy.')\n", (49406, 49453), False, 'from ooflib.common.IO import parameter\n'), ((49542, 49645), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': '"""Mesh to copy the cross section to."""'}), "('mesh', ooflib.engine.mesh.meshes, tip=\n 'Mesh to copy the cross section to.')\n", (49563, 49645), False, 'from ooflib.common.IO import whoville\n'), ((49672, 49809), 'ooflib.common.IO.parameter.AutomaticNameParameter', 'parameter.AutomaticNameParameter', (['"""name"""'], {'value': 'automatic.automatic', 'resolver': 'csnameresolver', 'tip': '"""Name of the copied cross section."""'}), "('name', value=automatic.automatic,\n resolver=csnameresolver, tip='Name of the copied cross section.')\n", (49704, 49809), False, 'from ooflib.common.IO import parameter\n'), ((53435, 53526), 'ooflib.common.IO.whoville.WhoParameter', 'whoville.WhoParameter', (['"""mesh"""', 'ooflib.engine.mesh.meshes'], {'tip': 'parameter.emptyTipString'}), "('mesh', ooflib.engine.mesh.meshes, tip=parameter.\n emptyTipString)\n", (53456, 53526), False, 'from ooflib.common.IO import whoville\n'), ((10091, 10109), 'ooflib.SWIG.common.config.dimension', 'config.dimension', ([], {}), '()\n', (10107, 10109), False, 'from ooflib.SWIG.common import config\n')]
|
# coding: utf-8
import numpy as np
import matplotlib.pyplot as plt
import Transform as Transform
import DiffDriveRobot
class Wheel(object):
"""docstring for Wheel."""
def __init__(self):
super(Wheel, self).__init__()
self.speed = 0
def setSpeed(self, speed):
self.speed = speed
def getSpeed(self):
return self.speed
def getDist(self, dT):
return self.speed * dT
class Robot(object):
"""docstring for Robot."""
def __init__(self, x, y, theta, robot):
super(Robot, self).__init__()
self.polygon = np.array([[-150, -150], [-150, 150], [150, 150], [150, -150], [-150, -150]],dtype =float)
self.x = x
self.y = y
self.theta = theta
self.robot = robot
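        # entre-axes: wheel separation of the motor wheels (M) and of the odometry wheels (O)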
self.MEntreAxes = 200
self.OEntreAxes = 250
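        # target ("consigne") pose the controller drives the robot towards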
self.xC = x
self.yC = y
self.thetaC = theta
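        # pose error terms and the resulting distance/heading corrections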
self.XErr = 0
self.YErr = 0
self.ThetaErr = 0
self.DistErr = 0
self.CapErr = 0
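        # histories recorded at each control step (kept for later inspection/plotting)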
self.alpha = []
self.thetaa = []
self.DistErra = []
    # setters
def setX(self, x):
self.x = x
def setY(self, y):
self.y = y
def setTheta(self, theta):
self.theta = theta
def setXC(self, xC):
self.xC = xC
def setYC(self, yC):
self.yC = yC
def setThetaC(self, thetaC):
self.thetaC = thetaC
    # getters (accessors)
def getX(self):
return self.x
def getY(self):
return self.y
def getTheta(self):
return self.theta
    # other methods
    # functions implementing the robot's behaviour (model)
def updateOdometry(self, dT):
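        # Dead reckoning from the two odometry encoders: the travelled distance is the mean
        # of the wheel distances, the heading change is their difference over the wheel separation.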
dOL = self.robot.getLeftEncoderDist(dT)
dOR = self.robot.getRightEncoderDist(dT)
dXrobot = (dOR + dOL)/2
dTheta = (dOR - dOL)/self.OEntreAxes
self.theta = self.theta + dTheta
if(self.theta <= -np.pi): self.theta = self.theta + 2*np.pi
if(self.theta > np.pi): self.theta = self.theta - 2*np.pi
self.x = self.x + dXrobot*np.cos(self.theta)
self.y = self.y + dXrobot*np.sin(self.theta)
def computeError(self): # Equations 11 & 12
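        # Go-to-goal controller (Equations 11 & 12): alpha is the bearing of the target relative
        # to the current heading; the distance error is the Euclidean error projected on the
        # heading (gain Kp), and the heading ("cap") error steers the robot towards the target (gain Kalpha).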
self.XErr = self.xC - self.x
self.YErr = self.yC - self.y
self.ThetaErr = self.thetaC - self.theta #unused
Kp = 1
Kalpha = 5
alpha = np.arctan2(self.YErr, self.XErr)-self.theta
if alpha <= -np.pi: alpha+= 2*np.pi
if alpha > +np.pi: alpha-= 2*np.pi
self.thetaa.append(self.theta)
self.alpha.append(alpha)
self.DistErr = Kp*np.sqrt(self.XErr**2 + self.YErr**2)*np.cos(alpha)
# self.CapErr = Kp*np.sin(alpha)*np.cos(alpha) + Kalpha*alpha
self.CapErr = Kalpha*np.sin(alpha)*np.cos(alpha)
self.DistErra.append(self.DistErr)
def setConsign(self):
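        # Convert the (linear, angular) velocity commands into left/right wheel speed setpoints.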
V = self.DistErr
Omega = self.CapErr
VMG = (V - Omega * self.MEntreAxes/2)/1 #1 = wheelRadius
VMD = (V + Omega * self.MEntreAxes/2)/1
self.robot.setLeftMotorSpeed(VMG)
self.robot.setRightMotorSpeed(VMD)
def draw(self):
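        # Plot the robot body polygon (rotated then translated to the current pose) and the target point.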
shape2 = np.transpose(Transform.rotate(self.polygon, self.theta))
shape2 = np.transpose(Transform.translate(np.transpose(shape2), self.x, self.y))
        plt.plot(shape2[0], shape2[1])
        plt.plot(self.xC, self.yC, 'bx')
def update(self, dT):
self.updateOdometry(dT)
self.computeError()
self.setConsign()
if __name__== "__main__":
import main
|
[
"numpy.arctan2",
"matplotlib.pyplot.plot",
"numpy.transpose",
"Transform.rotate",
"numpy.sin",
"numpy.array",
"numpy.cos",
"numpy.sqrt"
] |
[((587, 680), 'numpy.array', 'np.array', (['[[-150, -150], [-150, 150], [150, 150], [150, -150], [-150, -150]]'], {'dtype': 'float'}), '([[-150, -150], [-150, 150], [150, 150], [150, -150], [-150, -150]],\n dtype=float)\n', (595, 680), True, 'import numpy as np\n'), ((3291, 3321), 'matplotlib.pyplot.plot', 'plt.plot', (['shape2[0]', 'shape2[1]'], {}), '(shape2[0], shape2[1])\n', (3299, 3321), True, 'import matplotlib.pyplot as plt\n'), ((3331, 3363), 'matplotlib.pyplot.plot', 'plt.plot', (['self.xC', 'self.yC', '"""bx"""'], {}), "(self.xC, self.yC, 'bx')\n", (3339, 3363), True, 'import matplotlib.pyplot as plt\n'), ((2368, 2400), 'numpy.arctan2', 'np.arctan2', (['self.YErr', 'self.XErr'], {}), '(self.YErr, self.XErr)\n', (2378, 2400), True, 'import numpy as np\n'), ((2635, 2648), 'numpy.cos', 'np.cos', (['alpha'], {}), '(alpha)\n', (2641, 2648), True, 'import numpy as np\n'), ((2762, 2775), 'numpy.cos', 'np.cos', (['alpha'], {}), '(alpha)\n', (2768, 2775), True, 'import numpy as np\n'), ((3150, 3192), 'Transform.rotate', 'Transform.rotate', (['self.polygon', 'self.theta'], {}), '(self.polygon, self.theta)\n', (3166, 3192), True, 'import Transform as Transform\n'), ((2065, 2083), 'numpy.cos', 'np.cos', (['self.theta'], {}), '(self.theta)\n', (2071, 2083), True, 'import numpy as np\n'), ((2118, 2136), 'numpy.sin', 'np.sin', (['self.theta'], {}), '(self.theta)\n', (2124, 2136), True, 'import numpy as np\n'), ((2598, 2638), 'numpy.sqrt', 'np.sqrt', (['(self.XErr ** 2 + self.YErr ** 2)'], {}), '(self.XErr ** 2 + self.YErr ** 2)\n', (2605, 2638), True, 'import numpy as np\n'), ((2748, 2761), 'numpy.sin', 'np.sin', (['alpha'], {}), '(alpha)\n', (2754, 2761), True, 'import numpy as np\n'), ((3244, 3264), 'numpy.transpose', 'np.transpose', (['shape2'], {}), '(shape2)\n', (3256, 3264), True, 'import numpy as np\n')]
|