code | apis | extract_api
---|---|---
# Generated by Django 3.1.4 on 2021-05-02 09:23
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('Main', '0068_remove_product_discount'),
]
operations = [
migrations.RenameField(
model_name='product',
old_name='advantage',
new_name='generalTitle',
),
]
|
[
"django.db.migrations.RenameField"
] |
[((229, 325), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""product"""', 'old_name': '"""advantage"""', 'new_name': '"""generalTitle"""'}), "(model_name='product', old_name='advantage', new_name\n ='generalTitle')\n", (251, 325), False, 'from django.db import migrations\n')]
|
"""This module defines context managers which are used to trap exceptions
and exit Python cleanly with specific exit_codes which are then seen as
the numerical exit status of the process and ultimately Batch job.
The exit_on_exception() context manager is used to bracket a block of code
by mapping all exceptions onto some log output and a call to sys.exit():
with exit_on_exception(exit_codes.SOME_CODE, "Parts of the ERROR", "message output", "on exception."):
... the code you're trapping to SOME_CODE when things go wrong ...
The exit_on_exception() manager also enables simulating errors by defining the
CALDP_SIMULATE_ERROR=N environment variable. When the manager is called with a
code matching CALDP_SIMULATE_ERROR, instead of running the code block it fakes
an exception by performing the corresponding log output and sys.exit() call. A
few error codes are simulated more directly, particularly memory errors.
The exit_receiver() manager is used to bracket the top level of your code,
nominally main(), and land the CaldpExit() exception raised by
exit_on_exception() after the stack has been unwound and cleanup functions
performed. exit_receiver() then exits Python with the error code originally
passed into exit_on_exception().
>>> from caldp import log
>>> log.set_test_mode()
>>> log.reset()
"""
import sys
import os
import contextlib
import traceback
import resource
import time
import random
from caldp import log
from caldp import exit_codes
# ==============================================================================
class CaldpExit(SystemExit):
"""Handle like SystemExit, but we definitely threw it."""
class SubprocessFailure(Exception):
"""A called subprocess failed and may require signal reporting.
In Python, a negative subprocess returncode indicates that the absolute
value of the returncode is a signal number which killed the subprocess.
For completeness, in Linux, the program exit_code is a byte value. If the
sign bit is set, a signal and/or core dump occurred. The byte reported as
exit_code may be unsigned. The lower bits of the returncode define either
the program's exit status or a signum identifying the signal which killed
the process.
"""
def __init__(self, returncode):
self.returncode = returncode
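# An illustrative sketch (an assumption, not part of this module) of how a caller
# might convert a failed subprocess into a SubprocessFailure so that
# exit_on_exception() can report a killing signal via exit_codes.explain_signal():
#
#     import subprocess
#
#     def run_or_raise(cmd):
#         result = subprocess.run(cmd)
#         if result.returncode != 0:  # negative returncode => killed by a signal
#             raise SubprocessFailure(result.returncode)
#         return result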
@contextlib.contextmanager
def exit_on_exception(exit_code, *args):
"""exit_on_exception is a context manager which issues an error message
based on *args and then does sys.exit(exit_code) if an exception is
raised within the corresponding "with block".
>>> with exit_on_exception(1, "As expected", "it did not fail."):
... print("do it.")
do it.
>>> try: #doctest: +ELLIPSIS
... with exit_on_exception(2, "As expected", "it failed."):
... raise Exception("It failed!")
... print("do it.")
... except SystemExit:
... log.divider()
... print("Trapping SystemExit normally caught by exit_reciever() at top level.")
ERROR - ----------------------------- Fatal Exception -----------------------------
ERROR - As expected it failed.
ERROR - Traceback (most recent call last):
ERROR - File ".../sysexit.py", line ..., in exit_on_exception
ERROR - yield
ERROR - File "<doctest ...exit_on_exception[1]>", line ..., in <module>
ERROR - raise Exception("It failed!")
ERROR - Exception: It failed!
EXIT - CMDLINE_ERROR[2]: The program command line invocation was incorrect.
INFO - ---------------------------------------------------------------------------
Trapping SystemExit normally caught by exit_receiver() at top level.
Note that 'do it.' is never printed; SystemExit is caught here only for testing.
If CALDP_SIMULATE_ERROR is set to one of exit_codes, it will cause the
with exit_on_exception() block to act as if a failure has occurred:
>>> os.environ["CALDP_SIMULATE_ERROR"] = "2"
>>> try: #doctest: +ELLIPSIS
... with exit_on_exception(2, "As expected a failure was simulated"):
... print("should not see this")
... except SystemExit:
... pass
ERROR - ----------------------------- Fatal Exception -----------------------------
ERROR - As expected a failure was simulated
ERROR - Traceback (most recent call last):
ERROR - File ".../sysexit.py", line ..., in exit_on_exception
ERROR - raise RuntimeError(f"Simulating error = {simulated_code}")
ERROR - RuntimeError: Simulating error = 2
EXIT - CMDLINE_ERROR[2]: The program command line invocation was incorrect.
>>> os.environ["CALDP_SIMULATE_ERROR"] = str(exit_codes.CALDP_MEMORY_ERROR)
>>> try: #doctest: +ELLIPSIS
... with exit_on_exception(2, "Memory errors don't have to match"):
... print("Oh unhappy day.")
... except SystemExit:
... pass
ERROR - ----------------------------- Fatal Exception -----------------------------
ERROR - Memory errors don't have to match
ERROR - Traceback (most recent call last):
ERROR - File ".../sysexit.py", line ..., in exit_on_exception
ERROR - raise MemoryError("Simulated CALDP MemoryError.")
ERROR - MemoryError: Simulated CALDP MemoryError.
EXIT - CALDP_MEMORY_ERROR[32]: CALDP generated a Python MemoryError during processing or preview creation.
>>> os.environ["CALDP_SIMULATE_ERROR"] = str(exit_codes.OS_MEMORY_ERROR)
>>> try: #doctest: +ELLIPSIS
... with exit_on_exception(2, "Memory errors don't have to match"):
... print("Oh unhappy day.")
... except SystemExit:
... pass
ERROR - ----------------------------- Fatal Exception -----------------------------
ERROR - Memory errors don't have to match
ERROR - Traceback (most recent call last):
ERROR - File ".../sysexit.py", line ..., in exit_on_exception
ERROR - raise OSError("Cannot allocate memory...")
ERROR - OSError: Cannot allocate memory...
EXIT - OS_MEMORY_ERROR[34]: Python raised OSError(Cannot allocate memory...), possibly fork failure.
>>> os.environ["CALDP_SIMULATE_ERROR"] = "999"
>>> with exit_on_exception(3, "Only matching error codes are simulated."):
... print("should print normally")
should print normally
>>> del os.environ["CALDP_SIMULATE_ERROR"]
>>> saved, os._exit = os._exit, lambda x: print(f"os._exit({x})")
>>> with exit_receiver(): #doctest: +ELLIPSIS
... with exit_on_exception(exit_codes.STAGE1_ERROR, "Failure running processing stage1."):
... raise SubprocessFailure(-8)
ERROR - ----------------------------- Fatal Exception -----------------------------
ERROR - Failure running processing stage1.
ERROR - Traceback (most recent call last):
ERROR - File ".../caldp/sysexit.py", line ..., in exit_on_exception
ERROR - yield
ERROR - File "<doctest caldp.sysexit.exit_on_exception[...]>", line ..., in <module>
ERROR - raise SubprocessFailure(-8)
ERROR - caldp.sysexit.SubprocessFailure: -8
EXIT - Killed by UNIX signal SIGFPE[8]: 'Floating-point exception (ANSI).'
EXIT - STAGE1_ERROR[23]: An error occurred in this instrument's stage1 processing step. e.g. calxxx
os._exit(23)
>>> with exit_receiver(): #doctest: +ELLIPSIS
... with exit_on_exception(exit_codes.STAGE1_ERROR, "Failure running processing stage1."):
... raise OSError("Something other than memory")
ERROR - ----------------------------- Fatal Exception -----------------------------
ERROR - Failure running processing stage1.
ERROR - Traceback (most recent call last):
ERROR - File ".../sysexit.py", line ..., in exit_on_exception
ERROR - yield
ERROR - File "<doctest ...sysexit.exit_on_exception[...]>", line ..., in <module>
ERROR - raise OSError("Something other than memory")
ERROR - OSError: Something other than memory
EXIT - STAGE1_ERROR[23]: An error occurred in this instrument's stage1 processing step. e.g. calxxx
os._exit(23)
>>> os._exit = saved
"""
simulated_code = int(os.environ.get("CALDP_SIMULATE_ERROR", "0"))
try:
if simulated_code == exit_codes.CALDP_MEMORY_ERROR:
raise MemoryError("Simulated CALDP MemoryError.")
elif simulated_code == exit_codes.OS_MEMORY_ERROR:
raise OSError("Cannot allocate memory...")
elif simulated_code == exit_codes.SUBPROCESS_MEMORY_ERROR:
print("MemoryError", file=sys.stderr) # Output to process log determines final program exit status
raise RuntimeError("Simulated subprocess memory error with subsequent generic program exception.")
elif simulated_code == exit_codes.CONTAINER_MEMORY_ERROR:
log.info("Simulating hard memory error by allocating memory")
_ = bytearray(1024 * 2 ** 30) # XXXX does not trigger container limit as intended
elif exit_code == simulated_code:
raise RuntimeError(f"Simulating error = {simulated_code}")
yield
# don't mask memory errors or nested exit_on_exception handlers
except MemoryError:
_report_exception(exit_codes.CALDP_MEMORY_ERROR, args)
raise CaldpExit(exit_codes.CALDP_MEMORY_ERROR)
except OSError as exc:
if "Cannot allocate memory" in str(exc) + repr(exc):
_report_exception(exit_codes.OS_MEMORY_ERROR, args)
raise CaldpExit(exit_codes.OS_MEMORY_ERROR)
else:
_report_exception(exit_code, args)
raise CaldpExit(exit_code)
except CaldpExit:
raise
# Below, as always, exit_code defines what will become CALDP's program exit status.
# In contrast, exc.returncode is the exit status of the failed subprocess, which
# may encode the OS signal that killed it.
except SubprocessFailure as exc:
_report_exception(exit_code, args, exc.returncode)
raise CaldpExit(exit_code)
except Exception:
_report_exception(exit_code, args)
raise CaldpExit(exit_code)
def _report_exception(exit_code, args=None, returncode=None):
"""Issue trigger output for exit_on_exception, including `exit_code` and
error message defined by `args`, as well as traceback.
"""
log.divider("Fatal Exception", func=log.error)
if args:
log.error(*args)
for line in traceback.format_exc().splitlines():
if line != "NoneType: None":
log.error(line)
if returncode and returncode < 0:
print(exit_codes.explain_signal(-returncode))
print(exit_codes.explain(exit_code))
@contextlib.contextmanager
def exit_receiver():
"""Use this contextmanager to bracket your top level code and land the sys.exit()
exceptions thrown by _raise_exit_exception() and exit_on_exception().
This program structure enables sys.exit() to fully unwind the stack doing
cleanup, then calls the low level os._exit() function which does no cleanup
as the "last thing".
If SystemExit is not raised by the code nested in the "with" block then
exit_receiver() exits the program with exit_codes.SUCCESS.
The program is exited with the numerical code passed to sys.exit().
>>> saved, os._exit = os._exit, lambda x: print(f"os._exit({x})")
>>> with exit_receiver(): #doctest: +ELLIPSIS
... print("Oh happy day.")
Oh happy day.
os._exit(0)
Generic unhandled exceptions are mapped to GENERIC_ERROR (1):
>>> def foo():
... print("foo!")
... bar()
>>> def bar():
... print("bar!")
... raise RuntimeError()
>>> with exit_receiver(): #doctest: +ELLIPSIS
... foo()
foo!
bar!
ERROR - ----------------------------- Fatal Exception -----------------------------
ERROR - Untrapped non-memory exception.
ERROR - Traceback (most recent call last):
ERROR - File ".../caldp/sysexit.py", line ..., in exit_receiver
ERROR - yield # go off and execute the block
ERROR - File "<doctest caldp.sysexit.exit_receiver[...]>", line ..., in <module>
ERROR - foo()
ERROR - File "<doctest caldp.sysexit.exit_receiver[...]>", line ..., in foo
ERROR - bar()
ERROR - File "<doctest caldp.sysexit.exit_receiver[...]>", line ..., in bar
ERROR - raise RuntimeError()
ERROR - RuntimeError
EXIT - GENERIC_ERROR[1]: An error with no specific CALDP handling occurred somewhere.
os._exit(1)
MemoryError is remapped to CALDP_MEMORY_ERROR (32) whether or not it occurs inside exit_on_exception:
>>> with exit_receiver(): #doctest: +ELLIPSIS
... raise MemoryError("CALDP used up all memory directly.")
ERROR - ----------------------------- Fatal Exception -----------------------------
ERROR - Untrapped memory exception.
ERROR - Traceback (most recent call last):
ERROR - File ".../caldp/sysexit.py", line ..., in exit_receiver
ERROR - yield # go off and execute the block
ERROR - File "<doctest caldp.sysexit.exit_receiver[...]>", line ..., in <module>
ERROR - raise MemoryError("CALDP used up all memory directly.")
ERROR - MemoryError: CALDP used up all memory directly.
EXIT - CALDP_MEMORY_ERROR[32]: CALDP generated a Python MemoryError during processing or preview creation.
os._exit(32)
A memory-related OSError is remapped to OS_MEMORY_ERROR (34) even outside
exit_on_exception():
>>> with exit_receiver(): #doctest: +ELLIPSIS
... raise OSError("Cannot allocate memory...")
ERROR - ----------------------------- Fatal Exception -----------------------------
ERROR - Untrapped OSError cannot allocate memory
ERROR - Traceback (most recent call last):
ERROR - File ".../sysexit.py", line ..., in exit_receiver
ERROR - yield # go off and execute the block
ERROR - File "<doctest ...sysexit.exit_receiver[...]>", line ..., in <module>
ERROR - raise OSError("Cannot allocate memory...")
ERROR - OSError: Cannot allocate memory...
EXIT - OS_MEMORY_ERROR[34]: Python raised OSError(Cannot allocate memory...), possibly fork failure.
os._exit(34)
>>> with exit_receiver(): #doctest: +ELLIPSIS
... raise OSError("Some non-memory os error.")
ERROR - ----------------------------- Fatal Exception -----------------------------
ERROR - Untrapped OSError, generic.
ERROR - Traceback (most recent call last):
ERROR - File ".../sysexit.py", line ..., in exit_receiver
ERROR - yield # go off and execute the block
ERROR - File "<doctest ...sysexit.exit_receiver[...]>", line ..., in <module>
ERROR - raise OSError("Some non-memory os error.")
ERROR - OSError: Some non-memory os error.
EXIT - GENERIC_ERROR[1]: An error with no specific CALDP handling occurred somewhere.
os._exit(1)
Inside exit_on_exception(), the exit status is remapped to its exit_code parameter:
>>> with exit_receiver(): #doctest: +ELLIPSIS
... with exit_on_exception(exit_codes.STAGE1_ERROR, "Stage1 processing failed for <ippssoot>"):
... raise RuntimeError("Some obscure error")
ERROR - ----------------------------- Fatal Exception -----------------------------
ERROR - Stage1 processing failed for <ippssoot>
ERROR - Traceback (most recent call last):
ERROR - File ".../sysexit.py", line ..., in exit_on_exception
ERROR - yield
ERROR - File "<doctest ...sysexit.exit_receiver[...]>", line ..., in <module>
ERROR - raise RuntimeError("Some obscure error")
ERROR - RuntimeError: Some obscure error
EXIT - STAGE1_ERROR[23]: An error occurred in this instrument's stage1 processing step. e.g. calxxx
os._exit(23)
>>> os._exit = saved
"""
try:
# log.info("Container memory limit is: ", get_linux_memory_limit())
yield # go off and execute the block
code = exit_codes.SUCCESS
except CaldpExit as exc:
code = exc.code
# Already reported deeper
except MemoryError:
code = exit_codes.CALDP_MEMORY_ERROR
_report_exception(code, ("Untrapped memory exception.",))
except OSError as exc:
if "Cannot allocate memory" in str(exc) + repr(exc):
code = exit_codes.OS_MEMORY_ERROR
args = ("Untrapped OSError cannot callocate memory",)
else:
code = exit_codes.GENERIC_ERROR
args = ("Untrapped OSError, generic.",)
_report_exception(code, args)
except BaseException: # Catch absolutely everything.
code = exit_codes.GENERIC_ERROR
_report_exception(code, ("Untrapped non-memory exception.",))
os._exit(code)
def get_linux_memory_limit(): # pragma: no cover
"""This generally shows the full address space by default.
>> limit = get_linux_memory_limit()
>> assert isinstance(limit, int)
"""
if os.path.isfile("/sys/fs/cgroup/memory/memory.limit_in_bytes"):
with open("/sys/fs/cgroup/memory/memory.limit_in_bytes") as limit:
mem = int(limit.read())
return mem
else:
raise RuntimeError("get_linux_memory_limit() failed.") # pragma: no cover
def set_process_memory_limit(mem_in_bytes):
"""This can be used to limit the available address space / memory to
something less than is allocated to the container. Potentially that
will cause Python to generate a MemoryError rather than forcing a
container memory limit kill.
"""
resource.setrlimit(resource.RLIMIT_AS, (mem_in_bytes, mem_in_bytes)) # pragma: no cover
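# An illustrative pairing of the two helpers above; the 0.9 safety factor is an
# assumption. Capping the address space below the container limit lets Python
# raise MemoryError instead of triggering a container memory kill:
#
#     limit = get_linux_memory_limit()
#     set_process_memory_limit(int(limit * 0.9))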
# ==============================================================================
def retry(func, max_retries=3, min_sleep=1, max_sleep=60, backoff=2, exceptions=(Exception, SystemExit)):
"""a decorator for retrying a function call on exception
max_retries: number of times to retry
min_sleep: starting value for backing off, in seconds
max_sleep: sleep value not to exceed, in seconds
backoff: the exponential factor
exceptions: tuple of exceptions to catch and retry
"""
def decor(*args, **kwargs):
tried = 0
while tried < max_retries:
try:
return func(*args, **kwargs)
except exceptions as e:
# otherwise e is lost to the namespace cleanup,
# and we may need to raise it later
exc = e
tried += 1
sleep = exponential_backoff(tried, min_sleep=min_sleep, max_sleep=max_sleep, backoff=backoff)
log.warning(
f"{func.__name__} raised exception, using retry {tried} of {max_retries}, sleeping for {sleep} seconds "
)
time.sleep(sleep)
# if we're here, no attempt to call func() succeeded
raise exc
return decor
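# Example usage of retry() above (illustrative; fetch_catalog is hypothetical).
# Used bare, the default policy applies; to customize it, bind keywords first:
#
#     @retry
#     def fetch_catalog():
#         ...
#
#     import functools
#     retry5 = functools.partial(retry, max_retries=5, max_sleep=30)
#
#     @retry5
#     def fetch_catalog():
#         ...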
def exponential_backoff(iteration, min_sleep=1, max_sleep=64, backoff=2):
"""given the current number of attempts, return a sleep time using an exponential backoff algorithm
iteration: the current amount of retries used
min_sleep: minimum value to wait before retry, in seconds
max_sleep: maximum value to wait before retry, in seconds
note: if you allow enough retries that the backoff exceeds max_sleep,
you will lose the benefit of jitter
see e.g. https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/
"""
# random uniform number in (0.5, 1) * backoff**iteration, clipped to [min_sleep, max_sleep]
return max(min(random.uniform(0.5, 1) * backoff ** iteration, max_sleep), min_sleep)
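# Worked example of the formula above: at iteration=3 with backoff=2, the sleep
# is uniform(0.5, 1) * 2**3, i.e. between 4 and 8 seconds, which already lies
# inside the default [1, 64] clipping range.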
# ==============================================================================
def test(): # pragma: no cover
from doctest import testmod
import caldp.sysexit
temp, os._exit = os._exit, lambda x: print(f"os._exit({x})")
test_result = testmod(caldp.sysexit)
os._exit = temp
return test_result
if __name__ == "__main__": # pragma: no cover
print(test())
|
[
"caldp.log.warning",
"random.uniform",
"resource.setrlimit",
"caldp.log.error",
"os.environ.get",
"time.sleep",
"os.path.isfile",
"os._exit",
"caldp.exit_codes.explain_signal",
"traceback.format_exc",
"caldp.log.divider",
"caldp.exit_codes.explain",
"caldp.log.info",
"doctest.testmod"
] |
[((10284, 10330), 'caldp.log.divider', 'log.divider', (['"""Fatal Exception"""'], {'func': 'log.error'}), "('Fatal Exception', func=log.error)\n", (10295, 10330), False, 'from caldp import log\n'), ((16581, 16595), 'os._exit', 'os._exit', (['code'], {}), '(code)\n', (16589, 16595), False, 'import os\n'), ((16804, 16865), 'os.path.isfile', 'os.path.isfile', (['"""/sys/fs/cgroup/memory/memory.limit_in_bytes"""'], {}), "('/sys/fs/cgroup/memory/memory.limit_in_bytes')\n", (16818, 16865), False, 'import os\n'), ((17398, 17466), 'resource.setrlimit', 'resource.setrlimit', (['resource.RLIMIT_AS', '(mem_in_bytes, mem_in_bytes)'], {}), '(resource.RLIMIT_AS, (mem_in_bytes, mem_in_bytes))\n', (17416, 17466), False, 'import resource\n'), ((19713, 19735), 'doctest.testmod', 'testmod', (['caldp.sysexit'], {}), '(caldp.sysexit)\n', (19720, 19735), False, 'from doctest import testmod\n'), ((8114, 8157), 'os.environ.get', 'os.environ.get', (['"""CALDP_SIMULATE_ERROR"""', '"""0"""'], {}), "('CALDP_SIMULATE_ERROR', '0')\n", (8128, 8157), False, 'import os\n'), ((10352, 10368), 'caldp.log.error', 'log.error', (['*args'], {}), '(*args)\n', (10361, 10368), False, 'from caldp import log\n'), ((10589, 10618), 'caldp.exit_codes.explain', 'exit_codes.explain', (['exit_code'], {}), '(exit_code)\n', (10607, 10618), False, 'from caldp import exit_codes\n'), ((10385, 10407), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (10405, 10407), False, 'import traceback\n'), ((10471, 10486), 'caldp.log.error', 'log.error', (['line'], {}), '(line)\n', (10480, 10486), False, 'from caldp import log\n'), ((10539, 10577), 'caldp.exit_codes.explain_signal', 'exit_codes.explain_signal', (['(-returncode)'], {}), '(-returncode)\n', (10564, 10577), False, 'from caldp import exit_codes\n'), ((19385, 19407), 'random.uniform', 'random.uniform', (['(0.5)', '(1)'], {}), '(0.5, 1)\n', (19399, 19407), False, 'import random\n'), ((18410, 18537), 'caldp.log.warning', 'log.warning', (['f"""{func.__name__} raised exception, using retry {tried} of {max_retries}, sleeping for {sleep} seconds """'], {}), "(\n f'{func.__name__} raised exception, using retry {tried} of {max_retries}, sleeping for {sleep} seconds '\n )\n", (18421, 18537), False, 'from caldp import log\n'), ((18582, 18599), 'time.sleep', 'time.sleep', (['sleep'], {}), '(sleep)\n', (18592, 18599), False, 'import time\n'), ((8772, 8833), 'caldp.log.info', 'log.info', (['"""Simulating hard memory error by allocating memory"""'], {}), "('Simulating hard memory error by allocating memory')\n", (8780, 8833), False, 'from caldp import log\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-05-03 01:15
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('authentication', '0003_auto_20161030_0401'),
]
operations = [
migrations.RemoveField(
model_name='permission',
name='role',
),
migrations.RemoveField(
model_name='userprofile',
name='image',
),
migrations.RemoveField(
model_name='userprofile',
name='role',
),
migrations.DeleteModel(
name='Permission',
),
migrations.DeleteModel(
name='Role',
),
]
|
[
"django.db.migrations.RemoveField",
"django.db.migrations.DeleteModel"
] |
[((297, 357), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""permission"""', 'name': '"""role"""'}), "(model_name='permission', name='role')\n", (319, 357), False, 'from django.db import migrations\n'), ((402, 464), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""userprofile"""', 'name': '"""image"""'}), "(model_name='userprofile', name='image')\n", (424, 464), False, 'from django.db import migrations\n'), ((509, 570), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""userprofile"""', 'name': '"""role"""'}), "(model_name='userprofile', name='role')\n", (531, 570), False, 'from django.db import migrations\n'), ((615, 656), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Permission"""'}), "(name='Permission')\n", (637, 656), False, 'from django.db import migrations\n'), ((689, 724), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Role"""'}), "(name='Role')\n", (711, 724), False, 'from django.db import migrations\n')]
|
# this is the first file that will be read
from flask import Flask
from config import Config
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
app = Flask(__name__)
app.config.from_object(Config)
db = SQLAlchemy(app)
migrate = Migrate(app, db)
from .models.users import User
|
[
"flask_sqlalchemy.SQLAlchemy",
"flask.Flask",
"flask_migrate.Migrate"
] |
[((174, 189), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (179, 189), False, 'from flask import Flask\n'), ((227, 242), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (237, 242), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((253, 269), 'flask_migrate.Migrate', 'Migrate', (['app', 'db'], {}), '(app, db)\n', (260, 269), False, 'from flask_migrate import Migrate\n')]
|
# @createTime : 2019/10/30 9:13
# @author : Mou
# @fileName: plan-schedule.py
# Front-end API for the plan scheduling module
import json
from flask import current_app
from flask import request
from mesService import config_dict
from mesService.lib.pgwrap.db import connection
class PlanSchedule(object):
def getsortlist(self):
reqparam = request.data
try:
reqparam = json.loads(reqparam)
count = reqparam['count']
wipordertype = reqparam['wipordertype']
base_sql = "select get_wipsortlist(%d,%d);"%(count,wipordertype)
result = current_app.db.query_one(base_sql)
except Exception:
result = {
"status":"server error",
"message":"search error"
}
res = json.dumps(result)
return res
if result:
return result[0]
else:
result = {
"status":"error",
"message":"search error"
}
res = json.dumps(result)
return res
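# Illustrative request body this endpoint expects (the field values here are
# assumptions):
#
#     {"count": 10, "wipordertype": 1}
#
# json.loads() parses it above and the two integers feed the get_wipsortlist() query.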
|
[
"flask.current_app.db.query_one",
"json.loads",
"json.dumps"
] |
[((372, 392), 'json.loads', 'json.loads', (['reqparam'], {}), '(reqparam)\n', (382, 392), False, 'import json\n'), ((581, 615), 'flask.current_app.db.query_one', 'current_app.db.query_one', (['base_sql'], {}), '(base_sql)\n', (605, 615), False, 'from flask import current_app\n'), ((1004, 1022), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (1014, 1022), False, 'import json\n'), ((769, 787), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (779, 787), False, 'import json\n')]
|
import pandas as pd
import numpy as np
from statsmodels.formula.api import ols
import plotly_express
import plotly.graph_objs as go
from plotly.subplots import make_subplots
# Read in data
batter_data = pd.read_csv("~/Desktop/MLB_FA/Data/fg_bat_data.csv")
del batter_data['Age']
print(len(batter_data))
print(batter_data.head())
pitcher_data = pd.read_csv("~/Desktop/MLB_FA/Data/fg_pitch_data.csv")
del pitcher_data['Age']
print(len(pitcher_data))
print(pitcher_data.head())
salary_data = pd.read_csv("~/Desktop/MLB_FA/Data/salary_data.csv")
print(len(salary_data))
injury_data = pd.read_csv("~/Desktop/MLB_FA/Data/injury_data_use.csv")
# Check for whether there is overlap between injury data and the salary data players
# injury_data_players = injury_data['Player'].unique()
# mutual = salary_data[salary_data['Player'].isin(injury_data_players)] # 945 out of 1135 players included
# excl = salary_data[~salary_data['Player'].isin(injury_data_players)]
# print(len(excl['Player'].unique())) # 129 unique players injury data omitted; use mlb.com trans for these
# Define discounting: convert each contract to net present value (NPV)
def npv(df, rate):
r = rate
df['Salary'] = pd.to_numeric(df['Salary'])
df['AAV'] = df['Salary'] / df['Years']
df['NPV'] = 0
df['NPV'] = round(df['AAV'] * (1 - (1 / ((1 + r) ** df['Years']))) / r, 2)
return df
salary_data = npv(salary_data, 0.05)
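# Worked example of npv() above: a 3-year, $30M contract at r = 0.05 has
# AAV = 10, so NPV = 10 * (1 - 1 / 1.05 ** 3) / 0.05 ≈ 27.23 ($M).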
# Lagged metrics to see if there is carryover value / value in continuity
class Metrics:
def lagged_batter(df):
df['WAR'] = pd.to_numeric(df['WAR'])
df['y_n1_war'] = df.groupby("Name")['WAR'].shift(1)
df['y_n2_war'] = df.groupby("Name")['y_n1_war'].shift(1)
df['y_n3_war'] = df.groupby("Name")['y_n2_war'].shift(1)
df['y_n4_war'] = df.groupby("Name")['y_n3_war'].shift(1)
df['y_n5_war'] = df.groupby("Name")['y_n4_war'].shift(1)
df['y_n6_war'] = df.groupby("Name")['y_n5_war'].shift(1)
df['wOBA'] = pd.to_numeric(df['wOBA'])
df['y_n1_wOBA'] = df.groupby("Name")['wOBA'].shift(1)
df['y_n2_wOBA'] = df.groupby("Name")['y_n1_wOBA'].shift(1)
df['y_n3_wOBA'] = df.groupby("Name")['y_n2_wOBA'].shift(1)
df['y_n4_wOBA'] = df.groupby("Name")['y_n3_wOBA'].shift(1)
df['wRC+'] = pd.to_numeric(df['wRC+'])
df['y_n1_wRC+'] = df.groupby("Name")['wRC+'].shift(1)
df['y_n2_wRC+'] = df.groupby("Name")['y_n1_wRC+'].shift(1)
df['y_n1_war_pa'] = df.groupby("Name")['WAR_PA'].shift(1)
df['y_n2_war_pa'] = df.groupby("Name")['y_n1_war_pa'].shift(1)
df['y_n3_war_pa'] = df.groupby("Name")['y_n2_war_pa'].shift(1)
df['y_n4_war_pa'] = df.groupby("Name")['y_n3_war_pa'].shift(1)
df['y_n5_war_pa'] = df.groupby("Name")['y_n4_war_pa'].shift(1)
df['y_n6_war_pa'] = df.groupby("Name")['y_n5_war_pa'].shift(1)
df["BB%"] = df["BB%"].apply(lambda x: x.replace("%", ""))
df['BB%'] = pd.to_numeric(df['BB%'])
df["K%"] = df["K%"].apply(lambda x: x.replace("%", ""))
df['K%'] = pd.to_numeric(df['K%'])
df.rename(columns={'BB%': 'BBpct', 'K%': 'Kpct'}, inplace=True)
return df
def lagged_pitcher(df):
df['WAR'] = pd.to_numeric(df['WAR'])
df['y_n1_war'] = df.groupby("Name")['WAR'].shift(1)
df['y_n2_war'] = df.groupby("Name")['y_n1_war'].shift(1)
df['y_n3_war'] = df.groupby("Name")['y_n2_war'].shift(1)
df['y_n4_war'] = df.groupby("Name")['y_n3_war'].shift(1)
df['y_n5_war'] = df.groupby("Name")['y_n4_war'].shift(1)
df['y_n6_war'] = df.groupby("Name")['y_n5_war'].shift(1)
# df['ERA-'] = pd.to_numeric(df['ERA-'])
# df['y_n1_ERA-'] = df.groupby("Name")['ERA-'].shift(1)
# df['y_n2_ERA-'] = df.groupby("Name")['y_n1_ERA-'].shift(1)
df['xFIP'] = pd.to_numeric(df['xFIP'])
df['y_n1_xFIP'] = df.groupby("Name")['xFIP'].shift(1)
df['y_n2_xFIP'] = df.groupby("Name")['y_n1_xFIP'].shift(1)
df['y_n1_war_tbf'] = df.groupby("Name")['WAR_TBF'].shift(1)
df['y_n2_war_tbf'] = df.groupby("Name")['y_n1_war_tbf'].shift(1)
df['y_n3_war_tbf'] = df.groupby("Name")['y_n2_war_tbf'].shift(1)
df['y_n4_war_tbf'] = df.groupby("Name")['y_n3_war_tbf'].shift(1)
df['y_n5_war_tbf'] = df.groupby("Name")['y_n4_war_tbf'].shift(1)
df['y_n6_war_tbf'] = df.groupby("Name")['y_n5_war_tbf'].shift(1)
df['BB%'] = df['BB%'].astype(str)
df["BB%"] = df["BB%"].apply(lambda x: x.replace("%", ""))
df['BB%'] = pd.to_numeric(df['BB%'])
df['K%'] = df['K%'].astype(str)
df["K%"] = df["K%"].apply(lambda x: x.replace("%", ""))
df['K%'] = pd.to_numeric(df['K%'])
df['K-BB%'] = df['K-BB%'].astype(str)
df["K-BB%"] = df["K-BB%"].apply(lambda x: x.replace("%", ""))
df['K-BB%'] = pd.to_numeric(df['K-BB%'])
df['SwStr%'] = df['SwStr%'].astype(str)
df["SwStr%"] = df["SwStr%"].apply(lambda x: x.replace("%", ""))
df['SwStr%'] = pd.to_numeric(df['SwStr%'])
df['LOB%'] = df['LOB%'].astype(str)
df["LOB%"] = df["LOB%"].apply(lambda x: x.replace("%", ""))
df['LOB%'] = pd.to_numeric(df['LOB%'])
# df['CB%'] = df['CB%'].astype(str)
# df["CB%"] = df["CB%"].apply(lambda x: x.replace("%", ""))
# df['CB%'] = pd.to_numeric(df['CB%'])
df.rename(columns={'BB%': 'BBpct', 'K%': 'Kpct', 'K-BB%': 'K_minus_BBpct', 'CB%': 'CBpct',
'SwStr%': 'Swstrpct'}, inplace=True)
return df
def fix_position(df):
df['Position'] = np.where(df['Position'] == "OF", "CF", df['Position'])
df['Position'] = np.where((df['Position'] == "LF") | (df['Position'] == "RF"),
"Corner Outfield", df['Position'])
df['Position'] = np.where(df['Position'] == "P", "RP", df['Position'])
# df['Position'] = np.where(df['Position'] == "SP", 1, df['Position'])
# df['Position'] = np.where(df['Position'] == "C", 2, df['Position'])
# df['Position'] = np.where(df['Position'] == "1B", 3, df['Position'])
# df['Position'] = np.where(df['Position'] == "2B", 4, df['Position'])
# df['Position'] = np.where(df['Position'] == "3B", 5, df['Position'])
# df['Position'] = np.where(df['Position'] == "SS", 6, df['Position'])
# df['Position'] = np.where(df['Position'] == "Corner Outfield", 7, df['Position'])
# df['Position'] = np.where(df['Position'] == "CF", 8, df['Position'])
# df['Position'] = np.where(df['Position'] == "RP", 9, df['Position'])
# df['Position'] = np.where(df['Position'] == "DH", 10, df['Position'])
return df
def rate_stats_batter(df):
df['WAR_PA'] = df['WAR'] / df['PA'] # add in rate based WAR (per PA, game played, etc)
df['oWAR_PA'] = df['oWAR'] / df['PA']
df['WAR_PA'] = round(df['WAR_PA'], 3)
df['oWAR_PA'] = round(df['oWAR_PA'], 3)
return df
def rate_stats_pitcher(df):
df['WAR_TBF'] = df['WAR'] / df['TBF'] # add in rate based WAR (per IP, etc)
# df['WAR_IP'] = df['WAR'] / df['IP']
df['wFB_TBF'] = df['wFB'] / df['TBF']
df['WAR_TBF'] = round(df['WAR_TBF'], 3)
# df['WAR_IP'] = round(df['WAR_IP'], 3)
df['wFB_TBF'] = round(df['wFB_TBF'], 3)
return df
def injury_engineering(df):
df['two_year_inj_avg'] = 0
df.loc[:, "two_year_inj_avg"] = (
df.groupby("Player")["injury_duration"].shift(1) / df.groupby("Player")["injury_duration"].shift(
2) - 1)
df['Injury'] = df['Injury'].fillna("None")
df['injury_duration'] = df['injury_duration'].fillna(0)
return df
def short_season_fix_batter(df):
df['WAR_162'] = np.where(df['Year'] == 2021, df['WAR']*2.3, df['WAR'])
df['PA_162'] = np.where(df['Year'] == 2021, df['PA']*2.3, df['PA'])
df['oWAR_162'] = np.where(df['Year'] == 2021, df['oWAR'] * 2.3, df['oWAR'])
df['dWAR_162'] = np.where(df['Year'] == 2021, df['dWAR'] * 2.3, df['dWAR'])
return df
def short_season_fix_pitcher(df):
df['WAR_162'] = np.where(df['Year'] == 2021, df['WAR']*2.3, df['WAR'])
df['IP_162'] = np.where(df['Year'] == 2021, df['IP']*2.3, df['IP'])
return df
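# Toy illustration (hypothetical data) of the groupby/shift lagging used in
# Metrics above:
#
#     toy = pd.DataFrame({"Name": ["A", "A", "A"], "Year": [2019, 2020, 2021],
#                         "WAR": [1.0, 2.0, 3.0]})
#     toy["y_n1_war"] = toy.groupby("Name")["WAR"].shift(1)  # NaN, 1.0, 2.0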
class NonLinearVars():
def fg_batter_vars(df):
df['WAR_sq'] = np.where(df['WAR'] > 0, df['WAR'] ** 2, df['WAR'] * 2)
df['y_n1_war_sq'] = np.where(df['y_n1_war'] > 0, df['y_n1_war'] ** 2, df['y_n1_war'] * 2)
df['y_n2_war_sq'] = np.where(df['y_n2_war'] > 0, df['y_n2_war'] ** 2, df['y_n2_war'] * 2)
df['y_n3_war_sq'] = np.where(df['y_n3_war'] > 0, df['y_n3_war'] ** 2, df['y_n3_war'] * 2)
df['y_n4_war_sq'] = np.where(df['y_n4_war'] > 0, df['y_n4_war'] ** 2, df['y_n4_war'] * 2)
df['y_n5_war_sq'] = np.where(df['y_n5_war'] > 0, df['y_n5_war'] ** 2, df['y_n5_war'] * 2)
df['y_n6_war_sq'] = np.where(df['y_n6_war'] > 0, df['y_n6_war'] ** 2, df['y_n6_war'] * 2)
df['y_n1_wOBA_sq'] = df['y_n1_wOBA'] ** 2
df['y_n2_wOBA_sq'] = df['y_n2_wOBA'] ** 2
df['y_n1_wRC+_sq'] = df['y_n1_wRC+'] ** 2
df['y_n2_wRC+_sq'] = df['y_n2_wRC+'] ** 2
return df
def fg_pitcher_vars(df):
df['WAR_sq'] = df['WAR'] **2
df['y_n1_war_sq'] = np.where(df['y_n1_war'] > 0, df['y_n1_war'] ** 2, df['y_n1_war'] * 2)
df['y_n2_war_sq'] = np.where(df['y_n2_war'] > 0, df['y_n2_war'] ** 2, df['y_n2_war'] * 2)
df['y_n3_war_sq'] = np.where(df['y_n3_war'] > 0, df['y_n3_war'] ** 2, df['y_n3_war'] * 2)
df['y_n4_war_sq'] = np.where(df['y_n4_war'] > 0, df['y_n4_war'] ** 2, df['y_n4_war'] * 2)
df['y_n5_war_sq'] = np.where(df['y_n5_war'] > 0, df['y_n5_war'] ** 2, df['y_n5_war'] * 2)
df['y_n6_war_sq'] = np.where(df['y_n6_war'] > 0, df['y_n6_war'] ** 2, df['y_n6_war'] * 2)
# df['ERA-_sq'] = df['ERA-'] **2
# df['y_n1_ERA-_sq'] = df['y_n1_ERA-'] **2
# df['y_n2_ERA-_sq'] = df['y_n2_ERA-'] **2
df['xFIP_sq'] = df['xFIP'] **2
df['y_n1_xFIP_sq'] = df['y_n1_xFIP'] **2
df['y_n2_xFIP_sq'] = df['y_n2_xFIP'] **2
return df
def salary_vars(df):
# df['Age'] = df['Age'].astype('int')
df['Age_sq'] = df['Age'] ** 2
df['Age_log'] = np.log(df['Age'])
return df
# Attach the injury data to the players, merge on player and year
def merge_injuries(salary_df, injury_df):
merged_df = pd.merge(salary_df, injury_df, how='left', left_on=['Player', 'Season'], right_on=['Player', 'Year'])
del merged_df['Year']
return merged_df
# MA
print(len(salary_data))
salary_data = merge_injuries(salary_data, injury_data)
print(len(salary_data))
salary_data['injury_duration'] = salary_data['injury_duration'].fillna(0)
salary_data = Metrics.injury_engineering(salary_data)
# Lag
batter_data = Metrics.short_season_fix_batter(batter_data)
batter_data = Metrics.rate_stats_batter(batter_data)
batter_data = Metrics.lagged_batter(batter_data)
pitcher_data = Metrics.short_season_fix_pitcher(pitcher_data)
pitcher_data = Metrics.rate_stats_pitcher(pitcher_data)
pitcher_data = Metrics.lagged_pitcher(pitcher_data)
# Position fix
salary_data = Metrics.fix_position(salary_data)
# Non Linears
batter_data = NonLinearVars.fg_batter_vars(batter_data)
pitcher_data = NonLinearVars.fg_pitcher_vars(pitcher_data)
salary_data = NonLinearVars.salary_vars(salary_data)
# Merge data sets (one pitcher, one batter)
batter_merged = pd.merge(batter_data, salary_data, left_on=['Name', 'Year'], right_on=['Player', 'Season'])
batter_merged = batter_merged[(batter_merged['Position'] != "SP") & (batter_merged['Position'] != "RP")] # remove P's
print(len(batter_merged))
pitcher_merged = pd.merge(pitcher_data, salary_data, left_on=['Name', 'Year'], right_on=['Player', 'Season'])
pitcher_merged = pitcher_merged[(pitcher_merged['Position'] == "SP") | (pitcher_merged['Position'] == "RP")] # keep P's
print(len(pitcher_merged))
# Begin modeling
# train_data_batter = batter_merged[(batter_merged['Year'] != max(batter_merged['Year']))]
# train_data_pitcher = pitcher_merged[(pitcher_merged['Year'] != max(pitcher_merged['Year']))]
train_data_batter = batter_merged.loc[~batter_merged['NPV'].isnull()]
train_data_pitcher = pitcher_merged.loc[~pitcher_merged['NPV'].isnull()]
test_data_batter = batter_merged[
# (batter_merged['Year'] == max(batter_merged['Year']))
# &
(np.isnan(batter_merged['NPV']))]
test_data_pitcher = pitcher_merged[
# (pitcher_merged['Year'] == max(pitcher_merged['Year']))
# &
(np.isnan(pitcher_merged['NPV']))]
train_data_batter.to_csv('~/Desktop/MLB_FA/Data/train_data_batter.csv', index=False)
train_data_pitcher.to_csv('~/Desktop/MLB_FA/Data/train_data_pitcher.csv', index=False)
test_data_batter.to_csv('~/Desktop/MLB_FA/Data/test_data_batter.csv', index=False)
test_data_pitcher.to_csv('~/Desktop/MLB_FA/Data/test_data_pitcher.csv', index=False)
fit = ols('NPV ~ C(Position) + WAR_sq + WAR + Age', data=train_data_batter).fit()
fit.summary() # 0.597 r-sq, 0.587 adj r-sq
# Plot NPV / WAR to see nonlinear relationship
plot_data = train_data_batter[(train_data_batter['Year'] > 2010)]
fig = plotly_express.scatter(plot_data, x="dWAR", y="NPV", color='Position',
hover_data=['Player', 'Position', 'Year', 'Prev Team'],
title="dWAR, NPV Colored By Position (since {})".format(min(plot_data['Year'])))
fig.show()
# Plot WAR / Rate WAR
plot_data = batter_data[(batter_data['Year'] == 2021) & (batter_data['PA'] > 100)]
fig = plotly_express.scatter(plot_data, x="PA", y="dWAR", color='Name')
fig.update_layout(
hoverlabel=dict(
bgcolor="white",
font_size=10,
font_family="Arial"
)
)
fig.show()
# remove linear WAR
# Let's add a season factor and qualifying offer
fit = ols('NPV ~ C(Position) + C(Season) + WAR_sq + Age + Qual + WAR_PA', data=train_data_batter).fit()
fit.summary()
# Getting better, but there's more unexplained variance. Let's try log of Age and prior season's WAR
# Log Age
fit = ols('NPV ~ C(Position) + C(Season) + y_n1_war_sq + WAR_sq + Age_log + Qual + WAR_PA + y_n1_war_pa',
data=train_data_batter).fit()
fit.summary()
# Still marginally improving. Up to around 50% of the variance explained.
# WAR is a counting stat, let's add in base-running UBR, non-log Age
# UBR
fit = ols('NPV ~ C(Position) + y_n1_war_sq + WAR_sq + Age + UBR + Qual', data=train_data_batter).fit()
fit.summary()
# Try some new variables (e.g. OPS, ISO, wRC+, wOBA, y_n2_war_sq, etc)
fit = ols('NPV ~ C(Position) + y_n2_war_sq + y_n1_war_sq + WAR_sq + Age + UBR + Qual + wOBA + ISO',
data=train_data_batter).fit()
fit.summary()
# Now let's consider only deals signed for multiple-years
train_data_batter_multiyear = train_data_batter[(train_data_batter['Years'] > 1)]
fit = ols('NPV ~ C(Position) + y_n1_war_sq + WAR_sq + Age + UBR + Qual', data=train_data_batter_multiyear).fit()
fit.summary()
# Single year only
train_data_batter_single = train_data_batter[(train_data_batter['Years'] == 1)]
fit = ols('NPV ~ C(Position) + y_n1_war_sq + WAR_sq + Age + Qual', data=train_data_batter_single).fit()
fit.summary()
# So what are teams using to assess these single-year contracts?
fit = ols('NPV ~ ISO + WAR_sq + y_n1_war_sq + y_n2_war_sq + wGDP + BABIP + Qual', data=train_data_batter_single).fit()
fit.summary()
# Now add injury duration
fit = ols('NPV ~ ISO + WAR_sq + y_n1_war_sq + y_n2_war_sq + injury_duration + Qual', data=train_data_batter).fit()
fit.summary()
# Kitchen sink
fit_rate = ols('NPV ~ BBpct + Kpct + AVG + OBP + SLG + OPS + ISO + Spd + BABIP + UBR + wGDP + wSB + wRC + '
'wRAA + wOBA + WAR + dWAR + oWAR + Year + WAR_PA + oWAR_PA + y_n1_war + y_n2_war + y_n3_war + '
'y_n4_war + y_n5_war + y_n6_war + y_n1_wOBA + y_n2_wOBA + y_n3_wOBA + y_n4_wOBA + '
'y_n1_war_pa + y_n2_war_pa + y_n3_war_pa + y_n4_war_pa + y_n5_war_pa + y_n6_war_pa +'
'WAR_sq + y_n1_war_sq + y_n2_war_sq + y_n3_war_sq + y_n4_war_sq + y_n5_war_sq + y_n6_war_sq + '
'y_n1_wOBA_sq + y_n2_wOBA_sq + Position + Age + Qual + injury_duration', data=train_data_batter).fit()
fit_rate.summary()
# Remove unwanted vars
fit_rate = ols('NPV ~ Kpct + Year + y_n1_war +'
'y_n1_wOBA + y_n2_war_pa + WAR_sq + y_n1_war_sq +'
'Age + Qual', data=train_data_batter).fit()
fit_rate.summary()
# PITCHERS
train_data_pitcher['pos_dummy'] = np.where(train_data_pitcher['Position'] == "SP", 1, 0)
fit = ols('NPV ~ WAR_sq + Age + Qual + pos_dummy + FBv + Kpct + y_n1_war_sq', data=train_data_pitcher).fit()
fit.summary()
# Predict WAR
fit = ols('WAR ~ FBv + Kpct + BBpct + FIP + IP + wFB + pos_dummy', data=train_data_pitcher).fit()
fit.summary()
# Let's add in injury duration
train_data_pitcher['injury_duration_log'] = np.log(train_data_pitcher['injury_duration'])
fit = ols('NPV ~ WAR_sq + Age + Qual + injury_duration + pos_dummy', data=train_data_pitcher).fit()
fit.summary()
# Add FBv
fit = ols('NPV ~ WAR_sq + Age + Qual + injury_duration + FBv + pos_dummy', data=train_data_pitcher).fit()
fit.summary()
# Kpct
fit = ols('NPV ~ WAR_sq + Age + Qual + injury_duration + FBv + Kpct + pos_dummy + BBpct', data=train_data_pitcher).fit()
fit.summary()
# CBv
fit = ols('NPV ~ Age + Qual + injury_duration + FBv + Kpct + CBv + pos_dummy', data=train_data_pitcher).fit()
fit.summary()
# Rate stats
fit_rate = ols(
'NPV ~ Age + WAR_TBF + y_n1_war_tbf + y_n2_war_tbf + FBv + xFIP_sq + pos_dummy + injury_duration + Qual',
data=train_data_pitcher).fit()
fit_rate.summary()
multi_year_pitcher = train_data_pitcher[(train_data_pitcher['Years'] > 1)]
fit_rate_multi = ols(
'NPV ~ Age + WAR_TBF + y_n1_war_tbf + y_n2_war_tbf + FBv + xFIP_sq + pos_dummy + injury_duration',
data=multi_year_pitcher).fit()
fit_rate_multi.summary()
# Change position and Season to random effect
batter_grp = batter_merged.groupby(['Season']).agg({
'NPV': sum,
'WAR': sum,
'Name': 'nunique'
}).reset_index()
batter_grp['NPV'] = batter_grp['NPV'] / 1000000
fig = plotly_express.bar(batter_grp, x="Season", y="NPV",
color_continuous_scale=plotly_express.colors.qualitative.D3,
title="Yearly total NPV and total WAR")
fig.add_trace(go.Scatter(x=batter_grp['Season'], y=batter_grp['WAR'], line=dict(color='red'), name='WAR'))
fig.show()
# Create figure with secondary y-axis
fig = make_subplots(specs=[[{"secondary_y": True}]])
# Add traces
fig.add_trace(
go.Bar(x=batter_grp['Season'], y=batter_grp['NPV'], name="NPV total"),
secondary_y=False,
)
fig.add_trace(
go.Scatter(x=batter_grp['Season'], y=batter_grp['WAR'], name="WAR total"),
secondary_y=True,
)
# Add figure title
fig.update_layout(
title_text="Yearly total NPV and total WAR"
)
# Set x-axis title
fig.update_xaxes(title_text="Off-Season Year")
# Set y-axes titles
fig.update_yaxes(title_text="<b>NPV</b> total ($ Millions)", secondary_y=False)
fig.update_yaxes(title_text="<b>WAR</b> total", secondary_y=True)
fig.show()
|
[
"numpy.log",
"pandas.read_csv",
"plotly.graph_objs.Scatter",
"pandas.merge",
"numpy.isnan",
"numpy.where",
"statsmodels.formula.api.ols",
"plotly_express.scatter",
"plotly.subplots.make_subplots",
"plotly_express.bar",
"pandas.to_numeric",
"plotly.graph_objs.Bar"
] |
[((204, 256), 'pandas.read_csv', 'pd.read_csv', (['"""~/Desktop/MLB_FA/Data/fg_bat_data.csv"""'], {}), "('~/Desktop/MLB_FA/Data/fg_bat_data.csv')\n", (215, 256), True, 'import pandas as pd\n'), ((346, 400), 'pandas.read_csv', 'pd.read_csv', (['"""~/Desktop/MLB_FA/Data/fg_pitch_data.csv"""'], {}), "('~/Desktop/MLB_FA/Data/fg_pitch_data.csv')\n", (357, 400), True, 'import pandas as pd\n'), ((492, 544), 'pandas.read_csv', 'pd.read_csv', (['"""~/Desktop/MLB_FA/Data/salary_data.csv"""'], {}), "('~/Desktop/MLB_FA/Data/salary_data.csv')\n", (503, 544), True, 'import pandas as pd\n'), ((584, 640), 'pandas.read_csv', 'pd.read_csv', (['"""~/Desktop/MLB_FA/Data/injury_data_use.csv"""'], {}), "('~/Desktop/MLB_FA/Data/injury_data_use.csv')\n", (595, 640), True, 'import pandas as pd\n'), ((11549, 11645), 'pandas.merge', 'pd.merge', (['batter_data', 'salary_data'], {'left_on': "['Name', 'Year']", 'right_on': "['Player', 'Season']"}), "(batter_data, salary_data, left_on=['Name', 'Year'], right_on=[\n 'Player', 'Season'])\n", (11557, 11645), True, 'import pandas as pd\n'), ((11804, 11901), 'pandas.merge', 'pd.merge', (['pitcher_data', 'salary_data'], {'left_on': "['Name', 'Year']", 'right_on': "['Player', 'Season']"}), "(pitcher_data, salary_data, left_on=['Name', 'Year'], right_on=[\n 'Player', 'Season'])\n", (11812, 11901), True, 'import pandas as pd\n'), ((13655, 13720), 'plotly_express.scatter', 'plotly_express.scatter', (['plot_data'], {'x': '"""PA"""', 'y': '"""dWAR"""', 'color': '"""Name"""'}), "(plot_data, x='PA', y='dWAR', color='Name')\n", (13677, 13720), False, 'import plotly_express\n'), ((16600, 16654), 'numpy.where', 'np.where', (["(train_data_pitcher['Position'] == 'SP')", '(1)', '(0)'], {}), "(train_data_pitcher['Position'] == 'SP', 1, 0)\n", (16608, 16654), True, 'import numpy as np\n'), ((16981, 17026), 'numpy.log', 'np.log', (["train_data_pitcher['injury_duration']"], {}), "(train_data_pitcher['injury_duration'])\n", (16987, 17026), True, 'import numpy as np\n'), ((18229, 18391), 'plotly_express.bar', 'plotly_express.bar', (['batter_grp'], {'x': '"""Season"""', 'y': '"""NPV"""', 'color_continuous_scale': 'plotly_express.colors.qualitative.D3', 'title': '"""Yearly total NPV and total WAR"""'}), "(batter_grp, x='Season', y='NPV', color_continuous_scale=\n plotly_express.colors.qualitative.D3, title=\n 'Yearly total NPV and total WAR')\n", (18247, 18391), False, 'import plotly_express\n'), ((18624, 18670), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'specs': "[[{'secondary_y': True}]]"}), "(specs=[[{'secondary_y': True}]])\n", (18637, 18670), False, 'from plotly.subplots import make_subplots\n'), ((1143, 1170), 'pandas.to_numeric', 'pd.to_numeric', (["df['Salary']"], {}), "(df['Salary'])\n", (1156, 1170), True, 'import pandas as pd\n'), ((10515, 10620), 'pandas.merge', 'pd.merge', (['salary_df', 'injury_df'], {'how': '"""left"""', 'left_on': "['Player', 'Season']", 'right_on': "['Player', 'Year']"}), "(salary_df, injury_df, how='left', left_on=['Player', 'Season'],\n right_on=['Player', 'Year'])\n", (10523, 10620), True, 'import pandas as pd\n'), ((12500, 12530), 'numpy.isnan', 'np.isnan', (["batter_merged['NPV']"], {}), "(batter_merged['NPV'])\n", (12508, 12530), True, 'import numpy as np\n'), ((12644, 12675), 'numpy.isnan', 'np.isnan', (["pitcher_merged['NPV']"], {}), "(pitcher_merged['NPV'])\n", (12652, 12675), True, 'import numpy as np\n'), ((18704, 18773), 'plotly.graph_objs.Bar', 'go.Bar', ([], {'x': "batter_grp['Season']", 'y': "batter_grp['NPV']", 'name': '"""NPV 
total"""'}), "(x=batter_grp['Season'], y=batter_grp['NPV'], name='NPV total')\n", (18710, 18773), True, 'import plotly.graph_objs as go\n'), ((18820, 18893), 'plotly.graph_objs.Scatter', 'go.Scatter', ([], {'x': "batter_grp['Season']", 'y': "batter_grp['WAR']", 'name': '"""WAR total"""'}), "(x=batter_grp['Season'], y=batter_grp['WAR'], name='WAR total')\n", (18830, 18893), True, 'import plotly.graph_objs as go\n'), ((1511, 1535), 'pandas.to_numeric', 'pd.to_numeric', (["df['WAR']"], {}), "(df['WAR'])\n", (1524, 1535), True, 'import pandas as pd\n'), ((1943, 1968), 'pandas.to_numeric', 'pd.to_numeric', (["df['wOBA']"], {}), "(df['wOBA'])\n", (1956, 1968), True, 'import pandas as pd\n'), ((2254, 2279), 'pandas.to_numeric', 'pd.to_numeric', (["df['wRC+']"], {}), "(df['wRC+'])\n", (2267, 2279), True, 'import pandas as pd\n'), ((2918, 2942), 'pandas.to_numeric', 'pd.to_numeric', (["df['BB%']"], {}), "(df['BB%'])\n", (2931, 2942), True, 'import pandas as pd\n'), ((3026, 3049), 'pandas.to_numeric', 'pd.to_numeric', (["df['K%']"], {}), "(df['K%'])\n", (3039, 3049), True, 'import pandas as pd\n'), ((3190, 3214), 'pandas.to_numeric', 'pd.to_numeric', (["df['WAR']"], {}), "(df['WAR'])\n", (3203, 3214), True, 'import pandas as pd\n'), ((3805, 3830), 'pandas.to_numeric', 'pd.to_numeric', (["df['xFIP']"], {}), "(df['xFIP'])\n", (3818, 3830), True, 'import pandas as pd\n'), ((4523, 4547), 'pandas.to_numeric', 'pd.to_numeric', (["df['BB%']"], {}), "(df['BB%'])\n", (4536, 4547), True, 'import pandas as pd\n'), ((4672, 4695), 'pandas.to_numeric', 'pd.to_numeric', (["df['K%']"], {}), "(df['K%'])\n", (4685, 4695), True, 'import pandas as pd\n'), ((4835, 4861), 'pandas.to_numeric', 'pd.to_numeric', (["df['K-BB%']"], {}), "(df['K-BB%'])\n", (4848, 4861), True, 'import pandas as pd\n'), ((5006, 5033), 'pandas.to_numeric', 'pd.to_numeric', (["df['SwStr%']"], {}), "(df['SwStr%'])\n", (5019, 5033), True, 'import pandas as pd\n'), ((5168, 5193), 'pandas.to_numeric', 'pd.to_numeric', (["df['LOB%']"], {}), "(df['LOB%'])\n", (5181, 5193), True, 'import pandas as pd\n'), ((5589, 5643), 'numpy.where', 'np.where', (["(df['Position'] == 'OF')", '"""CF"""', "df['Position']"], {}), "(df['Position'] == 'OF', 'CF', df['Position'])\n", (5597, 5643), True, 'import numpy as np\n'), ((5669, 5769), 'numpy.where', 'np.where', (["((df['Position'] == 'LF') | (df['Position'] == 'RF'))", '"""Corner Outfield"""', "df['Position']"], {}), "((df['Position'] == 'LF') | (df['Position'] == 'RF'),\n 'Corner Outfield', df['Position'])\n", (5677, 5769), True, 'import numpy as np\n'), ((5825, 5878), 'numpy.where', 'np.where', (["(df['Position'] == 'P')", '"""RP"""', "df['Position']"], {}), "(df['Position'] == 'P', 'RP', df['Position'])\n", (5833, 5878), True, 'import numpy as np\n'), ((7799, 7855), 'numpy.where', 'np.where', (["(df['Year'] == 2021)", "(df['WAR'] * 2.3)", "df['WAR']"], {}), "(df['Year'] == 2021, df['WAR'] * 2.3, df['WAR'])\n", (7807, 7855), True, 'import numpy as np\n'), ((7877, 7931), 'numpy.where', 'np.where', (["(df['Year'] == 2021)", "(df['PA'] * 2.3)", "df['PA']"], {}), "(df['Year'] == 2021, df['PA'] * 2.3, df['PA'])\n", (7885, 7931), True, 'import numpy as np\n'), ((7955, 8013), 'numpy.where', 'np.where', (["(df['Year'] == 2021)", "(df['oWAR'] * 2.3)", "df['oWAR']"], {}), "(df['Year'] == 2021, df['oWAR'] * 2.3, df['oWAR'])\n", (7963, 8013), True, 'import numpy as np\n'), ((8039, 8097), 'numpy.where', 'np.where', (["(df['Year'] == 2021)", "(df['dWAR'] * 2.3)", "df['dWAR']"], {}), "(df['Year'] == 2021, df['dWAR'] * 2.3, 
df['dWAR'])\n", (8047, 8097), True, 'import numpy as np\n'), ((8179, 8235), 'numpy.where', 'np.where', (["(df['Year'] == 2021)", "(df['WAR'] * 2.3)", "df['WAR']"], {}), "(df['Year'] == 2021, df['WAR'] * 2.3, df['WAR'])\n", (8187, 8235), True, 'import numpy as np\n'), ((8257, 8311), 'numpy.where', 'np.where', (["(df['Year'] == 2021)", "(df['IP'] * 2.3)", "df['IP']"], {}), "(df['Year'] == 2021, df['IP'] * 2.3, df['IP'])\n", (8265, 8311), True, 'import numpy as np\n'), ((8404, 8458), 'numpy.where', 'np.where', (["(df['WAR'] > 0)", "(df['WAR'] ** 2)", "(df['WAR'] * 2)"], {}), "(df['WAR'] > 0, df['WAR'] ** 2, df['WAR'] * 2)\n", (8412, 8458), True, 'import numpy as np\n'), ((8487, 8556), 'numpy.where', 'np.where', (["(df['y_n1_war'] > 0)", "(df['y_n1_war'] ** 2)", "(df['y_n1_war'] * 2)"], {}), "(df['y_n1_war'] > 0, df['y_n1_war'] ** 2, df['y_n1_war'] * 2)\n", (8495, 8556), True, 'import numpy as np\n'), ((8585, 8654), 'numpy.where', 'np.where', (["(df['y_n2_war'] > 0)", "(df['y_n2_war'] ** 2)", "(df['y_n2_war'] * 2)"], {}), "(df['y_n2_war'] > 0, df['y_n2_war'] ** 2, df['y_n2_war'] * 2)\n", (8593, 8654), True, 'import numpy as np\n'), ((8683, 8752), 'numpy.where', 'np.where', (["(df['y_n3_war'] > 0)", "(df['y_n3_war'] ** 2)", "(df['y_n3_war'] * 2)"], {}), "(df['y_n3_war'] > 0, df['y_n3_war'] ** 2, df['y_n3_war'] * 2)\n", (8691, 8752), True, 'import numpy as np\n'), ((8781, 8850), 'numpy.where', 'np.where', (["(df['y_n4_war'] > 0)", "(df['y_n4_war'] ** 2)", "(df['y_n4_war'] * 2)"], {}), "(df['y_n4_war'] > 0, df['y_n4_war'] ** 2, df['y_n4_war'] * 2)\n", (8789, 8850), True, 'import numpy as np\n'), ((8879, 8948), 'numpy.where', 'np.where', (["(df['y_n5_war'] > 0)", "(df['y_n5_war'] ** 2)", "(df['y_n5_war'] * 2)"], {}), "(df['y_n5_war'] > 0, df['y_n5_war'] ** 2, df['y_n5_war'] * 2)\n", (8887, 8948), True, 'import numpy as np\n'), ((8977, 9046), 'numpy.where', 'np.where', (["(df['y_n6_war'] > 0)", "(df['y_n6_war'] ** 2)", "(df['y_n6_war'] * 2)"], {}), "(df['y_n6_war'] > 0, df['y_n6_war'] ** 2, df['y_n6_war'] * 2)\n", (8985, 9046), True, 'import numpy as np\n'), ((9360, 9429), 'numpy.where', 'np.where', (["(df['y_n1_war'] > 0)", "(df['y_n1_war'] ** 2)", "(df['y_n1_war'] * 2)"], {}), "(df['y_n1_war'] > 0, df['y_n1_war'] ** 2, df['y_n1_war'] * 2)\n", (9368, 9429), True, 'import numpy as np\n'), ((9458, 9527), 'numpy.where', 'np.where', (["(df['y_n2_war'] > 0)", "(df['y_n2_war'] ** 2)", "(df['y_n2_war'] * 2)"], {}), "(df['y_n2_war'] > 0, df['y_n2_war'] ** 2, df['y_n2_war'] * 2)\n", (9466, 9527), True, 'import numpy as np\n'), ((9556, 9625), 'numpy.where', 'np.where', (["(df['y_n3_war'] > 0)", "(df['y_n3_war'] ** 2)", "(df['y_n3_war'] * 2)"], {}), "(df['y_n3_war'] > 0, df['y_n3_war'] ** 2, df['y_n3_war'] * 2)\n", (9564, 9625), True, 'import numpy as np\n'), ((9654, 9723), 'numpy.where', 'np.where', (["(df['y_n4_war'] > 0)", "(df['y_n4_war'] ** 2)", "(df['y_n4_war'] * 2)"], {}), "(df['y_n4_war'] > 0, df['y_n4_war'] ** 2, df['y_n4_war'] * 2)\n", (9662, 9723), True, 'import numpy as np\n'), ((9752, 9821), 'numpy.where', 'np.where', (["(df['y_n5_war'] > 0)", "(df['y_n5_war'] ** 2)", "(df['y_n5_war'] * 2)"], {}), "(df['y_n5_war'] > 0, df['y_n5_war'] ** 2, df['y_n5_war'] * 2)\n", (9760, 9821), True, 'import numpy as np\n'), ((9850, 9919), 'numpy.where', 'np.where', (["(df['y_n6_war'] > 0)", "(df['y_n6_war'] ** 2)", "(df['y_n6_war'] * 2)"], {}), "(df['y_n6_war'] > 0, df['y_n6_war'] ** 2, df['y_n6_war'] * 2)\n", (9858, 9919), True, 'import numpy as np\n'), ((10352, 10369), 'numpy.log', 'np.log', (["df['Age']"], 
{}), "(df['Age'])\n", (10358, 10369), True, 'import numpy as np\n'), ((13026, 13095), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ C(Position) + WAR_sq + WAR + Age"""'], {'data': 'train_data_batter'}), "('NPV ~ C(Position) + WAR_sq + WAR + Age', data=train_data_batter)\n", (13029, 13095), False, 'from statsmodels.formula.api import ols\n'), ((13931, 14027), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ C(Position) + C(Season) + WAR_sq + Age + Qual + WAR_PA"""'], {'data': 'train_data_batter'}), "('NPV ~ C(Position) + C(Season) + WAR_sq + Age + Qual + WAR_PA', data=\n train_data_batter)\n", (13934, 14027), False, 'from statsmodels.formula.api import ols\n'), ((14161, 14290), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ C(Position) + C(Season) + y_n1_war_sq + WAR_sq + Age_log + Qual + WAR_PA + y_n1_war_pa"""'], {'data': 'train_data_batter'}), "('NPV ~ C(Position) + C(Season) + y_n1_war_sq + WAR_sq + Age_log + Qual + WAR_PA + y_n1_war_pa'\n , data=train_data_batter)\n", (14164, 14290), False, 'from statsmodels.formula.api import ols\n'), ((14472, 14568), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ C(Position) + y_n1_war_sq + WAR_sq + Age + UBR + Qual"""'], {'data': 'train_data_batter'}), "('NPV ~ C(Position) + y_n1_war_sq + WAR_sq + Age + UBR + Qual', data=\n train_data_batter)\n", (14475, 14568), False, 'from statsmodels.formula.api import ols\n'), ((14662, 14785), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ C(Position) + y_n2_war_sq + y_n1_war_sq + WAR_sq + Age + UBR + Qual + wOBA + ISO"""'], {'data': 'train_data_batter'}), "('NPV ~ C(Position) + y_n2_war_sq + y_n1_war_sq + WAR_sq + Age + UBR + Qual + wOBA + ISO'\n , data=train_data_batter)\n", (14665, 14785), False, 'from statsmodels.formula.api import ols\n'), ((14958, 15064), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ C(Position) + y_n1_war_sq + WAR_sq + Age + UBR + Qual"""'], {'data': 'train_data_batter_multiyear'}), "('NPV ~ C(Position) + y_n1_war_sq + WAR_sq + Age + UBR + Qual', data=\n train_data_batter_multiyear)\n", (14961, 15064), False, 'from statsmodels.formula.api import ols\n'), ((15186, 15283), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ C(Position) + y_n1_war_sq + WAR_sq + Age + Qual"""'], {'data': 'train_data_batter_single'}), "('NPV ~ C(Position) + y_n1_war_sq + WAR_sq + Age + Qual', data=\n train_data_batter_single)\n", (15189, 15283), False, 'from statsmodels.formula.api import ols\n'), ((15372, 15482), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ ISO + WAR_sq + y_n1_war_sq + y_n2_war_sq + wGDP + BABIP + Qual"""'], {'data': 'train_data_batter_single'}), "('NPV ~ ISO + WAR_sq + y_n1_war_sq + y_n2_war_sq + wGDP + BABIP + Qual',\n data=train_data_batter_single)\n", (15375, 15482), False, 'from statsmodels.formula.api import ols\n'), ((15532, 15638), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ ISO + WAR_sq + y_n1_war_sq + y_n2_war_sq + injury_duration + Qual"""'], {'data': 'train_data_batter'}), "('NPV ~ ISO + WAR_sq + y_n1_war_sq + y_n2_war_sq + injury_duration + Qual',\n data=train_data_batter)\n", (15535, 15638), False, 'from statsmodels.formula.api import ols\n'), ((15682, 16227), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ BBpct + Kpct + AVG + OBP + SLG + OPS + ISO + Spd + BABIP + UBR + wGDP + wSB + wRC + wRAA + wOBA + WAR + dWAR + oWAR + Year + WAR_PA + oWAR_PA + y_n1_war + y_n2_war + y_n3_war + y_n4_war + y_n5_war + y_n6_war + y_n1_wOBA + y_n2_wOBA + y_n3_wOBA + y_n4_wOBA + y_n1_war_pa + y_n2_war_pa + y_n3_war_pa + y_n4_war_pa + y_n5_war_pa + y_n6_war_pa +WAR_sq + y_n1_war_sq + y_n2_war_sq + y_n3_war_sq + y_n4_war_sq + y_n5_war_sq + y_n6_war_sq + y_n1_wOBA_sq + y_n2_wOBA_sq + Position + Age + Qual + injury_duration"""'], {'data': 'train_data_batter'}), "('NPV ~ BBpct + Kpct + AVG + OBP + SLG + OPS + ISO + Spd + BABIP + UBR + wGDP + wSB + wRC + wRAA + wOBA + WAR + dWAR + oWAR + Year + WAR_PA + oWAR_PA + y_n1_war + y_n2_war + y_n3_war + y_n4_war + y_n5_war + y_n6_war + y_n1_wOBA + y_n2_wOBA + y_n3_wOBA + y_n4_wOBA + y_n1_war_pa + y_n2_war_pa + y_n3_war_pa + y_n4_war_pa + y_n5_war_pa + y_n6_war_pa +WAR_sq + y_n1_war_sq + y_n2_war_sq + y_n3_war_sq + y_n4_war_sq + y_n5_war_sq + y_n6_war_sq + y_n1_wOBA_sq + y_n2_wOBA_sq + Position + Age + Qual + injury_duration'\n , data=train_data_batter)\n", (15685, 16227), False, 'from statsmodels.formula.api import ols\n'), ((16373, 16497), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ Kpct + Year + y_n1_war +y_n1_wOBA + y_n2_war_pa + WAR_sq + y_n1_war_sq +Age + Qual"""'], {'data': 'train_data_batter'}), "('NPV ~ Kpct + Year + y_n1_war +y_n1_wOBA + y_n2_war_pa + WAR_sq + y_n1_war_sq +Age + Qual'\n , data=train_data_batter)\n", (16376, 16497), False, 'from statsmodels.formula.api import ols\n'), ((16661, 16761), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ WAR_sq + Age + Qual + pos_dummy + FBv + Kpct + y_n1_war_sq"""'], {'data': 'train_data_pitcher'}), "('NPV ~ WAR_sq + Age + Qual + pos_dummy + FBv + Kpct + y_n1_war_sq',\n data=train_data_pitcher)\n", (16664, 16761), False, 'from statsmodels.formula.api import ols\n'), ((16799, 16889), 'statsmodels.formula.api.ols', 'ols', (['"""WAR ~ FBv + Kpct + BBpct + FIP + IP + wFB + pos_dummy"""'], {'data': 'train_data_pitcher'}), "('WAR ~ FBv + Kpct + BBpct + FIP + IP + wFB + pos_dummy', data=\n train_data_pitcher)\n", (16802, 16889), False, 'from statsmodels.formula.api import ols\n'), ((17033, 17125), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ WAR_sq + Age + Qual + injury_duration + pos_dummy"""'], {'data': 'train_data_pitcher'}), "('NPV ~ WAR_sq + Age + Qual + injury_duration + pos_dummy', data=\n train_data_pitcher)\n", (17036, 17125), False, 'from statsmodels.formula.api import ols\n'), ((17158, 17256), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ WAR_sq + Age + Qual + injury_duration + FBv + pos_dummy"""'], {'data': 'train_data_pitcher'}), "('NPV ~ WAR_sq + Age + Qual + injury_duration + FBv + pos_dummy', data=\n train_data_pitcher)\n", (17161, 17256), False, 'from statsmodels.formula.api import ols\n'), ((17286, 17399), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ WAR_sq + Age + Qual + injury_duration + FBv + Kpct + pos_dummy + BBpct"""'], {'data': 'train_data_pitcher'}), "('NPV ~ WAR_sq + Age + Qual + injury_duration + FBv + Kpct + pos_dummy + BBpct'\n , data=train_data_pitcher)\n", (17289, 17399), False, 'from statsmodels.formula.api import ols\n'), ((17428, 17529), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ Age + Qual + injury_duration + FBv + Kpct + CBv + pos_dummy"""'], {'data': 'train_data_pitcher'}), "('NPV ~ Age + Qual + injury_duration + FBv + Kpct + CBv + pos_dummy',\n data=train_data_pitcher)\n", (17431, 17529), False, 'from statsmodels.formula.api import ols\n'), ((17571, 17710), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ Age + WAR_TBF + y_n1_war_tbf + y_n2_war_tbf + FBv + xFIP_sq + pos_dummy + injury_duration + Qual"""'], {'data': 'train_data_pitcher'}), "('NPV ~ Age + WAR_TBF + y_n1_war_tbf + y_n2_war_tbf + FBv + xFIP_sq + pos_dummy + injury_duration + Qual'\n , data=train_data_pitcher)\n", (17574, 17710), False, 'from statsmodels.formula.api import ols\n'), ((17833, 17965), 'statsmodels.formula.api.ols', 'ols', (['"""NPV ~ Age + WAR_TBF + y_n1_war_tbf + y_n2_war_tbf + FBv + xFIP_sq + pos_dummy + injury_duration"""'], {'data': 'multi_year_pitcher'}), "('NPV ~ Age + WAR_TBF + y_n1_war_tbf + y_n2_war_tbf + FBv + xFIP_sq + pos_dummy + injury_duration'\n , data=multi_year_pitcher)\n", (17836, 17965), False, 'from statsmodels.formula.api import ols\n')]
|
# Libraries
import pandas as pd
import numpy as np
import interface
import time
def get_dataframes(start_time, year=2010):
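    """Read each yearly CSV (2010 onward), clean it, and concatenate every year into a single dataframe."""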
dataframes = None
columns_to_drop = interface.get_columns_to_drop()
amount_csv = interface.get_amount_of_csv()
for year in range(year, year+amount_csv):
print('------------------------------------------------------------')
path = "datasets/Rendimiento por estudiante "+str(year)+".csv"
        # Read the data using ';' as separator, because some school names contain commas that would break parsing
encoding = 'utf-8'
if year == 2014 or year == 2015:
encoding = 'latin'
if year == 2016 or year == 2018 or year == 2019:
encoding += '-sig'
print('Reading: '+path+' ('+encoding+')')
interface.get_time(start_time)
df = pd.read_csv(path, sep=';', low_memory=False, encoding=encoding)
interface.get_ram(info='File loaded')
interface.get_time(start_time)
df.columns = map(str.upper, df.columns)
drop = []
df_columns = df.columns.values.tolist()
for column in columns_to_drop:
if column in df_columns:
drop.append(column)
#print('Dropped tables:', drop)
df.drop(columns=drop, inplace=True, axis=1)
        # Clean data: these columns are present in every year
df.fillna({'SIT_FIN': '-'}, inplace=True)
df['SIT_FIN'] = df['SIT_FIN'].replace([' '], '-')
df['COD_SEC'] = df['COD_SEC'].replace([' '], 0)
df['COD_ESPE'] = df['COD_ESPE'].replace([' '], 0)
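        # PROM_GRAL uses a comma as the decimal separator (e.g. '5,4'); normalize it to a float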
df["PROM_GRAL"] = df["PROM_GRAL"].str.replace(',', ".").astype(float)
        # These columns are missing in some years; fill the gaps with NaN
        if year <= 2012:
            df["COD_PRO_RBD"] = np.nan # present in 2013+
            df["COD_JOR"] = np.nan # present in 2013+
        if year <= 2013: # only present in the years 2010-2013
            df['INT_ALU'] = df['INT_ALU'].replace(['.'], 2)
            df['INT_ALU'] = df['INT_ALU'].replace([' '], 2)
            df["COD_ENSE2"] = np.nan # present in 2014+
        if year >= 2014: # fill with NaN
            df['INT_ALU'] = np.nan
        #print('Row count:', len(df))
if dataframes is None:
dataframes = df
else:
dataframes = pd.concat([dataframes, df], ignore_index=True)
#print(df.dtypes)
del df
#print(dataframes.columns.values.tolist())
interface.get_ram(info='Added year to dataframe: ' + str(year))
interface.get_time(start_time)
print('------------------------------------------------------------')
interface.get_ram(info='Instance dataframe 2010-2019')
interface.get_time(start_time)
return dataframes
if __name__ == "__main__":
    # Program start
interface.get_ram(info='Starting program')
start_time = time.time()
    # Load the base data into the database
interface.drop_dimensions()
interface.create_dimensions()
interface.insert_static_dimensions()
interface.get_time(start_time)
    # Merge all the cleaned dataframes into a single general one
df = get_dataframes(start_time)
    # Convert the MRUN column
interface.get_ram(info='Converting dataframe types')
interface.get_time(start_time)
df['MRUN'] = df['MRUN'].astype('string')
interface.get_ram(info='Types converted')
interface.get_time(start_time)
    # Build the comuna data for schools and students; present in every year (not in the documentation)
headers_com = ["COD_COM", "NOM_COM"]
    # Comunas where the schools are located
data_com_rbd = [df["COD_COM_RBD"], df["NOM_COM_RBD"]]
df_com_rbd = pd.concat(data_com_rbd, axis=1, keys=headers_com)
    # Comunas the students come from
data_com_alu = [df["COD_COM_ALU"], df["NOM_COM_ALU"]]
df_com_alu = pd.concat(data_com_alu, axis=1, keys=headers_com)
    # Concatenate the columns
df_com = pd.concat([df_com_rbd,df_com_alu])
df_com = df_com.drop_duplicates(subset=['COD_COM'])
df_com = df_com.reset_index(drop=True)
    # Insert the data into the comuna dimension
interface.insert_dim_comuna(df_com.values.tolist())
interface.get_time(start_time)
    # Delete leftovers to free RAM
del headers_com, data_com_rbd, df_com_rbd, data_com_alu, df_com_alu, df_com
df.drop(columns=['NOM_COM_RBD','NOM_COM_ALU'], inplace=True, axis=1)
interface.get_ram(info='Dispose columns "comuna"')
interface.get_time(start_time)
    # Add schools (establecimientos)
data_establecimiento = [df["RBD"], df["DGV_RBD"], df["NOM_RBD"], df["RURAL_RBD"], df["COD_DEPE"], df["COD_REG_RBD"], df["COD_SEC"], df["COD_COM_RBD"]]
headers_establecimiento = ['rbd', 'dgv_rbd', 'nom_rbd', 'rural_rbd', 'cod_depe', 'cod_reg_rbd', 'cod_sec', 'cod_com']
interface.copy_from_stringio(table_name='establecimiento', data=data_establecimiento, headers=headers_establecimiento, remove_duplicates=['rbd','dgv_rbd'])
del data_establecimiento, headers_establecimiento
df.drop(columns=['NOM_RBD','RURAL_RBD','COD_DEPE','COD_REG_RBD','COD_SEC','COD_COM_RBD'], inplace=True, axis=1)
interface.get_ram(info='Dispose columns "establecimiento"')
interface.get_time(start_time)
    # Add students (alumnos)
data_alumno = [df["MRUN"], df["FEC_NAC_ALU"], df["GEN_ALU"], df["COD_COM_ALU"], df["INT_ALU"]]
headers_alumno = ["mrun", "fec_nac_alu", "gen_alu", "cod_com", "int_alu"]
interface.copy_from_stringio(table_name='alumno', data=data_alumno, headers=headers_alumno, remove_duplicates=['mrun'])
del data_alumno, headers_alumno
df.drop(columns=['FEC_NAC_ALU','GEN_ALU','COD_COM_ALU','INT_ALU'], inplace=True, axis=1)
interface.get_ram(info='Dispose columns "alumnos"')
interface.get_time(start_time)
"""
### TESTING ###
print('DROP TESTING')
df.drop(columns=['NOM_COM_RBD','NOM_COM_ALU','NOM_RBD','RURAL_RBD','COD_DEPE','COD_REG_RBD','COD_SEC','COD_COM_RBD','FEC_NAC_ALU','GEN_ALU','COD_COM_ALU','INT_ALU'], inplace=True, axis=1)
print('TESTING DROPPED')
### TESTING ###
"""
    # Add grades (notas)
data_notas = [df["AGNO"], df["MRUN"], df["RBD"], df["DGV_RBD"], df["PROM_GRAL"], df["SIT_FIN"], df['ASISTENCIA'], df['LET_CUR'], df["COD_ENSE"], df["COD_ENSE2"], df["COD_JOR"]]
head_notas = ['agno', 'mrun', 'rbd', 'dgv_rbd', 'prom_gral', 'sit_fin', 'asistencia', 'let_cur', 'cod_ense', 'cod_ense2', 'cod_jor']
interface.copy_from_stringio(table_name='notas', data=data_notas, headers=head_notas, remove_duplicates=['agno','mrun'])
del data_notas, head_notas
interface.get_ram(info='Inserted all data to database')
interface.get_time(start_time)
del df
interface.get_ram(info='Dispose dataframe and finish program')
interface.get_time(start_time)
|
[
"interface.get_columns_to_drop",
"pandas.read_csv",
"interface.get_time",
"time.time",
"interface.drop_dimensions",
"interface.create_dimensions",
"interface.insert_static_dimensions",
"interface.copy_from_stringio",
"interface.get_ram",
"pandas.concat",
"interface.get_amount_of_csv"
] |
[((168, 199), 'interface.get_columns_to_drop', 'interface.get_columns_to_drop', ([], {}), '()\n', (197, 199), False, 'import interface\n'), ((217, 246), 'interface.get_amount_of_csv', 'interface.get_amount_of_csv', ([], {}), '()\n', (244, 246), False, 'import interface\n'), ((2654, 2708), 'interface.get_ram', 'interface.get_ram', ([], {'info': '"""Instance dataframe 2010-2019"""'}), "(info='Instance dataframe 2010-2019')\n", (2671, 2708), False, 'import interface\n'), ((2713, 2743), 'interface.get_time', 'interface.get_time', (['start_time'], {}), '(start_time)\n', (2731, 2743), False, 'import interface\n'), ((2830, 2872), 'interface.get_ram', 'interface.get_ram', ([], {'info': '"""Starting program"""'}), "(info='Starting program')\n", (2847, 2872), False, 'import interface\n'), ((2890, 2901), 'time.time', 'time.time', ([], {}), '()\n', (2899, 2901), False, 'import time\n'), ((2958, 2985), 'interface.drop_dimensions', 'interface.drop_dimensions', ([], {}), '()\n', (2983, 2985), False, 'import interface\n'), ((2990, 3019), 'interface.create_dimensions', 'interface.create_dimensions', ([], {}), '()\n', (3017, 3019), False, 'import interface\n'), ((3024, 3060), 'interface.insert_static_dimensions', 'interface.insert_static_dimensions', ([], {}), '()\n', (3058, 3060), False, 'import interface\n'), ((3065, 3095), 'interface.get_time', 'interface.get_time', (['start_time'], {}), '(start_time)\n', (3083, 3095), False, 'import interface\n'), ((3241, 3293), 'interface.get_ram', 'interface.get_ram', ([], {'info': '"""Converting dataframe types"""'}), "(info='Converting dataframe types')\n", (3258, 3293), False, 'import interface\n'), ((3298, 3328), 'interface.get_time', 'interface.get_time', (['start_time'], {}), '(start_time)\n', (3316, 3328), False, 'import interface\n'), ((3378, 3419), 'interface.get_ram', 'interface.get_ram', ([], {'info': '"""Types converted"""'}), "(info='Types converted')\n", (3395, 3419), False, 'import interface\n'), ((3424, 3454), 'interface.get_time', 'interface.get_time', (['start_time'], {}), '(start_time)\n', (3442, 3454), False, 'import interface\n'), ((3728, 3777), 'pandas.concat', 'pd.concat', (['data_com_rbd'], {'axis': '(1)', 'keys': 'headers_com'}), '(data_com_rbd, axis=1, keys=headers_com)\n', (3737, 3777), True, 'import pandas as pd\n'), ((3895, 3944), 'pandas.concat', 'pd.concat', (['data_com_alu'], {'axis': '(1)', 'keys': 'headers_com'}), '(data_com_alu, axis=1, keys=headers_com)\n', (3904, 3944), True, 'import pandas as pd\n'), ((3990, 4025), 'pandas.concat', 'pd.concat', (['[df_com_rbd, df_com_alu]'], {}), '([df_com_rbd, df_com_alu])\n', (3999, 4025), True, 'import pandas as pd\n'), ((4229, 4259), 'interface.get_time', 'interface.get_time', (['start_time'], {}), '(start_time)\n', (4247, 4259), False, 'import interface\n'), ((4446, 4496), 'interface.get_ram', 'interface.get_ram', ([], {'info': '"""Dispose columns "comuna\\""""'}), '(info=\'Dispose columns "comuna"\')\n', (4463, 4496), False, 'import interface\n'), ((4501, 4531), 'interface.get_time', 'interface.get_time', (['start_time'], {}), '(start_time)\n', (4519, 4531), False, 'import interface\n'), ((4846, 5011), 'interface.copy_from_stringio', 'interface.copy_from_stringio', ([], {'table_name': '"""establecimiento"""', 'data': 'data_establecimiento', 'headers': 'headers_establecimiento', 'remove_duplicates': "['rbd', 'dgv_rbd']"}), "(table_name='establecimiento', data=\n data_establecimiento, headers=headers_establecimiento,\n remove_duplicates=['rbd', 'dgv_rbd'])\n", (4874, 5011), False, 'import interface\n'), ((5176, 5235), 'interface.get_ram', 'interface.get_ram', ([], {'info': '"""Dispose columns "establecimiento\\""""'}), '(info=\'Dispose columns "establecimiento"\')\n', (5193, 5235), False, 'import interface\n'), ((5240, 5270), 'interface.get_time', 'interface.get_time', (['start_time'], {}), '(start_time)\n', (5258, 5270), False, 'import interface\n'), ((5475, 5599), 'interface.copy_from_stringio', 'interface.copy_from_stringio', ([], {'table_name': '"""alumno"""', 'data': 'data_alumno', 'headers': 'headers_alumno', 'remove_duplicates': "['mrun']"}), "(table_name='alumno', data=data_alumno, headers\n =headers_alumno, remove_duplicates=['mrun'])\n", (5503, 5599), False, 'import interface\n'), ((5728, 5779), 'interface.get_ram', 'interface.get_ram', ([], {'info': '"""Dispose columns "alumnos\\""""'}), '(info=\'Dispose columns "alumnos"\')\n', (5745, 5779), False, 'import interface\n'), ((5784, 5814), 'interface.get_time', 'interface.get_time', (['start_time'], {}), '(start_time)\n', (5802, 5814), False, 'import interface\n'), ((6462, 6588), 'interface.copy_from_stringio', 'interface.copy_from_stringio', ([], {'table_name': '"""notas"""', 'data': 'data_notas', 'headers': 'head_notas', 'remove_duplicates': "['agno', 'mrun']"}), "(table_name='notas', data=data_notas, headers=\n head_notas, remove_duplicates=['agno', 'mrun'])\n", (6490, 6588), False, 'import interface\n'), ((6618, 6673), 'interface.get_ram', 'interface.get_ram', ([], {'info': '"""Inserted all data to database"""'}), "(info='Inserted all data to database')\n", (6635, 6673), False, 'import interface\n'), ((6678, 6708), 'interface.get_time', 'interface.get_time', (['start_time'], {}), '(start_time)\n', (6696, 6708), False, 'import interface\n'), ((6726, 6788), 'interface.get_ram', 'interface.get_ram', ([], {'info': '"""Dispose dataframe and finish program"""'}), "(info='Dispose dataframe and finish program')\n", (6743, 6788), False, 'import interface\n'), ((6793, 6823), 'interface.get_time', 'interface.get_time', (['start_time'], {}), '(start_time)\n', (6811, 6823), False, 'import interface\n'), ((805, 835), 'interface.get_time', 'interface.get_time', (['start_time'], {}), '(start_time)\n', (823, 835), False, 'import interface\n'), ((849, 912), 'pandas.read_csv', 'pd.read_csv', (['path'], {'sep': '""";"""', 'low_memory': '(False)', 'encoding': 'encoding'}), "(path, sep=';', low_memory=False, encoding=encoding)\n", (860, 912), True, 'import pandas as pd\n'), ((921, 958), 'interface.get_ram', 'interface.get_ram', ([], {'info': '"""File loaded"""'}), "(info='File loaded')\n", (938, 958), False, 'import interface\n'), ((967, 997), 'interface.get_time', 'interface.get_time', (['start_time'], {}), '(start_time)\n', (985, 997), False, 'import interface\n'), ((2544, 2574), 'interface.get_time', 'interface.get_time', (['start_time'], {}), '(start_time)\n', (2562, 2574), False, 'import interface\n'), ((2325, 2371), 'pandas.concat', 'pd.concat', (['[dataframes, df]'], {'ignore_index': '(True)'}), '([dataframes, df], ignore_index=True)\n', (2334, 2371), True, 'import pandas as pd\n')]
|
# test_log_format.py
"""Unit tests for lta/log_format.py."""
import sys
from requests.exceptions import HTTPError
from .test_util import ObjectLiteral
from lta.log_format import StructuredFormatter
class LiteralRecord(ObjectLiteral):
"""
LiteralRecord is a literal LogRecord.
This class creates an ObjectLiteral that also implements the (getMessage)
method which is often called on LogRecord objects.
This is useful for creating LogRecord literals to be used as return
values from mocked API calls.
"""
def getMessage(self):
"""Format the log message."""
return self.msg % self.args
def test_constructor_default() -> None:
"""Test that StructuredFormatter can be created without any parameters."""
sf = StructuredFormatter()
assert sf.component_type is None
assert sf.component_name is None
assert sf.indent is None
assert sf.separators == (',', ':')
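    # The defaults yield compact single-line JSON suited to ndjson output;
    # passing ndjson=False (exercised below) switches to indented, spaced output.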
def test_constructor_supplied() -> None:
"""Test that StructuredFormatter can be created with parameters."""
sf = StructuredFormatter(component_type="Picker", component_name="test-picker", ndjson=False)
assert sf.component_type == "Picker"
assert sf.component_name == "test-picker"
assert sf.indent == 4
assert sf.separators == (', ', ': ')
def test_format_default() -> None:
"""Test that StructuredFormatter (no params) provides proper output."""
sf = StructuredFormatter()
log_record = LiteralRecord(
name="lta.picker",
msg="ConnectionError trying to PATCH /status/picker with heartbeat",
args=[],
levelname="ERROR",
levelno=40,
pathname="/home/pmeade/github/lta/lta/picker.py",
filename="picker.py",
module="picker",
exc_info=None,
exc_text=None,
stack_info=None,
lineno=102,
funcName="patch_status_heartbeat",
created=1547003161.046467,
msecs=46.46706581115723,
relativeCreated=93.13035011291504,
thread=140013641434880,
threadName="MainThread",
processName="MainProcess",
process=8147
)
json_text = sf.format(log_record)
assert json_text.startswith("{")
assert json_text.endswith("}")
assert json_text.find("\n") == -1
assert json_text.find("component_type") == -1
assert json_text.find("component_name") == -1
assert json_text.find("timestamp") != -1
def test_format_supplied() -> None:
"""Test that StructuredFormatter (with params) provides proper output."""
sf = StructuredFormatter(component_type="Picker", component_name="test-picker", ndjson=False)
log_record = LiteralRecord(
name="lta.picker",
msg="ConnectionError trying to PATCH /status/picker with heartbeat",
args=[],
levelname="ERROR",
levelno=40,
pathname="/home/pmeade/github/lta/lta/picker.py",
filename="picker.py",
module="picker",
exc_info=None,
exc_text=None,
stack_info=None,
lineno=102,
funcName="patch_status_heartbeat",
created=1547003161.046467,
msecs=46.46706581115723,
relativeCreated=93.13035011291504,
thread=140013641434880,
threadName="MainThread",
processName="MainProcess",
process=8147
)
json_text = sf.format(log_record)
assert json_text.startswith("{")
assert json_text.endswith("}")
assert json_text.find("\n") != -1
assert json_text.find("component_type") != -1
assert json_text.find("component_name") != -1
assert json_text.find("timestamp") != -1
def test_missing_exc_info() -> None:
    """Test that StructuredFormatter handles a LogRecord without exc_info."""
sf = StructuredFormatter()
log_record = LiteralRecord(
name="lta.picker",
msg="ConnectionError trying to PATCH /status/picker with heartbeat",
args=[],
levelname="ERROR",
levelno=40,
pathname="/home/pmeade/github/lta/lta/picker.py",
filename="picker.py",
module="picker",
exc_text=None,
stack_info=None,
lineno=102,
funcName="patch_status_heartbeat",
created=1547003161.046467,
msecs=46.46706581115723,
relativeCreated=93.13035011291504,
thread=140013641434880,
threadName="MainThread",
processName="MainProcess",
process=8147
)
json_text = sf.format(log_record)
assert json_text.startswith("{")
assert json_text.endswith("}")
assert json_text.find("\n") == -1
assert json_text.find("component_type") == -1
assert json_text.find("component_name") == -1
assert json_text.find("timestamp") != -1
def test_exc_info_tuple() -> None:
    """Test that StructuredFormatter formats a LogRecord carrying an exc_info tuple."""
sf = StructuredFormatter()
log_record = LiteralRecord(
name="lta.picker",
msg="ConnectionError trying to PATCH /status/picker with heartbeat",
args=[],
levelname="ERROR",
levelno=40,
pathname="/home/pmeade/github/lta/lta/picker.py",
filename="picker.py",
module="picker",
exc_text=None,
stack_info=None,
lineno=102,
funcName="patch_status_heartbeat",
created=1547003161.046467,
msecs=46.46706581115723,
relativeCreated=93.13035011291504,
thread=140013641434880,
threadName="MainThread",
processName="MainProcess",
process=8147
)
try:
raise HTTPError("451 Unavailable For Legal Reasons")
except HTTPError:
log_record.exc_info = sys.exc_info()
json_text = sf.format(log_record)
assert json_text.startswith("{")
assert json_text.endswith("}")
assert json_text.find("\n") == -1
assert json_text.find("component_type") == -1
assert json_text.find("component_name") == -1
assert json_text.find("timestamp") != -1
|
[
"lta.log_format.StructuredFormatter",
"sys.exc_info",
"requests.exceptions.HTTPError"
] |
[((768, 789), 'lta.log_format.StructuredFormatter', 'StructuredFormatter', ([], {}), '()\n', (787, 789), False, 'from lta.log_format import StructuredFormatter\n'), ((1056, 1148), 'lta.log_format.StructuredFormatter', 'StructuredFormatter', ([], {'component_type': '"""Picker"""', 'component_name': '"""test-picker"""', 'ndjson': '(False)'}), "(component_type='Picker', component_name='test-picker',\n ndjson=False)\n", (1075, 1148), False, 'from lta.log_format import StructuredFormatter\n'), ((1421, 1442), 'lta.log_format.StructuredFormatter', 'StructuredFormatter', ([], {}), '()\n', (1440, 1442), False, 'from lta.log_format import StructuredFormatter\n'), ((2546, 2638), 'lta.log_format.StructuredFormatter', 'StructuredFormatter', ([], {'component_type': '"""Picker"""', 'component_name': '"""test-picker"""', 'ndjson': '(False)'}), "(component_type='Picker', component_name='test-picker',\n ndjson=False)\n", (2565, 2638), False, 'from lta.log_format import StructuredFormatter\n'), ((3737, 3758), 'lta.log_format.StructuredFormatter', 'StructuredFormatter', ([], {}), '()\n', (3756, 3758), False, 'from lta.log_format import StructuredFormatter\n'), ((4836, 4857), 'lta.log_format.StructuredFormatter', 'StructuredFormatter', ([], {}), '()\n', (4855, 4857), False, 'from lta.log_format import StructuredFormatter\n'), ((5543, 5589), 'requests.exceptions.HTTPError', 'HTTPError', (['"""451 Unavailable For Legal Reasons"""'], {}), "('451 Unavailable For Legal Reasons')\n", (5552, 5589), False, 'from requests.exceptions import HTTPError\n'), ((5642, 5656), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (5654, 5656), False, 'import sys\n')]
|
# Example
# -------
#
# connectivity_check_v2.py
from pyats import aetest
import re
import logging
# get your logger for your script
logger = logging.getLogger(__name__)
class CommonSetup(aetest.CommonSetup):
# CommonSetup-SubSec1
@aetest.subsection
def check_topology(
self,
testbed,
HQ_C1_name = 'HQ-C1',
HQ_C2_name = 'HQ-C2',
HQ_C3_name = 'HQ-C3',
HQ_C4_name = 'HQ-C4',
BR1_C1_name = 'BR1-C1',
BR2_C1_name = 'BR2-C1'):
HQ_C1 = testbed.devices[HQ_C1_name]
HQ_C2 = testbed.devices[HQ_C2_name]
HQ_C3 = testbed.devices[HQ_C3_name]
HQ_C4 = testbed.devices[HQ_C4_name]
BR1_C1 = testbed.devices[BR1_C1_name]
BR2_C1 = testbed.devices[BR2_C1_name]
# add them to testscript parameters
self.parent.parameters.update(
HQ_C1 = HQ_C1,
HQ_C2 = HQ_C2,
HQ_C3 = HQ_C3,
HQ_C4 = HQ_C4,
BR1_C1 = BR1_C1,
BR2_C1 = BR2_C1)
# CommonSetup-SubSec
@aetest.subsection
def establish_connections(self, steps, HQ_C1, HQ_C2, HQ_C3, HQ_C4, BR1_C1, BR2_C1):
with steps.start('Connecting to %s' % HQ_C1.name):
HQ_C1.connect()
with steps.start('Connecting to %s' % HQ_C2.name):
HQ_C2.connect()
with steps.start('Connecting to %s' % HQ_C3.name):
HQ_C3.connect()
with steps.start('Connecting to %s' % HQ_C4.name):
HQ_C4.connect()
with steps.start('Connecting to %s' % BR1_C1.name):
BR1_C1.connect()
with steps.start('Connecting to %s' % BR2_C1.name):
BR2_C1.connect()
@aetest.subsection
def setup_ip_addresses(self, steps, HQ_C1, HQ_C2, HQ_C3, HQ_C4, BR1_C1, BR2_C1):
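        # Assign static addresses with the clients' 'ip <address>/<prefix> <gateway>'
        # command (VPCS-style syntax -- an assumption about these lab clients).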
with steps.start('Setup static IPv4 to %s' % HQ_C1.name):
HQ_C1.execute('ip 10.255.100.10/27 10.255.100.1')
with steps.start('Setup static IPv4 to %s' % HQ_C2.name):
HQ_C2.execute('ip 10.255.100.40/27 10.255.100.33')
with steps.start('Setup static IPv4 to %s' % HQ_C3.name):
HQ_C3.execute('ip 10.255.100.70/27 10.255.100.65')
with steps.start('Setup static IPv4 to %s' % HQ_C4.name):
HQ_C4.execute('ip 10.255.100.100/27 10.255.100.97')
with steps.start('Setup static IPv4 to %s' % BR1_C1.name):
BR1_C1.execute('ip 10.1.100.10/27 10.1.100.1')
with steps.start('Setup static IPv4 to %s' % BR2_C1.name):
BR2_C1.execute('ip 10.2.100.10/27 10.2.100.1')
# TestCases
class TESTCASE_1_PING_FROM_HQ_CLIENTS_TO_ISP(aetest.Testcase):
    @aetest.test
    def T1_PING_FROM_HQ_C1_TO_ISP(self, HQ_C1):
        try:
            result = HQ_C1.execute('ping 8.8.8.8 -c 5')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search('timeout', result) or re.search('not reachable|unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
    @aetest.test
    def T2_PING_FROM_HQ_C2_TO_ISP(self, HQ_C2):
        try:
            result = HQ_C2.execute('ping 8.8.8.8 -c 5')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search('timeout', result) or re.search('not reachable|unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
    @aetest.test
    def T3_PING_FROM_HQ_C3_TO_ISP(self, HQ_C3):
        try:
            result = HQ_C3.execute('ping 8.8.8.8 -c 5')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search('timeout', result) or re.search('not reachable|unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
    @aetest.test
    def T4_PING_FROM_HQ_C4_TO_ISP(self, HQ_C4):
        try:
            result = HQ_C4.execute('ping 8.8.8.8 -c 5')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search('timeout', result) or re.search('not reachable|unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
class TESTCASE_2_PING_FROM_BR1_CLIENTS_TO_ISP(aetest.Testcase):
    @aetest.test
    def T1_PING_FROM_BR1_C1_TO_ISP(self, BR1_C1):
        try:
            result = BR1_C1.execute('ping 8.8.8.8 -c 5')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search('timeout', result) or re.search('not reachable|unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
class TESTCASE_3_PING_FROM_BR2_CLIENTS_TO_ISP(aetest.Testcase):
    @aetest.test
    def T1_PING_FROM_BR2_C1_TO_ISP(self, BR2_C1):
        try:
            result = BR2_C1.execute('ping 8.8.8.8 -c 5')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search('timeout', result) or re.search('not reachable|unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
class TESTCASE_4_PING_FROM_HQ_CLIENTS_TO_HQ_S1(aetest.Testcase):
    @aetest.test
    def T1_PING_FROM_HQ_C1_TO_HQ_S1(self, HQ_C1):
        try:
            result = HQ_C1.execute('ping 10.255.255.2 -c 5')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search('timeout', result) or re.search('not reachable|unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
    @aetest.test
    def T2_PING_FROM_HQ_C2_TO_HQ_S1(self, HQ_C2):
        try:
            result = HQ_C2.execute('ping 10.255.255.2 -c 5')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search('timeout', result) or re.search('not reachable|unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
    @aetest.test
    def T3_PING_FROM_HQ_C3_TO_HQ_S1(self, HQ_C3):
        try:
            result = HQ_C3.execute('ping 10.255.255.2 -c 5')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search('timeout', result) or re.search('not reachable|unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
    @aetest.test
    def T4_PING_FROM_HQ_C4_TO_HQ_S1(self, HQ_C4):
        try:
            result = HQ_C4.execute('ping 10.255.255.2 -c 5')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search('timeout', result) or re.search('not reachable|unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
class TESTCASE_5_PING_FROM_BR1_CLIENTS_TO_HQ_S1(aetest.Testcase):
    @aetest.test
    def T1_PING_FROM_BR1_C1_TO_HQ_S1(self, BR1_C1):
        try:
            result = BR1_C1.execute('ping 10.255.255.2 -c 5')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search('timeout', result) or re.search('not reachable|unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
class TESTCASE_6_PING_FROM_BR2_CLIENTS_TO_HQ_S1(aetest.Testcase):
    @aetest.test
    def T1_PING_FROM_BR2_C1_TO_HQ_S1(self, BR2_C1):
        try:
            result = BR2_C1.execute('ping 10.255.255.2 -c 5')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search('timeout', result) or re.search('not reachable|unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
class TESTCASE_7_TRACEROUTE_FROM_HQ_CLIENTS_TO_ISP(aetest.Testcase):
    @aetest.test
    def T1_TRACE_FROM_HQ_C1_TO_ISP(self, HQ_C1):
        try:
            result = HQ_C1.execute('trace 8.8.8.8 -P 6')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search(r'\* \* \*', result) or re.search('Destination host unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
    @aetest.test
    def T2_TRACE_FROM_HQ_C2_TO_ISP(self, HQ_C2):
        try:
            result = HQ_C2.execute('trace 8.8.8.8 -P 6')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search(r'\* \* \*', result) or re.search('Destination host unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
    @aetest.test
    def T3_TRACE_FROM_HQ_C3_TO_ISP(self, HQ_C3):
        try:
            result = HQ_C3.execute('trace 8.8.8.8 -P 6')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search(r'\* \* \*', result) or re.search('Destination host unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
    @aetest.test
    def T4_TRACE_FROM_HQ_C4_TO_ISP(self, HQ_C4):
        try:
            result = HQ_C4.execute('trace 8.8.8.8 -P 6')
        except Exception as e:
            self.failed('Something went wrong: {}'.format(str(e)), goto=['exit'])
        else:
            match = re.search(r'\* \* \*', result) or re.search('Destination host unreachable', result)
            print('################')
            print('Result is =>', result)
            print('Match is =>', match)
            print('################')
            if match:
                print('Match is => FOUND', match)
                print('################')
                self.failed()
            else:
                print('Match is => NOT FOUND')
                print('################')
# CommonCleanup
class CommonCleanup(aetest.CommonCleanup):
@aetest.subsection
def disconnect(self, steps, HQ_C1, HQ_C2, HQ_C3, HQ_C4, BR1_C1, BR2_C1):
with steps.start('Disconnecting from %s' % HQ_C1.name):
HQ_C1.disconnect()
with steps.start('Disconnecting from %s' % HQ_C2.name):
HQ_C2.disconnect()
with steps.start('Disconnecting from %s' % HQ_C3.name):
HQ_C3.disconnect()
with steps.start('Disconnecting from %s' % HQ_C4.name):
HQ_C4.disconnect()
with steps.start('Disconnecting from %s' % BR1_C1.name):
BR1_C1.disconnect()
with steps.start('Disconnecting from %s' % BR2_C1.name):
BR2_C1.disconnect()
if __name__ == '__main__':
import argparse
from pyats.topology import loader
parser = argparse.ArgumentParser()
parser.add_argument('--testbed', dest = 'testbed',
type = loader.load)
args, unknown = parser.parse_known_args()
aetest.main(**vars(args))
|
[
"re.search",
"argparse.ArgumentParser",
"logging.getLogger"
] |
[((146, 173), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (163, 173), False, 'import logging\n'), ((16164, 16189), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (16187, 16189), False, 'import argparse\n'), ((2955, 2983), 're.search', 're.search', (['"""timeout"""', 'result'], {}), "('timeout', result)\n", (2964, 2983), False, 'import re\n'), ((2987, 3033), 're.search', 're.search', (['"""not reachable|unreachable"""', 'result'], {}), "('not reachable|unreachable', result)\n", (2996, 3033), False, 'import re\n'), ((3716, 3744), 're.search', 're.search', (['"""timeout"""', 'result'], {}), "('timeout', result)\n", (3725, 3744), False, 'import re\n'), ((3748, 3794), 're.search', 're.search', (['"""not reachable|unreachable"""', 'result'], {}), "('not reachable|unreachable', result)\n", (3757, 3794), False, 'import re\n'), ((4475, 4503), 're.search', 're.search', (['"""timeout"""', 'result'], {}), "('timeout', result)\n", (4484, 4503), False, 'import re\n'), ((4507, 4553), 're.search', 're.search', (['"""not reachable|unreachable"""', 'result'], {}), "('not reachable|unreachable', result)\n", (4516, 4553), False, 'import re\n'), ((5234, 5262), 're.search', 're.search', (['"""timeout"""', 'result'], {}), "('timeout', result)\n", (5243, 5262), False, 'import re\n'), ((5266, 5312), 're.search', 're.search', (['"""not reachable|unreachable"""', 'result'], {}), "('not reachable|unreachable', result)\n", (5275, 5312), False, 'import re\n'), ((6063, 6091), 're.search', 're.search', (['"""timeout"""', 'result'], {}), "('timeout', result)\n", (6072, 6091), False, 'import re\n'), ((6095, 6141), 're.search', 're.search', (['"""not reachable|unreachable"""', 'result'], {}), "('not reachable|unreachable', result)\n", (6104, 6141), False, 'import re\n'), ((6892, 6920), 're.search', 're.search', (['"""timeout"""', 'result'], {}), "('timeout', result)\n", (6901, 6920), False, 'import re\n'), ((6924, 6970), 're.search', 're.search', (['"""not reachable|unreachable"""', 'result'], {}), "('not reachable|unreachable', result)\n", (6933, 6970), False, 'import re\n'), ((7726, 7754), 're.search', 're.search', (['"""timeout"""', 'result'], {}), "('timeout', result)\n", (7735, 7754), False, 'import re\n'), ((7758, 7804), 're.search', 're.search', (['"""not reachable|unreachable"""', 'result'], {}), "('not reachable|unreachable', result)\n", (7767, 7804), False, 'import re\n'), ((8494, 8522), 're.search', 're.search', (['"""timeout"""', 'result'], {}), "('timeout', result)\n", (8503, 8522), False, 'import re\n'), ((8526, 8572), 're.search', 're.search', (['"""not reachable|unreachable"""', 'result'], {}), "('not reachable|unreachable', result)\n", (8535, 8572), False, 'import re\n'), ((9260, 9288), 're.search', 're.search', (['"""timeout"""', 'result'], {}), "('timeout', result)\n", (9269, 9288), False, 'import re\n'), ((9292, 9338), 're.search', 're.search', (['"""not reachable|unreachable"""', 'result'], {}), "('not reachable|unreachable', result)\n", (9301, 9338), False, 'import re\n'), ((10026, 10054), 're.search', 're.search', (['"""timeout"""', 'result'], {}), "('timeout', result)\n", (10035, 10054), False, 'import re\n'), ((10058, 10104), 're.search', 're.search', (['"""not reachable|unreachable"""', 'result'], {}), "('not reachable|unreachable', result)\n", (10067, 10104), False, 'import re\n'), ((10864, 10892), 're.search', 're.search', (['"""timeout"""', 'result'], {}), "('timeout', result)\n", (10873, 10892), False, 'import re\n'), ((10896, 10942), 're.search', 're.search', (['"""not reachable|unreachable"""', 'result'], {}), "('not reachable|unreachable', result)\n", (10905, 10942), False, 'import re\n'), ((11702, 11730), 're.search', 're.search', (['"""timeout"""', 'result'], {}), "('timeout', result)\n", (11711, 11730), False, 'import re\n'), ((11734, 11780), 're.search', 're.search', (['"""not reachable|unreachable"""', 'result'], {}), "('not reachable|unreachable', result)\n", (11743, 11780), False, 'import re\n'), ((12535, 12567), 're.search', 're.search', (['"""\\\\* \\\\* \\\\*"""', 'result'], {}), "('\\\\* \\\\* \\\\*', result)\n", (12544, 12567), False, 'import re\n'), ((12568, 12617), 're.search', 're.search', (['"""Destination host unreachable"""', 'result'], {}), "('Destination host unreachable', result)\n", (12577, 12617), False, 'import re\n'), ((13302, 13334), 're.search', 're.search', (['"""\\\\* \\\\* \\\\*"""', 'result'], {}), "('\\\\* \\\\* \\\\*', result)\n", (13311, 13334), False, 'import re\n'), ((13335, 13384), 're.search', 're.search', (['"""Destination host unreachable"""', 'result'], {}), "('Destination host unreachable', result)\n", (13344, 13384), False, 'import re\n'), ((14069, 14101), 're.search', 're.search', (['"""\\\\* \\\\* \\\\*"""', 'result'], {}), "('\\\\* \\\\* \\\\*', result)\n", (14078, 14101), False, 'import re\n'), ((14102, 14151), 're.search', 're.search', (['"""Destination host unreachable"""', 'result'], {}), "('Destination host unreachable', result)\n", (14111, 14151), False, 'import re\n'), ((14836, 14868), 're.search', 're.search', (['"""\\\\* \\\\* \\\\*"""', 'result'], {}), "('\\\\* \\\\* \\\\*', result)\n", (14845, 14868), False, 'import re\n'), ((14869, 14918), 're.search', 're.search', (['"""Destination host unreachable"""', 'result'], {}), "('Destination host unreachable', result)\n", (14878, 14918), False, 'import re\n')]
|
import wx
import numpy as np
import time
from wx import glcanvas
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.arrays import vbo
from OpenGL.GL import shaders
from readobj import Obj3D
__author__ = '<NAME>'
__version__ = '0.1.0'
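# Minimal pass-through GLSL shaders: the vertex stage applies the fixed-function
# modelview-projection matrix; the fragment stage colors every pixel light grey.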
vertexShader = """
#version 120
void main() {
gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
}
"""
fragmentShader = """
#version 120
void main() {
gl_FragColor = vec4( .9, .9, .9, 1 );
}
"""
class GLFrame( glcanvas.GLCanvas ):
"""A simple class for using OpenGL with wxPython."""
near_plane = 0.1
far_plane = 100
world_pos = (0, 0, -6)
world_rot = (0, 0, 0)
def __init__(self, parent):
self.GLinitialized = False
attribList = (glcanvas.WX_GL_RGBA, # RGBA
glcanvas.WX_GL_DOUBLEBUFFER, # Double Buffered
glcanvas.WX_GL_DEPTH_SIZE, 24) # 24 bit
super(GLFrame, self).__init__( parent, attribList=attribList )
#
# Create the canvas
self.context = glcanvas.GLContext( self )
self.left_down = False
#
# Set the event handlers.
self.Bind(wx.EVT_ERASE_BACKGROUND, self.processEraseBackgroundEvent)
self.Bind(wx.EVT_SIZE, self.processSizeEvent)
self.Bind(wx.EVT_PAINT, self.processPaintEvent)
self.Bind(wx.EVT_MOUSEWHEEL, self.processWheelEvent)
self.Bind(wx.EVT_MOTION, self.processMotion)
self.Bind(wx.EVT_LEFT_DOWN, self.processLeftDown)
self.Bind(wx.EVT_LEFT_UP, self.processLeftUp)
#
# Canvas Proxy Methods
def GetGLExtents(self):
"""Get the extents of the OpenGL canvas."""
return self.GetClientSize()
#
# wxPython Window Handlers
def processLeftDown( self, event ):
self.last_pos = event.GetPosition()
self.left_down = True
def processLeftUp( self, event ):
self.left_down = False
def processMotion( self, event ):
if self.left_down:
pos = event.GetPosition()
diff = (pos-self.last_pos)
self.world_rot = ( self.world_rot[0]+diff[1], self.world_rot[1]+diff[0], self.world_rot[2] )
# print( )
self.last_pos = pos
self.Refresh( False )
def processWheelEvent( self, event ):
delta = event.GetWheelRotation() / 100
self.world_pos = ( self.world_pos[0], self.world_pos[1], self.world_pos[2]+delta )
self.Refresh( False )
def processEraseBackgroundEvent( self, event ):
"""Process the erase background event."""
pass # Do nothing, to avoid flashing on MSWin
def processSizeEvent( self, event ):
self.Show()
self.SetCurrent( self.context )
size = self.GetGLExtents()
self.OnReshape( size.width, size.height )
self.Refresh( False )
event.Skip()
def processPaintEvent(self, event):
self.SetCurrent( self.context )
# This is a 'perfect' time to initialize OpenGL ... only if we need to
if not self.GLinitialized:
self.OnInitGL()
self.GLinitialized = True
self.OnDraw()
event.Skip()
#
# GLFrame OpenGL Event Handlers
def OnInitGL(self):
"""Initialize OpenGL for use in the window."""
glClearColor(1, 1, 1, 1)
VERTEX_SHADER = shaders.compileShader( vertexShader, GL_VERTEX_SHADER )
FRAGMENT_SHADER = shaders.compileShader( fragmentShader, GL_FRAGMENT_SHADER )
self.shader = shaders.compileProgram( VERTEX_SHADER, FRAGMENT_SHADER )
        cube = Obj3D( r'testdata\cube.obj' )  # raw string avoids the invalid '\c' escape
data = cube.getVerticesFlat()
self.vbo = vbo.VBO( np.array( data, 'f' ) )
def OnReshape( self, width, height ):
"""Reshape the OpenGL viewport based on the dimensions of the window."""
glViewport( 0, 0, width, height )
glMatrixMode( GL_PROJECTION )
glLoadIdentity()
# glOrtho( -0.5, 0.5, -0.5, 0.5, -1, 1 )
gluPerspective( 45.0, width/height, self.near_plane, self.far_plane )
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def OnDraw( self ):
glPushMatrix()
glTranslate( self.world_pos[0], self.world_pos[1], self.world_pos[2] )
glRotated( self.world_rot[1], 0, 1, 0 )
glRotated( self.world_rot[0], 1, 0, 0 )
glClear( GL_COLOR_BUFFER_BIT )
shaders.glUseProgram( self.shader )
self.vbo.bind()
        glEnableClientState( GL_VERTEX_ARRAY )
glVertexPointerf( self.vbo )
glDrawArrays( GL_TRIANGLES, 0, len( self.vbo ) )
self.vbo.unbind()
        glDisableClientState( GL_VERTEX_ARRAY )
shaders.glUseProgram( 0 )
glPopMatrix()
self.SwapBuffers()
class Window( wx.Frame ):
def __init__( self, *args, **kwargs ):
super().__init__( *args, **kwargs )
self.initUI()
def initUI( self ):
panel = GLFrame(self)
panel.Bind(wx.EVT_RIGHT_DOWN, self.OnRightDown)
wx.StaticText( panel, label='Boilerplate Code', pos=( 10, 10 ) )
fmenu = wx.Menu()
self.popupMenu = wx.Menu()
fitem = fmenu.Append( wx.ID_OPEN, '&Open\tCtrl+O', 'Open file' )
self.popupMenu.Append( wx.ID_OPEN, '&Open\tCtrl+O', 'Open file' )
self.Bind( wx.EVT_MENU, self.onOpen, fitem )
fmenu.AppendSeparator()
fitem = fmenu.Append( wx.ID_EXIT, 'E&xit\tCtrl+Q', 'Exit Application' )
self.popupMenu.Append( wx.ID_EXIT, 'E&xit\tCtrl+Q', 'Exit Application' )
self.Bind(wx.EVT_MENU, self.onQuit, fitem)
mbar = wx.MenuBar()
mbar.Append( fmenu, '&File' )
self.SetMenuBar( mbar )
self.Show()
def OnRightDown(self, event):
self.PopupMenu( self.popupMenu, event.GetPosition() )
def onQuit( self, event ):
self.Close()
def onOpen( self, event ):
print( 'open' )
class Application( wx.App ):
def run( self ):
frame = Window(None, -1, 'Boilerplate Window', size=(400,300))
frame.Show()
self.MainLoop()
self.Destroy()
Application().run()
|
[
"wx.Menu",
"readobj.Obj3D",
"OpenGL.GL.shaders.glUseProgram",
"wx.glcanvas.GLContext",
"wx.StaticText",
"numpy.array",
"OpenGL.GL.shaders.compileProgram",
"OpenGL.GL.shaders.compileShader",
"wx.MenuBar"
] |
[((1079, 1103), 'wx.glcanvas.GLContext', 'glcanvas.GLContext', (['self'], {}), '(self)\n', (1097, 1103), False, 'from wx import glcanvas\n'), ((3450, 3503), 'OpenGL.GL.shaders.compileShader', 'shaders.compileShader', (['vertexShader', 'GL_VERTEX_SHADER'], {}), '(vertexShader, GL_VERTEX_SHADER)\n', (3471, 3503), False, 'from OpenGL.GL import shaders\n'), ((3532, 3589), 'OpenGL.GL.shaders.compileShader', 'shaders.compileShader', (['fragmentShader', 'GL_FRAGMENT_SHADER'], {}), '(fragmentShader, GL_FRAGMENT_SHADER)\n', (3553, 3589), False, 'from OpenGL.GL import shaders\n'), ((3623, 3677), 'OpenGL.GL.shaders.compileProgram', 'shaders.compileProgram', (['VERTEX_SHADER', 'FRAGMENT_SHADER'], {}), '(VERTEX_SHADER, FRAGMENT_SHADER)\n', (3645, 3677), False, 'from OpenGL.GL import shaders\n'), ((3696, 3723), 'readobj.Obj3D', 'Obj3D', (['"""testdata\\\\cube.obj"""'], {}), "('testdata\\\\cube.obj')\n", (3701, 3723), False, 'from readobj import Obj3D\n'), ((4537, 4570), 'OpenGL.GL.shaders.glUseProgram', 'shaders.glUseProgram', (['self.shader'], {}), '(self.shader)\n', (4557, 4570), False, 'from OpenGL.GL import shaders\n'), ((4822, 4845), 'OpenGL.GL.shaders.glUseProgram', 'shaders.glUseProgram', (['(0)'], {}), '(0)\n', (4842, 4845), False, 'from OpenGL.GL import shaders\n'), ((5195, 5255), 'wx.StaticText', 'wx.StaticText', (['panel'], {'label': '"""Boilerplate Code"""', 'pos': '(10, 10)'}), "(panel, label='Boilerplate Code', pos=(10, 10))\n", (5208, 5255), False, 'import wx\n'), ((5285, 5294), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (5292, 5294), False, 'import wx\n'), ((5320, 5329), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (5327, 5329), False, 'import wx\n'), ((5816, 5828), 'wx.MenuBar', 'wx.MenuBar', ([], {}), '()\n', (5826, 5828), False, 'import wx\n'), ((3791, 3810), 'numpy.array', 'np.array', (['data', '"""f"""'], {}), "(data, 'f')\n", (3799, 3810), True, 'import numpy as np\n')]
|
from django.conf.urls import url
from .. import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^add/$', views.request_add)
]
|
[
"django.conf.urls.url"
] |
[((76, 112), 'django.conf.urls.url', 'url', (['"""^$"""', 'views.index'], {'name': '"""index"""'}), "('^$', views.index, name='index')\n", (79, 112), False, 'from django.conf.urls import url\n'), ((119, 151), 'django.conf.urls.url', 'url', (['"""^add/$"""', 'views.request_add'], {}), "('^add/$', views.request_add)\n", (122, 151), False, 'from django.conf.urls import url\n')]
|
from flask import Flask, render_template, url_for, request, redirect,flash
from flask_sqlalchemy import SQLAlchemy
from datetime import datetime
app = Flask(__name__)
@app.route("/", methods=["POST", "GET"])
def Base():
if request.method == "POST":
name = request.form["name"]
email = request.form["email"]
message = request.form["message"]
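        # The submitted fields are only read here; nothing stores or emails them yet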
return redirect(url_for('Thankyou'))
else:
return render_template('index.html')
@app.route('/Thankyou', methods=["POST", "GET"])
def Thankyou():
return render_template('Thankyou2.html')
if __name__ == "__main__":
app.run(debug=True)
|
[
"flask.url_for",
"flask.Flask",
"flask.render_template"
] |
[((156, 171), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (161, 171), False, 'from flask import Flask, render_template, url_for, request, redirect, flash\n'), ((534, 567), 'flask.render_template', 'render_template', (['"""Thankyou2.html"""'], {}), "('Thankyou2.html')\n", (549, 567), False, 'from flask import Flask, render_template, url_for, request, redirect, flash\n'), ((419, 448), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (434, 448), False, 'from flask import Flask, render_template, url_for, request, redirect, flash\n'), ((380, 399), 'flask.url_for', 'url_for', (['"""Thankyou"""'], {}), "('Thankyou')\n", (387, 399), False, 'from flask import Flask, render_template, url_for, request, redirect, flash\n')]
|
#!/usr/bin/env python3
# Copyright (c) 2021 Fraunhofer AISEC. See the COPYRIGHT
# file at the top-level directory of this distribution.
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import shutil
import os
import subprocess
import tarfile
from pathlib import Path
from collections import namedtuple
arc = namedtuple("arc", "board, cpu_arc")
build_path = 'build'
build_lib_test_path = 'build_lib_test'
results_path = 'packaged'
def remove_folder(path):
"""
Removes a folder.
"""
if os.path.exists(path):
shutil.rmtree(path)
def clean_all():
"""
Removes all build artefacts and the already saved static libraries in
folder packaged/.
"""
print("\nClean all!\n")
clean()
remove_folder(results_path)
def clean():
"""
Removes all build artefacts.
"""
remove_folder(build_path)
remove_folder(build_lib_test_path)
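# Example invocation (as used below): execute_ext(['west', 'build', '-b=' + arc.board])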
def execute_ext(cmd):
"""
Executes an external program.
cmd: program with arguments
"""
process = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
process.wait()
for line in process.stdout:
print(line)
if "FAIL" in str(line):
exit()
def build(name, opt, arc):
"""
Builds a static library.
name: name of the library -- libuoscore.a or libuedhoc.a
opt: optimization level
arc: the name of the architecture (the Zephyr OS board name)
"""
    # create a file containing make variables indicating the optimization level
    # and the library which we want to build -- oscore or edhoc
print("\n")
print("===================================================================")
print("\nBuilding " + name + " for architecture " +
arc.cpu_arc + " with optimization " + opt + "\n")
print("===================================================================")
os.mkdir(build_lib_test_path)
f = open(build_lib_test_path + "/opt", "x")
f.write("OPT = " + opt + "\n")
f.write("LIB_NAME = " + name + "\n")
f.close()
m = open("src/main.h", "w+")
if (name == 'libuoscore.a'):
m.write("#define OSCORE_TESTS")
if (name == 'libuedhoc.a'):
m.write("#define EDHOC_TESTS")
    m.close()
# build with west
execute_ext(['west', 'build', '-b='+arc.board])
def save(name, arc):
"""
    Saves an oscore or edhoc library for a specific architecture in the folder
packaged.
name: name of the library -- libuoscore.a or libuedhoc.a
arc: the name of the architecture (the Zephyr OS board name)
"""
print("\nSaving!\n")
Path(results_path).mkdir(parents=True, exist_ok=True)
name_only = os.path.splitext(os.path.basename(name))[0]
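    # The archive ends up in results_path, e.g. packaged/libuedhoc_x86.tar.gz
    # when name='libuedhoc.a' and arc.cpu_arc == 'x86'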
t = tarfile.open(results_path + '/' + name_only +
'_' + arc.cpu_arc + '.tar.gz', 'x')
t.add(build_lib_test_path + '/' + 'libtest.a', arcname=name)
if (name == 'libuedhoc.a'):
t.add('../modules/edhoc/edhoc.h', arcname='edhoc.h')
if (name == 'libuoscore.a'):
t.add('../modules/oscore/oscore.h', arcname='oscore.h')
t.close()
def test(arc):
"""
    Tests a static library against the test vectors.
arc: architecture
"""
if (
(arc.board == 'native_posix_64') |
(arc.board == 'native_posix')):
print("\nTesting!\n")
execute_ext(['west', 'build', '-t', 'run'])
else:
execute_ext(['west', 'flash'])
input(
"Examine the results printed over the debugger and press Enter to continue...")
def run_tests(name, arc):
"""
Builds, tests and saves an oscore or an edhoc static library for a specific
architecture. The tests are executed for libraries build with different
optimizations.
name: name of the library -- libuoscore.a or libuedhoc.a
arc: the name of the architecture (the Zephyr OS board name)
"""
opt = ("-O0", "-O1", "-O2", "-O3")
for o in opt:
clean()
build(name, o, arc)
test(arc)
save(name, arc)
def main():
"""
Builds static libraries from uOSCORE and uEDHOC for different
    architectures, tests the libraries against the test vectors and saves the
tested libraries in the folder packaged
"""
clean_all()
# x86
#run_tests('libuoscore.a', arc('native_posix', 'x86'))
run_tests('libuedhoc.a', arc('native_posix', 'x86'))
# x86-64
#run_tests('libuoscore.a', arc('native_posix_64', 'x86-64'))
#run_tests('libuedhoc.a', arc('native_posix_64', 'x86-64'))
    # To run the following tests, real hardware must be connected to the PC
    # executing this script. The results of the tests can be examined over a serial console such as GTKterm.
# Cortex M0
#run_tests('libuoscore.a', arc('nrf51dk_nrf51422', 'cortex-m0'))
#run_tests('libuedhoc.a', arc('nrf51dk_nrf51422', 'cortex-m0'))
# Cortex M3
#run_tests('libuoscore.a', arc('nucleo_l152re', 'cortex-m3'))
#run_tests('libuedhoc.a', arc('nucleo_l152re', 'cortex-m3'))
# Cortex M4
#run_tests('libuoscore.a', arc('nrf52dk_nrf52832','cortex-m4'))
#run_tests('libuedhoc.a', arc('nrf52dk_nrf52832','cortex-m4'))
#run_tests('libuoscore.a', arc('nrf52840dk_nrf52840','cortex-m4'))
#run_tests('libuedhoc.a', arc('nrf52840dk_nrf52840','cortex-m4'))
# Cortex M33
#run_tests('libuoscore.a', arc('nrf9160dk_nrf9160', 'cortex-m33'))
#run_tests('libuedhoc.a', arc('nrf9160dk_nrf9160', 'cortex-m33'))
if __name__ == "__main__":
main()
|
[
"os.mkdir",
"subprocess.Popen",
"os.path.basename",
"os.path.exists",
"pathlib.Path",
"collections.namedtuple",
"tarfile.open",
"shutil.rmtree"
] |
[((558, 593), 'collections.namedtuple', 'namedtuple', (['"""arc"""', '"""board, cpu_arc"""'], {}), "('arc', 'board, cpu_arc')\n", (568, 593), False, 'from collections import namedtuple\n'), ((755, 775), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (769, 775), False, 'import os\n'), ((1263, 1332), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (1279, 1332), False, 'import subprocess\n'), ((2133, 2162), 'os.mkdir', 'os.mkdir', (['build_lib_test_path'], {}), '(build_lib_test_path)\n', (2141, 2162), False, 'import os\n'), ((2973, 3058), 'tarfile.open', 'tarfile.open', (["(results_path + '/' + name_only + '_' + arc.cpu_arc + '.tar.gz')", '"""x"""'], {}), "(results_path + '/' + name_only + '_' + arc.cpu_arc + '.tar.gz',\n 'x')\n", (2985, 3058), False, 'import tarfile\n'), ((785, 804), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (798, 804), False, 'import shutil\n'), ((2850, 2868), 'pathlib.Path', 'Path', (['results_path'], {}), '(results_path)\n', (2854, 2868), False, 'from pathlib import Path\n'), ((2937, 2959), 'os.path.basename', 'os.path.basename', (['name'], {}), '(name)\n', (2953, 2959), False, 'import os\n')]
|
"""FizzBuzz Game, by <NAME> <EMAIL>
A number game where you also race against the clock.
Tags: tiny, beginner, game, math"""
__version__ = 0
import sys, time
print('''Fizz Buzz Game, by <NAME> <EMAIL>
Starting with 1, enter increasing numbers.
However, if the number is a multiple of 3, type "fizz" instead of
the number. If the number is a multiple of 5, type "buzz". If the
number is a multiple of 3 and 5, type "fizzbuzz".
So the pattern is:
1 2 fizz 4 buzz fizz 7 8 fizz buzz 11 fizz 13 14 fizzbuzz 16...
A doom clock is counting down. Entering correct responses gives you
more time. How long can you keep entering the correct pattern?''')
input('Press Enter to begin...')
number = 1
doomClock = time.time() + 10 # Player starts with 10 seconds.
while True: # Main game loop.
# Determine the correct response for the current number:
if number % 3 == 0 and number % 5 == 0:
correctResponse = 'fizzbuzz'
elif number % 3 == 0:
correctResponse = 'fizz'
elif number % 5 == 0:
correctResponse = 'buzz'
else:
correctResponse = str(number)
# For the first 16 responses, give them the answer:
if number <= 16:
hint = '(Enter ' + correctResponse + ') '
elif number == 17:
hint = '(You are on your own now!) '
else:
hint = ''
# Get the player's response:
response = input('Next response: ' + hint)
response = response.lower().replace(' ', '')
# See if the player has lost:
if response != correctResponse:
print('NOOOOO! Correct response: ' + correctResponse)
print('Thanks for playing!')
sys.exit()
elif time.time() > doomClock:
print('NOOOOO! You have run out of time!')
print('Thanks for playing!')
sys.exit()
# If the player was right, add 2 seconds to the doom clock.
doomClock += 2
secondsRemaining = round(doomClock - time.time(), 1)
print('DOOM CLOCK: ' + str(secondsRemaining) + ' seconds remaining')
print()
number += 1 # Proceed to the next number to enter.
|
[
"sys.exit",
"time.time"
] |
[((712, 723), 'time.time', 'time.time', ([], {}), '()\n', (721, 723), False, 'import sys, time\n'), ((1635, 1645), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1643, 1645), False, 'import sys, time\n'), ((1655, 1666), 'time.time', 'time.time', ([], {}), '()\n', (1664, 1666), False, 'import sys, time\n'), ((1776, 1786), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1784, 1786), False, 'import sys, time\n'), ((1912, 1923), 'time.time', 'time.time', ([], {}), '()\n', (1921, 1923), False, 'import sys, time\n')]
|
from django.core.mail import send_mail
from django.contrib.auth.mixins import LoginRequiredMixin
from django.shortcuts import render,redirect,reverse
from django.http import HttpResponse
from .models import Lead,Agent,Category
from .forms import LeadForm, LeadModelForm,CustomUserCreationForm,AssignAgentForm,LeadCategoryUpdateForm
from django.views import generic
from agents.mixins import OrganizerAndLoginRequiredMixin
#CRUD+L - Create, Retrieve, Update and Delete + List
class SignupView(generic.CreateView):
template_name='registration/signup.html'
form_class=CustomUserCreationForm
def get_success_url(self):
return reverse("login")
class LandingPageView(generic.TemplateView):
template_name='landing.html'
def landing_page(request):
return render(request, 'landing.html')
class HomePageView(LoginRequiredMixin,generic.ListView):
template_name='leads/home.html'
context_object_name = "leads"
def get_queryset(self):
user = self.request.user
# initial queryset of the leads for the entire organisation
if user.is_organizer:
queryset=Lead.objects.filter(organisation=user.userprofile, agent__isnull=False)
else:
queryset=Lead.objects.filter(organisation=user.agent.organisation, agent__isnull=False)
# filter for the agent that is logged in
queryset = queryset.filter(agent__user=user)
return queryset
def get_context_data(self,**kwargs):
context = super(HomePageView,self).get_context_data(**kwargs)
user = self.request.user
if user.is_organizer:
queryset=Lead.objects.filter(organisation=user.userprofile, agent__isnull=True)
context.update({
"unassigned_leads":queryset
})
return context
def home_page(request):
leads = Lead.objects.all()
context={
'leads':leads,
}
return render(request, 'leads/home.html', context)
class LeadDetailView(LoginRequiredMixin,generic.DetailView):
template_name='leads/detail.html'
context_object_name = "lead"
def get_queryset(self):
user = self.request.user
# initial queryset of the leads for the entire organisation
if user.is_organizer:
queryset=Lead.objects.filter(organisation=user.userprofile)
else:
queryset=Lead.objects.filter(organisation=user.agent.organisation)
# filter for the agent that is logged in
queryset = queryset.filter(agent__user=user)
return queryset
def lead_detail(request,pk):
lead = Lead.objects.get(id=pk)
context = {
'lead':lead,
}
return render(request, 'leads/detail.html', context)
class LeadCreateView(OrganizerAndLoginRequiredMixin,generic.CreateView):
template_name='leads/create.html'
form_class=LeadModelForm
def get_success_url(self):
return reverse("leads:home")
def form_valid(self,form):
lead = form.save(commit=False)
lead.organisation = self.request.user.userprofile
lead.save()
send_mail(
subject="A lead has been created",
message="Go to the site to check it out",
from_email='<EMAIL>',
recipient_list=['<EMAIL>']
)
return super(LeadCreateView,self).form_valid(form)
def lead_create(request):
form = LeadModelForm()
if request.method == "POST":
form = LeadModelForm(request.POST)
if form.is_valid():
form.save()
return redirect("/")
context = {
'form':form
}
return render(request, 'leads/create.html', context)
class LeadUpdateView(OrganizerAndLoginRequiredMixin,generic.UpdateView):
template_name='leads/update.html'
form_class=LeadModelForm
def get_queryset(self):
user = self.request.user
# initial queryset of the leads for the entire organisation
return Lead.objects.filter(organisation=user.userprofile)
def get_success_url(self):
return reverse("leads:home")
def lead_update(request,pk):
lead = Lead.objects.get(id=pk)
form = LeadModelForm(instance=lead)
if request.method == "POST":
form = LeadModelForm(request.POST,instance=lead)
if form.is_valid():
form.save()
return redirect("/")
context = {
'form':form
}
return render(request, 'leads/update.html', context)
class LeadDeleteView(OrganizerAndLoginRequiredMixin,generic.DeleteView):
template_name='leads/delete.html'
def get_queryset(self):
user = self.request.user
# initial queryset of the leads for the entire organisation
return Lead.objects.filter(organisation=user.userprofile)
def get_success_url(self):
return reverse("leads:home")
def lead_delete(request,pk):
lead = Lead.objects.get(id=pk)
lead.delete()
return redirect('/')
class AssignAgentView(OrganizerAndLoginRequiredMixin,generic.FormView):
template_name='leads/assign_agent.html'
form_class=AssignAgentForm
def get_form_kwargs(self,**kwargs):
kwargs = super(AssignAgentView,self).get_form_kwargs(**kwargs)
kwargs.update({"request":self.request})
return kwargs
def get_success_url(self):
return reverse("leads:home")
def form_valid(self,form):
agent = form.cleaned_data["agent"]
lead = Lead.objects.get(id=self.kwargs["pk"])
lead.agent = agent
lead.save()
return super(AssignAgentView,self).form_valid(form)
class CategoryListView(LoginRequiredMixin,generic.ListView):
template_name = "leads/category_list.html"
context_object_name = "category_list"
def get_context_data(self,**kwargs):
context = super(CategoryListView,self).get_context_data(**kwargs)
user = self.request.user
# initial queryset of the leads for the entire organisation
if user.is_organizer:
queryset=Lead.objects.filter(organisation=user.userprofile)
else:
queryset=Lead.objects.filter(organisation=user.agent.organisation)
context.update({
"unassigned_lead_count":queryset.filter(category__isnull=True).count()
})
return context
def get_queryset(self):
user = self.request.user
# initial queryset of the leads for the entire organisation
if user.is_organizer:
queryset=Category.objects.filter(organisation=user.userprofile)
else:
queryset=Category.objects.filter(organisation=user.agent.organisation)
return queryset
class CategoryDetailView(LoginRequiredMixin,generic.DetailView):
template_name="leads/category_detail.html"
context_object_name = "category"
# def get_context_data(self,**kwargs):
# context = super(CategoryDetailView,self).get_context_data(**kwargs)
# # qs = Lead.objects.filter(category=self.get_object()) this is kind of the same as the leads variable
# leads = self.get_object().leads.all()
    #     # self.get_object().lead_set.all() this is how to call all the leads related to the category when it is being used as a foreign key
    #     # if you have a related name set in the model you can use self.get_object()./**Insert Related name**//.all() -> self.get_object().leads.all()
# context.update({
# "leads":leads
# })
# return context
def get_queryset(self):
user = self.request.user
# initial queryset of the leads for the entire organisation
if user.is_organizer:
queryset=Category.objects.filter(organisation=user.userprofile)
else:
queryset=Category.objects.filter(organisation=user.agent.organisation)
return queryset
class LeadCategoryUpdateView(LoginRequiredMixin,generic.UpdateView):
template_name='leads/category_update.html'
form_class=LeadCategoryUpdateForm
def get_queryset(self):
user = self.request.user
# initial queryset of the leads for the entire organisation
if user.is_organizer:
queryset=Lead.objects.filter(organisation=user.userprofile)
else:
queryset=Lead.objects.filter(organisation=user.agent.organisation)
queryset = queryset.filter(agent__user=user)
return queryset
def get_success_url(self):
return reverse("leads:detail-view",kwargs={"pk":self.get_object().id})
# def lead_create(request):
# form = LeadForm()
# if request.method == "POST":
# form = LeadForm(request.POST)
# if form.is_valid():
# first_name=form.cleaned_data['first_name']
# last_name=form.cleaned_data['last_name']
# age=form.cleaned_data['age']
# agent = Agent.objects.first()
# Lead.objects.create(
# first_name=first_name,
# last_name=last_name,
# age=age,
# agent=agent)
# return redirect("/")
# context = {
# 'form':form
# }
# return render(request, 'leads/create.html', context)
# def lead_update(request,pk):
# lead = Lead.objects.get(id=pk)
# form = LeadForm()
# if request.method == "POST":
# form = LeadForm(request.POST)
# if form.is_valid():
# first_name=form.cleaned_data['first_name']
# last_name=form.cleaned_data['last_name']
# age=form.cleaned_data['age']
# agent = Agent.objects.first()
# lead.first_name=first_name,
# lead.last_name=last_name,
# lead.age=age,
# lead.agent=agent
# lead.save()
# return redirect("/")
# context = {
# 'form':form
# }
# return render(request, 'leads/update.html', context)
|
[
"django.shortcuts.render",
"django.core.mail.send_mail",
"django.shortcuts.redirect",
"django.shortcuts.reverse"
] |
[((784, 815), 'django.shortcuts.render', 'render', (['request', '"""landing.html"""'], {}), "(request, 'landing.html')\n", (790, 815), False, 'from django.shortcuts import render, redirect, reverse\n'), ((1943, 1986), 'django.shortcuts.render', 'render', (['request', '"""leads/home.html"""', 'context'], {}), "(request, 'leads/home.html', context)\n", (1949, 1986), False, 'from django.shortcuts import render, redirect, reverse\n'), ((2701, 2746), 'django.shortcuts.render', 'render', (['request', '"""leads/detail.html"""', 'context'], {}), "(request, 'leads/detail.html', context)\n", (2707, 2746), False, 'from django.shortcuts import render, redirect, reverse\n'), ((3643, 3688), 'django.shortcuts.render', 'render', (['request', '"""leads/create.html"""', 'context'], {}), "(request, 'leads/create.html', context)\n", (3649, 3688), False, 'from django.shortcuts import render, redirect, reverse\n'), ((4430, 4475), 'django.shortcuts.render', 'render', (['request', '"""leads/update.html"""', 'context'], {}), "(request, 'leads/update.html', context)\n", (4436, 4475), False, 'from django.shortcuts import render, redirect, reverse\n'), ((4952, 4965), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (4960, 4965), False, 'from django.shortcuts import render, redirect, reverse\n'), ((648, 664), 'django.shortcuts.reverse', 'reverse', (['"""login"""'], {}), "('login')\n", (655, 664), False, 'from django.shortcuts import render, redirect, reverse\n'), ((2936, 2957), 'django.shortcuts.reverse', 'reverse', (['"""leads:home"""'], {}), "('leads:home')\n", (2943, 2957), False, 'from django.shortcuts import render, redirect, reverse\n'), ((3115, 3261), 'django.core.mail.send_mail', 'send_mail', ([], {'subject': '"""A lead has been created"""', 'message': '"""Go to the site to check it out"""', 'from_email': '"""<EMAIL>"""', 'recipient_list': "['<EMAIL>']"}), "(subject='A lead has been created', message=\n 'Go to the site to check it out', from_email='<EMAIL>', recipient_list=\n ['<EMAIL>'])\n", (3124, 3261), False, 'from django.core.mail import send_mail\n'), ((4076, 4097), 'django.shortcuts.reverse', 'reverse', (['"""leads:home"""'], {}), "('leads:home')\n", (4083, 4097), False, 'from django.shortcuts import render, redirect, reverse\n'), ((4834, 4855), 'django.shortcuts.reverse', 'reverse', (['"""leads:home"""'], {}), "('leads:home')\n", (4841, 4855), False, 'from django.shortcuts import render, redirect, reverse\n'), ((5347, 5368), 'django.shortcuts.reverse', 'reverse', (['"""leads:home"""'], {}), "('leads:home')\n", (5354, 5368), False, 'from django.shortcuts import render, redirect, reverse\n'), ((3574, 3587), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (3582, 3587), False, 'from django.shortcuts import render, redirect, reverse\n'), ((4364, 4377), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (4372, 4377), False, 'from django.shortcuts import render, redirect, reverse\n')]
|
# coding=utf-8
''' test case for loss
'''
import tensorflow as tf
from segelectri.loss_metrics.loss import FocalLoss, LovaszLoss, DiceLoss, BoundaryLoss
class TestLoss(tf.test.TestCase):
def setUp(self):
self.y_true = tf.random.uniform((2, 512, 512),
minval=0,
maxval=3,
dtype=tf.int64)
self.y_pred = tf.random.uniform((2, 512, 512, 3),
minval=0,
maxval=1,
dtype=tf.float32)
def test_focal_loss(self):
focall = FocalLoss()
loss = focall(self.y_true, self.y_pred)
self.assertAllEqual(loss.shape, ())
def test_lovasz_loss(self):
lovaszl = LovaszLoss()
loss = lovaszl(self.y_true, self.y_pred)
self.assertAllEqual(loss.shape, ())
def test_cross_entropy_loss(self):
scce = tf.keras.losses.SparseCategoricalCrossentropy()
loss = scce(self.y_true, self.y_pred)
self.assertAllEqual(loss.shape, ())
def test_dice_loss(self):
dicel = DiceLoss()
loss = dicel(self.y_true, self.y_pred)
self.assertAllEqual(loss.shape, ())
def test_boundary_loss(self):
boundaryl = BoundaryLoss()
loss = boundaryl(self.y_true, self.y_pred)
self.assertAllEqual(loss.shape, ())
|
[
"tensorflow.keras.losses.SparseCategoricalCrossentropy",
"segelectri.loss_metrics.loss.FocalLoss",
"segelectri.loss_metrics.loss.DiceLoss",
"tensorflow.random.uniform",
"segelectri.loss_metrics.loss.LovaszLoss",
"segelectri.loss_metrics.loss.BoundaryLoss"
] |
[((232, 300), 'tensorflow.random.uniform', 'tf.random.uniform', (['(2, 512, 512)'], {'minval': '(0)', 'maxval': '(3)', 'dtype': 'tf.int64'}), '((2, 512, 512), minval=0, maxval=3, dtype=tf.int64)\n', (249, 300), True, 'import tensorflow as tf\n'), ((443, 516), 'tensorflow.random.uniform', 'tf.random.uniform', (['(2, 512, 512, 3)'], {'minval': '(0)', 'maxval': '(1)', 'dtype': 'tf.float32'}), '((2, 512, 512, 3), minval=0, maxval=1, dtype=tf.float32)\n', (460, 516), True, 'import tensorflow as tf\n'), ((686, 697), 'segelectri.loss_metrics.loss.FocalLoss', 'FocalLoss', ([], {}), '()\n', (695, 697), False, 'from segelectri.loss_metrics.loss import FocalLoss, LovaszLoss, DiceLoss, BoundaryLoss\n'), ((841, 853), 'segelectri.loss_metrics.loss.LovaszLoss', 'LovaszLoss', ([], {}), '()\n', (851, 853), False, 'from segelectri.loss_metrics.loss import FocalLoss, LovaszLoss, DiceLoss, BoundaryLoss\n'), ((1002, 1049), 'tensorflow.keras.losses.SparseCategoricalCrossentropy', 'tf.keras.losses.SparseCategoricalCrossentropy', ([], {}), '()\n', (1047, 1049), True, 'import tensorflow as tf\n'), ((1187, 1197), 'segelectri.loss_metrics.loss.DiceLoss', 'DiceLoss', ([], {}), '()\n', (1195, 1197), False, 'from segelectri.loss_metrics.loss import FocalLoss, LovaszLoss, DiceLoss, BoundaryLoss\n'), ((1344, 1358), 'segelectri.loss_metrics.loss.BoundaryLoss', 'BoundaryLoss', ([], {}), '()\n', (1356, 1358), False, 'from segelectri.loss_metrics.loss import FocalLoss, LovaszLoss, DiceLoss, BoundaryLoss\n')]
|
"""Dynamo access"""
import os
from time import sleep
import boto.dynamodb2
from .khan_logger import KhanLogger
__author__ = 'mattjmorris'
class Dynamo(object):
def __init__(self, access_key=None, secret=None):
"""
If access_key and/or secret are not passed in, assumes we are accessing erenev's aws account and that the
access info is stored as environment variables on the current server.
Connection and Table are available to clients via self properties, in case clients wish to use those objects
directly.
"""
access_key = access_key or os.getenv('VEN_S3_ACCESS_KEY')
secret = secret or os.getenv('VEN_S3_SECRET')
self.connection = boto.dynamodb2.connect_to_region(region_name='eu-west-1', aws_access_key_id=access_key,
aws_secret_access_key=secret)
self.logger = KhanLogger(origin=self.__class__.__name__)
def modify_throughput(self, requested_read, requested_write, table):
"""
Used to change the throughput of a specific table
"""
read, write, num_dec_today, table_status = self.get_table_info(table)
while requested_read != read or requested_write != write:
self.logger.info(msg="Modifying {} from {}, {} to {}, {}".format(table.table_name, read, write,
requested_read, requested_write))
new_read, new_write = self._new_read_write(read, requested_read, write, requested_write)
self.logger.info(msg="going to request read {} and write {}".format(new_read, new_write))
if (new_read < read or new_write < write) and num_dec_today >= 4:
# Todo - replace with custom error and handle in client code
raise ValueError("Sorry, can't do any more decreases today.")
table.update(throughput={'read': new_read, 'write': new_write})
sleep_secs = 30
table_status = 'UPDATING'
self.logger.info(msg="Sleeping for {} secs before starting".format(sleep_secs))
sleep(sleep_secs)
while table_status == 'UPDATING':
self.logger.info(msg="Sleeping for {} secs".format(sleep_secs))
sleep(sleep_secs)
read, write, num_dec_today, table_status = self.get_table_info(table)
return read, write
def _new_read_write(self, read, requested_read, write, requested_write):
"""
        Ensures that we change throughput in the correct amounts so that DynamoDB does not reject the update (increases are capped at doubling).
"""
if requested_read == 0:
read_change_prop = 0
else:
read_change_prop = requested_read / float(read)
# max increase allowed is a doubling
if read_change_prop > 2:
new_read = read * 2
else:
new_read = requested_read
if requested_write == 0:
write_change_prop = 0
else:
write_change_prop = requested_write / float(write)
if write_change_prop > 2:
new_write = write * 2
else:
new_write = requested_write
return new_read, new_write
def get_table_info(self, table):
"""
Returns meta information about the table, such as read speed, write speed, current status,
and number of decreases today. Useful for figuring out how to change throughput.
"""
desc = table.describe()
status = desc['Table']['TableStatus']
throughput = desc['Table']['ProvisionedThroughput']
num_decreases = throughput['NumberOfDecreasesToday']
read = throughput['ReadCapacityUnits']
write = throughput['WriteCapacityUnits']
return read, write, num_decreases, status
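# Minimal usage sketch (illustrative only -- assumes an existing table, the
# VEN_S3_* environment variables, and Table from boto.dynamodb2.table):
#
#   from boto.dynamodb2.table import Table
#   dynamo = Dynamo()
#   table = Table('my-table', connection=dynamo.connection)
#   read, write = dynamo.modify_throughput(50, 25, table)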
|
[
"os.getenv",
"time.sleep"
] |
[((606, 636), 'os.getenv', 'os.getenv', (['"""VEN_S3_ACCESS_KEY"""'], {}), "('VEN_S3_ACCESS_KEY')\n", (615, 636), False, 'import os\n'), ((664, 690), 'os.getenv', 'os.getenv', (['"""VEN_S3_SECRET"""'], {}), "('VEN_S3_SECRET')\n", (673, 690), False, 'import os\n'), ((2166, 2183), 'time.sleep', 'sleep', (['sleep_secs'], {}), '(sleep_secs)\n', (2171, 2183), False, 'from time import sleep\n'), ((2326, 2343), 'time.sleep', 'sleep', (['sleep_secs'], {}), '(sleep_secs)\n', (2331, 2343), False, 'from time import sleep\n')]
|
# A bit of setup
from __future__ import print_function
import Models
import code_base.solver as slvr
from code_base.data_utils import *
from code_base.layers import *
from code_base.solver import Solver
settings.time_analysis['logger_enabled'] = False
# for auto-reloading external modules
# see http://stackoverflow.com/questions/1907993/autoreload-of-modules-in-ipython
def rel_error(x, y):
""" returns relative error """
return np.max(np.abs(x - y) / (np.maximum(1e-8, np.abs(x) + np.abs(y))))
def getSolver(model, data, alpha, alpha_decay, epoch=10, batch_size=128):
return Solver(model, data, num_epochs=epoch, batch_size=batch_size,
update_rule='adam',
optim_config={
'learning_rate': alpha,
}, lr_decay=alpha_decay, verbose=True, print_every=1)
def train_model(model_key):
slvr._file.write('\n\n>>>> MODEL - ' + model_key + ' <<<<')
model = Models.Models[model_key]
solver = getSolver(model=model, data=data, alpha=3e-3, alpha_decay=0.5, epoch=15)
start = datetime.datetime.now()
solver.train()
end = datetime.datetime.now()
slvr._file.write('\nTotal time taken: ' + str(end - start))
slvr._file.flush()
model_key = model_key + '_alpha3e-3'
save_metrics(solver,model_key)
save_model(model, './models/cnn_model_' + model_key + '.p')
def save_metrics(solver, model_key):
pickle.dump(solver.loss_history,open('./metrics/'+model_key+'_loss_history.p','wb'))
pickle.dump(solver.train_acc_history,open('./metrics/'+model_key+'_train_acc_history.p','wb'))
pickle.dump(solver.val_acc_history,open('./metrics/'+model_key+'_val_acc_history.p','wb'))
data = pickle.load(open('./data.p', 'rb'), encoding='latin1')
# create augmented data - mirror image
# aug_X_train = np.flip(data['X_train'], 3)
# data['X_train'] = np.concatenate((data['X_train'], aug_X_train), 0)
# data['y_train'] = np.concatenate((data['y_train'], data['y_train']), 0)
for k, v in data.items():
print('%s: ' % k, v.shape)
train_model('conv32_filter7_fc256_drop0')
train_model('conv32_filter7_fc256_drop02')
# train_model('conv64_filter5_fc512_drop0')
# train_model('conv64_filter5_fc512_drop03')
# train_model('conv128_filter3_fc1024_drop0')
# train_model('conv128_filter3_fc1024_drop04')
|
[
"code_base.solver._file.flush",
"code_base.solver._file.write",
"code_base.solver.Solver"
] |
[((596, 775), 'code_base.solver.Solver', 'Solver', (['model', 'data'], {'num_epochs': 'epoch', 'batch_size': 'batch_size', 'update_rule': '"""adam"""', 'optim_config': "{'learning_rate': alpha}", 'lr_decay': 'alpha_decay', 'verbose': '(True)', 'print_every': '(1)'}), "(model, data, num_epochs=epoch, batch_size=batch_size, update_rule=\n 'adam', optim_config={'learning_rate': alpha}, lr_decay=alpha_decay,\n verbose=True, print_every=1)\n", (602, 775), False, 'from code_base.solver import Solver\n'), ((880, 939), 'code_base.solver._file.write', 'slvr._file.write', (["('\\n\\n>>>> MODEL - ' + model_key + ' <<<<')"], {}), "('\\n\\n>>>> MODEL - ' + model_key + ' <<<<')\n", (896, 939), True, 'import code_base.solver as slvr\n'), ((1220, 1238), 'code_base.solver._file.flush', 'slvr._file.flush', ([], {}), '()\n', (1236, 1238), True, 'import code_base.solver as slvr\n')]
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from network import network_pb2 as network_dot_network__pb2
class NetworkStub(object):
"""Network service is usesd to gain visibility into networks
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Connect = channel.unary_unary(
'/network.Network/Connect',
request_serializer=network_dot_network__pb2.ConnectRequest.SerializeToString,
response_deserializer=network_dot_network__pb2.ConnectResponse.FromString,
)
self.Graph = channel.unary_unary(
'/network.Network/Graph',
request_serializer=network_dot_network__pb2.GraphRequest.SerializeToString,
response_deserializer=network_dot_network__pb2.GraphResponse.FromString,
)
self.Nodes = channel.unary_unary(
'/network.Network/Nodes',
request_serializer=network_dot_network__pb2.NodesRequest.SerializeToString,
response_deserializer=network_dot_network__pb2.NodesResponse.FromString,
)
self.Routes = channel.unary_unary(
'/network.Network/Routes',
request_serializer=network_dot_network__pb2.RoutesRequest.SerializeToString,
response_deserializer=network_dot_network__pb2.RoutesResponse.FromString,
)
self.Services = channel.unary_unary(
'/network.Network/Services',
request_serializer=network_dot_network__pb2.ServicesRequest.SerializeToString,
response_deserializer=network_dot_network__pb2.ServicesResponse.FromString,
)
self.Status = channel.unary_unary(
'/network.Network/Status',
request_serializer=network_dot_network__pb2.StatusRequest.SerializeToString,
response_deserializer=network_dot_network__pb2.StatusResponse.FromString,
)
class NetworkServicer(object):
"""Network service is usesd to gain visibility into networks
"""
def Connect(self, request, context):
"""Connect to the network
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Graph(self, request, context):
"""Returns the entire network graph
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Nodes(self, request, context):
"""Returns a list of known nodes in the network
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Routes(self, request, context):
"""Returns a list of known routes in the network
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Services(self, request, context):
"""Returns a list of known services based on routes
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Status(self, request, context):
"""Status returns network status
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_NetworkServicer_to_server(servicer, server):
rpc_method_handlers = {
'Connect': grpc.unary_unary_rpc_method_handler(
servicer.Connect,
request_deserializer=network_dot_network__pb2.ConnectRequest.FromString,
response_serializer=network_dot_network__pb2.ConnectResponse.SerializeToString,
),
'Graph': grpc.unary_unary_rpc_method_handler(
servicer.Graph,
request_deserializer=network_dot_network__pb2.GraphRequest.FromString,
response_serializer=network_dot_network__pb2.GraphResponse.SerializeToString,
),
'Nodes': grpc.unary_unary_rpc_method_handler(
servicer.Nodes,
request_deserializer=network_dot_network__pb2.NodesRequest.FromString,
response_serializer=network_dot_network__pb2.NodesResponse.SerializeToString,
),
'Routes': grpc.unary_unary_rpc_method_handler(
servicer.Routes,
request_deserializer=network_dot_network__pb2.RoutesRequest.FromString,
response_serializer=network_dot_network__pb2.RoutesResponse.SerializeToString,
),
'Services': grpc.unary_unary_rpc_method_handler(
servicer.Services,
request_deserializer=network_dot_network__pb2.ServicesRequest.FromString,
response_serializer=network_dot_network__pb2.ServicesResponse.SerializeToString,
),
'Status': grpc.unary_unary_rpc_method_handler(
servicer.Status,
request_deserializer=network_dot_network__pb2.StatusRequest.FromString,
response_serializer=network_dot_network__pb2.StatusResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'network.Network', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Network(object):
"""Network service is usesd to gain visibility into networks
"""
@staticmethod
def Connect(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/network.Network/Connect',
network_dot_network__pb2.ConnectRequest.SerializeToString,
network_dot_network__pb2.ConnectResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Graph(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/network.Network/Graph',
network_dot_network__pb2.GraphRequest.SerializeToString,
network_dot_network__pb2.GraphResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Nodes(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/network.Network/Nodes',
network_dot_network__pb2.NodesRequest.SerializeToString,
network_dot_network__pb2.NodesResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Routes(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/network.Network/Routes',
network_dot_network__pb2.RoutesRequest.SerializeToString,
network_dot_network__pb2.RoutesResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Services(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/network.Network/Services',
network_dot_network__pb2.ServicesRequest.SerializeToString,
network_dot_network__pb2.ServicesResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Status(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/network.Network/Status',
network_dot_network__pb2.StatusRequest.SerializeToString,
network_dot_network__pb2.StatusResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
|
[
"grpc.method_handlers_generic_handler",
"grpc.unary_unary_rpc_method_handler",
"grpc.experimental.unary_unary"
] |
[((5845, 5921), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""network.Network"""', 'rpc_method_handlers'], {}), "('network.Network', rpc_method_handlers)\n", (5881, 5921), False, 'import grpc\n'), ((4024, 4240), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.Connect'], {'request_deserializer': 'network_dot_network__pb2.ConnectRequest.FromString', 'response_serializer': 'network_dot_network__pb2.ConnectResponse.SerializeToString'}), '(servicer.Connect, request_deserializer=\n network_dot_network__pb2.ConnectRequest.FromString, response_serializer\n =network_dot_network__pb2.ConnectResponse.SerializeToString)\n', (4059, 4240), False, 'import grpc\n'), ((4328, 4538), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.Graph'], {'request_deserializer': 'network_dot_network__pb2.GraphRequest.FromString', 'response_serializer': 'network_dot_network__pb2.GraphResponse.SerializeToString'}), '(servicer.Graph, request_deserializer=\n network_dot_network__pb2.GraphRequest.FromString, response_serializer=\n network_dot_network__pb2.GraphResponse.SerializeToString)\n', (4363, 4538), False, 'import grpc\n'), ((4626, 4836), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.Nodes'], {'request_deserializer': 'network_dot_network__pb2.NodesRequest.FromString', 'response_serializer': 'network_dot_network__pb2.NodesResponse.SerializeToString'}), '(servicer.Nodes, request_deserializer=\n network_dot_network__pb2.NodesRequest.FromString, response_serializer=\n network_dot_network__pb2.NodesResponse.SerializeToString)\n', (4661, 4836), False, 'import grpc\n'), ((4925, 5138), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.Routes'], {'request_deserializer': 'network_dot_network__pb2.RoutesRequest.FromString', 'response_serializer': 'network_dot_network__pb2.RoutesResponse.SerializeToString'}), '(servicer.Routes, request_deserializer=\n network_dot_network__pb2.RoutesRequest.FromString, response_serializer=\n network_dot_network__pb2.RoutesResponse.SerializeToString)\n', (4960, 5138), False, 'import grpc\n'), ((5229, 5452), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.Services'], {'request_deserializer': 'network_dot_network__pb2.ServicesRequest.FromString', 'response_serializer': 'network_dot_network__pb2.ServicesResponse.SerializeToString'}), '(servicer.Services, request_deserializer\n =network_dot_network__pb2.ServicesRequest.FromString,\n response_serializer=network_dot_network__pb2.ServicesResponse.\n SerializeToString)\n', (5264, 5452), False, 'import grpc\n'), ((5537, 5750), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.Status'], {'request_deserializer': 'network_dot_network__pb2.StatusRequest.FromString', 'response_serializer': 'network_dot_network__pb2.StatusResponse.SerializeToString'}), '(servicer.Status, request_deserializer=\n network_dot_network__pb2.StatusRequest.FromString, response_serializer=\n network_dot_network__pb2.StatusResponse.SerializeToString)\n', (5572, 5750), False, 'import grpc\n'), ((6456, 6764), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/network.Network/Connect"""', 'network_dot_network__pb2.ConnectRequest.SerializeToString', 'network_dot_network__pb2.ConnectResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target, '/network.Network/Connect',\n network_dot_network__pb2.ConnectRequest.SerializeToString,\n network_dot_network__pb2.ConnectResponse.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (6485, 6764), False, 'import grpc\n'), ((7116, 7418), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/network.Network/Graph"""', 'network_dot_network__pb2.GraphRequest.SerializeToString', 'network_dot_network__pb2.GraphResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target, '/network.Network/Graph',\n network_dot_network__pb2.GraphRequest.SerializeToString,\n network_dot_network__pb2.GraphResponse.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (7145, 7418), False, 'import grpc\n'), ((7770, 8072), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/network.Network/Nodes"""', 'network_dot_network__pb2.NodesRequest.SerializeToString', 'network_dot_network__pb2.NodesResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target, '/network.Network/Nodes',\n network_dot_network__pb2.NodesRequest.SerializeToString,\n network_dot_network__pb2.NodesResponse.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (7799, 8072), False, 'import grpc\n'), ((8425, 8730), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/network.Network/Routes"""', 'network_dot_network__pb2.RoutesRequest.SerializeToString', 'network_dot_network__pb2.RoutesResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target, '/network.Network/Routes',\n network_dot_network__pb2.RoutesRequest.SerializeToString,\n network_dot_network__pb2.RoutesResponse.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (8454, 8730), False, 'import grpc\n'), ((9085, 9396), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/network.Network/Services"""', 'network_dot_network__pb2.ServicesRequest.SerializeToString', 'network_dot_network__pb2.ServicesResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target, '/network.Network/Services',\n network_dot_network__pb2.ServicesRequest.SerializeToString,\n network_dot_network__pb2.ServicesResponse.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (9114, 9396), False, 'import grpc\n'), ((9749, 10054), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/network.Network/Status"""', 'network_dot_network__pb2.StatusRequest.SerializeToString', 'network_dot_network__pb2.StatusResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target, '/network.Network/Status',\n network_dot_network__pb2.StatusRequest.SerializeToString,\n network_dot_network__pb2.StatusResponse.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (9778, 10054), False, 'import grpc\n')]
|
from executor.executor import Executor
from argparse import ArgumentParser
from configs.mesh import add_mesh_switch_arguments
from configs.mesh import SimpleBlockMeshConfig, SimpleBlockMeshArguments
from configs.mesh import RailMeshArguments, RailMeshConfig
from configs.fragmentation import FragmentationConfig, FragmentationArguments
from configs.execution import ExecutionConfig, ExecutionArguments
from mesh_generator.simple_generator import SimpleBlockMeshGenerator
from mesh_generator.rail_generator import RailMeshGenerator
from sys import argv
# TODO use subprocess.getoutput()
# @brief Beam end load task, only two configurable parameters and two restrictions (3 functions)
# @restrictions
# 1) Stress is not more than specified value
# 2) Deformation is not more than specified value
# @criterion
# 1) Weight should be minimum
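# Mapping onto the methods below:
#   constraint_0 -> deformation must not exceed k_max_deformation
#   constraint_1 -> stress must not exceed k_max_stress
#   criterion_0  -> minimise weight = density * volume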
class BeamSolver:
def __init__(self):
self.k_max_deformation = 2.139e-6
self.k_max_stress = 775900
self.k_density = 7850
self.k_mm_to_m = 0.001
# Create default mesh generator config and fragmentation config
self.mesh_config = SimpleBlockMeshConfig()
self.fragmentation_config = FragmentationConfig()
self.execution_config = ExecutionConfig()
self.execution_config.execution_folder = "/home/lenferd/OpenFOAM/lenferd-v1906/run/beamEndLoad-20-04-25/"
self.execution_config.output_dir = self.execution_config.execution_folder + "out/"
self.execution_config.prepare_env_script = "$HOME/prog/OpenFOAM/OpenFOAM-dev/etc/bashrc_modified"
def set_plane_sizes(self, height, width):
self.mesh_config.length_mm = 1000
self.mesh_config.height_mm = height
self.mesh_config.width_mm = width
mesh = SimpleBlockMeshGenerator(self.mesh_config, self.fragmentation_config, self.execution_config)
mesh.create()
mesh.generate()
    # Deformation not more than
def constraint_0(self):
deformation_name = "D"
        # FIXME execution for reproduced constraint. Need to use hash if possible
executor = Executor(self.execution_config, self.mesh_config, self.fragmentation_config)
executor.run()
results = executor.get_results()
print("==== D constraint_0")
print(results)
print(results[deformation_name])
print(results[deformation_name] < self.k_max_deformation)
print(results[deformation_name] - self.k_max_deformation)
return results[deformation_name] - self.k_max_deformation
    # Stress not more than
def constraint_1(self):
        stress_name = "D"
executor = Executor(self.execution_config, self.mesh_config, self.fragmentation_config)
executor.run()
results = executor.get_results()
print("==== stress constraint_1")
print(results)
        print(results[stress_name])
        print(results[stress_name] < self.k_max_stress)
        print(results[stress_name] - self.k_max_stress)
        return results[stress_name] - self.k_max_stress
# Weight (minimum should be)
def criterion_0(self):
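        # Worked example (hypothetical sizes, not taken from the script): a
        # 1000 x 10 x 20 mm beam has volume 1.0 * 0.01 * 0.02 = 2e-4 m^3, so
        # weight = 7850 kg/m^3 * 2e-4 m^3 = 1.57 kg.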
print("==== mass criterion_0")
weight = self.k_density * \
self.mesh_config.width_mm * self.k_mm_to_m \
* self.mesh_config.height_mm * self.k_mm_to_m \
* self.mesh_config.length_mm * self.k_mm_to_m
print(weight)
return weight
if __name__ == '__main__':
# print("BEAM SOLVER")
# print("args: {}".format(argv))
parameters = argv[1]
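    # Expected argv[1] format (example inferred from the parsing below):
    #   "Function:criterion.0;Points:10,20"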
paramList = parameters.split(";")
    params = {}
    for param in paramList:
        split_result = param.split(":")
        params[split_result[0]] = split_result[1]
    # print(params)
    params["Points"] = params["Points"].split(",")
    params["Points"] = [float(i) for i in params["Points"]]
    # print(params)
    function = params["Function"]
    points = params["Points"]
# Create BeamSolver
beamSolver = BeamSolver()
# first - height, second - width
beamSolver.set_plane_sizes(points[0], points[1])
result = None
if function == "constraint.0":
result = beamSolver.constraint_0()
if function == "constraint.1":
result = beamSolver.constraint_1()
if function == "criterion.0":
result = beamSolver.criterion_0()
print("BeamSolver:[{}]".format(result))
|
[
"configs.execution.ExecutionConfig",
"configs.mesh.SimpleBlockMeshConfig",
"executor.executor.Executor",
"configs.fragmentation.FragmentationConfig",
"mesh_generator.simple_generator.SimpleBlockMeshGenerator"
] |
[((1120, 1143), 'configs.mesh.SimpleBlockMeshConfig', 'SimpleBlockMeshConfig', ([], {}), '()\n', (1141, 1143), False, 'from configs.mesh import SimpleBlockMeshConfig, SimpleBlockMeshArguments\n'), ((1180, 1201), 'configs.fragmentation.FragmentationConfig', 'FragmentationConfig', ([], {}), '()\n', (1199, 1201), False, 'from configs.fragmentation import FragmentationConfig, FragmentationArguments\n'), ((1234, 1251), 'configs.execution.ExecutionConfig', 'ExecutionConfig', ([], {}), '()\n', (1249, 1251), False, 'from configs.execution import ExecutionConfig, ExecutionArguments\n'), ((1755, 1852), 'mesh_generator.simple_generator.SimpleBlockMeshGenerator', 'SimpleBlockMeshGenerator', (['self.mesh_config', 'self.fragmentation_config', 'self.execution_config'], {}), '(self.mesh_config, self.fragmentation_config, self.\n execution_config)\n', (1779, 1852), False, 'from mesh_generator.simple_generator import SimpleBlockMeshGenerator\n'), ((2086, 2162), 'executor.executor.Executor', 'Executor', (['self.execution_config', 'self.mesh_config', 'self.fragmentation_config'], {}), '(self.execution_config, self.mesh_config, self.fragmentation_config)\n', (2094, 2162), False, 'from executor.executor import Executor\n'), ((2628, 2704), 'executor.executor.Executor', 'Executor', (['self.execution_config', 'self.mesh_config', 'self.fragmentation_config'], {}), '(self.execution_config, self.mesh_config, self.fragmentation_config)\n', (2636, 2704), False, 'from executor.executor import Executor\n')]
|
from glob import glob
from os import path
import pytest
import audiofile as af
import numpy as np
import audresample
def set_ones(signal, channels):
signal[channels, :] = 1
return signal
def mixdown(signal):
return np.atleast_2d(np.mean(signal, axis=0))
@pytest.mark.parametrize(
'signal, channels, mixdown, upmix, always_copy, expect',
[
# empty signal
(
np.zeros(0, dtype=np.float32),
None,
False,
None,
False,
np.zeros((1, 0), dtype=np.float32),
),
(
np.zeros((1, 0), dtype=np.float32),
None,
False,
None,
False,
np.zeros((1, 0), dtype=np.float32),
),
(
np.zeros((1, 0), dtype=np.float32),
0,
False,
None,
False,
np.zeros((1, 0), dtype=np.float32),
),
(
np.zeros((1, 0), dtype=np.float32),
1,
False,
'repeat',
False,
np.zeros((1, 0), dtype=np.float32),
),
(
np.zeros((1, 0), dtype=np.float32),
1,
False,
'zeros',
False,
np.zeros((1, 0), dtype=np.float32),
),
(
np.zeros((1, 0), dtype=np.float32),
[0, 2],
False,
'zeros',
False,
np.zeros((2, 0), dtype=np.float32),
),
# single channel
(
np.zeros((16000,)),
None,
False,
None,
False,
np.zeros((1, 16000), dtype=np.float32),
),
(
np.zeros((1, 16000), np.float32),
None,
False,
None,
False,
np.zeros((1, 16000), dtype=np.float32),
),
(
np.zeros((1, 16000), np.float32),
None,
True,
None,
False,
np.zeros((1, 16000), dtype=np.float32),
),
(
np.zeros((1, 16000), np.float32),
0,
False,
None,
False,
np.zeros((1, 16000), dtype=np.float32),
),
(
np.zeros((1, 16000), np.float32),
0,
True,
None,
False,
np.zeros((1, 16000), dtype=np.float32),
),
(
np.ones((1, 16000), np.float32),
0,
True,
'zeros',
False,
np.ones((1, 16000), dtype=np.float32),
),
(
np.ones((1, 16000), np.float32),
1,
True,
'repeat',
False,
np.ones((1, 16000), dtype=np.float32),
),
(
np.ones((1, 16000), np.float32),
1,
True,
'zeros',
False,
np.zeros((1, 16000), dtype=np.float32),
),
(
np.ones((1, 16000), np.float32),
-2,
True,
'zeros',
False,
np.ones((1, 16000), dtype=np.float32),
),
(
np.ones((1, 16000), np.float32),
[0, 2],
False,
'zeros',
False,
np.concatenate(
[
np.ones((1, 16000), dtype=np.float32),
np.zeros((1, 16000), dtype=np.float32),
]
),
),
(
np.ones((1, 16000), np.float32),
[0, 2],
True,
'zeros',
False,
0.5 * np.ones((1, 16000), dtype=np.float32),
),
# multiple channels
(
set_ones(np.zeros((4, 16000), np.float32), 2),
2,
False,
None,
False,
np.ones((1, 16000), dtype=np.float32),
),
(
set_ones(np.zeros((4, 16000), np.float32), -1),
-1,
False,
None,
False,
np.ones((1, 16000), dtype=np.float32),
),
(
set_ones(np.zeros((4, 16000), np.float32), [1, 3]),
[1, 3],
False,
None,
False,
np.ones((2, 16000), dtype=np.float32),
),
(
set_ones(np.zeros((4, 16000), np.float32), [0, 1, 2, 3]),
[0, 1, 2, 3],
False,
None,
False,
np.ones((4, 16000), dtype=np.float32),
),
(
set_ones(np.zeros((4, 16000), np.float32), [0, 1, 2]),
range(3),
False,
None,
False,
np.ones((3, 16000), dtype=np.float32),
),
(
set_ones(np.zeros((3, 16000), np.float32), 0),
[1, 0, 0],
False,
None,
False,
set_ones(np.zeros((3, 16000), np.float32), [1, 2]),
),
(
set_ones(np.zeros((3, 16000), np.float32), 0),
[3, 0, 0],
False,
'zeros',
False,
set_ones(np.zeros((3, 16000), np.float32), [1, 2]),
),
(
set_ones(np.zeros((3, 16000), np.float32), 0),
[3, 0, 0],
False,
'repeat',
False,
np.ones((3, 16000), np.float32),
),
(
set_ones(np.zeros((3, 16000), np.float32), 0),
[-6, 0, 0],
False,
'repeat',
False,
np.ones((3, 16000), np.float32),
),
# multiple channels with mixdown
(
audresample.am_fm_synth(16000, 2, 16000),
None,
True,
None,
False,
mixdown(audresample.am_fm_synth(16000, 2, 16000)),
),
(
audresample.am_fm_synth(16000, 3, 16000),
[0, 1],
True,
None,
False,
mixdown(audresample.am_fm_synth(16000, 2, 16000)),
),
# always copy
(
np.zeros((1, 16000), dtype=np.float32),
None,
False,
None,
True,
np.zeros((1, 16000), dtype=np.float32),
),
# wrong channel index
pytest.param(
np.zeros((2, 16000)),
2,
False,
None,
False,
None,
marks=pytest.mark.xfail(raises=ValueError),
),
pytest.param(
np.zeros((2, 16000)),
[0, 1, 2],
False,
None,
False,
None,
marks=pytest.mark.xfail(raises=ValueError),
),
# wrong input shape
pytest.param(
np.zeros((16000, 2, 3)),
None,
False,
None,
False,
None,
marks=pytest.mark.xfail(raises=RuntimeError),
),
# wrong upmix type
pytest.param(
np.zeros((2, 16000)),
2,
False,
'fancy',
False,
None,
marks=pytest.mark.xfail(raises=ValueError),
),
]
)
def test_resample_signal(
signal,
channels,
mixdown,
upmix,
always_copy,
expect,
):
result = audresample.remix(
signal,
channels,
mixdown,
upmix=upmix,
always_copy=always_copy,
)
np.testing.assert_equal(result, expect)
if signal.size > 0 and\
channels is None and\
not mixdown and\
signal.dtype == np.float32:
if always_copy:
assert id(signal) != id(result)
else:
assert id(signal) == id(result)
|
[
"audresample.am_fm_synth",
"audresample.remix",
"numpy.zeros",
"numpy.ones",
"numpy.mean",
"numpy.testing.assert_equal",
"pytest.mark.xfail"
] |
[((7552, 7639), 'audresample.remix', 'audresample.remix', (['signal', 'channels', 'mixdown'], {'upmix': 'upmix', 'always_copy': 'always_copy'}), '(signal, channels, mixdown, upmix=upmix, always_copy=\n always_copy)\n', (7569, 7639), False, 'import audresample\n'), ((7686, 7725), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['result', 'expect'], {}), '(result, expect)\n', (7709, 7725), True, 'import numpy as np\n'), ((247, 270), 'numpy.mean', 'np.mean', (['signal'], {'axis': '(0)'}), '(signal, axis=0)\n', (254, 270), True, 'import numpy as np\n'), ((412, 441), 'numpy.zeros', 'np.zeros', (['(0)'], {'dtype': 'np.float32'}), '(0, dtype=np.float32)\n', (420, 441), True, 'import numpy as np\n'), ((529, 563), 'numpy.zeros', 'np.zeros', (['(1, 0)'], {'dtype': 'np.float32'}), '((1, 0), dtype=np.float32)\n', (537, 563), True, 'import numpy as np\n'), ((598, 632), 'numpy.zeros', 'np.zeros', (['(1, 0)'], {'dtype': 'np.float32'}), '((1, 0), dtype=np.float32)\n', (606, 632), True, 'import numpy as np\n'), ((720, 754), 'numpy.zeros', 'np.zeros', (['(1, 0)'], {'dtype': 'np.float32'}), '((1, 0), dtype=np.float32)\n', (728, 754), True, 'import numpy as np\n'), ((789, 823), 'numpy.zeros', 'np.zeros', (['(1, 0)'], {'dtype': 'np.float32'}), '((1, 0), dtype=np.float32)\n', (797, 823), True, 'import numpy as np\n'), ((908, 942), 'numpy.zeros', 'np.zeros', (['(1, 0)'], {'dtype': 'np.float32'}), '((1, 0), dtype=np.float32)\n', (916, 942), True, 'import numpy as np\n'), ((977, 1011), 'numpy.zeros', 'np.zeros', (['(1, 0)'], {'dtype': 'np.float32'}), '((1, 0), dtype=np.float32)\n', (985, 1011), True, 'import numpy as np\n'), ((1100, 1134), 'numpy.zeros', 'np.zeros', (['(1, 0)'], {'dtype': 'np.float32'}), '((1, 0), dtype=np.float32)\n', (1108, 1134), True, 'import numpy as np\n'), ((1169, 1203), 'numpy.zeros', 'np.zeros', (['(1, 0)'], {'dtype': 'np.float32'}), '((1, 0), dtype=np.float32)\n', (1177, 1203), True, 'import numpy as np\n'), ((1291, 1325), 'numpy.zeros', 'np.zeros', (['(1, 0)'], {'dtype': 'np.float32'}), '((1, 0), dtype=np.float32)\n', (1299, 1325), True, 'import numpy as np\n'), ((1360, 1394), 'numpy.zeros', 'np.zeros', (['(1, 0)'], {'dtype': 'np.float32'}), '((1, 0), dtype=np.float32)\n', (1368, 1394), True, 'import numpy as np\n'), ((1487, 1521), 'numpy.zeros', 'np.zeros', (['(2, 0)'], {'dtype': 'np.float32'}), '((2, 0), dtype=np.float32)\n', (1495, 1521), True, 'import numpy as np\n'), ((1581, 1599), 'numpy.zeros', 'np.zeros', (['(16000,)'], {}), '((16000,))\n', (1589, 1599), True, 'import numpy as np\n'), ((1687, 1725), 'numpy.zeros', 'np.zeros', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (1695, 1725), True, 'import numpy as np\n'), ((1760, 1792), 'numpy.zeros', 'np.zeros', (['(1, 16000)', 'np.float32'], {}), '((1, 16000), np.float32)\n', (1768, 1792), True, 'import numpy as np\n'), ((1880, 1918), 'numpy.zeros', 'np.zeros', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (1888, 1918), True, 'import numpy as np\n'), ((1953, 1985), 'numpy.zeros', 'np.zeros', (['(1, 16000)', 'np.float32'], {}), '((1, 16000), np.float32)\n', (1961, 1985), True, 'import numpy as np\n'), ((2072, 2110), 'numpy.zeros', 'np.zeros', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (2080, 2110), True, 'import numpy as np\n'), ((2145, 2177), 'numpy.zeros', 'np.zeros', (['(1, 16000)', 'np.float32'], {}), '((1, 16000), np.float32)\n', (2153, 2177), True, 'import numpy as np\n'), ((2262, 2300), 'numpy.zeros', 'np.zeros', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (2270, 2300), True, 'import numpy as np\n'), ((2335, 2367), 'numpy.zeros', 'np.zeros', (['(1, 16000)', 'np.float32'], {}), '((1, 16000), np.float32)\n', (2343, 2367), True, 'import numpy as np\n'), ((2451, 2489), 'numpy.zeros', 'np.zeros', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (2459, 2489), True, 'import numpy as np\n'), ((2524, 2555), 'numpy.ones', 'np.ones', (['(1, 16000)', 'np.float32'], {}), '((1, 16000), np.float32)\n', (2531, 2555), True, 'import numpy as np\n'), ((2642, 2679), 'numpy.ones', 'np.ones', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (2649, 2679), True, 'import numpy as np\n'), ((2714, 2745), 'numpy.ones', 'np.ones', (['(1, 16000)', 'np.float32'], {}), '((1, 16000), np.float32)\n', (2721, 2745), True, 'import numpy as np\n'), ((2833, 2870), 'numpy.ones', 'np.ones', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (2840, 2870), True, 'import numpy as np\n'), ((2905, 2936), 'numpy.ones', 'np.ones', (['(1, 16000)', 'np.float32'], {}), '((1, 16000), np.float32)\n', (2912, 2936), True, 'import numpy as np\n'), ((3023, 3061), 'numpy.zeros', 'np.zeros', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (3031, 3061), True, 'import numpy as np\n'), ((3096, 3127), 'numpy.ones', 'np.ones', (['(1, 16000)', 'np.float32'], {}), '((1, 16000), np.float32)\n', (3103, 3127), True, 'import numpy as np\n'), ((3215, 3252), 'numpy.ones', 'np.ones', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (3222, 3252), True, 'import numpy as np\n'), ((3287, 3318), 'numpy.ones', 'np.ones', (['(1, 16000)', 'np.float32'], {}), '((1, 16000), np.float32)\n', (3294, 3318), True, 'import numpy as np\n'), ((3630, 3661), 'numpy.ones', 'np.ones', (['(1, 16000)', 'np.float32'], {}), '((1, 16000), np.float32)\n', (3637, 3661), True, 'import numpy as np\n'), ((3989, 4026), 'numpy.ones', 'np.ones', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (3996, 4026), True, 'import numpy as np\n'), ((4193, 4230), 'numpy.ones', 'np.ones', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (4200, 4230), True, 'import numpy as np\n'), ((4405, 4442), 'numpy.ones', 'np.ones', (['(2, 16000)'], {'dtype': 'np.float32'}), '((2, 16000), dtype=np.float32)\n', (4412, 4442), True, 'import numpy as np\n'), ((4629, 4666), 'numpy.ones', 'np.ones', (['(4, 16000)'], {'dtype': 'np.float32'}), '((4, 16000), dtype=np.float32)\n', (4636, 4666), True, 'import numpy as np\n'), ((4846, 4883), 'numpy.ones', 'np.ones', (['(3, 16000)'], {'dtype': 'np.float32'}), '((3, 16000), dtype=np.float32)\n', (4853, 4883), True, 'import numpy as np\n'), ((5509, 5540), 'numpy.ones', 'np.ones', (['(3, 16000)', 'np.float32'], {}), '((3, 16000), np.float32)\n', (5516, 5540), True, 'import numpy as np\n'), ((5718, 5749), 'numpy.ones', 'np.ones', (['(3, 16000)', 'np.float32'], {}), '((3, 16000), np.float32)\n', (5725, 5749), True, 'import numpy as np\n'), ((5825, 5865), 'audresample.am_fm_synth', 'audresample.am_fm_synth', (['(16000)', '(2)', '(16000)'], {}), '(16000, 2, 16000)\n', (5848, 5865), False, 'import audresample\n'), ((6036, 6076), 'audresample.am_fm_synth', 'audresample.am_fm_synth', (['(16000)', '(3)', '(16000)'], {}), '(16000, 3, 16000)\n', (6059, 6076), False, 'import audresample\n'), ((6271, 6309), 'numpy.zeros', 'np.zeros', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (6279, 6309), True, 'import numpy as np\n'), ((6396, 6434), 'numpy.zeros', 'np.zeros', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (6404, 6434), True, 'import numpy as np\n'), ((6511, 6531), 'numpy.zeros', 'np.zeros', (['(2, 16000)'], {}), '((2, 16000))\n', (6519, 6531), True, 'import numpy as np\n'), ((6723, 6743), 'numpy.zeros', 'np.zeros', (['(2, 16000)'], {}), '((2, 16000))\n', (6731, 6743), True, 'import numpy as np\n'), ((6971, 6994), 'numpy.zeros', 'np.zeros', (['(16000, 2, 3)'], {}), '((16000, 2, 3))\n', (6979, 6994), True, 'import numpy as np\n'), ((7218, 7238), 'numpy.zeros', 'np.zeros', (['(2, 16000)'], {}), '((2, 16000))\n', (7226, 7238), True, 'import numpy as np\n'), ((3759, 3796), 'numpy.ones', 'np.ones', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (3766, 3796), True, 'import numpy as np\n'), ((3868, 3900), 'numpy.zeros', 'np.zeros', (['(4, 16000)', 'np.float32'], {}), '((4, 16000), np.float32)\n', (3876, 3900), True, 'import numpy as np\n'), ((4070, 4102), 'numpy.zeros', 'np.zeros', (['(4, 16000)', 'np.float32'], {}), '((4, 16000), np.float32)\n', (4078, 4102), True, 'import numpy as np\n'), ((4274, 4306), 'numpy.zeros', 'np.zeros', (['(4, 16000)', 'np.float32'], {}), '((4, 16000), np.float32)\n', (4282, 4306), True, 'import numpy as np\n'), ((4486, 4518), 'numpy.zeros', 'np.zeros', (['(4, 16000)', 'np.float32'], {}), '((4, 16000), np.float32)\n', (4494, 4518), True, 'import numpy as np\n'), ((4710, 4742), 'numpy.zeros', 'np.zeros', (['(4, 16000)', 'np.float32'], {}), '((4, 16000), np.float32)\n', (4718, 4742), True, 'import numpy as np\n'), ((4927, 4959), 'numpy.zeros', 'np.zeros', (['(3, 16000)', 'np.float32'], {}), '((3, 16000), np.float32)\n', (4935, 4959), True, 'import numpy as np\n'), ((5065, 5097), 'numpy.zeros', 'np.zeros', (['(3, 16000)', 'np.float32'], {}), '((3, 16000), np.float32)\n', (5073, 5097), True, 'import numpy as np\n'), ((5150, 5182), 'numpy.zeros', 'np.zeros', (['(3, 16000)', 'np.float32'], {}), '((3, 16000), np.float32)\n', (5158, 5182), True, 'import numpy as np\n'), ((5291, 5323), 'numpy.zeros', 'np.zeros', (['(3, 16000)', 'np.float32'], {}), '((3, 16000), np.float32)\n', (5299, 5323), True, 'import numpy as np\n'), ((5376, 5408), 'numpy.zeros', 'np.zeros', (['(3, 16000)', 'np.float32'], {}), '((3, 16000), np.float32)\n', (5384, 5408), True, 'import numpy as np\n'), ((5584, 5616), 'numpy.zeros', 'np.zeros', (['(3, 16000)', 'np.float32'], {}), '((3, 16000), np.float32)\n', (5592, 5616), True, 'import numpy as np\n'), ((5960, 6000), 'audresample.am_fm_synth', 'audresample.am_fm_synth', (['(16000)', '(2)', '(16000)'], {}), '(16000, 2, 16000)\n', (5983, 6000), False, 'import audresample\n'), ((6173, 6213), 'audresample.am_fm_synth', 'audresample.am_fm_synth', (['(16000)', '(2)', '(16000)'], {}), '(16000, 2, 16000)\n', (6196, 6213), False, 'import audresample\n'), ((6640, 6676), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'raises': 'ValueError'}), '(raises=ValueError)\n', (6657, 6676), False, 'import pytest\n'), ((6860, 6896), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'raises': 'ValueError'}), '(raises=ValueError)\n', (6877, 6896), False, 'import pytest\n'), ((7106, 7144), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'raises': 'RuntimeError'}), '(raises=RuntimeError)\n', (7123, 7144), False, 'import pytest\n'), ((7350, 7386), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'raises': 'ValueError'}), '(raises=ValueError)\n', (7367, 7386), False, 'import pytest\n'), ((3465, 3502), 'numpy.ones', 'np.ones', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (3472, 3502), True, 'import numpy as np\n'), ((3524, 3562), 'numpy.zeros', 'np.zeros', (['(1, 16000)'], {'dtype': 'np.float32'}), '((1, 16000), dtype=np.float32)\n', (3532, 3562), True, 'import numpy as np\n')]
|
from typing import List, Dict, Optional
from jass.agents.agent import Agent
from jass.agents.state import PlayCardState, ChooseTrumpState
from jass.logic.card import Card, Suit
from jass.logic.exceptions import IllegalMoveError
from jass.logic.hand import Hand
class Player:
def __init__(self, name: str, agent: Agent):
self.__name: str = name
self.__agent: Agent = agent
        self.__hand: Optional[Hand] = None
@property
def hand_cards(self) -> List[Card]:
return self.__hand.cards
def give(self, hand: Hand) -> None:
self.__hand = hand
def play(self, trump: Suit, trump_chooser: 'Player', players: List['Player'], trick_cards: Dict['Player', Card],
round_tricks: List[Dict['Player', Card]]) -> Card:
assert self.__hand is not None
assert self == players[0]
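        # players is seated in play order starting from this player; gather the cards already on the table this trick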
cards_on_table = [trick_cards[p] for p in players if p in trick_cards]
cards_playable = self.__hand.playable_cards(cards_played=cards_on_table, trump=trump)
state = PlayCardState(
trick_trump=trump,
trump_chooser_idx=players.index(trump_chooser),
player_hand=self.__hand.cards,
playable_cards=cards_playable,
trick_history=cards_on_table,
round_history=[[trick[p] for p in players] for trick in round_tricks]
)
card = self.__agent.play_card(state).card_to_play
self.__hand.play(card, cards_played=cards_on_table, trump=trump)
return card
def choose_trump(self, can_chibre) -> Optional[Suit]:
if self.__hand is None:
raise IllegalMoveError('Cannot choose trump before having cards')
state = ChooseTrumpState(self.__hand.cards, can_chibre=can_chibre) # todo: allow chibre
return self.__agent.choose_trump(state).suit
def reward(self, points: int, is_last_trick: bool) -> None:
self.__agent.trick_end(reward=points, done=is_last_trick)
def has_7_diamonds(self) -> bool:
return self.__hand.has(Card(7, Suit.diamonds))
def __eq__(self, other: 'Player') -> bool:
return self.__name == other.__name
def __hash__(self) -> int:
return hash(self.__name)
def __repr__(self) -> str:
return self.__name
|
[
"jass.logic.card.Card",
"jass.agents.state.ChooseTrumpState",
"jass.logic.exceptions.IllegalMoveError"
] |
[((1692, 1750), 'jass.agents.state.ChooseTrumpState', 'ChooseTrumpState', (['self.__hand.cards'], {'can_chibre': 'can_chibre'}), '(self.__hand.cards, can_chibre=can_chibre)\n', (1708, 1750), False, 'from jass.agents.state import PlayCardState, ChooseTrumpState\n'), ((1616, 1675), 'jass.logic.exceptions.IllegalMoveError', 'IllegalMoveError', (['"""Cannot choose trump before having cards"""'], {}), "('Cannot choose trump before having cards')\n", (1632, 1675), False, 'from jass.logic.exceptions import IllegalMoveError\n'), ((2027, 2049), 'jass.logic.card.Card', 'Card', (['(7)', 'Suit.diamonds'], {}), '(7, Suit.diamonds)\n', (2031, 2049), False, 'from jass.logic.card import Card, Suit\n')]
|
# Generated by Django 2.0 on 2019-02-25 19:27
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('portfolio', '0007_auto_20190225_1849'),
]
operations = [
migrations.AddField(
model_name='portfoliopage',
name='git_url',
field=models.URLField(blank=True, null=True),
),
migrations.AddField(
model_name='portfoliopage',
name='linkedin_url',
field=models.URLField(blank=True, null=True),
),
migrations.AddField(
model_name='portfoliopage',
name='source',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
|
[
"django.db.models.CharField",
"django.db.models.URLField"
] |
[((342, 380), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (357, 380), False, 'from django.db import migrations, models\n'), ((513, 551), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (528, 551), False, 'from django.db import migrations, models\n'), ((678, 733), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(255)', 'null': '(True)'}), '(blank=True, max_length=255, null=True)\n', (694, 733), False, 'from django.db import migrations, models\n')]
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Category'
db.create_table('panda_category', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('slug', self.gf('django.db.models.fields.SlugField')(max_length=256, db_index=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=64)),
))
db.send_create_signal('panda', ['Category'])
# Adding model 'TaskStatus'
db.create_table('panda_taskstatus', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('task_name', self.gf('django.db.models.fields.CharField')(max_length=255)),
('status', self.gf('django.db.models.fields.CharField')(default='PENDING', max_length=50)),
('message', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('start', self.gf('django.db.models.fields.DateTimeField')(null=True)),
('end', self.gf('django.db.models.fields.DateTimeField')(null=True)),
('traceback', self.gf('django.db.models.fields.TextField')(default=None, null=True, blank=True)),
('creator', self.gf('django.db.models.fields.related.ForeignKey')(related_name='tasks', null=True, to=orm['auth.User'])),
))
db.send_create_signal('panda', ['TaskStatus'])
# Adding model 'Dataset'
db.create_table('panda_dataset', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('slug', self.gf('django.db.models.fields.SlugField')(max_length=256, db_index=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=256)),
('description', self.gf('django.db.models.fields.TextField')(blank=True)),
('initial_upload', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='initial_upload_for', null=True, to=orm['panda.DataUpload'])),
('columns', self.gf('panda.fields.JSONField')(default=None, null=True)),
('sample_data', self.gf('panda.fields.JSONField')(default=None, null=True)),
('row_count', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('current_task', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['panda.TaskStatus'], null=True, blank=True)),
('creation_date', self.gf('django.db.models.fields.DateTimeField')(null=True)),
('creator', self.gf('django.db.models.fields.related.ForeignKey')(related_name='datasets', to=orm['auth.User'])),
('last_modified', self.gf('django.db.models.fields.DateTimeField')(default=None, null=True, blank=True)),
('last_modification', self.gf('django.db.models.fields.TextField')(default=None, null=True, blank=True)),
('last_modified_by', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
))
db.send_create_signal('panda', ['Dataset'])
# Adding M2M table for field categories on 'Dataset'
db.create_table('panda_dataset_categories', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('dataset', models.ForeignKey(orm['panda.dataset'], null=False)),
('category', models.ForeignKey(orm['panda.category'], null=False))
))
db.create_unique('panda_dataset_categories', ['dataset_id', 'category_id'])
# Adding model 'DataUpload'
db.create_table('panda_dataupload', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('filename', self.gf('django.db.models.fields.CharField')(max_length=256)),
('original_filename', self.gf('django.db.models.fields.CharField')(max_length=256)),
('size', self.gf('django.db.models.fields.IntegerField')()),
('creator', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('creation_date', self.gf('django.db.models.fields.DateTimeField')()),
('dataset', self.gf('django.db.models.fields.related.ForeignKey')(related_name='data_uploads', null=True, to=orm['panda.Dataset'])),
('data_type', self.gf('django.db.models.fields.CharField')(max_length=4, null=True, blank=True)),
('encoding', self.gf('django.db.models.fields.CharField')(default='utf-8', max_length=32)),
('dialect', self.gf('panda.fields.JSONField')(null=True)),
('columns', self.gf('panda.fields.JSONField')(null=True)),
('sample_data', self.gf('panda.fields.JSONField')(null=True)),
('imported', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('panda', ['DataUpload'])
# Adding model 'Export'
db.create_table('panda_export', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('filename', self.gf('django.db.models.fields.CharField')(max_length=256)),
('original_filename', self.gf('django.db.models.fields.CharField')(max_length=256)),
('size', self.gf('django.db.models.fields.IntegerField')()),
('creator', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('creation_date', self.gf('django.db.models.fields.DateTimeField')()),
('dataset', self.gf('django.db.models.fields.related.ForeignKey')(related_name='exports', to=orm['panda.Dataset'])),
))
db.send_create_signal('panda', ['Export'])
# Adding model 'Notification'
db.create_table('panda_notification', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('recipient', self.gf('django.db.models.fields.related.ForeignKey')(related_name='notifications', to=orm['auth.User'])),
('message', self.gf('django.db.models.fields.TextField')()),
('type', self.gf('django.db.models.fields.CharField')(default='Info', max_length=16)),
('sent_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('read_at', self.gf('django.db.models.fields.DateTimeField')(default=None, null=True, blank=True)),
('related_task', self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['panda.TaskStatus'], null=True)),
('related_dataset', self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['panda.Dataset'], null=True)),
))
db.send_create_signal('panda', ['Notification'])
# Adding model 'RelatedUpload'
db.create_table('panda_relatedupload', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('filename', self.gf('django.db.models.fields.CharField')(max_length=256)),
('original_filename', self.gf('django.db.models.fields.CharField')(max_length=256)),
('size', self.gf('django.db.models.fields.IntegerField')()),
('creator', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('creation_date', self.gf('django.db.models.fields.DateTimeField')()),
('dataset', self.gf('django.db.models.fields.related.ForeignKey')(related_name='related_uploads', to=orm['panda.Dataset'])),
))
db.send_create_signal('panda', ['RelatedUpload'])
# Adding model 'UserProfile'
db.create_table('panda_userprofile', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['auth.User'], unique=True)),
('activation_key', self.gf('django.db.models.fields.CharField')(max_length=40)),
))
db.send_create_signal('panda', ['UserProfile'])
def backwards(self, orm):
# Deleting model 'Category'
db.delete_table('panda_category')
# Deleting model 'TaskStatus'
db.delete_table('panda_taskstatus')
# Deleting model 'Dataset'
db.delete_table('panda_dataset')
# Removing M2M table for field categories on 'Dataset'
db.delete_table('panda_dataset_categories')
# Deleting model 'DataUpload'
db.delete_table('panda_dataupload')
# Deleting model 'Export'
db.delete_table('panda_export')
# Deleting model 'Notification'
db.delete_table('panda_notification')
# Deleting model 'RelatedUpload'
db.delete_table('panda_relatedupload')
# Deleting model 'UserProfile'
db.delete_table('panda_userprofile')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'panda.category': {
'Meta': {'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '256', 'db_index': 'True'})
},
'panda.dataset': {
'Meta': {'ordering': "['-creation_date']", 'object_name': 'Dataset'},
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'datasets'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['panda.Category']"}),
'columns': ('panda.fields.JSONField', [], {'default': 'None', 'null': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'datasets'", 'to': "orm['auth.User']"}),
'current_task': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['panda.TaskStatus']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initial_upload': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'initial_upload_for'", 'null': 'True', 'to': "orm['panda.DataUpload']"}),
'last_modification': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'last_modified_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'row_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'sample_data': ('panda.fields.JSONField', [], {'default': 'None', 'null': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '256', 'db_index': 'True'})
},
'panda.dataupload': {
'Meta': {'ordering': "['creation_date']", 'object_name': 'DataUpload'},
'columns': ('panda.fields.JSONField', [], {'null': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'data_type': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'dataset': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'data_uploads'", 'null': 'True', 'to': "orm['panda.Dataset']"}),
'dialect': ('panda.fields.JSONField', [], {'null': 'True'}),
'encoding': ('django.db.models.fields.CharField', [], {'default': "'utf-8'", 'max_length': '32'}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'imported': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'sample_data': ('panda.fields.JSONField', [], {'null': 'True'}),
'size': ('django.db.models.fields.IntegerField', [], {})
},
'panda.export': {
'Meta': {'ordering': "['creation_date']", 'object_name': 'Export'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'dataset': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'exports'", 'to': "orm['panda.Dataset']"}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'size': ('django.db.models.fields.IntegerField', [], {})
},
'panda.notification': {
'Meta': {'ordering': "['-sent_at']", 'object_name': 'Notification'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'read_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'recipient': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notifications'", 'to': "orm['auth.User']"}),
'related_dataset': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['panda.Dataset']", 'null': 'True'}),
'related_task': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['panda.TaskStatus']", 'null': 'True'}),
'sent_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Info'", 'max_length': '16'})
},
'panda.relatedupload': {
'Meta': {'ordering': "['creation_date']", 'object_name': 'RelatedUpload'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'dataset': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'related_uploads'", 'to': "orm['panda.Dataset']"}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'size': ('django.db.models.fields.IntegerField', [], {})
},
'panda.taskstatus': {
'Meta': {'object_name': 'TaskStatus'},
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tasks'", 'null': 'True', 'to': "orm['auth.User']"}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'PENDING'", 'max_length': '50'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'traceback': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'})
},
'panda.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'activation_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['panda']
|
[
"south.db.db.delete_table",
"south.db.db.create_unique",
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"south.db.db.send_create_signal"
] |
[((561, 605), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""panda"""', "['Category']"], {}), "('panda', ['Category'])\n", (582, 605), False, 'from south.db import db\n'), ((1494, 1540), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""panda"""', "['TaskStatus']"], {}), "('panda', ['TaskStatus'])\n", (1515, 1540), False, 'from south.db import db\n'), ((3160, 3203), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""panda"""', "['Dataset']"], {}), "('panda', ['Dataset'])\n", (3181, 3203), False, 'from south.db import db\n'), ((3590, 3665), 'south.db.db.create_unique', 'db.create_unique', (['"""panda_dataset_categories"""', "['dataset_id', 'category_id']"], {}), "('panda_dataset_categories', ['dataset_id', 'category_id'])\n", (3606, 3665), False, 'from south.db import db\n'), ((4960, 5006), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""panda"""', "['DataUpload']"], {}), "('panda', ['DataUpload'])\n", (4981, 5006), False, 'from south.db import db\n'), ((5756, 5798), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""panda"""', "['Export']"], {}), "('panda', ['Export'])\n", (5777, 5798), False, 'from south.db import db\n'), ((6784, 6832), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""panda"""', "['Notification']"], {}), "('panda', ['Notification'])\n", (6805, 6832), False, 'from south.db import db\n'), ((7604, 7653), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""panda"""', "['RelatedUpload']"], {}), "('panda', ['RelatedUpload'])\n", (7625, 7653), False, 'from south.db import db\n'), ((8049, 8096), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""panda"""', "['UserProfile']"], {}), "('panda', ['UserProfile'])\n", (8070, 8096), False, 'from south.db import db\n'), ((8182, 8215), 'south.db.db.delete_table', 'db.delete_table', (['"""panda_category"""'], {}), "('panda_category')\n", (8197, 8215), False, 'from south.db import db\n'), ((8263, 8298), 'south.db.db.delete_table', 'db.delete_table', (['"""panda_taskstatus"""'], {}), "('panda_taskstatus')\n", (8278, 8298), False, 'from south.db import db\n'), ((8343, 8375), 'south.db.db.delete_table', 'db.delete_table', (['"""panda_dataset"""'], {}), "('panda_dataset')\n", (8358, 8375), False, 'from south.db import db\n'), ((8448, 8491), 'south.db.db.delete_table', 'db.delete_table', (['"""panda_dataset_categories"""'], {}), "('panda_dataset_categories')\n", (8463, 8491), False, 'from south.db import db\n'), ((8539, 8574), 'south.db.db.delete_table', 'db.delete_table', (['"""panda_dataupload"""'], {}), "('panda_dataupload')\n", (8554, 8574), False, 'from south.db import db\n'), ((8618, 8649), 'south.db.db.delete_table', 'db.delete_table', (['"""panda_export"""'], {}), "('panda_export')\n", (8633, 8649), False, 'from south.db import db\n'), ((8699, 8736), 'south.db.db.delete_table', 'db.delete_table', (['"""panda_notification"""'], {}), "('panda_notification')\n", (8714, 8736), False, 'from south.db import db\n'), ((8787, 8825), 'south.db.db.delete_table', 'db.delete_table', (['"""panda_relatedupload"""'], {}), "('panda_relatedupload')\n", (8802, 8825), False, 'from south.db import db\n'), ((8874, 8910), 'south.db.db.delete_table', 'db.delete_table', (['"""panda_userprofile"""'], {}), "('panda_userprofile')\n", (8889, 8910), False, 'from south.db import db\n'), ((3339, 3411), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'primary_key': '(True)', 'auto_created': '(True)'}), 
"(verbose_name='ID', primary_key=True, auto_created=True)\n", (3355, 3411), False, 'from django.db import models\n'), ((3438, 3489), 'django.db.models.ForeignKey', 'models.ForeignKey', (["orm['panda.dataset']"], {'null': '(False)'}), "(orm['panda.dataset'], null=False)\n", (3455, 3489), False, 'from django.db import models\n'), ((3517, 3569), 'django.db.models.ForeignKey', 'models.ForeignKey', (["orm['panda.category']"], {'null': '(False)'}), "(orm['panda.category'], null=False)\n", (3534, 3569), False, 'from django.db import models\n')]
|
import requests
import json
import base64
import numpy as np
import matplotlib.pyplot as plt
import pickle
import imageio
def get_jsonstr(url="http://172.16.58.3:8089/api/problem?stuid=031804104"):
    response = requests.get(url)
jsonstr = json.loads(response.text)
return jsonstr
def split_image(img): # input: the image as a numpy matrix
    '''Split the image into nine 300x300 blocks'''
imgs = []
for i in range(0,900,300):
for j in range(0,900,300):
imgs.append(img[i:i+300,j:j+300].tolist())
    return imgs # returns the nine image-block matrices as a list
def encode_image(title_image,store_image):
    '''Encode the image as digits'''
    current_table = [] # digit encoding of the table for this image
    ans_type = list(range(1,10)) # candidate answer types
for ls_title in title_image:
try:
pos_code = store_image.index(ls_title)+1
current_table.append(pos_code)
ans_type.remove(pos_code)
        except ValueError: # .index raises ValueError when the blank block has no match
            current_table.append(0)
    return current_table, ans_type[0] # return the table encoding and the answer type
def main(json_image):
    # load the list of borderless-glyph images, each pre-split into 9 blocks
save_name = 'ls_img.pkl'
pkl_file = open(save_name, 'rb')
store_images = pickle.load(pkl_file)
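    # store_images: one entry per known borderless glyph, each a list of nine 300x300 blocks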
pkl_file.close()
    # fetch the puzzle image
    bs64_img = base64.b64decode(json_image) # the image is base64-encoded
np_img = imageio.imread(bs64_img)
title_image = split_image(np_img)
    for ls_store in store_images: # iterate over all stored borderless glyphs
count = 0
        for ls_title in title_image: # iterate over the puzzle image blocks
            if (np.array(ls_title) == 255).all(): # the cut-out blank block
                continue # skip it
            if ls_title in ls_store: # this block occurs in the glyph
count += 1
else:
break
        if count == 8: # all blocks except the blank match: this is the glyph, so encode the puzzle blocks
current_table, ans_type = encode_image(title_image, ls_store)
return current_table,ans_type
if __name__ == "__main__":
    # load the list of borderless-glyph images, each pre-split into 9 blocks
save_name = 'ls_img.pkl'
pkl_file = open(save_name,'rb')
store_images = pickle.load(pkl_file)
pkl_file.close()
    # fetch the puzzle image
url = "http://47.102.118.1:8089/api/problem?stuid=031804104"
response = requests.get(url)
jsonstr = json.loads(response.text)
    bs64_img = base64.b64decode(jsonstr['img']) # the image is base64-encoded
np_img = imageio.imread(bs64_img)
title_image = split_image(np_img)
plt.imshow(np_img)
plt.show()
    for ls_store in store_images: # iterate over the stored borderless glyphs
count = 0
        for ls_title in title_image: # iterate over the puzzle image blocks
            if (np.array(ls_title) == 255).all(): # the cut-out blank block
                continue # skip it
            if ls_title in ls_store: # this block occurs in the glyph
count += 1
else:
break
        if count == 8: # all blocks except the blank match: this is the glyph, so encode the puzzle blocks
current_table,ans_type = encode_image(title_image,ls_store)
print(current_table, ans_type)
ls = [331,332,333,334,335,336,337,338,339]
for i in range(9):
plt.subplot(ls[i])
plt.imshow(np.array(ls_store[i]))
plt.show()
for i in range(9):
plt.subplot(ls[i])
plt.imshow(np.array(title_image[i]))
plt.show()
break
|
[
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.show",
"json.loads",
"matplotlib.pyplot.imshow",
"imageio.imread",
"base64.b64decode",
"pickle.load",
"numpy.array",
"requests.get"
] |
[((234, 251), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (246, 251), False, 'import requests\n'), ((267, 292), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (277, 292), False, 'import json\n'), ((1137, 1158), 'pickle.load', 'pickle.load', (['pkl_file'], {}), '(pkl_file)\n', (1148, 1158), False, 'import pickle\n'), ((1213, 1241), 'base64.b64decode', 'base64.b64decode', (['json_image'], {}), '(json_image)\n', (1229, 1241), False, 'import base64\n'), ((1271, 1295), 'imageio.imread', 'imageio.imread', (['bs64_img'], {}), '(bs64_img)\n', (1285, 1295), False, 'import imageio\n'), ((2007, 2028), 'pickle.load', 'pickle.load', (['pkl_file'], {}), '(pkl_file)\n', (2018, 2028), False, 'import pickle\n'), ((2149, 2166), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (2161, 2166), False, 'import requests\n'), ((2182, 2207), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (2192, 2207), False, 'import json\n'), ((2224, 2256), 'base64.b64decode', 'base64.b64decode', (["jsonstr['img']"], {}), "(jsonstr['img'])\n", (2240, 2256), False, 'import base64\n'), ((2284, 2308), 'imageio.imread', 'imageio.imread', (['bs64_img'], {}), '(bs64_img)\n', (2298, 2308), False, 'import imageio\n'), ((2353, 2371), 'matplotlib.pyplot.imshow', 'plt.imshow', (['np_img'], {}), '(np_img)\n', (2363, 2371), True, 'import matplotlib.pyplot as plt\n'), ((2377, 2387), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2385, 2387), True, 'import matplotlib.pyplot as plt\n'), ((3121, 3131), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3129, 3131), True, 'import matplotlib.pyplot as plt\n'), ((3283, 3293), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3291, 3293), True, 'import matplotlib.pyplot as plt\n'), ((3030, 3048), 'matplotlib.pyplot.subplot', 'plt.subplot', (['ls[i]'], {}), '(ls[i])\n', (3041, 3048), True, 'import matplotlib.pyplot as plt\n'), ((3189, 3207), 'matplotlib.pyplot.subplot', 'plt.subplot', (['ls[i]'], {}), '(ls[i])\n', (3200, 3207), True, 'import matplotlib.pyplot as plt\n'), ((3081, 3102), 'numpy.array', 'np.array', (['ls_store[i]'], {}), '(ls_store[i])\n', (3089, 3102), True, 'import numpy as np\n'), ((3240, 3264), 'numpy.array', 'np.array', (['title_image[i]'], {}), '(title_image[i])\n', (3248, 3264), True, 'import numpy as np\n'), ((1472, 1490), 'numpy.array', 'np.array', (['ls_title'], {}), '(ls_title)\n', (1480, 1490), True, 'import numpy as np\n'), ((2523, 2541), 'numpy.array', 'np.array', (['ls_title'], {}), '(ls_title)\n', (2531, 2541), True, 'import numpy as np\n')]
|
import setuptools
with open('README.md', 'r') as readme_file:
long_description = readme_file.read()
with open('requirements.txt', 'r') as requirements_file:
requirements = requirements_file.read().splitlines()
setuptools.setup(
name="ipasc_tool",
version="0.1.3",
author="International Photoacoustic Standardisation Consortium (IPASC)",
description="Standardised Data Access Tool of IPASC",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
packages=setuptools.find_packages(include=["ipasc_tool", "ipasc_tool.*"]),
install_requires=requirements,
python_requires=">=3.7"
)
|
[
"setuptools.find_packages"
] |
[((540, 604), 'setuptools.find_packages', 'setuptools.find_packages', ([], {'include': "['ipasc_tool', 'ipasc_tool.*']"}), "(include=['ipasc_tool', 'ipasc_tool.*'])\n", (564, 604), False, 'import setuptools\n')]
|
from git_sentry.handlers.access_controlled_git_object import AccessControlledGitObject
from git_sentry.handlers.git_repo import GitRepo
from git_sentry.handlers.git_user import GitUser
class GitTeam(AccessControlledGitObject):
def __init__(self, git_object):
super().__init__(git_object)
def name(self):
return self._git_object.name
def login(self):
return self.name()
def as_dict(self):
return self._git_object.as_json()
def add_to_repo(self, repository_name, permission):
self._git_object.add_repository(repository_name, permission)
def repositories(self):
return [GitRepo(r) for r in self._git_object.repositories()]
def grant_access(self, user, role='member'):
if self.permission_for(user) != 'maintainer':
self._git_object.add_or_update_membership(user, role)
def revoke_access(self, username):
super().revoke_access(username)
def members(self, role=None):
return [GitUser(u) for u in self._git_object.members(role)]
def permission_for(self, username):
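        # check maintainer membership first so a maintainer is never reported as a plain member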
if any(m.login() == username for m in self.members('maintainer')):
return 'maintainer'
if any(m.login() == username for m in self.members('member')):
return 'member'
return None
def __eq__(self, other):
return self.name() == other.name()
def __repr__(self):
return f'GitTeam[{self.name()}]'
def __str__(self):
return f'GitTeam[{self.name()}]'
|
[
"git_sentry.handlers.git_repo.GitRepo",
"git_sentry.handlers.git_user.GitUser"
] |
[((646, 656), 'git_sentry.handlers.git_repo.GitRepo', 'GitRepo', (['r'], {}), '(r)\n', (653, 656), False, 'from git_sentry.handlers.git_repo import GitRepo\n'), ((1000, 1010), 'git_sentry.handlers.git_user.GitUser', 'GitUser', (['u'], {}), '(u)\n', (1007, 1010), False, 'from git_sentry.handlers.git_user import GitUser\n')]
|
import numpy as np
import argparse
import imutils
import cv2
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required = True, help = "Path to the image")
ap.add_argument("-i2", "--image2", required = True, help = "Path to the image 2")
ap.add_argument("-i3", "--image3", required = True, help = "Path to the image 3")
args = vars(ap.parse_args())
image = cv2.imread(args["image"])
# NOTE: CHAPTER 6
cv2.imshow("Original", image)
# 6.1 translation Left(-ve)/right(+ve) followed by up(-ve)/down(+ve)
M = np.float32([[1, 0, 25], [0, 1, 50]])
shifted = cv2.warpAffine(image, M, (image.shape[1], image.shape[0]))
cv2.imshow("Shifted Down and Right", shifted)
# 6.1 translation
M = np.float32([[1, 0, -50], [0, 1, -90]])
shifted = cv2.warpAffine(image, M, (image.shape[1], image.shape[0]))
cv2.imshow("Shifted Up and Left", shifted)
# 6.2 in imutils.py
# 6.3 translate using imutils
shifted = imutils.translate(image, 0, 100)
cv2.imshow("Shifted Down", shifted)
cv2.waitKey(0)
cv2.destroyAllWindows()
# 6.4 rotate counter-clockwise by default
(h, w) = image.shape[:2]
center = (w // 2, h // 2)
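# getRotationMatrix2D takes (center, angle in degrees, scale); positive angles rotate counter-clockwise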
M = cv2.getRotationMatrix2D(center, 45, 1.0)
rotated = cv2.warpAffine(image, M, (w, h))
cv2.imshow("rotated by 45 degrees", rotated)
# 6.4 rotate -ve to rotate clockwise
M = cv2.getRotationMatrix2D(center, -90, 1.0)
rotated = cv2.warpAffine(image, M, (w, h))
cv2.imshow("rotated by -90 degrees", rotated)
# 6.5 move rotate to imutils.py
# 6.6 rotate using imutils.py
rotated = imutils.rotate(image, 180)
cv2.imshow("Rotated by 180 Degrees", rotated)
cv2.waitKey(0)
cv2.destroyAllWindows()
# 6.7 resize
r = 150.0 / image.shape[1] # ratio - width = 150px
dim = (150, int(image.shape[0] * r))
resized = cv2.resize(image, dim, interpolation = cv2.INTER_AREA) # could also use INTER_LINEAR
# INTER_CUBIC or INTER_NEAREST
cv2.imshow("Resized (Width)", resized)
# 6.8 resize
r = 50.0 / image.shape[0] # ratio - height = 50px
dim = (int(image.shape[1] * r), 50)
resized = cv2.resize(image, dim, interpolation = cv2.INTER_AREA)
cv2.imshow("Resized (Height)", resized)
# 6.11
# 6.9 resize in imutils.py
resized = imutils.resize(image, width = 66)
print("shape: {} pixels".format(resized.shape)) # NOTE: height width order not width height
cv2.imshow("Resized via Function", resized)
# 6.10 resize height via imutils.py
resized = imutils.resize(image, height = 110)
print("shape: {} pixels".format(resized.shape)) # NOTE: height width order not width height
cv2.imshow("Resized via Function height 50", resized)
cv2.waitKey(0)
cv2.destroyAllWindows()
# 6.12 flipping
flipped = cv2.flip(image, 1)
cv2.imshow("Flipped Horizontally", flipped)
flipped = cv2.flip(image, 0)
cv2.imshow("Flipped Vertically", flipped)
flipped = cv2.flip(image, -1)
cv2.imshow("Flipped Horizontally & Vertically", flipped)
cv2.waitKey(0)
# 6.13 crop [y_start:y_end, x_start:x_end]
cropped = image[30:120, 240:335]
cv2.imshow("T-Rex Face", cropped)
cv2.waitKey(0)
cv2.destroyAllWindows()
# 6.14 arithmetic
# cv2 uses max and min
print(" max of 255: {}".format(cv2.add(np.uint8([200]), np.uint8([100]))))
print(" min of 0: {}".format(cv2.add(np.uint8([ 50]), np.uint8([100]))))
# np wraps around
print("wrap around: {}".format(np.uint8([200]) + np.uint8([100])))
print("wrap around: {}".format(np.uint8([ 50]) + np.uint8([100])))
# 6.17 arithmetic on images
M = np.ones(image.shape, dtype = "uint8") * 100
added = cv2.add(image, M)
cv2.imshow("Added", added)
M = np.ones(image.shape, dtype = "uint8") *50
subtracted = cv2.subtract(image, M)
cv2.imshow("Subtracted", subtracted)
cv2.waitKey(0)
# 6.18 bitwise operations
rectangle = np.zeros((300, 300), dtype = "uint8")
cv2.rectangle(rectangle, (25, 25), (275, 275), 255, -1)
cv2.imshow("Rectangle", rectangle)
circle = np.zeros((300, 300), dtype = "uint8")
cv2.circle(circle, (150, 150), 150, 255, -1)
cv2.imshow("Circle", circle)
cv2.waitKey(0)
# 6.19 bitwise AND
bitwiseAnd = cv2.bitwise_and(rectangle, circle)
cv2.imshow("AND", bitwiseAnd)
cv2.waitKey(0)
# 6.19 bitwise OR
bitwiseOr = cv2.bitwise_or(rectangle, circle)
cv2.imshow("OR", bitwiseOr)
cv2.waitKey(0)
# 6.19 bitwise XOR
bitwiseXor = cv2.bitwise_xor(rectangle, circle)
cv2.imshow("XOR", bitwiseXor)
cv2.waitKey(0)
# 6.19 bitwise NOT
bitwiseNot = cv2.bitwise_not(circle)
cv2.imshow("NOT", bitwiseNot)
cv2.waitKey(0)
cv2.destroyAllWindows()
# 6.20 masking
image2 = cv2.imread(args["image2"])
cv2.imshow("Original2", image2)
mask = np.zeros(image2.shape[:2], dtype = "uint8")
(cX, cY) = (image2.shape[1] // 2, image2.shape[0] // 2)
cv2.rectangle(mask, (cX - 75, cY - 75), (cX + 75, cY + 75), 255, -1)
cv2.imshow("Mask", mask)
masked = cv2.bitwise_and(image2, image2, mask = mask)
cv2.imshow("Mask Applied to Image", masked)
cv2.waitKey(0)
# 6.21 masking circle
mask = np.zeros(image2.shape[:2], dtype = "uint8")
cv2.circle(mask, (cX, cY), 100, 255, -1)
masked = cv2.bitwise_and(image2, image2, mask = mask)
cv2.imshow("Mask", mask)
cv2.imshow("Mask Applied to Image", masked)
cv2.waitKey(0)
# 6.22 splitting and merging channels
image3 = cv2.imread(args["image3"])
(B, G, R) = cv2.split(image3)
cv2.imshow("Red", R)
cv2.imshow("Green", G)
cv2.imshow("Blue", B)
merged = cv2.merge([B, G, R])
cv2.imshow("Merged", merged)
cv2.waitKey(0)
cv2.destroyAllWindows()
# 6.23 merge only colour channel
zeros = np.zeros(image3.shape[:2], dtype = "uint8")
cv2.imshow("Red", cv2.merge([zeros, zeros, R]))
cv2.imshow("Green", cv2.merge([zeros, G, zeros]))
cv2.imshow("Blue", cv2.merge([B, zeros, zeros]))
cv2.waitKey(0)
cv2.destroyAllWindows()
# 6.24 colorspaces
cv2.imshow("Original", image2)
gray = cv2.cvtColor(image2, cv2.COLOR_BGR2GRAY)
cv2.imshow("Gray", gray)
hsv = cv2.cvtColor(image2, cv2.COLOR_BGR2HSV)
cv2.imshow("HSV", hsv)
lab = cv2.cvtColor(image2, cv2.COLOR_BGR2LAB)
cv2.imshow("L*a*b*", lab)
cv2.waitKey(0)
|
[
"argparse.ArgumentParser",
"cv2.bitwise_and",
"numpy.ones",
"cv2.warpAffine",
"cv2.rectangle",
"imutils.translate",
"imutils.resize",
"cv2.imshow",
"cv2.getRotationMatrix2D",
"cv2.subtract",
"cv2.cvtColor",
"cv2.split",
"cv2.destroyAllWindows",
"cv2.resize",
"cv2.circle",
"cv2.bitwise_not",
"cv2.bitwise_xor",
"numpy.uint8",
"cv2.waitKey",
"imutils.rotate",
"cv2.bitwise_or",
"cv2.flip",
"cv2.merge",
"cv2.add",
"numpy.float32",
"numpy.zeros",
"cv2.imread"
] |
[((67, 92), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (90, 92), False, 'import argparse\n'), ((373, 398), 'cv2.imread', 'cv2.imread', (["args['image']"], {}), "(args['image'])\n", (383, 398), False, 'import cv2\n'), ((419, 448), 'cv2.imshow', 'cv2.imshow', (['"""Original"""', 'image'], {}), "('Original', image)\n", (429, 448), False, 'import cv2\n'), ((523, 559), 'numpy.float32', 'np.float32', (['[[1, 0, 25], [0, 1, 50]]'], {}), '([[1, 0, 25], [0, 1, 50]])\n', (533, 559), True, 'import numpy as np\n'), ((570, 628), 'cv2.warpAffine', 'cv2.warpAffine', (['image', 'M', '(image.shape[1], image.shape[0])'], {}), '(image, M, (image.shape[1], image.shape[0]))\n', (584, 628), False, 'import cv2\n'), ((629, 674), 'cv2.imshow', 'cv2.imshow', (['"""Shifted Down and Right"""', 'shifted'], {}), "('Shifted Down and Right', shifted)\n", (639, 674), False, 'import cv2\n'), ((698, 736), 'numpy.float32', 'np.float32', (['[[1, 0, -50], [0, 1, -90]]'], {}), '([[1, 0, -50], [0, 1, -90]])\n', (708, 736), True, 'import numpy as np\n'), ((747, 805), 'cv2.warpAffine', 'cv2.warpAffine', (['image', 'M', '(image.shape[0], image.shape[0])'], {}), '(image, M, (image.shape[0], image.shape[0]))\n', (761, 805), False, 'import cv2\n'), ((806, 848), 'cv2.imshow', 'cv2.imshow', (['"""Shifted Up and Left"""', 'shifted'], {}), "('Shifted Up and Left', shifted)\n", (816, 848), False, 'import cv2\n'), ((910, 942), 'imutils.translate', 'imutils.translate', (['image', '(0)', '(100)'], {}), '(image, 0, 100)\n', (927, 942), False, 'import imutils\n'), ((943, 978), 'cv2.imshow', 'cv2.imshow', (['"""Shifted Down"""', 'shifted'], {}), "('Shifted Down', shifted)\n", (953, 978), False, 'import cv2\n'), ((979, 993), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (990, 993), False, 'import cv2\n'), ((994, 1017), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1015, 1017), False, 'import cv2\n'), ((1117, 1157), 'cv2.getRotationMatrix2D', 'cv2.getRotationMatrix2D', (['center', '(45)', '(1.0)'], {}), '(center, 45, 1.0)\n', (1140, 1157), False, 'import cv2\n'), ((1168, 1200), 'cv2.warpAffine', 'cv2.warpAffine', (['image', 'M', '(w, h)'], {}), '(image, M, (w, h))\n', (1182, 1200), False, 'import cv2\n'), ((1201, 1245), 'cv2.imshow', 'cv2.imshow', (['"""rotated by 45 degrees"""', 'rotated'], {}), "('rotated by 45 degrees', rotated)\n", (1211, 1245), False, 'import cv2\n'), ((1288, 1329), 'cv2.getRotationMatrix2D', 'cv2.getRotationMatrix2D', (['center', '(-90)', '(1.0)'], {}), '(center, -90, 1.0)\n', (1311, 1329), False, 'import cv2\n'), ((1340, 1372), 'cv2.warpAffine', 'cv2.warpAffine', (['image', 'M', '(w, h)'], {}), '(image, M, (w, h))\n', (1354, 1372), False, 'import cv2\n'), ((1373, 1418), 'cv2.imshow', 'cv2.imshow', (['"""rotated by -90 degrees"""', 'rotated'], {}), "('rotated by -90 degrees', rotated)\n", (1383, 1418), False, 'import cv2\n'), ((1492, 1518), 'imutils.rotate', 'imutils.rotate', (['image', '(180)'], {}), '(image, 180)\n', (1506, 1518), False, 'import imutils\n'), ((1519, 1564), 'cv2.imshow', 'cv2.imshow', (['"""Rotated by 180 Degrees"""', 'rotated'], {}), "('Rotated by 180 Degrees', rotated)\n", (1529, 1564), False, 'import cv2\n'), ((1565, 1579), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (1576, 1579), False, 'import cv2\n'), ((1580, 1603), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1601, 1603), False, 'import cv2\n'), ((1717, 1769), 'cv2.resize', 'cv2.resize', (['image', 'dim'], {'interpolation': 'cv2.INTER_AREA'}), '(image, dim, 
interpolation=cv2.INTER_AREA)\n', (1727, 1769), False, 'import cv2\n'), ((1898, 1936), 'cv2.imshow', 'cv2.imshow', (['"""Resized (Width)"""', 'resized'], {}), "('Resized (Width)', resized)\n", (1908, 1936), False, 'import cv2\n'), ((2048, 2100), 'cv2.resize', 'cv2.resize', (['image', 'dim'], {'interpolation': 'cv2.INTER_AREA'}), '(image, dim, interpolation=cv2.INTER_AREA)\n', (2058, 2100), False, 'import cv2\n'), ((2103, 2142), 'cv2.imshow', 'cv2.imshow', (['"""Resized (Height)"""', 'resized'], {}), "('Resized (Height)', resized)\n", (2113, 2142), False, 'import cv2\n'), ((2188, 2219), 'imutils.resize', 'imutils.resize', (['image'], {'width': '(66)'}), '(image, width=66)\n', (2202, 2219), False, 'import imutils\n'), ((2314, 2357), 'cv2.imshow', 'cv2.imshow', (['"""Resized via Function"""', 'resized'], {}), "('Resized via Function', resized)\n", (2324, 2357), False, 'import cv2\n'), ((2405, 2438), 'imutils.resize', 'imutils.resize', (['image'], {'height': '(110)'}), '(image, height=110)\n', (2419, 2438), False, 'import imutils\n'), ((2533, 2586), 'cv2.imshow', 'cv2.imshow', (['"""Resized via Function height 50"""', 'resized'], {}), "('Resized via Function height 50', resized)\n", (2543, 2586), False, 'import cv2\n'), ((2587, 2601), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (2598, 2601), False, 'import cv2\n'), ((2602, 2625), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (2623, 2625), False, 'import cv2\n'), ((2653, 2671), 'cv2.flip', 'cv2.flip', (['image', '(1)'], {}), '(image, 1)\n', (2661, 2671), False, 'import cv2\n'), ((2672, 2715), 'cv2.imshow', 'cv2.imshow', (['"""Flipped Horizontally"""', 'flipped'], {}), "('Flipped Horizontally', flipped)\n", (2682, 2715), False, 'import cv2\n'), ((2727, 2745), 'cv2.flip', 'cv2.flip', (['image', '(0)'], {}), '(image, 0)\n', (2735, 2745), False, 'import cv2\n'), ((2746, 2787), 'cv2.imshow', 'cv2.imshow', (['"""Flipped Vertically"""', 'flipped'], {}), "('Flipped Vertically', flipped)\n", (2756, 2787), False, 'import cv2\n'), ((2799, 2818), 'cv2.flip', 'cv2.flip', (['image', '(-1)'], {}), '(image, -1)\n', (2807, 2818), False, 'import cv2\n'), ((2819, 2875), 'cv2.imshow', 'cv2.imshow', (['"""Flipped Horizontally & Vertically"""', 'flipped'], {}), "('Flipped Horizontally & Vertically', flipped)\n", (2829, 2875), False, 'import cv2\n'), ((2876, 2890), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (2887, 2890), False, 'import cv2\n'), ((2968, 3001), 'cv2.imshow', 'cv2.imshow', (['"""T-Rex Face"""', 'cropped'], {}), "('T-Rex Face', cropped)\n", (2978, 3001), False, 'import cv2\n'), ((3002, 3016), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (3013, 3016), False, 'import cv2\n'), ((3017, 3040), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3038, 3040), False, 'import cv2\n'), ((3534, 3551), 'cv2.add', 'cv2.add', (['image', 'M'], {}), '(image, M)\n', (3541, 3551), False, 'import cv2\n'), ((3552, 3578), 'cv2.imshow', 'cv2.imshow', (['"""Added"""', 'added'], {}), "('Added', added)\n", (3562, 3578), False, 'import cv2\n'), ((3639, 3661), 'cv2.subtract', 'cv2.subtract', (['image', 'M'], {}), '(image, M)\n', (3651, 3661), False, 'import cv2\n'), ((3662, 3698), 'cv2.imshow', 'cv2.imshow', (['"""Subtracted"""', 'subtracted'], {}), "('Subtracted', subtracted)\n", (3672, 3698), False, 'import cv2\n'), ((3699, 3713), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (3710, 3713), False, 'import cv2\n'), ((3753, 3788), 'numpy.zeros', 'np.zeros', (['(300, 300)'], {'dtype': '"""uint8"""'}), 
"((300, 300), dtype='uint8')\n", (3761, 3788), True, 'import numpy as np\n'), ((3791, 3846), 'cv2.rectangle', 'cv2.rectangle', (['rectangle', '(25, 25)', '(275, 275)', '(255)', '(-1)'], {}), '(rectangle, (25, 25), (275, 275), 255, -1)\n', (3804, 3846), False, 'import cv2\n'), ((3847, 3881), 'cv2.imshow', 'cv2.imshow', (['"""Rectangle"""', 'rectangle'], {}), "('Rectangle', rectangle)\n", (3857, 3881), False, 'import cv2\n'), ((3892, 3927), 'numpy.zeros', 'np.zeros', (['(300, 300)'], {'dtype': '"""uint8"""'}), "((300, 300), dtype='uint8')\n", (3900, 3927), True, 'import numpy as np\n'), ((3930, 3974), 'cv2.circle', 'cv2.circle', (['circle', '(150, 150)', '(150)', '(255)', '(-1)'], {}), '(circle, (150, 150), 150, 255, -1)\n', (3940, 3974), False, 'import cv2\n'), ((3975, 4003), 'cv2.imshow', 'cv2.imshow', (['"""Circle"""', 'circle'], {}), "('Circle', circle)\n", (3985, 4003), False, 'import cv2\n'), ((4004, 4018), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (4015, 4018), False, 'import cv2\n'), ((4052, 4086), 'cv2.bitwise_and', 'cv2.bitwise_and', (['rectangle', 'circle'], {}), '(rectangle, circle)\n', (4067, 4086), False, 'import cv2\n'), ((4087, 4116), 'cv2.imshow', 'cv2.imshow', (['"""AND"""', 'bitwiseAnd'], {}), "('AND', bitwiseAnd)\n", (4097, 4116), False, 'import cv2\n'), ((4117, 4131), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (4128, 4131), False, 'import cv2\n'), ((4163, 4196), 'cv2.bitwise_or', 'cv2.bitwise_or', (['rectangle', 'circle'], {}), '(rectangle, circle)\n', (4177, 4196), False, 'import cv2\n'), ((4197, 4224), 'cv2.imshow', 'cv2.imshow', (['"""OR"""', 'bitwiseOr'], {}), "('OR', bitwiseOr)\n", (4207, 4224), False, 'import cv2\n'), ((4225, 4239), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (4236, 4239), False, 'import cv2\n'), ((4273, 4307), 'cv2.bitwise_xor', 'cv2.bitwise_xor', (['rectangle', 'circle'], {}), '(rectangle, circle)\n', (4288, 4307), False, 'import cv2\n'), ((4308, 4337), 'cv2.imshow', 'cv2.imshow', (['"""XOR"""', 'bitwiseXor'], {}), "('XOR', bitwiseXor)\n", (4318, 4337), False, 'import cv2\n'), ((4338, 4352), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (4349, 4352), False, 'import cv2\n'), ((4386, 4409), 'cv2.bitwise_not', 'cv2.bitwise_not', (['circle'], {}), '(circle)\n', (4401, 4409), False, 'import cv2\n'), ((4410, 4439), 'cv2.imshow', 'cv2.imshow', (['"""NOT"""', 'bitwiseNot'], {}), "('NOT', bitwiseNot)\n", (4420, 4439), False, 'import cv2\n'), ((4440, 4454), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (4451, 4454), False, 'import cv2\n'), ((4455, 4478), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (4476, 4478), False, 'import cv2\n'), ((4504, 4530), 'cv2.imread', 'cv2.imread', (["args['image2']"], {}), "(args['image2'])\n", (4514, 4530), False, 'import cv2\n'), ((4531, 4562), 'cv2.imshow', 'cv2.imshow', (['"""Original2"""', 'image2'], {}), "('Original2', image2)\n", (4541, 4562), False, 'import cv2\n'), ((4571, 4612), 'numpy.zeros', 'np.zeros', (['image2.shape[:2]'], {'dtype': '"""uint8"""'}), "(image2.shape[:2], dtype='uint8')\n", (4579, 4612), True, 'import numpy as np\n'), ((4671, 4739), 'cv2.rectangle', 'cv2.rectangle', (['mask', '(cX - 75, cY - 75)', '(cX + 75, cY + 75)', '(255)', '(-1)'], {}), '(mask, (cX - 75, cY - 75), (cX + 75, cY + 75), 255, -1)\n', (4684, 4739), False, 'import cv2\n'), ((4738, 4762), 'cv2.imshow', 'cv2.imshow', (['"""Mask"""', 'mask'], {}), "('Mask', mask)\n", (4748, 4762), False, 'import cv2\n'), ((4773, 4815), 'cv2.bitwise_and', 'cv2.bitwise_and', 
(['image2', 'image2'], {'mask': 'mask'}), '(image2, image2, mask=mask)\n', (4788, 4815), False, 'import cv2\n'), ((4818, 4861), 'cv2.imshow', 'cv2.imshow', (['"""Mask Applied to Image"""', 'masked'], {}), "('Mask Applied to Image', masked)\n", (4828, 4861), False, 'import cv2\n'), ((4862, 4876), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (4873, 4876), False, 'import cv2\n'), ((4907, 4948), 'numpy.zeros', 'np.zeros', (['image2.shape[:2]'], {'dtype': '"""uint8"""'}), "(image2.shape[:2], dtype='uint8')\n", (4915, 4948), True, 'import numpy as np\n'), ((4951, 4991), 'cv2.circle', 'cv2.circle', (['mask', '(cX, cY)', '(100)', '(255)', '(-1)'], {}), '(mask, (cX, cY), 100, 255, -1)\n', (4961, 4991), False, 'import cv2\n'), ((5001, 5043), 'cv2.bitwise_and', 'cv2.bitwise_and', (['image2', 'image2'], {'mask': 'mask'}), '(image2, image2, mask=mask)\n', (5016, 5043), False, 'import cv2\n'), ((5046, 5070), 'cv2.imshow', 'cv2.imshow', (['"""Mask"""', 'mask'], {}), "('Mask', mask)\n", (5056, 5070), False, 'import cv2\n'), ((5071, 5114), 'cv2.imshow', 'cv2.imshow', (['"""Mask Applied to Image"""', 'masked'], {}), "('Mask Applied to Image', masked)\n", (5081, 5114), False, 'import cv2\n'), ((5115, 5129), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (5126, 5129), False, 'import cv2\n'), ((5178, 5204), 'cv2.imread', 'cv2.imread', (["args['image3']"], {}), "(args['image3'])\n", (5188, 5204), False, 'import cv2\n'), ((5217, 5234), 'cv2.split', 'cv2.split', (['image3'], {}), '(image3)\n', (5226, 5234), False, 'import cv2\n'), ((5236, 5256), 'cv2.imshow', 'cv2.imshow', (['"""Red"""', 'R'], {}), "('Red', R)\n", (5246, 5256), False, 'import cv2\n'), ((5257, 5279), 'cv2.imshow', 'cv2.imshow', (['"""Green"""', 'G'], {}), "('Green', G)\n", (5267, 5279), False, 'import cv2\n'), ((5280, 5301), 'cv2.imshow', 'cv2.imshow', (['"""Blue"""', 'B'], {}), "('Blue', B)\n", (5290, 5301), False, 'import cv2\n'), ((5312, 5332), 'cv2.merge', 'cv2.merge', (['[B, G, R]'], {}), '([B, G, R])\n', (5321, 5332), False, 'import cv2\n'), ((5333, 5361), 'cv2.imshow', 'cv2.imshow', (['"""Merged"""', 'merged'], {}), "('Merged', merged)\n", (5343, 5361), False, 'import cv2\n'), ((5362, 5376), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (5373, 5376), False, 'import cv2\n'), ((5377, 5400), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (5398, 5400), False, 'import cv2\n'), ((5443, 5484), 'numpy.zeros', 'np.zeros', (['image3.shape[:2]'], {'dtype': '"""uint8"""'}), "(image3.shape[:2], dtype='uint8')\n", (5451, 5484), True, 'import numpy as np\n'), ((5634, 5648), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (5645, 5648), False, 'import cv2\n'), ((5649, 5672), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (5670, 5672), False, 'import cv2\n'), ((5693, 5723), 'cv2.imshow', 'cv2.imshow', (['"""Original"""', 'image2'], {}), "('Original', image2)\n", (5703, 5723), False, 'import cv2\n'), ((5732, 5772), 'cv2.cvtColor', 'cv2.cvtColor', (['image2', 'cv2.COLOR_BGR2GRAY'], {}), '(image2, cv2.COLOR_BGR2GRAY)\n', (5744, 5772), False, 'import cv2\n'), ((5773, 5797), 'cv2.imshow', 'cv2.imshow', (['"""Gray"""', 'gray'], {}), "('Gray', gray)\n", (5783, 5797), False, 'import cv2\n'), ((5805, 5844), 'cv2.cvtColor', 'cv2.cvtColor', (['image2', 'cv2.COLOR_BGR2HSV'], {}), '(image2, cv2.COLOR_BGR2HSV)\n', (5817, 5844), False, 'import cv2\n'), ((5845, 5867), 'cv2.imshow', 'cv2.imshow', (['"""HSV"""', 'hsv'], {}), "('HSV', hsv)\n", (5855, 5867), False, 'import cv2\n'), ((5875, 5914), 
'cv2.cvtColor', 'cv2.cvtColor', (['image2', 'cv2.COLOR_BGR2LAB'], {}), '(image2, cv2.COLOR_BGR2LAB)\n', (5887, 5914), False, 'import cv2\n'), ((5915, 5940), 'cv2.imshow', 'cv2.imshow', (['"""L*a*b*"""', 'lab'], {}), "('L*a*b*', lab)\n", (5925, 5940), False, 'import cv2\n'), ((5941, 5955), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (5952, 5955), False, 'import cv2\n'), ((3482, 3517), 'numpy.ones', 'np.ones', (['image.shape'], {'dtype': '"""uint8"""'}), "(image.shape, dtype='uint8')\n", (3489, 3517), True, 'import numpy as np\n'), ((3584, 3619), 'numpy.ones', 'np.ones', (['image.shape'], {'dtype': '"""uint8"""'}), "(image.shape, dtype='uint8')\n", (3591, 3619), True, 'import numpy as np\n'), ((5505, 5533), 'cv2.merge', 'cv2.merge', (['[zeros, zeros, R]'], {}), '([zeros, zeros, R])\n', (5514, 5533), False, 'import cv2\n'), ((5555, 5583), 'cv2.merge', 'cv2.merge', (['[zeros, G, zeros]'], {}), '([zeros, G, zeros])\n', (5564, 5583), False, 'import cv2\n'), ((5604, 5632), 'cv2.merge', 'cv2.merge', (['[B, zeros, zeros]'], {}), '([B, zeros, zeros])\n', (5613, 5632), False, 'import cv2\n'), ((3154, 3169), 'numpy.uint8', 'np.uint8', (['[200]'], {}), '([200])\n', (3162, 3169), True, 'import numpy as np\n'), ((3171, 3186), 'numpy.uint8', 'np.uint8', (['[100]'], {}), '([100])\n', (3179, 3186), True, 'import numpy as np\n'), ((3229, 3243), 'numpy.uint8', 'np.uint8', (['[50]'], {}), '([50])\n', (3237, 3243), True, 'import numpy as np\n'), ((3246, 3261), 'numpy.uint8', 'np.uint8', (['[100]'], {}), '([100])\n', (3254, 3261), True, 'import numpy as np\n'), ((3346, 3361), 'numpy.uint8', 'np.uint8', (['[200]'], {}), '([200])\n', (3354, 3361), True, 'import numpy as np\n'), ((3364, 3379), 'numpy.uint8', 'np.uint8', (['[100]'], {}), '([100])\n', (3372, 3379), True, 'import numpy as np\n'), ((3413, 3427), 'numpy.uint8', 'np.uint8', (['[50]'], {}), '([50])\n', (3421, 3427), True, 'import numpy as np\n'), ((3431, 3446), 'numpy.uint8', 'np.uint8', (['[100]'], {}), '([100])\n', (3439, 3446), True, 'import numpy as np\n')]
|
import datetime
import random
import csv
import json
# TODO: Fix * imports
from django.shortcuts import *
from django.contrib.auth.decorators import login_required, user_passes_test
from django.contrib.auth import logout as auth_logout
from social.apps.django_app.default.models import UserSocialAuth
from gnip_search.gnip_search_api import QueryError as GNIPQueryError
from chart import Chart
from timeframe import Timeframe
from frequency import Frequency
from tweets import Tweets
from home.utils import *
# import twitter
KEYWORD_RELEVANCE_THRESHOLD = .1 # Only show related terms if > 10%
TWEET_QUERY_COUNT = 10 # For real identification, > 100. Max of 500 via Search API.
DEFAULT_TIMEFRAME = 1 # When not specified or needed to constrain, this # of days lookback
TIMEDELTA_DEFAULT_TIMEFRAME = datetime.timedelta(days=DEFAULT_TIMEFRAME)
TIMEDELTA_DEFAULT_30 = datetime.timedelta(days=30)
DATE_FORMAT = "%Y-%m-%d %H:%M"
DATE_FORMAT_JSON = "%Y-%m-%dT%H:%M:%S"
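# e.g. datetime.datetime(2019, 7, 4, 13, 5).strftime(DATE_FORMAT) -> "2019-07-04 13:05"
# and the same timestamp under DATE_FORMAT_JSON -> "2019-07-04T13:05:00"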
def login(request):
"""
Returns login page for given request
"""
context = {"request": request}
return render_to_response('login.html', context, context_instance=RequestContext(request))
@login_required
# @user_passes_test(lambda u: u.is_staff or u.is_superuser, login_url='/')
def home(request):
"""
Returns home page for given request
"""
query = request.GET.get("query", "")
context = {"request": request, "query0": query}
tweets = []
return render_to_response('home.html', context, context_instance=RequestContext(request))
@login_required
def query_chart(request):
"""
Returns query chart for given request
"""
    # TODO: Merge the separate "query" and "queries[]" parameters into one
query = request.GET.get("query", None)
queries = request.GET.getlist("queries[]")
if query:
queries = [query]
request_timeframe = Timeframe(start = request.GET.get("start", None),
end = request.GET.get("end", None),
interval = request.GET.get("interval", "hour"))
response_chart = None
try:
response_chart = Chart(queries = queries,
start = request_timeframe.start,
end = request_timeframe.end,
interval = request_timeframe.interval)
except GNIPQueryError as e:
return handleQueryError(e)
response_data = {}
response_data['days'] = request_timeframe.days
response_data['start'] = request_timeframe.start.strftime(DATE_FORMAT_JSON)
response_data['end'] = request_timeframe.end.strftime(DATE_FORMAT_JSON)
response_data['columns'] = response_chart.columns
response_data['total'] = response_chart.total
return HttpResponse(json.dumps(response_data), content_type="application/json")
@login_required
def query_frequency(request):
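    """
    Returns tweet frequency data for given request
    """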
query = request.GET.get("query", None)
response_data = {}
sample = 500
if query is not None:
# Get Timeframe e.g. process time from request
request_timeframe = Timeframe(start = request.GET.get("start", None),
end = request.GET.get("end", None),
interval = request.GET.get("interval", "hour"))
data = None
try:
# Query GNIP and get frequency
data = Frequency(query = query,
sample = sample,
start = request_timeframe.start,
end = request_timeframe.end)
except GNIPQueryError as e:
return handleQueryError(e)
response_data["frequency"] = data.freq
response_data["sample"] = sample
return HttpResponse(json.dumps(response_data), content_type="application/json")
@login_required
def query_tweets(request):
"""
Returns tweet query
"""
request_timeframe = Timeframe(start = request.GET.get("start", None),
end = request.GET.get("end", None),
interval = request.GET.get("interval", "hour"))
query_count = int(request.GET.get("embedCount", TWEET_QUERY_COUNT))
export = request.GET.get("export", None)
query = request.GET.get("query", "")
try:
tweets = Tweets(query=query, query_count=query_count, start=request_timeframe.start, end=request_timeframe.end, export=export)
except GNIPQueryError as e:
return handleQueryError(e)
response_data = {}
if export == "csv":
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="export.csv"'
writer = csv.writer(response, delimiter=',', quotechar="'", quoting=csv.QUOTE_ALL)
writer.writerow(['count','time','id','user_screen_name','user_id','status','retweet_count','favorite_count','is_retweet','in_reply_to_tweet_id','in_reply_to_screen_name'])
        count = 0
for t in tweets.get_data():
count = count + 1
body = t['body'].encode('ascii', 'replace')
status_id = t['id']
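            # GNIP ids are URIs such as "tag:search.twitter.com,2005:<id>";
            # keep only the part after the last colon (the actor id below is
            # unpacked the same way).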
status_id = status_id[status_id.rfind(':')+1:]
user_id = t['actor']['id']
user_id = user_id[user_id.rfind(':')+1:]
writer.writerow([count, t['postedTime'], status_id, t['actor']['preferredUsername'], user_id, body, t['retweetCount'], t['favoritesCount'], 'X', 'X', 'X'])
return response
else:
response_data['tweets'] = tweets.get_data()
return HttpResponse(json.dumps(response_data), content_type="application/json")
def logout(request):
"""
Returns a redirect response and logs out user
"""
auth_logout(request)
return HttpResponseRedirect('/')
|
[
"csv.writer",
"chart.Chart",
"tweets.Tweets",
"json.dumps",
"django.contrib.auth.logout",
"datetime.timedelta",
"frequency.Frequency"
] |
[((833, 875), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': 'DEFAULT_TIMEFRAME'}), '(days=DEFAULT_TIMEFRAME)\n', (851, 875), False, 'import datetime\n'), ((899, 926), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(30)'}), '(days=30)\n', (917, 926), False, 'import datetime\n'), ((5777, 5797), 'django.contrib.auth.logout', 'auth_logout', (['request'], {}), '(request)\n', (5788, 5797), True, 'from django.contrib.auth import logout as auth_logout\n'), ((2150, 2272), 'chart.Chart', 'Chart', ([], {'queries': 'queries', 'start': 'request_timeframe.start', 'end': 'request_timeframe.end', 'interval': 'request_timeframe.interval'}), '(queries=queries, start=request_timeframe.start, end=request_timeframe\n .end, interval=request_timeframe.interval)\n', (2155, 2272), False, 'from chart import Chart\n'), ((2796, 2821), 'json.dumps', 'json.dumps', (['response_data'], {}), '(response_data)\n', (2806, 2821), False, 'import json\n'), ((3820, 3845), 'json.dumps', 'json.dumps', (['response_data'], {}), '(response_data)\n', (3830, 3845), False, 'import json\n'), ((4380, 4501), 'tweets.Tweets', 'Tweets', ([], {'query': 'query', 'query_count': 'query_count', 'start': 'request_timeframe.start', 'end': 'request_timeframe.end', 'export': 'export'}), '(query=query, query_count=query_count, start=request_timeframe.start,\n end=request_timeframe.end, export=export)\n', (4386, 4501), False, 'from tweets import Tweets\n'), ((4769, 4842), 'csv.writer', 'csv.writer', (['response'], {'delimiter': '""","""', 'quotechar': '"""\'"""', 'quoting': 'csv.QUOTE_ALL'}), '(response, delimiter=\',\', quotechar="\'", quoting=csv.QUOTE_ALL)\n', (4779, 4842), False, 'import csv\n'), ((5625, 5650), 'json.dumps', 'json.dumps', (['response_data'], {}), '(response_data)\n', (5635, 5650), False, 'import json\n'), ((3418, 3518), 'frequency.Frequency', 'Frequency', ([], {'query': 'query', 'sample': 'sample', 'start': 'request_timeframe.start', 'end': 'request_timeframe.end'}), '(query=query, sample=sample, start=request_timeframe.start, end=\n request_timeframe.end)\n', (3427, 3518), False, 'from frequency import Frequency\n')]
|
import pytest
import mxnet as mx
import numpy as np
from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples
from mxfusion.components.distributions import Gamma, GammaMeanVariance
from mxfusion.util.testutils import numpy_array_reshape
from mxfusion.util.testutils import MockMXNetRandomGenerator
@pytest.mark.usefixtures("set_seed")
class TestGammaDistribution(object):
@pytest.mark.parametrize("dtype, mean, mean_isSamples, variance, variance_isSamples, rv, rv_isSamples, num_samples", [
(np.float64, np.random.uniform(0,10,size=(5,2)), True, np.random.uniform(1,10,size=(2)), False, np.random.uniform(1,10,size=(5,3,2)), True, 5),
(np.float64, np.random.uniform(0,10,size=(5,2)), True, np.random.uniform(1,10,size=(2)), False, np.random.uniform(1,10,size=(3,2)), False, 5),
(np.float64, np.random.uniform(0,10,size=(2)), False, np.random.uniform(1,10,size=(2)), False, np.random.uniform(1,10,size=(3,2)), False, 5),
(np.float64, np.random.uniform(0,10,size=(5,2)), True, np.random.uniform(1,10,size=(5,3,2)), True, np.random.uniform(1,10,size=(5,3,2)), True, 5),
(np.float32, np.random.uniform(0,10,size=(5,2)), True, np.random.uniform(1,10,size=(2)), False, np.random.uniform(1,10,size=(5,3,2)), True, 5),
])
def test_log_pdf_mean_variance(self, dtype, mean, mean_isSamples, variance, variance_isSamples,
rv, rv_isSamples, num_samples):
import scipy as sp
isSamples_any = any([mean_isSamples, variance_isSamples, rv_isSamples])
rv_shape = rv.shape[1:] if rv_isSamples else rv.shape
n_dim = 1 + len(rv.shape) if isSamples_any and not rv_isSamples else len(rv.shape)
mean_np = numpy_array_reshape(mean, mean_isSamples, n_dim)
variance_np = numpy_array_reshape(variance, variance_isSamples, n_dim)
rv_np = numpy_array_reshape(rv, rv_isSamples, n_dim)
beta_np = mean_np / variance_np
alpha_np = mean_np * beta_np
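        # Gamma(alpha, beta) has mean alpha/beta and variance alpha/beta**2,
        # so beta = mean/variance and alpha = mean*beta (e.g. mean=2,
        # variance=4 gives beta=0.5, alpha=1). SciPy's gamma expects a scale
        # parameter, the reciprocal of the rate beta, hence scale=1./beta_np.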
log_pdf_np = sp.stats.gamma.logpdf(rv_np, a=alpha_np, loc=0, scale=1./beta_np)
mean_mx = mx.nd.array(mean, dtype=dtype)
if not mean_isSamples:
mean_mx = add_sample_dimension(mx.nd, mean_mx)
variance_mx = mx.nd.array(variance, dtype=dtype)
if not variance_isSamples:
variance_mx = add_sample_dimension(mx.nd, variance_mx)
rv_mx = mx.nd.array(rv, dtype=dtype)
if not rv_isSamples:
rv_mx = add_sample_dimension(mx.nd, rv_mx)
gamma = GammaMeanVariance.define_variable(mean=mean_mx, variance=variance_mx, shape=rv_shape, dtype=dtype).factor
variables = {gamma.mean.uuid: mean_mx, gamma.variance.uuid: variance_mx, gamma.random_variable.uuid: rv_mx}
log_pdf_rt = gamma.log_pdf(F=mx.nd, variables=variables)
assert np.issubdtype(log_pdf_rt.dtype, dtype)
assert is_sampled_array(mx.nd, log_pdf_rt) == isSamples_any
if isSamples_any:
assert get_num_samples(mx.nd, log_pdf_rt) == num_samples
if np.issubdtype(dtype, np.float64):
rtol, atol = 1e-7, 1e-10
else:
rtol, atol = 1e-4, 1e-5
assert np.allclose(log_pdf_np, log_pdf_rt.asnumpy(), rtol=rtol, atol=atol)
@pytest.mark.parametrize(
"dtype, mean, mean_isSamples, variance, variance_isSamples, rv_shape, num_samples",[
(np.float64, np.random.rand(5,2), True, np.random.rand(2)+0.1, False, (3,2), 5),
(np.float64, np.random.rand(2), False, np.random.rand(5,2)+0.1, True, (3,2), 5),
(np.float64, np.random.rand(2), False, np.random.rand(2)+0.1, False, (3,2), 5),
(np.float64, np.random.rand(5,2), True, np.random.rand(5,3,2)+0.1, True, (3,2), 5),
(np.float32, np.random.rand(5,2), True, np.random.rand(2)+0.1, False, (3,2), 5),
])
def test_draw_samples_mean_variance(self, dtype, mean, mean_isSamples, variance,
variance_isSamples, rv_shape, num_samples):
n_dim = 1 + len(rv_shape)
out_shape = (num_samples,) + rv_shape
mean_np = mx.nd.array(np.broadcast_to(numpy_array_reshape(mean, mean_isSamples, n_dim), shape=out_shape), dtype=dtype)
variance_np = mx.nd.array(np.broadcast_to(numpy_array_reshape(variance, variance_isSamples, n_dim), shape=out_shape), dtype=dtype)
gamma = GammaMeanVariance.define_variable(shape=rv_shape, dtype=dtype).factor
mean_mx = mx.nd.array(mean, dtype=dtype)
if not mean_isSamples:
mean_mx = add_sample_dimension(mx.nd, mean_mx)
variance_mx = mx.nd.array(variance, dtype=dtype)
if not variance_isSamples:
variance_mx = add_sample_dimension(mx.nd, variance_mx)
variables = {gamma.mean.uuid: mean_mx, gamma.variance.uuid: variance_mx}
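        # Reseed before each draw so the distribution under test and the
        # reference mx.nd.random.gamma call consume identical random streams.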
mx.random.seed(0)
rv_samples_rt = gamma.draw_samples(
F=mx.nd, variables=variables, num_samples=num_samples)
mx.random.seed(0)
beta_np = mean_np / variance_np
alpha_np = mean_np * beta_np
rv_samples_mx = mx.nd.random.gamma(alpha=alpha_np, beta=beta_np, dtype=dtype)
assert np.issubdtype(rv_samples_rt.dtype, dtype)
assert is_sampled_array(mx.nd, rv_samples_rt)
assert get_num_samples(mx.nd, rv_samples_rt) == num_samples
if np.issubdtype(dtype, np.float64):
rtol, atol = 1e-7, 1e-10
else:
rtol, atol = 1e-4, 1e-5
assert np.allclose(rv_samples_mx.asnumpy(), rv_samples_rt.asnumpy(), rtol=rtol, atol=atol)
@pytest.mark.parametrize("dtype, alpha, alpha_isSamples, beta, beta_isSamples, rv, rv_isSamples, num_samples", [
(np.float64, np.random.uniform(0,10,size=(5,2)), True, np.random.uniform(1,10,size=(2)), False, np.random.uniform(1,10,size=(5,3,2)), True, 5),
(np.float64, np.random.uniform(0,10,size=(5,2)), True, np.random.uniform(1,10,size=(2)), False, np.random.uniform(1,10,size=(3,2)), False, 5),
(np.float64, np.random.uniform(0,10,size=(2)), False, np.random.uniform(1,10,size=(2)), False, np.random.uniform(1,10,size=(3,2)), False, 5),
(np.float64, np.random.uniform(0,10,size=(5,2)), True, np.random.uniform(1,10,size=(5,3,2)), True, np.random.uniform(1,10,size=(5,3,2)), True, 5),
(np.float32, np.random.uniform(0,10,size=(5,2)), True, np.random.uniform(1,10,size=(2)), False, np.random.uniform(1,10,size=(5,3,2)), True, 5),
])
def test_log_pdf(self, dtype, alpha, alpha_isSamples, beta, beta_isSamples,
rv, rv_isSamples, num_samples):
import scipy as sp
isSamples_any = any([alpha_isSamples, beta_isSamples, rv_isSamples])
rv_shape = rv.shape[1:] if rv_isSamples else rv.shape
n_dim = 1 + len(rv.shape) if isSamples_any and not rv_isSamples else len(rv.shape)
alpha_np = numpy_array_reshape(alpha, alpha_isSamples, n_dim)
beta_np = numpy_array_reshape(beta, beta_isSamples, n_dim)
rv_np = numpy_array_reshape(rv, rv_isSamples, n_dim)
log_pdf_np = sp.stats.gamma.logpdf(rv_np, a=alpha_np, loc=0, scale=1./beta_np)
gamma = Gamma.define_variable(shape=rv_shape, dtype=dtype).factor
alpha_mx = mx.nd.array(alpha, dtype=dtype)
if not alpha_isSamples:
alpha_mx = add_sample_dimension(mx.nd, alpha_mx)
beta_mx = mx.nd.array(beta, dtype=dtype)
if not beta_isSamples:
beta_mx = add_sample_dimension(mx.nd, beta_mx)
rv_mx = mx.nd.array(rv, dtype=dtype)
if not rv_isSamples:
rv_mx = add_sample_dimension(mx.nd, rv_mx)
variables = {gamma.alpha.uuid: alpha_mx, gamma.beta.uuid: beta_mx, gamma.random_variable.uuid: rv_mx}
log_pdf_rt = gamma.log_pdf(F=mx.nd, variables=variables)
assert np.issubdtype(log_pdf_rt.dtype, dtype)
assert is_sampled_array(mx.nd, log_pdf_rt) == isSamples_any
if isSamples_any:
assert get_num_samples(mx.nd, log_pdf_rt) == num_samples
if np.issubdtype(dtype, np.float64):
rtol, atol = 1e-7, 1e-10
else:
rtol, atol = 1e-4, 1e-5
assert np.allclose(log_pdf_np, log_pdf_rt.asnumpy(), rtol=rtol, atol=atol)
@pytest.mark.parametrize(
"dtype, alpha, alpha_isSamples, beta, beta_isSamples, rv_shape, num_samples",[
(np.float64, np.random.rand(5,2), True, np.random.rand(2)+0.1, False, (3,2), 5),
(np.float64, np.random.rand(2), False, np.random.rand(5,2)+0.1, True, (3,2), 5),
(np.float64, np.random.rand(2), False, np.random.rand(2)+0.1, False, (3,2), 5),
(np.float64, np.random.rand(5,2), True, np.random.rand(5,3,2)+0.1, True, (3,2), 5),
(np.float32, np.random.rand(5,2), True, np.random.rand(2)+0.1, False, (3,2), 5),
])
def test_draw_samples(self, dtype, alpha, alpha_isSamples, beta,
beta_isSamples, rv_shape, num_samples):
n_dim = 1 + len(rv_shape)
out_shape = (num_samples,) + rv_shape
alpha_np = mx.nd.array(np.broadcast_to(numpy_array_reshape(alpha, alpha_isSamples, n_dim), shape=out_shape), dtype=dtype)
beta_np = mx.nd.array(np.broadcast_to(numpy_array_reshape(beta, beta_isSamples, n_dim), shape=out_shape), dtype=dtype)
gamma = Gamma.define_variable(shape=rv_shape, dtype=dtype).factor
alpha_mx = mx.nd.array(alpha, dtype=dtype)
if not alpha_isSamples:
alpha_mx = add_sample_dimension(mx.nd, alpha_mx)
beta_mx = mx.nd.array(beta, dtype=dtype)
if not beta_isSamples:
beta_mx = add_sample_dimension(mx.nd, beta_mx)
variables = {gamma.alpha.uuid: alpha_mx, gamma.beta.uuid: beta_mx}
mx.random.seed(0)
rv_samples_rt = gamma.draw_samples(
F=mx.nd, variables=variables, num_samples=num_samples)
mx.random.seed(0)
rv_samples_mx = mx.nd.random.gamma(alpha=alpha_np, beta=beta_np, dtype=dtype)
assert np.issubdtype(rv_samples_rt.dtype, dtype)
assert is_sampled_array(mx.nd, rv_samples_rt)
assert get_num_samples(mx.nd, rv_samples_rt) == num_samples
if np.issubdtype(dtype, np.float64):
rtol, atol = 1e-7, 1e-10
else:
rtol, atol = 1e-4, 1e-5
assert np.allclose(rv_samples_mx.asnumpy(), rv_samples_rt.asnumpy(), rtol=rtol, atol=atol)
|
[
"numpy.random.uniform",
"mxfusion.components.distributions.GammaMeanVariance.define_variable",
"mxfusion.components.distributions.Gamma.define_variable",
"mxfusion.components.variables.runtime_variable.is_sampled_array",
"mxnet.random.seed",
"numpy.random.rand",
"scipy.stats.gamma.logpdf",
"mxfusion.components.variables.runtime_variable.add_sample_dimension",
"mxnet.nd.array",
"mxnet.nd.random.gamma",
"mxfusion.util.testutils.numpy_array_reshape",
"pytest.mark.usefixtures",
"mxfusion.components.variables.runtime_variable.get_num_samples",
"numpy.issubdtype"
] |
[((358, 393), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""set_seed"""'], {}), "('set_seed')\n", (381, 393), False, 'import pytest\n'), ((1760, 1808), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['mean', 'mean_isSamples', 'n_dim'], {}), '(mean, mean_isSamples, n_dim)\n', (1779, 1808), False, 'from mxfusion.util.testutils import numpy_array_reshape\n'), ((1831, 1887), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['variance', 'variance_isSamples', 'n_dim'], {}), '(variance, variance_isSamples, n_dim)\n', (1850, 1887), False, 'from mxfusion.util.testutils import numpy_array_reshape\n'), ((1904, 1948), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['rv', 'rv_isSamples', 'n_dim'], {}), '(rv, rv_isSamples, n_dim)\n', (1923, 1948), False, 'from mxfusion.util.testutils import numpy_array_reshape\n'), ((2047, 2115), 'scipy.stats.gamma.logpdf', 'sp.stats.gamma.logpdf', (['rv_np'], {'a': 'alpha_np', 'loc': '(0)', 'scale': '(1.0 / beta_np)'}), '(rv_np, a=alpha_np, loc=0, scale=1.0 / beta_np)\n', (2068, 2115), True, 'import scipy as sp\n'), ((2132, 2162), 'mxnet.nd.array', 'mx.nd.array', (['mean'], {'dtype': 'dtype'}), '(mean, dtype=dtype)\n', (2143, 2162), True, 'import mxnet as mx\n'), ((2275, 2309), 'mxnet.nd.array', 'mx.nd.array', (['variance'], {'dtype': 'dtype'}), '(variance, dtype=dtype)\n', (2286, 2309), True, 'import mxnet as mx\n'), ((2428, 2456), 'mxnet.nd.array', 'mx.nd.array', (['rv'], {'dtype': 'dtype'}), '(rv, dtype=dtype)\n', (2439, 2456), True, 'import mxnet as mx\n'), ((2860, 2898), 'numpy.issubdtype', 'np.issubdtype', (['log_pdf_rt.dtype', 'dtype'], {}), '(log_pdf_rt.dtype, dtype)\n', (2873, 2898), True, 'import numpy as np\n'), ((3073, 3105), 'numpy.issubdtype', 'np.issubdtype', (['dtype', 'np.float64'], {}), '(dtype, np.float64)\n', (3086, 3105), True, 'import numpy as np\n'), ((4465, 4495), 'mxnet.nd.array', 'mx.nd.array', (['mean'], {'dtype': 'dtype'}), '(mean, dtype=dtype)\n', (4476, 4495), True, 'import mxnet as mx\n'), ((4608, 4642), 'mxnet.nd.array', 'mx.nd.array', (['variance'], {'dtype': 'dtype'}), '(variance, dtype=dtype)\n', (4619, 4642), True, 'import mxnet as mx\n'), ((4835, 4852), 'mxnet.random.seed', 'mx.random.seed', (['(0)'], {}), '(0)\n', (4849, 4852), True, 'import mxnet as mx\n'), ((4973, 4990), 'mxnet.random.seed', 'mx.random.seed', (['(0)'], {}), '(0)\n', (4987, 4990), True, 'import mxnet as mx\n'), ((5092, 5153), 'mxnet.nd.random.gamma', 'mx.nd.random.gamma', ([], {'alpha': 'alpha_np', 'beta': 'beta_np', 'dtype': 'dtype'}), '(alpha=alpha_np, beta=beta_np, dtype=dtype)\n', (5110, 5153), True, 'import mxnet as mx\n'), ((5170, 5211), 'numpy.issubdtype', 'np.issubdtype', (['rv_samples_rt.dtype', 'dtype'], {}), '(rv_samples_rt.dtype, dtype)\n', (5183, 5211), True, 'import numpy as np\n'), ((5227, 5265), 'mxfusion.components.variables.runtime_variable.is_sampled_array', 'is_sampled_array', (['mx.nd', 'rv_samples_rt'], {}), '(mx.nd, rv_samples_rt)\n', (5243, 5265), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((5346, 5378), 'numpy.issubdtype', 'np.issubdtype', (['dtype', 'np.float64'], {}), '(dtype, np.float64)\n', (5359, 5378), True, 'import numpy as np\n'), ((6868, 6918), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['alpha', 'alpha_isSamples', 'n_dim'], {}), '(alpha, alpha_isSamples, n_dim)\n', (6887, 6918), False, 'from mxfusion.util.testutils import 
numpy_array_reshape\n'), ((6937, 6985), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['beta', 'beta_isSamples', 'n_dim'], {}), '(beta, beta_isSamples, n_dim)\n', (6956, 6985), False, 'from mxfusion.util.testutils import numpy_array_reshape\n'), ((7002, 7046), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['rv', 'rv_isSamples', 'n_dim'], {}), '(rv, rv_isSamples, n_dim)\n', (7021, 7046), False, 'from mxfusion.util.testutils import numpy_array_reshape\n'), ((7068, 7136), 'scipy.stats.gamma.logpdf', 'sp.stats.gamma.logpdf', (['rv_np'], {'a': 'alpha_np', 'loc': '(0)', 'scale': '(1.0 / beta_np)'}), '(rv_np, a=alpha_np, loc=0, scale=1.0 / beta_np)\n', (7089, 7136), True, 'import scipy as sp\n'), ((7228, 7259), 'mxnet.nd.array', 'mx.nd.array', (['alpha'], {'dtype': 'dtype'}), '(alpha, dtype=dtype)\n', (7239, 7259), True, 'import mxnet as mx\n'), ((7371, 7401), 'mxnet.nd.array', 'mx.nd.array', (['beta'], {'dtype': 'dtype'}), '(beta, dtype=dtype)\n', (7382, 7401), True, 'import mxnet as mx\n'), ((7508, 7536), 'mxnet.nd.array', 'mx.nd.array', (['rv'], {'dtype': 'dtype'}), '(rv, dtype=dtype)\n', (7519, 7536), True, 'import mxnet as mx\n'), ((7812, 7850), 'numpy.issubdtype', 'np.issubdtype', (['log_pdf_rt.dtype', 'dtype'], {}), '(log_pdf_rt.dtype, dtype)\n', (7825, 7850), True, 'import numpy as np\n'), ((8025, 8057), 'numpy.issubdtype', 'np.issubdtype', (['dtype', 'np.float64'], {}), '(dtype, np.float64)\n', (8038, 8057), True, 'import numpy as np\n'), ((9372, 9403), 'mxnet.nd.array', 'mx.nd.array', (['alpha'], {'dtype': 'dtype'}), '(alpha, dtype=dtype)\n', (9383, 9403), True, 'import mxnet as mx\n'), ((9515, 9545), 'mxnet.nd.array', 'mx.nd.array', (['beta'], {'dtype': 'dtype'}), '(beta, dtype=dtype)\n', (9526, 9545), True, 'import mxnet as mx\n'), ((9720, 9737), 'mxnet.random.seed', 'mx.random.seed', (['(0)'], {}), '(0)\n', (9734, 9737), True, 'import mxnet as mx\n'), ((9858, 9875), 'mxnet.random.seed', 'mx.random.seed', (['(0)'], {}), '(0)\n', (9872, 9875), True, 'import mxnet as mx\n'), ((9900, 9961), 'mxnet.nd.random.gamma', 'mx.nd.random.gamma', ([], {'alpha': 'alpha_np', 'beta': 'beta_np', 'dtype': 'dtype'}), '(alpha=alpha_np, beta=beta_np, dtype=dtype)\n', (9918, 9961), True, 'import mxnet as mx\n'), ((9978, 10019), 'numpy.issubdtype', 'np.issubdtype', (['rv_samples_rt.dtype', 'dtype'], {}), '(rv_samples_rt.dtype, dtype)\n', (9991, 10019), True, 'import numpy as np\n'), ((10035, 10073), 'mxfusion.components.variables.runtime_variable.is_sampled_array', 'is_sampled_array', (['mx.nd', 'rv_samples_rt'], {}), '(mx.nd, rv_samples_rt)\n', (10051, 10073), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((10154, 10186), 'numpy.issubdtype', 'np.issubdtype', (['dtype', 'np.float64'], {}), '(dtype, np.float64)\n', (10167, 10186), True, 'import numpy as np\n'), ((2216, 2252), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'mean_mx'], {}), '(mx.nd, mean_mx)\n', (2236, 2252), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((2371, 2411), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'variance_mx'], {}), '(mx.nd, variance_mx)\n', (2391, 2411), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((2506, 2540), 
'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'rv_mx'], {}), '(mx.nd, rv_mx)\n', (2526, 2540), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((2557, 2660), 'mxfusion.components.distributions.GammaMeanVariance.define_variable', 'GammaMeanVariance.define_variable', ([], {'mean': 'mean_mx', 'variance': 'variance_mx', 'shape': 'rv_shape', 'dtype': 'dtype'}), '(mean=mean_mx, variance=variance_mx, shape\n =rv_shape, dtype=dtype)\n', (2590, 2660), False, 'from mxfusion.components.distributions import Gamma, GammaMeanVariance\n'), ((2914, 2949), 'mxfusion.components.variables.runtime_variable.is_sampled_array', 'is_sampled_array', (['mx.nd', 'log_pdf_rt'], {}), '(mx.nd, log_pdf_rt)\n', (2930, 2949), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((4377, 4439), 'mxfusion.components.distributions.GammaMeanVariance.define_variable', 'GammaMeanVariance.define_variable', ([], {'shape': 'rv_shape', 'dtype': 'dtype'}), '(shape=rv_shape, dtype=dtype)\n', (4410, 4439), False, 'from mxfusion.components.distributions import Gamma, GammaMeanVariance\n'), ((4549, 4585), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'mean_mx'], {}), '(mx.nd, mean_mx)\n', (4569, 4585), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((4704, 4744), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'variance_mx'], {}), '(mx.nd, variance_mx)\n', (4724, 4744), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((5281, 5318), 'mxfusion.components.variables.runtime_variable.get_num_samples', 'get_num_samples', (['mx.nd', 'rv_samples_rt'], {}), '(mx.nd, rv_samples_rt)\n', (5296, 5318), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((7151, 7201), 'mxfusion.components.distributions.Gamma.define_variable', 'Gamma.define_variable', ([], {'shape': 'rv_shape', 'dtype': 'dtype'}), '(shape=rv_shape, dtype=dtype)\n', (7172, 7201), False, 'from mxfusion.components.distributions import Gamma, GammaMeanVariance\n'), ((7315, 7352), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'alpha_mx'], {}), '(mx.nd, alpha_mx)\n', (7335, 7352), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((7455, 7491), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'beta_mx'], {}), '(mx.nd, beta_mx)\n', (7475, 7491), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((7586, 7620), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'rv_mx'], {}), '(mx.nd, rv_mx)\n', (7606, 7620), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((7866, 7901), 'mxfusion.components.variables.runtime_variable.is_sampled_array', 'is_sampled_array', (['mx.nd', 'log_pdf_rt'], {}), '(mx.nd, log_pdf_rt)\n', (7882, 7901), False, 
'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((9295, 9345), 'mxfusion.components.distributions.Gamma.define_variable', 'Gamma.define_variable', ([], {'shape': 'rv_shape', 'dtype': 'dtype'}), '(shape=rv_shape, dtype=dtype)\n', (9316, 9345), False, 'from mxfusion.components.distributions import Gamma, GammaMeanVariance\n'), ((9459, 9496), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'alpha_mx'], {}), '(mx.nd, alpha_mx)\n', (9479, 9496), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((9599, 9635), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'beta_mx'], {}), '(mx.nd, beta_mx)\n', (9619, 9635), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((10089, 10126), 'mxfusion.components.variables.runtime_variable.get_num_samples', 'get_num_samples', (['mx.nd', 'rv_samples_rt'], {}), '(mx.nd, rv_samples_rt)\n', (10104, 10126), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((3012, 3046), 'mxfusion.components.variables.runtime_variable.get_num_samples', 'get_num_samples', (['mx.nd', 'log_pdf_rt'], {}), '(mx.nd, log_pdf_rt)\n', (3027, 3046), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((576, 613), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(10)'], {'size': '(5, 2)'}), '(0, 10, size=(5, 2))\n', (593, 613), True, 'import numpy as np\n'), ((619, 651), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(2)'}), '(1, 10, size=2)\n', (636, 651), True, 'import numpy as np\n'), ((660, 700), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(5, 3, 2)'}), '(1, 10, size=(5, 3, 2))\n', (677, 700), True, 'import numpy as np\n'), ((729, 766), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(10)'], {'size': '(5, 2)'}), '(0, 10, size=(5, 2))\n', (746, 766), True, 'import numpy as np\n'), ((771, 803), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(2)'}), '(1, 10, size=2)\n', (788, 803), True, 'import numpy as np\n'), ((812, 849), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(3, 2)'}), '(1, 10, size=(3, 2))\n', (829, 849), True, 'import numpy as np\n'), ((880, 912), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(10)'], {'size': '(2)'}), '(0, 10, size=2)\n', (897, 912), True, 'import numpy as np\n'), ((921, 953), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(2)'}), '(1, 10, size=2)\n', (938, 953), True, 'import numpy as np\n'), ((962, 999), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(3, 2)'}), '(1, 10, size=(3, 2))\n', (979, 999), True, 'import numpy as np\n'), ((1030, 1067), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(10)'], {'size': '(5, 2)'}), '(0, 10, size=(5, 2))\n', (1047, 1067), True, 'import numpy as np\n'), ((1072, 1112), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(5, 3, 2)'}), '(1, 10, size=(5, 3, 2))\n', (1089, 1112), True, 'import numpy as np\n'), ((1116, 1156), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(5, 3, 2)'}), '(1, 10, size=(5, 3, 
2))\n', (1133, 1156), True, 'import numpy as np\n'), ((1185, 1222), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(10)'], {'size': '(5, 2)'}), '(0, 10, size=(5, 2))\n', (1202, 1222), True, 'import numpy as np\n'), ((1227, 1259), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(2)'}), '(1, 10, size=2)\n', (1244, 1259), True, 'import numpy as np\n'), ((1268, 1308), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(5, 3, 2)'}), '(1, 10, size=(5, 3, 2))\n', (1285, 1308), True, 'import numpy as np\n'), ((4140, 4188), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['mean', 'mean_isSamples', 'n_dim'], {}), '(mean, mean_isSamples, n_dim)\n', (4159, 4188), False, 'from mxfusion.util.testutils import numpy_array_reshape\n'), ((4271, 4327), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['variance', 'variance_isSamples', 'n_dim'], {}), '(variance, variance_isSamples, n_dim)\n', (4290, 4327), False, 'from mxfusion.util.testutils import numpy_array_reshape\n'), ((3422, 3442), 'numpy.random.rand', 'np.random.rand', (['(5)', '(2)'], {}), '(5, 2)\n', (3436, 3442), True, 'import numpy as np\n'), ((3511, 3528), 'numpy.random.rand', 'np.random.rand', (['(2)'], {}), '(2)\n', (3525, 3528), True, 'import numpy as np\n'), ((3600, 3617), 'numpy.random.rand', 'np.random.rand', (['(2)'], {}), '(2)\n', (3614, 3617), True, 'import numpy as np\n'), ((3688, 3708), 'numpy.random.rand', 'np.random.rand', (['(5)', '(2)'], {}), '(5, 2)\n', (3702, 3708), True, 'import numpy as np\n'), ((3780, 3800), 'numpy.random.rand', 'np.random.rand', (['(5)', '(2)'], {}), '(5, 2)\n', (3794, 3800), True, 'import numpy as np\n'), ((7964, 7998), 'mxfusion.components.variables.runtime_variable.get_num_samples', 'get_num_samples', (['mx.nd', 'log_pdf_rt'], {}), '(mx.nd, log_pdf_rt)\n', (7979, 7998), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, is_sampled_array, get_num_samples\n'), ((5706, 5743), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(10)'], {'size': '(5, 2)'}), '(0, 10, size=(5, 2))\n', (5723, 5743), True, 'import numpy as np\n'), ((5749, 5781), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(2)'}), '(1, 10, size=2)\n', (5766, 5781), True, 'import numpy as np\n'), ((5790, 5830), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(5, 3, 2)'}), '(1, 10, size=(5, 3, 2))\n', (5807, 5830), True, 'import numpy as np\n'), ((5859, 5896), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(10)'], {'size': '(5, 2)'}), '(0, 10, size=(5, 2))\n', (5876, 5896), True, 'import numpy as np\n'), ((5901, 5933), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(2)'}), '(1, 10, size=2)\n', (5918, 5933), True, 'import numpy as np\n'), ((5942, 5979), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(3, 2)'}), '(1, 10, size=(3, 2))\n', (5959, 5979), True, 'import numpy as np\n'), ((6010, 6042), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(10)'], {'size': '(2)'}), '(0, 10, size=2)\n', (6027, 6042), True, 'import numpy as np\n'), ((6051, 6083), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(2)'}), '(1, 10, size=2)\n', (6068, 6083), True, 'import numpy as np\n'), ((6092, 6129), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(3, 2)'}), '(1, 10, size=(3, 2))\n', (6109, 6129), True, 'import numpy as np\n'), ((6160, 6197), 'numpy.random.uniform', 
'np.random.uniform', (['(0)', '(10)'], {'size': '(5, 2)'}), '(0, 10, size=(5, 2))\n', (6177, 6197), True, 'import numpy as np\n'), ((6202, 6242), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(5, 3, 2)'}), '(1, 10, size=(5, 3, 2))\n', (6219, 6242), True, 'import numpy as np\n'), ((6246, 6286), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(5, 3, 2)'}), '(1, 10, size=(5, 3, 2))\n', (6263, 6286), True, 'import numpy as np\n'), ((6315, 6352), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(10)'], {'size': '(5, 2)'}), '(0, 10, size=(5, 2))\n', (6332, 6352), True, 'import numpy as np\n'), ((6357, 6389), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(2)'}), '(1, 10, size=2)\n', (6374, 6389), True, 'import numpy as np\n'), ((6398, 6438), 'numpy.random.uniform', 'np.random.uniform', (['(1)', '(10)'], {'size': '(5, 3, 2)'}), '(1, 10, size=(5, 3, 2))\n', (6415, 6438), True, 'import numpy as np\n'), ((9068, 9118), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['alpha', 'alpha_isSamples', 'n_dim'], {}), '(alpha, alpha_isSamples, n_dim)\n', (9087, 9118), False, 'from mxfusion.util.testutils import numpy_array_reshape\n'), ((9197, 9245), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['beta', 'beta_isSamples', 'n_dim'], {}), '(beta, beta_isSamples, n_dim)\n', (9216, 9245), False, 'from mxfusion.util.testutils import numpy_array_reshape\n'), ((8369, 8389), 'numpy.random.rand', 'np.random.rand', (['(5)', '(2)'], {}), '(5, 2)\n', (8383, 8389), True, 'import numpy as np\n'), ((8458, 8475), 'numpy.random.rand', 'np.random.rand', (['(2)'], {}), '(2)\n', (8472, 8475), True, 'import numpy as np\n'), ((8547, 8564), 'numpy.random.rand', 'np.random.rand', (['(2)'], {}), '(2)\n', (8561, 8564), True, 'import numpy as np\n'), ((8635, 8655), 'numpy.random.rand', 'np.random.rand', (['(5)', '(2)'], {}), '(5, 2)\n', (8649, 8655), True, 'import numpy as np\n'), ((8727, 8747), 'numpy.random.rand', 'np.random.rand', (['(5)', '(2)'], {}), '(5, 2)\n', (8741, 8747), True, 'import numpy as np\n'), ((3449, 3466), 'numpy.random.rand', 'np.random.rand', (['(2)'], {}), '(2)\n', (3463, 3466), True, 'import numpy as np\n'), ((3537, 3557), 'numpy.random.rand', 'np.random.rand', (['(5)', '(2)'], {}), '(5, 2)\n', (3551, 3557), True, 'import numpy as np\n'), ((3626, 3643), 'numpy.random.rand', 'np.random.rand', (['(2)'], {}), '(2)\n', (3640, 3643), True, 'import numpy as np\n'), ((3715, 3738), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (3729, 3738), True, 'import numpy as np\n'), ((3807, 3824), 'numpy.random.rand', 'np.random.rand', (['(2)'], {}), '(2)\n', (3821, 3824), True, 'import numpy as np\n'), ((8396, 8413), 'numpy.random.rand', 'np.random.rand', (['(2)'], {}), '(2)\n', (8410, 8413), True, 'import numpy as np\n'), ((8484, 8504), 'numpy.random.rand', 'np.random.rand', (['(5)', '(2)'], {}), '(5, 2)\n', (8498, 8504), True, 'import numpy as np\n'), ((8573, 8590), 'numpy.random.rand', 'np.random.rand', (['(2)'], {}), '(2)\n', (8587, 8590), True, 'import numpy as np\n'), ((8662, 8685), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (8676, 8685), True, 'import numpy as np\n'), ((8754, 8771), 'numpy.random.rand', 'np.random.rand', (['(2)'], {}), '(2)\n', (8768, 8771), True, 'import numpy as np\n')]
|
import hashlib
import os
from .. import FileBuilder
from .file_builder_test import FileBuilderTest
class HashDirsTest(FileBuilderTest):
"""Tests a hash directory build operation.
The build operation computes SHA-256 hashes for all of the files and
directories in a given root directory. A directory's hash
incorporates the hashes and names of the files and directories in
the directory.
This tests nested subbuilds, as each directory and file hash
operation has its own subbuild.
"""
def setUp(self):
super().setUp()
self._build_number = 0
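        # Each hash result records the build number in which it was computed,
        # letting the tests below tell cached results from recomputed ones.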
self._input_dir = os.path.join(self._temp_dir, 'Input')
os.mkdir(self._input_dir)
def _hash_file(self, builder, filename):
"""Build file function that computes a file's hash."""
digest = hashlib.sha256()
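        # Stream the file in 1 KiB chunks so large inputs never have to fit
        # in memory at once.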
with builder.read_binary(filename) as file_:
bytes_ = file_.read(1024)
while len(bytes_) > 0:
digest.update(bytes_)
bytes_ = file_.read(1024)
hash_ = digest.hexdigest()
return {
'build': self._build_number,
'hash': hash_,
}
def _hash_dirs(self, builder, dir_):
"""Subbuild function that computes a directory's hash."""
digest = hashlib.sha256()
subfile_results = {}
for subfile in sorted(builder.list_dir(dir_)):
digest.update(subfile.encode())
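            # The entry's name feeds the digest along with its hash below,
            # so renaming a child changes the parent directory's hash.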
absolute_subfile = os.path.join(dir_, subfile)
if builder.is_file(absolute_subfile):
subfile_result = builder.subbuild(
'hash_file', self._hash_file, absolute_subfile)
else:
subfile_result = builder.subbuild(
'hash_dirs', self._hash_dirs, absolute_subfile)
subfile_results[subfile] = subfile_result
digest.update(subfile_result['hash'].encode())
hash_ = digest.hexdigest()
return {
'build': self._build_number,
'hash': hash_,
'subfiles': subfile_results,
}
def _build(self):
"""Execute the "hash dirs" build operation."""
self._build_number += 1
return FileBuilder.build(
self._cache_filename, 'hash_dirs_test', self._hash_dirs,
self._input_dir)
def _file_hash(self, hashes, *components):
"""Return the item in ``hashes`` for the specified file.
Return the ``'build'`` and ``'hash'`` entries of the item in
``hashes`` for ``os.path.join(self._input_dir, *components)``,
if any.
Returns:
dict<str, object>: The result.
"""
subhashes = hashes
for component in components:
if ('subfiles' not in subhashes or
component not in subhashes['subfiles']):
return None
subhashes = subhashes['subfiles'][component]
return {
'build': subhashes['build'],
'hash': subhashes['hash'],
}
def test_hash_dirs(self):
"""Test ``FileBuilder`` with the hash directory build operation."""
os.makedirs(os.path.join(self._input_dir, 'Book', 'Bus', 'Apple'))
os.mkdir(os.path.join(self._input_dir, 'Yarn'))
os.mkdir(os.path.join(self._input_dir, 'Window'))
self._write(
os.path.join(self._input_dir, 'Book', 'Cartwheel.txt'), 'Circle')
self._write(os.path.join(self._input_dir, 'Book', 'Igloo.txt'), 'Wide')
self._write(
os.path.join(self._input_dir, 'Book', 'Bus', 'Apple', 'Leaf.txt'),
'Alphabet')
self._write(
os.path.join(self._input_dir, 'Window', 'Cabinet.txt'), 'Orange')
hashes1 = self._build()
root_hash1 = self._file_hash(hashes1)
book_hash1 = self._file_hash(hashes1, 'Book')
bus_hash1 = self._file_hash(hashes1, 'Book', 'Bus')
apple_hash1 = self._file_hash(hashes1, 'Book', 'Bus', 'Apple')
yarn_hash1 = self._file_hash(hashes1, 'Yarn')
window_hash1 = self._file_hash(hashes1, 'Window')
cartwheel_hash1 = self._file_hash(hashes1, 'Book', 'Cartwheel.txt')
igloo_hash1 = self._file_hash(hashes1, 'Book', 'Igloo.txt')
leaf_hash1 = self._file_hash(
hashes1, 'Book', 'Bus', 'Apple', 'Leaf.txt')
cabinet_hash1 = self._file_hash(hashes1, 'Window', 'Cabinet.txt')
self.assertIsNotNone(root_hash1)
self.assertIsNotNone(book_hash1)
self.assertIsNotNone(bus_hash1)
self.assertIsNotNone(apple_hash1)
self.assertIsNotNone(yarn_hash1)
self.assertIsNotNone(window_hash1)
self.assertIsNotNone(cartwheel_hash1)
self.assertIsNotNone(igloo_hash1)
self.assertIsNotNone(leaf_hash1)
self.assertIsNotNone(cabinet_hash1)
self._write(
os.path.join(self._input_dir, 'Window', 'Cabinet.txt'), 'Bicycle')
hashes2 = self._build()
root_hash2 = self._file_hash(hashes2)
book_hash2 = self._file_hash(hashes2, 'Book')
bus_hash2 = self._file_hash(hashes2, 'Book', 'Bus')
apple_hash2 = self._file_hash(hashes2, 'Book', 'Bus', 'Apple')
yarn_hash2 = self._file_hash(hashes2, 'Yarn')
window_hash2 = self._file_hash(hashes2, 'Window')
cartwheel_hash2 = self._file_hash(hashes2, 'Book', 'Cartwheel.txt')
igloo_hash2 = self._file_hash(hashes2, 'Book', 'Igloo.txt')
leaf_hash2 = self._file_hash(
hashes2, 'Book', 'Bus', 'Apple', 'Leaf.txt')
cabinet_hash2 = self._file_hash(hashes2, 'Window', 'Cabinet.txt')
self.assertNotEqual(root_hash1['hash'], root_hash2['hash'])
self.assertEqual(2, root_hash2['build'])
self.assertNotEqual(window_hash1['hash'], window_hash2['hash'])
self.assertEqual(2, window_hash2['build'])
self.assertNotEqual(cabinet_hash1['hash'], cabinet_hash2['hash'])
self.assertEqual(2, cabinet_hash2['build'])
self.assertEqual(book_hash1, book_hash2)
self.assertEqual(bus_hash1, bus_hash2)
self.assertEqual(apple_hash1, apple_hash2)
self.assertEqual(yarn_hash1, yarn_hash2)
self.assertEqual(cartwheel_hash1, cartwheel_hash2)
self.assertEqual(igloo_hash1, igloo_hash2)
self.assertEqual(leaf_hash1, leaf_hash2)
self._write(
os.path.join(self._input_dir, 'Book', 'Bus', 'Clock.txt'),
'Flower')
self._write(os.path.join(self._input_dir, 'Yarn', 'Road.txt'), 'Sky')
os.mkdir(os.path.join(self._input_dir, 'Fruit'))
os.remove(os.path.join(self._input_dir, 'Window', 'Cabinet.txt'))
hashes3 = self._build()
root_hash3 = self._file_hash(hashes3)
book_hash3 = self._file_hash(hashes3, 'Book')
bus_hash3 = self._file_hash(hashes3, 'Book', 'Bus')
apple_hash3 = self._file_hash(hashes3, 'Book', 'Bus', 'Apple')
yarn_hash3 = self._file_hash(hashes3, 'Yarn')
window_hash3 = self._file_hash(hashes3, 'Window')
fruit_hash3 = self._file_hash(hashes3, 'Fruit')
cartwheel_hash3 = self._file_hash(hashes3, 'Book', 'Cartwheel.txt')
igloo_hash3 = self._file_hash(hashes3, 'Book', 'Igloo.txt')
leaf_hash3 = self._file_hash(
hashes3, 'Book', 'Bus', 'Apple', 'Leaf.txt')
cabinet_hash3 = self._file_hash(hashes3, 'Window', 'Cabinet.txt')
clock_hash3 = self._file_hash(hashes3, 'Book', 'Bus', 'Clock.txt')
road_hash3 = self._file_hash(hashes3, 'Yarn', 'Road.txt')
self.assertNotEqual(root_hash2['hash'], root_hash3['hash'])
self.assertEqual(3, root_hash3['build'])
self.assertNotEqual(book_hash2['hash'], book_hash3['hash'])
self.assertEqual(3, book_hash3['build'])
self.assertNotEqual(bus_hash2['hash'], bus_hash3['hash'])
self.assertEqual(3, bus_hash3['build'])
self.assertNotEqual(yarn_hash2['hash'], yarn_hash3['hash'])
self.assertEqual(3, yarn_hash3['build'])
self.assertNotEqual(window_hash2['hash'], window_hash3['hash'])
self.assertEqual(3, window_hash3['build'])
self.assertIsNone(cabinet_hash3)
self.assertEqual(apple_hash2, apple_hash3)
self.assertEqual(cartwheel_hash2, cartwheel_hash3)
self.assertEqual(igloo_hash2, igloo_hash3)
self.assertEqual(leaf_hash2, leaf_hash3)
self.assertEqual(3, fruit_hash3['build'])
self.assertEqual(3, clock_hash3['build'])
self.assertEqual(3, road_hash3['build'])
hashes4 = self._build()
root_hash4 = self._file_hash(hashes4)
book_hash4 = self._file_hash(hashes4, 'Book')
bus_hash4 = self._file_hash(hashes4, 'Book', 'Bus')
apple_hash4 = self._file_hash(hashes4, 'Book', 'Bus', 'Apple')
yarn_hash4 = self._file_hash(hashes4, 'Yarn')
window_hash4 = self._file_hash(hashes4, 'Window')
fruit_hash4 = self._file_hash(hashes4, 'Fruit')
cartwheel_hash4 = self._file_hash(hashes4, 'Book', 'Cartwheel.txt')
igloo_hash4 = self._file_hash(hashes4, 'Book', 'Igloo.txt')
leaf_hash4 = self._file_hash(
hashes4, 'Book', 'Bus', 'Apple', 'Leaf.txt')
clock_hash4 = self._file_hash(hashes4, 'Book', 'Bus', 'Clock.txt')
road_hash4 = self._file_hash(hashes4, 'Yarn', 'Road.txt')
self.assertNotEqual(root_hash3, root_hash4)
self.assertEqual(book_hash3, book_hash4)
self.assertEqual(bus_hash3, bus_hash4)
self.assertEqual(apple_hash3, apple_hash4)
self.assertEqual(yarn_hash3, yarn_hash4)
self.assertEqual(window_hash3, window_hash4)
self.assertEqual(fruit_hash3, fruit_hash4)
self.assertEqual(cartwheel_hash3, cartwheel_hash4)
self.assertEqual(igloo_hash3, igloo_hash4)
self.assertEqual(leaf_hash3, leaf_hash4)
self.assertEqual(clock_hash3, clock_hash4)
self.assertEqual(road_hash3, road_hash4)
hashes5 = self._build()
self.assertEqual(5, hashes5['build'])
self.assertEqual(3, hashes5['subfiles']['Book']['build'])
hashes6 = self._build()
self.assertEqual(6, hashes6['build'])
self.assertEqual(3, hashes6['subfiles']['Book']['build'])
|
[
"os.mkdir",
"hashlib.sha256",
"os.path.join"
] |
[((624, 661), 'os.path.join', 'os.path.join', (['self._temp_dir', '"""Input"""'], {}), "(self._temp_dir, 'Input')\n", (636, 661), False, 'import os\n'), ((670, 695), 'os.mkdir', 'os.mkdir', (['self._input_dir'], {}), '(self._input_dir)\n', (678, 695), False, 'import os\n'), ((822, 838), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (836, 838), False, 'import hashlib\n'), ((1300, 1316), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (1314, 1316), False, 'import hashlib\n'), ((1476, 1503), 'os.path.join', 'os.path.join', (['dir_', 'subfile'], {}), '(dir_, subfile)\n', (1488, 1503), False, 'import os\n'), ((3171, 3224), 'os.path.join', 'os.path.join', (['self._input_dir', '"""Book"""', '"""Bus"""', '"""Apple"""'], {}), "(self._input_dir, 'Book', 'Bus', 'Apple')\n", (3183, 3224), False, 'import os\n'), ((3243, 3280), 'os.path.join', 'os.path.join', (['self._input_dir', '"""Yarn"""'], {}), "(self._input_dir, 'Yarn')\n", (3255, 3280), False, 'import os\n'), ((3299, 3338), 'os.path.join', 'os.path.join', (['self._input_dir', '"""Window"""'], {}), "(self._input_dir, 'Window')\n", (3311, 3338), False, 'import os\n'), ((3373, 3427), 'os.path.join', 'os.path.join', (['self._input_dir', '"""Book"""', '"""Cartwheel.txt"""'], {}), "(self._input_dir, 'Book', 'Cartwheel.txt')\n", (3385, 3427), False, 'import os\n'), ((3459, 3509), 'os.path.join', 'os.path.join', (['self._input_dir', '"""Book"""', '"""Igloo.txt"""'], {}), "(self._input_dir, 'Book', 'Igloo.txt')\n", (3471, 3509), False, 'import os\n'), ((3552, 3617), 'os.path.join', 'os.path.join', (['self._input_dir', '"""Book"""', '"""Bus"""', '"""Apple"""', '"""Leaf.txt"""'], {}), "(self._input_dir, 'Book', 'Bus', 'Apple', 'Leaf.txt')\n", (3564, 3617), False, 'import os\n'), ((3676, 3730), 'os.path.join', 'os.path.join', (['self._input_dir', '"""Window"""', '"""Cabinet.txt"""'], {}), "(self._input_dir, 'Window', 'Cabinet.txt')\n", (3688, 3730), False, 'import os\n'), ((4887, 4941), 'os.path.join', 'os.path.join', (['self._input_dir', '"""Window"""', '"""Cabinet.txt"""'], {}), "(self._input_dir, 'Window', 'Cabinet.txt')\n", (4899, 4941), False, 'import os\n'), ((6399, 6456), 'os.path.join', 'os.path.join', (['self._input_dir', '"""Book"""', '"""Bus"""', '"""Clock.txt"""'], {}), "(self._input_dir, 'Book', 'Bus', 'Clock.txt')\n", (6411, 6456), False, 'import os\n'), ((6500, 6549), 'os.path.join', 'os.path.join', (['self._input_dir', '"""Yarn"""', '"""Road.txt"""'], {}), "(self._input_dir, 'Yarn', 'Road.txt')\n", (6512, 6549), False, 'import os\n'), ((6575, 6613), 'os.path.join', 'os.path.join', (['self._input_dir', '"""Fruit"""'], {}), "(self._input_dir, 'Fruit')\n", (6587, 6613), False, 'import os\n'), ((6633, 6687), 'os.path.join', 'os.path.join', (['self._input_dir', '"""Window"""', '"""Cabinet.txt"""'], {}), "(self._input_dir, 'Window', 'Cabinet.txt')\n", (6645, 6687), False, 'import os\n')]
|
# ======================================================================
# Program Alarm
# Advent of Code 2019 Day 02 -- <NAME> -- https://adventofcode.com
#
# Computer simulation by Dr. <NAME> III
# ======================================================================
# ======================================================================
# a o c _ p a . p y
# ======================================================================
"Solve the Program Alarm problem for Advent of Code 2019 day 03"
# ----------------------------------------------------------------------
# import
# ----------------------------------------------------------------------
import argparse
import sys
import intcode
# ----------------------------------------------------------------------
# constants
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
# parse_commnd_line
# ----------------------------------------------------------------------
def parse_command_line():
"Parse the command line options"
# 1. Create the command line parser
desc = 'Program Alarm - day 02 of Advent of Code 2019'
sample = 'sample: python aoc_pa.py input.txt'
parser = argparse.ArgumentParser(description=desc,
epilog=sample)
parser.add_argument('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Print status messages to stdout')
parser.add_argument('-p', '--part', action='store', default=1, type=int,
dest='part', help='Puzzle Part (1 or 2)')
parser.add_argument('-t', '--max-time', action='store', default=0, type=int,
dest='maxtime', help='Maximum timer ticks before quitting')
parser.add_argument('filepath', metavar='FILENAME', action='store', type=str,
help="Location of puzzle input")
# 2. Get the options and arguments
return parser.parse_args()
# ----------------------------------------------------------------------
# part_one
# ----------------------------------------------------------------------
def part_one(args, input_lines):
"Process part one of the puzzle"
    # 1. Optionally select fixes
noun = None
verb = None
if len(input_lines[0]) > 100:
print("Fixing up input at 1 and 2 to be 12 and 2")
noun = 12
verb = 2
    # 2. Create the computer with fixes
computer = intcode.IntCode(text=input_lines[0], noun=noun, verb=verb)
if args.verbose:
print("The computer has %d positions" % len(computer.positions))
print(computer.instructions())
# 3. Run the computer until it stops
solution = computer.run(max_steps=args.maxtime, watch=args.verbose)
# 4. Check it ran out of time
if solution is None:
print("No solution found after %d steps" % args.maxtime)
# 5. Check it stopped with an error
elif solution != intcode.STOP_HLT:
print("Computer alarm %d" % solution)
solution = None
# 6. The solution is at position 0
else:
solution = computer.fetch(intcode.ADDR_RSLT)
print("The solution is %d" % (solution))
# 7. Return result
return solution is not None
# ----------------------------------------------------------------------
# part_two
# ----------------------------------------------------------------------
def part_two(args, input_lines):
"Process part two of the puzzle"
# 1. Set target
target = 19690720
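    # 19690720 is the Apollo 11 landing date (1969-07-20), the output that
    # part two of the puzzle asks for.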
if args.verbose:
print("The target is %d" % target)
# 2. Loop over possible nouns
for noun in range(100):
# 3. Loop over possible verbs
if args.verbose:
print("Checking noun = %d" % noun)
for verb in range(100):
# 4. Create the computer
computer = intcode.IntCode(text=input_lines[0], noun=noun, verb=verb)
# 5. Run the computer until it stops
solution = computer.run(max_steps=args.maxtime)
# 6. Check it ran out of time
if solution is None:
print("No solution found after %d steps for noun = %d and verb = %d" %
(args.maxtime, noun, verb))
return False
# 7. Check it stopped with an error
if solution != intcode.STOP_HLT:
print("Computer alarm %d with noun = %d and verb = %d" %
(solution, noun, verb))
return False
# 8. The solution is at position 0
solution = computer.fetch(intcode.ADDR_RSLT)
if solution == target:
print("Target of %d found with noun = %d and verb = %d" %
(solution, noun, verb))
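                # The puzzle defines the answer as 100 * noun + verb.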
print("Solution = %d" % (100 * noun + verb))
return True
# 9. Unsuccessful
print("Target of %d not found" % target)
return False
# ----------------------------------------------------------------------
# from_file
# ----------------------------------------------------------------------
def from_file(filepath):
"Read the file"
return from_text(open(filepath).read())
# ----------------------------------------------------------------------
# from_text
# ----------------------------------------------------------------------
def from_text(text):
"Break the text into trimed, non-comment lines"
# 1. We start with no lines
lines = []
# 2. Loop for lines in the text
for line in text.split('\n'):
# 3. But ignore blank and non-claim lines
line = line.rstrip(' \r')
if not line:
continue
if line.startswith('#'):
continue
# 4. Add the line
lines.append(line)
# 5. Return a list of clean lines
return lines
# ----------------------------------------------------------------------
# main
# ----------------------------------------------------------------------
def main():
"""Read Program Alarm and solve it"""
# 1. Get the command line options
args = parse_command_line()
# 2. Read the puzzle file
input_text = from_file(args.filepath)
    # 3. Process the appropriate part of the puzzle
if args.part == 1:
result = part_one(args, input_text)
else:
result = part_two(args, input_text)
    # 4. Set return code (0 if solution found, 2 if not)
if result:
sys.exit(0)
sys.exit(2)
# ----------------------------------------------------------------------
# module initialization
# ----------------------------------------------------------------------
if __name__ == '__main__':
main()
# ======================================================================
# end a o c _ p a . p y end
# ======================================================================
|
[
"intcode.IntCode",
"argparse.ArgumentParser",
"sys.exit"
] |
[((1497, 1553), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'desc', 'epilog': 'sample'}), '(description=desc, epilog=sample)\n', (1520, 1553), False, 'import argparse\n'), ((2839, 2897), 'intcode.IntCode', 'intcode.IntCode', ([], {'text': 'input_lines[0]', 'noun': 'noun', 'verb': 'verb'}), '(text=input_lines[0], noun=noun, verb=verb)\n', (2854, 2897), False, 'import intcode\n'), ((7224, 7235), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (7232, 7235), False, 'import sys\n'), ((7207, 7218), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (7215, 7218), False, 'import sys\n'), ((4341, 4399), 'intcode.IntCode', 'intcode.IntCode', ([], {'text': 'input_lines[0]', 'noun': 'noun', 'verb': 'verb'}), '(text=input_lines[0], noun=noun, verb=verb)\n', (4356, 4399), False, 'import intcode\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Author: <NAME>, Finland 2014-2018
#
# This file is part of Kunquat.
#
# CC0 1.0 Universal, http://creativecommons.org/publicdomain/zero/1.0/
#
# To the extent possible under law, Kunquat Affirmers have waived all
# copyright and related or neighboring rights to Kunquat.
#
from copy import deepcopy
from optparse import Option, SUPPRESS_HELP
import ast
import os
import os.path
import shutil
import subprocess
import sys
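# Keep the build script from littering the source tree with .pyc files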
sys.dont_write_bytecode = True
import support.fabricate as fabricate
import scripts.command as command
from scripts.cc import get_cc
import scripts.configure as configure
from scripts.build_libs import build_libkunquat, build_libkunquatfile
from scripts.test_libkunquat import test_libkunquat
from scripts.build_examples import build_examples
from scripts.install_libs import install_libkunquat, install_libkunquatfile
from scripts.install_examples import install_examples
from scripts.install_share import install_share
import options
# Add definitions of options.py as command line switches
cmdline_opts = []
opt_vars = []
options_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'options.py')
with open(options_path) as f:
data = f.read()
raw_entries = [e.strip() for e in data.split('\n\n') if e.strip()]
type_names = { str: 'string', int: 'int' }
for raw_entry in raw_entries:
lines = raw_entry.split('\n')
desc_lines = lines[:-1]
def_line = lines[-1]
desc = '\n'.join(dl[1:].strip() for dl in desc_lines)
var_name, _, value_str = (s.strip() for s in def_line.partition('='))
name = '--' + var_name.replace('_', '-')
value = ast.literal_eval(value_str)
opt_vars.append(var_name)
if type(value) == bool:
first_word = var_name.split('_')[0]
if first_word == 'enable':
negated_name = name.replace('enable', 'disable', 1)
elif first_word == 'with':
negated_name = name.replace('with', 'without', 1)
else:
assert False
        if value:
negated_desc = (desc.replace('enable', 'disable', 1)
if desc.startswith('enable') else ('do not ' + desc))
full_desc = '{} (default: enabled)'.format(negated_desc)
neg_opt = Option(
negated_name,
action='store_false',
dest=var_name,
help=full_desc)
pos_opt = Option(
name, action='store_true', dest=var_name, help=SUPPRESS_HELP)
else:
full_desc = '{} (default: disabled)'.format(desc)
pos_opt = Option(
name, action='store_true', dest=var_name, help=full_desc)
neg_opt = Option(
negated_name,
action='store_false',
dest=var_name,
help=SUPPRESS_HELP)
cmdline_opts.extend((neg_opt, pos_opt))
    elif value is None:
if var_name == 'cc':
desc = ('select C compiler'
' (supported values: gcc (default), clang)')
option = Option(name, type='choice', choices=['gcc', 'clang'], help=desc)
cmdline_opts.append(option)
else:
assert False
else:
type_name = type_names[type(value)]
full_desc = '{} (default: {})'.format(desc, value)
option = Option(name, type=type_name, help=full_desc)
cmdline_opts.append(option)
def process_cmd_line():
for var_name in opt_vars:
override = fabricate.main.options.__dict__[var_name]
        if override is not None:
options.__dict__[var_name] = override
# Make sure the installation prefix is absolute
options.prefix = os.path.abspath(os.path.expanduser(options.prefix))
class PrettyBuilder(fabricate.Builder):
def __init__(self, *args, **kwargs):
fabricate.Builder.__init__(self, *args, **kwargs)
def echo(self, message):
'''Suppress printing of an empty string.'''
if message:
fabricate.Builder.echo(self, message)
def build():
process_cmd_line()
if options.enable_python_tests and options.enable_long_tests:
python_modules = ['scripts', 'kunquat']
fabricate.run('pylint', *python_modules)
fabricate.run('flake8', *python_modules)
cc = get_cc(options.cc)
cc.set_debug(options.enable_debug)
if options.enable_debug_asserts:
cc.add_define('ENABLE_DEBUG_ASSERTS')
#if options.enable_profiling:
# compile_flags.append('-pg')
# link_flags.append('-pg')
if options.enable_native_arch:
cc.set_native_arch()
if options.optimise not in range(5):
print('Unsupported optimisation level: {}'.format(options.optimise),
file=sys.stderr)
sys.exit(1)
cc.set_optimisation(options.optimise)
builder = PrettyBuilder()
if options.enable_python_bindings:
try:
python_cmd = command.PythonCommand()
except RuntimeError:
print('Python bindings were requested but Python 2.7 was not found.',
file=sys.stderr)
sys.exit(1)
if options.enable_tests_mem_debug:
try:
output = subprocess.check_output(
['valgrind', '--version'], stderr=subprocess.STDOUT)
except (OSError, subprocess.CalledProcessError):
output = b''
if not output.startswith(b'valgrind'):
print('Memory debugging of libkunquat tests was requested'
' but Valgrind was not found.',
file=sys.stderr)
sys.exit(1)
# Check dependencies
configure.test_add_common_external_deps(builder, options, cc)
# Build libkunquat
if options.enable_libkunquat:
libkunquat_cc = deepcopy(cc)
configure.test_add_libkunquat_external_deps(builder, options, libkunquat_cc)
build_libkunquat(builder, options, libkunquat_cc)
# Build libkunquatfile
if options.enable_libkunquatfile:
libkunquatfile_cc = deepcopy(cc)
configure.test_add_libkunquatfile_external_deps(
builder, options, libkunquatfile_cc)
build_libkunquatfile(builder, options, libkunquatfile_cc)
# Run tests
if options.enable_tests:
test_cc = deepcopy(cc)
configure.test_add_test_deps(builder, options, test_cc)
test_libkunquat(builder, options, test_cc)
if options.enable_python_tests:
fabricate.run(
'env',
'LD_LIBRARY_PATH=build/src/lib',
'python3',
'-m',
'unittest',
'discover',
'-v')
# Build examples
if options.enable_examples:
build_examples(builder)
def clean():
if os.path.exists('build'):
# Remove Python-specific build directories first
for name in os.listdir('build'):
expected_suffix = '-{}.{}'.format(sys.version_info[0], sys.version_info[1])
if name.endswith(expected_suffix) or name == 'lib':
path = os.path.join('build', name)
shutil.rmtree(path)
fabricate.autoclean()
def install():
build()
install_builder = None
if options.enable_libkunquat:
install_libkunquat(
install_builder, options.prefix, options.enable_libkunquat_dev)
if options.enable_libkunquatfile:
install_libkunquatfile(
install_builder, options.prefix, options.enable_libkunquatfile_dev)
if options.enable_examples:
install_examples(install_builder, options.prefix)
install_share(install_builder, options.prefix)
if options.enable_python_bindings:
python_cmd = command.PythonCommand()
args = ['py-setup.py', 'install', '--prefix={}'.format(options.prefix)]
if not options.enable_export:
args.append('--disable-export')
if not options.enable_player:
args.append('--disable-player')
if not options.enable_tracker:
args.append('--disable-tracker')
try:
python_cmd.run(install_builder, *args)
except subprocess.CalledProcessError:
sys.exit(1)
fabricate.main(extra_options=cmdline_opts)
|
[
"scripts.configure.test_add_libkunquatfile_external_deps",
"support.fabricate.main",
"scripts.install_share.install_share",
"support.fabricate.Builder.echo",
"scripts.cc.get_cc",
"shutil.rmtree",
"os.path.join",
"scripts.command.PythonCommand",
"scripts.test_libkunquat.test_libkunquat",
"scripts.configure.test_add_test_deps",
"scripts.install_libs.install_libkunquat",
"os.path.exists",
"support.fabricate.run",
"copy.deepcopy",
"support.fabricate.autoclean",
"scripts.install_libs.install_libkunquatfile",
"scripts.install_examples.install_examples",
"os.path.realpath",
"subprocess.check_output",
"scripts.build_examples.build_examples",
"scripts.configure.test_add_libkunquat_external_deps",
"scripts.build_libs.build_libkunquatfile",
"os.listdir",
"sys.exit",
"scripts.configure.test_add_common_external_deps",
"support.fabricate.Builder.__init__",
"scripts.build_libs.build_libkunquat",
"optparse.Option",
"ast.literal_eval",
"os.path.expanduser"
] |
[((8568, 8610), 'support.fabricate.main', 'fabricate.main', ([], {'extra_options': 'cmdline_opts'}), '(extra_options=cmdline_opts)\n', (8582, 8610), True, 'import support.fabricate as fabricate\n'), ((4591, 4609), 'scripts.cc.get_cc', 'get_cc', (['options.cc'], {}), '(options.cc)\n', (4597, 4609), False, 'from scripts.cc import get_cc\n'), ((5939, 6000), 'scripts.configure.test_add_common_external_deps', 'configure.test_add_common_external_deps', (['builder', 'options', 'cc'], {}), '(builder, options, cc)\n', (5978, 6000), True, 'import scripts.configure as configure\n'), ((7120, 7143), 'os.path.exists', 'os.path.exists', (['"""build"""'], {}), "('build')\n", (7134, 7143), False, 'import os\n'), ((7487, 7508), 'support.fabricate.autoclean', 'fabricate.autoclean', ([], {}), '()\n', (7506, 7508), True, 'import support.fabricate as fabricate\n'), ((7972, 8018), 'scripts.install_share.install_share', 'install_share', (['install_builder', 'options.prefix'], {}), '(install_builder, options.prefix)\n', (7985, 8018), False, 'from scripts.install_share import install_share\n'), ((1149, 1175), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1165, 1175), False, 'import os\n'), ((1700, 1727), 'ast.literal_eval', 'ast.literal_eval', (['value_str'], {}), '(value_str)\n', (1716, 1727), False, 'import ast\n'), ((3999, 4033), 'os.path.expanduser', 'os.path.expanduser', (['options.prefix'], {}), '(options.prefix)\n', (4017, 4033), False, 'import os\n'), ((4127, 4176), 'support.fabricate.Builder.__init__', 'fabricate.Builder.__init__', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (4153, 4176), True, 'import support.fabricate as fabricate\n'), ((4491, 4531), 'support.fabricate.run', 'fabricate.run', (['"""pylint"""', '*python_modules'], {}), "('pylint', *python_modules)\n", (4504, 4531), True, 'import support.fabricate as fabricate\n'), ((4540, 4580), 'support.fabricate.run', 'fabricate.run', (['"""flake8"""', '*python_modules'], {}), "('flake8', *python_modules)\n", (4553, 4580), True, 'import support.fabricate as fabricate\n'), ((5065, 5076), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5073, 5076), False, 'import sys\n'), ((6083, 6095), 'copy.deepcopy', 'deepcopy', (['cc'], {}), '(cc)\n', (6091, 6095), False, 'from copy import deepcopy\n'), ((6104, 6180), 'scripts.configure.test_add_libkunquat_external_deps', 'configure.test_add_libkunquat_external_deps', (['builder', 'options', 'libkunquat_cc'], {}), '(builder, options, libkunquat_cc)\n', (6147, 6180), True, 'import scripts.configure as configure\n'), ((6190, 6239), 'scripts.build_libs.build_libkunquat', 'build_libkunquat', (['builder', 'options', 'libkunquat_cc'], {}), '(builder, options, libkunquat_cc)\n', (6206, 6239), False, 'from scripts.build_libs import build_libkunquat, build_libkunquatfile\n'), ((6334, 6346), 'copy.deepcopy', 'deepcopy', (['cc'], {}), '(cc)\n', (6342, 6346), False, 'from copy import deepcopy\n'), ((6355, 6443), 'scripts.configure.test_add_libkunquatfile_external_deps', 'configure.test_add_libkunquatfile_external_deps', (['builder', 'options', 'libkunquatfile_cc'], {}), '(builder, options,\n libkunquatfile_cc)\n', (6402, 6443), True, 'import scripts.configure as configure\n'), ((6466, 6523), 'scripts.build_libs.build_libkunquatfile', 'build_libkunquatfile', (['builder', 'options', 'libkunquatfile_cc'], {}), '(builder, options, libkunquatfile_cc)\n', (6486, 6523), False, 'from scripts.build_libs import build_libkunquat, build_libkunquatfile\n'), ((6588, 6600), 'copy.deepcopy', 'deepcopy', 
(['cc'], {}), '(cc)\n', (6596, 6600), False, 'from copy import deepcopy\n'), ((6609, 6664), 'scripts.configure.test_add_test_deps', 'configure.test_add_test_deps', (['builder', 'options', 'test_cc'], {}), '(builder, options, test_cc)\n', (6637, 6664), True, 'import scripts.configure as configure\n'), ((6674, 6716), 'scripts.test_libkunquat.test_libkunquat', 'test_libkunquat', (['builder', 'options', 'test_cc'], {}), '(builder, options, test_cc)\n', (6689, 6716), False, 'from scripts.test_libkunquat import test_libkunquat\n'), ((7074, 7097), 'scripts.build_examples.build_examples', 'build_examples', (['builder'], {}), '(builder)\n', (7088, 7097), False, 'from scripts.build_examples import build_examples\n'), ((7222, 7241), 'os.listdir', 'os.listdir', (['"""build"""'], {}), "('build')\n", (7232, 7241), False, 'import os\n'), ((7609, 7696), 'scripts.install_libs.install_libkunquat', 'install_libkunquat', (['install_builder', 'options.prefix', 'options.enable_libkunquat_dev'], {}), '(install_builder, options.prefix, options.\n enable_libkunquat_dev)\n', (7627, 7696), False, 'from scripts.install_libs import install_libkunquat, install_libkunquatfile\n'), ((7917, 7966), 'scripts.install_examples.install_examples', 'install_examples', (['install_builder', 'options.prefix'], {}), '(install_builder, options.prefix)\n', (7933, 7966), False, 'from scripts.install_examples import install_examples\n'), ((8080, 8103), 'scripts.command.PythonCommand', 'command.PythonCommand', ([], {}), '()\n', (8101, 8103), True, 'import scripts.command as command\n'), ((4291, 4328), 'support.fabricate.Builder.echo', 'fabricate.Builder.echo', (['self', 'message'], {}), '(self, message)\n', (4313, 4328), True, 'import support.fabricate as fabricate\n'), ((5228, 5251), 'scripts.command.PythonCommand', 'command.PythonCommand', ([], {}), '()\n', (5249, 5251), True, 'import scripts.command as command\n'), ((5498, 5574), 'subprocess.check_output', 'subprocess.check_output', (["['valgrind', '--version']"], {'stderr': 'subprocess.STDOUT'}), "(['valgrind', '--version'], stderr=subprocess.STDOUT)\n", (5521, 5574), False, 'import subprocess\n'), ((5897, 5908), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5905, 5908), False, 'import sys\n'), ((6770, 6874), 'support.fabricate.run', 'fabricate.run', (['"""env"""', '"""LD_LIBRARY_PATH=build/src/lib"""', '"""python3"""', '"""-m"""', '"""unittest"""', '"""discover"""', '"""-v"""'], {}), "('env', 'LD_LIBRARY_PATH=build/src/lib', 'python3', '-m',\n 'unittest', 'discover', '-v')\n", (6783, 6874), True, 'import support.fabricate as fabricate\n'), ((7764, 7859), 'scripts.install_libs.install_libkunquatfile', 'install_libkunquatfile', (['install_builder', 'options.prefix', 'options.enable_libkunquatfile_dev'], {}), '(install_builder, options.prefix, options.\n enable_libkunquatfile_dev)\n', (7786, 7859), False, 'from scripts.install_libs import install_libkunquat, install_libkunquatfile\n'), ((2380, 2453), 'optparse.Option', 'Option', (['negated_name'], {'action': '"""store_false"""', 'dest': 'var_name', 'help': 'full_desc'}), "(negated_name, action='store_false', dest=var_name, help=full_desc)\n", (2386, 2453), False, 'from optparse import Option, SUPPRESS_HELP\n'), ((2578, 2646), 'optparse.Option', 'Option', (['name'], {'action': '"""store_true"""', 'dest': 'var_name', 'help': 'SUPPRESS_HELP'}), "(name, action='store_true', dest=var_name, help=SUPPRESS_HELP)\n", (2584, 2646), False, 'from optparse import Option, SUPPRESS_HELP\n'), ((2782, 2846), 'optparse.Option', 'Option', (['name'], 
{'action': '"""store_true"""', 'dest': 'var_name', 'help': 'full_desc'}), "(name, action='store_true', dest=var_name, help=full_desc)\n", (2788, 2846), False, 'from optparse import Option, SUPPRESS_HELP\n'), ((2898, 2975), 'optparse.Option', 'Option', (['negated_name'], {'action': '"""store_false"""', 'dest': 'var_name', 'help': 'SUPPRESS_HELP'}), "(negated_name, action='store_false', dest=var_name, help=SUPPRESS_HELP)\n", (2904, 2975), False, 'from optparse import Option, SUPPRESS_HELP\n'), ((3628, 3672), 'optparse.Option', 'Option', (['name'], {'type': 'type_name', 'help': 'full_desc'}), '(name, type=type_name, help=full_desc)\n', (3634, 3672), False, 'from optparse import Option, SUPPRESS_HELP\n'), ((5412, 5423), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5420, 5423), False, 'import sys\n'), ((7418, 7445), 'os.path.join', 'os.path.join', (['"""build"""', 'name'], {}), "('build', name)\n", (7430, 7445), False, 'import os\n'), ((7462, 7481), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (7475, 7481), False, 'import shutil\n'), ((8554, 8565), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8562, 8565), False, 'import sys\n'), ((3326, 3390), 'optparse.Option', 'Option', (['name'], {'type': '"""choice"""', 'choices': "['gcc', 'clang']", 'help': 'desc'}), "(name, type='choice', choices=['gcc', 'clang'], help=desc)\n", (3332, 3390), False, 'from optparse import Option, SUPPRESS_HELP\n')]
|
"""
Segmenting real-world sounds correctly with synthetic sounds
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
It's easy to figure out if a sound is being correctly segmented when the
signal at hand is well defined and repeatable, as in many technological/
engineering applications. However, in bioacoustics, or
a more open-ended field recording situation, it can be very hard
to know the kind of signal that'll be recorded, or what its
parameters are.
Just because the package produces an output doesn't mean the result
is meaningful. Given a set of parameters, any function will produce
an output as long as the input is sensible. This
means, with one set of parameters/methods the CF segment might
be 10ms long, while with another more lax parameter set it might
be 20ms long! Remember, as always, `GIGO <https://en.wikipedia.org/wiki/Garbage_in,_garbage_out>`_ (Garbage In, Garbage Out):P.
How, then, do we segment a sound into CF and FM segments
accurately?
Synthetic calls to the rescue
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
Synthetic calls are sounds that we know to have specific properties
and can be used to test if a parameter set/ segmentation method
is capable of correctly segmenting our real-world sounds and
uncovering the true underlying properties.
The `simulate_calls` module has a bunch of helper functions
which allow the creation of FM sweeps, constant frequency
tones and silences. In combination, these can be used to
get a feeling for which segmentation methods and parameter sets
work well for your real-world sound (bat, bird, cat, <insert sound source of choice>)
Generating a 'classical' CF-FM bat call
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
"""
import matplotlib.pyplot as plt
import numpy as np
import scipy.signal as signal
from itsfm.simulate_calls import make_cffm_call,make_tone, make_fm_chirp, silence
from itsfm.view_horseshoebat_call import visualise_call
from itsfm.segment_horseshoebat_call import segment_call_into_cf_fm
from itsfm.signal_processing import dB, rms
fs = 96000
call_props = {'cf':(40000, 0.01),
'upfm':(38000,0.002),
'downfm':(30000,0.003)}
cffm_call, freq_profile = make_cffm_call(call_props, fs)
cffm_call *= signal.tukey(cffm_call.size, 0.1)
w,s = visualise_call(cffm_call, fs, fft_size=128)
# %%
# Remember, the terminal frequencies and durations of the CF-FM calls can be adjusted to the
# calls of your species of interest!!
# %%
# A multi-component bird call
# >>>>>>>>>>>>>>>>>>>>>>>>>>>
#
# Let's make a sound with two FMs and CFs, and gaps in between
fs = 44100
fm1 = make_fm_chirp(1000, 5000, 0.01, fs)
cf1 = make_tone(5000, 0.005, fs)
fm2 = make_fm_chirp(5500, 9000, 0.01, fs)
cf2 = make_tone(8000, 0.005, fs)
gap = silence(0.005, fs)
synth_birdcall = np.concatenate((gap,
fm1, gap,
cf1, gap,
fm2, gap,
cf2,
gap))
w, s = visualise_call(synth_birdcall, fs, fft_size=64)
# %%
# Let there be Noise
# >>>>>>>>>>>>>>>>>>
#
# Any kind of field recording *will* have some form of noise. Each of the
# segmentation methods is differently susceptible to noise, and it's
# a good idea to test how well they can tolerate it. For starters, let's
# just add white noise and simulate different signal-to-noise ratios (SNR).
noisy_bird_call = synth_birdcall.copy()
noisy_bird_call += np.random.normal(0,10**(-10/20), noisy_bird_call.size)
noisy_bird_call /= np.max(np.abs(noisy_bird_call)) # keep sample values between +/- 1
# %%
# Estimate an approximate SNR by looking at the rms of the gaps to that of
# a song component
level_background = dB(rms(noisy_bird_call[:gap.size]))
level_song = dB(rms(noisy_bird_call[gap.size:2*gap.size]))
snr_approx = level_song-level_background
print('The SNR is approximately: %f'%np.around(snr_approx))
w, s = visualise_call(noisy_bird_call, fs, fft_size=64)
# %%
# We could try to run the segmentation + measurement on a noisy sound straight away,
# but this might lead to poor measurements. Now, let's bandpass the audio
# to remove the ambient noise outside of the song's range.
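# %%
# A minimal sketch of that step (not part of the original tutorial):
# assuming the song's energy lies roughly between 500 Hz and 10 kHz, a
# zero-phase Butterworth bandpass suppresses the out-of-band noise before
# segmentation.
b, a = signal.butter(2, np.array([500.0, 10000.0]) / (fs * 0.5), 'bandpass')
bandpassed_call = signal.filtfilt(b, a, noisy_bird_call)
w, s = visualise_call(bandpassed_call, fs, fft_size=64)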
|
[
"itsfm.view_horseshoebat_call.visualise_call",
"numpy.abs",
"itsfm.simulate_calls.make_cffm_call",
"itsfm.simulate_calls.silence",
"scipy.signal.tukey",
"itsfm.signal_processing.rms",
"itsfm.simulate_calls.make_fm_chirp",
"numpy.around",
"itsfm.simulate_calls.make_tone",
"numpy.random.normal",
"numpy.concatenate"
] |
[((2172, 2202), 'itsfm.simulate_calls.make_cffm_call', 'make_cffm_call', (['call_props', 'fs'], {}), '(call_props, fs)\n', (2186, 2202), False, 'from itsfm.simulate_calls import make_cffm_call, make_tone, make_fm_chirp, silence\n'), ((2216, 2249), 'scipy.signal.tukey', 'signal.tukey', (['cffm_call.size', '(0.1)'], {}), '(cffm_call.size, 0.1)\n', (2228, 2249), True, 'import scipy.signal as signal\n'), ((2259, 2302), 'itsfm.view_horseshoebat_call.visualise_call', 'visualise_call', (['cffm_call', 'fs'], {'fft_size': '(128)'}), '(cffm_call, fs, fft_size=128)\n', (2273, 2302), False, 'from itsfm.view_horseshoebat_call import visualise_call\n'), ((2593, 2628), 'itsfm.simulate_calls.make_fm_chirp', 'make_fm_chirp', (['(1000)', '(5000)', '(0.01)', 'fs'], {}), '(1000, 5000, 0.01, fs)\n', (2606, 2628), False, 'from itsfm.simulate_calls import make_cffm_call, make_tone, make_fm_chirp, silence\n'), ((2635, 2661), 'itsfm.simulate_calls.make_tone', 'make_tone', (['(5000)', '(0.005)', 'fs'], {}), '(5000, 0.005, fs)\n', (2644, 2661), False, 'from itsfm.simulate_calls import make_cffm_call, make_tone, make_fm_chirp, silence\n'), ((2668, 2703), 'itsfm.simulate_calls.make_fm_chirp', 'make_fm_chirp', (['(5500)', '(9000)', '(0.01)', 'fs'], {}), '(5500, 9000, 0.01, fs)\n', (2681, 2703), False, 'from itsfm.simulate_calls import make_cffm_call, make_tone, make_fm_chirp, silence\n'), ((2710, 2736), 'itsfm.simulate_calls.make_tone', 'make_tone', (['(8000)', '(0.005)', 'fs'], {}), '(8000, 0.005, fs)\n', (2719, 2736), False, 'from itsfm.simulate_calls import make_cffm_call, make_tone, make_fm_chirp, silence\n'), ((2743, 2761), 'itsfm.simulate_calls.silence', 'silence', (['(0.005)', 'fs'], {}), '(0.005, fs)\n', (2750, 2761), False, 'from itsfm.simulate_calls import make_cffm_call, make_tone, make_fm_chirp, silence\n'), ((2780, 2841), 'numpy.concatenate', 'np.concatenate', (['(gap, fm1, gap, cf1, gap, fm2, gap, cf2, gap)'], {}), '((gap, fm1, gap, cf1, gap, fm2, gap, cf2, gap))\n', (2794, 2841), True, 'import numpy as np\n'), ((3017, 3064), 'itsfm.view_horseshoebat_call.visualise_call', 'visualise_call', (['synth_birdcall', 'fs'], {'fft_size': '(64)'}), '(synth_birdcall, fs, fft_size=64)\n', (3031, 3064), False, 'from itsfm.view_horseshoebat_call import visualise_call\n'), ((3474, 3533), 'numpy.random.normal', 'np.random.normal', (['(0)', '(10 ** (-10 / 20))', 'noisy_bird_call.size'], {}), '(0, 10 ** (-10 / 20), noisy_bird_call.size)\n', (3490, 3533), True, 'import numpy as np\n'), ((3942, 3990), 'itsfm.view_horseshoebat_call.visualise_call', 'visualise_call', (['noisy_bird_call', 'fs'], {'fft_size': '(64)'}), '(noisy_bird_call, fs, fft_size=64)\n', (3956, 3990), False, 'from itsfm.view_horseshoebat_call import visualise_call\n'), ((3555, 3578), 'numpy.abs', 'np.abs', (['noisy_bird_call'], {}), '(noisy_bird_call)\n', (3561, 3578), True, 'import numpy as np\n'), ((3739, 3769), 'itsfm.signal_processing.rms', 'rms', (['noisy_bird_call[gap.size]'], {}), '(noisy_bird_call[gap.size])\n', (3742, 3769), False, 'from itsfm.signal_processing import dB, rms\n'), ((3788, 3831), 'itsfm.signal_processing.rms', 'rms', (['noisy_bird_call[gap.size:2 * gap.size]'], {}), '(noisy_bird_call[gap.size:2 * gap.size])\n', (3791, 3831), False, 'from itsfm.signal_processing import dB, rms\n'), ((3911, 3932), 'numpy.around', 'np.around', (['snr_approx'], {}), '(snr_approx)\n', (3920, 3932), True, 'import numpy as np\n')]
|
import rebase as rb
import pickle
from datetime import datetime
import rebase.util.api_request as api_request
class Predicter():
@classmethod
def load_data(cls, pred, start_date, end_date):
site_config = rb.Site.get(pred.site_id)
return pred.load_data(site_config, start_date, end_date)
    @classmethod
    def load_latest_data(cls, pred):
        # Fetch the site config and delegate to the model's own
        # load_latest_data() hook, mirroring load_data() above.
        site_config = rb.Site.get(pred.site_id)
        return pred.load_latest_data(site_config)
@classmethod
def train(cls, pred, params, start_date, end_date):
weather_df, observation_df = Predicter.load_data(pred, start_date, end_date)
dataset = pred.preprocess(weather_df, observation_df)
return pred.train(dataset, params)
    @classmethod
    def hyperparam_search(cls, pred, params_list, start_date, end_date):
        weather_df, observation_df = Predicter.load_data(pred, start_date, end_date)
        dataset = pred.preprocess(weather_df, observation_df)
        models = []
        for p in params_list:
            model, score = pred.train(dataset, p)
            models.append((model, score, p))
        return models
@classmethod
def deploy(cls, pred):
print("Deploying {}".format(pred.name))
path = 'platform/v1/site/train/{}'.format(pred.site_id)
response = api_request.post(path)
if response.status_code == 200:
print("Success!")
else:
print("Failed")
    @classmethod
    def predict(cls, pred):
        weather_df = Predicter.load_latest_data(pred)
        predict_set = pred.preprocess(weather_df)
        return pred.predict(predict_set)
@classmethod
def status(cls, pred):
path = '/platform/v1/site/train/state/{}'.format(pred.site_id)
r = api_request.get(path)
status = {'status': None, 'history': []}
if r.status_code == 200:
data = r.json()
if len(data) > 0:
status['status'] = data[-1]['state']
status['history'] = data
return status
class Model():
def setup(self):
pass
def load_data(self, site_config, start_date, end_date):
"""This method should load the data for training
Args:
site_config (dict): config for the site
start_date (datetime): the start date for the period
end_date (datetime): the end date for the period
Returns:
            - pd.DataFrame: weather data
            - pd.DataFrame: observation data
"""
raise NotImplementedError(
'Your subclass must implement the load_data() method'
)
def load_latest_data(self, site_config):
"""This method should load the predict data for training
Args:
site_config (dict): config for the site
        Returns:
            pd.DataFrame: the latest weather data
"""
raise NotImplementedError(
            'Your subclass must implement the load_latest_data() method'
)
def preprocess(self, weather_data, observation_data=None):
raise NotImplementedError(
'Your subclass must implement the preprocess() method'
)
def train(self, train_set, params={}):
raise NotImplementedError(
'Your subclass must implement the train() method'
)
# weather_df - weather for a ref time
# target_observations - like recent production power, could be used for intraday
def predict(self, predict_set):
raise NotImplementedError(
'Your subclass must implement the predict() method'
)
# serialize() should be overriden with custom serialization
# method if @param model can't be pickled
def serialize(self, model):
return pickle.dumps(model)
# deserialize() should be overriden with custom deserialization method
# if @param serialized_model can't be loaded from pickle
def deserialize(self, serialized_model):
return pickle.loads(serialized_model)
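# An illustrative sketch, not part of the library: a minimal Model subclass
# exercising three of the hooks above. The class name, the 'power' column
# and the constant-forecast logic are all hypothetical.
class MeanModel(Model):
    def preprocess(self, weather_data, observation_data=None):
        # Pass the weather data through unchanged; a real model would
        # engineer features here.
        return weather_data
    def train(self, train_set, params={}):
        # The "model" is just the mean of a hypothetical target column.
        self.model = train_set['power'].mean()
        score = None  # a real subclass would compute a validation score
        return self.model, score
    def predict(self, predict_set):
        # Constant forecast: one value per row of the predict set.
        return [self.model] * len(predict_set)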
|
[
"pickle.loads",
"rebase.util.api_request.post",
"rebase.util.api_request.get",
"rebase.Site.get",
"pickle.dumps"
] |
[((223, 248), 'rebase.Site.get', 'rb.Site.get', (['pred.site_id'], {}), '(pred.site_id)\n', (234, 248), True, 'import rebase as rb\n'), ((1027, 1049), 'rebase.util.api_request.post', 'api_request.post', (['path'], {}), '(path)\n', (1043, 1049), True, 'import rebase.util.api_request as api_request\n'), ((1374, 1395), 'rebase.util.api_request.get', 'api_request.get', (['path'], {}), '(path)\n', (1389, 1395), True, 'import rebase.util.api_request as api_request\n'), ((3312, 3331), 'pickle.dumps', 'pickle.dumps', (['model'], {}), '(model)\n', (3324, 3331), False, 'import pickle\n'), ((3529, 3559), 'pickle.loads', 'pickle.loads', (['serialized_model'], {}), '(serialized_model)\n', (3541, 3559), False, 'import pickle\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 17 15:24:18 2020
@author: dhulls
"""
from anastruct import SystemElements
import numpy as np
class TrussModel:
def HF(self, young1=None, young2=None, area1=None, area2=None, P1=None, P2=None, P3=None, P4=None, P5=None, P6=None):
ss = SystemElements()
# young1 = 2.1e11
# area1 = 2e-3
# young2 = 2.1e11
# area2 = 1e-3
ss.add_truss_element(location=[[0, 0], [4,0]], EA=(area1*young1))
ss.add_truss_element(location=[[4, 0], [8,0]], EA=(area1*young1))
ss.add_truss_element(location=[[8, 0], [12,0]], EA=(area1*young1))
ss.add_truss_element(location=[[12, 0], [16,0]], EA=(area1*young1))
ss.add_truss_element(location=[[16, 0], [20,0]], EA=(area1*young1))
ss.add_truss_element(location=[[20, 0], [24,0]], EA=(area1*young1))
ss.add_truss_element(location=[[2, 2], [6,2]], EA=(area1*young1))
ss.add_truss_element(location=[[6, 2], [10,2]], EA=(area1*young1))
ss.add_truss_element(location=[[10, 2], [14,2]], EA=(area1*young1))
ss.add_truss_element(location=[[14, 2], [18,2]], EA=(area1*young1))
ss.add_truss_element(location=[[18, 2], [22,2]], EA=(area1*young1))
ss.add_truss_element(location=[[0, 0], [2,2]], EA=(area2*young2))
ss.add_truss_element(location=[[2,2], [4,0]], EA=(area2*young2))
ss.add_truss_element(location=[[4,0], [6,2]], EA=(area2*young2))
ss.add_truss_element(location=[[6,2], [8,0]], EA=(area2*young2))
ss.add_truss_element(location=[[8,0], [10,2]], EA=(area2*young2))
ss.add_truss_element(location=[[10,2], [12,0]], EA=(area2*young2))
ss.add_truss_element(location=[[12,0], [14,2]], EA=(area2*young2))
ss.add_truss_element(location=[[14,2], [16,0]], EA=(area2*young2))
ss.add_truss_element(location=[[16,0], [18,2]], EA=(area2*young2))
ss.add_truss_element(location=[[18,2], [20,0]], EA=(area2*young2))
ss.add_truss_element(location=[[20,0], [22,2]], EA=(area2*young2))
ss.add_truss_element(location=[[22,2], [24,0]], EA=(area2*young2))
ss.add_support_hinged(node_id=1)
ss.add_support_roll(node_id=7, direction='x')
# P1 = -5e4
# P2 = -5e4
# P3 = -5e4
# P4 = -5e4
# P5 = -5e4
# P6 = -5e4
ss.point_load(node_id=8, Fy=P1)
ss.point_load(node_id=9, Fy=P2)
ss.point_load(node_id=10, Fy=P3)
ss.point_load(node_id=11, Fy=P4)
ss.point_load(node_id=12, Fy=P5)
ss.point_load(node_id=13, Fy=P6)
ss.solve()
# ss.show_structure()
# ss.show_displacement(factor=10)
K = ss.get_node_results_system(node_id=4)['uy']
return np.array(K)
def LF(self, young1=None, young2=None, area1=None, area2=None, P1=None, P2=None, P3=None, P4=None, P5=None, P6=None):
ss = SystemElements()
# young1 = 2.1e11
# area1 = 2e-3
# young2 = 2.1e11
# area2 = 1e-3
ss.add_truss_element(location=[[0, 0], [12,0]], EA=(area1*young1))
ss.add_truss_element(location=[[12, 0], [24,0]], EA=(area1*young1))
ss.add_truss_element(location=[[6, 2], [18,2]], EA=(area1*young1))
ss.add_truss_element(location=[[0, 0], [6,2]], EA=(area2*young2))
ss.add_truss_element(location=[[6,2], [12,0]], EA=(area2*young2))
ss.add_truss_element(location=[[12,0], [18,2]], EA=(area2*young2))
ss.add_truss_element(location=[[18,2], [24,0]], EA=(area2*young2))
ss.add_support_hinged(node_id=1)
ss.add_support_roll(node_id=3, direction='x')
# P1 = -5e4
# P2 = -5e4
# P3 = -5e4
# P4 = -5e4
# P5 = -5e4
# P6 = -5e4
ss.point_load(node_id=4, Fy=np.sum([P1,P2,P3]))
ss.point_load(node_id=5, Fy=np.sum([P4,P5,P6]))
ss.solve()
# ss.show_structure()
# ss.show_displacement(factor=10)
K = ss.get_node_results_system(node_id=4)['uy']
return np.array(K)
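# Example usage (hypothetical values taken from the commented-out defaults
# above), comparing the high- and low-fidelity displacement predictions:
if __name__ == '__main__':
    truss = TrussModel()
    loads = dict(P1=-5e4, P2=-5e4, P3=-5e4, P4=-5e4, P5=-5e4, P6=-5e4)
    uy_hf = truss.HF(young1=2.1e11, young2=2.1e11, area1=2e-3, area2=1e-3, **loads)
    uy_lf = truss.LF(young1=2.1e11, young2=2.1e11, area1=2e-3, area2=1e-3, **loads)
    print('HF uy:', uy_hf, 'LF uy:', uy_lf)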
|
[
"numpy.array",
"anastruct.SystemElements",
"numpy.sum"
] |
[((333, 349), 'anastruct.SystemElements', 'SystemElements', ([], {}), '()\n', (347, 349), False, 'from anastruct import SystemElements\n'), ((2828, 2839), 'numpy.array', 'np.array', (['K'], {}), '(K)\n', (2836, 2839), True, 'import numpy as np\n'), ((2989, 3005), 'anastruct.SystemElements', 'SystemElements', ([], {}), '()\n', (3003, 3005), False, 'from anastruct import SystemElements\n'), ((4156, 4167), 'numpy.array', 'np.array', (['K'], {}), '(K)\n', (4164, 4167), True, 'import numpy as np\n'), ((3900, 3920), 'numpy.sum', 'np.sum', (['[P1, P2, P3]'], {}), '([P1, P2, P3])\n', (3906, 3920), True, 'import numpy as np\n'), ((3956, 3976), 'numpy.sum', 'np.sum', (['[P4, P5, P6]'], {}), '([P4, P5, P6])\n', (3962, 3976), True, 'import numpy as np\n')]
|
from django.db import models
from django.urls import reverse
from datetime import date
# Create your models here.
class Photo(models.Model):
"""猫猫相片的数据库模型"""
image = models.ImageField(
'图像',
upload_to='image/'
)
title = models.CharField('标题', blank=True, max_length=8)
description = models.TextField('图片描述', blank=True)
author = models.ForeignKey(
'campus.User',
verbose_name='拍摄者',
on_delete=models.SET_NULL,
null = True,
blank = True,
related_name = 'photos',
related_query_name = 'photo'
)
author_name = models.CharField('拍摄者名称', max_length=16, blank=True)
date = models.DateField('拍摄日期', default=date.today, null=True, blank=True)
cats = models.ManyToManyField(
'cat.Cat',
verbose_name='出镜猫猫们',
related_name='photos',
related_query_name='photo'
)
class Meta:
verbose_name = '相片'
verbose_name_plural = '相片'
def __str__(self):
name = ''
if self.cats.count() < 3:
for cat in self.cats.all():
name = name + str(cat) + '-'
else:
cats = self.cats.all()
name = str(cats[0]) + '-...-' + str(cats[1]) + '-'
if self.title:
name = name + self.title + '-'
if self.date:
name = name + str(self.date.year) + '-'
return name[:-1]
def get_absolute_url(self):
        return reverse('file:photo', kwargs={'pk': self.pk})
def get_author(self):
"""拍摄者名称"""
if self.author:
return self.author.username
elif self.author_name:
return self.author_name
else:
return '佚名'
|
[
"django.db.models.TextField",
"django.db.models.ManyToManyField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.ImageField",
"django.urls.reverse",
"django.db.models.DateField"
] |
[((177, 220), 'django.db.models.ImageField', 'models.ImageField', (['"""图像"""'], {'upload_to': '"""image/"""'}), "('图像', upload_to='image/')\n", (194, 220), False, 'from django.db import models\n'), ((259, 307), 'django.db.models.CharField', 'models.CharField', (['"""标题"""'], {'blank': '(True)', 'max_length': '(8)'}), "('标题', blank=True, max_length=8)\n", (275, 307), False, 'from django.db import models\n'), ((326, 362), 'django.db.models.TextField', 'models.TextField', (['"""图片描述"""'], {'blank': '(True)'}), "('图片描述', blank=True)\n", (342, 362), False, 'from django.db import models\n'), ((376, 538), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""campus.User"""'], {'verbose_name': '"""拍摄者"""', 'on_delete': 'models.SET_NULL', 'null': '(True)', 'blank': '(True)', 'related_name': '"""photos"""', 'related_query_name': '"""photo"""'}), "('campus.User', verbose_name='拍摄者', on_delete=models.\n SET_NULL, null=True, blank=True, related_name='photos',\n related_query_name='photo')\n", (393, 538), False, 'from django.db import models\n'), ((624, 676), 'django.db.models.CharField', 'models.CharField', (['"""拍摄者名称"""'], {'max_length': '(16)', 'blank': '(True)'}), "('拍摄者名称', max_length=16, blank=True)\n", (640, 676), False, 'from django.db import models\n'), ((688, 755), 'django.db.models.DateField', 'models.DateField', (['"""拍摄日期"""'], {'default': 'date.today', 'null': '(True)', 'blank': '(True)'}), "('拍摄日期', default=date.today, null=True, blank=True)\n", (704, 755), False, 'from django.db import models\n'), ((772, 883), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['"""cat.Cat"""'], {'verbose_name': '"""出镜猫猫们"""', 'related_name': '"""photos"""', 'related_query_name': '"""photo"""'}), "('cat.Cat', verbose_name='出镜猫猫们', related_name=\n 'photos', related_query_name='photo')\n", (794, 883), False, 'from django.db import models\n'), ((1493, 1531), 'django.urls.reverse', 'reverse', (['"""file:photo"""', "{'pk': self.pk}"], {}), "('file:photo', {'pk': self.pk})\n", (1500, 1531), False, 'from django.urls import reverse\n')]
|
"""
ASGI config for DjAI project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
docs.djangoproject.com/en/dev/howto/deployment/asgi
"""
# ref: django-configurations.readthedocs.io
import os
# from django.core.asgi import get_asgi_application
from configurations.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Default')
application = get_asgi_application()
|
[
"configurations.asgi.get_asgi_application",
"os.environ.setdefault"
] |
[((376, 445), 'os.environ.setdefault', 'os.environ.setdefault', ([], {'key': '"""DJANGO_SETTINGS_MODULE"""', 'value': '"""settings"""'}), "(key='DJANGO_SETTINGS_MODULE', value='settings')\n", (397, 445), False, 'import os\n'), ((446, 512), 'os.environ.setdefault', 'os.environ.setdefault', ([], {'key': '"""DJANGO_CONFIGURATION"""', 'value': '"""Default"""'}), "(key='DJANGO_CONFIGURATION', value='Default')\n", (467, 512), False, 'import os\n'), ((529, 551), 'configurations.asgi.get_asgi_application', 'get_asgi_application', ([], {}), '()\n', (549, 551), False, 'from configurations.asgi import get_asgi_application\n')]
|
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pprint import pformat
class Command(BaseCommand):
args = '<setting>'
help = 'Outputs the value of the given setting name'
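    # Example invocation (the command name depends on this file's location
    # under management/commands/ and is hypothetical here):
    #
    #   python manage.py print_setting DEBUG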
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError('Please enter exactly one setting name!')
name = args[0]
if hasattr(settings, name):
self.stdout.write(pformat(getattr(settings, name), indent=4, width=160))
else:
self.stderr.write('no setting with name %s available!' % name)
|
[
"django.core.management.base.CommandError"
] |
[((323, 377), 'django.core.management.base.CommandError', 'CommandError', (['"""Please enter exactly one setting name!"""'], {}), "('Please enter exactly one setting name!')\n", (335, 377), False, 'from django.core.management.base import BaseCommand, CommandError\n')]
|
from estruturadedados.avltree import AVL
from estruturadedados.queue import Queue
from biometria.biometria import Biometria as Bio
from bancodedados.paths import *
import json
from os import listdir, remove
class GerenciadorPrincipal():
def __init__(self):
self.gerVacina = GerenciadorVacina()
self.gerPessoas = GerenciadorPessoas()
self.gerBiometria = GerenciadorBiometria()
def cadastrarVacina(self, vacina):
self.gerVacina.cadastrarVacina(vacina)
def cadastrarPessoa(self, pessoa):
self.gerPessoas.cadastrarPessoa(pessoa=pessoa)
def retornarPessoa(self, chave, tipo):
return self.gerPessoas.procurarPessoa(chave, tipo)
def retornarBioNova(self):
return self.gerBiometria.cadastrarBiometria()
def vacinarPessoa(self, pessoa, vacina):
self.gerPessoas.vacinarPessoa(pessoa, vacina)
self.gerVacina.diminuirEstoque(vacina.getLote())
def retornarVacinaValida(self, fab=None):
vacina = self.gerVacina.getVacina(fab=fab)
return vacina
def retornarQuantidadeEstoque(self):
return self.gerVacina.retornarEstoque()
def retornarPessoaBio(self, path):
nomeBio = self.gerBiometria.compararBiometria(path)
if nomeBio:
pessoaB = self.retornarPessoa(nomeBio, 'bio')
return pessoaB
return False
def excluirCadastro(self, pessoa):
self.gerPessoas.excluirPessoa(pessoa)
try:
self.gerBiometria.excluirBiometria(pessoa.getBiometria())
except:
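            # PessoaCPF has no biometric record, so there may be nothing
            # to delete here.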
pass
def retornarArvoreVacinas(self):
return self.gerVacina.arvoreVacinas
def retornarArvoreCPF(self):
return self.gerPessoas.arvorePessoasCPF
def retornarArvoreBio(self):
return self.gerPessoas.arvorePessoasBiometria
class GerenciadorPessoas():
def __init__(self):
self.arvorePessoasCPF = AVL()
self.arvorePessoasBiometria = AVL()
self._carregarArvore(VACBIO)
self._carregarArvore(VACCPF)
def _carregarArvore(self, caminho):
arvore, tipoPessoa, lastAtt = self._chooseType(caminho)
try:
with open(f'{caminho}', 'r') as nomeArquivo:
listaPessoas = json.load(nomeArquivo)
for k, v in listaPessoas.items():
chave = k
pessoa = tipoPessoa(v['nome'], v['idade'], v['dose'], v['vacina'], v[f'{lastAtt}'])
arvore.insert(chave, valor=pessoa)
except:
with open(f'{caminho}', 'w') as f:
data = {}
json.dump(data, f, indent=4, ensure_ascii=False)
def cadastrarPessoa(self, pessoa):
arvore, chave, caminho = self._chooseArvore(pessoa=pessoa)
arvore.insert(chave, valor=pessoa)
with open(f'{caminho}', 'r+', encoding='UTF-8') as nomeArquivo:
listaPessoa = json.load(nomeArquivo)
listaPessoa[chave] = pessoa.lineRegistry()
with open(f'{caminho}', 'w', encoding='UTF-8') as nomeArquivo:
json.dump(listaPessoa, nomeArquivo, indent=4, ensure_ascii=False)
def vacinarPessoa(self, pessoa, vacina):
arvore, chave, caminho = self._chooseArvore(pessoa=pessoa)
pArvore = arvore.search(chave)
pArvore.getValor().setDose(1)
pArvore.getValor().setVacina(vacina.fabricante)
with open(f'{caminho}', 'r+', encoding='UTF-8') as nomeArquivo:
listaPessoas = json.load(nomeArquivo)
p = listaPessoas[chave]
p['vacina'] = vacina.getFabricante()
p['dose'] += 1
with open(f'{caminho}', 'w', encoding='UTF-8') as nomeArquivo:
json.dump(listaPessoas, nomeArquivo, indent=4, ensure_ascii=False)
def excluirPessoa(self, pessoa):
arvore, chave, caminho = self._chooseArvore(pessoa=pessoa)
arvore.delete(chave)
with open(f'{caminho}', 'r+', encoding='UTF-8') as nomeArquivo:
listaPessoas = json.load(nomeArquivo)
listaPessoas.pop(chave)
with open(f'{caminho}', 'w', encoding='UTF-8') as nomeArquivo:
json.dump(listaPessoas, nomeArquivo, indent=4, ensure_ascii=False)
def procurarPessoa(self, chave, tipo):
arvore = self._chooseArvore(tipo=tipo)
pessoa = arvore.search(chave)
return pessoa.getValor()
def _chooseType(self, caminho):
arvore = self.arvorePessoasCPF if caminho == VACCPF else self.arvorePessoasBiometria
tipoPessoa = PessoaCPF if caminho == VACCPF else PessoaBiometria
lastAtt = 'cpf' if caminho == VACCPF else 'biometria'
return arvore, tipoPessoa, lastAtt
def _chooseArvore(self, tipo=None, pessoa=None):
if tipo:
arvore = self.arvorePessoasCPF if tipo == 'cpf' else self.arvorePessoasBiometria
return arvore
if pessoa:
arvore = self.arvorePessoasCPF if pessoa.__class__.__name__ == 'PessoaCPF' else self.arvorePessoasBiometria
chave = pessoa.getCpf() if arvore == self.arvorePessoasCPF else pessoa.getBiometria()
path = VACCPF if arvore == self.arvorePessoasCPF else VACBIO
return arvore, chave, path
class Pessoa:
def __init__(self, nome, idade, dose=0, vacina=None):
self.nome = nome
self.idade = idade
self.dose = dose
self.vacina = self.setVacina(vacina)
    def isVac(self):
        return self.dose > 1
    def getNomeVacina(self):
        return self.vacina
    def setVacina(self, valor):
        self.vacina = 'N/A' if valor is None else valor
        return self.vacina
def getDose(self):
return self.dose
def setDose(self, valor):
self.dose += valor
def __repr__(self):
return f'| NOME:{self.nome} \n| IDADE: {self.idade}\n| DOSE VACINA: {self.dose}'
class PessoaCPF(Pessoa):
def __init__(self, nome, idade, dose=0, vacina=None, cpf=0):
super().__init__(nome, idade, dose, vacina)
self.cpf = cpf
def getCpf(self):
return self.cpf
def lineRegistry(self):
return {'nome': self.nome, 'idade': self.idade, 'vacina': self.getNomeVacina(), 'dose': self.dose, 'cpf': self.cpf}
class PessoaBiometria(Pessoa):
def __init__(self, nome, idade, dose=0, vacina=None, biom=0):
super().__init__(nome, idade, dose, vacina)
self.biometria = biom
def getBiometria(self):
return self.biometria
def associarBiometria(self, biometria):
self.biometria = biometria
def lineRegistry(self):
return {'nome': self.nome, 'idade': self.idade, 'vacina': self.getNomeVacina(), 'dose': self.dose, 'biometria': self.biometria}
class GerenciadorBiometria():
def __init__(self):
self.arvoreBiometrias = AVL()
self._carregarArvore()
def cadastrarBiometria(self):
biometria = Bio.criar('_')
self.arvoreBiometrias.insert(str(biometria))
return biometria
def compararBiometria(self, path):
nome = nameFromPath(path)
caminho = caminhoFromPath(path)
biometriaBD = self._procurarBiometria(nome)
if biometriaBD:
biometriaTeste = Bio.leArquivo(nome, path=caminho)
biometriaBD = Bio.leArquivo(biometriaBD.getChave())
arvoreTeste = self._carregarArvoreTeste(biometriaTeste)
arvoreBD = self._carregarArvoreTeste(biometriaBD)
if self._igual(arvoreBD.getRoot(), arvoreTeste.getRoot()):
return nome
return False
def _pegarNomes(self):
nomes = [".".join(f.split(".")[:-1]) for f in listdir(path=BIO) if f.endswith('.json')]
return nomes
def excluirBiometria(self, nome):
remove(f'{BIO}{nome}.json')
self.arvoreBiometrias.delete(nome)
def _carregarArvore(self):
nomes = self._pegarNomes()
self.arvoreBiometrias.inserirLista(nomes)
def _carregarArvoreTeste(self, lista):
arvore = AVL()
arvore.inserirLista(lista)
return arvore
def _procurarBiometria(self, chave):
try:
biometria = self.arvoreBiometrias.search(chave)
except:
return False
return biometria
def _igual(self, p1, p2):
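        # Breadth-first comparison of two biometric trees: walk both in
        # lockstep and require identical keys and identical shape; after 40
        # matching nodes the trees are treated as equal (early exit).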
        if p1 is None and p2 is None:
            return True
        if p1 is None or p2 is None:
            return False
fila1 = Queue()
fila2 = Queue()
fila1.push(p1)
fila2.push(p2)
count = 0
while not fila1.isEmpty() and not fila2.isEmpty():
pos1 = fila1.first.valor
pos2 = fila2.first.valor
if pos1.getChave() != pos2.getChave():
return False
fila1.pop()
fila2.pop()
count +=1
if count > 40:
return True
if pos1.getLeft() and pos2.getLeft():
fila1.push(pos1.getLeft())
fila2.push(pos2.getLeft())
elif pos1.getLeft() or pos2.getLeft():
return False
if pos1.getRight() and pos2.getRight():
fila1.push(pos1.getRight())
fila2.push(pos2.getRight())
elif pos1.getRight() or pos2.getRight():
return False
return True
class GerenciadorVacina():
def __init__(self):
self.arvoreVacinas = AVL()
self.estoque = 0
self._carregarArvore()
def _carregarArvore(self):
try:
with open(f'{VACI}', 'r', encoding='UTF-8') as nomeArquivo:
listaVacinas = json.load(nomeArquivo)
for k, v in listaVacinas.items():
if v['quantidade'] == 0:
continue
vacina = Vacina(v['fabricante'], v['lote'], v['quantidade'])
self.setEstoque(v['quantidade'])
self.arvoreVacinas.insert(k, valor=vacina)
except:
with open(f'{VACI}', 'w', encoding='UTF-8') as nomeArquivo:
data = {}
json.dump(data, nomeArquivo, indent=4, ensure_ascii=False)
def cadastrarVacina(self, vacina):
self.arvoreVacinas.insert(vacina.getLote(), valor=vacina)
self.setEstoque(vacina.quantidade)
with open(f'{VACI}', 'r+', encoding='UTF-8') as nomeArquivo:
listaVacinas = json.load(nomeArquivo)
listaVacinas[f'{vacina.getLote()}'] = vacina.lineRegistry()
with open(f'{VACI}', 'w', encoding='UTF-8') as nomeArquivo:
json.dump(listaVacinas, nomeArquivo, indent=4, ensure_ascii=False)
def diminuirEstoque(self, lote):
vacina = self.arvoreVacinas.search(lote)
vacina.getValor().setQuantidade(-1)
self.setEstoque(-1)
if not vacina.valor.temVacina():
self.arvoreVacinas.delete(lote)
with open(f'{VACI}', 'r+', encoding='UTF-8') as nomeArquivo:
listaVacinas = json.load(nomeArquivo)
vacina = listaVacinas[lote]
vacina['quantidade'] -= 1
with open(f'{VACI}', 'w', encoding='UTF-8') as nomeArquivo:
json.dump(listaVacinas, nomeArquivo, indent=4, ensure_ascii=False)
def getVacina(self, fab=None):
if self.arvoreVacinas.isEmpty():
return None
if fab == 'N/A':
return self.arvoreVacinas.getRoot().getValor()
for node in self.arvoreVacinas:
if node.getValor().getFabricante() == fab and node.getValor().temVacina():
return node.getValor()
def retornarEstoque(self):
return self.estoque
def setEstoque(self, qnt):
if qnt > 0:
self.estoque += qnt
elif qnt < 0:
self.estoque = self.estoque - 1
else:
self.estoque = 0
class Vacina:
def __init__(self, fab, lote, quantidade=0):
self.fabricante = fab
self.lote = lote
self.quantidade = quantidade
def setQuantidade(self, qnt):
if self.quantidade == 0:
self.quantidade = 0
elif qnt > 0:
self.quantidade += qnt
elif qnt < 0:
self.quantidade = self.quantidade - 1
else:
self.quantidade = 0
def temVacina(self):
if self.quantidade == 0:
return False
return True
def getLote(self):
return self.lote
def getFabricante(self):
return self.fabricante
def lineRegistry(self):
return {'fabricante': self.fabricante, 'lote': self.lote, 'quantidade': self.quantidade}
def __repr__(self):
return f'| Fabricante: {self.fabricante}\n| Quantidade: {self.quantidade}\n| Lote: {self.lote}'
|
[
"estruturadedados.queue.Queue",
"json.dump",
"os.remove",
"json.load",
"biometria.biometria.Biometria.criar",
"biometria.biometria.Biometria.leArquivo",
"estruturadedados.avltree.AVL",
"os.listdir"
] |
[((1918, 1923), 'estruturadedados.avltree.AVL', 'AVL', ([], {}), '()\n', (1921, 1923), False, 'from estruturadedados.avltree import AVL\n'), ((1962, 1967), 'estruturadedados.avltree.AVL', 'AVL', ([], {}), '()\n', (1965, 1967), False, 'from estruturadedados.avltree import AVL\n'), ((6925, 6930), 'estruturadedados.avltree.AVL', 'AVL', ([], {}), '()\n', (6928, 6930), False, 'from estruturadedados.avltree import AVL\n'), ((7017, 7031), 'biometria.biometria.Biometria.criar', 'Bio.criar', (['"""_"""'], {}), "('_')\n", (7026, 7031), True, 'from biometria.biometria import Biometria as Bio\n'), ((7869, 7896), 'os.remove', 'remove', (['f"""{BIO}{nome}.json"""'], {}), "(f'{BIO}{nome}.json')\n", (7875, 7896), False, 'from os import listdir, remove\n'), ((8118, 8123), 'estruturadedados.avltree.AVL', 'AVL', ([], {}), '()\n', (8121, 8123), False, 'from estruturadedados.avltree import AVL\n'), ((8533, 8540), 'estruturadedados.queue.Queue', 'Queue', ([], {}), '()\n', (8538, 8540), False, 'from estruturadedados.queue import Queue\n'), ((8557, 8564), 'estruturadedados.queue.Queue', 'Queue', ([], {}), '()\n', (8562, 8564), False, 'from estruturadedados.queue import Queue\n'), ((9507, 9512), 'estruturadedados.avltree.AVL', 'AVL', ([], {}), '()\n', (9510, 9512), False, 'from estruturadedados.avltree import AVL\n'), ((2920, 2942), 'json.load', 'json.load', (['nomeArquivo'], {}), '(nomeArquivo)\n', (2929, 2942), False, 'import json\n'), ((3081, 3146), 'json.dump', 'json.dump', (['listaPessoa', 'nomeArquivo'], {'indent': '(4)', 'ensure_ascii': '(False)'}), '(listaPessoa, nomeArquivo, indent=4, ensure_ascii=False)\n', (3090, 3146), False, 'import json\n'), ((3493, 3515), 'json.load', 'json.load', (['nomeArquivo'], {}), '(nomeArquivo)\n', (3502, 3515), False, 'import json\n'), ((3711, 3777), 'json.dump', 'json.dump', (['listaPessoas', 'nomeArquivo'], {'indent': '(4)', 'ensure_ascii': '(False)'}), '(listaPessoas, nomeArquivo, indent=4, ensure_ascii=False)\n', (3720, 3777), False, 'import json\n'), ((4011, 4033), 'json.load', 'json.load', (['nomeArquivo'], {}), '(nomeArquivo)\n', (4020, 4033), False, 'import json\n'), ((4153, 4219), 'json.dump', 'json.dump', (['listaPessoas', 'nomeArquivo'], {'indent': '(4)', 'ensure_ascii': '(False)'}), '(listaPessoas, nomeArquivo, indent=4, ensure_ascii=False)\n', (4162, 4219), False, 'import json\n'), ((7329, 7362), 'biometria.biometria.Biometria.leArquivo', 'Bio.leArquivo', (['nome'], {'path': 'caminho'}), '(nome, path=caminho)\n', (7342, 7362), True, 'from biometria.biometria import Biometria as Bio\n'), ((10503, 10525), 'json.load', 'json.load', (['nomeArquivo'], {}), '(nomeArquivo)\n', (10512, 10525), False, 'import json\n'), ((10678, 10744), 'json.dump', 'json.dump', (['listaVacinas', 'nomeArquivo'], {'indent': '(4)', 'ensure_ascii': '(False)'}), '(listaVacinas, nomeArquivo, indent=4, ensure_ascii=False)\n', (10687, 10744), False, 'import json\n'), ((11085, 11107), 'json.load', 'json.load', (['nomeArquivo'], {}), '(nomeArquivo)\n', (11094, 11107), False, 'import json\n'), ((11266, 11332), 'json.dump', 'json.dump', (['listaVacinas', 'nomeArquivo'], {'indent': '(4)', 'ensure_ascii': '(False)'}), '(listaVacinas, nomeArquivo, indent=4, ensure_ascii=False)\n', (11275, 11332), False, 'import json\n'), ((2248, 2270), 'json.load', 'json.load', (['nomeArquivo'], {}), '(nomeArquivo)\n', (2257, 2270), False, 'import json\n'), ((7759, 7776), 'os.listdir', 'listdir', ([], {'path': 'BIO'}), '(path=BIO)\n', (7766, 7776), False, 'from os import listdir, remove\n'), ((9717, 9739), 'json.load', 
'json.load', (['nomeArquivo'], {}), '(nomeArquivo)\n', (9726, 9739), False, 'import json\n'), ((2615, 2663), 'json.dump', 'json.dump', (['data', 'f'], {'indent': '(4)', 'ensure_ascii': '(False)'}), '(data, f, indent=4, ensure_ascii=False)\n', (2624, 2663), False, 'import json\n'), ((10195, 10253), 'json.dump', 'json.dump', (['data', 'nomeArquivo'], {'indent': '(4)', 'ensure_ascii': '(False)'}), '(data, nomeArquivo, indent=4, ensure_ascii=False)\n', (10204, 10253), False, 'import json\n')]
|
import numpy as np
import pandas as pd
from sklearn import datasets
from sklearn.model_selection import train_test_split
test_size = 0.25
def sampling(**kwargs):
if kwargs['dataset'] == 'moons':
X, y = datasets.make_moons(n_samples=kwargs['sample_size'],
noise=kwargs['noise'],
random_state=5)
return train_test_split(X,
y.astype(str),
test_size=kwargs['test_size'],
random_state=5), X, y
elif kwargs['dataset'] == 'circles':
X, y = datasets.make_circles(n_samples=kwargs['sample_size'],
noise=kwargs['noise'],
factor=0.5,
random_state=1)
return train_test_split(X,
y.astype(str),
test_size=kwargs['test_size'],
random_state=5), X, y
elif kwargs['dataset'] == 'LS':
X, y = datasets.make_classification(n_samples=kwargs['sample_size'],
n_features=2,
n_redundant=0,
n_informative=2,
random_state=2,
n_clusters_per_class=1)
rng = np.random.RandomState(2)
X += kwargs['noise'] * rng.uniform(size=X.shape)
return train_test_split(X,
y.astype(str),
test_size=kwargs['test_size'],
random_state=5), X, y
    else:
        raise ValueError("unknown dataset: {}".format(kwargs['dataset']))
def df_split(**kwargs):
_df = kwargs['df']
return train_test_split(
_df[['x', 'y']].to_numpy(),
_df['c'].to_numpy().astype(str),
test_size=kwargs['test_size'],
random_state=5), _df[['x', 'y']].to_numpy(), _df['c'].to_numpy()
def data_split(**kwargs):
return train_test_split(kwargs['X'],
kwargs['y'].astype(str),
test_size=kwargs['test_size'],
random_state=5), kwargs['X'], kwargs['y']
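# Example usage (hypothetical parameters): draw the two-moons dataset and
# unpack the train/test split returned alongside the full arrays.
if __name__ == '__main__':
    (X_train, X_test, y_train, y_test), X, y = sampling(
        dataset='moons', sample_size=200, noise=0.2, test_size=test_size)
    print(X_train.shape, X_test.shape)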
|
[
"sklearn.datasets.make_circles",
"sklearn.datasets.make_classification",
"numpy.random.RandomState",
"sklearn.datasets.make_moons"
] |
[((217, 312), 'sklearn.datasets.make_moons', 'datasets.make_moons', ([], {'n_samples': "kwargs['sample_size']", 'noise': "kwargs['noise']", 'random_state': '(5)'}), "(n_samples=kwargs['sample_size'], noise=kwargs['noise'],\n random_state=5)\n", (236, 312), False, 'from sklearn import datasets\n'), ((636, 746), 'sklearn.datasets.make_circles', 'datasets.make_circles', ([], {'n_samples': "kwargs['sample_size']", 'noise': "kwargs['noise']", 'factor': '(0.5)', 'random_state': '(1)'}), "(n_samples=kwargs['sample_size'], noise=kwargs['noise'\n ], factor=0.5, random_state=1)\n", (657, 746), False, 'from sklearn import datasets\n'), ((1104, 1255), 'sklearn.datasets.make_classification', 'datasets.make_classification', ([], {'n_samples': "kwargs['sample_size']", 'n_features': '(2)', 'n_redundant': '(0)', 'n_informative': '(2)', 'random_state': '(2)', 'n_clusters_per_class': '(1)'}), "(n_samples=kwargs['sample_size'], n_features=2,\n n_redundant=0, n_informative=2, random_state=2, n_clusters_per_class=1)\n", (1132, 1255), False, 'from sklearn import datasets\n'), ((1487, 1511), 'numpy.random.RandomState', 'np.random.RandomState', (['(2)'], {}), '(2)\n', (1508, 1511), True, 'import numpy as np\n')]
|
# Generated by Django 3.0.2 on 2020-02-14 09:12
import django.contrib.postgres.fields.jsonb
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("cases", "0019_auto_20200120_0604"),
("algorithms", "0019_auto_20200210_0523"),
]
operations = [
migrations.RenameField(
model_name="algorithm",
old_name="visible_to_public",
new_name="public",
),
migrations.AddField(
model_name="result",
name="comment",
field=models.TextField(blank=True, default=""),
),
migrations.AddField(
model_name="result",
name="public",
field=models.BooleanField(
default=False,
help_text="If True, allow anyone to view this result along with the input image. Otherwise, only the job creator and algorithm editor will have permission to view this result.",
),
),
migrations.AlterField(
model_name="result",
name="images",
field=models.ManyToManyField(
editable=False,
related_name="algorithm_results",
to="cases.Image",
),
),
migrations.AlterField(
model_name="result",
name="job",
field=models.OneToOneField(
editable=False,
on_delete=django.db.models.deletion.CASCADE,
to="algorithms.Job",
),
),
migrations.AlterField(
model_name="result",
name="output",
field=django.contrib.postgres.fields.jsonb.JSONField(
default=dict, editable=False
),
),
]
|
[
"django.db.models.TextField",
"django.db.models.OneToOneField",
"django.db.models.ManyToManyField",
"django.db.migrations.RenameField",
"django.db.models.BooleanField"
] |
[((361, 460), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""algorithm"""', 'old_name': '"""visible_to_public"""', 'new_name': '"""public"""'}), "(model_name='algorithm', old_name='visible_to_public',\n new_name='public')\n", (383, 460), False, 'from django.db import migrations, models\n'), ((613, 653), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'default': '""""""'}), "(blank=True, default='')\n", (629, 653), False, 'from django.db import migrations, models\n'), ((773, 995), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': '"""If True, allow anyone to view this result along with the input image. Otherwise, only the job creator and algorithm editor will have permission to view this result."""'}), "(default=False, help_text=\n 'If True, allow anyone to view this result along with the input image. Otherwise, only the job creator and algorithm editor will have permission to view this result.'\n )\n", (792, 995), False, 'from django.db import migrations, models\n'), ((1154, 1249), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'editable': '(False)', 'related_name': '"""algorithm_results"""', 'to': '"""cases.Image"""'}), "(editable=False, related_name='algorithm_results', to\n ='cases.Image')\n", (1176, 1249), False, 'from django.db import migrations, models\n'), ((1426, 1533), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'editable': '(False)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""algorithms.Job"""'}), "(editable=False, on_delete=django.db.models.deletion.\n CASCADE, to='algorithms.Job')\n", (1446, 1533), False, 'from django.db import migrations, models\n')]
|
import numpy as np
def get_predecessor(T,P):
# copy the inputs
T = np.copy(T)
P = np.copy(P)
P_size = P.shape[0]
T_size = T.shape[0]
adj = np.zeros((P_size + T_size,P_size + T_size))
# predecessor for Text
for i in range(1,T_size):
adj[i, i-1] = 1
# predecessor for Pattern
for i in range(1,P_size):
adj[T_size+i, T_size+i-1] = 1
return adj
def get_graph_struct(T, P, h_i, h_j, h_s):
# copy the inputs
T = np.copy(T)
P = np.copy(P)
P_size = P.shape[0]
T_size = T.shape[0]
adj = np.zeros((P_size + T_size,P_size + T_size))
for i in range(h_s+1, h_i):
adj[i, h_i] = 1
adj[T_size, T_size + h_j] = 1
for i in range(T_size):
adj[i, T_size+h_j] = 1
for i in range(P_size):
adj[i+T_size, h_i] = 1
return adj
def get_seq_mat(T,P):
n = T.shape[0]
m = P.shape[0]
    mat = np.eye(n + m)
# connect each character to its previous
for i in range(1,n+m):
if i == n:
# don't do it for the start of the pattern
continue
mat[i, i-1] = 1
    # connect each character in the text to its equal character in the pattern
for i in range(n):
for j in range(m):
if T[i] == P[j]:
mat[i, j+n] = 1
mat[j+n, i] = 1
# connect the start of the pattern with all character upfront
mat[n, n+1:] = 1
return mat
def get_t(T, P, s):
i = s
j = 0
N = T.shape[0]
M = P.shape[0]
while i < N:
if T[i] != P[j]:
return i
j +=1
i +=1
if j >= M:
return i
return N - 1
def get_bipartite_mat(T, P, s, num_classes=3):
'''
args
-----------------------------
T: the text
P: the pattern
s: current hint s
returns
-----------------------------
        mat: constructed matrix, as follows:
            1- all irrelevant edges have a value of 0
            2- relevant edges have a value of 1 if the characters are equal,
                otherwise a value of 2
'''
# length of the text
N = T.shape[0]
# length of the pattern
M = P.shape[0]
    mat = np.zeros((N+M, N+M), dtype=int)  # np.int was removed from NumPy; plain int is equivalent
t = get_t(T, P, s)
for i in range(M):
p_char = P[i]
for j in range(s,t):
t_char = T[j]
if t_char == p_char:
mat[j, i+N] = 1
mat[i+N, j] = 1
else:
mat[j, i+N] = 2
mat[i+N, j] = 2
    one_hot_mat = np.zeros((N+M, N+M, num_classes), dtype=int)
for i in range(len(mat)):
for j in range(len(mat[0])):
class_id = mat[i, j]
one_hot_mat[i, j, class_id] = 1
return one_hot_mat
#=== *** ===#
def get_everything_matched_to_this_point(T, P, s):
'''
return a binary mask for the pattern
'''
    result = np.zeros(T.shape[0] + P.shape[0], dtype=int)
i = s
j = 0
while j < P.shape[0]:
if T[i] == P[j]:
result[T.shape[0]+j] = 1
i+=1
j+=1
else:
break
return result
def get_bipartite_mat_from_pattern_to_text(T, P, s):
# length of the text
N = T.shape[0]
# length of the pattern
M = P.shape[0]
    mat = np.zeros((N+M, N+M), dtype=int)
for i in range(M):
p_char = P[i]
for j in range(s,N):
t_char = T[j]
if t_char == p_char:
mat[j, i+N] = 1
mat[i+N, j] = 1
else:
mat[j, i+N] = 2
                mat[i+N, j] = 2
    return mat
def get_seq_mat_i_j(T, P , i ,j, s):
n = T.shape[0]
m = P.shape[0]
mat = np.zeros((n+m, n+m))
# connect each character to its previous
# for i in range(1,n+m):
# if i == n:
# # don't do it for the start of the pattern
# continue
# mat[i, i-1] = 1
# connect node i with node j
mat[i, j+n] = 1
mat[j+n, i] = 1
# connect node s with i
mat[s, i] = 1
mat[i,s] = 1
# connect first node in P with node
mat[n,n+j] = 1
return mat
def get_edge_mat(T, P, start, end):
    '''
    edge between start and end
    '''
    n = T.shape[0]
    m = P.shape[0]
    mat = np.zeros((n+m,n+m))
    mat[start, end] = 1
    return mat
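# --- Illustrative usage (added sketch, not part of the original module) ---
# Minimal demo of get_seq_mat on a toy text/pattern pair; characters are
# assumed to be encoded as small integer arrays, as the functions above expect.
def _demo_seq_mat():
    T = np.array([1, 2, 3, 1, 2])  # text "abcab"
    P = np.array([1, 2])           # pattern "ab"
    mat = get_seq_mat(T, P)
    # equal characters in text and pattern are cross-linked
    assert mat[0, 5] == 1 and mat[5, 0] == 1  # T[0] == P[0]
    # the start of the pattern connects to every later pattern position
    assert mat[5, 6] == 1
    print(mat)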
|
[
"numpy.eye",
"numpy.zeros",
"numpy.copy"
] |
[((72, 82), 'numpy.copy', 'np.copy', (['T'], {}), '(T)\n', (79, 82), True, 'import numpy as np\n'), ((89, 99), 'numpy.copy', 'np.copy', (['P'], {}), '(P)\n', (96, 99), True, 'import numpy as np\n'), ((154, 198), 'numpy.zeros', 'np.zeros', (['(P_size + T_size, P_size + T_size)'], {}), '((P_size + T_size, P_size + T_size))\n', (162, 198), True, 'import numpy as np\n'), ((451, 461), 'numpy.copy', 'np.copy', (['T'], {}), '(T)\n', (458, 461), True, 'import numpy as np\n'), ((468, 478), 'numpy.copy', 'np.copy', (['P'], {}), '(P)\n', (475, 478), True, 'import numpy as np\n'), ((533, 577), 'numpy.zeros', 'np.zeros', (['(P_size + T_size, P_size + T_size)'], {}), '((P_size + T_size, P_size + T_size))\n', (541, 577), True, 'import numpy as np\n'), ((864, 877), 'numpy.eye', 'np.eye', (['(n + m)'], {}), '(n + m)\n', (870, 877), True, 'import numpy as np\n'), ((2028, 2066), 'numpy.zeros', 'np.zeros', (['(N + M, N + M)'], {'dtype': 'np.int'}), '((N + M, N + M), dtype=np.int)\n', (2036, 2066), True, 'import numpy as np\n'), ((2325, 2376), 'numpy.zeros', 'np.zeros', (['(N + M, N + M, num_classes)'], {'dtype': 'np.int'}), '((N + M, N + M, num_classes), dtype=np.int)\n', (2333, 2376), True, 'import numpy as np\n'), ((2653, 2700), 'numpy.zeros', 'np.zeros', (['(T.shape[0] + P.shape[0])'], {'dtype': 'np.int'}), '(T.shape[0] + P.shape[0], dtype=np.int)\n', (2661, 2700), True, 'import numpy as np\n'), ((2998, 3036), 'numpy.zeros', 'np.zeros', (['(N + M, N + M)'], {'dtype': 'np.int'}), '((N + M, N + M), dtype=np.int)\n', (3006, 3036), True, 'import numpy as np\n'), ((3340, 3364), 'numpy.zeros', 'np.zeros', (['(n + m, n + m)'], {}), '((n + m, n + m))\n', (3348, 3364), True, 'import numpy as np\n'), ((3823, 3847), 'numpy.zeros', 'np.zeros', (['(n + m, n + m)'], {}), '((n + m, n + m))\n', (3831, 3847), True, 'import numpy as np\n')]
|
import os
import sys
proj_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0,proj_dir)
import random
from itertools import repeat
import utils.blockworld as blockworld
from model.utils.Search_Tree import *
class BFS_Agent:
"""An agent performing exhaustive BFS search. This can take a long time to finish."""
def __init__(self, world=None,shuffle=False,random_seed=None):
self.world = world
self.shuffle = shuffle
self.random_seed = random_seed
def __str__(self):
"""Yields a string representation of the agent"""
return self.__class__.__name__+' shuffle:'+str(self.shuffle)+' random seed: '+str(self.random_seed)
def set_world(self,world):
self.world = world
def get_parameters(self):
"""Returns dictionary of agent parameters."""
return {
'agent_type':self.__class__.__name__,
'random_seed':self.random_seed
}
def search(self,current_nodes):
"""Performs one expansion of the nodes in current nodes. Returns either list of expanded nodes, found solution node or empty list. To introduce randomness, the current nodes can be shuffled."""
cost = 0 #track number of states that are evaluated
if self.shuffle:
random.seed(self.random_seed) #fix random seed
random.shuffle(current_nodes)
next_nodes = [] #holds the nodes we get from the current expansion step
for node in current_nodes: #expand current nodes
possible_actions = node.state.possible_actions()
children = []
for action in possible_actions:
child = Node(node.state.transition(action),node.actions+[action]) #generate new node
#check if child node is winning
cost += 1
if child.state.is_win():
#we've found a winning state
return "Winning", child, cost
next_nodes.append(child)
return "Ongoing",next_nodes, cost
def act(self, steps = None, verbose = False):
"""Makes the agent act, including changing the world state."""
#Ensure that we have a random seed if none is set
states_evaluated = 0
if self.random_seed is None: self.random_seed = random.randint(0,99999)
#check if we even can act
if self.world.status()[0] != 'Ongoing':
print("Can't act with world in status",self.world.status())
return [],{'states_evaluated':states_evaluated}
# if steps is not None:
# print("Limited number of steps selected. This is not lookahead, are you sure?")
#perform BFS search
current_nodes = [Node(self.world.current_state,[])] #initialize root node
result = "Ongoing"
while current_nodes != [] and result == "Ongoing":
#keep expanding until solution is found or there are no further states to expand
result, out, cost = self.search(current_nodes) #run expansion step
states_evaluated += cost
if result != "Winning":
current_nodes = out #we have no solution, just the next states to expand
if verbose: print("Found",len(current_nodes),"to evaluate at cost",cost)
#if we've found a solution
if result == "Winning":
actions = out.actions
actions = actions[0:steps] #extract the steps to take. None gives complete list
if verbose: print("Found solution with ",len(actions),"actions")
#apply steps to world
for action in actions: self.world.apply_action(action)
if verbose: print("Done, reached world status: ",self.world.status())
#only returns however many steps we actually acted, not the entire sequence
else:
actions = []
if verbose: print("Found no solution")
return actions,{'states_evaluated':states_evaluated}
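# --- Illustrative usage (added sketch, not part of the original module) ---
# How the agent is meant to be driven; the world construction is a
# hypothetical placeholder -- see utils.blockworld for the real constructor.
# agent = BFS_Agent(shuffle=True, random_seed=42)
# agent.set_world(some_blockworld_instance)
# actions, stats = agent.act(verbose=True)
# print(actions, stats['states_evaluated'])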
|
[
"random.randint",
"random.shuffle",
"os.path.realpath",
"sys.path.insert",
"random.seed"
] |
[((94, 122), 'sys.path.insert', 'sys.path.insert', (['(0)', 'proj_dir'], {}), '(0, proj_dir)\n', (109, 122), False, 'import sys\n'), ((65, 91), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (81, 91), False, 'import os\n'), ((1310, 1339), 'random.seed', 'random.seed', (['self.random_seed'], {}), '(self.random_seed)\n', (1321, 1339), False, 'import random\n'), ((1369, 1398), 'random.shuffle', 'random.shuffle', (['current_nodes'], {}), '(current_nodes)\n', (1383, 1398), False, 'import random\n'), ((2330, 2354), 'random.randint', 'random.randint', (['(0)', '(99999)'], {}), '(0, 99999)\n', (2344, 2354), False, 'import random\n')]
|
from telegram.ext import Updater, CommandHandler, ConversationHandler, MessageHandler, Filters, CallbackQueryHandler
from env import TOKEN
from commands import show_challs, choose_chall_to_show, SHOWS_CHOSEN_CHALL
from commands import try_answer, choose_chall_to_answer, check_answer, CHOOSE_CHALL_TO_ANSWER
def start(update, context):
welcome_txt = ['Hello, welcome to RoyalFlushBot!']
welcome_txt.append(
'The bot of "Royal Flush: A Puzzle Story", a puzzle hunt game about \
playing cards, poker hands, kings, queens and brain challenges. \
[Early Access Version]'
)
update.message.reply_text('\n'.join(welcome_txt))
def main():
updater = Updater(token=TOKEN, use_context=True)
dp = updater.dispatcher
dp.add_handler(CommandHandler('start', start))
dp.add_handler(ConversationHandler(
entry_points=[CommandHandler('show', show_challs)],
states={
SHOWS_CHOSEN_CHALL: [CallbackQueryHandler(choose_chall_to_show)],
},
fallbacks=[]
))
dp.add_handler(ConversationHandler(
entry_points=[CommandHandler('try', try_answer)],
states={
CHOOSE_CHALL_TO_ANSWER: [
CallbackQueryHandler(choose_chall_to_answer),
MessageHandler(Filters.text, check_answer)
]
},
fallbacks=[]
))
updater.start_polling()
updater.idle()
if __name__ == '__main__':
print('=== BOT ATIVADO ===')
print('Digite Ctrl + C para desativar.')
main()
print('=== BOT DESATIVADO ===')
|
[
"telegram.ext.Updater",
"telegram.ext.CommandHandler",
"telegram.ext.MessageHandler",
"telegram.ext.CallbackQueryHandler"
] |
[((706, 744), 'telegram.ext.Updater', 'Updater', ([], {'token': 'TOKEN', 'use_context': '(True)'}), '(token=TOKEN, use_context=True)\n', (713, 744), False, 'from telegram.ext import Updater, CommandHandler, ConversationHandler, MessageHandler, Filters, CallbackQueryHandler\n'), ((796, 826), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""start"""', 'start'], {}), "('start', start)\n", (810, 826), False, 'from telegram.ext import Updater, CommandHandler, ConversationHandler, MessageHandler, Filters, CallbackQueryHandler\n'), ((892, 927), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""show"""', 'show_challs'], {}), "('show', show_challs)\n", (906, 927), False, 'from telegram.ext import Updater, CommandHandler, ConversationHandler, MessageHandler, Filters, CallbackQueryHandler\n'), ((1133, 1166), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""try"""', 'try_answer'], {}), "('try', try_answer)\n", (1147, 1166), False, 'from telegram.ext import Updater, CommandHandler, ConversationHandler, MessageHandler, Filters, CallbackQueryHandler\n'), ((982, 1024), 'telegram.ext.CallbackQueryHandler', 'CallbackQueryHandler', (['choose_chall_to_show'], {}), '(choose_chall_to_show)\n', (1002, 1024), False, 'from telegram.ext import Updater, CommandHandler, ConversationHandler, MessageHandler, Filters, CallbackQueryHandler\n'), ((1243, 1287), 'telegram.ext.CallbackQueryHandler', 'CallbackQueryHandler', (['choose_chall_to_answer'], {}), '(choose_chall_to_answer)\n', (1263, 1287), False, 'from telegram.ext import Updater, CommandHandler, ConversationHandler, MessageHandler, Filters, CallbackQueryHandler\n'), ((1306, 1348), 'telegram.ext.MessageHandler', 'MessageHandler', (['Filters.text', 'check_answer'], {}), '(Filters.text, check_answer)\n', (1320, 1348), False, 'from telegram.ext import Updater, CommandHandler, ConversationHandler, MessageHandler, Filters, CallbackQueryHandler\n')]
|
import random
from copy import deepcopy
def print_board(board, max_width):
for row in range(len(board)):
for col in range(len(board)):
print("{:>{}}".format(board[row][col], max_width), end='')
print()
def win_check(board, player, n, row, col):
horizontal, vertical, diagonal_down, diagonal_up = True, True, True, True
# Check for horizontal win
for i in range(n):
if board[row][i] != player:
horizontal = False
# Check for vertical win
for i in range(n):
if board[i][col] != player:
vertical = False
# check for downwards diagonal (i.e. top left to bottom right)
for i in range(n):
if board[i][i] != player:
diagonal_down = False
# Check for upwards diagonal (i.e. bottom left to top right)
for i in range(n):
if board[i][n - 1 - i] != player:
diagonal_up = False
return horizontal or vertical or diagonal_down or diagonal_up
def vs_bot(board, n, possible_moves, difficulty):
max_width = len(str(n ** 2)) + 1
while True:
print_board(board, max_width)
num = int(input("Player - Input location: "))
if num < 0 or num >= (n ** 2):
print("Please choose a valid location!")
continue
row = num // n
col = num % n
if board[row][col] == 'O' or board[row][col] == 'X':
print("Cannot replace a player's piece!")
continue
board[row][col] = 'O'
possible_moves.remove(num)
if win_check(board, 'O', n, row, col):
print_board(board, max_width)
print("You win!")
break
if not possible_moves:
print_board(board, max_width)
print("Draw! Board is full.")
break
# Bot move begins here
print("Bot is thinking...")
bot_num = -1
check = random.randint(0, 100)
# Medium difficulty - 50% chance of bot being easy, 50% chance being abyssal
if difficulty == 2:
if check <= 50:
                difficulty = 1  # easy (the easy branch below checks for difficulty == 1)
else:
difficulty = 4
# Hard difficulty - 20% chance of bot being easy, 80% chance being abyssal
elif difficulty == 3:
if check <= 20:
                difficulty = 1
else:
difficulty = 4
# Easy difficulty - Bot selects a random move
if difficulty == 1:
bot_num = random.choice(possible_moves)
# Abyssal difficulty - Bot utilizes minimax to find optimal move
elif difficulty == 4:
temp, bot_num = minimax(board, n, possible_moves, True)
if bot_num == -1:
print("Bot has forfeited! You won!")
break
row = bot_num // n
col = bot_num % n
board[row][col] = 'X'
possible_moves.remove(bot_num)
if win_check(board, 'X', n, row, col):
print_board(board, max_width)
print("You lost!")
break
if not possible_moves:
print_board(board, max_width)
print("Draw! Board is full.")
break
# Returns winning player (O or X), or D if draw
def find_winner(board, n):
for i in range(n):
horizontal = True
for j in range(0, n - 1):
if board[i][j] == '.':
break
if board[i][j] != board[i][j + 1]:
horizontal = False
if horizontal:
return board[i][0]
for i in range(n):
vertical = True
for j in range(0, n - 1):
if board[j][i] == '.':
break
if board[j][i] != board[j + 1][i]:
vertical = False
if vertical:
return board[0][i]
diagonal_down = True
for i in range(0, n - 1):
if board[i][i] == '.':
break
if board[i][i] != board[i + 1][i + 1]:
diagonal_down = False
if diagonal_down:
return board[0][0]
diagonal_up = True
for i in range(0, n - 1):
if board[i][n - 1 - i] == '.':
break
if board[i][n - 1 - i] != board[i + 1][n - 2 - i]:
diagonal_up = False
if diagonal_up:
return board[0][n - 1]
return 'D'
def minimax(board, n, possible_moves, maximizing_player):
    best_move = -1
    # Score terminal positions first: a completed line ends the game even
    # when empty squares remain, so check for a winner before recursing.
    winner = find_winner(board, n)
    if winner == 'O':
        return -1, best_move
    if winner == 'X':
        return 1, best_move
    if not possible_moves:
        return 0, best_move
if maximizing_player:
value = -10
for move in possible_moves:
new_board = deepcopy(board)
new_possible = deepcopy(possible_moves)
row = move // n
col = move % n
new_board[row][col] = 'X'
new_possible.remove(move)
new_value, new_move = minimax(new_board, n, new_possible, False)
if new_value > value:
value = new_value
best_move = move
return value, best_move
else:
value = 10
for move in possible_moves:
new_board = deepcopy(board)
new_possible = deepcopy(possible_moves)
row = move // n
col = move % n
new_board[row][col] = 'O'
new_possible.remove(move)
new_value, new_move = minimax(new_board, n, new_possible, True)
if new_value < value:
value = new_value
best_move = move
return value, best_move
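# --- Illustrative usage (added sketch, not part of the original program) ---
# Tiny check of minimax on a 2x2 board: X to move (maximizing) should take
# cell 1 to complete the top row. The function is not called at import time.
def _demo_minimax():
    demo_board = [['X', 1],
                  [2, 'O']]
    value, best = minimax(demo_board, 2, [1, 2], True)
    assert (value, best) == (1, 1)  # value 1 = X win, best move = cell 1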
def vs_player(board, n, possible_moves):
max_width = len(str(n ** 2)) + 1
player = 'O'
while True:
print_board(board, max_width)
num = int(input("Player " + player + " - Input location: "))
if num < 0 or num >= (n ** 2):
print("Please choose a valid location!")
continue
row = num // n
col = num % n
if board[row][col] == 'O' or board[row][col] == 'X':
print("Cannot replace a player's piece!")
continue
board[row][col] = player
possible_moves.remove(num)
if not possible_moves:
print_board(board, max_width)
print("Draw! Board is full.")
break
if win_check(board, player, n, row, col):
print_board(board, max_width)
print("Player " + player + " wins!")
break
if player == 'O':
player = 'X'
else:
player = 'O'
def main():
while True:
n = int(input("Input size of tic-tac-toe board: "))
if n > 1:
break
else:
print("Board cannot be smaller than size 2!")
board = []
possible_moves = []
for i in range(n):
new_row = []
for j in range(n):
new_row.append(i * n + j)
possible_moves.append(i * n + j)
board.append(new_row)
print("Select game mode:")
while True:
print("1 - Easy bot")
print("2 - Medium bot")
print("3 - Hard bot")
print("4 - Abyssal bot (You're not expected to win!)")
print("5 - Multiplayer")
play_type = int(input("Your choice: "))
if play_type == 1:
vs_bot(board, n, possible_moves, 1)
break
elif play_type == 2:
vs_bot(board, n, possible_moves, 2)
break
elif play_type == 3:
vs_bot(board, n, possible_moves, 3)
break
elif play_type == 4:
vs_bot(board, n, possible_moves, 4)
break
elif play_type == 5:
vs_player(board, n, possible_moves)
break
else:
print("Invalid option!")
print("Game over! Press return to close...")
input()
main()
|
[
"copy.deepcopy",
"random.choice",
"random.randint"
] |
[((1922, 1944), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (1936, 1944), False, 'import random\n'), ((2523, 2552), 'random.choice', 'random.choice', (['possible_moves'], {}), '(possible_moves)\n', (2536, 2552), False, 'import random\n'), ((4770, 4785), 'copy.deepcopy', 'deepcopy', (['board'], {}), '(board)\n', (4778, 4785), False, 'from copy import deepcopy\n'), ((4813, 4837), 'copy.deepcopy', 'deepcopy', (['possible_moves'], {}), '(possible_moves)\n', (4821, 4837), False, 'from copy import deepcopy\n'), ((5270, 5285), 'copy.deepcopy', 'deepcopy', (['board'], {}), '(board)\n', (5278, 5285), False, 'from copy import deepcopy\n'), ((5313, 5337), 'copy.deepcopy', 'deepcopy', (['possible_moves'], {}), '(possible_moves)\n', (5321, 5337), False, 'from copy import deepcopy\n')]
|
print('start identity_percent')
import os
import pandas as pd
import numpy as np
from sklearn.cluster import KMeans
import subprocess
from selseq_main import *  # provides SequenceFasta and find_tag
from selseq_constant import *
def clustering_kmeans_aln(aln_file,itself=True):
    '''input: path to a file of aligned sequences
    output: files clustered recursively by k-means
'''
aln_file = calculate_identity_percent(aln_file,itself=True)
if any((aln_file.identity_matrix<60).any()):
kmeans = KMeans(n_clusters=2)
kmeans.fit(aln_file.identity_matrix)
y_kmeans = kmeans.predict(aln_file.identity_matrix)
for kmeans_index in range(len(y_kmeans)):
name_aln_file_0 = aln_file.file_dir[0:-4] + '_0'
name_aln_file_1 = aln_file.file_dir[0:-4] + '_1'
if y_kmeans[kmeans_index] == 0:
with open(name_aln_file_0,'a') as aln_clustered:
aln_clustered.write(aln_file.name_lst[kmeans_index] + aln_file.seq_lst[kmeans_index].replace('-','').replace('\n','') + '\n')
if y_kmeans[kmeans_index] == 1:
with open(name_aln_file_1,'a') as aln_clustered:
aln_clustered.write(aln_file.name_lst[kmeans_index] + aln_file.seq_lst[kmeans_index].replace('-','').replace('\n','') + '\n')
subprocess.call('muscle ' + '-in ' +name_aln_file_0 + ' -out ' + name_aln_file_0 + '.aln 2>' + HOME_DIRECTORY + '111',shell = True)
subprocess.call('muscle ' + '-in ' +name_aln_file_1 + ' -out ' + name_aln_file_1 + '.aln 2>' + HOME_DIRECTORY + '111',shell = True)
clustering_kmeans_aln(name_aln_file_0 + '.aln',itself=True)
clustering_kmeans_aln(name_aln_file_1 + '.aln',itself=True)
os.remove(name_aln_file_0)
os.remove(name_aln_file_1)
os.remove(aln_file.file_dir)
else:
return aln_file
def calculate_identity_percent(aln_file,itself=True):
    '''input: path to a file of aligned sequences
    output: SequenceFasta with identity_percent and identity_matrix
    itself - whether to compute the identity percent for a lone sequence
'''
aln_file = SequenceFasta(aln_file)
aln_file.seq_process(strip=False)
data_persent = pd.Series()
identity_matrix = pd.DataFrame()
if itself and len(aln_file.seq_lst) == 1:
data_persent[find_tag('seq_id',aln_file.name_lst[0])+'and'+find_tag('seq_id',aln_file.name_lst[0])] = 110
aln_file.data_persent = data_persent
identity_matrix = pd.DataFrame([])
aln_file.identity_matrix = identity_matrix
return aln_file
else:
name_lst_seq_id = []
for name_seq in aln_file.name_lst:
name_lst_seq_id.append(find_tag('seq_id',name_seq))
array_100 = np.zeros((len(aln_file.name_lst), len(aln_file.name_lst))) +100
identity_matrix = pd.DataFrame(array_100,columns=name_lst_seq_id,index=name_lst_seq_id)
n=0
identical = 0
for seq_id_1 in range(len(aln_file.seq_lst)):
n += 1
for seq_id_2 in range(n,len(aln_file.seq_lst)):
for character1, character2 in zip(aln_file.seq_lst[seq_id_1],aln_file.seq_lst[seq_id_2]):
if character1 == character2:
identical +=1
seq_1 = find_tag('seq_id',aln_file.name_lst[seq_id_1])
seq_2 = find_tag('seq_id',aln_file.name_lst[seq_id_2])
persent_identical = identical / len(aln_file.seq_lst[seq_id_1]) * 100
data_persent[seq_1+'and'+seq_2] = persent_identical
identity_matrix[seq_1][seq_2] = persent_identical
identity_matrix[seq_2][seq_1] = persent_identical
identical = 0
aln_file.data_persent = data_persent
aln_file.identity_matrix = identity_matrix
return aln_file
def clustering_aln(directory):
directory_files = os.listdir(directory)
for file in directory_files:
if file.endswith('.aln'):
clustering_kmeans_aln(ALNDATA_DIRECTORY + file,itself=True)
def enumeration_identity_percent(directory):
'''Just for plot'''
data_persent_for_plot = pd.Series()
directory_files = os.listdir(directory)
for file in directory_files:
if file.endswith('.aln'):
aln_file = calculate_identity_percent(ALNDATA_DIRECTORY + file,itself=True)
data_persent_for_plot = data_persent_for_plot.append(aln_file.data_persent)
return data_persent_for_plot
print('end identity_percent')
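# --- Illustrative sketch (added example, not part of the original module) ---
# The clustering idea used above, on a toy identity matrix: if any pairwise
# identity drops below 60%, split the sequences into two groups with k-means.
def _demo_kmeans_split():
    toy = pd.DataFrame([[100, 95, 30, 28],
                        [95, 100, 32, 31],
                        [30, 32, 100, 97],
                        [28, 31, 97, 100]])
    if (toy < 60).any().any():
        labels = KMeans(n_clusters=2, n_init=10).fit_predict(toy)
        print(labels)  # e.g. [0 0 1 1]: two families of mutually similar sequences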
|
[
"pandas.DataFrame",
"os.remove",
"sklearn.cluster.KMeans",
"subprocess.call",
"pandas.Series",
"os.listdir"
] |
[((2316, 2327), 'pandas.Series', 'pd.Series', ([], {}), '()\n', (2325, 2327), True, 'import pandas as pd\n'), ((2350, 2364), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (2362, 2364), True, 'import pandas as pd\n'), ((4055, 4076), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (4065, 4076), False, 'import os\n'), ((4330, 4341), 'pandas.Series', 'pd.Series', ([], {}), '()\n', (4339, 4341), True, 'import pandas as pd\n'), ((4368, 4389), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (4378, 4389), False, 'import os\n'), ((487, 507), 'sklearn.cluster.KMeans', 'KMeans', ([], {'n_clusters': '(2)'}), '(n_clusters=2)\n', (493, 507), False, 'from sklearn.cluster import KMeans\n'), ((1408, 1543), 'subprocess.call', 'subprocess.call', (["('muscle ' + '-in ' + name_aln_file_0 + ' -out ' + name_aln_file_0 +\n '.aln 2>' + HOME_DIRECTORY + '111')"], {'shell': '(True)'}), "('muscle ' + '-in ' + name_aln_file_0 + ' -out ' +\n name_aln_file_0 + '.aln 2>' + HOME_DIRECTORY + '111', shell=True)\n", (1423, 1543), False, 'import subprocess\n'), ((1548, 1683), 'subprocess.call', 'subprocess.call', (["('muscle ' + '-in ' + name_aln_file_1 + ' -out ' + name_aln_file_1 +\n '.aln 2>' + HOME_DIRECTORY + '111')"], {'shell': '(True)'}), "('muscle ' + '-in ' + name_aln_file_1 + ' -out ' +\n name_aln_file_1 + '.aln 2>' + HOME_DIRECTORY + '111', shell=True)\n", (1563, 1683), False, 'import subprocess\n'), ((1826, 1852), 'os.remove', 'os.remove', (['name_aln_file_0'], {}), '(name_aln_file_0)\n', (1835, 1852), False, 'import os\n'), ((1861, 1887), 'os.remove', 'os.remove', (['name_aln_file_1'], {}), '(name_aln_file_1)\n', (1870, 1887), False, 'import os\n'), ((1896, 1924), 'os.remove', 'os.remove', (['aln_file.file_dir'], {}), '(aln_file.file_dir)\n', (1905, 1924), False, 'import os\n'), ((2609, 2625), 'pandas.DataFrame', 'pd.DataFrame', (['[]'], {}), '([])\n', (2621, 2625), True, 'import pandas as pd\n'), ((2958, 3029), 'pandas.DataFrame', 'pd.DataFrame', (['array_100'], {'columns': 'name_lst_seq_id', 'index': 'name_lst_seq_id'}), '(array_100, columns=name_lst_seq_id, index=name_lst_seq_id)\n', (2970, 3029), True, 'import pandas as pd\n')]
|
from os import name
import pathlib
from discord.ext import commands
import discord
from dislash import InteractionClient, ActionRow, Button, ButtonStyle, SelectMenu, SelectOption
from colored import fore, back, style
from PIL import Image, ImageFont, ImageDraw, ImageEnhance
from zeee_bot.common import glob
class Test(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name="test")
async def ___test(self, ctx):
row = ActionRow(
Button(
style=ButtonStyle.green,
label="sexy bread",
custom_id="bread_btn"
)
)
msg = await ctx.send("마 눌러바라 게이야", components=[row])
on_click = msg.create_click_listener(timeout=5)
@on_click.matching_id("bread_btn")
async def on_bread_button(inter):
await inter.reply("헤으응 부끄러웟", delete_after=2.5)
@on_click.timeout
async def on_timeout():
await msg.delete()
await ctx.send("응애 타임아웃!")
def drawProgressBar(self, d, x, y, w, h, progress, bg="black", fg="red"):
# draw background
d.ellipse((x+w, y, x+h+w, y+h), fill=bg, outline=None)
d.ellipse((x, y, x+h, y+h), fill=bg, outline=None)
d.rectangle((x+(h/2), y, x+w+(h/2), y+h), fill=bg, outline=None)
# draw progress bar
w *= progress
d.ellipse((x+w, y, x+h+w, y+h),fill=fg, outline=None)
d.ellipse((x, y, x+h, y+h),fill=fg, outline=None)
d.rectangle((x+(h/2), y, x+w+(h/2), y+h),fill=fg, outline=None)
return d
@commands.command(name='ㅅ')
async def testtest(self, ctx):
a = 'get base img.'
msg = await ctx.send(a)
base_img = Image.open(f"{pathlib.Path(__file__).parent.parent}/images/now_base.png").convert("RGBA")
draw = ImageDraw.Draw(base_img)
color = (96, 197, 241)
draw = self.drawProgressBar(draw, 15, 11, 572.5, 29, 0.5, bg=color, fg=color)
# ImageDraw.floodfill(base_img, xy=(14,24), value=color, thresh=40)
a += "\nwriting image."
await msg.edit(content=a)
base_img.save('test2.png')
a += "\nDone."
await msg.delete()
await ctx.send(file=discord.File("test2.png"))
@commands.command(name="test2")
async def __test2(self, ctx):
msg = await ctx.send(
"마 한번 골라바라 게이야",
components=[
SelectMenu(
custom_id = "bread_sexy",
placeholder="골라바라 게이야 낄낄",
max_values=2,
options=[
SelectOption("빵", "빵"),
SelectOption("빵빵", "빵빵"),
SelectOption("빵빵빵", "빵빵빵")
]
)
]
)
inter = await msg.wait_for_dropdown()
labels = [option.value for option in inter.select_menu.selected_options]
await msg.edit(content="골라부럇구만!", components=[])
await inter.reply(f"{''.join(labels)}")
def setup(bot: commands.Bot):
bot.add_cog(Test(bot))
|
[
"discord.ext.commands.command",
"discord.File",
"dislash.Button",
"pathlib.Path",
"dislash.SelectOption",
"PIL.ImageDraw.Draw"
] |
[((394, 423), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""test"""'}), "(name='test')\n", (410, 423), False, 'from discord.ext import commands\n'), ((1609, 1635), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""ㅅ"""'}), "(name='ㅅ')\n", (1625, 1635), False, 'from discord.ext import commands\n'), ((2297, 2327), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""test2"""'}), "(name='test2')\n", (2313, 2327), False, 'from discord.ext import commands\n'), ((1856, 1880), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['base_img'], {}), '(base_img)\n', (1870, 1880), False, 'from PIL import Image, ImageFont, ImageDraw, ImageEnhance\n'), ((495, 569), 'dislash.Button', 'Button', ([], {'style': 'ButtonStyle.green', 'label': '"""sexy bread"""', 'custom_id': '"""bread_btn"""'}), "(style=ButtonStyle.green, label='sexy bread', custom_id='bread_btn')\n", (501, 569), False, 'from dislash import InteractionClient, ActionRow, Button, ButtonStyle, SelectMenu, SelectOption\n'), ((2264, 2289), 'discord.File', 'discord.File', (['"""test2.png"""'], {}), "('test2.png')\n", (2276, 2289), False, 'import discord\n'), ((1765, 1787), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (1777, 1787), False, 'import pathlib\n'), ((2655, 2677), 'dislash.SelectOption', 'SelectOption', (['"""빵"""', '"""빵"""'], {}), "('빵', '빵')\n", (2667, 2677), False, 'from dislash import InteractionClient, ActionRow, Button, ButtonStyle, SelectMenu, SelectOption\n'), ((2703, 2727), 'dislash.SelectOption', 'SelectOption', (['"""빵빵"""', '"""빵빵"""'], {}), "('빵빵', '빵빵')\n", (2715, 2727), False, 'from dislash import InteractionClient, ActionRow, Button, ButtonStyle, SelectMenu, SelectOption\n'), ((2753, 2779), 'dislash.SelectOption', 'SelectOption', (['"""빵빵빵"""', '"""빵빵빵"""'], {}), "('빵빵빵', '빵빵빵')\n", (2765, 2779), False, 'from dislash import InteractionClient, ActionRow, Button, ButtonStyle, SelectMenu, SelectOption\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
try:
from setuptools import setup
except ImportError:
from os import system
system('pip install --user setuptools')
from setuptools import setup
setup(
name='automated',
version='1.3.2',
description='Automatizador de tarefas - LEDA',
license='MIT',
classifiers=[
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
url='https://github.com/gabrielfern/automated-leda-tasks',
author='<NAME>',
author_email='<EMAIL>',
packages=['automated'],
install_requires=['requests', 'python-crontab'],
)
|
[
"os.system",
"setuptools.setup"
] |
[((249, 644), 'setuptools.setup', 'setup', ([], {'name': '"""automated"""', 'version': '"""1.3.2"""', 'description': '"""Automatizador de tarefas - LEDA"""', 'license': '"""MIT"""', 'classifiers': "['Programming Language :: Python :: 2', 'Programming Language :: Python :: 3']", 'url': '"""https://github.com/gabrielfern/automated-leda-tasks"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'packages': "['automated']", 'install_requires': "['requests', 'python-crontab']"}), "(name='automated', version='1.3.2', description=\n 'Automatizador de tarefas - LEDA', license='MIT', classifiers=[\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 3'], url=\n 'https://github.com/gabrielfern/automated-leda-tasks', author='<NAME>',\n author_email='<EMAIL>', packages=['automated'], install_requires=[\n 'requests', 'python-crontab'])\n", (254, 644), False, 'from setuptools import setup\n'), ((174, 213), 'os.system', 'system', (['"""pip install --user setuptools"""'], {}), "('pip install --user setuptools')\n", (180, 213), False, 'from os import system\n')]
|
from __future__ import absolute_import, print_function
import os
import numpy as np
from subprocess import Popen, PIPE
from Bio.PDB.Polypeptide import aa1 as AA_STANDARD
from ....featuresComputer import FeatureComputerException
from ...seqToolManager import SeqToolManager
from .al2coWorkers.parsePsiBlast import parsePsiBlast
from utils import myMakeDir, tryToRemove
class Al2coManager(SeqToolManager):
'''
Computes al2co and processes their outputs. Extends class seqToolManager
'''
VAR_LIST= ["al2coScore", "al2coScoreNorm"]
BAD_SCORE_CONSERVATION = "-1048576" #Something went wrong tag
def __init__(self, computedFeatsRootDir, winSize=None, statusManager=None):
'''
:param computedFeatsRootDir: str. root path where results will be saved
:param winSize: int>=1 or None. The size of the windows for sliding window if desired
:param statusManager: class that implements .setStatus(msg) to communicate
'''
SeqToolManager.__init__(self, computedFeatsRootDir, winSize)
self.al2coOutPath= myMakeDir(self.computedFeatsRootDir,"al2co")
if winSize:
self.al2coPathWindowed= myMakeDir(self.computedFeatsRootDir,"al2co_wSize"+str(winSize))
else:
self.al2coPathWindowed= None
def getFinalPath(self):
'''
returns path where results are saved
:return al2coOutPath: str
'''
return self.al2coOutPath
def getFNames(self, prefixExtended):
'''
    Returns a list of the fnames that will be used by al2co
    :param prefixExtended. prefix for output fnames.
    :return list of fnames: [ fname1, fname2, ...]
'''
al2coProc= os.path.join(self.al2coOutPath, prefixExtended+".al2co.gz")
fNames=[al2coProc]
if not self.winSize is None:
al2coWindowedOutName= os.path.join(self.al2coPathWindowed, prefixExtended+".wsize"+str(self.winSize)+".al2co.gz")
fNames+= [al2coWindowedOutName]
return fNames
def computeFromSeqStructMapper(self, seqStructMap, prefixExtended, psiblastOutName, pssmOutNameRaw):
'''
    Computes al2co for the sequence associated with prefixExtended (an
    unambiguous id); the sequence itself is obtained through seqStructMap
:param seqStructMap: computeFeatures.seqStep.seqToolManagers.seqExtraction.SeqStructMapper
:param prefixExtended: str. unambiguous id of the sequence that will be the prefix of output names
    :param psiblastOutName: str. Path to psiblast alignment results
:param pssmOutNameRaw: str. Path to psiblast pssms results
'''
msaFname= None
prefix, chainType, chainId= self.splitExtendedPrefix(prefixExtended)[:3]
seqStr, fastaFname= seqStructMap.getSeq(chainType, chainId) # repeat as psiBlastManager can modify seqs
seqStructMap.setCurrentSeq(seqStr, chainType, chainId)
if self.checkAlreayComputed(prefixExtended):
print("Al2co already computed for %s"%prefixExtended)
return 0
fNames= self.getFNames(prefixExtended)
print("launching al2co over %s"%prefixExtended)
al2coProcName= fNames[0]
al2coRawName= os.path.join(self.al2coOutPath, prefixExtended+".fasta.csv")
try:
if os.path.isfile(psiblastOutName):
alignedSeqsDict= parsePsiBlast( inputSeq=seqStr, psiBlastOut=psiblastOutName)
filteredSeqsFname= self.runCdHit(alignedSeqsDict, inputSeq=seqStr, psiBlastOut=psiblastOutName)
msaFname= self.runClustalW(filteredSeqsFname, psiBlastOut=psiblastOutName)
cmd= [self.al2coBin, "-i", msaFname,"-m", "0", "-f", "2", "-a", "F", "-b", "50",
"-g", "0.50", "-w", "1", "-c", "0", "-o", al2coRawName, "-t", al2coProcName]
print(" ".join(cmd))
process= Popen(cmd, stdout=PIPE, stderr=PIPE)
processOut= process.communicate()
if len(processOut[1])>0:
print("Error computing al2co. Caught stdin/stderr:\n",processOut[0],processOut[1])
else:
print("Error computing al2co. Psiout does not exists for %s"%(prefixExtended))
al2coRawName=None
dataList= self.processAl2co(seqStr, seqStructMap, prefixExtended, al2coRawName, al2coProcName)
if self.winSize:
self.makeWindowed( dataList, ["al2co", "al2coNorm"], [Al2coManager.BAD_SCORE_CONSERVATION]*2, [None]*2,
fNames[1])
except (Exception, KeyboardInterrupt):
self.tryToRemoveAllFnames(prefixExtended)
raise
finally:
if msaFname: tryToRemove(msaFname)
def processAl2co(self, seq, seqStructMap, prefixExtended, al2coRaw, al2coProc):
'''
Reads al2co output file and writes another one with tabulated format, headers and
some error checking.
:param: seq: str. Sequence of the chain
:param prefixExtended: str. unambiguous id of the sequence that will be the prefix of output names
:param al2coRaw: str. Path to al2co results
:param al2coProc: str. Path where formatted results will be saved.
'''
if al2coRaw is None:
conserData = [(letter, Al2coManager.BAD_SCORE_CONSERVATION) for letter in seq]
else:
try:
conserData = self.loadRawAl2co(al2coRaw)
except IOError:
conserData= [ (letter, Al2coManager.BAD_SCORE_CONSERVATION) for letter in seq]
prefix, chainType, chainId= self.splitExtendedPrefix(prefixExtended)[:3]
# print(len(conserData)); raw_input("enter")
try:
alcoIx=0
seqIx=0
seqLen= len(seq)
letters, conserVals = zip(* conserData)
conserVals= [float(elem) for elem in conserVals]
alcoLen= len(conserData)
dataList=[]
listOfRowsToPrint=[]
mean_val= np.mean(conserVals)
std_val= np.std(conserVals)
while seqIx<seqLen and alcoIx<alcoLen:
letter= seq[seqIx]
letterAl2co, consVal= conserData[alcoIx]
if letterAl2co== letter or (letterAl2co=="-" and letter=="X"):
structIndex= seqStructMap.seqToStructIndex(chainType, chainId, seqIx, asString= True)
# print(seqIx, letter, alcoIx, structIndex)
if structIndex:
if self.filterOutLabels and structIndex[-1].isalpha():
continue
else:
structIndex=str(seqIx)+"?"
if std_val!=0:
consValNormalized= (float(consVal)- mean_val)/std_val
else:
consValNormalized=float(consVal)
dataList.append( ( (chainId, structIndex,letter), ( [consVal], [str(consValNormalized)],) ) )
listOfRowsToPrint.append( "%s %s %s %s %s"%( chainId, structIndex, letter, consVal, consValNormalized) )
alcoIx+=1
seqIx+=1
elif not letter in AA_STANDARD and letterAl2co=="-":
alcoIx+=1
seqIx+=1
elif letterAl2co=="-":
alcoIx+=1
else:
print(conserData)
print(alcoIx, seqIx)
raise ValueError("Al2co mismatch %s %s "%(letterAl2co, letter))
# print(len(listOfRowsToPrint)); raw_input("enter to continue")
self.writeResultsFromDataDictSingleChain( {chainId: listOfRowsToPrint }, outName= al2coProc)
return dataList
except (KeyboardInterrupt, Exception):
print("Exception happend computing %s"%al2coProc)
tryToRemove(al2coProc)
raise
finally:
if al2coRaw is not None:
tryToRemove(al2coRaw)
pass
def loadRawAl2co(self, filename):
'''
Loads an al2co file
:param fname: str. Path to al2co file.
:return list of strings. ["row0_Al2co","row1Al2co"...]
'''
conserv= []
for line in open(filename):
lineArray=line.split()
if lineArray[0][0].isdigit():
conserv.append(lineArray[1:3])
else:
break
return conserv
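  # --- Illustrative note (added, not part of the original module) ---
  # loadRawAl2co assumes al2co's tabular output, i.e. lines roughly like
  #   "1      M      0.83"   (position, residue, conservation score),
  # and stops at the first line whose first token does not start with a digit.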
def runCdHit(self, allHits, inputSeq, psiBlastOut, pairSeqIdThr=0.95):
tmpName= os.path.basename(psiBlastOut).split(".")[0]
tmpName= os.path.join(self.tmp, tmpName)
cdhitInName= tmpName+".in-cdhit"
cdhitOutName= tmpName+".out-cdhit"
try:
with open(cdhitInName, "w") as f:
for hit in allHits:
f.write("> %s\n"%(hit["target_full_id"]))
f.write("%s\n"%(hit["targetSeq"].replace("-","")) )
if(pairSeqIdThr > .70 and pairSeqIdThr <= 1.00): n=5
elif (pairSeqIdThr <= .70 and pairSeqIdThr >= .55): n=4
elif (pairSeqIdThr < .55 and pairSeqIdThr >= .50): n=3
elif (pairSeqIdThr < .50 and pairSeqIdThr >= .40): n=2
else: raise ValueError("Error, just .4<=pairSeqIdThr<=1.00 allowed")
cdhitCmd= [self.cdHitBin, "-i", cdhitInName, "-o", cdhitOutName, "-n", str(n),
"-c", str(pairSeqIdThr), "-T", str(self.psiBlastNThrs)]
print(" ".join(cdhitCmd))
proc = Popen(cdhitCmd, stdin= PIPE, stdout=PIPE, stderr=PIPE)
output= proc.communicate()
if output== None or output[1]!="" or "There was an error cd-hit psiblast" in output[0]:
print(output)
print ("Error when parsing %s for al2Co"%psiBlastOut)
raise FeatureComputerException("Error when cd-hit %s for al2Co"%psiBlastOut)
with open(cdhitOutName, "r+") as f:
fileData = f.read()
f.seek(0, 0)
f.write("> InputSeq\n")
f.write("%s\n"%(inputSeq.replace("-","")) )
f.write(fileData+"\n")
return cdhitOutName
except (Exception, KeyboardInterrupt):
tryToRemove(cdhitOutName)
raise
finally:
tryToRemove(cdhitInName)
def runClustalW(self, filteredSeqsFname, psiBlastOut, clustalWOutName=None):
tmpFnameCommon= ".".join(filteredSeqsFname.split(".")[:-1])
if clustalWOutName is None:
clustalWOutName= tmpFnameCommon+".clustalw"
clustalCommand=[self.clustalW, "-infile=%s"%filteredSeqsFname, "-outfile=%s"%clustalWOutName, "-outorder=INPUT"]
print(" ".join(clustalCommand))
try :
proc = Popen(clustalCommand, stdin= PIPE, stdout=PIPE, stderr=PIPE)
output= proc.communicate()
if output== None or output[1]!="" or "There was an error parsing psiblast, clustalw" in output[0]:
print(output)
print ("Error when clustalw %s for al2Co"%psiBlastOut)
raise FeatureComputerException("Error when clustalw %s for al2Co"%psiBlastOut)
return clustalWOutName
except (Exception, KeyboardInterrupt):
tryToRemove(clustalWOutName)
raise
finally:
tryToRemove(filteredSeqsFname)
tryToRemove(filteredSeqsFname+".clstr")
tryToRemove( tmpFnameCommon+".dnd")
|
[
"subprocess.Popen",
"os.path.basename",
"numpy.std",
"os.path.isfile",
"numpy.mean",
"utils.tryToRemove",
"os.path.join",
"utils.myMakeDir"
] |
[((1043, 1088), 'utils.myMakeDir', 'myMakeDir', (['self.computedFeatsRootDir', '"""al2co"""'], {}), "(self.computedFeatsRootDir, 'al2co')\n", (1052, 1088), False, 'from utils import myMakeDir, tryToRemove\n'), ((1654, 1715), 'os.path.join', 'os.path.join', (['self.al2coOutPath', "(prefixExtended + '.al2co.gz')"], {}), "(self.al2coOutPath, prefixExtended + '.al2co.gz')\n", (1666, 1715), False, 'import os\n'), ((3104, 3166), 'os.path.join', 'os.path.join', (['self.al2coOutPath', "(prefixExtended + '.fasta.csv')"], {}), "(self.al2coOutPath, prefixExtended + '.fasta.csv')\n", (3116, 3166), False, 'import os\n'), ((7860, 7891), 'os.path.join', 'os.path.join', (['self.tmp', 'tmpName'], {}), '(self.tmp, tmpName)\n', (7872, 7891), False, 'import os\n'), ((3183, 3214), 'os.path.isfile', 'os.path.isfile', (['psiblastOutName'], {}), '(psiblastOutName)\n', (3197, 3214), False, 'import os\n'), ((5638, 5657), 'numpy.mean', 'np.mean', (['conserVals'], {}), '(conserVals)\n', (5645, 5657), True, 'import numpy as np\n'), ((5673, 5691), 'numpy.std', 'np.std', (['conserVals'], {}), '(conserVals)\n', (5679, 5691), True, 'import numpy as np\n'), ((8689, 8742), 'subprocess.Popen', 'Popen', (['cdhitCmd'], {'stdin': 'PIPE', 'stdout': 'PIPE', 'stderr': 'PIPE'}), '(cdhitCmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n', (8694, 8742), False, 'from subprocess import Popen, PIPE\n'), ((9388, 9412), 'utils.tryToRemove', 'tryToRemove', (['cdhitInName'], {}), '(cdhitInName)\n', (9399, 9412), False, 'from utils import myMakeDir, tryToRemove\n'), ((9821, 9880), 'subprocess.Popen', 'Popen', (['clustalCommand'], {'stdin': 'PIPE', 'stdout': 'PIPE', 'stderr': 'PIPE'}), '(clustalCommand, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n', (9826, 9880), False, 'from subprocess import Popen, PIPE\n'), ((10331, 10361), 'utils.tryToRemove', 'tryToRemove', (['filteredSeqsFname'], {}), '(filteredSeqsFname)\n', (10342, 10361), False, 'from utils import myMakeDir, tryToRemove\n'), ((10368, 10409), 'utils.tryToRemove', 'tryToRemove', (["(filteredSeqsFname + '.clstr')"], {}), "(filteredSeqsFname + '.clstr')\n", (10379, 10409), False, 'from utils import myMakeDir, tryToRemove\n'), ((10414, 10450), 'utils.tryToRemove', 'tryToRemove', (["(tmpFnameCommon + '.dnd')"], {}), "(tmpFnameCommon + '.dnd')\n", (10425, 10450), False, 'from utils import myMakeDir, tryToRemove\n'), ((3718, 3754), 'subprocess.Popen', 'Popen', (['cmd'], {'stdout': 'PIPE', 'stderr': 'PIPE'}), '(cmd, stdout=PIPE, stderr=PIPE)\n', (3723, 3754), False, 'from subprocess import Popen, PIPE\n'), ((4459, 4480), 'utils.tryToRemove', 'tryToRemove', (['msaFname'], {}), '(msaFname)\n', (4470, 4480), False, 'from utils import myMakeDir, tryToRemove\n'), ((7206, 7228), 'utils.tryToRemove', 'tryToRemove', (['al2coProc'], {}), '(al2coProc)\n', (7217, 7228), False, 'from utils import myMakeDir, tryToRemove\n'), ((7293, 7314), 'utils.tryToRemove', 'tryToRemove', (['al2coRaw'], {}), '(al2coRaw)\n', (7304, 7314), False, 'from utils import myMakeDir, tryToRemove\n'), ((9331, 9356), 'utils.tryToRemove', 'tryToRemove', (['cdhitOutName'], {}), '(cdhitOutName)\n', (9342, 9356), False, 'from utils import myMakeDir, tryToRemove\n'), ((10271, 10299), 'utils.tryToRemove', 'tryToRemove', (['clustalWOutName'], {}), '(clustalWOutName)\n', (10282, 10299), False, 'from utils import myMakeDir, tryToRemove\n'), ((7803, 7832), 'os.path.basename', 'os.path.basename', (['psiBlastOut'], {}), '(psiBlastOut)\n', (7819, 7832), False, 'import os\n')]
|
import os
import numpy as np
import cv2
from glob import glob
import tensorflow as tf
from sklearn.model_selection import train_test_split
def load_data(path, split=0.1):
images = sorted(glob(os.path.join(path, "images/*")))
masks = sorted(glob(os.path.join(path, "masks/*")))
total_size = len(images)
valid_size = int(split * total_size)
test_size = int(split * total_size)
train_x, valid_x = train_test_split(images, test_size=valid_size, random_state=42)
train_y, valid_y = train_test_split(masks, test_size=valid_size, random_state=42)
train_x, test_x = train_test_split(train_x, test_size=test_size, random_state=42)
train_y, test_y = train_test_split(train_y, test_size=test_size, random_state=42)
return (train_x, train_y), (valid_x, valid_y), (test_x, test_y)
def read_image(path):
path = path.decode()
x = cv2.imread(path, cv2.IMREAD_COLOR)
x = cv2.resize(x, (256, 256))
x = x/255.0
return x
def read_mask(path):
path = path.decode()
x = cv2.imread(path, cv2.IMREAD_GRAYSCALE)
x = cv2.resize(x, (256, 256))
x = x/255.0
x = np.expand_dims(x, axis=-1)
return x
def tf_parse(x, y):
def _parse(x, y):
x = read_image(x)
y = read_mask(y)
return x, y
x, y = tf.numpy_function(_parse, [x, y], [tf.float64, tf.float64])
x.set_shape([256, 256, 3])
y.set_shape([256, 256, 1])
return x, y
def tf_dataset(x, y, batch=8):
dataset = tf.data.Dataset.from_tensor_slices((x, y))
dataset = dataset.map(tf_parse)
dataset = dataset.batch(batch)
dataset = dataset.repeat()
return dataset
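# --- Illustrative usage (added sketch, not part of the original module) ---
# Wiring the pieces together; "data/" is a placeholder directory that is
# assumed to contain images/ and masks/ subdirectories.
if __name__ == "__main__":
    (train_x, train_y), (valid_x, valid_y), (test_x, test_y) = load_data("data/")
    train_ds = tf_dataset(train_x, train_y, batch=8)
    for x, y in train_ds.take(1):
        print(x.shape, y.shape)  # (8, 256, 256, 3) (8, 256, 256, 1)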
|
[
"sklearn.model_selection.train_test_split",
"numpy.expand_dims",
"tensorflow.data.Dataset.from_tensor_slices",
"cv2.imread",
"tensorflow.numpy_function",
"os.path.join",
"cv2.resize"
] |
[((422, 485), 'sklearn.model_selection.train_test_split', 'train_test_split', (['images'], {'test_size': 'valid_size', 'random_state': '(42)'}), '(images, test_size=valid_size, random_state=42)\n', (438, 485), False, 'from sklearn.model_selection import train_test_split\n'), ((509, 571), 'sklearn.model_selection.train_test_split', 'train_test_split', (['masks'], {'test_size': 'valid_size', 'random_state': '(42)'}), '(masks, test_size=valid_size, random_state=42)\n', (525, 571), False, 'from sklearn.model_selection import train_test_split\n'), ((595, 658), 'sklearn.model_selection.train_test_split', 'train_test_split', (['train_x'], {'test_size': 'test_size', 'random_state': '(42)'}), '(train_x, test_size=test_size, random_state=42)\n', (611, 658), False, 'from sklearn.model_selection import train_test_split\n'), ((681, 744), 'sklearn.model_selection.train_test_split', 'train_test_split', (['train_y'], {'test_size': 'test_size', 'random_state': '(42)'}), '(train_y, test_size=test_size, random_state=42)\n', (697, 744), False, 'from sklearn.model_selection import train_test_split\n'), ((870, 904), 'cv2.imread', 'cv2.imread', (['path', 'cv2.IMREAD_COLOR'], {}), '(path, cv2.IMREAD_COLOR)\n', (880, 904), False, 'import cv2\n'), ((913, 938), 'cv2.resize', 'cv2.resize', (['x', '(256, 256)'], {}), '(x, (256, 256))\n', (923, 938), False, 'import cv2\n'), ((1023, 1061), 'cv2.imread', 'cv2.imread', (['path', 'cv2.IMREAD_GRAYSCALE'], {}), '(path, cv2.IMREAD_GRAYSCALE)\n', (1033, 1061), False, 'import cv2\n'), ((1070, 1095), 'cv2.resize', 'cv2.resize', (['x', '(256, 256)'], {}), '(x, (256, 256))\n', (1080, 1095), False, 'import cv2\n'), ((1120, 1146), 'numpy.expand_dims', 'np.expand_dims', (['x'], {'axis': '(-1)'}), '(x, axis=-1)\n', (1134, 1146), True, 'import numpy as np\n'), ((1286, 1345), 'tensorflow.numpy_function', 'tf.numpy_function', (['_parse', '[x, y]', '[tf.float64, tf.float64]'], {}), '(_parse, [x, y], [tf.float64, tf.float64])\n', (1303, 1345), True, 'import tensorflow as tf\n'), ((1470, 1512), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['(x, y)'], {}), '((x, y))\n', (1504, 1512), True, 'import tensorflow as tf\n'), ((198, 228), 'os.path.join', 'os.path.join', (['path', '"""images/*"""'], {}), "(path, 'images/*')\n", (210, 228), False, 'import os\n'), ((255, 284), 'os.path.join', 'os.path.join', (['path', '"""masks/*"""'], {}), "(path, 'masks/*')\n", (267, 284), False, 'import os\n')]
|
from queue import PriorityQueue
from burrow import Burrow, parse
def part1(rows: list[str]) -> int | None:
return go(rows)
def part2(rows: list[str]) -> int | None:
new_rows = list(rows[:3]) + [
" #D#C#B#A#",
" #D#B#A#C#",
] + list(rows[3:])
return go(new_rows)
def go(rows: list[str]) -> int | None:
burrow = parse(rows)
burrows: PriorityQueue = PriorityQueue()
burrows.put((burrow.min_cost_to_solution(), burrow))
seen = {burrow: 0}
min_cost = 0 if burrow.final() else None
while burrows.qsize():
min_cost_to_solution, burrow = burrows.get()
if min_cost and min_cost <= min_cost_to_solution:
break
old_cost = seen[burrow]
for extra_cost, new_burrow in move(burrow):
new_cost = old_cost + extra_cost
if (
(not min_cost or new_cost < min_cost)
and (new_burrow not in seen or new_cost < seen[new_burrow])
):
seen[new_burrow] = new_cost
if new_burrow.final():
min_cost = new_cost
else:
burrows.put((new_cost + new_burrow.min_cost_to_solution(), new_burrow))
return min_cost
def move(burrow: Burrow) -> list[tuple[int, Burrow]]:
for amphipod in burrow.amphipods:
new_burrow = amphipod.move_home(burrow)
if new_burrow:
return [new_burrow]
return [
new_burrow
for amphipod in burrow.amphipods
for new_burrow in amphipod.move_hallway(burrow)
]
def dump(burrow: Burrow) -> None:
for row in range(burrow.height):
for column in range(13 if row < 3 else 11):
amphipod = burrow[row, column]
if amphipod:
print(amphipod.kind, end='')
continue
if (
row == 0 or
column in (0, 12) and row == 1 or
column in (0, 1, 2, 4, 6, 8, 10, 11, 12) and row == 2 or
column in (2, 4, 6, 8, 10) and 2 < row < burrow.height - 1 or
column in (2, 3, 4, 5, 6, 7, 8, 9,
10) and row == burrow.height - 1
):
print('#', end='')
continue
if row > 2 and (column < 2 or column > 10):
print(' ', end='')
continue
print('.', end='')
print()
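# --- Illustrative sketch (added example, not part of the original solution) ---
# go() above is best-first search keyed on cost-so-far plus a lower bound on
# the remaining cost (A*). The same pattern on a toy weighted graph, with a
# zero heuristic (which reduces A* to Dijkstra); not called at import time.
def _demo_best_first():
    graph = {"a": [("b", 1), ("c", 4)], "b": [("c", 1)], "c": []}
    queue: PriorityQueue = PriorityQueue()
    queue.put((0, "a"))
    seen = {"a": 0}
    while queue.qsize():
        cost, node = queue.get()
        for neighbor, weight in graph[node]:
            new_cost = cost + weight
            if neighbor not in seen or new_cost < seen[neighbor]:
                seen[neighbor] = new_cost
                queue.put((new_cost, neighbor))
    assert seen["c"] == 2  # a -> b -> c beats the direct a -> c edge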
|
[
"queue.PriorityQueue",
"burrow.parse"
] |
[((354, 365), 'burrow.parse', 'parse', (['rows'], {}), '(rows)\n', (359, 365), False, 'from burrow import Burrow, parse\n'), ((395, 410), 'queue.PriorityQueue', 'PriorityQueue', ([], {}), '()\n', (408, 410), False, 'from queue import PriorityQueue\n')]
|
import re
def CodelandUsernameValidation(strParam):
# code goes here
valid = "false"
if strParam[0].isalpha():
if 4 < len(strParam) < 25:
if strParam[-1] != '_':
if re.match('^[a-zA-Z0-9_]+$', strParam):
valid = "true"
# code goes here
return valid
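# Illustrative cases (added; implied by the rules above):
#   CodelandUsernameValidation("u__hello_world123")  -> "true"
#   CodelandUsernameValidation("aa4d_")              -> "false"  (ends with "_")
#   CodelandUsernameValidation("123abc")             -> "false"  (must start with a letter)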
# keep this function call here
print(CodelandUsernameValidation(input()))
|
[
"re.match"
] |
[((216, 253), 're.match', 're.match', (['"""^[a-zA-Z0-9_]+$"""', 'strParam'], {}), "('^[a-zA-Z0-9_]+$', strParam)\n", (224, 253), False, 'import re\n')]
|
#!/usr/bin/env python
import os
import subprocess
TID_FILE = "src/tiddlers/system/plugins/security_tools/twsm.tid"
VERSION_FILE = "VERSION"
def get_commit_count():
return int(subprocess.check_output(["git", "rev-list", "--count", "HEAD"]).decode('utf-8'))
def main():
with open(VERSION_FILE, "r") as f:
version = f.read().strip()
# Some sanity
mm = version.split(".")
assert len(mm) == 2, "Expected version format MAJOR.MINOR"
    assert mm[0].isdigit() and mm[1].isdigit(), "Expected version integers MAJOR.MINOR"
ls = list()
with open(TID_FILE, "r") as f:
version_string = "version: {}.{}".format(version, get_commit_count())
for l in f:
if l.startswith("version:"):
print("Injecting version: {}".format(version_string))
ls.append(version_string + "\n")
else:
ls.append(l)
with open(TID_FILE, "w") as f:
f.write("".join(ls))
print("Finished")
if __name__ == "__main__":
main()
|
[
"subprocess.check_output"
] |
[((181, 244), 'subprocess.check_output', 'subprocess.check_output', (["['git', 'rev-list', '--count', 'HEAD']"], {}), "(['git', 'rev-list', '--count', 'HEAD'])\n", (204, 244), False, 'import subprocess\n')]
|
"""
Image viewer
"""
import itertools, os, sys
import tkinter as tk
import tkinter.ttk as ttk
import tkinter.font as tkFont
from tkinter import filedialog
from tkinterdnd2 import *
from typing import Tuple # for function annotations
from PIL import Image, ImageTk # Pillow
from PIL.ExifTags import TAGS, GPSTAGS # Exif tag info
class ListView(ttk.Frame):
"""
    Display images in a list view
"""
check_str = {"uncheck":"☐", "checked":"☑"} # ☐☑☒チェックボックス用文字
def __init__(self, master):
"""
        Build the screen
        Upper Frame: for input
        Lower Frame: for output
        """
        super().__init__(master)
        self.image_op = ImageOp()
        self.u_frame = tk.Frame(bg="white") # background color makes the layout visible
        self.b_frame = tk.Frame(bg="green") # background color makes the layout visible
        self.u_frame.pack(fill=tk.X)
        self.b_frame.pack(fill=tk.BOTH, expand=True)
        self.create_input_frame(self.u_frame)
        self.treeview1 = self.create_tree_frame(self.b_frame)
        # bind
        self.treeview1.bind("<Button 1>", self.togle_checkbox) # action on left mouse click
        # self.treeview1.bind("<Double 1>", self.preview_image) # action on double click
        self.treeview1.bind("<Double 3>", self.preview_image) # action on right double click
        # to combine single click and double click:
        # self.double_click_flag =False
        # self.treeview1.bind("<Button 1>", self.mouse_click) # action on left mouse click
        # self.treeview1.bind("<Double 1>", self.double_click) # action on double click
def fixed_map(self, option):
# Fix for setting text colour for Tkinter 8.6.9
# From: https://core.tcl.tk/tk/info/509cafafae
#
# Returns the style map for 'option' with any styles starting with
# ('!disabled', '!selected', ...) filtered out.
# style.map() returns an empty list for missing options, so this
# should be future-safe.
return [elm for elm in self.style.map('Treeview', query_opt=option) if
elm[:2] != ('!disabled', '!selected')]
def create_input_frame(self, parent):
"""
        Build the input area
        Top row: file-select button, select-all, deselect and preview buttons
        Bottom row: message
"""
self.btn_f_sel = tk.Button(parent, text="ファイル選択", command=self.select_files)
self.btn_select_all = tk.Button(parent, text="すべて選択", command=self.select_all)
self.btn_deselection = tk.Button(parent, text="選択解除", command=self.deselection)
self.btn_preview = tk.Button(parent, text="プレビュー", command=self.preview_images)
self.msg = tk.StringVar(value="msg")
self.lbl_msg = tk.Label(parent
, textvariable=self.msg
, justify=tk.LEFT
, font=("Fixedsys", 11)
, relief=tk.RIDGE
, anchor=tk.W)
# pack
        self.lbl_msg.pack(side=tk.BOTTOM, fill=tk.BOTH, expand=True) # pack first, or it is not placed at the bottom
self.btn_preview.pack(side=tk.RIGHT, padx=5)
self.btn_deselection.pack(side=tk.RIGHT, padx=5)
self.btn_select_all.pack(side=tk.RIGHT, padx=5)
self.btn_f_sel.pack(side=tk.RIGHT, padx=5)
# bind
def create_tree_frame(self, parent:tk.Frame) -> ttk.Treeview:
"""
        Create a frame that holds a Treeview and scrollbars.
        The frame hosts the Treeview and its scrollbars
        The Treeview shows a tree plus table columns, images in the tree column, and striped rows
        Args:
            Frame: parent Frame
        Returns:
            Treeview: the tree view
        """
        # refresh the style so tags take effect (appears to be needed since tkinter 8.6?)
        # required for setting the table's text and background colors
        self.style = ttk.Style()
        self.style.map('Treeview', foreground=self.fixed_map('foreground')
                    , background=self.fixed_map('background'))
        # style settings
        self.style.configure("Treeview", rowheight = 150) # initial value: images are displayed at 150px
        # create the frame; the Treeview and scrollbars are placed inside it
        frame4tree = tk.Frame(parent, bg="pink")
        frame4tree.pack(side=tk.TOP, fill=tk.BOTH, expand=True, padx=2, pady=2)
        # create the Treeview
        treeview1 = ttk.Treeview(frame4tree, style="Treeview")
        # treeview1["show"] = "headings" # default is tree and headings, so leave unset
        treeview1.tag_configure("odd", background="ivory2") # tag that sets the background color of odd rows
        # horizontal scrollbar
        h_scrollbar = tk.Scrollbar(frame4tree, orient=tk.HORIZONTAL, command=treeview1.xview)
        treeview1.configure(xscrollcommand=h_scrollbar.set)
        # vertical scrollbar
        v_scrollbar = tk.Scrollbar(frame4tree, orient=tk.VERTICAL, command=treeview1.yview)
        treeview1.configure(yscrollcommand=v_scrollbar.set)
        # pack: the widget with expand must be packed last or the others become invisible
        h_scrollbar.pack(side=tk.BOTTOM, fill=tk.X) # pack first or it is not shown
        v_scrollbar.pack(side=tk.RIGHT, fill=tk.Y) # pack first or it is not shown
        treeview1.pack(side=tk.TOP, fill=tk.BOTH, expand=True, padx=2, pady=2)
        treeview1.column("#0", width=200, stretch=False) # width of the tree column
return treeview1
def update_tree_column(self, tree1:ttk.Treeview, columns:list):
"""
        Set the TreeView column definitions and headings
        Initialize the column widths from the heading text length
        Args:
            Treeview: treeview object
            list: list of column names
        """
        tree1["columns"] = columns # set the treeview column definitions
        font1 = tkFont.Font()
        for col_name in columns:
            tree1.heading(col_name, text=col_name) # set the heading
            width1 = font1.measure(col_name) # heading text width in pixels
            tree1.column(col_name, width=width1) # set the column width
def update_tree_by_result(self, tree1:ttk.Treeview, rows:list, images:list):
"""
rows(表データ)、images(画像のデータ)をTreeViewに設定
要素の文字幅が見出しの文字幅より長い場合は、列幅を変更する。
奇数列の背景色を変更
Args:
Treeview: Treeviewインスタンス
list: 行データ(行リストの列リスト)
list: 画像データ
"""
if not rows: # 要素が無ければ戻る
return
font1 = tkFont.Font()
# Adjust column widths to the actual data length
for i, _ in enumerate(rows[0]): # loop over the columns (element count of the first row)
# Gather this column's values and find the longest one.
# Values may be numbers, so compare len(str(x)); None would become 'None', so fall back to " ".
max_str = max([x[i] for x in rows], key=lambda x:len(str(x))) or " "
# If the longest value is a string, take its longest line after splitting on newlines.
# Strictly the maximum should be searched in this split form, but that is omitted.
if type(max_str) is str:
max_str = max(max_str.split("\n"), key=len)
width1 = font1.measure(max_str) # text width in pixels
current_width = tree1.column(tree1['columns'][i], width=None) # get the current width
# Re-set the column width if the data is wider than the configured width
if width1 > current_width:
tree1.column(tree1['columns'][i], width=width1) # re-set the column width
# print(f"width reset width:{width1}, value:{max_str}") # for debugging
tree1.delete(*tree1.get_children()) # clear the Treeview
# Add the rows
for i, row in enumerate(rows):
tags1 = [] # initialize the tag values
if i & 1: # odd? same as i % 2 == 1
tags1.append("odd") # color only odd indexes (even on-screen rows, since treeview starts at 0) via the "odd" tag
# Insert the row (image= is the tree-column image, text= is the tree-column text used as a pseudo checkbox)
iid = tree1.insert("", tk.END, values=row, tags=tags1,
image=images[i], text=self.check_str["uncheck"]) # put one row of data into the Treeview
def open_file_and_get_data(self, event=None):
"""
Build file info and image thumbnails from the paths in self.file_paths
and add them to the Treeview.
Column widths are set from the data width; the row height is set from
the row with the most lines (it cannot be set per row, so all rows match the tallest one).
"""
self.image_op.msg = ""
# Drag & drop support
if event:
# The DnD file info comes in event.data, formatted as
# "{path with spaces} path_without_spaces";
# widget.tk.splitlist converts it into a tuple of path names
self.file_paths = self.u_frame.tk.splitlist(event.data)
# Keep only paths whose extension is a key of self.image_op.extensions
file_paths2 = tuple(path for path in self.file_paths if os.path.splitext(path)[1].lower() in self.image_op.extensions)
if len(file_paths2) == 0:
self.image_op.msg = "対象のファイルがありません"
self.msg.set(self.image_op.msg)
return
if file_paths2 != self.file_paths:
self.image_op.msg = "対象外のファイルは除きました"
self.file_paths = file_paths2
# Build the display data and images from the collected paths
columns1, rows1, images1, msg1 = self.image_op.get_images(self.file_paths)
self.d_images = [] # reset the images kept for the preview dialogs
self.msg.set(self.image_op.msg) # show any error message
# Initialize the column widths from the heading text length
self.update_tree_column(self.treeview1, columns1)
# Right-align the numeric columns
# self.treeview1.column("#0", anchor=tk.E) # right-align (tree column) - seems to have no effect on #0
self.treeview1.column("#2", anchor=tk.E) # right-align (width)
self.treeview1.column("#3", anchor=tk.E) # right-align (height)
self.treeview1.column("#4", anchor=tk.E) # right-align (size)
# Put the rows and images into the Treeview;
# widen columns whose data is wider than the heading and stripe the row backgrounds
self.update_tree_by_result(self.treeview1, rows1, images1)
# Set the row height to match the row with the most lines
# Flatten the 2-D data and keep only the str values
cells = [s for s in itertools.chain.from_iterable(rows1) if type(s) is str]
if cells:
# Find the element with the most newlines
longest_cell = max(cells, key=lambda x:x.count("\n"))
max_row_lines = longest_cell.count("\n") + 1 # number of lines in that cell
# Change the Treeview row height (the style applies to all rows)
if max_row_lines * 18 > 150:
self.style.configure("Treeview", rowheight = 18 * max_row_lines)
def select_files(self, event=None):
"""
Show a file-selection dialog and collect the chosen file paths,
then fetch the file info and images and display them.
"""
# Build the filetypes argument from the extension dictionary:
# {".csv":"CSV", ".tsv":"TSV"} becomes filetypes=[("CSV",".csv"), ("TSV",".tsv")]
self.file_paths = filedialog.askopenfilenames(
filetypes=[(value, key) for key, value in self.image_op.extensions.items()])
self.open_file_and_get_data() # fetch the file info and images and display them
# For combining single click and double click on the mouse.
# Unused because the response feels sluggish; kept for reference.
def mouse_click(self, event=None):
"""
Handler for a single mouse click.
Single- and double-click events both fire, so the single-click
handler waits for a possible double click and only then runs the
shared dispatcher (which branches internally).
"""
self.treeview1.after(200, self.mouse_action, event)
# For combining single click and double click on the mouse
def double_click(self,event=None):
"""
Handler for a mouse double click.
Records the double click in a flag.
"""
self.double_click_flag = True
# For combining single click and double click on the mouse
def mouse_action(self, event=None):
"""
Dispatcher run after a mouse click.
Checks the double-click flag to choose the action;
the flag is cleared after the double-click action has run.
"""
if self.double_click_flag:
self.preview_image(event)
self.double_click_flag = False
else:
self.toggle_checkbox(event)
def toggle_checkbox(self, event=None):
"""
Toggle the checkbox state of the clicked row.
"""
rowid = self.treeview1.identify_row(event.y) # find the target row from the mouse position
if self.treeview1.item(rowid, text=None) == self.check_str["uncheck"]:
self.treeview1.item(rowid, text=self.check_str["checked"])
else:
self.treeview1.item(rowid, text=self.check_str["uncheck"])
def preview_image(self, event=None, path=""):
"""
Preview an image in a dialog.
Args:
string: file path (may be empty)
"""
# For combining single click and double click on the mouse:
# the click handler has already run, so its effect must be cancelled.
# A consideration for implementing click and double click on the left button.
# self.toggle_checkbox(event)
if event:
rowid = self.treeview1.identify_row(event.y) # find the target row from the mouse position
path1 = self.treeview1.item(rowid)["values"][0].replace("\n", "") # get the file name
else:
path1 = path
# Show the dialog
dialog = tk.Toplevel(self) # create a modeless dialog
dialog.title("Preview") # title
self.d_images.append(ImageTk.PhotoImage(file=path1)) # keep a reference so the image is not garbage-collected
label1 = tk.Label(dialog, image=self.d_images[-1]) # show the newest one
label1.pack()
def preview_images(self, event=None):
"""
Preview the selected images.
"""
self.msg.set("")
# Collect the file-name column (column 1) of the rows whose checkbox is on; remove the inserted newlines.
paths = [self.treeview1.item(x)["values"][0].replace("\n", "") for x in self.treeview1.get_children() if self.treeview1.item(x)["text"] == self.check_str["checked"]]
for path1 in paths:
self.preview_image(path=path1)
if not paths:
self.msg.set("選択された画像がありません")
def select_all(self, event=None):
"""
Select every item in the Treeview.
"""
self.set_all_checkbox("checked")
def deselection(self, event=None):
"""
Deselect every item in the Treeview.
"""
self.set_all_checkbox("uncheck")
def set_all_checkbox(self, check_stat:str):
"""
Set every checkbox in the Treeview.
Args:
str: "checked" or "uncheck"
"""
for iid in self.treeview1.get_children():
self.treeview1.item(iid, text=self.check_str[check_stat])
class ImageOp():
"""
Handles operations on image data.
"""
def __init__(self):
self.msg = "" # メッセージ受渡し用
# 対象拡張子 辞書(key:拡張子、値:表示文字)
self.extensions = {".png .jpg .gif .webp":"画像", ".png":"PNG",
".jpg":"JPEG", ".gif":"GIF", ".webp":"WebP"}
def get_images(self, file_names:tuple) -> Tuple[list, str]:
"""
Read the image files and return their data.
Args:
str: file names
Returns:
columns1(list): column names
rows1(list): row data (list of rows, each a list of column values)
self.images(list): image data
msg1(str): error message (empty string means no error)
"""
msg1 = ""
columns1 = ["ファイル名", "幅(px)", "高さ(px)", "サイズ(kB)", "画像情報 EXIF", "位置情報 GPS"]
try:
self.images = [] # must live on self: a local list would be garbage-collected when the function returns
rows1 = []
for file_name in file_names: # loop over the path names
# basename = os.path.basename(file_name)
f = os.path.normpath(file_name)
wrap_file_name = f.replace("\\", "\\\n")
# File size in bytes
file_size = os.path.getsize(file_name)
# Load the image
image1 = Image.open(file_name)
# Image dimensions (width, height)
image_size = image1.size
# Read the EXIF info
exif_dict = image1.getexif()
exif = [TAGS.get(k, "Unknown")+ f": {str(v)}" for k, v in exif_dict.items()]
exif_str = "\n".join(exif)
# Read the GPS info
gps_dict = exif_dict.get_ifd(34853)
gps = [GPSTAGS.get(k, "Unknown") + f": {str(v)}" for k, v in gps_dict.items()]
gps_str = "\n".join(gps)
# Shrink to a thumbnail
image1.thumbnail((150, 150), Image.BICUBIC)
# Give every thumbnail the same footprint (otherwise the checkbox position varies per row):
# create a base image and paste the shrunken image onto it, centered
# base_image = Image.new(image1.mode, (160, 160), "#ffffff")
base_image = Image.new('RGBA', (160, 160), (255, 0, 0, 0)) # must be transparent, or GIF colors shift
horizontal = int((base_image.size[0] - image1.size[0]) / 2)
vertical = int((base_image.size[1] - image1.size[1]) / 2)
# print(f"size:{image1.size} h,v:{horizontal},{vertical}, base:{base_image.size}") # debug
base_image.paste(image1, (horizontal, vertical))
image1 = base_image
# Convert to a PhotoImage
image1 = ImageTk.PhotoImage(image1)
# Append the row data and the image
self.images.append(image1)
rows1.append([wrap_file_name, image_size[0], image_size[1],
"{:.1f}".format(file_size/1024), exif_str, gps_str])
except Exception as e:
msg1 = str(e)
print(f"error:{e}")
finally:
return columns1, rows1, self.images, msg1
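# Note on the thumbnail layout above: every image is shrunk to fit 150x150 and
# pasted centered onto a transparent 160x160 base, so e.g. a 150x100 thumbnail
# lands at offset (5, 30); this keeps the pseudo-checkbox column aligned across rows.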
if __name__ == '__main__':
root = TkinterDnD.Tk() # create the toplevel window with tkinterdnd2 applied
root.title("画像 viewer") # title
root.geometry("800x710") # size
listview = ListView(root) # create the ListView instance
root.drop_target_register(DND_FILES) # register the window as a drop target
root.dnd_bind("<<Drop>>", listview.open_file_and_get_data) # method to run after a drop
# Pick up files dragged onto the script via the command-line arguments
if len(sys.argv) > 1:
listview.file_paths = tuple(sys.argv[1:])
listview.open_file_and_get_data() # run the open handler
root.mainloop()
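# Minimal usage sketch (the script name is hypothetical): besides drag & drop,
# image paths can be passed on the command line, e.g.
#   python image_viewer.py photo1.jpg photo2.png
# which seeds listview.file_paths from sys.argv[1:] before mainloop() starts.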
|
[
"tkinter.StringVar",
"itertools.chain.from_iterable",
"PIL.Image.new",
"PIL.ImageTk.PhotoImage",
"tkinter.Button",
"os.path.getsize",
"tkinter.ttk.Style",
"tkinter.Scrollbar",
"tkinter.font.Font",
"PIL.Image.open",
"tkinter.Toplevel",
"os.path.normpath",
"tkinter.ttk.Treeview",
"tkinter.Frame",
"PIL.ExifTags.GPSTAGS.get",
"PIL.ExifTags.TAGS.get",
"os.path.splitext",
"tkinter.Label"
] |
[((664, 684), 'tkinter.Frame', 'tk.Frame', ([], {'bg': '"""white"""'}), "(bg='white')\n", (672, 684), True, 'import tkinter as tk\n'), ((727, 747), 'tkinter.Frame', 'tk.Frame', ([], {'bg': '"""green"""'}), "(bg='green')\n", (735, 747), True, 'import tkinter as tk\n'), ((2165, 2224), 'tkinter.Button', 'tk.Button', (['parent'], {'text': '"""ファイル選択"""', 'command': 'self.select_files'}), "(parent, text='ファイル選択', command=self.select_files)\n", (2174, 2224), True, 'import tkinter as tk\n'), ((2255, 2311), 'tkinter.Button', 'tk.Button', (['parent'], {'text': '"""すべて選択"""', 'command': 'self.select_all'}), "(parent, text='すべて選択', command=self.select_all)\n", (2264, 2311), True, 'import tkinter as tk\n'), ((2343, 2399), 'tkinter.Button', 'tk.Button', (['parent'], {'text': '"""選択解除"""', 'command': 'self.deselection'}), "(parent, text='選択解除', command=self.deselection)\n", (2352, 2399), True, 'import tkinter as tk\n'), ((2427, 2487), 'tkinter.Button', 'tk.Button', (['parent'], {'text': '"""プレビュー"""', 'command': 'self.preview_images'}), "(parent, text='プレビュー', command=self.preview_images)\n", (2436, 2487), True, 'import tkinter as tk\n'), ((2507, 2532), 'tkinter.StringVar', 'tk.StringVar', ([], {'value': '"""msg"""'}), "(value='msg')\n", (2519, 2532), True, 'import tkinter as tk\n'), ((2556, 2670), 'tkinter.Label', 'tk.Label', (['parent'], {'textvariable': 'self.msg', 'justify': 'tk.LEFT', 'font': "('Fixedsys', 11)", 'relief': 'tk.RIDGE', 'anchor': 'tk.W'}), "(parent, textvariable=self.msg, justify=tk.LEFT, font=('Fixedsys', \n 11), relief=tk.RIDGE, anchor=tk.W)\n", (2564, 2670), True, 'import tkinter as tk\n'), ((3575, 3586), 'tkinter.ttk.Style', 'ttk.Style', ([], {}), '()\n', (3584, 3586), True, 'import tkinter.ttk as ttk\n'), ((3910, 3937), 'tkinter.Frame', 'tk.Frame', (['parent'], {'bg': '"""pink"""'}), "(parent, bg='pink')\n", (3918, 3937), True, 'import tkinter as tk\n'), ((4060, 4102), 'tkinter.ttk.Treeview', 'ttk.Treeview', (['frame4tree'], {'style': '"""Treeview"""'}), "(frame4tree, style='Treeview')\n", (4072, 4102), True, 'import tkinter.ttk as ttk\n'), ((4310, 4381), 'tkinter.Scrollbar', 'tk.Scrollbar', (['frame4tree'], {'orient': 'tk.HORIZONTAL', 'command': 'treeview1.xview'}), '(frame4tree, orient=tk.HORIZONTAL, command=treeview1.xview)\n', (4322, 4381), True, 'import tkinter as tk\n'), ((4487, 4556), 'tkinter.Scrollbar', 'tk.Scrollbar', (['frame4tree'], {'orient': 'tk.VERTICAL', 'command': 'treeview1.yview'}), '(frame4tree, orient=tk.VERTICAL, command=treeview1.yview)\n', (4499, 4556), True, 'import tkinter as tk\n'), ((5311, 5324), 'tkinter.font.Font', 'tkFont.Font', ([], {}), '()\n', (5322, 5324), True, 'import tkinter.font as tkFont\n'), ((5955, 5968), 'tkinter.font.Font', 'tkFont.Font', ([], {}), '()\n', (5966, 5968), True, 'import tkinter.font as tkFont\n'), ((11924, 11941), 'tkinter.Toplevel', 'tk.Toplevel', (['self'], {}), '(self)\n', (11935, 11941), True, 'import tkinter as tk\n'), ((12110, 12151), 'tkinter.Label', 'tk.Label', (['dialog'], {'image': 'self.d_images[-1]'}), '(dialog, image=self.d_images[-1])\n', (12118, 12151), True, 'import tkinter as tk\n'), ((12039, 12069), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', ([], {'file': 'path1'}), '(file=path1)\n', (12057, 12069), False, 'from PIL import Image, ImageTk\n'), ((9223, 9259), 'itertools.chain.from_iterable', 'itertools.chain.from_iterable', (['rows1'], {}), '(rows1)\n', (9252, 9259), False, 'import itertools, os, sys\n'), ((14179, 14206), 'os.path.normpath', 'os.path.normpath', (['file_name'], {}), '(file_name)\n', 
(14195, 14206), False, 'import itertools, os, sys\n'), ((14317, 14343), 'os.path.getsize', 'os.path.getsize', (['file_name'], {}), '(file_name)\n', (14332, 14343), False, 'import itertools, os, sys\n'), ((14393, 14414), 'PIL.Image.open', 'Image.open', (['file_name'], {}), '(file_name)\n', (14403, 14414), False, 'from PIL import Image, ImageTk\n'), ((15199, 15244), 'PIL.Image.new', 'Image.new', (['"""RGBA"""', '(160, 160)', '(255, 0, 0, 0)'], {}), "('RGBA', (160, 160), (255, 0, 0, 0))\n", (15208, 15244), False, 'from PIL import Image, ImageTk\n'), ((15684, 15710), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['image1'], {}), '(image1)\n', (15702, 15710), False, 'from PIL import Image, ImageTk\n'), ((14582, 14604), 'PIL.ExifTags.TAGS.get', 'TAGS.get', (['k', '"""Unknown"""'], {}), "(k, 'Unknown')\n", (14590, 14604), False, 'from PIL.ExifTags import TAGS, GPSTAGS\n'), ((14796, 14821), 'PIL.ExifTags.GPSTAGS.get', 'GPSTAGS.get', (['k', '"""Unknown"""'], {}), "(k, 'Unknown')\n", (14807, 14821), False, 'from PIL.ExifTags import TAGS, GPSTAGS\n'), ((7994, 8016), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (8010, 8016), False, 'import itertools, os, sys\n')]
|
import os
__copyright__ = """Copyright 2020 Chromation, Inc"""
__license__ = """All Rights Reserved by Chromation, Inc"""
__doc__ = """
see API documentation: 'python -m pydoc microspeclib.simple'
"""
# NOTE: Sphinx ignores __init__.py files, so for generalized documentation,
# please use pydoc, or the sphinx-generated documents in doc/build,
# or the README.md file
# NOTE on CHROMASPEC_ROOTDIR
#
# It is specifically located in the __init__.py of the base microspeclib
# package, so that the ../ (src) ../ (microspec) directory can be found,
# so that, in turn, the cfg and other directories can be referenced
# programmatically and without relative references throughout the
# packages. The test system can find root package directories, but the
# runtime system has no standard for this, and we are avoiding utilizing
# a test system for runtime use.
#
# If microspeclib is in /foo/bar/microspec/src/microspeclib then
# CHROMASPEC_ROOTDIR will be /foo/bar/microspec
CHROMASPEC_ROOTDIR = os.path.realpath(
os.path.join(
os.path.dirname(__file__), # microspeclib
"..", # src
".." # microspec
)
)
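# Worked example of the path arithmetic above, using the layout from the note:
#   __file__           -> /foo/bar/microspec/src/microspeclib/__init__.py
#   dirname(__file__)  -> /foo/bar/microspec/src/microspeclib
#   join(..., "..", "..") + realpath -> /foo/bar/microspec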
|
[
"os.path.dirname"
] |
[((1098, 1123), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1113, 1123), False, 'import os\n')]
|
#!env python
import re
def c_to_mx_typename(c_type, special_map):
m = re.search("([a-zA-Z0-9]+)_t", c_type)
if m is None:
mx_type = c_type
else:
mx_type = m.groups()[0]
if c_type in special_map:
mx_type = special_map[c_type]
return mx_type.upper()
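# Illustrative results, given the special_map defined below:
#   c_to_mx_typename('uint8_t', special_map) -> 'UINT8'
#   c_to_mx_typename('float', special_map)   -> 'SINGLE'  (special-cased)
#   c_to_mx_typename('void', special_map)    -> 'VOID'    (no *_t suffix to strip)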
c_type = ('void', 'bool', 'double', 'float', 'uint64_t', 'int64_t', 'uint32_t', 'int32_t', 'uint16_t', 'int16_t', 'uint8_t', 'int8_t')
special_map = {'float': 'single', 'bool': 'logical' }
empty_trait = "template <class T>\nstruct mx_traits { };\n\n"
header_guard = """#ifndef HAVE_MX_TRAITS_HPP
#define HAVE_MX_TRAITS_HPP
#include <mex.h>
"""
trait_template = """// %s
template<> struct mx_traits<%s> {
static const mxClassID classId = mx%s_CLASS;
static inline const char* name() {
return "%s";
}
};
"""
mx_traits_header = open('include/mx_traits.hpp', 'wt')
mx_traits_header.write(header_guard)
mx_traits_header.write(empty_trait)
for type_curr in c_type:
for constness in ("", "const ",):
full_type = constness + type_curr
mx_traits_header.write(trait_template % (full_type, full_type, c_to_mx_typename(type_curr, special_map), full_type))
mx_traits_header.write("#endif // HAVE_MX_TRAITS_HPP\n")
mx_traits_header.close()
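# For reference, the specialization emitted for plain uint8_t by the template
# above looks like (whitespace illustrative):
#   // uint8_t
#   template<> struct mx_traits<uint8_t> {
#       static const mxClassID classId = mxUINT8_CLASS;
#       static inline const char* name() { return "uint8_t"; }
#   };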
|
[
"re.search"
] |
[((73, 110), 're.search', 're.search', (['"""([a-zA-Z0-9]+)_t"""', 'c_type'], {}), "('([a-zA-Z0-9]+)_t', c_type)\n", (82, 110), False, 'import re\n')]
|
from moto.cloudwatch.models import cloudwatch_backends
from localstack.services.generic_proxy import ProxyListener
from localstack.utils.aws import aws_stack
# path for backdoor API to receive raw metrics
PATH_GET_RAW_METRICS = "/cloudwatch/metrics/raw"
class ProxyListenerCloudWatch(ProxyListener):
def forward_request(self, method, path, data, headers):
# TODO: solve with custom url routing rules for ASF providers
if path.startswith(PATH_GET_RAW_METRICS):
result = cloudwatch_backends[aws_stack.get_region()].metric_data
result = [
{
"ns": r.namespace,
"n": r.name,
"v": r.value,
"t": r.timestamp,
"d": [{"n": d.name, "v": d.value} for d in r.dimensions],
}
for r in result
]
return {"metrics": result}
return True
# instantiate listener
UPDATE_CLOUD_WATCH = ProxyListenerCloudWatch()
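# Illustrative response shape for a GET on PATH_GET_RAW_METRICS (the keys are
# the short names built above; the values here are made up):
#   {"metrics": [{"ns": "AWS/EC2", "n": "CPUUtilization", "v": 42.0,
#                 "t": "2021-01-01T00:00:00Z",
#                 "d": [{"n": "InstanceId", "v": "i-0abc"}]}]}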
|
[
"localstack.utils.aws.aws_stack.get_region"
] |
[((525, 547), 'localstack.utils.aws.aws_stack.get_region', 'aws_stack.get_region', ([], {}), '()\n', (545, 547), False, 'from localstack.utils.aws import aws_stack\n')]
|
from xml.dom.minidom import parseString
from svgpathtools import svgdoc2paths, wsvg
example_text = '<svg>' \
' <rect x="100" y="100" height="200" width="200" style="fill:#0ff;" />' \
' <line x1="200" y1="200" x2="200" y2="300" />' \
' <line x1="200" y1="200" x2="300" y2="200" />' \
' <line x1="200" y1="200" x2="100" y2="200" />' \
' <line x1="200" y1="200" x2="200" y2="100" />' \
' <circle cx="200" cy="200" r="30" style="fill:#00f;" />' \
' <circle cx="200" cy="300" r="30" style="fill:#0f0;" />' \
' <circle cx="300" cy="200" r="30" style="fill:#f00;" />' \
' <circle cx="100" cy="200" r="30" style="fill:#ff0;" />' \
' <circle cx="200" cy="100" r="30" style="fill:#f0f;" />' \
' <text x="50" y="50" font-size="24">' \
' Testing SVG </text></svg>'
doc = parseString(example_text)
paths, attributes = svgdoc2paths(doc)
wsvg(paths)
|
[
"svgpathtools.svgdoc2paths",
"svgpathtools.wsvg",
"xml.dom.minidom.parseString"
] |
[((956, 981), 'xml.dom.minidom.parseString', 'parseString', (['example_text'], {}), '(example_text)\n', (967, 981), False, 'from xml.dom.minidom import parseString\n'), ((1003, 1020), 'svgpathtools.svgdoc2paths', 'svgdoc2paths', (['doc'], {}), '(doc)\n', (1015, 1020), False, 'from svgpathtools import svgdoc2paths, wsvg\n'), ((1022, 1033), 'svgpathtools.wsvg', 'wsvg', (['paths'], {}), '(paths)\n', (1026, 1033), False, 'from svgpathtools import svgdoc2paths, wsvg\n')]
|
"""
Invocation functions for the KBase Commands service.
"""
__author__ = '<NAME>'
__date__ = '6/18/14'
__version__ = '0.5'
## Imports
import re
import json
import time
import os
import base64
import urllib
import urllib2
import cStringIO
import requests
import datetime
from string import Template
from collections import defaultdict
# Local
from biokbase.narrative.common.service import init_service, method, finalize_service
from biokbase.narrative.common import kbtypes
from biokbase.InvocationService.Client import InvocationService
from biokbase.shock import Client as shockService
## Globals
VERSION = (0, 0, 1)
NAME = "KBase Commands"
class URLS:
shock = "http://shock.metagenomics.anl.gov"
workspace = "https://kbase.us/services/ws"
invocation = "https://kbase.us/services/invocation"
# Initialize
init_service(name=NAME, desc="Functions for executing KBase commands and manipulating the results", version=VERSION)
def _list_cmds():
token = os.environ['KB_AUTH_TOKEN']
invo = InvocationService(url=URLS.invocation, token=token)
return invo.valid_commands()
def _run_invo(cmd):
token = os.environ['KB_AUTH_TOKEN']
invo = InvocationService(url=URLS.invocation, token=token)
stdout, stderr = invo.run_pipeline("", cmd, [], 0, '/')
return "".join(stdout), "".join(stderr)
def _list_files(d):
token = os.environ['KB_AUTH_TOKEN']
invo = InvocationService(url=URLS.invocation, token=token)
_, files = invo.list_files("", '/', d)
return files
def _mv_file(old, new):
token = os.environ['KB_AUTH_TOKEN']
invo = InvocationService(url=URLS.invocation, token=token)
invo.rename_file("", '/', old, new)
return
def _rm_file(f):
token = os.environ['KB_AUTH_TOKEN']
invo = InvocationService(url=URLS.invocation, token=token)
invo.remove_files("", '/', f)
return
def _get_invo(name, binary=False):
# upload from invo server
stdout, stderr = _run_invo("mg-upload2shock %s %s"%(URLS.shock, name))
if stderr:
return stderr, True
node = json.loads(stdout)
# get file content from shock
return _get_shock_data(node['id'], binary=binary), False
def _get_shock_data(nodeid, binary=False):
token = os.environ['KB_AUTH_TOKEN']
shock = shockService(URLS.shock, token)
return shock.download_to_string(nodeid, binary=binary)
@method(name="Execute KBase Command")
def _execute_command(meth, command):
"""Execute given KBase command.
:param command: command to run
:type command: kbtypes.Unicode
:ui_name command: Command
:return: Results
:rtype: kbtypes.Unicode
:output_widget: DisplayTextWidget
"""
meth.stages = 2
if not command:
raise Exception("Command is empty.")
command = command.replace('$workspace', os.environ['KB_WORKSPACE_ID'])
meth.advance("Running Command")
stdout, stderr = _run_invo(command)
if (stdout == '') and (stderr == ''):
stdout = 'Your command executed successfully'
meth.advance("Displaying Output")
return json.dumps({'text': stdout, 'error': stderr})
@method(name="View KBase Commands")
def _view_cmds(meth):
"""View available KBase commands.
:return: Command List
:rtype: kbtypes.Unicode
:output_widget: CategoryViewWidget
"""
meth.stages = 2
meth.advance("Retrieving Commands")
cmd_list = _list_cmds()
meth.advance("Displaying Output")
cmd_sort = sorted(cmd_list, key=lambda k: k['title'])
cmd_data = []
for cat in cmd_sort:
data = {'title': cat['title'], 'items': []}
for c in cat['items']:
data['items'].append(c['cmd'])
cmd_data.append(data)
return json.dumps({'data': cmd_data})
@method(name="View Files")
def _view_files(meth, sortby):
"""View your files in temp invocation file space.
:param sortby: sort files by name or date, default is name
:type sortby: kbtypes.Unicode
:ui_name sortby: Sort By
:default sortby: name
:return: File List
:rtype: kbtypes.Unicode
:output_widget: GeneTableWidget
"""
meth.stages = 2
meth.advance("Retrieving File List")
file_list = _list_files("")
meth.advance("Displaying Output")
# get datetime objects
for f in file_list:
f['mod_date'] = datetime.datetime.strptime(f['mod_date'], "%b %d %Y %H:%M:%S")
# sort
if sortby == 'date':
file_sort = sorted(file_list, key=lambda k: k['mod_date'], reverse=True)
else:
file_sort = sorted(file_list, key=lambda k: k['name'])
# output
file_table = [['name', 'size', 'timestamp']]
for f in file_sort:
file_table.append([ f['name'], f['size'], f['mod_date'].ctime() ])
return json.dumps({'table': file_table})
@method(name="View PNG File")
def _view_png_file(meth, afile):
"""View a .png image file from temp invocation file space.
:param afile: file to display
:type afile: kbtypes.Unicode
:ui_name afile: File
:return: Image data
:rtype: kbtypes.Unicode
:output_widget: ImageViewWidget
"""
meth.stages = 2
if not afile:
raise Exception("Missing file name.")
if not afile.endswith('.png'):
raise Exception("Invalid file type.")
meth.advance("Retrieving Content")
content, err = _get_invo(afile, binary=True)
meth.advance("Displaying Image")
if err:
raise Exception(content)
b64png = base64.b64encode(content)
return json.dumps({'type': 'png', 'width': '600', 'data': b64png})
@method(name="Download File")
def _download_file(meth, afile):
"""Download a file from temp invocation file space.
:param afile: file to download
:type afile: kbtypes.Unicode
:ui_name afile: File
:return: Status
:rtype: kbtypes.Unicode
:output_widget: DownloadFileWidget
"""
meth.stages = 3
if not afile:
raise Exception("Missing file name.")
meth.advance("Validating Filename")
file_list = _list_files("")
has_file = False
for f in file_list:
if f['name'] == afile:
has_file = True
break
if not has_file:
raise Exception("The file '"+afile+"' does not exist")
meth.advance("Retrieving Content")
content, err = _get_invo(afile, binary=False)
if err:
raise Exception(content)
meth.advance("Creating Download")
return json.dumps({'data': content, 'name': afile})
@method(name="Upload File")
def _upload_file(meth):
"""Upload a file to temp invocation file space.
:return: Status
:rtype: kbtypes.Unicode
:output_widget: UploadFileWidget
"""
meth.stages = 1
meth.advance("Creating Upload")
return json.dumps({'url': URLS.invocation, 'auth': {'token': os.environ['KB_AUTH_TOKEN']}})
@method(name="Rename File")
def _rename_file(meth, old, new):
"""Rename a file in temp invocation file space.
:param old: old filename
:type old: kbtypes.Unicode
:ui_name old: Old
:param new: new filename
:type new: kbtypes.Unicode
:ui_name new: New
:return: Status
:rtype: kbtypes.Unicode
:output_widget: DisplayTextWidget
"""
meth.stages = 1
if not (old and new):
raise Exception("Missing file names.")
meth.advance("Renaming File")
_mv_file(old, new)
return json.dumps({'text': '%s changed to %s'%(old,new)})
@method(name="Delete File")
def _delete_file(meth, afile):
"""Delete a file from temp invocation file space.
:param afile: file to delete
:type afile: kbtypes.Unicode
:ui_name afile: File
:return: Status
:rtype: kbtypes.Unicode
:output_widget: DisplayTextWidget
"""
meth.stages = 1
if not afile:
raise Exception("Missing file name.")
meth.advance("Deleting File")
_rm_file(afile)
return json.dumps({'text': 'removed '+afile})
# Finalization
finalize_service()
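# Minimal sketch of how the helpers above chain together (the file name is
# hypothetical and a valid KB_AUTH_TOKEN is assumed in the environment):
#   stdout, stderr = _run_invo("ls /")       # run a command on the invocation server
#   content, err = _get_invo("result.txt")   # pull a file back via Shock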
|
[
"biokbase.narrative.common.service.finalize_service",
"biokbase.narrative.common.service.init_service",
"json.loads",
"biokbase.InvocationService.Client.InvocationService",
"biokbase.shock.Client",
"json.dumps",
"datetime.datetime.strptime",
"base64.b64encode",
"biokbase.narrative.common.service.method"
] |
[((799, 924), 'biokbase.narrative.common.service.init_service', 'init_service', ([], {'name': 'NAME', 'desc': '"""Functions for executing KBase commands and manipulating the results"""', 'version': 'VERSION'}), "(name=NAME, desc=\n 'Functions for executing KBase commands and manipulating the results',\n version=VERSION)\n", (811, 924), False, 'from biokbase.narrative.common.service import init_service, method, finalize_service\n'), ((2326, 2362), 'biokbase.narrative.common.service.method', 'method', ([], {'name': '"""Execute KBase Command"""'}), "(name='Execute KBase Command')\n", (2332, 2362), False, 'from biokbase.narrative.common.service import init_service, method, finalize_service\n'), ((3051, 3085), 'biokbase.narrative.common.service.method', 'method', ([], {'name': '"""View KBase Commands"""'}), "(name='View KBase Commands')\n", (3057, 3085), False, 'from biokbase.narrative.common.service import init_service, method, finalize_service\n'), ((3679, 3704), 'biokbase.narrative.common.service.method', 'method', ([], {'name': '"""View Files"""'}), "(name='View Files')\n", (3685, 3704), False, 'from biokbase.narrative.common.service import init_service, method, finalize_service\n'), ((4709, 4737), 'biokbase.narrative.common.service.method', 'method', ([], {'name': '"""View PNG File"""'}), "(name='View PNG File')\n", (4715, 4737), False, 'from biokbase.narrative.common.service import init_service, method, finalize_service\n'), ((5470, 5498), 'biokbase.narrative.common.service.method', 'method', ([], {'name': '"""Download File"""'}), "(name='Download File')\n", (5476, 5498), False, 'from biokbase.narrative.common.service import init_service, method, finalize_service\n'), ((6374, 6400), 'biokbase.narrative.common.service.method', 'method', ([], {'name': '"""Upload File"""'}), "(name='Upload File')\n", (6380, 6400), False, 'from biokbase.narrative.common.service import init_service, method, finalize_service\n'), ((6729, 6755), 'biokbase.narrative.common.service.method', 'method', ([], {'name': '"""Rename File"""'}), "(name='Rename File')\n", (6735, 6755), False, 'from biokbase.narrative.common.service import init_service, method, finalize_service\n'), ((7319, 7345), 'biokbase.narrative.common.service.method', 'method', ([], {'name': '"""Delete File"""'}), "(name='Delete File')\n", (7325, 7345), False, 'from biokbase.narrative.common.service import init_service, method, finalize_service\n'), ((7825, 7843), 'biokbase.narrative.common.service.finalize_service', 'finalize_service', ([], {}), '()\n', (7841, 7843), False, 'from biokbase.narrative.common.service import init_service, method, finalize_service\n'), ((986, 1037), 'biokbase.InvocationService.Client.InvocationService', 'InvocationService', ([], {'url': 'URLS.invocation', 'token': 'token'}), '(url=URLS.invocation, token=token)\n', (1003, 1037), False, 'from biokbase.InvocationService.Client import InvocationService\n'), ((1143, 1194), 'biokbase.InvocationService.Client.InvocationService', 'InvocationService', ([], {'url': 'URLS.invocation', 'token': 'token'}), '(url=URLS.invocation, token=token)\n', (1160, 1194), False, 'from biokbase.InvocationService.Client import InvocationService\n'), ((1371, 1422), 'biokbase.InvocationService.Client.InvocationService', 'InvocationService', ([], {'url': 'URLS.invocation', 'token': 'token'}), '(url=URLS.invocation, token=token)\n', (1388, 1422), False, 'from biokbase.InvocationService.Client import InvocationService\n'), ((1559, 1610), 'biokbase.InvocationService.Client.InvocationService', 
'InvocationService', ([], {'url': 'URLS.invocation', 'token': 'token'}), '(url=URLS.invocation, token=token)\n', (1576, 1610), False, 'from biokbase.InvocationService.Client import InvocationService\n'), ((1731, 1782), 'biokbase.InvocationService.Client.InvocationService', 'InvocationService', ([], {'url': 'URLS.invocation', 'token': 'token'}), '(url=URLS.invocation, token=token)\n', (1748, 1782), False, 'from biokbase.InvocationService.Client import InvocationService\n'), ((2023, 2041), 'json.loads', 'json.loads', (['stdout'], {}), '(stdout)\n', (2033, 2041), False, 'import json\n'), ((2233, 2264), 'biokbase.shock.Client', 'shockService', (['URLS.shock', 'token'], {}), '(URLS.shock, token)\n', (2245, 2264), True, 'from biokbase.shock import Client as shockService\n'), ((3003, 3048), 'json.dumps', 'json.dumps', (["{'text': stdout, 'error': stderr}"], {}), "({'text': stdout, 'error': stderr})\n", (3013, 3048), False, 'import json\n'), ((3646, 3676), 'json.dumps', 'json.dumps', (["{'data': cmd_data}"], {}), "({'data': cmd_data})\n", (3656, 3676), False, 'import json\n'), ((4673, 4706), 'json.dumps', 'json.dumps', (["{'table': file_table}"], {}), "({'table': file_table})\n", (4683, 4706), False, 'import json\n'), ((5371, 5396), 'base64.b64encode', 'base64.b64encode', (['content'], {}), '(content)\n', (5387, 5396), False, 'import base64\n'), ((5408, 5467), 'json.dumps', 'json.dumps', (["{'type': 'png', 'width': '600', 'data': b64png}"], {}), "({'type': 'png', 'width': '600', 'data': b64png})\n", (5418, 5467), False, 'import json\n'), ((6327, 6371), 'json.dumps', 'json.dumps', (["{'data': content, 'name': afile}"], {}), "({'data': content, 'name': afile})\n", (6337, 6371), False, 'import json\n'), ((6642, 6731), 'json.dumps', 'json.dumps', (["{'url': URLS.invocation, 'auth': {'token': os.environ['KB_AUTH_TOKEN']}}"], {}), "({'url': URLS.invocation, 'auth': {'token': os.environ[\n 'KB_AUTH_TOKEN']}})\n", (6652, 6731), False, 'import json\n'), ((7266, 7319), 'json.dumps', 'json.dumps', (["{'text': '%s changed to %s' % (old, new)}"], {}), "({'text': '%s changed to %s' % (old, new)})\n", (7276, 7319), False, 'import json\n'), ((7770, 7810), 'json.dumps', 'json.dumps', (["{'text': 'removed ' + afile}"], {}), "({'text': 'removed ' + afile})\n", (7780, 7810), False, 'import json\n'), ((4248, 4310), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["f['mod_date']", '"""%b %d %Y %H:%M:%S"""'], {}), "(f['mod_date'], '%b %d %Y %H:%M:%S')\n", (4274, 4310), False, 'import datetime\n')]
|
# -*- coding: utf-8 -*-
import numpy as np
from scipy.integrate import quad
from pmutt import _ModelBase
from pmutt import constants as c
from pmutt.io.json import remove_class
class HarmonicVib(_ModelBase):
"""Vibrational modes using the harmonic approximation. Equations used
sourced from:
- <NAME>. An Introduction to Applied Statistical Thermodynamics;
<NAME> & Sons, 2010.
Attributes
----------
vib_wavenumbers : list of float
Vibrational wavenumbers (:math:`\\tilde{\\nu}`) in 1/cm
imaginary_substitute : float, optional
If this value is set, imaginary frequencies are substituted with
this value for calculations. Otherwise, imaginary frequencies are
ignored. Default is None
"""
def __init__(self, vib_wavenumbers=[], imaginary_substitute=None):
self.imaginary_substitute = imaginary_substitute
self.vib_wavenumbers = np.array(vib_wavenumbers)
@property
def vib_wavenumbers(self):
return self._vib_wavenumbers
@vib_wavenumbers.setter
def vib_wavenumbers(self, val):
self._vib_wavenumbers = val
self._valid_vib_wavenumbers = _get_valid_vib_wavenumbers(
wavenumbers=val, substitute=self.imaginary_substitute)
self._valid_vib_temperatures = c.wavenumber_to_temp(
self._valid_vib_wavenumbers)
def get_q(self, T, include_ZPE=True):
"""Calculates the partition function
:math:`q^{vib}=\\prod_i \\frac{\\exp({-\\frac{\\Theta_{V,i}}{2T}})}
{1-\\exp({-\\frac{\\Theta_{V,i}}{T}})}` if include_ZPE = True
:math:`q^{vib}=\\prod_i \\frac{1}
{1-\\exp({-\\frac{\\Theta_{V,i}}{T}})}` if include_ZPE = False
Parameters
----------
T : float
Temperature in K
include_ZPE : bool, optional
If True, includes the zero-point energy term
Returns
-------
q_vib : float
Vibrational partition function
"""
vib_dimless = self._valid_vib_temperatures / T
if include_ZPE:
qs = np.array(
np.exp(-vib_dimless / 2.) / (1. - np.exp(-vib_dimless)))
else:
qs = np.array(1. / (1. - np.exp(-vib_dimless)))
return np.prod(qs)
def get_CvoR(self, T):
"""Calculates the dimensionless heat capacity at constant volume
:math:`\\frac{C_V^{vib}}{R}=\\sum_i \\bigg(\\frac{\\Theta_{V,i}}{2T}
\\bigg)^2 \\frac{1}{\\big(\\sinh{\\frac{\\Theta_{V,i}}{2T}}\\big)^2}`
Parameters
----------
T : float
Temperature in K
Returns
-------
CvoR_vib : float
Vibrational dimensionless heat capacity at constant volume
"""
vib_dimless = self._valid_vib_temperatures / T
CvoRs = np.array([
(0.5 * vib_dimless)**2 * (1. / np.sinh(vib_dimless / 2.))**2
])
return np.sum(CvoRs)
def get_CpoR(self, T):
"""Calculates the dimensionless heat capacity at constant pressure
:math:`\\frac{C_P^{vib}}{R}=\\frac{C_V^{vib}}{R}=\\sum_i \\bigg(\\frac{
\\Theta_{V,i}}{2T}\\bigg)^2 \\frac{1}{\\big(\\sinh{\\frac{\\Theta_{V,i}}
{2T}}\\big)^2}`
Parameters
----------
T : float
Temperature in K
Returns
-------
CpoR_vib : float
Vibrational dimensionless heat capacity at constant pressure
"""
return self.get_CvoR(T=T)
def get_ZPE(self):
"""Calculates the zero point energy
:math:`ZPE=\\frac{1}{2}k_b\\sum_i \\Theta_{V,i}`
Returns
-------
zpe : float
Zero point energy in eV
"""
return 0.5 * c.kb('eV/K') * np.sum(self._valid_vib_temperatures)
def get_UoRT(self, T):
"""Calculates the dimensionless internal energy
:math:`\\frac{U^{vib}}{RT}=\\sum_i \\bigg(\\frac{\\Theta_{V,i}}{2T}+
\\frac{\\Theta_{V,i}}{T}\\frac{\\exp\\big(-\\frac{\\Theta_{V,i}}{T}
\\big)}{1-\\exp\\big(-\\frac{\\Theta_{V_i}}{T}\\big)}\\bigg)`
Parameters
----------
T : float
Temperature in K
Returns
-------
UoRT_vib : float
Vibrational dimensionless internal energy
"""
vib_dimless = self._valid_vib_temperatures / T
UoRT = np.array([
vib_dimless / 2. + vib_dimless * np.exp(-vib_dimless) /
(1. - np.exp(-vib_dimless))
])
return np.sum(UoRT)
def get_HoRT(self, T):
"""Calculates the dimensionless enthalpy
:math:`\\frac{H^{vib}}{RT}=\\frac{U^{vib}}{RT}=\\sum_i \\bigg(\\frac{
\\Theta_{V,i}}{2T}+\\frac{\\Theta_{V,i}}{T}\\frac{\\exp\\big(-\\frac{
\\Theta_{V,i}}{T}\\big)}{1-\\exp\\big(-\\frac{\\Theta_{V_i}}{T}\\big)}
\\bigg)`
Parameters
----------
T : float
Temperature in K
Returns
-------
HoRT_vib : float
Vibrational dimensionless enthalpy
"""
return self.get_UoRT(T=T)
def get_SoR(self, T):
"""Calculates the dimensionless entropy
:math:`\\frac{S^{vib}}{R}=\\sum_i \\frac{\\Theta_{V,i}}{T}\\frac{\\exp
\\big(-\\frac{\\Theta_{V,i}}{T}\\big)}{1-\\exp\\big(-\\frac{
\\Theta_{V,i}}{T}\\big)}-\\ln \\bigg(1-\\exp\\big(-\\frac{
\\Theta_{V,i}}{T}\\big)\\bigg)`
Parameters
----------
T : float
Temperature in K
Returns
-------
SoR_vib : float
Vibrational dimensionless entropy
"""
vib_dimless = self._valid_vib_temperatures / T
return np.sum([
vib_dimless * np.exp(-vib_dimless) / (1. - np.exp(-vib_dimless)) -
np.log(1. - np.exp(-vib_dimless))
])
def get_FoRT(self, T):
"""Calculates the dimensionless Helmholtz energy
:math:`\\frac{A^{vib}}{RT}=\\frac{U^{vib}}{RT}-\\frac{S^{vib}}{R}`
Parameters
----------
T : float
Temperature in K
Returns
-------
FoRT_vib : float
Vibrational dimensionless Helmholtz energy
"""
return self.get_UoRT(T=T) - self.get_SoR(T=T)
def get_GoRT(self, T):
"""Calculates the dimensionless Gibbs energy
:math:`\\frac{G^{vib}}{RT}=\\frac{H^{vib}}{RT}-\\frac{S^{vib}}{R}`
Parameters
----------
T : float
Temperature in K
Returns
-------
GoRT_vib : float
Vibrational dimensionless Gibbs energy
"""
return self.get_HoRT(T=T) - self.get_SoR(T=T)
def to_dict(self):
"""Represents object as dictionary with JSON-accepted datatypes
Returns
-------
obj_dict : dict
"""
return {
'class': str(self.__class__),
'vib_wavenumbers': list(self.vib_wavenumbers),
'imaginary_substitute': self.imaginary_substitute
}
@classmethod
def from_dict(cls, json_obj):
"""Recreate an object from the JSON representation.
Parameters
----------
json_obj : dict
JSON representation
Returns
-------
HarmonicVib : HarmonicVib object
"""
json_obj = remove_class(json_obj)
return cls(**json_obj)
def print_calc_wavenumbers(self):
"""Prints the wavenumbers that will be used in a thermodynamic
calculation. If ``self.imaginary_substitute`` is a float, then
imaginary frequencies are replaced with that value. Otherwise,
imaginary frequencies are ignored."""
print(self._valid_vib_wavenumbers)
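# Minimal usage sketch for HarmonicVib (the wavenumbers are illustrative, not
# from any pmutt example; the -50 1/cm imaginary mode is substituted by 100):
#   vib = HarmonicVib(vib_wavenumbers=[3000., 1500., -50.],
#                     imaginary_substitute=100.)
#   HoRT = vib.get_HoRT(T=298.15)  # dimensionless vibrational enthalpy
#   SoR = vib.get_SoR(T=298.15)    # dimensionless vibrational entropy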
class QRRHOVib(_ModelBase):
"""Vibrational modes using the Quasi Rigid Rotor Harmonic Oscillator
approximation. Equations sourced from:
* <NAME>.; <NAME>.; <NAME>.; <NAME>.; <NAME>.
Phys. Chem. C 2015, 119 (4), 1840–1850.
* <NAME>. - A Eur. J. 2012, 18 (32), 9955–9964.
Attributes
----------
vib_wavenumbers : list of float
Vibrational wavenumbers (:math:`\\tilde{\\nu}`) in 1/cm
Bav : float, optional
Average molecular moment of inertia as a limiting value of small
wavenumbers. Default is 1.e-44 kg m2
v0 : float, optional
Wavenumber to scale vibrations. Default is 100 cm :sup:`-1`
alpha : int, optional
Power to raise ratio of wavenumbers. Default is 4
imaginary_substitute : float, optional
If this value is set, imaginary frequencies are substituted with
this value for calculations. Otherwise, imaginary frequencies are
ignored. Default is None
"""
def __init__(self,
vib_wavenumbers,
Bav=1.e-44,
v0=100.,
alpha=4,
imaginary_substitute=None):
self.Bav = Bav
self.v0 = v0
self.alpha = alpha
self.imaginary_substitute = imaginary_substitute
self.vib_wavenumbers = vib_wavenumbers
@property
def vib_wavenumbers(self):
return self._vib_wavenumbers
@vib_wavenumbers.setter
def vib_wavenumbers(self, val):
self._vib_wavenumbers = val
self._valid_vib_wavenumbers = _get_valid_vib_wavenumbers(
wavenumbers=val, substitute=self.imaginary_substitute)
self._valid_vib_temperatures = c.wavenumber_to_temp(
self._valid_vib_wavenumbers)
self._valid_scaled_wavenumbers = self._get_scaled_wavenumber()
self._valid_scaled_inertia = self._get_scaled_inertia()
def _get_scaled_wavenumber(self):
"""Calculates the scaled wavenumber determining mixture of RRHO to
add.
:math:`\\omega = \\frac {1}{1 + (\\frac{\\nu_0}{\\nu})^\\alpha}`
Returns
-------
scaled_wavenumber : float
Scaled wavenumber
"""
return 1. / (1. + (self.v0 / self._valid_vib_wavenumbers)**self.alpha)
def _get_scaled_inertia(self):
"""Calculates the scaled moment of inertia.
:math:`\\mu'=\\frac {\\mu B_{av}} {\\mu + B_{av}}`
Returns
-------
mu1 : float
Scaled moment of inertia in kg*m2
"""
mu = c.wavenumber_to_inertia(self._valid_vib_wavenumbers)
return mu * self.Bav / (mu + self.Bav)
def get_q(self):
"""Calculates the partition function
Returns
-------
q_vib : float
Vibrational partition function
"""
raise NotImplementedError()
def get_CvoR(self, T):
"""Calculates the dimensionless heat capacity at constant volume
:math:`\\frac {C_{v}^{qRRHO}}{R} = \\sum_{i}\\omega_i\\frac{C_{v,i}
^{RRHO}}{R} + \\frac{1}{2}(1-\\omega_i)`
:math:`\\frac{C_{v}^{RRHO}}{R} = \\sum_{i}\\exp \\bigg(-\\frac{
\\Theta_i}{T}\\bigg) \\bigg(\\frac{\\Theta_i}{T}\\frac{1}{1-\\exp(-
\\frac{\\Theta_i}{T})}\\bigg)^2`
Parameters
----------
T : float
Temperature in K
Returns
-------
CvoR_vib : float
Vibrational dimensionless heat capacity at constant volume
"""
CvoR = []
vib_dimless = self._valid_vib_temperatures / T
for vib_dimless_i, w_i in zip(vib_dimless,
self._valid_scaled_wavenumbers):
CvoR_RRHO = np.exp(-vib_dimless_i) \
* (vib_dimless_i/(1. - np.exp(-vib_dimless_i)))**2
CvoR.append(w_i * CvoR_RRHO + 0.5 * (1. - w_i))
return np.sum(CvoR)
def get_CpoR(self, T):
"""Calculates the dimensionless heat capacity at constant pressure
:math:`\\frac{C_{P}^{qRRHO}} {R} = \\frac{C_{V}^{qRRHO}} {R}`
Parameters
----------
T : float
Temperature in K
Returns
-------
CpoR_vib : float
Vibrational dimensionless heat capacity at constant pressure
"""
return self.get_CvoR(T=T)
def get_ZPE(self):
"""Calculates the zero point energy
:math:`ZPE=\\frac{1}{2}k_b\\sum_i \\omega_i\\Theta_{V,i}`
Returns
-------
zpe : float
Zero point energy in eV
"""
return 0.5 * c.kb('eV/K') * np.dot(self._valid_vib_temperatures,
self._valid_scaled_wavenumbers)
def _get_UoRT_RRHO(self, T, vib_temperature):
"""Calculates the dimensionless RRHO contribution to internal energy
Parameters
----------
T : float
Temperature in K
vib_temperature : float
Vibrational temperature in K
Returns
-------
UoRT_RRHO : float
Dimensionless internal energy of Rigid Rotor Harmonic Oscillator
"""
vib_dimless = vib_temperature / T
return vib_dimless * (0.5 + np.exp(-vib_dimless) /
(1. - np.exp(-vib_dimless)))
def get_UoRT(self, T):
"""Calculates the dimensionless internal energy
:math:`\\frac {U^{qRRHO}}{RT} = \\sum_{i}\\omega_i\\frac{U^{RRHO}}{RT}
+ \\frac{1}{2}(1-\\omega_i)`
:math:`\\frac {U^{RRHO}_{i}}{RT} = \\frac{\\Theta_i}{T} \\bigg(
\\frac{1}{2} + \\frac{\\exp(-\\frac{\\Theta_i}{T})}{1-\\exp(-\\frac{
\\Theta_i}{T})}\\bigg)`
Parameters
----------
T : float
Temperature in K
Returns
-------
UoRT_vib : float
Vibrational dimensionless internal energy
"""
UoRT_QRRHO = []
for theta_i, w_i in zip(self._valid_vib_temperatures,
self._valid_scaled_wavenumbers):
UoRT_RRHO = self._get_UoRT_RRHO(T=T, vib_temperature=theta_i)
UoRT_QRRHO.append(w_i * UoRT_RRHO + (1. - w_i) * 0.5)
return np.sum(UoRT_QRRHO)
def get_HoRT(self, T):
"""Calculates the dimensionless enthalpy
:math:`\\frac{H^{qRRHO}} {RT} = \\frac{U^{qRRHO}} {RT}`
Parameters
----------
T : float
Temperature in K
Returns
-------
HoRT_vib : float
Vibrational dimensionless enthalpy
"""
return self.get_UoRT(T=T)
def _get_SoR_H(self, T, vib_temperature):
"""Calculates the dimensionless harmonic osccilator contribution to
entropy
Parameters
----------
T : float
Temperature in K
vib_temperature : float
Vibrational temperature in K
Returns
-------
SoR_RHHO : float
Dimensionless entropy of Rigid Rotor Harmonic Oscillator
"""
return vib_temperature/T/(np.exp(vib_temperature/T)-1) \
- np.log(1-np.exp(-vib_temperature/T))
def _get_SoR_RRHO(self, T, vib_inertia):
"""Calculates the dimensionless RRHO contribution to entropy
Parameters
----------
T : float
Temperature in K
vib_inertia : float
Vibrational inertia in kg m2
Returns
-------
SoR_RHHO : float
Dimensionless entropy of Rigid Rotor Harmonic Oscillator
"""
return 0.5 + np.log(
(8. * np.pi**3 * vib_inertia * c.kb('J/K') * T / c.h('J s')**2)**
0.5)
def get_SoR(self, T):
"""Calculates the dimensionless entropy
:math:`\\frac{S^{qRRHO}}{R}=\\sum_i\\omega_i\\frac{S_i^{H}}{R}+(1-
\\omega_i)\\frac{S_i^{RRHO}}{R}`
:math:`\\frac {S^{RRHO}_i}{R} = \\frac{1}{2} + \\log \\bigg(\\bigg[
\\frac{8\\pi^3\\mu'_ik_BT}{h^2}\\bigg]^{\\frac{1}{2}}\\bigg)`
:math:`\\frac {S^{H}_i}{R}=\\bigg(\\frac{\\Theta_i}{T}\\bigg)\\frac{1}
{\\exp(\\frac{\\Theta_i}{T})-1}-\\log\\bigg(1-\\exp(\\frac{-\\Theta_i}
{T})\\bigg)`
Parameters
----------
T : float
Temperature in K
Returns
-------
SoR_vib : float
Vibrational dimensionless entropy
"""
SoR_QRRHO = []
for theta_i, mu_i, w_i in zip(self._valid_vib_temperatures,
self._valid_scaled_inertia,
self._valid_scaled_wavenumbers):
SoR_H = self._get_SoR_H(T=T, vib_temperature=theta_i)
SoR_RRHO = self._get_SoR_RRHO(T=T, vib_inertia=mu_i)
SoR_QRRHO.append(w_i * SoR_H + (1. - w_i) * SoR_RRHO)
return np.sum(SoR_QRRHO)
def get_FoRT(self, T):
"""Calculates the dimensionless Helmholtz energy
:math:`\\frac{A^{qRRHO}}{RT} = \\frac{U^{qRRHO}}{RT}-
\\frac{S^{qRRHO}}{R}`
Parameters
----------
T : float
Temperature in K
Returns
-------
FoRT_vib : float
Vibrational dimensionless Helmholtz energy
"""
return self.get_UoRT(T=T) - self.get_SoR(T=T)
def get_GoRT(self, T):
"""Calculates the dimensionless Gibbs energy
:math:`\\frac{G^{qRRHO}}{RT} = \\frac{H^{qRRHO}}{RT}-
\\frac{S^{qRRHO}}{R}`
Parameters
----------
T : float
Temperature in K
Returns
-------
GoRT_vib : float
Vibrational dimensionless Gibbs energy
"""
return self.get_HoRT(T=T) - self.get_SoR(T=T)
def to_dict(self):
"""Represents object as dictionary with JSON-accepted datatypes
Returns
-------
obj_dict : dict
"""
return {
'class': str(self.__class__),
'vib_wavenumbers': list(self.vib_wavenumbers),
'Bav': self.Bav,
'v0': self.v0,
'alpha': self.alpha,
'imaginary_substitute': self.imaginary_substitute
}
@classmethod
def from_dict(cls, json_obj):
"""Recreate an object from the JSON representation.
Parameters
----------
json_obj : dict
JSON representation
Returns
-------
QRRHOVib : QRRHOVib object
"""
json_obj = remove_class(json_obj)
return cls(**json_obj)
def print_calc_wavenumbers(self):
"""Prints the wavenumbers that will be used in a thermodynamic
calculation. If ``self.imaginary_substitute`` is a float, then
imaginary frequencies are replaced with that value. Otherwise,
imaginary frequencies are ignored."""
print(
_get_valid_vib_wavenumbers(wavenumbers=self.vib_wavenumbers,
substitute=self.imaginary_substitute))
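# Sanity check on the scaling factor used above: with the defaults v0=100 and
# alpha=4, a 1000 1/cm mode gives w = 1/(1 + (100/1000)**4) ~ 0.9999 (nearly
# pure harmonic), while a 10 1/cm mode gives w = 1/(1 + 10**4) ~ 1e-4 (treated
# almost entirely as a free rotor).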
class EinsteinVib(_ModelBase):
"""Einstein model of a crystal. Equations used sourced from
* <NAME>. An Introduction to Applied Statistical Thermodynamics;
<NAME> & Sons, 2010.
Attributes
----------
einstein_temperature : float
Einstein temperature (:math:`\\Theta_E`) in K
interaction_energy : float, optional
Interaction energy (:math:`u`) per atom in eV. Default is 0 eV
"""
def __init__(self, einstein_temperature, interaction_energy=0.):
self.einstein_temperature = einstein_temperature
self.interaction_energy = interaction_energy
def get_q(self, T):
"""Calculates the partition function
:math:`q^{vib}=\\exp\\bigg({\\frac{-u}{k_BT}}\\bigg)\\bigg(\\frac{
\\exp(-\\frac{\\Theta_E}{2T})}{1-\\exp(\\frac{-\\Theta_E}{T})}\\bigg)`
Parameters
----------
T : float
Temperature in K
Returns
-------
q_vib : float
Vibrational partition function
"""
u = self.interaction_energy
theta_E = self.einstein_temperature
return np.exp(-u/c.kb('eV/K')/T) \
* (np.exp(-theta_E/2./T)/(1. - np.exp(-theta_E/T)))
def get_CvoR(self, T):
"""Calculates the dimensionless heat capacity at constant volume
:math:`\\frac{C_V^{vib}}{R}=3\\bigg(\\frac{\\Theta_E}{T}\\bigg)^2
\\frac{\\exp(-\\frac{\\Theta_E}{T})}{\\big(1-\\exp(\\frac{-
\\Theta_E}{T})\\big)^2}`
Parameters
----------
T : float
Temperature in K
Returns
-------
CvoR_vib : float
Vibrational dimensionless heat capacity at constant volume
"""
theta_E = self.einstein_temperature
return 3. * (theta_E / T)**2 * np.exp(
-theta_E / T) / (1 - np.exp(-theta_E / T))**2
def get_CpoR(self, T):
"""Calculates the dimensionless heat capacity at constant pressure
:math:`\\frac{C_P^{vib}}{R}=\\frac{C_V^{vib}}{R}=3\\bigg(\\frac{
\\Theta_E}{T}\\bigg)^2\\frac{\\exp(-\\frac{\\Theta_E}{T})}{\\big(1-
\\exp(\\frac{-\\Theta_E}{T})\\big)^2}`
Parameters
----------
T : float
Temperature in K
Returns
-------
CpoR_vib : float
Vibrational dimensionless heat capacity at constant pressure
"""
return self.get_CvoR(T=T)
def get_ZPE(self):
"""Calculates the zero point energy
:math:`u^0_E=u+\\frac{3}{2}\\Theta_E k_B`
Returns
-------
zpe : float
Zero point energy in eV
"""
return self.interaction_energy \
+ 1.5*self.einstein_temperature*c.kb('eV/K')
def get_UoRT(self, T):
"""Calculates the dimensionless internal energy
:math:`\\frac{U^{vib}}{RT}=\\frac{u^0_E}{k_BT}+3\\frac{\\Theta_E}{T}
\\bigg(\\frac{\\exp(-\\frac{\\Theta_E}{T})}{1-\\exp(-\\frac{\\Theta_E}
{T})}\\bigg)`
Parameters
----------
T : float
Temperature in K
Returns
-------
UoRT_vib : float
Vibrational dimensionless internal energy
"""
theta_E = self.einstein_temperature
return self.get_ZPE()/c.kb('eV/K')/T \
+ 3.*theta_E/T*np.exp(-theta_E/T)/(1. - np.exp(-theta_E/T))
def get_HoRT(self, T):
"""Calculates the dimensionless enthalpy
:math:`\\frac{H^{vib}}{RT}=\\frac{U^{vib}}{RT}=\\frac{N_A u^0_E}{k_BT}
+3\\frac{\\Theta_E}{T}\\bigg(\\frac{\\exp(-\\frac{\\Theta_E}{T})}{1-
\\exp(-\\frac{\\Theta_E}{T})}\\bigg)`
Parameters
----------
T : float
Temperature in K
Returns
-------
HoRT_vib : float
Vibrational dimensionless enthalpy
"""
return self.get_UoRT(T=T)
def get_SoR(self, T):
"""Calculates the dimensionless entropy
:math:`\\frac{S^{vib}}{R}=3\\bigg(\\frac{\\Theta_E}{T}\\frac{\\exp\\big(
\\frac{-\\Theta_E}{T}\\big)}{1-\\exp\\big(-\\frac{\\Theta_E}{T}\\big)}
\\bigg)-\\ln\\bigg(1-\\exp\\big(\\frac{-\\Theta_E}{T}\\big)\\bigg)`
Parameters
----------
T : float
Temperature in K
Returns
-------
SoR_vib : float
Vibrational dimensionless entropy
"""
theta_E = self.einstein_temperature
exp_term = np.exp(-theta_E / T)
return 3. * (theta_E / T * exp_term /
(1. - exp_term) - np.log(1. - exp_term))
def get_FoRT(self, T):
"""Calculates the dimensionless Helmholtz energy
:math:`\\frac{A^{vib}}{RT}=\\frac{U^{vib}}{RT}-\\frac{S^{vib}}{R}`
Parameters
----------
T : float
Temperature in K
Returns
-------
FoRT_vib : float
Vibrational dimensionless Helmholtz energy
"""
return self.get_UoRT(T=T) - self.get_SoR(T=T)
def get_GoRT(self, T):
"""Calculates the dimensionless Gibbs energy
:math:`\\frac{G^{vib}}{RT}=\\frac{H^{vib}}{RT}-\\frac{S^{vib}}{R}`
Parameters
----------
T : float
Temperature in K
Returns
-------
GoRT_vib : float
Vibrational dimensionless Gibbs energy
"""
return self.get_HoRT(T=T) - self.get_SoR(T=T)
def to_dict(self):
"""Represents object as dictionary with JSON-accepted datatypes
Returns
-------
obj_dict : dict
"""
return {
'class': str(self.__class__),
'einstein_temperature': self.einstein_temperature,
'interaction_energy': self.interaction_energy
}
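# Limiting behavior worth noting: for T >> theta_E the heat capacity above
# tends to Cv/R -> 3 (the Dulong-Petit value), since with x = theta_E/T -> 0
# we have 3*x**2*exp(-x)/(1 - exp(-x))**2 -> 3*x**2/x**2 = 3.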
class DebyeVib(_ModelBase):
"""Debye model of a crystal. Equations sourced from:
* <NAME>. An Introduction to Applied Statistical Thermodynamics;
<NAME> & Sons, 2010.
Attributes
----------
debye_temperature : float
Debye temperature (:math:`\\Theta_D`) in K
interaction_energy : float, optional
Interaction energy (:math:`u`) per atom in eV. Default is 0 eV
"""
def __init__(self, debye_temperature, interaction_energy=0.):
self.debye_temperature = debye_temperature
self.interaction_energy = interaction_energy
def get_q(self, T):
"""Calculate the partition function
:math:`q^{vib} = \\exp\\bigg(-\\frac{u}{3k_B T} - \\frac{3}{8}
\\frac{\\Theta_D}{T} - G\\big(\\frac{\\Theta_D}{T}\\big)\\bigg)`
:math:`G\\bigg(\\frac{\\Theta_D}{T}\\bigg) = 3\\bigg(\\frac{T}{
\\Theta_D}\\bigg)^3\\int_0^{\\frac{\\Theta_D}{T}}x^2 \\ln
\\bigg(1-e^{-x}\\bigg)dx`
Parameters
----------
T : float
Temperature in K
Returns
-------
q : float
Partition function
"""
G = self._get_intermediate_fn(T=T, fn=self._G_integrand)
return np.exp(-self.interaction_energy/3./c.kb('eV/K')/T \
-3./8.*self.debye_temperature/T - G)
def get_CvoR(self, T):
"""Calculates dimensionless heat capacity (constant V)
:math:`\\frac {C_V^{vib}}{R} = 3K\\bigg(\\frac{\\Theta_D}{T}\\bigg)`
:math:`K\\bigg(\\frac{\\Theta_D}{T}\\bigg)=3\\bigg(\\frac{T}{\\Theta_D}
\\bigg)^3 \\int_0^{\\frac{\\Theta_D}{T}}\\frac{x^4 e^x}{(e^x-1)^2}dx`
Parameters
----------
T : float
Temperature in K
Returns
-------
CvoR : float
Dimensionless heat capacity (constant V)
"""
K = self._get_intermediate_fn(T=T, fn=self._K_integrand)
return 3. * K
def get_CpoR(self, T):
"""Calculates dimensionless heat capacity (constant P)
:math:`\\frac {C_P^{vib}}{R} = 3K\\bigg(\\frac{\\Theta_D}{T}\\bigg)`
:math:`K\\bigg(\\frac{\\Theta_D}{T}\\bigg)=3\\bigg(\\frac{T}{\\Theta_D}
\\bigg)^3 \\int_0^{\\frac{\\Theta_D}{T}}\\frac{x^4 e^x}{(e^x-1)^2}dx`
Parameters
----------
T : float
Temperature in K
Returns
-------
CpoR : float
Dimensionless heat capacity (constant P)
"""
return self.get_CvoR(T=T)
def get_UoRT(self, T):
"""Calculates dimensionless internal energy
:math:`\\frac{U^{vib}}{RT} = \\frac{u_D^o}{RT} + 3F\\bigg(\\frac{
\\Theta_D}{T}\\bigg)`
:math:`F\\bigg(\\frac{\\Theta_D}{T}\\bigg) = 3\\bigg(\\frac{T}{
\\Theta_D}\\bigg)^3 \\int_0^{\\frac{\\Theta_D}{T}} \\frac{x^3 e^x}
{e^x-1} dx`
Parameters
----------
T : float
Temperature in K
Returns
-------
UoRT : float
Dimensionless internal energy
"""
return self.get_ZPE()/c.kb('eV/K')/T \
+ 3.*self._get_intermediate_fn(T=T, fn=self._F_integrand)
def get_HoRT(self, T):
"""Calculates dimensionless enthalpy
:math:`\\frac{H^{vib}}{RT} = \\frac{u_D^o}{RT} + 3F\\bigg(\\frac{
\\Theta_D}{T}\\bigg)`
:math:`F\\bigg(\\frac{\\Theta_D}{T}\\bigg) = 3\\bigg(\\frac{T}{
\\Theta_D}\\bigg)^3 \\int_0^{\\frac{\\Theta_D}{T}} \\frac{x^3 e^x}
{e^x-1} dx`
Parameters
----------
T : float
Temperature in K
Returns
-------
HoRT : float
Dimensionless enthalpy
"""
return self.get_UoRT(T=T)
def get_SoR(self, T):
"""Calculates dimensionless entropy
:math:`\\frac{S^{vib}}{R} = 3\\bigg[F\\bigg(\\frac{\\Theta_D}{T}\\bigg)
- G\\bigg(\\frac{\\Theta_D}{T}\\bigg)\\bigg]`
:math:`F\\bigg(\\frac{\\Theta_D}{T}\\bigg) = 3\\bigg(\\frac{T}{
\\Theta_D}\\bigg)^3 \\int_0^{\\frac{\\Theta_D}{T}} \\frac{x^3 e^x}
{e^x-1} dx`
:math:`G\\bigg(\\frac{\\Theta_D}{T}\\bigg) = 3\\bigg(\\frac{T}{
\\Theta_D}\\bigg)^3\\int_0^{\\frac{\\Theta_D}{T}}x^2 \\ln
\\bigg(1-e^{-x}\\bigg)dx`
Parameters
----------
T : float
Temperature in K
Returns
-------
SoR : float
Dimensionless entropy
"""
F = self._get_intermediate_fn(T=T, fn=self._F_integrand)
G = self._get_intermediate_fn(T=T, fn=self._G_integrand)
return 3. * (F - G)
def get_FoRT(self, T):
"""Calculates dimensionless Helmholtz energy
:math:`\\frac{F^{vib}}{RT}=\\frac{U^{vib}}{RT}-\\frac{S^{vib}}{R}`
Parameters
----------
T : float
Temperature in K
Returns
-------
FoRT : float
Dimensionless Helmholtz energy
"""
return self.get_UoRT(T=T) - self.get_SoR(T=T)
def get_GoRT(self, T):
"""Calculates dimensionless Gibbs energy
:math:`\\frac{G^{vib}}{RT}=\\frac{H^{vib}}{RT}-\\frac{S^{vib}}{R}`
Parameters
----------
T : float
Temperature in K
Returns
-------
GoRT : float
Dimensionless Gibbs energy
"""
return self.get_HoRT(T=T) - self.get_SoR(T=T)
def get_ZPE(self):
"""Calculate zero point energy
:math:`u^o_D = u^o +\\frac{9}{8}R\\Theta_D`
Returns
-------
zpe : float
Zero point energy in eV
"""
return self.interaction_energy \
+ 9./8.*c.R('eV/K')*self.debye_temperature
def _G_integrand(self, x):
"""Integrand when evaluating intermediate function G.
:math:`f(x) = x^2 \\ln \\bigg(1-e^{-x}\\bigg)`
Parameters
----------
x : float
Variable of integration. Represents
:math:`\\frac{\\Theta_D}{T}}`
Returns
-------
f(x) : float
Integrand evaluated at x
"""
return np.log(1. - np.exp(-x)) * (x**2)
def _K_integrand(self, x):
"""Integrand when evaluating intermediate function K.
:math:`f(x) = \\frac {x^4 e^x}{(e^x -1)^2}`
Parameters
----------
x : float
Variable of integration. Represents
:math:`\\frac{\\Theta_D}{T}}`
Returns
-------
f(x) : float
Integrand evaluated at x
"""
return (x**4) * np.exp(x) / (np.exp(x) - 1.)**2
def _F_integrand(self, x):
"""Integrand when evaluating intermediate function F.
:math:`f(x) = \\frac {x^3 e^x}{e^x -1}`
Parameters
----------
x : float
Variable of integration. Represents
:math:`\\frac{\\Theta_D}{T}}`
Returns
-------
f(x) : float
Integrand evaluated at x
"""
return (x**3) * np.exp(x) / (np.exp(x) - 1.)
def _get_intermediate_fn(self, T, fn):
"""Calculates the intermediate function (i.e. F, G, or K)
:math:`F(x) = 3\\bigg(\\frac{T}{\\Theta_D}\\bigg)^3\\int_0^{\\frac
{\\Theta_D}{T}} f(x) dx`
Parameters
----------
T : float
Temperature in K
fn : function
Integrand function, f(x)
Returns
-------
F : float
Intermediate function evaluated at T
"""
vib_dimless = self.debye_temperature / T
integral = quad(func=fn, a=0., b=vib_dimless)[0]
return 3. * integral / vib_dimless**3
def _get_valid_vib_wavenumbers(wavenumbers, substitute=None):
"""Returns wavenumbers to use for vibration calculations. Imaginary
frequencies are expected to be negative.
Parameters
----------
wavenumbers : list of float
Wavenumbers in 1/cm
substitute : float, optional
Value to use to replace imaginary frequencies. If not specified,
imaginary frequencies are ignored. Default is None
Returns
-------
wavenumbers_out : (N,) np.ndarray
Valid wavenumbers
"""
wavenumbers_out = []
for wavenumber in wavenumbers:
if wavenumber > 0.:
# Real wavenumbers always added
wavenumbers_out.append(wavenumber)
elif substitute is not None:
# Substitute added if imaginary frequency encountered
wavenumbers_out.append(substitute)
return np.array(wavenumbers_out)
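# e.g. (imaginary modes are encoded as negative wavenumbers):
#   _get_valid_vib_wavenumbers([3000., -50.])                  -> array([3000.])
#   _get_valid_vib_wavenumbers([3000., -50.], substitute=100.) -> array([3000., 100.])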
def _get_vib_dimless(wavenumbers, T, substitute=None):
"""Calculates dimensionless temperatures for the wavenumbers and
temperature specified
Parameters
----------
wavenumbers : (N,) np.ndarray
Wavenumbers in 1/cm
T : float
Temperature in K
substitute : float, optional
Value to use to replace imaginary frequencies. If not specified,
imaginary frequencies are ignored. Default is None
Returns
-------
vib_dimless : (N,) np.ndarray
Vibrational temperatures normalized by T
"""
valid_wavenumbers = _get_valid_vib_wavenumbers(wavenumbers=wavenumbers,
substitute=substitute)
vib_dimless = c.wavenumber_to_temp(valid_wavenumbers) / T
return vib_dimless
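# --- Illustrative, self-contained sketch (not part of the original module) ---
# It mirrors the quadrature pattern of _get_intermediate_fn above to evaluate the
# intermediate Debye function F at a temperature T; theta_D is a made-up example
# value, and np/quad come from the module imports truncated in this excerpt.
if __name__ == '__main__':
    def _demo_debye_F(T, theta_D=309.):
        # same integrand as _F_integrand: x^3 e^x / (e^x - 1)
        integrand = lambda x: (x**3) * np.exp(x) / (np.exp(x) - 1.)
        x_max = theta_D / T
        return 3. * quad(integrand, 0., x_max)[0] / x_max**3
    print(_demo_debye_F(300.))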
|
[
"numpy.sinh",
"numpy.sum",
"pmutt.constants.wavenumber_to_temp",
"scipy.integrate.quad",
"numpy.log",
"pmutt.constants.h",
"pmutt.io.json.remove_class",
"numpy.array",
"numpy.exp",
"pmutt.constants.R",
"numpy.dot",
"pmutt.constants.kb",
"numpy.prod",
"pmutt.constants.wavenumber_to_inertia"
] |
[((34120, 34145), 'numpy.array', 'np.array', (['wavenumbers_out'], {}), '(wavenumbers_out)\n', (34128, 34145), True, 'import numpy as np\n'), ((950, 975), 'numpy.array', 'np.array', (['vib_wavenumbers'], {}), '(vib_wavenumbers)\n', (958, 975), True, 'import numpy as np\n'), ((1332, 1381), 'pmutt.constants.wavenumber_to_temp', 'c.wavenumber_to_temp', (['self._valid_vib_wavenumbers'], {}), '(self._valid_vib_wavenumbers)\n', (1352, 1381), True, 'from pmutt import constants as c\n'), ((2336, 2347), 'numpy.prod', 'np.prod', (['qs'], {}), '(qs)\n', (2343, 2347), True, 'import numpy as np\n'), ((3028, 3041), 'numpy.sum', 'np.sum', (['CvoRs'], {}), '(CvoRs)\n', (3034, 3041), True, 'import numpy as np\n'), ((4665, 4677), 'numpy.sum', 'np.sum', (['UoRT'], {}), '(UoRT)\n', (4671, 4677), True, 'import numpy as np\n'), ((7578, 7600), 'pmutt.io.json.remove_class', 'remove_class', (['json_obj'], {}), '(json_obj)\n', (7590, 7600), False, 'from pmutt.io.json import remove_class\n'), ((9715, 9764), 'pmutt.constants.wavenumber_to_temp', 'c.wavenumber_to_temp', (['self._valid_vib_wavenumbers'], {}), '(self._valid_vib_wavenumbers)\n', (9735, 9764), True, 'from pmutt import constants as c\n'), ((10590, 10642), 'pmutt.constants.wavenumber_to_inertia', 'c.wavenumber_to_inertia', (['self._valid_vib_wavenumbers'], {}), '(self._valid_vib_wavenumbers)\n', (10613, 10642), True, 'from pmutt import constants as c\n'), ((11964, 11976), 'numpy.sum', 'np.sum', (['CvoR'], {}), '(CvoR)\n', (11970, 11976), True, 'import numpy as np\n'), ((14352, 14370), 'numpy.sum', 'np.sum', (['UoRT_QRRHO'], {}), '(UoRT_QRRHO)\n', (14358, 14370), True, 'import numpy as np\n'), ((17072, 17089), 'numpy.sum', 'np.sum', (['SoR_QRRHO'], {}), '(SoR_QRRHO)\n', (17078, 17089), True, 'import numpy as np\n'), ((18763, 18785), 'pmutt.io.json.remove_class', 'remove_class', (['json_obj'], {}), '(json_obj)\n', (18775, 18785), False, 'from pmutt.io.json import remove_class\n'), ((23877, 23897), 'numpy.exp', 'np.exp', (['(-theta_E / T)'], {}), '(-theta_E / T)\n', (23883, 23897), True, 'import numpy as np\n'), ((34914, 34953), 'pmutt.constants.wavenumber_to_temp', 'c.wavenumber_to_temp', (['valid_wavenumbers'], {}), '(valid_wavenumbers)\n', (34934, 34953), True, 'from pmutt import constants as c\n'), ((3880, 3916), 'numpy.sum', 'np.sum', (['self._valid_vib_temperatures'], {}), '(self._valid_vib_temperatures)\n', (3886, 3916), True, 'import numpy as np\n'), ((12709, 12777), 'numpy.dot', 'np.dot', (['self._valid_vib_temperatures', 'self._valid_scaled_wavenumbers'], {}), '(self._valid_vib_temperatures, self._valid_scaled_wavenumbers)\n', (12715, 12777), True, 'import numpy as np\n'), ((33135, 33170), 'scipy.integrate.quad', 'quad', ([], {'func': 'fn', 'a': '(0.0)', 'b': 'vib_dimless'}), '(func=fn, a=0.0, b=vib_dimless)\n', (33139, 33170), False, 'from scipy.integrate import quad\n'), ((3865, 3877), 'pmutt.constants.kb', 'c.kb', (['"""eV/K"""'], {}), "('eV/K')\n", (3869, 3877), True, 'from pmutt import constants as c\n'), ((11789, 11811), 'numpy.exp', 'np.exp', (['(-vib_dimless_i)'], {}), '(-vib_dimless_i)\n', (11795, 11811), True, 'import numpy as np\n'), ((12694, 12706), 'pmutt.constants.kb', 'c.kb', (['"""eV/K"""'], {}), "('eV/K')\n", (12698, 12706), True, 'from pmutt import constants as c\n'), ((20482, 20508), 'numpy.exp', 'np.exp', (['(-theta_E / 2.0 / T)'], {}), '(-theta_E / 2.0 / T)\n', (20488, 20508), True, 'import numpy as np\n'), ((21133, 21153), 'numpy.exp', 'np.exp', (['(-theta_E / T)'], {}), '(-theta_E / T)\n', (21139, 21153), True, 'import numpy as np\n'), ((22090, 22102), 'pmutt.constants.kb', 'c.kb', (['"""eV/K"""'], {}), "('eV/K')\n", (22094, 22102), True, 'from pmutt import constants as c\n'), ((23983, 24005), 'numpy.log', 'np.log', (['(1.0 - exp_term)'], {}), '(1.0 - exp_term)\n', (23989, 24005), True, 'import numpy as np\n'), ((32071, 32080), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (32077, 32080), True, 'import numpy as np\n'), ((32539, 32548), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (32545, 32548), True, 'import numpy as np\n'), ((32552, 32561), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (32558, 32561), True, 'import numpy as np\n'), ((2190, 2216), 'numpy.exp', 'np.exp', (['(-vib_dimless / 2.0)'], {}), '(-vib_dimless / 2.0)\n', (2196, 2216), True, 'import numpy as np\n'), ((13356, 13376), 'numpy.exp', 'np.exp', (['(-vib_dimless)'], {}), '(-vib_dimless)\n', (13362, 13376), True, 'import numpy as np\n'), ((15259, 15286), 'numpy.exp', 'np.exp', (['(vib_temperature / T)'], {}), '(vib_temperature / T)\n', (15265, 15286), True, 'import numpy as np\n'), ((15313, 15341), 'numpy.exp', 'np.exp', (['(-vib_temperature / T)'], {}), '(-vib_temperature / T)\n', (15319, 15341), True, 'import numpy as np\n'), ((20510, 20530), 'numpy.exp', 'np.exp', (['(-theta_E / T)'], {}), '(-theta_E / T)\n', (20516, 20530), True, 'import numpy as np\n'), ((21174, 21194), 'numpy.exp', 'np.exp', (['(-theta_E / T)'], {}), '(-theta_E / T)\n', (21180, 21194), True, 'import numpy as np\n'), ((22665, 22677), 'pmutt.constants.kb', 'c.kb', (['"""eV/K"""'], {}), "('eV/K')\n", (22669, 22677), True, 'from pmutt import constants as c\n'), ((22709, 22729), 'numpy.exp', 'np.exp', (['(-theta_E / T)'], {}), '(-theta_E / T)\n', (22715, 22729), True, 'import numpy as np\n'), ((22734, 22754), 'numpy.exp', 'np.exp', (['(-theta_E / T)'], {}), '(-theta_E / T)\n', (22740, 22754), True, 'import numpy as np\n'), ((28428, 28440), 'pmutt.constants.kb', 'c.kb', (['"""eV/K"""'], {}), "('eV/K')\n", (28432, 28440), True, 'from pmutt import constants as c\n'), ((31129, 31140), 'pmutt.constants.R', 'c.R', (['"""eV/K"""'], {}), "('eV/K')\n", (31132, 31140), True, 'from pmutt import constants as c\n'), ((31610, 31620), 'numpy.exp', 'np.exp', (['(-x)'], {}), '(-x)\n', (31616, 31620), True, 'import numpy as np\n'), ((32084, 32093), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (32090, 32093), True, 'import numpy as np\n'), ((2224, 2244), 'numpy.exp', 'np.exp', (['(-vib_dimless)'], {}), '(-vib_dimless)\n', (2230, 2244), True, 'import numpy as np\n'), ((2298, 2318), 'numpy.exp', 'np.exp', (['(-vib_dimless)'], {}), '(-vib_dimless)\n', (2304, 2318), True, 'import numpy as np\n'), ((13415, 13435), 'numpy.exp', 'np.exp', (['(-vib_dimless)'], {}), '(-vib_dimless)\n', (13421, 13435), True, 'import numpy as np\n'), ((20449, 20461), 'pmutt.constants.kb', 'c.kb', (['"""eV/K"""'], {}), "('eV/K')\n", (20453, 20461), True, 'from pmutt import constants as c\n'), ((2972, 2998), 'numpy.sinh', 'np.sinh', (['(vib_dimless / 2.0)'], {}), '(vib_dimless / 2.0)\n', (2979, 2998), True, 'import numpy as np\n'), ((4576, 4596), 'numpy.exp', 'np.exp', (['(-vib_dimless)'], {}), '(-vib_dimless)\n', (4582, 4596), True, 'import numpy as np\n'), ((4617, 4637), 'numpy.exp', 'np.exp', (['(-vib_dimless)'], {}), '(-vib_dimless)\n', (4623, 4637), True, 'import numpy as np\n'), ((5912, 5932), 'numpy.exp', 'np.exp', (['(-vib_dimless)'], {}), '(-vib_dimless)\n', (5918, 5932), True, 'import numpy as np\n'), ((5941, 5961), 'numpy.exp', 'np.exp', (['(-vib_dimless)'], {}), '(-vib_dimless)\n', (5947, 5961), True, 'import numpy as np\n'), ((5989, 6009), 'numpy.exp', 'np.exp', (['(-vib_dimless)'], {}), '(-vib_dimless)\n', (5995, 6009), True, 'import numpy as np\n'), ((11861, 11883), 'numpy.exp', 'np.exp', (['(-vib_dimless_i)'], {}), '(-vib_dimless_i)\n', (11867, 11883), True, 'import numpy as np\n'), ((15863, 15873), 'pmutt.constants.h', 'c.h', (['"""J s"""'], {}), "('J s')\n", (15866, 15873), True, 'from pmutt import constants as c\n'), ((26543, 26555), 'pmutt.constants.kb', 'c.kb', (['"""eV/K"""'], {}), "('eV/K')\n", (26547, 26555), True, 'from pmutt import constants as c\n'), ((15845, 15856), 'pmutt.constants.kb', 'c.kb', (['"""J/K"""'], {}), "('J/K')\n", (15849, 15856), True, 'from pmutt import constants as c\n')]
|
import formula
# annual interest rate, number of loan periods (months), initial capital (CNY), total investment horizon (months), bad-debt rate
print(formula.annualIncome(22,12,10000,12,0))
|
[
"formula.annualIncome"
] |
[((57, 99), 'formula.annualIncome', 'formula.annualIncome', (['(22)', '(12)', '(10000)', '(12)', '(0)'], {}), '(22, 12, 10000, 12, 0)\n', (77, 99), False, 'import formula\n')]
|
# Generated by Django 2.2.6 on 2019-10-15 23:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('myapp', '0002_gallery'),
]
operations = [
migrations.AddField(
model_name='gallery',
name='title',
field=models.TextField(default=0),
preserve_default=False,
),
]
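# Note: default=0 above is only used to backfill existing rows while this
# migration runs; preserve_default=False then drops the default from the model
# state, so new objects must supply a title explicitly.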
|
[
"django.db.models.TextField"
] |
[((321, 348), 'django.db.models.TextField', 'models.TextField', ([], {'default': '(0)'}), '(default=0)\n', (337, 348), False, 'from django.db import migrations, models\n')]
|
from controllers import mainController
import aiohttp_cors
def routes(app):
cors = aiohttp_cors.setup(app, defaults={
"*": aiohttp_cors.ResourceOptions(
allow_methods=("*"),
allow_credentials=True,
expose_headers=("*",),
allow_headers=("*"),
max_age=3600,
)
})
cors.add(app.router.add_get('/', mainController.index))
cors.add(app.router.add_post('/auth', mainController.auth))
cors.add(app.router.add_post('/create-pool', mainController.create_pool))
cors.add(app.router.add_post('/delete-pool', mainController.delete_pool))
cors.add(app.router.add_get('/devices', mainController.get_storage_info))
cors.add(app.router.add_get('/status', mainController.check_status))
cors.add(app.router.add_get('/io-status', mainController.get_io_status))
cors.add(app.router.add_post('/add-disk', mainController.add_disk))
cors.add(app.router.add_post('/add-spare-disk', mainController.add_spare_disk))
cors.add(app.router.add_post('/replace-disk', mainController.replace_disk))
cors.add(app.router.add_post('/mountpoint', mainController.set_mountpoint))
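# --- Minimal, self-contained usage sketch (handler, port and app wiring are
# assumptions, not part of the original project) ---
if __name__ == '__main__':
    from aiohttp import web

    async def index(request):
        return web.json_response({'status': 'ok'})

    app = web.Application()
    cors = aiohttp_cors.setup(app, defaults={
        '*': aiohttp_cors.ResourceOptions(allow_credentials=True,
                                          expose_headers=('*',),
                                          allow_headers='*')
    })
    cors.add(app.router.add_get('/', index))
    web.run_app(app, port=8080)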
|
[
"aiohttp_cors.ResourceOptions"
] |
[((137, 268), 'aiohttp_cors.ResourceOptions', 'aiohttp_cors.ResourceOptions', ([], {'allow_methods': '"""*"""', 'allow_credentials': '(True)', 'expose_headers': "('*',)", 'allow_headers': '"""*"""', 'max_age': '(3600)'}), "(allow_methods='*', allow_credentials=True,\n expose_headers=('*',), allow_headers='*', max_age=3600)\n", (165, 268), False, 'import aiohttp_cors\n')]
|
import os
from pytorch_lightning import seed_everything
TEST_ROOT = os.path.realpath(os.path.dirname(__file__))
PACKAGE_ROOT = os.path.dirname(TEST_ROOT)
DATASETS_PATH = os.path.join(PACKAGE_ROOT, 'datasets')
# base seed used to make each test reproducible
ROOT_SEED = 1234
def reset_seed():
    seed_everything(ROOT_SEED)
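# typical use inside a test module (illustrative sketch):
# def test_something():
#     reset_seed()
#     ...  # assertions that rely on deterministic randomness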
|
[
"os.path.dirname",
"os.path.join",
"pytorch_lightning.seed_everything"
] |
[((129, 155), 'os.path.dirname', 'os.path.dirname', (['TEST_ROOT'], {}), '(TEST_ROOT)\n', (144, 155), False, 'import os\n'), ((172, 210), 'os.path.join', 'os.path.join', (['PACKAGE_ROOT', '"""datasets"""'], {}), "(PACKAGE_ROOT, 'datasets')\n", (184, 210), False, 'import os\n'), ((87, 112), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (102, 112), False, 'import os\n'), ((300, 317), 'pytorch_lightning.seed_everything', 'seed_everything', ([], {}), '()\n', (315, 317), False, 'from pytorch_lightning import seed_everything\n')]
|
# -*- coding: utf-8 -*-
import argparse
import logging
import os
import tarfile
import textwrap
from cliff.command import Command
# TODO(dittrich): https://github.com/Mckinsey666/bullet/issues/2
# Workaround until bullet has Windows missing 'termios' fix.
try:
from bullet import Bullet
except ModuleNotFoundError:
pass
from sys import stdin
class SecretsRestore(Command):
"""Restore secrets and descriptions from a backup file."""
logger = logging.getLogger(__name__)
def get_parser(self, prog_name):
parser = super().get_parser(prog_name)
parser.formatter_class = argparse.RawDescriptionHelpFormatter
parser.add_argument('backup', nargs='?', default=None)
parser.epilog = textwrap.dedent("""
TODO(dittrich): Finish documenting command.
""")
return parser
def take_action(self, parsed_args):
self.logger.debug('[*] restore secrets')
secrets = self.app.secrets
secrets.requires_environment()
backups_dir = os.path.join(
secrets.environment_path(),
"backups")
backups = [fn for fn in
os.listdir(backups_dir)
if fn.endswith('.tgz')]
if parsed_args.backup is not None:
choice = parsed_args.backup
elif not (stdin.isatty() and 'Bullet' in globals()):
# Can't involve user in getting a choice.
raise RuntimeError('[-] no backup specified for restore')
else:
# Give user a chance to choose.
choices = ['<CANCEL>'] + sorted(backups)
cli = Bullet(prompt="\nSelect a backup from which to restore:",
choices=choices,
indent=0,
align=2,
margin=1,
shift=0,
bullet="→",
pad_right=5)
choice = cli.launch()
if choice == "<CANCEL>":
self.logger.info('cancelled restoring from backup')
return
backup_path = os.path.join(backups_dir, choice)
with tarfile.open(backup_path, "r:gz") as tf:
# Only select intended files. See warning re: Tarfile.extractall()
# in https://docs.python.org/3/library/tarfile.html
allowed_prefixes = ['secrets.json', 'secrets.d/']
            names = [fn for fn in tf.getnames()
                     if '../' not in fn
                     and any(fn.startswith(prefix)
                             for prefix in allowed_prefixes)]
env_path = secrets.environment_path()
for name in names:
tf.extract(name, path=env_path)
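                # On Python >= 3.12, tarfile extraction filters (e.g.
                # tf.extract(name, path=env_path, filter='data')) add another
                # layer of protection against unsafe archive members.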
self.logger.info('[+] restored backup %s to %s', backup_path, env_path)
# vim: set fileencoding=utf-8 ts=4 sw=4 tw=0 et :
|
[
"textwrap.dedent",
"sys.stdin.isatty",
"tarfile.open",
"os.path.join",
"os.listdir",
"logging.getLogger",
"bullet.Bullet"
] |
[((462, 489), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (479, 489), False, 'import logging\n'), ((732, 834), 'textwrap.dedent', 'textwrap.dedent', (['"""\n TODO(dittrich): Finish documenting command.\n """'], {}), '(\n """\n TODO(dittrich): Finish documenting command.\n """\n )\n', (747, 834), False, 'import textwrap\n'), ((2122, 2155), 'os.path.join', 'os.path.join', (['backups_dir', 'choice'], {}), '(backups_dir, choice)\n', (2134, 2155), False, 'import os\n'), ((2169, 2202), 'tarfile.open', 'tarfile.open', (['backup_path', '"""r:gz"""'], {}), "(backup_path, 'r:gz')\n", (2181, 2202), False, 'import tarfile\n'), ((1161, 1184), 'os.listdir', 'os.listdir', (['backups_dir'], {}), '(backups_dir)\n', (1171, 1184), False, 'import os\n'), ((1625, 1770), 'bullet.Bullet', 'Bullet', ([], {'prompt': '"""\nSelect a backup from which to restore:"""', 'choices': 'choices', 'indent': '(0)', 'align': '(2)', 'margin': '(1)', 'shift': '(0)', 'bullet': '"""→"""', 'pad_right': '(5)'}), '(prompt="""\nSelect a backup from which to restore:""", choices=\n choices, indent=0, align=2, margin=1, shift=0, bullet=\'→\', pad_right=5)\n', (1631, 1770), False, 'from bullet import Bullet\n'), ((1329, 1343), 'sys.stdin.isatty', 'stdin.isatty', ([], {}), '()\n', (1341, 1343), False, 'from sys import stdin\n')]
|
# -*- coding: utf-8 -*-
"""
ABM virus visuals
Created on Thu Apr 9 10:16:47 2020
@author: Kyle
"""
import matplotlib.pyplot as plt
from matplotlib import colors
import numpy as np
import os
import tempfile
from datetime import datetime
import imageio
class visuals():
def agentPlot(self, storageArrayList, cmap=None, save=False, saveFolder=None,
display=False, i=0, fig=None, axs=None):
"""Generate a plot of the environment grid.
Expects ABM to have already been run and status of every grid point
(which will encode status of every agent) to be saved in an array.
Each time step is also in array, and which time step to visualize
is set by i.
cmap needs to be defined to provide color coding for agent status."""
        if cmap is None:
cmap = colors.ListedColormap(['white','lightblue','lightgreen',
[elm/250 for elm in [72, 169, 171]], 'orange','red', 'black'])
storedArray=storageArrayList[i]
        if axs is None:
fig, (ax1) = plt.subplots(1, figsize=[8,8])
else:
ax1=axs[0]
#plt.figure(figsize=[8,8])
ax1.pcolormesh(storedArray, cmap=cmap, vmin=-1,vmax=5)
# #plt.colorbar()
ax1.axis('off')
plt.tight_layout()
if save==True:
plt.savefig(os.path.join(os.getcwd(), saveFolder, 'step_%s.png'%(i)))
if display == True:
plt.show()
#plt.close()
#return fig
def agentStatusPlot(self, agent_status, steps, cmap=None, hospitalThreshold = None,
save=False, saveFolder=None,
display=False,
fig = None,
axs=None):
"""Generates various high level visuals of the progression of the
disease through the population. Expects """
agent_status = agent_status[['type']]; #hotfix for updated code elsewhere
        if cmap is None:
cmap = colors.ListedColormap(['white','lightblue','lightgreen',
[elm/250 for elm in [72, 169, 171]], 'orange','red', 'black'])
i = agent_status.index[-1][0]+1
#i=steps
healthy=np.count_nonzero(agent_status.unstack().to_numpy() == 0,axis=1)[:i]
recovered=np.count_nonzero(agent_status.unstack().to_numpy() == 1,axis=1)[:i]
vaccinated=np.count_nonzero(agent_status.unstack().to_numpy() == 2,axis=1)[:i]
walkingSick=np.count_nonzero(agent_status.unstack().to_numpy() == 3,axis=1)[:i]
hospital=np.count_nonzero(agent_status.unstack().to_numpy() == 4,axis=1)[:i]
dead=np.count_nonzero(agent_status.unstack().to_numpy() == 5,axis=1)[:i]
        if axs is None:
fig, (ax1, ax2, ax3) = plt.subplots(3, sharex=True, figsize=[12,8])
else:
ax1=axs[0]; ax2=axs[1]; ax3=axs[2]
ax1.bar(range(len(healthy)), dead, width=1.0, color='black', label='dead')
ax1.bar(range(len(healthy)), hospital, width=1.0,
bottom=dead,
color='red', label='hospitalized')
ax1.bar(range(len(healthy)), walkingSick, width=1.0,
bottom=dead+hospital,
color='orange', label='walking sick')
ax1.bar(range(len(healthy)), vaccinated, width=1.0,
bottom=dead+hospital+walkingSick,
color=[elm/250 for elm in [72, 169, 171]], label='vaccinated')
ax1.bar(range(len(healthy)), healthy, width=1.0,
bottom=dead+hospital+walkingSick+vaccinated,
color='lightblue', label='healthy')
ax1.bar(range(len(healthy)), recovered, width=1.0,
bottom=dead+hospital+walkingSick+vaccinated+healthy,
color='green', label='recovered')
ax1.set_ylabel('Population', size=12);
ax1.set_title('Effect of Virus on Population Over Time',size=20)
ax2.plot(walkingSick, color='orange', label='walking sick')
ax2.plot(hospital, color='red', label='hospitalized')
if hospitalThreshold:
print(hospitalThreshold)
ax2.axhline(y=hospitalThreshold,
linestyle='--',color='gray', label='capacity')
ax2.set_ylabel('Number of sick');
ax2.set_title('Number of Sick Over Time', size=20)
ax3.plot(dead, color='black', label='dead');
ax3.set_xlabel('Time Steps',size=18)
        ax3.set_ylabel('Number of dead');
ax3.set_title('Number of Dead Over Time', size=20)
ax1.legend(loc='center left', bbox_to_anchor=(1, 0.5))
ax2.legend(loc='center left', bbox_to_anchor=(1, 0.5))
ax3.legend(loc='center left', bbox_to_anchor=(1, 0.5))
ax1.axvline(x=steps, color='black',alpha=.25,linewidth=7)
ax2.axvline(x=steps, color='black',alpha=.25,linewidth=7)
ax3.axvline(x=steps, color='black',alpha=.25,linewidth=7)
#plt.xlim([0,steps])
plt.xlim([0,i])
#plt.tight_layout();
if save==True:
plt.savefig(os.path.join(os.getcwd(), saveFolder, 'step_%s.png'%(steps)))
if display==True:
plt.show()
#plt.close()
#return fig
def combinedVisuals(self, SAL, agent_status, cmap=None, i=0,
hospitalThreshold = None,
modelName='Model visualization',
save=False, saveFolder=None, display=False):
"""Combines a few different visuals into a single large image."""
fig = plt.figure(figsize=[16,8])
gs = fig.add_gridspec(3, 5)
ax4 = fig.add_subplot(gs[0:3, 0:3])
ax3 = fig.add_subplot(gs[2, 3:])
ax1 = fig.add_subplot(gs[0, 3:], sharex=ax3)
ax2 = fig.add_subplot(gs[1, 3:], sharex=ax3)
self.agentPlot(SAL, i=i, fig=fig, axs=[ax4])
self.agentStatusPlot(agent_status, i, fig=fig, axs=(ax1, ax2, ax3), cmap=cmap, hospitalThreshold=hospitalThreshold)
plt.suptitle('%s\nTime Step %s'%(modelName, i), size=24)
fig.tight_layout(rect=[0, 0.03, 1, 0.9])
if save==True:
plt.savefig(os.path.join(os.getcwd(), saveFolder, 'step_%s.png'%(i)))
if display == True:
plt.show()
#plt.close()
#return fig
def generateGIF(self, SAL, agent_status, NumSteps, visualFunction='all', cmap=None, stepSkip=1,
saveFolder=os.getcwd(),modelName='ABM Simulation',
GIFname='ABM_sim', datestamp=True, fps = 10,
hospitalThreshold = None):
if not cmap:
cmap = colors.ListedColormap(['white','lightblue','lightgreen',
[elm/250 for elm in [72, 169, 171]], 'orange','red', 'black'])
print("Starting to generate frames for GIF...")
with tempfile.TemporaryDirectory(dir=os.getcwd()) as f:
for i in range(0, NumSteps):
if i%stepSkip == 0: #saving only every stepSkip frame for the GIF
if visualFunction == 'all' and i != 0:
self.combinedVisuals(SAL, agent_status, i = i,
cmap=None,
hospitalThreshold = None,#hospitalThreshold,
modelName=modelName.strip()+' ',
save=True, saveFolder=f, display=False)
elif visualFunction == 'animation':
self.agentPlot(SAL, cmap=cmap, save=True, saveFolder=f, display=False, i = i)
elif visualFunction == 'graphs':
self.agentStatusPlot(agent_status, i, cmap=cmap,
hospitalThreshold=hospitalThreshold,
save=True, saveFolder=f, display=False)
plt.close()
print("frames generated. Making GIF...")
images = []
fileNums = [int(elm.split('_')[1].split('.png')[0]) for elm in os.listdir(f) if '.png' in elm]
fileNums = sorted(fileNums)
for num in fileNums:
file_name = 'step_%s.png'%(num)
file_path = os.path.join(f, file_name)
images.append(imageio.imread(file_path))
imageio.mimsave(os.path.join(saveFolder,'%s.gif'%(GIFname)),images,fps=fps)
print("GIF complete!")
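# --- Illustrative smoke test (assumed data shapes, not part of the original script) ---
# Each entry of SAL is one time step's grid of agent states in [-1, 5].
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    SAL = [rng.integers(-1, 6, size=(50, 50)) for _ in range(3)]
    visuals().agentPlot(SAL, i=1, display=False)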
|
[
"os.listdir",
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.show",
"os.path.join",
"matplotlib.pyplot.suptitle",
"os.getcwd",
"matplotlib.pyplot.close",
"imageio.imread",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.tight_layout",
"matplotlib.colors.ListedColormap"
] |
[((1333, 1351), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1349, 1351), True, 'import matplotlib.pyplot as plt\n'), ((5052, 5068), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0, i]'], {}), '([0, i])\n', (5060, 5068), True, 'import matplotlib.pyplot as plt\n'), ((5642, 5669), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '[16, 8]'}), '(figsize=[16, 8])\n', (5652, 5669), True, 'import matplotlib.pyplot as plt\n'), ((6082, 6140), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (["('%s\\nTime Step %s' % (modelName, i))"], {'size': '(24)'}), "('%s\\nTime Step %s' % (modelName, i), size=24)\n", (6094, 6140), True, 'import matplotlib.pyplot as plt\n'), ((6518, 6529), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (6527, 6529), False, 'import os\n'), ((848, 978), 'matplotlib.colors.ListedColormap', 'colors.ListedColormap', (["['white', 'lightblue', 'lightgreen', [(elm / 250) for elm in [72, 169, 171]\n ], 'orange', 'red', 'black']"], {}), "(['white', 'lightblue', 'lightgreen', [(elm / 250) for\n elm in [72, 169, 171]], 'orange', 'red', 'black'])\n", (869, 978), False, 'from matplotlib import colors\n'), ((1109, 1140), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)'], {'figsize': '[8, 8]'}), '(1, figsize=[8, 8])\n', (1121, 1140), True, 'import matplotlib.pyplot as plt\n'), ((1497, 1507), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1505, 1507), True, 'import matplotlib.pyplot as plt\n'), ((2054, 2184), 'matplotlib.colors.ListedColormap', 'colors.ListedColormap', (["['white', 'lightblue', 'lightgreen', [(elm / 250) for elm in [72, 169, 171]\n ], 'orange', 'red', 'black']"], {}), "(['white', 'lightblue', 'lightgreen', [(elm / 250) for\n elm in [72, 169, 171]], 'orange', 'red', 'black'])\n", (2075, 2184), False, 'from matplotlib import colors\n'), ((2853, 2898), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(3)'], {'sharex': '(True)', 'figsize': '[12, 8]'}), '(3, sharex=True, figsize=[12, 8])\n', (2865, 2898), True, 'import matplotlib.pyplot as plt\n'), ((5244, 5254), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5252, 5254), True, 'import matplotlib.pyplot as plt\n'), ((6333, 6343), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6341, 6343), True, 'import matplotlib.pyplot as plt\n'), ((6711, 6841), 'matplotlib.colors.ListedColormap', 'colors.ListedColormap', (["['white', 'lightblue', 'lightgreen', [(elm / 250) for elm in [72, 169, 171]\n ], 'orange', 'red', 'black']"], {}), "(['white', 'lightblue', 'lightgreen', [(elm / 250) for\n elm in [72, 169, 171]], 'orange', 'red', 'black'])\n", (6732, 6841), False, 'from matplotlib import colors\n'), ((8018, 8029), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (8027, 8029), True, 'import matplotlib.pyplot as plt\n'), ((8363, 8389), 'os.path.join', 'os.path.join', (['f', 'file_name'], {}), '(f, file_name)\n', (8375, 8389), False, 'import os\n'), ((8475, 8519), 'os.path.join', 'os.path.join', (['saveFolder', "('%s.gif' % GIFname)"], {}), "(saveFolder, '%s.gif' % GIFname)\n", (8487, 8519), False, 'import os\n'), ((1412, 1423), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1421, 1423), False, 'import os\n'), ((5157, 5168), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5166, 5168), False, 'import os\n'), ((6248, 6259), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (6257, 6259), False, 'import os\n'), ((6975, 6986), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (6984, 6986), False, 'import os\n'), ((8182, 8195), 'os.listdir', 'os.listdir', (['f'], {}), '(f)\n', (8192, 8195), False, 'import os\n'), ((8420, 8445), 'imageio.imread', 'imageio.imread', (['file_path'], {}), '(file_path)\n', (8434, 8445), False, 'import imageio\n')]
|
from google.appengine.ext import vendor
vendor.add('lib')
vendor.add('lib/nltk')
vendor.add('lib/nltk-3.2.1.egg-info')
|
[
"google.appengine.ext.vendor.add"
] |
[((40, 57), 'google.appengine.ext.vendor.add', 'vendor.add', (['"""lib"""'], {}), "('lib')\n", (50, 57), False, 'from google.appengine.ext import vendor\n'), ((58, 80), 'google.appengine.ext.vendor.add', 'vendor.add', (['"""lib/nltk"""'], {}), "('lib/nltk')\n", (68, 80), False, 'from google.appengine.ext import vendor\n'), ((81, 118), 'google.appengine.ext.vendor.add', 'vendor.add', (['"""lib/nltk-3.2.1.egg-info"""'], {}), "('lib/nltk-3.2.1.egg-info')\n", (91, 118), False, 'from google.appengine.ext import vendor\n')]
|
from aws_xray_sdk.core import xray_recorder
import app
xray_recorder.begin_segment("Test")
def test_read_root():
response = app.read_root()
assert response == {"hello": "world"}
|
[
"aws_xray_sdk.core.xray_recorder.begin_segment",
"app.read_root"
] |
[((57, 92), 'aws_xray_sdk.core.xray_recorder.begin_segment', 'xray_recorder.begin_segment', (['"""Test"""'], {}), "('Test')\n", (84, 92), False, 'from aws_xray_sdk.core import xray_recorder\n'), ((132, 147), 'app.read_root', 'app.read_root', ([], {}), '()\n', (145, 147), False, 'import app\n')]
|
import os
import torch
import random
import librosa
import torchaudio
import numpy as np
from glob import glob
import nlpaug.flow as naf
import nlpaug.augmenter.audio as naa
import nlpaug.augmenter.spectrogram as nas
from torchvision.transforms import Normalize
from torch.utils.data import Dataset
from nlpaug.augmenter.audio import AudioAugmenter
from src.datasets.librispeech import WavformAugmentation, SpectrumAugmentation
from src.datasets.root_paths import DATA_ROOTS
GOOGLESPEECH_MEAN = [-46.847]
GOOGLESPEECH_STDEV = [19.151]
GOOGLESPEECH_LABELS = ['eight', 'right', 'happy', 'three', 'yes', 'up', 'no', 'stop', 'on', 'four', 'nine',
'zero', 'down', 'go', 'six', 'two', 'left', 'five', 'off', 'seven', 'one',
'cat', 'bird', 'marvin', 'wow', 'tree', 'dog', 'sheila', 'bed', 'house']
class GoogleSpeechCommands(Dataset):
def __init__(
self,
root=DATA_ROOTS['google_speech'],
train=True,
spectral_transforms=False,
wavform_transforms=False,
max_length=150526,
input_size=224,
normalize_mean=GOOGLESPEECH_MEAN,
normalize_stdev=GOOGLESPEECH_STDEV,
):
super().__init__()
assert not (spectral_transforms and wavform_transforms)
if train:
train_paths = open(os.path.join(root, 'training_list.txt'), 'r').readlines()
val_paths = open(os.path.join(root, 'validation_list.txt'), 'r').readlines()
wav_paths = train_paths + val_paths
else:
test_paths = open(os.path.join(root, 'testing_list.txt'), 'r').readlines()
wav_paths = test_paths
wav_paths = [path.strip() for path in wav_paths]
self.root = root
self.num_labels = len(GOOGLESPEECH_LABELS)
self.wav_paths = wav_paths
self.spectral_transforms = spectral_transforms
self.wavform_transforms = wavform_transforms
self.max_length = max_length
self.train = train
self.input_size = input_size
self.FILTER_SIZE = input_size
self.normalize_mean = normalize_mean
self.normalize_stdev = normalize_stdev
def __getitem__(self, index):
wav_name = self.wav_paths[index]
label_name = wav_name.split('/')[0].lower()
label = GOOGLESPEECH_LABELS.index(label_name)
wav_path = os.path.join(self.root, wav_name)
wavform, sample_rate = torchaudio.load(wav_path)
wavform = wavform[0].numpy()
if self.wavform_transforms:
transforms = WavformAugmentation(sample_rate)
wavform = transforms(wavform)
# pad to 150k frames
if len(wavform) > self.max_length:
# randomly pick which side to chop off (fix if validation)
flip = (bool(random.getrandbits(1)) if self.train else True)
padded = (wavform[:self.max_length] if flip else
wavform[-self.max_length:])
else:
padded = np.zeros(self.max_length)
padded[:len(wavform)] = wavform # pad w/ silence
hop_length_dict = {224: 672, 112: 1344, 64: 2360, 32: 4800}
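        # keyword arguments keep this call portable across librosa versions
        # (librosa >= 0.10 makes y and sr keyword-only)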
        spectrum = librosa.feature.melspectrogram(
            y=padded,
            sr=sample_rate,
            hop_length=hop_length_dict[self.input_size],
            n_mels=self.input_size,
        )
if self.spectral_transforms: # apply time and frequency masks
transforms = SpectrumAugmentation()
spectrum = transforms(spectrum)
# log mel-spectrogram
spectrum = librosa.power_to_db(spectrum**2)
spectrum = torch.from_numpy(spectrum).float()
spectrum = spectrum.unsqueeze(0)
if self.spectral_transforms: # apply noise on spectral
noise_stdev = 0.25 * self.normalize_stdev[0]
noise = torch.randn_like(spectrum) * noise_stdev
spectrum = spectrum + noise
normalize = Normalize(self.normalize_mean, self.normalize_stdev)
spectrum = normalize(spectrum)
return index, spectrum, int(label)
def __len__(self):
return len(self.wav_paths)
|
[
"torch.randn_like",
"numpy.zeros",
"librosa.feature.melspectrogram",
"src.datasets.librispeech.SpectrumAugmentation",
"librosa.power_to_db",
"random.getrandbits",
"torchaudio.load",
"torchvision.transforms.Normalize",
"os.path.join",
"src.datasets.librispeech.WavformAugmentation",
"torch.from_numpy"
] |
[((2416, 2449), 'os.path.join', 'os.path.join', (['self.root', 'wav_name'], {}), '(self.root, wav_name)\n', (2428, 2449), False, 'import os\n'), ((2482, 2507), 'torchaudio.load', 'torchaudio.load', (['wav_path'], {}), '(wav_path)\n', (2497, 2507), False, 'import torchaudio\n'), ((3222, 3347), 'librosa.feature.melspectrogram', 'librosa.feature.melspectrogram', (['padded', 'sample_rate'], {'hop_length': 'hop_length_dict[self.input_size]', 'n_mels': 'self.input_size'}), '(padded, sample_rate, hop_length=\n hop_length_dict[self.input_size], n_mels=self.input_size)\n', (3252, 3347), False, 'import librosa\n'), ((3616, 3650), 'librosa.power_to_db', 'librosa.power_to_db', (['(spectrum ** 2)'], {}), '(spectrum ** 2)\n', (3635, 3650), False, 'import librosa\n'), ((3988, 4040), 'torchvision.transforms.Normalize', 'Normalize', (['self.normalize_mean', 'self.normalize_stdev'], {}), '(self.normalize_mean, self.normalize_stdev)\n', (3997, 4040), False, 'from torchvision.transforms import Normalize\n'), ((2607, 2639), 'src.datasets.librispeech.WavformAugmentation', 'WavformAugmentation', (['sample_rate'], {}), '(sample_rate)\n', (2626, 2639), False, 'from src.datasets.librispeech import WavformAugmentation, SpectrumAugmentation\n'), ((3046, 3071), 'numpy.zeros', 'np.zeros', (['self.max_length'], {}), '(self.max_length)\n', (3054, 3071), True, 'import numpy as np\n'), ((3499, 3521), 'src.datasets.librispeech.SpectrumAugmentation', 'SpectrumAugmentation', ([], {}), '()\n', (3519, 3521), False, 'from src.datasets.librispeech import WavformAugmentation, SpectrumAugmentation\n'), ((3668, 3694), 'torch.from_numpy', 'torch.from_numpy', (['spectrum'], {}), '(spectrum)\n', (3684, 3694), False, 'import torch\n'), ((3886, 3912), 'torch.randn_like', 'torch.randn_like', (['spectrum'], {}), '(spectrum)\n', (3902, 3912), False, 'import torch\n'), ((2851, 2872), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (2869, 2872), False, 'import random\n'), ((1367, 1406), 'os.path.join', 'os.path.join', (['root', '"""training_list.txt"""'], {}), "(root, 'training_list.txt')\n", (1379, 1406), False, 'import os\n'), ((1454, 1495), 'os.path.join', 'os.path.join', (['root', '"""validation_list.txt"""'], {}), "(root, 'validation_list.txt')\n", (1466, 1495), False, 'import os\n'), ((1606, 1644), 'os.path.join', 'os.path.join', (['root', '"""testing_list.txt"""'], {}), "(root, 'testing_list.txt')\n", (1618, 1644), False, 'import os\n')]
|
import os
from dotenv import load_dotenv
import requests
import json
from xml.etree import ElementTree
# Load API Keys
load_dotenv()
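# assumes ATTOM_API_KEY is provided by a local .env file or the environment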
ATTOM_API_KEY = os.getenv('ATTOM_API_KEY')
url = "http://api.gateway.attomdata.com/propertyapi/v1.0.0/property/detail?"
headers = {
'accept': "application/json",
'apikey': ATTOM_API_KEY
}
params = {
'address1': '4529 Winona Court' ,
'address2': 'Denver, CO'
}
response = requests.request("GET", url, headers=headers, params=params)
print(response.json())
|
[
"dotenv.load_dotenv",
"requests.request",
"os.getenv"
] |
[((120, 133), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (131, 133), False, 'from dotenv import load_dotenv\n'), ((151, 177), 'os.getenv', 'os.getenv', (['"""ATTOM_API_KEY"""'], {}), "('ATTOM_API_KEY')\n", (160, 177), False, 'import os\n'), ((426, 486), 'requests.request', 'requests.request', (['"""GET"""', 'url'], {'headers': 'headers', 'params': 'params'}), "('GET', url, headers=headers, params=params)\n", (442, 486), False, 'import requests\n')]
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
https://github.com/cgloeckner/pyvtt/
Copyright (c) 2020-2021 <NAME>
License: MIT (see LICENSE for details)
"""
from pony.orm import db_session
import cache, orm
from test.utils import EngineBaseTest, SocketDummy
class GameCacheTest(EngineBaseTest):
def setUp(self):
super().setUp()
with db_session:
gm = self.engine.main_db.GM(name='user123', url='foo', sid='123456')
gm.postSetup()
# create GM database
self.db = orm.createGmDatabase(engine=self.engine, filename=':memory:')
with db_session:
game = self.db.Game(url='bar', gm_url='foo')
game.postSetup()
self.cache = self.engine.cache.get(gm).get(game)
def tearDown(self):
del self.db
del self.cache
super().tearDown()
def test_getNextId(self):
self.assertEqual(self.cache.getNextId(), 0)
self.assertEqual(self.cache.getNextId(), 1)
self.assertEqual(self.cache.getNextId(), 2)
self.assertEqual(self.cache.getNextId(), 3)
def rebuildIndices(self):
# @NOTE: this is called on insert and remove. hence it's tested
# during those operations
pass
def test_insert(self):
# create some players
p = self.cache.insert('arthur', 'red', False)
self.assertIsNotNone(p)
self.cache.insert('bob', 'blue', True) # GM
self.cache.insert('carlos', 'yellow', False)
# test indices being rebuilt
ids = set()
for name in self.cache.players:
ids.add(self.cache.players[name].index)
self.assertEqual(len(ids), 3)
self.assertEqual(ids, {0, 1, 2})
# force carlos to be online
self.cache.get('carlos').socket = SocketDummy()
# cannot add player twice (if online)
with self.assertRaises(KeyError) as e:
self.cache.insert('carlos', 'black', True)
self.assertEqual(str(e), 'carlos')
# can re-login player if offline
self.cache.insert('bob', 'cyan', False)
def test_get(self):
# create some players
self.cache.insert('arthur', 'red', False)
self.cache.insert('bob', 'blue', True) # GM
self.cache.insert('carlos', 'yellow', False)
# query players
cache1 = self.cache.get('arthur')
self.assertIsNotNone(cache1)
cache2 = self.cache.get('bob')
self.assertIsNotNone(cache2)
cache3 = self.cache.get('carlos')
self.assertIsNotNone(cache3)
# removed player cannot be queried
self.cache.remove('bob')
cache2 = self.cache.get('bob')
self.assertIsNone(cache2)
# cannot query unknown player
unknown_cache = self.cache.get('gabriel')
self.assertIsNone(unknown_cache)
def test_getData(self):
# create some players
self.cache.insert('arthur', 'red', False)
self.cache.insert('gabriel', 'red', False)
self.cache.insert('carlos', 'yellow', False)
self.cache.insert('bob', 'blue', True)
# query data (in index-order)
data = self.cache.getData()
self.assertEqual(len(data), 4)
self.assertEqual(data[0]['name'], 'arthur')
self.assertEqual(data[1]['name'], 'gabriel')
self.assertEqual(data[2]['name'], 'carlos')
self.assertEqual(data[3]['name'], 'bob')
# remove player
self.cache.remove('carlos')
# re- query data (in index-order)
data = self.cache.getData()
self.assertEqual(len(data), 3)
self.assertEqual(data[0]['name'], 'arthur')
self.assertEqual(data[1]['name'], 'gabriel')
self.assertEqual(data[2]['name'], 'bob')
def test_getSelections(self):
# create some players
self.cache.insert('arthur', 'red', False)
self.cache.insert('gabriel', 'red', False)
self.cache.insert('carlos', 'yellow', False)
self.cache.insert('bob', 'blue', True)
# set selections
self.cache.get('arthur').selected = [236, 154]
self.cache.get('carlos').selected = [12]
self.cache.get('bob').selected = [124, 236, 12]
# expect selections per player name
selections = self.cache.getSelections()
for name in selections:
self.assertEqual(selections[name], self.cache.get(name).selected)
def test_remove(self):
# create some players
self.cache.insert('arthur', 'red', False)
self.cache.insert('gabriel', 'red', False)
self.cache.insert('carlos', 'yellow', False)
self.cache.insert('bob', 'blue', True)
# remove but expect indices being rebuilt
self.cache.remove('carlos')
ids = set()
for name in self.cache.players:
ids.add(self.cache.players[name].index)
self.assertEqual(len(ids), 3)
self.assertEqual(ids, {0, 1, 2})
# cannot remove player twice
with self.assertRaises(KeyError):
self.cache.remove('carlos')
# cannot remove unknown player
with self.assertRaises(KeyError):
self.cache.remove('dimitri')
# @NOTE: other operations are tested during integration test
|
[
"test.utils.SocketDummy",
"orm.createGmDatabase"
] |
[((555, 616), 'orm.createGmDatabase', 'orm.createGmDatabase', ([], {'engine': 'self.engine', 'filename': '""":memory:"""'}), "(engine=self.engine, filename=':memory:')\n", (575, 616), False, 'import cache, orm\n'), ((1896, 1909), 'test.utils.SocketDummy', 'SocketDummy', ([], {}), '()\n', (1907, 1909), False, 'from test.utils import EngineBaseTest, SocketDummy\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import math
import time
import numpy
import digitalio
import board
from PIL import Image, ImageDraw, ImageFont
from fonts.ttf import RobotoMedium
import RPi.GPIO as GPIO
from ST7789 import ST7789
SPI_SPEED_MHZ = 80
display = ST7789(
rotation=90, # Needed to display the right way up on Pirate Audio
port=0, # SPI port
cs=1, # SPI port Chip-select channel
dc=9, # BCM pin used for data/command
backlight=13,
spi_speed_hz=SPI_SPEED_MHZ * 1000 * 1000
)
GPIO.setmode(GPIO.BCM)
GPIO.setup(13, GPIO.OUT)
FLIP = os.environ.get('FLIP', False)
WIDTH = display.height
HEIGHT = display.width
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
COLORS = [
(255, 0, 0),
(255, 128, 0),
(255, 255, 0),
(128, 255, 0),
(0, 255, 0),
(0, 255, 128),
(0, 255, 255),
(0, 128, 255),
(0, 0, 255),
(255, 0, 255),
(255, 0, 128),
]
index = 0
font_smiley = ImageFont.truetype('./CODE2000.TTF', 28)
font = ImageFont.truetype(RobotoMedium, 40)
img = Image.new("RGB", (WIDTH, HEIGHT), 0)
draw = ImageDraw.Draw(img)
BUTTONS = [5, 6, 16, 24]
LABELS = ['A', 'B', 'X', 'Y']
GPIO.setmode(GPIO.BCM)
GPIO.setup(BUTTONS, GPIO.IN, pull_up_down=GPIO.PUD_UP)
button = ""
def show_credits(button):
global index
    ROTATION = 270 if FLIP else 90  # currently unused: rotation is fixed in the ST7789 constructor above
draw.text((0, 0), "A", font=font, fill=COLORS[index] if button == "A" else WHITE)
draw.text((WIDTH - 32, 0), "X", font=font, fill=COLORS[index] if button == "X" else WHITE)
draw.text((0, HEIGHT - 48), "B", font=font, fill=COLORS[index] if button == "B" else WHITE)
draw.text((WIDTH - 32, HEIGHT - 48), "Y", font=font, fill=COLORS[index] if button == "Y" else WHITE)
    draw.text((int(WIDTH*0.2), int(HEIGHT*0.09)), "¯\\_(ツ)_/¯", font=font_smiley, fill=COLORS[index] if button == "" else WHITE)
draw.text((int(WIDTH*0.09), int(HEIGHT*0.35)), "promethee", font=font, fill=COLORS[index] if button == "" else WHITE)
draw.text((int(WIDTH*0.2), int(HEIGHT*0.6)), "@github", font=font, fill=COLORS[index] if button == "" else WHITE)
display.display(img)
def button_press(pin):
global button
button = LABELS[BUTTONS.index(pin)] if button == "" else ""
for pin in BUTTONS:
GPIO.add_event_detect(pin, GPIO.BOTH, button_press, bouncetime=100)
while True:
index = index + 1 if index < len(COLORS) - 1 else 0
show_credits(button)
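    # modest frame delay (added suggestion; `time` is already imported above)
    time.sleep(0.05)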
|
[
"RPi.GPIO.setmode",
"PIL.Image.new",
"RPi.GPIO.setup",
"RPi.GPIO.add_event_detect",
"os.environ.get",
"PIL.ImageFont.truetype",
"PIL.ImageDraw.Draw",
"ST7789.ST7789"
] |
[((284, 384), 'ST7789.ST7789', 'ST7789', ([], {'rotation': '(90)', 'port': '(0)', 'cs': '(1)', 'dc': '(9)', 'backlight': '(13)', 'spi_speed_hz': '(SPI_SPEED_MHZ * 1000 * 1000)'}), '(rotation=90, port=0, cs=1, dc=9, backlight=13, spi_speed_hz=\n SPI_SPEED_MHZ * 1000 * 1000)\n', (290, 384), False, 'from ST7789 import ST7789\n'), ((557, 579), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (569, 579), True, 'import RPi.GPIO as GPIO\n'), ((580, 604), 'RPi.GPIO.setup', 'GPIO.setup', (['(13)', 'GPIO.OUT'], {}), '(13, GPIO.OUT)\n', (590, 604), True, 'import RPi.GPIO as GPIO\n'), ((612, 641), 'os.environ.get', 'os.environ.get', (['"""FLIP"""', '(False)'], {}), "('FLIP', False)\n", (626, 641), False, 'import os\n'), ((971, 1011), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['"""./CODE2000.TTF"""', '(28)'], {}), "('./CODE2000.TTF', 28)\n", (989, 1011), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((1019, 1055), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['RobotoMedium', '(40)'], {}), '(RobotoMedium, 40)\n', (1037, 1055), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((1062, 1098), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(WIDTH, HEIGHT)', '(0)'], {}), "('RGB', (WIDTH, HEIGHT), 0)\n", (1071, 1098), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((1106, 1125), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['img'], {}), '(img)\n', (1120, 1125), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((1182, 1204), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (1194, 1204), True, 'import RPi.GPIO as GPIO\n'), ((1205, 1259), 'RPi.GPIO.setup', 'GPIO.setup', (['BUTTONS', 'GPIO.IN'], {'pull_up_down': 'GPIO.PUD_UP'}), '(BUTTONS, GPIO.IN, pull_up_down=GPIO.PUD_UP)\n', (1215, 1259), True, 'import RPi.GPIO as GPIO\n'), ((2260, 2327), 'RPi.GPIO.add_event_detect', 'GPIO.add_event_detect', (['pin', 'GPIO.BOTH', 'button_press'], {'bouncetime': '(100)'}), '(pin, GPIO.BOTH, button_press, bouncetime=100)\n', (2281, 2327), True, 'import RPi.GPIO as GPIO\n')]
|
#!/usr/bin/env python3
# coding:utf-8
import os
import sys
"""
config 1.0
"""
DEBUG = True
HOST = '0.0.0.0'
PORT = 8000
NAME = 'layout'
DEPLOY = 0 # 0: standalone deployment; 1: connect to cloud server
HOMEPAGE = "/projects"
ERRPAGE = "/404"
TEST_ID = 0
# path
_PATH = os.path.abspath(os.path.dirname(__file__))
APP_PATH = os.path.abspath(os.path.dirname(_PATH))
ROOT_PATH = os.path.abspath(os.path.dirname(APP_PATH))
WEB_PATH = os.path.abspath(os.path.join(ROOT_PATH, "web"))
DIST_PATH = os.path.abspath(os.path.join(WEB_PATH, "dist"))
DIST_STATIC_PATH = os.path.abspath(os.path.join(WEB_PATH, "dist"))
DIST_INDEX_PATH = os.path.abspath(os.path.join(WEB_PATH, "dist", "index.html"))
WEB_3D_PATH = os.path.abspath(os.path.join(ROOT_PATH, "3d"))
DIST_3D_PATH = os.path.abspath(os.path.join(WEB_3D_PATH, "dist"))
DIST_3D_INDEX = os.path.abspath(os.path.join(DIST_3D_PATH, "index.html"))
# sqlite
DB_FILE_PATH = os.path.abspath(os.path.join(ROOT_PATH, f"{NAME}.db"))
DB_FILE = f'sqlite:///{DB_FILE_PATH}'
# PROJECT PATH
BASE_PROJECT_PATH = os.path.abspath(os.path.join(ROOT_PATH, "project"))
PROJECT_PATH = os.path.abspath(os.path.join(BASE_PROJECT_PATH, "project"))
DWG_PATH = os.path.abspath(os.path.join(BASE_PROJECT_PATH, "dwg"))
# WDA CAD PATH
WDA_CAD_PROJECT_PATH = os.path.abspath(os.path.join(ROOT_PATH, "cad-project", "storage"))
PROJECT_LOG_PATH = os.path.abspath(os.path.join(BASE_PROJECT_PATH, "log"))
GLOBAL_PATH = os.path.abspath(os.path.join(BASE_PROJECT_PATH, "global"))
STORAGE_PATH = os.path.abspath(os.path.join(GLOBAL_PATH, "storage"))
TMP_PATH = os.path.abspath(os.path.join(BASE_PROJECT_PATH, "tmp"))
TMP_INPUT_PATH = os.path.abspath(os.path.join(TMP_PATH, "input"))
DEMO_PATH = os.path.abspath(os.path.join(APP_PATH, "demo"))
DEMO_JSON_PATH = os.path.abspath(os.path.join(DEMO_PATH, "json"))
# tool v2
LIB_TOOL_PATH = os.path.abspath(os.path.join(ROOT_PATH, "tools"))
sys.path.insert(0, LIB_TOOL_PATH)
# core v2
LIB_CORE_PATH = os.path.abspath(os.path.join(ROOT_PATH, "core"))
sys.path.insert(0, LIB_CORE_PATH)
# cad v2
LIB_CAD_PATH = os.path.abspath(os.path.join(ROOT_PATH, "cad"))
sys.path.insert(0, LIB_CAD_PATH)
# auth wda-auth-decorators
AUTH_DECORATORS_PATH = os.path.abspath(os.path.join(ROOT_PATH, "wda-auth-decorators"))
# auth database
AUTH_DB_HOST = "172.17.0.1"
AUTH_DB_PORT = 15432
AUTH_DB_USERNAME = "admin"
AUTH_DB_PASSWORD = "<PASSWORD>"
# model
MODEL_PATH = os.path.abspath(os.path.join(ROOT_PATH, "wda-cloud"))
# model database 172.17.0.1
DB_HOST = "172.17.0.1"
DB_PORT = 15433
DB_USERNAME = "admin"
DB_PASSWORD = "<PASSWORD>"
# logger
LOG_NAME = f"{NAME}"
LOG_LEVER = "INFO" # "WARNING"
LOG_PATH = os.path.abspath(os.path.join(APP_PATH, f"{NAME}.log"))
# dwg2dxf
DWG2DXF_SERVER = "http://172.17.0.1:8001/dwg2dxf/"
DXF2DWG_SERVER = "http://172.17.0.1:8001/dxf2dwg/"
try:
from local_config import *
except ImportError:
pass
try:
from config.cloud import *
except ImportError:
pass
if DEPLOY == 1:
sys.path.append(AUTH_DECORATORS_PATH)
sys.path.append(MODEL_PATH)
print("deploy", DEPLOY)
print("homepage", HOMEPAGE)
print(sys.path)
|
[
"sys.path.append",
"os.path.dirname",
"os.path.join",
"sys.path.insert"
] |
[((1869, 1902), 'sys.path.insert', 'sys.path.insert', (['(0)', 'LIB_TOOL_PATH'], {}), '(0, LIB_TOOL_PATH)\n', (1884, 1902), False, 'import sys\n'), ((1979, 2012), 'sys.path.insert', 'sys.path.insert', (['(0)', 'LIB_CORE_PATH'], {}), '(0, LIB_CORE_PATH)\n', (1994, 2012), False, 'import sys\n'), ((2086, 2118), 'sys.path.insert', 'sys.path.insert', (['(0)', 'LIB_CAD_PATH'], {}), '(0, LIB_CAD_PATH)\n', (2101, 2118), False, 'import sys\n'), ((256, 281), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (271, 281), False, 'import os\n'), ((310, 332), 'os.path.dirname', 'os.path.dirname', (['_PATH'], {}), '(_PATH)\n', (325, 332), False, 'import os\n'), ((363, 388), 'os.path.dirname', 'os.path.dirname', (['APP_PATH'], {}), '(APP_PATH)\n', (378, 388), False, 'import os\n'), ((418, 448), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""web"""'], {}), "(ROOT_PATH, 'web')\n", (430, 448), False, 'import os\n'), ((478, 508), 'os.path.join', 'os.path.join', (['WEB_PATH', '"""dist"""'], {}), "(WEB_PATH, 'dist')\n", (490, 508), False, 'import os\n'), ((545, 575), 'os.path.join', 'os.path.join', (['WEB_PATH', '"""dist"""'], {}), "(WEB_PATH, 'dist')\n", (557, 575), False, 'import os\n'), ((611, 655), 'os.path.join', 'os.path.join', (['WEB_PATH', '"""dist"""', '"""index.html"""'], {}), "(WEB_PATH, 'dist', 'index.html')\n", (623, 655), False, 'import os\n'), ((688, 717), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""3d"""'], {}), "(ROOT_PATH, '3d')\n", (700, 717), False, 'import os\n'), ((750, 783), 'os.path.join', 'os.path.join', (['WEB_3D_PATH', '"""dist"""'], {}), "(WEB_3D_PATH, 'dist')\n", (762, 783), False, 'import os\n'), ((817, 857), 'os.path.join', 'os.path.join', (['DIST_3D_PATH', '"""index.html"""'], {}), "(DIST_3D_PATH, 'index.html')\n", (829, 857), False, 'import os\n'), ((900, 937), 'os.path.join', 'os.path.join', (['ROOT_PATH', 'f"""{NAME}.db"""'], {}), "(ROOT_PATH, f'{NAME}.db')\n", (912, 937), False, 'import os\n'), ((1029, 1063), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""project"""'], {}), "(ROOT_PATH, 'project')\n", (1041, 1063), False, 'import os\n'), ((1096, 1138), 'os.path.join', 'os.path.join', (['BASE_PROJECT_PATH', '"""project"""'], {}), "(BASE_PROJECT_PATH, 'project')\n", (1108, 1138), False, 'import os\n'), ((1167, 1205), 'os.path.join', 'os.path.join', (['BASE_PROJECT_PATH', '"""dwg"""'], {}), "(BASE_PROJECT_PATH, 'dwg')\n", (1179, 1205), False, 'import os\n'), ((1262, 1311), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""cad-project"""', '"""storage"""'], {}), "(ROOT_PATH, 'cad-project', 'storage')\n", (1274, 1311), False, 'import os\n'), ((1349, 1387), 'os.path.join', 'os.path.join', (['BASE_PROJECT_PATH', '"""log"""'], {}), "(BASE_PROJECT_PATH, 'log')\n", (1361, 1387), False, 'import os\n'), ((1419, 1460), 'os.path.join', 'os.path.join', (['BASE_PROJECT_PATH', '"""global"""'], {}), "(BASE_PROJECT_PATH, 'global')\n", (1431, 1460), False, 'import os\n'), ((1493, 1529), 'os.path.join', 'os.path.join', (['GLOBAL_PATH', '"""storage"""'], {}), "(GLOBAL_PATH, 'storage')\n", (1505, 1529), False, 'import os\n'), ((1559, 1597), 'os.path.join', 'os.path.join', (['BASE_PROJECT_PATH', '"""tmp"""'], {}), "(BASE_PROJECT_PATH, 'tmp')\n", (1571, 1597), False, 'import os\n'), ((1632, 1663), 'os.path.join', 'os.path.join', (['TMP_PATH', '"""input"""'], {}), "(TMP_PATH, 'input')\n", (1644, 1663), False, 'import os\n'), ((1694, 1724), 'os.path.join', 'os.path.join', (['APP_PATH', '"""demo"""'], {}), "(APP_PATH, 'demo')\n", (1706, 1724), False, 'import os\n'), ((1759, 1790), 'os.path.join', 'os.path.join', (['DEMO_PATH', '"""json"""'], {}), "(DEMO_PATH, 'json')\n", (1771, 1790), False, 'import os\n'), ((1835, 1867), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""tools"""'], {}), "(ROOT_PATH, 'tools')\n", (1847, 1867), False, 'import os\n'), ((1946, 1977), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""core"""'], {}), "(ROOT_PATH, 'core')\n", (1958, 1977), False, 'import os\n'), ((2054, 2084), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""cad"""'], {}), "(ROOT_PATH, 'cad')\n", (2066, 2084), False, 'import os\n'), ((2186, 2232), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""wda-auth-decorators"""'], {}), "(ROOT_PATH, 'wda-auth-decorators')\n", (2198, 2232), False, 'import os\n'), ((2397, 2433), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""wda-cloud"""'], {}), "(ROOT_PATH, 'wda-cloud')\n", (2409, 2433), False, 'import os\n'), ((2642, 2679), 'os.path.join', 'os.path.join', (['APP_PATH', 'f"""{NAME}.log"""'], {}), "(APP_PATH, f'{NAME}.log')\n", (2654, 2679), False, 'import os\n'), ((2923, 2960), 'sys.path.append', 'sys.path.append', (['AUTH_DECORATORS_PATH'], {}), '(AUTH_DECORATORS_PATH)\n', (2938, 2960), False, 'import sys\n'), ((2965, 2992), 'sys.path.append', 'sys.path.append', (['MODEL_PATH'], {}), '(MODEL_PATH)\n', (2980, 2992), False, 'import sys\n')]
|
import pandas as pd
from selecttotex.database import Database
class Totex:
    """Class for turning SELECT results into LaTeX tables.
    """
def __init__(self):
self.db = Database().get_connection()
    def to_tex(self, command_list: list, output_file: str) -> None:
        """Render each SELECT statement in ``command_list`` as a LaTeX table.
        :param command_list: list of SELECT statements to execute
        :param output_file: path/name of the file the tables are written to
        """
        # Create the file that collects the results
        file = open(output_file, 'w')
        file.write('Tables generated by SelectToTex\n\n\n')
        # Loop over the commands in the list
for command in command_list:
self.db.execute(command)
            # Fetch the result and convert it straight to a LaTeX string
r = str(pd.DataFrame(self.db.fetchall()).to_latex())
file.write(r)
file.write('\n\n')
file.close()
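# Illustrative usage sketch (the query is an assumption about the user's schema):
# Totex().to_tex(['SELECT * FROM results LIMIT 10'], 'tables.tex')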
|
[
"selecttotex.database.Database"
] |
[((190, 200), 'selecttotex.database.Database', 'Database', ([], {}), '()\n', (198, 200), False, 'from selecttotex.database import Database\n')]
|
import numpy as np
from numpy import exp, sqrt
from functools import partial
from scipy import optimize
from scipy.stats import norm
import scipy.integrate as integrate
from fox_toolbox.utils import rates
"""This module price swaption under Hull White model using Jamshidian method.
Usage example:
from hw import Jamshidian as jamsh
jamsh_price, debug = jamsh.hw_swo(swo, ref_mr, sigma_hw_jamsh, dsc_curve, estim_curve)
swo : rates.Swaption
ref_mr : float
sigma_hw_jamsh : rates.Curve
dsc_curve : rates.RateCurve
estim_curve : rates.RateCurve
"""
class Jamshidian():
def __init__(self, mr, sigma, dsc_curve, estim_curve):
assert isinstance(sigma, (float, rates.Curve)), f'sigma: float or rates.Curve, not {type(sigma)}'
self.mr = mr
self.sigma = sigma
self.dsc_curve = dsc_curve
self.estim_curve = estim_curve
@staticmethod
    def sign_changes(array):
        """return the indices in array after which the sign changes"""
return np.where(np.diff(np.sign(array)))[0]
@staticmethod
def _B(t, T, a):
return (1 - exp(-a * (T - t))) / a
@staticmethod
def _v(t, T, u, a):
p1 = (T - t)
p2 = - (2 / a) * exp(-a * u) * (exp(a * T) - exp(a * t))
p3 = exp(-2 * a *u) * (exp(2 * a *T) - exp(2 * a *t)) / (2 * a)
return (p1 + p2 + p3) / (a**2)
@staticmethod
def _V(t, T, u, a, sigma):
if isinstance(sigma, float):
            return sigma**2 * Jamshidian._v(t, T, u, a)
elif isinstance(sigma, rates.Curve):
total_var = 0.
expiry = T
previous_expiries = [t_exp for t_exp in sigma.buckets if t_exp <= expiry]
previous_sigmas = list(sigma.values[:len(previous_expiries)])
if previous_expiries[-1] < expiry:
previous_sigmas.append(sigma.values[len(previous_expiries)])
previous_expiries.append(expiry)
for i in range(len(previous_expiries) - 1):
                total_var += (previous_sigmas[i+1] ** 2) * Jamshidian._v(t, previous_expiries[i+1], u, a)
return total_var
@staticmethod
def _A(t, T, a, sigma, dsc_curve):
assert isinstance(sigma, (float, rates.Curve)), f'sigma: float or rates.Curve, not {type(sigma)}'
fwd_dsc = dsc_curve.get_fwd_dsc(t, T)
        return fwd_dsc * exp(0.5*(Jamshidian._V(0, t, t, a, sigma) - Jamshidian._V(0, t, T, a, sigma)))
def get_coef(self, swo):
""" Coefficients for Put swaption from calibration basket. Jamishidian """
flt_adjs = swo.get_flt_adjustments(self.dsc_curve, self.estim_curve)
        c0 = -self._A(swo.expiry, swo.start_date, self.mr, self.sigma, self.dsc_curve)
        c = list(map(lambda dcf, pdate, fadj: dcf * (swo.strike - fadj) * self._A(swo.expiry, pdate, self.mr, self.sigma, self.dsc_curve),
                     swo.day_count_fractions, swo.payment_dates, flt_adjs))
        c[-1] += self._A(swo.expiry, swo.maturity, self.mr, self.sigma, self.dsc_curve)
c.insert(0, c0)
return np.array(c)
def get_var_x(self, expiry):
        if isinstance(self.sigma, float):
            return 1 / (2 * self.mr) * (1 - exp(-2 * self.mr * expiry)) * self.sigma ** 2
elif isinstance(sigma, rates.Curve):
total_var = 0.
previous_expiries = [t_exp for t_exp in self.sigma.buckets if t_exp <= expiry]
previous_sigmas = list(self.sigma.values[:len(previous_expiries)])
if previous_expiries[-1] < expiry:
previous_sigmas.append(self.sigma.values[len(previous_expiries)])
previous_expiries.append(expiry)
for i in range(len(previous_expiries) - 1):
total_var += 1 / (2 * self.mr) * (previous_sigmas[i+1] ** 2) * (exp(-2 * self.mr * (expiry - previous_expiries[i+1])) - exp(-2 * self.mr * (expiry - previous_expiries[i])))
return total_var
def get_b_i(self, swo):
""" array of B_i for by each payment date """
        b0 = self._B(swo.expiry, swo.start_date, self.mr)
        b = list(map(lambda pdate: self._B(swo.expiry, pdate, self.mr), swo.payment_dates))
b.insert(0, b0)
return np.array(b)
@staticmethod
def swap_value(coef, b_i, varx, x):
""" Swap function for finding x_star """
exp_b_var = exp(- b_i * sqrt(varx) * x)
return coef.dot(exp_b_var)
@staticmethod
def get_x_star(coef, b_i, varx):
x0 = .0
        func = partial(Jamshidian.swap_value, coef, b_i, varx)
# optimum = optimize.newton(func, x0=x0)
optimum = optimize.bisect(func, -6, 6)
return optimum
### TODO: continue adapting these module-level helpers into the Jamshidian class
def hw_swo_analytic(coef, b_i, varx, x_star, IsCall):
""" analytic """
sign = -1 if IsCall else 1
if IsCall: coef = np.negative(coef)
val_arr = exp(0.5 * b_i ** 2 * varx) * norm.cdf(sign*(x_star + b_i * sqrt(varx)))
return coef.dot(val_arr)
def hw_swo_numeric(coef, b_i, varx, IsCall):
if IsCall: coef = np.negative(coef)
swaption_numeric = integrate.quad(lambda x: swo_payoff(coef, b_i, varx, x) * norm.pdf(x), -10, 10)[0]
degen_swo_analytic, degen_swo_numeric = 0, 0
control_variable = degen_swo_analytic - degen_swo_numeric
return swaption_numeric + control_variable
def swo_payoff(coef, b_i, varx, x):
"""Call/Put is hidden in coef"""
swap = swap_value(coef, b_i, varx, x)
return swap if swap > 0 else 0
def hw_swo(swo, a, sigma, dsc_curve, estim_curve):
""" Main Hull White swaption function """
IsCall = False if swo.pay_rec == 'Receiver' else True
coef = get_coef(swo, a, sigma, dsc_curve, estim_curve)
b_i = get_b_i(swo, a)
varx = get_var_x(swo.expiry, a, sigma)
sgn_changes = sign_changes(coef)
change_once = len(sgn_changes) == 1
if change_once:
x_star = get_x_star(coef, b_i, varx)
debug_dict = {}
return hw_swo_analytic(coef, b_i, varx, x_star, IsCall), debug_dict
else:
debug_dict = {}
return hw_swo_numeric(coef, b_i, varx, IsCall), debug_dict
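# A minimal, self-contained sketch of the x*-root step (illustrative numbers,
# not from the original source): the swap value is monotone in x, so bisection
# over a wide bracket recovers the critical state x*, exactly as get_x_star does.
def _demo_x_star():
    coef = np.array([-1.0, 0.4, 0.7])
    b_i = np.array([0.0, 0.5, 1.0])
    varx = 0.04
    swap = lambda x: coef.dot(exp(-b_i * sqrt(varx) * x))
    return optimize.bisect(swap, -6, 6)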
|
[
"functools.partial",
"numpy.negative",
"scipy.stats.norm.pdf",
"numpy.array",
"numpy.exp",
"numpy.sign",
"scipy.optimize.bisect",
"numpy.sqrt"
] |
[((3058, 3069), 'numpy.array', 'np.array', (['c'], {}), '(c)\n', (3066, 3069), True, 'import numpy as np\n'), ((4178, 4189), 'numpy.array', 'np.array', (['b'], {}), '(b)\n', (4186, 4189), True, 'import numpy as np\n'), ((4468, 4504), 'functools.partial', 'partial', (['swap_value', 'coef', 'b_i', 'varx'], {}), '(swap_value, coef, b_i, varx)\n', (4475, 4504), False, 'from functools import partial\n'), ((4572, 4600), 'scipy.optimize.bisect', 'optimize.bisect', (['func', '(-6)', '(6)'], {}), '(func, -6, 6)\n', (4587, 4600), False, 'from scipy import optimize\n'), ((4803, 4820), 'numpy.negative', 'np.negative', (['coef'], {}), '(coef)\n', (4814, 4820), True, 'import numpy as np\n'), ((4840, 4866), 'numpy.exp', 'exp', (['(0.5 * b_i ** 2 * varx)'], {}), '(0.5 * b_i ** 2 * varx)\n', (4843, 4866), False, 'from numpy import exp, sqrt\n'), ((5024, 5041), 'numpy.negative', 'np.negative', (['coef'], {}), '(coef)\n', (5035, 5041), True, 'import numpy as np\n'), ((1144, 1161), 'numpy.exp', 'exp', (['(-a * (T - t))'], {}), '(-a * (T - t))\n', (1147, 1161), False, 'from numpy import exp, sqrt\n'), ((1260, 1271), 'numpy.exp', 'exp', (['(-a * u)'], {}), '(-a * u)\n', (1263, 1271), False, 'from numpy import exp, sqrt\n'), ((1275, 1285), 'numpy.exp', 'exp', (['(a * T)'], {}), '(a * T)\n', (1278, 1285), False, 'from numpy import exp, sqrt\n'), ((1288, 1298), 'numpy.exp', 'exp', (['(a * t)'], {}), '(a * t)\n', (1291, 1298), False, 'from numpy import exp, sqrt\n'), ((1313, 1328), 'numpy.exp', 'exp', (['(-2 * a * u)'], {}), '(-2 * a * u)\n', (1316, 1328), False, 'from numpy import exp, sqrt\n'), ((1060, 1074), 'numpy.sign', 'np.sign', (['array'], {}), '(array)\n', (1067, 1074), True, 'import numpy as np\n'), ((1331, 1345), 'numpy.exp', 'exp', (['(2 * a * T)'], {}), '(2 * a * T)\n', (1334, 1345), False, 'from numpy import exp, sqrt\n'), ((1347, 1361), 'numpy.exp', 'exp', (['(2 * a * t)'], {}), '(2 * a * t)\n', (1350, 1361), False, 'from numpy import exp, sqrt\n'), ((4330, 4340), 'numpy.sqrt', 'sqrt', (['varx'], {}), '(varx)\n', (4334, 4340), False, 'from numpy import exp, sqrt\n'), ((3181, 3201), 'numpy.exp', 'exp', (['(-2 * a * expiry)'], {}), '(-2 * a * expiry)\n', (3184, 3201), False, 'from numpy import exp, sqrt\n'), ((5128, 5139), 'scipy.stats.norm.pdf', 'norm.pdf', (['x'], {}), '(x)\n', (5136, 5139), False, 'from scipy.stats import norm\n'), ((3776, 3831), 'numpy.exp', 'exp', (['(-2 * self.mr * (expiry - previous_expiries[i + 1]))'], {}), '(-2 * self.mr * (expiry - previous_expiries[i + 1]))\n', (3779, 3831), False, 'from numpy import exp, sqrt\n'), ((3832, 3883), 'numpy.exp', 'exp', (['(-2 * self.mr * (expiry - previous_expiries[i]))'], {}), '(-2 * self.mr * (expiry - previous_expiries[i]))\n', (3835, 3883), False, 'from numpy import exp, sqrt\n'), ((4899, 4909), 'numpy.sqrt', 'sqrt', (['varx'], {}), '(varx)\n', (4903, 4909), False, 'from numpy import exp, sqrt\n')]
|
import time
import requests
import pyeureka.validator as validator
import pyeureka.const as c
def get_timestamp():
return int(time.time())
class EurekaClientError(Exception):
pass
class EurekaInstanceDoesNotExistException(Exception):
pass
class EurekaClient:
def __init__(self, eureka_url, instance_definition=None, verbose=False):
"""
        eureka_url is the address to send requests to.
        instance_definition is a description of the service, NOT conforming
        (as of 16.05.17) to the schema available at
        https://github.com/Netflix/eureka/wiki/Eureka-REST-operations
Basic operations:
service side:
client = EurekaClient('localhost:8765', {'ipAddr': '127.0.0.1', 'port': 80, 'app': 'myapp'})
client.register()
client.heartbeat()
client side:
client = EurekaClient('localhost:8765')
try:
client.query(app='myapp')
except EurekaClientError:
print('operation failed')
"""
self.eureka_url = eureka_url
if instance_definition is not None:
self.instance_definition = validator.validate_instance_definition(
instance_definition)
self.app_id = self.instance_definition['instance']['app']
self.instance_id = self.instance_definition[
'instance']['instanceId']
        self.verbose = verbose
        if verbose:
            print("EurekaClient running with verbosity enabled")
            if instance_definition is not None:
                print("instance_definition: {}".format(self.instance_definition))
def register(self):
request_uri = self.eureka_url + '/eureka/apps/' + self.app_id
self._request('POST', request_uri, 'registration',
204, payload=self.instance_definition)
def deregister(self):
self._request('DELETE', comment='deregistration')
def heartbeat(self):
request_uri = self._instance_uri() + '?status=UP&lastDirtyTimestamp=' + \
str(get_timestamp())
self._request('PUT', uri=request_uri, comment='heartbeat',
errors={404: EurekaInstanceDoesNotExistException})
def query(self, app=None, instance=None):
request_uri = self.eureka_url + '/eureka/apps/'
if app is not None:
request_uri += app
if instance is not None:
request_uri += '/' + instance
elif instance is not None:
request_uri = self.eureka_url + '/eureka/instances/' + instance
request = self._request('GET', request_uri, 'query')
return request.json()
def query_vip(self, vip):
request_uri = self.eureka_url + '/eureka/vips/' + vip
request = self._request('GET', request_uri, 'query vip')
return request
def query_svip(self, svip):
request_uri = self.eureka_url + '/eureka/svips/' + svip
request = self._request('GET', request_uri, 'query svip')
return request
def take_instance_out_of_service(self):
request_uri = self._instance_uri() + '/status?value=OUT_OF_SERVICE'
self._request('PUT', request_uri, 'out of service')
def put_instance_back_into_service(self):
request_uri = self._instance_uri() + '/status?value=UP'
self._request('PUT', request_uri, 'up')
def update_metadata(self, key, value):
request_uri = self._instance_uri() + \
'/metadata?{}={}'.format(key, value)
self._request('PUT', request_uri, 'update_metadata')
def _instance_uri(self):
return self.eureka_url + '/eureka/apps/' + self.app_id + '/' + self.instance_id
def _fail_code(self, code, request, comment, errors=None):
if self.verbose:
self._show_request(request, comment)
if request.status_code != code:
error = EurekaClientError
if errors is not None and request.status_code in errors:
error = errors[request.status_code]
raise error({'request': request, 'comment': comment,
'status_code': request.status_code})
def _show_request(self, request, comment):
print("{}:".format(comment))
print("Request code: {}".format(request.status_code))
print("Request headers: {}".format(request.headers))
print("Request response: {}".format(request.text))
def _request(self, method, uri=None, comment='operation', accepted_code=200, errors=None, payload=None):
if uri is None:
uri = self._instance_uri()
request = c.EUREKA_REQUESTS[method](
uri, headers=c.EUREKA_HEADERS[method], json=payload)
self._fail_code(accepted_code, request, comment, errors=errors)
return request
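# A minimal service-side sketch based on the class docstring (URL and instance
# fields are illustrative, not taken from the original source):
if __name__ == '__main__':
    client = EurekaClient('http://localhost:8765',
                         {'ipAddr': '127.0.0.1', 'port': 80, 'app': 'myapp'},
                         verbose=True)
    client.register()
    while True:
        time.sleep(30)  # renew well inside Eureka's default 90 s lease eviction
        client.heartbeat()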
|
[
"pyeureka.validator.validate_instance_definition",
"time.time"
] |
[((134, 145), 'time.time', 'time.time', ([], {}), '()\n', (143, 145), False, 'import time\n'), ((1179, 1238), 'pyeureka.validator.validate_instance_definition', 'validator.validate_instance_definition', (['instance_definition'], {}), '(instance_definition)\n', (1217, 1238), True, 'import pyeureka.validator as validator\n')]
|
## FUNCTIONS TO OVERLAY ALL PICS!!
get_ipython().magic('matplotlib inline')
import cv2
from matplotlib import pyplot as plt
import os
from PIL import Image, ImageDraw, ImageFont
image_dir = ["data/pics_for_overlaps/Sarah",
"data/pics_for_overlaps/Allison",
"data/pics_for_overlaps/Amanda_S",
"data/pics_for_overlaps/Gisele",
"data/pics_for_overlaps/Keira",
"data/pics_for_overlaps/Squares"
]
plt.figure(figsize=(20,10))
font = ImageFont.truetype("fonts/Arial.ttf", 20)
n_row = 2
n_col = 3
g = 0
text = ["Sarah-round","Allison-oval","Amanda-heart",'Gisele-long','Keira-square','All Squares']
for ddir in image_dir:
a = .6
i = 0
g += 1
    for f in os.listdir(ddir):
        if f.endswith('.jpg'):
            file, ext = os.path.splitext(f)
            im = Image.open(os.path.join(ddir, f))
            image = cv2.imread(os.path.join(ddir, f))
            a = a - .01  # fade each successive overlay a little
            i += 1
            draw = ImageDraw.Draw(im)
            draw.text((10, 10), text[g - 1], fill=None, font=font, anchor=None)
            draw.text((10, 30), str(i) + " Pics", fill=None, font=font, anchor=None)
            plt.subplot(n_row, n_col, g)
            plt.imshow(im, alpha=a)
|
[
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.imshow",
"PIL.Image.open",
"PIL.ImageFont.truetype",
"cv2.imread",
"matplotlib.pyplot.figure",
"os.path.splitext",
"PIL.ImageDraw.Draw",
"os.listdir"
] |
[((550, 578), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 10)'}), '(figsize=(20, 10))\n', (560, 578), True, 'from matplotlib import pyplot as plt\n'), ((628, 669), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['"""fonts/Arial.ttf"""', '(20)'], {}), "('fonts/Arial.ttf', 20)\n", (646, 669), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((861, 877), 'os.listdir', 'os.listdir', (['ddir'], {}), '(ddir)\n', (871, 877), False, 'import glob, os\n'), ((934, 953), 'os.path.splitext', 'os.path.splitext', (['f'], {}), '(f)\n', (950, 953), False, 'import glob, os\n'), ((972, 998), 'PIL.Image.open', 'Image.open', (["(ddir + '/' + f)"], {}), "(ddir + '/' + f)\n", (982, 998), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((1015, 1041), 'cv2.imread', 'cv2.imread', (["(ddir + '/' + f)"], {}), "(ddir + '/' + f)\n", (1025, 1041), False, 'import cv2\n'), ((1098, 1116), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['im'], {}), '(im)\n', (1112, 1116), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((1288, 1316), 'matplotlib.pyplot.subplot', 'plt.subplot', (['n_row', 'n_col', 'g'], {}), '(n_row, n_col, g)\n', (1299, 1316), True, 'from matplotlib import pyplot as plt\n'), ((1330, 1353), 'matplotlib.pyplot.imshow', 'plt.imshow', (['im'], {'alpha': 'a'}), '(im, alpha=a)\n', (1340, 1353), True, 'from matplotlib import pyplot as plt\n')]
|
from django.db import models
class Census(models.Model):
voting_id = models.PositiveIntegerField()
voter_id = models.PositiveIntegerField()
class Meta:
unique_together = (('voting_id', 'voter_id'),)
|
[
"django.db.models.PositiveIntegerField"
] |
[((75, 104), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (102, 104), False, 'from django.db import models\n'), ((120, 149), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (147, 149), False, 'from django.db import models\n')]
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models, api
class Lead(models.Model):
_inherit = 'crm.lead'
event_lead_rule_id = fields.Many2one('event.lead.rule', string="Registration Rule", help="Rule that created this lead")
event_id = fields.Many2one('event.event', string="Source Event", help="Event triggering the rule that created this lead")
registration_ids = fields.Many2many(
'event.registration', string="Source Registrations",
groups='event.group_event_user',
help="Registrations triggering the rule that created this lead")
registration_count = fields.Integer(
string="# Registrations", compute='_compute_registration_count',
groups='event.group_event_user',
help="Counter for the registrations linked to this lead")
@api.depends('registration_ids')
def _compute_registration_count(self):
for record in self:
record.registration_count = len(record.registration_ids)
|
[
"odoo.fields.Many2many",
"odoo.api.depends",
"odoo.fields.Many2one",
"odoo.fields.Integer"
] |
[((217, 320), 'odoo.fields.Many2one', 'fields.Many2one', (['"""event.lead.rule"""'], {'string': '"""Registration Rule"""', 'help': '"""Rule that created this lead"""'}), "('event.lead.rule', string='Registration Rule', help=\n 'Rule that created this lead')\n", (232, 320), False, 'from odoo import fields, models, api\n'), ((331, 446), 'odoo.fields.Many2one', 'fields.Many2one', (['"""event.event"""'], {'string': '"""Source Event"""', 'help': '"""Event triggering the rule that created this lead"""'}), "('event.event', string='Source Event', help=\n 'Event triggering the rule that created this lead')\n", (346, 446), False, 'from odoo import fields, models, api\n'), ((465, 641), 'odoo.fields.Many2many', 'fields.Many2many', (['"""event.registration"""'], {'string': '"""Source Registrations"""', 'groups': '"""event.group_event_user"""', 'help': '"""Registrations triggering the rule that created this lead"""'}), "('event.registration', string='Source Registrations',\n groups='event.group_event_user', help=\n 'Registrations triggering the rule that created this lead')\n", (481, 641), False, 'from odoo import fields, models, api\n'), ((683, 863), 'odoo.fields.Integer', 'fields.Integer', ([], {'string': '"""# Registrations"""', 'compute': '"""_compute_registration_count"""', 'groups': '"""event.group_event_user"""', 'help': '"""Counter for the registrations linked to this lead"""'}), "(string='# Registrations', compute=\n '_compute_registration_count', groups='event.group_event_user', help=\n 'Counter for the registrations linked to this lead')\n", (697, 863), False, 'from odoo import fields, models, api\n'), ((885, 916), 'odoo.api.depends', 'api.depends', (['"""registration_ids"""'], {}), "('registration_ids')\n", (896, 916), False, 'from odoo import fields, models, api\n')]
|
from collections import Counter, defaultdict
def moves(pos, endv, pathz, rolls=0):
    if rolls == 3:
        pathz.append(pos)
        return
    for i in [1, 2, 3]:
        npos = (pos + i - 1) % 10 + 1
        moves(npos, endv, pathz, rolls + 1)
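# Rolling the Dirac die three times splits each universe 27 ways; collapsing
# those 27 paths into a Counter of landing positions lets the main loop below
# weight whole groups of universes instead of enumerating them one at a time.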
possibilities = {}
for x in range(1, 11):
    pathz = []
    moves(x, 0, pathz)
    # print(x, Counter(pathz), len(pathz))
    possibilities[x] = Counter(pathz)
# pu maps a game state (p1 pos, p1 score, p2 pos, p2 score) -> universe count
# pu = {(4, 0, 8, 0): 1}  # the example starting positions
pu = {(7, 0, 6, 0): 1}
p1wins = 0
p2wins = 0
onesmove = 0
while len(pu.keys()) != 0:
    onesmove = not onesmove
    pun = defaultdict(int)
    for p1, s1, p2, s2 in pu.keys():
        universes = pu[(p1, s1, p2, s2)]
        if onesmove:
            for npos in possibilities[p1]:
                nscore = s1 + npos
                if nscore >= 21:
                    p1wins += universes * possibilities[p1][npos]
                else:
                    pun[(npos, nscore, p2, s2)] += universes * possibilities[p1][npos]
        else:
            for npos in possibilities[p2]:
                nscore = s2 + npos
                if nscore >= 21:
                    p2wins += universes * possibilities[p2][npos]
                else:
                    pun[(p1, s1, npos, nscore)] += universes * possibilities[p2][npos]
    pu = pun.copy()
print(f'player1 wins: {p1wins}')
print(f'player2 wins: {p2wins}')
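# The puzzle asks for the player who wins in more universes, so the final
# answer is the larger of the two counts.
print(f'answer: {max(p1wins, p2wins)}')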
|
[
"collections.defaultdict",
"collections.Counter"
] |
[((434, 448), 'collections.Counter', 'Counter', (['pathz'], {}), '(pathz)\n', (441, 448), False, 'from collections import Counter, defaultdict\n'), ((593, 609), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (604, 609), False, 'from collections import Counter, defaultdict\n')]
|
import re
from logging import getLogger
import pandas as pd
from .external import DataframeLoader
log = getLogger(__name__)
class PrimarySchoolClasses(DataframeLoader):
DISABLE_CACHE = False
def __init__(self, pupils, present=None):
self._present = present
super().__init__(pupils)
def data(self) -> pd.DataFrame:
"""Return locations for all the classes in the supplied primary schools. Simple approximation: only one class
per pupil-age, even if 80 pupils in one class..."""
def rows():
i = 0
seen = set()
# for row in self._source.itertuples(name='Segment'): Does not work! No column headings!
for index, row in self._source.iterrows():
for cell in row.items():
                r = re.match(r'leeftijd_(\d+)', cell[0])
if not r:
continue
age = int(r.group(1))
if (row.brin_nummer, age) in seen:
continue
seen.add((row.brin_nummer, age))
i += 1
yield {'location_id': i,
'postcode_target': row.postcode_target}
return pd.DataFrame((row for row in rows()), columns=('location_id', 'postcode_target'))
|
[
"re.match",
"logging.getLogger"
] |
[((177, 196), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (186, 196), False, 'from logging import getLogger\n'), ((884, 920), 're.match', 're.match', (['"""leeftijd_(\\\\d+)"""', 'cell[0]'], {}), "('leeftijd_(\\\\d+)', cell[0])\n", (892, 920), False, 'import re\n')]
|
#
# Hello World client in Python
# Connects REQ socket to tcp://localhost:5555
#
import zmq
def main():
context = zmq.Context()
# Socket to talk to server
print("Connecting to Paddle Server…")
socket = context.socket(zmq.REQ)
socket.connect("tcp://localhost:5555")
# Do 2 requests, waiting each time for a response
VIDEO_FILE_PATHS = [
"dataset/ict/CAM1-Case18-Low.mp4",
"dataset/ict/CAM2-Case18-Low.mp4",
]
try:
for i, p in enumerate(VIDEO_FILE_PATHS):
print(f"Sending Video: {i} ...")
socket.send_string(p)
# Get the reply.
message = socket.recv()
print(f"Received reply {i}, Status: {message}")
except KeyboardInterrupt:
print("W: interrupt received, stopping...")
finally:
# clean up
socket.close()
context.term()
if __name__ == "__main__":
main()
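# A minimal matching REP server sketch (illustrative, not part of this file):
# it binds the other end of the socket and acknowledges each received path.
#
#   import zmq
#   context = zmq.Context()
#   socket = context.socket(zmq.REP)
#   socket.bind("tcp://*:5555")
#   while True:
#       path = socket.recv_string()
#       ...  # hand the video off for processing
#       socket.send(b"OK")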
|
[
"zmq.Context"
] |
[((124, 137), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (135, 137), False, 'import zmq\n')]
|
import pytest
from _pytest.logging import LogCaptureFixture
from loguru import logger
@pytest.fixture
def caplog(caplog: LogCaptureFixture):
    """Override pytest's built-in caplog so loguru records are captured too."""
    handler_id = logger.add(caplog.handler, format="{message}")
    yield caplog
    logger.remove(handler_id)
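# A minimal usage sketch (assumed test, not from the original source): with the
# fixture above, loguru output becomes visible through pytest's caplog.
#
#   def test_warns(caplog):
#       logger.warning("disk almost full")
#       assert "disk almost full" in caplog.text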
|
[
"loguru.logger.remove",
"loguru.logger.add"
] |
[((160, 206), 'loguru.logger.add', 'logger.add', (['caplog.handler'], {'format': '"""{message}"""'}), "(caplog.handler, format='{message}')\n", (170, 206), False, 'from loguru import logger\n'), ((228, 253), 'loguru.logger.remove', 'logger.remove', (['handler_id'], {}), '(handler_id)\n', (241, 253), False, 'from loguru import logger\n')]
|
import sys
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
from celery import states
from celery.result import AsyncResult, allow_join_result
from .fields import JSONField
def validate_schedule_at(value):
if value < timezone.now():
raise ValidationError("Request schedule cannot be in the past!")
return value
class HttpRequest(models.Model):
GET = 'get'
HEAD = 'head'
POST = 'post'
PUT = 'put'
DELETE = 'delete'
METHOD_CHOICES = (
(GET, _('Get')),
(HEAD, _('Head')),
(POST, _('Post')),
(PUT, _('Put')),
(DELETE, _('Delete')),
)
url = models.URLField()
method = models.CharField(max_length=8, choices=METHOD_CHOICES)
headers = JSONField(blank=True)
params = JSONField(blank=True)
data = JSONField(blank=True)
schedule_at = models.DateTimeField(validators=[validate_schedule_at])
task_id = models.CharField(max_length=36, blank=True, editable=False)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
    @property
    def task_status(self):
        if not self.task_id:
            return states.PENDING
        # The reverse one-to-one raises DoesNotExist (an AttributeError subclass)
        # when no response has been saved yet, so probe it with hasattr.
        if hasattr(self, 'httpresponse'):
            return states.SUCCESS
        in_celery = sys.argv and sys.argv[0].endswith('celery') and 'worker' in sys.argv
        if in_celery:
            # Inside a worker, AsyncResult may only be inspected within
            # allow_join_result() to bypass Celery's deadlock guard.
            with allow_join_result():
                result = AsyncResult(self.task_id)
        else:
            result = AsyncResult(self.task_id)
        return result.state
def __str__(self):
return f'{self.url} ({self.method}) at {self.schedule_at}'
class HttpResponse(models.Model):
request = models.OneToOneField(HttpRequest, on_delete=models.CASCADE)
status_code = models.PositiveIntegerField()
headers = JSONField()
text = models.TextField(blank=True)
def __str__(self):
return f'Response from url {self.request} ({self.request.method}): {self.status_code}'
|
[
"django.db.models.URLField",
"django.db.models.OneToOneField",
"django.db.models.TextField",
"django.core.exceptions.ValidationError",
"django.db.models.CharField",
"django.utils.timezone.now",
"django.db.models.PositiveIntegerField",
"celery.result.AsyncResult",
"django.db.models.DateTimeField",
"django.utils.translation.ugettext_lazy",
"celery.result.allow_join_result"
] |
[((753, 770), 'django.db.models.URLField', 'models.URLField', ([], {}), '()\n', (768, 770), False, 'from django.db import models\n'), ((784, 838), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(8)', 'choices': 'METHOD_CHOICES'}), '(max_length=8, choices=METHOD_CHOICES)\n', (800, 838), False, 'from django.db import models\n'), ((961, 1016), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'validators': '[validate_schedule_at]'}), '(validators=[validate_schedule_at])\n', (981, 1016), False, 'from django.db import models\n'), ((1031, 1090), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(36)', 'blank': '(True)', 'editable': '(False)'}), '(max_length=36, blank=True, editable=False)\n', (1047, 1090), False, 'from django.db import models\n'), ((1105, 1144), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1125, 1144), False, 'from django.db import models\n'), ((1159, 1194), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (1179, 1194), False, 'from django.db import models\n'), ((1794, 1853), 'django.db.models.OneToOneField', 'models.OneToOneField', (['HttpRequest'], {'on_delete': 'models.CASCADE'}), '(HttpRequest, on_delete=models.CASCADE)\n', (1814, 1853), False, 'from django.db import models\n'), ((1872, 1901), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (1899, 1901), False, 'from django.db import models\n'), ((1939, 1967), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (1955, 1967), False, 'from django.db import models\n'), ((346, 360), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (358, 360), False, 'from django.utils import timezone\n'), ((376, 434), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Request schedule cannot be in the past!"""'], {}), "('Request schedule cannot be in the past!')\n", (391, 434), False, 'from django.core.exceptions import ValidationError\n'), ((615, 623), 'django.utils.translation.ugettext_lazy', '_', (['"""Get"""'], {}), "('Get')\n", (616, 623), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((641, 650), 'django.utils.translation.ugettext_lazy', '_', (['"""Head"""'], {}), "('Head')\n", (642, 650), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((668, 677), 'django.utils.translation.ugettext_lazy', '_', (['"""Post"""'], {}), "('Post')\n", (669, 677), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((694, 702), 'django.utils.translation.ugettext_lazy', '_', (['"""Put"""'], {}), "('Put')\n", (695, 702), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((722, 733), 'django.utils.translation.ugettext_lazy', '_', (['"""Delete"""'], {}), "('Delete')\n", (723, 733), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1599, 1624), 'celery.result.AsyncResult', 'AsyncResult', (['self.task_id'], {}), '(self.task_id)\n', (1610, 1624), False, 'from celery.result import AsyncResult, allow_join_result\n'), ((1492, 1511), 'celery.result.allow_join_result', 'allow_join_result', ([], {}), '()\n', (1509, 1511), False, 'from celery.result import AsyncResult, allow_join_result\n'), ((1538, 1563), 'celery.result.AsyncResult', 'AsyncResult', (['self.task_id'], {}), '(self.task_id)\n', (1549, 1563), False, 'from celery.result import AsyncResult, allow_join_result\n')]
|
from random import randint
cont = 0
while True:
    valor = int(input("Enter a value: "))
    conputador = randint(0, 10)
    total = conputador + valor
    tipo = " "
    while tipo not in "PI":
        tipo = str(input("Even [P] or odd [I]? ")).strip().upper()[0]
    print(f"You played {valor} and the computer played {conputador}")
    print("It came out even" if total % 2 == 0 else "It came out odd")
    if tipo == "P":
        if total % 2 == 0:
            print("You won!")
            cont += 1
        else:
            print("You lost!")
            break
    elif tipo == "I":
        if total % 2 == 1:
            print("You won!")
            cont += 1
        else:
            print("You lost!")
            break
    print("Let's play again...")
print(f"You won {cont} times!")
|
[
"random.randint"
] |
[((109, 123), 'random.randint', 'randint', (['(0)', '(10)'], {}), '(0, 10)\n', (116, 123), False, 'from random import randint\n')]
|
import pygame  # needed explicitly for pygame.time.set_timer below
from pygame.locals import *
from blast import Blast
from sound import Sound
from wentity import WEntity
from pygame.math import Vector2
from utils import *
WIDTH = 3 # line thickness
SCALE_FACTOR = 5.0
ACCELERATION = 250.0 # pixels per second
DAMPING = 0.57 # some damping
ANGULAR_SPEED = 180.0 # degrees per second
SHIP_WIREFRAME = [
Vector2(0.0, -5.0), Vector2(3.0, 4.0), Vector2(1.5, 2.0),
Vector2(-1.5, 2.0), Vector2(-3.0, 4.0)
]
THRUST_WIREFRAME = [
Vector2(1.0, 2.0), Vector2(0.0, 5.0), Vector2(-1.0, 2.0)
]
class Ship(WEntity):
def __init__(self, galaxy):
super().__init__(galaxy, "ship", GREEN, SHIP_WIREFRAME, WIDTH)
# ship initial position
self.position = Vector2(self.galaxy.rect.width/2,
self.galaxy.rect.height/2)
self.acceleration = ACCELERATION
self.damping = DAMPING
self.angular_speed = ANGULAR_SPEED
self.size = SCALE_FACTOR
self.shielded = True
self.firing = False
self.dying = False
def update(self, time_passed, event_list):
super().update(time_passed, event_list)
if self.galaxy.get_entity_by_name('score').game_status != GAME_RUNNING:
return
self.process_events(event_list)
if self.firing:
# build a new blast, set its position to the ship's,
# set its velocity vector to ship's orientation
# and then add it to the galaxy
blast = Blast(self.galaxy, Vector2(self.position), self.angle)
self.galaxy.add_entity(blast)
for entity in self.galaxy.get_entities_by_name('asteroid'):
            if not self.shielded and self.collide(entity):
                # if a rock hit me, I lose a life but I'm shielded for 2.5 s!
                # I also need to be positioned at the center of screen stationary,
                # and in the same angle I was born. The lives must be reduced by 1
                self.dying = True
                self.shield()
                pygame.time.set_timer(UNSHIELD_EVENT, 2500, 1)
self.position = Vector2(self.galaxy.rect.width/2,
self.galaxy.rect.height/2)
self.velocity = Vector2(0.0, 0.0)
self.angle = 0.0
self.galaxy.get_entity_by_name('score').update_lives(-1)
def render(self, surface):
super().render(surface)
if self.accelerating == FORWARD:
Sound().play('thrust')
self.wireframe = THRUST_WIREFRAME
super().render(surface)
self.wireframe = SHIP_WIREFRAME
if self.firing:
Sound().play('fire')
self.firing = False
if self.dying:
Sound().play('bang')
self.dying = False
def process_events(self, event_list):
for event in event_list:
if event.type == KEYDOWN:
if event.key == K_LEFT or event.key == K_a:
self.start_rotating(CCLOCKWISE)
if event.key == K_RIGHT or event.key == K_d:
self.start_rotating(CLOCKWISE)
if event.key == K_UP or event.key == K_w:
self.start_accelerating(FORWARD)
if event.key == K_SPACE:
self.fire()
if event.type == KEYUP:
if event.key == K_LEFT or event.key == K_a or \
event.key == K_RIGHT or event.key == K_d:
self.stop_rotating()
if event.key == K_UP or event.key == K_w:
self.stop_accelerating()
if event.type == UNSHIELD_EVENT:
self.unshield()
def fire(self):
self.firing = True
def unshield(self):
self.shielded = False
self.galaxy.get_entity_by_name('score').update_ship_shielded(False)
def shield(self):
self.shielded = True
self.galaxy.get_entity_by_name('score').update_ship_shielded(True)
|
[
"sound.Sound",
"pygame.math.Vector2"
] |
[((345, 363), 'pygame.math.Vector2', 'Vector2', (['(0.0)', '(-5.0)'], {}), '(0.0, -5.0)\n', (352, 363), False, 'from pygame.math import Vector2\n'), ((366, 383), 'pygame.math.Vector2', 'Vector2', (['(3.0)', '(4.0)'], {}), '(3.0, 4.0)\n', (373, 383), False, 'from pygame.math import Vector2\n'), ((385, 402), 'pygame.math.Vector2', 'Vector2', (['(1.5)', '(2.0)'], {}), '(1.5, 2.0)\n', (392, 402), False, 'from pygame.math import Vector2\n'), ((408, 426), 'pygame.math.Vector2', 'Vector2', (['(-1.5)', '(2.0)'], {}), '(-1.5, 2.0)\n', (415, 426), False, 'from pygame.math import Vector2\n'), ((428, 446), 'pygame.math.Vector2', 'Vector2', (['(-3.0)', '(4.0)'], {}), '(-3.0, 4.0)\n', (435, 446), False, 'from pygame.math import Vector2\n'), ((474, 491), 'pygame.math.Vector2', 'Vector2', (['(1.0)', '(2.0)'], {}), '(1.0, 2.0)\n', (481, 491), False, 'from pygame.math import Vector2\n'), ((493, 510), 'pygame.math.Vector2', 'Vector2', (['(0.0)', '(5.0)'], {}), '(0.0, 5.0)\n', (500, 510), False, 'from pygame.math import Vector2\n'), ((512, 530), 'pygame.math.Vector2', 'Vector2', (['(-1.0)', '(2.0)'], {}), '(-1.0, 2.0)\n', (519, 530), False, 'from pygame.math import Vector2\n'), ((716, 780), 'pygame.math.Vector2', 'Vector2', (['(self.galaxy.rect.width / 2)', '(self.galaxy.rect.height / 2)'], {}), '(self.galaxy.rect.width / 2, self.galaxy.rect.height / 2)\n', (723, 780), False, 'from pygame.math import Vector2\n'), ((1522, 1544), 'pygame.math.Vector2', 'Vector2', (['self.position'], {}), '(self.position)\n', (1529, 1544), False, 'from pygame.math import Vector2\n'), ((2146, 2210), 'pygame.math.Vector2', 'Vector2', (['(self.galaxy.rect.width / 2)', '(self.galaxy.rect.height / 2)'], {}), '(self.galaxy.rect.width / 2, self.galaxy.rect.height / 2)\n', (2153, 2210), False, 'from pygame.math import Vector2\n'), ((2279, 2296), 'pygame.math.Vector2', 'Vector2', (['(0.0)', '(0.0)'], {}), '(0.0, 0.0)\n', (2286, 2296), False, 'from pygame.math import Vector2\n'), ((2521, 2528), 'sound.Sound', 'Sound', ([], {}), '()\n', (2526, 2528), False, 'from sound import Sound\n'), ((2707, 2714), 'sound.Sound', 'Sound', ([], {}), '()\n', (2712, 2714), False, 'from sound import Sound\n'), ((2796, 2803), 'sound.Sound', 'Sound', ([], {}), '()\n', (2801, 2803), False, 'from sound import Sound\n')]
|
from django.contrib import admin
from messenger.models import ChatSession
class ChatSessionAdmin(admin.ModelAdmin):
readonly_fields = ['uuid', 'user_id']
list_display = ['uuid', 'state', 'user_id']
list_filter = ['state']
admin.site.register(ChatSession, ChatSessionAdmin)
|
[
"django.contrib.admin.site.register"
] |
[((238, 288), 'django.contrib.admin.site.register', 'admin.site.register', (['ChatSession', 'ChatSessionAdmin'], {}), '(ChatSession, ChatSessionAdmin)\n', (257, 288), False, 'from django.contrib import admin\n')]
|
from typing import Optional, List
import torch
import torchvision
import numpy as np
from ..basic_typing import Datasets
from ..train import SequenceArray
from ..train import SamplerRandom, SamplerSequential
import functools
import collections
import os
from ..transforms import Transform
from typing_extensions import Literal
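# PIL images arrive as HxWxC arrays while torch models expect NxCxHxW, so the
# helpers below transpose/cast accordingly and add the batch (and, for the
# integer segmentation maps, channel) dimensions.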
def image_to_torch(i):
return torch.from_numpy(np.array(i).transpose((2, 0, 1))).unsqueeze(0)
def segmentation_to_torch(i):
return torch.from_numpy(np.array(i)).type(torch.int64).unsqueeze(0).unsqueeze(0)
def load_case(batch, dataset, transform):
case_ids = batch['case_id']
images = []
segmentations = []
for case_id in case_ids:
image, segmentation = dataset[case_id]
images.append(image_to_torch(image))
segmentations.append(segmentation_to_torch(segmentation))
data_batch = {
'case_id': case_ids,
'image': torch.cat(images),
'segmentation': torch.cat(segmentations)
}
if transform is not None:
data_batch = transform(data_batch)
return data_batch
def create_cityscapes_dataset(
batch_size: int = 32,
root: Optional[str] = None,
transform_train: Optional[List[Transform]] = None,
transform_valid: Optional[List[Transform]] = None,
nb_workers: int = 4,
target_type: Literal['semantic'] = 'semantic') -> Datasets:
"""
Load the cityscapes dataset. This requires to register on their website https://www.cityscapes-dataset.com/
and manually download the dataset.
The dataset is composed of 3 parts: gtCoarse, gtFine, leftImg8bit. Download each package and unzip in a
folder (e.g., `cityscapes`)
Args:
batch_size:
root: the folder containing the 3 unzipped cityscapes data `gtCoarse`, `gtFine`, `leftImg8bit`
transform_train: the transform to apply on the training batches
transform_valid: the transform to apply on the validation batches
nb_workers: the number of workers for each split allocated to the data loading and processing
target_type: the segmentation task
Returns:
a dict of splits. Each split is a :class:`trw.train.Sequence`
"""
if root is None:
# first, check if we have some environment variables configured
root = os.environ.get('TRW_DATA_ROOT')
if root is None:
# else default a standard folder
root = './data'
cityscapes_path = os.path.join(root, 'cityscapes')
train_dataset = torchvision.datasets.cityscapes.Cityscapes(cityscapes_path, mode='fine', split='train', target_type=target_type)
valid_dataset = torchvision.datasets.cityscapes.Cityscapes(cityscapes_path, mode='fine', split='val', target_type=target_type)
train_sampler = SamplerRandom(batch_size=batch_size)
train_sequence = SequenceArray({'case_id': np.arange(len(train_dataset))}, sampler=train_sampler)
train_sequence = train_sequence.map(
functools.partial(load_case, dataset=train_dataset, transform=transform_train), nb_workers=nb_workers)
valid_sampler = SamplerSequential(batch_size=batch_size)
valid_sequence = SequenceArray({'case_id': np.arange(len(valid_dataset))}, sampler=valid_sampler)
valid_sequence = valid_sequence.map(
functools.partial(load_case, dataset=valid_dataset, transform=transform_valid), nb_workers=nb_workers)
dataset = collections.OrderedDict([
('train', train_sequence),
('valid', valid_sequence)
])
return collections.OrderedDict([
('cityscapes', dataset)
])
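# A minimal usage sketch (the root path is illustrative, not from the original
# source): iterate over the training batches produced by the split sequences.
#
#   datasets = create_cityscapes_dataset(batch_size=8, root='/data')
#   for batch in datasets['cityscapes']['train']:
#       images, segmentations = batch['image'], batch['segmentation']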
|
[
"functools.partial",
"torch.cat",
"torchvision.datasets.cityscapes.Cityscapes",
"os.environ.get",
"numpy.array",
"collections.OrderedDict",
"os.path.join"
] |
[((2469, 2501), 'os.path.join', 'os.path.join', (['root', '"""cityscapes"""'], {}), "(root, 'cityscapes')\n", (2481, 2501), False, 'import os\n'), ((2522, 2638), 'torchvision.datasets.cityscapes.Cityscapes', 'torchvision.datasets.cityscapes.Cityscapes', (['cityscapes_path'], {'mode': '"""fine"""', 'split': '"""train"""', 'target_type': 'target_type'}), "(cityscapes_path, mode='fine',\n split='train', target_type=target_type)\n", (2564, 2638), False, 'import torchvision\n'), ((2655, 2769), 'torchvision.datasets.cityscapes.Cityscapes', 'torchvision.datasets.cityscapes.Cityscapes', (['cityscapes_path'], {'mode': '"""fine"""', 'split': '"""val"""', 'target_type': 'target_type'}), "(cityscapes_path, mode='fine',\n split='val', target_type=target_type)\n", (2697, 2769), False, 'import torchvision\n'), ((3409, 3488), 'collections.OrderedDict', 'collections.OrderedDict', (["[('train', train_sequence), ('valid', valid_sequence)]"], {}), "([('train', train_sequence), ('valid', valid_sequence)])\n", (3432, 3488), False, 'import collections\n'), ((3523, 3573), 'collections.OrderedDict', 'collections.OrderedDict', (["[('cityscapes', dataset)]"], {}), "([('cityscapes', dataset)])\n", (3546, 3573), False, 'import collections\n'), ((916, 933), 'torch.cat', 'torch.cat', (['images'], {}), '(images)\n', (925, 933), False, 'import torch\n'), ((959, 983), 'torch.cat', 'torch.cat', (['segmentations'], {}), '(segmentations)\n', (968, 983), False, 'import torch\n'), ((2327, 2358), 'os.environ.get', 'os.environ.get', (['"""TRW_DATA_ROOT"""'], {}), "('TRW_DATA_ROOT')\n", (2341, 2358), False, 'import os\n'), ((2975, 3053), 'functools.partial', 'functools.partial', (['load_case'], {'dataset': 'train_dataset', 'transform': 'transform_train'}), '(load_case, dataset=train_dataset, transform=transform_train)\n', (2992, 3053), False, 'import functools\n'), ((3291, 3369), 'functools.partial', 'functools.partial', (['load_case'], {'dataset': 'valid_dataset', 'transform': 'transform_valid'}), '(load_case, dataset=valid_dataset, transform=transform_valid)\n', (3308, 3369), False, 'import functools\n'), ((382, 393), 'numpy.array', 'np.array', (['i'], {}), '(i)\n', (390, 393), True, 'import numpy as np\n'), ((489, 500), 'numpy.array', 'np.array', (['i'], {}), '(i)\n', (497, 500), True, 'import numpy as np\n')]
|
import socket
from threading import Thread
from typing import Union, Optional, List, Tuple
from time import sleep
from PIL import Image
from io import BytesIO
import re
class InvalidRTSPRequest(Exception):
pass
class RTSPPacket:
RTSP_VERSION = 'RTSP/1.0'
INVALID = -1
SETUP = 'SETUP'
PLAY = 'PLAY'
PAUSE = 'PAUSE'
TEARDOWN = 'TEARDOWN'
RESPONSE = 'RESPONSE'
def __init__(
self,
request_type,
video_file_path: Optional[str] = None,
sequence_number: Optional[int] = None,
dst_port: Optional[int] = None,
session_id: Optional[str] = None):
self.request_type = request_type
self.video_file_path = video_file_path
self.sequence_number = sequence_number
self.session_id = session_id
# if request_type SETUP
self.rtp_dst_port = dst_port
def __str__(self):
return (f"RTSPPacket({self.request_type}, "
f"{self.video_file_path}, "
f"{self.sequence_number}, "
f"{self.rtp_dst_port}, "
f"{self.session_id})")
@classmethod
def from_response(cls, response: bytes):
# only response format implemented, taken from server class:
# """
# <RTSP_VERSION> 200 OK\r\n
# CSeq: <SEQUENCE_NUMBER>\r\n
# Session: <SESSION_ID>\r\n
# """
match = re.match(
r"(?P<rtsp_version>RTSP/\d+.\d+) 200 OK\r?\n"
r"CSeq: (?P<sequence_number>\d+)\r?\n"
r"Session: (?P<session_id>\d+)\r?\n",
response.decode()
)
if match is None:
raise Exception(f"failed to parse RTSP response: {response}")
g = match.groupdict()
# not used, defaults to 1.0
# rtsp_version = g.get('rtsp_version')
sequence_number = g.get('sequence_number')
session_id = g.get('session_id')
try:
sequence_number = int(sequence_number)
except (ValueError, TypeError):
raise Exception(f"failed to parse sequence number: {response}")
if session_id is None:
raise Exception(f"failed to parse session id: {response}")
return cls(
request_type=RTSPPacket.RESPONSE,
sequence_number=sequence_number,
session_id=session_id
)
@classmethod
def build_response(cls, sequence_number: int, session_id: str):
response = '\r\n'.join((
f"{cls.RTSP_VERSION} 200 OK",
f"CSeq: {sequence_number}",
f"Session: {session_id}",
)) + '\r\n'
return response
@classmethod
def from_request(cls, request: bytes):
# loosely follows actual rtsp protocol, considering only SETUP, PLAY, PAUSE, and TEARDOWN
# https://en.wikipedia.org/wiki/Real_Time_Streaming_Protocol
match = re.match(
r"(?P<request_type>\w+) rtsp://(?P<video_file_path>\S+) (?P<rtsp_version>RTSP/\d+.\d+)\r?\n"
r"CSeq: (?P<sequence_number>\d+)\r?\n"
r"(Range: (?P<play_range>\w+=\d+-\d+\r?\n))?"
r"(Transport: .*client_port=(?P<dst_port>\d+).*\r?\n)?" # in case of SETUP request
r"(Session: (?P<session_id>\d+)\r?\n)?",
request.decode()
)
if match is None:
raise InvalidRTSPRequest(f"failed to parse request: {request}")
g = match.groupdict()
request_type = g.get('request_type')
if request_type not in (RTSPPacket.SETUP,
RTSPPacket.PLAY,
RTSPPacket.PAUSE,
RTSPPacket.TEARDOWN):
raise InvalidRTSPRequest(f"invalid request type: {request}")
video_file_path = g.get('video_file_path')
# not used, defaults to `RTSPPacket.RTSP_VERSION`
# rtsp_version = g.get('rtsp_version')
sequence_number = g.get('sequence_number')
dst_port = g.get('dst_port')
session_id = g.get('session_id')
if request_type == RTSPPacket.SETUP:
try:
dst_port = int(dst_port)
except (ValueError, TypeError):
raise InvalidRTSPRequest(f"failed to parse RTP port")
try:
sequence_number = int(sequence_number)
except (ValueError, TypeError):
raise InvalidRTSPRequest(f"failed to parse sequence number: {request}")
return cls(
request_type,
video_file_path,
sequence_number,
dst_port,
session_id
)
def to_request(self) -> bytes:
# loosely follows actual rtsp protocol, considering only SETUP, PLAY, PAUSE, and TEARDOWN
# https://en.wikipedia.org/wiki/Real_Time_Streaming_Protocol
if any((attr is None for attr in (self.request_type,
self.sequence_number,
self.session_id))):
raise InvalidRTSPRequest('missing one attribute of: `request_type`, `sequence_number`, `session_id`')
if self.request_type in (self.INVALID, self.RESPONSE):
raise InvalidRTSPRequest(f"invalid request type: {self}")
request_lines = [
f"{self.request_type} rtsp://{self.video_file_path} {self.RTSP_VERSION}",
f"CSeq: {self.sequence_number}",
]
if self.request_type == self.SETUP:
if self.rtp_dst_port is None:
raise InvalidRTSPRequest(f"missing RTP destination port: {self}")
request_lines.append(
f"Transport: RTP/UDP;client_port={self.rtp_dst_port}"
)
else:
request_lines.append(
f"Session: {self.session_id}"
)
request = '\r\n'.join(request_lines) + '\r\n'
return request.encode()
class InvalidPacketException(Exception):
pass
class RTPPacket:
# default header info
HEADER_SIZE = 12 # bytes
VERSION = 0b10 # 2 bits -> current version 2
PADDING = 0b0 # 1 bit
EXTENSION = 0b0 # 1 bit
CC = 0x0 # 4 bits
MARKER = 0b0 # 1 bit
SSRC = 0x00000000 # 32 bits
class TYPE:
MJPEG = 26
    def __init__(
            self,
            payload_type: Optional[int] = None,
            sequence_number: Optional[int] = None,
            timestamp: Optional[int] = None,
            payload: Optional[bytes] = None):
self.payload = payload
self.payload_type = payload_type
self.sequence_number = sequence_number
self.timestamp = timestamp
# b0 -> v0 v1 p x c0 c1 c2 c3
zeroth_byte = (self.VERSION << 6) | (self.PADDING << 5) | (self.EXTENSION << 4) | self.CC
# b1 -> m pt0 pt1 pt2 pt3 pt4 pt5 pt6
first_byte = (self.MARKER << 7) | self.payload_type
# b2 -> s0 s1 s2 s3 s4 s5 s6 s7
second_byte = self.sequence_number >> 8
# b3 -> s8 s9 s10 s11 s12 s13 s14 s15
third_byte = self.sequence_number & 0xFF
# b4~b7 -> timestamp
fourth_to_seventh_bytes = [
(self.timestamp >> shift) & 0xFF for shift in (24, 16, 8, 0)
]
# b8~b11 -> ssrc
eigth_to_eleventh_bytes = [
(self.SSRC >> shift) & 0xFF for shift in (24, 16, 8, 0)
]
self.header = bytes((
zeroth_byte,
first_byte,
second_byte,
third_byte,
*fourth_to_seventh_bytes,
*eigth_to_eleventh_bytes,
))
@classmethod
def from_packet(cls, packet: bytes):
if len(packet) < cls.HEADER_SIZE:
raise InvalidPacketException(f"The packet {repr(packet)} is invalid")
header = packet[:cls.HEADER_SIZE]
payload = packet[cls.HEADER_SIZE:]
# b1 -> m pt0 ... pt6
# i.e. payload type is whole byte except first bit
payload_type = header[1] & 0x7F
# b2 -> s0 ~ s7
# b3 -> s8 ~ s15
# i.e. sequence number is b2<<8 | b3
sequence_number = header[2] << 8 | header[3]
# b4 ~ b7 -> t0 ~ t31
timestamp = 0
for i, b in enumerate(header[4:8]):
timestamp = timestamp | b << (3 - i) * 8
return cls(
payload_type,
sequence_number,
timestamp,
payload
)
def get_packet(self) -> bytes:
return bytes((*self.header, *self.payload))
def print_header(self):
# print header without SSRC
for i, by in enumerate(self.header[:8]):
s = ' '.join(f"{by:08b}")
# break line after the third and seventh bytes
print(s, end=' ' if i not in (3, 7) else '\n')
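# A small round-trip sketch (illustrative values, not from the original
# source): build an RTP packet, serialize it, and parse it back.
def _demo_rtp_roundtrip():
    sent = RTPPacket(payload_type=RTPPacket.TYPE.MJPEG, sequence_number=7,
                     timestamp=123456, payload=b'\xff\xd8\xff\xd9')
    received = RTPPacket.from_packet(sent.get_packet())
    assert received.sequence_number == 7
    assert received.timestamp == 123456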
class Client:
DEFAULT_CHUNK_SIZE = 128 * 1024
DEFAULT_RECV_DELAY = 20 # in milliseconds
DEFAULT_LOCAL_HOST = '0.0.0.0'
RTP_SOFT_TIMEOUT = 5 # in milliseconds
# for allowing simulated non-blocking operations
# (useful for keyboard break)
    RTSP_SOFT_TIMEOUT = 1  # in milliseconds
    # a chunk that ends with the JPEG EOF marker (b'\xff\xd9') is treated as
    # the last chunk of the current frame (see _recv_rtp_packet)
    PACKET_HEADER_LENGTH = 5
def __init__(
self,
file_path: str,
remote_host_address: str,
remote_host_port: int,
rtp_port: int):
self._rtsp_connection: Union[None, socket.socket] = None
self._rtp_socket: Union[None, socket.socket] = None
self._rtp_receive_thread: Union[None, Thread] = None
self._frame_buffer: List[Image.Image] = []
self._current_sequence_number = 0
self.session_id = ''
self.current_frame_number = -1
self.is_rtsp_connected = False
self.is_receiving_rtp = False
self.file_path = file_path
self.remote_host_address = remote_host_address
self.remote_host_port = remote_host_port
self.rtp_port = rtp_port
    def get_next_frame(self) -> Optional[Tuple[Image.Image, int]]:
        if self._frame_buffer:
            self.current_frame_number += 1
            # pop the oldest decoded frame together with its running index
            return self._frame_buffer.pop(0), self.current_frame_number
        return None
@staticmethod
def _get_frame_from_packet(packet: RTPPacket) -> Image.Image:
# the payload is already the jpeg
raw = packet.payload
frame = Image.open(BytesIO(raw))
return frame
def _recv_rtp_packet(self, size=DEFAULT_CHUNK_SIZE) -> RTPPacket:
recv = bytes()
print('Waiting RTP packet...')
while True:
try:
recv += self._rtp_socket.recv(size)
print('packet', len(recv))
if recv.endswith(b'\xff\xd9'): # VideoStream.JPEG_EOF = b'\xff\xd9'
break
except socket.timeout:
continue
except Exception as e:
print(e)
# print(f"Received from server: {repr(recv)}")
return RTPPacket.from_packet(recv)
def _start_rtp_receive_thread(self):
self._rtp_receive_thread = Thread(target=self._handle_video_receive)
        self._rtp_receive_thread.daemon = True
self._rtp_receive_thread.start()
def _handle_video_receive(self):
self._rtp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self._rtp_socket.bind((self.DEFAULT_LOCAL_HOST, self.rtp_port))
self._rtp_socket.settimeout(self.RTP_SOFT_TIMEOUT / 1000.)
while True:
if not self.is_receiving_rtp:
sleep(self.RTP_SOFT_TIMEOUT/1000.) # diminish cpu hogging
continue
packet = self._recv_rtp_packet()
frame = self._get_frame_from_packet(packet)
self._frame_buffer.append(frame)
def establish_rtsp_connection(self):
if self.is_rtsp_connected:
print('RTSP is already connected.')
return
print(f"Connecting to {self.remote_host_address}:{self.remote_host_port}...")
self._rtsp_connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._rtsp_connection.connect((self.remote_host_address, self.remote_host_port))
self._rtsp_connection.settimeout(self.RTSP_SOFT_TIMEOUT / 1000.)
self.is_rtsp_connected = True
def close_rtsp_connection(self):
if not self.is_rtsp_connected:
print('RTSP is not connected.')
return
self._rtsp_connection.close()
self.is_rtsp_connected = False
if self._rtp_socket:
self._rtp_socket.close()
def _send_request(self, request_type=RTSPPacket.INVALID) -> RTSPPacket:
if not self.is_rtsp_connected:
raise Exception('rtsp connection not established. run `setup_rtsp_connection()`')
request = RTSPPacket(
request_type,
self.file_path,
self._current_sequence_number,
self.rtp_port,
self.session_id
).to_request()
print(f"Sending request: {repr(request)}")
self._rtsp_connection.send(request)
self._current_sequence_number += 1
return self._get_response()
def send_setup_request(self) -> RTSPPacket:
response = self._send_request(RTSPPacket.SETUP)
self._start_rtp_receive_thread()
self.session_id = response.session_id
return response
def send_play_request(self) -> RTSPPacket:
response = self._send_request(RTSPPacket.PLAY)
self.is_receiving_rtp = True
return response
def send_pause_request(self) -> RTSPPacket:
response = self._send_request(RTSPPacket.PAUSE)
self.is_receiving_rtp = False
return response
def send_teardown_request(self) -> RTSPPacket:
response = self._send_request(RTSPPacket.TEARDOWN)
self.is_receiving_rtp = False
self.is_rtsp_connected = False
return response
def _get_response(self, size=DEFAULT_CHUNK_SIZE) -> RTSPPacket:
rcv = None
while True:
try:
rcv = self._rtsp_connection.recv(size)
break
except socket.timeout:
continue
# print(f"Received from server: {repr(rcv)}")
response = RTSPPacket.from_response(rcv)
return response
|
[
"threading.Thread",
"io.BytesIO",
"socket.socket",
"time.sleep"
] |
[((11181, 11222), 'threading.Thread', 'Thread', ([], {'target': 'self._handle_video_receive'}), '(target=self._handle_video_receive)\n', (11187, 11222), False, 'from threading import Thread\n'), ((11378, 11426), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (11391, 11426), False, 'import socket\n'), ((12136, 12185), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (12149, 12185), False, 'import socket\n'), ((10475, 10487), 'io.BytesIO', 'BytesIO', (['raw'], {}), '(raw)\n', (10482, 10487), False, 'from io import BytesIO\n'), ((11644, 11681), 'time.sleep', 'sleep', (['(self.RTP_SOFT_TIMEOUT / 1000.0)'], {}), '(self.RTP_SOFT_TIMEOUT / 1000.0)\n', (11649, 11681), False, 'from time import sleep\n')]
|
import re
import spacy
spacy_model_name = 'en_core_web_lg'
if not spacy.util.is_package(spacy_model_name):
spacy.cli.download(spacy_model_name)
nlp = spacy.load(spacy_model_name)
def filter_sentence(sentence):
def sentence_length(s, min_len=8):
if len(s) < min_len:
return False
else:
return True
filters = [sentence_length]
return all([filter_(sentence) for filter_ in filters])
def gen_atomic_statements(sentence):
    """
    Handles enumerations of the form (1) ..., (2) ... by expanding them into
    one standalone statement per item.
    :param sentence: a sentence that may contain an enumerated clause
    :return: a list of atomic statements, or the sentence itself when nothing matches
    """
rex = r"\([abcdefghi123456789]\)([A-z \n,–:;-]+(\(?(?=[A-z]{2,})[A-z]+\)?[A-z \n,-–;]+)+)"
splits = re.split(rex, sentence)
main_sentence = splits[0] if splits is not None else None
subsentences = re.findall(rex, sentence)
atomic_statements = []
if main_sentence and subsentences:
        clean_main_sentence = re.sub(r'\([abcdefghi123456789]\)|\n', ' ', main_sentence).strip()
for subsentence in subsentences:
            clean_subsentence = re.sub(r'\([abcdefghi123456789]\)|\n', ' ', subsentence[0]).strip()
atomic_statements.append(clean_main_sentence + ' ' + clean_subsentence + '.')
return atomic_statements
else:
return sentence
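# A minimal usage sketch (illustrative sentence, not from the original source):
#
#   gen_atomic_statements("The firm shall (a) keep records and (b) report annually.")
#
# would split the main clause from each enumerated item and return one cleaned
# statement per item; the exact splits depend on the regex above.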
|
[
"spacy.cli.download",
"re.split",
"spacy.util.is_package",
"spacy.load",
"re.findall",
"re.sub"
] |
[((156, 184), 'spacy.load', 'spacy.load', (['spacy_model_name'], {}), '(spacy_model_name)\n', (166, 184), False, 'import spacy\n'), ((68, 107), 'spacy.util.is_package', 'spacy.util.is_package', (['spacy_model_name'], {}), '(spacy_model_name)\n', (89, 107), False, 'import spacy\n'), ((113, 149), 'spacy.cli.download', 'spacy.cli.download', (['spacy_model_name'], {}), '(spacy_model_name)\n', (131, 149), False, 'import spacy\n'), ((677, 700), 're.split', 're.split', (['rex', 'sentence'], {}), '(rex, sentence)\n', (685, 700), False, 'import re\n'), ((782, 807), 're.findall', 're.findall', (['rex', 'sentence'], {}), '(rex, sentence)\n', (792, 807), False, 'import re\n'), ((905, 964), 're.sub', 're.sub', (['"""\\\\([abcdefgh123456789]\\\\)|\\\\n"""', '""" """', 'main_sentence'], {}), "('\\\\([abcdefgh123456789]\\\\)|\\\\n', ' ', main_sentence)\n", (911, 964), False, 'import re\n'), ((1044, 1104), 're.sub', 're.sub', (['"""\\\\([abcdefgh123456789]\\\\)|\\\\n"""', '""" """', 'subsentence[0]'], {}), "('\\\\([abcdefgh123456789]\\\\)|\\\\n', ' ', subsentence[0])\n", (1050, 1104), False, 'import re\n')]
|
"""
This file contains code that will kick off training and testing processes
"""
import os, sys
import argparse
import json
import numpy as np
from experiments.UNetExperiment import UNetExperiment
from data_prep.HippocampusDatasetLoader import LoadHippocampusData
from torch.utils.data import random_split
class Config:
"""
Holds configuration parameters
"""
def __init__(self):
self.name = "Basic_unet"
self.root_dir = r"data/"
self.n_epochs = 10
self.learning_rate = 0.0002
self.batch_size = 8
self.patch_size = 64
self.test_results_dir = "out/results"
self.model_name = "" # the command line provided model name to save network weights in
self.weights_name = "" # the command line provided weights file name to load network weights from
self.test = False
def set_model_name(self, m):
self.model_name = m
def set_weights_name(self, w):
self.weights_name = w
def set_test(self, t):
self.test = t
if __name__ == "__main__":
# Get configuration
# TASK: Fill in parameters of the Config class and specify directory where the data is stored and
# directory where results will go
c = Config()
parser = argparse.ArgumentParser()
parser.add_argument("--weights", "-w", help="file name for saved model weights", action="store")
parser.add_argument("--modelname", "-m", help="model weights filename used for saving this model", action="store")
parser.add_argument("--testonly", "-t", help="test only, no training", action="store_true")
args = parser.parse_args()
if args.weights:
print("Will load model weights from", args.weights)
c.set_weights_name(args.weights)
else:
print("No pretrained model weights given. Will train a new model.")
if args.modelname:
print("Will store model weights in", args.modelname)
c.set_model_name(args.modelname)
if args.testonly:
# need to also provide a weights filename if we're only testing
print("Testing mode.")
c.set_test(True)
if not args.weights:
print("Please also provide a weights filename through -w")
sys.exit()
# Load data
print("Loading data...")
# TASK: LoadHippocampusData is not complete. Go to the implementation and complete it.
data = LoadHippocampusData(c.root_dir + "TrainingSet/", y_shape = c.patch_size, z_shape = c.patch_size)
# Create test-train-val split
# In a real world scenario you would probably do multiple splits for
# multi-fold training to improve your model quality
data_len = len(data)
keys = range(data_len)
# Here, random permutation of keys array would be useful in case if we do something like
# a k-fold training and combining the results.
# TASK: create three keys in the dictionary: "train", "val" and "test". In each key, store
# the array with indices of training volumes to be used for training, validation
# and testing respectively.
    train_proportion = 0.7
    val_proportion = 0.2
    test_proportion = 0.1  # the test split also absorbs any rounding remainder
    n_train = int(np.floor(train_proportion * data_len))
    n_val = int(np.floor(val_proportion * data_len))
    # random_split requires the lengths to sum exactly to len(keys), so give
    # the remainder left over by flooring to the test split
    splits = [n_train, n_val, data_len - n_train - n_val]
    train, val, test = random_split(keys, splits)
split = {"train": train,
"val": val,
"test": test}
# Set up and run experiment
# TASK: Class UNetExperiment has missing pieces. Go to the file and fill them in
exp = UNetExperiment(c, split, data)
# You could free up memory by deleting the dataset
# as it has been copied into loaders
del data
if not args.testonly:
# run training and validation
exp.run()
# prep and run testing
# TASK: Test method is not complete. Go to the method and complete it
results_json = exp.run_test()
results_json["config"] = vars(c)
with open(os.path.join(exp.out_dir, "results.json"), 'w') as out_file:
json.dump(results_json, out_file, indent=2, separators=(',', ': '))
|
[
"experiments.UNetExperiment.UNetExperiment",
"data_prep.HippocampusDatasetLoader.LoadHippocampusData",
"json.dump",
"argparse.ArgumentParser",
"numpy.floor",
"torch.utils.data.random_split",
"os.path.join",
"sys.exit"
] |
[((1293, 1318), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1316, 1318), False, 'import argparse\n'), ((2445, 2541), 'data_prep.HippocampusDatasetLoader.LoadHippocampusData', 'LoadHippocampusData', (["(c.root_dir + 'TrainingSet/')"], {'y_shape': 'c.patch_size', 'z_shape': 'c.patch_size'}), "(c.root_dir + 'TrainingSet/', y_shape=c.patch_size,\n z_shape=c.patch_size)\n", (2464, 2541), False, 'from data_prep.HippocampusDatasetLoader import LoadHippocampusData\n'), ((3404, 3430), 'torch.utils.data.random_split', 'random_split', (['keys', 'splits'], {}), '(keys, splits)\n', (3416, 3430), False, 'from torch.utils.data import random_split\n'), ((3655, 3685), 'experiments.UNetExperiment.UNetExperiment', 'UNetExperiment', (['c', 'split', 'data'], {}), '(c, split, data)\n', (3669, 3685), False, 'from experiments.UNetExperiment import UNetExperiment\n'), ((4138, 4205), 'json.dump', 'json.dump', (['results_json', 'out_file'], {'indent': '(2)', 'separators': "(',', ': ')"}), "(results_json, out_file, indent=2, separators=(',', ': '))\n", (4147, 4205), False, 'import json\n'), ((2280, 2290), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2288, 2290), False, 'import os, sys\n'), ((3226, 3263), 'numpy.floor', 'np.floor', (['(train_proportion * data_len)'], {}), '(train_proportion * data_len)\n', (3234, 3263), True, 'import numpy as np\n'), ((3285, 3320), 'numpy.floor', 'np.floor', (['(val_proportion * data_len)'], {}), '(val_proportion * data_len)\n', (3293, 3320), True, 'import numpy as np\n'), ((3342, 3378), 'numpy.floor', 'np.floor', (['(test_proportion * data_len)'], {}), '(test_proportion * data_len)\n', (3350, 3378), True, 'import numpy as np\n'), ((4069, 4110), 'os.path.join', 'os.path.join', (['exp.out_dir', '"""results.json"""'], {}), "(exp.out_dir, 'results.json')\n", (4081, 4110), False, 'import os, sys\n')]
|
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, Session
from sqlalchemy.exc import IntegrityError, InvalidRequestError
from db_management.DB import Base, Company, News, db_link
def add_company(company):
engine = create_engine(db_link) # pool_size=20, max_overflow=0
# Bind the engine to the metadata of the Base class so that the
# declaratives can be accessed through a DBSession instance
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
# A DBSession() instance establishes all conversations with the database
# and represents a "staging zone" for all the objects loaded into the
# database session object. Any change made against the objects in the
# session won't be persisted into the database until you call
# session.commit(). If you're not happy about the changes, you can
# revert all of them back to the last commit by calling
# session.rollback()
session = DBSession()
data = session.query(Company).all()
print(data)
if company in [el.name for el in data]:
return "There is already a Table with such name: {}".format(company)
    # Insert a company into the Company table
DBcompany = Company(name=company)
session.add(DBcompany)
try:
session.commit()
    except Exception:
session.rollback()
finally:
session.close()
return "The new table {} is created.".format(company)
def add_news(info_dict):
    # Insert a news row into the News table
engine = create_engine(db_link)
engine.pool_timeout = 60
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
company = info_dict['comp_index']
cur_company = session.query(Company).filter_by(name=company).first()
if not cur_company:
print("Not found. Creating company: {}".format(company))
        cur_company = Company(name=company)
        session.add(cur_company)
        # Flush so the new company is assigned a primary key for company_id below
        session.flush()
try:
        key = list(info_dict.keys())  # list() so the keys can be indexed below (dict_keys is not subscriptable in Python 3)
# WITH SOURCECOLLECTIONIDENTIFIER AND TITLE
#new_news = News(DATE=str(info_dict[key[0]]), SOURCECOLLECTIONIDENTIFIER= int(info_dict[key[1]]), SOURCECOMMONNAME=info_dict[key[2]], DOCUMENTIDENTIFIER=info_dict[key[3]], LOCATIONS=info_dict[key[4]],
# ORGANIZATIONS=info_dict[key[5]], TONE=info_dict[key[6]], GCAM=info_dict[key[7]], ALLNAMES=info_dict[key[8]], TITLE=info_dict[key[9]], company_id=info_dict[key[10]])
#WITHOUT SOURCECOLLECTIONIDENTIFIER AND TITLE
new_news = News(DATE=str(info_dict[key[0]]),
SOURCECOMMONNAME=info_dict[key[2]], DOCUMENTIDENTIFIER=info_dict[key[3]],
#LOCATIONS=info_dict[key[4]],
#TITLE=info_dict[key[9]],
ORGANIZATIONS=info_dict[key[5]], TONE=info_dict[key[6]], GCAM=info_dict[key[7]],
ALLNAMES=info_dict[key[8]], company_id=cur_company.id)
session.add(new_news)
session.commit()
except IntegrityError:
session.rollback()
return 'The link provided seems to exist in DB: {}'.format(info_dict[key[3]])
except InvalidRequestError:
session.rollback()
        return 'You are requesting access to a non-existent source'
try:
#print("COMMITING...")
session.commit()
    except Exception:
session.rollback()
finally:
session.close()
#print("The news has been successfully added")
|
[
"sqlalchemy.create_engine",
"sqlalchemy.orm.sessionmaker",
"db_management.DB.Company"
] |
[((249, 271), 'sqlalchemy.create_engine', 'create_engine', (['db_link'], {}), '(db_link)\n', (262, 271), False, 'from sqlalchemy import create_engine\n'), ((485, 510), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'bind': 'engine'}), '(bind=engine)\n', (497, 510), False, 'from sqlalchemy.orm import sessionmaker, Session\n'), ((1225, 1246), 'db_management.DB.Company', 'Company', ([], {'name': 'company'}), '(name=company)\n', (1232, 1246), False, 'from db_management.DB import Base, Company, News, db_link\n'), ((1534, 1556), 'sqlalchemy.create_engine', 'create_engine', (['db_link'], {}), '(db_link)\n', (1547, 1556), False, 'from sqlalchemy import create_engine\n'), ((1634, 1659), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'bind': 'engine'}), '(bind=engine)\n', (1646, 1659), False, 'from sqlalchemy.orm import sessionmaker, Session\n'), ((1911, 1932), 'db_management.DB.Company', 'Company', ([], {'name': 'company'}), '(name=company)\n', (1918, 1932), False, 'from db_management.DB import Base, Company, News, db_link\n')]
|
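
The commit/rollback/close boilerplate repeated in `add_company` and `add_news` above can be factored into a context manager. A sketch assuming the same `db_link` from db_management.DB; `session_scope` is a hypothetical helper name, and the engine is created once at module level rather than on every call:

from contextlib import contextmanager
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from db_management.DB import db_link

engine = create_engine(db_link)  # one engine for the process, not one per call
Session = sessionmaker(bind=engine)

@contextmanager
def session_scope():
    # Transactional scope: commit on success, roll back on error, always close
    session = Session()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()

With this helper, the body of `add_company` reduces to a single `with session_scope() as session:` block.
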
#"""Fun pligon...for HardcoreUserbot
#\nCode by @Hack12R
#type `.degi` and `.nehi` to see the fun.
#"""
import random, re
#from uniborg.util import admin_cmd
import asyncio
from telethon import events
from userbot.events import register
from asyncio import sleep
import time
from userbot import CMD_HELP
@register(outgoing=True, pattern="^.degi$")
async def _(event):
if not event.text[0].isalpha() and event.text[0] not in ("/", "#", "@", "!"):
await event.edit("wO")
await asyncio.sleep(0.7)
await event.edit("dEgI")
await asyncio.sleep(1)
await event.edit("tUm")
await asyncio.sleep(0.8)
await event.edit("EkBaR")
await asyncio.sleep(0.9)
await event.edit("mAnG")
await asyncio.sleep(1)
await event.edit("kAr")
await asyncio.sleep(0.8)
await event.edit("ToH")
await asyncio.sleep(0.7)
await event.edit("dEkHo")
await asyncio.sleep(1)
await event.edit("`wO dEgI tUm EkBaR mAnG kAr ToH dEkHo`")
@register(outgoing=True, pattern="^.nehi$")
async def _(event):
if not event.text[0].isalpha() and event.text[0] not in ("/", "#", "@", "!"):
await event.edit("wO")
await asyncio.sleep(0.7)
await event.edit("pAkKa")
await asyncio.sleep(1)
await event.edit("DeGi")
await asyncio.sleep(0.8)
await event.edit("Tu")
await asyncio.sleep(0.9)
await event.edit("MaNg")
await asyncio.sleep(1)
await event.edit("KaR")
await asyncio.sleep(0.8)
await event.edit("tOh")
await asyncio.sleep(0.7)
await event.edit("Dekh")
await asyncio.sleep(1)
await event.edit("`wO pAkKa DeGi Tu MaNg KaR tOh DeKh`")
CMD_HELP.update({
"degi":
".degi or .nehi\
\nUsage: Sabka Katega."
})
|
[
"userbot.CMD_HELP.update",
"asyncio.sleep",
"userbot.events.register"
] |
[((306, 348), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^.degi$"""'}), "(outgoing=True, pattern='^.degi$')\n", (314, 348), False, 'from userbot.events import register\n'), ((1040, 1082), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^.nehi$"""'}), "(outgoing=True, pattern='^.nehi$')\n", (1048, 1082), False, 'from userbot.events import register\n'), ((1770, 1838), 'userbot.CMD_HELP.update', 'CMD_HELP.update', (['{\'degi\': """.degi or .nehi\nUsage: Sabka Katega."""}'], {}), '({\'degi\': """.degi or .nehi\nUsage: Sabka Katega."""})\n', (1785, 1838), False, 'from userbot import CMD_HELP\n'), ((497, 515), 'asyncio.sleep', 'asyncio.sleep', (['(0.7)'], {}), '(0.7)\n', (510, 515), False, 'import asyncio\n'), ((563, 579), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (576, 579), False, 'import asyncio\n'), ((626, 644), 'asyncio.sleep', 'asyncio.sleep', (['(0.8)'], {}), '(0.8)\n', (639, 644), False, 'import asyncio\n'), ((693, 711), 'asyncio.sleep', 'asyncio.sleep', (['(0.9)'], {}), '(0.9)\n', (706, 711), False, 'import asyncio\n'), ((759, 775), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (772, 775), False, 'import asyncio\n'), ((822, 840), 'asyncio.sleep', 'asyncio.sleep', (['(0.8)'], {}), '(0.8)\n', (835, 840), False, 'import asyncio\n'), ((887, 905), 'asyncio.sleep', 'asyncio.sleep', (['(0.7)'], {}), '(0.7)\n', (900, 905), False, 'import asyncio\n'), ((954, 970), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (967, 970), False, 'import asyncio\n'), ((1231, 1249), 'asyncio.sleep', 'asyncio.sleep', (['(0.7)'], {}), '(0.7)\n', (1244, 1249), False, 'import asyncio\n'), ((1298, 1314), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (1311, 1314), False, 'import asyncio\n'), ((1362, 1380), 'asyncio.sleep', 'asyncio.sleep', (['(0.8)'], {}), '(0.8)\n', (1375, 1380), False, 'import asyncio\n'), ((1426, 1444), 'asyncio.sleep', 'asyncio.sleep', (['(0.9)'], {}), '(0.9)\n', (1439, 1444), False, 'import asyncio\n'), ((1492, 1508), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (1505, 1508), False, 'import asyncio\n'), ((1555, 1573), 'asyncio.sleep', 'asyncio.sleep', (['(0.8)'], {}), '(0.8)\n', (1568, 1573), False, 'import asyncio\n'), ((1620, 1638), 'asyncio.sleep', 'asyncio.sleep', (['(0.7)'], {}), '(0.7)\n', (1633, 1638), False, 'import asyncio\n'), ((1686, 1702), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (1699, 1702), False, 'import asyncio\n')]
|
# -*- coding: utf-8 -*-
from app import config
import logging
def debug(client, message):
try:
client.send_message(
message.from_user.id,
"Ниже находится информация, которая может оказаться полезной."
"\n\n**Информация о приложении:** \n`Version: {0}`\n`Commit: {1}`\n`Developer Mode: {2}`"
"\n\n**Информация о пользователе:** \n`User ID: {3}`\n`Message ID: {4}`\n`Language Code: {5}`".format(
config.VERSION, config.COMMIT, config.DEVELOPER_MODE, message.from_user.id, message.message_id,
message.from_user.language_code))
except Exception as e:
try:
client.send_message(
message.from_user.id,
"❗ Произошла непредвиденная ошибка при выполнении метода. Сообщите об этом администратору для более "
"быстрого ее исправления.")
except:
pass
logging.error("Произошла ошибка при попытке выполнения метода.", exc_info=True)
return e
|
[
"logging.error"
] |
[((968, 1047), 'logging.error', 'logging.error', (['"""An error occurred while trying to execute the method."""'], {'exc_info': '(True)'}), "('An error occurred while trying to execute the method.', exc_info=True)\n", (981, 1047), False, 'import logging\n')]
|
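
A small self-contained sketch of the reporting pattern used in `debug` above: passing `exc_info=True` makes `logging.error` attach the active exception's traceback to the record. The handler configuration is illustrative:

import logging

logging.basicConfig(format="%(asctime)s %(levelname)s %(message)s")

try:
    1 / 0
except ZeroDivisionError:
    # The current exception and its traceback are appended to the log output
    logging.error("An error occurred while trying to execute the method.", exc_info=True)
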
import pytest
from commodore import k8sobject
_test_objs = [
{
"apiVersion": "v1",
"kind": "ServiceAccount",
"metadata": {
"name": "test",
"namespace": "test",
},
},
{
"apiVersion": "v1",
"kind": "ServiceAccount",
"metadata": {
"name": "test-sa-2",
"namespace": "test",
},
},
{
"apiVersion": "v1",
"kind": "Pod",
"metadata": {
"name": "test",
"namespace": "test",
"labels": {
"name": "test",
},
},
"spec": {
"image": "image",
"command": "pause",
},
},
{
"apiVersion": "rbac.authorization.k8s.io/v1",
"kind": "Role",
"metadata": {
"name": "test-role",
"namespace": "test",
},
},
{
"apiVersion": "rbac.authorization.k8s.io/v1",
"kind": "Role",
"metadata": {
"name": "test-role",
"namespace": "test-2",
},
},
{
"apiVersion": "rbac.authorization.k8s.io/v1",
"kind": "ClusterRole",
"metadata": {
"name": "test-cr",
},
},
{
"apiVersion": "rbac.authorization.k8s.io/v1",
"kind": "ClusterRole",
"metadata": {
"name": "test-cr-2",
},
},
{
"test": "testing",
},
]
@pytest.mark.parametrize(
"k8sdict,expected",
zip(
[None] + _test_objs,
[
{
"kind": "",
"name": "",
"namespace": "",
},
{
"kind": "ServiceAccount",
"name": "test",
"namespace": "test",
},
{
"kind": "ServiceAccount",
"name": "test-sa-2",
"namespace": "test",
},
{
"kind": "Pod",
"name": "test",
"namespace": "test",
},
{
"kind": "Role",
"name": "test-role",
"namespace": "test",
"spec": {
"test": "testing",
},
},
{
"kind": "Role",
"name": "test-role",
"namespace": "test-2",
"spec": {
"test": "testing2",
},
},
{
"kind": "ClusterRole",
"namespace": "",
"name": "test-cr",
},
{
"kind": "ClusterRole",
"namespace": "",
"name": "test-cr-2",
},
{
"name": "",
"namespace": "",
"kind": "",
},
],
),
)
def test_k8sobject_constructor(k8sdict, expected):
o = k8sobject.K8sObject(k8sdict)
assert expected["kind"] == o._kind
assert expected["name"] == o._name
assert expected["namespace"] == o._namespace
_cluster_scoped_obj = k8sobject.K8sObject(
{
"apiVersion": "v1",
"kind": "Namespace",
"metadata": {
"name": "test",
"labels": {
"name": "test",
},
},
}
)
_ns_scoped_obj = k8sobject.K8sObject(
{
"apiVersion": "v1",
"kind": "ServiceAccount",
"metadata": {
"name": "test",
"labels": {
"name": "test",
},
},
}
)
@pytest.mark.parametrize(
"k8sdict,to_cluster_scoped,to_ns_scoped",
zip(
_test_objs,
[False, False, False, False, False, True, True, True],
[False, False, True, True, True, True, True, True],
),
)
def test_k8sobject_less_than(k8sdict, to_cluster_scoped, to_ns_scoped):
o = k8sobject.K8sObject(k8sdict)
assert (o < _cluster_scoped_obj) == to_cluster_scoped
assert (o < _ns_scoped_obj) == to_ns_scoped
assert (o > _cluster_scoped_obj) == (not to_cluster_scoped)
assert (o > _ns_scoped_obj) == (not to_ns_scoped)
@pytest.mark.parametrize("k8sdict_a", _test_objs)
@pytest.mark.parametrize("k8sdict_b", _test_objs)
def test_k8sobject_equal(k8sdict_a, k8sdict_b):
a = k8sobject.K8sObject(k8sdict_a)
b = k8sobject.K8sObject(k8sdict_b)
expect = False
if (
k8sdict_a.get("kind", "") == k8sdict_b.get("kind", "")
and k8sdict_a.get("metadata", {}).get("namespace", "")
== k8sdict_b.get("metadata", {}).get("namespace", "")
and k8sdict_a.get("metadata", {}).get("name", "")
== k8sdict_b.get("metadata", {}).get("name", "")
):
expect = True
assert (a == b) == expect
|
[
"pytest.mark.parametrize",
"commodore.k8sobject.K8sObject"
] |
[((3200, 3325), 'commodore.k8sobject.K8sObject', 'k8sobject.K8sObject', (["{'apiVersion': 'v1', 'kind': 'Namespace', 'metadata': {'name': 'test',\n 'labels': {'name': 'test'}}}"], {}), "({'apiVersion': 'v1', 'kind': 'Namespace', 'metadata': {\n 'name': 'test', 'labels': {'name': 'test'}}})\n", (3219, 3325), False, 'from commodore import k8sobject\n'), ((3441, 3570), 'commodore.k8sobject.K8sObject', 'k8sobject.K8sObject', (["{'apiVersion': 'v1', 'kind': 'ServiceAccount', 'metadata': {'name': 'test',\n 'labels': {'name': 'test'}}}"], {}), "({'apiVersion': 'v1', 'kind': 'ServiceAccount',\n 'metadata': {'name': 'test', 'labels': {'name': 'test'}}})\n", (3460, 3570), False, 'from commodore import k8sobject\n'), ((4241, 4289), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""k8sdict_a"""', '_test_objs'], {}), "('k8sdict_a', _test_objs)\n", (4264, 4289), False, 'import pytest\n'), ((4291, 4339), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""k8sdict_b"""', '_test_objs'], {}), "('k8sdict_b', _test_objs)\n", (4314, 4339), False, 'import pytest\n'), ((3020, 3048), 'commodore.k8sobject.K8sObject', 'k8sobject.K8sObject', (['k8sdict'], {}), '(k8sdict)\n', (3039, 3048), False, 'from commodore import k8sobject\n'), ((3985, 4013), 'commodore.k8sobject.K8sObject', 'k8sobject.K8sObject', (['k8sdict'], {}), '(k8sdict)\n', (4004, 4013), False, 'from commodore import k8sobject\n'), ((4396, 4426), 'commodore.k8sobject.K8sObject', 'k8sobject.K8sObject', (['k8sdict_a'], {}), '(k8sdict_a)\n', (4415, 4426), False, 'from commodore import k8sobject\n'), ((4435, 4465), 'commodore.k8sobject.K8sObject', 'k8sobject.K8sObject', (['k8sdict_b'], {}), '(k8sdict_b)\n', (4454, 4465), False, 'from commodore import k8sobject\n')]
|
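
Because `K8sObject` implements the comparison operators these tests exercise, ordering a list of manifests needs no key function. A brief usage sketch over the `_test_objs` fixtures above:

from commodore import k8sobject

objs = [k8sobject.K8sObject(d) for d in _test_objs]
# sorted() only needs K8sObject.__lt__; equal objects keep their input order
for o in sorted(objs):
    print(o._kind, o._namespace, o._name)
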
__author__ = 'traff'
import threading
import os
import sys
import tempfile
from _prof_imports import Stats, FuncStat, Function
try:
execfile=execfile #Not in Py3k
except NameError:
#We must redefine it in Py3k if it's not already there
def execfile(file, glob=None, loc=None):
if glob is None:
import sys
glob = sys._getframe().f_back.f_globals
if loc is None:
loc = glob
# It seems that the best way is using tokenize.open(): http://code.activestate.com/lists/python-dev/131251/
import tokenize
stream = tokenize.open(file) # @UndefinedVariable
try:
contents = stream.read()
finally:
stream.close()
#execute the script (note: it's important to compile first to have the filename set in debug mode)
exec(compile(contents+"\n", file, 'exec'), glob, loc)
def save_main_module(file, module_name):
sys.modules[module_name] = sys.modules['__main__']
sys.modules[module_name].__name__ = module_name
from imp import new_module
m = new_module('__main__')
sys.modules['__main__'] = m
if hasattr(sys.modules[module_name], '__loader__'):
setattr(m, '__loader__', getattr(sys.modules[module_name], '__loader__'))
m.__file__ = file
return m
class ProfDaemonThread(threading.Thread):
def __init__(self):
super(ProfDaemonThread, self).__init__()
self.setDaemon(True)
self.killReceived = False
def run(self):
self.OnRun()
def OnRun(self):
pass
def generate_snapshot_filepath(basepath, local_temp_dir=False, extension='.pstat'):
basepath = get_snapshot_basepath(basepath, local_temp_dir)
n = 0
path = basepath + extension
while os.path.exists(path):
n+=1
path = basepath + (str(n) if n>0 else '') + extension
return path
def get_snapshot_basepath(basepath, local_temp_dir):
if basepath is None:
basepath = 'snapshot'
if local_temp_dir:
basepath = os.path.join(tempfile.gettempdir(), os.path.basename(basepath.replace('\\', '/')))
return basepath
def stats_to_response(stats, m):
if stats is None:
return
ystats = Stats()
ystats.func_stats = []
m.ystats = ystats
for func, stat in stats.items():
path, line, func_name = func
cc, nc, tt, ct, callers = stat
func = Function()
func_stat = FuncStat()
func.func_stat = func_stat
ystats.func_stats.append(func)
func_stat.file = path
func_stat.line = line
func_stat.func_name = func_name
func_stat.calls_count = nc
func_stat.total_time = ct
func_stat.own_time = tt
func.callers = []
for f, s in callers.items():
caller_stat = FuncStat()
func.callers.append(caller_stat)
path, line, func_name = f
cc, nc, tt, ct = s
caller_stat.file = path
caller_stat.line = line
caller_stat.func_name = func_name
caller_stat.calls_count = cc
caller_stat.total_time = ct
caller_stat.own_time = tt
# m.validate()
|
[
"_prof_imports.Function",
"_prof_imports.FuncStat",
"tempfile.gettempdir",
"os.path.exists",
"_prof_imports.Stats",
"sys._getframe",
"imp.new_module",
"tokenize.open"
] |
[((1100, 1122), 'imp.new_module', 'new_module', (['"""__main__"""'], {}), "('__main__')\n", (1110, 1122), False, 'from imp import new_module\n'), ((1786, 1806), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (1800, 1806), False, 'import os\n'), ((2245, 2252), '_prof_imports.Stats', 'Stats', ([], {}), '()\n', (2250, 2252), False, 'from _prof_imports import Stats, FuncStat, Function\n'), ((2431, 2441), '_prof_imports.Function', 'Function', ([], {}), '()\n', (2439, 2441), False, 'from _prof_imports import Stats, FuncStat, Function\n'), ((2462, 2472), '_prof_imports.FuncStat', 'FuncStat', ([], {}), '()\n', (2470, 2472), False, 'from _prof_imports import Stats, FuncStat, Function\n'), ((600, 619), 'tokenize.open', 'tokenize.open', (['file'], {}), '(file)\n', (613, 619), False, 'import tokenize\n'), ((2065, 2086), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (2084, 2086), False, 'import tempfile\n'), ((2839, 2849), '_prof_imports.FuncStat', 'FuncStat', ([], {}), '()\n', (2847, 2849), False, 'from _prof_imports import Stats, FuncStat, Function\n'), ((358, 373), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (371, 373), False, 'import sys\n')]
|
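
A short illustration of why the `execfile` shim above reads the script through `tokenize.open()`: it decodes the source using the PEP 263 coding cookie (or a BOM) rather than the platform default encoding. The file name here is hypothetical:

import tokenize

with tokenize.open("some_script.py") as stream:  # honors "# -*- coding: ... -*-"
    contents = stream.read()
# Compile with the real filename so tracebacks and debuggers point at the file
exec(compile(contents + "\n", "some_script.py", "exec"), {"__name__": "__main__"})
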
"""
Create holdem game record table
"""
from yoyo import step
__depends__ = {'20211109_01_xKblp-change-comments-on-black-jack-record'}
steps = [
step("CREATE TABLE `holdemGameRecord` ( `userID` BIGINT NOT NULL , `moneyInvested` BIGINT NOT NULL , `status` INT NOT NULL COMMENT '0 represent in progress; 1 represent lose or fold; 2 represent win;' , `tableID` BIGINT NOT NULL , `time` TIMESTAMP NOT NULL , `tableUUID` VARCHAR(64) NOT NULL ) ENGINE = InnoDB;")
]
|
[
"yoyo.step"
] |
[((152, 474), 'yoyo.step', 'step', (['"""CREATE TABLE `holdemGameRecord` ( `userID` BIGINT NOT NULL , `moneyInvested` BIGINT NOT NULL , `status` INT NOT NULL COMMENT \'0 represent in progress; 1 represent lose or fold; 2 represent win;\' , `tableID` BIGINT NOT NULL , `time` TIMESTAMP NOT NULL , `tableUUID` VARCHAR(64) NOT NULL ) ENGINE = InnoDB;"""'], {}), '(\n "CREATE TABLE `holdemGameRecord` ( `userID` BIGINT NOT NULL , `moneyInvested` BIGINT NOT NULL , `status` INT NOT NULL COMMENT \'0 represent in progress; 1 represent lose or fold; 2 represent win;\' , `tableID` BIGINT NOT NULL , `time` TIMESTAMP NOT NULL , `tableUUID` VARCHAR(64) NOT NULL ) ENGINE = InnoDB;"\n )\n', (156, 474), False, 'from yoyo import step\n')]
|
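
yoyo's `step()` also accepts rollback SQL as a second argument, which the migration above omits; without it the migration cannot be rolled back. A hedged sketch of the same step with a rollback, with the CREATE statement abbreviated:

from yoyo import step

steps = [
    step(
        "CREATE TABLE `holdemGameRecord` ( ... ) ENGINE = InnoDB;",  # apply (abbreviated)
        "DROP TABLE `holdemGameRecord`;",  # rollback
    )
]
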
"""
Module that holds classes for performing I/O operations on GEOS geometry
objects. Specifically, this has Python implementations of WKB/WKT
reader and writer classes.
"""
from ctypes import byref, c_size_t
from django.contrib.gis.geos.base import GEOSBase
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.libgeos import GEOM_PTR
from django.contrib.gis.geos.prototypes import io as capi
class IOBase(GEOSBase):
"Base class for GEOS I/O objects."
def __init__(self):
# Getting the pointer with the constructor.
self.ptr = self.constructor()
def __del__(self):
# Cleaning up with the appropriate destructor.
if self._ptr: self.destructor(self._ptr)
### WKT Reading and Writing objects ###
# Non-public class for internal use because its `read` method returns
# _pointers_ instead of a GEOSGeometry object.
class _WKTReader(IOBase):
constructor = capi.wkt_reader_create
destructor = capi.wkt_reader_destroy
ptr_type = capi.WKT_READ_PTR
def read(self, wkt):
if not isinstance(wkt, basestring): raise TypeError
return capi.wkt_reader_read(self.ptr, wkt)
class WKTReader(_WKTReader):
def read(self, wkt):
"Returns a GEOSGeometry for the given WKT string."
return GEOSGeometry(super(WKTReader, self).read(wkt))
class WKTWriter(IOBase):
constructor = capi.wkt_writer_create
destructor = capi.wkt_writer_destroy
ptr_type = capi.WKT_WRITE_PTR
def write(self, geom):
"Returns the WKT representation of the given geometry."
return capi.wkt_writer_write(self.ptr, geom.ptr)
### WKB Reading and Writing objects ###
# Non-public class for the same reason as _WKTReader above.
class _WKBReader(IOBase):
constructor = capi.wkb_reader_create
destructor = capi.wkb_reader_destroy
ptr_type = capi.WKB_READ_PTR
def read(self, wkb):
"Returns a _pointer_ to C GEOS Geometry object from the given WKB."
if isinstance(wkb, buffer):
wkb_s = str(wkb)
return capi.wkb_reader_read(self.ptr, wkb_s, len(wkb_s))
elif isinstance(wkb, basestring):
return capi.wkb_reader_read_hex(self.ptr, wkb, len(wkb))
else:
raise TypeError
class WKBReader(_WKBReader):
def read(self, wkb):
"Returns a GEOSGeometry for the given WKB buffer."
return GEOSGeometry(super(WKBReader, self).read(wkb))
class WKBWriter(IOBase):
constructor = capi.wkb_writer_create
destructor = capi.wkb_writer_destroy
ptr_type = capi.WKB_WRITE_PTR
def write(self, geom):
"Returns the WKB representation of the given geometry."
return buffer(capi.wkb_writer_write(self.ptr, geom.ptr, byref(c_size_t())))
def write_hex(self, geom):
"Returns the HEXEWKB representation of the given geometry."
return capi.wkb_writer_write_hex(self.ptr, geom.ptr, byref(c_size_t()))
### WKBWriter Properties ###
# Property for getting/setting the byteorder.
def _get_byteorder(self):
return capi.wkb_writer_get_byteorder(self.ptr)
def _set_byteorder(self, order):
if not order in (0, 1): raise ValueError('Byte order parameter must be 0 (Big Endian) or 1 (Little Endian).')
capi.wkb_writer_set_byteorder(self.ptr, order)
byteorder = property(_get_byteorder, _set_byteorder)
# Property for getting/setting the output dimension.
def _get_outdim(self):
return capi.wkb_writer_get_outdim(self.ptr)
def _set_outdim(self, new_dim):
if not new_dim in (2, 3): raise ValueError('WKB output dimension must be 2 or 3')
capi.wkb_writer_set_outdim(self.ptr, new_dim)
outdim = property(_get_outdim, _set_outdim)
# Property for getting/setting the include srid flag.
def _get_include_srid(self):
return bool(ord(capi.wkb_writer_get_include_srid(self.ptr)))
def _set_include_srid(self, include):
if bool(include): flag = chr(1)
else: flag = chr(0)
capi.wkb_writer_set_include_srid(self.ptr, flag)
srid = property(_get_include_srid, _set_include_srid)
# Instances of the WKT and WKB reader/writer objects.
wkt_r = _WKTReader()
wkt_w = WKTWriter()
wkb_r = _WKBReader()
wkb_w = WKBWriter()
|
[
"django.contrib.gis.geos.prototypes.io.wkb_writer_set_include_srid",
"django.contrib.gis.geos.prototypes.io.wkb_writer_get_byteorder",
"ctypes.c_size_t",
"django.contrib.gis.geos.prototypes.io.wkb_writer_get_outdim",
"django.contrib.gis.geos.prototypes.io.wkt_writer_write",
"django.contrib.gis.geos.prototypes.io.wkb_writer_get_include_srid",
"django.contrib.gis.geos.prototypes.io.wkb_writer_set_byteorder",
"django.contrib.gis.geos.prototypes.io.wkb_writer_set_outdim",
"django.contrib.gis.geos.prototypes.io.wkt_reader_read"
] |
[((1192, 1227), 'django.contrib.gis.geos.prototypes.io.wkt_reader_read', 'capi.wkt_reader_read', (['self.ptr', 'wkt'], {}), '(self.ptr, wkt)\n', (1212, 1227), True, 'from django.contrib.gis.geos.prototypes import io as capi\n'), ((1653, 1694), 'django.contrib.gis.geos.prototypes.io.wkt_writer_write', 'capi.wkt_writer_write', (['self.ptr', 'geom.ptr'], {}), '(self.ptr, geom.ptr)\n', (1674, 1694), True, 'from django.contrib.gis.geos.prototypes import io as capi\n'), ((3131, 3170), 'django.contrib.gis.geos.prototypes.io.wkb_writer_get_byteorder', 'capi.wkb_writer_get_byteorder', (['self.ptr'], {}), '(self.ptr)\n', (3160, 3170), True, 'from django.contrib.gis.geos.prototypes import io as capi\n'), ((3335, 3381), 'django.contrib.gis.geos.prototypes.io.wkb_writer_set_byteorder', 'capi.wkb_writer_set_byteorder', (['self.ptr', 'order'], {}), '(self.ptr, order)\n', (3364, 3381), True, 'from django.contrib.gis.geos.prototypes import io as capi\n'), ((3540, 3576), 'django.contrib.gis.geos.prototypes.io.wkb_writer_get_outdim', 'capi.wkb_writer_get_outdim', (['self.ptr'], {}), '(self.ptr)\n', (3566, 3576), True, 'from django.contrib.gis.geos.prototypes import io as capi\n'), ((3712, 3757), 'django.contrib.gis.geos.prototypes.io.wkb_writer_set_outdim', 'capi.wkb_writer_set_outdim', (['self.ptr', 'new_dim'], {}), '(self.ptr, new_dim)\n', (3738, 3757), True, 'from django.contrib.gis.geos.prototypes import io as capi\n'), ((4087, 4135), 'django.contrib.gis.geos.prototypes.io.wkb_writer_set_include_srid', 'capi.wkb_writer_set_include_srid', (['self.ptr', 'flag'], {}), '(self.ptr, flag)\n', (4119, 4135), True, 'from django.contrib.gis.geos.prototypes import io as capi\n'), ((2988, 2998), 'ctypes.c_size_t', 'c_size_t', ([], {}), '()\n', (2996, 2998), False, 'from ctypes import byref, c_size_t\n'), ((3923, 3965), 'django.contrib.gis.geos.prototypes.io.wkb_writer_get_include_srid', 'capi.wkb_writer_get_include_srid', (['self.ptr'], {}), '(self.ptr)\n', (3955, 3965), True, 'from django.contrib.gis.geos.prototypes import io as capi\n'), ((2807, 2817), 'ctypes.c_size_t', 'c_size_t', ([], {}), '()\n', (2815, 2817), False, 'from ctypes import byref, c_size_t\n')]
|
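
A minimal round-trip sketch using the public classes defined in the module above (the underscored `_WKTReader` and `_WKBReader` return raw GEOS pointers, so the public pair is the one to use):

from django.contrib.gis.geos.io import WKTReader, WKTWriter

reader = WKTReader()
writer = WKTWriter()
geom = reader.read('POINT (5 23)')  # a GEOSGeometry instance
wkt = writer.write(geom)            # WKT text; numeric precision depends on GEOS
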
from django.urls import path
from admin import views
urlpatterns = [
path('manage/', views.AdminPanel.as_view(), name = 'admin-panel'),
path('manage/customer-list/', views.AdminCustomerListView.as_view(), name = 'admin-customer-list-view'),
path('manage/book-list/', views.AdminBookListView.as_view(), name = 'admin-book-list-view'),
path('manage/author-list/', views.AdminAuthorListView.as_view(), name = 'admin-author-list-view'),
path('manage/publisher-list/', views.AdminPublisherListView.as_view(), name = 'admin-publisher-list-view'),
path('manage/order-log/', views.AdminOrderLogView.as_view(), name = 'admin-order-log-view'),
path('manage/offer-list/', views.AdminOfferListView.as_view(), name = 'admin-offer-list-view'),
path('manage/borrows/', views.AdminBorrowsView.as_view(), name = 'admin-borrows-view'),
path('manage/plan-list/', views.AdminPlanListView.as_view(), name = 'admin-plan-list-view'),
# path('test/', views.Test.as_view(), name = 'test'),
]
|
[
"admin.views.AdminBookListView.as_view",
"admin.views.AdminOfferListView.as_view",
"admin.views.AdminBorrowsView.as_view",
"admin.views.AdminOrderLogView.as_view",
"admin.views.AdminPlanListView.as_view",
"admin.views.AdminPublisherListView.as_view",
"admin.views.AdminPanel.as_view",
"admin.views.AdminAuthorListView.as_view",
"admin.views.AdminCustomerListView.as_view"
] |
[((94, 120), 'admin.views.AdminPanel.as_view', 'views.AdminPanel.as_view', ([], {}), '()\n', (118, 120), False, 'from admin import views\n'), ((180, 217), 'admin.views.AdminCustomerListView.as_view', 'views.AdminCustomerListView.as_view', ([], {}), '()\n', (215, 217), False, 'from admin import views\n'), ((286, 319), 'admin.views.AdminBookListView.as_view', 'views.AdminBookListView.as_view', ([], {}), '()\n', (317, 319), False, 'from admin import views\n'), ((386, 421), 'admin.views.AdminAuthorListView.as_view', 'views.AdminAuthorListView.as_view', ([], {}), '()\n', (419, 421), False, 'from admin import views\n'), ((493, 531), 'admin.views.AdminPublisherListView.as_view', 'views.AdminPublisherListView.as_view', ([], {}), '()\n', (529, 531), False, 'from admin import views\n'), ((601, 634), 'admin.views.AdminOrderLogView.as_view', 'views.AdminOrderLogView.as_view', ([], {}), '()\n', (632, 634), False, 'from admin import views\n'), ((700, 734), 'admin.views.AdminOfferListView.as_view', 'views.AdminOfferListView.as_view', ([], {}), '()\n', (732, 734), False, 'from admin import views\n'), ((798, 830), 'admin.views.AdminBorrowsView.as_view', 'views.AdminBorrowsView.as_view', ([], {}), '()\n', (828, 830), False, 'from admin import views\n'), ((893, 926), 'admin.views.AdminPlanListView.as_view', 'views.AdminPlanListView.as_view', ([], {}), '()\n', (924, 926), False, 'from admin import views\n')]
|