code (string, 22 to 1.05M chars) | apis (list, 1 to 3.31k items) | extract_api (string, 75 to 3.25M chars)
---|---|---
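Each row below pairs a source file (code) with the fully qualified APIs it calls (apis) and a serialized list of call-site records (extract_api). Judging from the rows themselves, each record is an 8-tuple: call span, qualified API name, local name as written, parsed (args, kwargs), the matched call text, the span of the callee name, a flag marking aliased imports, and the originating import statement. A minimal decoding sketch follows; the helper name and the field layout are inferred from this dump, not taken from any published schema:

import ast

def iter_extract_api(cell):
    # Every element in the cell is a Python literal, so literal_eval suffices.
    for record in ast.literal_eval(cell):
        (span, qualified, local, argspec, call_text,
         name_span, aliased, import_stmt) = record  # assumed field order
        yield {'span': span, 'api': qualified, 'call': call_text.rstrip()}

For the first row, this yields a single record mapping span (453, 486) to setuptools.find_packages.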
from setuptools import setup, find_packages
with open("readme.md", "r") as fh:
long_description = fh.read()
setup(
name='vvspy',
py_modules=["vvspy"],
version='1.1.3',
license='MIT',
description='API Wrapper for VVS (Verkehrsverbund Stuttgart)',
author='zaanposni',
author_email='<EMAIL>',
url='https://github.com/FI18-Trainees/vvspy',
keywords=['VVS', 'API', 'STUTTGART', 'WRAPPER', 'JSON', 'REST', 'EFA', 'PYTHON'],
packages=find_packages(exclude=["*tests"]),
package_data={
"vvspy": ["vvspy/*"]
},
install_requires=[
'requests',
'typing',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
long_description=long_description,
long_description_content_type="text/markdown"
)
|
[
"setuptools.find_packages"
] |
[((453, 486), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['*tests']"}), "(exclude=['*tests'])\n", (466, 486), False, 'from setuptools import setup, find_packages\n')]
|
import os
import subprocess
from unittest import skip
from unittest.mock import patch
from plz.runner import run_command
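# Capture the starting directory once at import; the cwd-dependent tests below chdir back to it.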
starting_dir = os.getcwd()
def test_run_command_returns_int():
# Arrange
# Act
result = run_command("echo test")
# Assert
assert type(result) == int
@patch("subprocess.check_call")
def test_run_command_returns_1_if_CalledProcessError(mock_check_call):
# Arrange
mock_check_call.side_effect = subprocess.CalledProcessError
# Act
result = run_command('bash -c "exit 99"')
# Assert
assert result == 1
@patch("subprocess.check_call")
def test_run_command_returns_1_if_KeyboardInterrupt(mock_check_call):
# Arrange
mock_check_call.side_effect = KeyboardInterrupt
# Act
result = run_command('bash -c "exit 99"')
# Assert
assert result == 1
@skip("Error codes no longer supported")
def test_run_command_returns_exit_code():
# Arrange
# Act
result = run_command('bash -c "exit 99"')
# Assert
assert result == 99
@skip("returning output not currently supported")
def test_run_command_returns_output():
# Arrange
stdout = "\n".join(["1", "2", "3", "4"])
# Act
result = run_command('bash -c "for x in `seq 1 4`; do echo $x; done"')
# Assert
assert result[1] == stdout.split("\n")
def test_run_command_prints_to_stdout(capfd):
# Arrange
stdout = "\n".join(["1", "2", "3", "4"]) + "\n"
# Act
run_command('bash -c "for x in `seq 1 4`; do echo $x; done"')
out, err = capfd.readouterr()
# Assert
assert out == stdout
@skip("stdout parameter not currently supported")
def test_run_command_does_not_print_to_stdout_when_disabled(capfd):
# Arrange
# Act
run_command('bash -c "for x in `seq 1 4`; do echo $x; done"', std_output=False)
out, err = capfd.readouterr()
# Assert
assert out == ""
def test_run_command_accepts_env(capfd):
# Arrange
test_value = "this is a test"
# Act
run_command('bash -c "echo $FOO"', env={"FOO": test_value})
out, err = capfd.readouterr()
# Assert
assert out == "{}\n".format(test_value)
def test_run_command_simple_glob(capfd):
# Arrange
stdout = "\n".join(["plz/__init__.py"]) + "\n"
# Act
run_command("ls plz/__*.py")
out, err = capfd.readouterr()
# Assert
assert out == stdout
def test_run_command_glob_with_cwd(capfd):
"""
Integration test
Scenario: the plz.yaml file is "located" in the plz directory.
In this case, the user will be running something like: `plz ls`
"""
# Arrange
os.chdir(starting_dir)
stdout = "\n".join(["__init__.py"]) + "\n"
cwd = os.path.join(os.getcwd(), "plz")
# Act
run_command("ls __*.py", cwd=cwd)
out, err = capfd.readouterr()
# Assert
assert out == stdout
def test_run_command_glob_with_cwd_and_args(capfd):
"""
Integration test
Scenario: the plz.yaml file is "located" in the root of this repo, but
the command is run from the child plz directory.
In this case, the user will be running something like: `plz ls ../*.md`
"""
# Arrange
os.chdir(starting_dir)
stdout = "\n".join(["README.md"]) + "\n"
cwd = os.getcwd()
os.chdir("plz")
# Act
run_command("ls", cwd=cwd, args=["../*.md"])
out, err = capfd.readouterr()
# Assert
assert out == stdout
|
[
"plz.runner.run_command",
"os.getcwd",
"unittest.mock.patch",
"unittest.skip",
"os.chdir"
] |
[((138, 149), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (147, 149), False, 'import os\n'), ((299, 329), 'unittest.mock.patch', 'patch', (['"""subprocess.check_call"""'], {}), "('subprocess.check_call')\n", (304, 329), False, 'from unittest.mock import patch\n'), ((576, 606), 'unittest.mock.patch', 'patch', (['"""subprocess.check_call"""'], {}), "('subprocess.check_call')\n", (581, 606), False, 'from unittest.mock import patch\n'), ((841, 880), 'unittest.skip', 'skip', (['"""Error codes no longer supported"""'], {}), "('Error codes no longer supported')\n", (845, 880), False, 'from unittest import skip\n'), ((1035, 1083), 'unittest.skip', 'skip', (['"""returning output not currently supported"""'], {}), "('returning output not currently supported')\n", (1039, 1083), False, 'from unittest import skip\n'), ((1592, 1640), 'unittest.skip', 'skip', (['"""stdout parameter not currently supported"""'], {}), "('stdout parameter not currently supported')\n", (1596, 1640), False, 'from unittest import skip\n'), ((226, 250), 'plz.runner.run_command', 'run_command', (['"""echo test"""'], {}), "('echo test')\n", (237, 250), False, 'from plz.runner import run_command\n'), ((503, 535), 'plz.runner.run_command', 'run_command', (['"""bash -c "exit 99\\""""'], {}), '(\'bash -c "exit 99"\')\n', (514, 535), False, 'from plz.runner import run_command\n'), ((768, 800), 'plz.runner.run_command', 'run_command', (['"""bash -c "exit 99\\""""'], {}), '(\'bash -c "exit 99"\')\n', (779, 800), False, 'from plz.runner import run_command\n'), ((961, 993), 'plz.runner.run_command', 'run_command', (['"""bash -c "exit 99\\""""'], {}), '(\'bash -c "exit 99"\')\n', (972, 993), False, 'from plz.runner import run_command\n'), ((1206, 1267), 'plz.runner.run_command', 'run_command', (['"""bash -c "for x in `seq 1 4`; do echo $x; done\\""""'], {}), '(\'bash -c "for x in `seq 1 4`; do echo $x; done"\')\n', (1217, 1267), False, 'from plz.runner import run_command\n'), ((1454, 1515), 'plz.runner.run_command', 'run_command', (['"""bash -c "for x in `seq 1 4`; do echo $x; done\\""""'], {}), '(\'bash -c "for x in `seq 1 4`; do echo $x; done"\')\n', (1465, 1515), False, 'from plz.runner import run_command\n'), ((1738, 1817), 'plz.runner.run_command', 'run_command', (['"""bash -c "for x in `seq 1 4`; do echo $x; done\\""""'], {'std_output': '(False)'}), '(\'bash -c "for x in `seq 1 4`; do echo $x; done"\', std_output=False)\n', (1749, 1817), False, 'from plz.runner import run_command\n'), ((1993, 2052), 'plz.runner.run_command', 'run_command', (['"""bash -c "echo $FOO\\""""'], {'env': "{'FOO': test_value}"}), '(\'bash -c "echo $FOO"\', env={\'FOO\': test_value})\n', (2004, 2052), False, 'from plz.runner import run_command\n'), ((2268, 2296), 'plz.runner.run_command', 'run_command', (['"""ls plz/__*.py"""'], {}), "('ls plz/__*.py')\n", (2279, 2296), False, 'from plz.runner import run_command\n'), ((2607, 2629), 'os.chdir', 'os.chdir', (['starting_dir'], {}), '(starting_dir)\n', (2615, 2629), False, 'import os\n'), ((2735, 2768), 'plz.runner.run_command', 'run_command', (['"""ls __*.py"""'], {'cwd': 'cwd'}), "('ls __*.py', cwd=cwd)\n", (2746, 2768), False, 'from plz.runner import run_command\n'), ((3158, 3180), 'os.chdir', 'os.chdir', (['starting_dir'], {}), '(starting_dir)\n', (3166, 3180), False, 'import os\n'), ((3236, 3247), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3245, 3247), False, 'import os\n'), ((3252, 3267), 'os.chdir', 'os.chdir', (['"""plz"""'], {}), "('plz')\n", (3260, 3267), False, 'import os\n'), ((3283, 3327), 
'plz.runner.run_command', 'run_command', (['"""ls"""'], {'cwd': 'cwd', 'args': "['../*.md']"}), "('ls', cwd=cwd, args=['../*.md'])\n", (3294, 3327), False, 'from plz.runner import run_command\n'), ((2700, 2711), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2709, 2711), False, 'import os\n')]
|
import utils
from . import rnn
from . import vae
from . import common
from . import pooling
from . import manager
from . import encoder
from . import decoder
from . import nonlinear
from . import embedding
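# Flags are registered as --<parent>-<name>; argparse maps them to <parent>_<name>
# attributes, which ModelBuilder later reads back via self.get(f"{key}_...").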
def add_arguments(parser):
ModelArgumentConstructor(parser).add_all_arguments()
class ModelArgumentConstructor(object):
def __init__(self, parser):
self.parser = parser
@staticmethod
def joinargs(parent, name):
assert name is not None, "name cannot be None"
frags = [name]
if parent is not None:
frags.insert(0, parent)
return '-'.join(frags)
def add(self, name, parent=None, **kwargs):
self.parser.add_argument(f"--{self.joinargs(parent, name)}", **kwargs)
def add_module_argument(self, key, module):
modules = manager.get_module_names(module)
self.add(key, type=str, default=modules[0], choices=modules)
def add_nonlinear_argument(self, key):
self.add_module_argument(key, nonlinear)
def add_pooling_arguments(self, key):
self.add_module_argument(key, pooling)
def add_rnn_arguments(self, key):
self.add_module_argument(key, rnn)
self.add("layers", parent=key, type=int, default=1)
self.add("dynamic", parent=key, action="store_true", default=False)
self.add("dropout", parent=key, type=float, default=0)
def add_encoder_arguments(self, key):
self.add_module_argument(key, encoder)
self.add_rnn_arguments(self.joinargs(key, "cell"))
self.add_pooling_arguments(self.joinargs(key, "pooling"))
def add_decoder_arguments(self, key):
self.add_module_argument(key, decoder)
self.add_rnn_arguments(self.joinargs(key, "cell"))
def add_vsae_arguments(self, key):
self.add_module_argument(key, vae)
self.add("z-dim", parent=key, type=int, default=512)
self.add("word-dim", parent=key, type=int, default=300)
self.add("kld-scale", parent=key, type=float, default=1.0)
self.add_encoder_arguments(self.joinargs(key, "encoder"))
self.add_decoder_arguments(self.joinargs(key, "decoder"))
self.add("embed-freeze", parent=key, action="store_true", default=False)
def add_all_arguments(self):
self.add_nonlinear_argument("nonlinear")
self.add_vsae_arguments("vae")
class ModelBuilder(object):
def __init__(self, args, vocab):
self.args = args
self.vocab = vocab
self.vocab_size = len(vocab)
self.bos_idx = vocab.f2i.get(args.bos)
self.eos_idx = vocab.f2i.get(args.eos)
def get(self, key, default=None):
return getattr(self.args, key, default)
def get_module_cls(self, key, kwargs_map=None, fallback=None):
if fallback is None:
fallback = {}
if kwargs_map is None:
kwargs_map = {}
type = self.get(key)
cls = manager.get(type)
sub_kwargs = utils.map_val(type, kwargs_map,
ignore_err=True, fallback=fallback)
def create(*args, **kwargs):
return cls(*args, **kwargs, **sub_kwargs)
return create
def get_nonlinear_cls(self, key):
return self.get_module_cls(key)
def get_pooling_cls(self, key):
return self.get_module_cls(key)
def get_rnn_cls(self, key):
return self.get_module_cls(key, fallback=dict(
dynamic=self.get(f"{key}_dynamic"),
dropout=self.get(f"{key}_dropout"),
layers=self.get(f"{key}_layers")
))
def get_encoder_cls(self, key):
return self.get_module_cls(key, {
"last-state-rnn-encoder": dict(
rnn_cls=self.get_rnn_cls(f"{key}_cell")
),
"pooled-rnn-encoder": dict(
rnn_cls=self.get_rnn_cls(f"{key}_cell"),
pool_cls=self.get_pooling_cls(f"{key}_pooling")
)
})
def get_decoder_cls(self, key):
return self.get_module_cls(key, {
"rnn-decoder": dict(
rnn_cls=self.get_rnn_cls(f"{key}_cell")
),
"rnn-recalling-decoder": dict(
rnn_cls=self.get_rnn_cls(f"{key}_cell")
),
})
def get_embedding_cls(self, key):
return lambda *args, **kwargs: embedding.FineTunableEmbedding(
*args, **kwargs,
allow_padding=True,
freeze=self.get(f"{key}_embed_freeze"),
unfrozen_idx=[self.bos_idx, self.eos_idx]
)
def get_vsae_cls(self, key):
return self.get_module_cls(key, {
"variational-sentence-autoencoder": dict(
z_dim=self.get(f"{key}_z_dim"),
word_dim=self.get(f"{key}_word_dim"),
vocab_size=self.vocab_size,
kld_scale=self.get(f"{key}_kld_scale"),
emb_cls=self.get_embedding_cls(key),
enc_cls=self.get_encoder_cls(f"{key}_encoder"),
dec_cls=self.get_decoder_cls(f"{key}_decoder")
)
})
def build_model(*args, **kwargs):
builder = ModelBuilder(*args, **kwargs)
nonlinear.set_default(builder.get_nonlinear_cls("nonlinear"))
return builder.get_vsae_cls("vae")()
|
[
"utils.map_val"
] |
[((2959, 3026), 'utils.map_val', 'utils.map_val', (['type', 'kwargs_map'], {'ignore_err': '(True)', 'fallback': 'fallback'}), '(type, kwargs_map, ignore_err=True, fallback=fallback)\n', (2972, 3026), False, 'import utils\n')]
|
import json
import os
from django.utils.translation import ugettext_lazy as _
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRETS_DIR = os.path.join(BASE_DIR, 'secret')
SECRETS_BASE = os.path.join(SECRETS_DIR, 'base.json')
try:
secrets_base = json.load(open(SECRETS_BASE, 'rt'))
except FileNotFoundError:
import subprocess
subprocess.call(['python', 'generate_secrets.py'])
secrets_base = json.load(open(SECRETS_BASE, 'rt'))
"""
raise ImproperlyConfigured('Could not find secret file {}'.format(SECRETS_BASE))
"""
SECRET_KEY = secrets_base['SECRET_KEY']
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'storages',
'dashboard.apps.DashboardConfig'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware', # For translation
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'wsgi.application'
LOGIN_URL = '/login/'
LOGIN_REDIRECT_URL = '/apps/'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_USER_MODEL = 'dashboard.User'
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'ko-KR'
TIME_ZONE = 'Asia/Seoul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
LANGUAGES = [
('ko', _('Korean')),
('en', _('English')),
]
LOCALE_PATHS = (
os.path.join(BASE_DIR, 'locale'),
)
|
[
"django.utils.translation.ugettext_lazy",
"os.path.abspath",
"subprocess.call",
"os.path.join"
] |
[((165, 197), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""secret"""'], {}), "(BASE_DIR, 'secret')\n", (177, 197), False, 'import os\n'), ((213, 251), 'os.path.join', 'os.path.join', (['SECRETS_DIR', '"""base.json"""'], {}), "(SECRETS_DIR, 'base.json')\n", (225, 251), False, 'import os\n'), ((3235, 3272), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""staticfiles"""'], {}), "(BASE_DIR, 'staticfiles')\n", (3247, 3272), False, 'import os\n'), ((3298, 3330), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static"""'], {}), "(BASE_DIR, 'static')\n", (3310, 3330), False, 'import os\n'), ((3425, 3457), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""locale"""'], {}), "(BASE_DIR, 'locale')\n", (3437, 3457), False, 'import os\n'), ((123, 148), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (138, 148), False, 'import os\n'), ((365, 410), 'subprocess.call', 'subprocess.call', (['"""python generate_secrets.py"""'], {}), "('python generate_secrets.py')\n", (380, 410), False, 'import subprocess\n'), ((3361, 3372), 'django.utils.translation.ugettext_lazy', '_', (['"""Korean"""'], {}), "('Korean')\n", (3362, 3372), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3386, 3398), 'django.utils.translation.ugettext_lazy', '_', (['"""English"""'], {}), "('English')\n", (3387, 3398), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1747, 1782), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (1759, 1782), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from django.conf.urls.static import static
from django.conf import settings
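# Note: patterns() and string view paths are the pre-Django 1.8 URLconf style.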
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'app.thirdpart.views.main', name='Home'),
# url(r'^item/detail/$', 'app.thirdpart.views.item_detail', name='Detail'),
# url(r'^mysite/', include('mysite.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
#/admin/auth/user/
url(r'^$', 'app.customer.views.user.base'),
url(r'^homepage/$', 'app.customer.views.user.homepage'),
# url(r'^stat$', 'statistics.views.index.index', name="statistics_index"),
url(r'^audio/', include('app.audio.urls')),
url(r'^add/top', 'app.customer.views.user.add_top'),
url(r'^del/top', 'app.customer.views.user.delete_top'),
url(r'^top/position', 'app.customer.views.user.save_top_position'),
url(r'^admin/', include(admin.site.urls)),
url(r'^customer/', include('app.customer.urls')),
url(r'^signin/$', 'django.contrib.auth.views.login', {'template_name': 'signin.html'}, name="signin"),
url(r'^signout/$', 'django.contrib.auth.views.logout_then_login', name="signout"),
###################################################################################################################
# Static file serving handler
###################################################################################################################
url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT,}),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
[
"django.contrib.admin.autodiscover",
"django.conf.urls.static.static",
"django.conf.urls.url",
"django.conf.urls.include"
] |
[((238, 258), 'django.contrib.admin.autodiscover', 'admin.autodiscover', ([], {}), '()\n', (256, 258), False, 'from django.contrib import admin\n'), ((1797, 1860), 'django.conf.urls.static.static', 'static', (['settings.STATIC_URL'], {'document_root': 'settings.STATIC_ROOT'}), '(settings.STATIC_URL, document_root=settings.STATIC_ROOT)\n', (1803, 1860), False, 'from django.conf.urls.static import static\n'), ((708, 749), 'django.conf.urls.url', 'url', (['"""^$"""', '"""app.customer.views.user.base"""'], {}), "('^$', 'app.customer.views.user.base')\n", (711, 749), False, 'from django.conf.urls import patterns, include, url\n'), ((756, 810), 'django.conf.urls.url', 'url', (['"""^homepage/$"""', '"""app.customer.views.user.homepage"""'], {}), "('^homepage/$', 'app.customer.views.user.homepage')\n", (759, 810), False, 'from django.conf.urls import patterns, include, url\n'), ((944, 994), 'django.conf.urls.url', 'url', (['"""^add/top"""', '"""app.customer.views.user.add_top"""'], {}), "('^add/top', 'app.customer.views.user.add_top')\n", (947, 994), False, 'from django.conf.urls import patterns, include, url\n'), ((1001, 1054), 'django.conf.urls.url', 'url', (['"""^del/top"""', '"""app.customer.views.user.delete_top"""'], {}), "('^del/top', 'app.customer.views.user.delete_top')\n", (1004, 1054), False, 'from django.conf.urls import patterns, include, url\n'), ((1061, 1126), 'django.conf.urls.url', 'url', (['"""^top/position"""', '"""app.customer.views.user.save_top_position"""'], {}), "('^top/position', 'app.customer.views.user.save_top_position')\n", (1064, 1126), False, 'from django.conf.urls import patterns, include, url\n'), ((1236, 1340), 'django.conf.urls.url', 'url', (['"""^signin/$"""', '"""django.contrib.auth.views.login"""', "{'template_name': 'signin.html'}"], {'name': '"""signin"""'}), "('^signin/$', 'django.contrib.auth.views.login', {'template_name':\n 'signin.html'}, name='signin')\n", (1239, 1340), False, 'from django.conf.urls import patterns, include, url\n'), ((1343, 1428), 'django.conf.urls.url', 'url', (['"""^signout/$"""', '"""django.contrib.auth.views.logout_then_login"""'], {'name': '"""signout"""'}), "('^signout/$', 'django.contrib.auth.views.logout_then_login', name='signout'\n )\n", (1346, 1428), False, 'from django.conf.urls import patterns, include, url\n'), ((1690, 1792), 'django.conf.urls.url', 'url', (['"""^static/(?P<path>.*)$"""', '"""django.views.static.serve"""', "{'document_root': settings.STATIC_ROOT}"], {}), "('^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root':\n settings.STATIC_ROOT})\n", (1693, 1792), False, 'from django.conf.urls import patterns, include, url\n'), ((912, 937), 'django.conf.urls.include', 'include', (['"""app.audio.urls"""'], {}), "('app.audio.urls')\n", (919, 937), False, 'from django.conf.urls import patterns, include, url\n'), ((1150, 1174), 'django.conf.urls.include', 'include', (['admin.site.urls'], {}), '(admin.site.urls)\n', (1157, 1174), False, 'from django.conf.urls import patterns, include, url\n'), ((1200, 1228), 'django.conf.urls.include', 'include', (['"""app.customer.urls"""'], {}), "('app.customer.urls')\n", (1207, 1228), False, 'from django.conf.urls import patterns, include, url\n')]
|
from mmcv.cnn import ConvModule, build_norm_layer
from torch import nn
class InvertedResidual(nn.Module):
"""Inverted residual module.
Args:
in_channels (int): The input channels of the InvertedResidual block.
out_channels (int): The output channels of the InvertedResidual block.
stride (int): Stride of the middle (first) 3x3 convolution.
expand_ratio (int): adjusts number of channels of the hidden layer
in InvertedResidual by this amount.
conv_cfg (dict): Config dict for convolution layer.
Default: None, which means using conv2d.
norm_cfg (dict): Config dict for normalization layer.
Default: dict(type='BN').
act_cfg (dict): Config dict for activation layer.
Default: dict(type='ReLU6').
"""
def __init__(self,
in_channels,
out_channels,
stride,
expand_ratio,
dilation=1,
conv_cfg=None,
norm_cfg=dict(type='BN'),
act_cfg=dict(type='ReLU6')):
super(InvertedResidual, self).__init__()
self.stride = stride
assert stride in [1, 2]
hidden_dim = int(round(in_channels * expand_ratio))
self.use_res_connect = self.stride == 1 \
and in_channels == out_channels
layers = []
if expand_ratio != 1:
# pw
layers.append(
ConvModule(
in_channels,
hidden_dim,
kernel_size=1,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg,
act_cfg=act_cfg))
layers.extend([
# dw
ConvModule(
hidden_dim,
hidden_dim,
kernel_size=3,
padding=dilation,
stride=stride,
dilation=dilation,
groups=hidden_dim,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg,
act_cfg=act_cfg),
# pw-linear
nn.Conv2d(hidden_dim, out_channels, 1, 1, 0, bias=False),
build_norm_layer(norm_cfg, out_channels)[1],
])
self.conv = nn.Sequential(*layers)
def forward(self, x):
if self.use_res_connect:
return x + self.conv(x)
else:
return self.conv(x)
|
[
"mmcv.cnn.build_norm_layer",
"torch.nn.Conv2d",
"mmcv.cnn.ConvModule",
"torch.nn.Sequential"
] |
[((2287, 2309), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (2300, 2309), False, 'from torch import nn\n'), ((1486, 1595), 'mmcv.cnn.ConvModule', 'ConvModule', (['in_channels', 'hidden_dim'], {'kernel_size': '(1)', 'conv_cfg': 'conv_cfg', 'norm_cfg': 'norm_cfg', 'act_cfg': 'act_cfg'}), '(in_channels, hidden_dim, kernel_size=1, conv_cfg=conv_cfg,\n norm_cfg=norm_cfg, act_cfg=act_cfg)\n', (1496, 1595), False, 'from mmcv.cnn import ConvModule, build_norm_layer\n'), ((1767, 1951), 'mmcv.cnn.ConvModule', 'ConvModule', (['hidden_dim', 'hidden_dim'], {'kernel_size': '(3)', 'padding': 'dilation', 'stride': 'stride', 'dilation': 'dilation', 'groups': 'hidden_dim', 'conv_cfg': 'conv_cfg', 'norm_cfg': 'norm_cfg', 'act_cfg': 'act_cfg'}), '(hidden_dim, hidden_dim, kernel_size=3, padding=dilation, stride=\n stride, dilation=dilation, groups=hidden_dim, conv_cfg=conv_cfg,\n norm_cfg=norm_cfg, act_cfg=act_cfg)\n', (1777, 1951), False, 'from mmcv.cnn import ConvModule, build_norm_layer\n'), ((2141, 2197), 'torch.nn.Conv2d', 'nn.Conv2d', (['hidden_dim', 'out_channels', '(1)', '(1)', '(0)'], {'bias': '(False)'}), '(hidden_dim, out_channels, 1, 1, 0, bias=False)\n', (2150, 2197), False, 'from torch import nn\n'), ((2211, 2251), 'mmcv.cnn.build_norm_layer', 'build_norm_layer', (['norm_cfg', 'out_channels'], {}), '(norm_cfg, out_channels)\n', (2227, 2251), False, 'from mmcv.cnn import ConvModule, build_norm_layer\n')]
|
from nltk import download
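# Called with no arguments, download() opens NLTK's interactive downloader instead of fetching a specific corpus.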
download()
|
[
"nltk.download"
] |
[((26, 36), 'nltk.download', 'download', ([], {}), '()\n', (34, 36), False, 'from nltk import download\n')]
|
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
df = pd.DataFrame({'Group': ['A', 'A', 'A', 'B', 'C', 'B', 'B', 'C', 'A', 'C'],
'Apple': np.random.rand(10),'Orange': np.random.rand(10)})
# df = df[['Group','Apple','Orange']]
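# Melt the wide columns into long format (Group, Fruits, value) so seaborn can group boxes by hue.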
dd = pd.melt(df, id_vars=['Group'], value_vars=['Apple', 'Orange'], var_name='Fruits')
sns.boxplot(x='Group', y='value', data=dd, hue='Fruits')
plt.show()
|
[
"numpy.random.rand",
"pandas.melt",
"seaborn.boxplot",
"matplotlib.pyplot.show"
] |
[((296, 382), 'pandas.melt', 'pd.melt', (['df'], {'id_vars': "['Group']", 'value_vars': "['Apple', 'Orange']", 'var_name': '"""Fruits"""'}), "(df, id_vars=['Group'], value_vars=['Apple', 'Orange'], var_name=\n 'Fruits')\n", (303, 382), True, 'import pandas as pd\n'), ((378, 434), 'seaborn.boxplot', 'sns.boxplot', ([], {'x': '"""Group"""', 'y': '"""value"""', 'data': 'dd', 'hue': '"""Fruits"""'}), "(x='Group', y='value', data=dd, hue='Fruits')\n", (389, 434), True, 'import seaborn as sns\n'), ((435, 445), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (443, 445), True, 'import matplotlib.pyplot as plt\n'), ((203, 221), 'numpy.random.rand', 'np.random.rand', (['(10)'], {}), '(10)\n', (217, 221), True, 'import numpy as np\n'), ((232, 250), 'numpy.random.rand', 'np.random.rand', (['(10)'], {}), '(10)\n', (246, 250), True, 'import numpy as np\n')]
|
from flask import (
Flask,
render_template,
Response,
stream_with_context,
send_from_directory,
)
from flask_cors import CORS
import os
import random
import json
import string
from time import sleep
from datetime import datetime, date, timedelta
def random_date(year_start=2000, year_end=2005):
"""Random datetime between 2 dates"""
start_date = datetime(year_start, 1, 1)
end_date = datetime(year_end, 1, 1)
time_between_dates = end_date - start_date
days_between_dates = time_between_dates.days
random_number_of_days = random.randrange(days_between_dates)
random_seconds = random.randrange(0, 60 * 60 * 24)
rand_date = start_date + timedelta(
days=random_number_of_days, seconds=random_seconds
)
return rand_date
def random_data(include_digits=False, include_nulls=False):
"""Generate a random string of fixed length"""
size = random.randint(10, 200)
if include_nulls and bool(random.getrandbits(1)):
rand_str = None
elif include_digits:
rand_str = "".join(
random.choice(string.ascii_letters + string.digits) for i in range(1, size)
)
else:
rand_str = "".join(random.choice(string.ascii_letters) for i in range(1, size))
return rand_str
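# Note: the generator below yields back-to-back JSON objects, not a JSON array;
# consumers must split the stream themselves.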
def generate(include_digits=False, include_nulls=False):
"""create and return data in small parts"""
for counter in range(1, 60):
obj = dict()
obj["id"] = counter
obj["date"] = random_date().strftime("%m/%d/%Y, %H:%M:%S %p")
obj["payload"] = random_data(include_digits, include_nulls)
json_obj = json.dumps(obj)
# sleep(1000)
yield json_obj
def create_app(config=None):
template_dir = os.path.relpath("./templates")
app = Flask(
__name__,
instance_relative_config=True,
template_folder=template_dir,
static_url_path="/static",
)
app.config.from_object(__name__)
if config is not None:
app.config.update(config)
try:
os.makedirs(app.instance_path)
except OSError:
pass
CORS(app)
@app.after_request
def set_response_headers(response):
"""Ensures no cache"""
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Pragma"] = "no-cache"
response.headers["Expires"] = "0"
return response
@app.route("/stream1", methods=["GET"])
def gimme_data1():
"""streams down large data"""
# stream with context so the 'after_request' happens when streaming is finished
return Response(stream_with_context(generate()), mimetype="application/json")
@app.route("/stream2", methods=["GET"])
def gimme_data2():
"""streams down large data"""
# stream with context so the 'after_request' happens when streaming is finished
return Response(
stream_with_context(generate(include_digits=True)),
mimetype="application/json",
)
@app.route("/stream3", methods=["GET"])
def gimme_data3():
"""streams down large data"""
# stream with context so the 'after_request' happens when streaming is finished
return Response(
stream_with_context(generate(include_digits=True, include_nulls=True)),
mimetype="application/json",
)
@app.route("/")
def entry_point():
"""simple entry for test"""
return render_template("base.html")
return app
if __name__ == "__main__":
app = create_app()
app.run(host="0.0.0.0", port=random.randint(2000, 9000))
|
[
"random.randint",
"os.makedirs",
"flask_cors.CORS",
"flask.Flask",
"random.getrandbits",
"random.choice",
"json.dumps",
"datetime.datetime",
"random.randrange",
"os.path.relpath",
"datetime.timedelta",
"flask.render_template"
] |
[((376, 402), 'datetime.datetime', 'datetime', (['year_start', '(1)', '(1)'], {}), '(year_start, 1, 1)\n', (384, 402), False, 'from datetime import datetime, date, timedelta\n'), ((418, 442), 'datetime.datetime', 'datetime', (['year_end', '(1)', '(1)'], {}), '(year_end, 1, 1)\n', (426, 442), False, 'from datetime import datetime, date, timedelta\n'), ((568, 604), 'random.randrange', 'random.randrange', (['days_between_dates'], {}), '(days_between_dates)\n', (584, 604), False, 'import random\n'), ((626, 659), 'random.randrange', 'random.randrange', (['(0)', '(60 * 60 * 24)'], {}), '(0, 60 * 60 * 24)\n', (642, 659), False, 'import random\n'), ((911, 934), 'random.randint', 'random.randint', (['(10)', '(200)'], {}), '(10, 200)\n', (925, 934), False, 'import random\n'), ((1740, 1770), 'os.path.relpath', 'os.path.relpath', (['"""./templates"""'], {}), "('./templates')\n", (1755, 1770), False, 'import os\n'), ((1781, 1888), 'flask.Flask', 'Flask', (['__name__'], {'instance_relative_config': '(True)', 'template_folder': 'template_dir', 'static_url_path': '"""/static"""'}), "(__name__, instance_relative_config=True, template_folder=template_dir,\n static_url_path='/static')\n", (1786, 1888), False, 'from flask import Flask, render_template, Response, stream_with_context, send_from_directory\n'), ((2108, 2117), 'flask_cors.CORS', 'CORS', (['app'], {}), '(app)\n', (2112, 2117), False, 'from flask_cors import CORS\n'), ((689, 750), 'datetime.timedelta', 'timedelta', ([], {'days': 'random_number_of_days', 'seconds': 'random_seconds'}), '(days=random_number_of_days, seconds=random_seconds)\n', (698, 750), False, 'from datetime import datetime, date, timedelta\n'), ((1628, 1643), 'json.dumps', 'json.dumps', (['obj'], {}), '(obj)\n', (1638, 1643), False, 'import json\n'), ((2039, 2069), 'os.makedirs', 'os.makedirs', (['app.instance_path'], {}), '(app.instance_path)\n', (2050, 2069), False, 'import os\n'), ((3476, 3504), 'flask.render_template', 'render_template', (['"""base.html"""'], {}), "('base.html')\n", (3491, 3504), False, 'from flask import Flask, render_template, Response, stream_with_context, send_from_directory\n'), ((965, 986), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (983, 986), False, 'import random\n'), ((3606, 3632), 'random.randint', 'random.randint', (['(2000)', '(9000)'], {}), '(2000, 9000)\n', (3620, 3632), False, 'import random\n'), ((1078, 1129), 'random.choice', 'random.choice', (['(string.ascii_letters + string.digits)'], {}), '(string.ascii_letters + string.digits)\n', (1091, 1129), False, 'import random\n'), ((1201, 1236), 'random.choice', 'random.choice', (['string.ascii_letters'], {}), '(string.ascii_letters)\n', (1214, 1236), False, 'import random\n')]
|
import unittest
from agent import *
from completesimulation import HamadryasSim, HamaPopulation, GeladaSim, GelPopulation
from dispersal import HamadryasDispersal, GeladaDispersal
from group import HamadryasGroup, GeladaGroup
from seedgroups import HamadryasSeed, GeladaSeed
class DispersalTests(unittest.TestCase):
def setup_gelada(self):
sim = GeladaSim()
pop = GelPopulation()
band1 = GeladaGroup(1)
band2 = GeladaGroup(2)
pop.groupsdict[1] = band1
pop.groupsdict[2] = band2
GeladaSeed.addagenttoseed(1, band1, pop, 'm', None, None, 10, sim)
pass
def test_attractiveness_ranking(self):
pass
def test_become_bachelor(self):
pass
def test_inherit(self):
pass
def test_challenge(self):
pass
def test_fol_switch_OMU(self):
pass
def test_disperse_bands(self):
pass
def test_follow(self):
pass
def test_solitary(self):
pass
def test_follower(self):
pass
|
[
"group.GeladaGroup",
"completesimulation.GelPopulation",
"completesimulation.GeladaSim",
"seedgroups.GeladaSeed.addagenttoseed"
] |
[((361, 372), 'completesimulation.GeladaSim', 'GeladaSim', ([], {}), '()\n', (370, 372), False, 'from completesimulation import HamadryasSim, HamaPopulation, GeladaSim, GelPopulation\n'), ((387, 402), 'completesimulation.GelPopulation', 'GelPopulation', ([], {}), '()\n', (400, 402), False, 'from completesimulation import HamadryasSim, HamaPopulation, GeladaSim, GelPopulation\n'), ((419, 433), 'group.GeladaGroup', 'GeladaGroup', (['(1)'], {}), '(1)\n', (430, 433), False, 'from group import HamadryasGroup, GeladaGroup\n'), ((450, 464), 'group.GeladaGroup', 'GeladaGroup', (['(2)'], {}), '(2)\n', (461, 464), False, 'from group import HamadryasGroup, GeladaGroup\n'), ((543, 609), 'seedgroups.GeladaSeed.addagenttoseed', 'GeladaSeed.addagenttoseed', (['(1)', 'band1', 'pop', '"""m"""', 'None', 'None', '(10)', 'sim'], {}), "(1, band1, pop, 'm', None, None, 10, sim)\n", (568, 609), False, 'from seedgroups import HamadryasSeed, GeladaSeed\n')]
|
#! python3
# fillTheGaps.py - Finds all files with a given prefix, such as
# spam001.txt, spam002.txt, and so on, in a single
# folder and locates any gaps in the numbering. Have
# the program rename all the later files to close this
# gap.
# <NAME>
import re
import os
import shutil
# Returns boolean based on if the passed string has a file extension.
def isFileExtension(filename):
extensionRegex = re.compile(r'\.[a-zA-Z]{3,4}')
mo = extensionRegex.search(filename)
if not mo:
return False
else:
return True
print('Enter the absolute path of the folder you want to search:')
folder = input()
print('''Enter the name of the file without the desired prefix:
(Ex. Enter spam.txt instead of spam001.txt)''')
filename = input()
if isFileExtension(filename) is False:
while isFileExtension(filename) is False:
print('Invalid filename: File extension not found')
print('''Enter the name of the file without the desired prefix:
(Ex. Enter spam.txt instead of spam001.txt)''')
filename = input()
print('''Finally enter the prefix you would like to use starting at 1:
(Ex. 001, 01, 1)''')
prefix = input()
# Ensures the numbering prefix starts at 1.
if prefix[-1] != '1':
while True:
print('Invalid Prefix')
print('''Please enter the prefix that starts at 1:
(Ex. 001, 01, 1)''')
prefix = input()
if prefix[-1] == '1':
break
# If the prefix is something like 001, this holds those 0's.
charsBeforeNum = prefix[:-1]
# Create variable that holds the file extension.
extensionRegex = re.compile(r'\.[a-zA-Z]{3,4}')
mo = extensionRegex.search(filename)
extension = mo.group()
# Holds a string of the file without extension. So is spam.txt is spam.
filewoExtension = filename.replace(extension, '')
# Create regex that detects the file number.
fileNumRegex = re.compile(r'([1-9]+[0]*)\.')
fileNums = []
# Put the file numbers in a list.
for file in os.listdir(folder):
if filewoExtension in file:
mo = fileNumRegex.search(file)
fileNums.append(int(mo.group(1)))
# Sort the list of file numbers.
fileNums.sort()
# Determines where the gap in the numbering begins
gapStart = fileNums[-1]
for i in range(len(fileNums) - 1):
if fileNums[i] + 1 != fileNums[i + 1]:
gapStart = fileNums[i]
break
filesToBeRenamed = []
# Determines which numbered files have to be renamed to keep the numbering.
for file in os.listdir(folder):
if filewoExtension in file:
mo = fileNumRegex.search(file)
if int(mo.group(1)) > gapStart:
filesToBeRenamed.append(int(mo.group(1)))
# Sort the list of file numbers to be renamed.
filesToBeRenamed.sort()
newFileNum = gapStart + 1
# Fills in the gaps in the numbering.
for i in range(len(filesToBeRenamed)):
filePath = os.path.join(folder, filewoExtension + charsBeforeNum +
str(filesToBeRenamed[i]) + extension)
newFilePath = os.path.join(folder, filewoExtension + charsBeforeNum +
str(newFileNum) + extension)
newFileNum += 1
if os.path.exists(filePath):
os.rename(filePath, newFilePath)
|
[
"os.rename",
"os.path.exists",
"os.listdir",
"re.compile"
] |
[((1652, 1682), 're.compile', 're.compile', (['"""\\\\.[a-zA-Z]{3,4}"""'], {}), "('\\\\.[a-zA-Z]{3,4}')\n", (1662, 1682), False, 'import re\n'), ((1927, 1956), 're.compile', 're.compile', (['"""([1-9]+[0]*)\\\\."""'], {}), "('([1-9]+[0]*)\\\\.')\n", (1937, 1956), False, 'import re\n'), ((2019, 2037), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (2029, 2037), False, 'import os\n'), ((2482, 2500), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (2492, 2500), False, 'import os\n'), ((477, 507), 're.compile', 're.compile', (['"""\\\\.[a-zA-Z]{3,4}"""'], {}), "('\\\\.[a-zA-Z]{3,4}')\n", (487, 507), False, 'import re\n'), ((3141, 3165), 'os.path.exists', 'os.path.exists', (['filePath'], {}), '(filePath)\n', (3155, 3165), False, 'import os\n'), ((3175, 3207), 'os.rename', 'os.rename', (['filePath', 'newFilePath'], {}), '(filePath, newFilePath)\n', (3184, 3207), False, 'import os\n')]
|
import maya.mel as mm
import maya.cmds as mc
import maya.OpenMaya as OpenMaya
import glTools.utils.base
import glTools.utils.mesh
import glTools.utils.skinCluster
import os.path
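# Utilities for exporting skinCluster influence weights to dnBurlyDeformer XML
# weight maps, loading them back, and converting a skinCluster to a burly deformer.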
def writeBurlyWeights(mesh,skinCluster,influence,filePath):
'''
Write one influence's skinCluster weights to a dnBurlyDeformer XML weight map.
'''
# Get basic procedure information
burly = 'dnBurlyDeformer1'
vtxCount = mc.polyEvaluate(mesh,v=True)
inf = mc.ls(influence,l=True)
# Check skinCluster
if not glTools.utils.skinCluster.isSkinCluster(skinCluster):
raise Exception('Object "'+skinCluster+'" is not a valid skinCluster!')
# Get skinCluster Fn
skinFn = glTools.utils.skinCluster.getSkinClusterFn(skinCluster)
# Get influence dag path
influencePath = glTools.utils.base.getMDagPath(influence)
# Get points affected by influence
infSelectionList = OpenMaya.MSelectionList()
infWeightList = OpenMaya.MFloatArray()
skinFn.getPointsAffectedByInfluence(influencePath,infSelectionList,infWeightList)
infObjectPath = OpenMaya.MDagPath()
infComponentList = OpenMaya.MObject()
infSelectionList.getDagPath(0,infObjectPath,infComponentList)
# Get affect point indices
infComponentIndex = OpenMaya.MIntArray()
infComponentIndexFn = OpenMaya.MFnSingleIndexedComponent(infComponentList)
infComponentIndexFn.getElements(infComponentIndex)
infComponentIndex = list(infComponentIndex)
# Get affect point position and normal arrays
infComponentPosArray = OpenMaya.MPointArray()
infComponentNormArray = OpenMaya.MVectorArray()
infComponentVtxIt = OpenMaya.MItMeshVertex(infObjectPath,infComponentList)
normal = OpenMaya.MVector()
while not infComponentVtxIt.isDone():
infComponentPosArray.append(infComponentVtxIt.position(OpenMaya.MSpace.kWorld))
infComponentVtxIt.getNormal(normal)
infComponentNormArray.append(normal)
infComponentVtxIt.next()
# Open file
fileId = open(filePath, "w")
# Header
header = [ '<?xml version="1.0" standalone="no" ?>\n',
'<dnWeights type="dnBurlyDeformer" version="1.0" name="'+burly+'">\n',
'\t<Map name="'+inf[0]+'">\n',
'\t\t<Topology vertexCount="'+str(vtxCount)+'"/>\n' ]
fileId.writelines(header)
# Weights
weights = ['\t\t<Weights>\n']
for i in range(len(infComponentIndex)):
if not i%5: weights.append('\t\t\t')
weights.append(str(infWeightList[i]) + ' ')
if i%5 == 4: weights.append('\n')
weights.append('\n\t\t</Weights>\n')
fileId.writelines(weights)
# Indices
indices = ['\t\t<Indices>\n']
for i in range(len(infComponentIndex)):
if not i%10: indices.append('\t\t\t')
indices.append(str(infComponentIndex[i]) + ' ')
if i%10 == 9: indices.append('\n')
indices.append('\n\t\t</Indices>\n')
fileId.writelines(indices)
# Position
pos = ['\t\t<Positions>\n']
for i in range(len(infComponentIndex)):
if not i%2: pos.append('\t\t\t')
pos.append(str(infComponentPosArray[i][0])+' '+str(infComponentPosArray[i][1])+' '+str(infComponentPosArray[i][2])+' ')
if i%2: pos.append('\n')
pos.append('\n\t\t</Positions>\n')
fileId.writelines(pos)
# Normals
norm = ['\t\t<Normals>\n']
for i in range(len(infComponentIndex)):
if not i%2: norm.append('\t\t\t')
norm.append(str(infComponentNormArray[i][0])+' '+str(infComponentNormArray[i][1])+' '+str(infComponentNormArray[i][2])+' ')
if i%2: norm.append('\n')
norm.append('\n\t\t</Normals>\n')
fileId.writelines(norm)
# Radii
radii = ['\t\t<Radii>\n']
for i in range(len(infComponentIndex)):
if not i%6: radii.append('\t\t\t')
radii.append('0.01 ')
if i%6 == 5: radii.append('\n')
radii.append('\n\t\t</Radii>\n')
fileId.writelines(radii)
# Footer
footer = ['\t</Map>','\n</dnWeights>']
fileId.writelines(footer)
# Close file
fileId.close()
def writeBurlyWeights_allInfluences(mesh,skinCluster,directoryPath):
'''
Write XML weight maps for every influence of the given skinCluster.
'''
# Check mesh
if not glTools.utils.mesh.isMesh(mesh):
raise Exception('Object "'+mesh+'" contains no valid polygon mesh!')
# Check skinCluster
if not glTools.utils.skinCluster.isSkinCluster(skinCluster):
raise Exception('Object "'+skinCluster+'" is not a valid skinCluster!')
# Check directory
if not os.path.isdir(directoryPath):
raise Exception('Directory path "'+directoryPath+'" does not exist!')
# Get skinCluster influences
influenceList = mc.skinCluster(skinCluster,q=True,inf=True)
# Write weights
for influence in influenceList:
writeBurlyWeights(mesh,skinCluster,influence,directoryPath+influence+'.xml')
def loadBurlyWeights(burlyDeformer,directoryPath):
'''
Load all XML weight maps in a directory onto the given burly deformer.
'''
# Check burly deformer
if not mc.objExists(burlyDeformer):
raise Exception('Burly deformer "'+burlyDeformer+'" does not exist!')
# Check directory path
if not directoryPath.endswith('/'): directoryPath+='/'
if not os.path.isdir(directoryPath):
raise Exception('Directory path "'+directoryPath+'" does not exist!')
# Get directory listing
fileList = [i for i in os.listdir(directoryPath) if i.endswith('.xml')]
# Load weights
for filePath in fileList:
fileId = directoryPath+filePath
influence = filePath.replace('.xml','')
mm.eval('dnBurlyDeformer -loadWeights "'+fileId+'" "'+burlyDeformer+'" "'+influence+'"')
def convertToBurly(skinCluster,burlyDeformerName=''):
'''
Convert a skinCluster to a dnBurlyDeformer, transferring weights via XML maps.
'''
# Check skinCluster
if not mc.objExists(skinCluster):
raise Exception('SkinCluster "'+skinCluster+'" does not exist!')
if not glTools.utils.skinCluster.isSkinCluster(skinCluster):
raise Exception('Object "'+skinCluster+'" is not a valid skinCluster deformer!')
# Get affected mesh (query the geometry deformed by the skinCluster)
mesh = mc.skinCluster(skinCluster,q=True,g=True)[0]
# Designate temporary path for exported weight files
dirPath = '/usr/tmp/'
# Export skinCluster weight files
influenceList = mc.skinCluster(skinCluster,q=True,inf=True)
writeBurlyWeights_allInfluences(mesh,skinCluster,dirPath)
# Create burly deformer
mm.eval('dnBurlyDeformer_createNamed("'+mesh+'","'+burlyDeformerName+'")')
|
[
"maya.mel.eval",
"maya.cmds.skinCluster",
"maya.OpenMaya.MObject",
"maya.cmds.polyEvaluate",
"maya.OpenMaya.MItMeshVertex",
"maya.OpenMaya.MIntArray",
"maya.OpenMaya.MFloatArray",
"maya.cmds.ls",
"maya.OpenMaya.MSelectionList",
"maya.OpenMaya.MFnSingleIndexedComponent",
"maya.OpenMaya.MVector",
"maya.OpenMaya.MPointArray",
"maya.cmds.objExists",
"maya.OpenMaya.MVectorArray",
"maya.OpenMaya.MDagPath"
] |
[((326, 355), 'maya.cmds.polyEvaluate', 'mc.polyEvaluate', (['mesh'], {'v': '(True)'}), '(mesh, v=True)\n', (341, 355), True, 'import maya.cmds as mc\n'), ((362, 386), 'maya.cmds.ls', 'mc.ls', (['influence'], {'l': '(True)'}), '(influence, l=True)\n', (367, 386), True, 'import maya.cmds as mc\n'), ((776, 801), 'maya.OpenMaya.MSelectionList', 'OpenMaya.MSelectionList', ([], {}), '()\n', (799, 801), True, 'import maya.OpenMaya as OpenMaya\n'), ((819, 841), 'maya.OpenMaya.MFloatArray', 'OpenMaya.MFloatArray', ([], {}), '()\n', (839, 841), True, 'import maya.OpenMaya as OpenMaya\n'), ((942, 961), 'maya.OpenMaya.MDagPath', 'OpenMaya.MDagPath', ([], {}), '()\n', (959, 961), True, 'import maya.OpenMaya as OpenMaya\n'), ((982, 1000), 'maya.OpenMaya.MObject', 'OpenMaya.MObject', ([], {}), '()\n', (998, 1000), True, 'import maya.OpenMaya as OpenMaya\n'), ((1115, 1135), 'maya.OpenMaya.MIntArray', 'OpenMaya.MIntArray', ([], {}), '()\n', (1133, 1135), True, 'import maya.OpenMaya as OpenMaya\n'), ((1159, 1211), 'maya.OpenMaya.MFnSingleIndexedComponent', 'OpenMaya.MFnSingleIndexedComponent', (['infComponentList'], {}), '(infComponentList)\n', (1193, 1211), True, 'import maya.OpenMaya as OpenMaya\n'), ((1382, 1404), 'maya.OpenMaya.MPointArray', 'OpenMaya.MPointArray', ([], {}), '()\n', (1402, 1404), True, 'import maya.OpenMaya as OpenMaya\n'), ((1430, 1453), 'maya.OpenMaya.MVectorArray', 'OpenMaya.MVectorArray', ([], {}), '()\n', (1451, 1453), True, 'import maya.OpenMaya as OpenMaya\n'), ((1475, 1530), 'maya.OpenMaya.MItMeshVertex', 'OpenMaya.MItMeshVertex', (['infObjectPath', 'infComponentList'], {}), '(infObjectPath, infComponentList)\n', (1497, 1530), True, 'import maya.OpenMaya as OpenMaya\n'), ((1540, 1558), 'maya.OpenMaya.MVector', 'OpenMaya.MVector', ([], {}), '()\n', (1556, 1558), True, 'import maya.OpenMaya as OpenMaya\n'), ((4193, 4238), 'maya.cmds.skinCluster', 'mc.skinCluster', (['skinCluster'], {'q': '(True)', 'inf': '(True)'}), '(skinCluster, q=True, inf=True)\n', (4207, 4238), True, 'import maya.cmds as mc\n'), ((5567, 5612), 'maya.cmds.skinCluster', 'mc.skinCluster', (['skinCluster'], {'q': '(True)', 'inf': '(True)'}), '(skinCluster, q=True, inf=True)\n', (5581, 5612), True, 'import maya.cmds as mc\n'), ((5700, 5785), 'maya.mel.eval', 'mm.eval', (['(\'dnBurlyDeformer_createNamed("\' + geo + \'","\' + burlyDeformerName + \'")\')'], {}), '(\'dnBurlyDeformer_createNamed("\' + geo + \'","\' + burlyDeformerName +\n \'")\')\n', (5707, 5785), True, 'import maya.mel as mm\n'), ((4463, 4490), 'maya.cmds.objExists', 'mc.objExists', (['burlyDeformer'], {}), '(burlyDeformer)\n', (4475, 4490), True, 'import maya.cmds as mc\n'), ((4979, 5083), 'maya.mel.eval', 'mm.eval', (['(\'dnBurlyDeformer -loadWeights "\' + fileId + \'" "\' + burlyDeformer + \'" "\' +\n influence + \'"\')'], {}), '(\'dnBurlyDeformer -loadWeights "\' + fileId + \'" "\' + burlyDeformer +\n \'" "\' + influence + \'"\')\n', (4986, 5083), True, 'import maya.mel as mm\n'), ((5162, 5187), 'maya.cmds.objExists', 'mc.objExists', (['skinCluster'], {}), '(skinCluster)\n', (5174, 5187), True, 'import maya.cmds as mc\n')]
|
import shlex
import sys
from subprocess import PIPE, Popen
from typing import List
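# Thin subprocess wrapper: run() streams child output to the parent's stdout,
# run_pipe() captures it; both exit the process on a return code of 1.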
class Executer:
SUCCESS = 0
ERROR = 1
@staticmethod
def run(command: str) -> None:
p = Popen(shlex.split(command))
print(f"-> {command}")
p.communicate()
if p.returncode == Executer.ERROR:
sys.exit(Executer.ERROR)
@staticmethod
def run_pipe(command: str) -> List[str]:
p = Popen(shlex.split(command), stdout=PIPE)
print(f"-> {command}")
result = p.communicate()
if p.returncode == Executer.ERROR:
sys.exit(Executer.ERROR)
return_lines = result[0].decode("utf-8").split("\n")
return return_lines
@staticmethod
def exit_error():
sys.exit(Executer.ERROR)
|
[
"shlex.split",
"sys.exit"
] |
[((760, 784), 'sys.exit', 'sys.exit', (['Executer.ERROR'], {}), '(Executer.ERROR)\n', (768, 784), False, 'import sys\n'), ((203, 223), 'shlex.split', 'shlex.split', (['command'], {}), '(command)\n', (214, 223), False, 'import shlex\n'), ((335, 359), 'sys.exit', 'sys.exit', (['Executer.ERROR'], {}), '(Executer.ERROR)\n', (343, 359), False, 'import sys\n'), ((442, 462), 'shlex.split', 'shlex.split', (['command'], {}), '(command)\n', (453, 462), False, 'import shlex\n'), ((596, 620), 'sys.exit', 'sys.exit', (['Executer.ERROR'], {}), '(Executer.ERROR)\n', (604, 620), False, 'import sys\n')]
|
from unittest import TestCase
import torch
from models.utils import combine_mapping_networks, categorize_mappings
from models.networks.fc import FCGenerator
class UtilTests(TestCase):
def setUp(self) -> None:
self.state_dicts = [FCGenerator().state_dict() for _ in range(5)]
self.mappings = [torch.eye(300, 300) for _ in range(2)]
def test_combine_networks(self):
child = combine_mapping_networks(*self.mappings, is_SO=True)
self.assertTrue(
torch.all(child['module.layer'] == self.mappings[0])
)
def test_combine_networks_r(self):
for i in range(len(self.mappings)):
self.mappings[i][0] = -self.mappings[i][0]
child = combine_mapping_networks(*self.mappings, is_SO=False)
self.assertTrue(
torch.all(child['module.layer'] == self.mappings[0])
)
|
[
"models.networks.fc.FCGenerator",
"models.utils.combine_mapping_networks",
"torch.eye",
"torch.all"
] |
[((410, 462), 'models.utils.combine_mapping_networks', 'combine_mapping_networks', (['*self.mappings'], {'is_SO': '(True)'}), '(*self.mappings, is_SO=True)\n', (434, 462), False, 'from models.utils import combine_mapping_networks, categorize_mappings\n'), ((718, 771), 'models.utils.combine_mapping_networks', 'combine_mapping_networks', (['*self.mappings'], {'is_SO': '(False)'}), '(*self.mappings, is_SO=False)\n', (742, 771), False, 'from models.utils import combine_mapping_networks, categorize_mappings\n'), ((317, 336), 'torch.eye', 'torch.eye', (['(300)', '(300)'], {}), '(300, 300)\n', (326, 336), False, 'import torch\n'), ((500, 552), 'torch.all', 'torch.all', (["(child['module.layer'] == self.mappings[0])"], {}), "(child['module.layer'] == self.mappings[0])\n", (509, 552), False, 'import torch\n'), ((809, 861), 'torch.all', 'torch.all', (["(child['module.layer'] == self.mappings[0])"], {}), "(child['module.layer'] == self.mappings[0])\n", (818, 861), False, 'import torch\n'), ((246, 259), 'models.networks.fc.FCGenerator', 'FCGenerator', ([], {}), '()\n', (257, 259), False, 'from models.networks.fc import FCGenerator\n')]
|
from django.test import SimpleTestCase
from cpu.center import Center
from game.transforms import Board
class CenterAiTest(SimpleTestCase):
def test_picks_center(self):
data = [' '] * 9
cpu = Center()
move = cpu.play(Board(data), 'x', 'o')
self.assertEquals(move, 4)
def test_wins_if_possible(self):
data = [
' ', ' ', ' ',
'o', 'x', ' ',
'o', ' ', 'x',
]
cpu = Center()
move = cpu.play(Board(data), 'x', 'o')
self.assertEquals(move, 0)
def test_defends_if_needed(self):
data = [
' ', ' ', ' ',
' ', 'x', ' ',
'o', ' ', 'x',
]
cpu = Center()
move = cpu.play(Board(data), 'o', 'x')
self.assertEquals(move, 0)
|
[
"game.transforms.Board",
"cpu.center.Center"
] |
[((215, 223), 'cpu.center.Center', 'Center', ([], {}), '()\n', (221, 223), False, 'from cpu.center import Center\n'), ((466, 474), 'cpu.center.Center', 'Center', ([], {}), '()\n', (472, 474), False, 'from cpu.center import Center\n'), ((718, 726), 'cpu.center.Center', 'Center', ([], {}), '()\n', (724, 726), False, 'from cpu.center import Center\n'), ((248, 259), 'game.transforms.Board', 'Board', (['data'], {}), '(data)\n', (253, 259), False, 'from game.transforms import Board\n'), ((499, 510), 'game.transforms.Board', 'Board', (['data'], {}), '(data)\n', (504, 510), False, 'from game.transforms import Board\n'), ((751, 762), 'game.transforms.Board', 'Board', (['data'], {}), '(data)\n', (756, 762), False, 'from game.transforms import Board\n')]
|
import unittest
class TestCase(unittest.TestCase):
def test_dummy(self):
self.assertEqual('tests to be added', 'tests to be added')
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main"
] |
[((179, 194), 'unittest.main', 'unittest.main', ([], {}), '()\n', (192, 194), False, 'import unittest\n')]
|
from django.db import models
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from datetime import datetime
# Create your models here.
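# Faculty.save() and Department.save() below flag the assigned user's Staff
# record as dean/head as a side effect of saving.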
class Faculty(models.Model):
"""Model definition for Faculty."""
name = models.CharField(max_length=250, unique=True)
code = models.IntegerField(null=True, blank=True, unique=True)
description = models.TextField(null=True, blank=True)
dean = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.DO_NOTHING,
limit_choices_to={'is_staff': True},
null=True,
blank=True
)
is_active = models.BooleanField(default=True)
timestamp = models.DateTimeField(auto_now=False, auto_now_add=True)
class Meta:
"""Meta definition for Faculty."""
ordering = ['id']
verbose_name = _('Faculty')
verbose_name_plural = _('Faculty')
def save(self, *args, **kwargs):
if self.dean:
try:
dean = self.dean.staff_set.all().first()
dean.is_dean_of_faculty = True
dean.save()
print(f"{dean} \n Is dean of faculty: {dean.is_dean_of_faculty}")
except Exception:
self.dean.get_staff()
dean = self.dean.staff_set.all().first()
dean.is_dean_of_faculty = True
dean.save()
print(f"{dean} \n Is dean of faculty: {dean.is_dean_of_faculty}")
super(Faculty, self).save(*args, **kwargs) # Call the real save() method
def __str__(self):
"""String representation of Faculty."""
return self.name
class Department(models.Model):
"""Model definition for Department."""
faculty = models.ForeignKey(Faculty, on_delete=models.CASCADE)
name = models.CharField(max_length=250, unique=True)
code = models.CharField(max_length=250, null=True, blank=True, unique=True)
description = models.TextField(null=True, blank=True)
head = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.DO_NOTHING,
limit_choices_to={'is_staff': True},
null=True,
blank=True
)
is_active = models.BooleanField(default=True)
timestamp = models.DateTimeField(auto_now=False, auto_now_add=True)
class Meta:
"""Meta definition for Department."""
ordering = ['id']
verbose_name = _('Department')
verbose_name_plural = _('Departments')
def save(self, *args, **kwargs):
if self.head:
try:
head = self.head.staff_set.all().first()
head.is_head_of_department = True
head.save()
print(f"{head} \n Is head of department: {head.is_head_of_department}")
except Exception:
self.head.get_staff()
head = self.head.staff_set.all().first()
head.is_head_of_department = True
head.save()
print(f"{head} \n Is head of department: {head.is_head_of_department}")
super(Department, self).save(*args, **kwargs) # Call the real save() method
def __str__(self):
"""String representation of Department."""
return self.name
class Specialization(models.Model):
"""Model definition for Specialization."""
department = models.ForeignKey(Department, on_delete=models.CASCADE)
name = models.CharField(max_length=250, unique=True)
code = models.CharField(max_length=250, null=True, blank=True, unique=True)
max_level = models.ForeignKey("Level", on_delete=models.DO_NOTHING, null=True, blank=True)
description = models.TextField(null=True, blank=True)
is_active = models.BooleanField(default=True)
timestamp = models.DateTimeField(auto_now=False, auto_now_add=True)
class Meta:
"""Meta definition for Specialization."""
ordering = ['id']
verbose_name = _('Specialization')
verbose_name_plural = _('Specializations')
def __str__(self):
"""String representation of Specialization."""
return self.name
class Course(models.Model):
"""Model definition for Course."""
specialization = models.ForeignKey(Specialization, on_delete=models.CASCADE)
name = models.CharField(max_length=250, unique=True)
code = models.CharField(max_length=250, null=True, blank=True, unique=True)
description = models.TextField(null=True, blank=True)
coordinator = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.DO_NOTHING,
limit_choices_to={'is_staff': True},
null=True,
blank=True
)
is_active = models.BooleanField(default=True)
timestamp = models.DateTimeField(auto_now=False, auto_now_add=True)
class Meta:
"""Meta definition for Course."""
ordering = ['id']
verbose_name = _('Course')
verbose_name_plural = _('Courses')
def __str__(self):
"""String representation of Course."""
return self.name
class Level(models.Model):
"""Model definition for Level."""
class LevelChoices(models.IntegerChoices):
ONE = 100
TWO = 200
THREE = 300
FOUR = 400
FIVE = 500
code = models.IntegerField(
choices=LevelChoices.choices,
null=True,
default=LevelChoices.ONE,
)
class Meta:
"""Meta definition for Level."""
ordering = ['id']
verbose_name = _('Level')
verbose_name_plural = _('Levels')
def __str__(self):
"""String representation of Level"""
return f"{self.code}"
class Semester(models.Model):
"""Model definition for Semester."""
class SemesterChoices(models.IntegerChoices):
FIRST = 1, '1st Semester'
SECOND = 2, '2nd Semester'
semester = models.IntegerField(
choices=SemesterChoices.choices,
null=True,
default=SemesterChoices.FIRST
)
class Meta:
"""Meta definition for Semester."""
ordering = ['id']
verbose_name = 'Semester'
verbose_name_plural = 'Semesters'
def __str__(self):
"""String representation of Semester."""
return f"{self.semester}"
class Session(models.Model):
"""Model definition for Session."""
year = models.CharField(max_length=4)
is_current = models.BooleanField(default=False)
class Meta:
"""Meta definition for Session."""
ordering = ['year']
verbose_name = 'Session'
verbose_name_plural = 'Sessions'
def save(self, *args, **kwargs):
current_year = datetime.today().year
year = datetime.strptime(self.year, "%Y").year
if year == current_year:
self.is_current = True
super(Session, self).save(*args, **kwargs) # Call the real save() method
def __str__(self):
"""String representation of Session."""
return f'{self.year} / {datetime.strptime(self.year, "%Y").year + 1}'
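# Editor's sketch (assumes a configured Django project with migrations
# applied) showing how Session.save() derives is_current:
#
#   s = Session(year=str(datetime.today().year)); s.save()
#   assert s.is_current is True           # the current year is flagged
#   old = Session(year="2000"); old.save()
#   assert old.is_current is False        # other years keep the default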
|
[
"django.db.models.TextField",
"datetime.datetime.today",
"django.utils.translation.gettext_lazy",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.IntegerField",
"datetime.datetime.strptime",
"django.db.models.DateTimeField"
] |
[((258, 303), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'unique': '(True)'}), '(max_length=250, unique=True)\n', (274, 303), False, 'from django.db import models\n'), ((315, 370), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)', 'blank': '(True)', 'unique': '(True)'}), '(null=True, blank=True, unique=True)\n', (334, 370), False, 'from django.db import models\n'), ((389, 428), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (405, 428), False, 'from django.db import models\n'), ((440, 576), 'django.db.models.ForeignKey', 'models.ForeignKey', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.DO_NOTHING', 'limit_choices_to': "{'is_staff': True}", 'null': '(True)', 'blank': '(True)'}), "(settings.AUTH_USER_MODEL, on_delete=models.DO_NOTHING,\n limit_choices_to={'is_staff': True}, null=True, blank=True)\n", (457, 576), False, 'from django.db import models\n'), ((635, 668), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (654, 668), False, 'from django.db import models\n'), ((685, 740), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(False)', 'auto_now_add': '(True)'}), '(auto_now=False, auto_now_add=True)\n', (705, 740), False, 'from django.db import models\n'), ((1751, 1803), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Faculty'], {'on_delete': 'models.CASCADE'}), '(Faculty, on_delete=models.CASCADE)\n', (1768, 1803), False, 'from django.db import models\n'), ((1815, 1860), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'unique': '(True)'}), '(max_length=250, unique=True)\n', (1831, 1860), False, 'from django.db import models\n'), ((1872, 1940), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'null': '(True)', 'blank': '(True)', 'unique': '(True)'}), '(max_length=250, null=True, blank=True, unique=True)\n', (1888, 1940), False, 'from django.db import models\n'), ((1959, 1998), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1975, 1998), False, 'from django.db import models\n'), ((2010, 2146), 'django.db.models.ForeignKey', 'models.ForeignKey', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.DO_NOTHING', 'limit_choices_to': "{'is_staff': True}", 'null': '(True)', 'blank': '(True)'}), "(settings.AUTH_USER_MODEL, on_delete=models.DO_NOTHING,\n limit_choices_to={'is_staff': True}, null=True, blank=True)\n", (2027, 2146), False, 'from django.db import models\n'), ((2205, 2238), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (2224, 2238), False, 'from django.db import models\n'), ((2255, 2310), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(False)', 'auto_now_add': '(True)'}), '(auto_now=False, auto_now_add=True)\n', (2275, 2310), False, 'from django.db import models\n'), ((3366, 3421), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Department'], {'on_delete': 'models.CASCADE'}), '(Department, on_delete=models.CASCADE)\n', (3383, 3421), False, 'from django.db import models\n'), ((3433, 3478), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'unique': '(True)'}), '(max_length=250, unique=True)\n', (3449, 3478), False, 'from django.db import models\n'), ((3490, 3558), 
'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'null': '(True)', 'blank': '(True)', 'unique': '(True)'}), '(max_length=250, null=True, blank=True, unique=True)\n', (3506, 3558), False, 'from django.db import models\n'), ((3575, 3653), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Level"""'], {'on_delete': 'models.DO_NOTHING', 'null': '(True)', 'blank': '(True)'}), "('Level', on_delete=models.DO_NOTHING, null=True, blank=True)\n", (3592, 3653), False, 'from django.db import models\n'), ((3672, 3711), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (3688, 3711), False, 'from django.db import models\n'), ((3728, 3761), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (3747, 3761), False, 'from django.db import models\n'), ((3778, 3833), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(False)', 'auto_now_add': '(True)'}), '(auto_now=False, auto_now_add=True)\n', (3798, 3833), False, 'from django.db import models\n'), ((4217, 4276), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Specialization'], {'on_delete': 'models.CASCADE'}), '(Specialization, on_delete=models.CASCADE)\n', (4234, 4276), False, 'from django.db import models\n'), ((4288, 4333), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'unique': '(True)'}), '(max_length=250, unique=True)\n', (4304, 4333), False, 'from django.db import models\n'), ((4345, 4413), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'null': '(True)', 'blank': '(True)', 'unique': '(True)'}), '(max_length=250, null=True, blank=True, unique=True)\n', (4361, 4413), False, 'from django.db import models\n'), ((4432, 4471), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (4448, 4471), False, 'from django.db import models\n'), ((4490, 4626), 'django.db.models.ForeignKey', 'models.ForeignKey', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.DO_NOTHING', 'limit_choices_to': "{'is_staff': True}", 'null': '(True)', 'blank': '(True)'}), "(settings.AUTH_USER_MODEL, on_delete=models.DO_NOTHING,\n limit_choices_to={'is_staff': True}, null=True, blank=True)\n", (4507, 4626), False, 'from django.db import models\n'), ((4685, 4718), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (4704, 4718), False, 'from django.db import models\n'), ((4735, 4790), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(False)', 'auto_now_add': '(True)'}), '(auto_now=False, auto_now_add=True)\n', (4755, 4790), False, 'from django.db import models\n'), ((5272, 5363), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'LevelChoices.choices', 'null': '(True)', 'default': 'LevelChoices.ONE'}), '(choices=LevelChoices.choices, null=True, default=\n LevelChoices.ONE)\n', (5291, 5363), False, 'from django.db import models\n'), ((5859, 5958), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'SemesterChoices.choices', 'null': '(True)', 'default': 'SemesterChoices.FIRST'}), '(choices=SemesterChoices.choices, null=True, default=\n SemesterChoices.FIRST)\n', (5878, 5958), False, 'from django.db import models\n'), ((6338, 6368), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(4)'}), '(max_length=4)\n', (6354, 
6368), False, 'from django.db import models\n'), ((6386, 6420), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (6405, 6420), False, 'from django.db import models\n'), ((851, 863), 'django.utils.translation.gettext_lazy', '_', (['"""Faculty"""'], {}), "('Faculty')\n", (852, 863), True, 'from django.utils.translation import gettext_lazy as _\n'), ((894, 906), 'django.utils.translation.gettext_lazy', '_', (['"""Faculty"""'], {}), "('Faculty')\n", (895, 906), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2424, 2439), 'django.utils.translation.gettext_lazy', '_', (['"""Department"""'], {}), "('Department')\n", (2425, 2439), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2470, 2486), 'django.utils.translation.gettext_lazy', '_', (['"""Departments"""'], {}), "('Departments')\n", (2471, 2486), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3951, 3970), 'django.utils.translation.gettext_lazy', '_', (['"""Specialization"""'], {}), "('Specialization')\n", (3952, 3970), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4001, 4021), 'django.utils.translation.gettext_lazy', '_', (['"""Specializations"""'], {}), "('Specializations')\n", (4002, 4021), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4900, 4911), 'django.utils.translation.gettext_lazy', '_', (['"""Course"""'], {}), "('Course')\n", (4901, 4911), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4942, 4954), 'django.utils.translation.gettext_lazy', '_', (['"""Courses"""'], {}), "('Courses')\n", (4943, 4954), True, 'from django.utils.translation import gettext_lazy as _\n'), ((5498, 5508), 'django.utils.translation.gettext_lazy', '_', (['"""Level"""'], {}), "('Level')\n", (5499, 5508), True, 'from django.utils.translation import gettext_lazy as _\n'), ((5539, 5550), 'django.utils.translation.gettext_lazy', '_', (['"""Levels"""'], {}), "('Levels')\n", (5540, 5550), True, 'from django.utils.translation import gettext_lazy as _\n'), ((6645, 6661), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (6659, 6661), False, 'from datetime import datetime\n'), ((6682, 6716), 'datetime.datetime.strptime', 'datetime.strptime', (['self.year', '"""%Y"""'], {}), "(self.year, '%Y')\n", (6699, 6716), False, 'from datetime import datetime\n'), ((6978, 7012), 'datetime.datetime.strptime', 'datetime.strptime', (['self.year', '"""%Y"""'], {}), "(self.year, '%Y')\n", (6995, 7012), False, 'from datetime import datetime\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
import keras
import keras.backend as K
import re
import cv2
import numpy as np
np.set_printoptions(threshold=np.inf)  # 'nan' is rejected by modern NumPy
def list_pictures(directory, ext='jpg|jpeg|bmp|png|ppm'):
return [os.path.join(root, f)
for root, _, files in os.walk(directory) for f in files
if re.match(r'([\w]+\.(?:' + ext + '))', f)]
def get_train_test_dataset():
if os.path.exists('./data/train.npz'):
dataset = np.load('./data/train.npz')
print('{} already exits.'.format('./data/train.npz'))
return (dataset['x'], dataset['y'])
x = list_pictures('./test_dataset', ext='png')
y = [item[:-4] + '_posmap.jpg' for item in x]
filted_x = []
filted_y = []
for ix, iy in zip(x, y):
if os.path.exists(ix) and os.path.exists(iy):
filted_x.append(ix)
filted_y.append(iy)
else:
print('{} or {} not exits.'.format(ix, iy))
x = [cv2.imread(item) for item in filted_x]
y = [cv2.imread(item) for item in filted_y]
x = np.array(x)
y = np.array(y)
if not os.path.exists('./data'):
os.makedirs('./data')
np.savez('./data/train.npz', x=x, y=y)
return (x, y)
def res_block(x, filters):
# stage1
shortcut = x
shortcut = keras.layers.Conv2D(
filters, (1, 1), strides=(2, 2), padding='same')(shortcut)
    # integer division keeps the filter count an int under Python 3
    x = keras.layers.Conv2D(
        filters // 2, (1, 1), strides=(1, 1), padding='same', activation='relu')(x)
    x = keras.layers.Conv2D(
        filters // 2, (4, 4), strides=(2, 2), padding='same', activation='relu')(x)
x = keras.layers.Conv2D(
filters, (1, 1), strides=(1, 1), padding='same')(x)
x = keras.layers.Add()([x, shortcut])
x = keras.layers.BatchNormalization()(x)
x = keras.layers.Activation('relu')(x)
# stage2
shortcut = x
    x = keras.layers.Conv2D(
        filters // 2, (1, 1), strides=(1, 1), padding='same', activation='relu')(x)
    x = keras.layers.Conv2D(
        filters // 2, (4, 4), strides=(1, 1), padding='same', activation='relu')(x)
x = keras.layers.Conv2D(
filters, (1, 1), strides=(1, 1), padding='same')(x)
x = keras.layers.Add()([x, shortcut])
x = keras.layers.BatchNormalization()(x)
x = keras.layers.Activation('relu')(x)
return x
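def _demo_res_block_shapes():
    # Editor's sketch (not part of the original script; run manually with a
    # working Keras install): each res_block halves the spatial size and sets
    # the channel count to `filters`.
    inp = keras.layers.Input(shape=(64, 64, 3))
    out = res_block(inp, 32)
    print(keras.Model(inp, out).output_shape)  # expected: (None, 32, 32, 32)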
def get_regress_model():
input = keras.layers.Input(shape=(256, 256, 3))
x = keras.layers.Conv2D(
16, (4, 4), strides=(1, 1), padding='same', activation='relu')(input)
x = res_block(x, 32)
x = res_block(x, 64)
x = res_block(x, 128)
x = res_block(x, 256)
x = res_block(x, 512)
x = keras.layers.Conv2DTranspose(512, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(
256, (4, 4), strides=(2, 2), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(256, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(256, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(
128, (4, 4), strides=(2, 2), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(128, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(128, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(
64, (4, 4), strides=(2, 2), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(64, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(64, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(
32, (4, 4), strides=(2, 2), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(32, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(
16, (4, 4), strides=(2, 2), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(16, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(3, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(3, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(3, (4, 4), padding='same')(x)
model = keras.Model(input, x)
return model
def preprocess_input(x, y=None):
x = x.astype(np.float32)
x = keras.applications.xception.preprocess_input(x)
if y is not None:
y = y.astype(np.float32)
y /= 256.0
return (x, y)
loss_mask = cv2.imread('./data/uv-data/uv_weight_mask.png')
face_mask = cv2.imread('./data/uv-data/uv_face_mask.png')
loss_mask = np.where(face_mask > 0, loss_mask, face_mask)
loss_mask = loss_mask.astype(np.float32)
loss_mask /= 16.0
def mean_squared_error_with_mask(y_true, y_pred):
mask = K.constant(loss_mask)
return K.mean(K.mean(K.square(y_pred - y_true) * mask, axis=-1), axis=-1)
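def _demo_masked_loss():
    # Editor's sketch: the UV-weighted MSE above vanishes for a perfect
    # prediction. Run manually; it depends on the mask files loaded above.
    t = K.constant(np.zeros((1, 256, 256, 3), dtype=np.float32))
    print(K.eval(mean_squared_error_with_mask(t, t)).max())  # expected: 0.0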
def lr_adjustor(epoch):
base_lr = 0.001
if epoch < 100:
return base_lr
base_lr *= .1
if epoch < 150:
return base_lr
base_lr *= .1
return base_lr
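def _demo_lr_schedule():
    # Editor's sketch: the step decay above yields 1e-3 for epochs [0, 100),
    # 1e-4 for [100, 150) and 1e-5 afterwards.
    for epoch in (0, 99, 100, 149, 150):
        print(epoch, lr_adjustor(epoch))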
def train():
(x, y) = get_train_test_dataset()
# x = np.concatenate([x for i in range(20)])
# y = np.concatenate([y for i in range(20)])
print('x shape -> {}, y shape -> {}.'.format(x.shape, y.shape))
(x, y) = preprocess_input(x, y)
model = get_regress_model()
model.summary()
model.load_weights('./weights.100-0.0137.hdf5')
# keras.utils.plot_model(model, show_shapes=True)
opti = keras.optimizers.Adam(lr=0.001)
if not os.path.exists('./weights'):
os.makedirs('./weights')
callbacks = [
keras.callbacks.LearningRateScheduler(lr_adjustor),
keras.callbacks.CSVLogger('train.log'),
keras.callbacks.ModelCheckpoint(
'./weights/weights.{epoch:02d}-{loss:.4f}.hdf5',
monitor='loss',
save_best_only=True,
period=10)]
model.compile(opti, loss=mean_squared_error_with_mask)
model.fit(x, y, batch_size=16, epochs=200, callbacks=callbacks)
def test():
(x, y) = get_train_test_dataset()
# x = np.concatenate([x for i in range(20)])
# y = np.concatenate([y for i in range(20)])
print('x shape -> {}, y shape -> {}.'.format(x.shape, y.shape))
(x, y) = preprocess_input(x, y)
model = get_regress_model()
model.summary()
# model.load_weights('./weights.100-0.0137.hdf5')
model.load_weights('./Data/net-data/weights.190-0.0010.hdf5')
if not os.path.exists('./result'):
os.makedirs('./result')
y = model.predict(x)
for index, i in enumerate(y):
i *= 255
i = i.astype(np.uint8)
savename = os.path.join('./result', str(index) + '.png')
cv2.imwrite(savename, i)
if __name__ == "__main__":
# train()
test()
|
[
"numpy.load",
"os.walk",
"keras.layers.Input",
"keras.callbacks.LearningRateScheduler",
"os.path.join",
"numpy.set_printoptions",
"keras.backend.constant",
"cv2.imwrite",
"os.path.exists",
"keras.Model",
"keras.callbacks.ModelCheckpoint",
"keras.applications.xception.preprocess_input",
"keras.optimizers.Adam",
"keras.layers.Conv2DTranspose",
"re.match",
"keras.layers.Conv2D",
"numpy.savez",
"keras.layers.BatchNormalization",
"os.makedirs",
"keras.layers.Activation",
"keras.layers.Add",
"cv2.imread",
"numpy.where",
"numpy.array",
"keras.callbacks.CSVLogger",
"keras.backend.square"
] |
[((176, 212), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'threshold': '"""nan"""'}), "(threshold='nan')\n", (195, 212), True, 'import numpy as np\n'), ((4558, 4605), 'cv2.imread', 'cv2.imread', (['"""./data/uv-data/uv_weight_mask.png"""'], {}), "('./data/uv-data/uv_weight_mask.png')\n", (4568, 4605), False, 'import cv2\n'), ((4618, 4663), 'cv2.imread', 'cv2.imread', (['"""./data/uv-data/uv_face_mask.png"""'], {}), "('./data/uv-data/uv_face_mask.png')\n", (4628, 4663), False, 'import cv2\n'), ((4676, 4721), 'numpy.where', 'np.where', (['(face_mask > 0)', 'loss_mask', 'face_mask'], {}), '(face_mask > 0, loss_mask, face_mask)\n', (4684, 4721), True, 'import numpy as np\n'), ((471, 505), 'os.path.exists', 'os.path.exists', (['"""./data/train.npz"""'], {}), "('./data/train.npz')\n", (485, 505), False, 'import os\n'), ((1118, 1129), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (1126, 1129), True, 'import numpy as np\n'), ((1138, 1149), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (1146, 1149), True, 'import numpy as np\n'), ((1221, 1259), 'numpy.savez', 'np.savez', (['"""./data/train.npz"""'], {'x': 'x', 'y': 'y'}), "('./data/train.npz', x=x, y=y)\n", (1229, 1259), True, 'import numpy as np\n'), ((2409, 2448), 'keras.layers.Input', 'keras.layers.Input', ([], {'shape': '(256, 256, 3)'}), '(shape=(256, 256, 3))\n', (2427, 2448), False, 'import keras\n'), ((4293, 4314), 'keras.Model', 'keras.Model', (['input', 'x'], {}), '(input, x)\n', (4304, 4314), False, 'import keras\n'), ((4404, 4451), 'keras.applications.xception.preprocess_input', 'keras.applications.xception.preprocess_input', (['x'], {}), '(x)\n', (4448, 4451), False, 'import keras\n'), ((4844, 4865), 'keras.backend.constant', 'K.constant', (['loss_mask'], {}), '(loss_mask)\n', (4854, 4865), True, 'import keras.backend as K\n'), ((5556, 5587), 'keras.optimizers.Adam', 'keras.optimizers.Adam', ([], {'lr': '(0.001)'}), '(lr=0.001)\n', (5577, 5587), False, 'import keras\n'), ((285, 306), 'os.path.join', 'os.path.join', (['root', 'f'], {}), '(root, f)\n', (297, 306), False, 'import os\n'), ((525, 552), 'numpy.load', 'np.load', (['"""./data/train.npz"""'], {}), "('./data/train.npz')\n", (532, 552), True, 'import numpy as np\n'), ((1023, 1039), 'cv2.imread', 'cv2.imread', (['item'], {}), '(item)\n', (1033, 1039), False, 'import cv2\n'), ((1071, 1087), 'cv2.imread', 'cv2.imread', (['item'], {}), '(item)\n', (1081, 1087), False, 'import cv2\n'), ((1161, 1185), 'os.path.exists', 'os.path.exists', (['"""./data"""'], {}), "('./data')\n", (1175, 1185), False, 'import os\n'), ((1195, 1216), 'os.makedirs', 'os.makedirs', (['"""./data"""'], {}), "('./data')\n", (1206, 1216), False, 'import os\n'), ((1352, 1420), 'keras.layers.Conv2D', 'keras.layers.Conv2D', (['filters', '(1, 1)'], {'strides': '(2, 2)', 'padding': '"""same"""'}), "(filters, (1, 1), strides=(2, 2), padding='same')\n", (1371, 1420), False, 'import keras\n'), ((1448, 1543), 'keras.layers.Conv2D', 'keras.layers.Conv2D', (['(filters / 2)', '(1, 1)'], {'strides': '(1, 1)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters / 2, (1, 1), strides=(1, 1), padding='same',\n activation='relu')\n", (1467, 1543), False, 'import keras\n'), ((1560, 1655), 'keras.layers.Conv2D', 'keras.layers.Conv2D', (['(filters / 2)', '(4, 4)'], {'strides': '(2, 2)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters / 2, (4, 4), strides=(2, 2), padding='same',\n activation='relu')\n", (1579, 1655), False, 'import keras\n'), ((1672, 1740), 'keras.layers.Conv2D', 
'keras.layers.Conv2D', (['filters', '(1, 1)'], {'strides': '(1, 1)', 'padding': '"""same"""'}), "(filters, (1, 1), strides=(1, 1), padding='same')\n", (1691, 1740), False, 'import keras\n'), ((1761, 1779), 'keras.layers.Add', 'keras.layers.Add', ([], {}), '()\n', (1777, 1779), False, 'import keras\n'), ((1803, 1836), 'keras.layers.BatchNormalization', 'keras.layers.BatchNormalization', ([], {}), '()\n', (1834, 1836), False, 'import keras\n'), ((1848, 1879), 'keras.layers.Activation', 'keras.layers.Activation', (['"""relu"""'], {}), "('relu')\n", (1871, 1879), False, 'import keras\n'), ((1922, 2017), 'keras.layers.Conv2D', 'keras.layers.Conv2D', (['(filters / 2)', '(1, 1)'], {'strides': '(1, 1)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters / 2, (1, 1), strides=(1, 1), padding='same',\n activation='relu')\n", (1941, 2017), False, 'import keras\n'), ((2034, 2129), 'keras.layers.Conv2D', 'keras.layers.Conv2D', (['(filters / 2)', '(4, 4)'], {'strides': '(1, 1)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters / 2, (4, 4), strides=(1, 1), padding='same',\n activation='relu')\n", (2053, 2129), False, 'import keras\n'), ((2146, 2214), 'keras.layers.Conv2D', 'keras.layers.Conv2D', (['filters', '(1, 1)'], {'strides': '(1, 1)', 'padding': '"""same"""'}), "(filters, (1, 1), strides=(1, 1), padding='same')\n", (2165, 2214), False, 'import keras\n'), ((2235, 2253), 'keras.layers.Add', 'keras.layers.Add', ([], {}), '()\n', (2251, 2253), False, 'import keras\n'), ((2277, 2310), 'keras.layers.BatchNormalization', 'keras.layers.BatchNormalization', ([], {}), '()\n', (2308, 2310), False, 'import keras\n'), ((2322, 2353), 'keras.layers.Activation', 'keras.layers.Activation', (['"""relu"""'], {}), "('relu')\n", (2345, 2353), False, 'import keras\n'), ((2457, 2544), 'keras.layers.Conv2D', 'keras.layers.Conv2D', (['(16)', '(4, 4)'], {'strides': '(1, 1)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(16, (4, 4), strides=(1, 1), padding='same', activation=\n 'relu')\n", (2476, 2544), False, 'import keras\n'), ((2693, 2769), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(512)', '(4, 4)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(512, (4, 4), padding='same', activation='relu')\n", (2721, 2769), False, 'import keras\n'), ((2782, 2878), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(256)', '(4, 4)'], {'strides': '(2, 2)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(256, (4, 4), strides=(2, 2), padding='same',\n activation='relu')\n", (2810, 2878), False, 'import keras\n'), ((2895, 2971), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(256)', '(4, 4)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(256, (4, 4), padding='same', activation='relu')\n", (2923, 2971), False, 'import keras\n'), ((2983, 3059), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(256)', '(4, 4)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(256, (4, 4), padding='same', activation='relu')\n", (3011, 3059), False, 'import keras\n'), ((3072, 3168), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(128)', '(4, 4)'], {'strides': '(2, 2)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(128, (4, 4), strides=(2, 2), padding='same',\n activation='relu')\n", (3100, 3168), False, 'import keras\n'), ((3185, 3261), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(128)', '(4, 4)'], {'padding': '"""same"""', 'activation': 
'"""relu"""'}), "(128, (4, 4), padding='same', activation='relu')\n", (3213, 3261), False, 'import keras\n'), ((3273, 3349), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(128)', '(4, 4)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(128, (4, 4), padding='same', activation='relu')\n", (3301, 3349), False, 'import keras\n'), ((3362, 3457), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(64)', '(4, 4)'], {'strides': '(2, 2)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(64, (4, 4), strides=(2, 2), padding='same',\n activation='relu')\n", (3390, 3457), False, 'import keras\n'), ((3474, 3549), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(64)', '(4, 4)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(64, (4, 4), padding='same', activation='relu')\n", (3502, 3549), False, 'import keras\n'), ((3561, 3636), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(64)', '(4, 4)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(64, (4, 4), padding='same', activation='relu')\n", (3589, 3636), False, 'import keras\n'), ((3649, 3744), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(32)', '(4, 4)'], {'strides': '(2, 2)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(32, (4, 4), strides=(2, 2), padding='same',\n activation='relu')\n", (3677, 3744), False, 'import keras\n'), ((3761, 3836), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(32)', '(4, 4)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(32, (4, 4), padding='same', activation='relu')\n", (3789, 3836), False, 'import keras\n'), ((3849, 3944), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(16)', '(4, 4)'], {'strides': '(2, 2)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(16, (4, 4), strides=(2, 2), padding='same',\n activation='relu')\n", (3877, 3944), False, 'import keras\n'), ((3961, 4036), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(16)', '(4, 4)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(16, (4, 4), padding='same', activation='relu')\n", (3989, 4036), False, 'import keras\n'), ((4049, 4123), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(3)', '(4, 4)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(3, (4, 4), padding='same', activation='relu')\n", (4077, 4123), False, 'import keras\n'), ((4135, 4209), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(3)', '(4, 4)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(3, (4, 4), padding='same', activation='relu')\n", (4163, 4209), False, 'import keras\n'), ((4221, 4276), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(3)', '(4, 4)'], {'padding': '"""same"""'}), "(3, (4, 4), padding='same')\n", (4249, 4276), False, 'import keras\n'), ((5599, 5626), 'os.path.exists', 'os.path.exists', (['"""./weights"""'], {}), "('./weights')\n", (5613, 5626), False, 'import os\n'), ((5636, 5660), 'os.makedirs', 'os.makedirs', (['"""./weights"""'], {}), "('./weights')\n", (5647, 5660), False, 'import os\n'), ((5687, 5737), 'keras.callbacks.LearningRateScheduler', 'keras.callbacks.LearningRateScheduler', (['lr_adjustor'], {}), '(lr_adjustor)\n', (5724, 5737), False, 'import keras\n'), ((5747, 5785), 'keras.callbacks.CSVLogger', 'keras.callbacks.CSVLogger', (['"""train.log"""'], {}), "('train.log')\n", (5772, 5785), False, 'import keras\n'), ((5795, 5928), 
'keras.callbacks.ModelCheckpoint', 'keras.callbacks.ModelCheckpoint', (['"""./weights/weights.{epoch:02d}-{loss:.4f}.hdf5"""'], {'monitor': '"""loss"""', 'save_best_only': '(True)', 'period': '(10)'}), "('./weights/weights.{epoch:02d}-{loss:.4f}.hdf5'\n , monitor='loss', save_best_only=True, period=10)\n", (5826, 5928), False, 'import keras\n'), ((6540, 6566), 'os.path.exists', 'os.path.exists', (['"""./result"""'], {}), "('./result')\n", (6554, 6566), False, 'import os\n'), ((6576, 6599), 'os.makedirs', 'os.makedirs', (['"""./result"""'], {}), "('./result')\n", (6587, 6599), False, 'import os\n'), ((6780, 6804), 'cv2.imwrite', 'cv2.imwrite', (['savename', 'i'], {}), '(savename, i)\n', (6791, 6804), False, 'import cv2\n'), ((341, 359), 'os.walk', 'os.walk', (['directory'], {}), '(directory)\n', (348, 359), False, 'import os\n'), ((390, 431), 're.match', 're.match', (["('([\\\\w]+\\\\.(?:' + ext + '))')", 'f'], {}), "('([\\\\w]+\\\\.(?:' + ext + '))', f)\n", (398, 431), False, 'import re\n'), ((837, 855), 'os.path.exists', 'os.path.exists', (['ix'], {}), '(ix)\n', (851, 855), False, 'import os\n'), ((860, 878), 'os.path.exists', 'os.path.exists', (['iy'], {}), '(iy)\n', (874, 878), False, 'import os\n'), ((4891, 4916), 'keras.backend.square', 'K.square', (['(y_pred - y_true)'], {}), '(y_pred - y_true)\n', (4899, 4916), True, 'import keras.backend as K\n')]
|
import json
import socket
import urllib2
#import requests
class GvAnalyzerClient(object):
"""
GV Analyzer Client
"""
def __init__(self, gd_data):
self.base_url = "https://damp-retreat-1145.herokuapp.com/"
self.base_url = "http://127.0.0.1:5000/"
self.gd_data = gd_data
socket.setdefaulttimeout(15)
def analyze(self, gv_data):
"""Invoke analyze API of GV Analyzer"""
url = self.base_url + "gv_analyze"
gdv_data = json.dumps({"gd_data":self.gd_data, "gv_data":gv_data})
req = urllib2.Request(url)
req.add_header('Content-Type', 'application/json')
req.add_header('Accept', 'application/json')
try:
res = urllib2.urlopen(req, gdv_data)
response = json.loads(res.read())
return response
except Exception as e:
return {"Error":str(e)}
"""
#requests version:
headers = {'Accept' : 'application/json', 'Content-Type' : 'application/json'}
try:
r = requests.post(url, data = gdv_data, headers = headers)
return r.json()
except requests.exceptions.RequestException as e:
return {"Error":str(e)}
"""
"""
def gva():
url = "http://127.0.0.1:5000/analyze"
#url = "https://damp-retreat-1145.herokuapp.com/analyze"
headers = {'Accept' : 'application/json', 'Content-Type' : 'application/json'}
r = requests.post(url, data = open("event.json", "rb"), headers = headers)
print json.dumps(r.json(), indent=4)
"""
|
[
"socket.setdefaulttimeout",
"urllib2.Request",
"urllib2.urlopen",
"json.dumps"
] |
[((282, 310), 'socket.setdefaulttimeout', 'socket.setdefaulttimeout', (['(15)'], {}), '(15)\n', (306, 310), False, 'import socket\n'), ((433, 490), 'json.dumps', 'json.dumps', (["{'gd_data': self.gd_data, 'gv_data': gv_data}"], {}), "({'gd_data': self.gd_data, 'gv_data': gv_data})\n", (443, 490), False, 'import json\n'), ((497, 517), 'urllib2.Request', 'urllib2.Request', (['url'], {}), '(url)\n', (512, 517), False, 'import urllib2\n'), ((634, 664), 'urllib2.urlopen', 'urllib2.urlopen', (['req', 'gdv_data'], {}), '(req, gdv_data)\n', (649, 664), False, 'import urllib2\n')]
|
# coding=utf-8
from OTLMOW.OTLModel.Datatypes.KeuzelijstField import KeuzelijstField
from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde
# Generated with OTLEnumerationCreator. To modify: extend, do not edit
class KlVerlichtingstoestelVerlichtGebied(KeuzelijstField):
"""Het gebied op de wegbaan of het object dat verlicht wordt door het verlichtingstoestel."""
naam = 'KlVerlichtingstoestelVerlichtGebied'
label = 'Verlichtingstoestel verlicht gebied.'
objectUri = 'https://wegenenverkeer.data.vlaanderen.be/ns/abstracten#KlVerlichtingstoestelVerlichtGebied'
definition = 'Het gebied op de wegbaan of het object dat verlicht wordt door het verlichtingstoestel.'
codelist = 'https://wegenenverkeer.data.vlaanderen.be/id/conceptscheme/KlVerlichtingstoestelVerlichtGebied'
options = {
'afrit': KeuzelijstWaarde(invulwaarde='afrit',
label='afrit',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/afrit'),
'bebakening': KeuzelijstWaarde(invulwaarde='bebakening',
label='bebakening',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/bebakening'),
'doorlopende-straatverlichting': KeuzelijstWaarde(invulwaarde='doorlopende-straatverlichting',
label='doorlopende straatverlichting',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/doorlopende-straatverlichting'),
'fietspad': KeuzelijstWaarde(invulwaarde='fietspad',
label='fietspad',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/fietspad'),
'hoofdweg': KeuzelijstWaarde(invulwaarde='hoofdweg',
label='hoofdweg',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/hoofdweg'),
'kruispunt': KeuzelijstWaarde(invulwaarde='kruispunt',
label='kruispunt',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/kruispunt'),
'monument': KeuzelijstWaarde(invulwaarde='monument',
label='monument',
definitie='Alle niet-functie verlichting, dus alle verlichting die nodig is om je weg te vinden.Verlichting voor artistieke creaties op (bv. rotonde) of rond de openbare weg (bv. ecoduct dat onderaan een schilderij heeft) of voor artistieke belichting (niet verlichting) te geven, bv een hangbrug waarbij de kabels aangelicht worden. Somskan dit ook zijn voor het aanlichten of belichten van gebouwen.',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/monument'),
'onderdoorgang': KeuzelijstWaarde(invulwaarde='onderdoorgang',
label='onderdoorgang',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/onderdoorgang'),
'oprit': KeuzelijstWaarde(invulwaarde='oprit',
label='oprit',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/oprit'),
'parking': KeuzelijstWaarde(invulwaarde='parking',
label='parking',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/parking'),
'projector': KeuzelijstWaarde(invulwaarde='projector',
label='projector',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/projector'),
'punctuele-verlichting': KeuzelijstWaarde(invulwaarde='punctuele-verlichting',
label='punctuele verlichting',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/punctuele-verlichting'),
'rotonde': KeuzelijstWaarde(invulwaarde='rotonde',
label='rotonde',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/rotonde'),
'tunnelverlichting': KeuzelijstWaarde(invulwaarde='tunnelverlichting',
label='tunnelverlichting',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/tunnelverlichting'),
'wisselaar': KeuzelijstWaarde(invulwaarde='wisselaar',
label='wisselaar',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/wisselaar')
}
|
[
"OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde"
] |
[((850, 1020), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""afrit"""', 'label': '"""afrit"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/afrit"""'}), "(invulwaarde='afrit', label='afrit', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/afrit'\n )\n", (866, 1020), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((1102, 1287), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""bebakening"""', 'label': '"""bebakening"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/bebakening"""'}), "(invulwaarde='bebakening', label='bebakening', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/bebakening'\n )\n", (1118, 1287), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((1398, 1645), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""doorlopende-straatverlichting"""', 'label': '"""doorlopende straatverlichting"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/doorlopende-straatverlichting"""'}), "(invulwaarde='doorlopende-straatverlichting', label=\n 'doorlopende straatverlichting', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/doorlopende-straatverlichting'\n )\n", (1414, 1645), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((1768, 1947), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""fietspad"""', 'label': '"""fietspad"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/fietspad"""'}), "(invulwaarde='fietspad', label='fietspad', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/fietspad'\n )\n", (1784, 1947), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((2033, 2212), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""hoofdweg"""', 'label': '"""hoofdweg"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/hoofdweg"""'}), "(invulwaarde='hoofdweg', label='hoofdweg', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/hoofdweg'\n )\n", (2049, 2212), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((2299, 2481), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""kruispunt"""', 'label': '"""kruispunt"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/kruispunt"""'}), "(invulwaarde='kruispunt', label='kruispunt', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/kruispunt'\n )\n", (2315, 2481), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((2569, 3162), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': 
'"""monument"""', 'label': '"""monument"""', 'definitie': '"""Alle niet-functie verlichting, dus alle verlichting die nodig is om je weg te vinden.Verlichting voor artistieke creaties op (bv. rotonde) of rond de openbare weg (bv. ecoduct dat onderaan een schilderij heeft) of voor artistieke belichting (niet verlichting) te geven, bv een hangbrug waarbij de kabels aangelicht worden. Somskan dit ook zijn voor het aanlichten of belichten van gebouwen."""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/monument"""'}), "(invulwaarde='monument', label='monument', definitie=\n 'Alle niet-functie verlichting, dus alle verlichting die nodig is om je weg te vinden.Verlichting voor artistieke creaties op (bv. rotonde) of rond de openbare weg (bv. ecoduct dat onderaan een schilderij heeft) of voor artistieke belichting (niet verlichting) te geven, bv een hangbrug waarbij de kabels aangelicht worden. Somskan dit ook zijn voor het aanlichten of belichten van gebouwen.'\n , objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/monument'\n )\n", (2585, 3162), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((3280, 3478), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""onderdoorgang"""', 'label': '"""onderdoorgang"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/onderdoorgang"""'}), "(invulwaarde='onderdoorgang', label='onderdoorgang',\n objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/onderdoorgang'\n )\n", (3296, 3478), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((3567, 3737), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""oprit"""', 'label': '"""oprit"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/oprit"""'}), "(invulwaarde='oprit', label='oprit', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/oprit'\n )\n", (3583, 3737), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((3816, 3992), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""parking"""', 'label': '"""parking"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/parking"""'}), "(invulwaarde='parking', label='parking', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/parking'\n )\n", (3832, 3992), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((4077, 4259), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""projector"""', 'label': '"""projector"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/projector"""'}), "(invulwaarde='projector', label='projector', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/projector'\n )\n", (4093, 4259), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((4360, 4583), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 
'KeuzelijstWaarde', ([], {'invulwaarde': '"""punctuele-verlichting"""', 'label': '"""punctuele verlichting"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/punctuele-verlichting"""'}), "(invulwaarde='punctuele-verlichting', label=\n 'punctuele verlichting', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/punctuele-verlichting'\n )\n", (4376, 4583), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((4689, 4865), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""rotonde"""', 'label': '"""rotonde"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/rotonde"""'}), "(invulwaarde='rotonde', label='rotonde', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/rotonde'\n )\n", (4705, 4865), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((4958, 5168), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""tunnelverlichting"""', 'label': '"""tunnelverlichting"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/tunnelverlichting"""'}), "(invulwaarde='tunnelverlichting', label='tunnelverlichting',\n objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/tunnelverlichting'\n )\n", (4974, 5168), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((5269, 5451), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""wisselaar"""', 'label': '"""wisselaar"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/wisselaar"""'}), "(invulwaarde='wisselaar', label='wisselaar', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/wisselaar'\n )\n", (5285, 5451), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n')]
|
import re
import neovim
import enum
import json
try:
import psutil
except ImportError:
psutil = None
def isNumber(x):
return x in '1234567890'
class Result(enum.Enum):
BY_PASS = 1
HANDLED = 2
UNHANDLED = 3
def is_shell(name):
for i in ['fish', 'bash', 'csh', 'zsh']:
if i in name:
return True
return False
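# Editor's note: is_shell matches by substring, so a Neovim terminal buffer
# name such as "term://.//1234:/usr/bin/fish" qualifies:
#   is_shell('term://.//1234:/usr/bin/fish')  # -> True
#   is_shell('python3')                       # -> False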
@neovim.plugin
class MultiTerm(object):
def __init__(self, nvim):
self.nvim = nvim
self.data = {}
self.name_map = {}
self.last_term_job_id = None
self.last_command = ''
self.name_list = ['one', 'two', 'three', 'four', 'five', 'six',
'seven', 'eight', 'nine', 'ten']
self.name_index = 0
self.browser = self.nvim.eval("expand('$BROWSER')")
if self.browser == '$BROWSER':
self.browser = 'w3m'
def get_command_map(self):
try:
command_map_history = self.nvim.eval('g:MultiTerm_Map')
command_map = json.loads(command_map_history)
except Exception as e:
self.echo(e)
command_map = {}
return command_map
def write_text(self, job_id, data):
self.nvim.call('jobsend', int(job_id), data)
def run(self, job_id, cmd):
self.last_command = cmd.strip()
self.write_text(job_id, cmd)
def run_in_all_terminal(self, cmd):
for i in self.data:
self.run(int(self.data[i]), cmd)
def echo(self, data):
self.nvim.command('echo "%s"' % data)
def replace_args(self, args):
for i in range(len(args)):
val = args[i]
if val == '!':
args[i] = self.last_command
elif val == '!l':
args[i] = self.nvim.current.line.strip()
elif val == '!w':
self.nvim.command('normal! viw"*y')
args[i] = self.nvim.eval('@*')
elif val == '!!':
shell = self.nvim.eval('&shell')
if 'fish' in shell:
args[i] = 'eval $history[1]'
elif 'zsh' in shell:
args[i] = '!!\n'
else:
args[i] = '!!'
elif len(val) == 2 and val[0] == '@':
args[i] = self.nvim.eval('@' + val[1])
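    # Editor's summary of the substitutions performed by replace_args above:
    #   !   -> the last command sent via this plugin
    #   !l  -> the current line
    #   !w  -> the word under the cursor (yanked through the * register)
    #   !!  -> the shell's own "repeat last command" (fish/zsh aware)
    #   @x  -> the contents of Vim register x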
def subcommand_a(self, arg0, args, range):
'''
Run the command in all terminal.
'''
cmd = ' '.join(args[1:]) + '\n'
self.run_in_all_terminal(cmd)
return Result.HANDLED
def subcommand_s(self, arg0, args, range):
'''
Store the command in the command_map.
'''
if len(arg0) == 2 and arg0[0] == 's' and isNumber(arg0[1]):
cmd = ' '.join(args[1:]) + '\n'
command_map = self.get_command_map()
command_map[arg0[1]] = cmd
self.nvim.command("let g:MultiTerm_Map='%s'" % json.dumps(command_map))
return Result.HANDLED
return Result.UNHANDLED
def subcommand_r(self, arg0, args, range):
'''
Run the command stored in command_map.
'''
command_map = self.get_command_map()
        if arg0[0] == 'r' and len(arg0) == 1:
            # C r 1 : run command 1, read from the next argument. (The
            # original indexed arg0[1] here, which raises IndexError when
            # len(arg0) == 1; args[1] is the likely intent.)
            self.echo(arg0)
            if len(args) < 2:
                return Result.UNHANDLED
            cmd = command_map.get(args[1], '')
            self.run(self.last_term_job_id, cmd)
            return Result.HANDLED
        elif arg0[0] == 'r' and len(arg0) == 2 and isNumber(arg0[1]):
            # C r1 : run command 1 stored in command map.
self.echo(arg0)
cmd = command_map.get(arg0[1], '')
self.run(self.last_term_job_id, cmd)
return Result.HANDLED
return Result.UNHANDLED
def subcommand_n(self, arg0, args, range):
'''
Name the terminal.
'''
if arg0 in ['n', 'N'] and len(args) > 1:
if len(args) == 2:
try:
filename = self.nvim.eval("expand('%:p')").split('#')[0].strip()
job_id = self.nvim.eval('expand(b:terminal_job_id)')
self.name_map[job_id] = args[1]
self.nvim.command("keepalt file %s \#%s" % (filename, args[1]))
except:
self.name_map[self.last_term_job_id] = args[1]
return Result.HANDLED
elif len(args) > 2:
self.name_map[args[2]] = args[1]
return Result.HANDLED
return Result.UNHANDLED
def subcommand_g(self, arg0, args, range):
'''
Go to the terminal.
'''
name_or_id = args[1]
inv_name_map = {v: k for k, v in self.name_map.items()}
inv_data_map = {v: k for k, v in self.data.items()}
r = inv_name_map.get(name_or_id, None)
if r is None:
r = name_or_id
r = inv_data_map.get(r, None)
if r is None:
self.echo("Terminal not found")
return Result.BY_PASS
self.nvim.command("buffer %s" % r)
return Result.HANDLED
def subcommand_w(self, arg0, args, range):
'''
Run w3m browser in the w3m terminal buffer.
'''
if psutil is None:
return Result.BY_PASS
inv_name_map = {v: k for k, v in self.name_map.items()}
if inv_name_map.get('w3m', None) is None:
self.nvim.command("terminal")
self.nvim.command("C n w3m")
url = ' '.join(args[1:]) + '\n'
self.kill_and_run('w3m', '%s %s' % (self.browser, url))
self.nvim.command("normal! i")
return Result.HANDLED
def subcommand_k(self, arg0, args, range):
'''
Kill and run command in terminal.
'''
if psutil is None:
return Result.BY_PASS
name_list = args[1].split(',')
if len(name_list) < 2:
return
cmd = ' '.join(args[2:]) + '\n'
for i in name_list:
if i == '':
continue
self.kill_and_run(i, cmd)
self.nvim.command("normal! G")
return Result.HANDLED
def kill_and_run(self, name, command):
inv_name_map = {v: k for k, v in self.name_map.items()}
inv_data_map = {v: k for k, v in self.data.items()}
job_id = inv_name_map.get(name, None)
if job_id is None:
self.nvim.command("terminal")
self.nvim.command("C n %s" % name)
inv_name_map = {v: k for k, v in self.name_map.items()}
inv_data_map = {v: k for k, v in self.data.items()}
job_id = inv_name_map.get(name, None)
if job_id is None:
self.echo("terminal not found")
return
file_name = inv_data_map[job_id]
self.nvim.command("buffer %s" % file_name)
pid = file_name.split(':')[1].split('/')[-1]
p = psutil.Process(pid=int(pid, 10))
childrens = p.children()
for i in childrens:
i.kill()
self.run(job_id, command)
return Result.HANDLED
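    # Editor's note: Neovim names terminal buffers roughly as
    # "term://{cwd}//{pid}:{command}", which is why kill_and_run can recover
    # the shell's pid from the buffer name before killing its children.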
def subcommand_l(self, arg0, args, range):
'''
List all terminal.
'''
if len(arg0) > 1:
return Result.UNHANDLED
text = ''
for i in self.data:
job_id = self.data[i]
text += '%s => %s, %s\n' % (job_id, i,
self.name_map.get(job_id, ''))
        try:
            job_id = self.nvim.eval('expand(b:terminal_job_id)')
        except:
            job_id = None  # not inside a terminal buffer
        text += 'current job_id=%s, name=%s' % (job_id,
                                                self.name_map.get(job_id, ''))
self.echo(text)
return Result.HANDLED
def subcommand_empty(self, arg0, args, range):
return Result.UNHANDLED
@neovim.command("C", range='', nargs='*', sync=True)
def command(self, args, range):
if len(args) < 1:
return
if self.last_term_job_id is None:
self.nvim.command("split")
self.nvim.command("wincmd j")
self.nvim.command("terminal")
self.replace_args(args)
function_map = {
'a': self.subcommand_a,
'g': self.subcommand_g,
'l': self.subcommand_l,
'n': self.subcommand_n,
'r': self.subcommand_r,
's': self.subcommand_s,
'w': self.subcommand_w,
'k': self.subcommand_k,
}
arg0 = args[0]
result = function_map.get(arg0[0],
self.subcommand_empty)(arg0, args, range)
if result == Result.BY_PASS or result == Result.HANDLED:
return
if re.match(r'(\d+,)*\d+', arg0):
# C 1, 3 ls : run ls in terminal 1, terminal 3.
cmd = ' '.join(args[1:]) + '\n'
for i in arg0.split(','):
self.run(i, cmd)
elif re.match(r'(\w+,)+', arg0):
cmd = ' '.join(args[1:]) + '\n'
name_list = arg0.split(',')
inv_name_map = {v: k for k, v in self.name_map.items()}
ever_run = False
for name in name_list:
job_id = inv_name_map.get(name, None)
if job_id is None:
continue
self.run(job_id, cmd)
ever_run = True
if ever_run is False:
self.run(self.last_term_job_id, cmd)
else:
cmd = ' '.join(args[:]) + '\n'
self.run(self.last_term_job_id, cmd)
@neovim.autocmd('TermOpen', eval='expand("<afile>")', sync=True,
pattern='*sh*')
def on_termopen(self, filename):
if not is_shell(filename):
return
lst = filename.split('#')
filename = lst[0]
job_id = self.nvim.eval('expand(b:terminal_job_id)')
self.data[filename] = job_id
self.last_term_job_id = job_id
if len(lst) > 1:
terminal_name = lst[-1]
self.name_map[job_id] = terminal_name
try:
index = self.name_list.index(terminal_name)
del self.name_list[index]
if index < self.name_index:
self.name_index -= 1
except ValueError:
pass
return
if self.name_index < len(self.name_list):
name = self.name_list[self.name_index]
self.name_map[job_id] = name
self.nvim.command("keepalt file %s \#%s" % (filename, name))
self.name_index += 1
@neovim.autocmd('BufWinEnter', eval='expand("%:p")', sync=False,
pattern='*sh*')
def on_buffer_win_enter(self, filename):
try:
job_id = self.nvim.eval('expand(b:terminal_job_id)')
if self.name_map.get(job_id, '') != 'w3m':
self.last_term_job_id = job_id
except:
pass
@neovim.autocmd('BufEnter', eval='expand("%:p")', sync=False,
pattern='*sh*')
def on_buffer_enter(self, filename):
if psutil is None:
return
try:
pid = filename.split('/')[-1].split(':')[0]
p = psutil.Process(pid=int(pid, 10))
childrens = p.children()
if len(childrens) > 0 and childrens[0].name() == 'w3m':
self.nvim.command("normal! g")
self.nvim.command("normal! i")
except:
pass
|
[
"json.loads",
"re.match",
"json.dumps",
"neovim.autocmd",
"neovim.command"
] |
[((7722, 7773), 'neovim.command', 'neovim.command', (['"""C"""'], {'range': '""""""', 'nargs': '"""*"""', 'sync': '(True)'}), "('C', range='', nargs='*', sync=True)\n", (7736, 7773), False, 'import neovim\n'), ((9469, 9548), 'neovim.autocmd', 'neovim.autocmd', (['"""TermOpen"""'], {'eval': '"""expand("<afile>")"""', 'sync': '(True)', 'pattern': '"""*sh*"""'}), '(\'TermOpen\', eval=\'expand("<afile>")\', sync=True, pattern=\'*sh*\')\n', (9483, 9548), False, 'import neovim\n'), ((10497, 10576), 'neovim.autocmd', 'neovim.autocmd', (['"""BufWinEnter"""'], {'eval': '"""expand("%:p")"""', 'sync': '(False)', 'pattern': '"""*sh*"""'}), '(\'BufWinEnter\', eval=\'expand("%:p")\', sync=False, pattern=\'*sh*\')\n', (10511, 10576), False, 'import neovim\n'), ((10861, 10937), 'neovim.autocmd', 'neovim.autocmd', (['"""BufEnter"""'], {'eval': '"""expand("%:p")"""', 'sync': '(False)', 'pattern': '"""*sh*"""'}), '(\'BufEnter\', eval=\'expand("%:p")\', sync=False, pattern=\'*sh*\')\n', (10875, 10937), False, 'import neovim\n'), ((8619, 8649), 're.match', 're.match', (['"""(\\\\d+,)*\\\\d+"""', 'arg0'], {}), "('(\\\\d+,)*\\\\d+', arg0)\n", (8627, 8649), False, 'import re\n'), ((1010, 1041), 'json.loads', 'json.loads', (['command_map_history'], {}), '(command_map_history)\n', (1020, 1041), False, 'import json\n'), ((8838, 8864), 're.match', 're.match', (['"""(\\\\w+,)+"""', 'arg0'], {}), "('(\\\\w+,)+', arg0)\n", (8846, 8864), False, 'import re\n'), ((2928, 2951), 'json.dumps', 'json.dumps', (['command_map'], {}), '(command_map)\n', (2938, 2951), False, 'import json\n')]
|
"""Install instructions for non-packaged java programs.
"""
import os
from fabric.api import *
from fabric.contrib.files import *
from shared import _if_not_installed
@_if_not_installed("cljr")
def install_cljr(env):
"""Install the clojure package manager cljr
http://github.com/liebke/cljr
"""
run("wget http://incanter.org/downloads/cljr-installer.jar")
run("java -jar cljr-installer.jar")
env.safe_sudo("ln -s .cljr/bin/cljr /usr/bin")
run("rm cljr-installer.jar")
@_if_not_installed("lein")
def install_leiningen(env):
"""Standard clojure build tool: http://github.com/technomancy/leiningen
"""
run("wget --no-check-certificate https://github.com/technomancy/leiningen/raw/stable/bin/lein")
run("chmod a+rwx lein")
env.safe_sudo("mv lein %s" % os.path.join(env.system_install, "bin"))
run("lein self-install")
|
[
"shared._if_not_installed",
"os.path.join"
] |
[((171, 196), 'shared._if_not_installed', '_if_not_installed', (['"""cljr"""'], {}), "('cljr')\n", (188, 196), False, 'from shared import _if_not_installed\n'), ((502, 527), 'shared._if_not_installed', '_if_not_installed', (['"""lein"""'], {}), "('lein')\n", (519, 527), False, 'from shared import _if_not_installed\n'), ((801, 840), 'os.path.join', 'os.path.join', (['env.system_install', '"""bin"""'], {}), "(env.system_install, 'bin')\n", (813, 840), False, 'import os\n')]
|
"""
LC 438
Given a string and a pattern, find all of the pattern in the given string.
Every anagram is a permutation of a string. As we know, when we are not allowed to repeat characters while finding permutations of a string, we get N! permutations (or anagrams) of a string having N characters. For example, here are the six anagrams of the string “abc”:
abc
acb
bac
bca
cab
cba
Write a function to return a list of starting indices of the anagrams of the pattern in the given string.
Example 1:
Input: String="ppqp", Pattern="pq"
Output: [1, 2]
Explanation: The two anagrams of the pattern in the given string are "pq" and "qp".
Example 2:
Input: String="abbcabc", Pattern="abc"
Output: [2, 3, 4]
Explanation: The three anagrams of the pattern in the given string are "bca", "cab", and "abc".
"""
from collections import defaultdict
class Solution:
def findAnagrams(self, str1: str, pattern: str):
cnt = defaultdict(int)
for c in pattern:
cnt[c] += 1
chars = set(cnt.keys())
ans = []
for i, c in enumerate(str1):
if i >= len(pattern):
self.add_c(cnt, chars, str1[i - len(pattern)])
self.rm_c(cnt, chars, c)
if not cnt:
ans.append(i - len(pattern) + 1)
return ans
def add_c(self, cnt, chars, c):
if c in chars:
cnt[c] += 1
if cnt[c] == 0:
del cnt[c]
def rm_c(self, cnt, chars, c):
if c in chars:
cnt[c] -= 1
if cnt[c] == 0:
del cnt[c]
"""
Time O(N + M)
Space O(N): space to store result
"""
|
[
"collections.defaultdict"
] |
[((929, 945), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (940, 945), False, 'from collections import defaultdict\n')]
|
import logging, sys, os, ldap, time, yaml
from ns1 import NS1, Config
from ns1.rest.errors import ResourceException, RateLimitException, AuthException
from flask import Flask, json, g, request, make_response, jsonify
from flask.logging import create_logger
from flask_cors import CORS, cross_origin
from flask_jwt import JWT, jwt_required, current_identity
from datetime import datetime, timedelta
from vmware.vapi.vmc.client import create_vmc_client
from com.vmware.nsx_vmc_app_client_for_vmc import create_nsx_vmc_app_client_for_vmc
from com.vmware.nsx_policy_client_for_vmc import create_nsx_policy_client_for_vmc
from com.vmware.nsx_vmc_app.model_client import PublicIp
from com.vmware.nsx_policy.model_client import PolicyNatRule
from vmware.vapi.bindings.struct import PrettyPrinter as NsxPrettyPrinter
from com.vmware.nsx_policy.model_client import ApiError
# Import config settings from yaml file
yaml_file = open("e:\\GitHub\\EUC-Lab-Portal-Python\\api_server\\config.yaml", 'r')
yaml_dict = yaml.load(yaml_file, Loader=yaml.FullLoader)
# Logging Settings
log_format = ('[%(asctime)s] %(levelname)-8s %(name)-12s %(message)s')
logging.basicConfig(
filename=yaml_dict['LogFilepath'],
level=logging.ERROR,
format=log_format
)
# Flask app config settings
app = Flask(__name__)
app.config['CORS_HEADERS'] = 'Content-Type'
app.config['JWT_AUTH_HEADER_PREFIX'] = 'Bearer'
app.config['JWT_EXPIRATION_DELTA'] = timedelta(seconds=yaml_dict['JwtTimeoutInSeconds'])
app.config['SECRET_KEY'] = yaml_dict['JwtKey']
CORS(app)
# LDAP
LDAP_CONNECTION_STRING = yaml_dict['LdapConnectionString']
LDAP_PROTOCOL_VERSION = yaml_dict['LdapProtocolVersion']
# NS1 DNS config settings
API_KEY_VALUE = yaml_dict['DnsApiKey']
EUCLABNET_ZONE_NAME = yaml_dict['DnsZones'][0]
PSOLABNET_ZONE_NAME = yaml_dict['DnsZones'][1]
config = Config()
config.createFromAPIKey(API_KEY_VALUE)
api = NS1(config=config)
# VMC
VMC_CSP_REFRESH_TOKEN = yaml_dict['VmcCspRefreshToken']
VMC_CSP_AUTH_URL = yaml_dict['VmcCspAuthUrl'] + "?refresh_token=" + VMC_CSP_REFRESH_TOKEN
VMC_ORG = yaml_dict['VmcOrg']
VMC_ORG_ID = yaml_dict['VmcOrgId']
VMC_SDDC = yaml_dict['VmcSddc']
VMC_SDDC_ID = yaml_dict['VmcSddcId']
NSX_VMC_AWS_API_BASE_URL = yaml_dict['NsxVmxAwsApiBaseUrl']
# format NSXT objects for readability
nsx_pp = NsxPrettyPrinter()
@app.route("/")
def home():
localtime = time.asctime( time.localtime(time.time()) )
logging.info("Server is running.")
return {"Status": "Running", "DateTime": localtime}
@app.route("/dns", methods=["GET"])
@jwt_required()
def get_dns_records():
try:
psolabnet_zone = api.loadZone(PSOLABNET_ZONE_NAME)
euclabnet_zone = api.loadZone(EUCLABNET_ZONE_NAME)
all_zone_records = {
psolabnet_zone.zone: psolabnet_zone.data["records"],
euclabnet_zone.zone: euclabnet_zone.data["records"]
}
all_zone_records_json = json.dumps(all_zone_records)
return all_zone_records_json
except Exception as ex:
logging.error("Exception: " + ex)
response = make_response({"message": "No action was taken."}, 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/dns", methods=["PUT"])
@jwt_required()
def create_dns_record():
data = request.get_json()
logging.info("Creating DNS record " + data['zone'])
zone = api.loadZone(data['zone'])
response = make_response({"message": "No action was taken."}, 500)
try:
if data['type'] == 'A':
ns1Record = zone.add_A(data['domain'], data['answers'])
json = jsonify(ns1Record.data)
response = make_response(ns1Record.data, json.status_code)
elif data['type'] == 'AAAA':
ns1Record = zone.add_AAAA(data['domain'], data['answers'])
json = jsonify(ns1Record.data)
response = make_response(ns1Record.data, json.status_code)
elif data['type'] == 'CNAME':
ns1Record = zone.add_CNAME(data['domain'], data['answers'])
json = jsonify(ns1Record.data)
response = make_response(ns1Record.data, json.status_code)
elif data['type'] == 'MX':
mx_answers_list = data['answers'].replace(" ","").split(",")
if len(mx_answers_list) == 2:
ns1Record = zone.add_MX(data['domain'], [[int(mx_answers_list[0]), mx_answers_list[1]]])
json = jsonify(ns1Record.data)
response = make_response(ns1Record.data, json.status_code)
elif len(mx_answers_list) == 4:
ns1Record = zone.add_MX(data['domain'], [[int(mx_answers_list[0]), mx_answers_list[1]], [int(mx_answers_list[2]), mx_answers_list[3]]])
json = jsonify(ns1Record.data)
response = make_response(ns1Record.data, json.status_code)
else:
response = make_response({"message": "Unable to create MX record due to issue parsing the answers list => " + data['answers']}, 400)
elif data['type'] == 'TXT':
ns1Record = zone.add_TXT(data['domain'], data['answers'])
json = jsonify(ns1Record.data)
response = make_response(ns1Record.data, json.status_code)
else:
logging.warn("Unknown record type: " + data['type'])
response = make_response({"message": "Unable to create DNS record due to unknown record type " + data['type']}, 400)
except ResourceException as re:
response = make_response(re.response.text, re.response.status_code)
logging.error("ResourceException: " + re)
except Exception as ex:
logging.error("Exception: " + ex)
response = make_response({"message": "An error occurred when trying to create a DNS record."}, 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/dns", methods=["POST"])
@jwt_required()
def update_dns_record():
data = request.get_json()
zone = api.loadZone(data['zone'])
rec = zone.loadRecord(data['domain'], data['type'])
# Modify the record with the new values
logging.info("Updating DNS record: " + rec.domain)
response = make_response({"message": "No action was taken."}, 500)
try:
ns1Record = rec.update(answers=[data['answers']])
json = jsonify(ns1Record.data)
response = make_response(ns1Record.data, json.status_code)
except ResourceException as re:
response = make_response(re.response.text, re.response.status_code)
logging.error("ResourceException: " + ex)
except Exception as ex:
logging.error("Exception: " + ex)
error_message = "Something unexpected occurred when updating " + rec.domain
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/dns/delete", methods=["POST"])
@jwt_required()
def delete_dns_record():
response = make_response({"message": "No action was taken."}, 500)
try:
data = request.get_json()
zone = api.loadZone(data['zone'])
rec = zone.loadRecord(data['domain'], data['type'])
print("Deleting DNS record: " + rec.domain)
response = rec.delete()
if response:
error_message = "Something unexpected occurred when deleting " + rec.domain
print(error_message)
response = make_response(jsonify({"message": error_message}), 500)
else:
print("Deleted " + rec.domain + " successfully.")
response = make_response(jsonify({"message": "Deleted " + rec.domain + " successfully."}))
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when deleting DNS record IP " + data['domain']
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/publicips", methods=["GET"])
@jwt_required()
def get_vmc_public_ips():
response = make_response({"message": "No action was taken."}, 500)
try:
nsx_vmc_client = create_nsx_vmc_app_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
response = make_response(nsx_vmc_client.infra.PublicIps.list().to_json(), 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when getting list of leased IP addresses."
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/publicips", methods=["POST"])
@jwt_required()
def request_new_vmc_public_ip():
response = make_response({"message": "No action was taken."}, 500)
try:
nsx_vmc_client = create_nsx_vmc_app_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
data = request.get_json()
public_ip = PublicIp(display_name=data['display_name'])
response = make_response(nsx_vmc_client.infra.PublicIps.update(data['display_name'], public_ip).to_json(), 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when requesting IP " + data['display_name']
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/publicips", methods=["PATCH"])
@jwt_required()
def update_vmc_public_ip():
nsx_vmc_client = create_nsx_vmc_app_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
data = request.get_json()
public_ip = PublicIp(display_name=data['display_name'], ip=data['ip'], id=data['id'])
response = make_response(nsx_vmc_client.infra.PublicIps.update(data['display_name'], public_ip).to_json(), 200)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/publicips", methods=["PUT"])
@jwt_required()
def delete_new_vmc_public_ip():
response = make_response({"message": "No action was taken."}, 500)
try:
data = request.get_json()
# Ensure IP is not being used in a NAT Rule before attempting delete
nsx_policy_client = create_nsx_policy_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
nat = nsx_policy_client.infra.tier_1s.nat.NatRules.list('cgw', 'USER')
for nat_rule in nat.results:
if nat_rule.translated_network == data['ip']:
response = make_response({"message": "The IP is being used by NAT rule " + nat_rule.display_name + ". Delete NAT rule before continuing." }, 409)
response.headers["Content-Type"] = "application/json"
return response
# Proceed to delete
nsx_vmc_client = create_nsx_vmc_app_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
response = nsx_vmc_client.infra.PublicIps.delete(data['display_name']) # None value returned on successful delete
response = make_response()
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when releasing IP " + data['ip']
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/natrules", methods=['GET'])
@jwt_required()
def get_nat_rules():
response = make_response({"message": "No action was taken."}, 500)
try:
nsx_policy_client = create_nsx_policy_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
nat = nsx_policy_client.infra.tier_1s.nat.NatRules.list('cgw', 'USER')
response = make_response(nat.to_json(), 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when getting NAT rules. Exception: " + ex
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/natrules", methods=['POST'])
@jwt_required()
def create_nat_rule():
data = request.get_json()
response = make_response({"message": "No action was taken."}, 500)
try:
nat_obj = PolicyNatRule(action = 'REFLEXIVE',
scope = ['/infra/labels/cgw-public'],
source_network = data['source_network'],
translated_network = data['translated_network'],
display_name = data['display_name'],
sequence_number = 1,
firewall_match = 'MATCH_INTERNAL_ADDRESS')
# patch() method is void
nsx_policy_client = create_nsx_policy_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
nsx_policy_client.infra.tier_1s.nat.NatRules.patch('cgw', 'USER', data['display_name'], nat_obj)
response = make_response(nat_obj.to_json(), 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when creating NAT rule " + data['display_name'] + ". Exception: " + ex
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/natrules", methods=['PUT'])
@jwt_required()
def delete_nat_rule():
data = request.get_json()
response = make_response({"message": "No action was taken."}, 500)
try:
nsx_policy_client = create_nsx_policy_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
nsx_policy_client.infra.tier_1s.nat.NatRules.delete('cgw', 'USER', data['display_name'])
response = make_response({"message": "Successfully deleted NAT rule " + data['display_name']}, 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when deleting NAT rule " + data['display_name'] + ". Exception: " + ex
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/cgwrules", methods=['GET'])
@jwt_required()
def get_cgw_rules():
response = make_response({"message": "No action was taken."}, 500)
try:
nsx_policy_client = create_nsx_policy_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
cgw_object = nsx_policy_client.infra.domains.GatewayPolicies.get('cgw', 'default')
security_groups = nsx_policy_client.infra.domains.Groups.list('cgw').results
services = nsx_policy_client.infra.Services.list()
# Replace destination group ID, source group ID, and service ID with display name
for cgw in cgw_object.rules:
new_dest_list = []
for dest_group in cgw.destination_groups:
if dest_group != 'ANY':
for sec_group in security_groups:
if sec_group.id == dest_group.split('/')[-1]:
new_dest_list.append(sec_group.display_name)
if len(new_dest_list) > 0:
cgw.destination_groups = new_dest_list
new_source_list = []
for source_group in cgw.source_groups:
if source_group != 'ANY':
for sec_group in security_groups:
if sec_group.id == source_group.split('/')[-1]:
new_source_list.append(sec_group.display_name)
if len(new_source_list) > 0:
cgw.source_groups = new_source_list
new_service_list = []
for cgw_service in cgw.services:
if cgw_service != 'ANY':
for service in services.results:
if service.id == cgw_service.split('/')[-1]:
new_service_list.append(service.display_name)
if len(new_service_list) > 0:
cgw.services = new_service_list
new_scope_list = []
for scope in cgw.scope:
new_scope_list.append(scope.split('/')[-1])
if len(new_scope_list) > 0:
cgw.scope = new_scope_list
response = make_response(cgw_object.to_json(), 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when getting CGW rules. Exception: " + ex
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/cgwrules", methods=['PUT'])
@jwt_required()
def delete_cgw_rule():
data = request.get_json()
response = make_response({"message": "No action was taken."}, 500)
try:
nsx_policy_client = create_nsx_policy_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
nsx_policy_client.infra.domains.gateway_policies.Rules.delete('cgw', 'default', data['display_name'])
response = make_response({"message": "Successfully deleted CGW rule " + data['display_name']}, 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when deleting CGW rule " + data['display_name'] + ". Exception: " + ex
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
def log_error(ex):
"""
Generic error logger that will use NSXT API Error message decoders for
more descriptive information on errors
"""
    if not hasattr(ex, 'data'):
        # Non-NSX exceptions (NS1, LDAP, etc.) carry no API error payload to decode.
        return
    api_error = ex.data.convert_to(ApiError)
print("Error configuring {}".format(api_error.error_message))
print("{}".format(api_error.__dict__))
print("{}".format(api_error.details))
logging.error("Error configuring {}".format(api_error.error_message))
logging.error("{}".format(api_error.__dict__))
logging.error("{}".format(api_error.details))
def ldap_bind_as_user(upn, password):
"""
UPN is required for AD bind.
"""
result = False
conn = ldap.initialize(LDAP_CONNECTION_STRING)
conn.protocol_version = LDAP_PROTOCOL_VERSION
try:
bind_result_s = conn.simple_bind_s(upn, password)
if bind_result_s[0] == 97:
logging.info("LDAP bind successful for upn " + upn + ".")
result = User(id=upn)
else:
logging.error("Received an unexpected bind result code: " + bind_result_s)
except ldap.INVALID_CREDENTIALS:
logging.error("Username or password is incorrect.")
except ldap.LDAPError as e:
        # python-ldap on Python 3 stores the error details in e.args[0];
        # the Python 2 e.message / has_key() idiom used here no longer exists
        if e.args and isinstance(e.args[0], dict) and 'desc' in e.args[0]:
            logging.error("LDAP Error exception occurred: " + e.args[0]['desc'])
else:
logging.error("A server error occurred during API authentication.")
except Exception as e:
logging.error("An exception occurred when performing ldap bind. Exception: " + e)
finally:
conn.unbind_s()
return result
class User(object):
def __init__(self, id):
self.id = id
def __str__(self):
return "User(id='%s')" % self.id
def authenticate(username, password):
if not (username and password):
return False
elif "_admin" not in username:
logging.error("The given username does not contain the substring '_admin': " + username)
return False
else:
return ldap_bind_as_user(username, password)
def identity(payload):
user_id = payload['identity']
return {"user_id": user_id}
jwt = JWT(app, authenticate, identity)
if __name__ == "__main__":
app.run()
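# Hedged client sketch (not part of the server): Flask-JWT serves its login
# endpoint at /auth by default and returns an access_token; the host, port,
# and credentials below are illustrative assumptions.
#
#   import requests
#   base = "http://127.0.0.1:5000"
#   token = requests.post(base + "/auth", json={
#       "username": "user_admin@example.com", "password": "secret"
#   }).json()["access_token"]
#   print(requests.get(base + "/dns",
#                      headers={"Authorization": "Bearer " + token}).json())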
|
[
"yaml.load",
"vmware.vapi.bindings.struct.PrettyPrinter",
"flask_cors.CORS",
"flask.jsonify",
"com.vmware.nsx_vmc_app_client_for_vmc.create_nsx_vmc_app_client_for_vmc",
"flask_jwt.jwt_required",
"flask.request.get_json",
"logging.error",
"flask_jwt.JWT",
"datetime.timedelta",
"ns1.NS1",
"ldap.initialize",
"ns1.Config",
"com.vmware.nsx_policy.model_client.PolicyNatRule",
"flask.make_response",
"com.vmware.nsx_policy_client_for_vmc.create_nsx_policy_client_for_vmc",
"logging.basicConfig",
"com.vmware.nsx_vmc_app.model_client.PublicIp",
"logging.warn",
"flask.Flask",
"time.time",
"logging.info",
"flask.json.dumps"
] |
[((1002, 1046), 'yaml.load', 'yaml.load', (['yaml_file'], {'Loader': 'yaml.FullLoader'}), "(yaml_file, Loader=yaml.FullLoader)\n", (1011, 1046), False, 'import logging, sys, os, ldap, time, yaml\n'), ((1138, 1236), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': "yaml_dict['LogFilepath']", 'level': 'logging.ERROR', 'format': 'log_format'}), "(filename=yaml_dict['LogFilepath'], level=logging.ERROR,\n format=log_format)\n", (1157, 1236), False, 'import logging, sys, os, ldap, time, yaml\n'), ((1283, 1298), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (1288, 1298), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((1428, 1479), 'datetime.timedelta', 'timedelta', ([], {'seconds': "yaml_dict['JwtTimeoutInSeconds']"}), "(seconds=yaml_dict['JwtTimeoutInSeconds'])\n", (1437, 1479), False, 'from datetime import datetime, timedelta\n'), ((1527, 1536), 'flask_cors.CORS', 'CORS', (['app'], {}), '(app)\n', (1531, 1536), False, 'from flask_cors import CORS, cross_origin\n'), ((1830, 1838), 'ns1.Config', 'Config', ([], {}), '()\n', (1836, 1838), False, 'from ns1 import NS1, Config\n'), ((1884, 1902), 'ns1.NS1', 'NS1', ([], {'config': 'config'}), '(config=config)\n', (1887, 1902), False, 'from ns1 import NS1, Config\n'), ((2298, 2316), 'vmware.vapi.bindings.struct.PrettyPrinter', 'NsxPrettyPrinter', ([], {}), '()\n', (2314, 2316), True, 'from vmware.vapi.bindings.struct import PrettyPrinter as NsxPrettyPrinter\n'), ((2539, 2553), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (2551, 2553), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((3246, 3260), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (3258, 3260), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((5905, 5919), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (5917, 5919), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((6942, 6956), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (6954, 6956), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((8090, 8104), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (8102, 8104), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((8796, 8810), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (8808, 8810), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((9652, 9666), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (9664, 9666), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((10167, 10181), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (10179, 10181), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((11635, 11649), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (11647, 11649), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((12382, 12396), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (12394, 12396), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((13733, 13747), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (13745, 13747), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((14612, 14626), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (14624, 14626), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((17188, 17202), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (17200, 17202), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((20187, 20219), 'flask_jwt.JWT', 'JWT', (['app', 'authenticate', 'identity'], {}), '(app, authenticate, identity)\n', (20190, 20219), False, 'from flask_jwt import JWT, jwt_required, current_identity\n'), ((2410, 2444), 'logging.info', 'logging.info', (['"""Server is running."""'], {}), "('Server is running.')\n", (2422, 2444), False, 'import logging, sys, os, ldap, time, yaml\n'), ((3069, 3124), 'flask.make_response', 'make_response', (["{'message': 'No action was taken.'}", '(500)'], {}), "({'message': 'No action was taken.'}, 500)\n", (3082, 3124), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((3297, 3315), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (3313, 3315), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((3320, 3371), 'logging.info', 'logging.info', (["('Creating DNS record ' + data['zone'])"], {}), "('Creating DNS record ' + data['zone'])\n", (3332, 3371), False, 'import logging, sys, os, ldap, time, yaml\n'), ((3427, 3482), 'flask.make_response', 'make_response', (["{'message': 'No action was taken.'}", '(500)'], {}), "({'message': 'No action was taken.'}, 500)\n", (3440, 3482), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((5956, 5974), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (5972, 5974), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((6118, 6168), 'logging.info', 'logging.info', (["('Updating DNS record: ' + rec.domain)"], {}), "('Updating DNS record: ' + rec.domain)\n", (6130, 6168), False, 'import logging, sys, os, ldap, time, yaml\n'), ((6185, 6240), 'flask.make_response', 'make_response', (["{'message': 'No action was taken.'}", '(500)'], {}), "({'message': 'No action was taken.'}, 500)\n", (6198, 6240), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((6997, 7052), 'flask.make_response', 'make_response', (["{'message': 'No action was taken.'}", '(500)'], {}), "({'message': 'No action was taken.'}, 500)\n", (7010, 7052), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((8146, 8201), 'flask.make_response', 'make_response', (["{'message': 'No action was taken.'}", '(500)'], {}), "({'message': 'No action was taken.'}, 500)\n", (8159, 8201), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((8859, 8914), 'flask.make_response', 'make_response', (["{'message': 'No action was taken.'}", '(500)'], {}), "({'message': 'No action was taken.'}, 500)\n", (8872, 8914), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((9716, 9801), 'com.vmware.nsx_vmc_app_client_for_vmc.create_nsx_vmc_app_client_for_vmc', 'create_nsx_vmc_app_client_for_vmc', (['VMC_CSP_REFRESH_TOKEN', 'VMC_ORG_ID', 'VMC_SDDC_ID'], {}), '(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID,\n VMC_SDDC_ID)\n', (9749, 9801), False, 'from com.vmware.nsx_vmc_app_client_for_vmc import create_nsx_vmc_app_client_for_vmc\n'), ((9814, 9832), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (9830, 9832), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((9849, 9922), 'com.vmware.nsx_vmc_app.model_client.PublicIp', 'PublicIp', ([], {'display_name': "data['display_name']", 'ip': "data['ip']", 'id': "data['id']"}), "(display_name=data['display_name'], ip=data['ip'], id=data['id'])\n", (9857, 9922), False, 'from com.vmware.nsx_vmc_app.model_client import PublicIp\n'), ((10229, 10284), 'flask.make_response', 'make_response', (["{'message': 'No action was taken.'}", '(500)'], {}), "({'message': 'No action was taken.'}, 500)\n", (10242, 10284), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((11686, 11741), 'flask.make_response', 'make_response', (["{'message': 'No action was taken.'}", '(500)'], {}), "({'message': 'No action was taken.'}, 500)\n", (11699, 11741), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((12431, 12449), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (12447, 12449), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((12465, 12520), 'flask.make_response', 'make_response', (["{'message': 'No action was taken.'}", '(500)'], {}), "({'message': 'No action was taken.'}, 500)\n", (12478, 12520), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((13782, 13800), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (13798, 13800), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((13816, 13871), 'flask.make_response', 'make_response', (["{'message': 'No action was taken.'}", '(500)'], {}), "({'message': 'No action was taken.'}, 500)\n", (13829, 13871), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((14663, 14718), 'flask.make_response', 'make_response', (["{'message': 'No action was taken.'}", '(500)'], {}), "({'message': 'No action was taken.'}, 500)\n", (14676, 14718), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((17237, 17255), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (17253, 17255), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((17271, 17326), 'flask.make_response', 'make_response', (["{'message': 'No action was taken.'}", '(500)'], {}), "({'message': 'No action was taken.'}, 500)\n", (17284, 17326), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((18686, 18725), 'ldap.initialize', 'ldap.initialize', (['LDAP_CONNECTION_STRING'], {}), '(LDAP_CONNECTION_STRING)\n', (18701, 18725), False, 'import logging, sys, os, ldap, time, yaml\n'), ((2907, 2935), 'flask.json.dumps', 'json.dumps', (['all_zone_records'], {}), '(all_zone_records)\n', (2917, 2935), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((6328, 6351), 'flask.jsonify', 'jsonify', (['ns1Record.data'], {}), '(ns1Record.data)\n', (6335, 6351), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((6371, 6418), 'flask.make_response', 'make_response', (['ns1Record.data', 'json.status_code'], {}), '(ns1Record.data, json.status_code)\n', (6384, 6418), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((7078, 7096), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (7094, 7096), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((8237, 8322), 'com.vmware.nsx_vmc_app_client_for_vmc.create_nsx_vmc_app_client_for_vmc', 'create_nsx_vmc_app_client_for_vmc', (['VMC_CSP_REFRESH_TOKEN', 'VMC_ORG_ID', 'VMC_SDDC_ID'], {}), '(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID,\n VMC_SDDC_ID)\n', (8270, 8322), False, 'from com.vmware.nsx_vmc_app_client_for_vmc import create_nsx_vmc_app_client_for_vmc\n'), ((8950, 9035), 'com.vmware.nsx_vmc_app_client_for_vmc.create_nsx_vmc_app_client_for_vmc', 'create_nsx_vmc_app_client_for_vmc', (['VMC_CSP_REFRESH_TOKEN', 'VMC_ORG_ID', 'VMC_SDDC_ID'], {}), '(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID,\n VMC_SDDC_ID)\n', (8983, 9035), False, 'from com.vmware.nsx_vmc_app_client_for_vmc import create_nsx_vmc_app_client_for_vmc\n'), ((9056, 9074), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (9072, 9074), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((9095, 9138), 'com.vmware.nsx_vmc_app.model_client.PublicIp', 'PublicIp', ([], {'display_name': "data['display_name']"}), "(display_name=data['display_name'])\n", (9103, 9138), False, 'from com.vmware.nsx_vmc_app.model_client import PublicIp\n'), ((10310, 10328), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (10326, 10328), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((10435, 10520), 'com.vmware.nsx_policy_client_for_vmc.create_nsx_policy_client_for_vmc', 'create_nsx_policy_client_for_vmc', (['VMC_CSP_REFRESH_TOKEN', 'VMC_ORG_ID', 'VMC_SDDC_ID'], {}), '(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID\n )\n', (10467, 10520), False, 'from com.vmware.nsx_policy_client_for_vmc import create_nsx_policy_client_for_vmc\n'), ((11010, 11095), 'com.vmware.nsx_vmc_app_client_for_vmc.create_nsx_vmc_app_client_for_vmc', 'create_nsx_vmc_app_client_for_vmc', (['VMC_CSP_REFRESH_TOKEN', 'VMC_ORG_ID', 'VMC_SDDC_ID'], {}), '(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID,\n VMC_SDDC_ID)\n', (11043, 11095), False, 'from com.vmware.nsx_vmc_app_client_for_vmc import create_nsx_vmc_app_client_for_vmc\n'), ((11233, 11248), 'flask.make_response', 'make_response', ([], {}), '()\n', (11246, 11248), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((11780, 11865), 'com.vmware.nsx_policy_client_for_vmc.create_nsx_policy_client_for_vmc', 'create_nsx_policy_client_for_vmc', (['VMC_CSP_REFRESH_TOKEN', 'VMC_ORG_ID', 'VMC_SDDC_ID'], {}), '(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID\n )\n', (11812, 11865), False, 'from com.vmware.nsx_policy_client_for_vmc import create_nsx_policy_client_for_vmc\n'), ((12549, 12812), 'com.vmware.nsx_policy.model_client.PolicyNatRule', 'PolicyNatRule', ([], {'action': '"""REFLEXIVE"""', 'scope': "['/infra/labels/cgw-public']", 'source_network': "data['source_network']", 'translated_network': "data['translated_network']", 'display_name': "data['display_name']", 'sequence_number': '(1)', 'firewall_match': '"""MATCH_INTERNAL_ADDRESS"""'}), "(action='REFLEXIVE', scope=['/infra/labels/cgw-public'],\n source_network=data['source_network'], translated_network=data[\n 'translated_network'], display_name=data['display_name'],\n sequence_number=1, firewall_match='MATCH_INTERNAL_ADDRESS')\n", (12562, 12812), False, 'from com.vmware.nsx_policy.model_client import PolicyNatRule\n'), ((13069, 13154), 'com.vmware.nsx_policy_client_for_vmc.create_nsx_policy_client_for_vmc', 'create_nsx_policy_client_for_vmc', (['VMC_CSP_REFRESH_TOKEN', 'VMC_ORG_ID', 'VMC_SDDC_ID'], {}), '(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID\n )\n', (13101, 13154), False, 'from com.vmware.nsx_policy_client_for_vmc import create_nsx_policy_client_for_vmc\n'), ((13910, 13995), 'com.vmware.nsx_policy_client_for_vmc.create_nsx_policy_client_for_vmc', 'create_nsx_policy_client_for_vmc', (['VMC_CSP_REFRESH_TOKEN', 'VMC_ORG_ID', 'VMC_SDDC_ID'], {}), '(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID\n )\n', (13942, 13995), False, 'from com.vmware.nsx_policy_client_for_vmc import create_nsx_policy_client_for_vmc\n'), ((14107, 14200), 'flask.make_response', 'make_response', (["{'message': 'Successfully deleted NAT rule ' + data['display_name']}", '(200)'], {}), "({'message': 'Successfully deleted NAT rule ' + data[\n 'display_name']}, 200)\n", (14120, 14200), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((14757, 14842), 'com.vmware.nsx_policy_client_for_vmc.create_nsx_policy_client_for_vmc', 'create_nsx_policy_client_for_vmc', (['VMC_CSP_REFRESH_TOKEN', 'VMC_ORG_ID', 'VMC_SDDC_ID'], {}), '(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID\n )\n', (14789, 14842), False, 'from com.vmware.nsx_policy_client_for_vmc import create_nsx_policy_client_for_vmc\n'), ((17364, 17449), 'com.vmware.nsx_policy_client_for_vmc.create_nsx_policy_client_for_vmc', 'create_nsx_policy_client_for_vmc', (['VMC_CSP_REFRESH_TOKEN', 'VMC_ORG_ID', 'VMC_SDDC_ID'], {}), '(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID\n )\n', (17396, 17449), False, 'from com.vmware.nsx_policy_client_for_vmc import create_nsx_policy_client_for_vmc\n'), ((17574, 17667), 'flask.make_response', 'make_response', (["{'message': 'Successfully deleted CGW rule ' + data['display_name']}", '(200)'], {}), "({'message': 'Successfully deleted CGW rule ' + data[\n 'display_name']}, 200)\n", (17587, 17667), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((2391, 2402), 'time.time', 'time.time', ([], {}), '()\n', (2400, 2402), False, 'import logging, sys, os, ldap, time, yaml\n'), ((3018, 3052), 'logging.error', 'logging.error', (["('Exception: ' + ex)"], {}), "('Exception: ' + ex)\n", (3031, 3052), False, 'import logging, sys, os, ldap, time, yaml\n'), ((3612, 3635), 'flask.jsonify', 'jsonify', (['ns1Record.data'], {}), '(ns1Record.data)\n', (3619, 3635), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((3659, 3706), 'flask.make_response', 'make_response', (['ns1Record.data', 'json.status_code'], {}), '(ns1Record.data, json.status_code)\n', (3672, 3706), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((5500, 5556), 'flask.make_response', 'make_response', (['re.response.text', 're.response.status_code'], {}), '(re.response.text, re.response.status_code)\n', (5513, 5556), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((5565, 5607), 'logging.error', 'logging.error', (["('ResourceException: ' + re)"], {}), "('ResourceException: ' + re)\n", (5578, 5607), False, 'import logging, sys, os, ldap, time, yaml\n'), ((5644, 5678), 'logging.error', 'logging.error', (["('Exception: ' + ex)"], {}), "('Exception: ' + ex)\n", (5657, 5678), False, 'import logging, sys, os, ldap, time, yaml\n'), ((5698, 5790), 'flask.make_response', 'make_response', (["{'message': 'An error occurred when trying to create a DNS record.'}", '(500)'], {}), "({'message':\n 'An error occurred when trying to create a DNS record.'}, 500)\n", (5711, 5790), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((6474, 6530), 'flask.make_response', 'make_response', (['re.response.text', 're.response.status_code'], {}), '(re.response.text, re.response.status_code)\n', (6487, 6530), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((6539, 6581), 'logging.error', 'logging.error', (["('ResourceException: ' + ex)"], {}), "('ResourceException: ' + ex)\n", (6552, 6581), False, 'import logging, sys, os, ldap, time, yaml\n'), ((6618, 6652), 'logging.error', 'logging.error', (["('Exception: ' + ex)"], {}), "('Exception: ' + ex)\n", (6631, 6652), False, 'import logging, sys, os, ldap, time, yaml\n'), ((7733, 7767), 'logging.error', 'logging.error', (["('Exception: ' + ex)"], {}), "('Exception: ' + ex)\n", (7746, 7767), False, 'import logging, sys, os, ldap, time, yaml\n'), ((8442, 8476), 'logging.error', 'logging.error', (["('Exception: ' + ex)"], {}), "('Exception: ' + ex)\n", (8455, 8476), False, 'import logging, sys, os, ldap, time, yaml\n'), ((9296, 9330), 'logging.error', 'logging.error', (["('Exception: ' + ex)"], {}), "('Exception: ' + ex)\n", (9309, 9330), False, 'import logging, sys, os, ldap, time, yaml\n'), ((11285, 11319), 'logging.error', 'logging.error', (["('Exception: ' + ex)"], {}), "('Exception: ' + ex)\n", (11298, 11319), False, 'import logging, sys, os, ldap, time, yaml\n'), ((12029, 12063), 'logging.error', 'logging.error', (["('Exception: ' + ex)"], {}), "('Exception: ' + ex)\n", (12042, 12063), False, 'import logging, sys, os, ldap, time, yaml\n'), ((13353, 13387), 'logging.error', 'logging.error', (["('Exception: ' + ex)"], {}), "('Exception: ' + ex)\n", (13366, 13387), False, 'import logging, sys, os, ldap, time, yaml\n'), ((14232, 14266), 'logging.error', 'logging.error', (["('Exception: ' + ex)"], {}), "('Exception: ' + ex)\n", (14245, 14266), False, 'import logging, sys, os, ldap, time, yaml\n'), ((16828, 16862), 'logging.error', 'logging.error', (["('Exception: ' + ex)"], {}), "('Exception: ' + ex)\n", (16841, 16862), False, 'import logging, sys, os, ldap, time, yaml\n'), ((17699, 17733), 'logging.error', 'logging.error', (["('Exception: ' + ex)"], {}), "('Exception: ' + ex)\n", (17712, 17733), False, 'import logging, sys, os, ldap, time, yaml\n'), ((18895, 18952), 'logging.info', 'logging.info', (["('LDAP bind successful for upn ' + upn + '.')"], {}), "('LDAP bind successful for upn ' + upn + '.')\n", (18907, 18952), False, 'import logging, sys, os, ldap, time, yaml\n'), ((19013, 19087), 'logging.error', 'logging.error', (["('Received an unexpected bind result code: ' + bind_result_s)"], {}), "('Received an unexpected bind result code: ' + bind_result_s)\n", (19026, 19087), False, 'import logging, sys, os, ldap, time, yaml\n'), ((19134, 19185), 'logging.error', 'logging.error', (['"""Username or password is incorrect."""'], {}), "('Username or password is incorrect.')\n", (19147, 19185), False, 'import logging, sys, os, ldap, time, yaml\n'), ((19496, 19584), 'logging.error', 'logging.error', (["('An exception occurred when performing ldap bind. Exception: ' + e)"], {}), "(\n 'An exception occurred when performing ldap bind. Exception: ' + e)\n", (19509, 19584), False, 'import logging, sys, os, ldap, time, yaml\n'), ((19912, 20006), 'logging.error', 'logging.error', (['("The given username does not contain the substring \'_admin\': " + username)'], {}), '(\n "The given username does not contain the substring \'_admin\': " + username)\n', (19925, 20006), False, 'import logging, sys, os, ldap, time, yaml\n'), ((3834, 3857), 'flask.jsonify', 'jsonify', (['ns1Record.data'], {}), '(ns1Record.data)\n', (3841, 3857), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((3881, 3928), 'flask.make_response', 'make_response', (['ns1Record.data', 'json.status_code'], {}), '(ns1Record.data, json.status_code)\n', (3894, 3928), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((6770, 6805), 'flask.jsonify', 'jsonify', (["{'message': error_message}"], {}), "({'message': error_message})\n", (6777, 6805), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((7463, 7498), 'flask.jsonify', 'jsonify', (["{'message': error_message}"], {}), "({'message': error_message})\n", (7470, 7498), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((7618, 7682), 'flask.jsonify', 'jsonify', (["{'message': 'Deleted ' + rec.domain + ' successfully.'}"], {}), "({'message': 'Deleted ' + rec.domain + ' successfully.'})\n", (7625, 7682), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((7925, 7960), 'flask.jsonify', 'jsonify', (["{'message': error_message}"], {}), "({'message': error_message})\n", (7932, 7960), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((8630, 8665), 'flask.jsonify', 'jsonify', (["{'message': error_message}"], {}), "({'message': error_message})\n", (8637, 8665), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((9485, 9520), 'flask.jsonify', 'jsonify', (["{'message': error_message}"], {}), "({'message': error_message})\n", (9492, 9520), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((10718, 10857), 'flask.make_response', 'make_response', (["{'message': 'The IP is being used by NAT rule ' + nat_rule.display_name +\n '. Delete NAT rule before continuing.'}", '(409)'], {}), "({'message': 'The IP is being used by NAT rule ' + nat_rule.\n display_name + '. Delete NAT rule before continuing.'}, 409)\n", (10731, 10857), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((11463, 11498), 'flask.jsonify', 'jsonify', (["{'message': error_message}"], {}), "({'message': error_message})\n", (11470, 11498), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((12217, 12252), 'flask.jsonify', 'jsonify', (["{'message': error_message}"], {}), "({'message': error_message})\n", (12224, 12252), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((13569, 13604), 'flask.jsonify', 'jsonify', (["{'message': error_message}"], {}), "({'message': error_message})\n", (13576, 13604), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((14448, 14483), 'flask.jsonify', 'jsonify', (["{'message': error_message}"], {}), "({'message': error_message})\n", (14455, 14483), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((17016, 17051), 'flask.jsonify', 'jsonify', (["{'message': error_message}"], {}), "({'message': error_message})\n", (17023, 17051), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((17915, 17950), 'flask.jsonify', 'jsonify', (["{'message': error_message}"], {}), "({'message': error_message})\n", (17922, 17950), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((19296, 19365), 'logging.error', 'logging.error', (["('LDAP Error exception occurred: ' + e.message['desc'])"], {}), "('LDAP Error exception occurred: ' + e.message['desc'])\n", (19309, 19365), False, 'import logging, sys, os, ldap, time, yaml\n'), ((19393, 19460), 'logging.error', 'logging.error', (['"""A server error occurred during API authentication."""'], {}), "('A server error occurred during API authentication.')\n", (19406, 19460), False, 'import logging, sys, os, ldap, time, yaml\n'), ((4058, 4081), 'flask.jsonify', 'jsonify', (['ns1Record.data'], {}), '(ns1Record.data)\n', (4065, 4081), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((4105, 4152), 'flask.make_response', 'make_response', (['ns1Record.data', 'json.status_code'], {}), '(ns1Record.data, json.status_code)\n', (4118, 4152), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((4431, 4454), 'flask.jsonify', 'jsonify', (['ns1Record.data'], {}), '(ns1Record.data)\n', (4438, 4454), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((4482, 4529), 'flask.make_response', 'make_response', (['ns1Record.data', 'json.status_code'], {}), '(ns1Record.data, json.status_code)\n', (4495, 4529), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((5141, 5164), 'flask.jsonify', 'jsonify', (['ns1Record.data'], {}), '(ns1Record.data)\n', (5148, 5164), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((5188, 5235), 'flask.make_response', 'make_response', (['ns1Record.data', 'json.status_code'], {}), '(ns1Record.data, json.status_code)\n', (5201, 5235), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((5262, 5314), 'logging.warn', 'logging.warn', (["('Unknown record type: ' + data['type'])"], {}), "('Unknown record type: ' + data['type'])\n", (5274, 5314), False, 'import logging, sys, os, ldap, time, yaml\n'), ((5338, 5453), 'flask.make_response', 'make_response', (["{'message': 'Unable to create DNS record due to unknown record type ' +\n data['type']}", '(400)'], {}), "({'message': \n 'Unable to create DNS record due to unknown record type ' + data['type'\n ]}, 400)\n", (5351, 5453), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((4749, 4772), 'flask.jsonify', 'jsonify', (['ns1Record.data'], {}), '(ns1Record.data)\n', (4756, 4772), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((4800, 4847), 'flask.make_response', 'make_response', (['ns1Record.data', 'json.status_code'], {}), '(ns1Record.data, json.status_code)\n', (4813, 4847), False, 'from flask import Flask, json, g, request, make_response, jsonify\n'), ((4893, 5023), 'flask.make_response', 'make_response', (["{'message': \n 'Unable to create MX record due to issue parsing the answers list => ' +\n data['answers']}", '(400)'], {}), "({'message': \n 'Unable to create MX record due to issue parsing the answers list => ' +\n data['answers']}, 400)\n", (4906, 5023), False, 'from flask import Flask, json, g, request, make_response, jsonify\n')]
|
import paddle
import paddle.nn as nn
import vgg
def compute_l1_loss(input, output):
return paddle.mean(paddle.abs(input - output))
def loss_Textures(x, y, nc=3, alpha=1.2, margin=0):
    # paddle tensors expose .reshape/.shape; the torch-style
    # .contiguous().view()/.size() calls used originally do not exist here
    xi = x.reshape([x.shape[0], -1, nc, x.shape[2], x.shape[3]])
    yi = y.reshape([y.shape[0], -1, nc, y.shape[2], y.shape[3]])
xi2 = paddle.sum(xi * xi, axis=2)
yi2 = paddle.sum(yi * yi, axis=2)
# pdb.set_trace() #15*32*32
    # scalar multiply via the * operator (paddle tensors have no .mul() method)
    out = nn.functional.relu(yi2 * alpha - xi2 + margin)
return paddle.mean(out)
class LossNetwork(nn.Layer):
"""Reference:
https://discuss.pytorch.org/t/how-to-extract-features-of-an-image-from-a-trained-model/119/3
"""
def __init__(self, pretrained: str = None):
super(LossNetwork, self).__init__()
self.vgg_layers = vgg.vgg19(pretrained=pretrained).features
self.layer_name_mapping = {
'3': "relu1",
'8': "relu2",
'13': "relu3",
'22': "relu4",
'31': "relu5", # 1_2 to 5_2
}
def forward(self, x):
output = {}
# import pdb
# pdb.set_trace()
for name, module in self.vgg_layers._sub_layers.items():
x = module(x)
if name in self.layer_name_mapping:
output[self.layer_name_mapping[name]] = x
return output
class TVLoss(nn.Layer):
def __init__(self, weight=1):
super(TVLoss, self).__init__()
self.weight = weight
def forward(self, x):
batch_size = x.shape[0]
h_x = x.shape[2]
w_x = x.shape[3]
count_h = self._tensor_size(x[:, :, 1:, :])
count_w = self._tensor_size(x[:, :, :, 1:])
h_tv = paddle.pow((x[:, :, 1:, :] - x[:, :, :h_x - 1, :]), 2).sum()
w_tv = paddle.pow((x[:, :, :, 1:] - x[:, :, :, :w_x - 1]), 2).sum()
return self.weight * 2 * (h_tv / count_h + w_tv / count_w) / batch_size
def _tensor_size(self, t):
return t.shape[1] * t.shape[2] * t.shape[3]
if __name__ == '__main__':
img = paddle.randn([1, 3, 224, 224])
net = LossNetwork()
out = net(img)
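    # Hedged extras (not in the original snippet): exercise the remaining
    # losses defined above on the same random batch.
    tv = TVLoss()(img)
    l1 = compute_l1_loss(img, img * 0.5)
    tex = loss_Textures(img, img * 0.9)
    print(float(tv), float(l1), float(tex), sorted(out.keys()))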
|
[
"paddle.sum",
"paddle.mean",
"paddle.abs",
"paddle.pow",
"paddle.randn",
"vgg.vgg19"
] |
[((343, 370), 'paddle.sum', 'paddle.sum', (['(xi * xi)'], {'axis': '(2)'}), '(xi * xi, axis=2)\n', (353, 370), False, 'import paddle\n'), ((381, 408), 'paddle.sum', 'paddle.sum', (['(yi * yi)'], {'axis': '(2)'}), '(yi * yi, axis=2)\n', (391, 408), False, 'import paddle\n'), ((516, 532), 'paddle.mean', 'paddle.mean', (['out'], {}), '(out)\n', (527, 532), False, 'import paddle\n'), ((2056, 2086), 'paddle.randn', 'paddle.randn', (['[1, 3, 224, 224]'], {}), '([1, 3, 224, 224])\n', (2068, 2086), False, 'import paddle\n'), ((110, 136), 'paddle.abs', 'paddle.abs', (['(input - output)'], {}), '(input - output)\n', (120, 136), False, 'import paddle\n'), ((810, 842), 'vgg.vgg19', 'vgg.vgg19', ([], {'pretrained': 'pretrained'}), '(pretrained=pretrained)\n', (819, 842), False, 'import vgg\n'), ((1717, 1769), 'paddle.pow', 'paddle.pow', (['(x[:, :, 1:, :] - x[:, :, :h_x - 1, :])', '(2)'], {}), '(x[:, :, 1:, :] - x[:, :, :h_x - 1, :], 2)\n', (1727, 1769), False, 'import paddle\n'), ((1793, 1845), 'paddle.pow', 'paddle.pow', (['(x[:, :, :, 1:] - x[:, :, :, :w_x - 1])', '(2)'], {}), '(x[:, :, :, 1:] - x[:, :, :, :w_x - 1], 2)\n', (1803, 1845), False, 'import paddle\n')]
|
"""
2.2.6: `then` may be called multiple times on the same promise.
https://github.com/promises-aplus/promises-tests/blob/2.1.1/lib/tests/2.2.6.js
"""
import mock
from test.components.scheduler.promises.helpers import generate_rejected_test_case
other = {'other': 'other'}
sentinel = {'sentinel': 'sentinel'}
sentinel2 = {'sentinel2': 'sentinel2'}
sentinel3 = {'sentinel3': 'sentinel3'}
dummy = {'dummy': 'dummy'}
def multiple_boring_tests(test_case, promise, done):
handler1 = mock.MagicMock(return_value=other)
handler2 = mock.MagicMock(return_value=other)
handler3 = mock.MagicMock(return_value=other)
fulfilled = mock.MagicMock()
def final_rejected(argument):
test_case.assertEqual(argument, sentinel)
handler1.assert_called_once_with(sentinel)
handler2.assert_called_once_with(sentinel)
handler3.assert_called_once_with(sentinel)
fulfilled.assert_not_called()
done()
promise.then(fulfilled, handler1)
promise.then(fulfilled, handler2)
promise.then(fulfilled, handler3)
promise.then(None, final_rejected)
def multiple_one_throws(test_case, promise, done):
handler1 = mock.MagicMock(return_value=other)
handler2 = mock.MagicMock(side_effect=AttributeError())
handler3 = mock.MagicMock(return_value=other)
fulfilled = mock.MagicMock()
def final_rejected(argument):
test_case.assertEqual(argument, sentinel)
handler1.assert_called_once_with(sentinel)
handler2.assert_called_once_with(sentinel)
handler3.assert_called_once_with(sentinel)
fulfilled.assert_not_called()
done()
promise.then(fulfilled, handler1)
promise.then(fulfilled, handler2)
promise.then(fulfilled, handler3)
promise.then(None, final_rejected)
def multiple_branching_chains_each_with_own_value(test_case, promise, done):
test_case.session['semiDone'] = 0
def semidone():
test_case.session['semiDone'] += 1
if test_case.session['semiDone'] == 3:
done()
def branch01(value):
return sentinel
def branch01_final(value):
test_case.assertIs(value, sentinel)
semidone()
branch02_error = TypeError()
def branch02(value):
raise branch02_error
def branch02_final(value):
test_case.assertIs(value, branch02_error)
semidone()
def branch03(value):
return sentinel3
def branch03_final(value):
test_case.assertIs(value, sentinel3)
semidone()
promise.then(None, branch01).then(branch01_final)
promise.then(None, branch02).then(None, branch02_final)
promise.then(None, branch03).then(branch03_final)
def on_fulfilled_handlers_called_in_original_order(test_case, promise, done):
handler_mock = mock.MagicMock(**{'handler01.return_value': sentinel,
'handler02.return_value': sentinel2,
'handler03.return_value': sentinel3})
promise.then(None, handler_mock.handler01)
promise.then(None, handler_mock.handler02)
promise.then(None, handler_mock.handler03)
def test_handlers(value):
test_case.assertIs(dummy, value)
method_calls = [a[0] for a in handler_mock.method_calls]
test_case.assertEquals(['handler01', 'handler02', 'handler03'], method_calls)
done()
promise.then(None, test_handlers)
def order_manipulated_in_a_promise(test_case, promise, done):
handler_mock = mock.MagicMock(**{'handler01.return_value': sentinel,
'handler02.return_value': sentinel2,
'handler03.return_value': sentinel3})
def inject_handler_during_execution(value):
handler_mock.handler01()
promise.then(None, handler_mock.handler03)
promise.then(None, inject_handler_during_execution)
promise.then(None, handler_mock.handler02)
def test_handlers():
method_calls = [a[0] for a in handler_mock.method_calls]
test_case.assertEquals(['handler01', 'handler02', 'handler03'], method_calls)
done()
def schedule_test(value):
test_case.scheduler.schedule_task(test_handlers, 0.015)
promise.then(None, schedule_test)
MultipleBoringTestCases = generate_rejected_test_case(method=multiple_boring_tests, value=sentinel,
module=__name__,
name='MultipleBoringTestCases')
MultipleOneThrowsTestCases = generate_rejected_test_case(method=multiple_one_throws, value=sentinel,
module=__name__,
name='MultipleOneThrowsTestCases')
MultipleBranchingTestCases = generate_rejected_test_case(method=multiple_branching_chains_each_with_own_value,
module=__name__,
value=dummy,
name='MultipleBranchingTestCases')
FulfilledHandlersInOrder = generate_rejected_test_case(method=on_fulfilled_handlers_called_in_original_order,
value=dummy,
module=__name__,
name='FulfilledHandlersInOrder')
OrderManipulatedInPromise = generate_rejected_test_case(method=order_manipulated_in_a_promise,
value=dummy,
module=__name__,
name='OrderManipulatedInPromise')
|
[
"test.components.scheduler.promises.helpers.generate_rejected_test_case",
"mock.MagicMock"
] |
[((4298, 4424), 'test.components.scheduler.promises.helpers.generate_rejected_test_case', 'generate_rejected_test_case', ([], {'method': 'multiple_boring_tests', 'value': 'sentinel', 'module': '__name__', 'name': '"""MultipleBoringTestCases"""'}), "(method=multiple_boring_tests, value=sentinel,\n module=__name__, name='MultipleBoringTestCases')\n", (4325, 4424), False, 'from test.components.scheduler.promises.helpers import generate_rejected_test_case\n'), ((4558, 4685), 'test.components.scheduler.promises.helpers.generate_rejected_test_case', 'generate_rejected_test_case', ([], {'method': 'multiple_one_throws', 'value': 'sentinel', 'module': '__name__', 'name': '"""MultipleOneThrowsTestCases"""'}), "(method=multiple_one_throws, value=sentinel,\n module=__name__, name='MultipleOneThrowsTestCases')\n", (4585, 4685), False, 'from test.components.scheduler.promises.helpers import generate_rejected_test_case\n'), ((4825, 4981), 'test.components.scheduler.promises.helpers.generate_rejected_test_case', 'generate_rejected_test_case', ([], {'method': 'multiple_branching_chains_each_with_own_value', 'module': '__name__', 'value': 'dummy', 'name': '"""MultipleBranchingTestCases"""'}), "(method=\n multiple_branching_chains_each_with_own_value, module=__name__, value=\n dummy, name='MultipleBranchingTestCases')\n", (4852, 4981), False, 'from test.components.scheduler.promises.helpers import generate_rejected_test_case\n'), ((5170, 5325), 'test.components.scheduler.promises.helpers.generate_rejected_test_case', 'generate_rejected_test_case', ([], {'method': 'on_fulfilled_handlers_called_in_original_order', 'value': 'dummy', 'module': '__name__', 'name': '"""FulfilledHandlersInOrder"""'}), "(method=\n on_fulfilled_handlers_called_in_original_order, value=dummy, module=\n __name__, name='FulfilledHandlersInOrder')\n", (5197, 5325), False, 'from test.components.scheduler.promises.helpers import generate_rejected_test_case\n'), ((5509, 5644), 'test.components.scheduler.promises.helpers.generate_rejected_test_case', 'generate_rejected_test_case', ([], {'method': 'order_manipulated_in_a_promise', 'value': 'dummy', 'module': '__name__', 'name': '"""OrderManipulatedInPromise"""'}), "(method=order_manipulated_in_a_promise, value=\n dummy, module=__name__, name='OrderManipulatedInPromise')\n", (5536, 5644), False, 'from test.components.scheduler.promises.helpers import generate_rejected_test_case\n'), ((485, 519), 'mock.MagicMock', 'mock.MagicMock', ([], {'return_value': 'other'}), '(return_value=other)\n', (499, 519), False, 'import mock\n'), ((535, 569), 'mock.MagicMock', 'mock.MagicMock', ([], {'return_value': 'other'}), '(return_value=other)\n', (549, 569), False, 'import mock\n'), ((585, 619), 'mock.MagicMock', 'mock.MagicMock', ([], {'return_value': 'other'}), '(return_value=other)\n', (599, 619), False, 'import mock\n'), ((637, 653), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (651, 653), False, 'import mock\n'), ((1170, 1204), 'mock.MagicMock', 'mock.MagicMock', ([], {'return_value': 'other'}), '(return_value=other)\n', (1184, 1204), False, 'import mock\n'), ((1280, 1314), 'mock.MagicMock', 'mock.MagicMock', ([], {'return_value': 'other'}), '(return_value=other)\n', (1294, 1314), False, 'import mock\n'), ((1332, 1348), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1346, 1348), False, 'import mock\n'), ((2794, 2926), 'mock.MagicMock', 'mock.MagicMock', ([], {}), "(**{'handler01.return_value': sentinel,\n 'handler02.return_value': sentinel2, 'handler03.return_value': sentinel3})\n", (2808, 2926), False, 'import mock\n'), ((3502, 3634), 'mock.MagicMock', 'mock.MagicMock', ([], {}), "(**{'handler01.return_value': sentinel,\n 'handler02.return_value': sentinel2, 'handler03.return_value': sentinel3})\n", (3516, 3634), False, 'import mock\n')]
|
from nifcloud import session
import sys
import base64
# ---- define name -------
# -- key name ----------
SSH_KEY_FILE_NAME = 'key.pub'
EAST31_KEY_NAME = "key"
# -- security group ----
WEB_SECURITY_GP_NAME = "webfw"
DB_SECURITY_GP_NAME = "dbfw"
# -- Private LAN name ---
WEB_DB_PRV_NET_NAME = "webdbnet"
# -- Router name ---
WEB_DB_ROUTER_NAME = "webdbRtr"
# -------------------------
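# Overall sketch of what this script builds: two firewall groups (web / db),
# one private LAN with a DHCP router, and two servers (websv with a global IP,
# dbsv reachable only on the LAN).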
# -------- Create Firewall --------------------------------------
def wait_for_fw_create(client, sg_name):
print("wait : ", sys._getframe().f_code.co_name)
    wait_result = None  # avoid UnboundLocalError in finally if wait() raises early
    try:
waiter = client.get_waiter('security_group_exists')
wait_result = waiter.wait(
Filter=[
{
'ListOfRequestValue': [
'applied',
],
'Name': 'group-name'
},
],
GroupName=[sg_name, ],
WaiterConfig={
'Delay': 20,
'MaxAttempts': 40
}
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
finally:
return wait_result
def wait_for_fw_applied(client, sg_name):
print("wait : ", sys._getframe().f_code.co_name)
    wait_result = None
    try:
waiter = client.get_waiter('security_group_applied')
wait_result = waiter.wait(
Filter=[
{
'ListOfRequestValue': [
'applied',
],
'Name': 'group-name'
},
],
GroupName=[sg_name, ],
WaiterConfig={
'Delay': 20,
'MaxAttempts': 40
}
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
finally:
return wait_result
def create_fw(client):
try:
sg = client.create_security_group(
GroupName=WEB_SECURITY_GP_NAME,
GroupDescription="WEB FW"
)
print("create : ", sg)
wait_for_fw_create(client, WEB_SECURITY_GP_NAME)
sg = client.create_security_group(
GroupName=DB_SECURITY_GP_NAME,
GroupDescription="DB FW"
)
print("create : ", sg)
        wait_for_fw_create(client, DB_SECURITY_GP_NAME)  # wait on the group just created
# -------------- web fw -----------------------------
client.authorize_security_group_ingress(
GroupName=WEB_SECURITY_GP_NAME,
IpPermissions=[
{
'Description': 'class b allow',
'InOut': 'IN',
'IpProtocol': 'ANY',
'ListOfRequestIpRanges': [
{
'CidrIp': "192.168.2.0/24",
},
],
},
]
)
wait_for_fw_applied(client, WEB_SECURITY_GP_NAME)
client.authorize_security_group_ingress(
GroupName=WEB_SECURITY_GP_NAME,
IpPermissions=[
{
'Description': 'DB alow',
'InOut': 'IN',
'IpProtocol': 'ANY',
'ListOfRequestGroups': [
{
'GroupName': DB_SECURITY_GP_NAME,
},
],
},
]
)
wait_for_fw_applied(client, WEB_SECURITY_GP_NAME)
client.authorize_security_group_ingress(
GroupName=WEB_SECURITY_GP_NAME,
IpPermissions=[
{
'Description': 'ssh allow(example IP)',
'FromPort': 22,
'ToPort': 22,
'InOut': 'IN',
'IpProtocol': 'TCP',
'ListOfRequestIpRanges': [
{
'CidrIp': "203.0.113.1",
},
],
},
]
)
wait_for_fw_applied(client, WEB_SECURITY_GP_NAME)
# ------------- dbfw ----------------------------
client.authorize_security_group_ingress(
GroupName=DB_SECURITY_GP_NAME,
IpPermissions=[
{
'Description': 'DB alow',
'InOut': 'IN',
'IpProtocol': 'ANY',
'ListOfRequestGroups': [
{
'GroupName': WEB_SECURITY_GP_NAME,
},
],
},
]
)
wait_for_fw_applied(client, DB_SECURITY_GP_NAME)
client.authorize_security_group_ingress(
GroupName=DB_SECURITY_GP_NAME,
IpPermissions=[
{
'Description': 'class b allow',
'InOut': 'IN',
'IpProtocol': 'ANY',
'ListOfRequestIpRanges': [
{
'CidrIp': "192.168.2.0/24",
},
],
},
]
)
wait_for_fw_applied(client, DB_SECURITY_GP_NAME)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
client.delete_security_group(
GroupName=WEB_SECURITY_GP_NAME,
)
client.delete_security_group(
GroupName=DB_SECURITY_GP_NAME,
)
sys.exit(1)
# ------ Create Virtual Server ----------------------------------
def wait_for_instance_create(client, instance_name):
print("wait : ", sys._getframe().f_code.co_name)
    wait_result = None
    try:
waiter = client.get_waiter('instance_running')
wait_result = waiter.wait(
InstanceId=[instance_name, ],
Tenancy=['all', ],
WaiterConfig={ # Wait 10 min with a check interval of 30s.
'Delay': 30,
'MaxAttempts': 20
}
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
finally:
return wait_result
def wait_for_instance_stop(client, instance_name):
print("wait : ", sys._getframe().f_code.co_name)
    wait_result = None
    try:
waiter = client.get_waiter('instance_stopped')
wait_result = waiter.wait(
InstanceId=[instance_name, ],
Tenancy=['all', ],
WaiterConfig={ # Wait 10 min with a check interval of 30s.
'Delay': 30,
'MaxAttempts': 20
}
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
finally:
return wait_result
def create_instance(client):
try:
"""
client.run_instances(
AccountingType='2',#'1':Monthly
#'2':Payper(Default)
Admin='string',#Windows Admin User Name
Agreement="False",#True:License Agree for SPLA,RHEL and anymore.
# see also https://pfs.nifcloud.com/service/licence_ms.htm
# https://pfs.nifcloud.com/service/rhel.htm
#False:Not License Agree(Default)
Description='string',# memo
DisableApiTermination=True,#True :Not Allow to delete from API(Default)
#False:Allow to delete from API
ImageId='string', #OS Image Name
InstanceId='string',#Server Name
InstanceType="",#Server Type
#see also https://pfs.nifcloud.com/api/rest/RunInstances.htm
KeyName='string',#SSH Key Name
License=[#License Infomation.see also https://pfs.nifcloud.com/service/licence_ms.htm
{#No.1 License Info
'LicenseName': 'RDS'|'Office(Std)'|'Office(Pro Plus)',
'LicenseNum' : 'string'
},
#...
{#No.N License Info
'LicenseName': 'RDS'|'Office(Std)'|'Office(Pro Plus)',
'LicenseNum' : 'string'
},
],
IpType='',#'static' :Use Global IP
#'elastic':Use Replacement IP.Shuld set PublicIp
#'none' :Not Use Global IP
PublicIp='string',#If you use Replacement IP set this
NetworkInterface=[#Network Config.
{#Full argument
'IpAddress': 'string',#See also NetworkInterface.n.IpAddress in
#https://pfs.nifcloud.com/api/rest/RunInstances.htm
#if use the DHCP delete this
'NetworkId': 'string',#Connect Network
#net-COMMON_GLOBAL :Common Global
#net-COMMON_PRIVATE:Common Private
#NetworkID :Network ID at Private LAN
'NetworkName': 'string'
},
{#Common Private DHCP sample
'NetworkId': 'net-COMMON_PRIVATE',
},
],
Password='<PASSWORD>',#Password for Windows Admin user
Placement={
'AvailabilityZone': 'string',#Zone Name.
#For jp-east-1, east-11,east-12,east-13,east-14 can be selected.
#For jp-west-1, west-11,west-12,west-13 can be selected.
},
SecurityGroup=[#Firewall Group name
'string',
],
UserData={#Server Boot Script
'Content': 'string',#Encoded Server Boot Script body
'Encoding': 'string'#Encoding Type
#'' :text
#'base64':base64 encode(Default)
}
)
"""
client.run_instances(
AccountingType='2',
Description='web sv',
DisableApiTermination=False,
ImageId='220',
InstanceId='websv',
InstanceType="e-small4",
KeyName=EAST31_KEY_NAME,
IpType='static',
NetworkInterface=[
{
'NetworkName': WEB_DB_PRV_NET_NAME,
},
],
Placement={
'AvailabilityZone': 'east-31',
},
SecurityGroup=[
WEB_SECURITY_GP_NAME,
],
)
wait_for_instance_create(client, 'websv')
client.run_instances(
AccountingType='2',
Description='DB sv',
DisableApiTermination=False,
ImageId='220',
InstanceId='dbsv',
InstanceType="e-small4",
KeyName=EAST31_KEY_NAME,
IpType='none',
NetworkInterface=[
{
'NetworkName': WEB_DB_PRV_NET_NAME,
},
],
Placement={
'AvailabilityZone': 'east-31',
},
SecurityGroup=[
                DB_SECURITY_GP_NAME,  # the DB server belongs to the DB firewall group
],
)
wait_for_instance_create(client, 'dbsv')
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
client.stop_instances(
Force=True,
InstanceId=[
'websv',
],
)
wait_for_instance_stop(client, 'websv')
client.terminate_instances(
InstanceId=[
'websv',
]
)
client.stop_instances(
Force=True,
InstanceId=[
'dbsv',
],
)
wait_for_instance_stop(client, 'dbsv')
client.terminate_instances(
InstanceId=[
'dbsv',
]
)
sys.exit(1)
# ----- import ssh key ------------------------------------------
def import_sshkey(client):
try:
        with open(SSH_KEY_FILE_NAME, "rb") as ssh_pub_file:
client.import_key_pair(
Description='memo',
KeyName=EAST31_KEY_NAME,
PublicKeyMaterial=base64.b64encode(
ssh_pub_file.read()).decode("ascii")
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
sys.exit(1)
# ----- Create Private LAN --------------------------------------
def wait_for_private_lan_create(client, private_lan_name):
print("wait : ", sys._getframe().f_code.co_name)
    wait_result = None
    try:
waiter = client.get_waiter('private_lan_exists')
wait_result = waiter.wait(
Filter=[
{
'ListOfRequestValue': [
'available',
],
'Name': 'state'
},
],
PrivateLanName=[private_lan_name, ],
WaiterConfig={
'Delay': 20,
'MaxAttempts': 40
}
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
finally:
return wait_result
def create_private_lan(client):
try:
"""
client.nifty_create_private_lan(
AccountingType = '2',#'1':Monthly
#'2':Payper(Default)
AvailabilityZone = 'string',#Zone Name.east-31,east-11,west-12 and more
CidrBlock = 'string',#CIDR for Private LAN address
Description = 'string',#memo
PrivateLanName = 'string'#Private LAN Name
)
"""
client.nifty_create_private_lan(
AccountingType='2', # '1':Monthly
AvailabilityZone='east-31', # Zone Name.east-31,east-11,west-12 and more
CidrBlock='192.168.170.0/24', # CIDR for Private LAN address
Description='memo', # memo
PrivateLanName=WEB_DB_PRV_NET_NAME # Private LAN Name
)
wait_for_private_lan_create(client, WEB_DB_PRV_NET_NAME)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
client.nifty_delete_private_lan(
PrivateLanName=WEB_DB_PRV_NET_NAME
)
sys.exit(1)
# ------ Create Private LAN DHCP Router -------------------------
def wait_for_create_router(client, router_name):
print("wait : ", sys._getframe().f_code.co_name)
    wait_result = None
    try:
waiter = client.get_waiter('router_exists')
wait_result = waiter.wait(
Filter=[
{
'ListOfRequestValue': [
'available',
],
'Name': 'state'
},
],
RouterName=[
router_name,
],
WaiterConfig={ # Wait 10 min with a check interval of 30s.
'Delay': 30,
'MaxAttempts': 20
}
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
finally:
return wait_result
def enable_dhcp(client):
# Create DHCP Config
    dhcp_config_result = client.nifty_create_dhcp_config()
    dhcp_config_id = dhcp_config_result['DhcpConfig']['DhcpConfigId']
# DHCP Setting ADD
client.nifty_create_dhcp_ip_address_pool(
Description='memo', # memo
DhcpConfigId=dhcp_config_id, # DHCP Config ID
StartIpAddress='192.168.170.100', # DHCP Start IP
StopIpAddress='192.168.170.250' # DHCP End IP
)
# Create Router
"""
client.nifty_create_router(
AccountingType = '2',#'1':Monthly
#'2':Payper(Default)
RouterName = 'string',#Router Name
AvailabilityZone = 'string',#Zone Name.east-31,east-11,west-12 and more
Description = 'string', #memo
NetworkInterface=[
{
'Dhcp' : True, #True :DHCP Enable.Request after item(Default)
#False:DHCP Disable
'DhcpConfigId' : 'string',#DHCP Config ID
'DhcpOptionsId': 'string',#DHCP Option ID
'IpAddress' : 'string',#IP Address at Connectted Private LAN
'NetworkId' : 'string',#Select Setting Network.Exclusive NetworkName
'NetworkName' : 'string' #Select Setting Network.Exclusive NetwokId
},
],
SecurityGroup=[#Firewall Group(Option)
'string',
],
Type='small'#'small' :Max 10 Rule(Default)
#'medium':Max 30 Rule
#'large' :Max 80 Rule
)
"""
client.nifty_create_router(
AccountingType='2',
RouterName=WEB_DB_ROUTER_NAME,
AvailabilityZone='east-31',
Description='memo', # memo
NetworkInterface=[
{
'Dhcp': True,
'DhcpConfigId': dhcp_config_id,
'IpAddress': '192.168.170.1',
'NetworkName': WEB_DB_PRV_NET_NAME
},
],
Type='small'
)
wait_for_create_router(client, WEB_DB_ROUTER_NAME)
# -------------- main ----------------
client = session.get_session().create_client(
"computing",
region_name="jp-east-3",
)
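# Assumption: like other botocore-based SDKs, the client picks up credentials
# from the environment (e.g. NIFCLOUD_ACCESS_KEY_ID / NIFCLOUD_SECRET_ACCESS_KEY);
# nothing in this script passes keys explicitly.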
import_sshkey(client)
create_fw(client)
create_private_lan(client)
enable_dhcp(client)
create_instance(client)
|
[
"nifcloud.session.get_session",
"sys._getframe",
"sys.exit"
] |
[((17601, 17622), 'nifcloud.session.get_session', 'session.get_session', ([], {}), '()\n', (17620, 17622), False, 'from nifcloud import session\n'), ((5572, 5583), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5580, 5583), False, 'import sys\n'), ((12146, 12157), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (12154, 12157), False, 'import sys\n'), ((12668, 12679), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (12676, 12679), False, 'import sys\n'), ((14596, 14607), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (14604, 14607), False, 'import sys\n'), ((519, 534), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (532, 534), False, 'import sys\n'), ((1227, 1242), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (1240, 1242), False, 'import sys\n'), ((5727, 5742), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (5740, 5742), False, 'import sys\n'), ((6306, 6321), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (6319, 6321), False, 'import sys\n'), ((12829, 12844), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (12842, 12844), False, 'import sys\n'), ((14746, 14761), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (14759, 14761), False, 'import sys\n'), ((1090, 1105), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (1103, 1105), False, 'import sys\n'), ((1799, 1814), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (1812, 1814), False, 'import sys\n'), ((5349, 5364), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (5362, 5364), False, 'import sys\n'), ((6160, 6175), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (6173, 6175), False, 'import sys\n'), ((6739, 6754), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (6752, 6754), False, 'import sys\n'), ((11533, 11548), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (11546, 11548), False, 'import sys\n'), ((12628, 12643), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (12641, 12643), False, 'import sys\n'), ((13408, 13423), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (13421, 13423), False, 'import sys\n'), ((14458, 14473), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (14471, 14473), False, 'import sys\n'), ((15385, 15400), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (15398, 15400), False, 'import sys\n')]
|
from matplotlib import pyplot as plt
import pandas as pd
import random
from itertools import count
from matplotlib.animation import FuncAnimation
plt.style.use('bmh')
# index = count()
# x = []
# y = []
# def animate(i):
# x.append(next(index))
# y.append(random.randint(1, 10))
# plt.cla()
# plt.plot(x, y)
#     plt.title('Level Monitor')
# plt.xlabel('Count')
# plt.ylabel('Levels')
# plt.yticks(ticks=range(12))
# ani = FuncAnimation(plt.gcf(), animate, interval=1000)
# plt.tight_layout()
# plt.show()
def animate(i):
df = pd.read_csv('data\\changing_data.csv')
x = df.iloc[-50:, 0]
y1 = df.iloc[-50:, 1]
y2 = df.iloc[-50:, 2]
plt.cla() # clear axis
plt.plot(x, y1, label='Ajaira LTD')
plt.plot(x, y2, label='<NAME>')
plt.fill_between(x, y1, y2, where=y1 > y2, color='b', alpha=0.5, interpolate=True)
plt.fill_between(x, y1, y2, where=y1 <= y2, color='r', alpha=0.5, interpolate=True)
plt.title('Channel Subscriptions')
plt.xlabel('Days')
plt.ylabel('Subscriptions')
plt.legend()
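# Sketch of the CSV layout animate() assumes (header names are hypothetical;
# only the first three columns are read positionally via iloc):
# x_value,total_1,total_2
# 0,500,0
# 1,509,77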
ani = FuncAnimation(plt.gcf(), animate, interval=1000)
plt.tight_layout()
plt.show()
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"pandas.read_csv",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.style.use",
"matplotlib.pyplot.cla",
"matplotlib.pyplot.gcf",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.fill_between",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.tight_layout"
] |
[((146, 166), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""bmh"""'], {}), "('bmh')\n", (159, 166), True, 'from matplotlib import pyplot as plt\n'), ((1136, 1154), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1152, 1154), True, 'from matplotlib import pyplot as plt\n'), ((1155, 1165), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1163, 1165), True, 'from matplotlib import pyplot as plt\n'), ((569, 607), 'pandas.read_csv', 'pd.read_csv', (['"""data\\\\changing_data.csv"""'], {}), "('data\\\\changing_data.csv')\n", (580, 607), True, 'import pandas as pd\n'), ((691, 700), 'matplotlib.pyplot.cla', 'plt.cla', ([], {}), '()\n', (698, 700), True, 'from matplotlib import pyplot as plt\n'), ((718, 753), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y1'], {'label': '"""Ajaira LTD"""'}), "(x, y1, label='Ajaira LTD')\n", (726, 753), True, 'from matplotlib import pyplot as plt\n'), ((758, 789), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y2'], {'label': '"""<NAME>"""'}), "(x, y2, label='<NAME>')\n", (766, 789), True, 'from matplotlib import pyplot as plt\n'), ((795, 881), 'matplotlib.pyplot.fill_between', 'plt.fill_between', (['x', 'y1', 'y2'], {'where': '(y1 > y2)', 'color': '"""b"""', 'alpha': '(0.5)', 'interpolate': '(True)'}), "(x, y1, y2, where=y1 > y2, color='b', alpha=0.5,\n interpolate=True)\n", (811, 881), True, 'from matplotlib import pyplot as plt\n'), ((882, 969), 'matplotlib.pyplot.fill_between', 'plt.fill_between', (['x', 'y1', 'y2'], {'where': '(y1 <= y2)', 'color': '"""r"""', 'alpha': '(0.5)', 'interpolate': '(True)'}), "(x, y1, y2, where=y1 <= y2, color='r', alpha=0.5,\n interpolate=True)\n", (898, 969), True, 'from matplotlib import pyplot as plt\n'), ((971, 1005), 'matplotlib.pyplot.title', 'plt.title', (['"""Channel Subscriptions"""'], {}), "('Channel Subscriptions')\n", (980, 1005), True, 'from matplotlib import pyplot as plt\n'), ((1010, 1028), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Days"""'], {}), "('Days')\n", (1020, 1028), True, 'from matplotlib import pyplot as plt\n'), ((1033, 1060), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Subscriptions"""'], {}), "('Subscriptions')\n", (1043, 1060), True, 'from matplotlib import pyplot as plt\n'), ((1065, 1077), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1075, 1077), True, 'from matplotlib import pyplot as plt\n'), ((1100, 1109), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (1107, 1109), True, 'from matplotlib import pyplot as plt\n')]
|
import pytest
from roocs_utils.exceptions import InvalidParameterValue
from roocs_utils.parameter.time_components_parameter import string_to_dict
from roocs_utils.parameter.time_components_parameter import time_components
from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter
type_error = (
"Input type of <{}> not allowed. Must be one of: "
"[<class 'dict'>, <class 'str'>, <class "
"'roocs_utils.parameter.param_utils.TimeComponents'>, <class 'NoneType'>]"
)
tc_str = "year:1999,2000,2001|month:12,01,02|hour:00"
tc_dict = {"year": [1999, 2000, 2001], "month": [12, 1, 2], "hour": [0]}
tc_dict_month_long_names = {
"year": [1999, 2000, 2001],
"month": ["December", "January", "February"],
"hour": [0],
}
tc_dict_short_names = {
"year": [1999, 2000, 2001],
"month": ["dec", "jan", "feb"],
"hour": [0],
}
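# The string form mirrors the dicts above: "|" separates components, ":" binds a
# name to its values and "," separates values, e.g.
# "year:1999,2000|hour:00" -> {"year": [1999, 2000], "hour": [0]}.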
def test_string_input():
# year, month, hour
parameter = TimeComponentsParameter("year:1999,2000,2001|month:dec,jan,feb|hour:00")
assert parameter.value["year"] == [1999, 2000, 2001]
assert parameter.value["month"] == [12, 1, 2]
assert parameter.value["hour"] == [0]
assert parameter.get_bounds() == ("1999-01-01T00:00:00", "2001-12-31T23:59:59")
# month
parameter = TimeComponentsParameter("month:12,1,02")
assert parameter.value["month"] == [12, 1, 2]
assert parameter.get_bounds() == (None, None)
# single year
parameter = TimeComponentsParameter("year:2010|month:mar,apr,may")
assert parameter.value["year"] == [2010]
assert parameter.value["month"] == [3, 4, 5]
assert parameter.get_bounds() == ("2010-01-01T00:00:00", "2010-12-31T23:59:59")
def test_TimeComponents_class():
tc1 = time_components(**string_to_dict(tc_str))
tc2 = time_components(**tc_dict)
tc3 = time_components(**tc_dict_month_long_names)
tc4 = time_components(**tc_dict_short_names)
assert tc1.value == tc2.value
assert tc2.value == tc3.value
assert tc2.value == tc4.value
def test__str__():
parameter = TimeComponentsParameter(tc_str)
assert str(parameter).startswith("Time components to select:")
assert "month => [12, 1, 2]" in str(parameter)
def test_raw():
parameter = TimeComponentsParameter(tc_str)
assert parameter.raw == tc_str
def test_validate_error_id():
with pytest.raises(InvalidParameterValue) as exc:
TimeComponentsParameter("I am rubbish")
assert str(exc.value) == "Cannot create TimeComponentsParameter from: I am rubbish"
def test_bad_type_input():
with pytest.raises(InvalidParameterValue) as exc:
TimeComponentsParameter(34)
assert str(exc.value) == type_error.format("class 'int'")
def test_dict():
for input_dct in (tc_dict, tc_dict_short_names, tc_dict_month_long_names):
parameter = TimeComponentsParameter(input_dct)
assert parameter.value == tc_dict
def test_time_components_input():
tc = time_components(**tc_dict)
parameter = TimeComponentsParameter(tc)
assert parameter.value == tc_dict
def test_time_components_with_args():
tc = time_components(year=[200, 500], hour="06")
assert tc.value["year"] == [200, 500]
assert tc.value["hour"] == [6]
def test_whitespace():
parameter = TimeComponentsParameter(tc_str + " ")
assert parameter.value == tc_dict
def test_empty_string():
parameter = TimeComponentsParameter("")
assert parameter.value is None
def test_none():
parameter = TimeComponentsParameter(None)
assert parameter.value is None
def test_class_instance():
parameter = TimeComponentsParameter(tc_str)
new_parameter = TimeComponentsParameter(parameter)
assert new_parameter.value == tc_dict
|
[
"roocs_utils.parameter.time_components_parameter.TimeComponentsParameter",
"pytest.raises",
"roocs_utils.parameter.time_components_parameter.string_to_dict",
"roocs_utils.parameter.time_components_parameter.time_components"
] |
[((943, 1015), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (['"""year:1999,2000,2001|month:dec,jan,feb|hour:00"""'], {}), "('year:1999,2000,2001|month:dec,jan,feb|hour:00')\n", (966, 1015), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((1277, 1317), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (['"""month:12,1,02"""'], {}), "('month:12,1,02')\n", (1300, 1317), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((1452, 1506), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (['"""year:2010|month:mar,apr,may"""'], {}), "('year:2010|month:mar,apr,may')\n", (1475, 1506), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((1782, 1808), 'roocs_utils.parameter.time_components_parameter.time_components', 'time_components', ([], {}), '(**tc_dict)\n', (1797, 1808), False, 'from roocs_utils.parameter.time_components_parameter import time_components\n'), ((1819, 1862), 'roocs_utils.parameter.time_components_parameter.time_components', 'time_components', ([], {}), '(**tc_dict_month_long_names)\n', (1834, 1862), False, 'from roocs_utils.parameter.time_components_parameter import time_components\n'), ((1873, 1911), 'roocs_utils.parameter.time_components_parameter.time_components', 'time_components', ([], {}), '(**tc_dict_short_names)\n', (1888, 1911), False, 'from roocs_utils.parameter.time_components_parameter import time_components\n'), ((2052, 2083), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (['tc_str'], {}), '(tc_str)\n', (2075, 2083), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((2236, 2267), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (['tc_str'], {}), '(tc_str)\n', (2259, 2267), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((2946, 2972), 'roocs_utils.parameter.time_components_parameter.time_components', 'time_components', ([], {}), '(**tc_dict)\n', (2961, 2972), False, 'from roocs_utils.parameter.time_components_parameter import time_components\n'), ((2989, 3016), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (['tc'], {}), '(tc)\n', (3012, 3016), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((3104, 3147), 'roocs_utils.parameter.time_components_parameter.time_components', 'time_components', ([], {'year': '[200, 500]', 'hour': '"""06"""'}), "(year=[200, 500], hour='06')\n", (3119, 3147), False, 'from roocs_utils.parameter.time_components_parameter import time_components\n'), ((3266, 3305), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (["(tc_str + ' ')"], {}), "(tc_str + ' ')\n", (3289, 3305), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((3387, 3414), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (['""""""'], {}), "('')\n", (3410, 3414), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((3485, 3514), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (['None'], {}), '(None)\n', (3508, 3514), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((3595, 3626), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (['tc_str'], {}), '(tc_str)\n', (3618, 3626), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((3647, 3681), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (['parameter'], {}), '(parameter)\n', (3670, 3681), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((2344, 2380), 'pytest.raises', 'pytest.raises', (['InvalidParameterValue'], {}), '(InvalidParameterValue)\n', (2357, 2380), False, 'import pytest\n'), ((2397, 2436), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (['"""I am rubbish"""'], {}), "('I am rubbish')\n", (2420, 2436), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((2563, 2599), 'pytest.raises', 'pytest.raises', (['InvalidParameterValue'], {}), '(InvalidParameterValue)\n', (2576, 2599), False, 'import pytest\n'), ((2616, 2643), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (['(34)'], {}), '(34)\n', (2639, 2643), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((2824, 2858), 'roocs_utils.parameter.time_components_parameter.TimeComponentsParameter', 'TimeComponentsParameter', (['input_dct'], {}), '(input_dct)\n', (2847, 2858), False, 'from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter\n'), ((1748, 1770), 'roocs_utils.parameter.time_components_parameter.string_to_dict', 'string_to_dict', (['tc_str'], {}), '(tc_str)\n', (1762, 1770), False, 'from roocs_utils.parameter.time_components_parameter import string_to_dict\n')]
|
from django.db import models
class Puesto(models.Model):
nombre = models.CharField(max_length=255)
created_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.nombre
|
[
"django.db.models.CharField",
"django.db.models.DateTimeField"
] |
[((112, 144), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (128, 144), False, 'from django.db import models\n'), ((162, 201), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (182, 201), False, 'from django.db import models\n')]
|
import cv2
import numpy as np
def label2rgb(label_np):
print(label_np)
label_color = np.argmax(label_np, axis=0)
label_color = label_color / np.max(label_color) * 255
print(label_color)
n = label_color.astype(np.uint8)
n = np.array(n)
print(type(n))
    label_color = cv2.applyColorMap(n, cv2.COLORMAP_JET)  # needs a colormap flag, not the string 'jet'
return label_color
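# Usage sketch (assumption: label_np is a one-hot / score volume of shape
# (num_classes, H, W), so argmax over axis 0 picks the winning class per pixel):
# labels = np.zeros((3, 4, 4), dtype=np.float32)
# labels[1, :2] = 1.0
# labels[2, 2:] = 1.0
# rgb = label2rgb(labels)  # -> (4, 4, 3) BGR image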
|
[
"cv2.applyColorMap",
"numpy.max",
"numpy.array",
"numpy.argmax"
] |
[((95, 122), 'numpy.argmax', 'np.argmax', (['label_np'], {'axis': '(0)'}), '(label_np, axis=0)\n', (104, 122), True, 'import numpy as np\n'), ((249, 260), 'numpy.array', 'np.array', (['n'], {}), '(n)\n', (257, 260), True, 'import numpy as np\n'), ((298, 325), 'cv2.applyColorMap', 'cv2.applyColorMap', (['n', '"""jet"""'], {}), "(n, 'jet')\n", (315, 325), False, 'import cv2\n'), ((155, 174), 'numpy.max', 'np.max', (['label_color'], {}), '(label_color)\n', (161, 174), True, 'import numpy as np\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from greytheory import GreyTheory
grey = GreyTheory()
# GM0N
gm0n = grey.gm0n
gm0n.add_outputs([1., 1., 1., 1., 1., 1.], "x1")
gm0n.add_patterns([.75, 1.22, .2, 1., 1., 1.], "x2")
gm0n.add_patterns([.5, 1., .7, .66, 1., .5], "x3")
gm0n.add_patterns([1., 1.09, .4, .33, .66, .25], "x4")
gm0n.add_patterns([.25, .99, 1., .66, .33, .25], "x5")
gm0n.analyze()
# Inspect the GM0N results as below:
gm0n.print_analyzed_results()
gm0n.print_influence_degrees()
# GM1N
gm1n = grey.gm1n
gm1n.add_outputs([2., 11., 1.5, 2., 2.2, 3.], "x1")
gm1n.add_patterns([3., 13.5, 1., 3., 3., 4.], "x2")
gm1n.add_patterns([2., 11., 3.5, 2., 3., 2.], "x3")
gm1n.add_patterns([4., 12., 2., 1., 2., 1.], "x4")
gm1n.add_patterns([1., 10., 5., 2., 1., 1.], "x5")
gm1n.analyze()
# Inspect the GM1N results as below:
gm1n.print_analyzed_results()
gm1n.print_influence_degrees()
# GM11
gm11 = grey.gm11
# To try customized alpha for IAGO of Z.
gm11.alpha = 0.5
gm11.convolution = True # Convolutional forecasting of GM11.
gm11.stride = 1
gm11.length = 4
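# Assumption: with convolution enabled, forecasting slides a window of `length`
# patterns forward by `stride` instead of fitting all patterns at once.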
# gm11.add_pattern(533.0, "x1")
# gm11.add_pattern(665.0, "x2")
# gm11.add_pattern(655.0, "x3")
# gm11.add_pattern(740.0, "x4")
gm11.add_pattern(223.3, "a1")
gm11.add_pattern(227.3, "a2")
gm11.add_pattern(230.5, "a3")
gm11.add_pattern(238.1, "a4")
gm11.add_pattern(242.9, "a5")
gm11.add_pattern(251.1, "a6")
gm11.forecast()
# To record last forecasted result.
#last_forecasted_results = gm11.forecasted_outputs
# To clean all forecasted results.
#gm11.clean_forecasted()
# If the next forecasting iteration should keep building on the last forecasted results,
# but the gm11.forecasted_outputs list was already cleaned as above,
# use continue_forecasting() to recall the last forecasted results so they can serve as convolutional features again.
#gm11.continue_forecasting(last_forecasted_results)
# Inspect the GM11 forecasted results as below:
gm11.print_forecasted_results()
"""
# multiprocessing examples:
# for GM0N, GM1N
queue = []
queue.append(gm0n.deepcopy())
queue.append(gm0n.deepcopy())
queue.append(gm0n.deepcopy())
queue.append(gm0n.deepcopy())
queue.append(gm0n.deepcopy())
queue.append(gm0n.deepcopy())
queue.append(gm0n.deepcopy())
grey.run.gm0n(queue)
for gm in queue:
gm.print_influence_degrees()
# for GM11
gm11_queue = []
gm11_queue.append(gm11.deepcopy())
gm11_queue.append(gm11.deepcopy())
gm11_queue.append(gm11.deepcopy())
gm11_queue.append(gm11.deepcopy())
gm11_queue.append(gm11.deepcopy())
gm11_queue.append(gm11.deepcopy())
gm11_queue.append(gm11.deepcopy())
grey.run.gm11(gm11_queue)
for gm in gm11_queue:
gm.print_forecasted_results()
"""
|
[
"greytheory.GreyTheory"
] |
[((86, 98), 'greytheory.GreyTheory', 'GreyTheory', ([], {}), '()\n', (96, 98), False, 'from greytheory import GreyTheory\n')]
|
"""Serveradmin
Copyright (c) 2019 InnoGames GmbH
"""
from json import dumps
from django import template
from django.conf import settings
from adminapi.filters import filter_classes
from serveradmin.serverdb.models import Attribute, Servertype
register = template.Library()
@register.inclusion_tag('serversearch.html')
def serversearch_js(search_id):
servertypes = Servertype.objects.all()
attributes = list(Attribute.objects.all())
attributes.extend(Attribute.specials.values())
return {
'servertypes_json': dumps({s.servertype_id: {} for s in servertypes}),
'attributes_json': dumps({
a.attribute_id: {
'multi': a.multi,
'type': a.type,
'regexp': a.regexp,
}
for a in attributes
}),
'filters_json': dumps([f.__name__ for f in filter_classes]),
'search_id': search_id,
'STATIC_URL': settings.STATIC_URL,
}
@register.filter
def json(value):
return dumps(value)
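# Template usage sketch (load this tag library by this file's module name,
# which is not shown here):
# {% serversearch_js "search1" %} renders serversearch.html,
# {{ value|json }} dumps any value as JSON.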
|
[
"django.template.Library",
"serveradmin.serverdb.models.Attribute.objects.all",
"serveradmin.serverdb.models.Attribute.specials.values",
"json.dumps",
"serveradmin.serverdb.models.Servertype.objects.all"
] |
[((259, 277), 'django.template.Library', 'template.Library', ([], {}), '()\n', (275, 277), False, 'from django import template\n'), ((375, 399), 'serveradmin.serverdb.models.Servertype.objects.all', 'Servertype.objects.all', ([], {}), '()\n', (397, 399), False, 'from serveradmin.serverdb.models import Attribute, Servertype\n'), ((1013, 1025), 'json.dumps', 'dumps', (['value'], {}), '(value)\n', (1018, 1025), False, 'from json import dumps\n'), ((422, 445), 'serveradmin.serverdb.models.Attribute.objects.all', 'Attribute.objects.all', ([], {}), '()\n', (443, 445), False, 'from serveradmin.serverdb.models import Attribute, Servertype\n'), ((469, 496), 'serveradmin.serverdb.models.Attribute.specials.values', 'Attribute.specials.values', ([], {}), '()\n', (494, 496), False, 'from serveradmin.serverdb.models import Attribute, Servertype\n'), ((540, 589), 'json.dumps', 'dumps', (['{s.servertype_id: {} for s in servertypes}'], {}), '({s.servertype_id: {} for s in servertypes})\n', (545, 589), False, 'from json import dumps\n'), ((618, 722), 'json.dumps', 'dumps', (["{a.attribute_id: {'multi': a.multi, 'type': a.type, 'regexp': a.regexp} for\n a in attributes}"], {}), "({a.attribute_id: {'multi': a.multi, 'type': a.type, 'regexp': a.\n regexp} for a in attributes})\n", (623, 722), False, 'from json import dumps\n'), ((840, 883), 'json.dumps', 'dumps', (['[f.__name__ for f in filter_classes]'], {}), '([f.__name__ for f in filter_classes])\n', (845, 883), False, 'from json import dumps\n')]
|
from invoke import task
_TEST_FOLDER = "tests"
_SOURCE_FOLDERS = " ".join(["bq_schema", _TEST_FOLDER])
@task
def lint(context):
context.run(f"pylint {_SOURCE_FOLDERS}")
@task
def type_check(context):
context.run("mypy bq_schema")
@task
def check_code_format(context):
context.run("black --check .")
context.run("isort --profile black --check .")
@task
def test(context):
context.run(
f"pytest {_TEST_FOLDER} --doctest-modules --junitxml=junit/test-results.xml --cov=bq_schema --cov-report=xml --cov-report=html"
)
@task
def format_code(context):
context.run("black .")
context.run("isort --profile black .")
@task(pre=[lint, type_check, check_code_format, test])
def check_all(_):
pass
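# Invoke exposes underscored task names with dashes on the CLI, so the
# aggregate task above runs as:
#   invoke check-all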
|
[
"invoke.task"
] |
[((662, 715), 'invoke.task', 'task', ([], {'pre': '[lint, type_check, check_code_format, test]'}), '(pre=[lint, type_check, check_code_format, test])\n', (666, 715), False, 'from invoke import task\n')]
|
store = {}
def anagram_key(s):
if s not in store:
store[s] = sorted(s)
return store[s]
def group_anagrams(ls):
    # Sorting by each word's sorted letters places anagrams next to each other.
    ls = sorted(ls, key=anagram_key)
    return ls
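# A dict-based alternative that returns explicit groups instead of a sorted
# list (a sketch, not part of the original exercise):
# def group_anagrams_dict(ls):
#     groups = {}
#     for word in ls:
#         groups.setdefault("".join(sorted(word)), []).append(word)
#     return list(groups.values())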
def test():
from random import shuffle
l = [
"ascot",
"coats",
"coast",
"sushi",
"tacos",
"angel",
"breakfast",
"angle",
"glean",
"deist",
"coffee",
"diets",
"edits",
"sited",
"tides",
]
shuffle(l)
print(group_anagrams(l))
if __name__ == '__main__':
test()
|
[
"random.shuffle"
] |
[((510, 520), 'random.shuffle', 'shuffle', (['l'], {}), '(l)\n', (517, 520), False, 'from random import shuffle\n')]
|
from flask import render_template,request,redirect,url_for,abort
from flask_login import login_user,login_required,current_user,logout_user
from ..models import User
from .forms import LoginForm,RegisterForm
from . import auth
# Views
@auth.route('/login', methods=["GET","POST"])
def login():
if current_user.is_authenticated:
return redirect(url_for('main.dashboard'))
title = 'Login'
Form = LoginForm()
Error=False
if Form.validate_on_submit():
username=str(Form.username.data)
password=str(Form.password.data)
if username and password:
user=User.query.filter(User.username==username).first()
if user and user.verifypass(password):
login_user(user,Form.remember.data)
return redirect(url_for('main.dashboard'))
Error='Wrong Username or Password'
else:
Error='Please Type a Username or Password'
    return render_template('login.html', title=title, Form=Form, Error=Error)
@auth.route('/register', methods=["GET","POST"])
def register():
if current_user.is_authenticated:
return redirect(url_for('main.dashboard'))
title = 'Register'
Form = RegisterForm()
Error=False
if Form.validate_on_submit():
username=str(Form.username.data)
password=str(Form.password.data)
if username and password:
user=User.query.filter(User.username==username).first()
if not user:
user=User(username=username,passwd=password)
user.save()
return redirect(url_for('auth.login'))
Error='Username Already taken'
    return render_template('register.html', title=title, Form=Form, Error=Error)
@auth.route('/logout')
def logout():
if current_user.is_authenticated:
logout_user()
return redirect(url_for('main.dashboard'))
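# Blueprint wiring assumed by "from . import auth" (a sketch of the package's
# __init__.py, which is not shown here):
# from flask import Blueprint
# auth = Blueprint('auth', __name__)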
|
[
"flask.url_for",
"flask_login.login_user",
"flask_login.logout_user",
"flask.render_template"
] |
[((982, 1048), 'flask.render_template', 'render_template', (['"""login.html"""'], {'title': 'title', 'Form': 'Form', 'Error': 'Error'}), "('login.html', title=title, Form=Form, Error=Error)\n", (997, 1048), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((1711, 1780), 'flask.render_template', 'render_template', (['"""register.html"""'], {'title': 'title', 'Form': 'Form', 'Error': 'Error'}), "('register.html', title=title, Form=Form, Error=Error)\n", (1726, 1780), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((1866, 1879), 'flask_login.logout_user', 'logout_user', ([], {}), '()\n', (1877, 1879), False, 'from flask_login import login_user, login_required, current_user, logout_user\n'), ((1900, 1925), 'flask.url_for', 'url_for', (['"""main.dashboard"""'], {}), "('main.dashboard')\n", (1907, 1925), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((357, 382), 'flask.url_for', 'url_for', (['"""main.dashboard"""'], {}), "('main.dashboard')\n", (364, 382), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((1178, 1203), 'flask.url_for', 'url_for', (['"""main.dashboard"""'], {}), "('main.dashboard')\n", (1185, 1203), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((760, 796), 'flask_login.login_user', 'login_user', (['user', 'Form.remember.data'], {}), '(user, Form.remember.data)\n', (770, 796), False, 'from flask_login import login_user, login_required, current_user, logout_user\n'), ((828, 853), 'flask.url_for', 'url_for', (['"""main.dashboard"""'], {}), "('main.dashboard')\n", (835, 853), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((1634, 1655), 'flask.url_for', 'url_for', (['"""auth.login"""'], {}), "('auth.login')\n", (1641, 1655), False, 'from flask import render_template, request, redirect, url_for, abort\n')]
|
import sys
import os
import shutil
import shlex
from command import Command
def check_ffmpeg_installed():
if shutil.which('ffmpeg') is None:
print('The program \'ffmpeg\' is not installed in your system.\n'
'You can install it by visiting http://ffmpeg.org/download.html')
sys.exit(0)
def convert_file(path: str, from_format: str, to: str, delete_original=False):
assert path.endswith(from_format)
escaped = shlex.quote(path)
new_name = escaped.replace(from_format, to)
ffmpeg = shutil.which('ffmpeg')
filename = path[path.rfind('/') + 1:]
print('Converting \'{}\''.format(filename))
cmd = Command('{} -i {} {}'.format(ffmpeg, escaped, new_name), os.getcwd())
sig, _, err = cmd.run()
if sig != 0:
print('\'{}\' could not be converted'.format(filename))
print('>>> ERR >>>')
print(err)
if delete_original:
os.remove(path)
def convert_directory(path: str, from_format: str, to: str, delete_original=False):
for dirpath, _, files in os.walk(path):
for f in filter(lambda f: f.endswith(from_format), files):
convert_file('{}/{}'.format(dirpath, f), from_format,
to, delete_original)
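# Usage sketch (paths and formats below are hypothetical):
# if __name__ == '__main__':
#     check_ffmpeg_installed()
#     convert_directory('/music', '.flac', '.mp3', delete_original=False)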
|
[
"os.remove",
"os.getcwd",
"os.walk",
"shutil.which",
"shlex.quote",
"sys.exit"
] |
[((455, 472), 'shlex.quote', 'shlex.quote', (['path'], {}), '(path)\n', (466, 472), False, 'import shlex\n'), ((534, 556), 'shutil.which', 'shutil.which', (['"""ffmpeg"""'], {}), "('ffmpeg')\n", (546, 556), False, 'import shutil\n'), ((1051, 1064), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (1058, 1064), False, 'import os\n'), ((115, 137), 'shutil.which', 'shutil.which', (['"""ffmpeg"""'], {}), "('ffmpeg')\n", (127, 137), False, 'import shutil\n'), ((309, 320), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (317, 320), False, 'import sys\n'), ((716, 727), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (725, 727), False, 'import os\n'), ((920, 935), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (929, 935), False, 'import os\n')]
|
import pandas as pd
import numpy as np
from tpot import TPOTClassifier
from sklearn.model_selection import train_test_split
benchmark = pd.read_pickle('us_pct.pickle') # us overall housing price index percentage change
HPI = pd.read_pickle('HPI_complete.pickle') # all of the state data, thirty year mortgage, unemployment rate, GDP, SP500
HPI = HPI.join(benchmark['United States'])
# all in percentage change since the start of the data (1975-01-01)
HPI.dropna(inplace=True)
housing_pct = HPI.pct_change()
housing_pct.replace([np.inf, -np.inf], np.nan, inplace=True)
housing_pct['US_HPI_future'] = housing_pct['United States'].shift(-1)
housing_pct.dropna(inplace=True)
def create_labels(cur_hpi, fut_hpi):
if fut_hpi > cur_hpi:
return 1
else:
return 0
housing_pct['label'] = list(map(create_labels, housing_pct['United States'], housing_pct['US_HPI_future']))
# housing_pct['ma_apply_example'] = housing_pct['M30'].rolling(window=10).apply(moving_average)
# print(housing_pct.tail())
X = np.array(housing_pct.drop(['label', 'US_HPI_future'], axis=1))
y = np.array(housing_pct['label'])
X_train, X_test, y_train, y_test = train_test_split(X,y, test_size=0.25)
tpot = TPOTClassifier(generations=10, population_size=20, verbosity=2)
tpot.fit(X_train, y_train)
print(tpot.score(X_test, y_test))
tpot.export('HPI_tpot_pipeline.py')
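# The exported file contains a plain scikit-learn pipeline (named
# `exported_pipeline` in TPOT's template) that can be re-run without TPOT.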
|
[
"pandas.read_pickle",
"sklearn.model_selection.train_test_split",
"numpy.array",
"tpot.TPOTClassifier"
] |
[((139, 170), 'pandas.read_pickle', 'pd.read_pickle', (['"""us_pct.pickle"""'], {}), "('us_pct.pickle')\n", (153, 170), True, 'import pandas as pd\n'), ((229, 266), 'pandas.read_pickle', 'pd.read_pickle', (['"""HPI_complete.pickle"""'], {}), "('HPI_complete.pickle')\n", (243, 266), True, 'import pandas as pd\n'), ((1086, 1116), 'numpy.array', 'np.array', (["housing_pct['label']"], {}), "(housing_pct['label'])\n", (1094, 1116), True, 'import numpy as np\n'), ((1153, 1191), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.25)'}), '(X, y, test_size=0.25)\n', (1169, 1191), False, 'from sklearn.model_selection import train_test_split\n'), ((1199, 1262), 'tpot.TPOTClassifier', 'TPOTClassifier', ([], {'generations': '(10)', 'population_size': '(20)', 'verbosity': '(2)'}), '(generations=10, population_size=20, verbosity=2)\n', (1213, 1262), False, 'from tpot import TPOTClassifier\n')]
|
#Important Modules
from flask import Flask, render_template, url_for, flash, redirect
import pickle
from flask import request
import numpy as np
import os
from flask import send_from_directory
#from this import SQLAlchemy
app=Flask(__name__,template_folder='template')
@app.route("/")
@app.route("/home")
def home():
return render_template("home.html")
@app.route("/about")
def about():
return render_template("about.html")
@app.route("/heart")
def heart():
return render_template("heart.html")
def ValuePredictor(to_predict_list, size):
to_predict = np.array(to_predict_list).reshape(1,size)
if(size==13):#Heart
loaded_model = pickle.load(open("Heart_model_new.pkl","rb"))
result =loaded_model.predict(to_predict)
return result[0]
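# Usage sketch (assumption: the pickled model was trained on the 13-feature
# UCI heart dataset; the values below are made up for illustration):
# ValuePredictor([63, 1, 3, 145, 233, 1, 0, 150, 0, 2.3, 0, 0, 1], 13)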
@app.route('/result',methods = ["POST"])
def result():
if request.method == 'POST':
to_predict_list = request.form.to_dict()
to_predict_list=list(to_predict_list.values())
to_predict_list = list(map(float, to_predict_list))
if(len(to_predict_list)==13):#heart
result = ValuePredictor(to_predict_list,13)
if(int(result)==1):
prediction="The patient's heart seems to be healthy."
else:
prediction="The patient's heart does not seems to be healthy."
return(render_template("result.html", prediction=prediction))
if __name__ == "__main__":
app.run(debug=True)
|
[
"numpy.array",
"flask.Flask",
"flask.request.form.to_dict",
"flask.render_template"
] |
[((233, 276), 'flask.Flask', 'Flask', (['__name__'], {'template_folder': '"""template"""'}), "(__name__, template_folder='template')\n", (238, 276), False, 'from flask import Flask, render_template, url_for, flash, redirect\n'), ((340, 368), 'flask.render_template', 'render_template', (['"""home.html"""'], {}), "('home.html')\n", (355, 368), False, 'from flask import Flask, render_template, url_for, flash, redirect\n'), ((417, 446), 'flask.render_template', 'render_template', (['"""about.html"""'], {}), "('about.html')\n", (432, 446), False, 'from flask import Flask, render_template, url_for, flash, redirect\n'), ((493, 522), 'flask.render_template', 'render_template', (['"""heart.html"""'], {}), "('heart.html')\n", (508, 522), False, 'from flask import Flask, render_template, url_for, flash, redirect\n'), ((1354, 1407), 'flask.render_template', 'render_template', (['"""result.html"""'], {'prediction': 'prediction'}), "('result.html', prediction=prediction)\n", (1369, 1407), False, 'from flask import Flask, render_template, url_for, flash, redirect\n'), ((905, 927), 'flask.request.form.to_dict', 'request.form.to_dict', ([], {}), '()\n', (925, 927), False, 'from flask import request\n'), ((585, 610), 'numpy.array', 'np.array', (['to_predict_list'], {}), '(to_predict_list)\n', (593, 610), True, 'import numpy as np\n')]
|
import cv2 as cv
import numpy as np
if __name__ == "__main__":
img = cv.imread('../../assets/test1.jpg')
height, width = img.shape[:2] # rows, columns
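    # warpAffine applies a 2x3 matrix M = [[1, 0, tx], [0, 1, ty]], which maps
    # each pixel (x, y) to (x + tx, y + ty); tx/ty below are the pixel shifts.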
# translating the img 200 pixels right (x axis)
translation_matrix = np.float32([[1, 0, 200], [0, 1, 0]])
output = cv.warpAffine(img, translation_matrix, (width, height))
cv.imshow('1) 200 Pixels right', output)
# translating the img 50 pixels down (y axis)
translation_matrix = np.float32([[1, 0, 0], [0, 1, 50]])
output = cv.warpAffine(img, translation_matrix, (width, height))
cv.imshow('2) 50 Pixels Down', output)
# translating the img in both x-y axis.
translation_matrix = np.float32([[1, 0, 200], [0, 1, 50]])
output = cv.warpAffine(img, translation_matrix, (width, height))
cv.imshow('3) (dx, dy) = (200, 50)', output)
# translating without getting cropped (by increasing the output size)
translation_matrix = np.float32([[1, 0, 200], [0, 1, 50]])
output = cv.warpAffine(img, translation_matrix, (width + 200, height + 50))
cv.imshow("4) Preventing Crop", output)
cv.waitKey(0)
cv.destroyAllWindows()
|
[
"cv2.waitKey",
"cv2.destroyAllWindows",
"numpy.float32",
"cv2.imread",
"cv2.warpAffine",
"cv2.imshow"
] |
[((79, 114), 'cv2.imread', 'cv.imread', (['"""../../assets/test1.jpg"""'], {}), "('../../assets/test1.jpg')\n", (88, 114), True, 'import cv2 as cv\n'), ((245, 281), 'numpy.float32', 'np.float32', (['[[1, 0, 200], [0, 1, 0]]'], {}), '([[1, 0, 200], [0, 1, 0]])\n', (255, 281), True, 'import numpy as np\n'), ((295, 350), 'cv2.warpAffine', 'cv.warpAffine', (['img', 'translation_matrix', '(width, height)'], {}), '(img, translation_matrix, (width, height))\n', (308, 350), True, 'import cv2 as cv\n'), ((355, 395), 'cv2.imshow', 'cv.imshow', (['"""1) 200 Pixels right"""', 'output'], {}), "('1) 200 Pixels right', output)\n", (364, 395), True, 'import cv2 as cv\n'), ((472, 507), 'numpy.float32', 'np.float32', (['[[1, 0, 0], [0, 1, 50]]'], {}), '([[1, 0, 0], [0, 1, 50]])\n', (482, 507), True, 'import numpy as np\n'), ((527, 582), 'cv2.warpAffine', 'cv.warpAffine', (['img', 'translation_matrix', '(width, height)'], {}), '(img, translation_matrix, (width, height))\n', (540, 582), True, 'import cv2 as cv\n'), ((587, 625), 'cv2.imshow', 'cv.imshow', (['"""2) 50 Pixels Down"""', 'output'], {}), "('2) 50 Pixels Down', output)\n", (596, 625), True, 'import cv2 as cv\n'), ((696, 733), 'numpy.float32', 'np.float32', (['[[1, 0, 200], [0, 1, 50]]'], {}), '([[1, 0, 200], [0, 1, 50]])\n', (706, 733), True, 'import numpy as np\n'), ((747, 802), 'cv2.warpAffine', 'cv.warpAffine', (['img', 'translation_matrix', '(width, height)'], {}), '(img, translation_matrix, (width, height))\n', (760, 802), True, 'import cv2 as cv\n'), ((807, 851), 'cv2.imshow', 'cv.imshow', (['"""3) (dx, dy) = (200, 50)"""', 'output'], {}), "('3) (dx, dy) = (200, 50)', output)\n", (816, 851), True, 'import cv2 as cv\n'), ((952, 989), 'numpy.float32', 'np.float32', (['[[1, 0, 200], [0, 1, 50]]'], {}), '([[1, 0, 200], [0, 1, 50]])\n', (962, 989), True, 'import numpy as np\n'), ((1003, 1069), 'cv2.warpAffine', 'cv.warpAffine', (['img', 'translation_matrix', '(width + 200, height + 50)'], {}), '(img, translation_matrix, (width + 200, height + 50))\n', (1016, 1069), True, 'import cv2 as cv\n'), ((1074, 1113), 'cv2.imshow', 'cv.imshow', (['"""4) Preventing Crop"""', 'output'], {}), "('4) Preventing Crop', output)\n", (1083, 1113), True, 'import cv2 as cv\n'), ((1119, 1132), 'cv2.waitKey', 'cv.waitKey', (['(0)'], {}), '(0)\n', (1129, 1132), True, 'import cv2 as cv\n'), ((1137, 1159), 'cv2.destroyAllWindows', 'cv.destroyAllWindows', ([], {}), '()\n', (1157, 1159), True, 'import cv2 as cv\n')]
|
# -*- coding: utf-8 -*-
from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.http import HttpResponse
from django.http import JsonResponse
import json
import datetime
# Template classes
class CreateTemplateView(CreateView):
def __init__(self, model, template_name, form_class, ctx=None):
self.model = model
self.template_name = template_name
self.form_class = form_class
        self.ctx = ctx
        self.with_request = False  # post() reads this; subclasses set True to pass the request to the form
def get(self, request, *args, **kwargs):
if self.ctx:
return render(request, self.template_name, self.ctx)
return render(request, self.template_name)
def post(self, request, *args, **kwargs):
post = request.POST
if self.with_request:
form = self.form_class(post, request)
else:
form = self.form_class(post)
if form.is_valid():
element = form.save()
response = [
element.as_json()
]
response = json.dumps(response, indent=4, separators=(',', ': '))
return HttpResponse(response, content_type="application/json")
else:
return JsonResponse({'status': 'false', 'message': form.error},
status=500)
class UpdateTemplateView(UpdateView):
def __init__(self, model, template_name, form_class, message_not_exists,
element_name):
self.template_name = template_name
self.message_not_exists = message_not_exists
self.model = model
self.element_name = element_name
self.form_class = form_class
self.ctx = {}
        self.element = None
        self.with_request = False  # post() reads this; subclasses set True to pass the request to the form
def get(self, request, *args, **kwargs):
element_id = kwargs['pk']
element = self.model.objects.filter(pk=element_id)
if not element:
return JsonResponse({'status': 'false',
'message': self.message_not_exists},
status=500)
element = element[0]
self.element = element
self.ctx[self.element_name] = element
self.add_data_ctx()
return render(request, self.template_name, self.ctx)
def post(self, request, *args, **kwargs):
post = request.POST
if self.with_request:
form = self.form_class(post, kwargs['pk'], request)
else:
form = self.form_class(post, kwargs['pk'])
if form.is_valid():
element = form.update()
response = [
element.as_json()
]
response = json.dumps(response, indent=4, separators=(',', ': '))
return HttpResponse(response, content_type="application/json")
else:
return JsonResponse({'status': 'false', 'message': form.error},
status=500)
def add_data_ctx(self):
pass
class DeleteTemplateView(DeleteView):
model = None
message_not_exists = None
def get(self, request, *args, **kwargs):
pk = int(kwargs['pk'])
element = self.model.objects.all().filter(pk=pk)
if element:
element.delete()
return JsonResponse({'status': 'true', 'message': 200}, status=200)
else:
return JsonResponse({'status': 'false',
'message': self.message_not_exists}, status=500
)
class DeleteLogicTemplateView(DeleteView):
model = None
message_not_exists = None
def get(self, request, *args, **kwargs):
pk = int(kwargs['pk'])
reason = kwargs['reason']
element = self.model.objects.all().filter(pk=pk)
if element:
element.update(eliminated=True,
eliminated_reason=reason,
eliminated_date=datetime.date.today())
return JsonResponse({'status': 'true', 'message': 200}, status=200)
else:
return JsonResponse({'status': 'false',
'message': self.message_not_exists}, status=500
)
class ListTemplateView(ListView):
def get(self, request, *args, **kwargs):
elements = self.model.objects.all()
response = [
element.as_json()
for element in elements
]
response = json.dumps(response, indent=4, separators=(',', ': '))
return HttpResponse(response, content_type="application/json")
class DetailTemplateView(DetailView):
def get(self, request, *args, **kwargs):
element_id = kwargs.get('pk', None)
element = self.model.objects.filter(pk=element_id)
if element:
element = element[0]
response = [
element.as_json()
]
response = json.dumps(response, indent=4, separators=(',', ': '))
return HttpResponse(response, content_type="application/json")
else:
return JsonResponse(
{'status': 'false', 'message': self.message_not_exists},
status=500)
class FilterTemplateView(ListView):
def get(self, request, *args, **kwargs):
filter = self.filter(request)
elements = filter.results()
response = [
element.as_json()
for element in elements
]
response = json.dumps(response, indent=4, separators=(',', ': '))
return HttpResponse(response, content_type="application/json")
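# Concrete subclass sketch (model/template/form names are hypothetical):
# class ItemCreateView(CreateTemplateView):
#     def __init__(self):
#         super().__init__(Item, 'items/create.html', ItemForm)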
|
[
"django.http.HttpResponse",
"datetime.date.today",
"json.dumps",
"django.http.JsonResponse",
"django.shortcuts.render"
] |
[((738, 773), 'django.shortcuts.render', 'render', (['request', 'self.template_name'], {}), '(request, self.template_name)\n', (744, 773), False, 'from django.shortcuts import render\n'), ((2287, 2332), 'django.shortcuts.render', 'render', (['request', 'self.template_name', 'self.ctx'], {}), '(request, self.template_name, self.ctx)\n', (2293, 2332), False, 'from django.shortcuts import render\n'), ((4514, 4568), 'json.dumps', 'json.dumps', (['response'], {'indent': '(4)', 'separators': "(',', ': ')"}), "(response, indent=4, separators=(',', ': '))\n", (4524, 4568), False, 'import json\n'), ((4584, 4639), 'django.http.HttpResponse', 'HttpResponse', (['response'], {'content_type': '"""application/json"""'}), "(response, content_type='application/json')\n", (4596, 4639), False, 'from django.http import HttpResponse\n'), ((5531, 5585), 'json.dumps', 'json.dumps', (['response'], {'indent': '(4)', 'separators': "(',', ': ')"}), "(response, indent=4, separators=(',', ': '))\n", (5541, 5585), False, 'import json\n'), ((5601, 5656), 'django.http.HttpResponse', 'HttpResponse', (['response'], {'content_type': '"""application/json"""'}), "(response, content_type='application/json')\n", (5613, 5656), False, 'from django.http import HttpResponse\n'), ((677, 722), 'django.shortcuts.render', 'render', (['request', 'self.template_name', 'self.ctx'], {}), '(request, self.template_name, self.ctx)\n', (683, 722), False, 'from django.shortcuts import render\n'), ((1143, 1197), 'json.dumps', 'json.dumps', (['response'], {'indent': '(4)', 'separators': "(',', ': ')"}), "(response, indent=4, separators=(',', ': '))\n", (1153, 1197), False, 'import json\n'), ((1217, 1272), 'django.http.HttpResponse', 'HttpResponse', (['response'], {'content_type': '"""application/json"""'}), "(response, content_type='application/json')\n", (1229, 1272), False, 'from django.http import HttpResponse\n'), ((1306, 1374), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 'false', 'message': form.error}"], {'status': '(500)'}), "({'status': 'false', 'message': form.error}, status=500)\n", (1318, 1374), False, 'from django.http import JsonResponse\n'), ((1992, 2077), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 'false', 'message': self.message_not_exists}"], {'status': '(500)'}), "({'status': 'false', 'message': self.message_not_exists},\n status=500)\n", (2004, 2077), False, 'from django.http import JsonResponse\n'), ((2731, 2785), 'json.dumps', 'json.dumps', (['response'], {'indent': '(4)', 'separators': "(',', ': ')"}), "(response, indent=4, separators=(',', ': '))\n", (2741, 2785), False, 'import json\n'), ((2805, 2860), 'django.http.HttpResponse', 'HttpResponse', (['response'], {'content_type': '"""application/json"""'}), "(response, content_type='application/json')\n", (2817, 2860), False, 'from django.http import HttpResponse\n'), ((2894, 2962), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 'false', 'message': form.error}"], {'status': '(500)'}), "({'status': 'false', 'message': form.error}, status=500)\n", (2906, 2962), False, 'from django.http import JsonResponse\n'), ((3327, 3387), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 'true', 'message': 200}"], {'status': '(200)'}), "({'status': 'true', 'message': 200}, status=200)\n", (3339, 3387), False, 'from django.http import JsonResponse\n'), ((3421, 3506), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 'false', 'message': self.message_not_exists}"], {'status': '(500)'}), "({'status': 'false', 'message': self.message_not_exists},\n status=500)\n", (3433, 3506), False, 'from django.http import JsonResponse\n'), ((4030, 4090), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 'true', 'message': 200}"], {'status': '(200)'}), "({'status': 'true', 'message': 200}, status=200)\n", (4042, 4090), False, 'from django.http import JsonResponse\n'), ((4124, 4209), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 'false', 'message': self.message_not_exists}"], {'status': '(500)'}), "({'status': 'false', 'message': self.message_not_exists},\n status=500)\n", (4136, 4209), False, 'from django.http import JsonResponse\n'), ((4979, 5033), 'json.dumps', 'json.dumps', (['response'], {'indent': '(4)', 'separators': "(',', ': ')"}), "(response, indent=4, separators=(',', ': '))\n", (4989, 5033), False, 'import json\n'), ((5053, 5108), 'django.http.HttpResponse', 'HttpResponse', (['response'], {'content_type': '"""application/json"""'}), "(response, content_type='application/json')\n", (5065, 5108), False, 'from django.http import HttpResponse\n'), ((5142, 5227), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 'false', 'message': self.message_not_exists}"], {'status': '(500)'}), "({'status': 'false', 'message': self.message_not_exists},\n status=500)\n", (5154, 5227), False, 'from django.http import JsonResponse\n'), ((3988, 4009), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (4007, 4009), False, 'import datetime\n')]
|
from flask import Flask
from injector import Injector
from edu_loan.config.default import Config
from edu_loan.config.dependencies import ApplicationRegister, Application
from edu_loan.config.main_module import MODULES, create_injector
def create_app(injector: Injector) -> Flask:
"""
Creates a Flask app
:param injector: The injector
:return: Returns the Flask app
"""
app_flask = Flask(__name__)
injector.binder.bind(Application, to=app_flask)
app_flask.config.from_object(Config)
registers = injector.get(ApplicationRegister)
if registers:
for register in registers:
try:
register = injector.get(register)
register.register_endpoints()
except Exception as e:
print(e)
raise
return app_flask
def initialize(modules=MODULES):
injector = create_injector(modules=modules)
application = create_app(injector)
return application
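# Usage sketch (added for illustration, not part of the original module;
# assumes the default MODULES list is importable):
#
#     app = initialize()   # builds the injector and registers all endpoints
#     app.run()            # Flask dev server; use a proper WSGI server in production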
|
[
"edu_loan.config.main_module.create_injector",
"flask.Flask"
] |
[((410, 425), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (415, 425), False, 'from flask import Flask\n'), ((891, 923), 'edu_loan.config.main_module.create_injector', 'create_injector', ([], {'modules': 'modules'}), '(modules=modules)\n', (906, 923), False, 'from edu_loan.config.main_module import MODULES, create_injector\n')]
|
from marshmallow import fields, validate
from .. import db, ma
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
creation_date = db.Column(db.TIMESTAMP,
default=db.func.current_timestamp(),
nullable=False)
modification_date = db.Column(db.TIMESTAMP,
default=db.func.current_timestamp(),
onupdate=db.func.current_timestamp(),
nullable=False)
class UserSchema(ma.Schema):
id = fields.Integer()
email = fields.Email(required=True)
password = fields.String(required=True,
validate=[validate.Length(min=6, max=20)])
creation_date = fields.DateTime()
modification_date = fields.DateTime()
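# Validation sketch (added for illustration; the payload values are assumed):
#
#     schema = UserSchema()
#     errors = schema.validate({"email": "user@example.com", "password": "secret"})
#     # -> {} when valid; note that on marshmallow 3.x, schema.load(...) raises
#     #    ValidationError for invalid input instead of returning an error dict.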
|
[
"marshmallow.fields.Email",
"marshmallow.validate.Length",
"marshmallow.fields.DateTime",
"marshmallow.fields.Integer"
] |
[((688, 704), 'marshmallow.fields.Integer', 'fields.Integer', ([], {}), '()\n', (702, 704), False, 'from marshmallow import fields, validate\n'), ((717, 744), 'marshmallow.fields.Email', 'fields.Email', ([], {'required': '(True)'}), '(required=True)\n', (729, 744), False, 'from marshmallow import fields, validate\n'), ((881, 898), 'marshmallow.fields.DateTime', 'fields.DateTime', ([], {}), '()\n', (896, 898), False, 'from marshmallow import fields, validate\n'), ((923, 940), 'marshmallow.fields.DateTime', 'fields.DateTime', ([], {}), '()\n', (938, 940), False, 'from marshmallow import fields, validate\n'), ((828, 858), 'marshmallow.validate.Length', 'validate.Length', ([], {'min': '(6)', 'max': '(20)'}), '(min=6, max=20)\n', (843, 858), False, 'from marshmallow import fields, validate\n')]
|
from storageManager.CrudTupla import CrudTuplas
class Tabla:
def __init__(self, nombre, columnas):
self.nombre = nombre
self.columnas = columnas
self.estructura = CrudTuplas(columnas)
def getNombreASCII(self):
number = 0
for c in self.nombre:
number += ord(c)
return number
|
[
"storageManager.CrudTupla.CrudTuplas"
] |
[((192, 212), 'storageManager.CrudTupla.CrudTuplas', 'CrudTuplas', (['columnas'], {}), '(columnas)\n', (202, 212), False, 'from storageManager.CrudTupla import CrudTuplas\n')]
|
import numpy as np
import vrep
import ctypes
import math
import sys
import time
sim_dt = 0.01
dt = 0.001
SYNC = True
vrep_mode = vrep.simx_opmode_oneshot
def b( num ):
""" forces magnitude to be 1 or less """
if abs( num ) > 1.0:
return math.copysign( 1.0, num )
else:
return num
def convert_angles( ang ):
""" Converts Euler angles from x-y-z to z-x-y convention """
s1 = math.sin(ang[0])
s2 = math.sin(ang[1])
s3 = math.sin(ang[2])
c1 = math.cos(ang[0])
c2 = math.cos(ang[1])
c3 = math.cos(ang[2])
pitch = math.asin( b(c1*c3*s2-s1*s3) )
cp = math.cos(pitch)
# just in case
if cp == 0:
cp = 0.000001
yaw = math.asin( b((c1*s3+c3*s1*s2)/cp) ) #flipped
# Fix for getting the quadrants right
if c3 < 0 and yaw > 0:
yaw = math.pi - yaw
elif c3 < 0 and yaw < 0:
yaw = -math.pi - yaw
roll = math.asin( b((c3*s1+c1*s2*s3)/cp) ) #flipped
return [roll, pitch, yaw]
class Quadcopter( object ):
"""
This callable class will return the state of the quadcopter relative to its
target whenever it is called. It will also accept motor commands which will be
sent to the quadcopter in V-REP.
"""
def __init__( self, max_target_distance=4, noise=False,
noise_std=None, dodging=True,
target_func=None, cid=None,ori_mode=False
):
self.ori_mode = ori_mode
# If a cid is specified, assume the connection has already been
# established and should remain open
if cid is None:
vrep.simxFinish(-1) # just in case, close all opened connections
self.cid = vrep.simxStart('127.0.0.1',19997,True,True,5000,5)
else:
self.cid = cid
if self.cid != -1:
print ('Connected to V-REP remote API server, client id: %s' % self.cid)
vrep.simxStartSimulation( self.cid, vrep.simx_opmode_oneshot )
if SYNC:
vrep.simxSynchronous( self.cid, True )
else:
print ('Failed connecting to V-REP remote API server')
self.exit()
err, self.copter = vrep.simxGetObjectHandle(self.cid, "Quadricopter_base",
vrep.simx_opmode_oneshot_wait )
err, self.target = vrep.simxGetObjectHandle(self.cid, "Quadricopter_target",
vrep.simx_opmode_oneshot_wait )
# Reset the motor commands to zero
packedData=vrep.simxPackFloats([0,0,0,0])
raw_bytes = (ctypes.c_ubyte * len(packedData)).from_buffer_copy(packedData)
err = vrep.simxSetStringSignal(self.cid, "rotorTargetVelocities",
raw_bytes,
vrep_mode)
self.pos = [0,0,0]
self.pos_err = [0,0,0]
self.t_pos = [0,0,0]
self.lin = [0,0,0]
self.ori = [0,0,0]
self.ori_err = [0,0,0]
self.t_ori = [0,0,0]
self.ang = [0,0,0]
self.count = 0
# Maximum target distance error that can be returned
self.max_target_distance = max_target_distance
# If noise is being modelled
if noise_std is not None:
self.noise = True
else:
self.noise = False
# Standard Deviation of the noise for the 4 state variables
self.noise_std = noise_std
# Overwrite the get_target method if the target is to be controlled by a
# function instead of by V-REP
if target_func is not None:
self.step = 0
self.target_func = target_func
def get_target():
self.t_pos, self.t_ori = self.target_func( self.step )
self.step += 1
self.get_target = get_target
def stop( self ):
"""
Stops the simulation
"""
err = vrep.simxStopSimulation( self.cid, vrep.simx_opmode_oneshot_wait )
time.sleep(0.01) # Maybe this will prevent V-REP from crashing as often
return hasattr(self, 'failed') # Returns true if this is a failed run
def reset( self ):
err = vrep.simxStopSimulation(self.cid, vrep.simx_opmode_oneshot_wait)
time.sleep(1)
self.pos_err = [0,0,0]
self.ori_err = [0,0,0]
self.lin = [0,0,0]
self.ang = [0,0,0]
err = vrep.simxStartSimulation(self.cid, vrep.simx_opmode_oneshot_wait)
if SYNC:
vrep.simxSynchronous( self.cid, True )
def exit( self ):
self.failed = True
exit(1)
def get_target( self ):
err, self.t_ori = vrep.simxGetObjectOrientation(self.cid, self.target, -1,
vrep_mode )
err, self.t_pos = vrep.simxGetObjectPosition(self.cid, self.target, -1,
vrep_mode )
# Convert orientations to z-y-x convention
self.t_ori = convert_angles(self.t_ori)
def calculate_error( self ):
# Return the state variables
err, self.ori = vrep.simxGetObjectOrientation(self.cid, self.copter, -1,
vrep_mode )
err, self.pos = vrep.simxGetObjectPosition(self.cid, self.copter, -1,
vrep_mode )
err, self.lin, self.ang = vrep.simxGetObjectVelocity(self.cid, self.copter,
vrep_mode )
self.ori = convert_angles(self.ori)
# Apply noise to each measurement if required
#FIXME this is a dumb way to do this, clean it up later
# if self.noise:
# n_pos = np.random.normal(0,self.noise_std[0],3)
# n_lin = np.random.normal(0,self.noise_std[1],3)
# n_ori = np.random.normal(0,self.noise_std[2],3)
# n_ang = np.random.normal(0,self.noise_std[3],3)
# for i in range(3):
# self.pos[i] += n_pos[i]
# self.lin[i] += n_lin[i]
# self.ori[i] += n_ori[i]
# self.ang[i] += n_ang[i]
#TODO: might have to wrap angles here
# Find the error
self.ori_err = [self.t_ori[0] - self.ori[0],
self.t_ori[1] - self.ori[1],
self.t_ori[2] - self.ori[2]]
# print(self.ori_err)
cz = math.cos(self.ori[2])
sz = math.sin(self.ori[2])
x_err = self.t_pos[0] - self.pos[0]
y_err = self.t_pos[1] - self.pos[1]
if not self.ori_mode:
self.pos_err = [ x_err * cz + y_err * sz,
-x_err * sz + y_err * cz,
self.t_pos[2] - self.pos[2]]
else:
self.pos_err = [0,0,
self.t_pos[2] - self.pos[2]]
# print(self.pos_err)
self.lin = [self.lin[0]*cz+self.lin[1]*sz, -self.lin[0]*sz+self.lin[1]*cz, self.lin[2]]
self.ang = [self.ang[0]*cz+self.ang[1]*sz, -self.ang[0]*sz+self.ang[1]*cz, self.ang[2]]
for i in range(3):
if self.ori_err[i] > math.pi:
self.ori_err[i] -= 2 * math.pi
elif self.ori_err[i] < -math.pi:
self.ori_err[i] += 2 * math.pi
def send_motor_commands( self, values ):
# Limit motors by max and min values
motor_values = np.zeros(4)
for i in range(4):
"""
if values[i] > 30:
motor_values[i] = 30
elif values[i] < 0:
motor_values[i] = 0
else:
motor_values[i] = values[i]
"""
motor_values[i] = values[i]
packedData=vrep.simxPackFloats(motor_values.flatten())
raw_bytes = (ctypes.c_ubyte * len(packedData)).from_buffer_copy(packedData)
err = vrep.simxSetStringSignal(self.cid, "rotorTargetVelocities",
raw_bytes,
vrep_mode)
def handle_input( self, values ):
# Send motor commands to V-REP
self.send_motor_commands( values )
# Retrieve target location
self.get_target()
# Calculate state error
self.calculate_error()
def bound( self, value ):
if abs( value ) > self.max_target_distance:
return math.copysign( self.max_target_distance, value )
else:
return value
def get_state( self ):
"""
Returns the current state. Used for recording benchmarks of performance
"""
return [self.pos, self.ori,
self.lin, self.ang,
self.t_pos, self.t_ori]
def handle_output( self ):
l = math.sqrt(self.pos_err[0]**2 + self.pos_err[1]**2)
bl = self.bound(l)
r = (bl+.1)/(l+.1)
return [r*self.pos_err[0], r*self.pos_err[1], self.bound(self.pos_err[2]),
self.lin[0], self.lin[1], self.lin[2],
self.ori_err[0], self.ori_err[1], self.ori_err[2],
self.ang[0], self.ang[1], self.ang[2]]
def __call__( self, t, values ):
""" This class will be callable within a nengo node. It will accept as input
the control signals for each rotor, and will output the relevant state
variables (position, velocity, orientation, angular velocity).
"""
self.count += 1
if self.count == int(round(sim_dt/dt)):
self.count = 0
self.handle_input( values )
if SYNC:
vrep.simxSynchronousTrigger( self.cid )
return self.handle_output()
class FullStateQuadcopter( Quadcopter ):
"""
Returns both egocentric and allocentric information about the state
"""
def handle_output( self ):
l = math.sqrt(self.pos_err[0]**2 + self.pos_err[1]**2)
bl = self.bound(l)
r = (bl+.1)/(l+.1)
return [r*self.pos_err[0], r*self.pos_err[1], self.bound(self.pos_err[2]),
self.lin[0], self.lin[1], self.lin[2],
self.ori_err[0], self.ori_err[1], self.ori_err[2],
self.ang[0], self.ang[1], self.ang[2],
self.pos[0], self.pos[1], self.pos[2],
self.ori[0], self.ori[1], self.ori[2],
]
class FullStateTargetQuadcopter( Quadcopter ):
"""
Returns both egocentric and allocentric information about the state
as well as the state of the target
"""
def handle_output( self ):
l = math.sqrt(self.pos_err[0]**2 + self.pos_err[1]**2)
bl = self.bound(l)
r = (bl+.1)/(l+.1)
return [r*self.pos_err[0], r*self.pos_err[1], self.bound(self.pos_err[2]),
self.lin[0], self.lin[1], self.lin[2],
self.ori_err[0], self.ori_err[1], self.ori_err[2],
self.ang[0], self.ang[1], self.ang[2],
self.pos[0], self.pos[1], self.pos[2],
self.ori[0], self.ori[1], self.ori[2],
self.t_pos[0], self.t_pos[1], self.t_pos[2],
self.t_ori[0], self.t_ori[1], self.t_ori[2],
]
class TargetControlledQuadcopter( Quadcopter ):
"""
The target location is sent as input to this node, rather than reading from a
manually controlled target in V-REP. This class is designed to be used for
hyperopt tuning of gains.
"""
def set_target( self, v ):
self.t_pos = [v[0], v[1], v[2]]
self.t_ori = [0, 0, v[3]]
def handle_input( self, values ):
"""
First four elements of values are the motor commands.
The next four are x,y,z,yaw of the target
"""
self.send_motor_commands( values[:4] )
self.set_target( values[4:] )
self.calculate_error()
def exit( self ):
# When running hyperopt to find gains, should not exit program if one
# trial fails
self.failed = True
class AdaptiveController( object ):
"""
Adaptive controller based on Slotine's methods and physics model from the
python quadcopter simulator
"""
def __init__( self, adaptive=True, dt=0.001, initial_param=None ):
# When false, parameter updating does not occur
self.adaptive = adaptive
self.initialize_parameters( initial_param )
# Gain set
k1 = 0.43352026190263104
k2 = 2.0 * 2
k3 = 0.5388202808181405
k4 = 1.65 * 2
k5 = 2.5995452450850185
k6 = 0.802872750102059 * 2
k7 = 0.5990281657438163
k8 = 2.8897310746350824 * 2
ak1 = 0.026210965785217845
ak2 = 2.0 * 5
ak3 = 0.027614986033826894
ak4 = 1.65 * 5
ak6 = k6
ak8 = k8
self.K = np.matrix([[ 0, 0, k2, 0, 0,-k4, 0, 0, 0, 0, 0, 0],
[ 0, k1, 0, 0,-k3, 0,-k5, 0, 0, k7, 0, 0],
[-k1, 0, 0, k3, 0, 0, 0,-k5, 0, 0, k7, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0,-k6, 0, 0, k8] ])
self.AK = np.matrix([[ 0, 0, ak2, 0, 0,-ak4, 0, 0, 0, 0, 0, 0],
[ 0, ak1, 0, 0,-ak3, 0, 0, 0, 0, 0, 0, 0],
[-ak1, 0, 0, ak3, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, -ak6, 0, 0, ak8] ])
self.task_to_rotor = np.matrix([[ 1,-1, 1, 1],
[ 1,-1,-1,-1],
[ 1, 1,-1, 1],
[ 1, 1, 1,-1] ])
self.control_matrix = self.task_to_rotor * self.K
self.adaptive_matrix = self.task_to_rotor * self.AK
self.error = np.matrix([[0.0], # x
[0.0], # y
[0.0], # z
[0.0], # dx
[0.0], # dy
[0.0], # dz
[0.0], # roll
[0.0], # pitch
[0.0], # yaw
[0.0], # droll
[0.0], # dpitch
[0.0], # dyaw
])
self.learning_rate = 1
self.dt = dt
def initialize_parameters( self, initial_param ):
# Unknown Constant Vector
self.param = np.matrix([[0.0],
[0.0],
[0.0],
[0.0],
[0.0],
[0.0],
[0.0],
])
# If initial parameters are specified, set them now
if initial_param is not None:
for i in range(len(initial_param)):
self.param[i,0] = initial_param[i]
def compute_Y( self ):
"""
Generate the Y matrix
"""
# TODO: this might need to be allocentric, or the equations changed for
# egocentric
c1 = math.cos( self.error[6,0] )
c2 = math.cos( self.error[7,0] )
c3 = math.cos( self.error[8,0] )
s1 = math.sin( self.error[6,0] )
s2 = math.sin( self.error[7,0] )
s3 = math.sin( self.error[8,0] )
at = c1*s2*c3 + s1*s3
bt = c1*s2*s3 - s1*c3
ct = c1*c2
a = at / (at*at + bt*bt + ct*ct)
b = bt / (at*at + bt*bt + ct*ct)
c = ct / (at*at + bt*bt + ct*ct)
ax = a*abs(self.error[3,0])*self.error[3,0]
by = b*abs(self.error[4,0])*self.error[4,0]
cz = c*abs(self.error[5,0])*self.error[5,0]
rxy = self.error[9,0]*self.error[10,0]
rxz = self.error[9,0]*self.error[1,0]
ryz = self.error[10,0]*self.error[11,0]
"""
self.Y = np.matrix([[ax, by, cz, c, 0, -rxz, -rxy],
[ax, by, cz, c, -ryz, 0, rxy],
[ax, by, cz, c, 0, rxz, -rxy],
[ax, by, cz, c, ryz, 0, rxy],
])
"""
# Trying out different orientation of rotor blades
self.Y = np.matrix([[ax, by, cz, c, -ryz, rxz, rxy],
[ax, by, cz, c, -ryz, -rxz, -rxy],
[ax, by, cz, c, ryz, -rxz, rxy],
[ax, by, cz, c, ryz, rxz, -rxy],
])
def compute_rotor_velocities( self ):
"""
Generate the four rotor velocities to control the quadcopter
"""
self.compute_Y()
# Calculate rotor velocities
w = self.Y * self.param +\
self.control_matrix * self.error
#self.adaptive_matrix * self.error
if self.adaptive:
dparam = self.learning_rate *\
self.Y.T *\
( self.control_matrix * self.error ) *\
self.dt
#( self.adaptive_matrix * self.error ) *\
#self.dt
# Update the parameter estimates
self.param += dparam
return [ w[0,0], w[1,0], w[2,0], w[3,0] ]
def __call__( self, t, values ):
""" This class will be callable within a nengo node. It will accept as input
the 12D state error and will output desired rotor velocities
"""
# Insert state into error matrix
for i in range(len(values)):
self.error[i,0] = values[i]
# Compute desired rotor velocities
return self.compute_rotor_velocities()
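# Wiring sketch (added for illustration only; assumes a running V-REP instance
# and the nengo package, which this module does not import itself):
#
#     import nengo
#     model = nengo.Network()
#     with model:
#         copter = nengo.Node(Quadcopter(), size_in=4, size_out=12)
#         control = nengo.Node(AdaptiveController(), size_in=12, size_out=4)
#         nengo.Connection(control, copter, synapse=None)
#         nengo.Connection(copter, control, synapse=None)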
|
[
"vrep.simxGetObjectVelocity",
"vrep.simxSynchronousTrigger",
"math.copysign",
"vrep.simxStart",
"vrep.simxSynchronous",
"vrep.simxGetObjectHandle",
"vrep.simxSetStringSignal",
"math.cos",
"vrep.simxGetObjectPosition",
"vrep.simxStopSimulation",
"math.sqrt",
"vrep.simxFinish",
"math.sin",
"time.sleep",
"numpy.matrix",
"vrep.simxGetObjectOrientation",
"numpy.zeros",
"vrep.simxPackFloats",
"vrep.simxStartSimulation"
] |
[((396, 412), 'math.sin', 'math.sin', (['ang[0]'], {}), '(ang[0])\n', (404, 412), False, 'import math\n'), ((420, 436), 'math.sin', 'math.sin', (['ang[1]'], {}), '(ang[1])\n', (428, 436), False, 'import math\n'), ((444, 460), 'math.sin', 'math.sin', (['ang[2]'], {}), '(ang[2])\n', (452, 460), False, 'import math\n'), ((468, 484), 'math.cos', 'math.cos', (['ang[0]'], {}), '(ang[0])\n', (476, 484), False, 'import math\n'), ((492, 508), 'math.cos', 'math.cos', (['ang[1]'], {}), '(ang[1])\n', (500, 508), False, 'import math\n'), ((516, 532), 'math.cos', 'math.cos', (['ang[2]'], {}), '(ang[2])\n', (524, 532), False, 'import math\n'), ((584, 599), 'math.cos', 'math.cos', (['pitch'], {}), '(pitch)\n', (592, 599), False, 'import math\n'), ((249, 272), 'math.copysign', 'math.copysign', (['(1.0)', 'num'], {}), '(1.0, num)\n', (262, 272), False, 'import math\n'), ((2124, 2215), 'vrep.simxGetObjectHandle', 'vrep.simxGetObjectHandle', (['self.cid', '"""Quadricopter_base"""', 'vrep.simx_opmode_oneshot_wait'], {}), "(self.cid, 'Quadricopter_base', vrep.\n simx_opmode_oneshot_wait)\n", (2148, 2215), False, 'import vrep\n'), ((2287, 2380), 'vrep.simxGetObjectHandle', 'vrep.simxGetObjectHandle', (['self.cid', '"""Quadricopter_target"""', 'vrep.simx_opmode_oneshot_wait'], {}), "(self.cid, 'Quadricopter_target', vrep.\n simx_opmode_oneshot_wait)\n", (2311, 2380), False, 'import vrep\n'), ((2488, 2521), 'vrep.simxPackFloats', 'vrep.simxPackFloats', (['[0, 0, 0, 0]'], {}), '([0, 0, 0, 0])\n', (2507, 2521), False, 'import vrep\n'), ((2619, 2704), 'vrep.simxSetStringSignal', 'vrep.simxSetStringSignal', (['self.cid', '"""rotorTargetVelocities"""', 'raw_bytes', 'vrep_mode'], {}), "(self.cid, 'rotorTargetVelocities', raw_bytes,\n vrep_mode)\n", (2643, 2704), False, 'import vrep\n'), ((3894, 3958), 'vrep.simxStopSimulation', 'vrep.simxStopSimulation', (['self.cid', 'vrep.simx_opmode_oneshot_wait'], {}), '(self.cid, vrep.simx_opmode_oneshot_wait)\n', (3917, 3958), False, 'import vrep\n'), ((3969, 3985), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (3979, 3985), False, 'import time\n'), ((4158, 4222), 'vrep.simxStopSimulation', 'vrep.simxStopSimulation', (['self.cid', 'vrep.simx_opmode_oneshot_wait'], {}), '(self.cid, vrep.simx_opmode_oneshot_wait)\n', (4181, 4222), False, 'import vrep\n'), ((4231, 4244), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4241, 4244), False, 'import time\n'), ((4375, 4440), 'vrep.simxStartSimulation', 'vrep.simxStartSimulation', (['self.cid', 'vrep.simx_opmode_oneshot_wait'], {}), '(self.cid, vrep.simx_opmode_oneshot_wait)\n', (4399, 4440), False, 'import vrep\n'), ((4632, 4699), 'vrep.simxGetObjectOrientation', 'vrep.simxGetObjectOrientation', (['self.cid', 'self.target', '(-1)', 'vrep_mode'], {}), '(self.cid, self.target, -1, vrep_mode)\n', (4661, 4699), False, 'import vrep\n'), ((4779, 4843), 'vrep.simxGetObjectPosition', 'vrep.simxGetObjectPosition', (['self.cid', 'self.target', '(-1)', 'vrep_mode'], {}), '(self.cid, self.target, -1, vrep_mode)\n', (4805, 4843), False, 'import vrep\n'), ((5096, 5163), 'vrep.simxGetObjectOrientation', 'vrep.simxGetObjectOrientation', (['self.cid', 'self.copter', '(-1)', 'vrep_mode'], {}), '(self.cid, self.copter, -1, vrep_mode)\n', (5125, 5163), False, 'import vrep\n'), ((5237, 5301), 'vrep.simxGetObjectPosition', 'vrep.simxGetObjectPosition', (['self.cid', 'self.copter', '(-1)', 'vrep_mode'], {}), '(self.cid, self.copter, -1, vrep_mode)\n', (5263, 5301), False, 'import vrep\n'), ((5381, 5441), 'vrep.simxGetObjectVelocity', 'vrep.simxGetObjectVelocity', (['self.cid', 'self.copter', 'vrep_mode'], {}), '(self.cid, self.copter, vrep_mode)\n', (5407, 5441), False, 'import vrep\n'), ((6416, 6437), 'math.cos', 'math.cos', (['self.ori[2]'], {}), '(self.ori[2])\n', (6424, 6437), False, 'import math\n'), ((6451, 6472), 'math.sin', 'math.sin', (['self.ori[2]'], {}), '(self.ori[2])\n', (6459, 6472), False, 'import math\n'), ((7396, 7407), 'numpy.zeros', 'np.zeros', (['(4)'], {}), '(4)\n', (7404, 7407), True, 'import numpy as np\n'), ((7843, 7928), 'vrep.simxSetStringSignal', 'vrep.simxSetStringSignal', (['self.cid', '"""rotorTargetVelocities"""', 'raw_bytes', 'vrep_mode'], {}), "(self.cid, 'rotorTargetVelocities', raw_bytes,\n vrep_mode)\n", (7867, 7928), False, 'import vrep\n'), ((8741, 8795), 'math.sqrt', 'math.sqrt', (['(self.pos_err[0] ** 2 + self.pos_err[1] ** 2)'], {}), '(self.pos_err[0] ** 2 + self.pos_err[1] ** 2)\n', (8750, 8795), False, 'import math\n'), ((9821, 9875), 'math.sqrt', 'math.sqrt', (['(self.pos_err[0] ** 2 + self.pos_err[1] ** 2)'], {}), '(self.pos_err[0] ** 2 + self.pos_err[1] ** 2)\n', (9830, 9875), False, 'import math\n'), ((10536, 10590), 'math.sqrt', 'math.sqrt', (['(self.pos_err[0] ** 2 + self.pos_err[1] ** 2)'], {}), '(self.pos_err[0] ** 2 + self.pos_err[1] ** 2)\n', (10545, 10590), False, 'import math\n'), ((12650, 12840), 'numpy.matrix', 'np.matrix', (['[[0, 0, k2, 0, 0, -k4, 0, 0, 0, 0, 0, 0], [0, k1, 0, 0, -k3, 0, -k5, 0, 0,\n k7, 0, 0], [-k1, 0, 0, k3, 0, 0, 0, -k5, 0, 0, k7, 0], [0, 0, 0, 0, 0, \n 0, 0, 0, -k6, 0, 0, k8]]'], {}), '([[0, 0, k2, 0, 0, -k4, 0, 0, 0, 0, 0, 0], [0, k1, 0, 0, -k3, 0, -\n k5, 0, 0, k7, 0, 0], [-k1, 0, 0, k3, 0, 0, 0, -k5, 0, 0, k7, 0], [0, 0,\n 0, 0, 0, 0, 0, 0, -k6, 0, 0, k8]])\n', (12659, 12840), True, 'import numpy as np\n'), ((12954, 13146), 'numpy.matrix', 'np.matrix', (['[[0, 0, ak2, 0, 0, -ak4, 0, 0, 0, 0, 0, 0], [0, ak1, 0, 0, -ak3, 0, 0, 0, 0,\n 0, 0, 0], [-ak1, 0, 0, ak3, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0,\n 0, 0, -ak6, 0, 0, ak8]]'], {}), '([[0, 0, ak2, 0, 0, -ak4, 0, 0, 0, 0, 0, 0], [0, ak1, 0, 0, -ak3, \n 0, 0, 0, 0, 0, 0, 0], [-ak1, 0, 0, ak3, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0,\n 0, 0, 0, 0, 0, 0, -ak6, 0, 0, ak8]])\n', (12963, 13146), True, 'import numpy as np\n'), ((13280, 13353), 'numpy.matrix', 'np.matrix', (['[[1, -1, 1, 1], [1, -1, -1, -1], [1, 1, -1, 1], [1, 1, 1, -1]]'], {}), '([[1, -1, 1, 1], [1, -1, -1, -1], [1, 1, -1, 1], [1, 1, 1, -1]])\n', (13289, 13353), True, 'import numpy as np\n'), ((13594, 13694), 'numpy.matrix', 'np.matrix', (['[[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0\n ], [0.0]]'], {}), '([[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [\n 0.0], [0.0], [0.0]])\n', (13603, 13694), True, 'import numpy as np\n'), ((14253, 14313), 'numpy.matrix', 'np.matrix', (['[[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0]]'], {}), '([[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0]])\n', (14262, 14313), True, 'import numpy as np\n'), ((14861, 14887), 'math.cos', 'math.cos', (['self.error[6, 0]'], {}), '(self.error[6, 0])\n', (14869, 14887), False, 'import math\n'), ((14898, 14924), 'math.cos', 'math.cos', (['self.error[7, 0]'], {}), '(self.error[7, 0])\n', (14906, 14924), False, 'import math\n'), ((14935, 14961), 'math.cos', 'math.cos', (['self.error[8, 0]'], {}), '(self.error[8, 0])\n', (14943, 14961), False, 'import math\n'), ((14972, 14998), 'math.sin', 'math.sin', (['self.error[6, 0]'], {}), '(self.error[6, 0])\n', (14980, 14998), False, 'import math\n'), ((15009, 15035), 'math.sin', 'math.sin', (['self.error[7, 0]'], {}), '(self.error[7, 0])\n', (15017, 15035), False, 'import math\n'), ((15046, 15072), 'math.sin', 'math.sin', (['self.error[8, 0]'], {}), '(self.error[8, 0])\n', (15054, 15072), False, 'import math\n'), ((15860, 16010), 'numpy.matrix', 'np.matrix', (['[[ax, by, cz, c, -ryz, rxz, rxy], [ax, by, cz, c, -ryz, -rxz, -rxy], [ax,\n by, cz, c, ryz, -rxz, rxy], [ax, by, cz, c, ryz, rxz, -rxy]]'], {}), '([[ax, by, cz, c, -ryz, rxz, rxy], [ax, by, cz, c, -ryz, -rxz, -\n rxy], [ax, by, cz, c, ryz, -rxz, rxy], [ax, by, cz, c, ryz, rxz, -rxy]])\n', (15869, 16010), True, 'import numpy as np\n'), ((1547, 1566), 'vrep.simxFinish', 'vrep.simxFinish', (['(-1)'], {}), '(-1)\n', (1562, 1566), False, 'import vrep\n'), ((1635, 1690), 'vrep.simxStart', 'vrep.simxStart', (['"""127.0.0.1"""', '(19997)', '(True)', '(True)', '(5000)', '(5)'], {}), "('127.0.0.1', 19997, True, True, 5000, 5)\n", (1649, 1690), False, 'import vrep\n'), ((1852, 1912), 'vrep.simxStartSimulation', 'vrep.simxStartSimulation', (['self.cid', 'vrep.simx_opmode_oneshot'], {}), '(self.cid, vrep.simx_opmode_oneshot)\n', (1876, 1912), False, 'import vrep\n'), ((4468, 4504), 'vrep.simxSynchronous', 'vrep.simxSynchronous', (['self.cid', '(True)'], {}), '(self.cid, True)\n', (4488, 4504), False, 'import vrep\n'), ((8365, 8411), 'math.copysign', 'math.copysign', (['self.max_target_distance', 'value'], {}), '(self.max_target_distance, value)\n', (8378, 8411), False, 'import math\n'), ((1952, 1988), 'vrep.simxSynchronous', 'vrep.simxSynchronous', (['self.cid', '(True)'], {}), '(self.cid, True)\n', (1972, 1988), False, 'import vrep\n'), ((9572, 9609), 'vrep.simxSynchronousTrigger', 'vrep.simxSynchronousTrigger', (['self.cid'], {}), '(self.cid)\n', (9599, 9609), False, 'import vrep\n')]
|
import discord
from discord.ext import commands
import random
import sys
import traceback
class ErrorHandler(commands.Cog):
def __init__(self, client):
self.client = client
@commands.Cog.listener()
async def on_command_error(self, ctx, error):
# This prevents any commands with local handlers being handled here in on_command_error.
if hasattr(ctx.command, 'on_error'):
return
# This prevents any cogs with an overwritten cog_command_error being handled here.
cog = ctx.cog
if cog:
if cog._get_overridden_method(cog.cog_command_error) is not None:
return
ignored = ()
if isinstance(error, ignored):
return
elif isinstance(error, commands.DisabledCommand):
await ctx.send(f'{ctx.command} has been disabled.')
elif isinstance(error, commands.NoPrivateMessage):
try:
await ctx.author.send(f'`{ctx.command}` can not be used in Private Messages.')
except discord.HTTPException:
pass
elif isinstance(error, commands.BadArgument):
await ctx.send('The given argument(s) were not correct.')
elif isinstance(error, commands.MissingRequiredArgument):
await ctx.send(f'One or more required arguments were missing.')
elif isinstance(error, commands.CommandNotFound):
await ctx.send(f'Command not found')
elif isinstance(error, commands.BotMissingPermissions):
try:
await ctx.send(f'The bot is missing the required permissions to complete this action.')
except:
try:
await ctx.guild.owner.send(f'The bot is missing required permissions in your sever: {ctx.guild.name} (guild id: {ctx.guild.id})')
except:
pass
else:
            await ctx.send(f'unknown error occurred: `{error}`')
print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr)
def setup(bot):
bot.add_cog(ErrorHandler(bot))
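# Note: the synchronous setup() above targets discord.py 1.x. On discord.py
# 2.x the extension entry point must be asynchronous (sketch):
#
#     async def setup(bot):
#         await bot.add_cog(ErrorHandler(bot))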
|
[
"discord.ext.commands.Cog.listener"
] |
[((202, 225), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (223, 225), False, 'from discord.ext import commands\n')]
|
from openarticlegauge import plugin
import re
class OUPPlugin(plugin.Plugin):
_short_name = __name__.split('.')[-1]
__version__='0.1' # consider incrementing or at least adding a minor version
# e.g. "0.1.1" if you change this plugin
__desc__ = "Handles articles from the Oxford University Press"
    supported_url_format = r'(http|https){0,1}://.+?\.oxfordjournals.org/.+'
_license_mappings = [
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by/3.0/),"
+ "\n" + ' '*21 + "which permits unrestricted reuse, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/3.0/'}
},
# same, but note "re-use" vs "reuse"
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by/3.0/),"
+ "\n" + ' '*21 + "which permits unrestricted re-use, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/3.0/'}
},
{ # Same as above but without the trailing slash in the URL in the license statement and 'use' rather than 'reuse'
"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by/3.0),"
+ "\n" + ' '*21 + "which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/3.0/'}
},
{ # Same as above but without the trailing slash in the URL and 'reuse' rather than 'use'
"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by/3.0),"
+ "\n" + ' '*21 + "which permits unrestricted reuse, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/3.0/'}
},
{ # this license statement is the same as the one above, but somebody's missed out the "reuse" word after unrestricted
"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by/3.0/),"
+ "\n" + ' '*21 + "which permits unrestricted, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/3.0/'}
},
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution Non-Commercial License (http://creativecommons.org/licenses/by-nc/3.0),"
+ "\n" + ' '*21 + "which permits unrestricted non-commercial use, distribution, and reproduction in any medium, provided the original work is"
+ "\n" + ' '*21 + "properly cited.":
{'type': 'cc-nc', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by-nc/3.0/'}
},
{ # Same as above but with the trailing slash in the URL in the license statement
"This is an Open Access article distributed under the terms of the Creative Commons Attribution Non-Commercial License (http://creativecommons.org/licenses/by-nc/3.0/),"
+ "\n" + ' '*21 + "which permits unrestricted non-commercial use, distribution, and reproduction in any medium, provided the original work is"
+ "\n" + ' '*21 + "properly cited.":
{'type': 'cc-nc', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by-nc/3.0/'}
},
{ # Subtly different text
"This is an Open Access article distributed under the terms of the Creative Commons Attribution Non-Commercial License (http://creativecommons.org/licenses/by-nc/3.0/),"
+ "\n" + ' '*21 + "which permits unrestricted non-commercial use, distribution, and reproduction in any medium, provided the original work is properly"
+ "\n" + ' '*21 + "and fully attributed":
{'type': 'cc-nc', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by-nc/3.0/'}
},
# Yet another subtly different case - note "reuse" immediately after unrestricted
{
"This is an Open Access article distributed under the terms of the Creative Commons Attribution Non-Commercial License (http://creativecommons.org/licenses/by-nc/3.0),"
+ "\n" + ' '*21 + "which permits unrestricted reuse, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-nc', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by-nc/3.0/'}
},
# Variation on the above with a trailing slash in the license URL
{
"This is an Open Access article distributed under the terms of the Creative Commons Attribution Non-Commercial License (http://creativecommons.org/licenses/by-nc/3.0/),"
+ "\n" + ' '*21 + "which permits unrestricted reuse, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-nc', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by-nc/3.0/'}
},
{ # Yet another case at eg: http://cardiovascres.oxfordjournals.org/content/98/2/286
"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by-nc/3.0/),"
+ "\n" + ' '*21 + "which permits non-commercial use, distribution, and reproduction in any medium, provided that the original authorship is properly"
+ "\n" + ' '*21 + "and fully attributed":
{'type': 'cc-nc', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by-nc/3.0/'}
}
]
def capabilities(self):
return {
"type_detect_verify" : False,
"canonicalise" : [],
"detect_provider" : [],
"license_detect" : True
}
def supports(self, provider):
"""
Does the page_license plugin support this provider
"""
for url in provider.get("url", []):
if self.supports_url(url):
return True
return False
def supports_url(self, url):
if re.match(self.supported_url_format, url):
return True
return False
def license_detect(self, record):
"""
To respond to the provider identifier: *.oxfordjournals.org
This should determine the licence conditions of the OUP article and populate
the record['bibjson']['license'] (note the US spelling) field.
"""
# licensing statements to look for on this publisher's pages
# take the form of {statement: meaning}
# where meaning['type'] identifies the license (see licenses.py)
# and meaning['version'] identifies the license version (if available)
lic_statements = self._license_mappings
for url in record.provider_urls:
if self.supports_url(url):
self.simple_extract(lic_statements, record, url)
return (self._short_name, self.__version__)
def get_description(self, plugin_name):
pd = super(OUPPlugin, self).get_description(plugin_name)
pd.provider_support = "Supports urls which match the regular expression: " + self.supported_url_format
return pd
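# Sketch of what the inherited matching step does (simple_extract() lives in
# plugin.Plugin and is not shown here; the names below are hypothetical):
#
#     page = fetch(url)                              # hypothetical fetch helper
#     for mapping in lic_statements:
#         for statement, meaning in mapping.items():
#             if statement in page:                  # verbatim substring match
#                 record.add_license(**meaning)      # hypothetical record helper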
|
[
"re.match"
] |
[((8778, 8818), 're.match', 're.match', (['self.supported_url_format', 'url'], {}), '(self.supported_url_format, url)\n', (8786, 8818), False, 'import re\n')]
|
#!/usr/bin/env python3
import rospy
import threading
from enum import Enum
from smads_core.client import JackalClient
from smads_core.client import SpotClient
from smads_core.client import RobotClient
from smads_core.interface import RobotSensorInterface
from smads_core.interface import RobotNavigationInterface
class RobotType:
SPOT = 1
JACKAL = 2
platform_map = {
SPOT : SpotClient(),
JACKAL : JackalClient(),
}
class SMADSROS:
def __init__(self, client, sensor_poll_rate, robot_prefix="smads_platform"):
self.client = client
self.robot_prefix = robot_prefix
self.client_mutex = threading.Lock()
self.sensor_interface = RobotSensorInterface(client, self.client_mutex, sensor_poll_rate, robot_prefix)
self.navigation_interface = RobotNavigationInterface(client, self.client_mutex, robot_prefix)
def start(self):
x = threading.Thread(target=self.sensor_interface.start)
y = threading.Thread(target=self.navigation_interface.start)
x.start()
y.start()
rospy.spin()
if __name__ == '__main__':
try:
rospy.init_node('smads_ros_node', anonymous=False)
        platform = rospy.get_param("~platform", 1)
        client = RobotType.platform_map[platform]
platform_prefix = rospy.get_param("~platform_prefix", "smads_platform")
poll_rate = rospy.get_param("~sensor_poll_rate", 10)
smadsros = SMADSROS(client, poll_rate, platform_prefix)
smadsros.start()
except rospy.ROSInterruptException:
pass
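# Note on the parameter flow above: ~platform selects the client by its
# RobotType value (1 = SPOT, 2 = JACKAL), and the chosen client is shared by
# the sensor and navigation interfaces under a single mutex.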
|
[
"threading.Thread",
"smads_core.interface.RobotNavigationInterface",
"smads_core.interface.RobotSensorInterface",
"threading.Lock",
"rospy.get_param",
"rospy.init_node",
"rospy.spin",
"smads_core.client.SpotClient",
"smads_core.client.JackalClient"
] |
[((404, 416), 'smads_core.client.SpotClient', 'SpotClient', ([], {}), '()\n', (414, 416), False, 'from smads_core.client import SpotClient\n'), ((435, 449), 'smads_core.client.JackalClient', 'JackalClient', ([], {}), '()\n', (447, 449), False, 'from smads_core.client import JackalClient\n'), ((653, 669), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (667, 669), False, 'import threading\n'), ((702, 781), 'smads_core.interface.RobotSensorInterface', 'RobotSensorInterface', (['client', 'self.client_mutex', 'sensor_poll_rate', 'robot_prefix'], {}), '(client, self.client_mutex, sensor_poll_rate, robot_prefix)\n', (722, 781), False, 'from smads_core.interface import RobotSensorInterface\n'), ((818, 883), 'smads_core.interface.RobotNavigationInterface', 'RobotNavigationInterface', (['client', 'self.client_mutex', 'robot_prefix'], {}), '(client, self.client_mutex, robot_prefix)\n', (842, 883), False, 'from smads_core.interface import RobotNavigationInterface\n'), ((918, 970), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.sensor_interface.start'}), '(target=self.sensor_interface.start)\n', (934, 970), False, 'import threading\n'), ((983, 1039), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.navigation_interface.start'}), '(target=self.navigation_interface.start)\n', (999, 1039), False, 'import threading\n'), ((1084, 1096), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (1094, 1096), False, 'import rospy\n'), ((1142, 1192), 'rospy.init_node', 'rospy.init_node', (['"""smads_ros_node"""'], {'anonymous': '(False)'}), "('smads_ros_node', anonymous=False)\n", (1157, 1192), False, 'import rospy\n'), ((1298, 1329), 'rospy.get_param', 'rospy.get_param', (['"""~platform"""', '(1)'], {}), "('~platform', 1)\n", (1313, 1329), False, 'import rospy\n'), ((1356, 1409), 'rospy.get_param', 'rospy.get_param', (['"""~platform_prefix"""', '"""smads_platform"""'], {}), "('~platform_prefix', 'smads_platform')\n", (1371, 1409), False, 'import rospy\n'), ((1430, 1470), 'rospy.get_param', 'rospy.get_param', (['"""~sensor_poll_rate"""', '(10)'], {}), "('~sensor_poll_rate', 10)\n", (1445, 1470), False, 'import rospy\n')]
|
import discord, asyncio, typing, random, os, html
from discord import ui
from discord.ext import commands
from collections import defaultdict
from datetime import datetime, timezone, timedelta
from .. import converters, embeds, services, utils, views
class MiscStuff(utils.MeldedCog, name = "Miscellaneous", category = "Other", limit = True):
ACTIVITIES = {
-1: "",
0: "Playing",
1: "Streaming",
2: "Listening to",
3: "Watching",
4: "Activity:",
5: "Competing in"
}
def __init__(self, bot):
self.bot = bot
@commands.command(help = "Retrieves a random piece of advice.\nUses adviceslip.com", aliases = ("ad",))
@utils.trigger_typing
async def advice(self, ctx):
result = await utils.get_json(self.bot.session,f"https://api.adviceslip.com/advice",content_type="text/html")
embed = embeds.GENERIC.create(result["slip"]["advice"], "", heading = "Random advice")
embed.set_footer(text=f"Retrieved using adviceslip.com")
await ctx.send(embed = embed)
@commands.command(
help = "Generates a continuously updated countdown post.",
aliases = ("time", "cd")
)
async def countdown(self, ctx, *, elapsed : converters.Date):
delta = (elapsed - datetime.now(tz = timezone.utc)) + timedelta(minutes = 1)
embed = embeds.COUNTDOWN.create(f"<t:{elapsed.timestamp():.0f}:R>", "")
embed.add_field(name = "End time", value = f"<t:{elapsed.timestamp():.0f}:F>")
if delta.total_seconds() < 0:
raise utils.CustomCommandError(
"Date has already occured",
"The date that you entered is in the past."
)
desc = (await views.RespondOrReact(ctx).run(
f"Your countdown will expire <t:{elapsed.timestamp():.0f}:R>."
" Give it a name by responding below."
)).content
embed.description = desc
message = await ctx.send(embed = embed)
@commands.command(
help = "Generates a competition distribution.\n If no number is specified, asks for a list of names.",
aliases = ("dt", "dist")
)
async def distribute(self, ctx, size : converters.Range(3, 50) = None):
if not size:
message = await views.RespondOrReact(ctx, timeout = views.LONG_TIMEOUT).run(
"Enter a list of contestants separated by line breaks (\u21E7\u23CE on desktop)",
)
names = dict(enumerate(message.content.split("\n"), start = 1))
size = await converters.Range(3, 50).convert(ctx, len(names))
else: names = None
def distribution(keysize):
vals = list(range(1, keysize))
candidates = {i: None for i in range(1, keysize)}
for c in candidates:
same = c in vals
if len(vals) == 1 and same: #try again, no valid option
candidates = distribution(keysize)
break
elif same: vals.remove(c)
candidates[c] = vals.pop(random.randrange(0, len(vals)))
if same: vals.append(c)
return candidates
dist = distribution(size + 1)
display = lambda e: f"**{e}**: {names[e]}" if names else f"**{e}**"
output = "".join(f"{display(k)} \U0001F86A {display(v)}\n" for k, v in dist.items())
await ctx.send(output)
@commands.command(help = "Conducts a search using Google Images.", aliases = ("img", "gi"))
@utils.trigger_typing
async def imgsearch(self, ctx, *, query):
await services.gis(ctx, "" + query)
@commands.command(
help = "Chooses a random number.\n"
" By default, this is out of 6, but another value can be specified.",
aliases = ("dice", "d")
)
async def roll(self, ctx, ceiling : converters.Range(2, 9999) = 6):
message = await ctx.send(":game_die: | Rolling the dice...")
result = random.randrange(1, ceiling + 1)
await asyncio.sleep(2)
await message.edit(content=f":game_die: | The dice landed on... **{result}**!")
@commands.command(help = "Sends a post as the bot user. Handy for jokes and such.", aliases = ("st",), hidden = True)
@commands.is_owner()
	async def sendtext(self, ctx, channel : typing.Optional[discord.TextChannel] = None, *, message_content):
channel = channel or ctx.channel
await channel.send(message_content)
@commands.command(
help = "Completes a passage of text using machine learning.\n"
" This uses DeepAI's online model to compute the result.",
aliases=("aitext", "tg")
)
@utils.trigger_typing
async def textgen(self, ctx, *, text : str):
url = "https://api.deepai.org/api/text-generator"
data = {"text": text.strip()}
headers = {"api-key": ctx.bot.conf["DEEP_AI"].strip()}
async with ctx.bot.session.post(url, data = data, headers = headers) as source:
if not source.ok:
raise utils.CustomCommandError(
"Invalid HTTP request",
f"Please try again. If problems persist, contact the bot's maintainer."
)
result_json = await source.json()
result = result_json["output"]
newtext = result[result.index(text) + len(text):]
await ctx.send(f":abcd: | **Text generated!**\n\n*{text}*{newtext}")
@commands.group(
invoke_without_command = True,
help = "Asks a trivia question that users can react to.\n"
"Optionally, a numeric category can be specified."
"\nCourtesy of the Open Trivia Database.\n\u0020\n",
aliases = ("q","tr")
)
@utils.trigger_typing
async def trivia(self, ctx, category : typing.Optional[int] = -1):
catstring = "" if category == -1 else f"&category={category}"
json = f"https://opentdb.com/api.php?amount=1{catstring}"
result = await utils.get_json(self.bot.session, json)
if result["response_code"] == 1:
raise utils.CustomCommandError(
"Invalid category code",
f"Consult `{ctx.clean_prefix}trivia categories` to see the available codes."
)
result = result["results"][0]
info = f"**{result['category']}** | {result['difficulty'].capitalize()}\n\n"
embed = embeds.GENERIC.create(html.unescape(result["question"]), info, heading = "Trivia")
correct = random.randrange(0,2 if result["type"] == "boolean" else 4)
answers = result["incorrect_answers"]
answers.insert(correct, result["correct_answer"])
embed.description += f"The correct answer will appear in **one minute.**"
embed.set_footer(text = f"Courtesy of the Open Trivia Database.")
view = ui.View()
users = {}
tuple(view.add_item(views.TriviaButton(answer, users)) for answer in answers)
message = await ctx.send(embed = embed, view = view)
await asyncio.sleep(60)
embed.description = f"{info}The correct answer is: **{html.unescape(answers[correct])}**"
updated = await message.channel.fetch_message(message.id)
if updated is None: return #message deleted
results = defaultdict(list)
for user, answer in users.items():
results[answer].append(user)
stats = "\n".join(f"- {a}: {','.join(u)} (**{len(u)}**)" for a, u in results.items())
if stats: embed.description += f"\n\n**Responses:**\n\u0020{stats}"
await message.edit(embed = embed, view = None)
@trivia.command(help = "Lists all categories.")
async def categories(self, ctx):
result = await utils.get_json(self.bot.session, f"https://opentdb.com/api_category.php")
embed = embeds.GENERIC.create(
"Trivia categories", "To choose a category, specify its numeric ID.", heading = "Trivia"
)
for category in result["trivia_categories"]:
embed.add_field(name = category["name"], value=category["id"], inline=True)
embed.set_footer(text = f"Courtesy of the Open Trivia Database.")
await ctx.send(embed = embed)
@commands.command(help = "Looks up a Discord user.", aliases = ("u",))
@utils.trigger_typing
async def user(self, ctx, *, user : converters.MemberOrUser = None):
user = user or ctx.author
if not isinstance(user, discord.Member) and ctx.guild:
user = ctx.guild.get_member(user.id) or user
embed = embeds.USER_INFO.create(f"{user.name}#{user.discriminator}", f"{user.mention}")
if user.bot:
embed.description += " | **Bot**"
embed.set_thumbnail(url = user.display_avatar.with_size(512).url)
embed.add_field(name = "Created", value = utils.stddate(user.created_at), inline = True)
embed.description += "\n\u200b"
if isinstance(user, discord.Member):
embed.colour = user.colour if user.colour.value != 0 else embeds.DEFAULT
for activity in user.activities:
preface = activity.emoji or "" if hasattr(activity, "emoji") else f"**{self.ACTIVITIES[int(activity.type)]}**"
embed.description += f"\n{preface} {activity.name}"
embed.add_field(name = "Joined", value = utils.stddate(user.joined_at), inline = True)
embed.add_field(name = "Status", value = f"Currently **{user.raw_status}**", inline = True)
if isinstance(ctx.channel, discord.abc.GuildChannel):
roles = (str(role.mention) for role in user.roles[1:])
embed.add_field(name = "Roles", value = ", ".join(("@everyone ", *roles)), inline = False)
await ctx.send(embed = embed)
def setup(bot):
bot.add_cog(MiscStuff(bot))
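# Illustration (added comment): distribute's helper distribution(n + 1) builds
# a derangement of 1..n -- a random bijection in which no contestant is paired
# with themselves, retrying whenever the last remaining value would force a
# self-pairing. For example (output will vary):
#
#     distribution(5)  # e.g. {1: 3, 2: 1, 3: 4, 4: 2}; no key maps to itself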
|
[
"html.unescape",
"discord.ext.commands.command",
"discord.ui.View",
"asyncio.sleep",
"datetime.datetime.now",
"collections.defaultdict",
"random.randrange",
"datetime.timedelta",
"discord.ext.commands.group",
"discord.ext.commands.is_owner"
] |
[((534, 645), 'discord.ext.commands.command', 'commands.command', ([], {'help': '"""Retrieves a random piece of advice.\nUses adviceslip.com"""', 'aliases': "('ad',)"}), '(help=\n    """Retrieves a random piece of advice.\nUses adviceslip.com""", aliases=\n    (\'ad\',))\n', (550, 645), False, 'from discord.ext import commands\n'), ((994, 1095), 'discord.ext.commands.command', 'commands.command', ([], {'help': '"""Generates a continuously updated countdown post."""', 'aliases': "('time', 'cd')"}), "(help='Generates a continuously updated countdown post.',\n    aliases=('time', 'cd'))\n", (1010, 1095), False, 'from discord.ext import commands\n'), ((1806, 1960), 'discord.ext.commands.command', 'commands.command', ([], {'help': '"""Generates a competition distribution.\n If no number is specified, asks for a list of names."""', 'aliases': "('dt', 'dist')"}), '(help=\n    """Generates a competition distribution.\n If no number is specified, asks for a list of names."""\n    , aliases=(\'dt\', \'dist\'))\n', (1822, 1960), False, 'from discord.ext import commands\n'), ((3039, 3130), 'discord.ext.commands.command', 'commands.command', ([], {'help': '"""Conducts a search using Google Images."""', 'aliases': "('img', 'gi')"}), "(help='Conducts a search using Google Images.', aliases=(\n    'img', 'gi'))\n", (3055, 3130), False, 'from discord.ext import commands\n'), ((3237, 3390), 'discord.ext.commands.command', 'commands.command', ([], {'help': '"""Chooses a random number.\n By default, this is out of 6, but another value can be specified."""', 'aliases': "('dice', 'd')"}), '(help=\n    """Chooses a random number.\n By default, this is out of 6, but another value can be specified."""\n    , aliases=(\'dice\', \'d\'))\n', (3253, 3390), False, 'from discord.ext import commands\n'), ((3684, 3804), 'discord.ext.commands.command', 'commands.command', ([], {'help': '"""Sends a post as the bot user. Handy for jokes and such."""', 'aliases': "('st',)", 'hidden': '(True)'}), "(help=\n    'Sends a post as the bot user. Handy for jokes and such.', aliases=(\n    'st',), hidden=True)\n", (3700, 3804), False, 'from discord.ext import commands\n'), ((3803, 3822), 'discord.ext.commands.is_owner', 'commands.is_owner', ([], {}), '()\n', (3820, 3822), False, 'from discord.ext import commands\n'), ((4003, 4175), 'discord.ext.commands.command', 'commands.command', ([], {'help': '"""Completes a passage of text using machine learning.\n This uses DeepAI\'s online model to compute the result."""', 'aliases': "('aitext', 'tg')"}), '(help=\n    """Completes a passage of text using machine learning.\n This uses DeepAI\'s online model to compute the result."""\n    , aliases=(\'aitext\', \'tg\'))\n', (4019, 4175), False, 'from discord.ext import commands\n'), ((4855, 5079), 'discord.ext.commands.group', 'commands.group', ([], {'invoke_without_command': '(True)', 'help': '"""Asks a trivia question that users can react to.\nOptionally, a numeric category can be specified.\nCourtesy of the Open Trivia Database.\n \n"""', 'aliases': "('q', 'tr')"}), '(invoke_without_command=True, help=\n    """Asks a trivia question that users can react to.\nOptionally, a numeric category can be specified.\nCourtesy of the Open Trivia Database.\n \n"""\n    , aliases=(\'q\', \'tr\'))\n', (4869, 5079), False, 'from discord.ext import commands\n'), ((7362, 7427), 'discord.ext.commands.command', 'commands.command', ([], {'help': '"""Looks up a Discord user."""', 'aliases': "('u',)"}), "(help='Looks up a Discord user.', aliases=('u',))\n", (7378, 7427), False, 'from discord.ext import commands\n'), ((3539, 3571), 'random.randrange', 'random.randrange', (['(1)', '(ceiling + 1)'], {}), '(1, ceiling + 1)\n', (3555, 3571), False, 'import discord, asyncio, typing, random, os, html\n'), ((5786, 5846), 'random.randrange', 'random.randrange', (['(0)', "(2 if result['type'] == 'boolean' else 4)"], {}), "(0, 2 if result['type'] == 'boolean' else 4)\n", (5802, 5846), False, 'import discord, asyncio, typing, random, os, html\n'), ((6104, 6113), 'discord.ui.View', 'ui.View', ([], {}), '()\n', (6111, 6113), False, 'from discord import ui\n'), ((6510, 6527), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (6521, 6527), False, 'from collections import defaultdict\n'), ((1224, 1244), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(1)'}), '(minutes=1)\n', (1233, 1244), False, 'from datetime import datetime, timezone, timedelta\n'), ((3580, 3596), 'asyncio.sleep', 'asyncio.sleep', (['(2)'], {}), '(2)\n', (3593, 3596), False, 'import discord, asyncio, typing, random, os, html\n'), ((5713, 5746), 'html.unescape', 'html.unescape', (["result['question']"], {}), "(result['question'])\n", (5726, 5746), False, 'import discord, asyncio, typing, random, os, html\n'), ((6273, 6290), 'asyncio.sleep', 'asyncio.sleep', (['(60)'], {}), '(60)\n', (6286, 6290), False, 'import discord, asyncio, typing, random, os, html\n'), ((1189, 1218), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'timezone.utc'}), '(tz=timezone.utc)\n', (1201, 1218), False, 'from datetime import datetime, timezone, timedelta\n'), ((6350, 6381), 'html.unescape', 'html.unescape', (['answers[correct]'], {}), '(answers[correct])\n', (6363, 6381), False, 'import discord, asyncio, typing, random, os, html\n')]
|
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.chrome.options import Options
options = Options()
options.headless = True
# options.add_argument('--proxy-server http://127.0.0.1:8001')
options.binary_location = '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome'
driver = webdriver.Chrome(
executable_path='/usr/local/bin/chromedriver', options=options)
# driver.get('https://mrmockup.com/freebies/')
driver.get('https://cn.bing.com/')
content = driver.find_elements_by_css_selector(
'#b_footerItems li.b_footerItems_icp')
anchor = driver.find_element_by_css_selector('#scpl4')
print(f"a.href: {anchor.get_attribute('href')}")
# print(driver.page_source)
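# Note: executable_path= is deprecated as of Selenium 4. The modern equivalent
# (sketch, same assumed driver path) would be:
#
#     from selenium.webdriver.chrome.service import Service
#     driver = webdriver.Chrome(service=Service('/usr/local/bin/chromedriver'),
#                               options=options)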
|
[
"selenium.webdriver.chrome.options.Options",
"selenium.webdriver.Chrome"
] |
[((144, 153), 'selenium.webdriver.chrome.options.Options', 'Options', ([], {}), '()\n', (151, 153), False, 'from selenium.webdriver.chrome.options import Options\n'), ((340, 425), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'executable_path': '"""/usr/local/bin/chromedriver"""', 'options': 'options'}), "(executable_path='/usr/local/bin/chromedriver', options=options\n )\n", (356, 425), False, 'from selenium import webdriver\n')]
|
"""A Martel format to parse the output from transfac.
Formats:
format Format for a whole file.
"""
import warnings
warnings.warn("Bio.expressions was deprecated, as it does not work with recent versions of mxTextTools. If you want to continue to use this module, please get in contact with the Biopython developers at <EMAIL> to avoid permanent removal of this module from Biopython", DeprecationWarning)
import sys
from Martel import *
from Martel import RecordReader
blank_line = Opt(Spaces()) + AnyEol()
MATRIX_LINE = Str("Search for sites by WeightMatrix library:") + Spaces() + \
UntilEol("matrix_file") + AnyEol()
SEQUENCE_LINE = Str("Sequence file:") + Spaces() + \
UntilEol("sequence_file") + AnyEol()
PROFILE_LINE = Str("Site selection profile:") + Spaces() + \
UntilSep("profile_file", sep=" ") + Spaces() + \
UntilEol("profile_description") + AnyEol()
TITLE_LINE = Str("Inspecting sequence ID") + Spaces() + \
UntilSep("entryname", sep=" ") + Spaces() + \
UntilSep("dataclass", sep=";") + Str(";") + Spaces() + \
UntilSep("molecule", sep=";") + Str(";") + Spaces() + \
UntilSep("division", sep=";") + Str(";") + Spaces() + \
UntilSep("sequencelength", sep=" ") + Spaces() + Str("BP") + \
UntilEol() + AnyEol()
def SS(exp): # expression surrounded by optional spaces.
return Opt(Spaces()) + exp + Opt(Spaces())
DATA_LINE = \
SS(UntilSep("matrix_identifier", sep=" |")) + \
Str("|") + \
SS(UntilSep("position", sep=" ")) + \
SS(Str("(") + Group("strand", Any("+-")) + Str(")")) + \
Str("|") + \
SS(Float("core_match")) + \
Str("|") + \
SS(Float("matrix_match")) + \
Str("|") + \
Opt(Spaces()) + UntilEol("sequence") + AnyEol()
SEQUENCES_LENGTH_LINE = \
Spaces() + Str("Total sequences length=") + Integer("sequences_length") + \
AnyEol()
FOUND_SITES_LINE = \
Spaces() + Str("Total number of found sites=") + Integer("found_sites") + \
AnyEol()
SITE_FREQUENCY_LINE = \
Spaces() + Str("Frequency of sites per nucleotide=") + \
Float("sites_per_nucleotide") + AnyEol()
format = MATRIX_LINE + \
SEQUENCE_LINE + \
PROFILE_LINE + \
blank_line + \
TITLE_LINE + \
blank_line + \
Rep(DATA_LINE) + \
blank_line + \
SEQUENCES_LENGTH_LINE + \
blank_line + \
FOUND_SITES_LINE + \
blank_line + \
SITE_FREQUENCY_LINE
|
[
"warnings.warn"
] |
[((130, 429), 'warnings.warn', 'warnings.warn', (['"""Bio.expressions was deprecated, as it does not work with recent versions of mxTextTools. If you want to continue to use this module, please get in contact with the Biopython developers at <EMAIL> to avoid permanent removal of this module from Biopython"""', 'DeprecationWarning'], {}), "(\n 'Bio.expressions was deprecated, as it does not work with recent versions of mxTextTools. If you want to continue to use this module, please get in contact with the Biopython developers at <EMAIL> to avoid permanent removal of this module from Biopython'\n , DeprecationWarning)\n", (143, 429), False, 'import warnings\n')]
|
from django.contrib import admin
from jab.models import Post, SidebarItem
class PostAdmin(admin.ModelAdmin):
list_display = ('publication_date', 'title', 'status',)
ordering = ('-publication_date',)
admin.site.register(Post, PostAdmin)
class SidebarItemAdmin(admin.ModelAdmin):
pass
admin.site.register(SidebarItem, SidebarItemAdmin)
|
[
"django.contrib.admin.site.register"
] |
[((211, 247), 'django.contrib.admin.site.register', 'admin.site.register', (['Post', 'PostAdmin'], {}), '(Post, PostAdmin)\n', (230, 247), False, 'from django.contrib import admin\n'), ((302, 352), 'django.contrib.admin.site.register', 'admin.site.register', (['SidebarItem', 'SidebarItemAdmin'], {}), '(SidebarItem, SidebarItemAdmin)\n', (321, 352), False, 'from django.contrib import admin\n')]
|
# !/usr/bin/env python3
# -*- coding:utf-8 -*-
# @Time : 2022/05/00 16:47
# @Author : clear
# @FileName: test_get_laplacian.py
import tensorlayerx as tlx
from gammagl.utils.get_laplacian import get_laplacian
def test_get_laplacian():
edge_index = tlx.convert_to_tensor([[0, 1, 1, 2], [1, 0, 2, 1]], dtype=tlx.int64)
edge_weight = tlx.convert_to_tensor([1, 2, 2, 4], dtype=tlx.float32)
lap = get_laplacian(edge_index, 3, edge_weight)
assert tlx.convert_to_numpy(lap[0]).tolist() == [[0, 1, 1, 2, 0, 1, 2], [1, 0, 2, 1, 0, 1, 2]]
assert tlx.convert_to_numpy(lap[1]).tolist() == [-1, -2, -2, -4, 1, 4, 4]
lap_sym = get_laplacian(edge_index, 3, edge_weight, normalization='sym')
assert tlx.convert_to_numpy(lap_sym[0]).tolist() == tlx.convert_to_numpy(lap[0]).tolist()
assert tlx.convert_to_numpy(lap_sym[1]).tolist() == [-0.5, -1, -0.5, -1, 1, 1, 1]
lap_rw = get_laplacian(edge_index, 3, edge_weight, normalization='rw')
assert tlx.convert_to_numpy(lap_rw[0]).tolist() == tlx.convert_to_numpy(lap[0]).tolist()
assert tlx.convert_to_numpy(lap_rw[1]).tolist() == [-1, -0.5, -0.5, -1, 1, 1, 1]
|
[
"tensorlayerx.convert_to_numpy",
"gammagl.utils.get_laplacian.get_laplacian",
"tensorlayerx.convert_to_tensor"
] |
[((258, 326), 'tensorlayerx.convert_to_tensor', 'tlx.convert_to_tensor', (['[[0, 1, 1, 2], [1, 0, 2, 1]]'], {'dtype': 'tlx.int64'}), '([[0, 1, 1, 2], [1, 0, 2, 1]], dtype=tlx.int64)\n', (279, 326), True, 'import tensorlayerx as tlx\n'), ((345, 399), 'tensorlayerx.convert_to_tensor', 'tlx.convert_to_tensor', (['[1, 2, 2, 4]'], {'dtype': 'tlx.float32'}), '([1, 2, 2, 4], dtype=tlx.float32)\n', (366, 399), True, 'import tensorlayerx as tlx\n'), ((411, 452), 'gammagl.utils.get_laplacian.get_laplacian', 'get_laplacian', (['edge_index', '(3)', 'edge_weight'], {}), '(edge_index, 3, edge_weight)\n', (424, 452), False, 'from gammagl.utils.get_laplacian import get_laplacian\n'), ((645, 707), 'gammagl.utils.get_laplacian.get_laplacian', 'get_laplacian', (['edge_index', '(3)', 'edge_weight'], {'normalization': '"""sym"""'}), "(edge_index, 3, edge_weight, normalization='sym')\n", (658, 707), False, 'from gammagl.utils.get_laplacian import get_laplacian\n'), ((903, 964), 'gammagl.utils.get_laplacian.get_laplacian', 'get_laplacian', (['edge_index', '(3)', 'edge_weight'], {'normalization': '"""rw"""'}), "(edge_index, 3, edge_weight, normalization='rw')\n", (916, 964), False, 'from gammagl.utils.get_laplacian import get_laplacian\n'), ((464, 492), 'tensorlayerx.convert_to_numpy', 'tlx.convert_to_numpy', (['lap[0]'], {}), '(lap[0])\n', (484, 492), True, 'import tensorlayerx as tlx\n'), ((563, 591), 'tensorlayerx.convert_to_numpy', 'tlx.convert_to_numpy', (['lap[1]'], {}), '(lap[1])\n', (583, 591), True, 'import tensorlayerx as tlx\n'), ((720, 752), 'tensorlayerx.convert_to_numpy', 'tlx.convert_to_numpy', (['lap_sym[0]'], {}), '(lap_sym[0])\n', (740, 752), True, 'import tensorlayerx as tlx\n'), ((765, 793), 'tensorlayerx.convert_to_numpy', 'tlx.convert_to_numpy', (['lap[0]'], {}), '(lap[0])\n', (785, 793), True, 'import tensorlayerx as tlx\n'), ((814, 846), 'tensorlayerx.convert_to_numpy', 'tlx.convert_to_numpy', (['lap_sym[1]'], {}), '(lap_sym[1])\n', (834, 846), True, 'import tensorlayerx as tlx\n'), ((976, 1007), 'tensorlayerx.convert_to_numpy', 'tlx.convert_to_numpy', (['lap_rw[0]'], {}), '(lap_rw[0])\n', (996, 1007), True, 'import tensorlayerx as tlx\n'), ((1020, 1048), 'tensorlayerx.convert_to_numpy', 'tlx.convert_to_numpy', (['lap[0]'], {}), '(lap[0])\n', (1040, 1048), True, 'import tensorlayerx as tlx\n'), ((1069, 1100), 'tensorlayerx.convert_to_numpy', 'tlx.convert_to_numpy', (['lap_rw[1]'], {}), '(lap_rw[1])\n', (1089, 1100), True, 'import tensorlayerx as tlx\n')]
|
from typing import Any
import pytest
from pytestqt.qtbot import QtBot
from qtpy.QtCore import Signal, QObject
import numpy as np
from pydm.application import PyDMApplication
from pydm.data_plugins.calc_plugin import epics_string, epics_unsigned
from pydm.widgets.channel import PyDMChannel
@pytest.mark.parametrize(
"input_string,expected",
[
(np.array((0x6f, 0x6b, 0x61, 0x79, 0, 42), dtype=np.int8), "okay"),
(np.array((0x6f, 0x6b, 0x61, 0x79), dtype=np.int8), "okay"),
(np.array((0, 0x6f, 0x6b, 0x61, 0x79, 0, 42, 42), dtype=np.int8), ""),
],
)
def test_epics_string(input_string: str, expected: str):
assert epics_string(input_string) == expected
@pytest.mark.parametrize(
"input_int,bits,expected",
[
(100, 32, 100),
(-1, 8, 255),
(-2, 4, 0b1110),
],
)
def test_epics_unsigned(input_int: int, bits: int, expected: int):
assert epics_unsigned(input_int, bits) == expected
@pytest.mark.parametrize(
"calc,input1,expected1,input2,expected2",
[
('val + 3', 0, 3, 1, 4),
('int(np.abs(val))', -5, 5, -10, 10),
('math.floor(val)', 3.4, 3, 5.7, 5),
('epics_string(val)',
np.array((0x61, 0), dtype=np.int8), 'a',
np.array((0x62, 0), dtype=np.int8), 'b'),
('epics_unsigned(val, 8)', -1, 255, -2, 254),
]
)
def test_calc_plugin(
qapp: PyDMApplication,
qtbot: QtBot,
calc: str,
input1: Any,
expected1: Any,
input2: Any,
expected2: Any,
):
class SigHolder(QObject):
sig = Signal(type(input1))
sig_holder = SigHolder()
type_str = str(type(input1))
local_addr = f'loc://test_calc_plugin_local_{calc}'
local_ch = PyDMChannel(
address=f'{local_addr}?type={type_str}&init={input1}',
value_signal=sig_holder.sig,
)
local_ch.connect()
calc_values = []
def new_calc_value(val: Any):
calc_values.append(val)
calc_addr = f'calc://test_calc_plugin_calc_{calc}'
calc_ch = PyDMChannel(
address=f'{calc_addr}?val={local_addr}&expr={calc}',
value_slot=new_calc_value,
)
calc_ch.connect()
sig_holder.sig.emit(input1)
def has_value():
assert len(calc_values) >= 1
qtbot.wait_until(has_value)
assert calc_values[0] == expected1
calc_values.clear()
sig_holder.sig.emit(input2)
qtbot.wait_until(has_value)
assert calc_values[0] == expected2
|
[
"pydm.data_plugins.calc_plugin.epics_string",
"pydm.widgets.channel.PyDMChannel",
"pydm.data_plugins.calc_plugin.epics_unsigned",
"numpy.array",
"pytest.mark.parametrize"
] |
[((698, 797), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""input_int,bits,expected"""', '[(100, 32, 100), (-1, 8, 255), (-2, 4, 14)]'], {}), "('input_int,bits,expected', [(100, 32, 100), (-1, 8,\n 255), (-2, 4, 14)])\n", (721, 797), False, 'import pytest\n'), ((1717, 1816), 'pydm.widgets.channel.PyDMChannel', 'PyDMChannel', ([], {'address': 'f"""{local_addr}?type={type_str}&init={input1}"""', 'value_signal': 'sig_holder.sig'}), "(address=f'{local_addr}?type={type_str}&init={input1}',\n value_signal=sig_holder.sig)\n", (1728, 1816), False, 'from pydm.widgets.channel import PyDMChannel\n'), ((2017, 2113), 'pydm.widgets.channel.PyDMChannel', 'PyDMChannel', ([], {'address': 'f"""{calc_addr}?val={local_addr}&expr={calc}"""', 'value_slot': 'new_calc_value'}), "(address=f'{calc_addr}?val={local_addr}&expr={calc}', value_slot\n =new_calc_value)\n", (2028, 2113), False, 'from pydm.widgets.channel import PyDMChannel\n'), ((656, 682), 'pydm.data_plugins.calc_plugin.epics_string', 'epics_string', (['input_string'], {}), '(input_string)\n', (668, 682), False, 'from pydm.data_plugins.calc_plugin import epics_string, epics_unsigned\n'), ((918, 949), 'pydm.data_plugins.calc_plugin.epics_unsigned', 'epics_unsigned', (['input_int', 'bits'], {}), '(input_int, bits)\n', (932, 949), False, 'from pydm.data_plugins.calc_plugin import epics_string, epics_unsigned\n'), ((364, 415), 'numpy.array', 'np.array', (['(111, 107, 97, 121, 0, 42)'], {'dtype': 'np.int8'}), '((111, 107, 97, 121, 0, 42), dtype=np.int8)\n', (372, 415), True, 'import numpy as np\n'), ((440, 484), 'numpy.array', 'np.array', (['(111, 107, 97, 121)'], {'dtype': 'np.int8'}), '((111, 107, 97, 121), dtype=np.int8)\n', (448, 484), True, 'import numpy as np\n'), ((509, 567), 'numpy.array', 'np.array', (['(0, 111, 107, 97, 121, 0, 42, 42)'], {'dtype': 'np.int8'}), '((0, 111, 107, 97, 121, 0, 42, 42), dtype=np.int8)\n', (517, 567), True, 'import numpy as np\n'), ((1205, 1237), 'numpy.array', 'np.array', (['(97, 0)'], {'dtype': 'np.int8'}), '((97, 0), dtype=np.int8)\n', (1213, 1237), True, 'import numpy as np\n'), ((1255, 1287), 'numpy.array', 'np.array', (['(98, 0)'], {'dtype': 'np.int8'}), '((98, 0), dtype=np.int8)\n', (1263, 1287), True, 'import numpy as np\n')]
|
import io
import pytest
import stray_recipe_manager.units
import stray_recipe_manager.storage
from stray_recipe_manager.recipe import (
CommentedRecipe,
Recipe,
Ingredient,
RecipeStep,
)
ureg = stray_recipe_manager.units.default_unit_registry
@pytest.fixture(scope="module")
def toml_coding():
registry = stray_recipe_manager.units.UnitHandler(ureg)
toml_load = stray_recipe_manager.storage.TOMLCoding(registry)
return toml_load
@pytest.mark.parametrize(
"recipe",
[
Recipe(
name="Boiling Water",
makes=Ingredient(item="Boiling water", quantity=1.0 * ureg.cup),
tools=["Saucepan"],
ingredients=[Ingredient(item="Water", quantity=1 * ureg.cup,)],
steps=[
RecipeStep(description="Place water on stove until boiling")
],
),
CommentedRecipe(
name="Boiling Water",
comments="Utterly basic",
makes=Ingredient(item="Boiling water", quantity=1.0 * ureg.cup),
tools=["Saucepan"],
ingredients=[Ingredient(item="Water", quantity=1 * ureg.cup,)],
steps=[
RecipeStep(description="Place water on stove until boiling")
],
),
Recipe(
name="Boiling Water",
makes=Ingredient(item="Boiling water", quantity=1.0 * ureg.cup),
tools=["Saucepan"],
ingredients=[Ingredient(item="Water", quantity=1 * ureg.cup,)],
steps=[
RecipeStep(
description="Place water on stove until boiling",
time=10 * ureg.min,
)
],
),
CommentedRecipe(
name="Boiling Water",
makes=Ingredient(item="Boiling water", quantity=1.0 * ureg.cup),
comments="Utterly basic",
tools=["Saucepan"],
ingredients=[Ingredient(item="Water", quantity=1 * ureg.cup,)],
steps=[
RecipeStep(
description="Place water on stove until boiling",
time=10 * ureg.min,
)
],
),
],
)
def test_recipe_round_trip(recipe, toml_coding):
# type: (Recipe, stray_recipe_manager.storage.TOMLCoding) -> None
fstream = io.StringIO()
toml_coding.write_recipe_to_toml_file(fstream, recipe)
fstream.seek(0)
print(fstream.getvalue())
n_recipe = toml_coding.load_recipe_from_toml_file(fstream)
assert recipe == n_recipe
|
[
"stray_recipe_manager.recipe.Ingredient",
"io.StringIO",
"pytest.fixture",
"stray_recipe_manager.recipe.RecipeStep"
] |
[((264, 294), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (278, 294), False, 'import pytest\n'), ((2335, 2348), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (2346, 2348), False, 'import io\n'), ((577, 634), 'stray_recipe_manager.recipe.Ingredient', 'Ingredient', ([], {'item': '"""Boiling water"""', 'quantity': '(1.0 * ureg.cup)'}), "(item='Boiling water', quantity=1.0 * ureg.cup)\n", (587, 634), False, 'from stray_recipe_manager.recipe import CommentedRecipe, Recipe, Ingredient, RecipeStep\n'), ((982, 1039), 'stray_recipe_manager.recipe.Ingredient', 'Ingredient', ([], {'item': '"""Boiling water"""', 'quantity': '(1.0 * ureg.cup)'}), "(item='Boiling water', quantity=1.0 * ureg.cup)\n", (992, 1039), False, 'from stray_recipe_manager.recipe import CommentedRecipe, Recipe, Ingredient, RecipeStep\n'), ((1340, 1397), 'stray_recipe_manager.recipe.Ingredient', 'Ingredient', ([], {'item': '"""Boiling water"""', 'quantity': '(1.0 * ureg.cup)'}), "(item='Boiling water', quantity=1.0 * ureg.cup)\n", (1350, 1397), False, 'from stray_recipe_manager.recipe import CommentedRecipe, Recipe, Ingredient, RecipeStep\n'), ((1786, 1843), 'stray_recipe_manager.recipe.Ingredient', 'Ingredient', ([], {'item': '"""Boiling water"""', 'quantity': '(1.0 * ureg.cup)'}), "(item='Boiling water', quantity=1.0 * ureg.cup)\n", (1796, 1843), False, 'from stray_recipe_manager.recipe import CommentedRecipe, Recipe, Ingredient, RecipeStep\n'), ((693, 740), 'stray_recipe_manager.recipe.Ingredient', 'Ingredient', ([], {'item': '"""Water"""', 'quantity': '(1 * ureg.cup)'}), "(item='Water', quantity=1 * ureg.cup)\n", (703, 740), False, 'from stray_recipe_manager.recipe import CommentedRecipe, Recipe, Ingredient, RecipeStep\n'), ((780, 840), 'stray_recipe_manager.recipe.RecipeStep', 'RecipeStep', ([], {'description': '"""Place water on stove until boiling"""'}), "(description='Place water on stove until boiling')\n", (790, 840), False, 'from stray_recipe_manager.recipe import CommentedRecipe, Recipe, Ingredient, RecipeStep\n'), ((1098, 1145), 'stray_recipe_manager.recipe.Ingredient', 'Ingredient', ([], {'item': '"""Water"""', 'quantity': '(1 * ureg.cup)'}), "(item='Water', quantity=1 * ureg.cup)\n", (1108, 1145), False, 'from stray_recipe_manager.recipe import CommentedRecipe, Recipe, Ingredient, RecipeStep\n'), ((1185, 1245), 'stray_recipe_manager.recipe.RecipeStep', 'RecipeStep', ([], {'description': '"""Place water on stove until boiling"""'}), "(description='Place water on stove until boiling')\n", (1195, 1245), False, 'from stray_recipe_manager.recipe import CommentedRecipe, Recipe, Ingredient, RecipeStep\n'), ((1456, 1503), 'stray_recipe_manager.recipe.Ingredient', 'Ingredient', ([], {'item': '"""Water"""', 'quantity': '(1 * ureg.cup)'}), "(item='Water', quantity=1 * ureg.cup)\n", (1466, 1503), False, 'from stray_recipe_manager.recipe import CommentedRecipe, Recipe, Ingredient, RecipeStep\n'), ((1543, 1628), 'stray_recipe_manager.recipe.RecipeStep', 'RecipeStep', ([], {'description': '"""Place water on stove until boiling"""', 'time': '(10 * ureg.min)'}), "(description='Place water on stove until boiling', time=10 * ureg.min\n )\n", (1553, 1628), False, 'from stray_recipe_manager.recipe import CommentedRecipe, Recipe, Ingredient, RecipeStep\n'), ((1940, 1987), 'stray_recipe_manager.recipe.Ingredient', 'Ingredient', ([], {'item': '"""Water"""', 'quantity': '(1 * ureg.cup)'}), "(item='Water', quantity=1 * ureg.cup)\n", (1950, 1987), False, 'from stray_recipe_manager.recipe import CommentedRecipe, Recipe, Ingredient, RecipeStep\n'), ((2027, 2112), 'stray_recipe_manager.recipe.RecipeStep', 'RecipeStep', ([], {'description': '"""Place water on stove until boiling"""', 'time': '(10 * ureg.min)'}), "(description='Place water on stove until boiling', time=10 * ureg.min\n )\n", (2037, 2112), False, 'from stray_recipe_manager.recipe import CommentedRecipe, Recipe, Ingredient, RecipeStep\n')]
|
import copy
import queue
import time
import logging
import binascii
from enum import Enum
from scrutiny.server.protocol.comm_handler import CommHandler
from scrutiny.server.protocol import Protocol, ResponseCode
from scrutiny.server.device.device_searcher import DeviceSearcher
from scrutiny.server.device.request_dispatcher import RequestDispatcher
from scrutiny.server.device.heartbeat_generator import HeartbeatGenerator
from scrutiny.core.firmware_id import PLACEHOLDER as DEFAULT_FIRMWARE_ID
from scrutiny.server.server_tools import Timer
DEFAULT_FIRMWARE_ID_ASCII = binascii.hexlify(DEFAULT_FIRMWARE_ID).decode('ascii')
class DeviceHandler:
DEFAULT_PARAMS = {
        'response_timeout' : 1.0, # If a response takes more than this delay to be received after a request is sent, drop the response.
'heartbeat_timeout' : 4.0
}
class FsmState(Enum):
INIT = 0
DISCOVERING = 1
CONNECTING = 2
POLLING_INFO = 3
def __init__(self, config, datastore):
self.logger = logging.getLogger(self.__class__.__name__)
self.config = copy.copy(self.DEFAULT_PARAMS)
self.config.update(config)
self.datastore = datastore
self.dispatcher = RequestDispatcher()
self.protocol = Protocol(1,0)
self.device_searcher = DeviceSearcher(self.protocol, self.dispatcher)
self.heartbeat_generator = HeartbeatGenerator(self.protocol, self.dispatcher)
self.comm_handler = CommHandler(self.config)
self.heartbeat_generator.set_interval(max(0.5, self.config['heartbeat_timeout'] * 0.75))
self.comm_broken = False
self.device_id = None
self.reconnect_timer = Timer(1)
self.reset_comm()
def reset_comm(self):
if self.comm_broken and self.device_id is not None:
self.logger.info('Communication with device stopped. Restarting')
self.connected = False
self.fsm_state = self.FsmState.INIT
self.last_fsm_state = self.FsmState.INIT
self.active_request_record = None
self.device_id = None
self.comm_broken = False
self.device_searcher.stop()
self.heartbeat_generator.stop()
self.session_id = None
self.reconnect_timer.stop()
def init_comm(self):
if self.config['link_type'] == 'none':
return
if self.config['link_type'] == 'udp':
from .links.udp_link import UdpLink
link_class = UdpLink
elif self.config['link_type'] == 'dummy':
from .links.dummy_link import DummyLink
link_class = DummyLink
else:
raise ValueError('Unknown link type %s' % self.config['link_type'])
device_link = link_class(self.config['link_config']) #instantiate the class
self.comm_handler.open(device_link)
self.reset_comm()
def stop_comm(self):
if self.comm_handler is not None:
self.comm_handler.close()
self.reset_comm()
def refresh_vars(self):
pass
def process(self):
self.device_searcher.process()
self.heartbeat_generator.process()
self.handle_comm() # Make sure request and response are being exchanged with the device
self.do_state_machine()
def do_state_machine(self):
if self.comm_broken:
self.fsm_state = self.FsmState.INIT
        if self.connected:
            if time.time() - self.heartbeat_generator.last_valid_heartbeat_timestamp() > self.config['heartbeat_timeout']:
                self.comm_broken = True  # heartbeat timed out; mark comm broken so the FSM resets
# === FSM ===
state_entry = True if self.fsm_state != self.last_fsm_state else False
next_state = self.fsm_state
if self.fsm_state == self.FsmState.INIT:
self.reset_comm()
next_state = self.FsmState.DISCOVERING
#============= DISCOVERING =====================
elif self.fsm_state == self.FsmState.DISCOVERING:
if state_entry:
self.device_searcher.start()
found_device_id = self.device_searcher.get_found_device_ascii()
if found_device_id is not None:
if self.device_id is None:
self.logger.info('Found a device - %s' % found_device_id)
self.device_id = found_device_id
if found_device_id == DEFAULT_FIRMWARE_ID_ASCII:
self.logger.warning("Firmware ID of this device is a default placeholder. Firmware might not have been tagged with a valid ID in the build toolchain.")
if self.device_id is not None:
self.device_searcher.stop()
next_state = self.FsmState.CONNECTING
#============= CONNECTING =====================
elif self.fsm_state == self.FsmState.CONNECTING:
if state_entry:
self.comm_handler.reset() # Clear any active transmission. Just for safety
if not self.comm_handler.waiting_response():
if self.reconnect_timer.is_stopped() or self.reconnect_timer.is_timed_out():
self.comm_handler.send_request(self.protocol.comm_connect())
if self.comm_handler.has_timed_out():
self.comm_broken = True
elif self.comm_handler.response_available():
response = self.comm_handler.get_response()
if response.code == ResponseCode.OK:
self.session_id = self.protocol.parse_response(response)['session_id']
self.logger.debug("Session ID set : 0x%08x" % self.session_id)
self.heartbeat_generator.set_session_id(self.session_id)
                    self.heartbeat_generator.start() # This guy will send recurrent heartbeat request. If that request fails (timeout), comm will be reset
self.connected = True
next_state = self.FsmState.POLLING_INFO
else:
self.reconnect_timer.start()
elif self.fsm_state == self.FsmState.POLLING_INFO:
pass
# ==== FSM END ====
self.last_fsm_state = self.fsm_state
if next_state != self.fsm_state:
self.logger.debug('Moving FSM to state %s' % next_state)
self.fsm_state = next_state
def handle_comm(self):
self.comm_handler.process() # Process reception
if not self.comm_handler.is_open():
return
if self.active_request_record is None: # We haven't send a request
record = self.dispatcher.next()
if record is not None: # A new request to send
self.active_request_record = record
self.comm_handler.send_request(record.request)
else:
            if self.comm_handler.has_timed_out():   # The request we sent has timed out; no response received
self.comm_broken = True
self.comm_handler.clear_timeout()
self.active_request_record.complete(success=False)
            elif self.comm_handler.waiting_response():  # We are still waiting for a response
if self.comm_handler.response_available(): # We got a response! yay
response = self.comm_handler.get_response()
try:
data = self.protocol.parse_response(response)
self.active_request_record.complete(success=True, response=response, response_data=data) # Valid response if we get here.
except Exception as e: # Malformed response.
self.comm_broken = True
self.logger.error("Invalid response received. %s" % str(e))
self.active_request_record.complete(success=False)
else: # should never happen - paranoid check.
self.comm_broken = True
self.comm_handler.reset()
self.active_request_record.complete(success=False)
if self.active_request_record.is_completed(): # If we have called a callback, then we are done with this request.
self.active_request_record = None
self.comm_handler.process() # Process new transmission now.
|
[
"scrutiny.server.protocol.comm_handler.CommHandler",
"binascii.hexlify",
"scrutiny.server.device.request_dispatcher.RequestDispatcher",
"copy.copy",
"scrutiny.server.device.device_searcher.DeviceSearcher",
"time.time",
"scrutiny.server.device.heartbeat_generator.HeartbeatGenerator",
"scrutiny.server.protocol.Protocol",
"scrutiny.server.server_tools.Timer",
"logging.getLogger"
] |
[((574, 611), 'binascii.hexlify', 'binascii.hexlify', (['DEFAULT_FIRMWARE_ID'], {}), '(DEFAULT_FIRMWARE_ID)\n', (590, 611), False, 'import binascii\n'), ((1045, 1087), 'logging.getLogger', 'logging.getLogger', (['self.__class__.__name__'], {}), '(self.__class__.__name__)\n', (1062, 1087), False, 'import logging\n'), ((1111, 1141), 'copy.copy', 'copy.copy', (['self.DEFAULT_PARAMS'], {}), '(self.DEFAULT_PARAMS)\n', (1120, 1141), False, 'import copy\n'), ((1238, 1257), 'scrutiny.server.device.request_dispatcher.RequestDispatcher', 'RequestDispatcher', ([], {}), '()\n', (1255, 1257), False, 'from scrutiny.server.device.request_dispatcher import RequestDispatcher\n'), ((1282, 1296), 'scrutiny.server.protocol.Protocol', 'Protocol', (['(1)', '(0)'], {}), '(1, 0)\n', (1290, 1296), False, 'from scrutiny.server.protocol import Protocol, ResponseCode\n'), ((1327, 1373), 'scrutiny.server.device.device_searcher.DeviceSearcher', 'DeviceSearcher', (['self.protocol', 'self.dispatcher'], {}), '(self.protocol, self.dispatcher)\n', (1341, 1373), False, 'from scrutiny.server.device.device_searcher import DeviceSearcher\n'), ((1409, 1459), 'scrutiny.server.device.heartbeat_generator.HeartbeatGenerator', 'HeartbeatGenerator', (['self.protocol', 'self.dispatcher'], {}), '(self.protocol, self.dispatcher)\n', (1427, 1459), False, 'from scrutiny.server.device.heartbeat_generator import HeartbeatGenerator\n'), ((1489, 1513), 'scrutiny.server.protocol.comm_handler.CommHandler', 'CommHandler', (['self.config'], {}), '(self.config)\n', (1500, 1513), False, 'from scrutiny.server.protocol.comm_handler import CommHandler\n'), ((1706, 1714), 'scrutiny.server.server_tools.Timer', 'Timer', (['(1)'], {}), '(1)\n', (1711, 1714), False, 'from scrutiny.server.server_tools import Timer\n'), ((3463, 3474), 'time.time', 'time.time', ([], {}), '()\n', (3472, 3474), False, 'import time\n')]
|
import cv2
import numpy as np
import matplotlib.pyplot as plt
def main():
path = "C:\\Users\\enesa\\Documents\\MATLAB\\blobs_objects.jpg"
img = cv2.imread(path, 1)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
filter1 = np.array(([0, -1, 0], [-1, 5, -1], [0, -1, 0]), np.float32) #Sharpening Filter
output = cv2.filter2D(img, -1, filter1) #convolution filter
blur = cv2.GaussianBlur(img,(5,5),0)
gray = cv2.cvtColor(blur, cv2.COLOR_BGR2GRAY)
_, thresh = cv2.threshold(gray,170,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
###########################################################################################################################
# Create a simple filter. The kernel slides through the image (as in 2D convolution).
kernel = np.ones((3, 3), np.uint8)
# Create a Rectangular Structuring Element
se1 = cv2.getStructuringElement(cv2.MORPH_RECT,(5,5))
# Create a Elliptical Structuring Element
se2 = cv2.getStructuringElement(cv2.MORPH_ELLIPSE,(5,5))
# Apply Erosion method over the image with kernel
erosion = cv2.erode(thresh,se1,iterations = 1)
# Apply Dilation method over the image with kernel
dilation = cv2.dilate(thresh,se2,iterations = 1)
# Noise removal using Morphological closing operation
closing = cv2.morphologyEx(dilation, cv2.MORPH_CLOSE, kernel, iterations = 4)
# Noise removal using Morphological opening operation
opening = cv2.morphologyEx(erosion, cv2.MORPH_OPEN, kernel, iterations = 1)
###########################################################################################################################
dilation = 255 - dilation # Complementing Operation
    _, contours, _ = cv2.findContours(dilation, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)  # OpenCV 3.x signature; OpenCV 4.x returns only (contours, hierarchy)
    print("{} Objects have been detected!".format(len(contours)))
original = cv2.imread(path, 1)
original = cv2.cvtColor(original, cv2.COLOR_BGR2RGB)
sayac = 0
for i in contours:
# perimeter = cv2.arcLength(i,True)
# if perimeter > 20:
sayac = sayac +1
#cv2.drawContours(img, contours, -1, (0, 0, 255), 2)
x,y,w,h = cv2.boundingRect(i)
cv2.rectangle(img,(x,y),(x+w,y+h),(0,255,0),2)
cv2.putText(img, str(sayac), (x+10, y+15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1)
        #plt.plot(cx, cy, color='red', marker='o', linestyle='dashed', linewidth=2, markersize=12) # mark the chosen point with an 'x'.
#cv2.putText(img, 'x', (cx, cy), cv2.FONT_HERSHEY_SIMPLEX, 1.0, (0, 0, 255), 2)
#cv2.putText(closing, str(sayac), (cx, cy), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (0, 0, 255), 1)
print("{} Objects have drown!".format(sayac))
###########################################################################################################################
# output = [original, img]
# titles = ['Original', 'Contours']
# for i in range(2):
# plt.subplot(1, 2, i+1)
# plt.imshow(output[i])
# plt.title(titles[i])
# plt.xticks([])
# plt.yticks([])
    cv2.imshow('Original Image', img)
#cv2.imshow('Erosion Image', erosion)
cv2.imshow('Dilation Image', dilation)
cv2.imshow('Closing Image', closing)
cv2.imshow('Opening Image', opening)
plt.show()
cv2.waitKey(0)
cv2.destroyAllWindows()
if __name__ == "__main__":
main()
|
[
"cv2.GaussianBlur",
"cv2.boundingRect",
"matplotlib.pyplot.show",
"cv2.filter2D",
"cv2.dilate",
"cv2.cvtColor",
"cv2.getStructuringElement",
"cv2.threshold",
"cv2.morphologyEx",
"cv2.waitKey",
"numpy.ones",
"cv2.destroyAllWindows",
"cv2.imread",
"numpy.array",
"cv2.rectangle",
"cv2.erode",
"cv2.imshow",
"cv2.findContours"
] |
[((158, 177), 'cv2.imread', 'cv2.imread', (['path', '(1)'], {}), '(path, 1)\n', (168, 177), False, 'import cv2\n'), ((186, 222), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2RGB'], {}), '(img, cv2.COLOR_BGR2RGB)\n', (198, 222), False, 'import cv2\n'), ((238, 297), 'numpy.array', 'np.array', (['([0, -1, 0], [-1, 5, -1], [0, -1, 0])', 'np.float32'], {}), '(([0, -1, 0], [-1, 5, -1], [0, -1, 0]), np.float32)\n', (246, 297), True, 'import numpy as np\n'), ((328, 358), 'cv2.filter2D', 'cv2.filter2D', (['img', '(-1)', 'filter1'], {}), '(img, -1, filter1)\n', (340, 358), False, 'import cv2\n'), ((390, 422), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['img', '(5, 5)', '(0)'], {}), '(img, (5, 5), 0)\n', (406, 422), False, 'import cv2\n'), ((431, 469), 'cv2.cvtColor', 'cv2.cvtColor', (['blur', 'cv2.COLOR_BGR2GRAY'], {}), '(blur, cv2.COLOR_BGR2GRAY)\n', (443, 469), False, 'import cv2\n'), ((487, 553), 'cv2.threshold', 'cv2.threshold', (['gray', '(170)', '(255)', '(cv2.THRESH_BINARY + cv2.THRESH_OTSU)'], {}), '(gray, 170, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n', (500, 553), False, 'import cv2\n'), ((777, 802), 'numpy.ones', 'np.ones', (['(3, 3)', 'np.uint8'], {}), '((3, 3), np.uint8)\n', (784, 802), True, 'import numpy as np\n'), ((859, 908), 'cv2.getStructuringElement', 'cv2.getStructuringElement', (['cv2.MORPH_RECT', '(5, 5)'], {}), '(cv2.MORPH_RECT, (5, 5))\n', (884, 908), False, 'import cv2\n'), ((961, 1013), 'cv2.getStructuringElement', 'cv2.getStructuringElement', (['cv2.MORPH_ELLIPSE', '(5, 5)'], {}), '(cv2.MORPH_ELLIPSE, (5, 5))\n', (986, 1013), False, 'import cv2\n'), ((1081, 1117), 'cv2.erode', 'cv2.erode', (['thresh', 'se1'], {'iterations': '(1)'}), '(thresh, se1, iterations=1)\n', (1090, 1117), False, 'import cv2\n'), ((1186, 1223), 'cv2.dilate', 'cv2.dilate', (['thresh', 'se2'], {'iterations': '(1)'}), '(thresh, se2, iterations=1)\n', (1196, 1223), False, 'import cv2\n'), ((1294, 1359), 'cv2.morphologyEx', 'cv2.morphologyEx', (['dilation', 'cv2.MORPH_CLOSE', 'kernel'], {'iterations': '(4)'}), '(dilation, cv2.MORPH_CLOSE, kernel, iterations=4)\n', (1310, 1359), False, 'import cv2\n'), ((1433, 1496), 'cv2.morphologyEx', 'cv2.morphologyEx', (['erosion', 'cv2.MORPH_OPEN', 'kernel'], {'iterations': '(1)'}), '(erosion, cv2.MORPH_OPEN, kernel, iterations=1)\n', (1449, 1496), False, 'import cv2\n'), ((1705, 1771), 'cv2.findContours', 'cv2.findContours', (['dilation', 'cv2.RETR_TREE', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(dilation, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n', (1721, 1771), False, 'import cv2\n'), ((1850, 1869), 'cv2.imread', 'cv2.imread', (['path', '(1)'], {}), '(path, 1)\n', (1860, 1869), False, 'import cv2\n'), ((1886, 1927), 'cv2.cvtColor', 'cv2.cvtColor', (['original', 'cv2.COLOR_BGR2RGB'], {}), '(original, cv2.COLOR_BGR2RGB)\n', (1898, 1927), False, 'import cv2\n'), ((2989, 3021), 'cv2.imshow', 'cv2.imshow', (['"""Orignal Image"""', 'img'], {}), "('Orignal Image', img)\n", (2999, 3021), False, 'import cv2\n'), ((3065, 3103), 'cv2.imshow', 'cv2.imshow', (['"""Dilation Image"""', 'dilation'], {}), "('Dilation Image', dilation)\n", (3075, 3103), False, 'import cv2\n'), ((3107, 3143), 'cv2.imshow', 'cv2.imshow', (['"""Closing Image"""', 'closing'], {}), "('Closing Image', closing)\n", (3117, 3143), False, 'import cv2\n'), ((3147, 3183), 'cv2.imshow', 'cv2.imshow', (['"""Opening Image"""', 'opening'], {}), "('Opening Image', opening)\n", (3157, 3183), False, 'import cv2\n'), ((3191, 3201), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3199, 3201), True, 'import matplotlib.pyplot as plt\n'), ((3205, 3219), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (3216, 3219), False, 'import cv2\n'), ((3222, 3245), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3243, 3245), False, 'import cv2\n'), ((2116, 2135), 'cv2.boundingRect', 'cv2.boundingRect', (['i'], {}), '(i)\n', (2132, 2135), False, 'import cv2\n'), ((2139, 2197), 'cv2.rectangle', 'cv2.rectangle', (['img', '(x, y)', '(x + w, y + h)', '(0, 255, 0)', '(2)'], {}), '(img, (x, y), (x + w, y + h), (0, 255, 0), 2)\n', (2152, 2197), False, 'import cv2\n')]
|
from time import sleep
from appium import webdriver
from appium.webdriver.common.mobileby import MobileBy
phone_info = {
"platformName": "android",
"platformVersion": "8.1",
"deviceName": "S4F6R19C18016391",
"appPackage": "com.tencent.wework",
"appActivity": ".launch.LaunchSplashActivity t9",
"noReset": "true",
# "dontStopAppOnReset": "true",
"skipDeviceInitialization": "true",
"resetKeyBoard": "true",
"waitFoeIdleTimeout": 0
}
class Test_wework:
def setup(self):
self.driver = webdriver.Remote("http://localhost:4723/wd/hub", phone_info)
self.driver.implicitly_wait(10)
def teardown(self):
sleep(5)
self.driver.quit()
def test_wework_Clockin(self):
self.driver.find_element(MobileBy.XPATH,
"//*[@resource-id='com.tencent.wework:id/en5' and @text='工作台']").click()
self.driver.find_element_by_android_uiautomator('new UiScrollable(new UiSelector().scrollable(true).instance('
'0)).scrollIntoView(new UiSelector().text("打卡").instance('
'0));').click()
self.driver.find_element(MobileBy.XPATH, '//*[@text="外出打卡"]').click()
self.driver.find_element(MobileBy.XPATH, '//*[contains(@text, "次外出")]').click()
ele = self.driver.find_element(MobileBy.ID, 'com.tencent.wework:id/pu').text
print(ele)
assert ele == "外出打卡成功"
def test_wework_jointeam(self):
add_name = "袁不婷"
add_num = "1008611"
add_another_name = "沙雕"
add_iphone_num = "13160018191"
self.driver.find_element(MobileBy.XPATH, '//*[@text="通讯录"]').click()
self.driver.find_element(MobileBy.ANDROID_UIAUTOMATOR, 'new UiScrollable(new UiSelector().scrollable('
'true).instance(0)).scrollIntoView(new UiSelector('
').text("添加成员").instance(0));').click()
self.driver.find_element(MobileBy.XPATH, '//*[@text="手动输入添加"]').click()
# self.driver.find_element(MobileBy.XPATH, '//*[@text="完整输入"]').click()
name = self.driver.find_element(MobileBy.XPATH, '//*[contains(@text,"姓名")]/../android.widget.EditText')
name.send_keys(add_name)
num = self.driver.find_element(MobileBy.XPATH, '//*[contains(@text,"帐号")]/../android.widget.EditText')
num.send_keys(add_num)
another_name = self.driver.find_element(MobileBy.XPATH, '//*[contains(@text,"别名")]/../android.widget.EditText')
another_name.send_keys(add_another_name)
iphone_num = self.driver.find_element(MobileBy.XPATH, '//*[contains(@text,"手机号")]')
iphone_num.send_keys(add_iphone_num)
self.driver.find_element(MobileBy.ANDROID_UIAUTOMATOR, 'new UiScrollable(new UiSelector().scrollable('
'true).instance(0)).scrollIntoView(new UiSelector('
').text("保存").instance(0));').click()
ele = self.driver.find_element(MobileBy.XPATH, '//*[@class="android.widget.Toast"]').text
assert "添加成功" == ele
self.driver.get_screenshot_as_file('路径.png')
self.driver.start_recording_screen()
self.driver.stop_recording_screen()
|
[
"time.sleep",
"appium.webdriver.Remote"
] |
[((537, 597), 'appium.webdriver.Remote', 'webdriver.Remote', (['"""http://localhost:4723/wd/hub"""', 'phone_info'], {}), "('http://localhost:4723/wd/hub', phone_info)\n", (553, 597), False, 'from appium import webdriver\n'), ((671, 679), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (676, 679), False, 'from time import sleep\n')]
|
import logging
import socket
import threading
import datetime
import time
import math
from . import BPLMonitor, BPLCurtain, DATA_DOMAIN
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the sensor platform."""
# We only want this platform to be set up via discovery.
if discovery_info is None:
return
sensors = hass.data[DATA_DOMAIN]['sensors']
sensors_to_add = []
    for s in sensors:
        if isinstance(s, BPLCurtain):
            sensors_to_add.append(s)
add_entities(sensors_to_add)
|
[
"logging.getLogger"
] |
[((199, 226), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (216, 226), False, 'import logging\n')]
|
# Python3
from solution1 import lineEncoding as f
qa = [
('aabbbc', '2a3bc'),
('abbcabb', 'a2bca2b'),
('abcd', 'abcd'),
('zzzz', '4z'),
('wwwwwwwawwwwwww', '7wa7w'),
('ccccccccccccccc', '15c'),
('qwertyuioplkjhg', 'qwertyuioplkjhg'),
('ssiiggkooo', '2s2i2gk3o'),
('adfaaa', 'adf3a'),
('bbjaadlkjdl', '2bj2adlkjdl')
]
for *q, a in qa:
for i, e in enumerate(q):
print('input{0}: {1}'.format(i + 1, e))
ans = f(*q)
if ans != a:
print(' [failed]')
print(' output:', ans)
print(' expected:', a)
else:
print(' [ok]')
print(' output:', ans)
print()
|
[
"solution1.lineEncoding"
] |
[((465, 470), 'solution1.lineEncoding', 'f', (['*q'], {}), '(*q)\n', (466, 470), True, 'from solution1 import lineEncoding as f\n')]
|
from arrays.remove_element import remove_element
def test_remove_element():
arr = [3, 2, 2, 3]
length = remove_element(arr, 3)
assert length == 2
assert arr == [2, 2, 2, 3]
arr = [1]
length = remove_element(arr, 1)
assert length == 0
assert arr == [1]
arr = [2, 2, 3, 3]
length = remove_element(arr, 3)
assert length == 2
assert arr == [2, 2, 3, 3]
|
[
"arrays.remove_element.remove_element"
] |
[((114, 136), 'arrays.remove_element.remove_element', 'remove_element', (['arr', '(3)'], {}), '(arr, 3)\n', (128, 136), False, 'from arrays.remove_element import remove_element\n'), ((219, 241), 'arrays.remove_element.remove_element', 'remove_element', (['arr', '(1)'], {}), '(arr, 1)\n', (233, 241), False, 'from arrays.remove_element import remove_element\n'), ((324, 346), 'arrays.remove_element.remove_element', 'remove_element', (['arr', '(3)'], {}), '(arr, 3)\n', (338, 346), False, 'from arrays.remove_element import remove_element\n')]
|
"""Common methods for SleepIQ."""
from __future__ import annotations
from collections.abc import Generator
from unittest.mock import MagicMock, create_autospec, patch
from asyncsleepiq import (
SleepIQActuator,
SleepIQBed,
SleepIQFoundation,
SleepIQLight,
SleepIQPreset,
SleepIQSleeper,
)
import pytest
from homeassistant.components.sleepiq import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry
BED_ID = "123456"
BED_NAME = "Test Bed"
BED_NAME_LOWER = BED_NAME.lower().replace(" ", "_")
SLEEPER_L_ID = "98765"
SLEEPER_R_ID = "43219"
SLEEPER_L_NAME = "SleeperL"
SLEEPER_R_NAME = "Sleeper R"
SLEEPER_L_NAME_LOWER = SLEEPER_L_NAME.lower().replace(" ", "_")
SLEEPER_R_NAME_LOWER = SLEEPER_R_NAME.lower().replace(" ", "_")
PRESET_L_STATE = "Watch TV"
PRESET_R_STATE = "Flat"
SLEEPIQ_CONFIG = {
CONF_USERNAME: "<EMAIL>",
CONF_PASSWORD: "password",
}
@pytest.fixture
def mock_bed() -> MagicMock:
"""Mock a SleepIQBed object with sleepers and lights."""
bed = create_autospec(SleepIQBed)
bed.name = BED_NAME
bed.id = BED_ID
bed.mac_addr = "12:34:56:78:AB:CD"
bed.model = "C10"
bed.paused = False
sleeper_l = create_autospec(SleepIQSleeper)
sleeper_r = create_autospec(SleepIQSleeper)
bed.sleepers = [sleeper_l, sleeper_r]
sleeper_l.side = "L"
sleeper_l.name = SLEEPER_L_NAME
sleeper_l.in_bed = True
sleeper_l.sleep_number = 40
sleeper_l.pressure = 1000
sleeper_l.sleeper_id = SLEEPER_L_ID
sleeper_r.side = "R"
sleeper_r.name = SLEEPER_R_NAME
sleeper_r.in_bed = False
sleeper_r.sleep_number = 80
sleeper_r.pressure = 1400
sleeper_r.sleeper_id = SLEEPER_R_ID
bed.foundation = create_autospec(SleepIQFoundation)
light_1 = create_autospec(SleepIQLight)
light_1.outlet_id = 1
light_1.is_on = False
light_2 = create_autospec(SleepIQLight)
light_2.outlet_id = 2
light_2.is_on = False
bed.foundation.lights = [light_1, light_2]
return bed
@pytest.fixture
def mock_asyncsleepiq_single_foundation(
mock_bed: MagicMock,
) -> Generator[MagicMock, None, None]:
"""Mock an AsyncSleepIQ object with a single foundation."""
with patch("homeassistant.components.sleepiq.AsyncSleepIQ", autospec=True) as mock:
client = mock.return_value
client.beds = {BED_ID: mock_bed}
actuator_h = create_autospec(SleepIQActuator)
actuator_f = create_autospec(SleepIQActuator)
mock_bed.foundation.actuators = [actuator_h, actuator_f]
actuator_h.side = "R"
actuator_h.side_full = "Right"
actuator_h.actuator = "H"
actuator_h.actuator_full = "Head"
actuator_h.position = 60
actuator_f.side = None
actuator_f.actuator = "F"
actuator_f.actuator_full = "Foot"
actuator_f.position = 10
preset = create_autospec(SleepIQPreset)
mock_bed.foundation.presets = [preset]
preset.preset = PRESET_R_STATE
preset.side = None
preset.side_full = None
yield client
@pytest.fixture
def mock_asyncsleepiq(mock_bed: MagicMock) -> Generator[MagicMock, None, None]:
"""Mock an AsyncSleepIQ object with a split foundation."""
with patch("homeassistant.components.sleepiq.AsyncSleepIQ", autospec=True) as mock:
client = mock.return_value
client.beds = {BED_ID: mock_bed}
actuator_h_r = create_autospec(SleepIQActuator)
actuator_h_l = create_autospec(SleepIQActuator)
actuator_f = create_autospec(SleepIQActuator)
mock_bed.foundation.actuators = [actuator_h_r, actuator_h_l, actuator_f]
actuator_h_r.side = "R"
actuator_h_r.side_full = "Right"
actuator_h_r.actuator = "H"
actuator_h_r.actuator_full = "Head"
actuator_h_r.position = 60
actuator_h_l.side = "L"
actuator_h_l.side_full = "Left"
actuator_h_l.actuator = "H"
actuator_h_l.actuator_full = "Head"
actuator_h_l.position = 50
actuator_f.side = None
actuator_f.actuator = "F"
actuator_f.actuator_full = "Foot"
actuator_f.position = 10
preset_l = create_autospec(SleepIQPreset)
preset_r = create_autospec(SleepIQPreset)
mock_bed.foundation.presets = [preset_l, preset_r]
preset_l.preset = PRESET_L_STATE
preset_l.side = "L"
preset_l.side_full = "Left"
preset_r.preset = PRESET_R_STATE
preset_r.side = "R"
preset_r.side_full = "Right"
yield client
async def setup_platform(
hass: HomeAssistant, platform: str | None = None
) -> MockConfigEntry:
"""Set up the SleepIQ platform."""
mock_entry = MockConfigEntry(
domain=DOMAIN,
data=SLEEPIQ_CONFIG,
unique_id=SLEEPIQ_CONFIG[CONF_USERNAME].lower(),
)
mock_entry.add_to_hass(hass)
if platform:
with patch("homeassistant.components.sleepiq.PLATFORMS", [platform]):
assert await async_setup_component(hass, DOMAIN, {})
await hass.async_block_till_done()
return mock_entry
|
[
"unittest.mock.patch",
"unittest.mock.create_autospec",
"homeassistant.setup.async_setup_component"
] |
[((1161, 1188), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQBed'], {}), '(SleepIQBed)\n', (1176, 1188), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((1333, 1364), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQSleeper'], {}), '(SleepIQSleeper)\n', (1348, 1364), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((1381, 1412), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQSleeper'], {}), '(SleepIQSleeper)\n', (1396, 1412), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((1862, 1896), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQFoundation'], {}), '(SleepIQFoundation)\n', (1877, 1896), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((1911, 1940), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQLight'], {}), '(SleepIQLight)\n', (1926, 1940), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((2007, 2036), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQLight'], {}), '(SleepIQLight)\n', (2022, 2036), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((2348, 2417), 'unittest.mock.patch', 'patch', (['"""homeassistant.components.sleepiq.AsyncSleepIQ"""'], {'autospec': '(True)'}), "('homeassistant.components.sleepiq.AsyncSleepIQ', autospec=True)\n", (2353, 2417), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((2525, 2557), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQActuator'], {}), '(SleepIQActuator)\n', (2540, 2557), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((2579, 2611), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQActuator'], {}), '(SleepIQActuator)\n', (2594, 2611), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((3015, 3045), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQPreset'], {}), '(SleepIQPreset)\n', (3030, 3045), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((3383, 3452), 'unittest.mock.patch', 'patch', (['"""homeassistant.components.sleepiq.AsyncSleepIQ"""'], {'autospec': '(True)'}), "('homeassistant.components.sleepiq.AsyncSleepIQ', autospec=True)\n", (3388, 3452), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((3562, 3594), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQActuator'], {}), '(SleepIQActuator)\n', (3577, 3594), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((3618, 3650), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQActuator'], {}), '(SleepIQActuator)\n', (3633, 3650), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((3672, 3704), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQActuator'], {}), '(SleepIQActuator)\n', (3687, 3704), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((4324, 4354), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQPreset'], {}), '(SleepIQPreset)\n', (4339, 4354), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((4374, 4404), 'unittest.mock.create_autospec', 'create_autospec', (['SleepIQPreset'], {}), '(SleepIQPreset)\n', (4389, 4404), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((5054, 5117), 'unittest.mock.patch', 'patch', (['"""homeassistant.components.sleepiq.PLATFORMS"""', '[platform]'], {}), "('homeassistant.components.sleepiq.PLATFORMS', [platform])\n", (5059, 5117), False, 'from unittest.mock import MagicMock, create_autospec, patch\n'), ((5144, 5183), 'homeassistant.setup.async_setup_component', 'async_setup_component', (['hass', 'DOMAIN', '{}'], {}), '(hass, DOMAIN, {})\n', (5165, 5183), False, 'from homeassistant.setup import async_setup_component\n')]
|
import streamlit as st
from dataclasses import dataclass, field
from typing import Any, List
import datetime as datetime
import pandas as pd
import hashlib
@dataclass
class Title:
sender: str
receiver: str
title: str
@dataclass
class Ownership:
record: Title
creator_id: int
prev_hash: str = "0"
    timestamp: str = field(
        default_factory=lambda: datetime.datetime.utcnow().strftime("%H:%M:%S")
    )  # default_factory so each block stamps its own creation time, not the class-definition time
    nonce: int = 0
def hash_block(self):
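        # Fold every field (record, creator, timestamp, prev_hash, nonce) into the digest so any change to the block alters its hash.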
sha = hashlib.sha256()
record = str(self.record).encode()
sha.update(record)
creator_id = str(self.creator_id).encode()
sha.update(creator_id)
timestamp = str(self.timestamp).encode()
sha.update(timestamp)
prev_hash = str(self.prev_hash).encode()
sha.update(prev_hash)
nonce = str(self.nonce).encode()
sha.update(nonce)
return sha.hexdigest()
@dataclass
class TransferOwnership:
chain: List[Ownership]
difficulty: int = 10
def proof_of_work(self, ownership):
        calculated_hash = ownership.hash_block()
        num_of_zeros = "0" * self.difficulty  # target prefix, e.g. "00" for difficulty 2
        while not calculated_hash.startswith(num_of_zeros):
            ownership.nonce += 1
            calculated_hash = ownership.hash_block()
        print("Title Transferred", calculated_hash)
        return ownership
def add_block(self, newOwner):
ownership = self.proof_of_work(newOwner)
self.chain += [ownership]
def is_valid(self):
        block_hash = self.chain[0].hash_block()
for ownership in self.chain[1:]:
if block_hash != ownership.prev_hash:
print("Title cannot be Transferred!")
return False
block_hash = ownership.hash_block()
print("Title has been Transferred")
return True
@st.cache(allow_output_mutation=True)
def setup():
print("Initializing Title Information")
return TransferOwnership([Ownership("Title", 0)])
st.markdown("# Transfer Ownership Title")
st.markdown("## Input address of who you would like to Transfer the Ownership to ")
titleTransfer = setup()
input_sender_id = st.text_input("Current Owner ID")
input_receiver_id = st.text_input("New Owner ID")
input_title = st.text("<NAME>")
if st.button("Transfer Title"):
prev_block = titleTransfer.chain[-1]
prev_block_hash = prev_block.hash_block()
    new_owner = Ownership(
        record=Title(
sender=input_sender_id, receiver=input_receiver_id, title=input_title
),
creator_id=42,
prev_hash=prev_block_hash,
)
titleTransfer.add_block(new_owner)
st.balloons()
st.markdown("## Transfer Pet Ownership")
Transfer_df = pd.DataFrame(titleTransfer.chain).astype(str)
st.write(Transfer_df)
difficulty = st.sidebar.slider("Block Difficulty", 1, 5, 2)
titleTransfer.difficulty = difficulty
st.sidbar.write("# Owner History")
selected_block = st.sidebar.selectbox(
"Which Owner would you like to see?", titleTransfer.chain
)
st.sidebar.write(selected_block)
if st.button(" Transfer Complete"):
st.write(titleTransfer.is_valid())
# Next Steps:
"""Ganache Interface,
Drop-down menus
Token creation in Solidity
Insert token into fields
Picture URLs for dog pictures
Add independent functions into the token then merge later
Update."""
## Pet Token Address
# Drop Down menu for Pet Token Address
#
|
[
"streamlit.balloons",
"pandas.DataFrame",
"streamlit.markdown",
"streamlit.text_input",
"streamlit.cache",
"streamlit.sidbar.slider",
"streamlit.write",
"hashlib.sha256",
"streamlit.text",
"streamlit.button",
"streamlit.sidebar.selectbox",
"datetime.datetime.utcnow",
"streamlit.sidbar.write"
] |
[((1809, 1845), 'streamlit.cache', 'st.cache', ([], {'allow_output_mutation': '(True)'}), '(allow_output_mutation=True)\n', (1817, 1845), True, 'import streamlit as st\n'), ((1959, 2000), 'streamlit.markdown', 'st.markdown', (['"""# Transfer Ownership Title"""'], {}), "('# Transfer Ownership Title')\n", (1970, 2000), True, 'import streamlit as st\n'), ((2001, 2089), 'streamlit.markdown', 'st.markdown', (['"""## Input address of who you would like to Transfer the Ownership to """'], {}), "(\n '## Input address of who you would like to Transfer the Ownership to ')\n", (2012, 2089), True, 'import streamlit as st\n'), ((2130, 2163), 'streamlit.text_input', 'st.text_input', (['"""Current Owner ID"""'], {}), "('Current Owner ID')\n", (2143, 2163), True, 'import streamlit as st\n'), ((2185, 2214), 'streamlit.text_input', 'st.text_input', (['"""New Owner ID"""'], {}), "('New Owner ID')\n", (2198, 2214), True, 'import streamlit as st\n'), ((2230, 2247), 'streamlit.text', 'st.text', (['"""<NAME>"""'], {}), "('<NAME>')\n", (2237, 2247), True, 'import streamlit as st\n'), ((2252, 2279), 'streamlit.button', 'st.button', (['"""Transfer Title"""'], {}), "('Transfer Title')\n", (2261, 2279), True, 'import streamlit as st\n'), ((2615, 2628), 'streamlit.balloons', 'st.balloons', ([], {}), '()\n', (2626, 2628), True, 'import streamlit as st\n'), ((2631, 2671), 'streamlit.markdown', 'st.markdown', (['"""## Transfer Pet Ownership"""'], {}), "('## Transfer Pet Ownership')\n", (2642, 2671), True, 'import streamlit as st\n'), ((2733, 2754), 'streamlit.write', 'st.write', (['Transfer_df'], {}), '(Transfer_df)\n', (2741, 2754), True, 'import streamlit as st\n'), ((2769, 2814), 'streamlit.sidbar.slider', 'st.sidbar.slider', (['"""Block Difficulty"""', '(1)', '(5)', '(2)'], {}), "('Block Difficulty', 1, 5, 2)\n", (2785, 2814), True, 'import streamlit as st\n'), ((2854, 2888), 'streamlit.sidbar.write', 'st.sidbar.write', (['"""# Owner History"""'], {}), "('# Owner History')\n", (2869, 2888), True, 'import streamlit as st\n'), ((2906, 2985), 'streamlit.sidebar.selectbox', 'st.sidebar.selectbox', (['"""Which Owner would you like to see?"""', 'titleTransfer.chain'], {}), "('Which Owner would you like to see?', titleTransfer.chain)\n", (2926, 2985), True, 'import streamlit as st\n'), ((3031, 3062), 'streamlit.button', 'st.button', (['""" Transfer Complete"""'], {}), "(' Transfer Complete')\n", (3040, 3062), True, 'import streamlit as st\n'), ((446, 462), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (460, 462), False, 'import hashlib\n'), ((2687, 2720), 'pandas.DataFrame', 'pd.DataFrame', (['titleTransfer.chain'], {}), '(titleTransfer.chain)\n', (2699, 2720), True, 'import pandas as pd\n'), ((338, 364), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (362, 364), True, 'import datetime as datetime\n')]
|
from homeassistant.util import dt
def orbit_time_to_local_time(timestamp: str):
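    # Parse Orbit's timestamp string and convert it to Home Assistant's local timezone.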
if timestamp is not None:
return dt.as_local(dt.parse_datetime(timestamp))
return None
def anonymize(device):
device["address"] = "REDACTED"
device["full_location"] = "REDACTED"
device["location"] = "REDACTED"
return device
|
[
"homeassistant.util.dt.parse_datetime"
] |
[((139, 167), 'homeassistant.util.dt.parse_datetime', 'dt.parse_datetime', (['timestamp'], {}), '(timestamp)\n', (156, 167), False, 'from homeassistant.util import dt\n')]
|
"""
Example code to push a dataset into the data node. A complete
dataset includes "Dataset", "Fhir Store", "Annotation Store",
"Annotation", "Patient", "Note"
To run this code, here are the requirements:
- Install the nlpsandbox-client (`pip install nlpsandbox-client`)
- Start the Data Node locally - Follow instructions here:
https://github.com/nlpsandbox/data-node
- python push_dataset.py
"""
import json
import nlpsandbox
import nlpsandbox.apis
import nlpsandbox.models
from nlpsandbox.rest import ApiException
import nlpsandboxclient.utils
# Defining the host is optional and defaults to http://example.com/api/v1
# See configuration.py for a list of all supported configuration parameters.
host = "http://localhost:8080/api/v1"
configuration = nlpsandbox.Configuration(host=host)
dataset_id = 'test-dataset'
fhir_store_id = 'evaluation'
annotation_store_id = 'goldstandard'
json_filename = "example-patient-bundles.json"
with nlpsandbox.ApiClient(configuration) as api_client:
dataset_api = nlpsandbox.apis.DatasetApi(api_client)
fhir_store_api = nlpsandbox.apis.FhirStoreApi(api_client)
annotation_store_api = nlpsandbox.apis.AnnotationStoreApi(api_client)
patient_api = nlpsandbox.apis.PatientApi(api_client)
note_api = nlpsandbox.apis.NoteApi(api_client)
annotation_api = nlpsandbox.apis.AnnotationApi(api_client)
    # Delete the example dataset if it already exists
try:
# get the dataset
dataset = dataset_api.get_dataset(dataset_id)
# delete the dataset
print(f"Deleting exist dataset: {dataset_id}")
dataset_api.delete_dataset(dataset_id)
except ApiException:
pass
# create dataset if not found
print(f"Creating dataset: {dataset_id}")
dataset = dataset_api.create_dataset(
dataset_id,
body={}
)
print(f"Creating Fhir Store: {fhir_store_id}")
fhir_store = fhir_store_api.create_fhir_store(
dataset_id, fhir_store_id,
body={}
)
print(f"Creating Annotation Store: {annotation_store_id}")
annotation_store = annotation_store_api.create_annotation_store(
dataset_id, annotation_store_id,
body={}
)
with open(json_filename) as f:
data = json.load(f)
patient_bundles = data['patient_bundles']
for patient_bundle in patient_bundles:
# Create or get a FHIR Patient
patient = nlpsandboxclient.utils.change_keys(
patient_bundle['patient'],
nlpsandboxclient.utils.camelcase_to_snakecase
)
patient_id = patient.pop("identifier")
print(f"Creating patient: {patient_id}")
patient_api.create_patient(
dataset_id, fhir_store_id, patient_id,
patient_create_request=patient
)
# Create the Note and Annotation objects linked to the patient
note_bundles = patient_bundle['note_bundles']
for note_bundle in note_bundles:
# Determine note Id since noteId isn't part of the 'note'
annotation = note_bundle['annotation']
note_ids = set()
# Loop through annotations to get noteId
for key, value in annotation.items():
if key.startswith("text"):
for annot in value:
note_ids.add(annot['noteId'])
assert len(note_ids) == 1, "Must only have one noteId"
note_id = list(note_ids)[0]
# Create Note
note = nlpsandboxclient.utils.change_keys(
note_bundle['note'],
nlpsandboxclient.utils.camelcase_to_snakecase
)
note['patient_id'] = patient_id
print(f"Creating note ({note_id}) for patient ({patient_id})")
note_api.create_note(
dataset_id, fhir_store_id, note_id,
note_create_request=note
)
# Create annotation
annotation['annotationSource']['resourceSource']['name'] = \
"{fhir_store_name}/fhir/Note/{note_id}".format(
fhir_store_name=fhir_store.name,
note_id=note_id
)
new_annotation = nlpsandboxclient.utils.change_keys(
annotation,
nlpsandboxclient.utils.camelcase_to_snakecase
)
print(f"Creating annotation for note: {note_id}")
annotation = annotation_api.create_annotation(
dataset_id, annotation_store_id,
annotation_id=note_id,
annotation_create_request=new_annotation
)
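# Hedged sketch of the expected shape of "example-patient-bundles.json",
# inferred only from the accesses above; keys marked with * are illustrative
# names, everything else is implied by the code:
#
# {
#   "patient_bundles": [{
#     "patient": {"identifier": "patient-1*", ...},
#     "note_bundles": [{
#       "note": {...},
#       "annotation": {
#         "annotationSource": {"resourceSource": {"name": "..."}},
#         "textDateAnnotations*": [{"noteId": "note-1*", ...}]
#       }
#     }]
#   }]
# }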
|
[
"nlpsandbox.apis.PatientApi",
"json.load",
"nlpsandbox.ApiClient",
"nlpsandbox.apis.DatasetApi",
"nlpsandbox.apis.FhirStoreApi",
"nlpsandbox.apis.NoteApi",
"nlpsandbox.Configuration",
"nlpsandbox.apis.AnnotationApi",
"nlpsandbox.apis.AnnotationStoreApi"
] |
[((758, 793), 'nlpsandbox.Configuration', 'nlpsandbox.Configuration', ([], {'host': 'host'}), '(host=host)\n', (782, 793), False, 'import nlpsandbox\n'), ((943, 978), 'nlpsandbox.ApiClient', 'nlpsandbox.ApiClient', (['configuration'], {}), '(configuration)\n', (963, 978), False, 'import nlpsandbox\n'), ((1012, 1050), 'nlpsandbox.apis.DatasetApi', 'nlpsandbox.apis.DatasetApi', (['api_client'], {}), '(api_client)\n', (1038, 1050), False, 'import nlpsandbox\n'), ((1072, 1112), 'nlpsandbox.apis.FhirStoreApi', 'nlpsandbox.apis.FhirStoreApi', (['api_client'], {}), '(api_client)\n', (1100, 1112), False, 'import nlpsandbox\n'), ((1140, 1186), 'nlpsandbox.apis.AnnotationStoreApi', 'nlpsandbox.apis.AnnotationStoreApi', (['api_client'], {}), '(api_client)\n', (1174, 1186), False, 'import nlpsandbox\n'), ((1205, 1243), 'nlpsandbox.apis.PatientApi', 'nlpsandbox.apis.PatientApi', (['api_client'], {}), '(api_client)\n', (1231, 1243), False, 'import nlpsandbox\n'), ((1259, 1294), 'nlpsandbox.apis.NoteApi', 'nlpsandbox.apis.NoteApi', (['api_client'], {}), '(api_client)\n', (1282, 1294), False, 'import nlpsandbox\n'), ((1316, 1357), 'nlpsandbox.apis.AnnotationApi', 'nlpsandbox.apis.AnnotationApi', (['api_client'], {}), '(api_client)\n', (1345, 1357), False, 'import nlpsandbox\n'), ((2224, 2236), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2233, 2236), False, 'import json\n')]
|
from org.mowl.CatParser import CatParser
import sys
from mowl.graph.graph import GraphGenModel
class CatOnt(GraphGenModel):
def __init__(self, dataset, subclass = True, relations = False):
super().__init__(dataset)
self.parser = CatParser(dataset.ontology)
def parseOWL(self):
edges = self.parser.parse()
return edges
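# Minimal usage sketch, assuming `ds` is a mowl dataset object exposing an
# `.ontology` attribute (how `ds` is loaded is outside this snippet):
#
#     projector = CatOnt(ds)
#     edges = projector.parseOWL()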
|
[
"org.mowl.CatParser.CatParser"
] |
[((254, 281), 'org.mowl.CatParser.CatParser', 'CatParser', (['dataset.ontology'], {}), '(dataset.ontology)\n', (263, 281), False, 'from org.mowl.CatParser import CatParser\n')]
|
# Copyright 2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command-line script to train expert policies.
Picks best seed of train_rl for each (environment, reward) pair specified.
"""
import math
import os
from typing import Any, Mapping, Optional
from imitation.util import util
import numpy as np
import ray
import sacred
import tabulate
from evaluating_rewards import serialize
from evaluating_rewards.experiments import env_rewards
from evaluating_rewards.scripts import rl_common, script_utils
experts_ex = sacred.Experiment("train_experts")
rl_common.make_config(experts_ex)
@experts_ex.config
def default_config():
"""Default configuration."""
log_root = serialize.get_output_dir() # where results are written to
configs = {}
run_tag = "default"
_ = locals()
del _
@experts_ex.config
def default_env_rewards(configs):
"""Set default env-reward pair in `configs` entry if it is empty.
This is needed since if we were to define it in `default_config` it would be impossible
to delete it given how Sacred dictionary merging works.
"""
if not configs:
configs = { # noqa: F401
"evaluating_rewards/PointMassLine-v0": {
"evaluating_rewards/PointMassGroundTruth-v0": {"dummy": {}}
},
}
@experts_ex.config
def logging_config(log_root, run_tag):
"""Logging configuration: timestamp plus unique UUID."""
log_dir = os.path.join(log_root, "train_experts", run_tag, util.make_unique_timestamp())
_ = locals()
del _
def _make_ground_truth_configs():
"""Ground truth configs.
Separate function to avoid polluting Sacred ConfigScope with local variables."""
configs = {}
for env, gt_reward in env_rewards.GROUND_TRUTH_REWARDS_BY_ENV.items():
cfg = rl_common.CONFIG_BY_ENV.get(env, {})
configs.setdefault(env, {}).setdefault(str(gt_reward), {})["dummy"] = cfg
return configs
@experts_ex.named_config
def ground_truth():
"""Train RL expert on all configured environments with the ground-truth reward."""
configs = _make_ground_truth_configs()
run_tag = "ground_truth"
_ = locals()
del _
@experts_ex.named_config
def point_maze_wrong_target():
"""Train RL policies on a "wrong" reward in PointMaze to get a bad visitation distribution."""
configs = {
env: {
"evaluating_rewards/PointMazeWrongTargetWithCtrl-v0": {
"dummy": dict(rl_common.CONFIG_BY_ENV[env])
}
}
for env in ("imitation/PointMazeLeftVel-v0", "imitation/PointMazeRightVel-v0")
}
run_tag = "point_maze_wrong_target"
_ = locals()
del _
@experts_ex.named_config
def test():
"""Unit test config."""
locals().update(**rl_common.FAST_CONFIG)
configs = {
"evaluating_rewards/PointMassLine-v0": {
"evaluating_rewards/PointMassGroundTruth-v0": {"dummy": {}},
}
}
run_tag = "test"
_ = locals()
del _
def _filter_key(k: str) -> Optional[str]:
"""Returns None if key k should be omitted; otherwise returns the (possibly modified) key."""
if k.startswith("return_"):
return None
elif k.endswith("_max") or k.endswith("_min"):
return None
else:
k = k.replace("monitor_return", "mr")
k = k.replace("wrapped_return", "wr")
return k
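# Illustrative input/output pairs for _filter_key, following the rules above:
#   "return_mean"         -> None (dropped)
#   "monitor_return_max"  -> None (dropped)
#   "monitor_return_mean" -> "mr_mean"
#   "wrapped_return_mean" -> "wr_mean"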
def tabulate_stats(stats: rl_common.Stats) -> str:
"""Pretty-prints the statistics in `stats` in a table."""
res = []
for (env_name, (reward_type, reward_path)), vs in stats.items():
for seed, (x, _log_dir) in enumerate(vs):
row = {
"env_name": env_name,
"reward_type": reward_type,
"reward_path": reward_path,
"seed": seed,
}
row.update(x)
filtered_row = {}
for k, v in row.items():
if k.endswith("_std"):
k = k[:-4] + "_se"
v = v / math.sqrt(row["n_traj"])
new_k = _filter_key(k)
if new_k is not None:
filtered_row[new_k] = v
res.append(filtered_row)
return tabulate.tabulate(res, headers="keys")
def select_best(stats: rl_common.Stats, log_dir: str) -> None:
"""Pick the best seed for each environment-reward pair in `stats`.
Concretely, chooses the seed with highest mean return, and:
- Adds a symlink `best` in the same directory as the seeds;
- Adds a key "best" that is `True` for the winning seed and `False` otherwise.
Note this modifies `stats` in-place.
For experiments where `reward_type` is not `None` (i.e. we are using a wrapped reward),
uses `wrapped_return_mean` for selection. Otherwise, uses `monitor_return_mean` (the
environment ground-truth return).
Args:
stats: The statistics to select the best seed from. Note this is modified in-place.
log_dir: The log directory for this experiment.
"""
for key, single_stats in stats.items():
env_name, (reward_type, reward_path) = key
return_key = "wrapped_return_mean" if reward_type else "monitor_return_mean"
threshold = env_rewards.THRESHOLDS.get(key, -np.inf)
returns = [x[return_key] for x, _log in single_stats]
best_seed = np.argmax(returns)
base_dir = os.path.join(
log_dir,
script_utils.sanitize_path(env_name),
script_utils.sanitize_path(reward_type),
script_utils.sanitize_path(reward_path),
)
# make symlink relative so it'll work even if directory structure is copied/moved
os.symlink(str(best_seed), os.path.join(base_dir, "best"))
for v, _log in single_stats:
v["pass"] = v[return_key] > threshold
v["best"] = False
best_v, _best_log = single_stats[best_seed]
best_v["best"] = True
if not best_v["pass"]:
print(
f"WARNING: ({env_name}, {reward_type}, {reward_path}) did not meet threshold: "
f"{best_v[return_key]} < {threshold}"
)
@experts_ex.main
def train_experts(
ray_kwargs: Mapping[str, Any],
num_cpus_fudge_factor: float,
global_configs: Mapping[str, Any],
configs: Mapping[str, Mapping[str, Mapping[str, Any]]],
log_dir: str,
) -> rl_common.Stats:
"""Entry-point into script to train expert policies specified by config.
Args:
ray_kwargs: arguments passed to `ray.init`.
num_cpus_fudge_factor: factor by which to scale `num_vec` to compute CPU requirements.
global_configs: configuration to apply to all environment-reward pairs.
configs: configuration for each environment-reward pair.
log_dir: the root directory to log experiments to.
Returns:
Statistics `stats` for all policies, where
`stats[(env_name, (reward_type, reward_path))][i]`
are the statistics for seed `i` of the given environment and reward pair.
"""
ray.init(**ray_kwargs)
try:
stats = rl_common.parallel_training(global_configs, configs, num_cpus_fudge_factor, log_dir)
select_best(stats, log_dir)
finally:
ray.shutdown()
print(tabulate_stats(stats))
return stats
if __name__ == "__main__":
script_utils.experiment_main(experts_ex, "train_experts")
|
[
"imitation.util.util.make_unique_timestamp",
"ray.init",
"evaluating_rewards.scripts.script_utils.sanitize_path",
"evaluating_rewards.scripts.rl_common.parallel_training",
"math.sqrt",
"numpy.argmax",
"evaluating_rewards.scripts.rl_common.make_config",
"evaluating_rewards.scripts.script_utils.experiment_main",
"evaluating_rewards.serialize.get_output_dir",
"evaluating_rewards.scripts.rl_common.CONFIG_BY_ENV.get",
"tabulate.tabulate",
"ray.shutdown",
"sacred.Experiment",
"evaluating_rewards.experiments.env_rewards.THRESHOLDS.get",
"os.path.join",
"evaluating_rewards.experiments.env_rewards.GROUND_TRUTH_REWARDS_BY_ENV.items"
] |
[((1039, 1073), 'sacred.Experiment', 'sacred.Experiment', (['"""train_experts"""'], {}), "('train_experts')\n", (1056, 1073), False, 'import sacred\n'), ((1074, 1107), 'evaluating_rewards.scripts.rl_common.make_config', 'rl_common.make_config', (['experts_ex'], {}), '(experts_ex)\n', (1095, 1107), False, 'from evaluating_rewards.scripts import rl_common, script_utils\n'), ((1199, 1225), 'evaluating_rewards.serialize.get_output_dir', 'serialize.get_output_dir', ([], {}), '()\n', (1223, 1225), False, 'from evaluating_rewards import serialize\n'), ((2255, 2302), 'evaluating_rewards.experiments.env_rewards.GROUND_TRUTH_REWARDS_BY_ENV.items', 'env_rewards.GROUND_TRUTH_REWARDS_BY_ENV.items', ([], {}), '()\n', (2300, 2302), False, 'from evaluating_rewards.experiments import env_rewards\n'), ((4719, 4757), 'tabulate.tabulate', 'tabulate.tabulate', (['res'], {'headers': '"""keys"""'}), "(res, headers='keys')\n", (4736, 4757), False, 'import tabulate\n'), ((7586, 7608), 'ray.init', 'ray.init', ([], {}), '(**ray_kwargs)\n', (7594, 7608), False, 'import ray\n'), ((7877, 7934), 'evaluating_rewards.scripts.script_utils.experiment_main', 'script_utils.experiment_main', (['experts_ex', '"""train_experts"""'], {}), "(experts_ex, 'train_experts')\n", (7905, 7934), False, 'from evaluating_rewards.scripts import rl_common, script_utils\n'), ((2004, 2032), 'imitation.util.util.make_unique_timestamp', 'util.make_unique_timestamp', ([], {}), '()\n', (2030, 2032), False, 'from imitation.util import util\n'), ((2318, 2354), 'evaluating_rewards.scripts.rl_common.CONFIG_BY_ENV.get', 'rl_common.CONFIG_BY_ENV.get', (['env', '{}'], {}), '(env, {})\n', (2345, 2354), False, 'from evaluating_rewards.scripts import rl_common, script_utils\n'), ((5743, 5783), 'evaluating_rewards.experiments.env_rewards.THRESHOLDS.get', 'env_rewards.THRESHOLDS.get', (['key', '(-np.inf)'], {}), '(key, -np.inf)\n', (5769, 5783), False, 'from evaluating_rewards.experiments import env_rewards\n'), ((5867, 5885), 'numpy.argmax', 'np.argmax', (['returns'], {}), '(returns)\n', (5876, 5885), True, 'import numpy as np\n'), ((7635, 7723), 'evaluating_rewards.scripts.rl_common.parallel_training', 'rl_common.parallel_training', (['global_configs', 'configs', 'num_cpus_fudge_factor', 'log_dir'], {}), '(global_configs, configs, num_cpus_fudge_factor,\n log_dir)\n', (7662, 7723), False, 'from evaluating_rewards.scripts import rl_common, script_utils\n'), ((7777, 7791), 'ray.shutdown', 'ray.shutdown', ([], {}), '()\n', (7789, 7791), False, 'import ray\n'), ((5952, 5988), 'evaluating_rewards.scripts.script_utils.sanitize_path', 'script_utils.sanitize_path', (['env_name'], {}), '(env_name)\n', (5978, 5988), False, 'from evaluating_rewards.scripts import rl_common, script_utils\n'), ((6002, 6041), 'evaluating_rewards.scripts.script_utils.sanitize_path', 'script_utils.sanitize_path', (['reward_type'], {}), '(reward_type)\n', (6028, 6041), False, 'from evaluating_rewards.scripts import rl_common, script_utils\n'), ((6055, 6094), 'evaluating_rewards.scripts.script_utils.sanitize_path', 'script_utils.sanitize_path', (['reward_path'], {}), '(reward_path)\n', (6081, 6094), False, 'from evaluating_rewards.scripts import rl_common, script_utils\n'), ((6231, 6261), 'os.path.join', 'os.path.join', (['base_dir', '"""best"""'], {}), "(base_dir, 'best')\n", (6243, 6261), False, 'import os\n'), ((4524, 4548), 'math.sqrt', 'math.sqrt', (["row['n_traj']"], {}), "(row['n_traj'])\n", (4533, 4548), False, 'import math\n')]
|
# -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
#########################################################################
## This is a sample controller
## - index is the default action of any application
## - user is required for authentication and authorization
## - download is for downloading files uploaded in the db (does streaming)
## - api is an example of Hypermedia API support and access control
#########################################################################
def index():
redirect(URL('jobs'))
return
@auth.requires_membership('administradores')
def jobs():
"""
    list of jobs
"""
rows = db(db.job).select(orderby=db.job.created_on)
return locals()
@auth.requires_membership('administradores')
def job():
thisjob=db.job(request.args(0,cast=int))
tasks=db(db.scheduler_task.id.belongs( thisjob.tasks)).select() if thisjob.tasks else None
record_count=db(db.registro.job==thisjob.id).count()
return locals()
@auth.requires_membership('administradores')
def create_job():
form=SQLFORM(db.job).process()
if form.accepted:
        session.flash = 'Job saved!'
redirect(URL('jobs'))
elif form.errors:
        response.flash = 'Please review the form'
return locals()
@auth.requires_membership('administradores')
def upload_records():
table =request.args(0)
jobid=request.args(1,cast=int)
db[table].job.default=jobid
job = db.job(jobid)
form = FORM(INPUT(_type='file', _name='data'), INPUT(_type='submit'))
if form.process().accepted:
ufile = db.registro.job.store(form.vars.data.file,form.vars.data.filename)
ret = scheduler.queue_task(upload_file,pvars=dict(jobid=jobid,tablename=table,csvfile=ufile),timeout=3600)
tasks = job.tasks + [ret.id] if job.tasks else [ret.id]
db(db.job.id==jobid).update(tasks=tasks)
session.flash = 'Upload Task Created for file: ' + form.vars.data.filename
redirect(URL('job',args=jobid))
return locals()
@auth.requires_membership('administradores')
def comunicados():
jobid=request.args(0,cast=int)
__populate_comunicado_for_job(jobid)
rows = db(db.comunicacion.job==jobid).select()
rowcount = len(rows)
return locals()
@auth.requires_membership('administradores')
def mensajes():
jobid=request.args(0,cast=int)
rows = db(db.mensaje.job==jobid).select()
rowcount=len(rows)
return locals()
def asignar():
cid=request.args(0,cast=int)
#comunicacion = db(db.comunicacion.id==cid).select()[0]
comunicacion = db.comunicacion(cid)
jobid=comunicacion.job
thisjobcids=[ r.id for r in db(db.comunicacion.job==comunicacion.job).select() ]
cidprev=thisjobcids[0]
cidnext=thisjobcids[-1]
nombreprev=''
nombrenext=''
if cid - 1 in thisjobcids:
cidprev = cid-1
nombreprev = db.comunicacion[cidprev].nombre
if cid + 1 in thisjobcids:
cidnext = cid+1
nombrenext = db.comunicacion[cidnext].nombre
i = thisjobcids.index(comunicacion.id)+1
    total = len(thisjobcids)
typecodes = [ '10-BIENVENIDA','20-CUERPO','30-ANIVERSARIO','40-DESPEDIDA' ]
msj = [ (q,q.split('-')[1],db((db.mensaje.job==jobid) & (db.mensaje.typecode == q)).select()) for q in typecodes ]
components=[]
defoption1 = [OPTION('(vacio)', _value='vacio')]
defoption1 += [OPTION('(condicion)', _value='condicion')]
for t in msj:
rows = t[2]
components.append(t[1])
components.append(LI(SELECT(_name=t[1],*(defoption1+[OPTION(j.descripcion, _value=str(j.id)) for j in rows]))))
components.append(INPUT(_name='expresion_' + t[1]))
components.append(XML("<br>"))
    form = FORM(INPUT(_type='submit'), XML("<br><br>"),
                *components,
                _method='post',
                _action='')
if form.accepts(request,session):
for t in msj:
ci=form.vars[t[1]]
if not (ci in ['vacio','condicion']):
db.comunicacion_y_mensaje.insert(comunicacion=comunicacion,mensaje=int(form.vars[t[1]]))
elif form.vars[t[1]] == 'condicion':
db.comunicacion_y_mensaje.insert(condicional=True,condicion=form.vars['expresion_'+t[1]],
comunicacion=comunicacion)
#db.comunicacion_y_mensaje.insert(comunicacion=comunicacion,mensaje=int(form.vars.BIENVENIDA))
#db.comunicacion_y_mensaje.insert(comunicacion=comunicacion,mensaje=int(form.vars.CUERPO))
#db.comunicacion_y_mensaje.insert(comunicacion=comunicacion,mensaje=int(form.vars.ANIVERSARIO))
#db.comunicacion_y_mensaje.insert(comunicacion=comunicacion,mensaje=int(form.vars.DESPEDIDA))
        response.flash = 'saved'
elif form.errors:
        response.flash = 'Please check the fields'
return locals()
def user():
"""
exposes:
http://..../[app]/default/user/login
http://..../[app]/default/user/logout
http://..../[app]/default/user/register
http://..../[app]/default/user/profile
http://..../[app]/default/user/retrieve_password
http://..../[app]/default/user/change_password
http://..../[app]/default/user/manage_users (requires membership in
use @auth.requires_login()
@auth.requires_membership('group name')
@auth.requires_permission('read','table name',record_id)
to decorate functions that need access control
"""
return dict(form=auth())
@cache.action()
def download():
"""
allows downloading of uploaded files
http://..../[app]/default/download/[filename]
"""
return response.download(request, db)
def call():
"""
exposes services. for example:
http://..../[app]/default/call/jsonrpc
decorate with @services.jsonrpc the functions to expose
supports xml, json, xmlrpc, jsonrpc, amfrpc, rss, csv
"""
return service()
@auth.requires_login()
def api():
"""
this is example of API with access control
WEB2PY provides Hypermedia API (Collection+JSON) Experimental
"""
from gluon.contrib.hypermedia import Collection
rules = {
'<tablename>': {'GET':{},'POST':{},'PUT':{},'DELETE':{}},
}
return Collection(db).process(request,response,rules)
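# Hedged usage note: with the catch-all '<tablename>' rule above, web2py's
# experimental hypermedia Collection serves REST-style requests such as
# GET /app/default/api/job (the application and table names are illustrative).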
|
[
"gluon.contrib.hypermedia.Collection"
] |
[((6328, 6342), 'gluon.contrib.hypermedia.Collection', 'Collection', (['db'], {}), '(db)\n', (6338, 6342), False, 'from gluon.contrib.hypermedia import Collection\n')]
|
import logging as lg
lg.basicConfig(
level=lg.DEBUG,
format="%(asctime)s [%(levelname)s] %(name)s: %(message)s",
datefmt="%H:%M:%S")
_logger = lg.getLogger(__name__)
class Node:
def __init__(self, children, metadata):
self.children = children
self.metadata = metadata
def sum_metadata(self):
return sum(self.metadata) + sum([c.sum_metadata() for c in self.children])
def get_value(self):
_logger.debug("Node ({} children) metadata: {}".format(len(self.children), self.metadata))
if self.children:
cs = self.children
return sum([cs[idx - 1].get_value() for idx in self.metadata if 0 <= idx - 1 < len(cs)])
else:
return sum(self.metadata)
class Parser:
_node_class = Node
def __init__(self, data):
self.data = data
self.root = None
@classmethod
def from_data_str(cls, data_str):
return cls(list(map(int, data_str.strip().split(" "))))
def _build_node(self, j):
n_children = self.data[j]
n_metadata = self.data[j + 1]
# _logger.debug("Node at {}: {} children, {} metadata".format(j, n_children, n_metadata))
children = []
k = 2
for _ in range(n_children):
child, size = self._build_node(j + k)
children.append(child)
k += size
metadata = self.data[j + k:j + k + n_metadata]
node = self._node_class(children, metadata)
return node, k + n_metadata
def parse(self):
node, size = self._build_node(0)
assert size == len(self.data)
self.root = node
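# Worked example (the well-known sample input for this puzzle, assuming the
# same space-separated format): "2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2"
# parses into a root with two children; sum_metadata() returns 138 and
# get_value() returns 66.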
with open("input_day8.txt", "r") as f:
data_str = f.read()
parser = Parser.from_data_str(data_str)
parser.parse()
print("Answer pt1:", parser.root.sum_metadata())
print("Answer pt2:", parser.root.get_value())
|
[
"logging.getLogger",
"logging.basicConfig"
] |
[((22, 137), 'logging.basicConfig', 'lg.basicConfig', ([], {'level': 'lg.DEBUG', 'format': '"""%(asctime)s [%(levelname)s] %(name)s: %(message)s"""', 'datefmt': '"""%H:%M:%S"""'}), "(level=lg.DEBUG, format=\n '%(asctime)s [%(levelname)s] %(name)s: %(message)s', datefmt='%H:%M:%S')\n", (36, 137), True, 'import logging as lg\n'), ((156, 178), 'logging.getLogger', 'lg.getLogger', (['__name__'], {}), '(__name__)\n', (168, 178), True, 'import logging as lg\n')]
|
#
# @file TestL3Model.py
# @brief L3 Model unit tests
#
# @author <NAME> (Python conversion)
# @author <NAME>
#
# ====== WARNING ===== WARNING ===== WARNING ===== WARNING ===== WARNING ======
#
# DO NOT EDIT THIS FILE.
#
# This file was generated automatically by converting the file located at
# src/sbml/test/TestL3Model.c
# using the conversion program dev/utilities/translateTests/translateTests.pl.
# Any changes made here will be lost the next time the file is regenerated.
#
# -----------------------------------------------------------------------------
# This file is part of libSBML. Please visit http://sbml.org for more
# information about SBML, and the latest version of libSBML.
#
# Copyright 2005-2010 California Institute of Technology.
# Copyright 2002-2005 California Institute of Technology and
# Japan Science and Technology Corporation.
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation. A copy of the license agreement is provided
# in the file named "LICENSE.txt" included with this software distribution
# and also available online as http://sbml.org/software/libsbml/license.html
# -----------------------------------------------------------------------------
import sys
import unittest
import libsbml
class TestL3Model(unittest.TestCase):
global M
M = None
def setUp(self):
self.M = libsbml.Model(3,1)
if (self.M == None):
pass
pass
def tearDown(self):
_dummyList = [ self.M ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Model_NS(self):
self.assert_( self.M.getNamespaces() != None )
self.assert_( self.M.getNamespaces().getLength() == 1 )
self.assert_(( "http://www.sbml.org/sbml/level3/version1/core" == self.M.getNamespaces().getURI(0) ))
pass
def test_L3_Model_areaUnits(self):
units = "mole";
self.assertEqual( False, self.M.isSetAreaUnits() )
self.M.setAreaUnits(units)
self.assert_(( units == self.M.getAreaUnits() ))
self.assertEqual( True, self.M.isSetAreaUnits() )
if (self.M.getAreaUnits() == units):
pass
self.M.unsetAreaUnits()
self.assertEqual( False, self.M.isSetAreaUnits() )
if (self.M.getAreaUnits() != None):
pass
pass
def test_L3_Model_conversionFactor(self):
units = "mole";
self.assertEqual( False, self.M.isSetConversionFactor() )
self.M.setConversionFactor(units)
self.assert_(( units == self.M.getConversionFactor() ))
self.assertEqual( True, self.M.isSetConversionFactor() )
if (self.M.getConversionFactor() == units):
pass
self.M.unsetConversionFactor()
self.assertEqual( False, self.M.isSetConversionFactor() )
if (self.M.getConversionFactor() != None):
pass
pass
def test_L3_Model_create(self):
self.assert_( self.M.getTypeCode() == libsbml.SBML_MODEL )
self.assert_( self.M.getMetaId() == "" )
self.assert_( self.M.getNotes() == None )
self.assert_( self.M.getAnnotation() == None )
self.assert_( self.M.getId() == "" )
self.assert_( self.M.getName() == "" )
self.assert_( self.M.getSubstanceUnits() == "" )
self.assert_( self.M.getTimeUnits() == "" )
self.assert_( self.M.getVolumeUnits() == "" )
self.assert_( self.M.getAreaUnits() == "" )
self.assert_( self.M.getLengthUnits() == "" )
self.assert_( self.M.getConversionFactor() == "" )
self.assertEqual( False, self.M.isSetId() )
self.assertEqual( False, self.M.isSetName() )
self.assertEqual( False, self.M.isSetSubstanceUnits() )
self.assertEqual( False, self.M.isSetTimeUnits() )
self.assertEqual( False, self.M.isSetVolumeUnits() )
self.assertEqual( False, self.M.isSetAreaUnits() )
self.assertEqual( False, self.M.isSetLengthUnits() )
self.assertEqual( False, self.M.isSetConversionFactor() )
pass
def test_L3_Model_createWithNS(self):
xmlns = libsbml.XMLNamespaces()
xmlns.add( "http://www.sbml.org", "testsbml")
sbmlns = libsbml.SBMLNamespaces(3,1)
sbmlns.addNamespaces(xmlns)
m = libsbml.Model(sbmlns)
self.assert_( m.getTypeCode() == libsbml.SBML_MODEL )
self.assert_( m.getMetaId() == "" )
self.assert_( m.getNotes() == None )
self.assert_( m.getAnnotation() == None )
self.assert_( m.getLevel() == 3 )
self.assert_( m.getVersion() == 1 )
self.assert_( m.getNamespaces() != None )
self.assert_( m.getNamespaces().getLength() == 2 )
self.assert_( m.getId() == "" )
self.assert_( m.getName() == "" )
self.assert_( m.getSubstanceUnits() == "" )
self.assert_( m.getTimeUnits() == "" )
self.assert_( m.getVolumeUnits() == "" )
self.assert_( m.getAreaUnits() == "" )
self.assert_( m.getLengthUnits() == "" )
self.assert_( m.getConversionFactor() == "" )
self.assertEqual( False, m.isSetId() )
self.assertEqual( False, m.isSetName() )
self.assertEqual( False, m.isSetSubstanceUnits() )
self.assertEqual( False, m.isSetTimeUnits() )
self.assertEqual( False, m.isSetVolumeUnits() )
self.assertEqual( False, m.isSetAreaUnits() )
self.assertEqual( False, m.isSetLengthUnits() )
self.assertEqual( False, m.isSetConversionFactor() )
_dummyList = [ m ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Model_extentUnits(self):
units = "mole";
self.assertEqual( False, self.M.isSetExtentUnits() )
self.M.setExtentUnits(units)
self.assert_(( units == self.M.getExtentUnits() ))
self.assertEqual( True, self.M.isSetExtentUnits() )
if (self.M.getExtentUnits() == units):
pass
self.M.unsetExtentUnits()
self.assertEqual( False, self.M.isSetExtentUnits() )
if (self.M.getExtentUnits() != None):
pass
pass
def test_L3_Model_free_NULL(self):
_dummyList = [ None ]; _dummyList[:] = []; del _dummyList
pass
def test_L3_Model_id(self):
id = "mitochondria";
self.assertEqual( False, self.M.isSetId() )
self.M.setId(id)
self.assert_(( id == self.M.getId() ))
self.assertEqual( True, self.M.isSetId() )
if (self.M.getId() == id):
pass
self.M.unsetId()
self.assertEqual( False, self.M.isSetId() )
if (self.M.getId() != None):
pass
pass
def test_L3_Model_lengthUnits(self):
units = "mole";
self.assertEqual( False, self.M.isSetLengthUnits() )
self.M.setLengthUnits(units)
self.assert_(( units == self.M.getLengthUnits() ))
self.assertEqual( True, self.M.isSetLengthUnits() )
if (self.M.getLengthUnits() == units):
pass
self.M.unsetLengthUnits()
self.assertEqual( False, self.M.isSetLengthUnits() )
if (self.M.getLengthUnits() != None):
pass
pass
def test_L3_Model_name(self):
name = "My_Favorite_Factory";
self.assertEqual( False, self.M.isSetName() )
self.M.setName(name)
self.assert_(( name == self.M.getName() ))
self.assertEqual( True, self.M.isSetName() )
if (self.M.getName() == name):
pass
self.M.unsetName()
self.assertEqual( False, self.M.isSetName() )
if (self.M.getName() != None):
pass
pass
def test_L3_Model_substanceUnits(self):
units = "mole";
self.assertEqual( False, self.M.isSetSubstanceUnits() )
self.M.setSubstanceUnits(units)
self.assert_(( units == self.M.getSubstanceUnits() ))
self.assertEqual( True, self.M.isSetSubstanceUnits() )
if (self.M.getSubstanceUnits() == units):
pass
self.M.unsetSubstanceUnits()
self.assertEqual( False, self.M.isSetSubstanceUnits() )
if (self.M.getSubstanceUnits() != None):
pass
pass
def test_L3_Model_timeUnits(self):
units = "mole";
self.assertEqual( False, self.M.isSetTimeUnits() )
self.M.setTimeUnits(units)
self.assert_(( units == self.M.getTimeUnits() ))
self.assertEqual( True, self.M.isSetTimeUnits() )
if (self.M.getTimeUnits() == units):
pass
self.M.unsetTimeUnits()
self.assertEqual( False, self.M.isSetTimeUnits() )
if (self.M.getTimeUnits() != None):
pass
pass
def test_L3_Model_volumeUnits(self):
units = "mole";
self.assertEqual( False, self.M.isSetVolumeUnits() )
self.M.setVolumeUnits(units)
self.assert_(( units == self.M.getVolumeUnits() ))
self.assertEqual( True, self.M.isSetVolumeUnits() )
if (self.M.getVolumeUnits() == units):
pass
self.M.unsetVolumeUnits()
self.assertEqual( False, self.M.isSetVolumeUnits() )
if (self.M.getVolumeUnits() != None):
pass
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestL3Model))
return suite
if __name__ == "__main__":
if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
sys.exit(0)
else:
sys.exit(1)
|
[
"libsbml.Model",
"unittest.TextTestRunner",
"unittest.TestSuite",
"libsbml.XMLNamespaces",
"unittest.makeSuite",
"libsbml.SBMLNamespaces",
"sys.exit"
] |
[((8727, 8747), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (8745, 8747), False, 'import unittest\n'), ((1488, 1507), 'libsbml.Model', 'libsbml.Model', (['(3)', '(1)'], {}), '(3, 1)\n', (1501, 1507), False, 'import libsbml\n'), ((4032, 4055), 'libsbml.XMLNamespaces', 'libsbml.XMLNamespaces', ([], {}), '()\n', (4053, 4055), False, 'import libsbml\n'), ((4119, 4147), 'libsbml.SBMLNamespaces', 'libsbml.SBMLNamespaces', (['(3)', '(1)'], {}), '(3, 1)\n', (4141, 4147), False, 'import libsbml\n'), ((4187, 4208), 'libsbml.Model', 'libsbml.Model', (['sbmlns'], {}), '(sbmlns)\n', (4200, 4208), False, 'import libsbml\n'), ((8764, 8795), 'unittest.makeSuite', 'unittest.makeSuite', (['TestL3Model'], {}), '(TestL3Model)\n', (8782, 8795), False, 'import unittest\n'), ((8918, 8929), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (8926, 8929), False, 'import sys\n'), ((8942, 8953), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8950, 8953), False, 'import sys\n'), ((8846, 8882), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(1)'}), '(verbosity=1)\n', (8869, 8882), False, 'import unittest\n')]
|
from datetime import datetime
dados = dict()
dados['Name'] = str(input('Name: '))
nasc = int(input('Year of birth: '))
dados['Age'] = datetime.now().year - nasc
dados['ctps'] = int(input('Enter CTPS number (0 if none): '))
if dados['ctps'] != 0:
    dados['hired'] = int(input('Year of hiring: '))
    dados['salary'] = float(input('Salary: R$ '))
    dados['retirement'] = dados['Age'] + ((dados['hired'] + 35) - datetime.now().year)
for k, v in dados.items():
    print(f' - {k} has the value {v}')
|
[
"datetime.datetime.now"
] |
[((135, 149), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (147, 149), False, 'from datetime import datetime\n'), ((423, 437), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (435, 437), False, 'from datetime import datetime\n')]
|
from utils import generate_random_sequence
def search(seq, l, r, x):
num_comparisons = 0
if r >= l:
mid = l + (r - l) // 2
num_comparisons += 1
if seq[mid] == x:
return mid, num_comparisons
elif seq[mid] > x:
res, num = search(seq, l, mid - 1, x)
num_comparisons += num
return res, num_comparisons
else:
res, num = search(seq, mid + 1, r, x)
num_comparisons += num
return res, num_comparisons
else:
return -1, num_comparisons
if __name__ == '__main__':
random_seq = generate_random_sequence(20)
print('- Random sequence: ' + str(random_seq))
sorted_seq = sorted(random_seq)
print('\n- Sorted sequence: ' + str(sorted_seq))
# Implement binary search
number_search = 534
print('\n- Search number: ' + str(number_search))
    result, comps = search(sorted_seq, 0, len(random_seq) - 1, number_search)
print('Number of comparisons: ' + str(comps))
if result == -1:
print('Not found!')
else:
print('Index: ' + str(result))
number_search = 150
print('\n- Search number: ' + str(number_search))
    result, comps = search(sorted_seq, 0, len(random_seq) - 1, number_search)
print('Number of comparisons: ' + str(comps))
if result == -1:
print('Not found!')
else:
print('Index: ' + str(result))
|
[
"utils.generate_random_sequence"
] |
[((623, 651), 'utils.generate_random_sequence', 'generate_random_sequence', (['(20)'], {}), '(20)\n', (647, 651), False, 'from utils import generate_random_sequence\n')]
|
import subprocess
def debug(pid):
cmd = ['adb', "forward", "tcp:1234", "jdwp:{}".format(pid)]
stream = subprocess.Popen(cmd)
stream.wait()
jdb = ["jdb", "-attach", "localhost:1234"]
stream = subprocess.Popen(jdb)
stream.wait()
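# Minimal usage sketch: `adb jdwp` lists the pids of debuggable processes;
# debug(<pid>) then forwards local tcp:1234 to that JDWP endpoint and
# attaches jdb to it, e.g. debug(12345).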
|
[
"subprocess.Popen"
] |
[((114, 135), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {}), '(cmd)\n', (130, 135), False, 'import subprocess\n'), ((215, 236), 'subprocess.Popen', 'subprocess.Popen', (['jdb'], {}), '(jdb)\n', (231, 236), False, 'import subprocess\n')]
|
from Files.utils import ipv4_regex, ipv6_regex
import re
if __name__ == "__main__":
"""
This script parse the Internet2 interfaces files and generates router files
"""
router_id_regex = re.compile('<th id=".*?">(.*)</th>')
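    # Illustrative match (the table row below is a made-up example of the
    # Internet2 interface markup):
    #   re.search(router_id_regex, '<th id="r1">rtr.newy</th>').group(1) == 'rtr.newy'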
gt_interface_addresses = (
"resources/internet2/ground-truth/Internet2-interfaces.html"
)
ground_truth_routers_v4 = {}
ground_truth_routers_v6 = {}
with open(gt_interface_addresses) as f:
for line in f:
# Match the id between <>
m_router_id = re.search(router_id_regex, line)
if m_router_id is not None:
m_interface_v4 = re.search(ipv4_regex, line)
m_interface_v6 = re.search(ipv6_regex, line)
router_id = m_router_id.group(1)
if router_id not in ground_truth_routers_v4:
ground_truth_routers_v4[router_id] = set()
if router_id not in ground_truth_routers_v6:
ground_truth_routers_v6[router_id] = set()
if m_interface_v4 is not None:
ground_truth_routers_v4[router_id].add(m_interface_v4.group(0))
if m_interface_v6 is not None:
ground_truth_routers_v6[router_id].add(m_interface_v6.group(0))
gt_routers_dir = "resources/internet2/ground-truth/routers/"
n_ipv4_ips = 0
discard_interfaces = {
"172.16.31.10",
"172.31.254.2",
"172.16.31.10",
"192.168.127.12",
"192.168.127.12",
"fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
}
ipv4_candidates = set()
ipv6_candidates = set()
for router_id, ips in ground_truth_routers_v4.items():
with open(gt_routers_dir + "v4/" + router_id, "w") as f:
for ip in ips:
f.write(ip + "\n")
if ip in ipv4_candidates:
print("Duplicata", router_id, ip)
if ip not in discard_interfaces:
ipv4_candidates.add(ip)
n_ipv4_ips += 1
n_ipv6_ips = 0
for router_id, ips in ground_truth_routers_v6.items():
with open(gt_routers_dir + "v6/" + router_id, "w") as f:
for ip in ips:
f.write(ip + "\n")
if ip in ipv6_candidates:
print("Duplicata", router_id, ip)
if ip not in discard_interfaces:
ipv6_candidates.add(ip)
n_ipv6_ips += 1
gt_dir = "resources/internet2/ground-truth/"
with open(gt_dir + "ips4", "w") as f:
for ip in ipv4_candidates:
f.write(ip + "\n")
with open(gt_dir + "ips6", "w") as f:
for ip in ipv6_candidates:
f.write(ip + "\n")
print("IPv4 addresses: " + str(n_ipv4_ips))
print("IPv6 addresses: " + str(n_ipv6_ips))
|
[
"re.search",
"re.compile"
] |
[((206, 242), 're.compile', 're.compile', (['"""<th id=".*?">(.*)</th>"""'], {}), '(\'<th id=".*?">(.*)</th>\')\n', (216, 242), False, 'import re\n'), ((550, 582), 're.search', 're.search', (['router_id_regex', 'line'], {}), '(router_id_regex, line)\n', (559, 582), False, 'import re\n'), ((656, 683), 're.search', 're.search', (['ipv4_regex', 'line'], {}), '(ipv4_regex, line)\n', (665, 683), False, 'import re\n'), ((717, 744), 're.search', 're.search', (['ipv6_regex', 'line'], {}), '(ipv6_regex, line)\n', (726, 744), False, 'import re\n')]
|
import digitalocean
import os
from fabric.decorators import wraps, _wrap_as_new
from retry.api import retry_call
class TokenError(Exception):
pass
def _list_annotating_decorator(attribute, *values):
"""
From fabric.decorators._list_annotating_decorator
https://github.com/fabric/fabric/blob/master/fabric/decorators.py#L49
"""
def attach_list(func):
@wraps(func)
def inner_decorator(*args, **kwargs):
return func(*args, **kwargs)
_values = values
# Allow for single iterable argument as well as *args
if len(_values) == 1 and not isinstance(_values[0], str):
_values = _values[0]
setattr(inner_decorator, attribute, list(_values))
# Don't replace @task new-style task objects with inner_decorator by
# itself -- wrap in a new Task object first.
inner_decorator = _wrap_as_new(func, inner_decorator)
return inner_decorator
return attach_list
def droplet_generator(region=None, tag=None, ids=[], status=[]):
"""
A generator that yields Droplet IP addresses.
:param region: A DigitalOcean region
:type region: str
:param tag: A DigitalOcean tag name
:type tag: str
:param id: A list of DigitalOcean Droplet IDs
:type id: list
"""
token = os.getenv('FABRIC_DIGITALOCEAN_TOKEN')
if not token:
raise TokenError('The environmental variable FABRIC_DIGITALOCEAN_TOKEN'
' is empty. It must contain a valid DigitalOcean API'
' token.')
client = digitalocean.Manager(token=token)
hosts = []
if not ids:
droplets = client.get_all_droplets(tag_name=tag)
for d in droplets:
if not region or d.region['slug'] == region:
hosts.append(d)
else:
if isinstance(ids, int):
droplet = client.get_droplet(droplet_id=ids)
hosts.append(droplet)
else:
for i in ids:
droplet = client.get_droplet(droplet_id=i)
hosts.append(droplet)
if status and isinstance(status, list):
hosts = [h for h in hosts if h.status in status]
for h in hosts:
yield h.ip_address
@wraps(droplet_generator)
def droplets(region=None, tag=None, ids=[], status=[], retry={}):
"""
Fabric decorator for running a task on DigitalOcean Droplets.
:param region: A DigitalOcean region
:type region: str
:param tag: A DigitalOcean tag name
:type tag: str
:param id: A list of DigitalOcean Droplet IDs
:type id: list
"""
retry_defaults = {'tries': 1, 'delay': 0, 'backoff':1, 'max_delay': None}
droplets = retry_call(_list_annotating_decorator,
fargs=['hosts', droplet_generator(region, tag, ids, status)],
exceptions=(digitalocean.baseapi.DataReadError, digitalocean.baseapi.JSONReadError),
tries=retry.get('tries', retry_defaults['tries']),
delay=retry.get('delay', retry_defaults['delay']),
backoff=retry.get('backoff', retry_defaults['backoff']),
max_delay=retry.get('max_delay', retry_defaults['max_delay']))
return droplets
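# Minimal usage sketch (fabric 1.x style; requires FABRIC_DIGITALOCEAN_TOKEN
# to be set, and the task body below is illustrative):
#
#     from fabric.api import task, run
#
#     @task
#     @droplets(tag="web", status=["active"], retry={"tries": 3, "delay": 2})
#     def uptime():
#         run("uptime")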
|
[
"fabric.decorators._wrap_as_new",
"digitalocean.Manager",
"os.getenv",
"fabric.decorators.wraps"
] |
[((2245, 2269), 'fabric.decorators.wraps', 'wraps', (['droplet_generator'], {}), '(droplet_generator)\n', (2250, 2269), False, 'from fabric.decorators import wraps, _wrap_as_new\n'), ((1316, 1354), 'os.getenv', 'os.getenv', (['"""FABRIC_DIGITALOCEAN_TOKEN"""'], {}), "('FABRIC_DIGITALOCEAN_TOKEN')\n", (1325, 1354), False, 'import os\n'), ((1582, 1615), 'digitalocean.Manager', 'digitalocean.Manager', ([], {'token': 'token'}), '(token=token)\n', (1602, 1615), False, 'import digitalocean\n'), ((387, 398), 'fabric.decorators.wraps', 'wraps', (['func'], {}), '(func)\n', (392, 398), False, 'from fabric.decorators import wraps, _wrap_as_new\n'), ((887, 922), 'fabric.decorators._wrap_as_new', '_wrap_as_new', (['func', 'inner_decorator'], {}), '(func, inner_decorator)\n', (899, 922), False, 'from fabric.decorators import wraps, _wrap_as_new\n')]
|
import numpy as np
from sklearn.preprocessing import FunctionTransformer
from ..wrappers import wrap
def linearize(X):
X = np.asarray(X)
return np.reshape(X, (X.shape[0], -1))
class Linearize(FunctionTransformer):
"""Extracts features by simply concatenating all elements of the data into
one long vector."""
def __init__(self, **kwargs):
super().__init__(func=linearize, **kwargs)
def SampleLinearize(**kwargs):
return wrap([Linearize, "sample"], **kwargs)
def CheckpointSampleLinearize(**kwargs):
return wrap([Linearize, "sample", "checkpoint"], **kwargs)
|
[
"numpy.asarray",
"numpy.reshape"
] |
[((131, 144), 'numpy.asarray', 'np.asarray', (['X'], {}), '(X)\n', (141, 144), True, 'import numpy as np\n'), ((156, 187), 'numpy.reshape', 'np.reshape', (['X', '(X.shape[0], -1)'], {}), '(X, (X.shape[0], -1))\n', (166, 187), True, 'import numpy as np\n')]
|
from pathlib import Path
from contextlib import contextmanager
from typing import Any, Iterator, List, Optional
import duckdb
from ..models.task import Task
@contextmanager
def database_connection() -> Iterator[duckdb.DuckDBPyConnection]:
connection: duckdb.DuckDBPyConnection = duckdb.connect(f"{Path(__file__).parent}/db.duckdb")
cursor: duckdb.DuckDBPyConnection = connection.cursor()
try:
yield cursor
connection.commit()
finally:
cursor.close()
connection.close()
def init_database():
query = """DROP TABLE IF EXISTS tasks;
DROP SEQUENCE IF EXISTS __id;
"""
with database_connection() as db:
db.execute(query)
query = "CREATE SEQUENCE IF NOT EXISTS __id START 1;"
with database_connection() as db:
db.execute(query)
query = """
CREATE TABLE IF NOT EXISTS tasks (
id INTEGER PRIMARY KEY,
text TEXT,
day TEXT,
reminder INTEGER
)"""
with database_connection() as db:
db.execute(query)
init_data = [
Task(id=1, text="Doctors Appointment", day="Feb 5th at 2:30pm", reminder=True),
Task(id=2, text="Meeting at School", day="Feb 6th at 1:30pm", reminder=True),
]
for task in init_data:
        insert_task(task)
def insert_task(task: Task) -> Task:
task_dict = task._asdict()
query = """INSERT INTO tasks (id, text, day, reminder)
VALUES (nextval('__id'), ?, ?, ?)"""
parameters = [task.text, task.day, task.reminder]
with database_connection() as db:
db.execute(query, parameters)
id_ = db.lastrowid
task_dict["id"] = id_
return Task(**task_dict)
def get_all() -> List[Task]:
query = "SELECT id, text, day, reminder FROM tasks"
with database_connection() as db:
db.execute(query)
records = db.fetchall()
tasks = []
for record in records:
task = Task(
id=record[0], text=record[1], day=record[2], reminder=bool(record[3])
)
tasks.append(task)
return tasks
def get_by_id(task: Task) -> Optional[Task]:
query = "SELECT id, text, day, reminder FROM tasks WHERE id = ?"
parameters = [task.id]
with database_connection() as db:
db.execute(query, parameters)
record = db.fetchone()
if not record:
return None
return Task(id=record[0], text=record[1], day=record[2], reminder=bool(record[3]))
def delete_by_id(task: Task) -> None:
query = "DELETE FROM tasks WHERE id = ?"
parameters = [task.id]
with database_connection() as db:
db.execute(query, parameters)
def update_by_id(task: Task) -> Task:
query = """UPDATE tasks
SET text = ?, day = ?, reminder = ?
WHERE id = ?"""
parameters = [task.text, task.day, task.reminder, task.id]
with database_connection() as db:
db.execute(query, parameters)
return task
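# Minimal usage sketch, assuming Task is a NamedTuple with
# (id, text, day, reminder) fields, as implied by the _asdict() call above:
#
#     init_database()
#     created = insert_task(Task(id=0, text="Dentist", day="Mar 1st", reminder=True))
#     print(get_all())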
|
[
"pathlib.Path"
] |
[((305, 319), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (309, 319), False, 'from pathlib import Path\n')]
|
from setuptools import setup
requires = ["flake8 > 3.0.0", "attr"]
flake8_entry_point = "flake8.extension"
long_description = """
A flake8 style checker for pandas method chaining, forked from https://github.com/deppen8/pandas-vet]
"""
setup(
name="pandas-method-chaining",
version="0.1.0",
author="<NAME>",
license="MIT",
description="A pandas method chaining checker",
install_requires=requires,
entry_points={
flake8_entry_point: [
"PMC=pandas_method_chaining:Plugin",
]
}
)
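# Hedged usage note: after `pip install .`, flake8 discovers the plugin through
# the "flake8.extension" entry point and reports its checks under the "PMC"
# error-code prefix.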
|
[
"setuptools.setup"
] |
[((240, 487), 'setuptools.setup', 'setup', ([], {'name': '"""pandas-method-chaining"""', 'version': '"""0.1.0"""', 'author': '"""<NAME>"""', 'license': '"""MIT"""', 'description': '"""A pandas method chaining checker"""', 'install_requires': 'requires', 'entry_points': "{flake8_entry_point: ['PMC=pandas_method_chaining:Plugin']}"}), "(name='pandas-method-chaining', version='0.1.0', author='<NAME>',\n license='MIT', description='A pandas method chaining checker',\n install_requires=requires, entry_points={flake8_entry_point: [\n 'PMC=pandas_method_chaining:Plugin']})\n", (245, 487), False, 'from setuptools import setup\n')]
|
"""
test_shopitem.py Copyright 2015 by stefanlehmann
"""
import pytest
from shopy.shop import Shop
from shopy.shopitem import ShopItem
def test_shopitem_repr():
shop = Shop.from_file('amazon.json')
item = ShopItem()
item.name = "testitem"
item.articlenr = "123"
item.price = 12.5
item.shop = shop
assert repr(item) == \
"<ShopItem object (name:'%s', articlenr:'%s'," \
" price:%s, shop:'%s')>" % (
'testitem',
'123',
'12.50',
'Amazon'
)
|
[
"shopy.shop.Shop.from_file",
"shopy.shopitem.ShopItem"
] |
[((186, 215), 'shopy.shop.Shop.from_file', 'Shop.from_file', (['"""amazon.json"""'], {}), "('amazon.json')\n", (200, 215), False, 'from shopy.shop import Shop\n'), ((227, 237), 'shopy.shopitem.ShopItem', 'ShopItem', ([], {}), '()\n', (235, 237), False, 'from shopy.shopitem import ShopItem\n')]
|
import os
from datetime import datetime, timedelta, timezone
from unittest.mock import (
AsyncMock,
MagicMock,
Mock,
patch,
)
import pytest
from opencoverage.clients import scm
from tests import utils
pytestmark = pytest.mark.asyncio
@pytest.fixture(autouse=True)
def _clear():
scm.github._token_cache.clear()
scm.github._private_key_cache.clear()
def test_get_client_unsupported():
settings = Mock()
settings.scm = "missing"
with pytest.raises(TypeError):
scm.get_client(settings, None)
class TestGithub:
@pytest.fixture()
async def client(self, settings):
settings.github_app_pem_file = os.path.join(utils.DATA_DIR, "test.pem")
cli = scm.Github(settings, None)
yield cli
await cli.close()
@pytest.fixture()
def response(self):
res = AsyncMock()
res.status = 200
res.json.return_value = {"foo": "bar"}
res.text.return_value = '{"foo": "bar"}'
yield res
@pytest.fixture()
def req(self, response):
req = AsyncMock()
req.__aenter__.return_value = response
yield req
@pytest.fixture()
def session(self, client, req):
session = MagicMock()
session.post.return_value = req
session.patch.return_value = req
session.put.return_value = req
session.get.return_value = req
with patch("opencoverage.clients.scm.github.aiohttp_client", session):
yield session
@pytest.fixture()
def token(self, client):
with patch.object(client, "get_access_token", return_value="token"):
yield "token"
def test_get_client_github_missing_pem(self):
settings = Mock()
settings.scm = "github"
settings.github_app_pem_file = None
with pytest.raises(TypeError):
assert scm.get_client(settings, None)
async def test_get_client(self, client):
assert client
async def test_get_access_token(self, client, session, response):
response.status = 201
response.json.return_value = scm.github.GithubAccessData(
token="token",
expires_at=datetime.utcnow().replace(tzinfo=timezone.utc)
+ timedelta(hours=1),
permissions={},
repository_selection="repository_selection",
).dict()
token = await client.get_access_token()
assert token == "token"
async def test_get_access_token_failure(self, client, session, response):
response.status = 401
response.json.return_value = {"error": "error"}
with pytest.raises(scm.github.APIException):
await client.get_access_token()
async def test_get_access_token_cache(self, client, session, response):
response.status = 201
response.json.return_value = scm.github.GithubAccessData(
token="token",
expires_at=datetime.utcnow().replace(tzinfo=timezone.utc)
+ timedelta(hours=1),
permissions={},
repository_selection="repository_selection",
).dict()
token = await client.get_access_token()
assert token == "token"
calls = len(session.post.mock_calls)
assert await client.get_access_token() == "token"
assert len(session.post.mock_calls) == calls
async def test_get_pulls_missing(self, client, session, response, token):
response.status = 422
pulls = await client.get_pulls("org", "repo", "commit_hash")
assert len(pulls) == 0
async def test_get_pulls_auth_error(self, client, session, response, token):
response.status = 401
with pytest.raises(scm.github.AuthorizationException):
await client.get_pulls("org", "repo", "commit_hash")
async def test_get_pull_diff_auth_error(self, client, session, response, token):
response.status = 401
with pytest.raises(scm.github.AuthorizationException):
await client.get_pull_diff("org", "repo", "id")
async def test_create_check(self, client, session, response, token):
response.status = 201
check = scm.github.GithubCheck(
id=123,
status="created",
started_at=datetime.utcnow(),
name="name",
head_sha="head_sha",
)
response.json.return_value = check.dict()
assert await client.create_check("org", "repo", "commit") == "123"
async def test_create_check_auth_error(self, client, session, response, token):
response.status = 401
with pytest.raises(scm.github.AuthorizationException):
await client.create_check("org", "repo", "commit")
async def test_update_check(self, client, session, response, token):
await client.update_check("org", "repo", "check_id")
assert session.patch.mock_calls[0].kwargs["json"] == {
"status": "completed",
"conclusion": "failure",
}
async def test_update_check_success(self, client, session, response, token):
await client.update_check("org", "repo", "check_id", running=True, success=True)
assert session.patch.mock_calls[0].kwargs["json"] == {
"status": "in_progress",
"conclusion": "success",
}
async def test_update_check_auth_error(self, client, session, response, token):
response.status = 401
with pytest.raises(scm.github.APIException):
await client.update_check("org", "repo", "check_id")
async def test_create_comment(self, client, session, response, token):
response.status = 201
comment = scm.github.GithubComment(id=123, body="text")
response.json.return_value = comment.dict()
assert await client.create_comment("org", "repo", "pull_id", "text") == "123"
async def test_create_comment_auth_error(self, client, session, response, token):
response.status = 401
with pytest.raises(scm.github.APIException):
await client.create_comment("org", "repo", "pull_id", "text")
async def test_update_comment(self, client, session, response, token):
response.status = 200
await client.update_comment("org", "repo", "123", "text")
async def test_update_comment_auth_error(self, client, session, response, token):
response.status = 401
with pytest.raises(scm.github.APIException):
await client.update_comment("org", "repo", "123", "text")
async def test_download_file_401(self, client, session, response, token):
response.status = 401
with pytest.raises(scm.github.AuthorizationException):
async for chunk in client.download_file("org", "repo", "commit", "filename"):
...
async def test_download_file_404(self, client, session, response, token):
response.status = 404
with pytest.raises(scm.github.NotFoundException):
async for chunk in client.download_file("org", "repo", "commit", "filename"):
...
async def test_file_exists(self, client, session, response, token):
response.status = 200
assert await client.file_exists("org", "repo", "commit", "filename")
async def test_not_file_exists(self, client, session, response, token):
response.status = 404
assert not await client.file_exists("org", "repo", "commit", "filename")
async def test_file_exists_authz(self, client, session, response, token):
response.status = 401
with pytest.raises(scm.github.AuthorizationException):
assert not await client.file_exists("org", "repo", "commit", "filename")
|
[
"opencoverage.clients.scm.get_client",
"unittest.mock.patch.object",
"opencoverage.clients.scm.Github",
"unittest.mock.MagicMock",
"opencoverage.clients.scm.github.GithubComment",
"opencoverage.clients.scm.github._token_cache.clear",
"unittest.mock.Mock",
"pytest.fixture",
"unittest.mock.AsyncMock",
"unittest.mock.patch",
"datetime.datetime.utcnow",
"pytest.raises",
"datetime.timedelta",
"opencoverage.clients.scm.github._private_key_cache.clear",
"os.path.join"
] |
[((256, 284), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (270, 284), False, 'import pytest\n'), ((303, 334), 'opencoverage.clients.scm.github._token_cache.clear', 'scm.github._token_cache.clear', ([], {}), '()\n', (332, 334), False, 'from opencoverage.clients import scm\n'), ((339, 376), 'opencoverage.clients.scm.github._private_key_cache.clear', 'scm.github._private_key_cache.clear', ([], {}), '()\n', (374, 376), False, 'from opencoverage.clients import scm\n'), ((429, 435), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (433, 435), False, 'from unittest.mock import AsyncMock, MagicMock, Mock, patch\n'), ((564, 580), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (578, 580), False, 'import pytest\n'), ((790, 806), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (804, 806), False, 'import pytest\n'), ((1002, 1018), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1016, 1018), False, 'import pytest\n'), ((1145, 1161), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1159, 1161), False, 'import pytest\n'), ((1500, 1516), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1514, 1516), False, 'import pytest\n'), ((474, 498), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (487, 498), False, 'import pytest\n'), ((508, 538), 'opencoverage.clients.scm.get_client', 'scm.get_client', (['settings', 'None'], {}), '(settings, None)\n', (522, 538), False, 'from opencoverage.clients import scm\n'), ((658, 698), 'os.path.join', 'os.path.join', (['utils.DATA_DIR', '"""test.pem"""'], {}), "(utils.DATA_DIR, 'test.pem')\n", (670, 698), False, 'import os\n'), ((713, 739), 'opencoverage.clients.scm.Github', 'scm.Github', (['settings', 'None'], {}), '(settings, None)\n', (723, 739), False, 'from opencoverage.clients import scm\n'), ((845, 856), 'unittest.mock.AsyncMock', 'AsyncMock', ([], {}), '()\n', (854, 856), False, 'from unittest.mock import AsyncMock, MagicMock, Mock, patch\n'), ((1062, 1073), 'unittest.mock.AsyncMock', 'AsyncMock', ([], {}), '()\n', (1071, 1073), False, 'from unittest.mock import AsyncMock, MagicMock, Mock, patch\n'), ((1216, 1227), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1225, 1227), False, 'from unittest.mock import AsyncMock, MagicMock, Mock, patch\n'), ((1719, 1725), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (1723, 1725), False, 'from unittest.mock import AsyncMock, MagicMock, Mock, patch\n'), ((5657, 5702), 'opencoverage.clients.scm.github.GithubComment', 'scm.github.GithubComment', ([], {'id': '(123)', 'body': '"""text"""'}), "(id=123, body='text')\n", (5681, 5702), False, 'from opencoverage.clients import scm\n'), ((1402, 1466), 'unittest.mock.patch', 'patch', (['"""opencoverage.clients.scm.github.aiohttp_client"""', 'session'], {}), "('opencoverage.clients.scm.github.aiohttp_client', session)\n", (1407, 1466), False, 'from unittest.mock import AsyncMock, MagicMock, Mock, patch\n'), ((1559, 1621), 'unittest.mock.patch.object', 'patch.object', (['client', '"""get_access_token"""'], {'return_value': '"""token"""'}), "(client, 'get_access_token', return_value='token')\n", (1571, 1621), False, 'from unittest.mock import AsyncMock, MagicMock, Mock, patch\n'), ((1815, 1839), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1828, 1839), False, 'import pytest\n'), ((1860, 1890), 'opencoverage.clients.scm.get_client', 'scm.get_client', (['settings', 'None'], {}), '(settings, None)\n', (1874, 1890), False, 'from opencoverage.clients import scm\n'), ((2617, 2655), 'pytest.raises', 'pytest.raises', (['scm.github.APIException'], {}), '(scm.github.APIException)\n', (2630, 2655), False, 'import pytest\n'), ((3678, 3726), 'pytest.raises', 'pytest.raises', (['scm.github.AuthorizationException'], {}), '(scm.github.AuthorizationException)\n', (3691, 3726), False, 'import pytest\n'), ((3922, 3970), 'pytest.raises', 'pytest.raises', (['scm.github.AuthorizationException'], {}), '(scm.github.AuthorizationException)\n', (3935, 3970), False, 'import pytest\n'), ((4589, 4637), 'pytest.raises', 'pytest.raises', (['scm.github.AuthorizationException'], {}), '(scm.github.AuthorizationException)\n', (4602, 4637), False, 'import pytest\n'), ((5428, 5466), 'pytest.raises', 'pytest.raises', (['scm.github.APIException'], {}), '(scm.github.APIException)\n', (5441, 5466), False, 'import pytest\n'), ((5971, 6009), 'pytest.raises', 'pytest.raises', (['scm.github.APIException'], {}), '(scm.github.APIException)\n', (5984, 6009), False, 'import pytest\n'), ((6387, 6425), 'pytest.raises', 'pytest.raises', (['scm.github.APIException'], {}), '(scm.github.APIException)\n', (6400, 6425), False, 'import pytest\n'), ((6619, 6667), 'pytest.raises', 'pytest.raises', (['scm.github.AuthorizationException'], {}), '(scm.github.AuthorizationException)\n', (6632, 6667), False, 'import pytest\n'), ((6901, 6944), 'pytest.raises', 'pytest.raises', (['scm.github.NotFoundException'], {}), '(scm.github.NotFoundException)\n', (6914, 6944), False, 'import pytest\n'), ((7546, 7594), 'pytest.raises', 'pytest.raises', (['scm.github.AuthorizationException'], {}), '(scm.github.AuthorizationException)\n', (7559, 7594), False, 'import pytest\n'), ((4249, 4266), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4264, 4266), False, 'from datetime import datetime, timedelta, timezone\n'), ((2237, 2255), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (2246, 2255), False, 'from datetime import datetime, timedelta, timezone\n'), ((2985, 3003), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (2994, 3003), False, 'from datetime import datetime, timedelta, timezone\n'), ((2176, 2193), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2191, 2193), False, 'from datetime import datetime, timedelta, timezone\n'), ((2924, 2941), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2939, 2941), False, 'from datetime import datetime, timedelta, timezone\n')]
|
import matplotlib
matplotlib.use('Agg')
#matplotlib.use("gtk")
#matplotlib.use('Qt5Agg')
from rectify_vars_and_wald_functions import *
import pickle
import os
import pandas as pd
import matplotlib.pyplot as plt
import sys
sys.path.insert(1, '../../le_experiments/')
import numpy as np
from scipy import stats
from matplotlib.pyplot import figure
from pathlib import Path
import glob
import read_config
from output_format import H_ALGO_ACTION_FAILURE, H_ALGO_ACTION_SUCCESS, H_ALGO_ACTION, H_ALGO_OBSERVED_REWARD
from output_format import H_ALGO_ESTIMATED_MU, H_ALGO_ESTIMATED_V, H_ALGO_ESTIMATED_ALPHA, H_ALGO_ESTIMATED_BETA
from output_format import H_ALGO_PROB_BEST_ACTION, H_ALGO_NUM_TRIALS
import beta_bernoulli
import scipy.stats
from scipy.stats import spearmanr
from scipy.stats import pearsonr
#import thompson_policy
import ipdb
EPSILON_PROB = .000001
DESIRED_POWER = 0.8
DESIRED_ALPHA = 0.05
SMALL_SIZE = 10
MEDIUM_SIZE = 13
BIGGER_SIZE = 14
plt.rc('font', size=SMALL_SIZE) # controls default text sizes
plt.rc('axes', titlesize=SMALL_SIZE) # fontsize of the axes title
plt.rc('axes', labelsize=MEDIUM_SIZE) # fontsize of the x and y labels
plt.rc('xtick', labelsize=8.5) # fontsize of the tick labels
plt.rc('ytick', labelsize=10) # fontsize of the tick labels
plt.rc('legend', fontsize=SMALL_SIZE) # legend fontsize
plt.rc('figure', titlesize=BIGGER_SIZE) # fontsize of the figure title
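# Note: plt.rc mutates matplotlib's process-wide rcParams, so the font sizes
# set above apply to every figure produced by the plotting helpers below.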
def plot_minssratio_vs_algs(ax, df_list, x_label, y_label):
# ipdb.set_trace()
idx = 0
ind = np.arange(4)
ax.set_xticks(ind)
labels = ('Uniform', 'EG0pt3', 'EG0pt1', 'TS')
ax.set_xticklabels(labels)
for df in df_list:
        df.loc[df[y_label] > 1.0, y_label] = 1/df.loc[df[y_label] > 1.0, y_label] #Ratio is smaller sample size / larger sample size; invert only the ratio column, not entire rows
df_reject = df[df[x_label] == True]
x_idx = np.zeros(len(df_reject[x_label])) + idx
jitter = np.random.normal(0, 0.1, len(x_idx))/2
if idx == 0:
ax.scatter(x_idx + jitter,df_reject[y_label], color = 'red', label = "Rejected Null With Wald Test")
else:
ax.scatter(x_idx + jitter,df_reject[y_label], color = 'red')
df_accept = df[df[x_label] == False]
x_idx = np.zeros(len(df_accept[x_label])) + idx
jitter = np.random.normal(0, 0.1, len(x_idx))/2
if idx == 0:
ax.scatter(x_idx + jitter, df_accept[y_label], color = 'blue', label = "Failed to Reject Null With Wald Test")
else:
ax.scatter(x_idx + jitter, df_accept[y_label], color = 'blue')
idx +=1
def scatter_ratio(df = None, to_check_eg0pt1 = None, to_check_eg0pt3 = None, to_check_unif = None, to_check_ipw = None, n = None, num_sims = None, load_df = True, \
title = None,\
to_check_ts = None):
    '''
    Scatter the minimum sample-size ratio against algorithm, one panel per
    number of steps, coloring points by whether the Wald test rejected the null.
    '''
if load_df == True:
with open(to_check_eg0pt1, 'rb') as f:
df_eg0pt1 = pickle.load(f)
with open(to_check_eg0pt3, 'rb') as f:
df_eg0pt3 = pickle.load(f)
with open(to_check_unif, 'rb') as f:
df_unif = pickle.load(f)
if to_check_ipw != None:
ipw_t1_list = np.load(to_check_ipw)
if to_check_ts != None:
with open(to_check_ts, 'rb') as t:
df_ts = pickle.load(t)
# SE = np.sqrt(mean_1*(1 - mean_1)/sample_size_1 + mean_2*(1 - mean_2)/sample_size_2)
    df_eg0pt1 = df_eg0pt1.dropna()
    wald_pval_eg0pt1 = (1 - scipy.stats.norm.cdf(np.abs(df_eg0pt1["wald_type_stat"].dropna())))*2 #Two sided, symmetric, so compare to 0.05
    df_eg0pt1["Wald Rejected"] = wald_pval_eg0pt1 < 0.05
    df_eg0pt1.to_csv("overview_csvs/EG0pt1/eg0pt1_overview_noNa_n={}.csv".format(n))
    df_eg0pt3 = df_eg0pt3.dropna()
    wald_pval_eg0pt3 = (1 - scipy.stats.norm.cdf(np.abs(df_eg0pt3["wald_type_stat"].dropna())))*2 #Two sided, symmetric, so compare to 0.05
    df_eg0pt3["Wald Rejected"] = wald_pval_eg0pt3 < 0.05
    df_eg0pt3.to_csv("overview_csvs/EG0pt3/eg0pt3_overview_noNa_n={}.csv".format(n))
    df_ts = df_ts.dropna()
    wald_pval_ts = (1 - scipy.stats.norm.cdf(np.abs(df_ts["wald_type_stat"].dropna())))*2 #Two sided, symmetric, so compare to 0.05
    df_ts["Wald Rejected"] = wald_pval_ts < 0.05
    df_ts.to_csv("overview_csvs/TS/ts_overview_noNa_n={}.csv".format(n))
    df_unif = df_unif.dropna()
    wald_pval_unif = (1 - scipy.stats.norm.cdf(np.abs(df_unif["wald_type_stat"].dropna())))*2 #Two sided, symmetric, so compare to 0.05
    df_unif["Wald Rejected"] = wald_pval_unif < 0.05
    df_unif.to_csv("overview_csvs/unif/unif_overview_noNa_n={}.csv".format(n))
fig, ax = plt.subplots(2,2)
fig.set_size_inches(14.5, 10.5)
ax = ax.ravel()
i = 0
step_sizes = df_unif['num_steps'].unique()
size_vars = ["n/2", "n", "2*n", "4*n"]
for num_steps in step_sizes:
df_for_num_steps_eg0pt1 = df_eg0pt1[df_eg0pt1['num_steps'] == num_steps].dropna()
df_for_num_steps_eg0pt3 = df_eg0pt3[df_eg0pt3['num_steps'] == num_steps]
df_for_num_steps_unif = df_unif[df_unif['num_steps'] == num_steps]
df_for_num_steps_ts = df_ts[df_ts['num_steps'] == num_steps]
df_list = [df_for_num_steps_unif, df_for_num_steps_eg0pt3, df_for_num_steps_eg0pt1, df_for_num_steps_ts]
# df_list = [df_for_num_steps_eg0pt1]
#df_list = [df_for_num_steps_ts]
# df_list = [df_for_num_steps_unif]
y_label = "ratio"
x_label = "Wald Rejected"
plot_minssratio_vs_algs(ax = ax[i], df_list = df_list, x_label = x_label, y_label = y_label)
num_replications = len(df_for_num_steps_eg0pt1)
#
ax[i].set_xlabel("Number of participants = {} = {}".format(size_vars[i], num_steps))
ax[i].legend()
ax[i].set_ylim(0,1.02)
ax[i].set_ylabel("Minimum Sample Size Ratio \n Min($\\frac{n_1}{n_2}$, $\\frac{n_2}{n_1}$)")
i +=1
fig.suptitle(title)
#fig.tight_layout(rect=[0, 0.03, 1, 0.90])
# if not os.path.isdir("plots"):
# os.path.mkdir("plots")
save_dir_ne = "../simulation_analysis_saves/scatter_ratio_waldreject/NoEffect/"
save_dir_e = "../simulation_analysis_saves/scatter_ratio_waldreject/Effect/"
Path(save_dir_ne).mkdir(parents=True, exist_ok=True)
Path(save_dir_e).mkdir(parents=True, exist_ok=True)
save_str_ne = save_dir_ne + "{}.png".format(title)
save_str_e = save_dir_e + "{}.png".format(title)
# save_str_ne = "../simulation_analysis_saves/scatter_ratio_waldreject/NoEffect/{}.png".format(title)
# save_str_e = "../simulation_analysis_saves/scatter_ratio_waldreject/Effect/{}.png".format(title)
if "No Effect" in title:
print("saving to ", save_str_ne)
fig.savefig(save_str_ne, bbox_inches = "tight")
elif "With Effect" in title:
print("saving to ", save_str_e, bbox_inches = "tight")
fig.savefig(save_str_e)
#plt.show()
plt.clf()
plt.close()
def plot_correlation(fig, ax, df_list, x_label, y_label, num_steps, ax_idx):
# ipdb.set_trace()
idx = 0
df = df_list[0]
# for df in df_list: #This loop not needed
df_reject = df[df["Wald Rejected"] == True]
    xvals = np.abs(df_reject[x_label]/num_steps - 0.5) #|proportion of samples in condition 1 - 0.5|
    yvals = np.abs(df_reject[y_label.format(2)] - df_reject[y_label.format(1)]) #|difference in arm mean estimates|
if ax_idx == 0:
ax.scatter(xvals, yvals, color = 'red', label = "Rejected Null With Wald Test")
else:
ax.scatter(xvals,yvals, color = 'red')
df_accept = df[df["Wald Rejected"] == False]
    xvals = np.abs(df_accept[x_label]/num_steps - 0.5) #|proportion of samples in condition 1 - 0.5|
    yvals = np.abs(df_accept[y_label.format(2)] - df_accept[y_label.format(1)]) #|difference in arm mean estimates|
if len(df) == 0:
ipdb.set_trace()
print()
proportion_reject = len(df_reject)/len(df)
    yvals_all = np.abs(df[y_label.format(2)] - df[y_label.format(1)]) #|difference in arm mean estimates|
    xvals_all = np.abs(df[x_label]/num_steps - 0.5) #|proportion of samples in condition 1 - 0.5|
proportion_reject = np.round(proportion_reject, 3)
coeff, p = spearmanr(xvals_all, yvals_all)
coeff = np.round(coeff, 3)
p = np.round(p, 3)
coeff_pear, p_pear = pearsonr(xvals_all, yvals_all)
coeff_pear = np.round(coeff_pear, 3)
p_pear = np.round(p_pear, 3)
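    # Spearman's coefficient measures monotone association on ranks, while
    # Pearson's measures linear association; reporting both shows whether the
    # imbalance/estimate relationship is linear or merely monotone.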
if ax_idx == 0:
ax.scatter(xvals, yvals, color = 'blue', label = "Failed to Reject Null With Wald Test")
ax.legend(loc = "upper center", bbox_to_anchor = (1.2, 1.276))
else:
ax.scatter(xvals,yvals , color = 'blue')
ax.text(0.02, 0.75,"Proprtion Rejected (Power/Type 1 Error) = {} \nSpearman's Correlation Coefficent = {} \nwith pvalue = {}\n Pearon's Correlation Coefficent = {} \nwith pvalue = {}".format(proportion_reject, coeff, p, coeff_pear, p_pear))
# if ax_idx == 0 and 0:
# leg1 = ax.legend((p_red[0], p_blue[0]), "Rejected Null Hypothesis With Wald Test", "Failed To Reject Null Hypothesis With Wald Test", bbox_to_anchor = (1.0, 1.076))
# ax.add_artist(leg1)
# handles, labels = ax.get_legend_handles_labels()
# fig.legend(handles, ["a","g"], loc='upper right', prop={'size': 50})
def scatter_correlation_helper_outer(df = None, df_eg0pt1 = None, df_eg0pt3 = None, df_unif = None, n = None, num_sims = None, load_df = True, \
title = None,\
df_ts = None, effect_size = 0):
alg_key_list = ["TS", "EG0pt1", "EG0pt3", "Uniform"]
alg_key_list = ["TS"]
alg_name_list = ["Thompson Sampling (TS)","Epsilon Greedy 0.1 (EG0.1)","Epsilon Greedy 0.3 (EG0.3)", "Uniform"]
alg_name_list = ["Thompson Sampling (TS)"]
for alg_key, alg_name in zip(alg_key_list, alg_name_list):
        if effect_size == 0:
            title_scatter_corr = "{} ".format(alg_name) + "Difference in arm means (|$\hat{p}_1$ - $\hat{p}_2$|) vs. |Proportion of samples in Condition 1 - 0.5|" + " For n = {} \n Across {} Simulations \n No Effect $p_1$ = $p_2$ = 0.5".format(n, num_sims)
        else:
            title_scatter_corr = "{} ".format(alg_name) + "Difference in arm means (|$\hat{p}_1$ - $\hat{p}_2$|) vs. |Proportion of samples in Condition 1 - 0.5|" + " For n = {} \n Across {} Simulations \n With Effect Size {}".format(n, num_sims, effect_size)
scatter_correlation(df_eg0pt1 = df_eg0pt1 , df_eg0pt3 = df_eg0pt3,\
df_unif = df_unif, df_ts = df_ts,\
title = title_scatter_corr, \
n = n, num_sims = num_sims, alg_key = alg_key)
def scatter_correlation(df = None, df_eg0pt1 = None, df_eg0pt3 = None, df_unif = None, n = None, num_sims = None, load_df = True, \
title = None,\
df_ts = None, alg_key = "TS"):
    '''
    Plots |proportion of samples in condition 1 - 0.5| against the difference
    in arm mean estimates, a quantity that captures allocation imbalance directly.
    '''
    #df_eg0pt1 = df_eg0pt1.dropna()
    wald_pval_eg0pt1 = (1 - scipy.stats.norm.cdf(np.abs(df_eg0pt1["wald_type_stat"].dropna())))*2 #Two sided, symmetric, so compare to 0.05
    #df_eg0pt1["Wald Rejected"] = wald_pval_eg0pt1 < 0.05
    df_eg0pt1["Wald Rejected"] = df_eg0pt1["wald_pval"] < 0.05
    #df_eg0pt3 = df_eg0pt3.dropna()
    wald_pval_eg0pt3 = (1 - scipy.stats.norm.cdf(np.abs(df_eg0pt3["wald_type_stat"].dropna())))*2 #Two sided, symmetric, so compare to 0.05
    df_eg0pt3["Wald Rejected"] = df_eg0pt3["wald_pval"] < 0.05
    #df_ts = df_ts.dropna()
    wald_pval_ts = (1 - scipy.stats.norm.cdf(np.abs(df_ts["wald_type_stat"].dropna())))*2 #Two sided, symmetric, so compare to 0.05
    df_ts["Wald Rejected"] = df_ts["wald_pval"] < 0.05
    # df_unif = df_unif.dropna()
    wald_pval_unif = (1 - scipy.stats.norm.cdf(np.abs(df_unif["wald_type_stat"].dropna())))*2 #Two sided, symmetric, so compare to 0.05
    df_unif["Wald Rejected"] = df_unif["wald_pval"] < 0.05
fig, ax = plt.subplots(2,2)
fig.set_size_inches(14.5, 10.5)
ax = ax.ravel()
i = 0
step_sizes = df_unif['num_steps'].unique()
size_vars = ["n/2", "n", "2*n", "4*n"]
alg_key = "TS" #ALWAYS TS
for num_steps in step_sizes:
df_for_num_steps_eg0pt1 = df_eg0pt1[df_eg0pt1['num_steps'] == num_steps]
df_for_num_steps_eg0pt3 = df_eg0pt3[df_eg0pt3['num_steps'] == num_steps]
df_for_num_steps_unif = df_unif[df_unif['num_steps'] == num_steps]
df_for_num_steps_ts = df_ts[df_ts['num_steps'] == num_steps]
#df_list = [df_for_num_steps_unif, df_for_num_steps_eg0pt3, df_for_num_steps_eg0pt1, df_for_num_steps_ts]
# df_list = [df_for_num_steps_eg0pt3]
alg_dict = {"TS":df_for_num_steps_ts, "EG0pt1":df_for_num_steps_eg0pt1, "EG0pt3":df_for_num_steps_eg0pt3, "Uniform":df_for_num_steps_unif}
df_list = [alg_dict[alg_key]]
# df_list = [df_for_num_steps_ts]
#df_list = [df_for_num_steps_ts]
# df_list = [df_for_num_steps_unif]
# bins = np.arange(0, 1.01, .025)
x_label = "sample_size_1"
y_label = "mean_{}"
if len(df_list[0]) == 0:
ipdb.set_trace()
plot_correlation(fig, ax = ax[i], df_list = df_list, x_label = x_label, y_label = y_label, num_steps = num_steps, ax_idx = i)
num_replications = len(df_for_num_steps_eg0pt1)
#
#
ax[i].set_xlabel("|Proportion of samples in Condtion 1 - 0.5| For Number of participants = {} = {}".format(size_vars[i], num_steps))
# ax[i].legend()
ax[i].set_ylim(0,1.02)
ax[i].set_xlim(0, 0.501)
ax[i].set_ylabel("Difference in Arm Mean Estimates |$\hatp1$ - $\hatp2$|")
i +=1
fig.suptitle(title)
fig.subplots_adjust(top=0.80)
# fig.tight_layout(rect=[0, 0.03, 1, 0.90])
# if not os.path.isdir("plots"):
# os.path.mkdir("plots")
save_dir_ne = "../simulation_analysis_saves/scatter_correlation/NoEffect/"
save_dir_e = "../simulation_analysis_saves/scatter_correlation/Effect/"
Path(save_dir_ne).mkdir(parents=True, exist_ok=True)
Path(save_dir_e).mkdir(parents=True, exist_ok=True)
save_str_ne = save_dir_ne + "{}.png".format(title)
save_str_e = save_dir_e + "{}.png".format(title)
# save_str_ne = "../simulation_analysis_saves/scatter_correlation/NoEffect/{}/{}.png".format(alg_key, title)
# save_str_e = "../simulation_analysis_saves/scatter_correlation/Effect/{}/{}.png".format(alg_key, title)
if "No Effect" in title:
print("saving to ", save_str_ne)
fig.savefig(save_str_ne, bbox_inches = "tight")
elif "With Effect" in title:
print("saving to ", save_str_e)
fig.savefig(save_str_e, bbox_inches = "tight")
#plt.show()
plt.clf()
plt.close()
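# Minimal usage sketch (hypothetical pickle paths; note that the title must
# contain "No Effect" or "With Effect" for the figure-saving branches above to fire):
# scatter_ratio(to_check_eg0pt1="df_eg0pt1.pkl", to_check_eg0pt3="df_eg0pt3.pkl",
#               to_check_unif="df_unif.pkl", to_check_ts="df_ts.pkl",
#               n=100, num_sims=500, load_df=True,
#               title="Sample Size Ratios, No Effect, n = 100")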
|
[
"numpy.load",
"numpy.abs",
"matplotlib.pyplot.clf",
"ipdb.set_trace",
"matplotlib.pyplot.close",
"scipy.stats.spearmanr",
"sys.path.insert",
"scipy.stats.pearsonr",
"pathlib.Path",
"matplotlib.use",
"numpy.arange",
"matplotlib.pyplot.rc",
"pickle.load",
"numpy.round",
"matplotlib.pyplot.subplots"
] |
[((18, 39), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (32, 39), False, 'import matplotlib\n'), ((225, 268), 'sys.path.insert', 'sys.path.insert', (['(1)', '"""../../le_experiments/"""'], {}), "(1, '../../le_experiments/')\n", (240, 268), False, 'import sys\n'), ((1001, 1032), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {'size': 'SMALL_SIZE'}), "('font', size=SMALL_SIZE)\n", (1007, 1032), True, 'import matplotlib.pyplot as plt\n'), ((1072, 1108), 'matplotlib.pyplot.rc', 'plt.rc', (['"""axes"""'], {'titlesize': 'SMALL_SIZE'}), "('axes', titlesize=SMALL_SIZE)\n", (1078, 1108), True, 'import matplotlib.pyplot as plt\n'), ((1142, 1179), 'matplotlib.pyplot.rc', 'plt.rc', (['"""axes"""'], {'labelsize': 'MEDIUM_SIZE'}), "('axes', labelsize=MEDIUM_SIZE)\n", (1148, 1179), True, 'import matplotlib.pyplot as plt\n'), ((1216, 1246), 'matplotlib.pyplot.rc', 'plt.rc', (['"""xtick"""'], {'labelsize': '(8.5)'}), "('xtick', labelsize=8.5)\n", (1222, 1246), True, 'import matplotlib.pyplot as plt\n'), ((1280, 1309), 'matplotlib.pyplot.rc', 'plt.rc', (['"""ytick"""'], {'labelsize': '(10)'}), "('ytick', labelsize=10)\n", (1286, 1309), True, 'import matplotlib.pyplot as plt\n'), ((1343, 1380), 'matplotlib.pyplot.rc', 'plt.rc', (['"""legend"""'], {'fontsize': 'SMALL_SIZE'}), "('legend', fontsize=SMALL_SIZE)\n", (1349, 1380), True, 'import matplotlib.pyplot as plt\n'), ((1402, 1441), 'matplotlib.pyplot.rc', 'plt.rc', (['"""figure"""'], {'titlesize': 'BIGGER_SIZE'}), "('figure', titlesize=BIGGER_SIZE)\n", (1408, 1441), True, 'import matplotlib.pyplot as plt\n'), ((1580, 1592), 'numpy.arange', 'np.arange', (['(4)'], {}), '(4)\n', (1589, 1592), True, 'import numpy as np\n'), ((4787, 4805), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(2)'], {}), '(2, 2)\n', (4799, 4805), True, 'import matplotlib.pyplot as plt\n'), ((7111, 7120), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (7118, 7120), True, 'import matplotlib.pyplot as plt\n'), ((7125, 7136), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (7134, 7136), True, 'import matplotlib.pyplot as plt\n'), ((7380, 7424), 'numpy.abs', 'np.abs', (['(df_reject[x_label] / num_steps - 0.5)'], {}), '(df_reject[x_label] / num_steps - 0.5)\n', (7386, 7424), True, 'import numpy as np\n'), ((7833, 7877), 'numpy.abs', 'np.abs', (['(df_accept[x_label] / num_steps - 0.5)'], {}), '(df_accept[x_label] / num_steps - 0.5)\n', (7839, 7877), True, 'import numpy as np\n'), ((8302, 8339), 'numpy.abs', 'np.abs', (['(df[x_label] / num_steps - 0.5)'], {}), '(df[x_label] / num_steps - 0.5)\n', (8308, 8339), True, 'import numpy as np\n'), ((8412, 8442), 'numpy.round', 'np.round', (['proportion_reject', '(3)'], {}), '(proportion_reject, 3)\n', (8420, 8442), True, 'import numpy as np\n'), ((8458, 8489), 'scipy.stats.spearmanr', 'spearmanr', (['xvals_all', 'yvals_all'], {}), '(xvals_all, yvals_all)\n', (8467, 8489), False, 'from scipy.stats import spearmanr\n'), ((8502, 8520), 'numpy.round', 'np.round', (['coeff', '(3)'], {}), '(coeff, 3)\n', (8510, 8520), True, 'import numpy as np\n'), ((8529, 8543), 'numpy.round', 'np.round', (['p', '(3)'], {}), '(p, 3)\n', (8537, 8543), True, 'import numpy as np\n'), ((8570, 8600), 'scipy.stats.pearsonr', 'pearsonr', (['xvals_all', 'yvals_all'], {}), '(xvals_all, yvals_all)\n', (8578, 8600), False, 'from scipy.stats import pearsonr\n'), ((8618, 8641), 'numpy.round', 'np.round', (['coeff_pear', '(3)'], {}), '(coeff_pear, 3)\n', (8626, 8641), True, 'import numpy as np\n'), ((8655, 8674), 'numpy.round', 
'np.round', (['p_pear', '(3)'], {}), '(p_pear, 3)\n', (8663, 8674), True, 'import numpy as np\n'), ((12318, 12336), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(2)'], {}), '(2, 2)\n', (12330, 12336), True, 'import matplotlib.pyplot as plt\n'), ((15140, 15149), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (15147, 15149), True, 'import matplotlib.pyplot as plt\n'), ((15154, 15165), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (15163, 15165), True, 'import matplotlib.pyplot as plt\n'), ((8085, 8101), 'ipdb.set_trace', 'ipdb.set_trace', ([], {}), '()\n', (8099, 8101), False, 'import ipdb\n'), ((2983, 2997), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2994, 2997), False, 'import pickle\n'), ((3069, 3083), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (3080, 3083), False, 'import pickle\n'), ((3152, 3166), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (3163, 3166), False, 'import pickle\n'), ((3227, 3248), 'numpy.load', 'np.load', (['to_check_ipw'], {}), '(to_check_ipw)\n', (3234, 3248), True, 'import numpy as np\n'), ((6418, 6435), 'pathlib.Path', 'Path', (['save_dir_ne'], {}), '(save_dir_ne)\n', (6422, 6435), False, 'from pathlib import Path\n'), ((6475, 6491), 'pathlib.Path', 'Path', (['save_dir_e'], {}), '(save_dir_e)\n', (6479, 6491), False, 'from pathlib import Path\n'), ((13539, 13555), 'ipdb.set_trace', 'ipdb.set_trace', ([], {}), '()\n', (13553, 13555), False, 'import ipdb\n'), ((14433, 14450), 'pathlib.Path', 'Path', (['save_dir_ne'], {}), '(save_dir_ne)\n', (14437, 14450), False, 'from pathlib import Path\n'), ((14490, 14506), 'pathlib.Path', 'Path', (['save_dir_e'], {}), '(save_dir_e)\n', (14494, 14506), False, 'from pathlib import Path\n'), ((3352, 3366), 'pickle.load', 'pickle.load', (['t'], {}), '(t)\n', (3363, 3366), False, 'import pickle\n')]
|
import click
from osp.common import config
from osp.common.utils import query_bar
from osp.corpus.corpus import Corpus
from osp.corpus.models import Document
from osp.corpus.models import Document_Format
from osp.corpus.models import Document_Text
from osp.corpus.jobs import ext_format
from osp.corpus.jobs import ext_text
from peewee import create_model_tables
from prettytable import PrettyTable
@click.group()
def cli():
pass
@cli.command()
def init_db():
"""
Create the database tables.
"""
create_model_tables([
Document,
Document_Format,
Document_Text
], fail_silently=True)
@cli.command()
def insert_documents():
"""
Insert documents in the database.
"""
Document.insert_documents()
@cli.command()
def queue_format():
"""
Queue format extraction tasks in the worker.
"""
for doc in query_bar(Document.select()):
config.rq.enqueue(ext_format, doc.id)
@cli.command()
def queue_text():
"""
Queue text extraction tasks in the worker.
"""
for doc in query_bar(Document.select()):
config.rq.enqueue(ext_text, doc.id)
@cli.command()
def format_counts():
"""
Print a table of file format -> count.
"""
t = PrettyTable(['File Type', 'Doc Count'])
t.align = 'l'
for c in Document_Format.format_counts():
t.add_row(c)
click.echo(t)
@cli.command()
def file_count():
"""
Print the total number of files.
"""
corpus = Corpus.from_env()
click.echo(corpus.file_count)
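# Usage sketch: once this click group is wired up as a console script (the
# entry-point name `osp-corpus` below is an assumption), the workflow would be:
#   osp-corpus init_db
#   osp-corpus insert_documents
#   osp-corpus queue_format && osp-corpus queue_text
#   osp-corpus format_counts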
|
[
"osp.corpus.models.Document.select",
"peewee.create_model_tables",
"click.echo",
"osp.corpus.models.Document_Format.format_counts",
"prettytable.PrettyTable",
"osp.corpus.models.Document.insert_documents",
"osp.corpus.corpus.Corpus.from_env",
"click.group",
"osp.common.config.rq.enqueue"
] |
[((451, 464), 'click.group', 'click.group', ([], {}), '()\n', (462, 464), False, 'import click\n'), ((571, 658), 'peewee.create_model_tables', 'create_model_tables', (['[Document, Document_Format, Document_Text]'], {'fail_silently': '(True)'}), '([Document, Document_Format, Document_Text],\n fail_silently=True)\n', (590, 658), False, 'from peewee import create_model_tables\n'), ((786, 813), 'osp.corpus.models.Document.insert_documents', 'Document.insert_documents', ([], {}), '()\n', (811, 813), False, 'from osp.corpus.models import Document\n'), ((1305, 1344), 'prettytable.PrettyTable', 'PrettyTable', (["['File Type', 'Doc Count']"], {}), "(['File Type', 'Doc Count'])\n", (1316, 1344), False, 'from prettytable import PrettyTable\n'), ((1377, 1408), 'osp.corpus.models.Document_Format.format_counts', 'Document_Format.format_counts', ([], {}), '()\n', (1406, 1408), False, 'from osp.corpus.models import Document_Format\n'), ((1436, 1449), 'click.echo', 'click.echo', (['t'], {}), '(t)\n', (1446, 1449), False, 'import click\n'), ((1553, 1570), 'osp.corpus.corpus.Corpus.from_env', 'Corpus.from_env', ([], {}), '()\n', (1568, 1570), False, 'from osp.corpus.corpus import Corpus\n'), ((1575, 1604), 'click.echo', 'click.echo', (['corpus.file_count'], {}), '(corpus.file_count)\n', (1585, 1604), False, 'import click\n'), ((943, 960), 'osp.corpus.models.Document.select', 'Document.select', ([], {}), '()\n', (958, 960), False, 'from osp.corpus.models import Document\n'), ((971, 1008), 'osp.common.config.rq.enqueue', 'config.rq.enqueue', (['ext_format', 'doc.id'], {}), '(ext_format, doc.id)\n', (988, 1008), False, 'from osp.common import config\n'), ((1134, 1151), 'osp.corpus.models.Document.select', 'Document.select', ([], {}), '()\n', (1149, 1151), False, 'from osp.corpus.models import Document\n'), ((1162, 1197), 'osp.common.config.rq.enqueue', 'config.rq.enqueue', (['ext_text', 'doc.id'], {}), '(ext_text, doc.id)\n', (1179, 1197), False, 'from osp.common import config\n')]
|
import os
import copy
import yaml
from datetime import datetime, timedelta
from .utils.run import dbt_seed, dbt_run, dbt_test, dbt_command
RUN_TIME = datetime(2021, 5, 2, 0, 0, 0)
DBT_VARS = {
're_data:time_window_start': (RUN_TIME - timedelta(days=1)).strftime("%Y-%m-%d %H:%M:%S"),
're_data:time_window_end': RUN_TIME.strftime("%Y-%m-%d %H:%M:%S"),
}
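# With RUN_TIME above, DBT_VARS renders as:
#   {'re_data:time_window_start': '2021-05-01 00:00:00',
#    're_data:time_window_end': '2021-05-02 00:00:00'}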
def test_monitoring(db):
load_deps = 'dbt deps'
assert os.system(load_deps) == 0
dbt_vars = copy.deepcopy(DBT_VARS)
print (f"Running setup and tests for {db}")
dbt_seed('--vars "{}"'.format(yaml.dump(dbt_vars)), db)
dbt_run('--models transformed', db)
print (f"Computing re_data metrics for {db}")
dbt_run('--exclude transformed --vars "{}"'.format(yaml.dump(dbt_vars)), db)
    # update dbt_vars to run dbt for the next day of data
dbt_vars['re_data:time_window_start'] = dbt_vars['re_data:time_window_end']
dbt_vars['re_data:time_window_end'] = (RUN_TIME + timedelta(days=1)).strftime("%Y-%m-%d %H:%M:%S")
dbt_command(
'dbt run --exclude transformed --fail-fast --vars "{}"'.format(yaml.dump(dbt_vars)),
db
)
dbt_test('--vars "{}"'.format(yaml.dump(dbt_vars)), db)
op_vars = {
'start_date': RUN_TIME.strftime("%Y-%m-%d"),
'end_date': (RUN_TIME + timedelta(days=1)).strftime("%Y-%m-%d"),
'interval': 'days:1',
}
dbt_command(
'dbt run-operation generate_overview --args "{}"'.format(yaml.dump(op_vars)),
db, common_args=''
)
print (f"Running tests completed for {db}")
|
[
"copy.deepcopy",
"yaml.dump",
"os.system",
"datetime.datetime",
"datetime.timedelta"
] |
[((151, 180), 'datetime.datetime', 'datetime', (['(2021)', '(5)', '(2)', '(0)', '(0)', '(0)'], {}), '(2021, 5, 2, 0, 0, 0)\n', (159, 180), False, 'from datetime import datetime, timedelta\n'), ((470, 493), 'copy.deepcopy', 'copy.deepcopy', (['DBT_VARS'], {}), '(DBT_VARS)\n', (483, 493), False, 'import copy\n'), ((428, 448), 'os.system', 'os.system', (['load_deps'], {}), '(load_deps)\n', (437, 448), False, 'import os\n'), ((582, 601), 'yaml.dump', 'yaml.dump', (['dbt_vars'], {}), '(dbt_vars)\n', (591, 601), False, 'import yaml\n'), ((756, 775), 'yaml.dump', 'yaml.dump', (['dbt_vars'], {}), '(dbt_vars)\n', (765, 775), False, 'import yaml\n'), ((1109, 1128), 'yaml.dump', 'yaml.dump', (['dbt_vars'], {}), '(dbt_vars)\n', (1118, 1128), False, 'import yaml\n'), ((1183, 1202), 'yaml.dump', 'yaml.dump', (['dbt_vars'], {}), '(dbt_vars)\n', (1192, 1202), False, 'import yaml\n'), ((1471, 1489), 'yaml.dump', 'yaml.dump', (['op_vars'], {}), '(op_vars)\n', (1480, 1489), False, 'import yaml\n'), ((240, 257), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (249, 257), False, 'from datetime import datetime, timedelta\n'), ((971, 988), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (980, 988), False, 'from datetime import datetime, timedelta\n'), ((1311, 1328), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1320, 1328), False, 'from datetime import datetime, timedelta\n')]
|
'''
Created by auto_sdk on 2020.01.09
'''
from dingtalk.api.base import RestApi
class OapiEduFaceSearchRequest(RestApi):
def __init__(self,url=None):
RestApi.__init__(self,url)
self.class_id = None
self.height = None
self.synchronous = None
self.url = None
self.userid = None
self.width = None
def getHttpMethod(self):
return 'POST'
def getapiname(self):
return 'dingtalk.oapi.edu.face.search'
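# Minimal usage sketch (the gateway URL and the executing SDK client are
# assumptions; this auto-generated class only declares the request shape):
# req = OapiEduFaceSearchRequest("https://oapi.dingtalk.com/topapi/edu/face/search")
# req.class_id = 1001
# req.userid = "student01"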
|
[
"dingtalk.api.base.RestApi.__init__"
] |
[((153, 180), 'dingtalk.api.base.RestApi.__init__', 'RestApi.__init__', (['self', 'url'], {}), '(self, url)\n', (169, 180), False, 'from dingtalk.api.base import RestApi\n')]
|
# Bep Marketplace ELE
# Copyright (c) 2016-2021 Kolibri Solutions
# License: See LICENSE file or https://github.com/KolibriSolutions/BepMarketplace/blob/master/LICENSE
#
from django.conf.urls import url
from . import views
app_name = 'osirisdata'
urlpatterns = [
url('^list/$', views.listOsiris, name='list'),
url('^tometa/$', views.osirisToMeta, name='tometa'),
]
|
[
"django.conf.urls.url"
] |
[((273, 318), 'django.conf.urls.url', 'url', (['"""^list/$"""', 'views.listOsiris'], {'name': '"""list"""'}), "('^list/$', views.listOsiris, name='list')\n", (276, 318), False, 'from django.conf.urls import url\n'), ((324, 375), 'django.conf.urls.url', 'url', (['"""^tometa/$"""', 'views.osirisToMeta'], {'name': '"""tometa"""'}), "('^tometa/$', views.osirisToMeta, name='tometa')\n", (327, 375), False, 'from django.conf.urls import url\n')]
|
# Generated by Django 2.1.4 on 2019-01-29 15:33
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('todoapi', '0003_todolist_taskid'),
]
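    # Two-step primary-key swap: drop the auto-generated integer `id`, then
    # promote the existing `taskid` UUID field to primary key.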
operations = [
migrations.RemoveField(
model_name='todolist',
name='id',
),
migrations.AlterField(
model_name='todolist',
name='taskid',
field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False),
),
]
|
[
"django.db.migrations.RemoveField",
"django.db.models.UUIDField"
] |
[((244, 300), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""todolist"""', 'name': '"""id"""'}), "(model_name='todolist', name='id')\n", (266, 300), False, 'from django.db import migrations, models\n'), ((448, 539), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False)\n', (464, 539), False, 'from django.db import migrations, models\n')]
|
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# author: <NAME>
""" This is an example of using the code to sequence the data"""
import sys
import os
import numpy as np
import pandas as pd
import random
import time
import datetime
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
import math as math
import re as re
import inspect
import scipy as scipy
import functools
import itertools
import operator
import warnings
import json
import IPython
import hashlib
import base64
def GetContent(fn):
with open(fn, 'r') as f:
content = f.read()
return content
## specify the path for the source code
path = ''
srcFns = [path + 'expt-analysis/python/data_analysis.py',
path + 'expt-analysis/python/sequential_data.py',
path + 'expt-analysis/python/sequences_statistics_v1.py',
path + 'expt-analysis/python/sequences_statistics_v2.py',
path + 'expt-analysis/python/unit_analysis.py']
for fn in srcFns: exec(GetContent(fn=fn))
# Define a location for SQL Tables Log File (SQL tables are optional)
# and a writePath for the seq data files
# make sure these paths do exist
sqlTablesLogFile = '~/data/seq_data/seq_data_info.csv'
writePath = '~/data/seq_data/'
## define a figs path
figsPath = '~/data/seq_data/figs/'
## define a tables path for writing results tables
tablesPath = '~/data/seq_data/tables/'
# Example with simulated data demo purpose:
## step 1: simulate usage data
df = Sim_depUsageData(userNum=200, subSeqLen=4, repeatPattern=None)
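## df holds one simulated usage event per row; the columns referenced in the
## sequencing call below (user_id, prod, form_factor, date, country, time,
## end_time) are assumed to be produced by Sim_depUsageData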
## step 2: sequence the data
dataNamesList = ['test']
dataDesc = 'seq'
fnSuff = '.csv'
# trim is the length of sequences we are considering for finding significance
trim = 3
condDict = None
out = WriteSeqTable_forSql(
df=df,
seqDimCols=['prod', 'form_factor'],
partitionCols0=['user_id'],
sliceCols=['date', 'country'],
seqPropCols=['form_factor'],
timeGapMin=5,
timeCol='time',
timeColEnd='end_time',
trim=trim,
countDistinctCols=['user_id', 'seq_id'],
condDict=None,
addSeqPropMixedCols=['form_factor'],
ordered=True,
writePath=writePath,
dataNamesList=dataNamesList,
dataDesc=dataDesc,
fnSuff=fnSuff,
defineSqlTab=False,
DefineSqlTable_fromFile=DefineSqlTable_fromFile,
ExecSqlQuery=ExecSqlQuery,
sqlTablePrefix="",
timeGapDict=None,
writeTableLogFn=sqlTablesLogFile)
# run this if you have implemented SQL query execution
# and like to use SQL tables rather than files
sqlStr = out['sqlStr']
Mark(sqlStr, color='purple', bold=True)
#ExecSqlQuery(sqlStr)
## look at the info table
for fn in srcFns: exec(GetContent(fn=fn))
seqTablesDf = ReadCsv(fn=sqlTablesLogFile)
Mark(
seqTablesDf,
text='set of available sql tables for finding sequences',
color='purple',
bold=True)
## step 3: get that particular table we need using the info table
rowNum = 0
row = seqTablesDf.iloc[rowNum]
trim = row['trim']
Mark(trim, 'trim')
seqPropCols = []
if str(row['seqPropCols']) != 'nan':
seqPropCols = row['seqPropCols'].split(';')
Mark(seqPropCols, 'seqPropCols are as follows:')
seqPropCols = (
[x + '_parallel' for x in seqPropCols] +
[x + '_mix' for x in seqPropCols])
countDistinctCols = []
if str(row['countDistinctCols']) != 'nan':
countDistinctCols = row['countDistinctCols'].split(';')
Mark(countDistinctCols, 'countDistinctCols are as follows:')
Mark(seqPropCols)
sqlTableName = row['sqlTableName']
fileName = row["writePath"] + row["fileName"] + ".csv"
Mark(sqlTableName, 'This is the sql table name you requested.')
## if want to load data through file
seqDf = ReadCsv(fileName)
Mark(seqDf.shape, 'data size (seqDf.shape):')
Mark(seqDf[:2], 'example seq data:')
## if want to load data via SQL (assuming the SQL functions are implemented)
# seqDf2 = ReadSqlTable(table=sqlTableName)
# Mark(seqDf2.shape, 'data size (seqDf.shape):')
# Mark(seqDf2[:2], 'example seq data:')
## step 4: finding the sig sequences which satisfy particular conditions
sliceCols = ['country']
auxSliceCols = ['trimmed_form_factor_parallel', 'trimmed_form_factor_parallel_mix']
## calculate significance
seqDfWithSignif = AddSeqProbCiMetrics(
seqDf=seqDf.copy(),
trim=int(trim),
addCounts=True,
sliceCols=sliceCols,
auxSliceCols=auxSliceCols,
seqCol='seq',
countDistinctCols=countDistinctCols,
seqCountMin=3)
## also calculate penetration:
# need to pass penetItemCols to do that
seqDfWithSignif2 = FindSigSeq_withPenet(
seqDf=seqDf.copy(),
trim=int(trim),
seqCol='seq',
sliceCols=sliceCols,
auxSliceCols=auxSliceCols,
countDistinctCols=countDistinctCols,
penetItemCols=['user_id', 'date'],
seqCountMin=3)
condDict = {
#'country':['JP', 'US', 'FR']
#'trimmed_form_factor_parallel_mix':['COMP']
}
## a dict of values to be matched as regexes
regDict = {}
plt.figure()
Mark(text="SIG PLOTS + PENETRATION PLOT", color='blue', bold=True)
sigDict = Plt_sigSeq_compareSlices(
seqDfWithSignif=seqDfWithSignif2.copy(),
sliceCols=sliceCols,
condDict=condDict,
metricCol='relative_prob2',
metricColLower='relative_prob_lower2',
metricColUpper='relative_prob_upper2',
removeBlankSeqs=True,
relativeProbLowerLim = 1.05,
orderByCol='relative_prob2',
addPenetPlots=True,
seqNumLimit = None,
rotation=0,
logScale=True,
figSize=[8, 8],
saveFig=True,
figPath=figsPath,
figFnPrefix=sqlTableName.replace('.', '_'),
figFnExt='png',
Open=OpenFile)
sigDf = sigDict['df']
if (sigDf is not None):
Mark(x=sigDf.shape, text="sigDf.shape:", color="green", bold=True)
Mark(x=sigDf[:6], text="sigDf snapshot:", color="blue", bold=True)
Write_sigSeqDf(
sigDf=sigDf,
sqlTableName=sqlTableName,
path=tablesPath,
regDict=regDict,
condDict=condDict)
else:
Mark(text='no data was found', color='red')
|
[
"matplotlib.pyplot.figure"
] |
[((5424, 5436), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (5434, 5436), True, 'import matplotlib.pyplot as plt\n')]
|
# Pass in the image URL and the local save path, respectively
def request_download(imgUrl, Path):
import requests
r = requests.get(imgUrl)
with open(Path, 'wb') as f:
f.write(r.content)
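# For large files, a streamed variant (a sketch, not part of the original)
# avoids holding the whole response body in memory:
# def request_download_streamed(imgUrl, Path):
#     import requests
#     with requests.get(imgUrl, stream=True, timeout=30) as r:
#         r.raise_for_status()
#         with open(Path, 'wb') as f:
#             for chunk in r.iter_content(chunk_size=8192):
#                 f.write(chunk)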
if __name__ == "__main__":
request_download('https://ss3.bdstatic.com/70cFv8Sh_Q1YnxGkpoWK1HF6hhy/it/u=2018604370,3101817315&fm=26&gp=0.jpg', 'images/1.jpg')
|
[
"requests.get"
] |
[((80, 100), 'requests.get', 'requests.get', (['imgUrl'], {}), '(imgUrl)\n', (92, 100), False, 'import requests\n')]
|
"""LogRegression trains a logistic regression model implemented by
Scikit-Learn on the given dataset. Before training, the user is
prompted for parameter input. After training, model metrics are
displayed, and the user can make new predictions.
View the documentation at https://manufacturingnet.readthedocs.io/.
"""
import matplotlib.pyplot as plt
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import (accuracy_score, confusion_matrix, make_scorer,
roc_auc_score, roc_curve)
from sklearn.model_selection import (GridSearchCV, cross_val_score,
train_test_split)
class LogRegression:
"""Class framework for logistic regression model."""
def __init__(self, attributes=None, labels=None):
"""Initializes a LogisticRegression object."""
self.attributes = attributes
self.labels = labels
self.test_size = None
self.cv = None
self.graph_results = None
self.fpr = None
self.tpr = None
self.bin = False
self.gridsearch = False
self.gs_params = None
self.gs_result = None
self.regression = None
self.classes = None
self.coefficients = None
self.intercept = None
self.n_iter = None
self.accuracy = None
self.precision = None
self.recall = None
self.roc_auc = None
self.confusion_matrix = None
self.cross_val_scores = None
# Accessor methods
def get_attributes(self):
"""Accessor method for attributes."""
return self.attributes
def get_labels(self):
"""Accessor method for labels."""
return self.labels
def get_classes(self):
"""Accessor method for classes."""
return self.classes
def get_regression(self):
"""Accessor method for regression."""
return self.regression
    def get_coefficients(self):
"""Accessor method for coefficients."""
return self.coefficients
def get_n_iter(self):
"""Accessor method for n_iter."""
return self.n_iter
def get_accuracy(self):
"""Accessor method for accuracy."""
return self.accuracy
def get_roc_auc(self):
"""Accessor method for roc_auc."""
return self.roc_auc
def get_confusion_matrix(self):
"""Accessor method for confusion_matrix."""
return self.confusion_matrix
def get_cross_val_scores(self):
"""Accessor method for cross_val_scores."""
return self.cross_val_scores
# Modifier methods
def set_attributes(self, new_attributes=None):
"""Modifier method for attributes."""
self.attributes = new_attributes
def set_labels(self, new_labels=None):
"""Modifier method for labels."""
self.labels = new_labels
# Wrapper for logistic regression model
def run(self):
"""Performs logistic regression on dataset and updates relevant
instance data.
"""
if self._check_inputs():
# Instantiate LogisticRegression() object using helper method
self.regression = self._create_model()
# Split into training and testing set
dataset_X_train, dataset_X_test, dataset_y_train, dataset_y_test = \
train_test_split(self.attributes, self.labels,
test_size=self.test_size)
# Train the model and get resultant coefficients
# Handle exception if arguments are incorrect
try:
self.regression.fit(dataset_X_train, np.ravel(dataset_y_train))
except Exception as e:
print("An exception occurred while training the regression",
"model. Check your inputs and try again.")
print("Here is the exception message:")
print(e)
self.regression = None
return
# Get resultant model instance data
self.classes = self.regression.classes_
self.coefficients = self.regression.coef_
self.intercept = self.regression.intercept_
self.n_iter = self.regression.n_iter_
# Make predictions using testing set
y_prediction = self.regression.predict(dataset_X_test)
# Metrics
self.accuracy = accuracy_score(y_prediction, dataset_y_test)
probas = self.regression.predict_proba(dataset_X_test)
# If classification is binary, calculate roc_auc
if probas.shape[1] == 2:
self.bin = True
                self.roc_auc = roc_auc_score(dataset_y_test, probas[:, 1])
                self.fpr, self.tpr, _ = roc_curve(
                    dataset_y_test, probas[:, 1])
# Else, calculate confusion matrix
else:
self.confusion_matrix = \
confusion_matrix(dataset_y_test, y_prediction)
self.cross_val_scores = cross_val_score(self.regression,
self.attributes,
self.labels, cv=self.cv)
# Output results
self._output_results()
def predict(self, dataset_X=None):
"""Predicts the output of each datapoint in dataset_X using the
regression model. Returns the predictions.
"""
# Check that run() has already been called
if self.regression is None:
print("The regression model seems to be missing. Have you called",
"run() yet?")
return None
# Try to make the prediction
# Handle exception if dataset_X isn't a valid input
try:
y_prediction = self.regression.predict(dataset_X)
except Exception as e:
print("The model failed to run. Check your inputs and try again.")
print("Here is the exception message:")
print(e)
return None
print("\nLogRegression Predictions:\n", y_prediction, "\n")
return y_prediction
# Helper methods
def _create_model(self):
"""Runs UI for getting parameters and creating model."""
print("\n==================================")
print("= LogRegression Parameter Inputs =")
print("==================================\n")
print("Default values:",
"test_size = 0.25",
"cv = 5",
"graph_results = False",
"penalty = 'l2'",
"dual = False",
"tol = 0.0001",
"C = 1.0",
"fit_intercept = True",
"intercept_scaling = 1",
"class_weight = None",
"random_state = None",
"solver = 'lbfgs'",
"max_iter = 100",
"multi_class = 'auto'",
"verbose = False",
"warm_start = False",
"n_jobs = None",
"l1_ratio = None", sep="\n")
# Set defaults
self.test_size = 0.25
self.cv = None
self.graph_results = False
while True:
user_input = input("\nUse default parameters (Y/n)? ").lower()
if user_input in {"y", ""}:
print("\n===========================================")
print("= End of inputs; press enter to continue. =")
input("===========================================\n")
return LogisticRegression()
elif user_input == "n":
break
else:
print("Invalid input.")
print("\nIf you are unsure about a parameter, press enter to use its",
"default value.")
print("If you finish entering parameters early, enter 'q' to skip",
"ahead.\n")
# Set more defaults
penalty = "l2"
dual = False
tol = 0.0001
C = 1.0
fit_intercept = True
intercept_scaling = 1
class_weight = None
random_state = None
solver = "lbfgs"
max_iter = 100
multi_class = "auto"
verbose = 0
warm_start = False
n_jobs = None
l1_ratio = None
# Get user parameter input
while True:
break_early = False
while True:
user_input = input("\nWhat fraction of the dataset should be the "
+ "testing set (0,1)? ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0 or user_input >= 1:
raise Exception
self.test_size = user_input
break
except Exception:
print("Invalid input.")
print("test_size =", self.test_size)
if break_early:
break
while True:
user_input = input("\nUse GridSearch to find the best "
+ "hyperparameters (y/N)? ").lower()
if user_input == "q":
break_early = True
break
elif user_input in {"n", "y", ""}:
break
else:
print("Invalid input.")
if break_early:
break
while user_input == "y":
print("\n= GridSearch Parameter Inputs =\n")
print("Enter 'q' to skip GridSearch.")
self.gridsearch = True
params = {}
print("\nWarnings:")
print("Solvers 'lbfgs', 'newton-cg', 'sag', and 'saga' support",
"only 'l2' or no penalty.")
print("Solver 'liblinear' requires a penalty.")
print("Penalty 'elasticnet' is only supported by the",
"'saga' solver.")
print("Failing to heed these warnings may crash GridSearch!")
while True:
print("\nEnter the classifier penalties to evaluate.")
print("Options: 1-'l1', 2-'l2', 3-'elasticnet'. Enter 'all'",
"for all options.")
print("Example input: 1,2,3")
user_input = input().lower()
if user_input == "q":
self.gridsearch = False
break_early = True
break
elif user_input == "all":
pen_params = ["l1", "l2", "elasticnet"]
break
else:
pen_dict = {1: "l1", 2: "l2", 3: "elasticnet"}
try:
pen_params_int = \
list(map(int, list(user_input.split(","))))
if len(pen_params_int) > len(pen_dict):
raise Exception
pen_params = []
for each in pen_params_int:
if not pen_dict.get(each):
raise Exception
pen_params.append(pen_dict.get(each))
break
except Exception:
print("Invalid input.")
if break_early:
break
params["penalty"] = pen_params
print("penalties:", pen_params)
while True:
print("\nEnter the solvers to evaluate.")
print("Options: 1-'newton-cg', 2-'lbfgs', 3-'liblinear',",
"4-'sag', 5-'saga'. Enter 'all' for all options.")
print("Example input: 1,2,3")
user_input = input().lower()
if user_input == "q":
self.gridsearch = False
break_early = True
break
elif user_input == "all":
sol_params = ["newton-cg", "lbfgs", "liblinear", "sag",
"saga"]
break
else:
sol_dict = {1: "newton-cg", 2: "lbfgs", 3: "liblinear",
4: "sag", 5: "saga"}
try:
sol_params_int = \
list(map(int, list(user_input.split(","))))
if len(sol_params_int) > len(sol_dict):
raise Exception
sol_params = []
for each in sol_params_int:
if not sol_dict.get(each):
raise Exception
sol_params.append(sol_dict.get(each))
break
except Exception:
print("Invalid input.")
if break_early:
break
params["solver"] = sol_params
print("solvers:", sol_params)
print("\n= End of GridSearch inputs. =\n")
self.gs_params = params
best_params = self._run_gridsearch()
solver = best_params["solver"]
penalty = best_params["penalty"]
break
break_early = False
while True:
user_input = input("\nEnter the number of folds for cross "
+ "validation [2,): ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = int(user_input)
if user_input < 2:
raise Exception
self.cv = user_input
break
except Exception:
print("Invalid input.")
print("cv =", self.cv)
if break_early:
break
while True:
user_input = \
input("\nGraph the ROC curve? Only binary classification "
+ "is supported (y/N): ").lower()
if user_input == "y":
self.graph_results = True
break
elif user_input in {"n", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("graph_results =", self.graph_results)
if break_early:
break
while not self.gridsearch:
print("\nWhich algorithm should be used in the optimization",
"problem?")
user_input = input("Enter 1 for 'newton-cg', 2 for 'lbfgs', 3 "
+ "for 'liblinear', 4 for 'sag', or 5 for "
+ "'saga': ").lower()
if user_input == "1":
solver = "newton-cg"
break
elif user_input == "3":
solver = "liblinear"
break
elif user_input == "4":
solver = "sag"
break
elif user_input == "5":
solver = "saga"
break
elif user_input in {"2", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
if not self.gridsearch:
print("solver =", solver)
if break_early:
break
while not self.gridsearch:
print("\nWhich norm should be used in penalization?")
user_input = input("Enter 1 for 'l1', 2 for 'l2', 3 for "
+ "'elasticnet', or 4 for 'none': ").lower()
if solver in {"newton-cg", "lbfgs", "sag"} \
and user_input not in {"2", "4"}:
print("Invalid input.")
print("Solvers 'newton-cg', 'sag', and 'lbfgs' support",
"only 'l2' or no penalty.")
continue
if user_input == "3" and solver != "saga":
print("Invalid input.")
print("'elasticnet' is only supported by the 'saga' solver.")
continue
if user_input == "4" and solver == "liblinear":
print("Invalid input.")
print("Solver 'liblinear' requires a penalty.")
continue
if user_input == "1":
penalty = "l1"
break
elif user_input == "3":
penalty = "elasticnet"
break
elif user_input == "4":
penalty = "none"
break
elif user_input in {"2", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
if not self.gridsearch:
print("penalty =", penalty)
if break_early:
break
while True:
user_input = input("\nUse dual formulation (y/N)? ").lower()
if user_input == "y":
dual = True
break
elif user_input in {"n", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("dual =", dual)
if break_early:
break
while True:
user_input = input("\nEnter a positive number for the tolerance "
+ "for stopping criteria: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
tol = user_input
break
except Exception:
print("Invalid input.")
print("tol =", tol)
if break_early:
break
while True:
user_input = input("\nEnter a positive number for the inverse "
+ "of regularization strength C: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = float(user_input)
if user_input <= 0:
raise Exception
C = user_input
break
except Exception:
print("Invalid input.")
print("C =", C)
if break_early:
break
while True:
user_input = \
input("\nInclude a y-intercept in the model (Y/n)? ").lower()
if user_input == "n":
fit_intercept = False
break
elif user_input in {"y", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("fit_intercept =", fit_intercept)
if break_early:
break
while fit_intercept:
user_input = input("\nEnter a number for the intercept "
+ "scaling factor: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
intercept_scaling = float(user_input)
break
except Exception:
print("Invalid input.")
if fit_intercept:
print("intercept_scaling =", intercept_scaling)
if break_early:
break
while True:
user_input = input("\nAutomatically balance the class weights "
+ "(y/N)? ").lower()
if user_input == "y":
class_weight = "balanced"
break
elif user_input in {"n", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("class_weight =", class_weight)
if break_early:
break
print("\nTo set manual weights, call",
"get_regression().set_params() to set the class_weight",
"parameter.")
while True:
user_input = \
input("\nEnter an integer for the random number seed: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
random_state = int(user_input)
break
except Exception:
print("Invalid input.")
print("random_state =", random_state)
if break_early:
break
while True:
user_input = \
input("\nEnter a positive maximum number of iterations: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = int(user_input)
if user_input <= 0:
raise Exception
max_iter = user_input
break
except Exception:
print("Invalid input.")
print("max_iter =", max_iter)
if break_early:
break
while True:
print("\nPlease choose a multiclass scheme.")
user_input = input("Enter 1 for one-vs-rest, 2 for multinomial, "
+ "or 3 to automatically choose: ").lower()
if user_input == "1":
multi_class = "ovr"
break
elif user_input == "2":
multi_class = "multinomial"
break
elif user_input in {"3", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("multi_class =", multi_class)
if break_early:
break
while True:
user_input = input("\nEnable verbose output during training "
+ "(y/N)? ").lower()
if user_input == "y":
verbose = 1
break
elif user_input in {"n", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("verbose =", bool(verbose))
if break_early:
break
while True:
user_input = \
input("\nEnable warm start? This will use the previous "
+ "solution for fitting (y/N): ").lower()
if user_input == "y":
warm_start = True
break
elif user_input in {"n", ""}:
break
elif user_input == "q":
break_early = True
break
else:
print("Invalid input.")
print("warm_start =", warm_start)
if break_early:
break
while multi_class == "ovr":
print("\nEnter a positive number of CPU cores to use.")
user_input = input("Enter -1 to use all cores: ")
try:
if user_input == "":
break
elif user_input.lower() == "q":
break_early = True
break
user_input = int(user_input)
if user_input <= 0 and user_input != -1:
raise Exception
n_jobs = user_input
break
except Exception:
print("Invalid input.")
if multi_class == "ovr":
print("n_jobs =", n_jobs)
if break_early:
break
while penalty == "elasticnet":
user_input = input("\nEnter a decimal for the Elastic-Net "
+ "mixing parameter [0,1]: ")
try:
if user_input.lower() in {"q", ""}:
break
user_input = float(user_input)
if user_input < 0 or user_input > 1:
raise Exception
l1_ratio = user_input
break
except Exception:
print("Invalid input.")
if penalty == "elasticnet":
print("l1_ratio =", l1_ratio)
break
print("\n===========================================")
print("= End of inputs; press enter to continue. =")
input("===========================================\n")
return LogisticRegression(penalty=penalty, dual=dual, tol=tol, C=C,
fit_intercept=fit_intercept,
intercept_scaling=intercept_scaling,
class_weight=class_weight,
random_state=random_state, solver=solver,
max_iter=max_iter, multi_class=multi_class,
verbose=verbose, warm_start=warm_start,
n_jobs=n_jobs, l1_ratio=l1_ratio)
def _output_results(self):
"""Outputs model metrics after run() finishes."""
print("\n=========================")
print("= LogRegression Results =")
print("=========================\n")
print("Classes:\n", self.classes)
print("\nNumber of Iterations:\n", self.n_iter)
print("\n{:<20} {:<20}".format("Accuracy:", self.accuracy))
if self.bin:
print("\n{:<20} {:<20}".format("ROC AUC:", self.roc_auc))
else:
print("\nConfusion Matrix:\n", self.confusion_matrix)
print("\nCross Validation Scores: ", self.cross_val_scores)
if self.gridsearch:
print("\n{:<20} {:<20}".format("GridSearch Score:",
self.gs_result))
if self.bin and self.graph_results:
plt.plot(self.fpr, self.tpr)
plt.xlabel("False Positive Rate")
plt.ylabel("True Positive Rate")
plt.title("ROC Curve")
plt.legend(loc=4)
plt.show()
print("\n\nCall predict() to make predictions for new data.")
print("\n===================")
print("= End of results. =")
print("===================\n")
def _run_gridsearch(self):
"""Runs GridSearch with the parameters given in run(). Returns
the best parameters."""
acc_scorer = make_scorer(accuracy_score)
clf = LogisticRegression()
dataset_X_train, dataset_X_test, dataset_y_train, dataset_y_test = \
train_test_split(self.attributes, self.labels,
test_size=self.test_size)
# Run GridSearch
grid_obj = GridSearchCV(clf, self.gs_params, scoring=acc_scorer)
grid_obj = grid_obj.fit(dataset_X_train, dataset_y_train)
# Set the clf to the best combination of parameters
clf = grid_obj.best_estimator_
# Fit the best algorithm to the data
clf.fit(dataset_X_train, dataset_y_train)
predictions = clf.predict(dataset_X_test)
self.gs_result = accuracy_score(dataset_y_test, predictions)
# Return the best parameters
print("\nBest GridSearch Parameters:\n", grid_obj.best_params_, "\n")
return grid_obj.best_params_
def _check_inputs(self):
"""Verifies if the instance data is ready for use in logistic
regression model.
"""
# Check if attributes exists
if self.attributes is None:
print("attributes is missing; call set_attributes(new_attributes)",
"to fix this! new_attributes should be a populated numpy",
"array of your independent variables.")
return False
# Check if labels exists
if self.labels is None:
print("labels is missing; call set_labels(new_labels) to fix this!",
"new_labels should be a populated numpy array of your",
"dependent variables.")
return False
# Check if attributes and labels have same number of rows (samples)
if self.attributes.shape[0] != self.labels.shape[0]:
print("attributes and labels don't have the same number of rows.",
"Make sure the number of samples in each dataset matches!")
return False
return True
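# Minimal usage sketch (synthetic data; run() prompts interactively for
# parameters before training):
# import numpy as np
# X = np.random.rand(200, 3)
# y = (X[:, 0] + X[:, 1] > 1).astype(int)
# model = LogRegression(attributes=X, labels=y)
# model.run()
# model.predict(X[:5])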
|
[
"matplotlib.pyplot.title",
"sklearn.model_selection.GridSearchCV",
"sklearn.metrics.confusion_matrix",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"sklearn.metrics.roc_curve",
"numpy.ravel",
"sklearn.model_selection.train_test_split",
"sklearn.metrics.accuracy_score",
"sklearn.model_selection.cross_val_score",
"matplotlib.pyplot.legend",
"sklearn.metrics.make_scorer",
"sklearn.linear_model.LogisticRegression",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel"
] |
[((27414, 27747), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'penalty': 'penalty', 'dual': 'dual', 'tol': 'tol', 'C': 'C', 'fit_intercept': 'fit_intercept', 'intercept_scaling': 'intercept_scaling', 'class_weight': 'class_weight', 'random_state': 'random_state', 'solver': 'solver', 'max_iter': 'max_iter', 'multi_class': 'multi_class', 'verbose': 'verbose', 'warm_start': 'warm_start', 'n_jobs': 'n_jobs', 'l1_ratio': 'l1_ratio'}), '(penalty=penalty, dual=dual, tol=tol, C=C, fit_intercept=\n fit_intercept, intercept_scaling=intercept_scaling, class_weight=\n class_weight, random_state=random_state, solver=solver, max_iter=\n max_iter, multi_class=multi_class, verbose=verbose, warm_start=\n warm_start, n_jobs=n_jobs, l1_ratio=l1_ratio)\n', (27432, 27747), False, 'from sklearn.linear_model import LogisticRegression\n'), ((29358, 29385), 'sklearn.metrics.make_scorer', 'make_scorer', (['accuracy_score'], {}), '(accuracy_score)\n', (29369, 29385), False, 'from sklearn.metrics import accuracy_score, confusion_matrix, make_scorer, roc_auc_score, roc_curve\n'), ((29400, 29420), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (29418, 29420), False, 'from sklearn.linear_model import LogisticRegression\n'), ((29510, 29582), 'sklearn.model_selection.train_test_split', 'train_test_split', (['self.attributes', 'self.labels'], {'test_size': 'self.test_size'}), '(self.attributes, self.labels, test_size=self.test_size)\n', (29526, 29582), False, 'from sklearn.model_selection import GridSearchCV, cross_val_score, train_test_split\n'), ((29657, 29710), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['clf', 'self.gs_params'], {'scoring': 'acc_scorer'}), '(clf, self.gs_params, scoring=acc_scorer)\n', (29669, 29710), False, 'from sklearn.model_selection import GridSearchCV, cross_val_score, train_test_split\n'), ((30048, 30091), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['dataset_y_test', 'predictions'], {}), '(dataset_y_test, predictions)\n', (30062, 30091), False, 'from sklearn.metrics import accuracy_score, confusion_matrix, make_scorer, roc_auc_score, roc_curve\n'), ((3380, 3452), 'sklearn.model_selection.train_test_split', 'train_test_split', (['self.attributes', 'self.labels'], {'test_size': 'self.test_size'}), '(self.attributes, self.labels, test_size=self.test_size)\n', (3396, 3452), False, 'from sklearn.model_selection import GridSearchCV, cross_val_score, train_test_split\n'), ((4452, 4496), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_prediction', 'dataset_y_test'], {}), '(y_prediction, dataset_y_test)\n', (4466, 4496), False, 'from sklearn.metrics import accuracy_score, confusion_matrix, make_scorer, roc_auc_score, roc_curve\n'), ((5165, 5239), 'sklearn.model_selection.cross_val_score', 'cross_val_score', (['self.regression', 'self.attributes', 'self.labels'], {'cv': 'self.cv'}), '(self.regression, self.attributes, self.labels, cv=self.cv)\n', (5180, 5239), False, 'from sklearn.model_selection import GridSearchCV, cross_val_score, train_test_split\n'), ((28807, 28835), 'matplotlib.pyplot.plot', 'plt.plot', (['self.fpr', 'self.tpr'], {}), '(self.fpr, self.tpr)\n', (28815, 28835), True, 'import matplotlib.pyplot as plt\n'), ((28848, 28881), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""False Positive Rate"""'], {}), "('False Positive Rate')\n", (28858, 28881), True, 'import matplotlib.pyplot as plt\n'), ((28894, 28926), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""True Positive Rate"""'], {}), "plt.ylabel('True Positive Rate')\n", (28904, 28926), True, 'import matplotlib.pyplot as plt\n'), ((28939, 28961), 'matplotlib.pyplot.title', 'plt.title', (['"""ROC Curve"""'], {}), "('ROC Curve')\n", (28948, 28961), True, 'import matplotlib.pyplot as plt\n'), ((28974, 28991), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(4)'}), '(loc=4)\n', (28984, 28991), True, 'import matplotlib.pyplot as plt\n'), ((29004, 29014), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (29012, 29014), True, 'import matplotlib.pyplot as plt\n'), ((4892, 4931), 'sklearn.metrics.roc_curve', 'roc_curve', (['dataset_y_test', 'probas[:, 1]'], {}), '(dataset_y_test, probas[:, 1])\n', (4901, 4931), False, 'from sklearn.metrics import accuracy_score, confusion_matrix, make_scorer, roc_auc_score, roc_curve\n'), ((5081, 5127), 'sklearn.metrics.confusion_matrix', 'confusion_matrix', (['dataset_y_test', 'y_prediction'], {}), '(dataset_y_test, y_prediction)\n', (5097, 5127), False, 'from sklearn.metrics import accuracy_score, confusion_matrix, make_scorer, roc_auc_score, roc_curve\n'), ((7687, 7707), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (7705, 7707), False, 'from sklearn.linear_model import LogisticRegression\n'), ((3676, 3701), 'numpy.ravel', 'np.ravel', (['dataset_y_train'], {}), '(dataset_y_train)\n', (3684, 3701), True, 'import numpy as np\n')]
|
from __future__ import division
from builtins import str
import numpy
import os
import sys
import logging
from ektelo.algorithm.dawa.cutils import cutil
from ektelo.algorithm.dawa.partition_engines import partition_engine
from ektelo import util
class l1partition_engine(partition_engine.partition_engine):
"""Use the L1 partition method."""
def __init__(self):
self.init_params = util.init_params_from_locals(locals())
@staticmethod
def Run(x, epsilon, ratio,seed):
return L1partition(x, epsilon, ratio, gethist=True,seed =seed)
class l1partition_approx_engine(partition_engine.partition_engine):
"""Use the approximate L1 partition method."""
def __init__(self):
self.init_params = util.init_params_from_locals(locals())
@staticmethod
def Run(x, epsilon, ratio,seed):
return L1partition_approx(x, epsilon, ratio, gethist=True,seed = seed)
def L1partition(x, epsilon, ratio=0.5, gethist=False,seed=None):
"""Compute the noisy L1 histogram using all interval buckets
Args:
x - list of numeric values. The input data vector
epsilon - double. Total private budget
        ratio - double in (0, 1). Use ratio*epsilon for partition computation and (1-ratio)*epsilon for querying
        the count in each partition
        gethist - boolean. If set to True, return the partition directly (the privacy budget used is still ratio*epsilon)
Return:
if gethist == False, return an estimated data vector. Otherwise, return the partition
"""
assert seed is not None, "seed must be set"
prng = numpy.random.RandomState(seed)
assert (x.dtype == numpy.dtype(int) or x.dtype == numpy.dtype("int32")), "Input vector must be int! %s given" %x.dtype
    y = x.astype('int32')  # numpy's default int dtype is not JSON serializable
    check = (x == y)
    assert check.sum() == len(check), "Casting error from int to int32"
    x = y
n = len(x)
hist = cutil.L1partition(n+1, x, epsilon, ratio, prng.randint(500000))
hatx = numpy.zeros(n)
rb = n
if gethist:
bucks = []
for lb in hist[1:]:
bucks.insert(0, [lb, rb-1])
rb = lb
if lb == 0:
break
logging.debug(' L1-PART: number of buckets %s' % str(bucks[:5]) )
return bucks
else:
for lb in hist[1:]:
hatx[lb:rb] = util.old_div(max(0, sum(x[lb:rb]) + prng.laplace(0, util.old_div(1.0,(epsilon*(1-ratio))), 1)), float(rb - lb))
rb = lb
if lb == 0:
break
return hatx
def L1partition_approx(x, epsilon, ratio=0.5, gethist=False,seed =None):
"""Compute the noisy L1 histogram using interval buckets of size 2^k
Args:
x - list of numeric values. The input data vector
epsilon - double. Total private budget
        ratio - double in (0, 1). Use ratio*epsilon for partition computation and (1-ratio)*epsilon for querying
        the count in each partition
        gethist - boolean. If set to True, return the partition directly (the privacy budget used is still ratio*epsilon)
Return:
if gethist == False, return an estimated data vector. Otherwise, return the partition
"""
assert seed is not None, "seed must be set"
prng = numpy.random.RandomState(seed)
n = len(x)
# check that the input vector x is of appropriate type
assert (x.dtype == numpy.dtype(int) or x.dtype == numpy.dtype("int32")), "Input vector must be int! %s given" %x.dtype
    y = x.astype('int32')  # numpy's default int dtype is not JSON serializable
    check = (x == y)
    assert check.sum() == len(check), "Casting error from int to int32"
    x = y
hist = cutil.L1partition_approx(n+1, x, epsilon, ratio, prng.randint(500000))
hatx = numpy.zeros(n)
rb = n
if gethist:
bucks = []
for lb in hist[1:]:
bucks.insert(0, [lb, rb-1])
rb = lb
if lb == 0:
break
return bucks
else:
for lb in hist[1:]:
hatx[lb:rb] = util.old_div(max(0, sum(x[lb:rb]) + prng.laplace(0, util.old_div(1.0,(epsilon*(1-ratio))), 1)), float(rb - lb))
rb = lb
if lb == 0:
break
return hatx
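if __name__ == "__main__":
    # Illustrative usage sketch (added; assumes the compiled ektelo `cutil`
    # extension is importable): partition a tiny integer histogram and print
    # the resulting [left, right] buckets.
    demo = numpy.array([5, 5, 6, 0, 0, 0, 9, 9], dtype=numpy.int32)
    print(L1partition(demo, epsilon=1.0, ratio=0.5, gethist=True, seed=42))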
|
[
"numpy.dtype",
"numpy.zeros",
"numpy.random.RandomState",
"ektelo.util.old_div",
"builtins.str"
] |
[((1623, 1653), 'numpy.random.RandomState', 'numpy.random.RandomState', (['seed'], {}), '(seed)\n', (1647, 1653), False, 'import numpy\n'), ((2051, 2065), 'numpy.zeros', 'numpy.zeros', (['n'], {}), '(n)\n', (2062, 2065), False, 'import numpy\n'), ((3345, 3375), 'numpy.random.RandomState', 'numpy.random.RandomState', (['seed'], {}), '(seed)\n', (3369, 3375), False, 'import numpy\n'), ((3839, 3853), 'numpy.zeros', 'numpy.zeros', (['n'], {}), '(n)\n', (3850, 3853), False, 'import numpy\n'), ((1678, 1694), 'numpy.dtype', 'numpy.dtype', (['int'], {}), '(int)\n', (1689, 1694), False, 'import numpy\n'), ((1709, 1729), 'numpy.dtype', 'numpy.dtype', (['"""int32"""'], {}), "('int32')\n", (1720, 1729), False, 'import numpy\n'), ((3474, 3490), 'numpy.dtype', 'numpy.dtype', (['int'], {}), '(int)\n', (3485, 3490), False, 'import numpy\n'), ((3505, 3525), 'numpy.dtype', 'numpy.dtype', (['"""int32"""'], {}), "('int32')\n", (3516, 3525), False, 'import numpy\n'), ((2304, 2318), 'builtins.str', 'str', (['bucks[:5]'], {}), '(bucks[:5])\n', (2307, 2318), False, 'from builtins import str\n'), ((2477, 2517), 'ektelo.util.old_div', 'util.old_div', (['(1.0)', '(epsilon * (1 - ratio))'], {}), '(1.0, epsilon * (1 - ratio))\n', (2489, 2517), False, 'from ektelo import util\n'), ((4186, 4226), 'ektelo.util.old_div', 'util.old_div', (['(1.0)', '(epsilon * (1 - ratio))'], {}), '(1.0, epsilon * (1 - ratio))\n', (4198, 4226), False, 'from ektelo import util\n')]
|
import pysolr
class InvalidPagingConfigError(RuntimeError):
def __init__(self, message):
super(RuntimeError, self).__init__(message)
class _SolrCursorIter:
""" Cursor-based iteration, most performant. Requires a sort on id somewhere
in required "sort" argument.
This is recommended approach for iterating docs in a Solr collection
"""
def __init__(self, solr_conn, query, sort='id desc', **options):
self.query = query
self.solr_conn = solr_conn
self.lastCursorMark = ''
self.cursorMark = '*'
self.sort = sort
try:
self.rows = options['rows']
del options['rows']
except KeyError:
self.rows = 0
self.options = options
self.max = None
self.docs = None
def __iter__(self):
response = self.solr_conn.search(self.query, rows=0, **self.options)
self.max = response.hits
return self
def __next__(self):
try:
if self.docs is not None:
try:
return next(self.docs)
except StopIteration:
self.docs = None
if self.docs is None:
if self.lastCursorMark != self.cursorMark:
response = self.solr_conn.search(self.query, rows=self.rows,
cursorMark=self.cursorMark,
sort=self.sort,
**self.options)
self.docs = iter(response.docs)
self.lastCursorMark = self.cursorMark
self.cursorMark = response.nextCursorMark
return next(self.docs)
else:
raise StopIteration()
except pysolr.SolrError as e:
print(e)
if "Cursor" in e.message:
raise InvalidPagingConfigError(e.message)
raise e
class _SolrPagingIter:
""" Traditional search paging, most flexible but will
gradually get slower on each request due to deep-paging
See graph here:
http://opensourceconnections.com/blog/2014/07/13/reindexing-collections-with-solrs-cursor-support/
"""
def __init__(self, solr_conn, query, **options):
self.current = 0
self.query = query
self.solr_conn = solr_conn
try:
self.rows = options['rows']
del options['rows']
except KeyError:
self.rows = 0
self.options = options
self.max = None
self.docs = None
def __iter__(self):
response = self.solr_conn.search(self.query, rows=0, **self.options)
self.max = response.hits
return self
def __next__(self):
if self.docs is not None:
try:
                return next(self.docs)
except StopIteration:
self.docs = None
if self.docs is None:
if self.current * self.rows < self.max:
self.current += 1
response = self.solr_conn.search(self.query, rows=self.rows,
start=(self.current - 1) * self.rows,
**self.options)
self.docs = iter(response.docs)
return next(self.docs)
else:
raise StopIteration()
SolrDocs = _SolrCursorIter # recommended, see note for SolrCursorIter
SlowSolrDocs = _SolrPagingIter
def parse_args():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('solr_url',
type=str)
parser.add_argument('--query',
type=str,
default='*:*')
parser.add_argument('--sort',
type=str,
default='id desc')
parser.add_argument('--fields',
type=str,
default='')
parser.add_argument('--batch_size',
type=int,
default=500)
parser.add_argument('--dest',
type=argparse.FileType('w'))
return vars(parser.parse_args())
if __name__ == "__main__":
args = parse_args()
solr_conn = pysolr.Solr(args['solr_url'])
solr_fields = args['fields'].split() if args['fields'] else ''
solr_itr = SolrDocs(solr_conn, args['query'], rows=args['batch_size'], sort=args['sort'], fl=solr_fields)
destFile = args['dest']
import json
numDocs = 0
for doc in solr_itr:
destFile.write(json.dumps(doc) + "\n")
numDocs += 1
if (numDocs % 1000 == 0):
print("Wrote %s docs" % numDocs)
print("Wrote %s docs" % numDocs)
|
[
"pysolr.Solr",
"argparse.ArgumentParser",
"json.dumps",
"argparse.FileType"
] |
[((3660, 3685), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3683, 3685), False, 'import argparse\n'), ((4392, 4421), 'pysolr.Solr', 'pysolr.Solr', (["args['solr_url']"], {}), "(args['solr_url'])\n", (4403, 4421), False, 'import pysolr\n'), ((4260, 4282), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {}), "('w')\n", (4277, 4282), False, 'import argparse\n'), ((4709, 4724), 'json.dumps', 'json.dumps', (['doc'], {}), '(doc)\n', (4719, 4724), False, 'import json\n')]
|
import sys
from itertools import combinations
from helpers import as_list_ints
containers = as_list_ints('2015/day17/input.txt')
# containers = as_list_ints('2015/day17/example-input.txt')
total = 150
count = 0
min_containers = sys.maxsize
min_count = 0
for i in range(1, len(containers) + 1):
for c in combinations(containers, i):
liquid = sum(c)
if liquid == total:
if i <= min_containers:
min_containers = i
min_count += 1
count += 1
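# Note (added): because i increases monotonically, `i <= min_containers` only
# matches while i equals the smallest working combination size, so min_count
# ends up counting only the minimum-size combinations.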
print('2015 Day 17 Part 1')
print(count)
print('2015 Day 17 Part 2')
print(min_containers, min_count)
|
[
"itertools.combinations",
"helpers.as_list_ints"
] |
[((94, 130), 'helpers.as_list_ints', 'as_list_ints', (['"""2015/day17/input.txt"""'], {}), "('2015/day17/input.txt')\n", (106, 130), False, 'from helpers import as_list_ints\n'), ((303, 330), 'itertools.combinations', 'combinations', (['containers', 'i'], {}), '(containers, i)\n', (315, 330), False, 'from itertools import combinations\n')]
|
import os, io, csv, json
import requests, argparse
import pandas as pd
import numpy as np
from ast import literal_eval
from datetime import datetime
from panoptes_client import Project, Panoptes
from collections import OrderedDict, Counter
from sklearn.cluster import DBSCAN
import kso_utils.db_utils as db_utils
from kso_utils.zooniverse_utils import auth_session
def bb_iou(boxA, boxB):
# Compute edges
temp_boxA = boxA.copy()
temp_boxB = boxB.copy()
temp_boxA[2], temp_boxA[3] = (
temp_boxA[0] + temp_boxA[2],
temp_boxA[1] + temp_boxA[3],
)
temp_boxB[2], temp_boxB[3] = (
temp_boxB[0] + temp_boxB[2],
temp_boxB[1] + temp_boxB[3],
)
# determine the (x, y)-coordinates of the intersection rectangle
xA = max(temp_boxA[0], temp_boxB[0])
yA = max(temp_boxA[1], temp_boxB[1])
xB = min(temp_boxA[2], temp_boxB[2])
yB = min(temp_boxA[3], temp_boxB[3])
# compute the area of intersection rectangle
interArea = abs(max((xB - xA, 0)) * max((yB - yA), 0))
    if interArea == 0:
        # boxes do not overlap: maximum distance
        return 1
# compute the area of both the prediction and ground-truth
# rectangles
boxAArea = abs((temp_boxA[2] - temp_boxA[0]) * (temp_boxA[3] - temp_boxA[1]))
boxBArea = abs((temp_boxB[2] - temp_boxB[0]) * (temp_boxB[3] - temp_boxB[1]))
# compute the intersection over union by taking the intersection
# area and dividing it by the sum of prediction + ground-truth
# areas - the intersection area
iou = interArea / float(boxAArea + boxBArea - interArea)
    # return the IoU distance (1 - IoU): 0.0 for identical boxes, 1 for
    # disjoint ones, so the value can be used directly as a DBSCAN metric
    return 1 - iou
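# Illustrative checks (added): boxes are [x, y, w, h]; identical boxes are at
# distance 0.0 and disjoint boxes at distance 1.
#     >>> bb_iou([0, 0, 10, 10], [0, 0, 10, 10])
#     0.0
#     >>> bb_iou([0, 0, 10, 10], [20, 20, 5, 5])
#     1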
def filter_bboxes(total_users, users, bboxes, obj, eps, iua):
# If at least half of those who saw this frame decided that there was an object
user_count = pd.Series(users).nunique()
if user_count / total_users >= obj:
# Get clusters of annotation boxes based on iou criterion
cluster_ids = DBSCAN(min_samples=1, metric=bb_iou, eps=eps).fit_predict(bboxes)
# Count the number of users within each cluster
counter_dict = Counter(cluster_ids)
# Accept a cluster assignment if at least 80% of users agree on annotation
passing_ids = [k for k, v in counter_dict.items() if v / user_count >= iua]
indices = np.isin(cluster_ids, passing_ids)
final_boxes = []
for i in passing_ids:
# Compute median over all accepted bounding boxes
boxes = np.median(np.array(bboxes)[np.where(cluster_ids == i)], axis=0)
final_boxes.append(boxes)
return indices, final_boxes
else:
return [], bboxes
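# Illustrative sketch (added): two of three users draw nearly the same box and
# the third is an outlier; the DBSCAN/IoU consensus keeps only the agreeing
# cluster (the obj/eps/iua thresholds here are chosen for the example).
#     indices, boxes = filter_bboxes(
#         total_users=3, users=["a", "b", "c"],
#         bboxes=[np.array([10, 10, 50, 50]), np.array([11, 9, 49, 51]),
#                 np.array([200, 200, 20, 20])],
#         obj=0.8, eps=0.5, iua=0.5)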
def main():
"Handles argument parsing and launches the correct function."
parser = argparse.ArgumentParser()
parser.add_argument(
"--user", "-u", help="Zooniverse username", type=str, required=True
)
parser.add_argument(
"--password", "-p", help="Zooniverse password", type=str, required=True
)
parser.add_argument(
"-db",
"--db_path",
type=str,
help="the absolute path to the database file",
default=r"koster_lab.db",
required=True,
)
parser.add_argument(
"-obj",
"--object_thresh",
type=float,
help="Agreement threshold required among different users",
default=0.8,
)
parser.add_argument(
"-zw",
"--zoo_workflow",
type=float,
help="Number of the Zooniverse workflow of interest",
default=12852,
required=False,
)
parser.add_argument(
"-zwv",
"--zoo_workflow_version",
type=float,
help="Version number of the Zooniverse workflow of interest",
default=21.85,
required=False,
)
parser.add_argument(
"-eps",
"--iou_epsilon",
type=float,
help="threshold of iou for clustering",
default=0.5,
)
parser.add_argument(
"-iua",
"--inter_user_agreement",
type=float,
help="proportion of users agreeing on clustering",
default=0.8,
)
parser.add_argument(
"-nu",
"--n_users",
type=float,
help="Minimum number of different Zooniverse users required per clip",
default=5,
required=False,
)
parser.add_argument(
"-du",
"--duplicates_file_id",
help="Google drive id of list of duplicated subjects",
type=str,
required=False,
)
args = parser.parse_args()
project = auth_session(args.user, args.password)
# Get the export classifications
export = project.get_export("classifications")
# Save the response as pandas data frame
rawdata = pd.read_csv(
io.StringIO(export.content.decode("utf-8")),
usecols=[
"user_name",
"subject_ids",
"subject_data",
"classification_id",
"workflow_id",
"workflow_version",
"created_at",
"annotations",
],
)
# Filter w2 classifications
w2_data = rawdata[
(rawdata.workflow_id == args.zoo_workflow)
& (rawdata.workflow_version >= args.zoo_workflow_version)
].reset_index()
# Clear duplicated subjects
if args.duplicates_file_id:
w2_data = db_utils.combine_duplicates(w2_data, args.duplicates_file_id)
    # Drop the duplicate-bookkeeping helper columns added by combine_duplicates
w2_data = w2_data.drop(['dupl_subject_id', 'single_subject_id'], 1)
## Check if subjects have been uploaded
# Get species id for each species
conn = db_utils.create_connection(args.db_path)
# Get subject table
uploaded_subjects = pd.read_sql_query(
"SELECT id FROM subjects WHERE subject_type='frame'", conn
)
# Add frame subjects to db that have not been uploaded
new_subjects = w2_data[(~w2_data.subject_ids.isin(uploaded_subjects))]
new_subjects["subject_dict"] = new_subjects["subject_data"].apply(lambda x: [v["retired"] for k,v in json.loads(x).items()][0])
new_subjects = new_subjects[~new_subjects.subject_dict.isnull()].drop("subject_dict", 1)
if len(new_subjects) > 0 and args.zoo_workflow_version > 30:
# Get info of subjects uploaded to the project
export = project.get_export("subjects")
# Save the subjects info as pandas data frame
subjects_df = pd.read_csv(
io.StringIO(export.content.decode("utf-8")),
usecols=["subject_id", "subject_set_id", "created_at"],
)
new_subjects = pd.merge(
new_subjects,
subjects_df,
how="left",
left_on="subject_ids",
right_on="subject_id",
)
# Extract the video filename and annotation details
new_subjects[
[
"frame_number",
"frame_exp_sp_id",
"movie_id",
"classifications_count",
"created_at",
"retired_at",
"retirement_reason",
]
] = pd.DataFrame(
new_subjects["subject_data"]
.apply(
lambda x: [
{
"frame_number": v["frame_number"],
"frame_exp_sp_id": v["frame_exp_sp_id"],
"movie_id": v["movie_id"],
"classifications_count": v["retired"]["classifications_count"],
"created_at": v["retired"]["created_at"],
"retired_at": v["retired"]["retired_at"],
"retirement_reason": v["retired"]["retirement_reason"],
}
for k, v in json.loads(x).items()
][0]
)
.tolist()
)
new_subjects["subject_type"] = "frame"
movies_df = pd.read_sql_query("SELECT id, filename FROM movies", conn)
movies_df = movies_df.rename(
columns={"id": "movie_id", "filename": "movie_filename"}
)
new_subjects = pd.merge(new_subjects, movies_df, how="left", on="movie_id")
new_subjects["filename"] = new_subjects.apply(
lambda x: x["movie_filename"] + "_" + str(x["frame_number"]) + ".jpg",
axis=1,
)
# Set irrelevant columns to None
new_subjects["clip_start_time"] = None
new_subjects["clip_end_time"] = None
new_subjects = new_subjects[
[
"subject_ids",
"subject_type",
"filename",
"clip_start_time",
"clip_end_time",
"frame_exp_sp_id",
"frame_number",
"workflow_id",
"subject_set_id",
"classifications_count",
"retired_at",
"retirement_reason",
"created_at",
"movie_id",
]
]
new_subjects = new_subjects.drop_duplicates(subset="subject_ids")
db_utils.test_table(new_subjects, "subjects", keys=["movie_id"])
# Add values to subjects
db_utils.add_to_table(
args.db_path, "subjects", [tuple(i) for i in new_subjects.values], 14
)
# Calculate the number of users that classified each subject
w2_data["n_users"] = w2_data.groupby("subject_ids")["classification_id"].transform(
"nunique"
)
# Select frames with at least n different user classifications
w2_data = w2_data[w2_data.n_users >= args.n_users]
# Drop workflow and n_users columns
w2_data = w2_data.drop(
columns=[
"workflow_id",
"workflow_version",
"n_users",
"created_at",
]
)
# Extract the video filename and annotation details
subject_data_df = pd.DataFrame(
w2_data["subject_data"]
.apply(
lambda x: [
{
"movie_id": v["movie_id"],
"frame_number": v["frame_number"],
"label": v["label"],
}
for k, v in json.loads(x).items() # if v['retired']
][0],
1,
)
.tolist()
)
w2_data = pd.concat(
[w2_data.reset_index().drop("index", 1), subject_data_df],
axis=1,
ignore_index=True,
)
w2_data = w2_data[w2_data.columns[1:]]
pd.set_option('display.max_columns', None)
w2_data.columns = [
"classification_id",
"user_name",
"annotations",
"subject_data",
"subject_ids",
"movie_id",
"frame_number",
"label",
]
movies_df = pd.read_sql_query("SELECT id, filename FROM movies", conn)
movies_df = movies_df.rename(columns={"id": "movie_id"})
w2_data = pd.merge(w2_data, movies_df, how="left", on="movie_id")
# Convert to dictionary entries
w2_data["movie_id"] = w2_data["movie_id"].apply(lambda x: {"movie_id": x})
w2_data["frame_number"] = w2_data["frame_number"].apply(
lambda x: {"frame_number": x}
)
w2_data["label"] = w2_data["label"].apply(lambda x: {"label": x})
w2_data["user_name"] = w2_data["user_name"].apply(lambda x: {"user_name": x})
w2_data["subject_id"] = w2_data["subject_ids"].apply(lambda x: {"subject_id": x})
w2_data["annotation"] = w2_data["annotations"].apply(
lambda x: literal_eval(x)[0]["value"], 1
)
# Extract annotation metadata
w2_data["annotation"] = w2_data[
["movie_id", "frame_number", "label", "annotation", "user_name", "subject_id"]
].apply(
lambda x: [
OrderedDict(
list(x["movie_id"].items())
+ list(x["frame_number"].items())
+ list(x["label"].items())
+ list(x["annotation"][i].items())
+ list(x["user_name"].items())
+ list(x["subject_id"].items())
)
for i in range(len(x["annotation"]))
]
if len(x["annotation"]) > 0
else [
OrderedDict(
list(x["movie_id"].items())
+ list(x["frame_number"].items())
+ list(x["label"].items())
+ list(x["user_name"].items())
+ list(x["subject_id"].items())
)
],
1,
)
# Convert annotation to format which the tracker expects
ds = [
OrderedDict(
{
"user": i["user_name"],
"movie_id": i["movie_id"],
"label": i["label"],
"start_frame": i["frame_number"],
"x": int(i["x"]) if "x" in i else None,
"y": int(i["y"]) if "y" in i else None,
"w": int(i["width"]) if "width" in i else None,
"h": int(i["height"]) if "height" in i else None,
"subject_id": i["subject_id"] if "subject_id" in i else None,
}
)
for i in w2_data.annotation.explode()
if i is not None and i is not np.nan
]
# Get prepared annotations
w2_full = pd.DataFrame(ds)
w2_annotations = w2_full[w2_full["x"].notnull()]
new_rows = []
final_indices = []
for name, group in w2_annotations.groupby(["movie_id", "label", "start_frame"]):
movie_id, label, start_frame = name
total_users = w2_full[
(w2_full.movie_id == movie_id)
& (w2_full.label == label)
& (w2_full.start_frame == start_frame)
]["user"].nunique()
# Filter bboxes using IOU metric (essentially a consensus metric)
# Keep only bboxes where mean overlap exceeds this threshold
indices, new_group = filter_bboxes(
total_users=total_users,
users=[i[0] for i in group.values],
bboxes=[np.array((i[4], i[5], i[6], i[7])) for i in group.values],
obj=args.object_thresh,
eps=args.iou_epsilon,
iua=args.inter_user_agreement,
)
subject_ids = [i[8] for i in group.values[indices]]
for ix, box in zip(subject_ids, new_group):
new_rows.append(
(
movie_id,
label,
start_frame,
ix,
)
+ tuple(box)
)
w2_annotations = pd.DataFrame(
new_rows,
columns=[
"movie_id",
"label",
"start_frame",
"subject_id",
"x",
"y",
"w",
"h",
],
)
# Get species id for each species
conn = db_utils.create_connection(args.db_path)
# Get subject table
subjects_df = pd.read_sql_query("SELECT id, frame_exp_sp_id FROM subjects", conn)
subjects_df = subjects_df.rename(
columns={"id": "subject_id", "frame_exp_sp_id": "species_id"}
)
w2_annotations = pd.merge(
w2_annotations,
subjects_df,
how="left",
left_on="subject_id",
right_on="subject_id",
validate="many_to_one",
)
# Filter out invalid movies
w2_annotations = w2_annotations[w2_annotations["movie_id"].notnull()][
["species_id", "x", "y", "w", "h", "subject_id"]
]
# Add values to agg_annotations_frame
db_utils.add_to_table(
args.db_path,
"agg_annotations_frame",
[(None,) + tuple(i) for i in w2_annotations.values],
7,
)
print(f"Frame Aggregation Complete: {len(w2_annotations)} annotations added")
if __name__ == "__main__":
main()
|
[
"pandas.DataFrame",
"numpy.isin",
"kso_utils.db_utils.combine_duplicates",
"argparse.ArgumentParser",
"kso_utils.zooniverse_utils.auth_session",
"json.loads",
"pandas.merge",
"kso_utils.db_utils.create_connection",
"numpy.where",
"numpy.array",
"pandas.Series",
"pandas.read_sql_query",
"ast.literal_eval",
"collections.Counter",
"kso_utils.db_utils.test_table",
"pandas.set_option",
"sklearn.cluster.DBSCAN"
] |
[((2739, 2764), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2762, 2764), False, 'import requests, argparse\n'), ((4562, 4600), 'kso_utils.zooniverse_utils.auth_session', 'auth_session', (['args.user', 'args.password'], {}), '(args.user, args.password)\n', (4574, 4600), False, 'from kso_utils.zooniverse_utils import auth_session\n'), ((5603, 5643), 'kso_utils.db_utils.create_connection', 'db_utils.create_connection', (['args.db_path'], {}), '(args.db_path)\n', (5629, 5643), True, 'import kso_utils.db_utils as db_utils\n'), ((5693, 5770), 'pandas.read_sql_query', 'pd.read_sql_query', (['"""SELECT id FROM subjects WHERE subject_type=\'frame\'"""', 'conn'], {}), '("SELECT id FROM subjects WHERE subject_type=\'frame\'", conn)\n', (5710, 5770), True, 'import pandas as pd\n'), ((10475, 10517), 'pandas.set_option', 'pd.set_option', (['"""display.max_columns"""', 'None'], {}), "('display.max_columns', None)\n", (10488, 10517), True, 'import pandas as pd\n'), ((10747, 10805), 'pandas.read_sql_query', 'pd.read_sql_query', (['"""SELECT id, filename FROM movies"""', 'conn'], {}), "('SELECT id, filename FROM movies', conn)\n", (10764, 10805), True, 'import pandas as pd\n'), ((10882, 10937), 'pandas.merge', 'pd.merge', (['w2_data', 'movies_df'], {'how': '"""left"""', 'on': '"""movie_id"""'}), "(w2_data, movies_df, how='left', on='movie_id')\n", (10890, 10937), True, 'import pandas as pd\n'), ((13198, 13214), 'pandas.DataFrame', 'pd.DataFrame', (['ds'], {}), '(ds)\n', (13210, 13214), True, 'import pandas as pd\n'), ((14465, 14571), 'pandas.DataFrame', 'pd.DataFrame', (['new_rows'], {'columns': "['movie_id', 'label', 'start_frame', 'subject_id', 'x', 'y', 'w', 'h']"}), "(new_rows, columns=['movie_id', 'label', 'start_frame',\n 'subject_id', 'x', 'y', 'w', 'h'])\n", (14477, 14571), True, 'import pandas as pd\n'), ((14748, 14788), 'kso_utils.db_utils.create_connection', 'db_utils.create_connection', (['args.db_path'], {}), '(args.db_path)\n', (14774, 14788), True, 'import kso_utils.db_utils as db_utils\n'), ((14832, 14899), 'pandas.read_sql_query', 'pd.read_sql_query', (['"""SELECT id, frame_exp_sp_id FROM subjects"""', 'conn'], {}), "('SELECT id, frame_exp_sp_id FROM subjects', conn)\n", (14849, 14899), True, 'import pandas as pd\n'), ((15037, 15159), 'pandas.merge', 'pd.merge', (['w2_annotations', 'subjects_df'], {'how': '"""left"""', 'left_on': '"""subject_id"""', 'right_on': '"""subject_id"""', 'validate': '"""many_to_one"""'}), "(w2_annotations, subjects_df, how='left', left_on='subject_id',\n right_on='subject_id', validate='many_to_one')\n", (15045, 15159), True, 'import pandas as pd\n'), ((2091, 2111), 'collections.Counter', 'Counter', (['cluster_ids'], {}), '(cluster_ids)\n', (2098, 2111), False, 'from collections import OrderedDict, Counter\n'), ((2298, 2331), 'numpy.isin', 'np.isin', (['cluster_ids', 'passing_ids'], {}), '(cluster_ids, passing_ids)\n', (2305, 2331), True, 'import numpy as np\n'), ((5352, 5413), 'kso_utils.db_utils.combine_duplicates', 'db_utils.combine_duplicates', (['w2_data', 'args.duplicates_file_id'], {}), '(w2_data, args.duplicates_file_id)\n', (5379, 5413), True, 'import kso_utils.db_utils as db_utils\n'), ((6564, 6661), 'pandas.merge', 'pd.merge', (['new_subjects', 'subjects_df'], {'how': '"""left"""', 'left_on': '"""subject_ids"""', 'right_on': '"""subject_id"""'}), "(new_subjects, subjects_df, how='left', left_on='subject_ids',\n right_on='subject_id')\n", (6572, 6661), True, 'import pandas as pd\n'), ((7896, 7954), 'pandas.read_sql_query', 
'pd.read_sql_query', (['"""SELECT id, filename FROM movies"""', 'conn'], {}), "('SELECT id, filename FROM movies', conn)\n", (7913, 7954), True, 'import pandas as pd\n'), ((8095, 8155), 'pandas.merge', 'pd.merge', (['new_subjects', 'movies_df'], {'how': '"""left"""', 'on': '"""movie_id"""'}), "(new_subjects, movies_df, how='left', on='movie_id')\n", (8103, 8155), True, 'import pandas as pd\n'), ((9072, 9136), 'kso_utils.db_utils.test_table', 'db_utils.test_table', (['new_subjects', '"""subjects"""'], {'keys': "['movie_id']"}), "(new_subjects, 'subjects', keys=['movie_id'])\n", (9091, 9136), True, 'import kso_utils.db_utils as db_utils\n'), ((1791, 1807), 'pandas.Series', 'pd.Series', (['users'], {}), '(users)\n', (1800, 1807), True, 'import pandas as pd\n'), ((1946, 1991), 'sklearn.cluster.DBSCAN', 'DBSCAN', ([], {'min_samples': '(1)', 'metric': 'bb_iou', 'eps': 'eps'}), '(min_samples=1, metric=bb_iou, eps=eps)\n', (1952, 1991), False, 'from sklearn.cluster import DBSCAN\n'), ((2480, 2496), 'numpy.array', 'np.array', (['bboxes'], {}), '(bboxes)\n', (2488, 2496), True, 'import numpy as np\n'), ((2497, 2523), 'numpy.where', 'np.where', (['(cluster_ids == i)'], {}), '(cluster_ids == i)\n', (2505, 2523), True, 'import numpy as np\n'), ((11473, 11488), 'ast.literal_eval', 'literal_eval', (['x'], {}), '(x)\n', (11485, 11488), False, 'from ast import literal_eval\n'), ((13925, 13959), 'numpy.array', 'np.array', (['(i[4], i[5], i[6], i[7])'], {}), '((i[4], i[5], i[6], i[7]))\n', (13933, 13959), True, 'import numpy as np\n'), ((6025, 6038), 'json.loads', 'json.loads', (['x'], {}), '(x)\n', (6035, 6038), False, 'import os, io, csv, json\n'), ((10178, 10191), 'json.loads', 'json.loads', (['x'], {}), '(x)\n', (10188, 10191), False, 'import os, io, csv, json\n'), ((7739, 7752), 'json.loads', 'json.loads', (['x'], {}), '(x)\n', (7749, 7752), False, 'import os, io, csv, json\n')]
|
"""
Script entry point
"""
from src.sandbox.network import Network
from src.sandbox.dense import Dense
import src.sandbox.linalg as linalg
import numpy as np
import time
def main():
n = 6000
v = [x for x in range(n)]
m = [[x for x in range(n)] for _ in range(n)]
time_start = time.time()
for _ in range(3):
linalg.mdotv(m, v)
print(time.time() - time_start)
def main2():
n = 8000
v = np.asarray([x for x in range(n)])
m = np.asarray([[x for x in range(n)] for _ in range(n)])
time_start = time.time()
z = None
for _ in range(3):
z = m.dot(v)
print(z.sum())
print(time.time() - time_start)
if __name__ == "__main__":
# main()
main2()
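# Note (added): main() times a pure-Python matrix-vector product
# (src.sandbox.linalg.mdotv) while main2() times the equivalent numpy dot;
# the gap between the two illustrates the payoff of BLAS-backed vectorization.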
|
[
"src.sandbox.linalg.mdotv",
"time.time"
] |
[((298, 309), 'time.time', 'time.time', ([], {}), '()\n', (307, 309), False, 'import time\n'), ((548, 559), 'time.time', 'time.time', ([], {}), '()\n', (557, 559), False, 'import time\n'), ((342, 360), 'src.sandbox.linalg.mdotv', 'linalg.mdotv', (['m', 'v'], {}), '(m, v)\n', (354, 360), True, 'import src.sandbox.linalg as linalg\n'), ((372, 383), 'time.time', 'time.time', ([], {}), '()\n', (381, 383), False, 'import time\n'), ((653, 664), 'time.time', 'time.time', ([], {}), '()\n', (662, 664), False, 'import time\n')]
|
'''
pip install flask gevent requests pillow
https://github.com/jrosebr1/simple-keras-rest-api
https://gist.github.com/kylehounslow/767fb72fde2ebdd010a0bf4242371594
'''
''' Usage
python ..\scripts\classifier.py --socket=5000 --weights=weights\obj_last.weights
curl -X POST -F [email protected] http://localhost:5000/training/begin?plan=testplan
'''
import threading
import time
import csv
import datetime
import flask
import traceback
import sys
import os
import cv2 as cv
import argparse
import lightnet
import darknet
import socket
import requests
import get_ar_plan
import logging
logger = logging.getLogger(__name__)
app = flask.Flask(__name__)
from os.path import join
args = None
nets = []
metas = []
args_groups = []
csv_file = None
csv_writer = None
cap = None
gpu_lock = threading.Lock()
host_ip = 'localhost'
#
server_state_idle = 0
server_state_training = 1
server_state = None
server_training_status = {
'plan_name': '',
'percentage': 0,
}
server_training_status_internal = {
'folders': [],
}
def get_Host_name_IP():
try:
global host_ip
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("baidu.com", 80))
host_ip, _ = s.getsockname()
print("http://%s:5000" % host_ip)
except:
print("Unable to get Hostname and IP")
@app.route("/", methods=["GET"])
def index_get():
data = vars(args)
data['usage'] = "curl -X POST -F [email protected] http://%s:5000/predict" % (
host_ip)
return flask.jsonify(data)
def go_idle():
global server_state, server_training_status, server_training_status_internal
server_state = server_state_idle
server_training_status['plan_name'] = ''
server_training_status['percentage'] = 0
server_training_status_internal['folders'] = []
@app.route("/training/status", methods=["GET"])
def training_status():
return flask.jsonify(server_training_status)
def training_thread_function(training_folders):
global server_state, server_training_status, server_training_status_internal
server_training_status_internal['folders'] = training_folders
import subprocess
idx = 1 # start from 1
for folder in training_folders:
bat_file = join(folder, 'train.bat')
logging.info("%s: starting", bat_file)
p = subprocess.Popen(bat_file, shell=True, stdout = subprocess.PIPE)
stdout, stderr = p.communicate()
print(p.returncode) # is 0 if success
logging.info("%s: finishing", bat_file)
server_training_status['percentage'] = idx * 100 / len(training_folders)
idx += 1
go_idle()
@app.route("/training/begin", methods=["GET"])
def training_begin():
global server_state, server_training_status
if server_state != server_state_idle:
result = {
'errCode': 'Busy', # 'OK/Busy/Error'
'errMsg': 'Server is busy training %s' % server_training_status['plan_name']
}
return flask.jsonify(result)
try:
server_state = server_state_training
plan = flask.request.args.get("plan")
print(plan)
server_training_status['plan_name'] = plan
server_training_status['percentage'] = 0
url = 'http://localhost:8800/api/Training/plan?plan=%s' % plan
response = requests.get(url)
plan_json = response.json()
# return flask.jsonify(result)
training_folders = get_ar_plan.prepare_training_folders(plan_json)
x = threading.Thread(target=training_thread_function, args=(training_folders,))
x.start()
result = {
'errCode': 'OK', # 'OK/Busy/Error'
'errMsg': ''
}
except:
error_callstack = traceback.format_exc()
print(error_callstack)
result = {
            'errCode': 'Error',  # 'OK/Busy/Error'
'errMsg': error_callstack
}
go_idle()
return flask.jsonify(result)
def main():
# lightnet.set_cwd(dir)
global nets, metas, args, cap, args_groups
global server_state
server_state = server_state_idle
def add_bool_arg(parser, name, default=False):
group = parser.add_mutually_exclusive_group(required=False)
group.add_argument('--' + name, dest=name, action='store_true')
group.add_argument('--no-' + name, dest=name, action='store_false')
parser.set_defaults(**{name: default})
parser = argparse.ArgumentParser()
parser.add_argument('--group', default='default')
parser.add_argument('--cfg', default='obj.cfg')
parser.add_argument('--weights', default='weights/obj_last.weights')
parser.add_argument('--names', default='obj.names')
parser.add_argument('--socket', type=int, default=5000)
parser.add_argument('--top_k', type=int, default=5)
parser.add_argument('--gold_confidence', type=float, default=0.95)
parser.add_argument('--threshold', type=float, default=0.5)
add_bool_arg(parser, 'debug')
args = parser.parse_args()
logging.basicConfig(level=logging.INFO)
# flask routine
print('=========================================')
get_Host_name_IP()
print('=========================================')
app.run(host='0.0.0.0', port=args.socket, threaded=True)
if __name__ == "__main__":
main()
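# Illustrative client calls (added; the host and plan name are hypothetical):
#     import requests
#     requests.get("http://localhost:5000/training/begin?plan=testplan").json()
#     requests.get("http://localhost:5000/training/status").json()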
|
[
"threading.Thread",
"subprocess.Popen",
"argparse.ArgumentParser",
"logging.basicConfig",
"flask.request.args.get",
"get_ar_plan.prepare_training_folders",
"flask.Flask",
"socket.socket",
"threading.Lock",
"logging.info",
"flask.jsonify",
"traceback.format_exc",
"requests.get",
"os.path.join",
"logging.getLogger"
] |
[((627, 654), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (644, 654), False, 'import logging\n'), ((662, 683), 'flask.Flask', 'flask.Flask', (['__name__'], {}), '(__name__)\n', (673, 683), False, 'import flask\n'), ((828, 844), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (842, 844), False, 'import threading\n'), ((1577, 1596), 'flask.jsonify', 'flask.jsonify', (['data'], {}), '(data)\n', (1590, 1596), False, 'import flask\n'), ((1971, 2008), 'flask.jsonify', 'flask.jsonify', (['server_training_status'], {}), '(server_training_status)\n', (1984, 2008), False, 'import flask\n'), ((4058, 4079), 'flask.jsonify', 'flask.jsonify', (['result'], {}), '(result)\n', (4071, 4079), False, 'import flask\n'), ((4572, 4597), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4595, 4597), False, 'import argparse\n'), ((5168, 5207), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (5187, 5207), False, 'import logging\n'), ((1159, 1208), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (1172, 1208), False, 'import socket\n'), ((2319, 2344), 'os.path.join', 'join', (['folder', '"""train.bat"""'], {}), "(folder, 'train.bat')\n", (2323, 2344), False, 'from os.path import join\n'), ((2354, 2392), 'logging.info', 'logging.info', (['"""%s: starting"""', 'bat_file'], {}), "('%s: starting', bat_file)\n", (2366, 2392), False, 'import logging\n'), ((2406, 2468), 'subprocess.Popen', 'subprocess.Popen', (['bat_file'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), '(bat_file, shell=True, stdout=subprocess.PIPE)\n', (2422, 2468), False, 'import subprocess\n'), ((2573, 2612), 'logging.info', 'logging.info', (['"""%s: finishing"""', 'bat_file'], {}), "('%s: finishing', bat_file)\n", (2585, 2612), False, 'import logging\n'), ((3082, 3103), 'flask.jsonify', 'flask.jsonify', (['result'], {}), '(result)\n', (3095, 3103), False, 'import flask\n'), ((3180, 3210), 'flask.request.args.get', 'flask.request.args.get', (['"""plan"""'], {}), "('plan')\n", (3202, 3210), False, 'import flask\n'), ((3428, 3445), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (3440, 3445), False, 'import requests\n'), ((3551, 3598), 'get_ar_plan.prepare_training_folders', 'get_ar_plan.prepare_training_folders', (['plan_json'], {}), '(plan_json)\n', (3587, 3598), False, 'import get_ar_plan\n'), ((3614, 3689), 'threading.Thread', 'threading.Thread', ([], {'target': 'training_thread_function', 'args': '(training_folders,)'}), '(target=training_thread_function, args=(training_folders,))\n', (3630, 3689), False, 'import threading\n'), ((3856, 3878), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3876, 3878), False, 'import traceback\n')]
|
import tkinter as tk
win = tk.Tk()
current_index = tk.StringVar()
text = tk.Text(win, bg="white", fg="black")
lab = tk.Label(win, textvariable=current_index)
def update_index(event=None):
cursor_position = text.index(tk.INSERT)
cursor_position_pieces = str(cursor_position).split('.')
cursor_line = cursor_position_pieces[0]
cursor_char = cursor_position_pieces[1]
current_index.set('line: ' + cursor_line + ' char: ' + cursor_char + ' index: ' + str(cursor_position))
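# Note (added): tk.Text indices are "line.char" strings (lines are 1-based,
# characters 0-based), which is why update_index() splits the INSERT index on ".".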
text.pack(side=tk.TOP, fill=tk.BOTH, expand=1)
lab.pack(side=tk.BOTTOM, fill=tk.X, expand=1)
text.bind('<KeyRelease>', update_index)
win.mainloop()
|
[
"tkinter.StringVar",
"tkinter.Text",
"tkinter.Label",
"tkinter.Tk"
] |
[((28, 35), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (33, 35), True, 'import tkinter as tk\n'), ((52, 66), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (64, 66), True, 'import tkinter as tk\n'), ((74, 110), 'tkinter.Text', 'tk.Text', (['win'], {'bg': '"""white"""', 'fg': '"""black"""'}), "(win, bg='white', fg='black')\n", (81, 110), True, 'import tkinter as tk\n'), ((117, 153), 'tkinter.Label', 'tk.Label', (['win'], {'textvar': 'current_index'}), '(win, textvar=current_index)\n', (125, 153), True, 'import tkinter as tk\n')]
|
#Pluginname="Quizkampen (Android)"
#Filename="quizkampen"
#Type=App
import os
import struct
import xml.etree.ElementTree
import tempfile
def convertdata(db):
#ctx.gui_clearData()
ctx.gui_setMainLabel("Quizkampen: Extracting userid");
tmpdir = tempfile.mkdtemp()
outuid = os.path.join(tmpdir, "userid")
uid=""
filenames=["/data/se.feomedia.quizkampen.pl.lite/shared_prefs/PREF_SETTINGS_NAME.xml","/se.feomedia.quizkampen.pl.lite/shared_prefs/PREF_SETTINGS_NAME.xml"]
for f in filenames:
if ctx.fs_file_extract(f,outuid):
ctx.gui_add_report_relevant_file(f)
e = xml.etree.ElementTree.parse(outuid).getroot()
for atype in e.findall("long"):
if atype.get("name")=="current_user":
uid=atype.get("value")
print("Userid: "+uid+"\n")
os.remove(outuid)
            break
ctx.gui_setMainLabel("Quizkampen: Extracting users");
waconn=ctx.sqlite_run_cmd(db,"SELECT DISTINCT id, name from qk_users;")
if (waconn==-1):
print ("Error: "+ctx.sqlite_last_error(db))
return
contacts={}
if waconn!=-1:
rows=ctx.sqlite_get_data_size(waconn)[0]
for i in range(0,rows):
id=str(ctx.sqlite_get_data(waconn,i,0))
name=str(ctx.sqlite_get_data(waconn,i,1))
if (id not in contacts):
if name != None:
contacts[id]=name
else:
contacts[id]=""
#print(contacts)
ctx.gui_setMainLabel("Quizkampen: Extracting messages");
ctx.sqlite_cmd_close(waconn)
conn=ctx.sqlite_run_cmd(db,"select rowid, to_id, from_id, text, datetime, is_message_read, is_deleted from qk_messages;")
rows=ctx.sqlite_get_data_size(conn)[0]
oldpos=0
r=0
for i in range(0,rows):
newpos=int(i/rows*100)
if (oldpos<newpos):
oldpos=newpos
ctx.gui_setMainProgressBar(oldpos)
rowid=ctx.sqlite_get_data(conn,i,0)
to_id=str(ctx.sqlite_get_data(conn,i,1))
to_id_alias=""
if to_id in contacts:
to_id_alias=contacts[to_id]
from_id=str(ctx.sqlite_get_data(conn,i,2))
from_id_alias=""
if from_id in contacts:
from_id_alias=contacts[from_id]
text=ctx.sqlite_get_data(conn,i,3)
timestamp=ctx.sqlite_get_data(conn,i,4)
timestamp=str(timestamp[:-3])
is_message_read=ctx.sqlite_get_data(conn,i,5)
is_deleted=ctx.sqlite_get_data(conn,i,6)
ctx.gui_set_data(r,0,rowid)
ctx.gui_set_data(r,1,to_id)
ctx.gui_set_data(r,2,to_id_alias)
ctx.gui_set_data(r,3,from_id)
ctx.gui_set_data(r,4,from_id_alias)
ctx.gui_set_data(r,5,text)
print(timestamp)
ctx.gui_set_data(r,6,timestamp)
ctx.gui_set_data(r,7,is_message_read)
ctx.gui_set_data(r,8,is_deleted)
if (uid==from_id):
from_me="yes"
else:
from_me="no"
if (uid==""):
from_me="unknown"
ctx.gui_set_data(r,9,from_me)
r+=1
ctx.sqlite_cmd_close(conn)
def main():
headers=["rowid (int)","to_id (QString)", "to_id_alias (QString)", "from_id (QString)", "from_id_alias (QString)", "text (QString)","timestamp (int)","is_message_read (QString)","is_deleted (QString)", "is_from_me (QString)"]
ctx.gui_set_headers(headers)
ctx.gui_setMainLabel("Quizkampen: Parsing Strings");
ctx.gui_setMainProgressBar(0)
db=ctx.sqlite_open("gui",True)
convertdata(db)
ctx.gui_update()
ctx.gui_setMainLabel("Status: Idle.")
ctx.gui_setMainProgressBar(0)
ctx.sqlite_close(db)
return "Finished running plugin."
|
[
"tempfile.mkdtemp"
] |
[((258, 276), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (274, 276), False, 'import tempfile\n')]
|
#!/opt/bb/bin/python3.7
"""This module defines a program that generates the 'baljsn_encoder_testtypes'
component and replace all uses of 'bdes' with 'bsls' within its files.
"""
from asyncio import create_subprocess_exec as aio_create_subprocess_exec
from asyncio import run as aio_run
from asyncio import subprocess as aio_subprocess
from mmap import mmap as mm_mmap
from re import compile as re_compile
from re import finditer as re_finditer
from sys import exit as sys_exit
from sys import version_info as sys_version_info
from typing import AsyncGenerator as ty_AsyncGenerator
from typing import TypeVar as ty_TypeVar
from typing import Union as ty_Union
from typing import cast as ty_cast
T = ty_TypeVar('T')
def not_none_cast(x: ty_Union[T, None]) -> T:
"""Return the specified `x` cast to the specified `T` type.
Args:
x (typing.Union[T, None]): the value to return
Returns:
T: the specified `x` cast to `T`
"""
return ty_cast(T, x)
async def generate_components() -> ty_AsyncGenerator[str, None]:
"""Generate the `baljsn_encoder_testtypes` components.
Spawn a subprocess that generates the C++ code for the
`baljsn_encoder_testtypes.xsd` schema. Return an async generator `G` that
yields each line of output from the subprocess as it is received and that
returns upon termination of the process. Note that this function does not
have a dependency on the shell of the user, but does depend on the user's
executable search path, since it directly executes `bas_codegen.pl`.
Returns:
typing.AsyncGenerator[str, None]: `G`
"""
process = await aio_create_subprocess_exec(
'bas_codegen.pl',
'--mode',
'msg',
'--noAggregateConversion',
'--noExternalization',
'--msgComponent=encoder_testtypes',
'--package=baljsn',
'baljsn_encoder_testtypes.xsd',
stdout=aio_subprocess.PIPE,
stderr=aio_subprocess.STDOUT)
stdout = not_none_cast(process.stdout)
while not stdout.at_eof():
line: bytes = await stdout.readline()
        if not stdout.at_eof() or len(line) != 0:
yield line.decode()
await process.communicate()
def rewrite_bdes_ident_to_bsls(file: str) -> None:
"""Replace all occurrences of "bdes_ident" with "bsls_ident" in the
specified `file`.
Args:
file (str): an absolute or relative path to a file
Returns:
None
"""
with open(file, "r+b") as f, mm_mmap(f.fileno(), 0) as filemap:
regex = b'(?P<LOWER_CASE>bdes_ident)|(?P<UPPER_CASE>BDES_IDENT)'
compiled_regex = re_compile(regex)
# mmap objects satisfy the bytearray interface
filemap_bytearray = ty_cast(bytearray, filemap)
for match in re_finditer(compiled_regex, filemap_bytearray):
group = match.lastgroup
if group == 'LOWER_CASE':
filemap[match.start():match.end()] = b'bsls_ident'
else:
assert group == 'UPPER_CASE'
filemap[match.start():match.end()] = b'BSLS_IDENT'
filemap.flush()
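# Illustrative check (added; "scratch.h" is a hypothetical file):
#     with open("scratch.h", "wb") as f:
#         f.write(b"// BDES_IDENT\n#include <bdes_ident.h>\n")
#     rewrite_bdes_ident_to_bsls("scratch.h")
#     # the file now reads "BSLS_IDENT" / "bsls_ident" instead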
async def main() -> None:
"""Asynchronously generate the 'baljsn_encdoer_testypes' components and
replace all occurrences of "bdes_ident" with "bsls_ident" within them.
Return:
None
"""
print("Generating files with bas_codegen.pl")
lines = generate_components()
async for line in lines:
print(line.strip('\n'))
print("Replacing 'bdes_ident' with 'bsls_ident' in " +
"baljsn_encoder_testtypes.h")
rewrite_bdes_ident_to_bsls('./baljsn_encoder_testtypes.h')
print("Replacing 'bdes_ident' with 'bsls_ident' in " +
"baljsn_encoder_testtypes.cpp")
rewrite_bdes_ident_to_bsls('./baljsn_encoder_testtypes.cpp')
if __name__ == '__main__':
    if not (sys_version_info.major == 3 and sys_version_info.minor >= 6):
print("This program requires Python 3.6 or higher")
sys_exit(1)
aio_run(main())
# ----------------------------------------------------------------------------
# Copyright 2020 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------- END-OF-FILE ----------------------------------
|
[
"asyncio.create_subprocess_exec",
"typing.cast",
"re.finditer",
"typing.TypeVar",
"sys.exit",
"re.compile"
] |
[((704, 719), 'typing.TypeVar', 'ty_TypeVar', (['"""T"""'], {}), "('T')\n", (714, 719), True, 'from typing import TypeVar as ty_TypeVar\n'), ((971, 984), 'typing.cast', 'ty_cast', (['T', 'x'], {}), '(T, x)\n', (978, 984), True, 'from typing import cast as ty_cast\n'), ((1645, 1919), 'asyncio.create_subprocess_exec', 'aio_create_subprocess_exec', (['"""bas_codegen.pl"""', '"""--mode"""', '"""msg"""', '"""--noAggregateConversion"""', '"""--noExternalization"""', '"""--msgComponent=encoder_testtypes"""', '"""--package=baljsn"""', '"""baljsn_encoder_testtypes.xsd"""'], {'stdout': 'aio_subprocess.PIPE', 'stderr': 'aio_subprocess.STDOUT'}), "('bas_codegen.pl', '--mode', 'msg',\n '--noAggregateConversion', '--noExternalization',\n '--msgComponent=encoder_testtypes', '--package=baljsn',\n 'baljsn_encoder_testtypes.xsd', stdout=aio_subprocess.PIPE, stderr=\n aio_subprocess.STDOUT)\n", (1671, 1919), True, 'from asyncio import create_subprocess_exec as aio_create_subprocess_exec\n'), ((2673, 2690), 're.compile', 're_compile', (['regex'], {}), '(regex)\n', (2683, 2690), True, 'from re import compile as re_compile\n'), ((2774, 2801), 'typing.cast', 'ty_cast', (['bytearray', 'filemap'], {}), '(bytearray, filemap)\n', (2781, 2801), True, 'from typing import cast as ty_cast\n'), ((2823, 2869), 're.finditer', 're_finditer', (['compiled_regex', 'filemap_bytearray'], {}), '(compiled_regex, filemap_bytearray)\n', (2834, 2869), True, 'from re import finditer as re_finditer\n'), ((4023, 4034), 'sys.exit', 'sys_exit', (['(1)'], {}), '(1)\n', (4031, 4034), True, 'from sys import exit as sys_exit\n')]
|
from django.db.models.query import Q
from django.utils import timezone
from rest_framework import serializers
from ..accounts.serializers import UserSerializer
from .models import Amenity, Booking
class AmenityRelatedField(serializers.RelatedField):
def to_native(self, value):
return {
'id': value.id,
'name': value.name,
}
class BookingSerializer(serializers.ModelSerializer):
resident = UserSerializer(read_only=True)
amenity_detail = AmenityRelatedField('amenity', read_only=True)
is_editable = serializers.SerializerMethodField('is_obj_editable')
is_removable = serializers.SerializerMethodField('is_obj_removable')
class Meta:
model = Booking
fields = ('id',
'resident',
'amenity',
'amenity_detail',
'reserved_from',
'reserved_to',
'is_editable',
'is_removable')
def is_obj_editable(self, obj):
return obj.has_permission(self.context['request'].user,
'amenities.change_booking')
def is_obj_removable(self, obj):
return obj.has_permission(self.context['request'].user,
'amenities.delete_booking')
def validate_amenity(self, attrs, source):
value = attrs[source]
if not value.is_available:
raise serializers.ValidationError("Amenity not available")
        if value not in self.context['request'].building.amenity_set.all():
raise serializers.ValidationError("Amenity not found")
return attrs
def validate_reserved_from(self, attrs, source):
value = attrs[source]
if value < timezone.now():
raise serializers.ValidationError("'From' date must be in future")
return attrs
def validate(self, attrs):
if attrs['reserved_from'] > attrs['reserved_to']:
raise serializers.ValidationError(
"The 'from' date is after the 'to' date")
bookings = attrs['amenity'].booking_set.all()
date_range = (attrs['reserved_from'], attrs['reserved_to'])
qs = bookings.filter(
Q(reserved_from__range=date_range) |
Q(reserved_to__range=date_range))
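        # NOTE (added): this range check misses an existing booking that fully
        # contains the new one; an extra Q(reserved_from__lte=...,
        # reserved_to__gte=...) clause would be needed to cover that case.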
booking_id = self.init_data.get('id')
if booking_id:
qs = qs.exclude(pk=booking_id)
if qs.exists():
raise serializers.ValidationError("Booking conflict")
return attrs
class AmenitySerializer(serializers.ModelSerializer):
class Meta:
model = Amenity
fields = ('id', 'name', 'is_available', )
|
[
"django.utils.timezone.now",
"django.db.models.query.Q",
"rest_framework.serializers.SerializerMethodField",
"rest_framework.serializers.ValidationError"
] |
[((564, 616), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', (['"""is_obj_editable"""'], {}), "('is_obj_editable')\n", (597, 616), False, 'from rest_framework import serializers\n'), ((636, 689), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', (['"""is_obj_removable"""'], {}), "('is_obj_removable')\n", (669, 689), False, 'from rest_framework import serializers\n'), ((1443, 1495), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""Amenity not available"""'], {}), "('Amenity not available')\n", (1470, 1495), False, 'from rest_framework import serializers\n'), ((1590, 1638), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""Amenity not found"""'], {}), "('Amenity not found')\n", (1617, 1638), False, 'from rest_framework import serializers\n'), ((1763, 1777), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1775, 1777), False, 'from django.utils import timezone\n'), ((1797, 1857), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""\'From\' date must be in future"""'], {}), '("\'From\' date must be in future")\n', (1824, 1857), False, 'from rest_framework import serializers\n'), ((1987, 2056), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""The \'from\' date is after the \'to\' date"""'], {}), '("The \'from\' date is after the \'to\' date")\n', (2014, 2056), False, 'from rest_framework import serializers\n'), ((2476, 2523), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""Booking conflict"""'], {}), "('Booking conflict')\n", (2503, 2523), False, 'from rest_framework import serializers\n'), ((2238, 2272), 'django.db.models.query.Q', 'Q', ([], {'reserved_from__range': 'date_range'}), '(reserved_from__range=date_range)\n', (2239, 2272), False, 'from django.db.models.query import Q\n'), ((2287, 2319), 'django.db.models.query.Q', 'Q', ([], {'reserved_to__range': 'date_range'}), '(reserved_to__range=date_range)\n', (2288, 2319), False, 'from django.db.models.query import Q\n')]
|
# Copyright 2020 TestProject (https://testproject.io)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
# Notice we import WebDriverWait from SDK classes!
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from src.testproject.classes import WebDriverWait
from src.testproject.sdk.drivers import webdriver
from selenium.webdriver.support import expected_conditions as ec
from tests.pageobjects.web import LoginPage, ProfilePage
@pytest.fixture
def driver():
driver = webdriver.Chrome()
yield driver
driver.quit()
@pytest.fixture()
def wait(driver):
wait = WebDriverWait(driver, 2) # Notice the imports, using WebDriverWait from 'src.testproject.classes'
yield wait
def test_wait_with_ec_invisible(driver, wait):
    # The driver command will fail because the element will not be found, but this is the
    # expected result, so this step actually passes and will be reported as passed as well.
LoginPage(driver).open().login_as("<NAME>", "12345")
# Check successful login.
assert ProfilePage(driver).greetings_are_displayed() is True
ProfilePage(driver).logout()
# Greeting label shouldn't be shown anymore after logout.
textlabel_greetings = (By.CSS_SELECTOR, "#greetings")
element_not_present = wait.until(ec.invisibility_of_element_located(textlabel_greetings))
assert element_not_present
    # This step will fail because the example page's title is not the one we give below; the step
    # will be reported as failed and a TimeoutException will be raised by the WebDriverWait instance.
try:
wait.until(ec.title_is("Title that is definitely not this one."))
except TimeoutException:
pass
|
[
"tests.pageobjects.web.ProfilePage",
"pytest.fixture",
"selenium.webdriver.support.expected_conditions.invisibility_of_element_located",
"selenium.webdriver.support.expected_conditions.title_is",
"src.testproject.classes.WebDriverWait",
"src.testproject.sdk.drivers.webdriver.Chrome",
"tests.pageobjects.web.LoginPage"
] |
[((1092, 1108), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1106, 1108), False, 'import pytest\n'), ((1035, 1053), 'src.testproject.sdk.drivers.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (1051, 1053), False, 'from src.testproject.sdk.drivers import webdriver\n'), ((1138, 1162), 'src.testproject.classes.WebDriverWait', 'WebDriverWait', (['driver', '(2)'], {}), '(driver, 2)\n', (1151, 1162), False, 'from src.testproject.classes import WebDriverWait\n'), ((1812, 1867), 'selenium.webdriver.support.expected_conditions.invisibility_of_element_located', 'ec.invisibility_of_element_located', (['textlabel_greetings'], {}), '(textlabel_greetings)\n', (1846, 1867), True, 'from selenium.webdriver.support import expected_conditions as ec\n'), ((1626, 1645), 'tests.pageobjects.web.ProfilePage', 'ProfilePage', (['driver'], {}), '(driver)\n', (1637, 1645), False, 'from tests.pageobjects.web import LoginPage, ProfilePage\n'), ((2120, 2173), 'selenium.webdriver.support.expected_conditions.title_is', 'ec.title_is', (['"""Title that is definitely not this one."""'], {}), "('Title that is definitely not this one.')\n", (2131, 2173), True, 'from selenium.webdriver.support import expected_conditions as ec\n'), ((1568, 1587), 'tests.pageobjects.web.ProfilePage', 'ProfilePage', (['driver'], {}), '(driver)\n', (1579, 1587), False, 'from tests.pageobjects.web import LoginPage, ProfilePage\n'), ((1474, 1491), 'tests.pageobjects.web.LoginPage', 'LoginPage', (['driver'], {}), '(driver)\n', (1483, 1491), False, 'from tests.pageobjects.web import LoginPage, ProfilePage\n')]
|