code stringlengths 22-1.05M | apis listlengths 1-3.31k | extract_api stringlengths 75-3.25M |
---|---|---|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
import torch
import torch.optim
from typing import TYPE_CHECKING, Any, Callable, Optional
if TYPE_CHECKING:
from torch.optim.optimizer import _params_t
else:
_params_t = Any
class MirrorMADGRAD(torch.optim.Optimizer):
"""
Mirror MADGRAD_: A Momentumized, Adaptive, Dual Averaged Gradient Method for Stochastic
Optimization.
.. _MADGRAD: https://arxiv.org/abs/2101.11075
    Mirror MADGRAD uses the weighting and momentum of MADGRAD but uses mirror descent
    rather than dual averaging as the base method. In general, the mirror variant works
    better than standard MADGRAD on problems where the generalization gap is not an issue,
    such as large Transformer model training. On CIFAR-10/ImageNet and smaller NLP models
    the standard variant should be preferred. The mirror variant is more numerically stable,
    which may help with large model training.
Currently does not support sparse gradients.
Arguments:
params (iterable):
Iterable of parameters to optimize or dicts defining parameter groups.
lr (float):
Learning rate (default: 1e-2).
momentum (float):
Momentum value in the range [0,1) (default: 0.9).
weight_decay (float):
Weight decay, i.e. a L2 penalty (default: 0).
eps (float):
Term added to the denominator outside of the root operation to improve numerical stability. (default: 0).
This parameter is less important in MADGRAD than in Adam. A value of 0 will likely give the best results.
decouple_decay (bool):
Apply AdamW style decoupled weight decay (EXPERIMENTAL).
Application of decay occurs before the step.
"""
def __init__(
self, params: _params_t, lr: float = 1e-2, momentum: float = 0.9,
weight_decay: float = 0, eps: float = 0, decouple_decay=False,
):
if momentum < 0 or momentum >= 1:
raise ValueError(f"Momentum {momentum} must be in the range [0,1]")
if lr <= 0:
raise ValueError(f"Learning rate {lr} must be positive")
if weight_decay < 0:
raise ValueError(f"Weight decay {weight_decay} must be non-negative")
if eps < 0:
raise ValueError(f"Eps must be non-negative")
defaults = dict(lr=lr, eps=eps, momentum=momentum,
weight_decay=weight_decay, decouple_decay=decouple_decay)
super().__init__(params, defaults)
@property
def supports_memory_efficient_fp16(self) -> bool:
return True
@property
def supports_flat_params(self) -> bool:
return True
def step(self, closure: Optional[Callable[[], float]] = None) -> Optional[float]:
"""Performs a single optimization step.
Arguments:
closure (callable, optional): A closure that reevaluates the model
and returns the loss.
"""
loss = None
if closure is not None:
loss = closure()
# step counter must be stored in state to ensure correct behavior under
# optimizer sharding
if 'k' not in self.state:
self.state['k'] = torch.tensor([0], dtype=torch.long)
k = self.state['k'].item()
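        # MADGRAD weighting: decay the accumulated second moments by
        # sqrt(k / (k + 1)) and scale this step by lamb = (k + 1)^(1/3)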
update_ratio = math.pow(k/(k+1), 1/2)
lamb = math.pow(k+1, 1/3)
for group in self.param_groups:
eps = group["eps"]
lr = group["lr"] + eps
decay = group["weight_decay"]
momentum = group["momentum"]
decouple_decay = group["decouple_decay"]
ck = 1 - momentum
for p in group["params"]:
if p.grad is None:
continue
grad = p.grad.data
if grad.dtype in {torch.float16, torch.bfloat16}:
grad = grad.float()
state = self.state[p]
p_data_fp32 = p.data
if p.data.dtype in {torch.float16, torch.bfloat16}:
p_data_fp32 = p_data_fp32.float()
if "grad_sum_sq" not in state:
state["grad_sum_sq"] = torch.zeros_like(p_data_fp32).detach()
state["z"] = torch.clone(p_data_fp32).detach()
if momentum != 0.0 and grad.is_sparse:
raise RuntimeError("momentum != 0 is not compatible with sparse gradients")
grad_sum_sq = state["grad_sum_sq"]
z = state["z"]
# Apply weight decay
if decay != 0:
if grad.is_sparse:
raise RuntimeError("weight_decay option is not compatible with sparse gradients")
if decouple_decay:
z.data.add_(z.data, alpha=-lr*decay)
else:
grad.add_(p_data_fp32, alpha=decay)
grad_sum_sq.mul_(update_ratio)
# Accumulate second moments
grad_sum_sq.addcmul_(grad, grad, value=1)
rms = grad_sum_sq.pow(1 / 3).add_(eps)
if eps == 0:
rms[rms == 0] = float('inf')
# Update z
z.data.addcdiv_(grad, rms, value=-lr*lamb)
# Step
p_data_fp32.mul_(1 - ck).add_(z, alpha=ck)
if p.data.dtype in {torch.float16, torch.bfloat16}:
p.data.copy_(p_data_fp32)
self.state['k'] += 1
return loss
|
[
"torch.zeros_like",
"torch.tensor",
"torch.clone",
"math.pow"
] |
[((3489, 3517), 'math.pow', 'math.pow', (['(k / (k + 1))', '(1 / 2)'], {}), '(k / (k + 1), 1 / 2)\n', (3497, 3517), False, 'import math\n'), ((3527, 3549), 'math.pow', 'math.pow', (['(k + 1)', '(1 / 3)'], {}), '(k + 1, 1 / 3)\n', (3535, 3549), False, 'import math\n'), ((3394, 3429), 'torch.tensor', 'torch.tensor', (['[0]'], {'dtype': 'torch.long'}), '([0], dtype=torch.long)\n', (3406, 3429), False, 'import torch\n'), ((4354, 4383), 'torch.zeros_like', 'torch.zeros_like', (['p_data_fp32'], {}), '(p_data_fp32)\n', (4370, 4383), False, 'import torch\n'), ((4426, 4450), 'torch.clone', 'torch.clone', (['p_data_fp32'], {}), '(p_data_fp32)\n', (4437, 4450), False, 'import torch\n')]
|
import os
path = os.getcwd()
from cu__grid_cell.data_gen import data_gen
from cu__grid_cell.preparation import preparation
import numpy as np
from cu__grid_cell.Validation.validation_utils import plot_image, grid_based_eval_with_iou, plot_image3d, nms, concatenate_cells
import matplotlib.pyplot as plt
import cv2
def sigmoid(x):
return 1. / (1. + np.exp(-x))
batch = 2
model_obj = preparation(testing = True)
config = model_obj.config
a = data_gen(dataset=config.CU_test6_curve_hdf5_path, batchsize=batch, config=config, augment=False)
generator = a.batch_gen(test=True)
x_img, y, gt_image, gt_lanes = next(generator)
y = y[0]
concatenate_cells(y[0], config)
prediction = model_obj.predict(x_img)
scale_size_y = (1640 - 1) / config.img_w
scale_size_x = (590 - 1) / config.img_h
M = np.array([[scale_size_y, 0, 0],
              [0, scale_size_x, 0],
              [0, 0, 1.]])
M = M[0:2]
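# M scales predicted lane points from network resolution (config.img_w x
# config.img_h) back to the source frames (presumably CULane's 1640x590);
# keeping only its first two rows (done above) leaves the 2x3 affine form
# applied to the homogeneous coordinates built in the loop below.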
if config.splitted:
lok = prediction[-2]
conf = prediction[-1]
prediction = np.concatenate([lok, conf], axis=-1)
#elif config.staged:
# prediction = prediction[-1]
for i, s in enumerate(prediction):
s = nms(s, config)
plt.figure(1)
#f, axarr = plt.subplots(1, 2)
#axarr[0].imshow(gt_image[i,:,:,::-1].astype(np.uint8))
    #axarr[0].set_title('Ground Truth', color='0.7')
for a in gt_lanes[i]:
gt_image[i] = cv2.polylines(gt_image[i], np.int32([a]), isClosed=0, color=(0, 255, 0), thickness=10)
lanes_pred = concatenate_cells(s, config, prediction=True)
original_points = lanes_pred
for j, o in enumerate(original_points):
o = np.array(o).T
ones = np.ones_like(o[:, 0])
ones = ones[..., None]
        original_points[j] = np.concatenate((o, ones),
                                            axis=1)  # append a homogeneous coordinate so the 2x3 affine M can be applied via matmul
        original_points[j] = np.matmul(M, original_points[j].T).T  # transpose for the multiplication
    lanes = original_points  # pixel coordinates only
for a in lanes:
gt_image[i] = cv2.polylines(gt_image[i], np.int32([a]), isClosed=0,color=(0,0,255), thickness=10)
#pred_img = plot_image(s, config, with_print=True, plot_image =x_img[i,:,:])
plt.imshow(gt_image[i,:,:,::-1].astype(np.uint8))
# plt.set_title('Predicted', color='0.7')
# now 3d plot
plot_image3d(s, config, True, with_print=False)
# plot_image3d(y[i], config, False, with_print=False)
plt.show()
test = 0
|
[
"matplotlib.pyplot.show",
"numpy.ones_like",
"os.getcwd",
"cu__grid_cell.Validation.validation_utils.concatenate_cells",
"matplotlib.pyplot.figure",
"cu__grid_cell.data_gen.data_gen",
"numpy.array",
"numpy.exp",
"numpy.int32",
"numpy.matmul",
"cu__grid_cell.Validation.validation_utils.nms",
"cu__grid_cell.Validation.validation_utils.plot_image3d",
"numpy.concatenate",
"cu__grid_cell.preparation.preparation"
] |
[((18, 29), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (27, 29), False, 'import os\n'), ((392, 417), 'cu__grid_cell.preparation.preparation', 'preparation', ([], {'testing': '(True)'}), '(testing=True)\n', (403, 417), False, 'from cu__grid_cell.preparation import preparation\n'), ((450, 551), 'cu__grid_cell.data_gen.data_gen', 'data_gen', ([], {'dataset': 'config.CU_test6_curve_hdf5_path', 'batchsize': 'batch', 'config': 'config', 'augment': '(False)'}), '(dataset=config.CU_test6_curve_hdf5_path, batchsize=batch, config=\n config, augment=False)\n', (458, 551), False, 'from cu__grid_cell.data_gen import data_gen\n'), ((638, 669), 'cu__grid_cell.Validation.validation_utils.concatenate_cells', 'concatenate_cells', (['y[0]', 'config'], {}), '(y[0], config)\n', (655, 669), False, 'from cu__grid_cell.Validation.validation_utils import plot_image, grid_based_eval_with_iou, plot_image3d, nms, concatenate_cells\n'), ((796, 863), 'numpy.array', 'np.array', (['[[scale_size_y, 0, 0], [0, scale_size_x, 0], [0, 0, 1.0]]'], {}), '([[scale_size_y, 0, 0], [0, scale_size_x, 0], [0, 0, 1.0]])\n', (804, 863), True, 'import numpy as np\n'), ((2483, 2493), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2491, 2493), True, 'import matplotlib.pyplot as plt\n'), ((1014, 1050), 'numpy.concatenate', 'np.concatenate', (['[lok, conf]'], {'axis': '(-1)'}), '([lok, conf], axis=-1)\n', (1028, 1050), True, 'import numpy as np\n'), ((1150, 1164), 'cu__grid_cell.Validation.validation_utils.nms', 'nms', (['s', 'config'], {}), '(s, config)\n', (1153, 1164), False, 'from cu__grid_cell.Validation.validation_utils import plot_image, grid_based_eval_with_iou, plot_image3d, nms, concatenate_cells\n'), ((1169, 1182), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (1179, 1182), True, 'import matplotlib.pyplot as plt\n'), ((1486, 1531), 'cu__grid_cell.Validation.validation_utils.concatenate_cells', 'concatenate_cells', (['s', 'config'], {'prediction': '(True)'}), '(s, config, prediction=True)\n', (1503, 1531), False, 'from cu__grid_cell.Validation.validation_utils import plot_image, grid_based_eval_with_iou, plot_image3d, nms, concatenate_cells\n'), ((2377, 2424), 'cu__grid_cell.Validation.validation_utils.plot_image3d', 'plot_image3d', (['s', 'config', '(True)'], {'with_print': '(False)'}), '(s, config, True, with_print=False)\n', (2389, 2424), False, 'from cu__grid_cell.Validation.validation_utils import plot_image, grid_based_eval_with_iou, plot_image3d, nms, concatenate_cells\n'), ((1651, 1672), 'numpy.ones_like', 'np.ones_like', (['o[:, 0]'], {}), '(o[:, 0])\n', (1663, 1672), True, 'import numpy as np\n'), ((1733, 1766), 'numpy.concatenate', 'np.concatenate', (['(o, ones)'], {'axis': '(1)'}), '((o, ones), axis=1)\n', (1747, 1766), True, 'import numpy as np\n'), ((357, 367), 'numpy.exp', 'np.exp', (['(-x)'], {}), '(-x)\n', (363, 367), True, 'import numpy as np\n'), ((1409, 1422), 'numpy.int32', 'np.int32', (['[a]'], {}), '([a])\n', (1417, 1422), True, 'import numpy as np\n'), ((1622, 1633), 'numpy.array', 'np.array', (['o'], {}), '(o)\n', (1630, 1633), True, 'import numpy as np\n'), ((1926, 1960), 'numpy.matmul', 'np.matmul', (['M', 'original_points[j].T'], {}), '(M, original_points[j].T)\n', (1935, 1960), True, 'import numpy as np\n'), ((2114, 2127), 'numpy.int32', 'np.int32', (['[a]'], {}), '([a])\n', (2122, 2127), True, 'import numpy as np\n')]
|
import json
import falcon
from api.resources import BaseResource
from core.pedidos.exceptions import PedidoNotFoundException
from core.pedidos.gateway import PedidoGateway
class PedidosResource(BaseResource):
def on_get(self, req, resp, pedido_id=None):
pedido_gateway = PedidoGateway(self.db.session)
if pedido_id:
try:
pedidos = pedido_gateway.get_one(int(pedido_id))
content = pedidos.as_dict
except PedidoNotFoundException as exc:
resp.status = falcon.HTTP_404
resp.body = json.dumps({"erro": str(exc)})
return resp
else:
pedidos = pedido_gateway.get_all()
content = [pedido.as_dict for pedido in pedidos]
resp.status = falcon.HTTP_200
resp.body = json.dumps(content)
def on_post(self, req, resp):
pedido_gateway = PedidoGateway(self.db.session)
body = req.bounded_stream.read().decode()
if not body:
resp.status = falcon.HTTP_PRECONDITION_FAILED
resp.body = json.dumps({"erro": "POST precisa conter um body."})
return resp
raw_json = json.loads(body)
data = raw_json["data"]
cliente_id = raw_json["cliente_id"]
valor = raw_json["valor"]
pedido_gateway.create(data, cliente_id, valor)
resp.status = falcon.HTTP_201
def on_put(self, req, resp, pedido_id=None):
pedido_gateway = PedidoGateway(self.db.session)
if not pedido_id:
resp.status = falcon.HTTP_412
resp.body = json.dumps({"erro": "Metodo PUT requer o campo 'pedido_id' na URL"})
return resp
resp.status = falcon.HTTP_200
raw_json = json.loads(req.bounded_stream.read().decode())
data = raw_json.get("data", None)
cliente_id = raw_json.get("cliente_id", None)
valor = raw_json.get("valor", None)
try:
pedido_gateway.update(pedido_id, data, cliente_id, valor)
except PedidoNotFoundException as exc:
resp.status = falcon.HTTP_404
resp.body = json.dumps({"erro": str(exc)})
return resp
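# Routing sketch (hypothetical; the real app wiring lives elsewhere in the
# repo. Falcon 3.x exposes falcon.App, older releases falcon.API):
#
#   app = falcon.App()
#   app.add_route('/pedidos', PedidosResource())
#   app.add_route('/pedidos/{pedido_id}', PedidosResource())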
|
[
"core.pedidos.gateway.PedidoGateway",
"json.dumps",
"json.loads"
] |
[((287, 317), 'core.pedidos.gateway.PedidoGateway', 'PedidoGateway', (['self.db.session'], {}), '(self.db.session)\n', (300, 317), False, 'from core.pedidos.gateway import PedidoGateway\n'), ((830, 849), 'json.dumps', 'json.dumps', (['content'], {}), '(content)\n', (840, 849), False, 'import json\n'), ((910, 940), 'core.pedidos.gateway.PedidoGateway', 'PedidoGateway', (['self.db.session'], {}), '(self.db.session)\n', (923, 940), False, 'from core.pedidos.gateway import PedidoGateway\n'), ((1192, 1208), 'json.loads', 'json.loads', (['body'], {}), '(body)\n', (1202, 1208), False, 'import json\n'), ((1487, 1517), 'core.pedidos.gateway.PedidoGateway', 'PedidoGateway', (['self.db.session'], {}), '(self.db.session)\n', (1500, 1517), False, 'from core.pedidos.gateway import PedidoGateway\n'), ((1095, 1147), 'json.dumps', 'json.dumps', (["{'erro': 'POST precisa conter um body.'}"], {}), "({'erro': 'POST precisa conter um body.'})\n", (1105, 1147), False, 'import json\n'), ((1611, 1679), 'json.dumps', 'json.dumps', (['{\'erro\': "Metodo PUT requer o campo \'pedido_id\' na URL"}'], {}), '({\'erro\': "Metodo PUT requer o campo \'pedido_id\' na URL"})\n', (1621, 1679), False, 'import json\n')]
|
import time
import string
import json
import sys
import paho.mqtt.publish as publish
import paho.mqtt.client as mqtt
from workload_config import * # Read configuration
import utils # Utilities file in this dir (utils.py)
def post_networkdata_single_wiotp(jsonpayload, event_id, heart_beat=False):
"""Tries once to send network data in json format to WIoTP via mqtt.
Returns 1 if successful, 0 if not, -1 if failed because not registered.
"""
try:
retain = True
qos = 2 # since speed data is sent so infrequently we can afford to make sure it gets there exactly once
if debug_flag:
utils.print_("mqtt_pub.py: Sending data to mqtt... \
mqtt_topic=%s, mqtt_broker=%s, client_id=%s" % (mqtt_topic, mqtt_broker, mqtt_client_id))
# Publish to MQTT
publish.single(topic=mqtt_topic, payload=jsonpayload, qos=qos, hostname=mqtt_broker,
protocol=mqtt.MQTTv311, client_id=mqtt_client_id, port=mqtt_port, #auth=mqtt_auth,
tls=mqtt_tls, retain=retain)
if debug_flag: utils.print_('mqtt_pub.py: Send to mqtt successful')
return 1
    except Exception:
e = sys.exc_info()[1]
if 'not authori' in str(e).lower() or 'bad user name or password' in str(e).lower():
# The data send failed because we are not successfully registered
return -1
else:
utils.print_('Send to mqtt failed: %s' % e)
return 0
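# Example call (sketch; assumes workload_config supplies the mqtt_* settings
# and that the payload is already JSON-encoded):
#
#   rc = post_networkdata_single_wiotp(json.dumps({"mbps": 42.0}), "speedtest")
#   if rc == -1:
#       pass  # not registered with WIoTP; re-register before retrying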
|
[
"utils.print_",
"sys.exc_info",
"paho.mqtt.publish.single"
] |
[((868, 1057), 'paho.mqtt.publish.single', 'publish.single', ([], {'topic': 'mqtt_topic', 'payload': 'jsonpayload', 'qos': 'qos', 'hostname': 'mqtt_broker', 'protocol': 'mqtt.MQTTv311', 'client_id': 'mqtt_client_id', 'port': 'mqtt_port', 'tls': 'mqtt_tls', 'retain': 'retain'}), '(topic=mqtt_topic, payload=jsonpayload, qos=qos, hostname=\n mqtt_broker, protocol=mqtt.MQTTv311, client_id=mqtt_client_id, port=\n mqtt_port, tls=mqtt_tls, retain=retain)\n', (882, 1057), True, 'import paho.mqtt.publish as publish\n'), ((674, 840), 'utils.print_', 'utils.print_', (["('mqtt_pub.py: Sending data to mqtt... mqtt_topic=%s, mqtt_broker=%s, client_id=%s'\n % (mqtt_topic, mqtt_broker, mqtt_client_id))"], {}), "(\n 'mqtt_pub.py: Sending data to mqtt... mqtt_topic=%s, mqtt_broker=%s, client_id=%s'\n % (mqtt_topic, mqtt_broker, mqtt_client_id))\n", (686, 840), False, 'import utils\n'), ((1112, 1164), 'utils.print_', 'utils.print_', (['"""mqtt_pub.py: Send to mqtt successful"""'], {}), "('mqtt_pub.py: Send to mqtt successful')\n", (1124, 1164), False, 'import utils\n'), ((1206, 1220), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1218, 1220), False, 'import sys\n'), ((1443, 1486), 'utils.print_', 'utils.print_', (["('Send to mqtt failed: %s' % e)"], {}), "('Send to mqtt failed: %s' % e)\n", (1455, 1486), False, 'import utils\n')]
|
# Generated by Django 3.1.6 on 2021-09-24 14:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0023_auto_20210924_1400'),
]
operations = [
migrations.RemoveField(
model_name='objectpermission',
name='permission_level',
),
migrations.AddField(
model_name='objectpermission',
name='can_delete',
field=models.BooleanField(default=True),
preserve_default=False,
),
migrations.AddField(
model_name='objectpermission',
name='can_execute',
field=models.BooleanField(default=True),
preserve_default=False,
),
migrations.AddField(
model_name='objectpermission',
name='can_update',
field=models.BooleanField(default=True),
preserve_default=False,
),
migrations.AddField(
model_name='objectpermission',
name='can_view',
field=models.BooleanField(default=True),
preserve_default=False,
),
]
|
[
"django.db.migrations.RemoveField",
"django.db.models.BooleanField"
] |
[((233, 311), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""objectpermission"""', 'name': '"""permission_level"""'}), "(model_name='objectpermission', name='permission_level')\n", (255, 311), False, 'from django.db import migrations, models\n'), ((469, 502), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (488, 502), False, 'from django.db import migrations, models\n'), ((673, 706), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (692, 706), False, 'from django.db import migrations, models\n'), ((876, 909), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (895, 909), False, 'from django.db import migrations, models\n'), ((1077, 1110), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1096, 1110), False, 'from django.db import migrations, models\n')]
|
######################################################################################################################
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except in compliance #
# with the License. A copy of the License is located at #
# #
# http://www.apache.org/licenses/ #
# #
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES #
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
# and limitations under the License. #
######################################################################################################################
import boto3
import services.elbv2_service
class ElbV2(object):
def __init__(self, region=None, session=None):
self.region = region if region is not None else boto3.Session().region_name
self.session = session if session is not None else boto3.Session(region_name=self.region)
self.elbv2_client = self.session.client("elbv2", region_name=self.region)
self.elbv2_service = services.elbv2_service.Elbv2Service(session=self.session)
def register_instance(self, target_group_arn, instance_id, port=None, availability_zone=None):
target = {
"Id": instance_id
}
if port is not None:
target["Port"] = port
if availability_zone is not None:
target["AvailabilityZone"] = availability_zone
self.elbv2_client.register_targets(TargetGroupArn=target_group_arn, Targets=[target])
def get_instance_target_groups(self, instance_id):
result = []
args = {
"service_resource": services.elbv2_service.TARGET_GROUPS,
"region": self.region,
}
target_groups = list(self.elbv2_service.describe(**args))
for target_group in target_groups:
target_group_healths = list(self.elbv2_service.describe(services.elbv2_service.TARGET_HEALTH,
TargetGroupArn=target_group["TargetGroupArn"]))
for target_group_health in target_group_healths:
target = target_group_health["Target"]
if target["Id"] != instance_id:
continue
result.append(target_group.get("TargetGroupArn"))
return result
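# Usage sketch (hypothetical region, target group ARNs and instance ids):
#
#   elb = ElbV2(region="us-east-1")
#   for arn in elb.get_instance_target_groups("i-0123456789abcdef0"):
#       elb.register_instance(arn, "i-0fedcba9876543210", port=8080)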
|
[
"boto3.Session"
] |
[((1702, 1740), 'boto3.Session', 'boto3.Session', ([], {'region_name': 'self.region'}), '(region_name=self.region)\n', (1715, 1740), False, 'import boto3\n'), ((1615, 1630), 'boto3.Session', 'boto3.Session', ([], {}), '()\n', (1628, 1630), False, 'import boto3\n')]
|
from __future__ import print_function
import tensorflow
import tensorflow.keras as keras
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Flatten
from tensorflow.keras.layers import Conv2D, MaxPooling2D
from tensorflow.keras.callbacks import ModelCheckpoint, TensorBoard
from tensorflow.keras import backend as K
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
gpu_devices = tensorflow.config.experimental.list_physical_devices('GPU')
if gpu_devices:  # guard against machines with no visible GPU
    tensorflow.config.experimental.set_memory_growth(gpu_devices[0], True)
#print("GPUs: " + gpu_devices[0])
gpus = tensorflow.test.gpu_device_name()
print("GPUs: " + gpus)
batch_size = 128
num_classes = 10
epochs = 12
# input image dimensions
img_rows, img_cols = 28, 28
# the data, split between train and test sets
(x_train, y_train), (x_test, y_test) = mnist.load_data()
if K.image_data_format() == 'channels_first':
x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)
x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)
input_shape = (1, img_rows, img_cols)
else:
x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
input_shape = (img_rows, img_cols, 1)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3),
activation='relu',
input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss=keras.losses.categorical_crossentropy,
optimizer=keras.optimizers.Adadelta(),
metrics=['accuracy'])
best_check = ModelCheckpoint(filepath="model-best.h5", verbose=1, save_weights_only=True, save_best_only=True)
model.fit(x_train, y_train,
batch_size=batch_size,
epochs=epochs,
verbose=1,
validation_data=(x_test, y_test),
callbacks=[best_check])
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
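# To score the best checkpoint rather than the final epoch's weights (sketch;
# loads the file written by the ModelCheckpoint callback above):
#
#   model.load_weights("model-best.h5")
#   print(model.evaluate(x_test, y_test, verbose=0))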
|
[
"tensorflow.keras.backend.image_data_format",
"tensorflow.keras.optimizers.Adadelta",
"tensorflow.keras.utils.to_categorical",
"tensorflow.keras.layers.Conv2D",
"tensorflow.keras.layers.MaxPooling2D",
"tensorflow.keras.layers.Dropout",
"tensorflow.keras.layers.Dense",
"tensorflow.config.experimental.set_memory_growth",
"tensorflow.keras.callbacks.ModelCheckpoint",
"tensorflow.keras.datasets.mnist.load_data",
"tensorflow.keras.models.Sequential",
"tensorflow.config.experimental.list_physical_devices",
"tensorflow.test.gpu_device_name",
"tensorflow.keras.layers.Flatten"
] |
[((474, 533), 'tensorflow.config.experimental.list_physical_devices', 'tensorflow.config.experimental.list_physical_devices', (['"""GPU"""'], {}), "('GPU')\n", (526, 533), False, 'import tensorflow\n'), ((534, 604), 'tensorflow.config.experimental.set_memory_growth', 'tensorflow.config.experimental.set_memory_growth', (['gpu_devices[0]', '(True)'], {}), '(gpu_devices[0], True)\n', (582, 604), False, 'import tensorflow\n'), ((647, 680), 'tensorflow.test.gpu_device_name', 'tensorflow.test.gpu_device_name', ([], {}), '()\n', (678, 680), False, 'import tensorflow\n'), ((891, 908), 'tensorflow.keras.datasets.mnist.load_data', 'mnist.load_data', ([], {}), '()\n', (906, 908), False, 'from tensorflow.keras.datasets import mnist\n'), ((1603, 1651), 'tensorflow.keras.utils.to_categorical', 'keras.utils.to_categorical', (['y_train', 'num_classes'], {}), '(y_train, num_classes)\n', (1629, 1651), True, 'import tensorflow.keras as keras\n'), ((1661, 1708), 'tensorflow.keras.utils.to_categorical', 'keras.utils.to_categorical', (['y_test', 'num_classes'], {}), '(y_test, num_classes)\n', (1687, 1708), True, 'import tensorflow.keras as keras\n'), ((1718, 1730), 'tensorflow.keras.models.Sequential', 'Sequential', ([], {}), '()\n', (1728, 1730), False, 'from tensorflow.keras.models import Sequential\n'), ((2267, 2368), 'tensorflow.keras.callbacks.ModelCheckpoint', 'ModelCheckpoint', ([], {'filepath': '"""model-best.h5"""', 'verbose': '(1)', 'save_weights_only': '(True)', 'save_best_only': '(True)'}), "(filepath='model-best.h5', verbose=1, save_weights_only=True,\n save_best_only=True)\n", (2282, 2368), False, 'from tensorflow.keras.callbacks import ModelCheckpoint, TensorBoard\n'), ((913, 934), 'tensorflow.keras.backend.image_data_format', 'K.image_data_format', ([], {}), '()\n', (932, 934), True, 'from tensorflow.keras import backend as K\n'), ((1741, 1815), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(32)'], {'kernel_size': '(3, 3)', 'activation': '"""relu"""', 'input_shape': 'input_shape'}), "(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape)\n", (1747, 1815), False, 'from tensorflow.keras.layers import Conv2D, MaxPooling2D\n'), ((1861, 1898), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(3, 3)'], {'activation': '"""relu"""'}), "(64, (3, 3), activation='relu')\n", (1867, 1898), False, 'from tensorflow.keras.layers import Conv2D, MaxPooling2D\n'), ((1910, 1940), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)'}), '(pool_size=(2, 2))\n', (1922, 1940), False, 'from tensorflow.keras.layers import Conv2D, MaxPooling2D\n'), ((1952, 1965), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.25)'], {}), '(0.25)\n', (1959, 1965), False, 'from tensorflow.keras.layers import Dense, Dropout, Flatten\n'), ((1977, 1986), 'tensorflow.keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (1984, 1986), False, 'from tensorflow.keras.layers import Dense, Dropout, Flatten\n'), ((1998, 2027), 'tensorflow.keras.layers.Dense', 'Dense', (['(128)'], {'activation': '"""relu"""'}), "(128, activation='relu')\n", (2003, 2027), False, 'from tensorflow.keras.layers import Dense, Dropout, Flatten\n'), ((2039, 2051), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (2046, 2051), False, 'from tensorflow.keras.layers import Dense, Dropout, Flatten\n'), ((2063, 2103), 'tensorflow.keras.layers.Dense', 'Dense', (['num_classes'], {'activation': '"""softmax"""'}), "(num_classes, activation='softmax')\n", (2068, 2103), False, 'from tensorflow.keras.layers import Dense, Dropout, Flatten\n'), ((2188, 2215), 'tensorflow.keras.optimizers.Adadelta', 'keras.optimizers.Adadelta', ([], {}), '()\n', (2213, 2215), True, 'import tensorflow.keras as keras\n')]
|
"""Tests for managers of contributions (media files)."""
import os
import glob
from PIL import Image
from StringIO import StringIO
from django.core.files.base import ContentFile
from django.test import TestCase
from django.conf import settings
from nose.tools import raises
from geokey.core.exceptions import FileTypeError
from geokey.core.tests.helpers.image_helpers import get_image
from geokey.contributions.models import MediaFile
from geokey.contributions.tests.model_factories import ObservationFactory
from geokey.users.tests.model_factories import UserFactory
from .model_factories import ImageFileFactory
class ModelManagerTest(TestCase):
def tearDown(self):
files = glob.glob(os.path.join(
settings.MEDIA_ROOT,
'user-uploads/images/*'
))
for f in files:
os.remove(f)
def test_get_queryset(self):
ImageFileFactory.create_batch(3)
files = MediaFile.objects.all()
self.assertEqual(len(files), 3)
for f in files:
self.assertEqual('ImageFile', f.type_name)
def test_create_image(self):
image_file = MediaFile.objects.create(
name='<NAME>',
description='Test Description',
contribution=ObservationFactory.create(),
creator=UserFactory.create(),
the_file=get_image()
)
self.assertIsNotNone(image_file.image)
self.assertEqual(image_file.type_name, 'ImageFile')
@raises(FileTypeError)
def test_create_not_supported(self):
xyz_file = StringIO()
xyz = Image.new('RGBA', size=(50, 50), color=(256, 0, 0))
xyz.save(xyz_file, 'png')
xyz_file.seek(0)
the_file = ContentFile(xyz_file.read(), 'test.xyz')
the_file.content_type = 'chemical/x-xyz'
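        # 'chemical/x-xyz' is not an accepted content type, so the create()
        # call below should raise FileTypeError (asserted via @raises above)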
MediaFile.objects.create(
name='<NAME>',
description='Test Description',
contribution=ObservationFactory.create(),
creator=UserFactory.create(),
the_file=the_file
)
|
[
"PIL.Image.new",
"os.remove",
"geokey.contributions.tests.model_factories.ObservationFactory.create",
"geokey.core.tests.helpers.image_helpers.get_image",
"geokey.users.tests.model_factories.UserFactory.create",
"nose.tools.raises",
"os.path.join",
"StringIO.StringIO",
"geokey.contributions.models.MediaFile.objects.all"
] |
[((1489, 1510), 'nose.tools.raises', 'raises', (['FileTypeError'], {}), '(FileTypeError)\n', (1495, 1510), False, 'from nose.tools import raises\n'), ((940, 963), 'geokey.contributions.models.MediaFile.objects.all', 'MediaFile.objects.all', ([], {}), '()\n', (961, 963), False, 'from geokey.contributions.models import MediaFile\n'), ((1571, 1581), 'StringIO.StringIO', 'StringIO', ([], {}), '()\n', (1579, 1581), False, 'from StringIO import StringIO\n'), ((1596, 1647), 'PIL.Image.new', 'Image.new', (['"""RGBA"""'], {'size': '(50, 50)', 'color': '(256, 0, 0)'}), "('RGBA', size=(50, 50), color=(256, 0, 0))\n", (1605, 1647), False, 'from PIL import Image\n'), ((706, 764), 'os.path.join', 'os.path.join', (['settings.MEDIA_ROOT', '"""user-uploads/images/*"""'], {}), "(settings.MEDIA_ROOT, 'user-uploads/images/*')\n", (718, 764), False, 'import os\n'), ((836, 848), 'os.remove', 'os.remove', (['f'], {}), '(f)\n', (845, 848), False, 'import os\n'), ((1261, 1288), 'geokey.contributions.tests.model_factories.ObservationFactory.create', 'ObservationFactory.create', ([], {}), '()\n', (1286, 1288), False, 'from geokey.contributions.tests.model_factories import ObservationFactory\n'), ((1310, 1330), 'geokey.users.tests.model_factories.UserFactory.create', 'UserFactory.create', ([], {}), '()\n', (1328, 1330), False, 'from geokey.users.tests.model_factories import UserFactory\n'), ((1353, 1364), 'geokey.core.tests.helpers.image_helpers.get_image', 'get_image', ([], {}), '()\n', (1362, 1364), False, 'from geokey.core.tests.helpers.image_helpers import get_image\n'), ((1948, 1975), 'geokey.contributions.tests.model_factories.ObservationFactory.create', 'ObservationFactory.create', ([], {}), '()\n', (1973, 1975), False, 'from geokey.contributions.tests.model_factories import ObservationFactory\n'), ((1997, 2017), 'geokey.users.tests.model_factories.UserFactory.create', 'UserFactory.create', ([], {}), '()\n', (2015, 2017), False, 'from geokey.users.tests.model_factories import UserFactory\n')]
|
from Utilities import *
import math
from rlbot.agents.base_agent import BaseAgent, SimpleControllerState
from rlbot.utils.structures.game_data_struct import GameTickPacket
from rlbot.utils.game_state_util import GameState, BallState, CarState, Physics, Vector3, Rotator
import random
"""
Right corner loc: (-2048, -2560), yaw: 0.25 pi loc: (2048, 2560), yaw: -0.75 pi
Left corner loc: (2048, -2560), yaw: 0.75 pi loc: (-2048, 2560), yaw: -0.25 pi
Back right loc: (-256.0, -3840), yaw: 0.5 pi loc: (256.0, 3840), yaw: -0.5 pi
Back left loc: (256.0, -3840), yaw: 0.5 pi loc: (-256.0, 3840), yaw: -0.5 pi
Far back center loc: (0.0, -4608), yaw: 0.5 pi loc: (0.0, 4608), yaw: -0.5 pi
"""
def getKickoffPosition(vec):
kickoff_locations = [[2048, 2560], [256, 3848], [0, 4608]]
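    # reference spawn |x| offsets: 2048 (corner), 256 (back left/right), 0 (far back center)
    # |x| >= 350 -> corner kickoff (0); 5 < |x| < 350 -> back kickoff (1); else far back center (2)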
if abs(vec[0]) >= 350:
return 0
elif abs(vec[0]) > 5:
return 1
else:
return 2
class baseState:
def __init__(self, agent):
self.agent = agent
self.active = True
def __repr__(self):
return f"{type(self).__name__}"
class State:
RESET = 0
WAIT = 1
INITIALIZE = 2
RUNNING = 3
class GetBoost(baseState):
def update(self):
return saferBoostGrabber(self.agent)
class airLaunch(baseState):
def __init__(self,agent):
baseState.__init__(self,agent)
self.initiated = agent.time
self.jumpTimer = agent.time
self.firstJump = False
self.secondJump = False
self.firstJumpHold = 0.5
self.secondJumpHold = 0.4
self.active = True
def update(self):
stateController = SimpleControllerState()
if not self.firstJump:
self.firstJump = True
stateController.jump = True
self.jumpTimer = self.agent.time
elif self.firstJump and not self.secondJump:
if self.agent.time - self.jumpTimer < self.firstJumpHold:
stateController.jump = True
elif self.agent.time - self.jumpTimer > self.firstJumpHold and self.agent.time - self.jumpTimer < self.firstJumpHold +.05:
stateController.boost = True
stateController.jump = False
else:
self.secondJump = True
stateController.boost = True
self.jumpTimer = self.agent.time
else:
if self.agent.time - self.jumpTimer < self.secondJumpHold:
stateController.jump = True
stateController.boost = True
else:
self.active = False
self.jump = False
self.agent.activeState = DivineGrace(self.agent)
if self.agent.time - self.jumpTimer > 0.15 and self.agent.time - self.jumpTimer < 0.35:
stateController.pitch = 1
return stateController
class Aerial():
def __init__(self,agent,target,time):
self.active = False
self.agent = agent
self.target = target
self.time = clamp(10,0.00001,time)
self.jumping = False
self.jumpTimer = 0
self.airborne = False
self.launcher = None
self.setup()
def setup(self):
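        # backsolve (from Utilities) appears to compute the delta-v vector
        # required to reach the target in the allotted time; only arm the
        # aerial when the car's available delta-v budget covers it.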
dv_target = backsolve(self.target, self.agent, self.time)
if self.agent.deltaV >= dv_target.magnitude():
self.dv_target = dv_target
self.active = True
self.launcher = airLaunch(self.agent)
def update(self):
        # takes the agent, an intercept point, and an intercept time. Adjusts the
        # agent's controller (agent.c) to perform an aerial
        self.time = clamp(10, 0.00001, self.time - self.agent.deltaTime)
before = self.jumping
dv_target = backsolve(self.target, self.agent, self.time)
dv_total = dv_target.magnitude()
dv_local = matrixDot(self.agent.me.matrix, dv_target)
# dv_local = agent.me.matrix.dot(dv_target)
angles,self.controller = defaultPD(self.agent, dv_local)
print(self.controller.yaw,self.controller.pitch,self.controller.roll)
precision = clamp(0.6, 0.05, dv_total / 1500)
# precision = cap((dv_total/1500),0.05, 0.60)
# if dv_local[2] > 100 or not self.airborne and self.agent.onSurface: #agent.me.airborne == False:
# #if agent.sinceJump < 0.3:
# if self.jumpTimer < 0.3:
# self.jumping = True
# if before != True:
# self.controller.pitch = self.controller.yaw = self.controller.roll = 0
#
# elif self.jumpTimer >= 0.32:
# self.jumping = True
# self.airborne = True
# if before != True:
# self.controller.pitch = self.controller.yaw = self.controller.roll = 0
# #agent.c.pitch = agent.c.yaw = agent.c.roll = 0
# else:
# self.jumping = False
# #agent.c.jump = False
# else:
# self.jumping = False
# #agent.c.jump = False
if self.launcher.active:
return self.launcher.update()
else:
if dv_total > 50:
if abs(angles[1]) + abs(angles[2]) < precision:
self.controller.boost = True
#agent.c.boost = True
else:
self.controller.boost = False
#print(dv_total)
#agent.c.boost = False
else:
fly_target = self.agent.me.matrix.dot(self.target - self.agent.me.location)
angles = defaultPD(self.agent, fly_target)
self.controller.boost = False
#self.controller.jump = self.jumping
if self.time <= 0.0001:
self.active = False
print("timed out?")
return self.controller
class Celestial_Arrest(baseState):
def __init__(self,agent):
self.active = True
self.agent = agent
def update(self):
pass
class LeapOfFaith(baseState):
def __init__(self,agent, targetCode,target = None):
self.agent = agent
self.active = True
        self.targetCode = targetCode  # 0 flip at ball, 1 flip forward, 2 double jump, 3 flip backwards, 4 flip left, 5 flip right, 6 flip at target, 7 left-forward diagonal flip, 8 right-forward diagonal flip, 9/10 diagonal flip cancels
self.flip_obj = FlipStatus(agent.time)
self.target = target
self.cancelTimerCap = .3
self.cancelStartTime = None
self.jumped = False
def update(self):
controller_state = SimpleControllerState()
jump = flipHandler(self.agent, self.flip_obj)
if jump:
if self.targetCode == 1:
controller_state.pitch = -1
controller_state.steer = 0
controller_state.throttle = 1
elif self.targetCode == 0:
ball_local = toLocal(self.agent.ball.location, self.agent.me).normalize()
ball_angle = math.atan2(ball_local.data[1], ball_local.data[0])
controller_state.jump = True
controller_state.yaw = math.sin(ball_angle)
pitch = -math.cos(ball_angle)
controller_state.pitch = pitch
if pitch > 0:
controller_state.throttle = -1
else:
controller_state.throttle = 1
elif self.targetCode == 2:
controller_state.pitch = 0
controller_state.steer = 0
controller_state.yaw = 0
elif self.targetCode == 3:
controller_state.pitch = 1
controller_state.steer = 0
controller_state.throttle = -1
elif self.targetCode == -1:
controller_state.pitch = 0
controller_state.steer = 0
controller_state.throttle = 0
elif self.targetCode == 4:
controller_state.pitch = 0
controller_state.yaw = -1
controller_state.steer = -1
controller_state.throttle = -0
elif self.targetCode == 5:
controller_state.pitch = 0
controller_state.yaw = 1
controller_state.steer = 1
controller_state.throttle = -0
elif self.targetCode == 6:
target_local = toLocal(self.target, self.agent.me).normalize()
target_angle = math.atan2(target_local.data[1], target_local.data[0])
controller_state.jump = True
controller_state.yaw = math.sin(target_angle)
pitch = -math.cos(target_angle)
controller_state.pitch = pitch
if pitch > 0:
controller_state.throttle = -1
else:
controller_state.throttle = 1
elif self.targetCode == 7:
controller_state.pitch = -1
controller_state.yaw = -1
controller_state.steer = -1
controller_state.throttle = 1
elif self.targetCode == 8:
controller_state.pitch = -1
controller_state.yaw = 1
controller_state.steer = 1
controller_state.throttle = 1
elif self.targetCode == 9:
            # diagonal flip cancel
controller_state.pitch = -1
controller_state.roll = -1
#controller_state.steer = -1
controller_state.throttle = 1
elif self.targetCode == 10:
            # diagonal flip cancel
controller_state.pitch = -1
controller_state.roll = 1
#controller_state.steer = -1
controller_state.throttle = 1
elif self.targetCode == -1:
controller_state.pitch = 0
controller_state.steer = 0
controller_state.throttle = 0
controller_state.jump = jump
controller_state.boost = False
if self.targetCode == 7 or self.targetCode == 8:
controller_state.boost = True
if self.flip_obj.flipDone:
            if self.targetCode != 9 and self.targetCode != 10:
self.active = False
else:
if not self.cancelStartTime:
self.cancelStartTime = self.agent.time
return controller_state
if self.targetCode == 9:
controller_state.pitch = 1
controller_state.roll = 1
controller_state.throttle = 1
else:
controller_state.pitch = 1
controller_state.roll = -1
controller_state.throttle = 1
if self.agent.time - self.cancelStartTime >= self.cancelTimerCap:
self.active = False
# if self.agent.forward:
# controller_state.throttle = 1
# else:
# controller_state.throttle = -1
return controller_state
class Action_chain():
#class for performing consecutive actions over a period of time. Example: Flipping forward
def __init__(self, agent,controls_list: list, durations_list : list):
self.controls = controls_list
self.durations = durations_list
self.complete = False
self.index = 0
self.current_duration = 0
self.agent = agent
# there should be a duration in the durations for every controller given in the list. This inserts 0 for any lacking
if len(durations_list) < len(controls_list):
            self.durations += [0] * (len(controls_list) - len(durations_list))
self.active = True
def create_custom_controls(self,actionCode):
        # perform specialized actions when building the controllers at construction time wasn't feasible
controller_state = SimpleControllerState()
if actionCode == 0:
ball_local = toLocal(self.agent.ball.location, self.agent.me).normalize()
ball_angle = math.atan2(ball_local.data[1], ball_local.data[0])
controller_state.jump = True
controller_state.yaw = clamp(1,-1,math.sin(ball_angle))
controller_state.pitch = clamp(1,-1,-math.cos(ball_angle))
print(self.agent.me.location[2])
return controller_state
    def update(self):  # call this once per frame; uses agent.deltaTime to return updated controls
self.current_duration += self.agent.deltaTime
if self.current_duration > self.durations[self.index]:
self.index+=1
self.current_duration = 0
if self.index == len(self.controls):
self.active = False
return SimpleControllerState()
if type(self.controls[self.index]) == SimpleControllerState:
return self.controls[self.index]
else:
return self.create_custom_controls(self.controls[self.index])
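# Example: a short forward-flip chain (sketch; SimpleControllerState accepts
# these keyword arguments in current RLBot releases, and action code 0 maps to
# the custom flip-at-ball controls above):
#
#   jump = SimpleControllerState(jump=True, pitch=-1)
#   release = SimpleControllerState(jump=False)
#   chain = Action_chain(agent, [jump, release, 0], [0.1, 0.05, 0.0])
#   while chain.active:
#       controls = chain.update()  # feed these to the framework each tick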
class RighteousVolley(baseState):
def __init__(self,agent,delay,target):
baseState.__init__(self,agent)
self.smartAngle = False
self.target = target
height = target[2]
boomerDelay = 0.05
# if len(agent.allies) < 1:
# boomerDelay = 0
delay = clamp(1.25,.3,delay+boomerDelay)
if delay >= .3:
if height <= 200:
#print("tiny powershot")
self.jumpTimerMax = .1
self.angleTimer = clamp(.15,.05,self.jumpTimerMax/2)
else:
#print("normal powershot")
self.jumpTimerMax = delay-.2
self.angleTimer = clamp(.15, .1, self.jumpTimerMax / 2)
self.delay = delay
if self.delay >= .5:
self.smartAngle = True
self.jumped = False
self.jumpTimer = 0
#print("setting action to powershot")
def update(self):
controller_state = SimpleControllerState()
controller_state.throttle = 0
controller_state.boost = False
ball_local = toLocal(self.agent.ball.location, self.agent.me).normalize()
#ball_local = toLocal(self.target, self.agent.me)
ball_angle = math.atan2(ball_local.data[1], ball_local.data[0])
angle_degrees = correctAngle(math.degrees(ball_angle))
if not self.jumped:
self.jumped = True
controller_state.jump = True
return controller_state
else:
self.jumpTimer += self.agent.deltaTime
if self.jumpTimer < self.angleTimer:
controller_state.pitch = 1
if self.jumpTimer < self.jumpTimerMax:
controller_state.jump = True
else:
controller_state.jump = False
if self.jumpTimer > self.jumpTimerMax:
if self.jumpTimer >= self.delay-.2 and self.jumpTimer < self.delay-.15:
controller_state.jump = False
elif self.jumpTimer >= self.delay-.15 and self.jumpTimer < self.delay:
controller_state.yaw = math.sin(ball_angle)
controller_state.pitch = -math.cos(ball_angle)
controller_state.jump = True
elif self.jumpTimer < self.delay+.1:
controller_state.jump = False
else:
self.active = False
controller_state.jump = False
return controller_state
class DivineRetribution():
def __init__(self,agent,targetCar):
self.agent = agent
self.targetCar = targetCar
self.active = True
def update(self,):
action = demoTarget(self.agent,self.targetCar)
return action
class DemolitionBot():
def __init__(self,agent):
self.agent = agent
self.active = True
def update(self):
target = self.agent.closestEnemyToBall
valid = False
if target.location[2] <= 90:
if ((target.location[1] > self.agent.ball.location[1] and target.location[1] < self.agent.me.location[1]) or
(target.location[1] < self.agent.ball.location[1] and target.location[1] > self.agent.me.location[1])):
valid = True
if valid:
return demoEnemyCar(self.agent,target)
else:
self.active = False
return ShellTime(self.agent)
class GroundShot(baseState):
def __init__(self, agent):
self.agent = agent
self.active = True
def update(self):
return lineupShot(self.agent,3)
class GroundAssault(baseState):
def __init__(self, agent):
self.agent = agent
self.active = True
def update(self):
return lineupShot(self.agent,1)
class HolyGrenade(baseState):
def __init__(self, agent):
self.agent = agent
self.active = True
def update(self):
return handleBounceShot(self.agent)
class HolyProtector(baseState):
def update(self):
return ShellTime(self.agent)
class AerialDefend(baseState):
pass
class TurnTowardsPosition(baseState):
def __init__(self,agent,target,targetCode): #0 = ball.location
baseState.__init__(self,agent)
self.target = target
self.threshold = 1
self.targetCode = targetCode
def update(self):
if self.targetCode == 0:
self.target = self.agent.ball.location
localTarg = toLocal(self.target,self.agent.me)
localAngle = correctAngle(math.degrees(math.atan2(localTarg[1],localTarg[0])))
controls = SimpleControllerState()
if abs(localAngle) > self.threshold:
if self.agent.forward:
if localAngle > 0:
controls.steer = 1
else:
controls.steer = -1
controls.handbrake = True
if self.agent.currentSpd <300:
controls.throttle = .5
else:
if localAngle > 0:
controls.steer = -.5
else:
controls.steer = 1
controls.handbrake = True
if self.agent.currentSpd <300:
controls.throttle = -.5
else:
self.active = False
return controls
class Obstruct(baseState):
def update(self):
if not kickOffTest(self.agent):
return turtleTime(self.agent)
else:
self.active = False
self.agent.activeState = PreemptiveStrike(self.agent)
return self.agent.activeState.update()
"""
def getKickoffPosition(vec):
kickoff_locations = [[2048, 2560], [256, 3848], [0, 4608]]
for i in range(len(kickoff_locations)):
if kickoff_locations[i] == [abs(vec[0]),abs(vec[1])]:
return i
return -1
"""
class Kickoff(baseState):
def __init__(self,agent):
self.agent = agent
self.started = False
self.firstFlip = False
self.secondFlip = False
self.finalFlipDistance = 750
self.active = True
self.startTime = agent.time
self.flipState = None
def fakeKickOffChecker(self):
closestToBall, bDist = findEnemyClosestToLocation(self.agent, self.agent.ball.location)
myDist = findDistance(self.agent.me.location,self.agent.ball.location)
if bDist:
if bDist <= myDist*.75:
return True
else:
return False
return False
def retire(self):
self.active = False
self.agent.activeState = None
self.flipState = None
def update(self):
spd = self.agent.currentSpd
if self.flipState != None:
if self.flipState.active:
controller = self.flipState.update()
if self.agent.time - self.flipState.flip_obj.flipStartedTimer <= 0.15:
if spd < maxPossibleSpeed:
controller.boost = True
return controller
if self.secondFlip:
self.retire()
jumping = False
ballDistance = distance2D(self.agent.me.location, self.agent.ball.location)
if not self.started:
if not kickOffTest(self.agent):
self.started = True
self.startTime = self.agent.time
if self.started and self.agent.time - self.startTime > 2.5:
self.retire()
if not self.firstFlip:
if spd > 1100:
self.flipState = LeapOfFaith(self.agent,0,target = self.agent.ball.location)
self.firstFlip = True
return self.flipState.update()
if ballDistance > self.finalFlipDistance:
destination = self.agent.ball.location
if not self.firstFlip:
if self.agent.me.location[0] > self.agent.ball.location[0]:
destination.data[0] -= 200
else:
destination.data[0] += 200
else:
if self.agent.me.location[0] > self.agent.ball.location[0]:
destination.data[0] -= 5
else:
destination.data[0] += 5
return greedyMover(self.agent, destination)
else:
self.flipState = LeapOfFaith(self.agent,0,self.agent.ball.location)
self.secondFlip = True
return self.flipState.update()
class HeavenylyReprieve(baseState):
def __init__(self,agent,boostloc):
self.agent = agent
self.boostLoc = boostloc
self.active = True
def update(self):
result = inCornerWithBoost(self.agent)
if result != False:
return refuel(self.agent, result[0])
else:
self.active = False
return ShellTime(self.agent)
class PreemptiveStrike(baseState):
def __init__(self,agent):
self.agent = agent
self.started = False
self.firstFlip = False
self.secondFlip = False
self.finalFlipDistance = 850
#self.finalFlipDistance = 1400
self.active = True
self.startTime = agent.time
self.flipState = None
self.kickoff_type = getKickoffPosition(agent.me.location)
self.method = 0
self.setup()
agent.stubbornessTimer = 5
agent.stubborness= agent.stubbornessMax
agent.stubborness= agent.stubbornessMax
def setup(self):
if abs(self.agent.me.location[0]) < 257:
self.method = 1
self.replacement = Kickoff(self.agent)
def rightSelf(self):
controller_state = SimpleControllerState()
if self.agent.me.rotation[2] > 0:
controller_state.roll = -1
elif self.agent.me.rotation[2] < 0:
controller_state.roll = 1
if self.agent.me.rotation[0] > self.agent.velAngle:
controller_state.yaw = -1
elif self.agent.me.rotation[0] < self.agent.velAngle:
controller_state.yaw = 1
if self.agent.me.rotation[0] > 0:
controller_state.pitch = -1
elif self.agent.me.rotation[0] < 0:
controller_state.pitch = 1
controller_state.throttle = 1
return controller_state
def fakeKickOffChecker(self):
closestToBall, bDist = findEnemyClosestToLocation(self.agent, self.agent.ball.location)
myDist = findDistance(self.agent.me.location,self.agent.ball.location)
if bDist:
if bDist <= myDist*.75:
return True
else:
return False
return False
def retire(self):
self.active = False
self.agent.activeState = None
self.flipState = None
def update(self):
if self.method == 1:
action = self.replacement.update()
if not self.replacement.active:
self.retire()
return action
else:
spd = self.agent.currentSpd
if self.flipState != None:
if self.flipState.active:
controller = self.flipState.update()
controller.boost = True
return controller
if self.secondFlip:
self.retire()
jumping = False
ballDistance = distance2D(self.agent.me.location, self.agent.ball.location)
if ballDistance < 200:
self.retire()
if not self.started:
if not kickOffTest(self.agent):
self.started = True
self.startTime = self.agent.time
if self.started and self.agent.time - self.startTime > 2.5:
self.retire()
if not self.firstFlip:
if spd > 1050:
localBall = self.agent.ball.local_location
angle = correctAngle(math.degrees(math.atan2(localBall[1],localBall[0])))
#if self.agent.team == 0:
if angle < 0:
self.flipState = LeapOfFaith(self.agent, 9)
else:
self.flipState = LeapOfFaith(self.agent, 10)
# else:
# if angle > 0:
# self.flipState = LeapOfFaith(self.agent, 9)
# else:
# self.flipState = LeapOfFaith(self.agent, 10)
self.firstFlip = True
controller = self.flipState.update()
controller.boost = True
return controller
destination = self.agent.ball.location
if ballDistance > self.finalFlipDistance:
#destination.data[1] += -sign(self.agent.team)*100
if not self.firstFlip:
#print(self.kickoff_type)
if self.agent.team == 1:
if self.kickoff_type == 0:
if destination[0] > self.agent.me.location[0]:
#print("greater than 0")
destination.data[0] += 1100#1000
else:
destination.data[0] -= 1100#1000
#print("less than 0")
elif self.kickoff_type == 1:
if destination[0] > self.agent.me.location[0]:
#print("greater than 0")
destination.data[0] += 900
else:
destination.data[0] -= 900
#print("less than 0")
elif self.kickoff_type == 2:
destination.data[0] -= 750
else:
if destination[0] > self.agent.me.location[0] or self.kickoff_type == -1:
destination.data[0] += 1100
else:
destination.data[0] -= 1100
else:
if self.kickoff_type == 0:
if destination[0] > self.agent.me.location[0]:
#print("greater than 0")
destination.data[0] += 1100#1000
else:
destination.data[0] -= 1100#1000
#print("less than 0")
elif self.kickoff_type == 1:
if destination[0] > self.agent.me.location[0]:
#print("greater than 0")
destination.data[0] += 900
else:
destination.data[0] -= 900
#print("less than 0")
elif self.kickoff_type == 2:
destination.data[0] += 750
else:
if destination[0] > self.agent.me.location[0] or self.kickoff_type == -1:
destination.data[0] -= 1100
else:
destination.data[0] += 1100
else:
if destination[0] > self.agent.me.location[0]:
destination.data[0] -=25
else:
destination.data[0] += 25
controls = greedyMover(self.agent, destination)
if self.firstFlip and not self.secondFlip:
if self.flipState:
if not self.flipState.active:
if not self.agent.onSurface:
controls = self.rightSelf()
if spd < 2195:
controls.boost = True
else:
controls.boost = False
return controls
else:
if self.agent.onSurface:
self.flipState = LeapOfFaith(self.agent, 0)
self.secondFlip = True
return self.flipState.update()
else:
controls = self.rightSelf()
if spd < maxPossibleSpeed:
controls.boost = True
if ballDistance < 150:
self.retire()
return controls
class DivineGrace(baseState):
def update(self):
controller_state = SimpleControllerState()
controller_state.throttle = 1
if self.agent.onSurface or self.agent.me.location[2] < 120:
self.active = False
# vel = self.agent.me.avelocity.normalize().scale(2500)
# fpos = self.agent.me.location - vel
# fpos.data[2] = self.agent.me.location[2]
#
# controller_state.steer, controller_state.yaw, controller_state.pitch, roll = orientTowardsVector(self.agent,
# fpos)
if self.agent.me.rotation[2] > 0:
controller_state.roll = -1
elif self.agent.me.rotation[2] < 0:
controller_state.roll = 1
if self.agent.me.rotation[0] > self.agent.velAngle:
controller_state.yaw = -1
elif self.agent.me.rotation[0] < self.agent.velAngle:
controller_state.yaw = 1
# if self.agent.me.rotation[1] > 0:
# controller_state.pitch = -1
#
# elif self.agent.me.rotation[1] < 0:
# controller_state.pitch = 1
return controller_state
class WardAgainstEvil(baseState):
def __init__(self,agent):
self.agent = agent
self.active = True
self.timeCreated = self.agent.time
def update(self):
#print(f"We're too scared! {self.agent.time}")
return scaredyCat(self.agent)
class BlessingOfDexterity(baseState):
def __init__(self,agent):
self.agent = agent
self.active = True
self.firstJump= False
self.secondJump = False
self.jumpStart = 0
self.timeCreated = self.agent.time
def update(self):
controller_state = SimpleControllerState()
controller_state.throttle = -1
if not self.firstJump:
controller_state.jump = True
controller_state.pitch = 1
self.firstJump = True
self.jumpStart = self.agent.time
return controller_state
elif self.firstJump and not self.secondJump:
jumpTimer = self.agent.time - self.jumpStart
controller_state.pitch = 1
controller_state.jump = False
if jumpTimer < 0.12:
controller_state.jump = True
if jumpTimer > 0.15:
controller_state.jump = True
self.jumpStart = self.agent.time
self.secondJump = True
return controller_state
elif self.firstJump and self.secondJump:
timer = self.agent.time - self.jumpStart
if timer < 0.15:
controller_state.pitch = 1
else:
controller_state.pitch = -1
controller_state.roll = 1
if timer > .8:
controller_state.roll = 0
if timer > 1.15:
self.active = False
return controller_state
else:
print("halfFlip else conditional called in update. This should not be happening")
class Chase(baseState):
def __init__(self, agent):
self.agent = agent
self.active = True
def update(self):
if not kickOffTest(self.agent):
return efficientMover(self.agent,self.agent.ball,self.agent.maxSpd)
else:
self.active = False
self.agent.activeState = PreemptiveStrike(self.agent)
return self.agent.activeState.update()
class BlessingOfSafety(baseState):
def update(self):
distMin = 2000
if distance2D(Vector([0, 5200 * sign(self.agent.team), 200]),
self.agent.currentHit.pred_vector) < distMin:
return ShellTime(self.agent)
else:
if self.agent.rotationNumber == 2:
if len(self.agent.allies) >=2:
return playBack(self.agent,buffer = 2500)
else:
return playBack(self.agent)
if self.agent.rotationNumber >=3:
return playBack(self.agent,buffer = 5500)
#print("returning default value")
return playBack(self.agent)
class DivineAssistance(baseState):
def update(self):
return secondManSupport(self.agent)
def halfFlipStateManager(agent):
if agent.activeState.active == False:
agent.activeState = BlessingOfDexterity(agent)
else:
if type(agent.activeState) != BlessingOfDexterity:
agent.activeState = BlessingOfDexterity(agent)
class soloDefense(baseState):
def update(self):
if distance2D(Vector([0, 5200 * sign(self.agent.team), 200]),convertStructLocationToVector(self.agent.selectedBallPred))<1500:
return ShellTime(self.agent)
else:
return playBack(self.agent)
class ScaleTheWalls(baseState):
def update(self):
return handleWallShot(self.agent)
class AngelicEmbrace(baseState):
def update(self):
return carry_flick(self.agent,cradled = True)
#return newCarry(self.agent)
class emergencyDefend(baseState):
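    # Emergency save: races to where the predicted goal shot will cross our
    # goal line, then jumps at the ball if it arrives too high to block from
    # the ground.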
def update(self):
penetrationPosition = convertStructLocationToVector(self.agent.goalPred)
penetrationPosition.data[1] = 5350 * sign(self.agent.team)
if self.agent.goalPred.game_seconds - self.agent.gameInfo.seconds_elapsed > .1:
if distance2D(self.agent.me.location,penetrationPosition) > 100:
return testMover(self.agent,penetrationPosition,2300)
else:
if penetrationPosition[2] > 300:
self.activeState = LeapOfFaith(self.agent, -1)
return self.activeState.update()
else:
self.activeState = LeapOfFaith(self.agent, 0)
return self.activeState.update()
def parseCarInfo(carList, index, _max = False):
    # Return the car with the smallest value at `index` (largest if _max).
    # Seed with +/- infinity so the first car always becomes the initial best.
    val = -math.inf if _max else math.inf
    best = None
    for each in carList:
        if _max:
            if each[index] > val:
                best = each
                val = each[index]
        else:
            if each[index] < val:
                best = each
                val = each[index]
    return best
def teamStateManager(agent):
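    # Team-play state selection; hands off to soloStateManager when the bot
    # has no allies.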
if len(agent.allies) < 1:
soloStateManager(agent)
return
agentType = type(agent.activeState)
    groundHeightCutOff = 120
if agentType != PreemptiveStrike:
if not kickOffTest(agent):
            myGoalLoc = Vector([0, 5200 * sign(agent.team), 200])
            # the enemy goal sits on the opposite end of the field
            enemyGoalLoc = Vector([0, -5200 * sign(agent.team), 200])
ballDistanceFromGoal = distance2D(myGoalLoc, agent.ball)
carDistanceFromGoal = distance2D(myGoalLoc, agent.me)
carDistanceFromEnemyGoal = distance2D(enemyGoalLoc, agent.me)
if ballDistanceFromGoal <= 2000:
agent.contested = True
timeTillBallReady = 6
if agent.contested:
ballStruct = agent.selectedBallPred
timeTillBallReady = agent.ballDelay
else:
if is_in_strike_zone(agent, convertStructLocationToVector(agent.selectedBallPred)):
agent.contested = True
ballStruct = agent.selectedBallPred
timeTillBallReady = agent.ballDelay
else:
agent.selectedBallPred = findSuitableBallPosition(agent, 110, agent.getCurrentSpd(), agent.me.location)
ballStruct = agent.selectedBallPred
goalward = ballHeadedTowardsMyGoal(agent)
agent.openGoal = openGoalOpportunity(agent)
            aerialStructs = findAerialTargets(agent)
            # debug visualization at the selected ball prediction
            createBox(agent, convertStructLocationToVector(ballStruct))
if agentType == LeapOfFaith:
if agent.activeState.active != False:
return
if agentType == airLaunch:
if agent.activeState.active != False:
return
if agentType == BlessingOfDexterity:
if agent.activeState.active != False:
return
if agentType == DivineGrace:
if agent.activeState.active != False:
return
if agentType == RighteousVolley:
if agent.activeState.active != False:
return
if agentType == Aerial:
if agent.activeState.active != False:
return
if not agent.onSurface:
if agent.me.location[2] > 165:
if agentType != DivineGrace:
agent.activeState = DivineGrace(agent)
return
# carDistancesFromGoal = []
# cardistancesFromBall = []
# carInfo = []
# for c in agent.allies:
# cdfg = distance2D(myGoalLoc, c.location)
# cdfb = distance2D(agent.ball.location, c.location)
# carDistancesFromGoal.append(cdfg)
# cardistancesFromBall.append(cdfb)
# carInfo.append([cdfg, cdfb, c])
carDistanceFromGoal = distance2D(myGoalLoc, agent.me)
carDistanceFromBall = distance2D(agent.me.location, agent.ball.location)
predLocation = convertStructLocationToVector(agent.selectedBallPred)
        if len(agent.allies) == 1:  # 2vX
if agent.me.location[1] * sign(agent.team) < agent.ball.location[1] *sign(agent.team): #bp = -3000 ball = -4000/ 3000,4000 // op = 3000 ball = 4000 /3000,4000
#beyond the ball - demo and retreat if there's a last man, otherwise evac asap
if agent.allies[0].location[1] * sign(agent.team) < agent.ball.location[1] *sign(agent.team):
#get back asap!
if agentType != BlessingOfSafety:
agent.activeState = BlessingOfSafety(agent)
return
else:
                    # there's a back man, so wreak some havoc
#print("it's clobbering time!")
if agentType != DemolitionBot:
agent.activeState = DemolitionBot(agent)
return
else:
#bot not over extended, check to see if teammate is
if agent.allies[0].location[1] * sign(agent.team) > agent.ball.location[1] * sign(agent.team):
#both bots are in defensive positions
if distance2D(agent.me.location,agent.ball.location) <= distance2D(agent.allies[0].location,agent.ball.location):
#print("this bot is closest to ball, go on offensive")
if goalward:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
                        if predLocation[2] <= groundHeightCutOff:  # ball low enough for a ground shot
if agentType != GroundAssault:
agent.activeState = GroundAssault(agent)
return
else:
if agentType != HolyGrenade:
agent.activeState = HolyGrenade(agent)
return
else:
if agentType != BlessingOfSafety:
agent.activeState = BlessingOfSafety(agent)
return
else:
#teammate is closer, play the back man
if agentType != BlessingOfSafety:
agent.activeState = BlessingOfSafety(agent)
return
else: #3vX+
print("why am I in 3v3?")
if goalward:
if agent.activeState != HolyProtector:
agent.activeState = HolyProtector(agent)
return
else:
                if predLocation[2] > groundHeightCutOff:
if agentType != HolyGrenade:
agent.activeState = HolyGrenade(agent)
return
else:
if agentType != GroundAssault:
agent.activeState = GroundAssault(agent)
return
# pass
#
# if carDistanceFromGoal > ballDistanceFromGoal + 100:
# if agentType != GroundDefend:
# agent.activeState = GroundDefend(agent)
# return
#
# elif goalward:
# if agentType != GroundDefend:
# agent.activeState = GroundDefend(agent)
# return
#
#
# else:
#
# if structHeight <= groundHeighCutOff:
# if agentType != Dribble:
# agent.activeState = Dribble(agent)
# return
# else:
# if agentType != bounceShot:
# agent.activeState = bounceShot(agent)
# return
else:
if agent.activeState != PreemptiveStrike:
agent.activeState = PreemptiveStrike(agent)
return
def orientationStateManager(agent):
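    # Debug/training helper: respawns the car at field center with an upward
    # and random lateral velocity so aerial recovery (DivineGrace) can be tested.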
if agent.me.location[2] < 30 or agent.onSurface:
print("resetting orientations")
        car_state = CarState(physics=Physics(
            velocity=Vector3(z=1550, x=random.randrange(-1500, 1500), y=random.randrange(-1500, 1500)),
            location=Vector3(0, 0, 20)))
game_state = GameState(cars={agent.index: car_state})
agent.set_game_state(game_state)
if agent.activeState != DivineGrace:
agent.activeState = DivineGrace(agent)
#return agent.activeState
def launchStateManager(agent):
if agent.activeState:
if agent.activeState.active:
return
else:
if type(agent.activeState) == airLaunch:
agent.activeState = DivineGrace(agent)
else:
if agent.onSurface:
if agent.getCurrentSpd() < 50:
agent.activeState = airLaunch(agent)
else:
agent.activeState = airLaunch(agent)
def facePositionManager(agent):
agentType = type(agent.activeState)
if agentType != TurnTowardsPosition or not agent.activeState.active:
agent.activeState = TurnTowardsPosition(agent,agent.ball.location,0)
def demoTest(agent):
targ = findEnemyClosestToLocation(agent,agent.ball.location)[0]
return demoEnemyCar(agent,targ)
def newTeamStateManager(agent):
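    # Newer team manager: works out this bot's place in the rotation (1 = first
    # man on the ball, higher numbers = support/retreat) and picks a state from it.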
agentType = type(agent.activeState)
if agentType != PreemptiveStrike:
if not kickOffTest(agent):
myGoalLoc = Vector([0, 5200 * sign(agent.team), 200])
ballDistanceFromGoal = distance2D(myGoalLoc, agent.ball)
carDistanceFromGoal = distance2D(myGoalLoc, agent.me)
if agentType == LeapOfFaith:
if agent.activeState.active != False:
return
if agentType == Action_chain:
if agent.activeState.active != False:
return
if agentType == airLaunch:
if agent.activeState.active != False:
return
if agentType == BlessingOfDexterity:
if agent.activeState.active != False:
return
if agentType == DivineGrace:
if agent.activeState.active != False:
return
if agentType == RighteousVolley:
if agent.activeState.active != False:
return
fastesthit = find_soonest_hit(agent)
hit = fastesthit
openNet = openGoalOpportunity(agent)
agent.openGoal = openNet
agent.timid = False
scared = False
tempDelay = hit.prediction_time - agent.gameInfo.seconds_elapsed
if tempDelay >= agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
if agent.enemyAttacking:
agent.contested = True
if tempDelay >= agent.enemyBallInterceptDelay + agent.contestedTimeLimit:
if not butterZone(hit.pred_vector):
if ballDistanceFromGoal <= 5000:
agent.timid = True
else:
scared = True
#print(tempDelay,agent.enemyBallInterceptDelay)
#pass
if distance2D(hit.pred_vector,myGoalLoc) <= 2000 or distance2D(agent.enemyTargetVec,myGoalLoc) <= 2000 or ballDistanceFromGoal <= 2000:
agent.contested = True
agent.enemyAttacking = True
agent.timid = False
scared = False
# if not agent.contested:
# if hit.hit_type == 4:
# if agent.hits[1] != None:
# temptime = agent.hits[1].prediction_time - agent.time
# if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
# hit = agent.hits[1]
#
# if hit.hit_type == 1:
# if agent.hits[0] != None:
# temptime = agent.hits[0].prediction_time - agent.time
# if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
# # if not ballHeadedTowardsMyGoal_testing(agent, agent.hits[0]):
# hit = agent.hits[0]
#
# goalward = ballHeadedTowardsMyGoal_testing(agent, hit)
# agent.goalward = goalward
# agent.currentHit = hit
# agent.ballDelay = hit.prediction_time - agent.gameInfo.seconds_elapsed
# agent.ballGrounded = False
#
# #print(agent.ballDelay, agent.enemyBallInterceptDelay,agent.contested,agent.timid)
#
# if hit.hit_type == 2:
# agent.wallShot = True
# agent.ballGrounded = False
# else:
# agent.wallShot = False
# if hit.hit_type == 1:
# if hit.pred_vector[2] <=agent.groundCutOff:
# agent.ballGrounded = True
# else:
# agent.ballGrounded = False
#
#
#
# createBox(agent, hit.pred_vector)
if agentType == Aerial:
if agent.activeState.active != False:
return
if not agent.onSurface:
if agent.me.location[2] > 170:
if agentType != DivineGrace:
agent.activeState = DivineGrace(agent)
return
if agent.dribbling:
if agentType != AngelicEmbrace:
agent.activeState = AngelicEmbrace(agent)
return
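        # Find the y-coordinate of the deepest defender on our team (ourselves
        # included); a bot beyond the ball that is not last man is flagged as
        # out of position below.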
lastManY = 0
if agent.team == 0:
lastManY = math.inf
for ally in agent.allies:
if ally.location[1] < lastManY:
lastManY = ally.location[1]
if agent.me.location[1] < lastManY:
lastManY = agent.me.location[1]
else:
lastManY = -math.inf
for ally in agent.allies:
if ally.location[1] > lastManY:
lastManY = ally.location[1]
if agent.me.location[1] > lastManY:
lastManY = agent.me.location[1]
#determine which man in rotation I am #1, #2, #3, forward
man = 1
if agent.me.location[1] * sign(agent.team) < agent.ball.location[1] *sign(agent.team):
if agent.me.location[1] * sign(agent.team) < hit.pred_vector[1] * sign(agent.team):
if agent.me.location[1] != lastManY:
# if agent.team == 0:
# if agent.me.location[1] > -3500:
# man = 4
# elif agent.team == 1:
# if agent.me.location[1] < 3500:
# man = 4
man = 4
if player_retreat_status(agent.me,agent.team):
if agent.me.location[1] != lastManY:
if distance2D(hit.pred_vector, myGoalLoc) >2000:
man = 4
# elif player_retreat_status(agent.me,agent.team):
# if agent.me.location[1] != lastManY:
# # if agent.team == 0:
# # if agent.me.location[1] > -3500:
# # man = 4
# # elif agent.team == 1:
# # if agent.me.location[1] < 3500:
# # man = 4
# man = 4
if man != 4:
myDist = distance2D(agent.me.location, agent.ball.location)
for ally in agent.allies:
if not ally.demolished:
if ally.location[1] * sign(agent.team) > agent.ball.location[1] * sign(agent.team):
allyDist = distance2D(ally.location, agent.ball.location)
if allyDist < myDist:
if not player_retreat_status(ally,agent.team) or allyDist < 250:
man += 1
man = clamp(3, 0, man)
agent.rotationNumber = man
if man != 1 or openNet:
if not agent.contested:
if hit.hit_type == 4:
if agent.hits[1] != None:
temptime = agent.hits[1].prediction_time - agent.time
if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
hit = agent.hits[1]
if hit.hit_type == 1:
if agent.hits[0] != None:
temptime = agent.hits[0].prediction_time - agent.time
if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
# if not ballHeadedTowardsMyGoal_testing(agent, agent.hits[0]):
hit = agent.hits[0]
goalward = ballHeadedTowardsMyGoal_testing(agent, hit)
agent.goalward = goalward
agent.currentHit = hit
agent.ballDelay = hit.prediction_time - agent.gameInfo.seconds_elapsed
agent.ballGrounded = False
#print(agent.ballDelay, agent.enemyBallInterceptDelay,agent.contested,agent.timid)
if hit.hit_type == 2:
agent.wallShot = True
agent.ballGrounded = False
else:
agent.wallShot = False
if hit.hit_type == 1:
if hit.pred_vector[2] <=agent.groundCutOff:
agent.ballGrounded = True
else:
agent.ballGrounded = False
createBox(agent, hit.pred_vector)
boostOpportunity = inCornerWithBoost(agent)
if boostOpportunity != False:
if agent.me.boostLevel <=50:
getBoost = False
if agent.team == 0:
if boostOpportunity[1] == 0 or boostOpportunity[1] == 1:
getBoost = True
else:
if boostOpportunity[1] == 2 or boostOpportunity[1] == 3:
getBoost = True
if getBoost:
if agentType != HeavenylyReprieve:
agent.activeState = HeavenylyReprieve(agent,boostOpportunity[0])
return
if man == 1:
if agent.me.boostLevel <=0:
if len(agent.allies) >1:
if distance2D(agent.me.location,hit.pred_vector) > 7000:
if not is_in_strike_zone(agent,hit.pred_vector):
if agentType != BlessingOfSafety:
agent.activeState = BlessingOfSafety(agent)
return
if carDistanceFromGoal > ballDistanceFromGoal:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
if goalward:
if hit.hit_type != 2:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
else:
if agentType != ScaleTheWalls:
agent.activeState = ScaleTheWalls(agent)
return
else:
if hit.hit_type == 0: # hit.pred_vector[2] <= agent.groundCutOff:
if agentType != GroundAssault:
agent.activeState = GroundAssault(agent)
return
elif hit.hit_type == 1:
if agentType != HolyGrenade:
agent.activeState = HolyGrenade(agent)
return
else:
if agentType != ScaleTheWalls:
agent.activeState = ScaleTheWalls(agent)
return
else:
if agentType != BlessingOfSafety:
agent.activeState = BlessingOfSafety(agent)
return
# elif man == 2:
# if agentType != BlessingOfSafety:
# agent.activeState = BlessingOfSafety(agent)
# return
#
# elif man == 3:
# if agentType != BlessingOfSafety:
# agent.activeState = BlessingOfSafety(agent)
# return
#
# elif man == 4:
# if agentType != BlessingOfSafety:
# agent.activeState = BlessingOfSafety(agent)
# return
else:
agent.activeState = PreemptiveStrike(agent)
def soloStateManager(agent):
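    # Solo (no allies) state selection; mirrors the team manager without the
    # rotation bookkeeping.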
agentType = type(agent.activeState)
if agentType != PreemptiveStrike:
if not kickOffTest(agent):
myGoalLoc = Vector([0, 5200 * sign(agent.team), 200])
ballDistanceFromGoal = distance2D(myGoalLoc, agent.ball)
carDistanceFromGoal = distance2D(myGoalLoc, agent.me)
#agent.resetTimer += agent.deltaTime
if agentType == LeapOfFaith:
if agent.activeState.active != False:
return
if agentType == airLaunch:
if agent.activeState.active != False:
return
if agentType == BlessingOfDexterity:
if agent.activeState.active != False:
return
if agentType == DivineGrace:
if agent.activeState.active != False:
return
if agentType == RighteousVolley:
if agent.activeState.active != False:
return
hit = find_soonest_hit(agent)
openNet = openGoalOpportunity(agent)
agent.openGoal = openNet
agent.timid = False
scared = False
tempDelay = hit.prediction_time - agent.time
#print(tempDelay)
if tempDelay >= agent.enemyBallInterceptDelay - .5:
if agent.enemyAttacking:
agent.contested = True
if tempDelay >= agent.enemyBallInterceptDelay + 1:
if not butterZone(hit.pred_vector):
if ballDistanceFromGoal <= 5000:
agent.timid = True
else:
scared = True
#print(tempDelay,agent.enemyBallInterceptDelay)
#pass
if distance2D(hit.pred_vector,myGoalLoc) <= 2000 or distance2D(agent.enemyTargetVec,myGoalLoc) <= 2000:
agent.contested = True
agent.timid = False
scared = False
if not agent.contested or not agent.enemyAttacking:
if agent.hits[0] != None:
temptime = agent.hits[0].prediction_time - agent.gameInfo.seconds_elapsed
#if temptime >=1:
if hit.hit_type != 2:
#if temptime < agent.enemyBallInterceptDelay - .5:
hit = agent.hits[0]
goalward = ballHeadedTowardsMyGoal_testing(agent, hit)
agent.goalward = goalward
agent.currentHit = hit
agent.ballDelay = hit.prediction_time - agent.time
agent.ballGrounded = False
#print(agent.ballDelay, agent.enemyBallInterceptDelay,agent.contested,agent.timid)
if hit.hit_type == 2:
agent.wallShot = True
agent.ballGrounded = False
else:
agent.wallShot = False
if hit.hit_type == 1:
if hit.pred_vector[2] <=agent.groundCutOff:
agent.ballGrounded = True
else:
agent.ballGrounded = False
createBox(agent, hit.pred_vector)
if agentType == Aerial:
if agent.activeState.active != False:
return
if not agent.onSurface:
if agent.me.location[2] > 170:
if agentType != DivineGrace:
agent.activeState = DivineGrace(agent)
return
if agent.dribbling:
if not goalward:
if agentType != AngelicEmbrace:
agent.activeState = AngelicEmbrace(agent)
return
#else:
# agent.resetTimer += agent.deltaTime
# if agent.resetTimer >= 5:
# agent.resetTimer = 0
# print("setting up dribble training")
# #game_state = GameState()
# #self.set_game_state(game_state)
# ball_state = BallState(Physics(location=Vector3(agent.me.location[0], agent.me.location[1], agent.me.location[2]+160),velocity=Vector3(agent.me.velocity[0],agent.me.velocity[1],agent.me.velocity[2])))
# game_state = GameState(ball=ball_state)
# agent.set_game_state(game_state)
# if agentType != AngelicEmbrace:
# agent.activeState = AngelicEmbrace(agent)
# return
# if agent.timid or scared:
# #print(f"being timid {agent.time}")
# if agentType != WardAgainstEvil:
# agent.activeState = WardAgainstEvil(agent)
# return
# if scared or agent.timid:
# if agentType != BlessingOfSafety:
# agent.activeState = BlessingOfSafety(agent)
# return
if carDistanceFromGoal > ballDistanceFromGoal:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
elif goalward:
if hit.hit_type !=2:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
else:
if agentType != ScaleTheWalls:
agent.activeState = ScaleTheWalls(agent)
#print("scaling walls")
#print(f"scale the walls defensive {agent.time}")
return
else:
if hit.hit_type == 0:
if agentType != GroundAssault:
agent.activeState = GroundAssault(agent)
return
elif hit.hit_type == 1:
if agentType != HolyGrenade:
agent.activeState = HolyGrenade(agent)
return
elif hit.hit_type == 2:
if agentType != ScaleTheWalls:
agent.activeState = ScaleTheWalls(agent)
return
else:
print("we got an eroneous hit_type somehow")
print("rawr")
else:
agent.activeState = PreemptiveStrike(agent)
def soloStateManager_testing(agent):
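    # Experimental solo-manager variant with tweaked hit selection and
    # contest logic.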
agentType = type(agent.activeState)
if agentType != PreemptiveStrike:
if not kickOffTest(agent):
myGoalLoc = Vector([0, 5200 * sign(agent.team), 200])
ballDistanceFromGoal = distance2D(myGoalLoc, agent.ball)
carDistanceFromGoal = distance2D(myGoalLoc, agent.me)
#agent.resetTimer += agent.deltaTime
if agentType == LeapOfFaith:
if agent.activeState.active != False:
return
if agentType == Action_chain:
if agent.activeState.active != False:
return
if agentType == airLaunch:
if agent.activeState.active != False:
return
if agentType == BlessingOfDexterity:
if agent.activeState.active != False:
return
if agentType == DivineGrace:
if agent.activeState.active != False:
return
if agentType == RighteousVolley:
if agent.activeState.active != False:
return
hit = find_soonest_hit(agent)
if agent.goalPred != None:
agent.enemyAttacking = True
openNet = openGoalOpportunity(agent)
agent.openGoal = openNet
agent.timid = False
scared = False
tempDelay = hit.time_difference()
#print(tempDelay)
#print(agent.enemyBallInterceptDelay)
if tempDelay >= agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
if agent.enemyAttacking:
#agent.enemyAttacking = True
agent.contested = True
# else:
# print(f"{tempDelay} {agent.enemyBallInterceptDelay}")
if distance2D(hit.pred_vector, myGoalLoc) <= 2000 or distance2D(agent.enemyTargetVec,
myGoalLoc) <= 2000 or ballDistanceFromGoal <= 2000:
if agent.enemyAttacking:
agent.contested = True
agent.timid = False
scared = False
#agent.enemyAttacking = True
# agent.contested = True
# agent.enemyAttacking = True
#if agent.team == 0:
if not agent.contested:
if hit.hit_type == 4:
if agent.hits[1] != None:
temptime = agent.hits[1].prediction_time - agent.time
if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
hit = agent.hits[1]
if hit.hit_type == 1:
if agent.hits[0] != None:
temptime = agent.hits[0].prediction_time - agent.time
if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
#if not ballHeadedTowardsMyGoal_testing(agent, agent.hits[0]):
hit = agent.hits[0]
# if agent.hits[0] != None:
# if hit.hit_type != 2:
# temptime = agent.hits[0].prediction_time - agent.time
# # if temptime >=1:
#
# if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
# if not ballHeadedTowardsMyGoal_testing(agent, agent.hits[0]):
# hit = agent.hits[0]
goalward = ballHeadedTowardsMyGoal_testing(agent, hit)
agent.goalward = goalward
agent.currentHit = hit
agent.ballDelay = hit.prediction_time - agent.time
agent.ballGrounded = False
if hit.hit_type == 2:
agent.wallShot = True
else:
agent.wallShot = False
createBox(agent, hit.pred_vector)
if agentType == Aerial:
if agent.activeState.active != False:
return
if not agent.onSurface:
if agent.me.location[2] > 120:
if agentType != DivineGrace:
agent.activeState = DivineGrace(agent)
return
if agent.dribbling:
#if not goalward:
if agentType != AngelicEmbrace:
agent.activeState = AngelicEmbrace(agent)
return
boostOpportunity = inCornerWithBoost(agent)
if boostOpportunity != False:
if agent.me.boostLevel <= 50:
getBoost = False
if agent.team == 0:
if boostOpportunity[1] == 0 or boostOpportunity[1] == 1:
getBoost = True
else:
if boostOpportunity[1] == 2 or boostOpportunity[1] == 3:
getBoost = True
if getBoost:
if agentType != HeavenylyReprieve:
agent.activeState = HeavenylyReprieve(agent, boostOpportunity[0])
return
# if scared or agent.timid:
# if agentType != BlessingOfSafety:
# agent.activeState = BlessingOfSafety(agent)
# return
if carDistanceFromGoal > ballDistanceFromGoal:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
if goalward:
if hit.hit_type !=2:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
else:
if agentType != ScaleTheWalls:
agent.activeState = ScaleTheWalls(agent)
return
else:
if hit.hit_type == 0: #hit.pred_vector[2] <= agent.groundCutOff:
if agentType != GroundAssault:
agent.activeState = GroundAssault(agent)
return
elif hit.hit_type == 1 or hit.hit_type == 4:
if agentType != HolyGrenade:
agent.activeState = HolyGrenade(agent)
return
else:
if agentType != ScaleTheWalls:
agent.activeState = ScaleTheWalls(agent)
return
else:
agent.activeState = PreemptiveStrike(agent)
|
[
"rlbot.agents.base_agent.SimpleControllerState",
"math.atan2",
"math.sin",
"random.randrange",
"math.cos",
"rlbot.utils.game_state_util.Vector3",
"math.degrees",
"rlbot.utils.game_state_util.GameState"
] |
[((1612, 1635), 'rlbot.agents.base_agent.SimpleControllerState', 'SimpleControllerState', ([], {}), '()\n', (1633, 1635), False, 'from rlbot.agents.base_agent import BaseAgent, SimpleControllerState\n'), ((6603, 6626), 'rlbot.agents.base_agent.SimpleControllerState', 'SimpleControllerState', ([], {}), '()\n', (6624, 6626), False, 'from rlbot.agents.base_agent import BaseAgent, SimpleControllerState\n'), ((11998, 12021), 'rlbot.agents.base_agent.SimpleControllerState', 'SimpleControllerState', ([], {}), '()\n', (12019, 12021), False, 'from rlbot.agents.base_agent import BaseAgent, SimpleControllerState\n'), ((14051, 14074), 'rlbot.agents.base_agent.SimpleControllerState', 'SimpleControllerState', ([], {}), '()\n', (14072, 14074), False, 'from rlbot.agents.base_agent import BaseAgent, SimpleControllerState\n'), ((14313, 14363), 'math.atan2', 'math.atan2', (['ball_local.data[1]', 'ball_local.data[0]'], {}), '(ball_local.data[1], ball_local.data[0])\n', (14323, 14363), False, 'import math\n'), ((17751, 17774), 'rlbot.agents.base_agent.SimpleControllerState', 'SimpleControllerState', ([], {}), '()\n', (17772, 17774), False, 'from rlbot.agents.base_agent import BaseAgent, SimpleControllerState\n'), ((22837, 22860), 'rlbot.agents.base_agent.SimpleControllerState', 'SimpleControllerState', ([], {}), '()\n', (22858, 22860), False, 'from rlbot.agents.base_agent import BaseAgent, SimpleControllerState\n'), ((29850, 29873), 'rlbot.agents.base_agent.SimpleControllerState', 'SimpleControllerState', ([], {}), '()\n', (29871, 29873), False, 'from rlbot.agents.base_agent import BaseAgent, SimpleControllerState\n'), ((31579, 31602), 'rlbot.agents.base_agent.SimpleControllerState', 'SimpleControllerState', ([], {}), '()\n', (31600, 31602), False, 'from rlbot.agents.base_agent import BaseAgent, SimpleControllerState\n'), ((43872, 43912), 'rlbot.utils.game_state_util.GameState', 'GameState', ([], {'cars': '{agent.index: car_state}'}), '(cars={agent.index: car_state})\n', (43881, 43912), False, 'from rlbot.utils.game_state_util import GameState, BallState, CarState, Physics, Vector3, Rotator\n'), ((12161, 12211), 'math.atan2', 'math.atan2', (['ball_local.data[1]', 'ball_local.data[0]'], {}), '(ball_local.data[1], ball_local.data[0])\n', (12171, 12211), False, 'import math\n'), ((14401, 14425), 'math.degrees', 'math.degrees', (['ball_angle'], {}), '(ball_angle)\n', (14413, 14425), False, 'import math\n'), ((12299, 12319), 'math.sin', 'math.sin', (['ball_angle'], {}), '(ball_angle)\n', (12307, 12319), False, 'import math\n'), ((12851, 12874), 'rlbot.agents.base_agent.SimpleControllerState', 'SimpleControllerState', ([], {}), '()\n', (12872, 12874), False, 'from rlbot.agents.base_agent import BaseAgent, SimpleControllerState\n'), ((17692, 17730), 'math.atan2', 'math.atan2', (['localTarg[1]', 'localTarg[0]'], {}), '(localTarg[1], localTarg[0])\n', (17702, 17730), False, 'import math\n'), ((7027, 7077), 'math.atan2', 'math.atan2', (['ball_local.data[1]', 'ball_local.data[0]'], {}), '(ball_local.data[1], ball_local.data[0])\n', (7037, 7077), False, 'import math\n'), ((7162, 7182), 'math.sin', 'math.sin', (['ball_angle'], {}), '(ball_angle)\n', (7170, 7182), False, 'import math\n'), ((12370, 12390), 'math.cos', 'math.cos', (['ball_angle'], {}), '(ball_angle)\n', (12378, 12390), False, 'import math\n'), ((7208, 7228), 'math.cos', 'math.cos', (['ball_angle'], {}), '(ball_angle)\n', (7216, 7228), False, 'import math\n'), ((43831, 43848), 'rlbot.utils.game_state_util.Vector3', 'Vector3', (['(0)', '(0)', '(20)'], {}), '(0, 0, 20)\n', (43838, 43848), False, 'from rlbot.utils.game_state_util import GameState, BallState, CarState, Physics, Vector3, Rotator\n'), ((15225, 15245), 'math.sin', 'math.sin', (['ball_angle'], {}), '(ball_angle)\n', (15233, 15245), False, 'import math\n'), ((25134, 25172), 'math.atan2', 'math.atan2', (['localBall[1]', 'localBall[0]'], {}), '(localBall[1], localBall[0])\n', (25144, 25172), False, 'import math\n'), ((15296, 15316), 'math.cos', 'math.cos', (['ball_angle'], {}), '(ball_angle)\n', (15304, 15316), False, 'import math\n'), ((43759, 43788), 'random.randrange', 'random.randrange', (['(-1500)', '(1500)'], {}), '(-1500, 1500)\n', (43775, 43788), False, 'import random\n'), ((43791, 43820), 'random.randrange', 'random.randrange', (['(-1500)', '(1500)'], {}), '(-1500, 1500)\n', (43807, 43820), False, 'import random\n'), ((8523, 8577), 'math.atan2', 'math.atan2', (['target_local.data[1]', 'target_local.data[0]'], {}), '(target_local.data[1], target_local.data[0])\n', (8533, 8577), False, 'import math\n'), ((8662, 8684), 'math.sin', 'math.sin', (['target_angle'], {}), '(target_angle)\n', (8670, 8684), False, 'import math\n'), ((8710, 8732), 'math.cos', 'math.cos', (['target_angle'], {}), '(target_angle)\n', (8718, 8732), False, 'import math\n')]
|
import logging
import luigi
import luigi.contrib.s3
from . import hh, index
# Silence noisy third-party loggers by disabling propagation
for name in ["botocore", "boto3", "elasticsearch"]:
logging.getLogger(name).propagate = False
class MainTask(luigi.Task):
    # 113 - Russia, 1 - Moscow, 83 - Smolensk
# areas_ids = luigi.ListParameter([113])
areas_ids = luigi.ListParameter([113])
def requires(self):
return (
[hh.HHClearCompaniesDescriptionsAtArea(area_id) for area_id in self.areas_ids]
+ [hh.HHGetContries()]
+ [index.IndexHH()]
)
|
[
"luigi.ListParameter",
"logging.getLogger"
] |
[((342, 368), 'luigi.ListParameter', 'luigi.ListParameter', (['[113]'], {}), '([113])\n', (361, 368), False, 'import luigi\n'), ((163, 186), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (180, 186), False, 'import logging\n')]
|
# Generated by Django 2.1.7 on 2020-08-13 22:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('server', '0011_auto_20200721_1516'),
]
operations = [
migrations.AddField(
model_name='namedentityannotationhistory',
name='annotation',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ner_history', to='server.Annotation'),
),
migrations.AddField(
model_name='relationextractionannotationhistory',
name='annotation',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='re_history', to='server.Annotation'),
),
migrations.AlterField(
model_name='annotation',
name='task',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='annotations', to='server.Task'),
),
]
|
[
"django.db.models.ForeignKey"
] |
[((392, 521), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""ner_history"""', 'to': '"""server.Annotation"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n related_name='ner_history', to='server.Annotation')\n", (409, 521), False, 'from django.db import migrations, models\n'), ((670, 798), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""re_history"""', 'to': '"""server.Annotation"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n related_name='re_history', to='server.Annotation')\n", (687, 798), False, 'from django.db import migrations, models\n'), ((918, 1031), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""annotations"""', 'to': '"""server.Task"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='annotations', to='server.Task')\n", (935, 1031), False, 'from django.db import migrations, models\n')]
|
from urllib.parse import urlparse
import warnings
from django.contrib.sitemaps import Sitemap, views
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.template.response import TemplateResponse
from codalib.bagatom import TIME_FORMAT_STRING
from coda_mdstore.models import Bag
try:
MOST_RECENT_BAGGING_DATE = Bag.objects.latest(
'bagging_date'
).bagging_date.strftime(TIME_FORMAT_STRING)
except Exception:
MOST_RECENT_BAGGING_DATE = '2012-12-12T00:00:00Z'
def index(
request,
sitemaps,
template_name='sitemap_index.xml',
content_type='application/xml',
sitemap_url_name='resourcelist',
mimetype=None
):
"""
This method is overloaded from django.contrib.sitemaps.views.
we need this overload so that we can change the default method of
pagination display in the sitemaps index. it's a bit hacky - but it works.
"""
if mimetype:
warnings.warn(
"The mimetype keyword argument is deprecated, use "
"content_type instead", DeprecationWarning, stacklevel=2
)
content_type = mimetype
req_protocol = 'https' if request.is_secure() else 'http'
req_site = get_current_site(request)
sites = []
for section, site in sitemaps.items():
if callable(site):
site = site()
protocol = req_protocol if site.protocol is None else site.protocol
sitemap_url = reverse(
sitemap_url_name, kwargs={'section': section})
absolute_url = '%s://%s%s' % (protocol, req_site.domain, sitemap_url)
sites.append(absolute_url)
for page in range(2, site.paginator.num_pages + 1):
# we want to change how the pagination is displayed
sites.append(
'%s-%03d.xml' % (absolute_url.replace('-001.xml', ''), page)
)
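            # e.g. '.../resourcelist-001.xml' is followed by '.../resourcelist-002.xml',
            # '.../resourcelist-003.xml', and so on for each additional page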
return TemplateResponse(
request,
template_name,
{
'sitemaps': sites,
'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE,
},
content_type=content_type
)
def sitemap(request, sitemaps, section=None,
template_name='sitemap.xml', content_type='application/xml'):
"""
This method is overloaded from django.contrib.sitemaps.views.
we need this overload so that we can handle the urls served up by the other
overloaded method above "index".
"""
req_site = get_current_site(request)
# since we no longer give ?p arguments,
# we want the page to be the 'section'
page = section
# now, the 'section' is really the key of the sitemaps dict seen below
section = '001'
maps = [sitemaps[section]]
urls = []
for site in maps:
try:
if callable(site):
site = site()
u = site.get_urls(page=page, site=req_site)
urls.extend(u)
except EmptyPage:
raise Http404("Page %s empty" % page)
except PageNotAnInteger:
raise Http404("No page \'%s\'" % page)
for u in urls:
bag_name = urlparse(u['location']).path.replace('/bag/', '')
bag = get_object_or_404(Bag, name=bag_name)
u.setdefault('oxum', '%s.%s' % (bag.size, bag.files))
return TemplateResponse(
request,
template_name,
{
'urlset': urls,
'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE,
},
content_type=content_type
)
def changelist(request, sitemaps, section=None,
template_name='changelist.xml', content_type='application/xml'):
most_recent_bags = Bag.objects.order_by('-bagging_date', '-name').values(
'name',
'size',
'files',
'bagging_date'
)[:10000]
for b in most_recent_bags:
b['bagging_date'] = b['bagging_date'].strftime(TIME_FORMAT_STRING)
return TemplateResponse(
request,
template_name,
{
'urlset': reversed(most_recent_bags),
'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE,
},
content_type=content_type
)
def capabilitylist(
request,
template_name='mdstore/capabilitylist.xml',
content_type='application/xml'
):
return TemplateResponse(
request,
template_name,
{
'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE,
},
content_type=content_type
)
# overload the stock sitemap pagination stuff with our own methods
setattr(views, 'index', index)
setattr(views, 'sitemap', sitemap)
setattr(Sitemap, 'limit', 5000)
class BaseSitemap(Sitemap):
lastmod = None
protocol = 'http'
def items(self):
# return the list of all the bags sorted by bagging_date
return Bag.objects.order_by('bagging_date', 'name').values('name')
def location(self, obj):
# if we just return the object it will give a unicode value tuple
return "/bag/%s" % obj['name']
sitemaps = {
'001': BaseSitemap,
}
|
[
"django.template.response.TemplateResponse",
"django.urls.reverse",
"django.shortcuts.get_object_or_404",
"coda_mdstore.models.Bag.objects.latest",
"django.http.Http404",
"django.contrib.sites.shortcuts.get_current_site",
"warnings.warn",
"urllib.parse.urlparse",
"coda_mdstore.models.Bag.objects.order_by"
] |
[((1371, 1396), 'django.contrib.sites.shortcuts.get_current_site', 'get_current_site', (['request'], {}), '(request)\n', (1387, 1396), False, 'from django.contrib.sites.shortcuts import get_current_site\n'), ((2041, 2192), 'django.template.response.TemplateResponse', 'TemplateResponse', (['request', 'template_name', "{'sitemaps': sites, 'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE}"], {'content_type': 'content_type'}), "(request, template_name, {'sitemaps': sites,\n 'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE}, content_type=\n content_type)\n", (2057, 2192), False, 'from django.template.response import TemplateResponse\n'), ((2593, 2618), 'django.contrib.sites.shortcuts.get_current_site', 'get_current_site', (['request'], {}), '(request)\n', (2609, 2618), False, 'from django.contrib.sites.shortcuts import get_current_site\n'), ((3418, 3566), 'django.template.response.TemplateResponse', 'TemplateResponse', (['request', 'template_name', "{'urlset': urls, 'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE}"], {'content_type': 'content_type'}), "(request, template_name, {'urlset': urls,\n 'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE}, content_type=\n content_type)\n", (3434, 3566), False, 'from django.template.response import TemplateResponse\n'), ((4409, 4536), 'django.template.response.TemplateResponse', 'TemplateResponse', (['request', 'template_name', "{'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE}"], {'content_type': 'content_type'}), "(request, template_name, {'MOST_RECENT_BAGGING_DATE':\n MOST_RECENT_BAGGING_DATE}, content_type=content_type)\n", (4425, 4536), False, 'from django.template.response import TemplateResponse\n'), ((1103, 1232), 'warnings.warn', 'warnings.warn', (['"""The mimetype keyword argument is deprecated, use content_type instead"""', 'DeprecationWarning'], {'stacklevel': '(2)'}), "(\n 'The mimetype keyword argument is deprecated, use content_type instead',\n DeprecationWarning, stacklevel=2)\n", (1116, 1232), False, 'import warnings\n'), ((1607, 1661), 'django.urls.reverse', 'reverse', (['sitemap_url_name'], {'kwargs': "{'section': section}"}), "(sitemap_url_name, kwargs={'section': section})\n", (1614, 1661), False, 'from django.urls import reverse\n'), ((3307, 3344), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Bag'], {'name': 'bag_name'}), '(Bag, name=bag_name)\n', (3324, 3344), False, 'from django.shortcuts import get_object_or_404\n'), ((510, 544), 'coda_mdstore.models.Bag.objects.latest', 'Bag.objects.latest', (['"""bagging_date"""'], {}), "('bagging_date')\n", (528, 544), False, 'from coda_mdstore.models import Bag\n'), ((3089, 3120), 'django.http.Http404', 'Http404', (["('Page %s empty' % page)"], {}), "('Page %s empty' % page)\n", (3096, 3120), False, 'from django.http import Http404\n'), ((3172, 3202), 'django.http.Http404', 'Http404', (['("No page \'%s\'" % page)'], {}), '("No page \'%s\'" % page)\n', (3179, 3202), False, 'from django.http import Http404\n'), ((3784, 3830), 'coda_mdstore.models.Bag.objects.order_by', 'Bag.objects.order_by', (['"""-bagging_date"""', '"""-name"""'], {}), "('-bagging_date', '-name')\n", (3804, 3830), False, 'from coda_mdstore.models import Bag\n'), ((4934, 4978), 'coda_mdstore.models.Bag.objects.order_by', 'Bag.objects.order_by', (['"""bagging_date"""', '"""name"""'], {}), "('bagging_date', 'name')\n", (4954, 4978), False, 'from coda_mdstore.models import Bag\n'), ((3243, 3266), 'urllib.parse.urlparse', 'urlparse', (["u['location']"], {}), "(u['location'])\n", (3251, 3266), False, 'from urllib.parse import urlparse\n')]
|
# This file is part of the NESi software.
#
# Copyright (c) 2020
# Original Software Design by <NAME> <https://github.com/etingof>.
#
# Software adapted by inexio <https://github.com/inexio>.
# - <NAME> <https://github.com/unkn0wn-user>
# - <NAME> <https://github.com/Connyko65>
# - <NAME> <https://github.com/Dinker1996>
#
# License: https://github.com/inexio/NESi/LICENSE.rst
from nesi.devices.softbox.base_resources.interface import Interface, InterfaceCollection, logging, base
LOG = logging.getLogger(__name__)
class KeyMileInterface(Interface):
"""Represent logical interface resource."""
port_id = base.Field('port_id')
chan_id = base.Field('chan_id')
logport_id = base.Field('logport_id')
# vcc
vcc_profile = base.Field('vcc_profile')
vlan_profile = base.Field('vlan_profile')
number_of_conn_services = base.Field('number_of_conn_services')
reconfiguration_allowed = base.Field('reconfiguration_allowed')
services_connected = base.Field('services_connected')
class KeyMileInterfaceCollection(InterfaceCollection):
"""Represent a collection of interfaces."""
@property
def _resource_type(self):
return KeyMileInterface
|
[
"nesi.devices.softbox.base_resources.interface.base.Field",
"nesi.devices.softbox.base_resources.interface.logging.getLogger"
] |
[((491, 518), 'nesi.devices.softbox.base_resources.interface.logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (508, 518), False, 'from nesi.devices.softbox.base_resources.interface import Interface, InterfaceCollection, logging, base\n'), ((619, 640), 'nesi.devices.softbox.base_resources.interface.base.Field', 'base.Field', (['"""port_id"""'], {}), "('port_id')\n", (629, 640), False, 'from nesi.devices.softbox.base_resources.interface import Interface, InterfaceCollection, logging, base\n'), ((655, 676), 'nesi.devices.softbox.base_resources.interface.base.Field', 'base.Field', (['"""chan_id"""'], {}), "('chan_id')\n", (665, 676), False, 'from nesi.devices.softbox.base_resources.interface import Interface, InterfaceCollection, logging, base\n'), ((694, 718), 'nesi.devices.softbox.base_resources.interface.base.Field', 'base.Field', (['"""logport_id"""'], {}), "('logport_id')\n", (704, 718), False, 'from nesi.devices.softbox.base_resources.interface import Interface, InterfaceCollection, logging, base\n'), ((748, 773), 'nesi.devices.softbox.base_resources.interface.base.Field', 'base.Field', (['"""vcc_profile"""'], {}), "('vcc_profile')\n", (758, 773), False, 'from nesi.devices.softbox.base_resources.interface import Interface, InterfaceCollection, logging, base\n'), ((793, 819), 'nesi.devices.softbox.base_resources.interface.base.Field', 'base.Field', (['"""vlan_profile"""'], {}), "('vlan_profile')\n", (803, 819), False, 'from nesi.devices.softbox.base_resources.interface import Interface, InterfaceCollection, logging, base\n'), ((850, 887), 'nesi.devices.softbox.base_resources.interface.base.Field', 'base.Field', (['"""number_of_conn_services"""'], {}), "('number_of_conn_services')\n", (860, 887), False, 'from nesi.devices.softbox.base_resources.interface import Interface, InterfaceCollection, logging, base\n'), ((918, 955), 'nesi.devices.softbox.base_resources.interface.base.Field', 'base.Field', (['"""reconfiguration_allowed"""'], {}), "('reconfiguration_allowed')\n", (928, 955), False, 'from nesi.devices.softbox.base_resources.interface import Interface, InterfaceCollection, logging, base\n'), ((981, 1013), 'nesi.devices.softbox.base_resources.interface.base.Field', 'base.Field', (['"""services_connected"""'], {}), "('services_connected')\n", (991, 1013), False, 'from nesi.devices.softbox.base_resources.interface import Interface, InterfaceCollection, logging, base\n')]
|
import requests
from bs4 import BeautifulSoup
import pandas as pd
r = requests.get("https://en.wikipedia.org/wiki/List_of_Indian_Premier_League_venues")
htmlContent = r.content
soup = BeautifulSoup(htmlContent, 'html.parser')
stadium_det = [['Stadium', 'Home Teams']]
table = soup.find("tbody").find_all("tr")
table = table[1:] # Removing table Heading
for rows in table:
row = rows.find_all("td")
stadium_name = row[0].getText().strip('\n')
home_team = []
home_teams = row[5].find_all("a")
for team in home_teams:
home_team.append(team.getText())
temp = [stadium_name, home_team]
stadium_det.append(temp)
df = pd.DataFrame(stadium_det[1:], columns=stadium_det[0])  # first row holds the headers
df.to_csv('Stadium_and_Home_Teams.csv')
print("Done")
|
[
"bs4.BeautifulSoup",
"pandas.DataFrame",
"requests.get"
] |
[((129, 216), 'requests.get', 'requests.get', (['"""https://en.wikipedia.org/wiki/List_of_Indian_Premier_League_venues"""'], {}), "(\n 'https://en.wikipedia.org/wiki/List_of_Indian_Premier_League_venues')\n", (141, 216), False, 'import requests\n'), ((243, 284), 'bs4.BeautifulSoup', 'BeautifulSoup', (['htmlContent', '"""html.parser"""'], {}), "(htmlContent, 'html.parser')\n", (256, 284), False, 'from bs4 import BeautifulSoup\n'), ((722, 747), 'pandas.DataFrame', 'pd.DataFrame', (['stadium_det'], {}), '(stadium_det)\n', (734, 747), True, 'import pandas as pd\n')]
|
import logging
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream
log = logging.getLogger(__name__)
class DRDK(Plugin):
live_api_url = 'https://www.dr-massive.com/api/page'
url_re = re.compile(r'''
https?://(?:www\.)?dr\.dk/drtv
(/kanal/[\w-]+)
''', re.VERBOSE)
_live_data_schema = validate.Schema(
{'item': {'customFields': {
validate.optional('hlsURL'): validate.url(),
validate.optional('hlsWithSubtitlesURL'): validate.url(),
}}},
validate.get('item'),
validate.get('customFields'),
)
@classmethod
def can_handle_url(cls, url):
return cls.url_re.match(url) is not None
def _get_live(self, path):
params = dict(
ff='idp',
path=path,
)
res = self.session.http.get(self.live_api_url, params=params)
playlists = self.session.http.json(res, schema=self._live_data_schema)
streams = {}
for name, url in playlists.items():
name_prefix = ''
if name == 'hlsWithSubtitlesURL':
name_prefix = 'subtitled_'
streams.update(HLSStream.parse_variant_playlist(
self.session,
url,
name_prefix=name_prefix,
))
return streams
def _get_streams(self):
m = self.url_re.match(self.url)
path = m and m.group(1)
log.debug("Path={0}".format(path))
return self._get_live(path)
__plugin__ = DRDK
|
[
"streamlink.plugin.api.validate.url",
"streamlink.plugin.api.validate.optional",
"streamlink.plugin.api.validate.get",
"streamlink.stream.HLSStream.parse_variant_playlist",
"logging.getLogger",
"re.compile"
] |
[((153, 180), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (170, 180), False, 'import logging\n'), ((274, 385), 're.compile', 're.compile', (['"""\n https?://(?:www\\\\.)?dr\\\\.dk/drtv\n (/kanal/[\\\\w-]+)\n """', 're.VERBOSE'], {}), '(\n """\n https?://(?:www\\\\.)?dr\\\\.dk/drtv\n (/kanal/[\\\\w-]+)\n """\n , re.VERBOSE)\n', (284, 385), False, 'import re\n'), ((600, 620), 'streamlink.plugin.api.validate.get', 'validate.get', (['"""item"""'], {}), "('item')\n", (612, 620), False, 'from streamlink.plugin.api import validate\n'), ((630, 658), 'streamlink.plugin.api.validate.get', 'validate.get', (['"""customFields"""'], {}), "('customFields')\n", (642, 658), False, 'from streamlink.plugin.api import validate\n'), ((1238, 1314), 'streamlink.stream.HLSStream.parse_variant_playlist', 'HLSStream.parse_variant_playlist', (['self.session', 'url'], {'name_prefix': 'name_prefix'}), '(self.session, url, name_prefix=name_prefix)\n', (1270, 1314), False, 'from streamlink.stream import HLSStream\n'), ((464, 491), 'streamlink.plugin.api.validate.optional', 'validate.optional', (['"""hlsURL"""'], {}), "('hlsURL')\n", (481, 491), False, 'from streamlink.plugin.api import validate\n'), ((521, 561), 'streamlink.plugin.api.validate.optional', 'validate.optional', (['"""hlsWithSubtitlesURL"""'], {}), "('hlsWithSubtitlesURL')\n", (538, 561), False, 'from streamlink.plugin.api import validate\n'), ((493, 507), 'streamlink.plugin.api.validate.url', 'validate.url', ([], {}), '()\n', (505, 507), False, 'from streamlink.plugin.api import validate\n'), ((563, 577), 'streamlink.plugin.api.validate.url', 'validate.url', ([], {}), '()\n', (575, 577), False, 'from streamlink.plugin.api import validate\n')]
|
from collections.abc import Iterable
from itertools import repeat
def _ntuple(n):
def parse(x):
if isinstance(x, Iterable):
return x
return tuple(repeat(x, n))
return parse
_single = _ntuple(1)
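# e.g. _single(3) returns (3,), while _single((3, 4)) is returned unchanged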
|
[
"itertools.repeat"
] |
[((180, 192), 'itertools.repeat', 'repeat', (['x', 'n'], {}), '(x, n)\n', (186, 192), False, 'from itertools import repeat\n')]
|
import vial
def init():
vial.register_command('VialEscape', '.plugin.escape')
vial.register_command('VialSearchOutline', '.plugin.search_outline')
vial.register_command('VialChangedProjects', '.plugin.changed_projects')
vial.register_command('VialNew', '.plugin.new', complete='file', nargs=1)
vial.register_command('VialFilterqf', '.plugin.filter_qf', nargs=1)
vial.register_command('VialAddProjects', '.plugin.add_projects',
complete='dir', bang=True, nargs='*')
vial.register_command('VialAddIgnoreExtension',
'.plugin.add_ignore_extensions', bang=True, nargs='*')
vial.register_command('VialAddIgnoreDirs', '.plugin.add_ignore_dirs',
complete='dir', bang=True, nargs='*')
vial.register_function('VialIndent()', '.plugin.indent')
|
[
"vial.register_command",
"vial.register_function"
] |
[((30, 83), 'vial.register_command', 'vial.register_command', (['"""VialEscape"""', '""".plugin.escape"""'], {}), "('VialEscape', '.plugin.escape')\n", (51, 83), False, 'import vial\n'), ((88, 156), 'vial.register_command', 'vial.register_command', (['"""VialSearchOutline"""', '""".plugin.search_outline"""'], {}), "('VialSearchOutline', '.plugin.search_outline')\n", (109, 156), False, 'import vial\n'), ((161, 233), 'vial.register_command', 'vial.register_command', (['"""VialChangedProjects"""', '""".plugin.changed_projects"""'], {}), "('VialChangedProjects', '.plugin.changed_projects')\n", (182, 233), False, 'import vial\n'), ((238, 311), 'vial.register_command', 'vial.register_command', (['"""VialNew"""', '""".plugin.new"""'], {'complete': '"""file"""', 'nargs': '(1)'}), "('VialNew', '.plugin.new', complete='file', nargs=1)\n", (259, 311), False, 'import vial\n'), ((316, 383), 'vial.register_command', 'vial.register_command', (['"""VialFilterqf"""', '""".plugin.filter_qf"""'], {'nargs': '(1)'}), "('VialFilterqf', '.plugin.filter_qf', nargs=1)\n", (337, 383), False, 'import vial\n'), ((388, 495), 'vial.register_command', 'vial.register_command', (['"""VialAddProjects"""', '""".plugin.add_projects"""'], {'complete': '"""dir"""', 'bang': '(True)', 'nargs': '"""*"""'}), "('VialAddProjects', '.plugin.add_projects', complete=\n 'dir', bang=True, nargs='*')\n", (409, 495), False, 'import vial\n'), ((521, 627), 'vial.register_command', 'vial.register_command', (['"""VialAddIgnoreExtension"""', '""".plugin.add_ignore_extensions"""'], {'bang': '(True)', 'nargs': '"""*"""'}), "('VialAddIgnoreExtension',\n '.plugin.add_ignore_extensions', bang=True, nargs='*')\n", (542, 627), False, 'import vial\n'), ((654, 765), 'vial.register_command', 'vial.register_command', (['"""VialAddIgnoreDirs"""', '""".plugin.add_ignore_dirs"""'], {'complete': '"""dir"""', 'bang': '(True)', 'nargs': '"""*"""'}), "('VialAddIgnoreDirs', '.plugin.add_ignore_dirs',\n complete='dir', bang=True, nargs='*')\n", (675, 765), False, 'import vial\n'), ((793, 849), 'vial.register_function', 'vial.register_function', (['"""VialIndent()"""', '""".plugin.indent"""'], {}), "('VialIndent()', '.plugin.indent')\n", (815, 849), False, 'import vial\n')]
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Tigera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
ZERO = datetime.timedelta(0)
class UTC(datetime.tzinfo):
def utcoffset(self, dt):
return ZERO
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return ZERO
utc = UTC()
def timestamp_now():
utc_now = datetime.datetime.now(utc)
return utc_now.strftime('%Y-%m-%dT%H:%M:%SZ')
# e.g. 2015-05-19T20:32:12Z
|
[
"datetime.timedelta",
"datetime.datetime.now"
] |
[((651, 672), 'datetime.timedelta', 'datetime.timedelta', (['(0)'], {}), '(0)\n', (669, 672), False, 'import datetime\n'), ((894, 920), 'datetime.datetime.now', 'datetime.datetime.now', (['utc'], {}), '(utc)\n', (915, 920), False, 'import datetime\n')]
|
import torch
import argparse
import sys
import os
import torchvision
import torchvision.transforms as transforms
from fedlab.core.client.scale.trainer import SubsetSerialTrainer
from fedlab.core.client.scale.manager import ScaleClientPassiveManager
from fedlab.core.network import DistNetwork
from fedlab.utils.logger import Logger
from fedlab.utils.aggregator import Aggregators
from fedlab.utils.functional import load_dict
sys.path.append("../../../")
from models.cnn import CNN_MNIST
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Distbelief training example")
parser.add_argument("--ip", type=str, default="127.0.0.1")
parser.add_argument("--port", type=str, default="3002")
parser.add_argument("--world_size", type=int)
parser.add_argument("--rank", type=int)
parser.add_argument("--partition", type=str, default="noniid")
parser.add_argument("--gpu", type=str, default="0,1,2,3")
parser.add_argument("--ethernet", type=str, default=None)
args = parser.parse_args()
if args.gpu != "-1":
args.cuda = True
os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
else:
args.cuda = False
trainset = torchvision.datasets.MNIST(
root='../../../datasets/mnist/',
train=True,
download=True,
transform=transforms.ToTensor())
if args.partition == "noniid":
data_indices = load_dict("mnist_noniid.pkl")
elif args.partition == "iid":
data_indices = load_dict("mnist_iid.pkl")
else:
raise ValueError("invalid partition type ", args.partition)
    # Process rank x represents client ids from (x-1)*10 to (x-1)*10 + 10
    # e.g. rank 5 <--> clients 40-50
client_id_list = [
i for i in range((args.rank - 1) * 10, (args.rank - 1) * 10 + 10)
]
# get corresponding data partition indices
sub_data_indices = {
idx: data_indices[cid]
for idx, cid in enumerate(client_id_list)
}
model = CNN_MNIST()
aggregator = Aggregators.fedavg_aggregate
network = DistNetwork(address=(args.ip, args.port),
world_size=args.world_size,
rank=args.rank,
ethernet=args.ethernet)
trainer = SubsetSerialTrainer(model=model,
dataset=trainset,
data_slices=sub_data_indices,
aggregator=aggregator,
args={
"batch_size": 100,
"lr": 0.02,
"epochs": 5
})
manager_ = ScaleClientPassiveManager(trainer=trainer, network=network)
manager_.run()
|
[
"sys.path.append",
"argparse.ArgumentParser",
"fedlab.core.client.scale.trainer.SubsetSerialTrainer",
"fedlab.core.client.scale.manager.ScaleClientPassiveManager",
"fedlab.utils.functional.load_dict",
"models.cnn.CNN_MNIST",
"fedlab.core.network.DistNetwork",
"torchvision.transforms.ToTensor"
] |
[((430, 458), 'sys.path.append', 'sys.path.append', (['"""../../../"""'], {}), "('../../../')\n", (445, 458), False, 'import sys\n'), ((534, 600), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Distbelief training example"""'}), "(description='Distbelief training example')\n", (557, 600), False, 'import argparse\n'), ((1988, 1999), 'models.cnn.CNN_MNIST', 'CNN_MNIST', ([], {}), '()\n', (1997, 1999), False, 'from models.cnn import CNN_MNIST\n'), ((2066, 2180), 'fedlab.core.network.DistNetwork', 'DistNetwork', ([], {'address': '(args.ip, args.port)', 'world_size': 'args.world_size', 'rank': 'args.rank', 'ethernet': 'args.ethernet'}), '(address=(args.ip, args.port), world_size=args.world_size, rank=\n args.rank, ethernet=args.ethernet)\n', (2077, 2180), False, 'from fedlab.core.network import DistNetwork\n'), ((2269, 2432), 'fedlab.core.client.scale.trainer.SubsetSerialTrainer', 'SubsetSerialTrainer', ([], {'model': 'model', 'dataset': 'trainset', 'data_slices': 'sub_data_indices', 'aggregator': 'aggregator', 'args': "{'batch_size': 100, 'lr': 0.02, 'epochs': 5}"}), "(model=model, dataset=trainset, data_slices=\n sub_data_indices, aggregator=aggregator, args={'batch_size': 100, 'lr':\n 0.02, 'epochs': 5})\n", (2288, 2432), False, 'from fedlab.core.client.scale.trainer import SubsetSerialTrainer\n'), ((2678, 2737), 'fedlab.core.client.scale.manager.ScaleClientPassiveManager', 'ScaleClientPassiveManager', ([], {'trainer': 'trainer', 'network': 'network'}), '(trainer=trainer, network=network)\n', (2703, 2737), False, 'from fedlab.core.client.scale.manager import ScaleClientPassiveManager\n'), ((1413, 1442), 'fedlab.utils.functional.load_dict', 'load_dict', (['"""mnist_noniid.pkl"""'], {}), "('mnist_noniid.pkl')\n", (1422, 1442), False, 'from fedlab.utils.functional import load_dict\n'), ((1331, 1352), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1350, 1352), True, 'import torchvision.transforms as transforms\n'), ((1500, 1526), 'fedlab.utils.functional.load_dict', 'load_dict', (['"""mnist_iid.pkl"""'], {}), "('mnist_iid.pkl')\n", (1509, 1526), False, 'from fedlab.utils.functional import load_dict\n')]
|
import argparse
import torch
from pathlib import Path
import h5py
import logging
from tqdm import tqdm
import pprint
import json
import numpy as np
from . import matchers
from .utils.base_model import dynamic_load
from .utils.parsers import names_to_pair
'''
A set of standard configurations that can be directly selected from the command
line using their name. Each is a dictionary with the following entries:
- output: the name of the match file that will be generated.
- model: the model configuration, as passed to a feature matcher.
'''
confs = {
'superglue': {
'output': 'matches-superglue',
'model': {
'name': 'superglue',
'weights': 'outdoor',
'sinkhorn_iterations': 50,
},
},
'NN': {
'output': 'matches-NN-mutual-dist.7',
'model': {
'name': 'nearest_neighbor',
'mutual_check': True,
'distance_threshold': 0.7,
},
}
}
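# Usage sketch (hypothetical invocation, not part of the original module):
#   conf = confs['superglue']
#   model = get_model(conf)
# 'output' only names the match file that downstream code will write.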
def get_model(conf):
device = 'cuda' if torch.cuda.is_available() else 'cpu'
Model = dynamic_load(matchers, conf['model']['name'])
model = Model(conf['model']).eval().to(device)
return model
@torch.no_grad()
def do_match(name0, name1, pairs, matched, num_matches_found, model, match_file, feature_file, query_feature_file, min_match_score, min_valid_ratio):
device = 'cuda' if torch.cuda.is_available() else 'cpu'
pair = names_to_pair(name0, name1)
    # Avoid recomputing duplicate pairs to save time
if len({(name0, name1), (name1, name0)} & matched) or pair in match_file:
return num_matches_found
data = {}
feats0, feats1 = query_feature_file[name0], feature_file[name1]
for k in feats1.keys():
data[k+'0'] = feats0[k].__array__()
for k in feats1.keys():
data[k+'1'] = feats1[k].__array__()
data = {k: torch.from_numpy(v)[None].float().to(device)
for k, v in data.items()}
# some matchers might expect an image but only use its size
data['image0'] = torch.empty((1, 1,)+tuple(feats0['image_size'])[::-1])
data['image1'] = torch.empty((1, 1,)+tuple(feats1['image_size'])[::-1])
pred = model(data)
matches = pred['matches0'][0].cpu().short().numpy()
scores = pred['matching_scores0'][0].cpu().half().numpy()
# if score < min_match_score, set match to invalid
    matches[scores < min_match_score] = -1
num_valid = np.count_nonzero(matches > -1)
if float(num_valid)/len(matches) > min_valid_ratio:
v = pairs.get(name0)
if v is None:
            v = set()
v.add(name1)
pairs[name0] = v
grp = match_file.create_group(pair)
grp.create_dataset('matches0', data=matches)
grp.create_dataset('matching_scores0', data=scores)
matched |= {(name0, name1), (name1, name0)}
num_matches_found += 1
return num_matches_found
@torch.no_grad()
def best_match(conf, global_feature_path, feature_path, match_output_path, query_global_feature_path=None, query_feature_path=None, num_match_required=10,
               max_try=None, min_matched=None, pair_file_path=None, num_seq=None, sample_list=None, sample_list_path=None, min_match_score=0.85, min_valid_ratio=0.09):
logging.info('Dyn Matching local features with configuration:'
f'\n{pprint.pformat(conf)}')
assert global_feature_path.exists(), feature_path
global_feature_file = h5py.File(str(global_feature_path), 'r')
if query_global_feature_path is not None:
        logging.info(f'Using query_global_feature_path: {query_global_feature_path}')
query_global_feature_file = h5py.File(str(query_global_feature_path), 'r')
else:
query_global_feature_file = global_feature_file
assert feature_path.exists(), feature_path
feature_file = h5py.File(str(feature_path), 'r')
if query_feature_path is not None:
        logging.info(f'Using query_feature_path: {query_feature_path}')
query_feature_file = h5py.File(str(query_feature_path), 'r')
else:
query_feature_file = feature_file
match_file = h5py.File(str(match_output_path), 'a')
if sample_list_path is not None:
        sample_list = json.load(open(str(sample_list_path), 'r'))
# get all sample names
if sample_list is not None:
names = sample_list
q_names = names
else:
names = []
global_feature_file.visititems(
lambda _, obj: names.append(obj.parent.name.strip('/'))
if isinstance(obj, h5py.Dataset) else None)
names = list(set(names))
names.sort()
q_names = []
query_global_feature_file.visititems(
lambda _, obj: q_names.append(obj.parent.name.strip('/'))
if isinstance(obj, h5py.Dataset) else None)
q_names = list(set(q_names))
q_names.sort()
device = 'cuda' if torch.cuda.is_available() else 'cpu'
def tensor_from_names(names, hfile):
desc = [hfile[i]['global_descriptor'].__array__() for i in names]
desc = torch.from_numpy(np.stack(desc, 0)).to(device).float()
return desc
desc = tensor_from_names(names, global_feature_file)
if query_global_feature_path is not None:
q_desc = tensor_from_names(q_names, query_global_feature_file)
else:
q_desc = desc
# descriptors are normalized, dot product indicates how close they are
sim = torch.einsum('id,jd->ij', q_desc, desc)
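    # Note: einsum('id,jd->ij', q_desc, desc) is just q_desc @ desc.t(); with
    # L2-normalized descriptors, entry (i, j) is the cosine similarity between
    # query i and database image j.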
if max_try is None:
max_try = len(names)
topk = torch.topk(sim, max_try, dim=1).indices.cpu().numpy()
Model = dynamic_load(matchers, conf['model']['name'])
model = Model(conf['model']).eval().to(device)
pairs = {}
matched = set()
for name0, indices in tqdm(zip(q_names, topk)):
num_matches_found = 0
# try sequential neighbor first
if num_seq is not None:
name0_at = names.index(name0)
begin_from = name0_at - num_seq
if begin_from < 0:
begin_from = 0
for i in range(begin_from, name0_at+num_seq):
if i >= len(names):
break
name1 = names[i]
if name0 != name1:
num_matches_found = do_match(name0, name1, pairs, matched, num_matches_found, model, match_file, feature_file, query_feature_file, min_match_score, min_valid_ratio)
        # then the global retrieval
for i in indices:
name1 = names[i]
if query_global_feature_path is not None or name0 != name1:
num_matches_found = do_match(name0, name1, pairs, matched, num_matches_found, model, match_file, feature_file, query_feature_file, min_match_score, min_valid_ratio)
if num_matches_found >= num_match_required:
break
if num_matches_found < num_match_required:
logging.warning(f'num match for {name0} found {num_matches_found} less than num_match_required:{num_match_required}')
match_file.close()
if pair_file_path is not None:
if min_matched is not None:
pairs = {k:v for k,v in pairs.items() if len(v) >= min_matched }
pairs_list = []
for n0 in pairs.keys():
for n1 in pairs.get(n0):
pairs_list.append((n0,n1))
with open(str(pair_file_path), 'w') as f:
f.write('\n'.join(' '.join([i, j]) for i, j in pairs_list))
logging.info('Finished exporting matches.')
@torch.no_grad()
def main(conf, pairs, features, export_dir, db_features=None, query_features=None, output_dir=None, exhaustive=False):
logging.info('Matching local features with configuration:'
f'\n{pprint.pformat(conf)}')
if db_features:
feature_path = db_features
else:
feature_path = Path(export_dir, features+'.h5')
assert feature_path.exists(), feature_path
feature_file = h5py.File(str(feature_path), 'r')
if query_features is not None:
logging.info(f'Using query_features {query_features}')
else:
logging.info('No query_features')
query_features = feature_path
assert query_features.exists(), query_features
query_feature_file = h5py.File(str(query_features), 'r')
pairs_name = pairs.stem
if not exhaustive:
assert pairs.exists(), pairs
with open(pairs, 'r') as f:
pair_list = f.read().rstrip('\n').split('\n')
elif exhaustive:
logging.info(f'Writing exhaustive match pairs to {pairs}.')
assert not pairs.exists(), pairs
# get the list of images from the feature file
images = []
feature_file.visititems(
lambda name, obj: images.append(obj.parent.name.strip('/'))
if isinstance(obj, h5py.Dataset) else None)
images = list(set(images))
pair_list = [' '.join((images[i], images[j]))
for i in range(len(images)) for j in range(i)]
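        # e.g. images == ['a', 'b', 'c'] yields 'b a', 'c a', 'c b':
        # each unordered pair exactly once, with no self-pairs.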
with open(str(pairs), 'w') as f:
f.write('\n'.join(pair_list))
device = 'cuda' if torch.cuda.is_available() else 'cpu'
Model = dynamic_load(matchers, conf['model']['name'])
model = Model(conf['model']).eval().to(device)
match_name = f'{features}_{conf["output"]}_{pairs_name}'
if output_dir is None:
output_dir = export_dir
match_path = Path(output_dir, match_name+'.h5')
match_path.parent.mkdir(exist_ok=True, parents=True)
match_file = h5py.File(str(match_path), 'a')
matched = set()
for pair in tqdm(pair_list, smoothing=.1):
name0, name1 = pair.split(' ')
pair = names_to_pair(name0, name1)
        # Avoid recomputing duplicate pairs to save time
if len({(name0, name1), (name1, name0)} & matched) \
or pair in match_file:
continue
data = {}
feats0, feats1 = query_feature_file[name0], feature_file[name1]
for k in feats1.keys():
data[k+'0'] = feats0[k].__array__()
for k in feats1.keys():
data[k+'1'] = feats1[k].__array__()
data = {k: torch.from_numpy(v)[None].float().to(device)
for k, v in data.items()}
# some matchers might expect an image but only use its size
data['image0'] = torch.empty((1, 1,)+tuple(feats0['image_size'])[::-1])
data['image1'] = torch.empty((1, 1,)+tuple(feats1['image_size'])[::-1])
pred = model(data)
grp = match_file.create_group(pair)
matches = pred['matches0'][0].cpu().short().numpy()
grp.create_dataset('matches0', data=matches)
if 'matching_scores0' in pred:
scores = pred['matching_scores0'][0].cpu().half().numpy()
grp.create_dataset('matching_scores0', data=scores)
matched |= {(name0, name1), (name1, name0)}
match_file.close()
logging.info('Finished exporting matches.')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--export_dir', type=Path)
parser.add_argument('--output_dir', type=Path, required=False)
parser.add_argument('--features', type=str,
default='feats-superpoint-n4096-r1024')
parser.add_argument('--db_features', type=Path)
parser.add_argument('--query_features', type=Path, required=False)
parser.add_argument('--pairs', type=Path)
parser.add_argument('--conf', type=str, default='superglue',
choices=list(confs.keys()))
parser.add_argument('--exhaustive', action='store_true')
# best_match
parser.add_argument('--best_match', action='store_true')
parser.add_argument('--global_feature_path', type=Path)
parser.add_argument('--feature_path', type=Path)
parser.add_argument('--query_global_feature_path', type=Path)
parser.add_argument('--query_feature_path', type=Path)
parser.add_argument('--match_output_path', type=Path)
parser.add_argument('--num_match_required', type=int, default=10)
parser.add_argument('--min_matched', type=int, default=1)
parser.add_argument('--max_try', type=int)
parser.add_argument('--num_seq', type=int)
parser.add_argument('--min_match_score', type=float, default=0.85)
parser.add_argument('--min_valid_ratio', type=float, default=0.09)
parser.add_argument('--sample_list_path', type=Path)
parser.add_argument('--pair_file_path', type=Path)
args = parser.parse_args()
if args.best_match:
best_match(confs[args.conf], args.global_feature_path, args.feature_path, args.match_output_path,
query_global_feature_path=args.query_global_feature_path, query_feature_path=args.query_feature_path,
num_match_required=args.num_match_required, min_matched=args.min_matched, min_match_score=args.min_match_score, min_valid_ratio=args.min_valid_ratio,
max_try=args.max_try, num_seq=args.num_seq, sample_list_path=args.sample_list_path, pair_file_path=args.pair_file_path)
else:
main(
            confs[args.conf], args.pairs, args.features, args.export_dir,
db_features=args.db_features, query_features=args.query_features, output_dir=args.output_dir, exhaustive=args.exhaustive)
|
[
"numpy.stack",
"tqdm.tqdm",
"pprint.pformat",
"numpy.count_nonzero",
"argparse.ArgumentParser",
"torch.topk",
"logging.warning",
"logging.info",
"torch.einsum",
"pathlib.Path",
"torch.cuda.is_available",
"torch.no_grad",
"torch.from_numpy"
] |
[((1171, 1186), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1184, 1186), False, 'import torch\n'), ((2879, 2894), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2892, 2894), False, 'import torch\n'), ((7473, 7488), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (7486, 7488), False, 'import torch\n'), ((2398, 2428), 'numpy.count_nonzero', 'np.count_nonzero', (['(matches > -1)'], {}), '(matches > -1)\n', (2414, 2428), True, 'import numpy as np\n'), ((5401, 5440), 'torch.einsum', 'torch.einsum', (['"""id,jd->ij"""', 'q_desc', 'desc'], {}), "('id,jd->ij', q_desc, desc)\n", (5413, 5440), False, 'import torch\n'), ((7427, 7470), 'logging.info', 'logging.info', (['"""Finished exporting matches."""'], {}), "('Finished exporting matches.')\n", (7439, 7470), False, 'import logging\n'), ((9339, 9375), 'pathlib.Path', 'Path', (['output_dir', "(match_name + '.h5')"], {}), "(output_dir, match_name + '.h5')\n", (9343, 9375), False, 'from pathlib import Path\n'), ((9517, 9547), 'tqdm.tqdm', 'tqdm', (['pair_list'], {'smoothing': '(0.1)'}), '(pair_list, smoothing=0.1)\n', (9521, 9547), False, 'from tqdm import tqdm\n'), ((10831, 10874), 'logging.info', 'logging.info', (['"""Finished exporting matches."""'], {}), "('Finished exporting matches.')\n", (10843, 10874), False, 'import logging\n'), ((10917, 10942), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (10940, 10942), False, 'import argparse\n'), ((1006, 1031), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1029, 1031), False, 'import torch\n'), ((1361, 1386), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1384, 1386), False, 'import torch\n'), ((3509, 3586), 'logging.info', 'logging.info', (['f"""(Using query_global_feature_path:{query_global_feature_path}"""'], {}), "(f'(Using query_global_feature_path:{query_global_feature_path}')\n", (3521, 3586), False, 'import logging\n'), ((3884, 3947), 'logging.info', 'logging.info', (['f"""(Using query_feature_path:{query_feature_path}"""'], {}), "(f'(Using query_feature_path:{query_feature_path}')\n", (3896, 3947), False, 'import logging\n'), ((4866, 4891), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4889, 4891), False, 'import torch\n'), ((7806, 7840), 'pathlib.Path', 'Path', (['export_dir', "(features + '.h5')"], {}), "(export_dir, features + '.h5')\n", (7810, 7840), False, 'from pathlib import Path\n'), ((7983, 8037), 'logging.info', 'logging.info', (['f"""Using query_features {query_features}"""'], {}), "(f'Using query_features {query_features}')\n", (7995, 8037), False, 'import logging\n'), ((8056, 8089), 'logging.info', 'logging.info', (['"""No query_features"""'], {}), "('No query_features')\n", (8068, 8089), False, 'import logging\n'), ((9055, 9080), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (9078, 9080), False, 'import torch\n'), ((6875, 7002), 'logging.warning', 'logging.warning', (['f"""num match for {name0} found {num_matches_found} less than num_match_required:{num_match_required}"""'], {}), "(\n    f'num match for {name0} found {num_matches_found} less than num_match_required:{num_match_required}'\n    )\n", (6890, 7002), False, 'import logging\n'), ((8452, 8511), 'logging.info', 'logging.info', (['f"""Writing exhaustive match pairs to {pairs}."""'], {}), "(f'Writing exhaustive match pairs to {pairs}.')\n", (8464, 8511), False, 'import logging\n'), ((3308, 3328), 'pprint.pformat', 'pprint.pformat', (['conf'], {}), '(conf)\n', (3322, 3328), False, 'import pprint\n'), ((7693, 7713), 'pprint.pformat', 'pprint.pformat', (['conf'], {}), '(conf)\n', (7707, 7713), False, 'import pprint\n'), ((5505, 5536), 'torch.topk', 'torch.topk', (['sim', 'max_try'], {'dim': '(1)'}), '(sim, max_try, dim=1)\n', (5515, 5536), False, 'import torch\n'), ((1840, 1859), 'torch.from_numpy', 'torch.from_numpy', (['v'], {}), '(v)\n', (1856, 1859), False, 'import torch\n'), ((5051, 5068), 'numpy.stack', 'np.stack', (['desc', '(0)'], {}), '(desc, 0)\n', (5059, 5068), True, 'import numpy as np\n'), ((10075, 10094), 'torch.from_numpy', 'torch.from_numpy', (['v'], {}), '(v)\n', (10091, 10094), False, 'import torch\n')]
|
import datetime
import decimal
def test_sanity():
from kvfile import KVFile
kv = KVFile()
data = dict(
s='value',
i=123,
d=datetime.datetime.fromtimestamp(12325),
n=decimal.Decimal('1234.56'),
ss=set(range(10)),
o=dict(d=decimal.Decimal('1234.58'), n=datetime.datetime.fromtimestamp(12325))
)
for k, v in data.items():
kv.set(k, v)
for k, v in data.items():
assert kv.get(k) == v
assert sorted(kv.keys()) == sorted(data.keys())
assert sorted(kv.items()) == sorted(data.items())
|
[
"decimal.Decimal",
"kvfile.KVFile",
"datetime.datetime.fromtimestamp"
] |
[((91, 99), 'kvfile.KVFile', 'KVFile', ([], {}), '()\n', (97, 99), False, 'from kvfile import KVFile\n'), ((164, 202), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['(12325)'], {}), '(12325)\n', (195, 202), False, 'import datetime\n'), ((215, 241), 'decimal.Decimal', 'decimal.Decimal', (['"""1234.56"""'], {}), "('1234.56')\n", (230, 241), False, 'import decimal\n'), ((287, 313), 'decimal.Decimal', 'decimal.Decimal', (['"""1234.58"""'], {}), "('1234.58')\n", (302, 313), False, 'import decimal\n'), ((317, 355), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['(12325)'], {}), '(12325)\n', (348, 355), False, 'import datetime\n')]
|
from unittest import TestCase
from i18n import translate, LangText
langs = {
"lang1": "asd.text=aaa",
"lang2": "asd.text=bbb"
}
class TestLangText(LangText):
def load(self, langname):
super(TestLangText, self)._loadText(langs[langname])
@translate(TestLangText)
class TestTranslation:
def __init__(self):
self.text = "notset"
self.lang("asd.text", self.set_text)
def set_text(self, text):
self.text = text
class StepsTest(TestCase):
def test_should_(self):
testt = TestTranslation()
self.assertEqual(testt.text, "notset")
testt.lang_set("lang1")
self.assertEqual(testt.text, "aaa")
testt.lang_set("lang2")
self.assertEqual(testt.text, "bbb")
|
[
"i18n.translate"
] |
[((263, 286), 'i18n.translate', 'translate', (['TestLangText'], {}), '(TestLangText)\n', (272, 286), False, 'from i18n import translate, LangText\n')]
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.module_utils.facts.virtual.base import Virtual, VirtualCollector
class SunOSVirtual(Virtual):
"""
This is a SunOS-specific subclass of Virtual. It defines
- virtualization_type
- virtualization_role
- container
"""
platform = 'SunOS'
def get_virtual_facts(self):
virtual_facts = {}
host_tech = set()
guest_tech = set()
# Check if it's a zone
zonename = self.module.get_bin_path('zonename')
if zonename:
rc, out, err = self.module.run_command(zonename)
if rc == 0:
if out.rstrip() == "global":
host_tech.add('zone')
else:
guest_tech.add('zone')
virtual_facts['container'] = 'zone'
# Check if it's a branded zone (i.e. Solaris 8/9 zone)
if os.path.isdir('/.SUNWnative'):
guest_tech.add('zone')
virtual_facts['container'] = 'zone'
# If it's a zone check if we can detect if our global zone is itself virtualized.
# Relies on the "guest tools" (e.g. vmware tools) to be installed
if 'container' in virtual_facts and virtual_facts['container'] == 'zone':
modinfo = self.module.get_bin_path('modinfo')
if modinfo:
rc, out, err = self.module.run_command(modinfo)
if rc == 0:
for line in out.splitlines():
if 'VMware' in line:
guest_tech.add('vmware')
virtual_facts['virtualization_type'] = 'vmware'
virtual_facts['virtualization_role'] = 'guest'
if 'VirtualBox' in line:
guest_tech.add('virtualbox')
virtual_facts['virtualization_type'] = 'virtualbox'
virtual_facts['virtualization_role'] = 'guest'
if os.path.exists('/proc/vz'):
guest_tech.add('virtuozzo')
virtual_facts['virtualization_type'] = 'virtuozzo'
virtual_facts['virtualization_role'] = 'guest'
# Detect domaining on Sparc hardware
virtinfo = self.module.get_bin_path('virtinfo')
if virtinfo:
            # The output of virtinfo differs depending on whether we are on a machine with logical
            # domains ('LDoms') on a T-series or domains ('Domains') on an M-series. Try LDoms first.
rc, out, err = self.module.run_command("/usr/sbin/virtinfo -p")
# The output contains multiple lines with different keys like this:
# DOMAINROLE|impl=LDoms|control=false|io=false|service=false|root=false
            # The output may also be unformatted, and the return code is 0 regardless of the error condition:
# virtinfo can only be run from the global zone
if rc == 0:
try:
for line in out.splitlines():
fields = line.split('|')
if fields[0] == 'DOMAINROLE' and fields[1] == 'impl=LDoms':
guest_tech.add('ldom')
virtual_facts['virtualization_type'] = 'ldom'
virtual_facts['virtualization_role'] = 'guest'
hostfeatures = []
for field in fields[2:]:
arg = field.split('=')
if arg[1] == 'true':
hostfeatures.append(arg[0])
if len(hostfeatures) > 0:
virtual_facts['virtualization_role'] = 'host (' + ','.join(hostfeatures) + ')'
except ValueError:
pass
else:
smbios = self.module.get_bin_path('smbios')
if not smbios:
return
rc, out, err = self.module.run_command(smbios)
if rc == 0:
for line in out.splitlines():
if 'VMware' in line:
guest_tech.add('vmware')
virtual_facts['virtualization_type'] = 'vmware'
virtual_facts['virtualization_role'] = 'guest'
elif 'Parallels' in line:
guest_tech.add('parallels')
virtual_facts['virtualization_type'] = 'parallels'
virtual_facts['virtualization_role'] = 'guest'
elif 'VirtualBox' in line:
guest_tech.add('virtualbox')
virtual_facts['virtualization_type'] = 'virtualbox'
virtual_facts['virtualization_role'] = 'guest'
elif 'HVM domU' in line:
guest_tech.add('xen')
virtual_facts['virtualization_type'] = 'xen'
virtual_facts['virtualization_role'] = 'guest'
elif 'KVM' in line:
guest_tech.add('kvm')
virtual_facts['virtualization_type'] = 'kvm'
virtual_facts['virtualization_role'] = 'guest'
virtual_facts['virtualization_tech_guest'] = guest_tech
virtual_facts['virtualization_tech_host'] = host_tech
return virtual_facts
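# Illustrative (hypothetical) return value for a VMware guest detected via
# smbios: {'virtualization_type': 'vmware', 'virtualization_role': 'guest',
# 'virtualization_tech_guest': {'vmware'}, 'virtualization_tech_host': set()}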
class SunOSVirtualCollector(VirtualCollector):
_fact_class = SunOSVirtual
_platform = 'SunOS'
|
[
"os.path.isdir",
"os.path.exists"
] |
[((1622, 1651), 'os.path.isdir', 'os.path.isdir', (['"""/.SUNWnative"""'], {}), "('/.SUNWnative')\n", (1635, 1651), False, 'import os\n'), ((2729, 2755), 'os.path.exists', 'os.path.exists', (['"""/proc/vz"""'], {}), "('/proc/vz')\n", (2743, 2755), False, 'import os\n')]
|
from django import forms
from app.models import Post, Comment, Profile
from django.contrib.auth.models import User
class CommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ('name','email','comment_body' )
class RegisterForm(forms.ModelForm):
username = forms.CharField(max_length=100, label='<NAME>')
first_name = forms.CharField(max_length=100, label='الاسم الاول')
last_name = forms.CharField(max_length=100, label='الاسم الاخير')
email = forms.EmailField(label='البريد الالكتروني')
password1= forms.CharField(widget=forms.PasswordInput(),label='<PASSWORD>رور',min_length=8)
password2 = forms.CharField(widget=forms.PasswordInput(), label='تأكيد كلمة المرور', min_length=8)
class Meta:
model = User
fields = ('username','first_name','last_name','email','password1','<PASSWORD>')
def clean_username(self):
cd = self.cleaned_data
if User.objects.filter(username=cd['username']).exists():
raise forms.ValidationError('اسم المستخدم موجود مسبقا')
return cd['username']
def clean_password2(self):
cd = self.cleaned_data
if cd['password1'] != cd['password2']:
raise forms.ValidationError('كلمة المرور غير متطابقة')
return cd['password2']
class LoginForm(forms.ModelForm):
username= forms.CharField(max_length=100, label='<NAME>')
password= forms.CharField(widget=forms.PasswordInput(), label='<PASSWORD>رور')
class Meta:
model= User
fields= ('username', 'password')
class UserUpdateForm(forms.ModelForm):
first_name = forms.CharField(label='الاسم الأول')
last_name = forms.CharField(label='الاسم الأخير')
email = forms.EmailField(label='البريد الإلكتروني')
class Meta:
model = User
fields = ('first_name', 'last_name', 'email')
class ProfileUpdateForm(forms.ModelForm):
class Meta:
model = Profile
fields = ('image',)
#class ProfileUpdateForm(forms.ModelForm):
# first_name = forms.CharField(max_length=100, label='الاسم الاول')
# last_name = forms.CharField(max_length=100, label='الاسم الاخير')
# email = forms.EmailField(label='البريد الالكتروني')
# class Meta:
# model = User
# fields = ('first_name', 'last_name', 'email')
#
#class ImageUpdateForm(forms.ModelForm):
# class Meta:
# model = Profile
# fields = ('image', )
class NewPost(forms.ModelForm):
post_name= forms.CharField(max_length=500, label='عنوان التدوينة')
post_body= forms.TextInput()
class Meta:
model= Post
fields=('post_name', 'post_body',)
|
[
"django.forms.EmailField",
"django.forms.TextInput",
"django.forms.PasswordInput",
"django.contrib.auth.models.User.objects.filter",
"django.forms.ValidationError",
"django.forms.CharField"
] |
[((334, 381), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'label': '"""<NAME>"""'}), "(max_length=100, label='<NAME>')\n", (349, 381), False, 'from django import forms\n'), ((399, 451), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'label': '"""الاسم الاول"""'}), "(max_length=100, label='الاسم الاول')\n", (414, 451), False, 'from django import forms\n'), ((468, 521), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'label': '"""الاسم الاخير"""'}), "(max_length=100, label='الاسم الاخير')\n", (483, 521), False, 'from django import forms\n'), ((534, 577), 'django.forms.EmailField', 'forms.EmailField', ([], {'label': '"""البريد الالكتروني"""'}), "(label='البريد الالكتروني')\n", (550, 577), False, 'from django import forms\n'), ((1425, 1472), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'label': '"""<NAME>"""'}), "(max_length=100, label='<NAME>')\n", (1440, 1472), False, 'from django import forms\n'), ((1707, 1743), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""الاسم الأول"""'}), "(label='الاسم الأول')\n", (1722, 1743), False, 'from django import forms\n'), ((1760, 1797), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""الاسم الأخير"""'}), "(label='الاسم الأخير')\n", (1775, 1797), False, 'from django import forms\n'), ((1810, 1853), 'django.forms.EmailField', 'forms.EmailField', ([], {'label': '"""البريد الإلكتروني"""'}), "(label='البريد الإلكتروني')\n", (1826, 1853), False, 'from django import forms\n'), ((2577, 2632), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(500)', 'label': '"""عنوان التدوينة"""'}), "(max_length=500, label='عنوان التدوينة')\n", (2592, 2632), False, 'from django import forms\n'), ((2648, 2665), 'django.forms.TextInput', 'forms.TextInput', ([], {}), '()\n', (2663, 2665), False, 'from django import forms\n'), ((616, 637), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (635, 637), False, 'from django import forms\n'), ((713, 734), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (732, 734), False, 'from django import forms\n'), ((1065, 1114), 'django.forms.ValidationError', 'forms.ValidationError', (['"""اسم المستخدم موجود مسبقا"""'], {}), "('اسم المستخدم موجود مسبقا')\n", (1086, 1114), False, 'from django import forms\n'), ((1282, 1330), 'django.forms.ValidationError', 'forms.ValidationError', (['"""كلمة المرور غير متطابقة"""'], {}), "('كلمة المرور غير متطابقة')\n", (1303, 1330), False, 'from django import forms\n'), ((1510, 1531), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (1529, 1531), False, 'from django import forms\n'), ((992, 1036), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'username': "cd['username']"}), "(username=cd['username'])\n", (1011, 1036), False, 'from django.contrib.auth.models import User\n')]
|
import traceback
from email.parser import Parser
import sentry_sdk
from aiosmtpd.controller import Controller
from aiosmtpd.lmtp import LMTP
from aiosmtpd.smtp import Envelope
from . import make_logger
class CustomLMTP(LMTP):
"""
A relatively simple wrapper around the LMTP/SMTP classes that implements some less obtrusive logging around
connections.
Required until https://github.com/aio-libs/aiosmtpd/issues/239 has been resolved.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.custom_logger = make_logger("lmtp")
def _get_peer_name(self):
return f"{self.session.peer[0]}:{self.session.peer[1]}"
def connection_made(self, *args, **kwargs):
# we have to run the superclass method in advance, as it'll set up self.session, which we'll need in
# _get_peer_name
rv = super().connection_made(*args, **kwargs)
self.custom_logger.info("Client connected: %s", self._get_peer_name())
return rv
def connection_lost(self, *args, **kwargs):
self.custom_logger.info("Client connection lost: %s", self._get_peer_name())
return super().connection_lost(*args, **kwargs)
class LmtpController(Controller):
"""
    A custom controller implementation, returning LMTP instances instead of SMTP ones.
    Inspired by GNU Mailman 3's LMTPController.
"""
def factory(self):
return CustomLMTP(self.handler, ident="NewPipe crash report importer")
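# Wiring sketch (assumed usage; `store_event`, the host, and the port are
# placeholders, not project defaults):
#   controller = LmtpController(CrashReportHandler(store_event),
#                               hostname='127.0.0.1', port=8024)
#   controller.start()  # serve LMTP until controller.stop() is called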
class CrashReportHandler:
"""
    Very simple handler which only accepts mail for allowed addresses and stores it in the Sentry database.
"""
def __init__(self, callback: callable):
self.callback = callback
self.logger = make_logger("lmtp_handler")
async def handle_RCPT(
self, server, session, envelope: Envelope, address: str, rcpt_options
):
if address not in ["<EMAIL>", "<EMAIL>"]:
return f"550 not handling mail for address {address}"
envelope.rcpt_tos.append(address)
return "250 OK"
@staticmethod
def convert_to_rfc822_message(envelope: Envelope):
return Parser().parsestr(envelope.content.decode())
async def handle_DATA(self, server, session, envelope: Envelope):
try:
message = self.convert_to_rfc822_message(envelope)
# as the volume of incoming mails is relatively low (< 3 per minute usually) and reporting doesn't take
# very long, we can just do it here and don't require some message queue/worker setup
# the callback is defined as async, but can, due to the low volume, be implemented synchronously, too
await self.callback(message)
        except Exception:
# in case an exception happens in the callback (e.g., the message can't be parsed correctly), we don't
# want to notify the sending MTA, but have them report success of delivery
# it's after all not their problem: if they got so far, the message was indeed delivered to our LMTP server
# however, we want the exception to show up in the log
traceback.print_exc()
# also, we want to report all kinds of issues to GlitchTip
sentry_sdk.capture_exception()
# make sure all control flow paths return a string reply!
return "250 Message accepted for delivery"
|
[
"email.parser.Parser",
"sentry_sdk.capture_exception",
"traceback.print_exc"
] |
[((2175, 2183), 'email.parser.Parser', 'Parser', ([], {}), '()\n', (2181, 2183), False, 'from email.parser import Parser\n'), ((3155, 3176), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (3174, 3176), False, 'import traceback\n'), ((3261, 3291), 'sentry_sdk.capture_exception', 'sentry_sdk.capture_exception', ([], {}), '()\n', (3289, 3291), False, 'import sentry_sdk\n')]
|
import unittest
from src.dataset.reader.csv_reader import CSVDatasetReader
from src.dataset.reader.dataset_reader_errors import InvalidFileContentError
class CSVReaderTest(unittest.TestCase):
def test_empty_path(self):
try:
reader = CSVDatasetReader()
_ = reader.read('')
self.fail('Empty path should not be accepted')
except ValueError:
pass
def test_invalid_path(self):
try:
reader = CSVDatasetReader()
_ = reader.read('./random/dataset.csv')
self.fail('Invalid path should not be accepted')
except ValueError:
pass
def test_valid_path(self):
try:
reader = CSVDatasetReader()
_ = reader.read('../../resource/testSet.csv')
except ValueError:
self.fail('Valid path should be accepted')
def test_invalid_file_extension(self):
try:
reader = CSVDatasetReader()
_ = reader.read('./random/dataset.txt')
self.fail('Invalid file extension should not be accepted')
except ValueError:
pass
def test_valid_file_extension(self):
try:
reader = CSVDatasetReader()
_ = reader.read('../../resource/testSet.csv')
except ValueError:
self.fail('Valid file extension should be accepted')
def test_invalid_content(self):
try:
reader = CSVDatasetReader()
_ = reader.read('../../resource/invalidTestSet.csv')
self.fail('Invalid dataset should not be accepted')
except InvalidFileContentError:
pass
def test_empty_content(self):
try:
reader = CSVDatasetReader()
_ = reader.read('../../resource/emptyFile.csv')
self.fail('Invalid dataset should not be accepted')
except InvalidFileContentError:
pass
def test_valid_content(self):
try:
reader = CSVDatasetReader()
dataset = reader.read('../../resource/testSet.csv')
self.assertEqual('TestSet1', dataset.title)
self.assertEqual(120, dataset.total_packages)
self.assertEqual(6, dataset.total_stations)
self.assertEqual(15, dataset.width)
self.assertEqual(15, dataset.height)
except Exception as e:
self.fail(e)
|
[
"src.dataset.reader.csv_reader.CSVDatasetReader"
] |
[((260, 278), 'src.dataset.reader.csv_reader.CSVDatasetReader', 'CSVDatasetReader', ([], {}), '()\n', (276, 278), False, 'from src.dataset.reader.csv_reader import CSVDatasetReader\n'), ((482, 500), 'src.dataset.reader.csv_reader.CSVDatasetReader', 'CSVDatasetReader', ([], {}), '()\n', (498, 500), False, 'from src.dataset.reader.csv_reader import CSVDatasetReader\n'), ((724, 742), 'src.dataset.reader.csv_reader.CSVDatasetReader', 'CSVDatasetReader', ([], {}), '()\n', (740, 742), False, 'from src.dataset.reader.csv_reader import CSVDatasetReader\n'), ((961, 979), 'src.dataset.reader.csv_reader.CSVDatasetReader', 'CSVDatasetReader', ([], {}), '()\n', (977, 979), False, 'from src.dataset.reader.csv_reader import CSVDatasetReader\n'), ((1223, 1241), 'src.dataset.reader.csv_reader.CSVDatasetReader', 'CSVDatasetReader', ([], {}), '()\n', (1239, 1241), False, 'from src.dataset.reader.csv_reader import CSVDatasetReader\n'), ((1463, 1481), 'src.dataset.reader.csv_reader.CSVDatasetReader', 'CSVDatasetReader', ([], {}), '()\n', (1479, 1481), False, 'from src.dataset.reader.csv_reader import CSVDatasetReader\n'), ((1737, 1755), 'src.dataset.reader.csv_reader.CSVDatasetReader', 'CSVDatasetReader', ([], {}), '()\n', (1753, 1755), False, 'from src.dataset.reader.csv_reader import CSVDatasetReader\n'), ((2006, 2024), 'src.dataset.reader.csv_reader.CSVDatasetReader', 'CSVDatasetReader', ([], {}), '()\n', (2022, 2024), False, 'from src.dataset.reader.csv_reader import CSVDatasetReader\n')]
|
# -*- coding: utf-8 -*-
# ==============================================================================
# MIT License
#
# Copyright (c) 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ==============================================================================
import hashlib
from typing import Optional
import ecdsa
from dimp import Dictionary
from dimp import AsymmetricKey, PublicKey, PrivateKey
class ECCPublicKey(Dictionary, PublicKey):
""" ECC Public Key """
def __init__(self, key: dict):
super().__init__(key)
# data in 'PEM' format
data = key['data']
data_len = len(data)
if data_len == 130 or data_len == 128:
data = bytes.fromhex(data)
key = ecdsa.VerifyingKey.from_string(data, curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)
else:
key = ecdsa.VerifyingKey.from_pem(data, hashfunc=hashlib.sha256)
self.__key = key
self.__data = key.to_string(encoding='uncompressed')
@property
def data(self) -> bytes:
return self.__data
@property
def size(self) -> int:
return self.bits >> 3
@property
def bits(self) -> int:
bits = self.get('sizeInBits')
if bits is None:
return 256 # ECC-256
else:
return int(bits)
def verify(self, data: bytes, signature: bytes) -> bool:
try:
return self.__key.verify(signature=signature, data=data,
hashfunc=hashlib.sha256, sigdecode=ecdsa.util.sigdecode_der)
except ecdsa.BadSignatureError:
return False
class ECCPrivateKey(Dictionary, PrivateKey):
""" ECC Private Key """
def __init__(self, key: Optional[dict] = None):
if key is None:
key = {'algorithm': AsymmetricKey.ECC}
super().__init__(key)
# data in 'PEM' format
data = key.get('data')
if data is None or len(data) == 0:
# generate private key data
key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)
data = key.to_string()
# store private key in PKCS#8 format
pem = key.to_pem(format='pkcs8').decode('utf-8')
# pem = data.hex()
self.__key = key
self.__data = data
self['data'] = pem
self['curve'] = 'SECP256k1'
self['digest'] = 'SHA256'
else:
if len(data) == 64:
data = bytes.fromhex(data)
key = ecdsa.SigningKey.from_string(data, curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)
else:
key = ecdsa.SigningKey.from_pem(data, hashfunc=hashlib.sha256)
self.__key = key
self.__data = key.to_string()
@property
def data(self) -> bytes:
return self.__data
@property
def size(self) -> int:
return self.bits >> 3
@property
def bits(self) -> int:
bits = self.get('sizeInBits')
if bits is None:
return 256 # ECC-256
else:
return int(bits)
@property
    def public_key(self) -> PublicKey:
key = self.__key.get_verifying_key()
# store public key in X.509 format
pem = key.to_pem().decode('utf-8')
# pem = key.to_string(encoding='uncompressed').hex()
info = {
'algorithm': PublicKey.ECC,
'data': pem,
'curve': 'SECP256k1',
'digest': 'SHA256'
}
return ECCPublicKey(info)
def sign(self, data: bytes) -> bytes:
return self.__key.sign(data=data, hashfunc=hashlib.sha256, sigencode=ecdsa.util.sigencode_der)
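# Round-trip sketch (assumes only the classes above; not part of the
# original API docs):
#   sk = ECCPrivateKey()              # fresh SECP256k1 key pair
#   pk = sk.public_key                # PEM-backed ECCPublicKey
#   signature = sk.sign(b'hello')    # DER-encoded ECDSA signature
#   assert pk.verify(b'hello', signature)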
|
[
"ecdsa.SigningKey.from_pem",
"ecdsa.SigningKey.generate",
"ecdsa.VerifyingKey.from_string",
"ecdsa.SigningKey.from_string",
"ecdsa.VerifyingKey.from_pem"
] |
[((1764, 1853), 'ecdsa.VerifyingKey.from_string', 'ecdsa.VerifyingKey.from_string', (['data'], {'curve': 'ecdsa.SECP256k1', 'hashfunc': 'hashlib.sha256'}), '(data, curve=ecdsa.SECP256k1, hashfunc=\n hashlib.sha256)\n', (1794, 1853), False, 'import ecdsa\n'), ((1881, 1939), 'ecdsa.VerifyingKey.from_pem', 'ecdsa.VerifyingKey.from_pem', (['data'], {'hashfunc': 'hashlib.sha256'}), '(data, hashfunc=hashlib.sha256)\n', (1908, 1939), False, 'import ecdsa\n'), ((3054, 3127), 'ecdsa.SigningKey.generate', 'ecdsa.SigningKey.generate', ([], {'curve': 'ecdsa.SECP256k1', 'hashfunc': 'hashlib.sha256'}), '(curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)\n', (3079, 3127), False, 'import ecdsa\n'), ((3584, 3671), 'ecdsa.SigningKey.from_string', 'ecdsa.SigningKey.from_string', (['data'], {'curve': 'ecdsa.SECP256k1', 'hashfunc': 'hashlib.sha256'}), '(data, curve=ecdsa.SECP256k1, hashfunc=hashlib.\n sha256)\n', (3612, 3671), False, 'import ecdsa\n'), ((3707, 3763), 'ecdsa.SigningKey.from_pem', 'ecdsa.SigningKey.from_pem', (['data'], {'hashfunc': 'hashlib.sha256'}), '(data, hashfunc=hashlib.sha256)\n', (3732, 3763), False, 'import ecdsa\n')]
|
# -*- coding: utf-8 -*-
"""Preview
Code for 'Inf-Net: Automatic COVID-19 Lung Infection Segmentation from CT Scans'
Submitted to Transactions on Medical Imaging, 2020.
First Version: Created on 2020-05-13 (@author: <NAME>)
"""
# ---- base lib -----
import os
import argparse
from datetime import datetime
import cv2
import numpy as np
import random
import shutil
from scipy import misc
# ---- torch lib ----
import torch
from torch.autograd import Variable
import torch.nn.functional as F
# ---- custom lib ----
# NOTES: Here we only provide Res2Net, you can also replace it with other backbones
from Code.model_lung_infection.InfNet_Res2Net import Inf_Net as Network
from Code.utils.dataloader_LungInf import get_loader, test_dataset
from Code.utils.utils import clip_gradient, adjust_lr, AvgMeter
from Code.utils.format_conversion import binary2edge
def joint_loss(pred, mask):
weit = 1 + 5*torch.abs(F.avg_pool2d(mask, kernel_size=31, stride=1, padding=15) - mask)
    wbce = F.binary_cross_entropy_with_logits(pred, mask, reduction='none')
wbce = (weit*wbce).sum(dim=(2, 3)) / weit.sum(dim=(2, 3))
pred = torch.sigmoid(pred)
inter = ((pred * mask)*weit).sum(dim=(2, 3))
union = ((pred + mask)*weit).sum(dim=(2, 3))
wiou = 1 - (inter + 1)/(union - inter+1)
return (wbce + wiou).mean()
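# In symbols (a sketch of the above): with w = 1 + 5*|avgpool_31(m) - m| and
# p = sigmoid(pred), the loss is the batch mean of
#   sum(w*BCE)/sum(w) + 1 - (sum(w*p*m) + 1)/(sum(w*(p+m)) - sum(w*p*m) + 1),
# i.e. a pixel-weighted BCE plus a weighted IoU term, with extra weight near
# mask boundaries (where the local average differs most from m).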
def trainer(train_loader, model, optimizer, epoch, opt, total_step):
model.train()
# ---- multi-scale training ----
    size_rates = [0.75, 1, 1.25]  # replace with your desired scales
loss_record1, loss_record2, loss_record3, loss_record4, loss_record5 = AvgMeter(), AvgMeter(), AvgMeter(), AvgMeter(), AvgMeter()
for i, pack in enumerate(train_loader, start=1):
for rate in size_rates:
optimizer.zero_grad()
# ---- data prepare ----
images, gts, edges = pack
images = Variable(images).cuda()
gts = Variable(gts).cuda()
edges = Variable(edges).cuda()
# ---- rescale ----
trainsize = int(round(opt.trainsize*rate/32)*32)
if rate != 1:
images = F.upsample(images, size=(trainsize, trainsize), mode='bilinear', align_corners=True)
gts = F.upsample(gts, size=(trainsize, trainsize), mode='bilinear', align_corners=True)
edges = F.upsample(edges, size=(trainsize, trainsize), mode='bilinear', align_corners=True)
# ---- forward ----
lateral_map_5, lateral_map_4, lateral_map_3, lateral_map_2, lateral_edge = model(images)
# ---- loss function ----
loss5 = joint_loss(lateral_map_5, gts)
loss4 = joint_loss(lateral_map_4, gts)
loss3 = joint_loss(lateral_map_3, gts)
loss2 = joint_loss(lateral_map_2, gts)
loss1 = torch.nn.BCEWithLogitsLoss()(lateral_edge, edges)
loss = loss1 + loss2 + loss3 + loss4 + loss5
# ---- backward ----
loss.backward()
clip_gradient(optimizer, opt.clip)
optimizer.step()
# ---- recording loss ----
if rate == 1:
loss_record1.update(loss1.data, opt.batchsize)
loss_record2.update(loss2.data, opt.batchsize)
loss_record3.update(loss3.data, opt.batchsize)
loss_record4.update(loss4.data, opt.batchsize)
loss_record5.update(loss5.data, opt.batchsize)
# ---- train visualization ----
if i % 5 == 0 or i == total_step:
print('{} Epoch [{:03d}/{:03d}], Step [{:04d}/{:04d}], [lateral-edge: {:.4f}, '
'lateral-2: {:.4f}, lateral-3: {:0.4f}, lateral-4: {:0.4f}, lateral-5: {:0.4f}]'.
format(datetime.now(), epoch, opt.epoch, i, total_step, loss_record1.show(),
loss_record2.show(), loss_record3.show(), loss_record4.show(), loss_record5.show()))
# ---- save model_lung_infection ----
save_path = 'Snapshots/{}/'.format(opt.train_save)
os.makedirs(save_path, exist_ok=True)
if (epoch+1) % 10 == 0:
torch.save(model.state_dict(), save_path + 'Semi-Inf-Net-%d.pth' % (epoch+1))
print('[Saving Snapshot:]', save_path + 'Semi-Inf-Net-%d.pth' % (epoch+1))
def train_module(_train_path, _train_save, _resume_snapshot):
parser = argparse.ArgumentParser()
parser.add_argument('--epoch', type=int, default=10, help='epoch number')
parser.add_argument('--lr', type=float, default=3e-4, help='learning rate')
parser.add_argument('--batchsize', type=int, default=16, help='training batch size')
parser.add_argument('--trainsize', type=int, default=352, help='training dataset size')
parser.add_argument('--clip', type=float, default=0.5, help='gradient clipping margin')
parser.add_argument('--decay_rate', type=float, default=0.1, help='decay rate of learning rate')
parser.add_argument('--decay_epoch', type=int, default=50, help='every n epochs decay learning rate')
parser.add_argument('--train_path', type=str, default=_train_path)
parser.add_argument('--train_save', type=str, default=_train_save)
parser.add_argument('--resume_snapshot', type=str, default=_resume_snapshot)
opt = parser.parse_args()
# ---- build models ----
torch.cuda.set_device(0)
model = Network(channel=32, n_class=1).cuda()
model.load_state_dict(torch.load(opt.resume_snapshot))
params = model.parameters()
optimizer = torch.optim.Adam(params, opt.lr)
image_root = '{}/Imgs/'.format(opt.train_path)
gt_root = '{}/GT/'.format(opt.train_path)
edge_root = '{}/Edge/'.format(opt.train_path)
train_loader = get_loader(image_root, gt_root, edge_root, batchsize=opt.batchsize, trainsize=opt.trainsize)
total_step = len(train_loader)
print("#"*20, "Start Training", "#"*20)
for epoch in range(1, opt.epoch):
adjust_lr(optimizer, opt.lr, epoch, opt.decay_rate, opt.decay_epoch)
trainer(train_loader=train_loader, model=model, optimizer=optimizer,
epoch=epoch, opt=opt, total_step=total_step)
def inference_module(_data_path, _save_path, _pth_path):
model = Network(channel=32, n_class=1)
model.load_state_dict(torch.load(_pth_path))
model.cuda()
model.eval()
os.makedirs(_save_path, exist_ok=True)
# FIXME
image_root = '{}/'.format(_data_path)
# gt_root = '{}/mask/'.format(data_path)
test_loader = test_dataset(image_root, image_root, 352)
for i in range(test_loader.size):
image, name = test_loader.load_data()
#gt = np.asarray(gt, np.float32)
#gt /= (gt.max() + 1e-8)
image = image.cuda()
lateral_map_5, lateral_map_4, lateral_map_3, lateral_map_2, lateral_edge = model(image)
res = lateral_map_2 # final segmentation
#res = F.upsample(res, size=gt.shape, mode='bilinear', align_corners=False)
res = res.sigmoid().data.cpu().numpy().squeeze()
res = (res - res.min()) / (res.max() - res.min() + 1e-8)
misc.imsave(_save_path + '/' + name, res)
def movefiles(_src_dir, _dst_dir):
os.makedirs(_dst_dir, exist_ok=True)
for file_name in os.listdir(_src_dir):
shutil.copyfile(os.path.join(_src_dir, file_name),
os.path.join(_dst_dir, file_name))
if __name__ == '__main__':
slices = './Dataset/TrainingSet/LungInfection-Train/Pseudo-label/DataPrepare'
slices_dir = slices + '/Imgs_split'
slices_pred_seg_dir = slices + '/pred_seg_split'
slices_pred_edge_dir = slices + '/pred_edge_split'
# NOTES: Hybrid-label = Doctor-label + Pseudo-label
semi = './Dataset/TrainingSet/LungInfection-Train/Pseudo-label/DataPrepare/Hybrid-label'
semi_img = semi + '/Imgs'
semi_mask = semi + '/GT'
semi_edge = semi + '/Edge'
if (not os.path.exists(semi_img)) or (len(os.listdir(semi_img)) != 50):
shutil.copytree('Dataset/TrainingSet/LungInfection-Train/Doctor-label/Imgs',
semi_img)
shutil.copytree('Dataset/TrainingSet/LungInfection-Train/Doctor-label/GT',
semi_mask)
shutil.copytree('Dataset/TrainingSet/LungInfection-Train/Doctor-label/Edge',
semi_edge)
print('Copy done')
else:
print('Check done')
slices_lst = os.listdir(slices_dir)
random.shuffle(slices_lst)
print("#" * 20, "\nStart Training (Inf-Net)\nThis code is written for 'Inf-Net: Automatic COVID-19 Lung "
"Infection Segmentation from CT Scans', 2020, arXiv.\n"
"----\nPlease cite the paper if you use this code and dataset. "
"And any questions feel free to contact me "
"via E-mail (<EMAIL>)\n----\n", "#" * 20)
for i, split_name in enumerate(slices_lst):
print('\n[INFO] {} ({}/320)'.format(split_name, i))
# ---- inference ----
test_aux_dir = os.path.join(slices_dir, split_name)
test_aux_save_dir = os.path.join(slices_pred_seg_dir, split_name)
if i == 0:
snapshot_dir = './Snapshots/save_weights/Inf-Net/Inf-Net-100.pth'
else:
snapshot_dir = './Snapshots/semi_training/Semi-Inf-Net_{}/Semi-Inf-Net-10.pth'.format(i-1)
inference_module(_data_path=test_aux_dir, _save_path=test_aux_save_dir, _pth_path=snapshot_dir)
os.makedirs(os.path.join(slices_pred_edge_dir, split_name), exist_ok=True)
for pred_name in os.listdir(test_aux_save_dir):
edge_tmp = binary2edge(os.path.join(test_aux_save_dir, pred_name))
cv2.imwrite(os.path.join(slices_pred_edge_dir, split_name, pred_name), edge_tmp)
# ---- move generation ----
movefiles(test_aux_dir, semi_img)
movefiles(test_aux_save_dir, semi_mask)
movefiles(os.path.join(slices_pred_edge_dir, split_name), semi_edge)
# ---- training ----
train_module(_train_path=semi,
_train_save='semi_training/Semi-Inf-Net_{}'.format(i),
_resume_snapshot=snapshot_dir)
# move img/pseudo-label into `./Dataset/TrainingSet/LungInfection-Train/Pseudo-label`
shutil.copytree(semi_img, './Dataset/TrainingSet/LungInfection-Train/Pseudo-label/Imgs')
shutil.copytree(semi_mask, './Dataset/TrainingSet/LungInfection-Train/Pseudo-label/GT')
shutil.copytree(semi_edge, 'Dataset/TrainingSet/LungInfection-Train/Pseudo-label/Edge')
print('Pseudo Label Generated!')
|
[
"argparse.ArgumentParser",
"random.shuffle",
"scipy.misc.imsave",
"os.path.join",
"Code.utils.utils.AvgMeter",
"Code.model_lung_infection.InfNet_Res2Net.Inf_Net",
"torch.nn.functional.avg_pool2d",
"torch.load",
"os.path.exists",
"torch.nn.functional.binary_cross_entropy_with_logits",
"Code.utils.utils.adjust_lr",
"torch.cuda.set_device",
"datetime.datetime.now",
"torch.nn.BCEWithLogitsLoss",
"torch.autograd.Variable",
"torch.nn.functional.upsample",
"torch.optim.Adam",
"os.listdir",
"Code.utils.dataloader_LungInf.get_loader",
"Code.utils.utils.clip_gradient",
"os.makedirs",
"torch.sigmoid",
"shutil.copytree",
"Code.utils.dataloader_LungInf.test_dataset"
] |
[((984, 1045), 'torch.nn.functional.binary_cross_entropy_with_logits', 'F.binary_cross_entropy_with_logits', (['pred', 'mask'], {'reduce': '"""none"""'}), "(pred, mask, reduce='none')\n", (1018, 1045), True, 'import torch.nn.functional as F\n'), ((1120, 1139), 'torch.sigmoid', 'torch.sigmoid', (['pred'], {}), '(pred)\n', (1133, 1139), False, 'import torch\n'), ((4002, 4039), 'os.makedirs', 'os.makedirs', (['save_path'], {'exist_ok': '(True)'}), '(save_path, exist_ok=True)\n', (4013, 4039), False, 'import os\n'), ((4315, 4340), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4338, 4340), False, 'import argparse\n'), ((5266, 5290), 'torch.cuda.set_device', 'torch.cuda.set_device', (['(0)'], {}), '(0)\n', (5287, 5290), False, 'import torch\n'), ((5450, 5482), 'torch.optim.Adam', 'torch.optim.Adam', (['params', 'opt.lr'], {}), '(params, opt.lr)\n', (5466, 5482), False, 'import torch\n'), ((5651, 5747), 'Code.utils.dataloader_LungInf.get_loader', 'get_loader', (['image_root', 'gt_root', 'edge_root'], {'batchsize': 'opt.batchsize', 'trainsize': 'opt.trainsize'}), '(image_root, gt_root, edge_root, batchsize=opt.batchsize,\n    trainsize=opt.trainsize)\n', (5661, 5747), False, 'from Code.utils.dataloader_LungInf import get_loader, test_dataset\n'), ((6149, 6179), 'Code.model_lung_infection.InfNet_Res2Net.Inf_Net', 'Network', ([], {'channel': '(32)', 'n_class': '(1)'}), '(channel=32, n_class=1)\n', (6156, 6179), True, 'from Code.model_lung_infection.InfNet_Res2Net import Inf_Net as Network\n'), ((6268, 6306), 'os.makedirs', 'os.makedirs', (['_save_path'], {'exist_ok': '(True)'}), '(_save_path, exist_ok=True)\n', (6279, 6306), False, 'import os\n'), ((6424, 6465), 'Code.utils.dataloader_LungInf.test_dataset', 'test_dataset', (['image_root', 'image_root', '(352)'], {}), '(image_root, image_root, 352)\n', (6436, 6465), False, 'from Code.utils.dataloader_LungInf import get_loader, test_dataset\n'), ((7099, 7135), 'os.makedirs', 'os.makedirs', (['_dst_dir'], {'exist_ok': '(True)'}), '(_dst_dir, exist_ok=True)\n', (7110, 7135), False, 'import os\n'), ((7157, 7177), 'os.listdir', 'os.listdir', (['_src_dir'], {}), '(_src_dir)\n', (7167, 7177), False, 'import os\n'), ((8313, 8335), 'os.listdir', 'os.listdir', (['slices_dir'], {}), '(slices_dir)\n', (8323, 8335), False, 'import os\n'), ((8340, 8366), 'random.shuffle', 'random.shuffle', (['slices_lst'], {}), '(slices_lst)\n', (8354, 8366), False, 'import random\n'), ((10166, 10258), 'shutil.copytree', 'shutil.copytree', (['semi_img', '"""./Dataset/TrainingSet/LungInfection-Train/Pseudo-label/Imgs"""'], {}), "(semi_img,\n    './Dataset/TrainingSet/LungInfection-Train/Pseudo-label/Imgs')\n", (10181, 10258), False, 'import shutil\n'), ((10259, 10350), 'shutil.copytree', 'shutil.copytree', (['semi_mask', '"""./Dataset/TrainingSet/LungInfection-Train/Pseudo-label/GT"""'], {}), "(semi_mask,\n    './Dataset/TrainingSet/LungInfection-Train/Pseudo-label/GT')\n", (10274, 10350), False, 'import shutil\n'), ((10351, 10442), 'shutil.copytree', 'shutil.copytree', (['semi_edge', '"""Dataset/TrainingSet/LungInfection-Train/Pseudo-label/Edge"""'], {}), "(semi_edge,\n    'Dataset/TrainingSet/LungInfection-Train/Pseudo-label/Edge')\n", (10366, 10442), False, 'import shutil\n'), ((1581, 1591), 'Code.utils.utils.AvgMeter', 'AvgMeter', ([], {}), '()\n', (1589, 1591), False, 'from Code.utils.utils import clip_gradient, adjust_lr, AvgMeter\n'), ((1593, 1603), 'Code.utils.utils.AvgMeter', 'AvgMeter', ([], {}), '()\n', (1601, 1603), False, 'from Code.utils.utils import clip_gradient, adjust_lr, AvgMeter\n'), ((1605, 1615), 'Code.utils.utils.AvgMeter', 'AvgMeter', ([], {}), '()\n', (1613, 1615), False, 'from Code.utils.utils import clip_gradient, adjust_lr, AvgMeter\n'), ((1617, 1627), 'Code.utils.utils.AvgMeter', 'AvgMeter', ([], {}), '()\n', (1625, 1627), False, 'from Code.utils.utils import clip_gradient, adjust_lr, AvgMeter\n'), ((1629, 1639), 'Code.utils.utils.AvgMeter', 'AvgMeter', ([], {}), '()\n', (1637, 1639), False, 'from Code.utils.utils import clip_gradient, adjust_lr, AvgMeter\n'), ((5368, 5399), 'torch.load', 'torch.load', (['opt.resume_snapshot'], {}), '(opt.resume_snapshot)\n', (5378, 5399), False, 'import torch\n'), ((5871, 5939), 'Code.utils.utils.adjust_lr', 'adjust_lr', (['optimizer', 'opt.lr', 'epoch', 'opt.decay_rate', 'opt.decay_epoch'], {}), '(optimizer, opt.lr, epoch, opt.decay_rate, opt.decay_epoch)\n', (5880, 5939), False, 'from Code.utils.utils import clip_gradient, adjust_lr, AvgMeter\n'), ((6206, 6227), 'torch.load', 'torch.load', (['_pth_path'], {}), '(_pth_path)\n', (6216, 6227), False, 'import torch\n'), ((7016, 7057), 'scipy.misc.imsave', 'misc.imsave', (["(_save_path + '/' + name)", 'res'], {}), "(_save_path + '/' + name, res)\n", (7027, 7057), False, 'from scipy import misc\n'), ((7881, 7971), 'shutil.copytree', 'shutil.copytree', (['"""Dataset/TrainingSet/LungInfection-Train/Doctor-label/Imgs"""', 'semi_img'], {}), "('Dataset/TrainingSet/LungInfection-Train/Doctor-label/Imgs',\n    semi_img)\n", (7896, 7971), False, 'import shutil\n'), ((8000, 8089), 'shutil.copytree', 'shutil.copytree', (['"""Dataset/TrainingSet/LungInfection-Train/Doctor-label/GT"""', 'semi_mask'], {}), "('Dataset/TrainingSet/LungInfection-Train/Doctor-label/GT',\n    semi_mask)\n", (8015, 8089), False, 'import shutil\n'), ((8118, 8209), 'shutil.copytree', 'shutil.copytree', (['"""Dataset/TrainingSet/LungInfection-Train/Doctor-label/Edge"""', 'semi_edge'], {}), "('Dataset/TrainingSet/LungInfection-Train/Doctor-label/Edge',\n    semi_edge)\n", (8133, 8209), False, 'import shutil\n'), ((8928, 8964), 'os.path.join', 'os.path.join', (['slices_dir', 'split_name'], {}), '(slices_dir, split_name)\n', (8940, 8964), False, 'import os\n'), ((8993, 9038), 'os.path.join', 'os.path.join', (['slices_pred_seg_dir', 'split_name'], {}), '(slices_pred_seg_dir, split_name)\n', (9005, 9038), False, 'import os\n'), ((9467, 9496), 'os.listdir', 'os.listdir', (['test_aux_save_dir'], {}), '(test_aux_save_dir)\n', (9477, 9496), False, 'import os\n'), ((2978, 3012), 'Code.utils.utils.clip_gradient', 'clip_gradient', (['optimizer', 'opt.clip'], {}), '(optimizer, opt.clip)\n', (2991, 3012), False, 'from Code.utils.utils import clip_gradient, adjust_lr, AvgMeter\n'), ((5303, 5333), 'Code.model_lung_infection.InfNet_Res2Net.Inf_Net', 'Network', ([], {'channel': '(32)', 'n_class': '(1)'}), '(channel=32, n_class=1)\n', (5310, 5333), True, 'from Code.model_lung_infection.InfNet_Res2Net import Inf_Net as Network\n'), ((7203, 7236), 'os.path.join', 'os.path.join', (['_src_dir', 'file_name'], {}), '(_src_dir, file_name)\n', (7215, 7236), False, 'import os\n'), ((7262, 7295), 'os.path.join', 'os.path.join', (['_dst_dir', 'file_name'], {}), '(_dst_dir, file_name)\n', (7274, 7295), False, 'import os\n'), ((7809, 7833), 'os.path.exists', 'os.path.exists', (['semi_img'], {}), '(semi_img)\n', (7823, 7833), False, 'import os\n'), ((9379, 9425), 'os.path.join', 'os.path.join', (['slices_pred_edge_dir', 'split_name'], {}), '(slices_pred_edge_dir, split_name)\n', (9391, 9425), False, 'import os\n'), ((9815, 9861), 'os.path.join', 'os.path.join', (['slices_pred_edge_dir', 'split_name'], {}), '(slices_pred_edge_dir, split_name)\n', (9827, 9861), False, 'import os\n'), ((2105, 2193), 'torch.nn.functional.upsample', 'F.upsample', (['images'], {'size': '(trainsize, trainsize)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(images, size=(trainsize, trainsize), mode='bilinear',\n    align_corners=True)\n", (2115, 2193), True, 'import torch.nn.functional as F\n'), ((2212, 2298), 'torch.nn.functional.upsample', 'F.upsample', (['gts'], {'size': '(trainsize, trainsize)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(gts, size=(trainsize, trainsize), mode='bilinear', align_corners\n    =True)\n", (2222, 2298), True, 'import torch.nn.functional as F\n'), ((2318, 2405), 'torch.nn.functional.upsample', 'F.upsample', (['edges'], {'size': '(trainsize, trainsize)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(edges, size=(trainsize, trainsize), mode='bilinear',\n    align_corners=True)\n", (2328, 2405), True, 'import torch.nn.functional as F\n'), ((2798, 2826), 'torch.nn.BCEWithLogitsLoss', 'torch.nn.BCEWithLogitsLoss', ([], {}), '()\n', (2824, 2826), False, 'import torch\n'), ((7843, 7863), 'os.listdir', 'os.listdir', (['semi_img'], {}), '(semi_img)\n', (7853, 7863), False, 'import os\n'), ((9533, 9575), 'os.path.join', 'os.path.join', (['test_aux_save_dir', 'pred_name'], {}), '(test_aux_save_dir, pred_name)\n', (9545, 9575), False, 'import os\n'), ((9601, 9658), 'os.path.join', 'os.path.join', (['slices_pred_edge_dir', 'split_name', 'pred_name'], {}), '(slices_pred_edge_dir, split_name, pred_name)\n', (9613, 9658), False, 'import os\n'), ((908, 964), 'torch.nn.functional.avg_pool2d', 'F.avg_pool2d', (['mask'], {'kernel_size': '(31)', 'stride': '(1)', 'padding': '(15)'}), '(mask, kernel_size=31, stride=1, padding=15)\n', (920, 964), True, 'import torch.nn.functional as F\n'), ((1855, 1871), 'torch.autograd.Variable', 'Variable', (['images'], {}), '(images)\n', (1863, 1871), False, 'from torch.autograd import Variable\n'), ((1897, 1910), 'torch.autograd.Variable', 'Variable', (['gts'], {}), '(gts)\n', (1905, 1910), False, 'from torch.autograd import Variable\n'), ((1938, 1953), 'torch.autograd.Variable', 'Variable', (['edges'], {}), '(edges)\n', (1946, 1953), False, 'from torch.autograd import Variable\n'), ((3721, 3735), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3733, 3735), False, 'from datetime import datetime\n')]
|
# Copyright (C) 2020-2021 by <EMAIL>, < https://github.com/DevsExpo >.
#
# This file is part of < https://github.com/DevsExpo/FridayUserBot > project,
# and is released under the "GNU v3.0 License Agreement".
# Please see < https://github.com/DevsExpo/blob/master/LICENSE >
#
# All rights reserved.
from pyrogram import filters
from innexiaBot.pyrogramee.pluginshelper import admins_only, get_text
from innexiaBot import pbot
@pbot.on_message(filters.command("tagall") & ~filters.edited & ~filters.bot)
@admins_only
async def tagall(client, message):
await message.reply("`Processing.....`")
sh = get_text(message)
if not sh:
sh = "Hi!"
mentions = ""
async for member in client.iter_chat_members(message.chat.id):
mentions += member.user.mention + " "
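    # Telegram caps a single message at 4096 characters, so the mention
    # string is split into chunks of that size below.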
n = 4096
kk = [mentions[i : i + n] for i in range(0, len(mentions), n)]
for i in kk:
j = f"<b>{sh}</b> \n{i}"
await client.send_message(message.chat.id, j, parse_mode="html")
__mod_name__ = "Tagall"
__help__ = """
- /tagall : Tag everyone in a chat
"""
|
[
"innexiaBot.pyrogramee.pluginshelper.get_text",
"pyrogram.filters.command"
] |
[((610, 627), 'innexiaBot.pyrogramee.pluginshelper.get_text', 'get_text', (['message'], {}), '(message)\n', (618, 627), False, 'from innexiaBot.pyrogramee.pluginshelper import admins_only, get_text\n'), ((448, 473), 'pyrogram.filters.command', 'filters.command', (['"""tagall"""'], {}), "('tagall')\n", (463, 473), False, 'from pyrogram import filters\n')]
|
from modules.trainer.configs import TrainingConfigs
from modules.trainer.train_model import train_model
from tsne import tsne_visualizer
import matplotlib.pyplot as plt
from sys import argv
def create_training_cfg() -> TrainingConfigs:
conf = TrainingConfigs()
# conf.data_sets = ['20ng', 'R8', 'R52', 'ohsumed', 'mr', 'cora', 'citeseer', 'pubmed']
conf.data_sets = ['R8']
conf.corpus_split_index_dir = 'data/corpus.shuffled/split_index/'
conf.corpus_node_features_dir = 'data/corpus.shuffled/node_features/'
conf.corpus_adjacency_dir = ''
conf.corpus_vocab_dir = 'data/corpus.shuffled/vocabulary/'
conf.adjacency_sets = ['frequency', 'syntactic_dependency',
'linguistic_inquiry', 'semantic', 'graph']
conf.model = 'gcn'
conf.learning_rate = 0.02
conf.epochs = 200
conf.hidden1 = 200
conf.dropout = 0.5
conf.weight_decay = 0.
conf.early_stopping = 10
conf.chebyshev_max_degree = 3
conf.build()
return conf
def train(ds: str, training_cfg: TrainingConfigs):
# Start training
return train_model(ds_name=ds, is_featureless=True, cfg=training_cfg)
def save_history(hist, representation, dataset):
file_name = f'logs/experiments/{representation}_dataset_{dataset}.txt'
with open(file_name, 'w') as my_file:
my_file.writelines(hist)
def create_training_plot(training_history, name="training_history"):
fig, axes = plt.subplots(2, 1)
axes[0].plot(training_history.epoch, training_history.accuracy, c="blue")
axes[0].set_ylabel("Accuracy", size=20)
axes[0].grid(which="both")
axes[1].plot(training_history.epoch, training_history.val_loss,
c="green", label='Validation')
axes[1].plot(training_history.epoch,
training_history.train_loss, c="red", label='Train')
axes[1].set_ylabel("Loss", size=20)
axes[1].set_xlabel("Epoch", size=20)
axes[1].grid(which="both")
axes[1].legend(fontsize=15)
fig = plt.gcf()
fig.set_size_inches(15, 8)
plt.tight_layout()
plt.savefig(f"{name}.jpg", dpi=200)
def batch_train(rp: str, trn_cfg):
'''
Experiments > Graph Representation > Model Hyperparameter Tuning > Run Step
'''
path = 'data/corpus.shuffled/adjacency/'
if rp == 'frequency':
# Default adjacency
trn_cfg.corpus_adjacency_dir = f'{path}/frequency/'
elif rp == 'semantic':
# Semantic adjacency
trn_cfg.corpus_adjacency_dir = f'{path}/semantic/'
elif rp == 'syntactic_dependency':
# Syntactic adjacency
trn_cfg.corpus_adjacency_dir = f'{path}/syntactic_dependency/'
elif rp == 'linguistic_inquiry':
        # Linguistic inquiry adjacency
trn_cfg.corpus_adjacency_dir = f'{path}/linguistic_inquiry/'
elif rp == 'graph':
# Graph adjacency
trn_cfg.corpus_adjacency_dir = f'{path}/graph/'
for ds in trn_cfg.data_sets:
print('\n\n▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄ ' + ds)
hist = train(ds=ds, training_cfg=trn_cfg)
save_history(hist, rp, ds)
tsne_visualizer(ds, rp)
create_training_plot(hist)
if __name__ == '__main__':
trn_cfg = create_training_cfg()
if len(argv) < 2:
raise Exception(
"Adjacency Representation name cannot be left blank. Must be one of representation:%r." % trn_cfg.adjacency_sets)
rp_name = argv[1]
#print("------ Working with dataset", ds_name, "------\n")
# ORIGINAL_PAPER = {
# "mr": {"avg": 0.7674, "std": 0.0020},
# "Ohsumed": {"avg": 0.6836, "std": 0.0056},
# "R8": {"avg": 0.9707, "std": 0.0010},
# "R52": {"avg": 0.9356, "std": 0.0018}
# }
# print(ORIGINAL_PAPER[ds_name])
batch_train(rp_name, trn_cfg)
print('\nDone!!!')
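# Example shell invocation (hypothetical script name; the representation
# argument must be one of trn_cfg.adjacency_sets):
#   python train_main.py frequency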
|
[
"modules.trainer.configs.TrainingConfigs",
"tsne.tsne_visualizer",
"modules.trainer.train_model.train_model",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.gcf",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.savefig"
] |
[((250, 267), 'modules.trainer.configs.TrainingConfigs', 'TrainingConfigs', ([], {}), '()\n', (265, 267), False, 'from modules.trainer.configs import TrainingConfigs\n'), ((1094, 1156), 'modules.trainer.train_model.train_model', 'train_model', ([], {'ds_name': 'ds', 'is_featureless': '(True)', 'cfg': 'training_cfg'}), '(ds_name=ds, is_featureless=True, cfg=training_cfg)\n', (1105, 1156), False, 'from modules.trainer.train_model import train_model\n'), ((1446, 1464), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(1)'], {}), '(2, 1)\n', (1458, 1464), True, 'import matplotlib.pyplot as plt\n'), ((2000, 2009), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (2007, 2009), True, 'import matplotlib.pyplot as plt\n'), ((2045, 2063), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2061, 2063), True, 'import matplotlib.pyplot as plt\n'), ((2068, 2103), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""{name}.jpg"""'], {'dpi': '(200)'}), "(f'{name}.jpg', dpi=200)\n", (2079, 2103), True, 'import matplotlib.pyplot as plt\n'), ((3123, 3146), 'tsne.tsne_visualizer', 'tsne_visualizer', (['ds', 'rp'], {}), '(ds, rp)\n', (3138, 3146), False, 'from tsne import tsne_visualizer\n')]
|
import sys, json, traceback, datetime, glob, xlrd
from xlrd import open_workbook, cellname, xldate_as_tuple, error_text_from_code
def dump_record(record_type, values):
    print(json.dumps([record_type, values]))
def parse_cell_value(sheet, cell):
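    # Returns a JSON-friendly value: ['date', y, m, d, h, mi, s] for date
    # cells, ['error', text] for error cells, a bool for boolean cells,
    # None for empty cells, and the raw string/number otherwise.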
if cell.ctype == xlrd.XL_CELL_DATE:
year, month, day, hour, minute, second = xldate_as_tuple(cell.value, sheet.book.datemode)
return ['date', year, month, day, hour, minute, second]
elif cell.ctype == xlrd.XL_CELL_ERROR:
return ['error', error_text_from_code[cell.value]]
elif cell.ctype == xlrd.XL_CELL_BOOLEAN:
return False if cell.value == 0 else True
elif cell.ctype == xlrd.XL_CELL_EMPTY:
return None
return cell.value
def dump_sheet(sheet, sheet_index, max_rows):
dump_record("s", {
"index": sheet_index,
"name": sheet.name,
"rows": sheet.nrows,
"columns": sheet.ncols,
"visibility": sheet.visibility
})
    # Clamp to the sheet's actual row count so a --rows value larger than
    # the sheet does not raise an IndexError.
    for rowx in range(min(max_rows, sheet.nrows) if max_rows else sheet.nrows):
for colx in range(sheet.ncols):
cell = sheet.cell(rowx, colx)
dump_record("c", [rowx, colx, cellname(rowx, colx), parse_cell_value(sheet, cell)])
def main(cmd_args):
import optparse
usage = "\n%prog [options] [file1] [file2] ..."
oparser = optparse.OptionParser(usage)
oparser.add_option(
"-m", "--meta",
dest = "iterate_sheets",
action = "store_false",
default = True,
help = "dumps only the workbook record, does not load any worksheet")
oparser.add_option(
"-s", "--sheet",
dest = "sheets",
action = "append",
help = "names of the sheets to load - if omitted, all sheets are loaded")
oparser.add_option(
"-r", "--rows",
dest = "max_rows",
default = None,
action = "store",
type = "int",
help = "maximum number of rows to load")
options, args = oparser.parse_args(cmd_args)
# loop on all input files
for file in args:
try:
wb = open_workbook(filename=file, on_demand=True)
sheet_names = wb.sheet_names()
dump_record("w", {
"file": file,
"sheets": sheet_names,
"user": wb.user_name
})
if options.iterate_sheets:
if options.sheets:
for sheet_to_load in options.sheets:
try:
sheet_name = sheet_to_load
if sheet_to_load.isdigit():
sheet = wb.sheet_by_index(int(sheet_to_load))
sheet_name = sheet.name
else:
sheet = wb.sheet_by_name(sheet_to_load)
dump_sheet(sheet, sheet_names.index(sheet_name), options.max_rows)
wb.unload_sheet(sheet_name)
except:
dump_record("error", {
"id": "load_sheet_failed",
"file": file,
"sheet": sheet_name,
"traceback": traceback.format_exc()
})
else:
for sheet_index in range(len(sheet_names)):
try:
sheet = wb.sheet_by_index(sheet_index)
dump_sheet(sheet, sheet_index, options.max_rows)
wb.unload_sheet(sheet_index)
except:
dump_record("error", {
"id": "load_sheet_failed",
"file": file,
"sheet": sheet_index,
"traceback": traceback.format_exc()
})
except:
dump_record("error", {
"id": "open_workbook_failed",
"file": file,
"traceback": traceback.format_exc()
})
sys.exit()
main(sys.argv[1:])
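# Example invocation (hypothetical script and workbook names):
#   python xls_dump.py --rows 100 book1.xls
# Output is one JSON array per line: ["w", {...}] for the workbook,
# ["s", {...}] per sheet, ["c", [row, col, "A1", value]] per cell, and
# ["error", {...}] on failure.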
|
[
"optparse.OptionParser",
"xlrd.open_workbook",
"json.dumps",
"xlrd.cellname",
"traceback.format_exc",
"xlrd.xldate_as_tuple",
"sys.exit"
] |
[((1223, 1251), 'optparse.OptionParser', 'optparse.OptionParser', (['usage'], {}), '(usage)\n', (1244, 1251), False, 'import optparse\n'), ((3478, 3488), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3486, 3488), False, 'import sys, json, traceback, datetime, glob, xlrd\n'), ((177, 210), 'json.dumps', 'json.dumps', (['[record_type, values]'], {}), '([record_type, values])\n', (187, 210), False, 'import sys, json, traceback, datetime, glob, xlrd\n'), ((332, 380), 'xlrd.xldate_as_tuple', 'xldate_as_tuple', (['cell.value', 'sheet.book.datemode'], {}), '(cell.value, sheet.book.datemode)\n', (347, 380), False, 'from xlrd import open_workbook, cellname, xldate_as_tuple, error_text_from_code\n'), ((1896, 1940), 'xlrd.open_workbook', 'open_workbook', ([], {'filename': 'file', 'on_demand': '(True)'}), '(filename=file, on_demand=True)\n', (1909, 1940), False, 'from xlrd import open_workbook, cellname, xldate_as_tuple, error_text_from_code\n'), ((1068, 1088), 'xlrd.cellname', 'cellname', (['rowx', 'colx'], {}), '(rowx, colx)\n', (1076, 1088), False, 'from xlrd import open_workbook, cellname, xldate_as_tuple, error_text_from_code\n'), ((3443, 3465), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3463, 3465), False, 'import sys, json, traceback, datetime, glob, xlrd\n'), ((2800, 2822), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (2820, 2822), False, 'import sys, json, traceback, datetime, glob, xlrd\n'), ((3281, 3303), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3301, 3303), False, 'import sys, json, traceback, datetime, glob, xlrd\n')]
|
"""
Add user profile columns
Revision ID: 6f86796f64e0
Revises: <KEY>
Create Date: 2016-07-06 11:28:50.075057
"""
from __future__ import unicode_literals
from alembic import op
import sqlalchemy as sa
revision = '6f86796f64e0'
down_revision = '<KEY>'
def upgrade():
op.add_column('user', sa.Column('display_name', sa.UnicodeText()))
op.add_column('user', sa.Column('description', sa.UnicodeText()))
op.add_column('user', sa.Column('location', sa.UnicodeText()))
op.add_column('user', sa.Column('uri', sa.UnicodeText()))
op.add_column('user', sa.Column('orcid', sa.UnicodeText()))
def downgrade():
op.drop_column('user', 'display_name')
op.drop_column('user', 'description')
op.drop_column('user', 'location')
op.drop_column('user', 'uri')
op.drop_column('user', 'orcid')
|
[
"alembic.op.drop_column",
"sqlalchemy.UnicodeText"
] |
[((630, 668), 'alembic.op.drop_column', 'op.drop_column', (['"""user"""', '"""display_name"""'], {}), "('user', 'display_name')\n", (644, 668), False, 'from alembic import op\n'), ((673, 710), 'alembic.op.drop_column', 'op.drop_column', (['"""user"""', '"""description"""'], {}), "('user', 'description')\n", (687, 710), False, 'from alembic import op\n'), ((715, 749), 'alembic.op.drop_column', 'op.drop_column', (['"""user"""', '"""location"""'], {}), "('user', 'location')\n", (729, 749), False, 'from alembic import op\n'), ((754, 783), 'alembic.op.drop_column', 'op.drop_column', (['"""user"""', '"""uri"""'], {}), "('user', 'uri')\n", (768, 783), False, 'from alembic import op\n'), ((788, 819), 'alembic.op.drop_column', 'op.drop_column', (['"""user"""', '"""orcid"""'], {}), "('user', 'orcid')\n", (802, 819), False, 'from alembic import op\n'), ((325, 341), 'sqlalchemy.UnicodeText', 'sa.UnicodeText', ([], {}), '()\n', (339, 341), True, 'import sqlalchemy as sa\n'), ((395, 411), 'sqlalchemy.UnicodeText', 'sa.UnicodeText', ([], {}), '()\n', (409, 411), True, 'import sqlalchemy as sa\n'), ((462, 478), 'sqlalchemy.UnicodeText', 'sa.UnicodeText', ([], {}), '()\n', (476, 478), True, 'import sqlalchemy as sa\n'), ((524, 540), 'sqlalchemy.UnicodeText', 'sa.UnicodeText', ([], {}), '()\n', (538, 540), True, 'import sqlalchemy as sa\n'), ((588, 604), 'sqlalchemy.UnicodeText', 'sa.UnicodeText', ([], {}), '()\n', (602, 604), True, 'import sqlalchemy as sa\n')]
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009-2016 Red Hat, Inc.
#
# Authors:
# <NAME> <<EMAIL>>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from gi.repository import GLib, GObject
# force use of pygobject3 in python-slip
import sys
sys.modules['gobject'] = GObject
import dbus.mainloop.glib
import slip.dbus
from decorator import decorator
from firewall import config
from firewall.core.base import DEFAULT_ZONE_TARGET
from firewall.dbus_utils import dbus_to_python
from firewall.functions import b2u
from firewall.core.rich import Rich_Rule
from firewall import errors
from firewall.errors import FirewallError
import dbus
import traceback
exception_handler = None
not_authorized_loop = False
@decorator
def handle_exceptions(func, *args, **kwargs):
"""Decorator to handle exceptions
"""
authorized = False
while not authorized:
try:
return func(*args, **kwargs)
except dbus.exceptions.DBusException as e:
dbus_message = e.get_dbus_message() # returns unicode
dbus_name = e.get_dbus_name()
if not exception_handler:
raise
if "NotAuthorizedException" in dbus_name:
exception_handler("NotAuthorizedException")
elif "org.freedesktop.DBus.Error" in dbus_name:
# dbus error, try again
exception_handler(dbus_message)
else:
authorized = True
if dbus_message:
exception_handler(dbus_message)
else:
exception_handler(b2u(str(e)))
except FirewallError as e:
if not exception_handler:
raise
else:
exception_handler(b2u(str(e)))
except Exception:
if not exception_handler:
raise
else:
exception_handler(b2u(traceback.format_exc()))
if not not_authorized_loop:
break
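# A minimal sketch of wiring up the module-level error callback consumed by
# handle_exceptions (assuming this module is importable as firewall.client;
# any callable taking one string argument works):
#
#   import firewall.client
#   firewall.client.exception_handler = lambda msg: print("firewalld:", msg)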
# zone config settings
class FirewallClientZoneSettings(object):
@handle_exceptions
    def __init__(self, settings=None):
if settings:
self.settings = settings
else:
self.settings = ["", "", "", False, DEFAULT_ZONE_TARGET, [], [],
[], False, [], [], [], [], [], [], False]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getVersion(self):
return self.settings[0]
@handle_exceptions
def setVersion(self, version):
self.settings[0] = version
@handle_exceptions
def getShort(self):
return self.settings[1]
@handle_exceptions
def setShort(self, short):
self.settings[1] = short
@handle_exceptions
def getDescription(self):
return self.settings[2]
@handle_exceptions
def setDescription(self, description):
self.settings[2] = description
# self.settings[3] was used for 'immutable'
@handle_exceptions
def getTarget(self):
return self.settings[4] if self.settings[4] != DEFAULT_ZONE_TARGET else "default"
@handle_exceptions
def setTarget(self, target):
self.settings[4] = target if target != "default" else DEFAULT_ZONE_TARGET
@handle_exceptions
def getServices(self):
return self.settings[5]
@handle_exceptions
def setServices(self, services):
self.settings[5] = services
@handle_exceptions
def addService(self, service):
if service not in self.settings[5]:
self.settings[5].append(service)
else:
raise FirewallError(errors.ALREADY_ENABLED, service)
@handle_exceptions
def removeService(self, service):
if service in self.settings[5]:
self.settings[5].remove(service)
else:
raise FirewallError(errors.NOT_ENABLED, service)
@handle_exceptions
def queryService(self, service):
return service in self.settings[5]
@handle_exceptions
def getPorts(self):
return self.settings[6]
@handle_exceptions
def setPorts(self, ports):
self.settings[6] = ports
@handle_exceptions
def addPort(self, port, protocol):
if (port,protocol) not in self.settings[6]:
self.settings[6].append((port,protocol))
else:
raise FirewallError(errors.ALREADY_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def removePort(self, port, protocol):
if (port,protocol) in self.settings[6]:
self.settings[6].remove((port,protocol))
else:
raise FirewallError(errors.NOT_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def queryPort(self, port, protocol):
return (port,protocol) in self.settings[6]
@handle_exceptions
def getProtocols(self):
return self.settings[13]
@handle_exceptions
def setProtocols(self, protocols):
self.settings[13] = protocols
@handle_exceptions
def addProtocol(self, protocol):
if protocol not in self.settings[13]:
self.settings[13].append(protocol)
else:
raise FirewallError(errors.ALREADY_ENABLED, protocol)
@handle_exceptions
def removeProtocol(self, protocol):
if protocol in self.settings[13]:
self.settings[13].remove(protocol)
else:
raise FirewallError(errors.NOT_ENABLED, protocol)
@handle_exceptions
def queryProtocol(self, protocol):
return protocol in self.settings[13]
@handle_exceptions
def getSourcePorts(self):
return self.settings[14]
@handle_exceptions
def setSourcePorts(self, ports):
self.settings[14] = ports
@handle_exceptions
def addSourcePort(self, port, protocol):
if (port,protocol) not in self.settings[14]:
self.settings[14].append((port,protocol))
else:
raise FirewallError(errors.ALREADY_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def removeSourcePort(self, port, protocol):
if (port,protocol) in self.settings[14]:
self.settings[14].remove((port,protocol))
else:
raise FirewallError(errors.NOT_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def querySourcePort(self, port, protocol):
return (port,protocol) in self.settings[14]
@handle_exceptions
def getIcmpBlocks(self):
return self.settings[7]
@handle_exceptions
def setIcmpBlocks(self, icmpblocks):
self.settings[7] = icmpblocks
@handle_exceptions
def addIcmpBlock(self, icmptype):
if icmptype not in self.settings[7]:
self.settings[7].append(icmptype)
else:
raise FirewallError(errors.ALREADY_ENABLED, icmptype)
@handle_exceptions
def removeIcmpBlock(self, icmptype):
if icmptype in self.settings[7]:
self.settings[7].remove(icmptype)
else:
raise FirewallError(errors.NOT_ENABLED, icmptype)
@handle_exceptions
def queryIcmpBlock(self, icmptype):
return icmptype in self.settings[7]
@handle_exceptions
def getIcmpBlockInversion(self):
return self.settings[15]
@handle_exceptions
def setIcmpBlockInversion(self, flag):
self.settings[15] = flag
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIcmpBlockInversion(self):
if not self.settings[15]:
self.settings[15] = True
else:
FirewallError(errors.ALREADY_ENABLED, "icmp-block-inversion")
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeIcmpBlockInversion(self):
if self.settings[15]:
self.settings[15] = False
else:
FirewallError(errors.NOT_ENABLED, "icmp-block-inversion")
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryIcmpBlockInversion(self):
return self.settings[15]
@handle_exceptions
def getMasquerade(self):
return self.settings[8]
@handle_exceptions
def setMasquerade(self, masquerade):
self.settings[8] = masquerade
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addMasquerade(self):
if not self.settings[8]:
self.settings[8] = True
else:
FirewallError(errors.ALREADY_ENABLED, "masquerade")
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeMasquerade(self):
if self.settings[8]:
self.settings[8] = False
else:
FirewallError(errors.NOT_ENABLED, "masquerade")
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryMasquerade(self):
return self.settings[8]
@handle_exceptions
def getForwardPorts(self):
return self.settings[9]
@handle_exceptions
def setForwardPorts(self, ports):
self.settings[9] = ports
@handle_exceptions
def addForwardPort(self, port, protocol, to_port, to_addr):
if to_port is None:
to_port = ''
if to_addr is None:
to_addr = ''
if (port,protocol,to_port,to_addr) not in self.settings[9]:
self.settings[9].append((port,protocol,to_port,to_addr))
else:
raise FirewallError(errors.ALREADY_ENABLED, "'%s:%s:%s:%s'" % \
(port, protocol, to_port, to_addr))
@handle_exceptions
def removeForwardPort(self, port, protocol, to_port, to_addr):
if to_port is None:
to_port = ''
if to_addr is None:
to_addr = ''
if (port,protocol,to_port,to_addr) in self.settings[9]:
self.settings[9].remove((port,protocol,to_port,to_addr))
else:
raise FirewallError(errors.NOT_ENABLED, "'%s:%s:%s:%s'" % \
(port, protocol, to_port, to_addr))
@handle_exceptions
def queryForwardPort(self, port, protocol, to_port, to_addr):
if to_port is None:
to_port = ''
if to_addr is None:
to_addr = ''
return (port,protocol,to_port,to_addr) in self.settings[9]
@handle_exceptions
def getInterfaces(self):
return self.settings[10]
@handle_exceptions
def setInterfaces(self, interfaces):
self.settings[10] = interfaces
@handle_exceptions
def addInterface(self, interface):
if interface not in self.settings[10]:
self.settings[10].append(interface)
else:
raise FirewallError(errors.ALREADY_ENABLED, interface)
@handle_exceptions
def removeInterface(self, interface):
if interface in self.settings[10]:
self.settings[10].remove(interface)
else:
raise FirewallError(errors.NOT_ENABLED, interface)
@handle_exceptions
def queryInterface(self, interface):
return interface in self.settings[10]
@handle_exceptions
def getSources(self):
return self.settings[11]
@handle_exceptions
def setSources(self, sources):
self.settings[11] = sources
@handle_exceptions
def addSource(self, source):
if source not in self.settings[11]:
self.settings[11].append(source)
else:
raise FirewallError(errors.ALREADY_ENABLED, source)
@handle_exceptions
def removeSource(self, source):
if source in self.settings[11]:
self.settings[11].remove(source)
else:
raise FirewallError(errors.NOT_ENABLED, source)
@handle_exceptions
def querySource(self, source):
return source in self.settings[11]
@handle_exceptions
def getRichRules(self):
return self.settings[12]
@handle_exceptions
def setRichRules(self, rules):
rules = [ str(Rich_Rule(rule_str=r)) for r in rules ]
self.settings[12] = rules
@handle_exceptions
def addRichRule(self, rule):
rule = str(Rich_Rule(rule_str=rule))
if rule not in self.settings[12]:
self.settings[12].append(rule)
else:
raise FirewallError(errors.ALREADY_ENABLED, rule)
@handle_exceptions
def removeRichRule(self, rule):
rule = str(Rich_Rule(rule_str=rule))
if rule in self.settings[12]:
self.settings[12].remove(rule)
else:
raise FirewallError(errors.NOT_ENABLED, rule)
@handle_exceptions
def queryRichRule(self, rule):
rule = str(Rich_Rule(rule_str=rule))
return rule in self.settings[12]
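# A minimal local sketch (hypothetical helper, not part of firewalld's
# API): the settings object is plain Python state, so no D-Bus connection
# is needed to build or inspect it.
def _demo_zone_settings():
    s = FirewallClientZoneSettings()
    s.setShort("demo")
    s.addService("ssh")
    s.addPort("8080", "tcp")
    assert s.queryService("ssh") and s.queryPort("8080", "tcp")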
# zone config
class FirewallClientConfigZone(object):
def __init__(self, bus, path):
self.bus = bus
self.path = path
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE, path)
self.fw_zone = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
#TODO: check interface version and revision (need to match client
# version)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE_CONFIG_ZONE, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE_CONFIG_ZONE))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
prop, value)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSettings(self):
return FirewallClientZoneSettings(list(dbus_to_python(\
self.fw_zone.getSettings())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def update(self, settings):
self.fw_zone.update(tuple(settings.settings))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def loadDefaults(self):
self.fw_zone.loadDefaults()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def remove(self):
self.fw_zone.remove()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def rename(self, name):
self.fw_zone.rename(name)
# version
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getVersion(self):
return self.fw_zone.getVersion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setVersion(self, version):
self.fw_zone.setVersion(version)
# short
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getShort(self):
return self.fw_zone.getShort()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setShort(self, short):
self.fw_zone.setShort(short)
# description
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDescription(self):
return self.fw_zone.getDescription()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDescription(self, description):
self.fw_zone.setDescription(description)
# target
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getTarget(self):
return self.fw_zone.getTarget()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setTarget(self, target):
self.fw_zone.setTarget(target)
# service
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getServices(self):
return self.fw_zone.getServices()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setServices(self, services):
self.fw_zone.setServices(services)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addService(self, service):
self.fw_zone.addService(service)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeService(self, service):
self.fw_zone.removeService(service)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryService(self, service):
return self.fw_zone.queryService(service)
# port
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getPorts(self):
return self.fw_zone.getPorts()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setPorts(self, ports):
self.fw_zone.setPorts(ports)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addPort(self, port, protocol):
self.fw_zone.addPort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removePort(self, port, protocol):
self.fw_zone.removePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPort(self, port, protocol):
return self.fw_zone.queryPort(port, protocol)
# protocol
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getProtocols(self):
return self.fw_zone.getProtocols()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setProtocols(self, protocols):
self.fw_zone.setProtocols(protocols)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addProtocol(self, protocol):
self.fw_zone.addProtocol(protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeProtocol(self, protocol):
self.fw_zone.removeProtocol(protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryProtocol(self, protocol):
return self.fw_zone.queryProtocol(protocol)
# source-port
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSourcePorts(self):
return self.fw_zone.getSourcePorts()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setSourcePorts(self, ports):
self.fw_zone.setSourcePorts(ports)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addSourcePort(self, port, protocol):
self.fw_zone.addSourcePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeSourcePort(self, port, protocol):
self.fw_zone.removeSourcePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def querySourcePort(self, port, protocol):
return self.fw_zone.querySourcePort(port, protocol)
# icmp block
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpBlocks(self):
return self.fw_zone.getIcmpBlocks()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setIcmpBlocks(self, icmptypes):
self.fw_zone.setIcmpBlocks(icmptypes)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIcmpBlock(self, icmptype):
self.fw_zone.addIcmpBlock(icmptype)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeIcmpBlock(self, icmptype):
self.fw_zone.removeIcmpBlock(icmptype)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryIcmpBlock(self, icmptype):
return self.fw_zone.queryIcmpBlock(icmptype)
# icmp-block-inversion
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpBlockInversion(self):
return self.fw_zone.getIcmpBlockInversion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setIcmpBlockInversion(self, inversion):
self.fw_zone.setIcmpBlockInversion(inversion)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIcmpBlockInversion(self):
self.fw_zone.addIcmpBlockInversion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeIcmpBlockInversion(self):
self.fw_zone.removeIcmpBlockInversion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryIcmpBlockInversion(self):
return self.fw_zone.queryIcmpBlockInversion()
# masquerade
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getMasquerade(self):
return self.fw_zone.getMasquerade()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setMasquerade(self, masquerade):
self.fw_zone.setMasquerade(masquerade)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addMasquerade(self):
self.fw_zone.addMasquerade()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeMasquerade(self):
self.fw_zone.removeMasquerade()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryMasquerade(self):
return self.fw_zone.queryMasquerade()
# forward port
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getForwardPorts(self):
return self.fw_zone.getForwardPorts()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setForwardPorts(self, ports):
self.fw_zone.setForwardPorts(ports)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addForwardPort(self, port, protocol, toport, toaddr):
if toport is None:
toport = ''
if toaddr is None:
toaddr = ''
self.fw_zone.addForwardPort(port, protocol, toport, toaddr)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeForwardPort(self, port, protocol, toport, toaddr):
if toport is None:
toport = ''
if toaddr is None:
toaddr = ''
self.fw_zone.removeForwardPort(port, protocol, toport, toaddr)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryForwardPort(self, port, protocol, toport, toaddr):
if toport is None:
toport = ''
if toaddr is None:
toaddr = ''
return self.fw_zone.queryForwardPort(port, protocol, toport, toaddr)
# interface
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getInterfaces(self):
return self.fw_zone.getInterfaces()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setInterfaces(self, interfaces):
self.fw_zone.setInterfaces(interfaces)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addInterface(self, interface):
self.fw_zone.addInterface(interface)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeInterface(self, interface):
self.fw_zone.removeInterface(interface)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryInterface(self, interface):
return self.fw_zone.queryInterface(interface)
# source
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSources(self):
return self.fw_zone.getSources()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setSources(self, sources):
self.fw_zone.setSources(sources)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addSource(self, source):
self.fw_zone.addSource(source)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeSource(self, source):
self.fw_zone.removeSource(source)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def querySource(self, source):
return self.fw_zone.querySource(source)
# rich rule
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getRichRules(self):
return self.fw_zone.getRichRules()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setRichRules(self, rules):
self.fw_zone.setRichRules(rules)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addRichRule(self, rule):
self.fw_zone.addRichRule(rule)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeRichRule(self, rule):
self.fw_zone.removeRichRule(rule)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryRichRule(self, rule):
return self.fw_zone.queryRichRule(rule)
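# A sketch of remote configuration (requires a running firewalld and polkit
# authorization; the object path shown is illustrative, real paths come
# from the main FirewallClient config interface defined later in this
# module):
#
#   import slip.dbus
#   bus = slip.dbus.SystemBus()
#   zone = FirewallClientConfigZone(
#       bus, "/org/fedoraproject/FirewallD1/config/zone/0")
#   settings = zone.getSettings()
#   settings.addService("https")
#   zone.update(settings)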
# service config settings
class FirewallClientServiceSettings(object):
@handle_exceptions
def __init__(self, settings=None):
if settings:
self.settings = settings
else:
self.settings = ["", "", "", [], [], {}, [], []]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getVersion(self):
return self.settings[0]
@handle_exceptions
def setVersion(self, version):
self.settings[0] = version
@handle_exceptions
def getShort(self):
return self.settings[1]
@handle_exceptions
def setShort(self, short):
self.settings[1] = short
@handle_exceptions
def getDescription(self):
return self.settings[2]
@handle_exceptions
def setDescription(self, description):
self.settings[2] = description
@handle_exceptions
def getPorts(self):
return self.settings[3]
@handle_exceptions
def setPorts(self, ports):
self.settings[3] = ports
@handle_exceptions
def addPort(self, port, protocol):
if (port,protocol) not in self.settings[3]:
self.settings[3].append((port,protocol))
else:
raise FirewallError(errors.ALREADY_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def removePort(self, port, protocol):
if (port,protocol) in self.settings[3]:
self.settings[3].remove((port,protocol))
else:
raise FirewallError(errors.NOT_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def queryPort(self, port, protocol):
return (port,protocol) in self.settings[3]
@handle_exceptions
def getProtocols(self):
return self.settings[6]
@handle_exceptions
def setProtocols(self, protocols):
self.settings[6] = protocols
@handle_exceptions
def addProtocol(self, protocol):
if protocol not in self.settings[6]:
self.settings[6].append(protocol)
else:
raise FirewallError(errors.ALREADY_ENABLED, protocol)
@handle_exceptions
def removeProtocol(self, protocol):
if protocol in self.settings[6]:
self.settings[6].remove(protocol)
else:
raise FirewallError(errors.NOT_ENABLED, protocol)
@handle_exceptions
def queryProtocol(self, protocol):
return protocol in self.settings[6]
@handle_exceptions
def getSourcePorts(self):
return self.settings[7]
@handle_exceptions
def setSourcePorts(self, ports):
self.settings[7] = ports
@handle_exceptions
def addSourcePort(self, port, protocol):
if (port,protocol) not in self.settings[7]:
self.settings[7].append((port,protocol))
else:
raise FirewallError(errors.ALREADY_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def removeSourcePort(self, port, protocol):
if (port,protocol) in self.settings[7]:
self.settings[7].remove((port,protocol))
else:
raise FirewallError(errors.NOT_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def querySourcePort(self, port, protocol):
return (port,protocol) in self.settings[7]
@handle_exceptions
def getModules(self):
return self.settings[4]
@handle_exceptions
def setModules(self, modules):
self.settings[4] = modules
@handle_exceptions
def addModule(self, module):
if module not in self.settings[4]:
self.settings[4].append(module)
else:
raise FirewallError(errors.ALREADY_ENABLED, module)
@handle_exceptions
def removeModule(self, module):
if module in self.settings[4]:
self.settings[4].remove(module)
else:
raise FirewallError(errors.NOT_ENABLED, module)
@handle_exceptions
def queryModule(self, module):
return module in self.settings[4]
@handle_exceptions
def getDestinations(self):
return self.settings[5]
@handle_exceptions
def setDestinations(self, destinations):
self.settings[5] = destinations
@handle_exceptions
def setDestination(self, dest_type, address):
if dest_type not in self.settings[5] or \
self.settings[5][dest_type] != address:
self.settings[5][dest_type] = address
else:
raise FirewallError(errors.ALREADY_ENABLED, "'%s:%s'" % \
(dest_type, address))
@handle_exceptions
def removeDestination(self, dest_type, address=None):
if dest_type in self.settings[5]:
if address is not None and self.settings[5][dest_type] != address:
raise FirewallError(errors.NOT_ENABLED, "'%s:%s'" % \
(dest_type, address))
del self.settings[5][dest_type]
else:
raise FirewallError(errors.NOT_ENABLED, "'%s'" % dest_type)
@handle_exceptions
def queryDestination(self, dest_type, address):
return (dest_type in self.settings[5] and \
address == self.settings[5][dest_type])
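# A minimal local sketch (hypothetical helper): service destinations are a
# dest_type -> address mapping stored in settings[5].
def _demo_service_settings():
    s = FirewallClientServiceSettings()
    s.addPort("8443", "tcp")
    s.setDestination("ipv4", "192.0.2.0/24")
    assert s.queryDestination("ipv4", "192.0.2.0/24")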
# ipset config settings
class FirewallClientIPSetSettings(object):
@handle_exceptions
def __init__(self, settings=None):
if settings:
self.settings = settings
else:
self.settings = ["", "", "", "", {}, []]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getVersion(self):
return self.settings[0]
@handle_exceptions
def setVersion(self, version):
self.settings[0] = version
@handle_exceptions
def getShort(self):
return self.settings[1]
@handle_exceptions
def setShort(self, short):
self.settings[1] = short
@handle_exceptions
def getDescription(self):
return self.settings[2]
@handle_exceptions
def setDescription(self, description):
self.settings[2] = description
@handle_exceptions
def getType(self):
return self.settings[3]
@handle_exceptions
def setType(self, ipset_type):
self.settings[3] = ipset_type
@handle_exceptions
def getOptions(self):
return self.settings[4]
@handle_exceptions
def setOptions(self, options):
self.settings[4] = options
@handle_exceptions
def addOption(self, key, value):
if key not in self.settings[4] or self.settings[4][key] != value:
self.settings[4][key] = value
else:
raise FirewallError(errors.ALREADY_ENABLED, "'%s=%s'" % (key,value)
if value else key)
@handle_exceptions
def removeOption(self, key):
if key in self.settings[4]:
del self.settings[4][key]
else:
raise FirewallError(errors.NOT_ENABLED, key)
@handle_exceptions
def queryOption(self, key, value):
return key in self.settings[4] and self.settings[4][key] == value
@handle_exceptions
def getEntries(self):
return self.settings[5]
@handle_exceptions
def setEntries(self, entries):
if "timeout" in self.settings[4] and \
self.settings[4]["timeout"] != "0":
raise FirewallError(errors.IPSET_WITH_TIMEOUT)
self.settings[5] = entries
@handle_exceptions
def addEntry(self, entry):
if "timeout" in self.settings[4] and \
self.settings[4]["timeout"] != "0":
raise FirewallError(errors.IPSET_WITH_TIMEOUT)
if entry not in self.settings[5]:
self.settings[5].append(entry)
else:
raise FirewallError(errors.ALREADY_ENABLED, entry)
@handle_exceptions
def removeEntry(self, entry):
if "timeout" in self.settings[4] and \
self.settings[4]["timeout"] != "0":
raise FirewallError(errors.IPSET_WITH_TIMEOUT)
if entry in self.settings[5]:
self.settings[5].remove(entry)
else:
raise FirewallError(errors.NOT_ENABLED, entry)
@handle_exceptions
def queryEntry(self, entry):
if "timeout" in self.settings[4] and \
self.settings[4]["timeout"] != "0":
raise FirewallError(errors.IPSET_WITH_TIMEOUT)
return entry in self.settings[5]
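# A minimal local sketch (hypothetical helper): permanent entries and a
# non-zero "timeout" option are mutually exclusive, which is what the
# IPSET_WITH_TIMEOUT errors above enforce.
def _demo_ipset_settings():
    s = FirewallClientIPSetSettings()
    s.setType("hash:ip")
    s.addEntry("192.0.2.1")       # allowed: no timeout option is set
    s.addOption("timeout", "600")
    try:
        s.addEntry("192.0.2.2")   # rejected once a timeout is configured
    except FirewallError:
        pass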
# ipset config
class FirewallClientConfigIPSet(object):
@handle_exceptions
def __init__(self, bus, path):
self.bus = bus
self.path = path
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE, path)
self.fw_ipset = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_IPSET)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE_CONFIG_IPSET, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE_CONFIG_IPSET))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE_CONFIG_IPSET,
prop, value)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSettings(self):
return FirewallClientIPSetSettings(list(dbus_to_python(\
self.fw_ipset.getSettings())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def update(self, settings):
self.fw_ipset.update(tuple(settings.settings))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def loadDefaults(self):
self.fw_ipset.loadDefaults()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def remove(self):
self.fw_ipset.remove()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def rename(self, name):
self.fw_ipset.rename(name)
# version
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getVersion(self):
return self.fw_ipset.getVersion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setVersion(self, version):
self.fw_ipset.setVersion(version)
# short
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getShort(self):
return self.fw_ipset.getShort()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setShort(self, short):
self.fw_ipset.setShort(short)
# description
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDescription(self):
return self.fw_ipset.getDescription()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDescription(self, description):
self.fw_ipset.setDescription(description)
# entry
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getEntries(self):
return self.fw_ipset.getEntries()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setEntries(self, entries):
self.fw_ipset.setEntries(entries)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addEntry(self, entry):
self.fw_ipset.addEntry(entry)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeEntry(self, entry):
self.fw_ipset.removeEntry(entry)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryEntry(self, entry):
return self.fw_ipset.queryEntry(entry)
# helper config settings
class FirewallClientHelperSettings(object):
@handle_exceptions
def __init__(self, settings=None):
if settings:
self.settings = settings
else:
self.settings = ["", "", "", "", "", [ ]]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getVersion(self):
return self.settings[0]
@handle_exceptions
def setVersion(self, version):
self.settings[0] = version
@handle_exceptions
def getShort(self):
return self.settings[1]
@handle_exceptions
def setShort(self, short):
self.settings[1] = short
@handle_exceptions
def getDescription(self):
return self.settings[2]
@handle_exceptions
def setDescription(self, description):
self.settings[2] = description
@handle_exceptions
def getFamily(self):
return self.settings[3]
@handle_exceptions
def setFamily(self, ipv):
        if ipv is None:
            self.settings[3] = ""
        else:
            self.settings[3] = ipv
@handle_exceptions
def getModule(self):
return self.settings[4]
@handle_exceptions
def setModule(self, module):
self.settings[4] = module
@handle_exceptions
def getPorts(self):
return self.settings[5]
@handle_exceptions
def setPorts(self, ports):
self.settings[5] = ports
@handle_exceptions
def addPort(self, port, protocol):
if (port,protocol) not in self.settings[5]:
self.settings[5].append((port,protocol))
else:
raise FirewallError(errors.ALREADY_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def removePort(self, port, protocol):
if (port,protocol) in self.settings[5]:
self.settings[5].remove((port,protocol))
else:
raise FirewallError(errors.NOT_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def queryPort(self, port, protocol):
return (port,protocol) in self.settings[5]
# helper config
class FirewallClientConfigHelper(object):
@handle_exceptions
def __init__(self, bus, path):
self.bus = bus
self.path = path
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE, path)
self.fw_helper = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_HELPER)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE_CONFIG_HELPER, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE_CONFIG_HELPER))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE_CONFIG_HELPER,
prop, value)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSettings(self):
return FirewallClientHelperSettings(list(dbus_to_python(\
self.fw_helper.getSettings())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def update(self, settings):
self.fw_helper.update(tuple(settings.settings))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def loadDefaults(self):
self.fw_helper.loadDefaults()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def remove(self):
self.fw_helper.remove()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def rename(self, name):
self.fw_helper.rename(name)
# version
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getVersion(self):
return self.fw_helper.getVersion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setVersion(self, version):
self.fw_helper.setVersion(version)
# short
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getShort(self):
return self.fw_helper.getShort()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setShort(self, short):
self.fw_helper.setShort(short)
# description
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDescription(self):
return self.fw_helper.getDescription()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDescription(self, description):
self.fw_helper.setDescription(description)
# port
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getPorts(self):
return self.fw_helper.getPorts()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setPorts(self, ports):
self.fw_helper.setPorts(ports)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addPort(self, port, protocol):
self.fw_helper.addPort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removePort(self, port, protocol):
self.fw_helper.removePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPort(self, port, protocol):
return self.fw_helper.queryPort(port, protocol)
# family
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getFamily(self):
return self.fw_helper.getFamily()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setFamily(self, ipv):
        if ipv is None:
            self.fw_helper.setFamily("")
        else:
            self.fw_helper.setFamily(ipv)
# module
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getModule(self):
return self.fw_helper.getModule()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setModule(self, module):
self.fw_helper.setModule(module)
# service config
class FirewallClientConfigService(object):
@handle_exceptions
def __init__(self, bus, path):
self.bus = bus
self.path = path
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE, path)
self.fw_service = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_SERVICE)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE_CONFIG_SERVICE, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE_CONFIG_SERVICE))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE_CONFIG_SERVICE,
prop, value)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSettings(self):
return FirewallClientServiceSettings(list(dbus_to_python(\
self.fw_service.getSettings())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def update(self, settings):
self.fw_service.update(tuple(settings.settings))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def loadDefaults(self):
self.fw_service.loadDefaults()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def remove(self):
self.fw_service.remove()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def rename(self, name):
self.fw_service.rename(name)
# version
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getVersion(self):
return self.fw_service.getVersion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setVersion(self, version):
self.fw_service.setVersion(version)
# short
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getShort(self):
return self.fw_service.getShort()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setShort(self, short):
self.fw_service.setShort(short)
# description
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDescription(self):
return self.fw_service.getDescription()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDescription(self, description):
self.fw_service.setDescription(description)
# port
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getPorts(self):
return self.fw_service.getPorts()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setPorts(self, ports):
self.fw_service.setPorts(ports)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addPort(self, port, protocol):
self.fw_service.addPort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removePort(self, port, protocol):
self.fw_service.removePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPort(self, port, protocol):
return self.fw_service.queryPort(port, protocol)
# protocol
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getProtocols(self):
return self.fw_service.getProtocols()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setProtocols(self, protocols):
self.fw_service.setProtocols(protocols)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addProtocol(self, protocol):
self.fw_service.addProtocol(protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeProtocol(self, protocol):
self.fw_service.removeProtocol(protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryProtocol(self, protocol):
return self.fw_service.queryProtocol(protocol)
# source-port
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSourcePorts(self):
return self.fw_service.getSourcePorts()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setSourcePorts(self, ports):
self.fw_service.setSourcePorts(ports)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addSourcePort(self, port, protocol):
self.fw_service.addSourcePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeSourcePort(self, port, protocol):
self.fw_service.removeSourcePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def querySourcePort(self, port, protocol):
return self.fw_service.querySourcePort(port, protocol)
# module
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getModules(self):
return self.fw_service.getModules()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setModules(self, modules):
self.fw_service.setModules(modules)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addModule(self, module):
self.fw_service.addModule(module)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeModule(self, module):
self.fw_service.removeModule(module)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryModule(self, module):
return self.fw_service.queryModule(module)
# destination
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDestinations(self):
return self.fw_service.getDestinations()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDestinations(self, destinations):
self.fw_service.setDestinations(destinations)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDestination(self, destination):
return self.fw_service.getDestination(destination)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDestination(self, destination, address):
self.fw_service.setDestination(destination, address)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeDestination(self, destination, address=None):
if address is not None and self.getDestination(destination) != address:
raise FirewallError(errors.NOT_ENABLED, "'%s:%s'" % \
(destination, address))
self.fw_service.removeDestination(destination)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryDestination(self, destination, address):
return self.fw_service.queryDestination(destination, address)
# icmptype config settings
class FirewallClientIcmpTypeSettings(object):
@handle_exceptions
def __init__(self, settings=None):
if settings:
self.settings = settings
else:
self.settings = ["", "", "", []]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getVersion(self):
return self.settings[0]
@handle_exceptions
def setVersion(self, version):
self.settings[0] = version
@handle_exceptions
def getShort(self):
return self.settings[1]
@handle_exceptions
def setShort(self, short):
self.settings[1] = short
@handle_exceptions
def getDescription(self):
return self.settings[2]
@handle_exceptions
def setDescription(self, description):
self.settings[2] = description
@handle_exceptions
def getDestinations(self):
return self.settings[3]
@handle_exceptions
def setDestinations(self, destinations):
self.settings[3] = destinations
@handle_exceptions
def addDestination(self, destination):
# empty means all
if not self.settings[3]:
raise FirewallError(errors.ALREADY_ENABLED, destination)
elif destination not in self.settings[3]:
self.settings[3].append(destination)
else:
raise FirewallError(errors.ALREADY_ENABLED, destination)
@handle_exceptions
def removeDestination(self, destination):
if destination in self.settings[3]:
self.settings[3].remove(destination)
# empty means all
elif not self.settings[3]:
self.setDestinations(list(set(['ipv4','ipv6']) - \
set([destination])))
else:
raise FirewallError(errors.NOT_ENABLED, destination)
@handle_exceptions
def queryDestination(self, destination):
# empty means all
return not self.settings[3] or \
destination in self.settings[3]
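# A minimal local sketch (hypothetical helper): an empty destination list
# means "all families", so removing one family materializes the complement.
def _demo_icmptype_destinations():
    s = FirewallClientIcmpTypeSettings()
    assert s.queryDestination("ipv4")   # empty list matches every family
    s.removeDestination("ipv4")         # destinations become ['ipv6']
    assert not s.queryDestination("ipv4")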
# icmptype config
class FirewallClientConfigIcmpType(object):
@handle_exceptions
def __init__(self, bus, path):
self.bus = bus
self.path = path
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE, path)
self.fw_icmptype = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_ICMPTYPE)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE_CONFIG_ICMPTYPE, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE_CONFIG_ICMPTYPE))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE_CONFIG_ICMPTYPE,
prop, value)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSettings(self):
return FirewallClientIcmpTypeSettings(list(dbus_to_python(\
self.fw_icmptype.getSettings())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def update(self, settings):
self.fw_icmptype.update(tuple(settings.settings))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def loadDefaults(self):
self.fw_icmptype.loadDefaults()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def remove(self):
self.fw_icmptype.remove()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def rename(self, name):
self.fw_icmptype.rename(name)
# version
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getVersion(self):
return self.fw_icmptype.getVersion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setVersion(self, version):
self.fw_icmptype.setVersion(version)
# short
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getShort(self):
return self.fw_icmptype.getShort()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setShort(self, short):
self.fw_icmptype.setShort(short)
# description
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDescription(self):
return self.fw_icmptype.getDescription()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDescription(self, description):
self.fw_icmptype.setDescription(description)
# destination
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDestinations(self):
return self.fw_icmptype.getDestinations()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDestinations(self, destinations):
self.fw_icmptype.setDestinations(destinations)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addDestination(self, destination):
self.fw_icmptype.addDestination(destination)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeDestination(self, destination):
self.fw_icmptype.removeDestination(destination)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryDestination(self, destination):
return self.fw_icmptype.queryDestination(destination)
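# Usage sketch (illustrative only): round-tripping a permanent icmptype
# through D-Bus, assuming `fw_conf` is a connected FirewallClientConfig
# instance and "echo-request" is an icmptype known to the system.
#
#   icmptype = fw_conf.getIcmpTypeByName("echo-request")
#   settings = icmptype.getSettings()
#   settings.setDestinations(["ipv4"])
#   icmptype.update(settings)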
# config.policies lockdown whitelist
class FirewallClientPoliciesLockdownWhitelist(object):
@handle_exceptions
def __init__(self, settings=None):
if settings:
self.settings = settings
else:
self.settings = [ [], [], [], [] ]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getCommands(self):
return self.settings[0]
@handle_exceptions
def setCommands(self, commands):
self.settings[0] = commands
@handle_exceptions
def addCommand(self, command):
if command not in self.settings[0]:
self.settings[0].append(command)
@handle_exceptions
def removeCommand(self, command):
if command in self.settings[0]:
self.settings[0].remove(command)
@handle_exceptions
def queryCommand(self, command):
return command in self.settings[0]
@handle_exceptions
def getContexts(self):
return self.settings[1]
@handle_exceptions
def setContexts(self, contexts):
self.settings[1] = contexts
@handle_exceptions
def addContext(self, context):
if context not in self.settings[1]:
self.settings[1].append(context)
@handle_exceptions
def removeContext(self, context):
if context in self.settings[1]:
self.settings[1].remove(context)
@handle_exceptions
def queryContext(self, context):
return context in self.settings[1]
@handle_exceptions
def getUsers(self):
return self.settings[2]
@handle_exceptions
def setUsers(self, users):
self.settings[2] = users
@handle_exceptions
def addUser(self, user):
if user not in self.settings[2]:
self.settings[2].append(user)
@handle_exceptions
def removeUser(self, user):
if user in self.settings[2]:
self.settings[2].remove(user)
@handle_exceptions
def queryUser(self, user):
return user in self.settings[2]
@handle_exceptions
def getUids(self):
return self.settings[3]
@handle_exceptions
def setUids(self, uids):
self.settings[3] = uids
@handle_exceptions
def addUid(self, uid):
if uid not in self.settings[3]:
self.settings[3].append(uid)
@handle_exceptions
def removeUid(self, uid):
if uid in self.settings[3]:
self.settings[3].remove(uid)
@handle_exceptions
def queryUid(self, uid):
return uid in self.settings[3]
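# Usage sketch (illustrative only): the whitelist holds four parallel
# lists: commands, SELinux contexts, user names and numeric uids.
#
#   wl = FirewallClientPoliciesLockdownWhitelist()
#   wl.addCommand("/usr/bin/firewall-config")
#   wl.addUid(0)
#   assert wl.queryUid(0) and not wl.queryUser("alice")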
# config.policies
class FirewallClientConfigPolicies(object):
@handle_exceptions
def __init__(self, bus):
self.bus = bus
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE,
config.dbus.DBUS_PATH_CONFIG)
self.fw_policies = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_POLICIES)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelist(self):
return FirewallClientPoliciesLockdownWhitelist( \
list(dbus_to_python(self.fw_policies.getLockdownWhitelist())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setLockdownWhitelist(self, settings):
self.fw_policies.setLockdownWhitelist(tuple(settings.settings))
# command
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistCommand(self, command):
self.fw_policies.addLockdownWhitelistCommand(command)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistCommand(self, command):
self.fw_policies.removeLockdownWhitelistCommand(command)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistCommand(self, command):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistCommand(command))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistCommands(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistCommands())
# context
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistContext(self, context):
self.fw_policies.addLockdownWhitelistContext(context)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistContext(self, context):
self.fw_policies.removeLockdownWhitelistContext(context)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistContext(self, context):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistContext(context))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistContexts(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistContexts())
# user
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistUser(self, user):
self.fw_policies.addLockdownWhitelistUser(user)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistUser(self, user):
self.fw_policies.removeLockdownWhitelistUser(user)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistUser(self, user):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistUser(user))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistUsers(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistUsers())
# uid
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistUids(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistUids())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setLockdownWhitelistUids(self, uids):
self.fw_policies.setLockdownWhitelistUids(uids)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistUid(self, uid):
self.fw_policies.addLockdownWhitelistUid(uid)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistUid(self, uid):
self.fw_policies.removeLockdownWhitelistUid(uid)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistUid(self, uid):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistUid(uid))
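# Usage sketch (illustrative only): editing the permanent lockdown
# whitelist, assuming `fw_conf` is a connected FirewallClientConfig.
#
#   policies = fw_conf.policies()
#   wl = policies.getLockdownWhitelist()
#   wl.addUid(0)
#   policies.setLockdownWhitelist(wl)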
# config.direct
class FirewallClientDirect(object):
@handle_exceptions
def __init__(self, settings=None):
if settings:
self.settings = settings
else:
self.settings = [ [], [], [], ]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getAllChains(self):
return self.settings[0]
@handle_exceptions
def getChains(self, ipv, table):
return [ entry[2] for entry in self.settings[0] \
if entry[0] == ipv and entry[1] == table ]
@handle_exceptions
def setAllChains(self, chains):
self.settings[0] = chains
@handle_exceptions
def addChain(self, ipv, table, chain):
idx = (ipv, table, chain)
if idx not in self.settings[0]:
self.settings[0].append(idx)
@handle_exceptions
def removeChain(self, ipv, table, chain):
idx = (ipv, table, chain)
if idx in self.settings[0]:
self.settings[0].remove(idx)
@handle_exceptions
def queryChain(self, ipv, table, chain):
idx = (ipv, table, chain)
return idx in self.settings[0]
@handle_exceptions
def getAllRules(self):
return self.settings[1]
@handle_exceptions
def getRules(self, ipv, table, chain):
return [ entry[3:] for entry in self.settings[1] \
if entry[0] == ipv and entry[1] == table \
and entry[2] == chain ]
@handle_exceptions
def setAllRules(self, rules):
self.settings[1] = rules
@handle_exceptions
def addRule(self, ipv, table, chain, priority, args):
idx = (ipv, table, chain, priority, args)
if idx not in self.settings[1]:
self.settings[1].append(idx)
@handle_exceptions
def removeRule(self, ipv, table, chain, priority, args):
idx = (ipv, table, chain, priority, args)
if idx in self.settings[1]:
self.settings[1].remove(idx)
@handle_exceptions
def removeRules(self, ipv, table, chain):
for idx in list(self.settings[1]):
if idx[0] == ipv and idx[1] == table and idx[2] == chain:
self.settings[1].remove(idx)
@handle_exceptions
def queryRule(self, ipv, table, chain, priority, args):
idx = (ipv, table, chain, priority, args)
return idx in self.settings[1]
@handle_exceptions
def getAllPassthroughs(self):
return self.settings[2]
@handle_exceptions
def setAllPassthroughs(self, passthroughs):
self.settings[2] = passthroughs
@handle_exceptions
def removeAllPassthroughs(self):
self.settings[2] = []
@handle_exceptions
def getPassthroughs(self, ipv):
return [ entry[1] for entry in self.settings[2] \
if entry[0] == ipv ]
@handle_exceptions
def addPassthrough(self, ipv, args):
idx = (ipv, args)
if idx not in self.settings[2]:
self.settings[2].append(idx)
@handle_exceptions
def removePassthrough(self, ipv, args):
idx = (ipv, args)
if idx in self.settings[2]:
self.settings[2].remove(idx)
@handle_exceptions
def queryPassthrough(self, ipv, args):
idx = (ipv, args)
return idx in self.settings[2]
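# Usage sketch (illustrative only): the direct settings object stores
# chains as (ipv, table, chain) tuples, rules as
# (ipv, table, chain, priority, args) tuples and passthroughs as
# (ipv, args) tuples; the chain name "mychain" below is hypothetical.
#
#   direct = FirewallClientDirect()
#   direct.addChain("ipv4", "filter", "mychain")
#   direct.addRule("ipv4", "filter", "mychain", 0, ["-j", "ACCEPT"])
#   assert direct.queryChain("ipv4", "filter", "mychain")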
# config.direct
class FirewallClientConfigDirect(object):
@handle_exceptions
def __init__(self, bus):
self.bus = bus
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE,
config.dbus.DBUS_PATH_CONFIG)
self.fw_direct = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_DIRECT)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSettings(self):
return FirewallClientDirect( \
list(dbus_to_python(self.fw_direct.getSettings())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def update(self, settings):
self.fw_direct.update(tuple(settings.settings))
# direct chain
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addChain(self, ipv, table, chain):
self.fw_direct.addChain(ipv, table, chain)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeChain(self, ipv, table, chain):
self.fw_direct.removeChain(ipv, table, chain)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryChain(self, ipv, table, chain):
return dbus_to_python(self.fw_direct.queryChain(ipv, table, chain))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getChains(self, ipv, table):
return dbus_to_python(self.fw_direct.getChains(ipv, table))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAllChains(self):
return dbus_to_python(self.fw_direct.getAllChains())
# direct rule
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addRule(self, ipv, table, chain, priority, args):
self.fw_direct.addRule(ipv, table, chain, priority, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeRule(self, ipv, table, chain, priority, args):
self.fw_direct.removeRule(ipv, table, chain, priority, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeRules(self, ipv, table, chain):
self.fw_direct.removeRules(ipv, table, chain)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryRule(self, ipv, table, chain, priority, args):
return dbus_to_python(self.fw_direct.queryRule(ipv, table, chain, priority, args))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getRules(self, ipv, table, chain):
return dbus_to_python(self.fw_direct.getRules(ipv, table, chain))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAllRules(self):
return dbus_to_python(self.fw_direct.getAllRules())
# tracked passthrough
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addPassthrough(self, ipv, args):
self.fw_direct.addPassthrough(ipv, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removePassthrough(self, ipv, args):
self.fw_direct.removePassthrough(ipv, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPassthrough(self, ipv, args):
return dbus_to_python(self.fw_direct.queryPassthrough(ipv, args))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getPassthroughs(self, ipv):
return dbus_to_python(self.fw_direct.getPassthroughs(ipv))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAllPassthroughs(self):
return dbus_to_python(self.fw_direct.getAllPassthroughs())
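# Usage sketch (illustrative only): persisting a direct chain and rule,
# assuming `fw_conf` is a connected FirewallClientConfig; "mychain" is
# hypothetical.
#
#   direct_conf = fw_conf.direct()
#   direct_conf.addChain("ipv4", "filter", "mychain")
#   direct_conf.addRule("ipv4", "filter", "mychain", 0, ["-j", "ACCEPT"])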
# config
class FirewallClientConfig(object):
@handle_exceptions
def __init__(self, bus):
self.bus = bus
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE,
config.dbus.DBUS_PATH_CONFIG)
self.fw_config = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
self._policies = FirewallClientConfigPolicies(self.bus)
self._direct = FirewallClientConfigDirect(self.bus)
# properties
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE_CONFIG, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE_CONFIG))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE_CONFIG, prop, value)
# ipset
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIPSetNames(self):
return dbus_to_python(self.fw_config.getIPSetNames())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listIPSets(self):
return dbus_to_python(self.fw_config.listIPSets())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIPSet(self, path):
return FirewallClientConfigIPSet(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIPSetByName(self, name):
path = dbus_to_python(self.fw_config.getIPSetByName(name))
return FirewallClientConfigIPSet(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIPSet(self, name, settings):
if isinstance(settings, FirewallClientIPSetSettings):
path = self.fw_config.addIPSet(name, tuple(settings.settings))
else:
path = self.fw_config.addIPSet(name, tuple(settings))
return FirewallClientConfigIPSet(self.bus, path)
# zone
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneNames(self):
return dbus_to_python(self.fw_config.getZoneNames())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listZones(self):
return dbus_to_python(self.fw_config.listZones())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZone(self, path):
return FirewallClientConfigZone(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneByName(self, name):
path = dbus_to_python(self.fw_config.getZoneByName(name))
return FirewallClientConfigZone(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneOfInterface(self, iface):
return dbus_to_python(self.fw_config.getZoneOfInterface(iface))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneOfSource(self, source):
return dbus_to_python(self.fw_config.getZoneOfSource(source))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addZone(self, name, settings):
if isinstance(settings, FirewallClientZoneSettings):
path = self.fw_config.addZone(name, tuple(settings.settings))
else:
path = self.fw_config.addZone(name, tuple(settings))
return FirewallClientConfigZone(self.bus, path)
# service
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getServiceNames(self):
return dbus_to_python(self.fw_config.getServiceNames())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listServices(self):
return dbus_to_python(self.fw_config.listServices())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getService(self, path):
return FirewallClientConfigService(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getServiceByName(self, name):
path = dbus_to_python(self.fw_config.getServiceByName(name))
return FirewallClientConfigService(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addService(self, name, settings):
if isinstance(settings, FirewallClientServiceSettings):
path = self.fw_config.addService(name, tuple(settings.settings))
else:
path = self.fw_config.addService(name, tuple(settings))
return FirewallClientConfigService(self.bus, path)
# icmptype
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpTypeNames(self):
return dbus_to_python(self.fw_config.getIcmpTypeNames())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listIcmpTypes(self):
return dbus_to_python(self.fw_config.listIcmpTypes())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpType(self, path):
return FirewallClientConfigIcmpType(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpTypeByName(self, name):
path = dbus_to_python(self.fw_config.getIcmpTypeByName(name))
return FirewallClientConfigIcmpType(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIcmpType(self, name, settings):
if isinstance(settings, FirewallClientIcmpTypeSettings):
path = self.fw_config.addIcmpType(name, tuple(settings.settings))
else:
path = self.fw_config.addIcmpType(name, tuple(settings))
return FirewallClientConfigIcmpType(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def policies(self):
return self._policies
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def direct(self):
return self._direct
# helper
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getHelperNames(self):
return dbus_to_python(self.fw_config.getHelperNames())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listHelpers(self):
return dbus_to_python(self.fw_config.listHelpers())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getHelper(self, path):
return FirewallClientConfigHelper(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getHelperByName(self, name):
path = dbus_to_python(self.fw_config.getHelperByName(name))
return FirewallClientConfigHelper(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addHelper(self, name, settings):
if isinstance(settings, FirewallClientHelperSettings):
path = self.fw_config.addHelper(name, tuple(settings.settings))
else:
path = self.fw_config.addHelper(name, tuple(settings))
return FirewallClientConfigHelper(self.bus, path)
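# Usage sketch (illustrative only): FirewallClientConfig is the entry
# point to all permanent configuration and is normally obtained through
# FirewallClient.config() rather than constructed directly.
#
#   fw = FirewallClient()
#   fw_conf = fw.config()
#   for name in fw_conf.getZoneNames():
#       print(name)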
# client
class FirewallClient(object):
@handle_exceptions
def __init__(self, bus=None, wait=0, quiet=True):
if not bus:
dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
try:
self.bus = slip.dbus.SystemBus()
self.bus.default_timeout = None
except Exception:
try:
self.bus = dbus.SystemBus()
except dbus.exceptions.DBusException as e:
raise FirewallError(errors.DBUS_ERROR,
e.get_dbus_message())
                else:
                    # fallback succeeded: plain D-Bus without slip.dbus
                    print("Not using slip.dbus")
else:
self.bus = bus
self.bus.add_signal_receiver(
handler_function=self._dbus_connection_changed,
signal_name="NameOwnerChanged",
dbus_interface="org.freedesktop.DBus",
arg0=config.dbus.DBUS_INTERFACE)
for interface in [ config.dbus.DBUS_INTERFACE,
config.dbus.DBUS_INTERFACE_IPSET,
config.dbus.DBUS_INTERFACE_ZONE,
config.dbus.DBUS_INTERFACE_DIRECT,
config.dbus.DBUS_INTERFACE_POLICIES,
config.dbus.DBUS_INTERFACE_CONFIG,
config.dbus.DBUS_INTERFACE_CONFIG_IPSET,
config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
config.dbus.DBUS_INTERFACE_CONFIG_SERVICE,
config.dbus.DBUS_INTERFACE_CONFIG_HELPER,
config.dbus.DBUS_INTERFACE_CONFIG_DIRECT,
config.dbus.DBUS_INTERFACE_CONFIG_ICMPTYPE,
config.dbus.DBUS_INTERFACE_CONFIG_POLICIES ]:
self.bus.add_signal_receiver(self._signal_receiver,
dbus_interface=interface,
interface_keyword='interface',
member_keyword='member',
path_keyword='path')
# callbacks
self._callback = { }
self._callbacks = {
# client callbacks
"connection-changed": "connection-changed",
"connection-established": "connection-established",
"connection-lost": "connection-lost",
# firewalld callbacks
"log-denied-changed": "LogDeniedChanged",
"default-zone-changed": "DefaultZoneChanged",
"panic-mode-enabled": "PanicModeEnabled",
"panic-mode-disabled": "PanicModeDisabled",
"reloaded": "Reloaded",
"service-added": "ServiceAdded",
"service-removed": "ServiceRemoved",
"port-added": "PortAdded",
"port-removed": "PortRemoved",
"source-port-added": "SourcePortAdded",
"source-port-removed": "SourcePortRemoved",
"protocol-added": "ProtocolAdded",
"protocol-removed": "ProtocolRemoved",
"masquerade-added": "MasqueradeAdded",
"masquerade-removed": "MasqueradeRemoved",
"forward-port-added": "ForwardPortAdded",
"forward-port-removed": "ForwardPortRemoved",
"icmp-block-added": "IcmpBlockAdded",
"icmp-block-removed": "IcmpBlockRemoved",
"icmp-block-inversion-added": "IcmpBlockInversionAdded",
"icmp-block-inversion-removed": "IcmpBlockInversionRemoved",
"richrule-added": "RichRuleAdded",
"richrule-removed": "RichRuleRemoved",
"interface-added": "InterfaceAdded",
"interface-removed": "InterfaceRemoved",
"zone-changed": "ZoneOfInterfaceChanged", # DEPRECATED, use zone-of-interface-changed instead
"zone-of-interface-changed": "ZoneOfInterfaceChanged",
"source-added": "SourceAdded",
"source-removed": "SourceRemoved",
"zone-of-source-changed": "ZoneOfSourceChanged",
# ipset callbacks
"ipset-entry-added": "EntryAdded",
"ipset-entry-removed": "EntryRemoved",
# direct callbacks
"direct:chain-added": "ChainAdded",
"direct:chain-removed": "ChainRemoved",
"direct:rule-added": "RuleAdded",
"direct:rule-removed": "RuleRemoved",
"direct:passthrough-added": "PassthroughAdded",
"direct:passthrough-removed": "PassthroughRemoved",
"config:direct:updated": "config:direct:Updated",
# policy callbacks
"lockdown-enabled": "LockdownEnabled",
"lockdown-disabled": "LockdownDisabled",
"lockdown-whitelist-command-added": "LockdownWhitelistCommandAdded",
"lockdown-whitelist-command-removed": "LockdownWhitelistCommandRemoved",
"lockdown-whitelist-context-added": "LockdownWhitelistContextAdded",
"lockdown-whitelist-context-removed": "LockdownWhitelistContextRemoved",
"lockdown-whitelist-uid-added": "LockdownWhitelistUidAdded",
"lockdown-whitelist-uid-removed": "LockdownWhitelistUidRemoved",
"lockdown-whitelist-user-added": "LockdownWhitelistUserAdded",
"lockdown-whitelist-user-removed": "LockdownWhitelistUserRemoved",
# firewalld.config callbacks
"config:policies:lockdown-whitelist-updated": "config:policies:LockdownWhitelistUpdated",
"config:ipset-added": "config:IPSetAdded",
"config:ipset-updated": "config:IPSetUpdated",
"config:ipset-removed": "config:IPSetRemoved",
"config:ipset-renamed": "config:IPSetRenamed",
"config:zone-added": "config:ZoneAdded",
"config:zone-updated": "config:ZoneUpdated",
"config:zone-removed": "config:ZoneRemoved",
"config:zone-renamed": "config:ZoneRenamed",
"config:service-added": "config:ServiceAdded",
"config:service-updated": "config:ServiceUpdated",
"config:service-removed": "config:ServiceRemoved",
"config:service-renamed": "config:ServiceRenamed",
"config:icmptype-added": "config:IcmpTypeAdded",
"config:icmptype-updated": "config:IcmpTypeUpdated",
"config:icmptype-removed": "config:IcmpTypeRemoved",
"config:icmptype-renamed": "config:IcmpTypeRenamed",
"config:helper-added": "config:HelperAdded",
"config:helper-updated": "config:HelperUpdated",
"config:helper-removed": "config:HelperRemoved",
"config:helper-renamed": "config:HelperRenamed",
}
# initialize variables used for connection
self._init_vars()
self.quiet = quiet
if wait > 0:
            # delay the connection attempt by `wait` seconds
GLib.timeout_add_seconds(wait, self._connection_established)
else:
self._connection_established()
@handle_exceptions
def _init_vars(self):
self.fw = None
self.fw_ipset = None
self.fw_zone = None
self.fw_helper = None
self.fw_direct = None
self.fw_properties = None
self._config = None
self.connected = False
@handle_exceptions
def getExceptionHandler(self):
return exception_handler
@handle_exceptions
def setExceptionHandler(self, handler):
global exception_handler
exception_handler = handler
@handle_exceptions
def getNotAuthorizedLoop(self):
return not_authorized_loop
@handle_exceptions
def setNotAuthorizedLoop(self, enable):
global not_authorized_loop
not_authorized_loop = enable
@handle_exceptions
def connect(self, name, callback, *args):
if name in self._callbacks:
self._callback[self._callbacks[name]] = (callback, args)
else:
raise ValueError("Unknown callback name '%s'" % name)
@handle_exceptions
def _dbus_connection_changed(self, name, old_owner, new_owner):
if name != config.dbus.DBUS_INTERFACE:
return
if new_owner:
# connection established
self._connection_established()
else:
# connection lost
self._connection_lost()
@handle_exceptions
def _connection_established(self):
try:
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE,
config.dbus.DBUS_PATH)
self.fw = dbus.Interface(self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE)
self.fw_ipset = dbus.Interface(
self.dbus_obj, dbus_interface=config.dbus.DBUS_INTERFACE_IPSET)
self.fw_zone = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_ZONE)
self.fw_direct = dbus.Interface(
self.dbus_obj, dbus_interface=config.dbus.DBUS_INTERFACE_DIRECT)
self.fw_policies = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_POLICIES)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
except dbus.exceptions.DBusException as e:
# ignore dbus errors
if not self.quiet:
print ("DBusException", e.get_dbus_message())
return
except Exception as e:
if not self.quiet:
print ("Exception", e)
return
self._config = FirewallClientConfig(self.bus)
self.connected = True
self._signal_receiver(member="connection-established",
interface=config.dbus.DBUS_INTERFACE)
self._signal_receiver(member="connection-changed",
interface=config.dbus.DBUS_INTERFACE)
@handle_exceptions
def _connection_lost(self):
self._init_vars()
self._signal_receiver(member="connection-lost",
interface=config.dbus.DBUS_INTERFACE)
self._signal_receiver(member="connection-changed",
interface=config.dbus.DBUS_INTERFACE)
@handle_exceptions
def _signal_receiver(self, *args, **kwargs):
if "member" not in kwargs or "interface" not in kwargs:
return
signal = kwargs["member"]
interface = kwargs["interface"]
        # config signals need special treatment:
        # prefix the signal name with its config scope
if interface.startswith(config.dbus.DBUS_INTERFACE_CONFIG_ZONE):
signal = "config:Zone" + signal
elif interface.startswith(config.dbus.DBUS_INTERFACE_CONFIG_IPSET):
signal = "config:IPSet" + signal
elif interface.startswith(config.dbus.DBUS_INTERFACE_CONFIG_SERVICE):
signal = "config:Service" + signal
elif interface.startswith(config.dbus.DBUS_INTERFACE_CONFIG_ICMPTYPE):
signal = "config:IcmpType" + signal
elif interface.startswith(config.dbus.DBUS_INTERFACE_CONFIG_HELPER):
signal = "config:Helper" + signal
elif interface == config.dbus.DBUS_INTERFACE_CONFIG:
signal = "config:" + signal
elif interface == config.dbus.DBUS_INTERFACE_CONFIG_POLICIES:
signal = "config:policies:" + signal
elif interface == config.dbus.DBUS_INTERFACE_CONFIG_DIRECT:
signal = "config:direct:" + signal
cb = None
for callback in self._callbacks:
if self._callbacks[callback] == signal and \
self._callbacks[callback] in self._callback:
cb = self._callback[self._callbacks[callback]]
if cb is None:
return
        # invoke the callback with args converted to python types
cb_args = [ dbus_to_python(arg) for arg in args ]
try:
if cb[1]:
# add call data
cb_args.extend(cb[1])
# call back
cb[0](*cb_args)
except Exception as msg:
print(msg)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def config(self):
return self._config
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def reload(self):
self.fw.reload()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def complete_reload(self):
self.fw.completeReload()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def runtimeToPermanent(self):
self.fw.runtimeToPermanent()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def checkPermanentConfig(self):
self.fw.checkPermanentConfig()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE, prop, value)
# panic mode
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def enablePanicMode(self):
self.fw.enablePanicMode()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def disablePanicMode(self):
self.fw.disablePanicMode()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPanicMode(self):
return dbus_to_python(self.fw.queryPanicMode())
# list functions
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneSettings(self, zone):
return FirewallClientZoneSettings(list(dbus_to_python(\
self.fw.getZoneSettings(zone))))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIPSets(self):
return dbus_to_python(self.fw_ipset.getIPSets())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIPSetSettings(self, ipset):
return FirewallClientIPSetSettings(list(dbus_to_python(\
self.fw_ipset.getIPSetSettings(ipset))))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addEntry(self, ipset, entry):
self.fw_ipset.addEntry(ipset, entry)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getEntries(self, ipset):
return self.fw_ipset.getEntries(ipset)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setEntries(self, ipset, entries):
return self.fw_ipset.setEntries(ipset, entries)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeEntry(self, ipset, entry):
self.fw_ipset.removeEntry(ipset, entry)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryEntry(self, ipset, entry):
return dbus_to_python(self.fw_ipset.queryEntry(ipset, entry))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listServices(self):
return dbus_to_python(self.fw.listServices())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getServiceSettings(self, service):
return FirewallClientServiceSettings(list(dbus_to_python(\
self.fw.getServiceSettings(service))))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listIcmpTypes(self):
return dbus_to_python(self.fw.listIcmpTypes())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpTypeSettings(self, icmptype):
return FirewallClientIcmpTypeSettings(list(dbus_to_python(\
self.fw.getIcmpTypeSettings(icmptype))))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getHelpers(self):
return dbus_to_python(self.fw.getHelpers())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getHelperSettings(self, helper):
return FirewallClientHelperSettings(list(dbus_to_python(\
self.fw.getHelperSettings(helper))))
# automatic helper setting
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAutomaticHelpers(self):
return dbus_to_python(self.fw.getAutomaticHelpers())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setAutomaticHelpers(self, value):
self.fw.setAutomaticHelpers(value)
# log denied
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLogDenied(self):
return dbus_to_python(self.fw.getLogDenied())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setLogDenied(self, value):
self.fw.setLogDenied(value)
# default zone
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDefaultZone(self):
return dbus_to_python(self.fw.getDefaultZone())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDefaultZone(self, zone):
self.fw.setDefaultZone(zone)
# zone
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZones(self):
return dbus_to_python(self.fw_zone.getZones())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getActiveZones(self):
return dbus_to_python(self.fw_zone.getActiveZones())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneOfInterface(self, interface):
return dbus_to_python(self.fw_zone.getZoneOfInterface(interface))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneOfSource(self, source):
return dbus_to_python(self.fw_zone.getZoneOfSource(source))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def isImmutable(self, zone):
return dbus_to_python(self.fw_zone.isImmutable(zone))
# interfaces
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addInterface(self, zone, interface):
return dbus_to_python(self.fw_zone.addInterface(zone, interface))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def changeZone(self, zone, interface): # DEPRECATED
return dbus_to_python(self.fw_zone.changeZone(zone, interface))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def changeZoneOfInterface(self, zone, interface):
return dbus_to_python(self.fw_zone.changeZoneOfInterface(zone,
interface))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getInterfaces(self, zone):
return dbus_to_python(self.fw_zone.getInterfaces(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryInterface(self, zone, interface):
return dbus_to_python(self.fw_zone.queryInterface(zone, interface))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeInterface(self, zone, interface):
return dbus_to_python(self.fw_zone.removeInterface(zone, interface))
# sources
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addSource(self, zone, source):
return dbus_to_python(self.fw_zone.addSource(zone, source))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def changeZoneOfSource(self, zone, source):
return dbus_to_python(self.fw_zone.changeZoneOfSource(zone, source))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSources(self, zone):
return dbus_to_python(self.fw_zone.getSources(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def querySource(self, zone, source):
return dbus_to_python(self.fw_zone.querySource(zone, source))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeSource(self, zone, source):
return dbus_to_python(self.fw_zone.removeSource(zone, source))
# rich rules
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addRichRule(self, zone, rule, timeout=0):
return dbus_to_python(self.fw_zone.addRichRule(zone, rule, timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getRichRules(self, zone):
return dbus_to_python(self.fw_zone.getRichRules(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryRichRule(self, zone, rule):
return dbus_to_python(self.fw_zone.queryRichRule(zone, rule))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeRichRule(self, zone, rule):
return dbus_to_python(self.fw_zone.removeRichRule(zone, rule))
# services
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addService(self, zone, service, timeout=0):
return dbus_to_python(self.fw_zone.addService(zone, service, timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getServices(self, zone):
return dbus_to_python(self.fw_zone.getServices(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryService(self, zone, service):
return dbus_to_python(self.fw_zone.queryService(zone, service))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeService(self, zone, service):
return dbus_to_python(self.fw_zone.removeService(zone, service))
# ports
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addPort(self, zone, port, protocol, timeout=0):
return dbus_to_python(self.fw_zone.addPort(zone, port, protocol, timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getPorts(self, zone):
return dbus_to_python(self.fw_zone.getPorts(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPort(self, zone, port, protocol):
return dbus_to_python(self.fw_zone.queryPort(zone, port, protocol))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removePort(self, zone, port, protocol):
return dbus_to_python(self.fw_zone.removePort(zone, port, protocol))
# protocols
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addProtocol(self, zone, protocol, timeout=0):
return dbus_to_python(self.fw_zone.addProtocol(zone, protocol, timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getProtocols(self, zone):
return dbus_to_python(self.fw_zone.getProtocols(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryProtocol(self, zone, protocol):
return dbus_to_python(self.fw_zone.queryProtocol(zone, protocol))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeProtocol(self, zone, protocol):
return dbus_to_python(self.fw_zone.removeProtocol(zone, protocol))
# masquerade
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addMasquerade(self, zone, timeout=0):
return dbus_to_python(self.fw_zone.addMasquerade(zone, timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryMasquerade(self, zone):
return dbus_to_python(self.fw_zone.queryMasquerade(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeMasquerade(self, zone):
return dbus_to_python(self.fw_zone.removeMasquerade(zone))
# forward ports
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addForwardPort(self, zone, port, protocol, toport, toaddr,
timeout=0):
if toport is None:
toport = ""
if toaddr is None:
toaddr = ""
return dbus_to_python(self.fw_zone.addForwardPort(zone, port, protocol,
toport, toaddr,
timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getForwardPorts(self, zone):
return dbus_to_python(self.fw_zone.getForwardPorts(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryForwardPort(self, zone, port, protocol, toport, toaddr):
if toport is None:
toport = ""
if toaddr is None:
toaddr = ""
return dbus_to_python(self.fw_zone.queryForwardPort(zone,
port, protocol,
toport, toaddr))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeForwardPort(self, zone, port, protocol, toport, toaddr):
if toport is None:
toport = ""
if toaddr is None:
toaddr = ""
return dbus_to_python(self.fw_zone.removeForwardPort(zone,
port, protocol,
toport, toaddr))
# source ports
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addSourcePort(self, zone, port, protocol, timeout=0):
return dbus_to_python(self.fw_zone.addSourcePort(zone, port, protocol,
timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSourcePorts(self, zone):
return dbus_to_python(self.fw_zone.getSourcePorts(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def querySourcePort(self, zone, port, protocol):
return dbus_to_python(self.fw_zone.querySourcePort(zone, port, protocol))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeSourcePort(self, zone, port, protocol):
return dbus_to_python(self.fw_zone.removeSourcePort(zone, port,
protocol))
# icmpblock
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIcmpBlock(self, zone, icmp, timeout=0):
return dbus_to_python(self.fw_zone.addIcmpBlock(zone, icmp, timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpBlocks(self, zone):
return dbus_to_python(self.fw_zone.getIcmpBlocks(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryIcmpBlock(self, zone, icmp):
return dbus_to_python(self.fw_zone.queryIcmpBlock(zone, icmp))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeIcmpBlock(self, zone, icmp):
return dbus_to_python(self.fw_zone.removeIcmpBlock(zone, icmp))
# icmp block inversion
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIcmpBlockInversion(self, zone):
return dbus_to_python(self.fw_zone.addIcmpBlockInversion(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryIcmpBlockInversion(self, zone):
return dbus_to_python(self.fw_zone.queryIcmpBlockInversion(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeIcmpBlockInversion(self, zone):
return dbus_to_python(self.fw_zone.removeIcmpBlockInversion(zone))
# direct chain
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addChain(self, ipv, table, chain):
self.fw_direct.addChain(ipv, table, chain)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeChain(self, ipv, table, chain):
self.fw_direct.removeChain(ipv, table, chain)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryChain(self, ipv, table, chain):
return dbus_to_python(self.fw_direct.queryChain(ipv, table, chain))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getChains(self, ipv, table):
return dbus_to_python(self.fw_direct.getChains(ipv, table))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAllChains(self):
return dbus_to_python(self.fw_direct.getAllChains())
# direct rule
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addRule(self, ipv, table, chain, priority, args):
self.fw_direct.addRule(ipv, table, chain, priority, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeRule(self, ipv, table, chain, priority, args):
self.fw_direct.removeRule(ipv, table, chain, priority, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeRules(self, ipv, table, chain):
self.fw_direct.removeRules(ipv, table, chain)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryRule(self, ipv, table, chain, priority, args):
return dbus_to_python(self.fw_direct.queryRule(ipv, table, chain, priority, args))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getRules(self, ipv, table, chain):
return dbus_to_python(self.fw_direct.getRules(ipv, table, chain))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAllRules(self):
return dbus_to_python(self.fw_direct.getAllRules())
# direct passthrough
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def passthrough(self, ipv, args):
return dbus_to_python(self.fw_direct.passthrough(ipv, args))
# tracked passthrough
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAllPassthroughs(self):
return dbus_to_python(self.fw_direct.getAllPassthroughs())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeAllPassthroughs(self):
self.fw_direct.removeAllPassthroughs()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getPassthroughs(self, ipv):
return dbus_to_python(self.fw_direct.getPassthroughs(ipv))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addPassthrough(self, ipv, args):
self.fw_direct.addPassthrough(ipv, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removePassthrough(self, ipv, args):
self.fw_direct.removePassthrough(ipv, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPassthrough(self, ipv, args):
return dbus_to_python(self.fw_direct.queryPassthrough(ipv, args))
# lockdown
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def enableLockdown(self):
self.fw_policies.enableLockdown()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def disableLockdown(self):
self.fw_policies.disableLockdown()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdown(self):
return dbus_to_python(self.fw_policies.queryLockdown())
# policies
# lockdown white list commands
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistCommand(self, command):
self.fw_policies.addLockdownWhitelistCommand(command)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistCommands(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistCommands())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistCommand(self, command):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistCommand(command))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistCommand(self, command):
self.fw_policies.removeLockdownWhitelistCommand(command)
# lockdown white list contexts
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistContext(self, context):
self.fw_policies.addLockdownWhitelistContext(context)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistContexts(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistContexts())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistContext(self, context):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistContext(context))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistContext(self, context):
self.fw_policies.removeLockdownWhitelistContext(context)
# lockdown white list uids
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistUid(self, uid):
self.fw_policies.addLockdownWhitelistUid(uid)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistUids(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistUids())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistUid(self, uid):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistUid(uid))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistUid(self, uid):
self.fw_policies.removeLockdownWhitelistUid(uid)
# lockdown white list users
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistUser(self, user):
self.fw_policies.addLockdownWhitelistUser(user)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistUsers(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistUsers())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistUser(self, user):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistUser(user))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistUser(self, user):
self.fw_policies.removeLockdownWhitelistUser(user)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def authorizeAll(self):
""" Authorize once for all polkit actions. """
self.fw.authorizeAll()
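# Usage sketch (illustrative only): runtime operations through the
# client. FirewallClient() connects to the system bus immediately, or
# after `wait` seconds when wait > 0.
#
#   fw = FirewallClient()
#   if fw.connected:
#       print(fw.getDefaultZone())
#       fw.addService("public", "https")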
|
import LMRt
import os
import numpy as np
import pandas as pd
import xarray as xr
# preprocessing
print("\n======== Preprocessing ========\n")
config = 'configs.yml'
recon_iterations = 1
figure_type = 'graph'
job = LMRt.ReconJob()
job.load_configs(config, verbose=True)
job.load_proxydb(verbose=True)
job.filter_proxydb(verbose=True)
job.seasonalize_proxydb(verbose=True)
job.load_prior(verbose=True)
job.load_obs(verbose=True)
job_dirpath = job.configs['job_dirpath']
seasonalized_prior_path = os.path.join(job_dirpath, 'seasonalized_prior.pkl')
seasonalized_obs_path = os.path.join(job_dirpath, 'seasonalized_obs.pkl')
prior_loc_path = os.path.join(job_dirpath, 'prior_loc.pkl')
obs_loc_path = os.path.join(job_dirpath, 'obs_loc.pkl')
calibed_psm_path = os.path.join(job_dirpath, 'calibed_psm.pkl')
job.calibrate_psm(
seasonalized_prior_path=seasonalized_prior_path,
seasonalized_obs_path=seasonalized_obs_path,
prior_loc_path=prior_loc_path,
obs_loc_path=obs_loc_path,
calibed_psm_path=calibed_psm_path,
verbose=True,
)
job.forward_psm(verbose=True)
job.seasonalize_prior(verbose=True)
job.regrid_prior(verbose=True)
job.save()
print("\n======== Data Assimilation ========\n")
# Data assimilation
job.run(recon_seeds=np.arange(recon_iterations), verbose=True)
print("\n======== Preview of results ========\n")
# Preview of Results
# create the res object for reconstruction results
res = LMRt.ReconRes(job.configs['job_dirpath'], verbose=True)
# get the variables from the recon_paths
res.get_vars(['tas', 'nino3.4'], verbose=True)
if(figure_type == 'map'):
# plot the tas field
fig, ax = res.vars['tas'].field_list[0].plot()
fig.savefig("./map.png")
elif(figure_type == 'graph'):
# plot and validate the NINO3.4
from scipy.io import loadmat
data = loadmat('./data/obs/NINO34_BC09.mat')
syr, eyr = 1873, 2000
nyr = eyr-syr+1
nino34 = np.zeros(nyr)
for i in range(nyr):
nino34[i] = np.mean(data['nino34'][i*12:12+i*12])
target_series = LMRt.Series(time=np.arange(syr, eyr+1), value=nino34, label='BC09')
fig, ax = res.vars['nino3.4'].validate(target_series, verbose=True).plot(xlim=[1880, 2000])
fig.savefig("./graph.png")
else:
print("not a valid figure parameter \n")
|
[
"scipy.io.loadmat",
"LMRt.ReconJob",
"numpy.zeros",
"numpy.mean",
"numpy.arange",
"LMRt.ReconRes",
"os.path.join"
] |
[((213, 228), 'LMRt.ReconJob', 'LMRt.ReconJob', ([], {}), '()\n', (226, 228), False, 'import LMRt\n'), ((494, 545), 'os.path.join', 'os.path.join', (['job_dirpath', '"""seasonalized_prior.pkl"""'], {}), "(job_dirpath, 'seasonalized_prior.pkl')\n", (506, 545), False, 'import os\n'), ((570, 619), 'os.path.join', 'os.path.join', (['job_dirpath', '"""seasonalized_obs.pkl"""'], {}), "(job_dirpath, 'seasonalized_obs.pkl')\n", (582, 619), False, 'import os\n'), ((637, 679), 'os.path.join', 'os.path.join', (['job_dirpath', '"""prior_loc.pkl"""'], {}), "(job_dirpath, 'prior_loc.pkl')\n", (649, 679), False, 'import os\n'), ((695, 735), 'os.path.join', 'os.path.join', (['job_dirpath', '"""obs_loc.pkl"""'], {}), "(job_dirpath, 'obs_loc.pkl')\n", (707, 735), False, 'import os\n'), ((755, 799), 'os.path.join', 'os.path.join', (['job_dirpath', '"""calibed_psm.pkl"""'], {}), "(job_dirpath, 'calibed_psm.pkl')\n", (767, 799), False, 'import os\n'), ((1418, 1473), 'LMRt.ReconRes', 'LMRt.ReconRes', (["job.configs['job_dirpath']"], {'verbose': '(True)'}), "(job.configs['job_dirpath'], verbose=True)\n", (1431, 1473), False, 'import LMRt\n'), ((1245, 1272), 'numpy.arange', 'np.arange', (['recon_iterations'], {}), '(recon_iterations)\n', (1254, 1272), True, 'import numpy as np\n'), ((1806, 1843), 'scipy.io.loadmat', 'loadmat', (['"""./data/obs/NINO34_BC09.mat"""'], {}), "('./data/obs/NINO34_BC09.mat')\n", (1813, 1843), False, 'from scipy.io import loadmat\n'), ((1903, 1916), 'numpy.zeros', 'np.zeros', (['nyr'], {}), '(nyr)\n', (1911, 1916), True, 'import numpy as np\n'), ((1962, 2005), 'numpy.mean', 'np.mean', (["data['nino34'][i * 12:12 + i * 12]"], {}), "(data['nino34'][i * 12:12 + i * 12])\n", (1969, 2005), True, 'import numpy as np\n'), ((2038, 2061), 'numpy.arange', 'np.arange', (['syr', '(eyr + 1)'], {}), '(syr, eyr + 1)\n', (2047, 2061), True, 'import numpy as np\n')]
|
"""
Units models
"""
import logging
from datetime import date
import pint.systems
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext as _
from geocurrency.converters.models import BaseConverter, ConverterResult, \
ConverterResultDetail, ConverterResultError, ConverterLoadError
from . import UNIT_EXTENDED_DEFINITION, DIMENSIONS, \
UNIT_SYSTEM_BASE_AND_DERIVED_UNITS, \
ADDITIONAL_BASE_UNITS, PREFIX_SYMBOL
from .exceptions import UnitConverterInitError, DimensionNotFound, \
UnitSystemNotFound, UnitNotFound, \
UnitDuplicateError, UnitDimensionError, \
UnitValueError
from .settings import ADDITIONAL_UNITS, PREFIXED_UNITS_DISPLAY
class Quantity:
"""
Quantity class
"""
system = None
unit = None
value = 0
date_obj = None
def __init__(self, system: str, unit: str,
value: float, date_obj: date = None):
"""
Initialize quantity on unit system
"""
self.system = system
self.unit = unit
self.value = value
self.date_obj = date_obj
def __repr__(self):
"""
Look beautiful
"""
return f'{self.value} {self.unit} ({self.system})'
class Unit:
"""
Unit mock for hinting
"""
pass
class UnitSystem:
"""
Pint UnitRegistry wrapper
"""
ureg = None
system_name = None
system = None
_additional_units = set()
def __init__(self, system_name: str = 'SI',
fmt_locale: str = 'en', user: User = None,
key: str = None):
"""
Initialize UnitSystem from name and user / key
information for loading custom units
"""
found = False
for available_system in UnitSystem.available_systems():
if system_name.lower() == available_system.lower():
system_name = available_system
found = True
if not found:
raise UnitSystemNotFound("Invalid unit system")
self.system_name = system_name
try:
additional_units_settings = settings.GEOCURRENCY_ADDITIONAL_UNITS
except AttributeError:
additional_units_settings = ADDITIONAL_UNITS
try:
self.ureg = pint.UnitRegistry(
system=system_name,
fmt_locale=fmt_locale)
self.system = getattr(self.ureg.sys, system_name)
self._load_additional_units(units=ADDITIONAL_BASE_UNITS)
self._load_additional_units(units=additional_units_settings)
if user:
self._load_custom_units(user=user, key=key)
self._rebuild_cache()
except (FileNotFoundError, AttributeError):
raise UnitSystemNotFound("Invalid unit system")
def _rebuild_cache(self):
"""
Rebuild registry cache
It should be in the define method of the registry
"""
self.ureg._build_cache()
def _load_additional_units(
self, units: dict,
redefine: bool = False) -> bool:
"""
Load additional base units in registry
"""
available_units = self.available_unit_names()
if self.system_name not in units:
logging.warning(f"error loading additional units "
f"for {self.system_name}")
return False
added_units = []
for key, items in units[self.system_name].items():
if key not in available_units:
self.ureg.define(
f"{key} = {items['relation']} = {items['symbol']}")
added_units.append(key)
elif redefine:
self.ureg.redefine(
f"{key} = {items['relation']} = {items['symbol']}")
self._additional_units = self._additional_units | set(added_units)
return True
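    # Illustrative shape of the `units` mapping consumed above, inferred from
    # how it is read ('relation' and 'symbol' keys per unit); the example unit
    # itself is an editor's assumption, not a value from ADDITIONAL_BASE_UNITS:
    # {'SI': {'furlong': {'relation': '201.168 meter', 'symbol': 'fur'}}}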
def _load_custom_units(
self,
user: User,
key: str = None,
redefine: bool = False) -> bool:
"""
Load custom units in registry
"""
if user and user.is_authenticated:
if user.is_superuser:
qs = CustomUnit.objects.all()
else:
qs = CustomUnit.objects.filter(user=user)
if key:
qs = qs.filter(key=key)
else:
qs = CustomUnit.objects.filter(pk=-1)
qs = qs.filter(unit_system=self.system_name)
available_units = self.available_unit_names()
added_units = []
for cu in qs:
props = [cu.code, cu.relation]
if cu.symbol:
props.append(cu.symbol)
if cu.alias:
props.append(cu.alias)
definition = " = ".join(props)
if cu.code not in available_units:
self.ureg.define(definition)
added_units.append(cu.code)
elif redefine:
self.ureg.redefine(definition)
else:
logging.error(f"{cu.code} already defined in registry")
self._additional_units = self._additional_units | set(added_units)
return True
def _test_additional_units(self, units: dict) -> bool:
"""
Load and check dimensionality of ADDITIONAL_BASE_UNITS values
"""
if self.system_name not in units:
return False
for key in units[self.system_name].keys():
try:
self.unit(key).dimensionality and True
except pint.errors.UndefinedUnitError:
return False
return True
def add_definition(self, code, relation, symbol, alias):
"""
Add a new unit definition to a UnitSystem, and rebuild cache
:param code: code of the unit
:param relation: relation to other units (e.g.: 3 kg/m)
:param symbol: short unit representation
:param alias: other name for unit
"""
self.ureg.define(f"{code} = {relation} = {symbol} = {alias}")
self._rebuild_cache()
@classmethod
def available_systems(cls) -> [str]:
"""
List of available Unit Systems
:return: Array of string
"""
ureg = pint.UnitRegistry(system='SI')
return dir(ureg.sys)
@classmethod
def is_valid(cls, system: str) -> bool:
"""
Check validity of the UnitSystem
:param system: name of the unit system
"""
us = cls()
return system in us.available_systems()
def current_system(self) -> pint.UnitRegistry:
"""
Return current pint.UnitRegistry
"""
return self.ureg
def unit(self, unit_name):
"""
Create a Object in the UnitSystem
:param unit_name: name of the unit in the unit system
"""
return Unit(unit_system=self, code=unit_name)
def available_unit_names(self) -> [str]:
"""
List of available units for a given Unit system
        :return: Array of unit names available in this unit system
"""
try:
prefixed_units_display = \
settings.GEOCURRENCY_PREFIXED_UNITS_DISPLAY
except AttributeError:
prefixed_units_display = PREFIXED_UNITS_DISPLAY
prefixed_units = []
for key, prefixes in prefixed_units_display.items():
for prefix in prefixes:
prefixed_units.append(prefix + key)
return sorted(prefixed_units +
dir(getattr(self.ureg.sys, self.system_name))
+ list(self._additional_units))
def unit_dimensionality(self, unit: str) -> str:
"""
User friendly representation of the dimension
:param unit: name of the unit to display
:return: Human readable dimension
"""
return Unit.dimensionality_string(
unit_system=self.system,
unit_str=unit)
def available_dimensions(self, ordering: str = 'name') -> {}:
"""
Return available dimensions for the UnitSystem
:param ordering: sort result by attribute
"""
descending = False
if ordering and ordering[0] == '-':
ordering = ordering[1:]
descending = True
if ordering not in ['code', 'name', 'dimension']:
ordering = 'name'
return sorted([Dimension(unit_system=self, code=dim)
for dim in DIMENSIONS.keys()],
key=lambda x: getattr(x, ordering, ''),
reverse=descending)
@property
def _ureg_dimensions(self):
"""
return dimensions with units
"""
dimensions = []
for dim in self.ureg._dimensions:
try:
if not self.ureg.get_compatible_units(dim):
continue
dimensions.append(dim)
except KeyError:
continue
return dimensions
def _get_dimension_dimensionality(self, dimension: str) -> {}:
"""
Return the dimensionality of a dimension
based on the first compatible unit
"""
try:
for dim in self.ureg.get_compatible_units(dimension):
return self.ureg.get_base_units(dim)[1]
except KeyError:
return {}
def _generate_dimension_delta_dictionnary(self) -> {}:
"""
Generate the dict to put in DIMENSIONS
"""
output = {}
for dim in self._ureg_dimensions:
if dim not in DIMENSIONS:
output[dim] = {
'name': f'_({dim})',
'dimension': str(self._get_dimension_dimensionality(dim)),
'symbol': ''
}
return output
def units_per_dimension(self, dimensions: [str]) -> {}:
"""
Return units grouped by dimension
:param dimensions: restrict list of dimensions
"""
output = {}
registry_dimensions = dimensions or DIMENSIONS.keys()
for dim in registry_dimensions:
Dimension(unit_system=self, code=dim)
try:
units = self.ureg.get_compatible_units(dim)
if units:
output[dim] = units
except KeyError:
continue
return output
def units_per_dimensionality(self) -> {}:
"""
List of units per dimension
:return: dict of dimensions, with lists of unit strings
"""
units_array = self.available_unit_names()
output = {}
for unit_str in units_array:
dimension = Unit.dimensionality_string(self, unit_str)
try:
output[dimension].append(unit_str)
except KeyError:
output[dimension] = [unit_str]
return output
@property
def dimensionalities(self) -> [str]:
"""
List of dimensions available in the Unit system
:return: list of dimensions for Unit system
"""
return set([Unit.dimensionality_string(self, unit_str)
for unit_str in dir(self.system)])
class Dimension:
"""
    Dimension of a Unit
"""
unit_system = None
code = None
name = None
dimension = None
def __init__(self, unit_system: UnitSystem, code: str):
"""
Initialize a Dimension in a UnitSystem
"""
try:
dimension = DIMENSIONS[code]
self.unit_system = unit_system
self.code = code
self.name = dimension['name']
self.dimension = dimension['dimension']
except (ValueError, KeyError) as e:
logging.warning(str(e))
self.code = None
if not self.code:
raise DimensionNotFound
def __repr__(self):
"""
Look beautiful
"""
return self.code
def _prefixed_units(self, unit_names):
"""
Add prefixed units to list of units
:param unit_names: list of unit names
"""
unit_list = []
try:
prefixed_units_display = \
settings.GEOCURRENCY_PREFIXED_UNITS_DISPLAY
except AttributeError:
prefixed_units_display = PREFIXED_UNITS_DISPLAY
for unit, prefixes in prefixed_units_display.items():
if unit in unit_names:
for prefix in prefixes:
unit_list.append(
self.unit_system.unit(unit_name=prefix + unit))
return unit_list
def units(self, user=None, key=None) -> [Unit]:
"""
List of units for this dimension
:param user: optional user for custom units
:param key: optional key for custom units
"""
if self.code == '[compounded]':
return self._compounded_units
if self.code == '[custom]':
return self._custom_units(user=user, key=key)
unit_list = []
try:
unit_list.append(
self.unit_system.unit(
UNIT_SYSTEM_BASE_AND_DERIVED_UNITS[
self.unit_system.system_name][self.code]
)
)
except (KeyError, UnitNotFound):
logging.warning(f"unable to find base unit for"
f"unit system {self.unit_system.system_name}"
f" and dimension {self.code}")
try:
unit_list.extend(
[
Unit(unit_system=self.unit_system, pint_unit=unit)
for unit in
self.unit_system.ureg.get_compatible_units(self.code)
])
except KeyError:
logging.warning(f"Cannot find compatible units "
f"for this dimension {self.code}")
unit_names = [str(u) for u in unit_list]
        unit_list.extend(self._prefixed_units(unit_names))
return set(sorted(unit_list, key=lambda x: x.name))
@property
def _compounded_units(self):
"""
List units that do not belong to a dimension
"""
available_units = self.unit_system.available_unit_names()
dimensioned_units = []
for dimension_code in [d for d in DIMENSIONS.keys() if
d != '[compounded]' and d != '[custom]']:
dimension = Dimension(
unit_system=self.unit_system,
code=dimension_code)
dimensioned_units.extend([u.code for u in dimension.units()])
return [self.unit_system.unit(au)
for au in set(available_units) - set(dimensioned_units)]
def _custom_units(self, user: User, key: str = None) -> [Unit]:
"""
Return list of custom units
:param user: User owning the units
:param key: optional unit key
"""
if user and user.is_authenticated:
if user.is_superuser:
custom_units = CustomUnit.objects.all()
else:
custom_units = CustomUnit.objects.filter(user=user)
if key:
custom_units = custom_units.filter(key=key)
return [self.unit_system.unit(cu.code) for cu in custom_units]
else:
return []
@property
def base_unit(self):
"""
Base unit for this dimension in this Unit System
"""
try:
return UNIT_SYSTEM_BASE_AND_DERIVED_UNITS[
self.unit_system.system_name][self.code]
except KeyError:
logging.warning(
f'dimension {self.dimension} is not part of '
f'unit system {self.unit_system.system_name}')
return None
class Unit:
"""
Pint Unit wrapper
"""
unit_system = None
code = None
unit = None
def __init__(
self,
unit_system: UnitSystem,
code: str = '',
pint_unit: pint.Unit = None):
"""
Initialize a Unit in a UnitSystem
:param unit_system: UnitSystem instance
:param code: code of the pint.Unit
"""
self.unit_system = unit_system
if pint_unit and isinstance(pint_unit, pint.Unit):
self.code = str(pint_unit)
self.unit = pint_unit
elif code:
self.code = code
try:
self.unit = getattr(unit_system.system, code)
except pint.errors.UndefinedUnitError:
raise UnitNotFound("invalid unit for system")
else:
raise UnitNotFound("invalid unit for system")
def __repr__(self):
return self.code
@classmethod
def is_valid(cls, name: str) -> bool:
"""
Check the validity of a unit in a UnitSystem
"""
try:
us_si = UnitSystem(system_name='SI')
except UnitSystemNotFound:
return False
try:
return us_si.unit(unit_name=name) and True
        except (pint.errors.UndefinedUnitError, UnitNotFound):
return False
@property
def name(self) -> str:
"""
Return name of the unit from table of units
"""
return self.unit_name(self.code)
@property
def symbol(self) -> str:
"""
Return symbol for Unit
"""
return self.unit_symbol(self.code)
@property
def dimensions(self) -> [Dimension]:
"""
Return Dimensions of Unit
"""
dimensions = [
Dimension(unit_system=self.unit_system, code=code) for code in
DIMENSIONS.keys()
if DIMENSIONS[code]['dimension'] == str(self.dimensionality)]
return dimensions or '[compounded]'
@staticmethod
def base_unit(unit_str: str) -> (str, str):
"""
Get base unit in case the unit is a prefixed unit
:param unit_str: name of unit to check
:return: base unit name, prefix
"""
prefix = ''
base_str = unit_str
try:
prefixed_units_display = \
settings.GEOCURRENCY_PREFIXED_UNITS_DISPLAY
except AttributeError:
prefixed_units_display = PREFIXED_UNITS_DISPLAY
for base, prefixes in prefixed_units_display.items():
for _prefix in prefixes:
if unit_str == _prefix + base:
prefix = _prefix
base_str = base
return base_str, prefix
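    # Example (editor's illustration, assuming PREFIXED_UNITS_DISPLAY maps
    # 'meter' to a list of prefixes containing 'kilo'):
    #   Unit.base_unit('kilometer') -> ('meter', 'kilo')
    #   Unit.base_unit('second')    -> ('second', '')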
@staticmethod
def unit_name(unit_str: str) -> str:
"""
Get translated name from unit string
:param unit_str: Name of unit
"""
base_str, prefix = Unit.base_unit(unit_str=unit_str)
try:
ext_unit = UNIT_EXTENDED_DEFINITION.get(base_str)
return prefix + str(ext_unit['name'])
except (KeyError, TypeError):
logging.error(f'No UNIT_EXTENDED_DEFINITION for unit {base_str}')
return unit_str
@staticmethod
def unit_symbol(unit_str: str) -> str:
"""
Static function to get symbol from unit string
:param unit_str: Name of unit
"""
base_str, prefix = Unit.base_unit(unit_str=unit_str)
try:
prefix_symbol = PREFIX_SYMBOL[prefix]
ext_unit = UNIT_EXTENDED_DEFINITION.get(base_str)
return prefix_symbol + ext_unit['symbol']
except (KeyError, TypeError):
logging.error(f'No UNIT_EXTENDED_DEFINITION for unit {base_str}')
return ''
@staticmethod
def dimensionality_string(unit_system: UnitSystem, unit_str: str) -> str:
"""
Converts pint dimensionality string to human readable string
:param unit_system: UnitSystem
:param unit_str: Unit name
:return: str
"""
ds = str(getattr(
unit_system.ureg, unit_str
).dimensionality).replace('[', '').replace(']', '')
ds = ds.replace(' ** ', '^')
ds = ds.split()
return ' '.join([_(d) for d in ds])
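    # Example (editor's illustration): a pint dimensionality such as
    # '[length] / [time] ** 2' becomes 'length / time^2', each token being
    # passed through ugettext for translation.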
@property
def dimensionality(self):
"""
Return dimensionality of a unit in Pint universe
"""
try:
return self.unit_system.ureg.get_base_units(self.code)[1]
except KeyError:
return ''
@staticmethod
def translated_name(unit_system: UnitSystem, unit_str: str) -> str:
"""
Translated name of the unit
"""
try:
return '{}'.format(unit_system.ureg[unit_str])
except KeyError:
return unit_str
@property
def readable_dimension(self):
"""
Wrapper around Unit.dimensionality_string
"""
return Unit.dimensionality_string(
unit_system=self.unit_system,
unit_str=self.code)
class UnitConverter(BaseConverter):
"""
Conversion between units
"""
base_system = None
base_unit = None
user = None
key = None
def __init__(
self,
base_system: str,
base_unit: str,
user: User = None,
            key: str = None,
id: str = None):
"""
Initialize the converter. It converts a payload into a destination unit
"""
try:
super().__init__(id=id)
self.base_system = base_system
self.base_unit = base_unit
self.user = user
self.key = key
self.system = UnitSystem(
system_name=base_system,
user=user,
key=key)
self.unit = Unit(
unit_system=self.system,
code=base_unit)
except (UnitSystemNotFound, UnitNotFound):
raise UnitConverterInitError
def add_data(self, data: []) -> []:
"""
Check data and add it to the dataset
Return list of errors
"""
errors = super().add_data(data)
return errors
def check_data(self, data):
"""
Validates that the data contains
system = str
unit = str
value = float
date_obj ('YYYY-MM-DD')
"""
from .serializers import QuantitySerializer
errors = []
for line in data:
serializer = QuantitySerializer(data=line)
if serializer.is_valid():
self.data.append(serializer.create(serializer.validated_data))
else:
errors.append(serializer.errors)
return errors
@classmethod
def load(cls,
id: str,
user: User = None,
key: str = None) -> BaseConverter:
"""
Load converter from ID
"""
try:
uc = super().load(id)
uc.system = UnitSystem(
system_name=uc.base_system,
user=user,
key=key)
uc.unit = Unit(unit_system=uc.system, code=uc.base_unit)
return uc
except (UnitSystemNotFound, UnitNotFound, KeyError) as e:
raise ConverterLoadError from e
def save(self):
"""
Save the converter to cache
"""
system = self.system
unit = self.unit
self.system = None
self.unit = None
super().save()
self.system = system
self.unit = unit
def convert(self) -> ConverterResult:
"""
Converts data to base unit in base system
"""
result = ConverterResult(id=self.id, target=self.base_unit)
q_ = self.system.ureg.Quantity
for quantity in self.data:
try:
pint_quantity = q_(quantity.value, quantity.unit)
out = pint_quantity.to(self.base_unit)
result.increment_sum(out.magnitude)
detail = ConverterResultDetail(
unit=quantity.unit,
original_value=quantity.value,
date=quantity.date_obj,
conversion_rate=0,
converted_value=out.magnitude
)
result.detail.append(detail)
except pint.UndefinedUnitError:
error = ConverterResultError(
unit=quantity.unit,
original_value=quantity.value,
date=quantity.date_obj,
error=_('Undefined unit in the registry')
)
result.errors.append(error)
except pint.DimensionalityError:
error = ConverterResultError(
unit=quantity.unit,
original_value=quantity.value,
date=quantity.date_obj,
error=_('Dimensionality error, incompatible units')
)
result.errors.append(error)
self.end_batch(result.end_batch())
return result
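# Usage sketch (editor's illustration; the payload field names follow the
# QuantitySerializer contract described in check_data above, and the exact
# attributes of ConverterResult are assumptions). Kept as comments because
# this module is imported by Django:
# converter = UnitConverter(base_system='SI', base_unit='meter')
# converter.add_data([{'system': 'SI', 'unit': 'mile', 'value': 2.0}])
# result = converter.convert()  # result.detail lists each converted value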
class UnitConversionPayload:
"""
Unit conversion payload
"""
data = None
base_system = ''
base_unit = ''
key = ''
batch_id = ''
eob = False
def __init__(self,
base_system: UnitSystem,
base_unit: Unit,
data=None,
key: str = None,
batch_id: str = None,
eob: bool = False):
"""
Initialize conversion payload
"""
self.data = data
self.base_system = base_system
self.base_unit = base_unit
self.key = key
self.batch_id = batch_id
self.eob = eob
class CustomUnit(models.Model):
"""
Additional unit for a user
"""
AVAILABLE_SYSTEMS = (
('Planck', 'Planck'),
('SI', 'SI'),
('US', 'US'),
('atomic', 'atomic'),
('cgs', 'CGS'),
('imperial', 'imperial'),
('mks', 'mks'),
)
user = models.ForeignKey(
User,
related_name='units',
on_delete=models.PROTECT)
key = models.CharField(
"Categorization field (e.g.: customer ID)",
max_length=255, default=None, db_index=True, null=True, blank=True)
unit_system = models.CharField(
"Unit system to register the unit in", max_length=20,
choices=AVAILABLE_SYSTEMS)
code = models.SlugField("technical name of the unit (e.g.: myUnit)")
name = models.CharField(
"Human readable name (e.g.: My Unit)",
max_length=255)
relation = models.CharField(
"Relation to an existing unit (e.g.: 12 kg*m/s)", max_length=255)
symbol = models.CharField(
"Symbol to use in a formula (e.g.: myu)",
max_length=20, blank=True, null=True)
alias = models.CharField(
"Other code for this unit (e.g.: mybu)",
max_length=20, null=True, blank=True)
class Meta:
"""
Meta
"""
unique_together = ('user', 'key', 'code')
ordering = ['name', 'code']
def save(self, *args, **kwargs):
"""
Save custom unit to database
"""
us = UnitSystem(system_name=self.unit_system)
        self.code = self.code.replace('-', '_')
        if self.symbol:
            self.symbol = self.symbol.replace('-', '_')
        if self.alias:
            self.alias = self.alias.replace('-', '_')
if self.code in us.available_unit_names():
raise UnitDuplicateError
try:
us.add_definition(
code=self.code,
relation=self.relation,
symbol=self.symbol,
alias=self.alias)
except ValueError as e:
raise UnitValueError(str(e)) from e
try:
us.unit(self.code).unit.dimensionality
except pint.errors.UndefinedUnitError:
raise UnitDimensionError
return super(CustomUnit, self).save(*args, **kwargs)
|
[
"logging.error",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"logging.warning",
"django.db.models.SlugField",
"geocurrency.converters.models.ConverterResultDetail",
"geocurrency.converters.models.ConverterResult",
"django.utils.translation.ugettext"
] |
[((26040, 26111), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'related_name': '"""units"""', 'on_delete': 'models.PROTECT'}), "(User, related_name='units', on_delete=models.PROTECT)\n", (26057, 26111), False, 'from django.db import models\n'), ((26147, 26279), 'django.db.models.CharField', 'models.CharField', (['"""Categorization field (e.g.: customer ID)"""'], {'max_length': '(255)', 'default': 'None', 'db_index': '(True)', 'null': '(True)', 'blank': '(True)'}), "('Categorization field (e.g.: customer ID)', max_length=255,\n default=None, db_index=True, null=True, blank=True)\n", (26163, 26279), False, 'from django.db import models\n'), ((26311, 26412), 'django.db.models.CharField', 'models.CharField', (['"""Unit system to register the unit in"""'], {'max_length': '(20)', 'choices': 'AVAILABLE_SYSTEMS'}), "('Unit system to register the unit in', max_length=20,\n choices=AVAILABLE_SYSTEMS)\n", (26327, 26412), False, 'from django.db import models\n'), ((26437, 26498), 'django.db.models.SlugField', 'models.SlugField', (['"""technical name of the unit (e.g.: myUnit)"""'], {}), "('technical name of the unit (e.g.: myUnit)')\n", (26453, 26498), False, 'from django.db import models\n'), ((26510, 26581), 'django.db.models.CharField', 'models.CharField', (['"""Human readable name (e.g.: My Unit)"""'], {'max_length': '(255)'}), "('Human readable name (e.g.: My Unit)', max_length=255)\n", (26526, 26581), False, 'from django.db import models\n'), ((26614, 26700), 'django.db.models.CharField', 'models.CharField', (['"""Relation to an existing unit (e.g.: 12 kg*m/s)"""'], {'max_length': '(255)'}), "('Relation to an existing unit (e.g.: 12 kg*m/s)',\n max_length=255)\n", (26630, 26700), False, 'from django.db import models\n'), ((26719, 26819), 'django.db.models.CharField', 'models.CharField', (['"""Symbol to use in a formula (e.g.: myu)"""'], {'max_length': '(20)', 'blank': '(True)', 'null': '(True)'}), "('Symbol to use in a formula (e.g.: myu)', max_length=20,\n blank=True, null=True)\n", (26735, 26819), False, 'from django.db import models\n'), ((26845, 26944), 'django.db.models.CharField', 'models.CharField', (['"""Other code for this unit (e.g.: mybu)"""'], {'max_length': '(20)', 'null': '(True)', 'blank': '(True)'}), "('Other code for this unit (e.g.: mybu)', max_length=20,\n null=True, blank=True)\n", (26861, 26944), False, 'from django.db import models\n'), ((23650, 23700), 'geocurrency.converters.models.ConverterResult', 'ConverterResult', ([], {'id': 'self.id', 'target': 'self.base_unit'}), '(id=self.id, target=self.base_unit)\n', (23665, 23700), False, 'from geocurrency.converters.models import BaseConverter, ConverterResult, ConverterResultDetail, ConverterResultError, ConverterLoadError\n'), ((3335, 3408), 'logging.warning', 'logging.warning', (['f"""error loading additional units for {self.system_name}"""'], {}), "(f'error loading additional units for {self.system_name}')\n", (3350, 3408), False, 'import logging\n'), ((13396, 13522), 'logging.warning', 'logging.warning', (['f"""unable to find base unit forunit system {self.unit_system.system_name} and dimension {self.code}"""'], {}), "(\n f'unable to find base unit forunit system {self.unit_system.system_name} and dimension {self.code}'\n )\n", (13411, 13522), False, 'import logging\n'), ((13871, 13950), 'logging.warning', 'logging.warning', (['f"""Cannot find compatible units for this dimension {self.code}"""'], {}), "(f'Cannot find compatible units for this dimension {self.code}')\n", (13886, 13950), False, 'import logging\n'), ((15720, 15834), 'logging.warning', 'logging.warning', (['f"""dimension {self.dimension} is not part of unit system {self.unit_system.system_name}"""'], {}), "(\n f'dimension {self.dimension} is not part of unit system {self.unit_system.system_name}'\n )\n", (15735, 15834), False, 'import logging\n'), ((19028, 19093), 'logging.error', 'logging.error', (['f"""No UNIT_EXTENDED_DEFINITION for unit {base_str}"""'], {}), "(f'No UNIT_EXTENDED_DEFINITION for unit {base_str}')\n", (19041, 19093), False, 'import logging\n'), ((19591, 19656), 'logging.error', 'logging.error', (['f"""No UNIT_EXTENDED_DEFINITION for unit {base_str}"""'], {}), "(f'No UNIT_EXTENDED_DEFINITION for unit {base_str}')\n", (19604, 19656), False, 'import logging\n'), ((20175, 20179), 'django.utils.translation.ugettext', '_', (['d'], {}), '(d)\n', (20176, 20179), True, 'from django.utils.translation import ugettext as _\n'), ((23990, 24140), 'geocurrency.converters.models.ConverterResultDetail', 'ConverterResultDetail', ([], {'unit': 'quantity.unit', 'original_value': 'quantity.value', 'date': 'quantity.date_obj', 'conversion_rate': '(0)', 'converted_value': 'out.magnitude'}), '(unit=quantity.unit, original_value=quantity.value,\n date=quantity.date_obj, conversion_rate=0, converted_value=out.magnitude)\n', (24011, 24140), False, 'from geocurrency.converters.models import BaseConverter, ConverterResult, ConverterResultDetail, ConverterResultError, ConverterLoadError\n'), ((5113, 5168), 'logging.error', 'logging.error', (['f"""{cu.code} already defined in registry"""'], {}), "(f'{cu.code} already defined in registry')\n", (5126, 5168), False, 'import logging\n'), ((24551, 24586), 'django.utils.translation.ugettext', '_', (['"""Undefined unit in the registry"""'], {}), "('Undefined unit in the registry')\n", (24552, 24586), True, 'from django.utils.translation import ugettext as _\n'), ((24901, 24946), 'django.utils.translation.ugettext', '_', (['"""Dimensionality error, incompatible units"""'], {}), "('Dimensionality error, incompatible units')\n", (24902, 24946), True, 'from django.utils.translation import ugettext as _\n')]
|
"""Test environment wrapper."""
import gym
class AutoStopEnv(gym.Wrapper):
"""Environment wrapper that stops episode at step max_episode_length."""
def __init__(self, env=None, env_name='', max_episode_length=100):
"""Create an AutoStepEnv.
Args:
env (gym.Env): Environment to be wrapped.
env_name (str): Name of the environment.
max_episode_length (int): Maximum length of the episode.
"""
if env_name:
super().__init__(gym.make(env_name))
else:
super().__init__(env)
self._episode_step = 0
self._max_episode_length = max_episode_length
def step(self, action):
"""Step the wrapped environment.
Args:
action (np.ndarray): the action.
Returns:
np.ndarray: Next observation
float: Reward
bool: Termination signal
dict: Environment information
"""
self._episode_step += 1
next_obs, reward, done, info = self.env.step(action)
if self._episode_step == self._max_episode_length:
done = True
self._episode_step = 0
return next_obs, reward, done, info
def reset(self, **kwargs):
"""Reset the wrapped environment.
Args:
**kwargs: Keyword arguments.
Returns:
np.ndarray: Initial observation.
"""
return self.env.reset(**kwargs)
|
[
"gym.make"
] |
[((514, 532), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (522, 532), False, 'import gym\n')]
|
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
from string import digits
import csv
# Create a SentimentIntensityAnalyzer object (reused for every line)
sid_obj = SentimentIntensityAnalyzer()
file1 = open("DATASET\\LibriSpeech\\dev-clean\\84\\121123\\84-121123.trans.txt", 'r')
Lines = file1.readlines()
with open('vader_84_121123.csv', 'w', newline='') as file:
writer = csv.writer(file)
    # Strip digits from each line before scoring
for line in Lines:
remove_digits = str.maketrans('', '', digits)
sentence = line.translate(remove_digits)
sentiment_dict = sid_obj.polarity_scores(sentence)
print(sentence)
s=0
if sentiment_dict['compound'] >= 0.05 :
#Positive
s=0
elif sentiment_dict['compound'] <= - 0.05 :
#Negative
s=2
else :
#Neutral
s=1
writer.writerow([sentiment_dict['pos'], sentiment_dict['neu'], sentiment_dict['neg'], sentiment_dict['compound'], s])
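# Editor's sketch: the compound-score thresholding above expressed as a
# reusable helper (0=positive, 1=neutral, 2=negative, matching the labels
# written to the CSV rows above).
def vader_label(compound, threshold=0.05):
    """Map a VADER compound score to the 0/1/2 label used in this script."""
    if compound >= threshold:
        return 0
    if compound <= -threshold:
        return 2
    return 1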
|
[
"vaderSentiment.vaderSentiment.SentimentIntensityAnalyzer",
"csv.writer"
] |
[((339, 355), 'csv.writer', 'csv.writer', (['file'], {}), '(file)\n', (349, 355), False, 'import csv\n'), ((537, 565), 'vaderSentiment.vaderSentiment.SentimentIntensityAnalyzer', 'SentimentIntensityAnalyzer', ([], {}), '()\n', (563, 565), False, 'from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer\n')]
|
from pathlib import Path
from fhir.resources.valueset import ValueSet as _ValueSet
from oops_fhir.utils import ValueSet
from oops_fhir.r4.code_system.list_mode import ListMode as ListMode_
__all__ = ["ListMode"]
_resource = _ValueSet.parse_file(Path(__file__).with_suffix(".json"))
class ListMode(ListMode_):
"""
ListMode
The processing mode that applies to this list.
Status: draft - Version: 4.0.1
http://hl7.org/fhir/ValueSet/list-mode
"""
class Meta:
resource = _resource
|
[
"pathlib.Path"
] |
[((252, 266), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (256, 266), False, 'from pathlib import Path\n')]
|
from django.core.management.base import BaseCommand
from wq.db.patterns.identify.models import Authority
class Command(BaseCommand):
def handle(self, *args, **options):
Authority.objects.get_or_create(
name="This Site",
)
Authority.objects.get_or_create(
name="Wikipedia",
homepage="https://wikipedia.org",
object_url="https://wikipedia.org/wiki/%s",
)
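# Editor's note: because get_or_create only inserts missing rows, running
# this management command repeatedly is idempotent; each Authority row is
# created at most once.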
|
[
"wq.db.patterns.identify.models.Authority.objects.get_or_create"
] |
[((183, 232), 'wq.db.patterns.identify.models.Authority.objects.get_or_create', 'Authority.objects.get_or_create', ([], {'name': '"""This Site"""'}), "(name='This Site')\n", (214, 232), False, 'from wq.db.patterns.identify.models import Authority\n'), ((264, 396), 'wq.db.patterns.identify.models.Authority.objects.get_or_create', 'Authority.objects.get_or_create', ([], {'name': '"""Wikipedia"""', 'homepage': '"""https://wikipedia.org"""', 'object_url': '"""https://wikipedia.org/wiki/%s"""'}), "(name='Wikipedia', homepage=\n 'https://wikipedia.org', object_url='https://wikipedia.org/wiki/%s')\n", (295, 396), False, 'from wq.db.patterns.identify.models import Authority\n')]
|
import math
class Solution:
def checkPerfectNumber(self, num: int) -> bool:
res = 0
high = int(math.sqrt(num))
for i in range(high, 0,-1):
if num%i==0:
res+=i
res+= num//i if i != 1 else 0
res = res - high if high*high == num else res
return num == res
if __name__ == "__main__":
    a = Solution()
print(a.checkPerfectNumber(28))
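    # Additional checks (editor's illustration of the sqrt-bounded divisor
    # pairing above: each divisor i <= sqrt(num) also contributes num // i):
    print(a.checkPerfectNumber(6))    # True: proper divisors 1 + 2 + 3 == 6
    print(a.checkPerfectNumber(12))   # False: proper divisors sum to 16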
|
[
"math.sqrt"
] |
[((115, 129), 'math.sqrt', 'math.sqrt', (['num'], {}), '(num)\n', (124, 129), False, 'import math\n')]
|
#
# Copyright 2004,2005 <NAME> <<EMAIL>>
#
# This file forms part of Infotrope Polymer
#
# Infotrope Polymer is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Infotrope Polymer is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the Infotrope Python Library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import infotrope.datasets.base
import infotrope.serverman
import infotrope.url
import infotrope.imap
import polymer.encode
class filters(infotrope.datasets.base.dataset_class):
def __init__( self, url ):
infotrope.datasets.base.dataset_class.__init__( self, url )
def get_search_return( self ):
return '*'
def get_search_criteria( self ):
return 'NOT EQUAL "entry" "i;octet" ""'
def get_search_sort( self ):
return ['vendor.infotrope.filter.priority', 'i;ascii-casemap']
def factory( self, e ):
if 'vendor.infotrope.filter.type' in e:
if e['vendor.infotrope.filter.type']['value'] == 'single':
return single
return base
def new( self, t=None, entryname=None ):
if t is None:
t = 'single'
if entryname is None:
import time
import socket
entryname = str(time.time()) + '@' + socket.gethostname()
raw = {'entry':{'value':entryname}}
return single( raw, self.url )
class base(infotrope.datasets.base.entry):
def __init__( self, e, url ):
infotrope.datasets.base.entry.__init__( self, e, url )
def decode( self, attr, raw ):
if attr in ['vendor.infotrope.filter.colour.foreground','vendor.infotrope.filter.colour.background']:
return tuple(map(int,raw.split(',')))
elif attr == 'vendor.infotrope.filter.name':
return raw.decode('utf-8')
elif attr in ['vendor.infotrope.filter.bold','vendor.infotrope.filter.italic','vendor.infotrope.filter.scanonly']:
return raw == "1"
elif attr == 'vendor.infotrope.filter.program':
return raw.decode('utf-8')
elif attr == 'vendor.infotrope.filter.subfilters':
return [ self.referral( x ) for x in raw ]
elif attr == 'vendor.infotrope.filter.description':
return raw.decode('utf-8')
return raw
def encode( self, attr, polish ):
raw = polish
if attr in ['vendor.infotrope.filter.colour.foreground','vendor.infotrope.filter.colour.background']:
raw = ','.join( [ str(x) for x in polish ] )
elif attr in ['vendor.infotrope.filter.bold','vendor.infotrope.filter.italic','vendor.infotrope.filter.scanonly']:
raw = None
if polish:
raw = "1"
return raw
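    # Editor's note on the decode/encode round-trip above: a stored foreground
    # colour of "255,0,0" decodes to the tuple (255, 0, 0); boolean flags
    # decode from the string "1" and encode back to "1", or to None when false.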
class single(base):
def __init__( self, e, url ):
base.__init__( self, e, url )
def check_match( self, msg ):
return True
infotrope.datasets.base.register_dataset_type( 'vendor.infotrope.filter', filters )
import polymer.dialogs
import wx
class FilterList( polymer.dialogs.Base ):
def __init__( self, parent ):
self._filters = wx.GetApp().filters()
polymer.dialogs.Base.__init__( self, parent, "Edit View" )
self.selected = None
def add_prompts( self, p ):
self.AddPreamble( p, "Select view to edit" )
self.listing = wx.ListCtrl( p, -1, style=wx.LC_REPORT )
count = 0
self.listing.InsertColumn( 0, "View Name" )
filters = self._filters
for f in filters.entries():
e = filters[f]
item = wx.ListItem()
item.SetText( e['vendor.infotrope.filter.name'] )
item.SetId( count )
if e['vendor.infotrope.filter.colour.foreground']:
item.SetTextColour( e['vendor.infotrope.filter.colour.foreground'] )
if e['vendor.infotrope.filter.colour.background']:
item.SetBackgroundColour( e['vendor.infotrope.filter.colour.background'] )
if e['vendor.infotrope.filter.bold'] or e['vendor.infotrope.filter.italic']:
font = wx.SystemSettings.GetFont( wx.SYS_DEFAULT_GUI_FONT )
if e['vendor.infotrope.filter.bold']:
font.SetWeight( wx.BOLD )
if e['vendor.infotrope.filter.italic']:
font.SetStyle( wx.ITALIC )
item.SetFont( font )
self.listing.InsertItem( item )
count += 1
self.AddGeneric( self.listing, flags=wx.EXPAND, minsize=(-1,50) )
self.Bind( wx.EVT_LIST_ITEM_SELECTED, self.selected, self.listing )
self.descr = wx.StaticText( p, -1, "" )
self.AddGeneric( self.descr, flags=wx.EXPAND, prop=0 )
te = self.AddPrompt( p, "View Name", attr='filter', defvalue='' )
self.Bind( wx.EVT_TEXT_ENTER, self.Okay, te )
self.Bind( wx.EVT_TEXT, self.text_changed, te )
self.listing.SetColumnWidth( 0, wx.LIST_AUTOSIZE )
def unselect_all( self ):
idx = self.listing.GetFirstSelected()
while idx > -1:
self.listing.SetItemState( idx, 0, wx.LIST_STATE_SELECTED|wx.LIST_STATE_FOCUSED )
idx = self.listing.GetNextSelected( idx )
def selected( self, evt ):
self.selected = wx.GetApp().filters()[evt.GetIndex()]
self.prompts['filter'].SetValue( evt.GetText() )
self.descr.SetLabel( self._filters[evt.GetIndex()]['vendor.infotrope.filter.description'] )
def text_changed( self, evt ):
evt.Skip()
if self.selected is not None and evt.GetString():
if self.selected['vendor.infotrope.filter.name'] != polymer.encode.decode_ui( self.prompts['filter'].GetValue() ):
self.unselect_all()
self.descr.SetLabel( 'New' )
self.selected = None
def Okay( self, evt ):
self.End( wx.ID_OK )
class EditFilter( polymer.dialogs.EntryDialogNew ):
def __init__( self, parent, filt=None, name=None, dataset=None ):
self.name = name
if dataset is None:
dataset = wx.GetApp().filters()
polymer.dialogs.EntryDialogNew.__init__( self, parent, name or "New View", filt, dataset )
def add_prompts( self, p ):
self.AddPrompt( p, "Name", 'vendor.infotrope.filter.name', self.name )
self.AddPrompt( p, "Description", 'vendor.infotrope.filter.description' )
self.AddColourPrompt( p, "Foreground", 'vendor.infotrope.filter.colour.foreground' )
self.AddColourPrompt( p, "Background", 'vendor.infotrope.filter.colour.background' )
self.AddCheckBox( p, "Italic", 'vendor.infotrope.filter.italic' )
self.AddCheckBox( p, "Bold", 'vendor.infotrope.filter.bold' )
self.AddPrompt( p, "IMAP Search", 'vendor.infotrope.filter.program' )
self.AddCheckBox( p, "Don't list", 'vendor.infotrope.filter.scanonly' )
self.AddPrompt( p, "Priority", 'vendor.infotrope.filter.priority' )
def decode_ui( self ):
d = self.entry
d['vendor.infotrope.filter.name'] = polymer.encode.decode_ui( self.prompts['vendor.infotrope.filter.name'].GetValue() )
d['vendor.infotrope.filter.description'] = polymer.encode.decode_ui( self.prompts['vendor.infotrope.filter.description'].GetValue() )
d['vendor.infotrope.filter.colour.foreground'] = self.prompts['vendor.infotrope.filter.colour.foreground'].GetValue()
#if d['vendor.infotrope.filter.colour.foreground'] is not None:
# d['vendor.infotrope.filter.colour.foreground'] = ','.join( map(str,d['vendor.infotrope.filter.colour.foreground']) )
d['vendor.infotrope.filter.colour.background'] = self.prompts['vendor.infotrope.filter.colour.background'].GetValue()
#if d['vendor.infotrope.filter.colour.background'] is not None:
# d['vendor.infotrope.filter.colour.background'] = ','.join( map(str,d['vendor.infotrope.filter.colour.background']) )
d['vendor.infotrope.filter.program'] = polymer.encode.decode_ui( self.prompts['vendor.infotrope.filter.program'].GetValue() )
        d['vendor.infotrope.filter.priority'] = self.prompts['vendor.infotrope.filter.priority'].GetValue()
d['vendor.infotrope.filter.type'] = 'single'
d['vendor.infotrope.filter.italic'] = int(self.prompts['vendor.infotrope.filter.italic'].GetValue())
d['vendor.infotrope.filter.bold'] = int(self.prompts['vendor.infotrope.filter.bold'].GetValue())
d['vendor.infotrope.filter.scanonly'] = int(self.prompts['vendor.infotrope.filter.scanonly'].GetValue())
|
[
"wx.ListCtrl",
"wx.SystemSettings.GetFont",
"time.time",
"wx.StaticText",
"socket.gethostname",
"wx.ListItem",
"wx.GetApp"
] |
[((3817, 3855), 'wx.ListCtrl', 'wx.ListCtrl', (['p', '(-1)'], {'style': 'wx.LC_REPORT'}), '(p, -1, style=wx.LC_REPORT)\n', (3828, 3855), False, 'import wx\n'), ((5095, 5119), 'wx.StaticText', 'wx.StaticText', (['p', '(-1)', '""""""'], {}), "(p, -1, '')\n", (5108, 5119), False, 'import wx\n'), ((4042, 4055), 'wx.ListItem', 'wx.ListItem', ([], {}), '()\n', (4053, 4055), False, 'import wx\n'), ((1760, 1780), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (1778, 1780), False, 'import socket\n'), ((3590, 3601), 'wx.GetApp', 'wx.GetApp', ([], {}), '()\n', (3599, 3601), False, 'import wx\n'), ((4564, 4614), 'wx.SystemSettings.GetFont', 'wx.SystemSettings.GetFont', (['wx.SYS_DEFAULT_GUI_FONT'], {}), '(wx.SYS_DEFAULT_GUI_FONT)\n', (4589, 4614), False, 'import wx\n'), ((5733, 5744), 'wx.GetApp', 'wx.GetApp', ([], {}), '()\n', (5742, 5744), False, 'import wx\n'), ((6550, 6561), 'wx.GetApp', 'wx.GetApp', ([], {}), '()\n', (6559, 6561), False, 'import wx\n'), ((1739, 1750), 'time.time', 'time.time', ([], {}), '()\n', (1748, 1750), False, 'import time\n')]
|
from random import random
class Treap:
def __init__(self, key, value = 0):
self.key = key
self.value = value
self.priority = random()
self.size = 1
self.height = 1
self.lazy = 0
self.max_value = value
self.min_value = value
self.left = None
self.right = None
@staticmethod
def SumAll(node, value):
if node is None:
return
node.value += value
node.max_value += value
node.min_value += value
node.lazy += value
@classmethod
def Unlazy(cls, node):
cls.SumAll(node.left, node.lazy)
cls.SumAll(node.right, node.lazy)
node.lazy = 0
@classmethod
def Update(cls, node):
if node is None:
return
cls.Unlazy(node)
node.size = 1
node.height = 0
node.max_value = node.value
node.min_value = node.value
if node.left is not None:
node.size += node.left.size
node.height = node.left.height
node.max_value = max(node.max_value, node.left.max_value)
node.min_value = min(node.min_value, node.left.min_value)
if node.right is not None:
node.size += node.right.size
node.height = max(node.height, node.right.height)
node.max_value = max(node.max_value, node.right.max_value)
node.min_value = min(node.min_value, node.right.min_value)
node.height += 1
@classmethod
def SplitKeepRight(cls, node, key):
if node is None:
return None, None
left, right = None, None
cls.Unlazy(node)
if key <= node.key:
left, node.left = cls.SplitKeepRight(node.left, key)
right = node
else:
node.right, right = cls.SplitKeepRight(node.right, key)
left = node
cls.Update(left)
cls.Update(right)
return left, right
@classmethod
def Merge(cls, left, right):
if left is None:
return right
if right is None:
return left
node = None
if left.priority > right.priority:
cls.Unlazy(left)
left.right = cls.Merge(left.right, right)
node = left
else:
cls.Unlazy(right)
right.left = cls.Merge(left, right.left)
node = right
cls.Update(node)
return node
@classmethod
def SplitSmallest(cls, node):
if node is None:
return None, None
left, right = None, None
cls.Unlazy(node)
if node.left is not None:
left, node.left = cls.SplitSmallest(node.left)
right = node
else:
right = node.right
node.right = None
left = node
cls.Update(left)
cls.Update(right)
return left, right
@classmethod
def SplitGreatest(cls, node):
if node is None:
return None, None
cls.Unlazy(node)
if node.right is not None:
node.right, right = cls.SplitGreatest(node.right)
left = node
else:
left = node.left
node.left = None
right = node
cls.Update(left)
cls.Update(right)
return left, right
@staticmethod
def Size(node):
return 0 if node is None else node.size
@staticmethod
def Height(node):
return 0 if node is None else node.height
@classmethod
def _ToList(cls, node, extractor, _list = None):
if _list is None:
_list = []
if node is None:
return _list
cls.Unlazy(node)
cls._ToList(node.left, extractor, _list)
_list.append(extractor(node))
cls._ToList(node.right, extractor, _list)
return _list
@classmethod
def KeysToList(cls, node, _list = None):
extractor = lambda x: x.key
return cls._ToList(node, extractor, _list)
@classmethod
def ValuesToList(cls, node, _list = None):
extractor = lambda x: x.value
return cls._ToList(node, extractor, _list)
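# Minimal usage sketch (editor's illustration): build a treap key by key,
# apply a lazy range update to every key >= 3 via split/merge, and read the
# tree back in order.
if __name__ == "__main__":
    root = None
    for k in [5, 1, 9, 3]:
        left, right = Treap.SplitKeepRight(root, k)
        root = Treap.Merge(Treap.Merge(left, Treap(k)), right)
    left, right = Treap.SplitKeepRight(root, 3)
    Treap.SumAll(right, 10)           # lazily add 10 to all keys >= 3
    root = Treap.Merge(left, right)
    print(Treap.KeysToList(root))      # [1, 3, 5, 9]
    print(Treap.ValuesToList(root))    # [0, 10, 10, 10]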
|
[
"random.random"
] |
[((146, 154), 'random.random', 'random', ([], {}), '()\n', (152, 154), False, 'from random import random\n')]
|
from threading import Thread
import socket
import time
import json
offline = False
class Client(Thread):
address = ('localhost', 8957)
buffer_size = 8000
delimiter = b'\r\n'
def __init__(self):
super().__init__()
self.sock = None
self.make_socket()
self.handlers = {}
self.error_handlers = {}
if not offline:
self.start()
def make_socket(self):
try:
self.sock = socket.create_connection(self.address)
except ConnectionRefusedError:
self.reconnect()
def reconnect(self):
print('Reconnecting in 5 second')
if self.sock:
self.sock.close()
time.sleep(5)
self.make_socket()
def send(self, message_type, data):
if not self.sock:
self.make_socket()
try:
self.sock.sendall(json.dumps({'type': message_type, 'data': data}).encode('utf-8') + self.delimiter)
except ConnectionAbortedError:
self.reconnect()
def _recv(self):
if not self.sock:
self.make_socket()
try:
temp = self.sock.recv(self.buffer_size)
if not temp:
raise ConnectionAbortedError
return temp
except ConnectionAbortedError:
self.reconnect()
return
def handle(self, event):
def decorator(func):
def wrapper(data):
return func(**data)
self.handlers[event] = wrapper
return wrapper
return decorator
def error_handle(self, code):
def decorator(func):
def wrapper(data):
return func(**data)
self.error_handlers[code] = wrapper
return wrapper
return decorator
def _recv_handle(self, message):
if not message:
return
message = json.loads(message)
message_type = message['type']
if message_type == 'error':
func = self.error_handlers.get(message['data']['code'])
else:
func = self.handlers.get(message_type)
if func:
func(message['data'])
def run(self):
while True:
recv = self._recv()
if recv:
msgs = recv.strip(self.delimiter).split(self.delimiter)
for msg in msgs:
self._recv_handle(msg)
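# Usage sketch (editor's illustration; assumes a server speaking the same
# "\r\n"-delimited JSON protocol is listening on Client.address). Note that
# the run loop splits each recv() on the delimiter but does not reassemble
# frames split across reads. Kept as comments since Client() connects
# immediately:
# client = Client()
#
# @client.handle('chat')
# def on_chat(text, sender):
#     print(sender, text)
#
# client.send('chat', {'text': 'hello', 'sender': 'me'})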
|
[
"json.dumps",
"json.loads",
"socket.create_connection",
"time.sleep"
] |
[((706, 719), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (716, 719), False, 'import time\n'), ((1921, 1940), 'json.loads', 'json.loads', (['message'], {}), '(message)\n', (1931, 1940), False, 'import json\n'), ((471, 509), 'socket.create_connection', 'socket.create_connection', (['self.address'], {}), '(self.address)\n', (495, 509), False, 'import socket\n'), ((890, 938), 'json.dumps', 'json.dumps', (["{'type': message_type, 'data': data}"], {}), "({'type': message_type, 'data': data})\n", (900, 938), False, 'import json\n')]
|
import string
import pprint
import types
import sys
import os
import re
# strangely enough, the following code is necessary to find modules in the parent-directory
# (despite what is said in http://www.python.org/doc/current/tut/node8.html)
# it adds the parent directory to the sys.path variable that determines which directories to
# search for modules
import sys, os, os.path
sys.path.append(string.join(string.split(os.path.dirname(sys.argv[0]), os.sep)[:-1], os.sep))
from shared import XMLConfig
from shared import fileIndexer
# Not good. Fix.
from graph import *
from graphIterator import *
from graphStorage import *
from graphCommon import *
# def printSummary(_graph):
# if _graph!=None:
# nodes = graph.getNodeList(_graph, [])
# print "Available nodes:", nodes
# processed =[]
# for nodename in nodes:
# if not nodename in processed:
# node = XMLConfig.getNode(nodename, _graph['refered_by'][0]['parent'])
# # graph.printGraph(_graph['refered_by'][0]['parent'])
# if node!=None:
# print "%s, combi %d, heeft waarde %s" % (nodename, node['recordnr'], node['recordcontents'])
# processed.append(nodename)
def __getItemsFromInput(start, end, input):
""" get <item></item>-pairs number
start...end from the input-list """
# print "getting items", start
# print "to", end
# print "from", input
if start==-1 and end==-1:
all = 1
start = 0
else:
all = 0
    if all==0 and (start<0 or start>end):
        return None
answer = []
maxitemcount = end - start + 1
itemcount = 0
regexp = re.compile("^\s*(.+?)\s*=\s*(.+?)\s*$")
for linenr in range(len(input)):
strippedlowercaseline = string.strip(string.lower(input[linenr]))
if strippedlowercaseline == "<item>":
itemcount += 1
if itemcount>start:
local = {}
for index in range(linenr+1, len(input)):
parts = re.findall(regexp, input[index])
if len(parts)>0:
local[parts[0][0]]=parts[0][1]
strippedlowercaseline = string.strip(string.lower(input[index]))
if strippedlowercaseline=="</item>":
break
answer.append(local)
if itemcount==start + maxitemcount and all==0:
break
if answer==[]:
answer = itemcount
return answer
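# Editor's example of the <item> format parsed above (illustrative):
#   <item>
#   name = foo
#   count = 3
#   </item>
# Given those lines, __getItemsFromInput(0, 0, lines) returns
# [{'name': 'foo', 'count': '3'}]; when no items fall inside the requested
# range, the running item count is returned instead of a list.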
def __getOutputFromScript(scriptConfigEntry):
""" run the script specified in the 'script'-section
from the configfile and return its output """
vars = XMLConfig.getVariables(scriptConfigEntry)
# execute the script
if vars.has_key('script'):
pipe = os.popen3(vars['script'], "t")
# output written to stderr causes an error to be raised
stderr = pipe[2]
line = stderr.readline()[:-1]
error = line
while len(line)>0:
line= stderr.readline()[:-1]
error = "%s\n%s" % (error, line)
if len(error)>0:
raise "While trying to execute <<<%s>>> the following error occurred:\n%s\n\n\n----" \
"\nTip (free!): If applicable, in Windows, the script is executed using 'cmd.exe /c'.\n" \
"If you can't get the 'script'-tags to work, then enter 'cmd.exe /?' in a command-shell\n" \
"for more info on how to fix your problem. " % (vars['script'], error)
# output written to stdout is processed by separate code
stdout = pipe[1]
line = string.rstrip(stdout.readline())
output = [line]
while len(line)>0:
line= string.rstrip(stdout.readline())
output.append(line)
return output
else:
return None
def __getDirItems(start, end, fileindex):
if start==-1 and end==-1:
all = 1
else:
all = 0
counter = 0
answer = []
for file in fileindex:
if (counter>=start and counter<end) or (all==1):
answer.append(file['public'])
counter += 1
if answer == []:
return counter
else:
return answer
def __getRange(start, index, totalitems, printeditems):
if start - index > 0:
a = start - index
else:
a = 0
if printeditems < totalitems:
b = a + totalitems - printeditems - 1
else:
b = a - 1
answer = {}
answer['from'] = a
answer['to'] = b
return answer
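# Worked example (editor's illustration): __getRange(start=10, index=4,
# totalitems=5, printeditems=2) -> {'from': 6, 'to': 8}: skip the first six
# combinations of this source, then take the three still needed.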
def getCombinations(start, end, config, nostorage=0):
""" Get a specific range of combinations from combination
number 'start' up to and including number 'end'
Combinations are counted from 0 (which means that
'60' is the 61st combination) """
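	# Combinations are drawn, in order, from four sources: 'dynamic'
	# dependency graphs, 'script' output, 'textfile' contents and
	# 'directory' listings; 'index' counts how many are already behind us.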
# check to see if we must return _all_ combinations
if end==-1 and start==-1:
start = 0
all = 1
else:
all = 0
if end <= start and all==0:
return None
totalitems = end - start + 1
printeditems = 0
index = 0 # how much combinations are 'behind us'?
# extract the database-connection parameters from the configuration
dbparameters = XMLConfig.getVariables(XMLConfig.getEntries("database", config)[0])
for requireditem in ['dbclient', 'dbname', 'dsn', 'host', 'password', 'user', 'connectstring']:
if not dbparameters.has_key(requireditem):
dbparameters[requireditem] = None
items = []
# First, list the dynamic webpages
dynamic = XMLConfig.getEntries('dynamic', config)
for dynentry in dynamic:
depgraphs = graph.createDependencyGraphs(dynentry)
for depgraph in depgraphs:
if start - index > 0:
startAt = start - index
else:
startAt = 0
# TODO: REMOVE THIS
			# TODO: The bug is in openGraph
# continue
depgraph = openGraph(depgraph, startAt, nostorage, dbparameters)
if type(depgraph) != types.IntType:
# increase index with starting combination of graph
index += startAt
while 1==1:
if printeditems<totalitems or all==1:
try:
combi = giveNextCombination(depgraph)
except:
break
if combi!=None:
items.append(getCombinationItem(combi))
printeditems += 1
index += 1
else:
break
else:
break
if nostorage == 0:
recordState(depgraph)
else:
# if the answer returned by 'openGraph' is
# an integer, then this means the provided
# graph is out of combinations. The integer
# returned is equal to the number of combinations
# that has been provided by the graph.
#
# Here we increase the number of combinations
# that are behind us with this number.
index += depgraph
closeDatabaseConnections(depgraph)
# print "#1 Printing range (%d, %d), index=%d, printed %d of %d items" % (start, end, index, printeditems, totalitems)
if printeditems<totalitems or all==1:
# Second, process the 'script'-entries
scripts = XMLConfig.getEntries('script', config)
for script in scripts:
output = __getOutputFromScript(script)
# which items do we need to get?
_range = __getRange(start, index, totalitems, printeditems)
if all==0:
answer = __getItemsFromInput(_range['from'], _range['to'], output)
else:
answer = __getItemsFromInput(-1, -1, output)
if type(answer) == types.IntType:
# If the returned answer is not a list but an integer,
# then this integer represents the number of combinations
# in the output of this script
index += answer
else:
if type(answer) == types.ListType:
# if the answer is a list, then append the contents of this list
# to the already obtained partial answer
items.extend(answer)
printeditems += len(answer)
# print "#2 Printing range (%d, %d), index=%d, printed %d of %d items" % (start, end, index, printeditems, totalitems)
if printeditems<totalitems or all==1:
# Third, process the 'textfile'-entries
textfiles = XMLConfig.getEntries('textfile', config)
for textfile in textfiles:
vars = XMLConfig.getVariables(textfile)
handle = open(vars['file'])
line = string.rstrip(handle.readline())
output = [line]
while len(line)>0:
line = string.rstrip(handle.readline())
output.append(line)
# which items do we need to get?
_range = __getRange(start, index, totalitems, printeditems)
if all==0:
answer = __getItemsFromInput(_range['from'], _range['to'], output)
else:
answer = __getItemsFromInput(-1, -1, output)
if type(answer) == types.IntType:
# If the returned answer is not a list but an integer,
# then this integer represents the number of combinations
# in the output of this script
index += answer
else:
if type(answer) == types.ListType:
# if the answer is a list, then append the contents of this list
# to the already obtained partial answer
items.extend(answer)
printeditems += len(answer)
# print "#3 Printing range (%d, %d), index=%d, printed %d of %d items" % (start, end, index, printeditems, totalitems)
if printeditems<totalitems or all==1:
# Fourth, process the 'directory'-entries
fileindex = fileIndexer.fileIndexer()
directories = XMLConfig.getEntries('directory', config)
for directory in directories:
vars = XMLConfig.getVariables(directory)
if vars.has_key('local') and vars.has_key('public'):
local = replaceVariables(vars['local'], directory)
public = replaceVariables(vars['public'], directory)
# remove trailing slashes
while public[-1]=="/":
public = public[:-1]
fileindex.addDir(local, public)
# which items do we need to get?
_range = __getRange(start, index, totalitems, printeditems)
# get content from directories and rewrite as URLs
if all==0:
diritems = __getDirItems(_range['from'], _range['to'], fileindex)
else:
diritems = __getDirItems(-1, -1, fileindex)
if type(diritems) == types.IntType:
index += diritems
else:
if diritems != None:
for item in diritems:
items.append(newItem(url=item))
printeditems += 1
index += 1
# print "#4 Done - Tried to print range (%d, %d), index=%d, printed %d of %d items" % (start, end, index, printeditems, totalitems)
return items
if __name__=="__main__":
if len(sys.argv)>1:
start = string.atoi(sys.argv[1])
end = string.atoi(sys.argv[2])
else:
start = 0
end = 300
config = XMLConfig.parse('webindex.ini')
# XMLConfig.printConfigtree(config)
items = getCombinations(start, end, config)
pprint.pprint(items)
# print "Done"
|
[
"shared.fileIndexer.fileIndexer",
"shared.XMLConfig.getVariables",
"string.atoi",
"string.lower",
"os.popen3",
"os.path.dirname",
"shared.XMLConfig.parse",
"pprint.pprint",
"shared.XMLConfig.getEntries"
] |
[((2448, 2489), 'shared.XMLConfig.getVariables', 'XMLConfig.getVariables', (['scriptConfigEntry'], {}), '(scriptConfigEntry)\n', (2470, 2489), False, 'from shared import XMLConfig\n'), ((5045, 5084), 'shared.XMLConfig.getEntries', 'XMLConfig.getEntries', (['"""dynamic"""', 'config'], {}), "('dynamic', config)\n", (5065, 5084), False, 'from shared import XMLConfig\n'), ((10192, 10223), 'shared.XMLConfig.parse', 'XMLConfig.parse', (['"""webindex.ini"""'], {}), "('webindex.ini')\n", (10207, 10223), False, 'from shared import XMLConfig\n'), ((10309, 10329), 'pprint.pprint', 'pprint.pprint', (['items'], {}), '(items)\n', (10322, 10329), False, 'import pprint\n'), ((2554, 2584), 'os.popen3', 'os.popen3', (["vars['script']", '"""t"""'], {}), "(vars['script'], 't')\n", (2563, 2584), False, 'import sys, os, os.path\n'), ((6573, 6611), 'shared.XMLConfig.getEntries', 'XMLConfig.getEntries', (['"""script"""', 'config'], {}), "('script', config)\n", (6593, 6611), False, 'from shared import XMLConfig\n'), ((7633, 7673), 'shared.XMLConfig.getEntries', 'XMLConfig.getEntries', (['"""textfile"""', 'config'], {}), "('textfile', config)\n", (7653, 7673), False, 'from shared import XMLConfig\n'), ((8891, 8916), 'shared.fileIndexer.fileIndexer', 'fileIndexer.fileIndexer', ([], {}), '()\n', (8914, 8916), False, 'from shared import fileIndexer\n'), ((8936, 8977), 'shared.XMLConfig.getEntries', 'XMLConfig.getEntries', (['"""directory"""', 'config'], {}), "('directory', config)\n", (8956, 8977), False, 'from shared import XMLConfig\n'), ((10084, 10108), 'string.atoi', 'string.atoi', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (10095, 10108), False, 'import string\n'), ((10119, 10143), 'string.atoi', 'string.atoi', (['sys.argv[2]'], {}), '(sys.argv[2])\n', (10130, 10143), False, 'import string\n'), ((1714, 1741), 'string.lower', 'string.lower', (['input[linenr]'], {}), '(input[linenr])\n', (1726, 1741), False, 'import string\n'), ((4750, 4790), 'shared.XMLConfig.getEntries', 'XMLConfig.getEntries', (['"""database"""', 'config'], {}), "('database', config)\n", (4770, 4790), False, 'from shared import XMLConfig\n'), ((7715, 7747), 'shared.XMLConfig.getVariables', 'XMLConfig.getVariables', (['textfile'], {}), '(textfile)\n', (7737, 7747), False, 'from shared import XMLConfig\n'), ((9022, 9055), 'shared.XMLConfig.getVariables', 'XMLConfig.getVariables', (['directory'], {}), '(directory)\n', (9044, 9055), False, 'from shared import XMLConfig\n'), ((429, 457), 'os.path.dirname', 'os.path.dirname', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (444, 457), False, 'import sys, os, os.path\n'), ((2049, 2075), 'string.lower', 'string.lower', (['input[index]'], {}), '(input[index])\n', (2061, 2075), False, 'import string\n')]
|
import json
import numpy as np
import cv2
import cv2.aruco as aruco
import socket
from urllib.request import urlopen
from get_img import get_img as gi
ADDRESS = ('', 10000)
central = None
conn_pool = []
central = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
central.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
central.setblocking(False)
central.bind(ADDRESS)
central.listen(5)
print("Waiting...")
position = {}
orientation = {}
temp_threshold = 40
indication = [233]
font = cv2.FONT_HERSHEY_SIMPLEX
green_lower = np.array([35, 110, 106])
green_upper = np.array([77, 255, 255])
red_lower = np.array([156, 43, 46])
red_upper = np.array([180, 255, 255])
yellow_lower = np.array([26, 43, 46])
yellow_upper = np.array([34, 255, 255])
bts = b''
fix_size = (640, 480)
CAMERA_BUFFRER_SIZE = 8192
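# One-character command protocol sent to each agent over its TCP connection
# (as used by the Agent methods below): 'w' forward, 's' backward,
# 'a' turn left, 'd' turn right, 't' stop, 'o' look for target,
# 'l' thermal readout, 'k' attack, 'g' find edge, 'r' circle, 'q' quit.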
class Agent():
def __init__(self, id, order, state=0, test=False) -> None:
self.id = id
self.state = state
self.order = order
self.position = np.inf
self.orientation = np.inf
self.tick = 0
self.come_from = str(self.id) + 'come_from'
self.target = str(self.id) + 'target'
self.flag = True
self.url = 'http://192.168.1.27:81/stream'
if test:
self.path = [15, 16]
pass
def set_location(self):
if self.id in position:
self.position = position[self.id]
def set_orientation(self):
if self.id in orientation:
self.orientation = orientation[self.id]
def set_path(self, path):
self.path = path
self.come_from = self.path.pop(0)
self.target = self.path.pop(0)
def set_agent_list(self, agent_list):
self.agent_list = agent_list
def forward(self):
msg = str.encode('w')
conn_pool[self.order].send(msg)
if self.id in indication:
print('Agent {}: forward..., target:{}'.format(self.id, self.target))
pass
def backward(self):
msg = str.encode('s')
conn_pool[self.order].send(msg)
if self.id in indication:
print('Agent {}: backward..., target:{}'.format(self.id, self.target))
pass
def turn_right(self):
msg = str.encode('d')
conn_pool[self.order].send(msg)
if self.id in indication:
print('Agent {}: right..., target:{}'.format(self.id, self.target))
pass
def turn_left(self):
msg = str.encode('a')
conn_pool[self.order].send(msg)
if self.id in indication:
print('Agent {}: left..., target:{}'.format(self.id, self.target))
pass
def turn_to(self, target):
v1 = position[target] - position[self.id]
v2 = np.array([1, 0])
cos_angle = v1.dot(v2) / (np.linalg.norm(v1) * np.linalg.norm(v2))
angle = np.arccos(cos_angle) / np.pi * 180
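        # arccos only gives [0, 180]; negate when v1 points below the x-axis
        # so 'angle' becomes a signed heading towards the target in (-180, 180]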
if v1[1] < 0:
angle *= -1
agent_ori = self.orientation
# print(angle)
# print(agent_ori)
if abs(angle - agent_ori) > 180:
if angle > agent_ori:
self.turn_left()
else:
self.turn_right()
else:
if angle < agent_ori:
self.turn_left()
else:
self.turn_right()
def turn_to_ori(self, angle):
agent_ori = self.orientation
# print(angle)
# print(agent_ori)
if abs(angle - agent_ori) > 180:
if angle > agent_ori:
self.turn_left()
else:
self.turn_right()
else:
if angle < agent_ori:
self.turn_left()
else:
self.turn_right()
def stop(self):
msg = str.encode('t')
conn_pool[self.order].send(msg)
if self.id in indication:
print('Agent {}: stopping..., target:{}'.format(self.id, self.target))
pass
def look_for_target(self):
msg = str.encode('o')
conn_pool[self.order].send(msg)
pass
def thermal(self):
msg = str.encode('l')
conn_pool[self.order].send(msg)
pass
def attack(self):
msg = str.encode('k')
conn_pool[self.order].send(msg)
print('Agent {} is attacking!!'.format(self.id))
pass
def get_img(self):
return gi(self.url)
def find_edge(self):
msg = str.encode('g')
conn_pool[self.order].send(msg)
pass
def circle(self):
msg = str.encode('r')
conn_pool[self.order].send(msg)
pass
def quit(self):
msg = str.encode('q')
conn_pool[self.order].send(msg)
pass
def reach(self, target):
if cal_distance(target, self.id, position) < 0.04:
return True
else:
return False
def head_to(self, id):
v1 = position[id] - position[self.id]
v2 = np.array([1, 0])
cos_angle = v1.dot(v2) / (np.linalg.norm(v1) * np.linalg.norm(v2))
angle = np.arccos(cos_angle) / np.pi * 180
if v1[1] < 0:
angle *= -1
if self.orientation - angle < 3 and self.orientation - angle > -3:
return True
else:
return False
def head_to_ori(self, angle):
if abs(self.orientation - angle) < 12:
return True
else:
return False
def set_state(self, new_state):
self.state = new_state
def state_control_2(self):
if self.state == 0:
if self.id == 233:
self.set_state(11)
else:
self.set_state(0)
if self.state == 10:
# initialization
self.set_state(93)
if self.state == 911:
self.forward()
self.set_state(912)
if self.state == 912:
if self.reach(self.target):
self.set_state(-1)
else:
if self.tick % 30 == 0:
if self.head_to(self.target):
self.set_state(912)
else:
self.set_state(921)
else:
self.set_state(912)
if self.state == 921:
self.turn_to(self.target)
self.set_state(922)
if self.state == 922:
if self.head_to(self.target):
self.set_state(93)
else:
# self.turn_right()
self.set_state(922)
if self.state == 93:
self.stop()
if self.head_to(self.target):
self.set_state(911)
else:
self.set_state(921)
if self.state == 11:
self.look_for_target()
self.set_state(12)
if self.state == 12:
try:
data = conn_pool[self.order].recv(1064)
if len(data) != 0:
msg = data.decode('utf-8')
print(msg)
if msg == 'Reach the object':
self.set_state(21)
except Exception:
# print('12 except')
self.set_state(12)
pass
if self.state == 21:
self.thermal()
self.set_state(22)
if self.state == 22:
try:
data = conn_pool[self.order].recv(1064)
json_string = json.loads(data)
self.array = format_thermal(json_string)
print(self.array)
self.set_state(23)
except Exception:
# print('22 except')
self.set_state(22)
pass
if self.state == 23:
self.max_temp = max(max(self.array))
if self.max_temp == 0:
self.set_state(21)
else:
self.set_state(24)
if self.state == 24:
if self.max_temp > temp_threshold:
self.set_state(31)
else:
self.set_state(41)
if self.state == 31:
self.find_edge()
self.set_state(32)
if self.state == 32:
try:
data = conn_pool[self.order].recv(1064)
self.edge_len = float(data.decode('utf-8'))
print('edge length:', self.edge_len)
position['start'] = position[self.id]
self.forward()
self.set_state(33)
except Exception:
self.set_state(32)
pass
if self.state == 33:
# print('distance: ', cal_distance(self.id, 'start'))
if cal_distance(self.id, 'start') < 0.5:
self.set_state(33)
else:
position[str(self.id) + 'come_from'] = position[self.id]
self.set_state(10)
if self.state == 41:
color = self.get_img()
position['obj'] = position[self.id]
orientation['obj'] = orientation[self.id]
if color == 'red':
print('Red!!!!!')
self.agent_list[2].set_state(61)
self.set_state(10)
pass
elif color == 'yellow':
print('Yellow!!!!!')
self.agent_list[1].set_state(61)
self.set_state(10)
pass
elif color == 'green':
print('Green!!!!!')
self.set_state(51)
pass
else:
self.set_state(41)
pass
if self.state == 51:
self.circle()
self.set_state(52)
if self.state == 52:
try:
data = conn_pool[self.order].recv(1064)
msg = data.decode('utf-8')
if msg == 'Complete':
self.set_state(-1)
except Exception:
self.set_state(52)
pass
if self.state == 61:
position[str(self.id) + 'target'] = position['obj']
self.set_state(10)
if self.state == -1:
if self.id == 233:
self.stop()
else:
self.set_state(-21)
pass
if self.state == -21:
self.turn_to_ori(orientation['obj'])
self.set_state(-22)
pass
if self.state == -22:
if self.head_to_ori(orientation['obj']):
self.set_state(-23)
else:
self.set_state(-22)
if self.state == -23:
self.forward()
self.set_state(-24)
if self.state == -24:
if self.head_to_ori(orientation['obj']):
if cal_distance('obj', self.id) >= 0.9:
self.set_state(-4)
else:
self.set_state(-24)
else:
self.set_state(-31)
# if cal_distance('obj', self.id) >= 1:
# self.set_state(-4)
# else:
# self.set_state(-24)
if self.state == -31:
self.turn_to_ori(orientation['obj'])
self.set_state(-32)
if self.state == -32:
print('Ori: {}, OBJ_ori: {}'.format(self.orientation, orientation['obj']))
if self.head_to_ori(orientation['obj']):
self.set_state(-23)
else:
self.set_state(-32)
if self.state == -4:
self.stop()
self.attack()
        if self.tick % 50 == 0:
if self.id in indication:
print(str(self.id) + ' state: ' + str(self.state))
self.tick += 1
def open_camera():
cap = cv2.VideoCapture(1)
cap.set(3, 1920)
cap.set(4, 1080)
return cap
def init_parameters():
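    # Camera intrinsic matrix and distortion coefficients, presumably obtained
    # from an earlier calibration of this particular camera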
mtx = np.array([[1051.1, 0, 695.0741],
[0, 1052.2, 297.7604],
[0., 0., 1.]])
dist = np.array([[-0.4223, 0.1412, 0, 0, 0.0921]])
return mtx, dist
def capture_frame(cap):
ret, frame = cap.read()
frame = cv2.GaussianBlur(frame, (5, 5), 0)
return frame
def detect_aruco(frame):
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
parameters = aruco.DetectorParameters_create()
aruco_dict = aruco.Dictionary_get(aruco.DICT_6X6_250)
corners, ids, rIP = aruco.detectMarkers(gray, aruco_dict, parameters=parameters)
return corners, ids, rIP
def get_position(ids, tvec, position):
for i in range(ids.shape[0]):
position[ids[i][0]] = (tvec[i][0])[:2]
def get_orientation(ids, rvec, orientation):
for i in range(ids.shape[0]):
temp = rvec[i][0]
r, _ = cv2.Rodrigues(temp)
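        # yaw (rotation about the z-axis) read from the rotation matrix, in degrees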
theta_z = np.arctan2(r[1][0], r[0][0]) / np.pi * 180
orientation[ids[i][0]] = theta_z
def cal_distance(id1, id2, pos=position):
if id1 in pos and id2 in pos:
distance = np.linalg.norm(pos[id1] - pos[id2])
return distance
else:
return np.inf
def cal_angle(agent, vertex_id, next_id, pos):
try:
vertex = pos[vertex_id]
next = pos[next_id]
v1 = agent.position - vertex
v2 = next - vertex
cos_angle = v1.dot(v2) / (np.linalg.norm(v1) * np.linalg.norm(v2))
angle = np.arccos(cos_angle) / np.pi * 180
return angle
except Exception:
return np.inf
def format_thermal(one_d_array):
two_d_array = []
i = 0
for row in range(8):
temp = []
for col in range(8):
temp.append(one_d_array[i])
i = i + 1
two_d_array.append(temp)
return two_d_array
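# e.g. format_thermal(list(range(64))) -> [[0, 1, ..., 7], [8, ..., 15], ..., [56, ..., 63]]
# (reshapes the 64-value thermal frame into an 8x8 grid)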
def main():
mtx, dist = init_parameters()
cap = open_camera()
initialization = True
while True:
if len(conn_pool) < 3:
try:
client, _ = central.accept()
# print('address: {},port: {} is connected'.format(addr[0], addr[1]))
conn_pool.append(client)
except BlockingIOError:
pass
else:
try:
frame = capture_frame(cap)
corners, ids, _ = detect_aruco(frame)
if ids is not None:
aruco.drawDetectedMarkers(frame, corners, ids)
rvec, tvec, _objPoints = aruco.estimatePoseSingleMarkers(corners, 0.158, mtx, dist)
for i in range(rvec.shape[0]):
aruco.drawAxis(frame, mtx, dist, rvec[i, :, :], tvec[i, :, :], 0.1)
aruco.drawDetectedMarkers(frame, corners, ids, (0, 0, 255))
get_position(ids, tvec, position)
get_orientation(ids, rvec, orientation)
if initialization:
if ids.shape[0] >= 4:
initialization = False
agent_1 = Agent(233, order=0, state=21)
agent_2 = Agent(234, order=1)
agent_3 = Agent(235, order=2)
agent_list = [agent_1, agent_2, agent_3]
for agent_id, id in zip((agent_1.id, agent_2.id, agent_3.id), (101, 102, 103)):
position[str(agent_id) + 'come_from'] = position[id]
position[str(agent_id) + 'target'] = position[104]
for agent in agent_list:
agent.set_agent_list(agent_list)
print('initialization complete...')
else:
print('initializing...')
if not initialization:
if agent_1.id in position and agent_2.id in position and agent_3.id in position:
for agent in agent_list:
agent.set_location()
agent.set_orientation()
agent.state_control_2()
if cv2.waitKey(1) & 0xFF == ord('q'):
for agent in agent_list:
agent.stop()
agent.quit()
break
cv2.imshow("Capture", frame)
except(BlockingIOError, ConnectionResetError):
print("Error 2")
pass
cap.release()
cv2.destroyAllWindows()
if __name__ == '__main__':
main()
|
[
"cv2.GaussianBlur",
"cv2.aruco.drawDetectedMarkers",
"numpy.arctan2",
"socket.socket",
"cv2.aruco.detectMarkers",
"numpy.linalg.norm",
"cv2.imshow",
"json.loads",
"cv2.cvtColor",
"cv2.aruco.drawAxis",
"numpy.arccos",
"cv2.destroyAllWindows",
"cv2.aruco.estimatePoseSingleMarkers",
"cv2.waitKey",
"cv2.aruco.Dictionary_get",
"cv2.Rodrigues",
"get_img.get_img",
"cv2.aruco.DetectorParameters_create",
"cv2.VideoCapture",
"numpy.array"
] |
[((252, 301), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (265, 301), False, 'import socket\n'), ((570, 594), 'numpy.array', 'np.array', (['[35, 110, 106]'], {}), '([35, 110, 106])\n', (578, 594), True, 'import numpy as np\n'), ((609, 633), 'numpy.array', 'np.array', (['[77, 255, 255]'], {}), '([77, 255, 255])\n', (617, 633), True, 'import numpy as np\n'), ((646, 669), 'numpy.array', 'np.array', (['[156, 43, 46]'], {}), '([156, 43, 46])\n', (654, 669), True, 'import numpy as np\n'), ((682, 707), 'numpy.array', 'np.array', (['[180, 255, 255]'], {}), '([180, 255, 255])\n', (690, 707), True, 'import numpy as np\n'), ((723, 745), 'numpy.array', 'np.array', (['[26, 43, 46]'], {}), '([26, 43, 46])\n', (731, 745), True, 'import numpy as np\n'), ((761, 785), 'numpy.array', 'np.array', (['[34, 255, 255]'], {}), '([34, 255, 255])\n', (769, 785), True, 'import numpy as np\n'), ((11882, 11901), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(1)'], {}), '(1)\n', (11898, 11901), False, 'import cv2\n'), ((11994, 12067), 'numpy.array', 'np.array', (['[[1051.1, 0, 695.0741], [0, 1052.2, 297.7604], [0.0, 0.0, 1.0]]'], {}), '([[1051.1, 0, 695.0741], [0, 1052.2, 297.7604], [0.0, 0.0, 1.0]])\n', (12002, 12067), True, 'import numpy as np\n'), ((12116, 12159), 'numpy.array', 'np.array', (['[[-0.4223, 0.1412, 0, 0, 0.0921]]'], {}), '([[-0.4223, 0.1412, 0, 0, 0.0921]])\n', (12124, 12159), True, 'import numpy as np\n'), ((12247, 12281), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['frame', '(5, 5)', '(0)'], {}), '(frame, (5, 5), 0)\n', (12263, 12281), False, 'import cv2\n'), ((12337, 12376), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2GRAY'], {}), '(frame, cv2.COLOR_BGR2GRAY)\n', (12349, 12376), False, 'import cv2\n'), ((12394, 12427), 'cv2.aruco.DetectorParameters_create', 'aruco.DetectorParameters_create', ([], {}), '()\n', (12425, 12427), True, 'import cv2.aruco as aruco\n'), ((12445, 12485), 'cv2.aruco.Dictionary_get', 'aruco.Dictionary_get', (['aruco.DICT_6X6_250'], {}), '(aruco.DICT_6X6_250)\n', (12465, 12485), True, 'import cv2.aruco as aruco\n'), ((12510, 12570), 'cv2.aruco.detectMarkers', 'aruco.detectMarkers', (['gray', 'aruco_dict'], {'parameters': 'parameters'}), '(gray, aruco_dict, parameters=parameters)\n', (12529, 12570), True, 'import cv2.aruco as aruco\n'), ((16575, 16598), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (16596, 16598), False, 'import cv2\n'), ((2757, 2773), 'numpy.array', 'np.array', (['[1, 0]'], {}), '([1, 0])\n', (2765, 2773), True, 'import numpy as np\n'), ((4390, 4402), 'get_img.get_img', 'gi', (['self.url'], {}), '(self.url)\n', (4392, 4402), True, 'from get_img import get_img as gi\n'), ((4961, 4977), 'numpy.array', 'np.array', (['[1, 0]'], {}), '([1, 0])\n', (4969, 4977), True, 'import numpy as np\n'), ((12843, 12862), 'cv2.Rodrigues', 'cv2.Rodrigues', (['temp'], {}), '(temp)\n', (12856, 12862), False, 'import cv2\n'), ((13062, 13097), 'numpy.linalg.norm', 'np.linalg.norm', (['(pos[id1] - pos[id2])'], {}), '(pos[id1] - pos[id2])\n', (13076, 13097), True, 'import numpy as np\n'), ((2808, 2826), 'numpy.linalg.norm', 'np.linalg.norm', (['v1'], {}), '(v1)\n', (2822, 2826), True, 'import numpy as np\n'), ((2829, 2847), 'numpy.linalg.norm', 'np.linalg.norm', (['v2'], {}), '(v2)\n', (2843, 2847), True, 'import numpy as np\n'), ((2865, 2885), 'numpy.arccos', 'np.arccos', (['cos_angle'], {}), '(cos_angle)\n', (2874, 2885), True, 'import numpy as np\n'), ((5012, 5030), 
'numpy.linalg.norm', 'np.linalg.norm', (['v1'], {}), '(v1)\n', (5026, 5030), True, 'import numpy as np\n'), ((5033, 5051), 'numpy.linalg.norm', 'np.linalg.norm', (['v2'], {}), '(v2)\n', (5047, 5051), True, 'import numpy as np\n'), ((5069, 5089), 'numpy.arccos', 'np.arccos', (['cos_angle'], {}), '(cos_angle)\n', (5078, 5089), True, 'import numpy as np\n'), ((7501, 7517), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (7511, 7517), False, 'import json\n'), ((12881, 12909), 'numpy.arctan2', 'np.arctan2', (['r[1][0]', 'r[0][0]'], {}), '(r[1][0], r[0][0])\n', (12891, 12909), True, 'import numpy as np\n'), ((13370, 13388), 'numpy.linalg.norm', 'np.linalg.norm', (['v1'], {}), '(v1)\n', (13384, 13388), True, 'import numpy as np\n'), ((13391, 13409), 'numpy.linalg.norm', 'np.linalg.norm', (['v2'], {}), '(v2)\n', (13405, 13409), True, 'import numpy as np\n'), ((13427, 13447), 'numpy.arccos', 'np.arccos', (['cos_angle'], {}), '(cos_angle)\n', (13436, 13447), True, 'import numpy as np\n'), ((16409, 16437), 'cv2.imshow', 'cv2.imshow', (['"""Capture"""', 'frame'], {}), "('Capture', frame)\n", (16419, 16437), False, 'import cv2\n'), ((14360, 14406), 'cv2.aruco.drawDetectedMarkers', 'aruco.drawDetectedMarkers', (['frame', 'corners', 'ids'], {}), '(frame, corners, ids)\n', (14385, 14406), True, 'import cv2.aruco as aruco\n'), ((14452, 14510), 'cv2.aruco.estimatePoseSingleMarkers', 'aruco.estimatePoseSingleMarkers', (['corners', '(0.158)', 'mtx', 'dist'], {}), '(corners, 0.158, mtx, dist)\n', (14483, 14510), True, 'import cv2.aruco as aruco\n'), ((14691, 14750), 'cv2.aruco.drawDetectedMarkers', 'aruco.drawDetectedMarkers', (['frame', 'corners', 'ids', '(0, 0, 255)'], {}), '(frame, corners, ids, (0, 0, 255))\n', (14716, 14750), True, 'import cv2.aruco as aruco\n'), ((14603, 14670), 'cv2.aruco.drawAxis', 'aruco.drawAxis', (['frame', 'mtx', 'dist', 'rvec[i, :, :]', 'tvec[i, :, :]', '(0.1)'], {}), '(frame, mtx, dist, rvec[i, :, :], tvec[i, :, :], 0.1)\n', (14617, 14670), True, 'import cv2.aruco as aruco\n'), ((16217, 16231), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (16228, 16231), False, 'import cv2\n')]
|
import numpy as np
import sys
sys.path.append("./original_data");
from dataset.mnist import load_mnist
from PIL import Image
import pickle
#This line loads training images, training labels (in or not in one-hot representation), testing images and testing labels (in or not in one-hot representation).
(x_train, t_train), (x_test, t_test) = load_mnist(flatten = True, normalize = False);
(x_train_2, t_train_one_hot), (x_test_2, t_test_one_hot) = load_mnist(flatten = True, normalize = False, one_hot_label = True);
#Used in `print_array()`.
#Converts `0.0` to `0` and `1.0` to `1`, but anything else isn't touched.
def convert_value(v):
if (v == 0):
return 0;
elif (v == 1):
return 1;
else:
return v;
def print_array(a):
print("[", end = "");
for i in range(len(a) - 1):
print(convert_value(a[i]), ", ", sep = "", end = "");
print(convert_value(a[len(a) - 1]), "]", sep = "");
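# e.g. print_array([0.0, 1.0, 0.5]) prints "[0, 1, 0.5]".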
print("image_train.size():", len(x_train));
print("image_train[0].size():", len(x_train[0]));
print("--- image_train[0] ---");
print_array(x_train[0]);
print("--- image_train[59999] ---");
print_array(x_train[59999]);
print();
print("label_train.size():", len(t_train));
print("label_train[0]:", t_train[0]);
print("label_train[59999]:", t_train[59999]);
print();
print("label_train_one_hot.size():", len(t_train_one_hot));
print("--- label_train_one_hot[0] ---")
print_array(t_train_one_hot[0]);
print("--- label_train_one_hot[59999] ---")
print_array(t_train_one_hot[59999]);
print();
print("image_test.size():", len(x_test));
print("image_test[0].size():", len(x_test[0]));
print("--- image_test[0] ---");
print_array(x_test[0]);
print("--- image_test[9999] ---");
print_array(x_test[9999]);
print();
print("label_test.size():", len(t_test));
print("label_test[0]:", t_test[0]);
print("label_test[9999]:", t_test[9999]);
print();
print("label_test_one_hot.size():", len(t_test_one_hot));
print("--- label_test_one_hot[0] ---")
print_array(t_test_one_hot[0]);
print("--- label_test_one_hot[9999] ---")
print_array(t_test_one_hot[9999]);
|
[
"sys.path.append",
"dataset.mnist.load_mnist"
] |
[((31, 65), 'sys.path.append', 'sys.path.append', (['"""./original_data"""'], {}), "('./original_data')\n", (46, 65), False, 'import sys\n'), ((344, 385), 'dataset.mnist.load_mnist', 'load_mnist', ([], {'flatten': '(True)', 'normalize': '(False)'}), '(flatten=True, normalize=False)\n', (354, 385), False, 'from dataset.mnist import load_mnist\n'), ((450, 511), 'dataset.mnist.load_mnist', 'load_mnist', ([], {'flatten': '(True)', 'normalize': '(False)', 'one_hot_label': '(True)'}), '(flatten=True, normalize=False, one_hot_label=True)\n', (460, 511), False, 'from dataset.mnist import load_mnist\n')]
|
import base64
from ..Helpers import get_xml_as_string
from ..Object import Data
class Document(object):
def __init__(self, client):
self.client = client
def add(self, file_path, folder_id, author_first_name, author_last_name, title):
"""
Submit a new document to your iThenticate account.
:file_path: The path to the document on your machine or bytes version of file
:folder_id: The folder where the document should be uploaded to
:author_first_name: First name of first author
:author_last_name: Last name of first author
:title: The title of the document to use in iThenticate
"""
try:
encoded = base64.b64encode(open(file_path, 'rb').read()).decode('utf-8')
filename = file_path.split('/')[-1]
except (AttributeError, ValueError):
# File_path is 'bytes' already
encoded = base64.b64encode(file_path).decode('utf-8')
filename = '{name}.pdf'.format(name=title.replace(' ', '_'))
xml_string = get_xml_as_string('add_document.xml')
xml_string = xml_string.format(
sid=self.client._session_id,
filename=filename,
author_last=author_last_name,
base64=encoded,
title=title,
author_first=author_first_name,
folder_id=folder_id)
xml_response = self.client.doHttpCall(data=xml_string)
return Data(xml_response,
self.client.getAPIStatus(xml_response),
self.client.getAPIMessages(xml_response))
def all(self, folder_id):
"""
Retrieve all documents within a folder
:folder_id: The folder_id to retrieve documents from.
"""
xml_string = get_xml_as_string('get.xml')
xml_string = xml_string.format(sid=self.client._session_id,
method_name='folder.get',
id=folder_id)
xml_response = self.client.doHttpCall(data=xml_string)
return Data(xml_response,
self.client.getAPIStatus(xml_response),
self.client.getAPIMessages(xml_response))
def get(self, document_id):
"""
Retrieve the current document status information within iThenticate.
:document_id: The document id as in iThenticate
"""
xml_string = get_xml_as_string('get.xml')
xml_string = xml_string.format(sid=self.client._session_id,
method_name='document.get',
id=document_id)
xml_response = self.client.doHttpCall(data=xml_string)
return Data(xml_response,
self.client.getAPIStatus(xml_response),
self.client.getAPIMessages(xml_response))
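# Illustrative usage (client, folder id and document id are hypothetical):
#   doc_api = Document(client)
#   upload = doc_api.add('paper.pdf', folder_id=42,
#                        author_first_name='Ada', author_last_name='Lovelace',
#                        title='A Sample Title')
#   status = doc_api.get(document_id=1001)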
|
[
"base64.b64encode"
] |
[((924, 951), 'base64.b64encode', 'base64.b64encode', (['file_path'], {}), '(file_path)\n', (940, 951), False, 'import base64\n')]
|
import csv
import json
import os
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
"""
pre-requisite:
sql:
drop table hijri_calendar_data_file CASCADE;
drop table hijri_calendar CASCADE;
drop table holiday CASCADE;
or
truncate table hijri_calendar_data_file CASCADE;
script
python manage.py makemigrations
python manage.py migrate --fake hijri_calendar_app zero
python manage.py migrate
python manage.py loaddata data_file
Usage:
python manage.py get_hijri_json_from_csv \
'../data/source/Y2019-hijri_calendar.csv' > \
./hijri_calendar_app/fixtures/hijri_calendar_Y2019.json
python manage.py loaddata hijri_calendar_Y2019
python manage.py get_hijri_json_from_csv \
'../data/source/Y2020-hijri_calendar.csv' > \
./hijri_calendar_app/fixtures/hijri_calendar_Y2020.json
python manage.py loaddata hijri_calendar_Y2020
"""
def add_arguments(self, parser):
        help_text = ('Converts a csv file containing Calendar data '
                     'into Fixture JSON format')
parser.add_argument('file_path', type=str, help=help_text)
def handle(self, *args, **options):
file_path = options['file_path']
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
full_file_path = os.path.join(BASE_DIR, file_path)
if not os.path.exists(full_file_path):
raise CommandError('File Path "%s" does not exist' % file_path)
        json_output = self.hijri_calendar_csv_to_fixture_json(
            file_full_path=full_file_path)
        self.stdout.write(json_output)
def hijri_calendar_csv_to_fixture_json(self, file_full_path):
total_lines = 0
with open(file_full_path) as csvfile:
total_lines = sum(1 for line in csvfile)
total_data_rows = total_lines - 1 # totallines minus header
result = '[\n'
with open(file_full_path) as csvfile:
reader = csv.reader(csvfile)
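            # Expected column layout, inferred from the field mapping below:
            # 0=date_value (pk), 1=hijri_day, 2=hijri_month, 3=hijri_month_name,
            # 4=hijri_year, 5=day, 6=month_name, 7=month, 8=year,
            # 9=data_file, 10=hijri_date_value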
for rid, row in enumerate(reader):
# skipping first header row
if rid == 0:
continue
d = {
"model": "hijri_calendar_app.hijricalendar",
"pk": row[0],
"fields": {
# "date_value": pk,
"day": int(row[5]),
"month": int(row[7]),
"year": int(row[8]),
"month_name": row[6],
"hijri_day": int(row[1]),
"hijri_month": int(row[2]),
"hijri_year": int(row[4]),
"hijri_month_name": row[3],
"data_file": row[9],
"hijri_date_value": row[10],
"is_holiday": str(False),
"created": "2019-06-12T07:00:00Z",
"updated": "2019-06-12T07:00:00Z",
}
}
result += str(d)
# skip comma for last row
if rid < total_data_rows:
result += ','
result += '\n'
result += ']\n'
result = result.replace("'", '"')
res = json.loads(result)
json_data = json.dumps(res, indent=4)
return json_data
|
[
"os.path.abspath",
"csv.reader",
"json.loads",
"os.path.exists",
"json.dumps",
"django.core.management.base.CommandError",
"os.path.join"
] |
[((1397, 1430), 'os.path.join', 'os.path.join', (['BASE_DIR', 'file_path'], {}), '(BASE_DIR, file_path)\n', (1409, 1430), False, 'import os\n'), ((3378, 3396), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (3388, 3396), False, 'import json\n'), ((3417, 3442), 'json.dumps', 'json.dumps', (['res'], {'indent': '(4)'}), '(res, indent=4)\n', (3427, 3442), False, 'import json\n'), ((1447, 1477), 'os.path.exists', 'os.path.exists', (['full_file_path'], {}), '(full_file_path)\n', (1461, 1477), False, 'import os\n'), ((1497, 1554), 'django.core.management.base.CommandError', 'CommandError', (['(\'File Path "%s" does not exist\' % file_path)'], {}), '(\'File Path "%s" does not exist\' % file_path)\n', (1509, 1554), False, 'from django.core.management.base import BaseCommand, CommandError\n'), ((2041, 2060), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (2051, 2060), False, 'import csv\n'), ((1344, 1369), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1359, 1369), False, 'import os\n')]
|
import sys
import shutil
import os
import time
from image_quality.handlers.model_builder import Nima
from image_quality.evaluater.predict import fetch_model, score_media
fetch_model() # ensure model weights are local in image quality path
image_quality_path = '/tmp/image_quality/'
image_ranking_model_name = 'MobileNet'
image_ranking_technical_file = os.path.join(image_quality_path, 'models/MobileNet/weights_mobilenet_technical_0.11.hdf5')
image_ranking_aesthetic_file = os.path.join(image_quality_path, 'models/MobileNet/weights_mobilenet_aesthetic_0.07.hdf5')
technical_model = None
aesthetic_model = None
def load_models():
global technical_model
technical_model = Nima(image_ranking_model_name)
technical_model.build()
technical_model.nima_model.load_weights(image_ranking_technical_file)
technical_model.nima_model.summary()
global aesthetic_model
aesthetic_model = Nima(image_ranking_model_name)
aesthetic_model.build()
aesthetic_model.nima_model.load_weights(image_ranking_aesthetic_file)
aesthetic_model.nima_model.summary()
load_models()
models = [technical_model,aesthetic_model]
media_path = sys.argv[1]
ts_start = time.time()
scores = score_media(models, media_path)
ts_end = time.time()
technical_scores = scores[0]['scores']
aesthetic_scores = scores[1]['scores']
print('scores time', ts_end - ts_start)
# python score_media.py s3://d2.welco.me/connoisseur/test/30.jpg
# python score_media.py https://s3.amazonaws.com/d2.welco.me/connoisseur/test/30.jpg
|
[
"image_quality.evaluater.predict.fetch_model",
"image_quality.handlers.model_builder.Nima",
"image_quality.evaluater.predict.score_media",
"time.time",
"os.path.join"
] |
[((170, 183), 'image_quality.evaluater.predict.fetch_model', 'fetch_model', ([], {}), '()\n', (181, 183), False, 'from image_quality.evaluater.predict import fetch_model, score_media\n'), ((366, 460), 'os.path.join', 'os.path.join', (['image_quality_path', '"""models/MobileNet/weights_mobilenet_technical_0.11.hdf5"""'], {}), "(image_quality_path,\n 'models/MobileNet/weights_mobilenet_technical_0.11.hdf5')\n", (378, 460), False, 'import os\n'), ((487, 581), 'os.path.join', 'os.path.join', (['image_quality_path', '"""models/MobileNet/weights_mobilenet_aesthetic_0.07.hdf5"""'], {}), "(image_quality_path,\n 'models/MobileNet/weights_mobilenet_aesthetic_0.07.hdf5')\n", (499, 581), False, 'import os\n'), ((1168, 1179), 'time.time', 'time.time', ([], {}), '()\n', (1177, 1179), False, 'import time\n'), ((1193, 1224), 'image_quality.evaluater.predict.score_media', 'score_media', (['models', 'media_path'], {}), '(models, media_path)\n', (1204, 1224), False, 'from image_quality.evaluater.predict import fetch_model, score_media\n'), ((1237, 1248), 'time.time', 'time.time', ([], {}), '()\n', (1246, 1248), False, 'import time\n'), ((688, 718), 'image_quality.handlers.model_builder.Nima', 'Nima', (['image_ranking_model_name'], {}), '(image_ranking_model_name)\n', (692, 718), False, 'from image_quality.handlers.model_builder import Nima\n'), ((901, 931), 'image_quality.handlers.model_builder.Nima', 'Nima', (['image_ranking_model_name'], {}), '(image_ranking_model_name)\n', (905, 931), False, 'from image_quality.handlers.model_builder import Nima\n')]
|
import torch
from torch.nn import Module
from geomloss import SamplesLoss
class SinkhornLoss(Module):
def __init__(self, blur=0.3, scaling=.8):
super(SinkhornLoss, self).__init__()
self.loss = SamplesLoss("sinkhorn", blur=blur, scaling=scaling)
def forward(self, *args):
x, y = args
x_f = torch.flatten(x, start_dim=2, end_dim=3)
y_f = torch.flatten(y, start_dim=2, end_dim=3)
return torch.mean(self.loss(x_f, y_f))
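# Minimal usage sketch (shapes are an assumption: batched point sets laid out
# as [batch, points, H, W], flattened to [batch, points, H*W] clouds):
#   loss_fn = SinkhornLoss(blur=0.3)
#   x = torch.rand(4, 32, 8, 8)
#   y = torch.rand(4, 32, 8, 8)
#   print(loss_fn(x, y))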
|
[
"geomloss.SamplesLoss",
"torch.flatten"
] |
[((220, 271), 'geomloss.SamplesLoss', 'SamplesLoss', (['"""sinkhorn"""'], {'blur': 'blur', 'scaling': 'scaling'}), "('sinkhorn', blur=blur, scaling=scaling)\n", (231, 271), False, 'from geomloss import SamplesLoss\n'), ((345, 385), 'torch.flatten', 'torch.flatten', (['x'], {'start_dim': '(2)', 'end_dim': '(3)'}), '(x, start_dim=2, end_dim=3)\n', (358, 385), False, 'import torch\n'), ((400, 440), 'torch.flatten', 'torch.flatten', (['y'], {'start_dim': '(2)', 'end_dim': '(3)'}), '(y, start_dim=2, end_dim=3)\n', (413, 440), False, 'import torch\n')]
|
import requests
example = {
"userid": 1,
"timestamp": "2020-04-04T12:17:00",
"feels_sick": False,
"location_trace": [
{
"start_time": "2020-04-03T00:00:00",
"end_time": "2020-04-03T04:00:00",
"geographic_location": {
"lat": 37.8123177,
"lon": -122.2728663
}
},
{
"start_time": "2020-04-03T08:00:00",
"end_time": "2020-04-03T09:18:00",
"geographic_location": {
"lat": 37.8244521,
"lon": -122.2655363
}
}
]
}
r = requests.post('http://localhost:5000/add', json=example)
print(r.content)
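# Hypothetical receiving endpoint, sketched only for context (the actual
# server behind /add is not shown in this file):
#   from flask import Flask, request, jsonify
#   app = Flask(__name__)
#   @app.route('/add', methods=['POST'])
#   def add():
#       report = request.get_json()
#       return jsonify({'status': 'ok', 'userid': report['userid']})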
|
[
"requests.post"
] |
[((625, 681), 'requests.post', 'requests.post', (['"""http://localhost:5000/add"""'], {'json': 'example'}), "('http://localhost:5000/add', json=example)\n", (638, 681), False, 'import requests\n')]
|
import hassapi as hass
import datetime
import re
"""
Class Notification_Manager handles sending text to notfyng service
"""
__NOTIFY__ = "notify/"
SUB_NOTIFICHE = [(" +", " "), (r"\s\s+", "\n")]
class Notification_Manager(hass.Hass):
def initialize(self):
self.text_last_message = self.args["text_last_message"]
def rewrite_notify(self, data, notify_name):
return (
notify_name
if (str(data).lower() in ["true", "on", "yes"] or data == "1" or data == 1 or data == "")
else data
)
def prepare_text(self, html, message, title, timestamp, assistant_name):
if str(html).lower() in ["true","on","yes","1"]:
title = ("<b>[{} - {}] {}</b>".format(assistant_name, timestamp, title))
title =self.replace_regular(title,[("\s<","<")])
else:
title = ("*[{} - {}] {}*".format(assistant_name, timestamp, title))
title =self.replace_regular(title,[("\s\*","*")])
return message, title
def send_notify(self, data, notify_name: str, assistant_name: str):
timestamp = datetime.datetime.now().strftime("%H:%M:%S")
title = data["title"]
message = self.replace_regular(data["message"], SUB_NOTIFICHE)
url = data["url"]
_file = data["file"]
caption = data["caption"]
link = data["link"]
html = data["html"]
notify_name = self.rewrite_notify(data["notify"], notify_name)
### SAVE IN INPUT_TEXT.LAST_MESSAGE
self.set_state(self.text_last_message, state=message[:245])
if notify_name.find("telegram") != -1:
message, title = self.prepare_text(html, message, title, timestamp, assistant_name)
if str(html).lower() not in ["true","on","yes","1"]:
message = message.replace("_", "\_")
if link !="":
message = ("{} {}".format(message,link))
if caption == "":
caption = "{}\n{}".format(title, message)
if url != "":
extra_data = {"photo": {"url": url, "caption": caption}}
elif _file != "":
extra_data = {"photo": {"file": _file, "caption": caption}}
if url != "" or _file != "":
self.call_service(__NOTIFY__ + notify_name, message="", data=extra_data)
else:
self.call_service(__NOTIFY__ + notify_name, message=message, title=title)
elif notify_name.find("whatsapp") != -1:
message, title = self.prepare_text(html, message, title, timestamp, assistant_name)
if link !="":
message = ("{} {}".format(message,link))
message = title + " " + message
self.call_service(__NOTIFY__ + notify_name, message=message)
else:
if title != "":
title = "[{} - {}] {}".format(assistant_name, timestamp, title)
else:
title = "[{} - {}]".format(assistant_name, timestamp)
if link !="":
message = ("{} {}".format(message,link))
self.call_service(__NOTIFY__ + notify_name, message=message, title=title)
def send_persistent(self, data, persistent_notification_info):
timestamp = datetime.datetime.now().strftime("%H:%M:%S")
try:
per_not_info = self.get_state(persistent_notification_info)
        except Exception:
per_not_info = "null"
message = self.replace_regular(data["message"], SUB_NOTIFICHE)
message = "{} - {}".format(timestamp, message)
if per_not_info == "notifying":
old_message = self.get_state(persistent_notification_info, attribute="message")
message = old_message + "\n" + message if len(old_message) < 2500 else message
self.call_service(
"persistent_notification/create", notification_id="info_messages", message=message, title="Centro Messaggi"
)
def replace_regular(self, text: str, substitutions: list):
for old, new in substitutions:
text = re.sub(old, new, text.strip())
return text
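    # e.g. replace_regular("a   b", [(" +", " ")]) -> "a b"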
|
[
"datetime.datetime.now"
] |
[((1141, 1164), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1162, 1164), False, 'import datetime\n'), ((3345, 3368), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3366, 3368), False, 'import datetime\n')]
|
#!/usr/bin/env python3
"""
Testing the routing graph generator
"""
import pytrellis
import sys
pytrellis.load_database("../../database")
chip = pytrellis.Chip("LFE5U-45F")
rg = chip.get_routing_graph()
tile = rg.tiles[pytrellis.Location(9, 71)]
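# For one tile, list every wire together with its downhill/uphill routing
# arcs and any attached BEL (basic element) pins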
for wire in tile.wires:
print("Wire {}:".format(rg.to_str(wire.key())))
for dh in wire.data().downhill:
arc = rg.tiles[dh.loc].arcs[dh.id]
print(" --> R{}C{}_{}".format(arc.sink.loc.y, arc.sink.loc.x, rg.to_str(arc.sink.id)))
for bdh in wire.data().belsDownhill:
print(" ->| R{}C{}_{}.{}".format(bdh.bel.loc.y, bdh.bel.loc.x, rg.to_str(bdh.bel.id), rg.to_str(bdh.pin)))
print()
for uh in wire.data().uphill:
arc = rg.tiles[uh.loc].arcs[uh.id]
print(" <-- R{}C{}_{}".format(arc.source.loc.y, arc.source.loc.x, rg.to_str(arc.source.id)))
for buh in wire.data().belsUphill:
print(" <-| R{}C{}_{}.{}".format(buh.bel.loc.y, buh.bel.loc.x, rg.to_str(buh.bel.id), rg.to_str(buh.pin)))
print()
|
[
"pytrellis.load_database",
"pytrellis.Chip",
"pytrellis.Location"
] |
[((96, 137), 'pytrellis.load_database', 'pytrellis.load_database', (['"""../../database"""'], {}), "('../../database')\n", (119, 137), False, 'import pytrellis\n'), ((145, 172), 'pytrellis.Chip', 'pytrellis.Chip', (['"""LFE5U-45F"""'], {}), "('LFE5U-45F')\n", (159, 172), False, 'import pytrellis\n'), ((219, 244), 'pytrellis.Location', 'pytrellis.Location', (['(9)', '(71)'], {}), '(9, 71)\n', (237, 244), False, 'import pytrellis\n')]
|
import numpy as np
from .VariableUnitTest import VariableUnitTest
from gwlfe.Input.WaterBudget import Percolation
class TestPercolation(VariableUnitTest):
def test_Percolation_ground_truth(self):
z = self.z
np.testing.assert_array_almost_equal(
np.load(self.basepath + "/Percolation.npy"),
Percolation.Percolation(z.NYrs, z.DaysMonth, z.Temp, z.InitSnow_0, z.Prec, z.NRur, z.NUrb, z.Area,
z.CNI_0, z.AntMoist_0, z.Grow_0, z.CNP_0, z.Imper, z.ISRR, z.ISRA, z.CN,
z.UnsatStor_0, z.KV, z.PcntET, z.DayHrs, z.MaxWaterCap), decimal=7)
def test_Percolation(self):
z = self.z
np.testing.assert_array_almost_equal(
Percolation.Percolation_f(z.NYrs, z.DaysMonth, z.Temp, z.InitSnow_0, z.Prec, z.NRur, z.NUrb, z.Area,
z.CNI_0, z.AntMoist_0, z.Grow_0, z.CNP_0, z.Imper, z.ISRR, z.ISRA, z.CN,
z.UnsatStor_0, z.KV, z.PcntET, z.DayHrs, z.MaxWaterCap),
Percolation.Percolation(z.NYrs, z.DaysMonth, z.Temp, z.InitSnow_0, z.Prec, z.NRur, z.NUrb, z.Area,
z.CNI_0, z.AntMoist_0, z.Grow_0, z.CNP_0, z.Imper, z.ISRR, z.ISRA, z.CN,
z.UnsatStor_0, z.KV, z.PcntET, z.DayHrs, z.MaxWaterCap), decimal=7)
|
[
"gwlfe.Input.WaterBudget.Percolation.Percolation_f",
"numpy.load",
"gwlfe.Input.WaterBudget.Percolation.Percolation"
] |
[((281, 324), 'numpy.load', 'np.load', (["(self.basepath + '/Percolation.npy')"], {}), "(self.basepath + '/Percolation.npy')\n", (288, 324), True, 'import numpy as np\n'), ((338, 579), 'gwlfe.Input.WaterBudget.Percolation.Percolation', 'Percolation.Percolation', (['z.NYrs', 'z.DaysMonth', 'z.Temp', 'z.InitSnow_0', 'z.Prec', 'z.NRur', 'z.NUrb', 'z.Area', 'z.CNI_0', 'z.AntMoist_0', 'z.Grow_0', 'z.CNP_0', 'z.Imper', 'z.ISRR', 'z.ISRA', 'z.CN', 'z.UnsatStor_0', 'z.KV', 'z.PcntET', 'z.DayHrs', 'z.MaxWaterCap'], {}), '(z.NYrs, z.DaysMonth, z.Temp, z.InitSnow_0, z.Prec,\n z.NRur, z.NUrb, z.Area, z.CNI_0, z.AntMoist_0, z.Grow_0, z.CNP_0, z.\n Imper, z.ISRR, z.ISRA, z.CN, z.UnsatStor_0, z.KV, z.PcntET, z.DayHrs, z\n .MaxWaterCap)\n', (361, 579), False, 'from gwlfe.Input.WaterBudget import Percolation\n'), ((760, 1003), 'gwlfe.Input.WaterBudget.Percolation.Percolation_f', 'Percolation.Percolation_f', (['z.NYrs', 'z.DaysMonth', 'z.Temp', 'z.InitSnow_0', 'z.Prec', 'z.NRur', 'z.NUrb', 'z.Area', 'z.CNI_0', 'z.AntMoist_0', 'z.Grow_0', 'z.CNP_0', 'z.Imper', 'z.ISRR', 'z.ISRA', 'z.CN', 'z.UnsatStor_0', 'z.KV', 'z.PcntET', 'z.DayHrs', 'z.MaxWaterCap'], {}), '(z.NYrs, z.DaysMonth, z.Temp, z.InitSnow_0, z.Prec,\n z.NRur, z.NUrb, z.Area, z.CNI_0, z.AntMoist_0, z.Grow_0, z.CNP_0, z.\n Imper, z.ISRR, z.ISRA, z.CN, z.UnsatStor_0, z.KV, z.PcntET, z.DayHrs, z\n .MaxWaterCap)\n', (785, 1003), False, 'from gwlfe.Input.WaterBudget import Percolation\n'), ((1079, 1320), 'gwlfe.Input.WaterBudget.Percolation.Percolation', 'Percolation.Percolation', (['z.NYrs', 'z.DaysMonth', 'z.Temp', 'z.InitSnow_0', 'z.Prec', 'z.NRur', 'z.NUrb', 'z.Area', 'z.CNI_0', 'z.AntMoist_0', 'z.Grow_0', 'z.CNP_0', 'z.Imper', 'z.ISRR', 'z.ISRA', 'z.CN', 'z.UnsatStor_0', 'z.KV', 'z.PcntET', 'z.DayHrs', 'z.MaxWaterCap'], {}), '(z.NYrs, z.DaysMonth, z.Temp, z.InitSnow_0, z.Prec,\n z.NRur, z.NUrb, z.Area, z.CNI_0, z.AntMoist_0, z.Grow_0, z.CNP_0, z.\n Imper, z.ISRR, z.ISRA, z.CN, z.UnsatStor_0, z.KV, z.PcntET, z.DayHrs, z\n .MaxWaterCap)\n', (1102, 1320), False, 'from gwlfe.Input.WaterBudget import Percolation\n')]
|
"""add user client type
Revision ID: cbbf822c6b4f
Revises: <KEY>
Create Date: 2021-06-21 18:23:43.202954
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'cbbf822c6b4f'
down_revision = '<KEY>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('client_type', sa.Integer(), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'client_type')
# ### end Alembic commands ###
|
[
"alembic.op.drop_column",
"sqlalchemy.Integer"
] |
[((579, 616), 'alembic.op.drop_column', 'op.drop_column', (['"""user"""', '"""client_type"""'], {}), "('user', 'client_type')\n", (593, 616), False, 'from alembic import op\n'), ((424, 436), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (434, 436), True, 'import sqlalchemy as sa\n')]
|
import graphene
from app.schemas.console import ConsoleOutput
from .mutations import CreateConsole, DeleteConsole, UpdateConsole
from .resolvers import resolve_console
class ConsoleQuery(graphene.ObjectType):
"""Queries to get all console information."""
# consoles = graphene.List(graphene.relay.Node.Field(ConsoleNode))
# consoles = SQLAlchemyConnectionField(ConsoleNode.connection)
consoles = graphene.List(ConsoleOutput)
async def resolve_consoles(parent, info, **kwargs):
"""Wrap resolver function."""
return await resolve_console(parent, info, **kwargs)
class ConsoleMutation(graphene.ObjectType):
"""Mutations related to object model."""
create_console = CreateConsole.Field()
update_console = UpdateConsole.Field()
delete_console = DeleteConsole.Field()
|
[
"graphene.List"
] |
[((417, 445), 'graphene.List', 'graphene.List', (['ConsoleOutput'], {}), '(ConsoleOutput)\n', (430, 445), False, 'import graphene\n')]
|
from random import choice
class RandomList(list):
def get_random_element(self):
element = choice(self)
self.remove(element)
return element
# previous course and doesn't work
# def get_random_element(self):
# element_index = randint(0, len(self) - 1)
# element = self[element_index]
# self.pop(element_index)
# return element
# test first zero
import unittest
from unittest import mock
import random
class RandomListTests(unittest.TestCase):
def test_zero_first(self):
        mocked_choice = lambda x: 5
        # patch 'choice' where it is actually looked up: this module imports it
        # directly, so patching 'random.choice' would not affect RandomList
        with mock.patch(__name__ + '.choice', mocked_choice):
li = RandomList()
li.append(4)
li.append(3)
li.append(5)
self.assertEqual(li.get_random_element(), 5)
if __name__ == '__main__':
unittest.main()
# rl = RandomList([1, 2, 3, 4])
# print(rl)
# rl.append(-1)
# print(rl)
# print(rl.get_random_element())
# print(rl)
|
[
"unittest.main",
"random.choice",
"unittest.mock.patch"
] |
[((833, 848), 'unittest.main', 'unittest.main', ([], {}), '()\n', (846, 848), False, 'import unittest\n'), ((105, 117), 'random.choice', 'choice', (['self'], {}), '(self)\n', (111, 117), False, 'from random import choice\n'), ((595, 637), 'unittest.mock.patch', 'mock.patch', (['"""random.choice"""', 'mocked_choice'], {}), "('random.choice', mocked_choice)\n", (605, 637), False, 'from unittest import mock\n')]
|
"""
Solution-based probabilistic linear solvers.
Implementations of solution-based linear solvers which perform inference on the solution
of a linear system given linear observations.
"""
import warnings
import numpy as np
from probnum.linalg.linearsolvers.matrixbased import ProbabilisticLinearSolver
class SolutionBasedSolver(ProbabilisticLinearSolver):
"""
Solver iteration of BayesCG.
Implements the solve iteration of the solution-based solver BayesCG [1]_.
Parameters
----------
A : array-like or LinearOperator or RandomVariable, shape=(n,n)
The square matrix or linear operator of the linear system.
b : array_like, shape=(n,) or (n, nrhs)
Right-hand side vector or matrix in :math:`A x = b`.
References
----------
.. [1] <NAME> al., A Bayesian Conjugate Gradient Method, *Bayesian
Analysis*, 2019, 14, 937-1012
"""
def __init__(self, A, b, x0=None):
self.x0 = x0
super().__init__(A=A, b=b)
def has_converged(self, iter, maxiter, resid=None, atol=None, rtol=None):
"""
Check convergence of a linear solver.
Evaluates a set of convergence criteria based on its input arguments to decide
whether the iteration has converged.
Parameters
----------
iter : int
Current iteration of solver.
maxiter : int
Maximum number of iterations
resid : array-like
Residual vector :math:`\\lVert r_i \\rVert = \\lVert Ax_i - b \\rVert` of
the current iteration.
atol : float
Absolute residual tolerance. Stops if
:math:`\\lVert r_i \\rVert < \\text{atol}`.
rtol : float
Relative residual tolerance. Stops if
:math:`\\lVert r_i \\rVert < \\text{rtol} \\lVert b \\rVert`.
Returns
-------
has_converged : bool
True if the method has converged.
convergence_criterion : str
Convergence criterion which caused termination.
"""
# maximum iterations
if iter >= maxiter:
warnings.warn(
"Iteration terminated. Solver reached the maximum number of iterations."
)
return True, "maxiter"
# residual below error tolerance
elif np.linalg.norm(resid) <= atol:
return True, "resid_atol"
elif np.linalg.norm(resid) <= rtol * np.linalg.norm(self.b):
return True, "resid_rtol"
else:
return False, ""
def solve(self, callback=None, maxiter=None, atol=None, rtol=None):
raise NotImplementedError
|
[
"warnings.warn",
"numpy.linalg.norm"
] |
[((2132, 2224), 'warnings.warn', 'warnings.warn', (['"""Iteration terminated. Solver reached the maximum number of iterations."""'], {}), "(\n 'Iteration terminated. Solver reached the maximum number of iterations.')\n", (2145, 2224), False, 'import warnings\n'), ((2339, 2360), 'numpy.linalg.norm', 'np.linalg.norm', (['resid'], {}), '(resid)\n', (2353, 2360), True, 'import numpy as np\n'), ((2421, 2442), 'numpy.linalg.norm', 'np.linalg.norm', (['resid'], {}), '(resid)\n', (2435, 2442), True, 'import numpy as np\n'), ((2453, 2475), 'numpy.linalg.norm', 'np.linalg.norm', (['self.b'], {}), '(self.b)\n', (2467, 2475), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test MNStorage.create() and MNRead.get() with revision chains."""
import io
import pytest
import responses
import d1_common.types.exceptions
import d1_gmn.tests.gmn_mock
import d1_gmn.tests.gmn_test_case
import d1_test.instance_generator.identifier
class TestCreateAndGetRevision(d1_gmn.tests.gmn_test_case.GMNTestCase):
@responses.activate
def test_1000(self, gmn_client_v1_v2):
"""MNStorage.create(): Creating a standalone object with new PID and SID does
not raise exception."""
self.create_obj(gmn_client_v1_v2)
@responses.activate
def test_1010(self, gmn_client_v2):
"""MNStorage.create(): Reusing existing SID as PID when creating a standalone
object raises IdentifierNotUnique.
Only applicable to v2.
"""
pid, sid, sciobj_bytes, sysmeta_pyxb = self.create_obj(gmn_client_v2, sid=True)
with pytest.raises(d1_common.types.exceptions.IdentifierNotUnique):
self.create_obj(gmn_client_v2, sid)
@responses.activate
def test_1020(self, gmn_client_v2):
"""MNStorage.create(): Attempting to reuse existing SID as SID when creating a
standalone object raises IdentifierNotUnique.
Only applicable to v2.
"""
pid, sid, sciobj_bytes, sysmeta_pyxb = self.create_obj(gmn_client_v2, sid=True)
with pytest.raises(d1_common.types.exceptions.IdentifierNotUnique):
self.create_obj(gmn_client_v2, sid=sid)
@responses.activate
def test_1030(self):
"""MNStorage.get(): v2.get() retrieves object created with v1.create()"""
pid, sid, send_sciobj_bytes, send_sysmeta_pyxb = self.create_obj(self.client_v1)
recv_sciobj_bytes, recv_sysmeta_pyxb = self.get_obj(self.client_v2, pid)
assert send_sciobj_bytes == recv_sciobj_bytes
assert recv_sysmeta_pyxb.identifier.value() == pid
assert recv_sysmeta_pyxb.seriesId is None
@responses.activate
def test_1040(self):
"""MNStorage.get(): v1.get() retrieves object created with v2.create()"""
pid, sid, send_sciobj_bytes, send_sysmeta_pyxb = self.create_obj(self.client_v2)
recv_sciobj_bytes, recv_sysmeta_pyxb = self.get_obj(self.client_v1, pid)
assert send_sciobj_bytes == recv_sciobj_bytes
assert recv_sysmeta_pyxb.identifier.value() == pid
assert not hasattr(recv_sysmeta_pyxb, "seriesId")
@responses.activate
def test_1050(self):
"""MNStorage.get(): Attempting to pass a SID to v1.get() raises NotFound even
though the SID exists (by design, we don't resolve SIDs for v1)"""
pid, sid, sciobj_bytes, sysmeta_pyxb = self.create_obj(self.client_v2, sid=True)
with pytest.raises(d1_common.types.exceptions.NotFound):
sciobj_bytes, sysmeta_pyxb = self.get_obj(self.client_v1, sid)
@responses.activate
def test_1060(self, gmn_client_v1_v2):
"""MNStorage.create(): Creating standalone object with sysmeta.obsoletes
pointing to KNOWN object raises InvalidSystemMetadata."""
with d1_gmn.tests.gmn_mock.disable_auth():
old_pid, old_sid, old_sciobj_bytes, old_sysmeta_pyxb = self.create_obj(
gmn_client_v1_v2
)
new_pid, sid, new_sciobj_bytes, new_sysmeta_pyxb = self.generate_sciobj_with_defaults(
gmn_client_v1_v2
)
new_sysmeta_pyxb.obsoletes = old_pid
with pytest.raises(d1_common.types.exceptions.InvalidSystemMetadata):
gmn_client_v1_v2.create(
new_pid, io.BytesIO(new_sciobj_bytes), new_sysmeta_pyxb
)
@responses.activate
def test_1070(self, gmn_client_v1_v2):
"""MNStorage.create(): Creating standalone object with sysmeta_pyxb.obsoletes
pointing to UNKNOWN object raises InvalidSystemMetadata."""
with d1_gmn.tests.gmn_mock.disable_auth():
new_pid, sid, sciobj_bytes, sysmeta_pyxb = self.generate_sciobj_with_defaults(
gmn_client_v1_v2
)
sysmeta_pyxb.obsoletes = (
d1_test.instance_generator.identifier.generate_pid()
)
with pytest.raises(d1_common.types.exceptions.InvalidSystemMetadata):
gmn_client_v1_v2.create(new_pid, io.BytesIO(sciobj_bytes), sysmeta_pyxb)
@responses.activate
def test_1080(self, gmn_client_v1_v2):
"""MNStorage.create(): Creating standalone object with sysmeta.obsoletedBy
pointing to KNOWN object raises InvalidSystemMetadata."""
with d1_gmn.tests.gmn_mock.disable_auth():
old_pid, old_sid, old_sciobj_bytes, old_sysmeta_pyxb = self.create_obj(
gmn_client_v1_v2
)
new_pid, sid, new_sciobj_bytes, new_sysmeta_pyxb = self.generate_sciobj_with_defaults(
gmn_client_v1_v2
)
new_sysmeta_pyxb.obsoletedBy = old_pid
with pytest.raises(d1_common.types.exceptions.InvalidSystemMetadata):
gmn_client_v1_v2.create(
new_pid, io.BytesIO(new_sciobj_bytes), new_sysmeta_pyxb
)
@responses.activate
def test_1090(self, gmn_client_v1_v2):
"""MNStorage.create(): Creating standalone object with sysmeta_pyxb.obsoletedBy
pointing to UNKNOWN object raises InvalidSystemMetadata."""
with d1_gmn.tests.gmn_mock.disable_auth():
new_pid, sid, sciobj_bytes, sysmeta_pyxb = self.generate_sciobj_with_defaults(
gmn_client_v1_v2
)
sysmeta_pyxb.obsoletedBy = (
d1_test.instance_generator.identifier.generate_pid()
)
with pytest.raises(d1_common.types.exceptions.InvalidSystemMetadata):
gmn_client_v1_v2.create(new_pid, io.BytesIO(sciobj_bytes), sysmeta_pyxb)
|
[
"io.BytesIO",
"pytest.raises"
] |
[((1709, 1770), 'pytest.raises', 'pytest.raises', (['d1_common.types.exceptions.IdentifierNotUnique'], {}), '(d1_common.types.exceptions.IdentifierNotUnique)\n', (1722, 1770), False, 'import pytest\n'), ((2173, 2234), 'pytest.raises', 'pytest.raises', (['d1_common.types.exceptions.IdentifierNotUnique'], {}), '(d1_common.types.exceptions.IdentifierNotUnique)\n', (2186, 2234), False, 'import pytest\n'), ((3539, 3589), 'pytest.raises', 'pytest.raises', (['d1_common.types.exceptions.NotFound'], {}), '(d1_common.types.exceptions.NotFound)\n', (3552, 3589), False, 'import pytest\n'), ((4276, 4339), 'pytest.raises', 'pytest.raises', (['d1_common.types.exceptions.InvalidSystemMetadata'], {}), '(d1_common.types.exceptions.InvalidSystemMetadata)\n', (4289, 4339), False, 'import pytest\n'), ((5027, 5090), 'pytest.raises', 'pytest.raises', (['d1_common.types.exceptions.InvalidSystemMetadata'], {}), '(d1_common.types.exceptions.InvalidSystemMetadata)\n', (5040, 5090), False, 'import pytest\n'), ((5795, 5858), 'pytest.raises', 'pytest.raises', (['d1_common.types.exceptions.InvalidSystemMetadata'], {}), '(d1_common.types.exceptions.InvalidSystemMetadata)\n', (5808, 5858), False, 'import pytest\n'), ((6550, 6613), 'pytest.raises', 'pytest.raises', (['d1_common.types.exceptions.InvalidSystemMetadata'], {}), '(d1_common.types.exceptions.InvalidSystemMetadata)\n', (6563, 6613), False, 'import pytest\n'), ((4411, 4439), 'io.BytesIO', 'io.BytesIO', (['new_sciobj_bytes'], {}), '(new_sciobj_bytes)\n', (4421, 4439), False, 'import io\n'), ((5141, 5165), 'io.BytesIO', 'io.BytesIO', (['sciobj_bytes'], {}), '(sciobj_bytes)\n', (5151, 5165), False, 'import io\n'), ((5930, 5958), 'io.BytesIO', 'io.BytesIO', (['new_sciobj_bytes'], {}), '(new_sciobj_bytes)\n', (5940, 5958), False, 'import io\n'), ((6664, 6688), 'io.BytesIO', 'io.BytesIO', (['sciobj_bytes'], {}), '(sciobj_bytes)\n', (6674, 6688), False, 'import io\n')]
|
import yaml
from yaml import Loader
from depth2mesh import data
from depth2mesh import metaavatar
method_dict = {
'metaavatar': metaavatar,
}
# General config
def load_config(path, default_path=None):
''' Loads config file.
Args:
path (str): path to config file
        default_path (str): optional path to a default config file, used
            when the config does not declare ``inherit_from``
'''
# Load configuration from file itself
with open(path, 'r') as f:
cfg_special = yaml.load(f, Loader)
# Check if we should inherit from a config
inherit_from = cfg_special.get('inherit_from')
# If yes, load this config first as default
# If no, use the default_path
if inherit_from is not None:
cfg = load_config(inherit_from, default_path)
elif default_path is not None:
with open(default_path, 'r') as f:
cfg = yaml.load(f, Loader)
else:
cfg = dict()
# Include main configuration
update_recursive(cfg, cfg_special)
return cfg
def update_recursive(dict1, dict2):
''' Update two config dictionaries recursively.
Args:
dict1 (dict): first dictionary to be updated
dict2 (dict): second dictionary which entries should be used
'''
for k, v in dict2.items():
if k not in dict1:
dict1[k] = dict()
if isinstance(v, dict):
update_recursive(dict1[k], v)
else:
dict1[k] = v
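# A minimal sketch of the merge behavior above (hypothetical values): keys
# in the second dict override or extend the first one, recursively for
# nested dicts.
#
#   base = {'model': {'decoder': 'mlp', 'hidden': 128}}
#   special = {'model': {'hidden': 256}, 'training': {'lr': 1e-4}}
#   update_recursive(base, special)
#   # base == {'model': {'decoder': 'mlp', 'hidden': 256},
#   #          'training': {'lr': 1e-4}}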
# Models
def get_model(cfg, device=None, dataset=None):
''' Returns the model instance.
Args:
cfg (dict): config dictionary
device (device): pytorch device
dataset (dataset): dataset
'''
method = cfg['method']
model = method_dict[method].config.get_model(
cfg, device=device, dataset=dataset)
return model
# Trainer
def get_trainer(model, optimizer, cfg, device):
''' Returns a trainer instance.
Args:
model (nn.Module): the model which is used
optimizer (optimizer): pytorch optimizer
cfg (dict): config dictionary
device (device): pytorch device
'''
method = cfg['method']
trainer = method_dict[method].config.get_trainer(
model, optimizer, cfg, device)
return trainer
# Datasets
def get_dataset(mode, cfg, subject_idx=None, cloth_split=None, act_split=None, subsampling_rate=1, start_offset=0):
''' Returns the dataset.
Args:
mode (str): which mode the dataset is. Can be either train, val or test
cfg (dict): config dictionary
subject_idx (int or list of int): which subject(s) to use, None means using all subjects
        cloth_split (list of str): which cloth type(s) to load. If None, will load all cloth types
act_split (list of str): which action(s) to load. If None, will load all actions
subsampling_rate (int): frame subsampling rate for the dataset
start_offset (int): starting frame offset
'''
method = cfg['method']
dataset_type = cfg['data']['dataset']
dataset_folder = cfg['data']['path']
use_aug = cfg['data']['use_aug']
normalized_scale = cfg['data']['normalized_scale']
splits = {
'train': cfg['data']['train_split'],
'val': cfg['data']['val_split'],
'test': cfg['data']['test_split'],
}
split = splits[mode]
# Get cloth-type and action splits from config file, if they are
# not specified
if cloth_split is None:
cloth_splits = {
'train': cfg['data']['train_cloth_types'],
'val': cfg['data']['val_cloth_types'],
'test': cfg['data']['test_cloth_types'],
}
cloth_split = cloth_splits[mode]
if act_split is None:
act_splits = {
'train': cfg['data']['train_action_names'],
'val': cfg['data']['val_action_names'],
'test': cfg['data']['test_action_names'],
}
act_split = act_splits[mode]
# Create dataset
if dataset_type == 'cape_corr':
input_pointcloud_n = cfg['data']['input_pointcloud_n']
single_view = cfg['data']['single_view']
use_raw_scans = cfg['data']['use_raw_scans']
input_pointcloud_noise = cfg['data']['input_pointcloud_noise']
keep_aspect_ratio = cfg['model']['keep_aspect_ratio']
dataset = data.CAPECorrDataset(
dataset_folder,
subjects=split,
mode=mode,
use_aug=use_aug,
input_pointcloud_n=input_pointcloud_n,
single_view=single_view,
cloth_types=cloth_split,
action_names=act_split,
subject_idx=subject_idx,
input_pointcloud_noise=input_pointcloud_noise,
use_raw_scans=use_raw_scans,
normalized_scale=normalized_scale,
subsampling_rate=subsampling_rate,
start_offset=start_offset,
keep_aspect_ratio=keep_aspect_ratio,
)
else:
raise ValueError('Invalid dataset "%s"' % cfg['data']['dataset'])
return dataset
|
[
"yaml.load",
"depth2mesh.data.CAPECorrDataset"
] |
[((446, 466), 'yaml.load', 'yaml.load', (['f', 'Loader'], {}), '(f, Loader)\n', (455, 466), False, 'import yaml\n'), ((4371, 4825), 'depth2mesh.data.CAPECorrDataset', 'data.CAPECorrDataset', (['dataset_folder'], {'subjects': 'split', 'mode': 'mode', 'use_aug': 'use_aug', 'input_pointcloud_n': 'input_pointcloud_n', 'single_view': 'single_view', 'cloth_types': 'cloth_split', 'action_names': 'act_split', 'subject_idx': 'subject_idx', 'input_pointcloud_noise': 'input_pointcloud_noise', 'use_raw_scans': 'use_raw_scans', 'normalized_scale': 'normalized_scale', 'subsampling_rate': 'subsampling_rate', 'start_offset': 'start_offset', 'keep_aspect_ratio': 'keep_aspect_ratio'}), '(dataset_folder, subjects=split, mode=mode, use_aug=\n use_aug, input_pointcloud_n=input_pointcloud_n, single_view=single_view,\n cloth_types=cloth_split, action_names=act_split, subject_idx=\n subject_idx, input_pointcloud_noise=input_pointcloud_noise,\n use_raw_scans=use_raw_scans, normalized_scale=normalized_scale,\n subsampling_rate=subsampling_rate, start_offset=start_offset,\n keep_aspect_ratio=keep_aspect_ratio)\n', (4391, 4825), False, 'from depth2mesh import data\n'), ((832, 852), 'yaml.load', 'yaml.load', (['f', 'Loader'], {}), '(f, Loader)\n', (841, 852), False, 'import yaml\n')]
|
import torch
from weakvtg.loss import loss_orthogonal
def test_loss_orthogonal():
    X = torch.tensor([1, -1, 1, -1, 0, 0, 0.236, -0.751], dtype=torch.float)
y = torch.tensor([1, -1, -1, 1, -1, 1, -1, 1], dtype=torch.float)
assert torch.equal(
loss_orthogonal(X, y),
-1 * torch.tensor([1, 0, 0, -1, 0, 0, 0, -.751]) + torch.tensor([0, 1, 1, 0, 0, 0, .236 ** 2, 0])
)
|
[
"weakvtg.loss.loss_orthogonal",
"torch.tensor"
] |
[((171, 232), 'torch.tensor', 'torch.tensor', (['[1, -1, -1, 1, -1, 1, -1, 1]'], {'dtype': 'torch.float'}), '([1, -1, -1, 1, -1, 1, -1, 1], dtype=torch.float)\n', (183, 232), False, 'import torch\n'), ((93, 161), 'torch.tensor', 'torch.tensor', (['[1, -1, 1, -1, 0, 0, 0.236, -0.751]'], {'dtype': 'torch.float'}), '([1, -1, 1, -1, 0, 0, 0.236, -0.751], dtype=torch.float)\n', (105, 161), False, 'import torch\n'), ((273, 294), 'weakvtg.loss.loss_orthogonal', 'loss_orthogonal', (['X', 'y'], {}), '(X, y)\n', (288, 294), False, 'from weakvtg.loss import loss_orthogonal\n'), ((355, 402), 'torch.tensor', 'torch.tensor', (['[0, 1, 1, 0, 0, 0, 0.236 ** 2, 0]'], {}), '([0, 1, 1, 0, 0, 0, 0.236 ** 2, 0])\n', (367, 402), False, 'import torch\n'), ((309, 353), 'torch.tensor', 'torch.tensor', (['[1, 0, 0, -1, 0, 0, 0, -0.751]'], {}), '([1, 0, 0, -1, 0, 0, 0, -0.751])\n', (321, 353), False, 'import torch\n')]
|
"""
Django settings for collaborative project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import dj_database_url
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATES_DIR = os.path.join(BASE_DIR, "templates")
STATIC_ROOT = os.path.join(BASE_DIR, "www/assets")
LOCALE_PATHS = (os.path.join(BASE_DIR, "locale"),)
MEDIA_ROOT = os.path.join(BASE_DIR, "media")
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.getenv(
"DJANGO_SECRET_KEY", 'gq301$(s^m%n*k$k#u5xw%532tj-nrn4o^26!yb-%=cmu#3swx'
)
DEBUG = False
ALLOWED_HOSTS = ['*',]
# Application definition
INSTALLED_APPS = [
'dal',
'dal_select2',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.forms',
'social_django',
'import_export',
'taggit',
'django_models_from_csv',
'collaborative',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'collaborative.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
TEMPLATES_DIR,
'django_models_from_csv/templates'
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'social_django.context_processors.backends',
'social_django.context_processors.login_redirect',
],
},
},
]
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
# Uncomment these lines (below) to get verbose logging
'django_models_from_csv': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'DEBUG'),
},
'collaborative': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'DEBUG'),
},
},
}
FORM_RENDERER = 'django.forms.renderers.TemplatesSetting'
WSGI_APPLICATION = 'collaborative.wsgi.application'
CSV_MODELS_TEMP_DB = "schemabuilding"
CSV_MODELS_WIZARD_REDIRECT_TO = "/setup-credentials?postsave=True"
CSV_MODELS_AUTO_REGISTER_ADMIN = False
# Put model names here that you want to show up first
# note that these need to be the app_label, not display name
APP_ORDER = [
# imported data sources
'django_models_from_csv',
# django Users
'auth',
'taggit',
]
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
# Set up the database connection dynamically from the DATABASE_URL
# environment variable. Don't change the second database as it's a
# critical part of data source importing.
db_from_env = dj_database_url.config()
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
},
CSV_MODELS_TEMP_DB: {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
DATABASES['default'].update(db_from_env)
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
AUTHENTICATION_BACKENDS = (
'collaborative.auth.WhitelistedGoogleOAuth2',
'social_core.backends.slack.SlackOAuth2',
'django.contrib.auth.backends.ModelBackend',
)
# When you log in, by default you'll be directed to the admin
# overview page. Here, we override that and direct users to an
# endpoint that checks to see if any data sources have been added
# and, if not, will direct them to the wizard. If sources have been
# created, this will direct users to the admin, as usual.
LOGIN_REDIRECT_URL = "/setup-check/"
LOGIN_URL = "/admin"
# You can pass each row imported from a spreadsheet through a custom
# data pipeline function. Every row gets passed into these functions in
# turn, modifying the data to suit your needs. For more information,
# please see the documentation at http://TKTKTK.
DATA_PIPELINE = [
# To have the app automatically redact personally identifiable
# information from a spreadsheet, setup the credentials in the
# google credentials page and then select columns using the
# redact checkbox.
'collaborative.data_pipeline.google_redactor',
# Example data pipeline processor that uppercases everything
# 'collaborative.data_pipeline.uppercase',
]
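# A hypothetical sketch of a custom processor (the exact row signature is
# an assumption inferred from the uppercase example above, not confirmed):
#
# def strip_whitespace(row):
#     return [str(value).strip() for value in row]
#
# It would be enabled by adding its dotted path to DATA_PIPELINE above.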
# Types of private information to filter out, here are some example
# options. A full list can be found here:
# https://cloud.google.com/dlp/docs/infotypes-reference
# COLLAB_PIPE_GOOGLE_DLP_PII_FILTERS = [
# "EMAIL_ADDRESS", "FIRST_NAME", "LAST_NAME", "PHONE_NUMBER",
# "STREET_ADDRESS",
# ]
# Eliminate social auth trailing slashes because Google OAuth
# explodes if you tell it to call back to a slash-ending URL
SOCIAL_AUTH_TRAILING_SLASH = False
# Google Sign In
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = ""
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = ""
# Slack Sign In
SOCIAL_AUTH_SLACK_KEY = ""
SOCIAL_AUTH_SLACK_SECRET = ""
SOCIAL_AUTH_LOGIN_REDIRECT_URL = "/admin/"
SOCIAL_AUTH_SLACK_TEAM = ""
SOCIAL_AUTH_PIPELINE = (
# Get the information we can about the user and return it in a simple
# format to create the user instance later. On some cases the details are
# already part of the auth response from the provider, but sometimes this
# could hit a provider API.
'social_core.pipeline.social_auth.social_details',
# Get the social uid from whichever service we're authing thru. The uid is
# the unique identifier of the given user in the provider.
'social_core.pipeline.social_auth.social_uid',
# Verifies that the current auth process is valid within the current
# project, this is where emails and domains whitelists are applied (if
# defined).
'social_core.pipeline.social_auth.auth_allowed',
# Make sure the user is in the configured Slack team
'collaborative.user.enforce_slack_team',
# Checks if the current social-account is already associated in the site.
'social_core.pipeline.social_auth.social_user',
# Create the user account if they're in a domain (assuming one is defined)
# If a domains whitelist isn't defined or the user trying to authenticate
# isn't within the domain, we *do not* create the user. They will be
# rejected by the subsequent step.
'collaborative.user.create_user_in_domain_whitelist',
# Associates the current social details with another user account with
# the same email address. Otherwise, pause the pipeline if user
# isn't granted access and tell them to request a user be created by
# an admin.
# 'social_core.pipeline.social_auth.associate_by_email',
'collaborative.user.associate_by_email_or_pause',
# # Make up a username for this person, appends a random string at the end if
# # there's any collision.
# 'social_core.pipeline.user.get_username',
# Send a validation email to the user to verify its email address.
# Disabled by default.
# 'social.pipeline.mail.mail_validation',
# # Create a user account if we haven't found one yet.
# 'social_core.pipeline.user.create_user',
# Create the record that associates the social account with the user.
'social_core.pipeline.social_auth.associate_user',
# # Set the user account to is_staff (else they can't use the Admin panel):
# 'collaborative.user.set_staff_status',
# Populate the extra_data field in the social record with the values
# specified by settings (and the default ones like access_token, etc).
'social_core.pipeline.social_auth.load_extra_data',
# Update the user record with any changed info from the auth service.
'social_core.pipeline.user.user_details',
)
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
# Language of the codebase
LANGUAGE_CODE = 'en-us'
# UI languages (for translation)
LANGUAGES = [
('es', 'Spanish'),
('en', 'English'),
]
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
# The URL that static assets will get accessed via the browser
STATIC_URL = '/static/'
# Where static assets are stored for this module
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# Total number of records to import at once so we don't
# get 504 errors on importing. This means large imports
# will depend on the background importer, which has no
# limit.
# You can set this to None to disable timeouts.
MAX_IMPORT_RECORDS = 750
try:
from collaborative.settings_dev import *
except ModuleNotFoundError:
pass
try:
from collaborative.settings_prod import *
except ModuleNotFoundError:
pass
|
[
"os.path.abspath",
"os.path.join",
"os.getenv",
"dj_database_url.config"
] |
[((505, 540), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (517, 540), False, 'import os\n'), ((555, 591), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""www/assets"""'], {}), "(BASE_DIR, 'www/assets')\n", (567, 591), False, 'import os\n'), ((656, 687), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""media"""'], {}), "(BASE_DIR, 'media')\n", (668, 687), False, 'import os\n'), ((905, 993), 'os.getenv', 'os.getenv', (['"""DJANGO_SECRET_KEY"""', '"""gq301$(s^m%n*k$k#u5xw%532tj-nrn4o^26!yb-%=cmu#3swx"""'], {}), "('DJANGO_SECRET_KEY',\n 'gq301$(s^m%n*k$k#u5xw%532tj-nrn4o^26!yb-%=cmu#3swx')\n", (914, 993), False, 'import os\n'), ((3971, 3995), 'dj_database_url.config', 'dj_database_url.config', ([], {}), '()\n', (3993, 3995), False, 'import dj_database_url\n'), ((608, 640), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""locale"""'], {}), "(BASE_DIR, 'locale')\n", (620, 640), False, 'import os\n'), ((9952, 9984), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static"""'], {}), "(BASE_DIR, 'static')\n", (9964, 9984), False, 'import os\n'), ((461, 486), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (476, 486), False, 'import os\n'), ((4091, 4127), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""db.sqlite3"""'], {}), "(BASE_DIR, 'db.sqlite3')\n", (4103, 4127), False, 'import os\n'), ((3016, 3054), 'os.getenv', 'os.getenv', (['"""DJANGO_LOG_LEVEL"""', '"""DEBUG"""'], {}), "('DJANGO_LOG_LEVEL', 'DEBUG')\n", (3025, 3054), False, 'import os\n'), ((3152, 3190), 'os.getenv', 'os.getenv', (['"""DJANGO_LOG_LEVEL"""', '"""DEBUG"""'], {}), "('DJANGO_LOG_LEVEL', 'DEBUG')\n", (3161, 3190), False, 'import os\n')]
|
import math
import numpy as np
#from ..utils import *
from ..metrics import Metrics
from .map_data import StdMapData
class StdMapMetrics():
"""
Class used for calculating pattern attributes and difficulty.
.. warning::
Undocumented functions in this class are not supported and are experimental.
"""
@staticmethod
def calc_tapping_intervals(map_data=[]):
"""
Gets the timing difference between note starting times.
Parameters
----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
            Tuple of ``(times, intervals)``. ``times`` are hitobject timings. ``intervals`` are the timing
            differences between the current and previous note. Resultant array size is ``len(map_data) - 1``.
"""
t = StdMapData.start_times(map_data)
dt = np.diff(t)
return t[1:], dt
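    # For example (hypothetical timings): start times [0, 120, 240] ms
    # yield ([120, 240], [120, 120]) -- each returned time is paired with
    # the gap separating that note from the previous one.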
@staticmethod
def calc_notes_per_sec(map_data=[]):
"""
        Gets number of notes tapped per second based on immediate duration between notes.
Parameters
----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
Tuple of ``(times, nps)``. ``times`` are hitobject timings. ``nps`` is notes per second.
Resultant array size is ``len(map_data) - 1``.
"""
t = StdMapData.start_times(map_data)
dt = 1000/np.diff(t)
return t[1:], dt
@staticmethod
def calc_path_dist(map_data=[]):
"""
Calculates distance between aimpoints. Aimpoints are hitobject start
and end times, and slider ticks.
Parameters
----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
Tuple of ``(times, dists)``. ``times`` are aimpoint timings. ``dists`` are distances
between aimpoints. Resultant array size is ``len(map_data) - 1``.
"""
t = StdMapData.all_times(map_data)
x, y = StdMapData.all_positions(map_data)
return t[1:], Metrics.dists(x, y)
@staticmethod
def calc_path_vel(map_data=[]):
"""
Calculates velocity between aimpoints. Aimpoints are hitobject start
and end times, and slider ticks.
Parameters
----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
Tuple of ``(times, vels)``. ``times`` are aimpoint timings. ``vels`` are based on time and distance
between aimpoints. Resultant array size is ``len(map_data) - 2``.
"""
t = StdMapData.all_times(map_data)
x, y = StdMapData.all_positions(map_data)
return t[1:], Metrics.vel_2d(x, y, t)
@staticmethod
def calc_path_accel(map_data=[]):
"""
Calculates acceleration between aimpoints. Aimpoints are hitobject start
and end times, and slider ticks.
Parameters
----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
            Tuple of ``(times, accels)``. ``times`` are aimpoint timings. ``accels`` are based on
change in velocity between aimpoints. Resultant array size is ``len(map_data) - 3``.
"""
t = StdMapData.all_times(map_data)
x, y = StdMapData.all_positions(map_data)
return t[1:], Metrics.accel_2d(x, y, t)
@staticmethod
def calc_xy_dist(map_data=[]):
"""
Calculates parametric distance between aimpoints. Aimpoints are hitobject start
and end times, and slider ticks.
Parameters
        ----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
Tuple of ``(times, x_dists, y_dists)``. ``times`` are aimpoint timings. ``x_dists`` are distances
between aimpoints in the x-coordinate direction. ``y_dists`` are distances between aimpoints
in the y-coordinate direction. Resultant array size is ``len(map_data) - 1``.
"""
t = StdMapData.all_times(map_data)
x, y = StdMapData.all_positions(map_data)
dx = np.diff(x)
dy = np.diff(y)
return t[1:], dx, dy
@staticmethod
def calc_xy_vel(map_data=[]):
"""
Calculates parametric velocity between aimpoints. Aimpoints are hitobject start
and end times, and slider ticks.
Parameters
----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
Tuple of ``(times, x_vels, y_vels)``. ``times`` are aimpoint timings. ``x_vels`` are velocities
between aimpoints in the x-coordinate direction. ``y_vels`` are velocities between aimpoints
in the y-coordinate direction. Resultant array size is ``len(map_data) - 2``.
"""
t = StdMapData.all_times(map_data)
x, y = StdMapData.all_positions(map_data)
dt = np.diff(t)
dx = np.diff(x)
dy = np.diff(y)
return t[1:], dx/dt, dy/dt
@staticmethod
def calc_xy_accel(map_data=[]):
"""
Calculates parametric acceleration between aimpoints. Aimpoints are hitobject start
and end times, and slider ticks.
Parameters
----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
Tuple of ``(times, x_accels, y_accels)``. ``times`` are aimpoint timings. ``x_accels`` are
accelerations between aimpoints in the x-coordinate direction. ``y_accels`` are accelerations
between aimpoints in the y-coordinate direction. Resultant array size is ``len(map_data) - 3``.
"""
t, vx, vy = StdMapMetrics.calc_xy_vel(map_data.iloc[2:])
dvx = np.diff(vx)
dvy = np.diff(vy)
dt = np.diff(t)
return t[1:], dvx/dt, dvy/dt
@staticmethod
def calc_xy_jerk(map_data=[]):
"""
Calculates parametric jerks between aimpoints. Aimpoints are hitobject start
and end times, and slider ticks.
Parameters
----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
Tuple of ``(times, x_jerks, y_jerks)``. ``times`` are aimpoint timings. ``x_jerks`` are
jerks between aimpoints in the x-coordinate direction. ``y_jerks`` are jerks
between aimpoints in the y-coordinate direction. Resultant array size is ``len(map_data) - 4``.
"""
map_data = np.asarray(map_data[2:])
t, ax, ay = StdMapMetrics.calc_xy_accel(map_data)
dax = np.diff(ax)
day = np.diff(ay)
dt = np.diff(t)
return t[1:], dax/dt, day/dt
@staticmethod
def calc_velocity_start(map_data=[]):
t = StdMapData.start_times(map_data)
x, y = StdMapData.start_positions(map_data)
return t[1:], Metrics.vel_2d(x, y, t)
@staticmethod
def calc_intensity(map_data=[]):
t, v = StdMapMetrics.calc_velocity_start(map_data)
t, nps = StdMapMetrics.calc_notes_per_sec(map_data)
intensity = v*nps
return t, intensity
@staticmethod
def calc_angles(map_data=[]):
"""
Calculates angle between aimpoints. Aimpoints are hitobject start
and end times, and slider ticks.
Parameters
----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
Tuple of ``(times, angles)``. ``times`` are aimpoint timings. ``angles`` are
angles between aimpoints. Resultant array size is ``len(map_data) - 1``.
"""
t = StdMapData.all_times(map_data)
x, y = StdMapData.all_positions(map_data)
return t[1:], Metrics.angle(x, y, t)
@staticmethod
def calc_theta_per_second(map_data=[]):
"""
Calculates immediate path rotation (in radians per second) from previous aimpoint.
Parameters
----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
Tuple of ``(times, rps)``. ``times`` are aimpoint timings. ``rps`` are
radians per second between aimpoints. Resultant array size is ``len(map_data) - 1``.
"""
t, thetas = StdMapMetrics.calc_angles(map_data)
dt = np.diff(t)
return t[1:], thetas*(1000/dt)
@staticmethod
def calc_radial_velocity(map_data=[]):
"""
Calculates radial velocity between aimpoints. Aimpoints are hitobject start
and end times, and slider ticks. Radial velocity is how fast a path
moves in a circle in radians per second. Unlike ``calc_theta_per_second``, which
calculates immediate rotation, this calculates average rotation.
        The difference between the two implementations is apparent when considering zig-zag and circular patterns.
        Zig-zag patterns have no average angular velocity, but have average linear velocity. In a zig-zag
pattern one angle would be positive indicating a rotation in a clockwise direction, and another angle
would be negative indicating a rotation in a counter-clockwise direction. Ultimately those two cancel
out to result in no overall rotation direction. A circular pattern would have either both angles positive
or both angles negative, yielding a net negative or a net positive rotation direction.
Parameters
----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
Tuple of ``(times, avg_rad_vels)``. ``times`` are aimpoint timings. ``avg_rad_vels`` are
average radial velocities. Resultant array size is ``len(map_data) - 2``.
"""
t = StdMapData.all_times(map_data)
x, y = StdMapData.all_positions(map_data)
return t[2:], Metrics.avg_ang_vel(x, y, t[1:])
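    # An illustrative sketch (hypothetical angles): in a circular pattern the
    # per-aimpoint rotations share a sign and accumulate,
    #   [+0.5, +0.5, +0.5] rad -> non-zero average angular velocity
    # while a zig-zag alternates sign and averages out,
    #   [+0.5, -0.5, +0.5] rad -> average angular velocity near zero.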
@staticmethod
def calc_perp_int(map_data=[]):
"""
Calculates perpendicular intensity between aimpoints. Aimpoints are hitobject start
        and end times, and slider ticks. Perpendicular intensity is how strongly the path
        between aimpoints turns 90 deg, factoring in average radial velocity of the path as well as
        overall velocity throughout the path (measured in osu!px*radians/milliseconds^2).
Parameters
----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
Tuple of ``(times, perp_ints)``. ``times`` are aimpoint timings. ``perp_ints`` are
perpendicular intensities. Resultant array size is ``len(map_data) - 2``.
"""
times, rv = StdMapMetrics.calc_radial_velocity(map_data)
times, x_vel, y_vel = StdMapMetrics.calc_xy_vel(map_data)
# Construct vector angles from parametric velocities
theta1 = np.arctan2(y_vel[1:], x_vel[1:])
theta2 = np.arctan2(y_vel[:-1], x_vel[:-1])
# Make stacks 0 angle change
mask = np.where(np.logical_and(y_vel[1:] == 0, x_vel[1:] == 0))[0]
theta1[mask] = theta1[mask - 1]
mask = np.where(np.logical_and(y_vel[:-1] == 0, x_vel[:-1] == 0))[0]
theta2[mask] = theta2[mask - 1]
# Velocity in the perpendicular direction relative to current
dy_vel = np.sin(theta2 - theta1)
return times, rv*dy_vel[1:]
# Linear intensity
@staticmethod
def calc_lin_int(map_data=[]):
"""
Calculates linear intensity between aimpoints. Aimpoints are hitobject start
        and end times, and slider ticks. Linear intensity is how strongly the path
        between aimpoints is linear, factoring in average radial velocity of the path as well as
        overall velocity throughout the path (measured in osu!px*radians/milliseconds^2).
Parameters
----------
map_data : numpy.array
Hitobject data from ``StdMapData.get_aimpoint_data``
Returns
-------
(numpy.array, numpy.array)
Tuple of ``(times, lin_ints)``. ``times`` are aimpoint timings. ``lin_ints`` are
linear intensities. Resultant array size is ``len(map_data) - 2``.
"""
times, rv = StdMapMetrics.calc_radial_velocity(map_data)
times, x_vel, y_vel = StdMapMetrics.calc_xy_vel(map_data)
# Construct vector angles from parametric velocities
theta1 = np.arctan2(y_vel[1:], x_vel[1:])
theta2 = np.arctan2(y_vel[:-1], x_vel[:-1])
# Make stacks 0 angle change
mask = np.where(np.logical_and(y_vel[1:] == 0, x_vel[1:] == 0))[0]
theta1[mask] = theta1[mask - 1]
mask = np.where(np.logical_and(y_vel[:-1] == 0, x_vel[:-1] == 0))[0]
theta2[mask] = theta2[mask - 1]
# Velocity in the parellel direction relative to current
dx_vel = np.cos(theta2 - theta1)
return times, rv*dx_vel[1:]
@staticmethod
def calc_acceleration(map_data=[]):
pass
'''
Response metrics
'''
@staticmethod
def calc_speed_response(resolution=1, x_range=(1, 100)):
return ([x for x in range(*x_range)], [ 1/x for x in range(*x_range) ])
'''
Advanced metrics
'''
@staticmethod
def calc_rhythmic_complexity(map_data=[]):
def calc_harmonic(prev_note_interval, curr_note_interval, target_time, v_scale):
if prev_note_interval == 0: print('WARNING: 0 note interval detected at ', target_time, ' ms')
return -(v_scale/2)*math.cos((2*math.pi)/prev_note_interval*curr_note_interval) + (v_scale/2)
def decay(interval, decay_factor):
return math.exp(-decay_factor*interval)
def speed(interval, speed_factor):
return speed_factor/interval
def calc_note(time, curr_interval, prev_interval, decay_factor, v_scale):
return decay(curr_interval, decay_factor) * calc_harmonic(prev_interval, curr_interval, time, v_scale)
speed_factor = 600.0
v_factor = 10.0
decay_factor = 0.005
time, intervals = StdMapMetrics.calc_tapping_intervals(map_data)
harmonics = [ calc_note(time[i], intervals[i], intervals[i - 1], decay_factor, v_factor) for i in range(1, len(intervals)) ]
return time, [ sum(harmonics[:i])*speed(intervals[i], speed_factor) for i in range(0, len(intervals)) ]
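    # In formula form (with the constants above): each note i contributes
    #   decay(dt_i) * harmonic(dt_{i-1}, dt_i)
    #     = exp(-0.005 * dt_i) * (v/2) * (1 - cos(2*pi * dt_i / dt_{i-1}))
    # and the running sum at note i is scaled by speed(dt_i) = 600 / dt_i.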
@staticmethod
def calc_path_curvature(hitobjects):
pass
@staticmethod
def calc_visual_density(hitobjects):
pass
'''
Skill metrics
'''
@staticmethod
def calc_speed_skill(hitobjects):
pass
@staticmethod
def calc_tapping_skill(hitobjects):
pass
@staticmethod
def calc_targeting_skill(hitobjects):
pass
@staticmethod
def calc_agility_skill(hitobjects):
pass
|
[
"numpy.arctan2",
"numpy.logical_and",
"numpy.asarray",
"numpy.sin",
"numpy.diff",
"numpy.cos"
] |
[((944, 954), 'numpy.diff', 'np.diff', (['t'], {}), '(t)\n', (951, 954), True, 'import numpy as np\n'), ((4638, 4648), 'numpy.diff', 'np.diff', (['x'], {}), '(x)\n', (4645, 4648), True, 'import numpy as np\n'), ((4662, 4672), 'numpy.diff', 'np.diff', (['y'], {}), '(y)\n', (4669, 4672), True, 'import numpy as np\n'), ((5533, 5543), 'numpy.diff', 'np.diff', (['t'], {}), '(t)\n', (5540, 5543), True, 'import numpy as np\n'), ((5557, 5567), 'numpy.diff', 'np.diff', (['x'], {}), '(x)\n', (5564, 5567), True, 'import numpy as np\n'), ((5581, 5591), 'numpy.diff', 'np.diff', (['y'], {}), '(y)\n', (5588, 5591), True, 'import numpy as np\n'), ((6444, 6455), 'numpy.diff', 'np.diff', (['vx'], {}), '(vx)\n', (6451, 6455), True, 'import numpy as np\n'), ((6470, 6481), 'numpy.diff', 'np.diff', (['vy'], {}), '(vy)\n', (6477, 6481), True, 'import numpy as np\n'), ((6496, 6506), 'numpy.diff', 'np.diff', (['t'], {}), '(t)\n', (6503, 6506), True, 'import numpy as np\n'), ((7280, 7304), 'numpy.asarray', 'np.asarray', (['map_data[2:]'], {}), '(map_data[2:])\n', (7290, 7304), True, 'import numpy as np\n'), ((7386, 7397), 'numpy.diff', 'np.diff', (['ax'], {}), '(ax)\n', (7393, 7397), True, 'import numpy as np\n'), ((7412, 7423), 'numpy.diff', 'np.diff', (['ay'], {}), '(ay)\n', (7419, 7423), True, 'import numpy as np\n'), ((7438, 7448), 'numpy.diff', 'np.diff', (['t'], {}), '(t)\n', (7445, 7448), True, 'import numpy as np\n'), ((9283, 9293), 'numpy.diff', 'np.diff', (['t'], {}), '(t)\n', (9290, 9293), True, 'import numpy as np\n'), ((11994, 12026), 'numpy.arctan2', 'np.arctan2', (['y_vel[1:]', 'x_vel[1:]'], {}), '(y_vel[1:], x_vel[1:])\n', (12004, 12026), True, 'import numpy as np\n'), ((12044, 12078), 'numpy.arctan2', 'np.arctan2', (['y_vel[:-1]', 'x_vel[:-1]'], {}), '(y_vel[:-1], x_vel[:-1])\n', (12054, 12078), True, 'import numpy as np\n'), ((12438, 12461), 'numpy.sin', 'np.sin', (['(theta2 - theta1)'], {}), '(theta2 - theta1)\n', (12444, 12461), True, 'import numpy as np\n'), ((13547, 13579), 'numpy.arctan2', 'np.arctan2', (['y_vel[1:]', 'x_vel[1:]'], {}), '(y_vel[1:], x_vel[1:])\n', (13557, 13579), True, 'import numpy as np\n'), ((13597, 13631), 'numpy.arctan2', 'np.arctan2', (['y_vel[:-1]', 'x_vel[:-1]'], {}), '(y_vel[:-1], x_vel[:-1])\n', (13607, 13631), True, 'import numpy as np\n'), ((13986, 14009), 'numpy.cos', 'np.cos', (['(theta2 - theta1)'], {}), '(theta2 - theta1)\n', (13992, 14009), True, 'import numpy as np\n'), ((1581, 1591), 'numpy.diff', 'np.diff', (['t'], {}), '(t)\n', (1588, 1591), True, 'import numpy as np\n'), ((12141, 12187), 'numpy.logical_and', 'np.logical_and', (['(y_vel[1:] == 0)', '(x_vel[1:] == 0)'], {}), '(y_vel[1:] == 0, x_vel[1:] == 0)\n', (12155, 12187), True, 'import numpy as np\n'), ((12257, 12305), 'numpy.logical_and', 'np.logical_and', (['(y_vel[:-1] == 0)', '(x_vel[:-1] == 0)'], {}), '(y_vel[:-1] == 0, x_vel[:-1] == 0)\n', (12271, 12305), True, 'import numpy as np\n'), ((13694, 13740), 'numpy.logical_and', 'np.logical_and', (['(y_vel[1:] == 0)', '(x_vel[1:] == 0)'], {}), '(y_vel[1:] == 0, x_vel[1:] == 0)\n', (13708, 13740), True, 'import numpy as np\n'), ((13810, 13858), 'numpy.logical_and', 'np.logical_and', (['(y_vel[:-1] == 0)', '(x_vel[:-1] == 0)'], {}), '(y_vel[:-1] == 0, x_vel[:-1] == 0)\n', (13824, 13858), True, 'import numpy as np\n')]
|
import datetime
import re
from unittest import TestCase
from module import storage_util
from module.oss_util import read_file_in_oss, copy_file, delete_file
from module.pyplot_util import generate_pie_chart
class TestOSSUtil(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
def test_read_file_in_oss(self):
object_str = read_file_in_oss('test_read_file_in_oss_remotely.json')
object_strip_str = re.sub(r'\s+', '', object_str)
expect_str = str(storage_util.read_json_from_file_locally("test_read_file_in_oss_local.json"))
expect_strip_str = expect_str.replace(" ", "")
expect_strip_str = expect_strip_str.replace("\'", "\"")
self.assertEqual(expect_strip_str, object_strip_str)
def test_save_image_oss(self):
label = ['Fore-End', 'Product Logic', 'Server', 'Third Part', 'Wrong Reported']
data = [7, 3, 15, 3, 15]
generate_pie_chart(label, data, "test_oss_upload_file_" + datetime.date.today().strftime(
"%m_%d_%y") + ".png")
def test_copyfile(self):
copy_file("test_read_file_in_oss_remotely.json", "test_read_file_in_oss_remotely2.json")
source = read_file_in_oss("test_read_file_in_oss_remotely.json")
target = read_file_in_oss("test_read_file_in_oss_remotely2.json")
self.assertEqual(source, target)
def test_delete_file(self):
copy_file("test_read_file_in_oss_remotely.json", "test_read_file_in_oss_remotely2.json")
self.assertTrue(delete_file("test_read_file_in_oss_remotely2.json"))
|
[
"module.storage_util.read_json_from_file_locally",
"module.oss_util.delete_file",
"module.oss_util.read_file_in_oss",
"datetime.date.today",
"re.sub",
"module.oss_util.copy_file"
] |
[((370, 425), 'module.oss_util.read_file_in_oss', 'read_file_in_oss', (['"""test_read_file_in_oss_remotely.json"""'], {}), "('test_read_file_in_oss_remotely.json')\n", (386, 425), False, 'from module.oss_util import read_file_in_oss, copy_file, delete_file\n'), ((453, 483), 're.sub', 're.sub', (['"""\\\\s+"""', '""""""', 'object_str'], {}), "('\\\\s+', '', object_str)\n", (459, 483), False, 'import re\n'), ((1094, 1186), 'module.oss_util.copy_file', 'copy_file', (['"""test_read_file_in_oss_remotely.json"""', '"""test_read_file_in_oss_remotely2.json"""'], {}), "('test_read_file_in_oss_remotely.json',\n 'test_read_file_in_oss_remotely2.json')\n", (1103, 1186), False, 'from module.oss_util import read_file_in_oss, copy_file, delete_file\n'), ((1200, 1255), 'module.oss_util.read_file_in_oss', 'read_file_in_oss', (['"""test_read_file_in_oss_remotely.json"""'], {}), "('test_read_file_in_oss_remotely.json')\n", (1216, 1255), False, 'from module.oss_util import read_file_in_oss, copy_file, delete_file\n'), ((1273, 1329), 'module.oss_util.read_file_in_oss', 'read_file_in_oss', (['"""test_read_file_in_oss_remotely2.json"""'], {}), "('test_read_file_in_oss_remotely2.json')\n", (1289, 1329), False, 'from module.oss_util import read_file_in_oss, copy_file, delete_file\n'), ((1412, 1504), 'module.oss_util.copy_file', 'copy_file', (['"""test_read_file_in_oss_remotely.json"""', '"""test_read_file_in_oss_remotely2.json"""'], {}), "('test_read_file_in_oss_remotely.json',\n 'test_read_file_in_oss_remotely2.json')\n", (1421, 1504), False, 'from module.oss_util import read_file_in_oss, copy_file, delete_file\n'), ((509, 585), 'module.storage_util.read_json_from_file_locally', 'storage_util.read_json_from_file_locally', (['"""test_read_file_in_oss_local.json"""'], {}), "('test_read_file_in_oss_local.json')\n", (549, 585), False, 'from module import storage_util\n'), ((1525, 1576), 'module.oss_util.delete_file', 'delete_file', (['"""test_read_file_in_oss_remotely2.json"""'], {}), "('test_read_file_in_oss_remotely2.json')\n", (1536, 1576), False, 'from module.oss_util import read_file_in_oss, copy_file, delete_file\n'), ((990, 1011), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (1009, 1011), False, 'import datetime\n')]
|
import os
class BaseConfig:
"""Base configuration"""
SQLALCHEMY_TRACK_MODIFICATIONS = False
SECRET_KEY = "<KEY>"
TOKEN_EXPIRATION_DAYS = 30
TOKEN_EXPIRATION_SECONDS = 0
class DevelopmentConfig(BaseConfig):
"""Development configuration"""
SQLALCHEMY_DATABASE_URI = os.environ.get("DATABASE_URL")
|
[
"os.environ.get"
] |
[((296, 326), 'os.environ.get', 'os.environ.get', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (310, 326), False, 'import os\n')]
|
#%%
import cvxpy as cp
import numpy as np
import matplotlib.pyplot as plt
def loss_fn(X, Y, beta):
return cp.norm2(cp.matmul(X, beta) - Y)**2
def regularizer(beta):
return cp.norm1(beta)
def objective_fn(X, Y, beta, lambd):
return loss_fn(X, Y, beta) + lambd * regularizer(beta)
def mse(X, Y, beta):
return (1.0 / X.shape[0]) * loss_fn(X, Y, beta).value
def generate_data(m=100, n=20, sigma=5, density=0.2):
"Generates data matrix X and observations Y."
np.random.seed(1)
beta_star = np.random.randn(n)
idxs = np.random.choice(range(n), int((1-density)*n), replace=False)
for idx in idxs:
beta_star[idx] = 0
X = np.random.randn(m,n)
Y = X.dot(beta_star) + np.random.normal(0, sigma, size=m)
return X, Y, beta_star
m = 100
n = 20
sigma = 5
density = 0.2
X, Y, _ = generate_data(m, n, sigma)
X_train = X[:50, :]
Y_train = Y[:50]
X_test = X[50:, :]
Y_test = Y[50:]
beta = cp.Variable(n)
lambd = cp.Parameter(nonneg=True)
problem = cp.Problem(cp.Minimize(objective_fn(X_train, Y_train, beta, lambd)))
lambd_values = np.logspace(-2, 3, 50)
train_errors = []
test_errors = []
beta_values = []
for v in lambd_values:
lambd.value = v
problem.solve()
train_errors.append(mse(X_train, Y_train, beta))
test_errors.append(mse(X_test, Y_test, beta))
beta_values.append(beta.value)
# matplotlib inline
# config InlineBackend.figure_format = 'svg'
def plot_train_test_errors(train_errors, test_errors, lambd_values):
plt.plot(lambd_values, train_errors, label="Train error")
plt.plot(lambd_values, test_errors, label="Test error")
plt.xscale("log")
plt.legend(loc="upper left")
plt.xlabel(r"$\lambda$", fontsize=16)
plt.title("Mean Squared Error (MSE)")
plt.show()
plot_train_test_errors(train_errors, test_errors, lambd_values)
print('done')
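# A small follow-up sketch: pick the regularization weight that minimizes
# test error in the sweep above (reuses variables defined in this script).
best_idx = int(np.argmin(test_errors))
print('best lambda: {:.4g}, test MSE: {:.4g}'.format(
    lambd_values[best_idx], test_errors[best_idx]))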
|
[
"matplotlib.pyplot.xscale",
"matplotlib.pyplot.title",
"numpy.random.seed",
"cvxpy.Parameter",
"matplotlib.pyplot.plot",
"numpy.random.randn",
"matplotlib.pyplot.show",
"numpy.logspace",
"matplotlib.pyplot.legend",
"cvxpy.matmul",
"cvxpy.norm1",
"numpy.random.normal",
"cvxpy.Variable",
"matplotlib.pyplot.xlabel"
] |
[((936, 950), 'cvxpy.Variable', 'cp.Variable', (['n'], {}), '(n)\n', (947, 950), True, 'import cvxpy as cp\n'), ((959, 984), 'cvxpy.Parameter', 'cp.Parameter', ([], {'nonneg': '(True)'}), '(nonneg=True)\n', (971, 984), True, 'import cvxpy as cp\n'), ((1080, 1102), 'numpy.logspace', 'np.logspace', (['(-2)', '(3)', '(50)'], {}), '(-2, 3, 50)\n', (1091, 1102), True, 'import numpy as np\n'), ((183, 197), 'cvxpy.norm1', 'cp.norm1', (['beta'], {}), '(beta)\n', (191, 197), True, 'import cvxpy as cp\n'), ((484, 501), 'numpy.random.seed', 'np.random.seed', (['(1)'], {}), '(1)\n', (498, 501), True, 'import numpy as np\n'), ((518, 536), 'numpy.random.randn', 'np.random.randn', (['n'], {}), '(n)\n', (533, 536), True, 'import numpy as np\n'), ((666, 687), 'numpy.random.randn', 'np.random.randn', (['m', 'n'], {}), '(m, n)\n', (681, 687), True, 'import numpy as np\n'), ((1496, 1553), 'matplotlib.pyplot.plot', 'plt.plot', (['lambd_values', 'train_errors'], {'label': '"""Train error"""'}), "(lambd_values, train_errors, label='Train error')\n", (1504, 1553), True, 'import matplotlib.pyplot as plt\n'), ((1558, 1613), 'matplotlib.pyplot.plot', 'plt.plot', (['lambd_values', 'test_errors'], {'label': '"""Test error"""'}), "(lambd_values, test_errors, label='Test error')\n", (1566, 1613), True, 'import matplotlib.pyplot as plt\n'), ((1618, 1635), 'matplotlib.pyplot.xscale', 'plt.xscale', (['"""log"""'], {}), "('log')\n", (1628, 1635), True, 'import matplotlib.pyplot as plt\n'), ((1640, 1668), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper left"""'}), "(loc='upper left')\n", (1650, 1668), True, 'import matplotlib.pyplot as plt\n'), ((1673, 1710), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\lambda$"""'], {'fontsize': '(16)'}), "('$\\\\lambda$', fontsize=16)\n", (1683, 1710), True, 'import matplotlib.pyplot as plt\n'), ((1715, 1752), 'matplotlib.pyplot.title', 'plt.title', (['"""Mean Squared Error (MSE)"""'], {}), "('Mean Squared Error (MSE)')\n", (1724, 1752), True, 'import matplotlib.pyplot as plt\n'), ((1757, 1767), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1765, 1767), True, 'import matplotlib.pyplot as plt\n'), ((714, 748), 'numpy.random.normal', 'np.random.normal', (['(0)', 'sigma'], {'size': 'm'}), '(0, sigma, size=m)\n', (730, 748), True, 'import numpy as np\n'), ((121, 139), 'cvxpy.matmul', 'cp.matmul', (['X', 'beta'], {}), '(X, beta)\n', (130, 139), True, 'import cvxpy as cp\n')]
|
import numpy as np
def stringify_vec(vec):
s = ""
for x in vec: s += str(x) + " "
return s
def distance(p1, p2):
pv1 = np.asarray(p1)
pv2 = np.asarray(p2)
return np.linalg.norm(pv1 - pv2)
def multireplace(arr, x, sub_arr):
new_arr = []
for entry in arr:
if (entry == x).all():
new_arr += sub_arr
else:
new_arr += [entry]
return new_arr
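# For example (assuming numpy-array entries, which the (entry == x).all()
# test implies): with points a, b, c, multireplace([a, b], a, [c, c])
# returns [c, c, b].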
def rotate_about_line(point, base_pt, vec, theta):
pv = np.asarray(point)
bpv = np.asarray(base_pt)
lv = np.asarray(vec)
diffv = pv - bpv
diffproj = lv * np.dot(diffv, lv) / np.linalg.norm(lv)**2
projv = bpv + diffproj
rv1 = pv - projv
rv2 = np.cross(lv, rv1)
rv2 = rv2 * np.linalg.norm(rv1) / np.linalg.norm(rv2)
new_pv = projv + rv1 * np.cos(theta) + rv2 * np.sin(theta)
return new_pv
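# A minimal usage sketch (hypothetical values): rotating the point (1, 0, 0)
# by pi/2 about the z-axis through the origin should land near (0, 1, 0).
if __name__ == '__main__':
    print(rotate_about_line([1, 0, 0], [0, 0, 0], [0, 0, 1], np.pi / 2))
    # expected: approximately [0. 1. 0.]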
|
[
"numpy.asarray",
"numpy.cross",
"numpy.sin",
"numpy.linalg.norm",
"numpy.cos",
"numpy.dot"
] |
[((137, 151), 'numpy.asarray', 'np.asarray', (['p1'], {}), '(p1)\n', (147, 151), True, 'import numpy as np\n'), ((162, 176), 'numpy.asarray', 'np.asarray', (['p2'], {}), '(p2)\n', (172, 176), True, 'import numpy as np\n'), ((188, 213), 'numpy.linalg.norm', 'np.linalg.norm', (['(pv1 - pv2)'], {}), '(pv1 - pv2)\n', (202, 213), True, 'import numpy as np\n'), ((476, 493), 'numpy.asarray', 'np.asarray', (['point'], {}), '(point)\n', (486, 493), True, 'import numpy as np\n'), ((504, 523), 'numpy.asarray', 'np.asarray', (['base_pt'], {}), '(base_pt)\n', (514, 523), True, 'import numpy as np\n'), ((533, 548), 'numpy.asarray', 'np.asarray', (['vec'], {}), '(vec)\n', (543, 548), True, 'import numpy as np\n'), ((690, 707), 'numpy.cross', 'np.cross', (['lv', 'rv1'], {}), '(lv, rv1)\n', (698, 707), True, 'import numpy as np\n'), ((746, 765), 'numpy.linalg.norm', 'np.linalg.norm', (['rv2'], {}), '(rv2)\n', (760, 765), True, 'import numpy as np\n'), ((590, 607), 'numpy.dot', 'np.dot', (['diffv', 'lv'], {}), '(diffv, lv)\n', (596, 607), True, 'import numpy as np\n'), ((610, 628), 'numpy.linalg.norm', 'np.linalg.norm', (['lv'], {}), '(lv)\n', (624, 628), True, 'import numpy as np\n'), ((724, 743), 'numpy.linalg.norm', 'np.linalg.norm', (['rv1'], {}), '(rv1)\n', (738, 743), True, 'import numpy as np\n'), ((815, 828), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (821, 828), True, 'import numpy as np\n'), ((793, 806), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (799, 806), True, 'import numpy as np\n')]
|
import aws_cdk.aws_lambda as _lambda
from aws_cdk import core
from aws_cdk.aws_apigatewayv2 import (
HttpApi,
HttpMethod,
)
from aws_cdk.aws_apigatewayv2_integrations import (
LambdaProxyIntegration,
)
from aws_cdk.aws_iam import PolicyStatement, Effect
class CdkStack(core.Stack):
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
credentials_secret_name = "advent/gdrive-service-credentials"
super().__init__(scope, id, **kwargs)
google_sheet_id = self.node.try_get_context("sheet_id")
google_sheet_range = self.node.try_get_context("sheet_range")
advent_function = _lambda.Function(
self,
f"{id}-function",
code=_lambda.Code.from_asset("_build/_build.zip"),
handler="functions/advent/handler.handler",
runtime=_lambda.Runtime.PYTHON_3_8,
environment={
"GDRIVE_CREDENTIALS_SECRET": credentials_secret_name,
"SHEET_ID": google_sheet_id,
"SHEET_RANGE": google_sheet_range,
},
timeout=core.Duration.seconds(10),
)
advent_function.add_to_role_policy(
PolicyStatement(
effect=Effect.ALLOW,
actions=["secretsmanager:GetSecretValue"],
resources=["*"],
)
)
api = HttpApi(self, f"{id}-api")
api.add_routes(
path="/",
methods=[HttpMethod.GET],
integration=(LambdaProxyIntegration(handler=advent_function)),
)
core.CfnOutput(self, f"{id}-url", value=api.api_endpoint)
|
[
"aws_cdk.core.CfnOutput",
"aws_cdk.core.Duration.seconds",
"aws_cdk.aws_iam.PolicyStatement",
"aws_cdk.aws_lambda.Code.from_asset",
"aws_cdk.aws_apigatewayv2_integrations.LambdaProxyIntegration",
"aws_cdk.aws_apigatewayv2.HttpApi"
] |
[((1388, 1414), 'aws_cdk.aws_apigatewayv2.HttpApi', 'HttpApi', (['self', 'f"""{id}-api"""'], {}), "(self, f'{id}-api')\n", (1395, 1414), False, 'from aws_cdk.aws_apigatewayv2 import HttpApi, HttpMethod\n'), ((1593, 1650), 'aws_cdk.core.CfnOutput', 'core.CfnOutput', (['self', 'f"""{id}-url"""'], {'value': 'api.api_endpoint'}), "(self, f'{id}-url', value=api.api_endpoint)\n", (1607, 1650), False, 'from aws_cdk import core\n'), ((1203, 1304), 'aws_cdk.aws_iam.PolicyStatement', 'PolicyStatement', ([], {'effect': 'Effect.ALLOW', 'actions': "['secretsmanager:GetSecretValue']", 'resources': "['*']"}), "(effect=Effect.ALLOW, actions=[\n 'secretsmanager:GetSecretValue'], resources=['*'])\n", (1218, 1304), False, 'from aws_cdk.aws_iam import PolicyStatement, Effect\n'), ((732, 776), 'aws_cdk.aws_lambda.Code.from_asset', '_lambda.Code.from_asset', (['"""_build/_build.zip"""'], {}), "('_build/_build.zip')\n", (755, 776), True, 'import aws_cdk.aws_lambda as _lambda\n'), ((1109, 1134), 'aws_cdk.core.Duration.seconds', 'core.Duration.seconds', (['(10)'], {}), '(10)\n', (1130, 1134), False, 'from aws_cdk import core\n'), ((1524, 1571), 'aws_cdk.aws_apigatewayv2_integrations.LambdaProxyIntegration', 'LambdaProxyIntegration', ([], {'handler': 'advent_function'}), '(handler=advent_function)\n', (1546, 1571), False, 'from aws_cdk.aws_apigatewayv2_integrations import LambdaProxyIntegration\n')]
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
colNames = ['x', 'y', 'z', 'particle.type', 'BPM no']
particleTypeNames = {
-1: 'other',
0: 'e-',
1: 'e+',
2: 'gamma'
}
data = pd.read_csv(
'../build-10/out_nt_bpmScreenHits.csv',
header=None,
names=colNames,
comment='#'
)
particleTypes = sorted(data['particle.type'].unique())
def plot(data, typeName, pp):
histo, xEdges, yEdges = np.histogram2d(
data['x'], data['y'],
bins=300, range=[[-300, 300], [-300, 300]]
)
histo = histo.T
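    # mask empty bins so pcolormesh leaves them blank instead of coloring zeros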
histoMasked = np.ma.masked_where(histo == 0, histo)
fig, ax = plt.subplots()
cm = ax.pcolormesh(
xEdges, yEdges, histoMasked,
cmap='viridis', rasterized=True,
zorder=6
)
cb = fig.colorbar(cm)
circle = plt.Circle(
(0, 0), 13.125/2*2.54*10,
color=(1.0, 0.0, 1.0), fill=False,
zorder=5
)
ax.add_artist(circle)
ax.grid(True)
xlims = ax.get_xlim()
ax.set_xlim(xlims[1], xlims[0])
ax.set_title('{} hits'.format(typeName))
    ax.set_xlabel(r'$x_\mathrm{dump} \quad [\mathrm{mm}]$')
    ax.set_ylabel(r'$y_\mathrm{dump} \quad [\mathrm{mm}]$')
    cb.set_label(r'$\#_\mathrm{counts}$')
fig.tight_layout()
pp.savefig(dpi=150)
with PdfPages('plot_BPM.pdf') as pp:
    for bpmNo in range(1, 3):
plot(data[data['BPM no'] == bpmNo], 'BPM {}'.format(bpmNo), pp)
|
[
"matplotlib.backends.backend_pdf.PdfPages",
"numpy.ma.masked_where",
"pandas.read_csv",
"numpy.histogram2d",
"matplotlib.pyplot.Circle",
"matplotlib.pyplot.subplots"
] |
[((345, 443), 'pandas.read_csv', 'pd.read_csv', (['"""../build-10/out_nt_bpmScreenHits.csv"""'], {'header': 'None', 'names': 'colNames', 'comment': '"""#"""'}), "('../build-10/out_nt_bpmScreenHits.csv', header=None, names=\n colNames, comment='#')\n", (356, 443), True, 'import pandas as pd\n'), ((573, 658), 'numpy.histogram2d', 'np.histogram2d', (["data['x']", "data['y']"], {'bins': '(300)', 'range': '[[-300, 300], [-300, 300]]'}), "(data['x'], data['y'], bins=300, range=[[-300, 300], [-300, 300]]\n )\n", (587, 658), True, 'import numpy as np\n'), ((715, 752), 'numpy.ma.masked_where', 'np.ma.masked_where', (['(histo == 0)', 'histo'], {}), '(histo == 0, histo)\n', (733, 752), True, 'import numpy as np\n'), ((768, 782), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (780, 782), True, 'import matplotlib.pyplot as plt\n'), ((949, 1041), 'matplotlib.pyplot.Circle', 'plt.Circle', (['(0, 0)', '(13.125 / 2 * 2.54 * 10)'], {'color': '(1.0, 0.0, 1.0)', 'fill': '(False)', 'zorder': '(5)'}), '((0, 0), 13.125 / 2 * 2.54 * 10, color=(1.0, 0.0, 1.0), fill=\n False, zorder=5)\n', (959, 1041), True, 'import matplotlib.pyplot as plt\n'), ((1427, 1451), 'matplotlib.backends.backend_pdf.PdfPages', 'PdfPages', (['"""plot_BPM.pdf"""'], {}), "('plot_BPM.pdf')\n", (1435, 1451), False, 'from matplotlib.backends.backend_pdf import PdfPages\n')]
|
from datetime import datetime
import cacahuate.inputs
from cacahuate.jsontypes import MultiFormDict, Map
DATE_FIELDS = [
'started_at',
'finished_at',
]
def make_actor_map(execution_data):
actor_map = {}
for fg in execution_data['values']:
ref = fg['ref']
form_groups = []
for frm in fg['forms']:
current_form = {}
for fld in frm['fields']:
if fld['state'] != 'valid':
continue
k = fld['name']
current_form[k] = {
'actor': fld['actor']['identifier'],
'set_at': fld['set_at'],
}
form_groups.append(current_form)
actor_map[ref] = form_groups
return actor_map
def make_context(execution_data, config):
''' the proper and only way to get the ``'values'`` key out of
an execution document from mongo. It takes care of the transformations
needed for it to work in jinja templates and other contexts where the
multiplicity of answers (multiforms) is relevant. '''
context = {}
try:
for fg in execution_data['values']:
ref = fg['ref']
form_groups = []
for frm in fg['forms']:
current_form = {}
for fld in frm['fields']:
if fld['state'] != 'valid':
continue
k = fld['name']
current_form[k] = fld['value']
current_form[f'get_{k}_display'] = (
fld.get('value_caption') or fld['value']
)
form_groups.append(current_form)
context[ref] = MultiFormDict(form_groups)
except KeyError:
pass
context['_env'] = MultiFormDict([config.get('PROCESS_ENV') or {}])
return context
def json_prepare(obj):
''' Takes ``obj`` from a mongo collection and returns it *as is* with two
minor changes:
* ``_id`` key removed
* objects of type ``datetime`` converted to their string isoformat representation
'''
return {
k: v if not isinstance(v, datetime) else v.isoformat()
for k, v in obj.items()
if k != '_id'
}
def pointer_entry(node, name, description, execution, pointer, notified_users=None):
return {
'id': pointer.id,
'started_at': pointer.started_at,
'finished_at': pointer.finished_at,
'execution': execution.to_json(),
'node': {
'id': node.id,
'name': name,
'description': description,
'type': type(node).__name__.lower(),
},
'actors': Map([], key='identifier').to_json(),
'actor_list': [],
'process_id': execution.process_name,
'notified_users': notified_users or [],
'state': 'ongoing',
}
def execution_entry(execution, state):
return {
'_type': 'execution',
'id': execution.id,
'name': execution.name,
'process_name': execution.process_name,
'description': execution.description,
'status': execution.status,
'started_at': execution.started_at,
'finished_at': None,
'state': state,
'values': [{
'_type': 'fgroup',
'ref': '_execution',
'forms': [{
'ref': '_execution',
'fields': [
{
**cacahuate.inputs.TextInput(
label='Id',
name='id',
).to_json(),
'value': execution.id,
'value_caption': execution.id,
'state': 'valid',
'actor': {
'_type': 'user',
'identifier': '__system__',
'fullname': 'System',
'email': None,
},
'set_at': execution.started_at,
},
{
**cacahuate.inputs.TextInput(
label='Process Name',
name='process_name',
).to_json(),
'value': execution.process_name,
'value_caption': execution.process_name,
'state': 'valid',
'actor': {
'_type': 'user',
'identifier': '__system__',
'fullname': 'System',
'email': None,
},
'set_at': execution.started_at,
},
{
**cacahuate.inputs.TextInput(
label='Name',
name='name',
).to_json(),
'value': execution.name,
'value_caption': execution.name,
'state': 'valid',
'actor': {
'_type': 'user',
'identifier': '__system__',
'fullname': 'System',
'email': None,
},
'set_at': execution.started_at,
},
{
**cacahuate.inputs.TextInput(
label='Description',
name='description',
).to_json(),
'value': execution.description,
'value_caption': execution.description,
'state': 'valid',
'actor': {
'_type': 'user',
'identifier': '__system__',
'fullname': 'System',
'email': None,
},
'set_at': execution.started_at,
},
{
**cacahuate.inputs.DatetimeInput(
label='Started At',
name='started_at',
).to_json(),
'value': execution.started_at.isoformat(),
'value_caption': execution.started_at.isoformat(),
'state': 'valid',
'actor': {
'_type': 'user',
'identifier': '__system__',
'fullname': 'System',
'email': None,
},
'set_at': execution.started_at,
},
],
}],
}],
'actors': {},
'actor_list': [],
}
|
[
"cacahuate.jsontypes.MultiFormDict",
"cacahuate.jsontypes.Map"
] |
[((1733, 1759), 'cacahuate.jsontypes.MultiFormDict', 'MultiFormDict', (['form_groups'], {}), '(form_groups)\n', (1746, 1759), False, 'from cacahuate.jsontypes import MultiFormDict, Map\n'), ((2708, 2733), 'cacahuate.jsontypes.Map', 'Map', (['[]'], {'key': '"""identifier"""'}), "([], key='identifier')\n", (2711, 2733), False, 'from cacahuate.jsontypes import MultiFormDict, Map\n')]
|
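A minimal usage sketch for the helpers above (the execution document is hand-built for illustration; real ones come out of mongo):

execution_data = {
    'values': [{
        'ref': 'exit_form',
        'forms': [{
            'fields': [
                {'name': 'reason', 'state': 'valid',
                 'value': 'relocation', 'value_caption': 'Relocation',
                 'actor': {'identifier': 'jdoe'},
                 'set_at': '2020-01-01T00:00:00'},
                {'name': 'draft', 'state': 'invalid', 'value': 'x'},
            ],
        }],
    }],
}
context = make_context(execution_data, config={})
actors = make_actor_map(execution_data)
# Invalid fields are skipped by both helpers; the valid one lands in
# context['exit_form'] (a MultiFormDict, alongside get_reason_display)
# and actors['exit_form'][0]['reason'] records who set it and when.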
#setup.py - Free Rider HD Installation script
#by maxmillion18
#http://www.github.com/maxmillion18
#http://www.freeriderhd.com/u/MaxwellNurzia
from setuptools import setup, find_packages
versionFile = "VERSION"
setup(name="frhdtools",
      version=open(versionFile).read().strip(),  # strip the trailing newline
description="Library to work with Free Rider HD Tracks",
long_description=open("README.rst").read(),
url="https://github.com/maxmillion18/frhdtools",
author="maxmillion18",
author_email="<EMAIL>",
license="MIT License",
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Code Generators",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3 :: Only"
],
keywords="development freeriderhd freerider code track tracks",
packages=find_packages(exclude=["images"]),
)
|
[
"setuptools.find_packages"
] |
[((879, 912), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['images']"}), "(exclude=['images'])\n", (892, 912), False, 'from setuptools import setup, find_packages\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['RoleAssignmentArgs', 'RoleAssignment']
@pulumi.input_type
class RoleAssignmentArgs:
def __init__(__self__, *,
role_id: pulumi.Input[str],
domain_id: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
user_id: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a RoleAssignment resource.
:param pulumi.Input[str] role_id: The role to assign.
:param pulumi.Input[str] domain_id: The domain to assign the role in.
:param pulumi.Input[str] group_id: The group to assign the role to.
:param pulumi.Input[str] project_id: The project to assign the role in.
:param pulumi.Input[str] user_id: The user to assign the role to.
"""
pulumi.set(__self__, "role_id", role_id)
if domain_id is not None:
pulumi.set(__self__, "domain_id", domain_id)
if group_id is not None:
pulumi.set(__self__, "group_id", group_id)
if project_id is not None:
pulumi.set(__self__, "project_id", project_id)
if region is not None:
pulumi.set(__self__, "region", region)
if user_id is not None:
pulumi.set(__self__, "user_id", user_id)
@property
@pulumi.getter(name="roleId")
def role_id(self) -> pulumi.Input[str]:
"""
The role to assign.
"""
return pulumi.get(self, "role_id")
@role_id.setter
def role_id(self, value: pulumi.Input[str]):
pulumi.set(self, "role_id", value)
@property
@pulumi.getter(name="domainId")
def domain_id(self) -> Optional[pulumi.Input[str]]:
"""
The domain to assign the role in.
"""
return pulumi.get(self, "domain_id")
@domain_id.setter
def domain_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "domain_id", value)
@property
@pulumi.getter(name="groupId")
def group_id(self) -> Optional[pulumi.Input[str]]:
"""
The group to assign the role to.
"""
return pulumi.get(self, "group_id")
@group_id.setter
def group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "group_id", value)
@property
@pulumi.getter(name="projectId")
def project_id(self) -> Optional[pulumi.Input[str]]:
"""
The project to assign the role in.
"""
return pulumi.get(self, "project_id")
@project_id.setter
def project_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project_id", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="userId")
def user_id(self) -> Optional[pulumi.Input[str]]:
"""
The user to assign the role to.
"""
return pulumi.get(self, "user_id")
@user_id.setter
def user_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user_id", value)
@pulumi.input_type
class _RoleAssignmentState:
def __init__(__self__, *,
domain_id: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
role_id: Optional[pulumi.Input[str]] = None,
user_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering RoleAssignment resources.
:param pulumi.Input[str] domain_id: The domain to assign the role in.
:param pulumi.Input[str] group_id: The group to assign the role to.
:param pulumi.Input[str] project_id: The project to assign the role in.
:param pulumi.Input[str] role_id: The role to assign.
:param pulumi.Input[str] user_id: The user to assign the role to.
"""
if domain_id is not None:
pulumi.set(__self__, "domain_id", domain_id)
if group_id is not None:
pulumi.set(__self__, "group_id", group_id)
if project_id is not None:
pulumi.set(__self__, "project_id", project_id)
if region is not None:
pulumi.set(__self__, "region", region)
if role_id is not None:
pulumi.set(__self__, "role_id", role_id)
if user_id is not None:
pulumi.set(__self__, "user_id", user_id)
@property
@pulumi.getter(name="domainId")
def domain_id(self) -> Optional[pulumi.Input[str]]:
"""
The domain to assign the role in.
"""
return pulumi.get(self, "domain_id")
@domain_id.setter
def domain_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "domain_id", value)
@property
@pulumi.getter(name="groupId")
def group_id(self) -> Optional[pulumi.Input[str]]:
"""
The group to assign the role to.
"""
return pulumi.get(self, "group_id")
@group_id.setter
def group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "group_id", value)
@property
@pulumi.getter(name="projectId")
def project_id(self) -> Optional[pulumi.Input[str]]:
"""
The project to assign the role in.
"""
return pulumi.get(self, "project_id")
@project_id.setter
def project_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project_id", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="roleId")
def role_id(self) -> Optional[pulumi.Input[str]]:
"""
The role to assign.
"""
return pulumi.get(self, "role_id")
@role_id.setter
def role_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "role_id", value)
@property
@pulumi.getter(name="userId")
def user_id(self) -> Optional[pulumi.Input[str]]:
"""
The user to assign the role to.
"""
return pulumi.get(self, "user_id")
@user_id.setter
def user_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user_id", value)
class RoleAssignment(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
domain_id: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
role_id: Optional[pulumi.Input[str]] = None,
user_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages a V3 Role assignment within OpenStack Keystone.
> **Note:** You _must_ have admin privileges in your OpenStack cloud to use
this resource.
## Example Usage
```python
import pulumi
import pulumi_openstack as openstack
project1 = openstack.identity.Project("project1")
user1 = openstack.identity.User("user1", default_project_id=project1.id)
role1 = openstack.identity.Role("role1")
role_assignment1 = openstack.identity.RoleAssignment("roleAssignment1",
project_id=project1.id,
role_id=role1.id,
user_id=user1.id)
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] domain_id: The domain to assign the role in.
:param pulumi.Input[str] group_id: The group to assign the role to.
:param pulumi.Input[str] project_id: The project to assign the role in.
:param pulumi.Input[str] role_id: The role to assign.
:param pulumi.Input[str] user_id: The user to assign the role to.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: RoleAssignmentArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a V3 Role assignment within OpenStack Keystone.
> **Note:** You _must_ have admin privileges in your OpenStack cloud to use
this resource.
## Example Usage
```python
import pulumi
import pulumi_openstack as openstack
project1 = openstack.identity.Project("project1")
user1 = openstack.identity.User("user1", default_project_id=project1.id)
role1 = openstack.identity.Role("role1")
role_assignment1 = openstack.identity.RoleAssignment("roleAssignment1",
project_id=project1.id,
role_id=role1.id,
user_id=user1.id)
```
:param str resource_name: The name of the resource.
:param RoleAssignmentArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(RoleAssignmentArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
domain_id: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
role_id: Optional[pulumi.Input[str]] = None,
user_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = RoleAssignmentArgs.__new__(RoleAssignmentArgs)
__props__.__dict__["domain_id"] = domain_id
__props__.__dict__["group_id"] = group_id
__props__.__dict__["project_id"] = project_id
__props__.__dict__["region"] = region
if role_id is None and not opts.urn:
raise TypeError("Missing required property 'role_id'")
__props__.__dict__["role_id"] = role_id
__props__.__dict__["user_id"] = user_id
super(RoleAssignment, __self__).__init__(
'openstack:identity/roleAssignment:RoleAssignment',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
domain_id: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
role_id: Optional[pulumi.Input[str]] = None,
user_id: Optional[pulumi.Input[str]] = None) -> 'RoleAssignment':
"""
Get an existing RoleAssignment resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] domain_id: The domain to assign the role in.
:param pulumi.Input[str] group_id: The group to assign the role to.
:param pulumi.Input[str] project_id: The project to assign the role in.
:param pulumi.Input[str] role_id: The role to assign.
:param pulumi.Input[str] user_id: The user to assign the role to.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _RoleAssignmentState.__new__(_RoleAssignmentState)
__props__.__dict__["domain_id"] = domain_id
__props__.__dict__["group_id"] = group_id
__props__.__dict__["project_id"] = project_id
__props__.__dict__["region"] = region
__props__.__dict__["role_id"] = role_id
__props__.__dict__["user_id"] = user_id
return RoleAssignment(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="domainId")
def domain_id(self) -> pulumi.Output[Optional[str]]:
"""
The domain to assign the role in.
"""
return pulumi.get(self, "domain_id")
@property
@pulumi.getter(name="groupId")
def group_id(self) -> pulumi.Output[Optional[str]]:
"""
The group to assign the role to.
"""
return pulumi.get(self, "group_id")
@property
@pulumi.getter(name="projectId")
def project_id(self) -> pulumi.Output[Optional[str]]:
"""
The project to assign the role in.
"""
return pulumi.get(self, "project_id")
@property
@pulumi.getter
def region(self) -> pulumi.Output[str]:
return pulumi.get(self, "region")
@property
@pulumi.getter(name="roleId")
def role_id(self) -> pulumi.Output[str]:
"""
The role to assign.
"""
return pulumi.get(self, "role_id")
@property
@pulumi.getter(name="userId")
def user_id(self) -> pulumi.Output[Optional[str]]:
"""
The user to assign the role to.
"""
return pulumi.get(self, "user_id")
|
[
"pulumi.get",
"pulumi.getter",
"pulumi.ResourceOptions",
"pulumi.set"
] |
[((1797, 1825), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""roleId"""'}), "(name='roleId')\n", (1810, 1825), False, 'import pulumi\n'), ((2098, 2128), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""domainId"""'}), "(name='domainId')\n", (2111, 2128), False, 'import pulumi\n'), ((2445, 2474), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""groupId"""'}), "(name='groupId')\n", (2458, 2474), False, 'import pulumi\n'), ((2785, 2816), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""projectId"""'}), "(name='projectId')\n", (2798, 2816), False, 'import pulumi\n'), ((3388, 3416), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""userId"""'}), "(name='userId')\n", (3401, 3416), False, 'import pulumi\n'), ((5182, 5212), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""domainId"""'}), "(name='domainId')\n", (5195, 5212), False, 'import pulumi\n'), ((5529, 5558), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""groupId"""'}), "(name='groupId')\n", (5542, 5558), False, 'import pulumi\n'), ((5869, 5900), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""projectId"""'}), "(name='projectId')\n", (5882, 5900), False, 'import pulumi\n'), ((6472, 6500), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""roleId"""'}), "(name='roleId')\n", (6485, 6500), False, 'import pulumi\n'), ((6793, 6821), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""userId"""'}), "(name='userId')\n", (6806, 6821), False, 'import pulumi\n'), ((13994, 14024), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""domainId"""'}), "(name='domainId')\n", (14007, 14024), False, 'import pulumi\n'), ((14213, 14242), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""groupId"""'}), "(name='groupId')\n", (14226, 14242), False, 'import pulumi\n'), ((14428, 14459), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""projectId"""'}), "(name='projectId')\n", (14441, 14459), False, 'import pulumi\n'), ((14771, 14799), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""roleId"""'}), "(name='roleId')\n", (14784, 14799), False, 'import pulumi\n'), ((14960, 14988), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""userId"""'}), "(name='userId')\n", (14973, 14988), False, 'import pulumi\n'), ((1296, 1336), 'pulumi.set', 'pulumi.set', (['__self__', '"""role_id"""', 'role_id'], {}), "(__self__, 'role_id', role_id)\n", (1306, 1336), False, 'import pulumi\n'), ((1937, 1964), 'pulumi.get', 'pulumi.get', (['self', '"""role_id"""'], {}), "(self, 'role_id')\n", (1947, 1964), False, 'import pulumi\n'), ((2043, 2077), 'pulumi.set', 'pulumi.set', (['self', '"""role_id"""', 'value'], {}), "(self, 'role_id', value)\n", (2053, 2077), False, 'import pulumi\n'), ((2266, 2295), 'pulumi.get', 'pulumi.get', (['self', '"""domain_id"""'], {}), "(self, 'domain_id')\n", (2276, 2295), False, 'import pulumi\n'), ((2388, 2424), 'pulumi.set', 'pulumi.set', (['self', '"""domain_id"""', 'value'], {}), "(self, 'domain_id', value)\n", (2398, 2424), False, 'import pulumi\n'), ((2610, 2638), 'pulumi.get', 'pulumi.get', (['self', '"""group_id"""'], {}), "(self, 'group_id')\n", (2620, 2638), False, 'import pulumi\n'), ((2729, 2764), 'pulumi.set', 'pulumi.set', (['self', '"""group_id"""', 'value'], {}), "(self, 'group_id', value)\n", (2739, 2764), False, 'import pulumi\n'), ((2956, 2986), 'pulumi.get', 'pulumi.get', (['self', '"""project_id"""'], {}), "(self, 'project_id')\n", (2966, 2986), False, 'import pulumi\n'), ((3081, 3118), 'pulumi.set', 'pulumi.set', (['self', '"""project_id"""', 'value'], {}), "(self, 'project_id', value)\n", (3091, 3118), False, 'import pulumi\n'), ((3221, 3247), 'pulumi.get', 'pulumi.get', (['self', '"""region"""'], {}), "(self, 'region')\n", (3231, 3247), False, 'import pulumi\n'), ((3334, 3367), 'pulumi.set', 'pulumi.set', (['self', '"""region"""', 'value'], {}), "(self, 'region', value)\n", (3344, 3367), False, 'import pulumi\n'), ((3550, 3577), 'pulumi.get', 'pulumi.get', (['self', '"""user_id"""'], {}), "(self, 'user_id')\n", (3560, 3577), False, 'import pulumi\n'), ((3666, 3700), 'pulumi.set', 'pulumi.set', (['self', '"""user_id"""', 'value'], {}), "(self, 'user_id', value)\n", (3676, 3700), False, 'import pulumi\n'), ((5350, 5379), 'pulumi.get', 'pulumi.get', (['self', '"""domain_id"""'], {}), "(self, 'domain_id')\n", (5360, 5379), False, 'import pulumi\n'), ((5472, 5508), 'pulumi.set', 'pulumi.set', (['self', '"""domain_id"""', 'value'], {}), "(self, 'domain_id', value)\n", (5482, 5508), False, 'import pulumi\n'), ((5694, 5722), 'pulumi.get', 'pulumi.get', (['self', '"""group_id"""'], {}), "(self, 'group_id')\n", (5704, 5722), False, 'import pulumi\n'), ((5813, 5848), 'pulumi.set', 'pulumi.set', (['self', '"""group_id"""', 'value'], {}), "(self, 'group_id', value)\n", (5823, 5848), False, 'import pulumi\n'), ((6040, 6070), 'pulumi.get', 'pulumi.get', (['self', '"""project_id"""'], {}), "(self, 'project_id')\n", (6050, 6070), False, 'import pulumi\n'), ((6165, 6202), 'pulumi.set', 'pulumi.set', (['self', '"""project_id"""', 'value'], {}), "(self, 'project_id', value)\n", (6175, 6202), False, 'import pulumi\n'), ((6305, 6331), 'pulumi.get', 'pulumi.get', (['self', '"""region"""'], {}), "(self, 'region')\n", (6315, 6331), False, 'import pulumi\n'), ((6418, 6451), 'pulumi.set', 'pulumi.set', (['self', '"""region"""', 'value'], {}), "(self, 'region', value)\n", (6428, 6451), False, 'import pulumi\n'), ((6622, 6649), 'pulumi.get', 'pulumi.get', (['self', '"""role_id"""'], {}), "(self, 'role_id')\n", (6632, 6649), False, 'import pulumi\n'), ((6738, 6772), 'pulumi.set', 'pulumi.set', (['self', '"""role_id"""', 'value'], {}), "(self, 'role_id', value)\n", (6748, 6772), False, 'import pulumi\n'), ((6955, 6982), 'pulumi.get', 'pulumi.get', (['self', '"""user_id"""'], {}), "(self, 'user_id')\n", (6965, 6982), False, 'import pulumi\n'), ((7071, 7105), 'pulumi.set', 'pulumi.set', (['self', '"""user_id"""', 'value'], {}), "(self, 'user_id', value)\n", (7081, 7105), False, 'import pulumi\n'), ((14163, 14192), 'pulumi.get', 'pulumi.get', (['self', '"""domain_id"""'], {}), "(self, 'domain_id')\n", (14173, 14192), False, 'import pulumi\n'), ((14379, 14407), 'pulumi.get', 'pulumi.get', (['self', '"""group_id"""'], {}), "(self, 'group_id')\n", (14389, 14407), False, 'import pulumi\n'), ((14600, 14630), 'pulumi.get', 'pulumi.get', (['self', '"""project_id"""'], {}), "(self, 'project_id')\n", (14610, 14630), False, 'import pulumi\n'), ((14724, 14750), 'pulumi.get', 'pulumi.get', (['self', '"""region"""'], {}), "(self, 'region')\n", (14734, 14750), False, 'import pulumi\n'), ((14912, 14939), 'pulumi.get', 'pulumi.get', (['self', '"""role_id"""'], {}), "(self, 'role_id')\n", (14922, 14939), False, 'import pulumi\n'), ((15123, 15150), 'pulumi.get', 'pulumi.get', (['self', '"""user_id"""'], {}), "(self, 'user_id')\n", (15133, 15150), False, 'import pulumi\n'), ((1383, 1427), 'pulumi.set', 'pulumi.set', (['__self__', '"""domain_id"""', 'domain_id'], {}), "(__self__, 'domain_id', domain_id)\n", (1393, 1427), False, 'import pulumi\n'), ((1473, 1515), 'pulumi.set', 'pulumi.set', (['__self__', '"""group_id"""', 'group_id'], {}), "(__self__, 'group_id', group_id)\n", (1483, 1515), False, 'import pulumi\n'), ((1563, 1609), 'pulumi.set', 'pulumi.set', (['__self__', '"""project_id"""', 'project_id'], {}), "(__self__, 'project_id', project_id)\n", (1573, 1609), False, 'import pulumi\n'), ((1653, 1691), 'pulumi.set', 'pulumi.set', (['__self__', '"""region"""', 'region'], {}), "(__self__, 'region', region)\n", (1663, 1691), False, 'import pulumi\n'), ((1736, 1776), 'pulumi.set', 'pulumi.set', (['__self__', '"""user_id"""', 'user_id'], {}), "(__self__, 'user_id', user_id)\n", (1746, 1776), False, 'import pulumi\n'), ((4683, 4727), 'pulumi.set', 'pulumi.set', (['__self__', '"""domain_id"""', 'domain_id'], {}), "(__self__, 'domain_id', domain_id)\n", (4693, 4727), False, 'import pulumi\n'), ((4773, 4815), 'pulumi.set', 'pulumi.set', (['__self__', '"""group_id"""', 'group_id'], {}), "(__self__, 'group_id', group_id)\n", (4783, 4815), False, 'import pulumi\n'), ((4863, 4909), 'pulumi.set', 'pulumi.set', (['__self__', '"""project_id"""', 'project_id'], {}), "(__self__, 'project_id', project_id)\n", (4873, 4909), False, 'import pulumi\n'), ((4953, 4991), 'pulumi.set', 'pulumi.set', (['__self__', '"""region"""', 'region'], {}), "(__self__, 'region', region)\n", (4963, 4991), False, 'import pulumi\n'), ((5036, 5076), 'pulumi.set', 'pulumi.set', (['__self__', '"""role_id"""', 'role_id'], {}), "(__self__, 'role_id', role_id)\n", (5046, 5076), False, 'import pulumi\n'), ((5121, 5161), 'pulumi.set', 'pulumi.set', (['__self__', '"""user_id"""', 'user_id'], {}), "(__self__, 'user_id', user_id)\n", (5131, 5161), False, 'import pulumi\n'), ((11002, 11026), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {}), '()\n', (11024, 11026), False, 'import pulumi\n'), ((13495, 13524), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'id': 'id'}), '(id=id)\n', (13517, 13524), False, 'import pulumi\n')]
|
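Besides creating assignments, the generated static get() shown above can adopt existing state; a sketch (the resource ID is a placeholder, and the composite-ID format is provider-specific):

import pulumi
import pulumi_openstack as openstack

# Hypothetical adoption of an assignment that already exists in Keystone.
existing = openstack.identity.RoleAssignment.get(
    'imported-assignment',
    id='<existing-assignment-id>')
pulumi.export('imported_role_id', existing.role_id)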
from src import app, api
from flask_restful import Resource
from src.data import Data
from src.models import Pair, Interval
data = Data()
class HelloWorld(Resource):
def get(self):
return {'hello': 'world'}
class Test(Resource):
def get(self):
return {'results':data.test()}
class History(Resource):
def get(self, symbol_1:str, symbol_2:str, _datetime:int):
pair = Pair(symbol_1, symbol_2)
res = data.history(pair, _datetime)
return res
class HistoryRange(Resource):
def get(self, symbol_1:str, symbol_2:str, datetime_start:int, datetime_end:int, interval:str):
pair = Pair(symbol_1, symbol_2)
interval = Interval(interval)
res = data.history(pair, datetime_start, datetime_end, interval)
return res
api.add_resource(HelloWorld, '/')
api.add_resource(Test,'/test')
api.add_resource(History,'/history/<string:symbol_1>/<string:symbol_2>/<int:_datetime>')
api.add_resource(HistoryRange,'/history/<string:symbol_1>/<string:symbol_2>/<int:datetime_start>/<int:datetime_end>/<string:interval>')
|
[
"src.models.Interval",
"src.api.add_resource",
"src.data.Data",
"src.models.Pair"
] |
[((132, 138), 'src.data.Data', 'Data', ([], {}), '()\n', (136, 138), False, 'from src.data import Data\n'), ((799, 832), 'src.api.add_resource', 'api.add_resource', (['HelloWorld', '"""/"""'], {}), "(HelloWorld, '/')\n", (815, 832), False, 'from src import app, api\n'), ((833, 864), 'src.api.add_resource', 'api.add_resource', (['Test', '"""/test"""'], {}), "(Test, '/test')\n", (849, 864), False, 'from src import app, api\n'), ((864, 957), 'src.api.add_resource', 'api.add_resource', (['History', '"""/history/<string:symbol_1>/<string:symbol_2>/<int:_datetime>"""'], {}), "(History,\n '/history/<string:symbol_1>/<string:symbol_2>/<int:_datetime>')\n", (880, 957), False, 'from src import app, api\n'), ((953, 1098), 'src.api.add_resource', 'api.add_resource', (['HistoryRange', '"""/history/<string:symbol_1>/<string:symbol_2>/<int:datetime_start>/<int:datetime_end>/<string:interval>"""'], {}), "(HistoryRange,\n '/history/<string:symbol_1>/<string:symbol_2>/<int:datetime_start>/<int:datetime_end>/<string:interval>'\n )\n", (969, 1098), False, 'from src import app, api\n'), ((405, 429), 'src.models.Pair', 'Pair', (['symbol_1', 'symbol_2'], {}), '(symbol_1, symbol_2)\n', (409, 429), False, 'from src.models import Pair, Interval\n'), ((640, 664), 'src.models.Pair', 'Pair', (['symbol_1', 'symbol_2'], {}), '(symbol_1, symbol_2)\n', (644, 664), False, 'from src.models import Pair, Interval\n'), ((684, 702), 'src.models.Interval', 'Interval', (['interval'], {}), '(interval)\n', (692, 702), False, 'from src.models import Pair, Interval\n')]
|
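With the app served (say, the Flask dev server on port 5000), the routes above map directly onto URLs; a client-side sketch assuming the requests package:

import requests

r = requests.get('http://localhost:5000/history/BTC/USD/1609459200')
print(r.status_code, r.json())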
"""
LSH for euclidean distance.
"""
from pyspark import SparkContext, RDD
from datming.utils import join_multiple_keys
import numpy as np
__all__ = [
    "Euclidean"
]
class EuclideanDistanceLSH(object):
"""
Find item pairs between which Euclidean Distance is closed enough.
"""
def __init__(self, n_dimension: int, threshold: int,
block_size: int=1, n_bands: int=20, signature_length: int=200,
random_seed: int=None, n_partitions=5):
"""
:param n_dimension: Dimension of vector
:param block_size: size of block to split the dimensions.
:param threshold: Maximum distance to consider a pair of vectors as similar vectors.
        :param n_bands: Number of LSH bands.
        :param signature_length: Total signature length; rows per band is signature_length // n_bands.
        :param random_seed: Seed for the random hyperplanes (random if None).
        :param n_partitions: Maximum number of partitions during the computation.
"""
self.__block_size = block_size
self.__n_dim = n_dimension
self.__threshold = threshold
self.__n_bands = n_bands
self.__n_rows = signature_length // n_bands
self.__signature_length = self.__n_rows * self.__n_bands
self.__random_seed = (random_seed if isinstance(random_seed, int)
else np.random.randint(0, 2**32-1))
self.__n_partitions = n_partitions
def _lsh_predict(self, data: RDD) -> RDD:
"""
:param data: RDD<(int, np.array)>
= RDD<(id, vector)>
:return: RDD<(int, int, float)>
= RDD<(id, id, distance)>
"""
hyperplanes = self.__init_hyperplanes(
self.__n_dim, self.__signature_length, self.__random_seed
)
candidates = self.__compute_candidates(
data, hyperplanes,
self.__block_size, self.__n_bands, self.__n_rows, self.__n_partitions
)
similarity = self.__compute_similarity(
data, candidates
)
threshold = self.__threshold
similarity = similarity.filter(lambda u: u[2] <= threshold).cache()
similarity.count()
return similarity
@staticmethod
def __init_hyperplanes(n_dim: int, signature_length: int,
random_seed: int):
"""
Initialize random n-D Unit vectors.
Muller, <NAME>. "A note on a method for generating points uniformly on n-dimensional spheres."
Communications of the ACM 2.4 (1959): 19-20.
"""
np.random.seed(random_seed)
hyperplanes = np.random.randn(signature_length, n_dim)
hyperplanes = (hyperplanes / np.linalg.norm(hyperplanes, axis=1)
.reshape(-1, 1))
return hyperplanes
@staticmethod
def __compute_candidates(data, hyperplanes,
block_size, n_bands, n_rows, num_partitions):
"""
Compute signatures, group items according to signature and generate candidate pairs.
"""
def compute(generator_of_key_values):
for key, values in generator_of_key_values:
blocks = np.floor(
np.dot(hyperplanes, values) / block_size
)
for i in range(n_bands):
yield (
(i, tuple(blocks[i*n_rows:(i+1)*n_rows])), key
)
def generate_pairs(list_of_keys: list):
if len(list_of_keys) < 2:
return []
list_of_keys.sort()
for idxA, keyA in enumerate(list_of_keys[:-1]):
for keyB in list_of_keys[idxA+1:]:
yield ((keyA, keyB), -1)
candidates = (data
.mapPartitions(compute)
.coalesce(num_partitions)
.aggregateByKey(list(), lambda u, v: u + [v], lambda u1, u2: u1 + u2)
.map(lambda u: u[1])
.flatMap(generate_pairs)
.distinct()
.coalesce(num_partitions)
.cache()
)
return candidates
@staticmethod
def __compute_similarity(data, candidates):
def compute(key_values):
(key1, key2), (_, vector1, vector2) = key_values
return key1, key2, euclidean_distance(vector1, vector2)
similarity = (join_multiple_keys(left=candidates, right=data, n=2)
.map(compute)
)
return similarity
class Euclidean(EuclideanDistanceLSH):
def __init__(self, mode: str="lsh", **kwargs):
self.mode = mode.lower()
if mode.lower() == "lsh":
EuclideanDistanceLSH.__init__(self, **kwargs)
else:
raise NotImplementedError
def predict(self, data: RDD) -> RDD:
if self.mode == "lsh":
return self._lsh_predict(data)
else:
raise NotImplementedError
def euclidean_distance(vector1, vector2):
return np.linalg.norm(vector1 - vector2)
def test_case_with_random_data():
test_data = [
np.random.randn(5) for _ in range(1000)
]
sc = SparkContext.getOrCreate()
test_rdd = sc.parallelize(
[(i, arr) for i, arr in enumerate(test_data)]
)
_threshold = 1
lsh_result = Euclidean(
block_size=8, n_dimension=5, threshold=_threshold, n_bands=10, signature_length=50
).predict(data=test_rdd).collect()
lsh_result = set([
(i, j) for i, j, _ in lsh_result
])
print("number of LSH-selected pairs: ", len(lsh_result))
truth = set()
for i, arr1 in enumerate(test_data[:-1]):
for j, arr2 in enumerate(test_data[i + 1:]):
if euclidean_distance(arr1, arr2) <= _threshold:
truth.add((i, j + i + 1))
print("number of true pairs: ", len(truth))
print("TP rate=", len(lsh_result & truth) / len(truth))
print("FN rate=", len(truth - lsh_result) / len(truth))
if __name__ == '__main__':
test_case_with_random_data()
|
[
"numpy.random.seed",
"numpy.random.randn",
"pyspark.SparkContext.getOrCreate",
"datming.utils.join_multiple_keys",
"numpy.random.randint",
"numpy.linalg.norm",
"numpy.dot"
] |
[((4908, 4941), 'numpy.linalg.norm', 'np.linalg.norm', (['(vector1 - vector2)'], {}), '(vector1 - vector2)\n', (4922, 4941), True, 'import numpy as np\n'), ((5059, 5085), 'pyspark.SparkContext.getOrCreate', 'SparkContext.getOrCreate', ([], {}), '()\n', (5083, 5085), False, 'from pyspark import SparkContext, RDD\n'), ((2389, 2416), 'numpy.random.seed', 'np.random.seed', (['random_seed'], {}), '(random_seed)\n', (2403, 2416), True, 'import numpy as np\n'), ((2439, 2479), 'numpy.random.randn', 'np.random.randn', (['signature_length', 'n_dim'], {}), '(signature_length, n_dim)\n', (2454, 2479), True, 'import numpy as np\n'), ((5004, 5022), 'numpy.random.randn', 'np.random.randn', (['(5)'], {}), '(5)\n', (5019, 5022), True, 'import numpy as np\n'), ((1181, 1214), 'numpy.random.randint', 'np.random.randint', (['(0)', '(2 ** 32 - 1)'], {}), '(0, 2 ** 32 - 1)\n', (1198, 1214), True, 'import numpy as np\n'), ((4277, 4329), 'datming.utils.join_multiple_keys', 'join_multiple_keys', ([], {'left': 'candidates', 'right': 'data', 'n': '(2)'}), '(left=candidates, right=data, n=2)\n', (4295, 4329), False, 'from datming.utils import join_multiple_keys\n'), ((2517, 2552), 'numpy.linalg.norm', 'np.linalg.norm', (['hyperplanes'], {'axis': '(1)'}), '(hyperplanes, axis=1)\n', (2531, 2552), True, 'import numpy as np\n'), ((3036, 3063), 'numpy.dot', 'np.dot', (['hyperplanes', 'values'], {}), '(hyperplanes, values)\n', (3042, 3063), True, 'import numpy as np\n')]
|
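The n_bands/n_rows split above trades false negatives for speed; the standard back-of-envelope check is the banding collision curve (a standalone calculation, not part of the module):

# Probability that a pair with per-row collision probability p becomes a
# candidate under b bands of r rows each: 1 - (1 - p**r)**b.
def candidate_probability(p, n_bands=10, n_rows=5):
    return 1 - (1 - p ** n_rows) ** n_bands

for p in (0.5, 0.7, 0.9):
    print(p, round(candidate_probability(p), 3))  # steep S-curve in p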
from django.contrib import admin
from .models import Sheet, Cell
@admin.register(Sheet)
class SheetAdmin(admin.ModelAdmin):
list_display = ('name', 'key', 'owner')
readonly_fields = ('key', )
search_fields = ('name', )
@admin.register(Cell)
class CellAdmin(admin.ModelAdmin):
pass
|
[
"django.contrib.admin.register"
] |
[((69, 90), 'django.contrib.admin.register', 'admin.register', (['Sheet'], {}), '(Sheet)\n', (83, 90), False, 'from django.contrib import admin\n'), ((228, 248), 'django.contrib.admin.register', 'admin.register', (['Cell'], {}), '(Cell)\n', (242, 248), False, 'from django.contrib import admin\n')]
|
from unittest import TestCase
from mapping import Mapping
from mapping import Value
class MappingTestCase(TestCase):
def setUp(self):
self.mapping = Mapping(Value.zero, Value.one, Value.two, Value.three)
def test_distinct_values_correct(self):
try:
Mapping(Value.zero, Value.one, Value.two, Value.three)
except ValueError:
self.fail()
def test_distinct_value_incorrect(self):
self.assertRaises(ValueError, Mapping, Value.zero, Value.one,
Value.zero, Value.two)
def test_get_nucleotide_by_value(self):
self.assertEqual('A', self.mapping.get_nucleotide_by_value(0))
self.assertEqual('T', self.mapping.get_nucleotide_by_value(1))
self.assertEqual('C', self.mapping.get_nucleotide_by_value(2))
self.assertEqual('G', self.mapping.get_nucleotide_by_value(3))
def get_value_by_nucleotide_case(self, upper=True):
dna = 'ATCG'
if upper:
dna = dna.upper()
else:
dna = dna.lower()
self.assertEqual(0, self.mapping.get_value_by_nucleotide(dna[0]))
self.assertEqual(1, self.mapping.get_value_by_nucleotide(dna[1]))
self.assertEqual(2, self.mapping.get_value_by_nucleotide(dna[2]))
self.assertEqual(3, self.mapping.get_value_by_nucleotide(dna[3]))
def test_get_value_by_nucleotide_case_lower(self):
self.get_value_by_nucleotide_case(False)
def test_get_value_by_nucleotide_case_upper(self):
self.get_value_by_nucleotide_case()
class ValueTestCase(TestCase):
def test_get_instance_not_integer(self):
self.assertRaises(TypeError, Value.get_instance, '0')
def test_get_instance_not_within_bounds(self):
self.assertRaises(ValueError, Value.get_instance, -1)
self.assertRaises(ValueError, Value.get_instance, 4)
def test_get_instance(self):
self.assertEqual(Value.zero, Value.get_instance(0))
self.assertEqual(Value.one, Value.get_instance(1))
self.assertEqual(Value.two, Value.get_instance(2))
self.assertEqual(Value.three, Value.get_instance(3))
|
[
"mapping.Mapping",
"mapping.Value.get_instance"
] |
[((157, 211), 'mapping.Mapping', 'Mapping', (['Value.zero', 'Value.one', 'Value.two', 'Value.three'], {}), '(Value.zero, Value.one, Value.two, Value.three)\n', (164, 211), False, 'from mapping import Mapping\n'), ((264, 318), 'mapping.Mapping', 'Mapping', (['Value.zero', 'Value.one', 'Value.two', 'Value.three'], {}), '(Value.zero, Value.one, Value.two, Value.three)\n', (271, 318), False, 'from mapping import Mapping\n'), ((1739, 1760), 'mapping.Value.get_instance', 'Value.get_instance', (['(0)'], {}), '(0)\n', (1757, 1760), False, 'from mapping import Value\n'), ((1792, 1813), 'mapping.Value.get_instance', 'Value.get_instance', (['(1)'], {}), '(1)\n', (1810, 1813), False, 'from mapping import Value\n'), ((1845, 1866), 'mapping.Value.get_instance', 'Value.get_instance', (['(2)'], {}), '(2)\n', (1863, 1866), False, 'from mapping import Value\n'), ((1900, 1921), 'mapping.Value.get_instance', 'Value.get_instance', (['(3)'], {}), '(3)\n', (1918, 1921), False, 'from mapping import Value\n')]
|
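These tests pin down the expected interface; a hypothetical minimal Mapping that would satisfy them (the real mapping module may differ; Value is assumed to be an IntEnum-like with members zero..three and the get_instance classmethod the tests exercise):

class Mapping:
    """Hypothetical bijection between 'ATCG' and four distinct Values."""
    def __init__(self, *values):
        if len(set(values)) != len(values):
            raise ValueError('mapped values must be distinct')
        self._nucleotide_by_value = dict(zip(values, 'ATCG'))
        self._value_by_nucleotide = {
            n: v for v, n in self._nucleotide_by_value.items()}

    def get_nucleotide_by_value(self, value):
        return self._nucleotide_by_value[Value.get_instance(value)]

    def get_value_by_nucleotide(self, nucleotide):
        return self._value_by_nucleotide[nucleotide.upper()]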
# =============================================================================
#
# Explicit Finite Difference Method Code
# Solves the 2D Temperature Convection-Diffusion Equation
# Assumes Tubular Plug-Flow-Reactor in Laminar Regime
# Assumes hagen poiseuille velocity profile
# Heat Source-Sink Included Uses Laminar Nusselt Correlation for "h"
# Written by: <NAME> (2020)
# Institution: Virginia Commonwealth University
#
# =============================================================================
# Required Modules
import numpy as np
from matplotlib import pyplot as plt
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D
import math
from array import array
D = 0.0015875 # tubing diameter in m
xl = 30/100 # tubing length in m & x range
yl = D # tubing diameter & y range
nx = 300 # x grid points
ny = 50 # y grid points
dx = xl/(nx-1) # x stepsize
dy = yl/(ny-1) # y stepsize
k= .12 # thermal conductvity W/(m*K)
p = 1750 # density (kg/m3)
Cp = 1172 # specifc heat (J/kg/K)
a = k/(p*Cp) # thermal diffusivity (m2/s)
sigma = .001 # time step factor
dt = sigma * dx * dy / a # time stepsize
Vr = math.pi*(D/2)**2*xl # tubing volume (m3)
Qmlm = 1 # volumetric flowrate (mL/min)
Q = (Qmlm*10**-6)/60 # volumetric flowrate (m3/s)
Ac = math.pi*(D/2)**2 # cross-sectional area (m2)
lamx = a*dt/dx**2 # lumped coefficient
lamy = a*dt/dy**2 # lumped coefficient
Nu = 3.66 # nusselt laminar flow in tube
h = Nu*k/D # convective heat transfer coefficient (W/m2/K)
T0 = 130+273.15 # stream inlet temperature (degK)
Tw = 25+273.15 # wall temperature (degK)
reltol = 1e-8 # tolerance for convergence
# grid formation
x = np.linspace(0, xl, nx)
y = np.linspace(0, yl, ny)
X, Y = np.meshgrid(x, y)
# Hagen-Poiseuille velocity field generation
uAvg = Q/Ac # average velocity (m/s)
uMax = 2*uAvg # max velocity (m/s)
u = np.zeros(ny) # array initilization
u[:] = np.linspace(-(D/2),(D/2),ny) # array intialization
u[:] = uMax*(1-(u[:]/(D/2))**2)   # Hagen-Poiseuille profile
u[0]=u[-1]=0 # no slip BC
u = np.array([u,]*nx) # velocity field
u = u.T # transpose/align field
maxCFL = np.max(u*dt/dx) # CFL condition calc.
print('The max CFL is %s'%(maxCFL))
# main function loop
def lets_get_tubular():
# array initialization
Ttol = np.zeros((ny,nx))
T = np.ones((ny, nx))*Tw
Tn = np.ones((ny, nx))*Tw
# initialize termination condition
# compares norms of current and previous solution arrays
termcond = (np.abs((np.linalg.norm(Ttol)-np.linalg.norm(Tn))))/np.linalg.norm(Tn)
stepcount = 1 # step counter
while termcond >= reltol:
termcond = np.abs((np.linalg.norm(Ttol)-np.linalg.norm(Tn)))/np.linalg.norm(Tn)
Tn = T.copy()
# FDM vectorized solution using explicit euler and CDS
T[1:-1, 1:-1] = (Tn[1:-1,1:-1] - (u[1:-1,1:-1]*(dt/(2*dx))*(Tn[1:-1,2:] \
-Tn[1:-1,:-2])) \
+ lamx *(Tn[1:-1, 2:] - 2 * Tn[1:-1, 1:-1] + Tn[1:-1, :-2]) \
+ lamy* (Tn[2:,1:-1] - 2 * Tn[1:-1, 1:-1] + Tn[:-2, 1:-1])) \
- h*D*math.pi*(Tn[1:-1,1:-1]-Tw)*dt/p/Cp*xl/Vr
# BCs
T[0, :] = Tw # tubing wall temp dirichlet BC
T[-1, :] = Tw # tubing wall temp dirichlet BC
T[:, 0] = T0 # inlet flow temp dirichlet BC
T[:, -1] = T[:,-2] # outlet flow temp neumann BC
Ttol=T.copy() # update solution
stepcount += 1 # update counter
# fig = plt.figure()
# ax = fig.gca(projection='3d')
# surf = ax.plot_surface(X, Y, T[:], rstride=1, cstride=1, cmap=cm.viridis,
# linewidth=0, antialiased=True)
# ax.set_xlabel('$x$')
# ax.set_ylabel('$y$');
T[:]=T[:]-273.15 # converts back to degC
# generates plots
# top plot is 2D filled contour plot
# bottom plot is centerline and near-wall line data points
fig1 = plt.subplot(211)
# ax = fig1.gca()
# plt.imshow(T[:])
cont = plt.contourf(X,Y,T[:],50)
ax = plt.gca()
ax.axis('scaled')
ax.axes.get_yaxis().set_visible(False)
plt.xlim(0,.05)
plt.xlabel('Tubing Length (m)')
cbar = plt.colorbar(cont)
cbar.ax.set_ylabel('Temperature (degC)')
centerline = ny/2
wallline = ny-5
centerline = int(centerline)
wallline = int(wallline)
centerT = T[centerline,:]
wallT = T[wallline,:]
fig2 = plt.subplot(212)
plt.plot(x, centerT,label='center')
plt.plot(x,wallT,label='wall')
plt.legend()
plt.ylabel('Temperature (degC)')
plt.xlabel('Tubing Length (m)')
plt.show()
print('Stepcount = %s' %(stepcount))
if __name__ == "__main__":
lets_get_tubular()
|
[
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.xlim",
"numpy.meshgrid",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.legend",
"numpy.zeros",
"numpy.ones",
"matplotlib.pyplot.colorbar",
"numpy.max",
"numpy.array",
"matplotlib.pyplot.contourf",
"numpy.linspace",
"matplotlib.pyplot.gca",
"numpy.linalg.norm",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel"
] |
[((2506, 2528), 'numpy.linspace', 'np.linspace', (['(0)', 'xl', 'nx'], {}), '(0, xl, nx)\n', (2517, 2528), True, 'import numpy as np\n'), ((2535, 2557), 'numpy.linspace', 'np.linspace', (['(0)', 'yl', 'ny'], {}), '(0, yl, ny)\n', (2546, 2557), True, 'import numpy as np\n'), ((2567, 2584), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (2578, 2584), True, 'import numpy as np\n'), ((2783, 2795), 'numpy.zeros', 'np.zeros', (['ny'], {}), '(ny)\n', (2791, 2795), True, 'import numpy as np\n'), ((2857, 2889), 'numpy.linspace', 'np.linspace', (['(-(D / 2))', '(D / 2)', 'ny'], {}), '(-(D / 2), D / 2, ny)\n', (2868, 2889), True, 'import numpy as np\n'), ((3061, 3079), 'numpy.array', 'np.array', (['([u] * nx)'], {}), '([u] * nx)\n', (3069, 3079), True, 'import numpy as np\n'), ((3205, 3224), 'numpy.max', 'np.max', (['(u * dt / dx)'], {}), '(u * dt / dx)\n', (3211, 3224), True, 'import numpy as np\n'), ((3393, 3411), 'numpy.zeros', 'np.zeros', (['(ny, nx)'], {}), '((ny, nx))\n', (3401, 3411), True, 'import numpy as np\n'), ((5093, 5109), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(211)'], {}), '(211)\n', (5104, 5109), True, 'from matplotlib import pyplot as plt\n'), ((5167, 5195), 'matplotlib.pyplot.contourf', 'plt.contourf', (['X', 'Y', 'T[:]', '(50)'], {}), '(X, Y, T[:], 50)\n', (5179, 5195), True, 'from matplotlib import pyplot as plt\n'), ((5203, 5212), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (5210, 5212), True, 'from matplotlib import pyplot as plt\n'), ((5285, 5302), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(0.05)'], {}), '(0, 0.05)\n', (5293, 5302), True, 'from matplotlib import pyplot as plt\n'), ((5306, 5337), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Tubing Length (m)"""'], {}), "('Tubing Length (m)')\n", (5316, 5337), True, 'from matplotlib import pyplot as plt\n'), ((5350, 5368), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['cont'], {}), '(cont)\n', (5362, 5368), True, 'from matplotlib import pyplot as plt\n'), ((5599, 5615), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(212)'], {}), '(212)\n', (5610, 5615), True, 'from matplotlib import pyplot as plt\n'), ((5621, 5657), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'centerT'], {'label': '"""center"""'}), "(x, centerT, label='center')\n", (5629, 5657), True, 'from matplotlib import pyplot as plt\n'), ((5662, 5694), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'wallT'], {'label': '"""wall"""'}), "(x, wallT, label='wall')\n", (5670, 5694), True, 'from matplotlib import pyplot as plt\n'), ((5698, 5710), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5708, 5710), True, 'from matplotlib import pyplot as plt\n'), ((5716, 5748), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Temperature (degC)"""'], {}), "('Temperature (degC)')\n", (5726, 5748), True, 'from matplotlib import pyplot as plt\n'), ((5754, 5785), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Tubing Length (m)"""'], {}), "('Tubing Length (m)')\n", (5764, 5785), True, 'from matplotlib import pyplot as plt\n'), ((5797, 5807), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5805, 5807), True, 'from matplotlib import pyplot as plt\n'), ((3421, 3438), 'numpy.ones', 'np.ones', (['(ny, nx)'], {}), '((ny, nx))\n', (3428, 3438), True, 'import numpy as np\n'), ((3454, 3471), 'numpy.ones', 'np.ones', (['(ny, nx)'], {}), '((ny, nx))\n', (3461, 3471), True, 'import numpy as np\n'), ((3646, 3664), 'numpy.linalg.norm', 'np.linalg.norm', (['Tn'], {}), '(Tn)\n', (3660, 3664), True, 'import numpy as np\n'), ((3801, 3819), 'numpy.linalg.norm', 'np.linalg.norm', (['Tn'], {}), '(Tn)\n', (3815, 3819), True, 'import numpy as np\n'), ((3603, 3623), 'numpy.linalg.norm', 'np.linalg.norm', (['Ttol'], {}), '(Ttol)\n', (3617, 3623), True, 'import numpy as np\n'), ((3624, 3642), 'numpy.linalg.norm', 'np.linalg.norm', (['Tn'], {}), '(Tn)\n', (3638, 3642), True, 'import numpy as np\n'), ((3759, 3779), 'numpy.linalg.norm', 'np.linalg.norm', (['Ttol'], {}), '(Ttol)\n', (3773, 3779), True, 'import numpy as np\n'), ((3780, 3798), 'numpy.linalg.norm', 'np.linalg.norm', (['Tn'], {}), '(Tn)\n', (3794, 3798), True, 'import numpy as np\n')]
|
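Because the update above is fully explicit (FTCS), the time step is constrained; a quick audit using the script's own lumped coefficients (standard necessary conditions, stated loosely):

# 2D FTCS diffusion limit: lamx + lamy <= 1/2; advective CFL <= 1.
print('diffusion number:', lamx + lamy)
print('advective CFL  :', maxCFL)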
"""
Priors for GPCSD parameters.
"""
import autograd.numpy as np
from scipy.stats import invgamma, halfnorm
class GPCSDPrior:
def __init__(self):
pass
class GPCSDInvGammaPrior(GPCSDPrior):
def __init__(self, alpha=1, beta=1):
GPCSDPrior.__init__(self)
self.alpha = alpha
self.beta = beta
def __str__(self):
return "InvGamma(%0.2f, %0.2f)" % (self.alpha, self.beta)
def lpdf(self, x):
if x <= 0:
val = -1.0 * np.inf
else:
val = -1.0 * (self.alpha + 1) * np.log(x) - self.beta/x
return val
def set_params(self, l, u):
self.alpha = 2 + 9 * np.square((l + u)/(u - l))
self.beta = 0.5 * (self.alpha - 1) * (l + u)
def sample(self):
return invgamma.rvs(self.alpha, scale=self.beta)
class GPCSDHalfNormalPrior(GPCSDPrior):
def __init__(self, sd=1):
GPCSDPrior.__init__(self)
self.sd = sd
def __str__(self):
return "HalfNormal(%0.2f)" % (self.sd)
def lpdf(self, x):
if x <= 0:
val = -1.0 * np.inf
else:
val = -0.5 * np.square(x / self.sd)
return val
def sample(self):
return halfnorm.rvs(scale=self.sd)
|
[
"autograd.numpy.log",
"scipy.stats.invgamma.rvs",
"autograd.numpy.square",
"scipy.stats.halfnorm.rvs"
] |
[((787, 828), 'scipy.stats.invgamma.rvs', 'invgamma.rvs', (['self.alpha'], {'scale': 'self.beta'}), '(self.alpha, scale=self.beta)\n', (799, 828), False, 'from scipy.stats import invgamma, halfnorm\n'), ((1225, 1252), 'scipy.stats.halfnorm.rvs', 'halfnorm.rvs', ([], {'scale': 'self.sd'}), '(scale=self.sd)\n', (1237, 1252), False, 'from scipy.stats import invgamma, halfnorm\n'), ((665, 693), 'autograd.numpy.square', 'np.square', (['((l + u) / (u - l))'], {}), '((l + u) / (u - l))\n', (674, 693), True, 'import autograd.numpy as np\n'), ((1141, 1163), 'autograd.numpy.square', 'np.square', (['(x / self.sd)'], {}), '(x / self.sd)\n', (1150, 1163), True, 'import autograd.numpy as np\n'), ((560, 569), 'autograd.numpy.log', 'np.log', (['x'], {}), '(x)\n', (566, 569), True, 'import autograd.numpy as np\n')]
|
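Both lpdf methods above drop normalizing constants, which is fine for MAP objectives and Metropolis ratios; a quick sanity check against scipy (the gap to scipy's full logpdf should be identical at every x):

from scipy.stats import invgamma

prior = GPCSDInvGammaPrior(alpha=3.0, beta=2.0)
offsets = [prior.lpdf(x) - invgamma.logpdf(x, prior.alpha, scale=prior.beta)
           for x in (0.5, 1.0, 2.0)]
print(offsets)  # three identical values: a constant offset only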
from django.db import models
from .managers import GeneralManager
class Rain(models.Model):
amount = models.IntegerField()
date = models.DateField(auto_now=False)
objects = models.Manager()
my_query = GeneralManager()
class Meta:
ordering = ['date']
def __str__(self):
return "{}mm of rain fell on {}".format(self.amount, self.date)
|
[
"django.db.models.DateField",
"django.db.models.IntegerField",
"django.db.models.Manager"
] |
[((112, 133), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (131, 133), False, 'from django.db import models\n'), ((146, 178), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now': '(False)'}), '(auto_now=False)\n', (162, 178), False, 'from django.db import models\n'), ((194, 210), 'django.db.models.Manager', 'models.Manager', ([], {}), '()\n', (208, 210), False, 'from django.db import models\n')]
|
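The .managers import is not shown in this row; a plausible (hypothetical) GeneralManager consistent with how my_query is attached could be:

from django.db import models

class GeneralManager(models.Manager):
    """Hypothetical custom manager; the real .managers module may differ."""
    def recent(self, n=7):
        # Newest n readings, overriding the model's ascending date ordering.
        return self.get_queryset().order_by('-date')[:n]

# e.g. Rain.my_query.recent() for the latest week of readings.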
"""CNN Module."""
import warnings
import akro
import numpy as np
import torch
from torch import nn
from garage import InOutSpec
from garage.torch import (expand_var, NonLinearity, output_height_2d,
output_width_2d)
# pytorch v1.6 issue, see https://github.com/pytorch/pytorch/issues/42305
# pylint: disable=abstract-method
class CNNModule(nn.Module):
"""Convolutional neural network (CNN) model in pytorch.
Args:
spec (garage.InOutSpec): Specification of inputs and outputs.
The input should be in 'NCHW' format: [batch_size, channel, height,
width]. Will print a warning if the channel size is not 1 or 3.
If output_space is specified, then a final linear layer will be
inserted to map to that dimensionality.
If output_space is None, it will be filled in with the computed
output space.
image_format (str): Either 'NCHW' or 'NHWC'. Should match the input
specification. Gym uses NHWC by default, but PyTorch uses NCHW by
default.
hidden_channels (tuple[int]): Number of output channels for CNN.
For example, (3, 32) means there are two convolutional layers.
The filter for the first conv layer outputs 3 channels
and the second one outputs 32 channels.
kernel_sizes (tuple[int]): Dimension of the conv filters.
For example, (3, 5) means there are two convolutional layers.
The filter for first layer is of dimension (3 x 3)
and the second one is of dimension (5 x 5).
strides (tuple[int]): The stride of the sliding window. For example,
(1, 2) means there are two convolutional layers. The stride of the
filter for first layer is 1 and that of the second layer is 2.
paddings (tuple[int]): Amount of zero-padding added to both sides of
the input of a conv layer.
padding_mode (str): The type of padding algorithm to use, i.e.
'constant', 'reflect', 'replicate' or 'circular' and
by default is 'zeros'.
hidden_nonlinearity (callable or torch.nn.Module):
Activation function for intermediate dense layer(s).
It should return a torch.Tensor. Set it to None to maintain a
linear activation.
hidden_b_init (callable): Initializer function for the bias
of intermediate dense layer(s). The function should return a
torch.Tensor.
max_pool (bool): Bool for using max-pooling or not.
pool_shape (tuple[int]): Dimension of the pooling layer(s). For
example, (2, 2) means that all pooling layers are of the same
shape (2, 2).
pool_stride (tuple[int]): The strides of the pooling layer(s). For
example, (2, 2) means that all the pooling layers have
strides (2, 2).
layer_normalization (bool): Bool for using layer normalization or not.
hidden_w_init (callable): Initializer function for the weight
of intermediate dense layer(s). The function should return a
torch.Tensor.
enable_cudnn_benchmarks (bool): Whether to enable cudnn benchmarks
            in `torch`. If enabled, the backend selects the CNN benchmark
algorithm with the best performance.
"""
def __init__(
self,
spec,
image_format,
hidden_channels,
*, # Many things after this are ints or tuples of ints.
kernel_sizes,
strides,
paddings=0,
padding_mode='zeros',
hidden_nonlinearity=nn.ReLU,
hidden_w_init=nn.init.xavier_uniform_,
hidden_b_init=nn.init.zeros_,
max_pool=False,
pool_shape=None,
pool_stride=1,
layer_normalization=False,
enable_cudnn_benchmarks=True):
super().__init__()
assert len(hidden_channels) > 0
# PyTorch forces us to use NCHW internally.
in_channels, height, width = _check_spec(spec, image_format)
self._format = image_format
kernel_sizes = expand_var('kernel_sizes', kernel_sizes,
len(hidden_channels), 'hidden_channels')
strides = expand_var('strides', strides, len(hidden_channels),
'hidden_channels')
paddings = expand_var('paddings', paddings, len(hidden_channels),
'hidden_channels')
pool_shape = expand_var('pool_shape', pool_shape, len(hidden_channels),
'hidden_channels')
pool_stride = expand_var('pool_stride', pool_stride,
len(hidden_channels), 'hidden_channels')
self._cnn_layers = nn.Sequential()
torch.backends.cudnn.benchmark = enable_cudnn_benchmarks
# In case there are no hidden channels, handle output case.
out_channels = in_channels
for i, out_channels in enumerate(hidden_channels):
conv_layer = nn.Conv2d(in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_sizes[i],
stride=strides[i],
padding=paddings[i],
padding_mode=padding_mode)
height = output_height_2d(conv_layer, height)
width = output_width_2d(conv_layer, width)
hidden_w_init(conv_layer.weight)
hidden_b_init(conv_layer.bias)
self._cnn_layers.add_module(f'conv_{i}', conv_layer)
if layer_normalization:
self._cnn_layers.add_module(
f'layer_norm_{i}',
nn.LayerNorm((out_channels, height, width)))
if hidden_nonlinearity:
self._cnn_layers.add_module(f'non_linearity_{i}',
NonLinearity(hidden_nonlinearity))
if max_pool:
pool = nn.MaxPool2d(kernel_size=pool_shape[i],
stride=pool_stride[i])
height = output_height_2d(pool, height)
width = output_width_2d(pool, width)
self._cnn_layers.add_module(f'max_pooling_{i}', pool)
in_channels = out_channels
output_dims = out_channels * height * width
if spec.output_space is None:
final_spec = InOutSpec(
spec.input_space,
akro.Box(low=-np.inf, high=np.inf, shape=(output_dims, )))
self._final_layer = None
else:
final_spec = spec
# Checked at start of __init__
self._final_layer = nn.Linear(output_dims,
spec.output_space.shape[0])
self.spec = final_spec
# pylint: disable=arguments-differ
def forward(self, x):
"""Forward method.
Args:
x (torch.Tensor): Input values. Should match image_format
specified at construction (either NCHW or NCWH).
Returns:
List[torch.Tensor]: Output values
"""
# Transform single values into batch, if necessary.
if len(x.shape) == 3:
x = x.unsqueeze(0)
# This should be the single place in torch that image normalization
# happens
if isinstance(self.spec.input_space, akro.Image):
x = torch.div(x, 255.0)
assert len(x.shape) == 4
if self._format == 'NHWC':
# Convert to internal NCHW format
x = x.permute((0, 3, 1, 2))
for layer in self._cnn_layers:
x = layer(x)
if self._format == 'NHWC':
# Convert back to NHWC (just in case)
x = x.permute((0, 2, 3, 1))
# Remove non-batch dimensions
x = x.reshape(x.shape[0], -1)
# Apply final linearity, if it was requested.
if self._final_layer is not None:
x = self._final_layer(x)
return x
def _check_spec(spec, image_format):
"""Check that an InOutSpec is suitable for a CNNModule.
Args:
spec (garage.InOutSpec): Specification of inputs and outputs. The
input should be in 'NCHW' format: [batch_size, channel, height,
width]. Will print a warning if the channel size is not 1 or 3.
If output_space is specified, then a final linear layer will be
inserted to map to that dimensionality. If output_space is None,
it will be filled in with the computed output space.
image_format (str): Either 'NCHW' or 'NHWC'. Should match the input
specification. Gym uses NHWC by default, but PyTorch uses NCHW by
default.
Returns:
tuple[int, int, int]: The input channels, height, and width.
Raises:
ValueError: If spec isn't suitable for a CNNModule.
"""
# pylint: disable=no-else-raise
input_space = spec.input_space
output_space = spec.output_space
# Don't use isinstance, since akro.Space is guaranteed to inherit from
# gym.Space
if getattr(input_space, 'shape', None) is None:
raise ValueError(
f'input_space to CNNModule is {input_space}, but should be an '
f'akro.Box or akro.Image')
elif len(input_space.shape) != 3:
raise ValueError(
f'Input to CNNModule is {input_space}, but should have three '
f'dimensions.')
if (output_space is not None and not (hasattr(output_space, 'shape')
and len(output_space.shape) == 1)):
raise ValueError(
f'output_space to CNNModule is {output_space}, but should be '
f'an akro.Box with a single dimension or None')
if image_format == 'NCHW':
in_channels = spec.input_space.shape[0]
height = spec.input_space.shape[1]
width = spec.input_space.shape[2]
elif image_format == 'NHWC':
height = spec.input_space.shape[0]
width = spec.input_space.shape[1]
in_channels = spec.input_space.shape[2]
else:
raise ValueError(
f'image_format has value {image_format!r}, but must be either '
f"'NCHW' or 'NHWC'")
if in_channels not in (1, 3):
warnings.warn(
f'CNNModule input has {in_channels} channels, but '
f'1 or 3 channels are typical. Consider changing the CNN '
f'image_format.')
return in_channels, height, width
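# --- Illustrative usage sketch (hypothetical values; not part of the original
# module). Assumes garage exports InOutSpec and that akro.Image accepts a
# shape, as the checks above expect.
def _example_cnn_module():
    import torch
    import akro
    from garage import InOutSpec
    spec = InOutSpec(akro.Image(shape=(32, 32, 3)), None)
    module = CNNModule(spec, 'NHWC', hidden_channels=(32, 64),
                       kernel_sizes=3, strides=2)
    # Input is NHWC to match image_format; output is flattened per sample.
    return module(torch.zeros((8, 32, 32, 3)))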
|
[
"torch.nn.Sequential",
"garage.torch.NonLinearity",
"garage.torch.output_width_2d",
"torch.nn.Conv2d",
"akro.Box",
"torch.nn.LayerNorm",
"torch.div",
"torch.nn.Linear",
"torch.nn.MaxPool2d",
"warnings.warn",
"garage.torch.output_height_2d"
] |
[((4863, 4878), 'torch.nn.Sequential', 'nn.Sequential', ([], {}), '()\n', (4876, 4878), False, 'from torch import nn\n'), ((10467, 10611), 'warnings.warn', 'warnings.warn', (['f"""CNNModule input has {in_channels} channels, but 1 or 3 channels are typical. Consider changing the CNN image_format."""'], {}), "(\n f'CNNModule input has {in_channels} channels, but 1 or 3 channels are typical. Consider changing the CNN image_format.'\n )\n", (10480, 10611), False, 'import warnings\n'), ((5132, 5299), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'in_channels', 'out_channels': 'out_channels', 'kernel_size': 'kernel_sizes[i]', 'stride': 'strides[i]', 'padding': 'paddings[i]', 'padding_mode': 'padding_mode'}), '(in_channels=in_channels, out_channels=out_channels, kernel_size=\n kernel_sizes[i], stride=strides[i], padding=paddings[i], padding_mode=\n padding_mode)\n', (5141, 5299), False, 'from torch import nn\n'), ((5486, 5522), 'garage.torch.output_height_2d', 'output_height_2d', (['conv_layer', 'height'], {}), '(conv_layer, height)\n', (5502, 5522), False, 'from garage.torch import expand_var, NonLinearity, output_height_2d, output_width_2d\n'), ((5543, 5577), 'garage.torch.output_width_2d', 'output_width_2d', (['conv_layer', 'width'], {}), '(conv_layer, width)\n', (5558, 5577), False, 'from garage.torch import expand_var, NonLinearity, output_height_2d, output_width_2d\n'), ((6859, 6909), 'torch.nn.Linear', 'nn.Linear', (['output_dims', 'spec.output_space.shape[0]'], {}), '(output_dims, spec.output_space.shape[0])\n', (6868, 6909), False, 'from torch import nn\n'), ((7593, 7612), 'torch.div', 'torch.div', (['x', '(255.0)'], {}), '(x, 255.0)\n', (7602, 7612), False, 'import torch\n'), ((6148, 6210), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': 'pool_shape[i]', 'stride': 'pool_stride[i]'}), '(kernel_size=pool_shape[i], stride=pool_stride[i])\n', (6160, 6210), False, 'from torch import nn\n'), ((6272, 6302), 'garage.torch.output_height_2d', 'output_height_2d', (['pool', 'height'], {}), '(pool, height)\n', (6288, 6302), False, 'from garage.torch import expand_var, NonLinearity, output_height_2d, output_width_2d\n'), ((6327, 6355), 'garage.torch.output_width_2d', 'output_width_2d', (['pool', 'width'], {}), '(pool, width)\n', (6342, 6355), False, 'from garage.torch import expand_var, NonLinearity, output_height_2d, output_width_2d\n'), ((6644, 6700), 'akro.Box', 'akro.Box', ([], {'low': '(-np.inf)', 'high': 'np.inf', 'shape': '(output_dims,)'}), '(low=-np.inf, high=np.inf, shape=(output_dims,))\n', (6652, 6700), False, 'import akro\n'), ((5872, 5915), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['(out_channels, height, width)'], {}), '((out_channels, height, width))\n', (5884, 5915), False, 'from torch import nn\n'), ((6064, 6097), 'garage.torch.NonLinearity', 'NonLinearity', (['hidden_nonlinearity'], {}), '(hidden_nonlinearity)\n', (6076, 6097), False, 'from garage.torch import expand_var, NonLinearity, output_height_2d, output_width_2d\n')]
|
"""
.. module:: function
:synopsis: Nuts that perform functions on single stream elements.
"""
from __future__ import print_function
from __future__ import absolute_import
import time
import threading
from nutsflow.common import (shapestr, as_tuple, is_iterable, istensor,
print_type, console)
from nutsflow.factory import nut_function, NutFunction
@nut_function
def Identity(x):
"""
iterable >> Identity()
Pass iterable through. Output is identical to input.
>>> from nutsflow import Collect
>>> [1, 2, 3] >> Identity() >> Collect()
[1, 2, 3]
:param iterable iterable: Any iterable
:param any x: Any input
:return: Returns input unaltered
:rtype: object
"""
return x
@nut_function
def Square(x):
"""
iterable >> Square()
Return squared input.
>>> from nutsflow import Collect
>>> [1, 2, 3] >> Square() >> Collect()
[1, 4, 9]
:param iterable iterable: Any iterable over numbers
:param number x: Any number
:return: Squared number
:rtype: number
"""
return x * x
@nut_function
def NOP(x, *args): # *args is needed!
"""
    iterable >> NOP(*args)
No Operation. Useful to skip nuts. Same as commenting a nut out
or removing it from a pipeline.
>>> from nutsflow import Collect
>>> [1, 2, 3] >> NOP(Square()) >> Collect()
[1, 2, 3]
:param iterable iterable: Any iterable
:param object x: Any object
:param args args: Additional args are ignored.
    :return: Returns input unaltered
    :rtype: object
"""
return x
@nut_function
def Get(x, start, end=None, step=None):
"""
iterable >> Get(start, end, step)
Extract elements from iterable. Equivalent to slicing [start:end:step]
but per element of the iterable.
>>> from nutsflow import Collect
>>> [(1, 2, 3), (4, 5, 6)] >> Get(1) >> Collect()
[2, 5]
>>> [(1, 2, 3), (4, 5, 6)] >> Get(0, 2) >> Collect()
[(1, 2), (4, 5)]
>>> [(1, 2, 3), (4, 5, 6)] >> Get(0, 3, 2) >> Collect()
[(1, 3), (4, 6)]
>>> [(1, 2, 3), (4, 5, 6)] >> Get(None) >> Collect()
[(1, 2, 3), (4, 5, 6)]
:param iterable iterable: Any iterable
:param indexable x: Any indexable input
:param int start: Start index for columns to extract from x
If start = None, x is returned
:param int end: End index (not inclusive)
:param int step: Step index (same as slicing)
:return: Extracted elements
:rtype: object|list
"""
return x if start is None else x[slice(start, end, step) if end else start]
@nut_function
def GetCols(x, *columns):
"""
iterable >> GetCols(*columns)
Extract elements in given order from x. Also useful to change the order of
or clone elements in x.
>>> from nutsflow import Collect
>>> [(1, 2, 3), (4, 5, 6)] >> GetCols(1) >> Collect()
[(2,), (5,)]
>>> [[1, 2, 3], [4, 5, 6]] >> GetCols(2, 0) >> Collect()
[(3, 1), (6, 4)]
>>> [[1, 2, 3], [4, 5, 6]] >> GetCols((2, 0)) >> Collect()
[(3, 1), (6, 4)]
>>> [(1, 2, 3), (4, 5, 6)] >> GetCols(2, 1, 0) >> Collect()
[(3, 2, 1), (6, 5, 4)]
>>> [(1, 2, 3), (4, 5, 6)] >> GetCols(1, 1) >> Collect()
[(2, 2), (5, 5)]
:param iterable iterable: Any iterable
:param indexable container x: Any indexable input
    :param int|tuple|args columns: Indices of elements/columns in x to extract
or a tuple with these indices.
:return: Extracted elements
:rtype: tuple
"""
if len(columns) == 1 and isinstance(columns[0], tuple):
columns = columns[0]
return tuple(x[i] for i in columns)
class Counter(NutFunction):
"""
Increment counter depending on elements in iterable.
Intended mostly for debugging and monitoring. Avoid for standard
processing of data. The function has side-effects but is thread-safe.
"""
def __init__(self, name, filterfunc=lambda x: True, value=0):
"""
counter = Counter(name, filterfunc, value)
iterable >> counter
>>> from nutsflow import Consume
>>> counter = Counter('smallerthan3', lambda x: x < 3, 1)
>>> range(10) >> counter >> Consume()
>>> counter
smallerthan3 = 4
:param str name: Name of the counter
:param func filterfunc: Filter function.
Count only elements where func returns True.
:param int value: Initial counter value
"""
self.name = name
self.value = value
self.filterfunc = filterfunc
self.lock = threading.Lock()
def reset(self, value=0):
"""
Reset counter to given value.
:param int value: Reset value
"""
with self.lock:
self.value = value
def __repr__(self):
"""
Return counter value as string.
:return: Counter value
:rtype: str
"""
return self.__str__()
def __str__(self):
"""
Return string representation of counter value.
:return: counter name and value as string
:rtype: str
"""
return '{} = {}'.format(self.name, self.value)
def __call__(self, x):
"""
Increment counter.
:param object x: Element in iterable
:return: Unchanged element
:rtype: Any
"""
with self.lock:
if self.filterfunc(x):
self.value += 1
return x
@nut_function
def Sleep(x, duration=1):
"""
iterable >> Sleep(duration)
    Return same input as output but sleep for each element.
>>> from nutsflow import Collect
>>> [1, 2, 3] >> Sleep(0.1) >> Collect()
[1, 2, 3]
:param iterable iterable: Any iterable
:param object x: Any input
:param float duration: Sleeping time in seconds.
:return: Returns input unaltered
:rtype: object
"""
time.sleep(duration)
return x
@nut_function
def Format(x, fmt):
"""
iterable >> Format(fmt)
Return input as formatted string. For format definition see:
https://docs.python.org/2/library/string.html
>>> from nutsflow import Collect
>>> [1, 2, 3] >> Format('num:{}') >> Collect()
['num:1', 'num:2', 'num:3']
>>> [(1, 2), (3, 4)] >> Format('{0}:{1}') >> Collect()
['1:2', '3:4']
:param iterable iterable: Any iterable
:param string fmt: Formatting string, e.g. '{:02d}'
:return: Returns inputs as strings formatted as specified
:rtype: str
"""
return fmt.format(*(x if is_iterable(x) else [x]))
class Print(NutFunction):
"""
Print elements in iterable.
"""
def __init__(self, fmtfunc=None, every_sec=0, every_n=0,
filterfunc=lambda x: True, end='\n'):
"""
iterable >> Print(fmtfunc=None, every_sec=0, every_n=0,
filterfunc=lambda x: True)
        Return same input as output but print for each element.
>>> from nutsflow import Consume
>>> [1, 2] >> Print() >> Consume()
1
2
>>> range(10) >> Print(every_n=3) >> Consume()
2
5
8
>>> even = lambda x: x % 2 == 0
>>> [1, 2, 3, 4] >> Print(filterfunc=even) >> Consume()
2
4
>>> [{'val': 1}, {'val': 2}] >> Print('number={val}') >> Consume()
number=1
number=2
>>> [[1, 2], [3, 4]] >> Print('number={1}:{0}') >> Consume()
number=2:1
number=4:3
>>> myfmt = lambda x: 'char='+x.upper()
>>> ['a', 'b'] >> Print(myfmt) >> Consume()
char=A
char=B
>>> range(5) >> Print('.', end=' ') >> Consume()
. . . . .
:param object x: Any input
:param string|function fmtfunc: Format string or function.
fmtfunc is a standard Python str.format() string,
see https://docs.python.org/2/library/string.html
or a function that returns a string.
:param float every_sec: Print every given second, e.g. to print
every 2.5 sec every_sec = 2.5
:param int every_n: Print every n-th call.
:param str end: Ending of text printed.
:param function filterfunc: Boolean function to filter print.
:return: Returns input unaltered
:rtype: object
:raise: ValueError if fmtfunc is not string or function
"""
self.fmtfunc = fmtfunc
self.every_sec = every_sec
self.every_n = every_n
self.filterfunc = filterfunc
self.end = end
self.cnt = 0
self.time = time.time()
def __delta_sec(self):
"""Return time in seconds (float) consumed between prints so far"""
return time.time() - self.time
def __should_print(self, x):
"""Return true if element x should be printed"""
self.cnt += 1
return (self.filterfunc(x) and
self.cnt >= self.every_n and
self.__delta_sec() >= self.every_sec)
def __call__(self, x):
"""Return element x and potentially print its value"""
if not self.__should_print(x):
return x
self.cnt = 0 # reset counter
self.time = time.time() # reset timer
fmtfunc = self.fmtfunc
if hasattr(x, 'ndim'): # is it a numpy array?
x = x.tolist() if x.ndim else x.item()
if not fmtfunc:
text = x
elif isinstance(fmtfunc, str):
if isinstance(x, dict):
text = fmtfunc.format(**x)
else:
text = fmtfunc.format(*(x if is_iterable(x) else [x]))
elif hasattr(fmtfunc, '__call__'):
text = fmtfunc(x)
else:
raise ValueError('Invalid format ' + str(fmtfunc))
console(text, end=self.end)
return x
class PrintColType(NutFunction):
def __init__(self, cols=None):
"""
iterable >> PrintColType()
Print type and other information for column data (tuples).
>>> import numpy as np
>>> from nutsflow import Consume
>>> data = [(np.zeros((10, 20, 3)), 1), ('text', 2), 3]
>>> data >> PrintColType() >> Consume()
item 0: <tuple>
0: <ndarray> shape:10x20x3 dtype:float64 range:0.0..0.0
1: <int> 1
item 1: <tuple>
0: <str> text
1: <int> 2
item 2: <int>
0: <int> 3
>>> [(1, 2), (3, 4)] >> PrintColType(1) >> Consume()
item 0: <tuple>
1: <int> 2
item 1: <tuple>
1: <int> 4
>>> from collections import namedtuple
>>> Sample = namedtuple('Sample', 'x,y')
>>> a = np.zeros((3, 4), dtype='uint8')
>>> b = np.ones((1, 2), dtype='float32')
>>> data = [Sample(a, 1), Sample(b, 2)]
>>> data >> PrintColType() >> Consume()
item 0: <Sample>
x: <ndarray> shape:3x4 dtype:uint8 range:0..0
y: <int> 1
item 1: <Sample>
x: <ndarray> shape:1x2 dtype:float32 range:1.0..1.0
y: <int> 2
        :param int|tuple|None cols: Indices of columns to show info for.
None means all columns. Can be a single index or a tuple of indices.
:return: input data unchanged
:rtype: same as input data
"""
self.cols = cols
self.cnt = -1
def __call__(self, data):
"""
Print data info.
:param any data: Any type of iterable
:return: data unchanged
:rtype: same as data
"""
items = [(i, e) for i, e in enumerate(as_tuple(data))]
cols = None if self.cols is None else as_tuple(self.cols)
has_fields = hasattr(data, '_fields')
colnames = data._fields if has_fields else [str(i) for i, _ in items]
self.cnt += 1
print('item {}: <{}>'.format(self.cnt, type(data).__name__))
for i, e in items:
if cols is None or i in cols:
typename = type(e).__name__
print(' {}: <{}>'.format(colnames[i], typename), end=' ')
if istensor(e):
msg = 'shape:{} dtype:{} range:{}..{}'
print(msg.format(shapestr(e), e.dtype, e.min(), e.max()))
else:
print('{}'.format(str(e)))
return data
class PrintType(NutFunction):
def __init__(self, prefix=''):
"""
iterable >> PrintType()
Print type and shape information for structured data. This is
especially useful for data containing (large) Numpy arrays or
Pytorch/Tensorflow tensors.
>>> import numpy as np
>>> from nutsflow import Consume, Take
>>> a = np.zeros((3, 4), dtype='uint8')
>>> b = np.zeros((1, 2), dtype='float32')
>>> data = [(a, b), 1.1, [[a], 2]]
>>> data >> PrintType() >> Consume()
(<ndarray> 3x4:uint8, <ndarray> 1x2:float32)
<float> 1.1
[[<ndarray> 3x4:uint8], <int> 2]
>>> data >> Take(1) >> PrintType('dtype:') >> Consume()
dtype: (<ndarray> 3x4:uint8, <ndarray> 1x2:float32)
>>> from collections import namedtuple
>>> Sample = namedtuple('Sample', 'x,y')
>>> data = [Sample(a, 1), Sample(b, 2)]
>>> data >> PrintType() >> Consume()
Sample(x=<ndarray> 3x4:uint8, y=<int> 1)
Sample(x=<ndarray> 1x2:float32, y=<int> 2)
        Note that there is also a function print_type() that allows printing
        individual data elements instead of entire data streams.
>>> data = [{'mat':a}, 2]
>>> print_type(data)
[{mat:<ndarray> 3x4:uint8}, <int> 2]
:param str prefix: Prefix text printed before type
:return: input data unchanged
:rtype: same as input data
"""
self.prefix = prefix
def __call__(self, data):
"""
Print data info.
:param object data: Any object.
:return: data unchanged
:rtype: same as object
"""
if self.prefix:
print(self.prefix, end=' ')
print_type(data)
return data
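# --- Illustrative sketch (hypothetical nut, not part of the original module).
# A user-defined nut follows the same @nut_function pattern used above: the
# first parameter receives each stream element, the remaining parameters come
# from the nut's constructor call.
@nut_function
def Clamp(x, lo, hi):
    """iterable >> Clamp(lo, hi) limits each element to the range [lo, hi]."""
    return max(lo, min(hi, x))
# e.g. [1, 5, 9] >> Clamp(2, 8) >> Collect() would yield [2, 5, 8]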
|
[
"nutsflow.common.print_type",
"nutsflow.common.is_iterable",
"nutsflow.common.shapestr",
"time.time",
"threading.Lock",
"time.sleep",
"nutsflow.common.console",
"nutsflow.common.as_tuple",
"nutsflow.common.istensor"
] |
[((5898, 5918), 'time.sleep', 'time.sleep', (['duration'], {}), '(duration)\n', (5908, 5918), False, 'import time\n'), ((4569, 4585), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (4583, 4585), False, 'import threading\n'), ((8604, 8615), 'time.time', 'time.time', ([], {}), '()\n', (8613, 8615), False, 'import time\n'), ((9220, 9231), 'time.time', 'time.time', ([], {}), '()\n', (9229, 9231), False, 'import time\n'), ((9795, 9822), 'nutsflow.common.console', 'console', (['text'], {'end': 'self.end'}), '(text, end=self.end)\n', (9802, 9822), False, 'from nutsflow.common import shapestr, as_tuple, is_iterable, istensor, print_type, console\n'), ((14112, 14128), 'nutsflow.common.print_type', 'print_type', (['data'], {}), '(data)\n', (14122, 14128), False, 'from nutsflow.common import shapestr, as_tuple, is_iterable, istensor, print_type, console\n'), ((8735, 8746), 'time.time', 'time.time', ([], {}), '()\n', (8744, 8746), False, 'import time\n'), ((11672, 11691), 'nutsflow.common.as_tuple', 'as_tuple', (['self.cols'], {}), '(self.cols)\n', (11680, 11691), False, 'from nutsflow.common import shapestr, as_tuple, is_iterable, istensor, print_type, console\n'), ((6535, 6549), 'nutsflow.common.is_iterable', 'is_iterable', (['x'], {}), '(x)\n', (6546, 6549), False, 'from nutsflow.common import shapestr, as_tuple, is_iterable, istensor, print_type, console\n'), ((12115, 12126), 'nutsflow.common.istensor', 'istensor', (['e'], {}), '(e)\n', (12123, 12126), False, 'from nutsflow.common import shapestr, as_tuple, is_iterable, istensor, print_type, console\n'), ((11609, 11623), 'nutsflow.common.as_tuple', 'as_tuple', (['data'], {}), '(data)\n', (11617, 11623), False, 'from nutsflow.common import shapestr, as_tuple, is_iterable, istensor, print_type, console\n'), ((12224, 12235), 'nutsflow.common.shapestr', 'shapestr', (['e'], {}), '(e)\n', (12232, 12235), False, 'from nutsflow.common import shapestr, as_tuple, is_iterable, istensor, print_type, console\n'), ((9611, 9625), 'nutsflow.common.is_iterable', 'is_iterable', (['x'], {}), '(x)\n', (9622, 9625), False, 'from nutsflow.common import shapestr, as_tuple, is_iterable, istensor, print_type, console\n')]
|
from peek_platform import PeekPlatformConfig
from peek_platform.sw_install.PeekSwInstallManagerABC import PeekSwInstallManagerABC
__author__ = 'synerty'
class PeekSwInstallManager(PeekSwInstallManagerABC):
def _stopCode(self):
PeekPlatformConfig.pluginLoader.stopOptionalPlugins()
PeekPlatformConfig.pluginLoader.stopCorePlugins()
PeekPlatformConfig.pluginLoader.unloadOptionalPlugins()
PeekPlatformConfig.pluginLoader.unloadCorePlugins()
def _upgradeCode(self):
pass
def _startCode(self):
PeekPlatformConfig.pluginLoader.loadCorePlugins()
PeekPlatformConfig.pluginLoader.loadOptionalPlugins()
PeekPlatformConfig.pluginLoader.startCorePlugins()
PeekPlatformConfig.pluginLoader.startOptionalPlugins()
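# Ordering note (inferred from the methods above): tear-down stops and unloads
# optional plugins before core plugins, while bring-up loads and starts core
# plugins first, so optional plugins never run without the core plugins they
# may depend on.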
|
[
"peek_platform.PeekPlatformConfig.pluginLoader.startOptionalPlugins",
"peek_platform.PeekPlatformConfig.pluginLoader.unloadOptionalPlugins",
"peek_platform.PeekPlatformConfig.pluginLoader.stopOptionalPlugins",
"peek_platform.PeekPlatformConfig.pluginLoader.unloadCorePlugins",
"peek_platform.PeekPlatformConfig.pluginLoader.loadOptionalPlugins",
"peek_platform.PeekPlatformConfig.pluginLoader.startCorePlugins",
"peek_platform.PeekPlatformConfig.pluginLoader.loadCorePlugins",
"peek_platform.PeekPlatformConfig.pluginLoader.stopCorePlugins"
] |
[((243, 296), 'peek_platform.PeekPlatformConfig.pluginLoader.stopOptionalPlugins', 'PeekPlatformConfig.pluginLoader.stopOptionalPlugins', ([], {}), '()\n', (294, 296), False, 'from peek_platform import PeekPlatformConfig\n'), ((305, 354), 'peek_platform.PeekPlatformConfig.pluginLoader.stopCorePlugins', 'PeekPlatformConfig.pluginLoader.stopCorePlugins', ([], {}), '()\n', (352, 354), False, 'from peek_platform import PeekPlatformConfig\n'), ((363, 418), 'peek_platform.PeekPlatformConfig.pluginLoader.unloadOptionalPlugins', 'PeekPlatformConfig.pluginLoader.unloadOptionalPlugins', ([], {}), '()\n', (416, 418), False, 'from peek_platform import PeekPlatformConfig\n'), ((427, 478), 'peek_platform.PeekPlatformConfig.pluginLoader.unloadCorePlugins', 'PeekPlatformConfig.pluginLoader.unloadCorePlugins', ([], {}), '()\n', (476, 478), False, 'from peek_platform import PeekPlatformConfig\n'), ((556, 605), 'peek_platform.PeekPlatformConfig.pluginLoader.loadCorePlugins', 'PeekPlatformConfig.pluginLoader.loadCorePlugins', ([], {}), '()\n', (603, 605), False, 'from peek_platform import PeekPlatformConfig\n'), ((614, 667), 'peek_platform.PeekPlatformConfig.pluginLoader.loadOptionalPlugins', 'PeekPlatformConfig.pluginLoader.loadOptionalPlugins', ([], {}), '()\n', (665, 667), False, 'from peek_platform import PeekPlatformConfig\n'), ((677, 727), 'peek_platform.PeekPlatformConfig.pluginLoader.startCorePlugins', 'PeekPlatformConfig.pluginLoader.startCorePlugins', ([], {}), '()\n', (725, 727), False, 'from peek_platform import PeekPlatformConfig\n'), ((736, 790), 'peek_platform.PeekPlatformConfig.pluginLoader.startOptionalPlugins', 'PeekPlatformConfig.pluginLoader.startOptionalPlugins', ([], {}), '()\n', (788, 790), False, 'from peek_platform import PeekPlatformConfig\n')]
|
import unittest
from ncaabb import team
class TestTeam(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.team_one = team.Team(["Team One", "Region", 1, True, 30, 30])
def test_calculate_rating(self):
self.assertNotEqual(self.team_one, self.team_one.calculate_rating())
def test_get_scores(self):
ucla = team.Team(["UCLA", "Region", 1, True, 30, 30])
#TODO: .get_scores() looks in wrong directory for the database
ucla.get_scores()
self.assertTrue(ucla.points_scored, "Error getting points scored")
self.assertTrue(ucla.points_allowed, "Error getting points allowed")
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"ncaabb.team.Team"
] |
[((711, 726), 'unittest.main', 'unittest.main', ([], {}), '()\n', (724, 726), False, 'import unittest\n'), ((153, 203), 'ncaabb.team.Team', 'team.Team', (["['Team One', 'Region', 1, True, 30, 30]"], {}), "(['Team One', 'Region', 1, True, 30, 30])\n", (162, 203), False, 'from ncaabb import team\n'), ((372, 418), 'ncaabb.team.Team', 'team.Team', (["['UCLA', 'Region', 1, True, 30, 30]"], {}), "(['UCLA', 'Region', 1, True, 30, 30])\n", (381, 418), False, 'from ncaabb import team\n')]
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2019. <NAME> <<EMAIL>>
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
netbox.py
A lookup function designed to return data from the Netbox application
"""
from __future__ import absolute_import, division, print_function
from pprint import pformat
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from ansible.parsing.splitter import parse_kv
from ansible.utils.display import Display
import pynetbox
__metaclass__ = type
DOCUMENTATION = """
lookup: netbox
author: <NAME> (@cpmills1975)
version_added: "2.9"
short_description: Queries and returns elements from Netbox
description:
- Queries Netbox via its API to return virtually any information
capable of being held in Netbox.
- If wanting to obtain the plaintext attribute of a secret, key_file must be provided.
options:
_terms:
description:
- The Netbox object type to query
required: True
api_endpoint:
description:
- The URL to the Netbox instance to query
required: True
api_filter:
description:
- The api_filter to use.
required: False
token:
description:
- The API token created through Netbox
required: True
key_file:
description:
- The location of the private key tied to user account.
required: False
requirements:
- pynetbox
"""
EXAMPLES = """
tasks:
# query a list of devices
- name: Obtain list of devices from Netbox
debug:
msg: >
"Device {{ item.value.display_name }} (ID: {{ item.key }}) was
manufactured by {{ item.value.device_type.manufacturer.name }}"
loop: "{{ query('netbox', 'devices',
api_endpoint='http://localhost/',
token='<redacted>') }}"
This example uses an API Filter
tasks:
# query a list of devices
- name: Obtain list of devices from Netbox
debug:
msg: >
"Device {{ item.value.display_name }} (ID: {{ item.key }}) was
manufactured by {{ item.value.device_type.manufacturer.name }}"
loop: "{{ query('netbox', 'devices',
api_endpoint='http://localhost/',
              api_filter='role=management tag=Dell',
              token='<redacted>') }}"
# Obtain a secret for R1-device
tasks:
- name: "Obtain secrets for R1-Device"
debug:
      msg: "{{ query('netbox', 'secrets', api_filter='device=R1-Device', api_endpoint='http://localhost/', token='<redacted>', key_file='~/.ssh/id_rsa') }}"
"""
RETURN = """
_list:
description:
    - list of composed dictionaries with key and value
type: list
"""
def get_endpoint(netbox, term):
"""
get_endpoint(netbox, term)
netbox: a predefined pynetbox.api() pointing to a valid instance
of Netbox
term: the term passed to the lookup function upon which the api
call will be identified
"""
netbox_endpoint_map = {
"aggregates": {"endpoint": netbox.ipam.aggregates},
"circuit-terminations": {"endpoint": netbox.circuits.circuit_terminations},
"circuit-types": {"endpoint": netbox.circuits.circuit_types},
"circuits": {"endpoint": netbox.circuits.circuits},
"circuit-providers": {"endpoint": netbox.circuits.providers},
"cables": {"endpoint": netbox.dcim.cables},
"cluster-groups": {"endpoint": netbox.virtualization.cluster_groups},
"cluster-types": {"endpoint": netbox.virtualization.cluster_types},
"clusters": {"endpoint": netbox.virtualization.clusters},
"config-contexts": {"endpoint": netbox.extras.config_contexts},
"console-connections": {"endpoint": netbox.dcim.console_connections},
"console-ports": {"endpoint": netbox.dcim.console_ports},
"console-server-port-templates": {
"endpoint": netbox.dcim.console_server_port_templates
},
"console-server-ports": {"endpoint": netbox.dcim.console_server_ports},
"device-bay-templates": {"endpoint": netbox.dcim.device_bay_templates},
"device-bays": {"endpoint": netbox.dcim.device_bays},
"device-roles": {"endpoint": netbox.dcim.device_roles},
"device-types": {"endpoint": netbox.dcim.device_types},
"devices": {"endpoint": netbox.dcim.devices},
"export-templates": {"endpoint": netbox.dcim.export_templates},
"front-port-templates": {"endpoint": netbox.dcim.front_port_templates},
"front-ports": {"endpoint": netbox.dcim.front_ports},
"graphs": {"endpoint": netbox.extras.graphs},
"image-attachments": {"endpoint": netbox.extras.image_attachments},
"interface-connections": {"endpoint": netbox.dcim.interface_connections},
"interface-templates": {"endpoint": netbox.dcim.interface_templates},
"interfaces": {"endpoint": netbox.dcim.interfaces},
"inventory-items": {"endpoint": netbox.dcim.inventory_items},
"ip-addresses": {"endpoint": netbox.ipam.ip_addresses},
"manufacturers": {"endpoint": netbox.dcim.manufacturers},
"object-changes": {"endpoint": netbox.extras.object_changes},
"platforms": {"endpoint": netbox.dcim.platforms},
"power-connections": {"endpoint": netbox.dcim.power_connections},
"power-outlet-templates": {"endpoint": netbox.dcim.power_outlet_templates},
"power-outlets": {"endpoint": netbox.dcim.power_outlets},
"power-port-templates": {"endpoint": netbox.dcim.power_port_templates},
"power-ports": {"endpoint": netbox.dcim.power_ports},
"prefixes": {"endpoint": netbox.ipam.prefixes},
"rack-groups": {"endpoint": netbox.dcim.rack_groups},
"rack-reservations": {"endpoint": netbox.dcim.rack_reservations},
"rack-roles": {"endpoint": netbox.dcim.rack_roles},
"racks": {"endpoint": netbox.dcim.racks},
"rear-port-templates": {"endpoint": netbox.dcim.rear_port_templates},
"rear-ports": {"endpoint": netbox.dcim.rear_ports},
"regions": {"endpoint": netbox.dcim.regions},
"reports": {"endpoint": netbox.extras.reports},
"rirs": {"endpoint": netbox.ipam.rirs},
"roles": {"endpoint": netbox.ipam.roles},
"secret-roles": {"endpoint": netbox.secrets.secret_roles},
"secrets": {"endpoint": netbox.secrets.secrets},
"services": {"endpoint": netbox.ipam.services},
"sites": {"endpoint": netbox.dcim.sites},
"tags": {"endpoint": netbox.extras.tags},
"tenant-groups": {"endpoint": netbox.tenancy.tenant_groups},
"tenants": {"endpoint": netbox.tenancy.tenants},
"topology-maps": {"endpoint": netbox.extras.topology_maps},
"virtual-chassis": {"endpoint": netbox.dcim.virtual_chassis},
"virtual-machines": {"endpoint": netbox.virtualization.virtual_machines},
"virtualization-interfaces": {"endpoint": netbox.virtualization.interfaces},
"vlan-groups": {"endpoint": netbox.ipam.vlan_groups},
"vlans": {"endpoint": netbox.ipam.vlans},
"vrfs": {"endpoint": netbox.ipam.vrfs},
}
return netbox_endpoint_map[term]["endpoint"]
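# Illustrative only (hypothetical endpoint and token): the map above resolves
# a lookup term to the matching pynetbox endpoint object, e.g.
#   nb = pynetbox.api('http://localhost/', token='<redacted>')
#   get_endpoint(nb, 'devices')       # -> nb.dcim.devices
#   get_endpoint(nb, 'ip-addresses')  # -> nb.ipam.ip_addresses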
class LookupModule(LookupBase):
"""
LookupModule(LookupBase) is defined by Ansible
"""
def run(self, terms, variables=None, **kwargs):
netbox_api_token = kwargs.get("token")
netbox_api_endpoint = kwargs.get("api_endpoint")
netbox_private_key_file = kwargs.get("key_file")
netbox_api_filter = kwargs.get("api_filter")
if not isinstance(terms, list):
terms = [terms]
try:
netbox = pynetbox.api(
netbox_api_endpoint,
token=netbox_api_token,
private_key_file=netbox_private_key_file,
)
except FileNotFoundError:
raise AnsibleError(
"%s cannot be found. Please make sure file exists."
% netbox_private_key_file
)
results = []
for term in terms:
try:
endpoint = get_endpoint(netbox, term)
except KeyError:
raise AnsibleError("Unrecognised term %s. Check documentation" % term)
Display().vvvv(
u"Netbox lookup for %s to %s using token %s filter %s"
% (term, netbox_api_endpoint, netbox_api_token, netbox_api_filter)
)
if netbox_api_filter:
filter = parse_kv(netbox_api_filter)
Display().vvvv("filter is %s" % filter)
for res in endpoint.filter(**filter):
Display().vvvvv(pformat(dict(res)))
key = dict(res)["id"]
result = {key: dict(res)}
results.extend(self._flatten_hash_to_list(result))
else:
for res in endpoint.all():
Display().vvvvv(pformat(dict(res)))
key = dict(res)["id"]
result = {key: dict(res)}
results.extend(self._flatten_hash_to_list(result))
return results
|
[
"ansible.utils.display.Display",
"ansible.errors.AnsibleError",
"ansible.parsing.splitter.parse_kv",
"pynetbox.api"
] |
[((7878, 7982), 'pynetbox.api', 'pynetbox.api', (['netbox_api_endpoint'], {'token': 'netbox_api_token', 'private_key_file': 'netbox_private_key_file'}), '(netbox_api_endpoint, token=netbox_api_token, private_key_file=\n netbox_private_key_file)\n', (7890, 7982), False, 'import pynetbox\n'), ((8093, 8188), 'ansible.errors.AnsibleError', 'AnsibleError', (["('%s cannot be found. Please make sure file exists.' % netbox_private_key_file)"], {}), "('%s cannot be found. Please make sure file exists.' %\n netbox_private_key_file)\n", (8105, 8188), False, 'from ansible.errors import AnsibleError\n'), ((8725, 8752), 'ansible.parsing.splitter.parse_kv', 'parse_kv', (['netbox_api_filter'], {}), '(netbox_api_filter)\n', (8733, 8752), False, 'from ansible.parsing.splitter import parse_kv\n'), ((8403, 8467), 'ansible.errors.AnsibleError', 'AnsibleError', (["('Unrecognised term %s. Check documentation' % term)"], {}), "('Unrecognised term %s. Check documentation' % term)\n", (8415, 8467), False, 'from ansible.errors import AnsibleError\n'), ((8481, 8490), 'ansible.utils.display.Display', 'Display', ([], {}), '()\n', (8488, 8490), False, 'from ansible.utils.display import Display\n'), ((8770, 8779), 'ansible.utils.display.Display', 'Display', ([], {}), '()\n', (8777, 8779), False, 'from ansible.utils.display import Display\n'), ((8886, 8895), 'ansible.utils.display.Display', 'Display', ([], {}), '()\n', (8893, 8895), False, 'from ansible.utils.display import Display\n'), ((9166, 9175), 'ansible.utils.display.Display', 'Display', ([], {}), '()\n', (9173, 9175), False, 'from ansible.utils.display import Display\n')]
|
from django.conf import settings
from haystack.backends import SQ
from haystack.generic_views import SearchView
from haystack.inputs import AutoQuery
from haystack.query import SearchQuerySet
# convert the subfacet settings to Facet objects
from facets import Facet
SEARCH_SUBFACETS = getattr(settings, "SEARCH_SUBFACETS", {})
for k, kwargs_list in SEARCH_SUBFACETS.items():
facets = [Facet(**kw) for kw in kwargs_list]
SEARCH_SUBFACETS[k] = facets
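# Illustrative only (hypothetical settings shape, inferred from Facet(**kw)
# above): SEARCH_SUBFACETS maps each top-level facet value to a list of
# Facet keyword dicts, one per sub-facet, e.g.
#
# SEARCH_SUBFACETS = {
#     'articles': [{'field_name': 'topics'}, {'field_name': 'year'}],
# }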
class ICEkitSearchView(SearchView):
"""
A search view which arranges results according to a top facet ('type'),
then any of several sets of subfacets, depending on which top facet is
selected.
Only zero or one top facet can be active at a time, but many sub-facets
can be active at a time.
    Counter to Haystack convention, we're not using search logic in the form.
"""
top_facet = Facet(field_name='search_types', is_top_level=True, select_many=False)
fluent_page = None
def get_top_level_facet_value(self):
value = self.request.GET.get(self.top_facet.field_name)
if value:
return value
if self.fluent_page:
return self.fluent_page.default_search_type or None
return None
def pre_facet_sqs(self):
"""
Return the queryset used for generating facets, before any facets
are applied
"""
sqs = SearchQuerySet()
if self.query:
sqs = sqs.filter(
SQ(content=AutoQuery(self.query)) | # Search `text` document
SQ(get_title=AutoQuery(self.query)) | # boosted field
SQ(boosted_search_terms=AutoQuery(self.query)) # boosted field
)
return sqs
def get(self, request, *args, **kwargs):
"""User has conducted a search, or default state"""
form_class = self.get_form_class()
form = self.get_form(form_class)
top_value = self.get_top_level_facet_value()
subfacets = SEARCH_SUBFACETS.get(top_value, [])
self.active_facets = [self.top_facet] + subfacets
if form.is_valid():
self.query = form.cleaned_data.get(self.search_field)
else:
self.query = ""
sqs = self.pre_facet_sqs()
for facet in self.active_facets:
sqs = facet.set_on_sqs(sqs)
facet_counts = sqs.facet_counts()
for facet in self.active_facets:
facet.set_values_from_sqs_facet_counts(facet_counts)
facet.apply_request_and_page_to_values(self.request, self.fluent_page)
for facet in self.active_facets:
sqs = facet.narrow_sqs(sqs)
context = self.get_context_data(**{
self.form_name: form,
'facets': self.active_facets,
'top_facet': self.top_facet,
'query': self.query,
'object_list': sqs,
'page': self.fluent_page,
'show_placeholders': self.show_placeholders()
})
return self.render_to_response(context)
def show_placeholders(self):
return not self.query and all([f.is_default() for f in self.active_facets])
|
[
"haystack.query.SearchQuerySet",
"haystack.inputs.AutoQuery",
"facets.Facet"
] |
[((879, 949), 'facets.Facet', 'Facet', ([], {'field_name': '"""search_types"""', 'is_top_level': '(True)', 'select_many': '(False)'}), "(field_name='search_types', is_top_level=True, select_many=False)\n", (884, 949), False, 'from facets import Facet\n'), ((392, 403), 'facets.Facet', 'Facet', ([], {}), '(**kw)\n', (397, 403), False, 'from facets import Facet\n'), ((1398, 1414), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (1412, 1414), False, 'from haystack.query import SearchQuerySet\n'), ((1657, 1678), 'haystack.inputs.AutoQuery', 'AutoQuery', (['self.query'], {}), '(self.query)\n', (1666, 1678), False, 'from haystack.inputs import AutoQuery\n'), ((1496, 1517), 'haystack.inputs.AutoQuery', 'AutoQuery', (['self.query'], {}), '(self.query)\n', (1505, 1517), False, 'from haystack.inputs import AutoQuery\n'), ((1576, 1597), 'haystack.inputs.AutoQuery', 'AutoQuery', (['self.query'], {}), '(self.query)\n', (1585, 1597), False, 'from haystack.inputs import AutoQuery\n')]
|
import os
from stray.scene import Scene
from stray.renderer import Renderer
import numpy as np
import pycocotools.mask as mask_util
import pickle
def write_segmentation_masks(scene_path):
scene = Scene(scene_path)
renderer = Renderer(scene)
segmentation_parent_path = os.path.join(scene_path, "segmentation")
os.makedirs(segmentation_parent_path, exist_ok=True)
for bbox_id, bbox in enumerate(scene.bounding_boxes):
segmentation_path = os.path.join(segmentation_parent_path, f"instance_{bbox_id}")
os.makedirs(segmentation_path, exist_ok=True)
renderer.add_scene_instance(bbox)
for i in range(0, len(scene), 1):
print(f"Processing frame {i:06}", end='\r')
mask = renderer.render_segmentation(i)
segmentation = mask_util.encode(np.asarray(mask, order="F"))
with open(os.path.join(segmentation_path, f"{i:06}.pickle"), 'wb') as handle:
pickle.dump(segmentation, handle, protocol=pickle.HIGHEST_PROTOCOL)
print(f"Saved segmetations to {segmentation_path} for instance {bbox_id}")
renderer.clear_scene_instances()
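# A minimal read-back sketch (hypothetical helper, not in the original file):
# each frame's mask is stored above as a pickled COCO RLE dict, so it can be
# restored with pickle.load and decoded with pycocotools' mask_util.decode.
def read_segmentation_mask(scene_path, bbox_id, frame_index):
    path = os.path.join(scene_path, "segmentation", f"instance_{bbox_id}",
                        f"{frame_index:06}.pickle")
    with open(path, 'rb') as handle:
        rle = pickle.load(handle)
    return mask_util.decode(rle)  # HxW uint8 mask array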
|
[
"pickle.dump",
"os.makedirs",
"numpy.asarray",
"stray.renderer.Renderer",
"stray.scene.Scene",
"os.path.join"
] |
[((202, 219), 'stray.scene.Scene', 'Scene', (['scene_path'], {}), '(scene_path)\n', (207, 219), False, 'from stray.scene import Scene\n'), ((235, 250), 'stray.renderer.Renderer', 'Renderer', (['scene'], {}), '(scene)\n', (243, 250), False, 'from stray.renderer import Renderer\n'), ((282, 322), 'os.path.join', 'os.path.join', (['scene_path', '"""segmentation"""'], {}), "(scene_path, 'segmentation')\n", (294, 322), False, 'import os\n'), ((327, 379), 'os.makedirs', 'os.makedirs', (['segmentation_parent_path'], {'exist_ok': '(True)'}), '(segmentation_parent_path, exist_ok=True)\n', (338, 379), False, 'import os\n'), ((466, 527), 'os.path.join', 'os.path.join', (['segmentation_parent_path', 'f"""instance_{bbox_id}"""'], {}), "(segmentation_parent_path, f'instance_{bbox_id}')\n", (478, 527), False, 'import os\n'), ((536, 581), 'os.makedirs', 'os.makedirs', (['segmentation_path'], {'exist_ok': '(True)'}), '(segmentation_path, exist_ok=True)\n', (547, 581), False, 'import os\n'), ((817, 844), 'numpy.asarray', 'np.asarray', (['mask'], {'order': '"""F"""'}), "(mask, order='F')\n", (827, 844), True, 'import numpy as np\n'), ((952, 1019), 'pickle.dump', 'pickle.dump', (['segmentation', 'handle'], {'protocol': 'pickle.HIGHEST_PROTOCOL'}), '(segmentation, handle, protocol=pickle.HIGHEST_PROTOCOL)\n', (963, 1019), False, 'import pickle\n'), ((868, 917), 'os.path.join', 'os.path.join', (['segmentation_path', 'f"""{i:06}.pickle"""'], {}), "(segmentation_path, f'{i:06}.pickle')\n", (880, 917), False, 'import os\n')]
|
# Generated by Django 3.2.9 on 2022-02-03 13:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0012_auto_20220203_1617'),
]
operations = [
migrations.AlterField(
model_name='fundraiser',
name='end_date',
field=models.DateTimeField(null=True),
),
migrations.AlterField(
model_name='fundraiser',
name='start_date',
field=models.DateTimeField(auto_now_add=True, null=True),
),
]
|
[
"django.db.models.DateTimeField"
] |
[((338, 369), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)'}), '(null=True)\n', (358, 369), False, 'from django.db import migrations, models\n'), ((499, 549), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)'}), '(auto_now_add=True, null=True)\n', (519, 549), False, 'from django.db import migrations, models\n')]
|
from setuptools import setup
from Cython.Build import cythonize
from distutils.extension import Extension
from distutils.command.build_ext import build_ext
import numpy
import mpi4py
import os
class build_ext_subclass(build_ext):
user_options = build_ext.user_options + \
[
('mpicc', None, 'MPICC')
]
def initialize_options(self):
        try:
            compiler = str(mpi4py.get_config()['mpicc'])
        except (KeyError, TypeError):
            # fall back when mpi4py's config has no usable mpicc entry
            compiler = "mpicc"
self.mpicc = os.environ.get('MPICC', compiler)
build_ext.initialize_options(self)
def finalize_options(self):
build_ext.finalize_options(self)
def build_extensions(self):
        # set_executables only covers linker_so, so patch compiler_so directly
self.compiler.compiler_so[0] = self.mpicc
self.compiler.linker_so[0] = self.mpicc
build_ext.build_extensions(self)
extensions = [
Extension("mpsort.binding", [
"mpsort/binding.pyx",
"radixsort.c",
"mp-mpiu.c",
"mpsort-mpi.c"],
include_dirs = ["./", numpy.get_include()],
depends=[
"mpsort.h",
"mpsort-mpi.h",
"mp-mpiu.h",
]
)
]
def find_version(path):
import re
# path shall be a plain ascii text file.
s = open(path, 'rt').read()
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
s, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Version not found")
setup(
name="mpsort",
version=find_version("mpsort/version.py"),
author="<NAME>",
author_email="<EMAIL>",
url="http://github.com/rainwoodman/mpsort",
description="python binding of MP-sort, a peta scale sorting routine",
zip_safe = False,
package_dir = {'mpsort': 'mpsort'},
install_requires=['cython', 'numpy', 'mpi4py'],
packages= ['mpsort', 'mpsort.tests'],
license='BSD-2-Clause',
cmdclass = {
"build_ext": build_ext_subclass
},
ext_modules = cythonize(extensions)
)
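# Usage sketch (assumed invocation, not from the original file): the MPI
# compiler wrapper defaults to mpi4py's configured mpicc and can be
# overridden with the MPICC environment variable, e.g.
#   MPICC=mpiicc python setup.py build_ext --inplace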
|
[
"Cython.Build.cythonize",
"distutils.command.build_ext.build_ext.finalize_options",
"mpi4py.get_config",
"os.environ.get",
"distutils.command.build_ext.build_ext.initialize_options",
"distutils.command.build_ext.build_ext.build_extensions",
"numpy.get_include",
"re.search"
] |
[((1444, 1506), 're.search', 're.search', (['"""^__version__ = [\'\\\\"]([^\'\\\\"]*)[\'\\\\"]"""', 's', 're.M'], {}), '(\'^__version__ = [\\\'\\\\"]([^\\\'\\\\"]*)[\\\'\\\\"]\', s, re.M)\n', (1453, 1506), False, 'import re\n'), ((516, 549), 'os.environ.get', 'os.environ.get', (['"""MPICC"""', 'compiler'], {}), "('MPICC', compiler)\n", (530, 549), False, 'import os\n'), ((559, 593), 'distutils.command.build_ext.build_ext.initialize_options', 'build_ext.initialize_options', (['self'], {}), '(self)\n', (587, 593), False, 'from distutils.command.build_ext import build_ext\n'), ((635, 667), 'distutils.command.build_ext.build_ext.finalize_options', 'build_ext.finalize_options', (['self'], {}), '(self)\n', (661, 667), False, 'from distutils.command.build_ext import build_ext\n'), ((889, 921), 'distutils.command.build_ext.build_ext.build_extensions', 'build_ext.build_extensions', (['self'], {}), '(self)\n', (915, 921), False, 'from distutils.command.build_ext import build_ext\n'), ((2148, 2169), 'Cython.Build.cythonize', 'cythonize', (['extensions'], {}), '(extensions)\n', (2157, 2169), False, 'from Cython.Build import cythonize\n'), ((1141, 1160), 'numpy.get_include', 'numpy.get_include', ([], {}), '()\n', (1158, 1160), False, 'import numpy\n'), ((417, 436), 'mpi4py.get_config', 'mpi4py.get_config', ([], {}), '()\n', (434, 436), False, 'import mpi4py\n')]
|
import os, sys
ticksleep = 0.1
project_path = os.path.realpath(os.path.join(os.path.dirname(__file__), "../.."))
|
[
"os.path.dirname"
] |
[((77, 102), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (92, 102), False, 'import os, sys\n')]
|
import os
from random import randint
for name in os.listdir('pendrive'):
    newname = '{}.mp3'.format(randint(0, 1000000))
    # randint can repeat, so skip names that already exist to avoid overwriting
    while os.path.exists('pendrive/' + newname):
        newname = '{}.mp3'.format(randint(0, 1000000))
    os.rename('pendrive/' + name, 'pendrive/' + newname)
|
[
"os.rename",
"random.randint",
"os.listdir"
] |
[((50, 72), 'os.listdir', 'os.listdir', (['"""pendrive"""'], {}), "('pendrive')\n", (60, 72), False, 'import os\n'), ((120, 172), 'os.rename', 'os.rename', (["('pendrive/' + name)", "('pendrive/' + newname)"], {}), "('pendrive/' + name, 'pendrive/' + newname)\n", (129, 172), False, 'import os\n'), ((99, 118), 'random.randint', 'randint', (['(0)', '(1000000)'], {}), '(0, 1000000)\n', (106, 118), False, 'from random import randint\n')]
|
# -*- coding: UTF-8 -*-
# custom module imports: start
# from getList import getList
import zbrank
# third-party imports: start
import json
import time
# third-party imports: end
# custom module imports: end
now = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
class status_start:
def __init__(self):
        # main url
self.main_url = "http://www.zbrank.com/ranking/affect/"
# ajax url
self.ajax_url = "http://www.zbrank.com/ranking/getRatingList"
def status_start(self):
# zbrank.zbrankSave().saveZbrankRank()
zbrank.zbrankSave().saveZbrankRankNew()
status_start().status_start()
|
[
"zbrank.zbrankSave",
"time.time"
] |
[((198, 209), 'time.time', 'time.time', ([], {}), '()\n', (207, 209), False, 'import time\n'), ((509, 528), 'zbrank.zbrankSave', 'zbrank.zbrankSave', ([], {}), '()\n', (526, 528), False, 'import zbrank\n')]
|
"""Tests for graphmode_tensornetwork."""
import numpy as np
import tensorflow as tf
from tensornetwork import (contract, connect, flatten_edges_between,
contract_between, Node)
import pytest
class GraphmodeTensorNetworkTest(tf.test.TestCase):
def test_basic_graphmode(self):
# pylint: disable=not-context-manager
with tf.compat.v1.Graph().as_default():
a = Node(tf.ones(10), backend="tensorflow")
b = Node(tf.ones(10), backend="tensorflow")
e = connect(a[0], b[0])
final_tensor = contract(e).get_tensor()
sess = tf.compat.v1.Session()
final_val = sess.run(final_tensor)
self.assertAllClose(final_val, 10.0)
def test_gradient_decent(self):
# pylint: disable=not-context-manager
with tf.compat.v1.Graph().as_default():
a = Node(tf.Variable(tf.ones(10)), backend="tensorflow")
b = Node(tf.ones(10), backend="tensorflow")
e = connect(a[0], b[0])
final_tensor = contract(e).get_tensor()
opt = tf.compat.v1.train.GradientDescentOptimizer(0.001)
train_op = opt.minimize(final_tensor)
sess = tf.compat.v1.Session()
sess.run(tf.compat.v1.global_variables_initializer())
self.assertAllClose(sess.run(final_tensor), 10.0)
sess.run(train_op)
self.assertLess(sess.run(final_tensor), 10.0)
def test_dynamic_network_sizes(self):
@tf.function
def f(x, n):
x_slice = x[:n]
n1 = Node(x_slice, backend="tensorflow")
n2 = Node(x_slice, backend="tensorflow")
e = connect(n1[0], n2[0])
return contract(e).get_tensor()
x = np.ones(10)
self.assertAllClose(f(x, tf.convert_to_tensor(2)), 2.0)
self.assertAllClose(f(x, tf.convert_to_tensor(3)), 3.0)
@pytest.mark.skip(reason="Test fails due to probable bug in tensorflow 2.0.0")
def test_dynamic_network_sizes_contract_between(self):
@tf.function
def f(x, n):
x_slice = x[..., :n]
n1 = Node(x_slice, backend="tensorflow")
n2 = Node(x_slice, backend="tensorflow")
connect(n1[0], n2[0])
connect(n1[1], n2[1])
connect(n1[2], n2[2])
return contract_between(n1, n2).get_tensor()
x = tf.ones((3, 4, 5))
self.assertAllClose(f(x, tf.convert_to_tensor(2)), 24.0)
self.assertAllClose(f(x, tf.convert_to_tensor(3)), 36.0)
def test_dynamic_network_sizes_flatten_standard(self):
@tf.function
def f(x, n):
x_slice = x[..., :n]
n1 = Node(x_slice, backend="tensorflow")
n2 = Node(x_slice, backend="tensorflow")
connect(n1[0], n2[0])
connect(n1[1], n2[1])
connect(n1[2], n2[2])
return contract(flatten_edges_between(n1, n2)).get_tensor()
x = np.ones((3, 4, 5))
self.assertAllClose(f(x, tf.convert_to_tensor(2)), 24.0)
self.assertAllClose(f(x, tf.convert_to_tensor(3)), 36.0)
def test_dynamic_network_sizes_flatten_trace(self):
@tf.function
def f(x, n):
x_slice = x[..., :n]
n1 = Node(x_slice, backend="tensorflow")
connect(n1[0], n1[2])
connect(n1[1], n1[3])
return contract(flatten_edges_between(n1, n1)).get_tensor()
x = np.ones((3, 4, 3, 4, 5))
self.assertAllClose(f(x, tf.convert_to_tensor(2)), np.ones((2,)) * 12)
self.assertAllClose(f(x, tf.convert_to_tensor(3)), np.ones((3,)) * 12)
def test_batch_usage(self,):
def build_tensornetwork(tensors):
a = Node(tensors[0], backend="tensorflow")
b = Node(tensors[1], backend="tensorflow")
e = connect(a[0], b[0])
return contract(e).get_tensor()
tensors = [np.ones((5, 10)), np.ones((5, 10))]
result = tf.map_fn(build_tensornetwork, tensors, dtype=tf.float64)
np.testing.assert_allclose(result, np.ones(5) * 10)
if __name__ == '__main__':
tf.test.main()
|
[
"tensorflow.test.main",
"tensorflow.ones",
"tensornetwork.Node",
"tensorflow.convert_to_tensor",
"tensornetwork.contract",
"tensorflow.compat.v1.train.GradientDescentOptimizer",
"tensornetwork.contract_between",
"numpy.ones",
"tensorflow.compat.v1.Session",
"tensornetwork.connect",
"tensorflow.map_fn",
"tensornetwork.flatten_edges_between",
"pytest.mark.skip",
"tensorflow.compat.v1.Graph",
"tensorflow.compat.v1.global_variables_initializer"
] |
[((1743, 1820), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Test fails due to probable bug in tensorflow 2.0.0"""'}), "(reason='Test fails due to probable bug in tensorflow 2.0.0')\n", (1759, 1820), False, 'import pytest\n'), ((3750, 3764), 'tensorflow.test.main', 'tf.test.main', ([], {}), '()\n', (3762, 3764), True, 'import tensorflow as tf\n'), ((1607, 1618), 'numpy.ones', 'np.ones', (['(10)'], {}), '(10)\n', (1614, 1618), True, 'import numpy as np\n'), ((2178, 2196), 'tensorflow.ones', 'tf.ones', (['(3, 4, 5)'], {}), '((3, 4, 5))\n', (2185, 2196), True, 'import tensorflow as tf\n'), ((2692, 2710), 'numpy.ones', 'np.ones', (['(3, 4, 5)'], {}), '((3, 4, 5))\n', (2699, 2710), True, 'import numpy as np\n'), ((3128, 3152), 'numpy.ones', 'np.ones', (['(3, 4, 3, 4, 5)'], {}), '((3, 4, 3, 4, 5))\n', (3135, 3152), True, 'import numpy as np\n'), ((3605, 3662), 'tensorflow.map_fn', 'tf.map_fn', (['build_tensornetwork', 'tensors'], {'dtype': 'tf.float64'}), '(build_tensornetwork, tensors, dtype=tf.float64)\n', (3614, 3662), True, 'import tensorflow as tf\n'), ((503, 522), 'tensornetwork.connect', 'connect', (['a[0]', 'b[0]'], {}), '(a[0], b[0])\n', (510, 522), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((583, 605), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (603, 605), True, 'import tensorflow as tf\n'), ((934, 953), 'tensornetwork.connect', 'connect', (['a[0]', 'b[0]'], {}), '(a[0], b[0])\n', (941, 953), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((1012, 1062), 'tensorflow.compat.v1.train.GradientDescentOptimizer', 'tf.compat.v1.train.GradientDescentOptimizer', (['(0.001)'], {}), '(0.001)\n', (1055, 1062), True, 'import tensorflow as tf\n'), ((1120, 1142), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (1140, 1142), True, 'import tensorflow as tf\n'), ((1445, 1480), 'tensornetwork.Node', 'Node', (['x_slice'], {'backend': '"""tensorflow"""'}), "(x_slice, backend='tensorflow')\n", (1449, 1480), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((1492, 1527), 'tensornetwork.Node', 'Node', (['x_slice'], {'backend': '"""tensorflow"""'}), "(x_slice, backend='tensorflow')\n", (1496, 1527), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((1538, 1559), 'tensornetwork.connect', 'connect', (['n1[0]', 'n2[0]'], {}), '(n1[0], n2[0])\n', (1545, 1559), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((1951, 1986), 'tensornetwork.Node', 'Node', (['x_slice'], {'backend': '"""tensorflow"""'}), "(x_slice, backend='tensorflow')\n", (1955, 1986), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((1998, 2033), 'tensornetwork.Node', 'Node', (['x_slice'], {'backend': '"""tensorflow"""'}), "(x_slice, backend='tensorflow')\n", (2002, 2033), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((2040, 2061), 'tensornetwork.connect', 'connect', (['n1[0]', 'n2[0]'], {}), '(n1[0], n2[0])\n', (2047, 2061), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((2068, 2089), 'tensornetwork.connect', 'connect', (['n1[1]', 'n2[1]'], {}), '(n1[1], n2[1])\n', (2075, 2089), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((2096, 2117), 'tensornetwork.connect', 'connect', (['n1[2]', 'n2[2]'], {}), '(n1[2], n2[2])\n', (2103, 2117), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((2450, 2485), 'tensornetwork.Node', 'Node', (['x_slice'], {'backend': '"""tensorflow"""'}), "(x_slice, backend='tensorflow')\n", (2454, 2485), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((2497, 2532), 'tensornetwork.Node', 'Node', (['x_slice'], {'backend': '"""tensorflow"""'}), "(x_slice, backend='tensorflow')\n", (2501, 2532), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((2539, 2560), 'tensornetwork.connect', 'connect', (['n1[0]', 'n2[0]'], {}), '(n1[0], n2[0])\n', (2546, 2560), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((2567, 2588), 'tensornetwork.connect', 'connect', (['n1[1]', 'n2[1]'], {}), '(n1[1], n2[1])\n', (2574, 2588), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((2595, 2616), 'tensornetwork.connect', 'connect', (['n1[2]', 'n2[2]'], {}), '(n1[2], n2[2])\n', (2602, 2616), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((2961, 2996), 'tensornetwork.Node', 'Node', (['x_slice'], {'backend': '"""tensorflow"""'}), "(x_slice, backend='tensorflow')\n", (2965, 2996), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((3003, 3024), 'tensornetwork.connect', 'connect', (['n1[0]', 'n1[2]'], {}), '(n1[0], n1[2])\n', (3010, 3024), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((3031, 3052), 'tensornetwork.connect', 'connect', (['n1[1]', 'n1[3]'], {}), '(n1[1], n1[3])\n', (3038, 3052), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((3384, 3422), 'tensornetwork.Node', 'Node', (['tensors[0]'], {'backend': '"""tensorflow"""'}), "(tensors[0], backend='tensorflow')\n", (3388, 3422), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((3433, 3471), 'tensornetwork.Node', 'Node', (['tensors[1]'], {'backend': '"""tensorflow"""'}), "(tensors[1], backend='tensorflow')\n", (3437, 3471), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((3482, 3501), 'tensornetwork.connect', 'connect', (['a[0]', 'b[0]'], {}), '(a[0], b[0])\n', (3489, 3501), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((3556, 3572), 'numpy.ones', 'np.ones', (['(5, 10)'], {}), '((5, 10))\n', (3563, 3572), True, 'import numpy as np\n'), ((3574, 3590), 'numpy.ones', 'np.ones', (['(5, 10)'], {}), '((5, 10))\n', (3581, 3590), True, 'import numpy as np\n'), ((408, 419), 'tensorflow.ones', 'tf.ones', (['(10)'], {}), '(10)\n', (415, 419), True, 'import tensorflow as tf\n'), ((458, 469), 'tensorflow.ones', 'tf.ones', (['(10)'], {}), '(10)\n', (465, 469), True, 'import tensorflow as tf\n'), ((889, 900), 'tensorflow.ones', 'tf.ones', (['(10)'], {}), '(10)\n', (896, 900), True, 'import tensorflow as tf\n'), ((1158, 1201), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', ([], {}), '()\n', (1199, 1201), True, 'import tensorflow as tf\n'), ((1648, 1671), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['(2)'], {}), '(2)\n', (1668, 1671), True, 'import tensorflow as tf\n'), ((1708, 1731), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['(3)'], {}), '(3)\n', (1728, 1731), True, 'import tensorflow as tf\n'), ((2226, 2249), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['(2)'], {}), '(2)\n', (2246, 2249), True, 'import tensorflow as tf\n'), ((2287, 2310), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['(3)'], {}), '(3)\n', (2307, 2310), True, 'import tensorflow as tf\n'), ((2740, 2763), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['(2)'], {}), '(2)\n', (2760, 2763), True, 'import tensorflow as tf\n'), ((2801, 2824), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['(3)'], {}), '(3)\n', (2821, 2824), True, 'import tensorflow as tf\n'), ((3182, 3205), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['(2)'], {}), '(2)\n', (3202, 3205), True, 'import tensorflow as tf\n'), ((3208, 3221), 'numpy.ones', 'np.ones', (['(2,)'], {}), '((2,))\n', (3215, 3221), True, 'import numpy as np\n'), ((3257, 3280), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['(3)'], {}), '(3)\n', (3277, 3280), True, 'import tensorflow as tf\n'), ((3283, 3296), 'numpy.ones', 'np.ones', (['(3,)'], {}), '((3,))\n', (3290, 3296), True, 'import numpy as np\n'), ((3702, 3712), 'numpy.ones', 'np.ones', (['(5)'], {}), '(5)\n', (3709, 3712), True, 'import numpy as np\n'), ((358, 378), 'tensorflow.compat.v1.Graph', 'tf.compat.v1.Graph', ([], {}), '()\n', (376, 378), True, 'import tensorflow as tf\n'), ((544, 555), 'tensornetwork.contract', 'contract', (['e'], {}), '(e)\n', (552, 555), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((776, 796), 'tensorflow.compat.v1.Graph', 'tf.compat.v1.Graph', ([], {}), '()\n', (794, 796), True, 'import tensorflow as tf\n'), ((838, 849), 'tensorflow.ones', 'tf.ones', (['(10)'], {}), '(10)\n', (845, 849), True, 'import tensorflow as tf\n'), ((975, 986), 'tensornetwork.contract', 'contract', (['e'], {}), '(e)\n', (983, 986), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((1573, 1584), 'tensornetwork.contract', 'contract', (['e'], {}), '(e)\n', (1581, 1584), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((2131, 2155), 'tensornetwork.contract_between', 'contract_between', (['n1', 'n2'], {}), '(n1, n2)\n', (2147, 2155), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((3515, 3526), 'tensornetwork.contract', 'contract', (['e'], {}), '(e)\n', (3523, 3526), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((2639, 2668), 'tensornetwork.flatten_edges_between', 'flatten_edges_between', (['n1', 'n2'], {}), '(n1, n2)\n', (2660, 2668), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n'), ((3075, 3104), 'tensornetwork.flatten_edges_between', 'flatten_edges_between', (['n1', 'n1'], {}), '(n1, n1)\n', (3096, 3104), False, 'from tensornetwork import contract, connect, flatten_edges_between, contract_between, Node\n')]
|
import os
import sys
import glob
import numpy as np
import tensorflow as tf
import scipy
import scipy.io
import keras
from keras.models import Model, Sequential
from keras.layers import *
from keras.optimizers import Adam
from keras import regularizers
from keras import backend as K
from keras.utils import to_categorical
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import LearningRateScheduler
from Benchmark import Benchmark
from Config import MODEL_PARAMS_DIR
class ResNet50(Benchmark):
def buildModel(self):
def identity_block(input_tensor, kernel_size, filters, stage, block):
filters1, filters2, filters3 = filters
bn_axis = 1
x = Conv2D(filters1, (1, 1))(input_tensor)
x = BatchNormalization(axis=bn_axis)(x)
x = Activation('relu')(x)
x = Conv2D(filters2, kernel_size,
padding='same')(x)
x = BatchNormalization(axis=bn_axis)(x)
x = Activation('relu')(x)
x = Conv2D(filters3, (1, 1))(x)
x = BatchNormalization(axis=bn_axis)(x)
x = add([x, input_tensor])
x = Activation('relu')(x)
return x
def conv_block(input_tensor,
kernel_size,
filters,
stage,
block,
strides=(2, 2)):
filters1, filters2, filters3 = filters
bn_axis = 1
x = Conv2D(filters1, (1, 1), strides=strides)(input_tensor)
x = BatchNormalization(axis=bn_axis)(x)
x = Activation('relu')(x)
x = Conv2D(filters2, kernel_size, padding='same')(x)
x = BatchNormalization(axis=bn_axis)(x)
x = Activation('relu')(x)
x = Conv2D(filters3, (1, 1))(x)
x = BatchNormalization(axis=bn_axis)(x)
shortcut = Conv2D(filters3, (1, 1), strides=strides)(input_tensor)
shortcut = BatchNormalization(
axis=bn_axis)(shortcut)
x = add([x, shortcut])
x = Activation('relu')(x)
return x
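        # identity_block adds the raw block input back onto the residual branch;
        # conv_block additionally projects the shortcut with a strided 1x1 Conv2D
        # so both tensors match in shape when downsampling.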
img_input = Input(shape=(3, 224, 224))
bn_axis = 1
x = ZeroPadding2D((3, 3))(img_input)
x = Conv2D(64, (7, 7), strides=(2, 2))(x)
# x = BatchNormalization(axis=bn_axis)(x)
x = Activation('relu')(x)
x = MaxPooling2D((3, 3), strides=(2, 2))(x)
x = BatchNormalization(axis=bn_axis)(x)
x = conv_block(x, 3, [64, 64, 256], stage=2, block='a', strides=(1, 1))
x = identity_block(x, 3, [64, 64, 256], stage=2, block='b')
x = identity_block(x, 3, [64, 64, 256], stage=2, block='c')
x = conv_block(x, 3, [128, 128, 512], stage=3, block='a')
x = identity_block(x, 3, [128, 128, 512], stage=3, block='b')
x = identity_block(x, 3, [128, 128, 512], stage=3, block='c')
x = identity_block(x, 3, [128, 128, 512], stage=3, block='d')
x = conv_block(x, 3, [256, 256, 1024], stage=4, block='a')
x = identity_block(x, 3, [256, 256, 1024], stage=4, block='b')
x = identity_block(x, 3, [256, 256, 1024], stage=4, block='c')
x = identity_block(x, 3, [256, 256, 1024], stage=4, block='d')
x = identity_block(x, 3, [256, 256, 1024], stage=4, block='e')
x = identity_block(x, 3, [256, 256, 1024], stage=4, block='f')
x = conv_block(x, 3, [512, 512, 2048], stage=5, block='a')
x = identity_block(x, 3, [512, 512, 2048], stage=5, block='b')
x = identity_block(x, 3, [512, 512, 2048], stage=5, block='c')
x = AveragePooling2D((7, 7))(x)
x = Flatten()(x)
x = Dense(1000)(x)
x = Activation('softmax')(x)
model = Model(img_input, x)
return model
def data_preprocess(self):
X_train, y_train = None, None
X_test = np.fromfile(MODEL_PARAMS_DIR + '/resnet50_imagenet/test_input.bin', dtype=np.float32)
X_test = X_test.reshape((-1, 3, 224, 224))
y_test = np.fromfile(MODEL_PARAMS_DIR + '/resnet50_imagenet/test_labels.bin', dtype=np.uint32)
X_tuner = np.fromfile(MODEL_PARAMS_DIR + '/resnet50_imagenet/tune_input.bin', dtype=np.float32)
X_tuner = X_tuner.reshape((-1, 3, 224, 224))
y_tuner = np.fromfile(MODEL_PARAMS_DIR + '/resnet50_imagenet/tune_labels.bin', dtype=np.uint32)
return X_train, y_train, X_test, y_test, X_tuner, y_tuner
def trainModel(self, model):
assert False, "ImageNet training not supported - use Pretrained weights"
if __name__ == '__main__':
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
# Changing to NCHW format
K.set_image_data_format('channels_first')
### Parameters specific to each benchmark
reload_dir = MODEL_PARAMS_DIR + '/resnet50_imagenet/'
keras_model_file = MODEL_PARAMS_DIR + '/keras/resnet50_imagenet.h5'
data_dir = 'data/resnet50_imagenet/'
src_dir = 'src/resnet50_imagenet_src/'
num_classes = 1000
batch_size = 50
ResNet50 = ResNet50('ResNet50_imagenet', reload_dir, keras_model_file, data_dir, src_dir, num_classes, batch_size)
ResNet50.exportToHPVM(sys.argv)
|
[
"numpy.fromfile",
"keras.models.Model",
"keras.backend.set_image_data_format"
] |
[((4799, 4840), 'keras.backend.set_image_data_format', 'K.set_image_data_format', (['"""channels_first"""'], {}), "('channels_first')\n", (4822, 4840), True, 'from keras import backend as K\n'), ((3827, 3846), 'keras.models.Model', 'Model', (['img_input', 'x'], {}), '(img_input, x)\n', (3832, 3846), False, 'from keras.models import Model, Sequential\n'), ((3978, 4068), 'numpy.fromfile', 'np.fromfile', (["(MODEL_PARAMS_DIR + '/resnet50_imagenet/test_input.bin')"], {'dtype': 'np.float32'}), "(MODEL_PARAMS_DIR + '/resnet50_imagenet/test_input.bin', dtype=\n np.float32)\n", (3989, 4068), True, 'import numpy as np\n'), ((4133, 4223), 'numpy.fromfile', 'np.fromfile', (["(MODEL_PARAMS_DIR + '/resnet50_imagenet/test_labels.bin')"], {'dtype': 'np.uint32'}), "(MODEL_PARAMS_DIR + '/resnet50_imagenet/test_labels.bin', dtype=\n np.uint32)\n", (4144, 4223), True, 'import numpy as np\n'), ((4246, 4336), 'numpy.fromfile', 'np.fromfile', (["(MODEL_PARAMS_DIR + '/resnet50_imagenet/tune_input.bin')"], {'dtype': 'np.float32'}), "(MODEL_PARAMS_DIR + '/resnet50_imagenet/tune_input.bin', dtype=\n np.float32)\n", (4257, 4336), True, 'import numpy as np\n'), ((4404, 4494), 'numpy.fromfile', 'np.fromfile', (["(MODEL_PARAMS_DIR + '/resnet50_imagenet/tune_labels.bin')"], {'dtype': 'np.uint32'}), "(MODEL_PARAMS_DIR + '/resnet50_imagenet/tune_labels.bin', dtype=\n np.uint32)\n", (4415, 4494), True, 'import numpy as np\n')]
|
from resource import *
import psutil
from collections import defaultdict
# FILE_IN_PATH = 'K3241.K03200DV.D00422.L00001'
FILE_IN_PATH = '/Users/cadu/Downloads/datasets/dadosabertos_CNPjs/K3241.K03200DV.D00422.L00001'
FILE_OUT_PATH = 'output.txt'
# head
# start position within the line and the SIZE of each record field
# ini = acc(all previous positions) + pos_initial
# position 1 is '0' for python strings; the field sizes stay as-is.
LINE_HEAD_0_METRICS = ((0), (1, 16, 11, 8, 8, 1155, 1)) # tuple: pos_initial, field sizes
# registration data
LINE_DETAL_1_METRICS = ((0), (
1, 1, 1, 14, 1, 150, 55, 2, 8, 2, 55, 3, 70, 4, 8, 7, 20, 60, 6, 156,
50, 8, 2, 4, 50, 12, 4, 8, 12, 4, 8, 12, 4, 8, 115, 2, 14, 2, 1, 8, 8,
1, 23, 8, 243, 1)
)
# partner (socio) data
LINE_SOCIO_2_METRICS = ((0), (1, 1, 1, 14, 1, 150, 14, 2, 5, 8, 3, 70, 11, 60, 2, 855, 1))
# CNAE data
LINE_CNAES_6_METRICS = ((0), (1, 1, 1, 14, 693, 489, 1))
ROWTYPE_HEAD = '0'
ROWTYPE_DACAD = '1'
ROWTYPE_SOCIO = '2'
ROWTYPE_CNAES = '6'
def get_row_data_list(row):
def _recorta(row, row_metrics):
acc = row_metrics[0]
l = []
        for width in row_metrics[1]:
            l.append(row[acc:acc + width].strip())
            acc = acc + width
return (";".join(l)) + '\n'
if row[0] == ROWTYPE_HEAD:
return _recorta(row, LINE_HEAD_0_METRICS)
if row[0] == ROWTYPE_DACAD:
return _recorta(row, LINE_DETAL_1_METRICS)
if row[0] == ROWTYPE_SOCIO:
return _recorta(row, LINE_SOCIO_2_METRICS)
if row[0] == ROWTYPE_CNAES:
return _recorta(row, LINE_CNAES_6_METRICS)
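# Illustrative check (hypothetical row): the header field widths sum to
# 1 + 16 + 11 + 8 + 8 + 1155 + 1 = 1200, so a well-formed type-'0' record is a
# 1200-character fixed-width line, e.g.:
#   get_row_data_list('0' + 'X' * 1199)  # -> '0;XXXXXXXXXXXXXXXX;...\n'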
def main():
linhas_lidas = 0
linhas_gravadas = 0
with open(FILE_IN_PATH, 'rt',encoding="latin-1") as f, open(FILE_OUT_PATH, 'w') as fo:
for row in f:
linhas_lidas += 1
try:
fo.write(get_row_data_list(row))
linhas_gravadas += 1
            except Exception:
                print(f'FAIL at |{row}|')
    print(f'Lines read: {linhas_lidas}')
    print(f'Lines written: {linhas_gravadas}')
if __name__ == "__main__":
import time
start_time = time.time()
main()
print(f'----------- ESTATS-----------------------')
print(f'Python seconds: {time.time()-start_time}')
print(f'Python CPU: {getrusage(RUSAGE_SELF)}')
print (f'PSUTIL CPU: {psutil.cpu_percent()}')
print (f'PSUTIL VitMem: {psutil.virtual_memory()[2]}')
print(f'----------- ESTATS END-----------------------')
|
[
"psutil.virtual_memory",
"psutil.cpu_percent",
"time.time"
] |
[((2448, 2459), 'time.time', 'time.time', ([], {}), '()\n', (2457, 2459), False, 'import time\n'), ((2659, 2679), 'psutil.cpu_percent', 'psutil.cpu_percent', ([], {}), '()\n', (2677, 2679), False, 'import psutil\n'), ((2556, 2567), 'time.time', 'time.time', ([], {}), '()\n', (2565, 2567), False, 'import time\n'), ((2712, 2735), 'psutil.virtual_memory', 'psutil.virtual_memory', ([], {}), '()\n', (2733, 2735), False, 'import psutil\n')]
|
#!/usr/bin/python3
from apiclient.http import MediaFileUpload
from apiclient.errors import HttpError
from apiclient.discovery import build
import time
import sys
import random
import os
import httplib2
import json
import boto3
import requests
try:
import httplib
except ImportError: # python3 compatibility
from http import client
httplib = client
from oauth2client import client
from oauth2client import file
from oauth2client import tools
class cmd_flags(object):
"""
Used to provide command-line level authentication rather than
working through a web browser.
"""
def __init__(self):
self.auth_host_name = 'localhost'
self.auth_host_port = [8080, 8090]
self.logging_level = 'ERROR'
self.noauth_local_webserver = True
class ConnectYoutubeUploader:
"""
The ConnectYoutubeUploader enabled a video to be downloaded from s3 based on a session_id
and uploaded to YouTube with the title/descriptions populated based on data from the SchedDataInterface
module.
Attributes
----------
    secrets_dir : string
        The directory containing the OAuth secrets files e.g. secrets/
    client_secrets_file_name : string
        The name of the client secrets JSON file used to authenticate
        requests to the YouTube Data API.
Methods
-------
upload()
Uploads a local .mp4 video to YouTube and adds video metadata based on the data
provided by the SchedDataInterface.
"""
def __init__(self, secrets_dir, client_secrets_file_name):
# Explicitly tell the underlying HTTP transport library not to retry, since
# we are handling retry logic ourselves.
httplib2.RETRIES = 1
# Always retry when these exceptions are raised.
self.RETRIABLE_EXCEPTIONS = (httplib2.HttpLib2Error, IOError, httplib.NotConnected,
httplib.IncompleteRead, httplib.ImproperConnectionState,
httplib.CannotSendRequest, httplib.CannotSendHeader,
httplib.ResponseNotReady, httplib.BadStatusLine)
# Always retry when an apiclient.errors.HttpError with one of these status
# codes is raised.
self.RETRIABLE_STATUS_CODES = [500, 502, 503, 504]
# Maximum number of times to retry before giving up.
self.MAX_RETRIES = 10
# The secrets secrets_directory
self.SECRETS_DIRECTORY = secrets_dir
# The clients secrets file to use when authenticating our requests to the YouTube Data API
self.CLIENT_SECRETS_FILE = client_secrets_file_name
# This variable defines a message to display if the CLIENT_SECRETS_FILE is
# missing.
self.MISSING_CLIENT_SECRETS_MESSAGE = ""
# WARNING: Please configure OAuth 2.0
# To make this sample run you will need to populate the client_secrets.json file
# found at:
# %s
# with information from the {{ Cloud Console }}
# {{ https://cloud.google.com/console }}
# For more information about the client_secrets.json file format, please visit:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# """ % os.path.abspath(os.path.join(os.path.dirname(__file__),
# self.CLIENT_SECRETS_FILE))
# This OAuth 2.0 access scope allows an application to upload files to the
# authenticated user's YouTube channel, but doesn't allow other types of access.
self.YOUTUBE_UPLOAD_SCOPE = ['https://www.googleapis.com/auth/youtube']
# Name of the API service
self.YOUTUBE_API_SERVICE_NAME = 'youtube'
# Version of the YouTube API
self.YOUTUBE_API_VERSION = 'v3'
# Privacy statuses we can use to set on YouTube videos
self.VALID_PRIVACY_STATUSES = ('public', 'private', 'unlisted')
# The ID of the playlist for the current Connect
# In the future this playlist ID should be retrieved dynamically based on the
# connect code
self.playlist_id = "PLKZSArYQptsOzc0kBoWyVSC3f0sHbJdBK"
# Get the authenticated service to use in requests to the API
self.service = self.get_authenticated_service()
# Authorize the request and store authorization credentials.
def get_authenticated_service(self):
"""
Gets an authenticated service object for requests to the
YouTube Data API
"""
store = file.Storage(self.SECRETS_DIRECTORY +
"connect_youtube_uploader-oauth2.json")
creds = store.get()
if creds is None or creds.invalid:
flow = client.flow_from_clientsecrets(self.SECRETS_DIRECTORY + self.CLIENT_SECRETS_FILE,
scope=self.YOUTUBE_UPLOAD_SCOPE,
message=self.MISSING_CLIENT_SECRETS_MESSAGE)
creds = tools.run_flow(flow, store, cmd_flags())
return build(self.YOUTUBE_API_SERVICE_NAME, self.YOUTUBE_API_VERSION,
http=creds.authorize(httplib2.Http()))
def get_video_id_based_on_session_id(self, session_id):
"""
Retrieve a video id of a YouTube video based on a session_id
"""
current_videos = self.get_current_youtube_videos_based_on_string(
session_id)
if len(current_videos) == 1:
return current_videos[0][1]
else:
return False
def download_video(self, video_url, output_folder):
"""Downloads a video from video_url and outputs to output_path"""
response = requests.get(video_url, stream=True)
filename = os.path.split(video_url)[1]
output_path = output_folder + filename
if os.path.exists(output_folder) != True:
print("Creating {}".format(output_folder))
os.makedirs(output_folder)
print("Downloading {} to {}".format(filename, output_folder))
with open(output_path, 'wb') as f:
for chunk in response.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
return output_path
def update_video_status(self, video_id, status):
"""
This method updates the status of a video based on the video_id and status provided.
"""
# Call the API's videos.list method to retrieve the video resource.
# Get the current video details
videos_list_response = self.service.videos().list(
id=video_id,
part='status'
).execute()
# If the response does not contain an array of 'items' then the video was
# not found.
if not videos_list_response['items']:
return False
# Since the request specified a video ID, the response only contains one
# video resource. This code extracts the snippet from that resource.
video_list_status = videos_list_response['items'][0]['status']
# Set the privacy status of the video
if status:
video_list_status['privacyStatus'] = status
# Update the video resource by calling the videos.update() method.
self.service.videos().update(
part='status',
body=dict(
status=video_list_status,
id=video_id
)).execute()
return True
def get_current_youtube_videos_based_on_string(self, string):
"""
Gets the current videos on YouTube that contain the specified string in
in the title or description
"""
# Get the channels on Youtube and their ID's i.e uploads
channels_response = self.service.channels().list(
mine=True,
part="contentDetails"
).execute()
# From the API response, extract the playlist ID that identifies the list
# of videos uploaded to the authenticated user's channel.
youtube_uploads_id = channels_response["items"][0]["contentDetails"]["relatedPlaylists"]["uploads"]
# Retrieve the list of videos uploaded to the authenticated user's channel.
playlistitems_list_request = self.service.playlistItems().list(
playlistId=youtube_uploads_id,
part="snippet",
maxResults=50
)
videos = []
while playlistitems_list_request:
playlistitems_list_response = playlistitems_list_request.execute()
# Print information about each video.
for playlist_item in playlistitems_list_response["items"]:
title = playlist_item["snippet"]["title"]
video_id = playlist_item["snippet"]["resourceId"]["videoId"]
                if string.lower() in title.lower():
print("%s (%s)" % (title, video_id))
videos.append([title, video_id])
playlistitems_list_request = self.service.playlistItems().list_next(
playlistitems_list_request, playlistitems_list_response)
if len(videos) > 0:
return videos
else:
return False
def build_resource(self, properties):
"""
# Build a resource based on a list of properties given as key-value pairs.
# Leave properties with empty values out of the inserted resource.
"""
resource = {}
for p in properties:
# Given a key like "snippet.title", split into "snippet" and "title", where
# "snippet" will be an object and "title" will be a property in that object.
prop_array = p.split('.')
ref = resource
for pa in range(0, len(prop_array)):
is_array = False
key = prop_array[pa]
# For properties that have array values, convert a name like
# "snippet.tags[]" to snippet.tags, and set a flag to handle
# the value as an array.
if key[-2:] == '[]':
key = key[0:len(key)-2:]
is_array = True
if pa == (len(prop_array) - 1):
# Leave properties without values out of inserted resource.
if properties[p]:
if is_array:
ref[key] = properties[p].split(',')
else:
ref[key] = properties[p]
elif key not in ref:
# For example, the property is "snippet.title", but the resource does
# not yet have a "snippet" object. Create the snippet object here.
# Setting "ref = ref[key]" means that in the next time through the
# "for pa in range ..." loop, we will be setting a property in the
# resource's "snippet" object.
ref[key] = {}
ref = ref[key]
else:
# For example, the property is "snippet.description", and the resource
# already has a "snippet" object.
ref = ref[key]
return resource
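    # Worked example (hypothetical values):
    #   build_resource({'snippet.playlistId': 'PL123',
    #                   'snippet.resourceId.videoId': 'abc'})
    # returns {'snippet': {'playlistId': 'PL123',
    #                      'resourceId': {'videoId': 'abc'}}}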
def upload_video(self, options):
"""
Takes a dictionary of all video details e.g
{
"file":"/home/kyle.kirkby/Documents/Marketing/Connect/YVR18/videos/yvr18-100k.mp4",
"title": "YVR18-100k: Opening Keynote by <NAME>",
"description": "The Opening Keynote by <NAME> at Linaro Connect Vancouver 2018",
"keywords": "Keynote,yvr18,Open Source,Arm, Vancouver",
"category": "28",
"privacyStatus": "private"
}
"""
request = self.get_upload_request(options)
# Output Details while uploading
video_id = self.resumable_upload(request, options["title"])
return video_id
def add_video_to_playlist(self, playlistId, videoId):
"""Adds a video(videoId) to a playlist(playlistId)"""
# Create the body of the request
bodyData = {'snippet.playlistId': playlistId,
'snippet.resourceId.kind': 'youtube#video',
'snippet.resourceId.videoId': videoId,
'snippet.position': ''}
resource = self.build_resource(bodyData)
add_to_playlist = self.service.playlistItems().insert(
body=resource,
part='snippet'
).execute()
return add_to_playlist
def set_custom_thumbnail(self, local_thumbnail_url, video_id):
"""
Sets the specified custom thumbnail for a given video(video_id)
"""
request = self.service.thumbnails().set(
videoId=video_id,
media_body=MediaFileUpload(local_thumbnail_url)
)
response = request.execute()
return response
def get_upload_request(self, options):
"""
Create the request to initialize the upload of a video.
Takes a service and a dictionary containing the various options
"""
        # Get the YouTube tags from the upload options; the dict documented in
        # upload_video() uses the "keywords" key, so check it first and fall
        # back to "tags"
        keywords = options.get("keywords") or options.get("tags")
        tags = keywords.split(',') if keywords else []
# Create the body of the request
body = dict(
snippet=dict(
title=options["title"][0:70],
description=options["description"],
tags=tags,
categoryId=28
),
status=dict(
privacyStatus=options["privacyStatus"]
)
)
# Call the API's videos.insert method to create and upload the video.
insert_request = self.service.videos().insert(
part=','.join(body.keys()),
body=body,
# The chunksize parameter specifies the size of each chunk of data, in
# bytes, that will be uploaded at a time. Set a higher value for
# reliable connections as fewer chunks lead to faster uploads. Set a lower
# value for better recovery on less reliable connections.
#
# Setting 'chunksize' equal to -1 in the code below means that the entire
# file will be uploaded in a single HTTP request. (If the upload fails,
# it will still be retried where it left off.) This is usually a best
# practice, but if you're using Python older than 2.6 or if you're
# running on App Engine, you should set the chunksize to something like
# 1024 * 1024 (1 megabyte).
media_body=MediaFileUpload(
options["file"], chunksize=-1, resumable=True)
)
return insert_request
def resumable_upload(self, request, title):
"""
Creates a resumable upload
"""
response = None
error = None
retry = 0
while response is None:
try:
print("Uploading {0} file...".format(title))
status, response = request.next_chunk()
if response is not None:
if 'id' in response:
print("Video id '%s' was successfully uploaded." %
response['id'])
video_id = response['id']
return video_id
else:
exit(
"The upload failed with an unexpected response: %s" % response)
except HttpError as e:
if e.resp.status in self.RETRIABLE_STATUS_CODES:
error = "A retriable HTTP error %d occurred:\n%s" % (e.resp.status,
e.content)
else:
raise
except self.RETRIABLE_EXCEPTIONS as e:
error = "A retriable error occurred: %s" % e
if error is not None:
print(error)
retry += 1
if retry > self.MAX_RETRIES:
exit("No longer attempting to retry.")
max_sleep = 2 ** retry
sleep_seconds = random.random() * max_sleep
print("Sleeping %f seconds and then retrying..." %
sleep_seconds)
time.sleep(sleep_seconds)
|
[
"oauth2client.file.Storage",
"httplib2.Http",
"os.makedirs",
"http.client.flow_from_clientsecrets",
"os.path.exists",
"time.sleep",
"random.random",
"apiclient.http.MediaFileUpload",
"requests.get",
"os.path.split"
] |
[((4566, 4643), 'oauth2client.file.Storage', 'file.Storage', (["(self.SECRETS_DIRECTORY + 'connect_youtube_uploader-oauth2.json')"], {}), "(self.SECRETS_DIRECTORY + 'connect_youtube_uploader-oauth2.json')\n", (4578, 4643), False, 'from oauth2client import file\n'), ((5747, 5783), 'requests.get', 'requests.get', (['video_url'], {'stream': '(True)'}), '(video_url, stream=True)\n', (5759, 5783), False, 'import requests\n'), ((4765, 4934), 'http.client.flow_from_clientsecrets', 'client.flow_from_clientsecrets', (['(self.SECRETS_DIRECTORY + self.CLIENT_SECRETS_FILE)'], {'scope': 'self.YOUTUBE_UPLOAD_SCOPE', 'message': 'self.MISSING_CLIENT_SECRETS_MESSAGE'}), '(self.SECRETS_DIRECTORY + self.\n CLIENT_SECRETS_FILE, scope=self.YOUTUBE_UPLOAD_SCOPE, message=self.\n MISSING_CLIENT_SECRETS_MESSAGE)\n', (4795, 4934), False, 'from http import client\n'), ((5803, 5827), 'os.path.split', 'os.path.split', (['video_url'], {}), '(video_url)\n', (5816, 5827), False, 'import os\n'), ((5889, 5918), 'os.path.exists', 'os.path.exists', (['output_folder'], {}), '(output_folder)\n', (5903, 5918), False, 'import os\n'), ((5995, 6021), 'os.makedirs', 'os.makedirs', (['output_folder'], {}), '(output_folder)\n', (6006, 6021), False, 'import os\n'), ((13018, 13054), 'apiclient.http.MediaFileUpload', 'MediaFileUpload', (['local_thumbnail_url'], {}), '(local_thumbnail_url)\n', (13033, 13054), False, 'from apiclient.http import MediaFileUpload\n'), ((14925, 14987), 'apiclient.http.MediaFileUpload', 'MediaFileUpload', (["options['file']"], {'chunksize': '(-1)', 'resumable': '(True)'}), "(options['file'], chunksize=-1, resumable=True)\n", (14940, 14987), False, 'from apiclient.http import MediaFileUpload\n'), ((16675, 16700), 'time.sleep', 'time.sleep', (['sleep_seconds'], {}), '(sleep_seconds)\n', (16685, 16700), False, 'import time\n'), ((5207, 5222), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (5220, 5222), False, 'import httplib2\n'), ((16527, 16542), 'random.random', 'random.random', ([], {}), '()\n', (16540, 16542), False, 'import random\n')]
|
#
# Copyright 2021 Red Hat Inc.
# SPDX-License-Identifier: Apache-2.0
#
"""Azure Client Configuration."""
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.costmanagement import CostManagementClient
from azure.mgmt.resource import ResourceManagementClient
from azure.mgmt.storage import StorageManagementClient
from azure.storage.blob import BlobServiceClient
from msrestazure.azure_cloud import AZURE_CHINA_CLOUD
from msrestazure.azure_cloud import AZURE_GERMAN_CLOUD
from msrestazure.azure_cloud import AZURE_PUBLIC_CLOUD
from msrestazure.azure_cloud import AZURE_US_GOV_CLOUD
class AzureClientFactory:
"""Azure client factory.
This class holds the Azure credentials and can create Service Clients for
querying the Azure Service APIs.
Args:
subscription_id (str): Subscription ID
tenant_id (str): Tenant ID for your Azure Subscription
client_id (str): Service Principal Application ID
client_secret (str): Service Principal Password
cloud (str): Cloud selector, must be one of ['china', 'germany', 'public', 'usgov']
"""
def __init__(self, subscription_id, tenant_id, client_id, client_secret, cloud="public"):
"""Constructor."""
self._subscription_id = subscription_id
clouds = {
"china": AZURE_CHINA_CLOUD,
"germany": AZURE_GERMAN_CLOUD,
"public": AZURE_PUBLIC_CLOUD,
"usgov": AZURE_US_GOV_CLOUD,
}
        self._credentials = ServicePrincipalCredentials(
            client_id=client_id, secret=client_secret, tenant=tenant_id,
            cloud_environment=clouds.get(cloud, AZURE_PUBLIC_CLOUD)
        )
@property
def credentials(self):
"""Service Principal Credentials property."""
return self._credentials
@property
def cost_management_client(self):
"""Get cost management client with subscription and credentials."""
return CostManagementClient(self.credentials, self.subscription_id)
@property
def resource_client(self):
"""Return a resource client."""
return ResourceManagementClient(self.credentials, self.subscription_id)
@property
def storage_client(self):
"""Get storage client with subscription and credentials."""
return StorageManagementClient(self.credentials, self.subscription_id)
@property
def subscription_id(self):
"""Subscription ID property."""
return self._subscription_id
def cloud_storage_account(self, resource_group_name, storage_account_name):
"""Get a BlobServiceClient."""
storage_account_keys = self.storage_client.storage_accounts.list_keys(
resource_group_name, storage_account_name
)
# Add check for keys and a get value
key = storage_account_keys.keys[0]
connect_str = (
f"DefaultEndpointsProtocol=https;"
f"AccountName={storage_account_name};"
f"AccountKey={key.value};"
f"EndpointSuffix=core.windows.net"
)
return BlobServiceClient.from_connection_string(connect_str)
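# Illustrative usage (all values below are placeholders):
#   factory = AzureClientFactory('sub-id', 'tenant-id', 'client-id', 'secret')
#   blob_service = factory.cloud_storage_account('my-rg', 'mystorageaccount')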
|
[
"azure.mgmt.resource.ResourceManagementClient",
"azure.mgmt.storage.StorageManagementClient",
"azure.storage.blob.BlobServiceClient.from_connection_string",
"azure.mgmt.costmanagement.CostManagementClient"
] |
[((1945, 2005), 'azure.mgmt.costmanagement.CostManagementClient', 'CostManagementClient', (['self.credentials', 'self.subscription_id'], {}), '(self.credentials, self.subscription_id)\n', (1965, 2005), False, 'from azure.mgmt.costmanagement import CostManagementClient\n'), ((2107, 2171), 'azure.mgmt.resource.ResourceManagementClient', 'ResourceManagementClient', (['self.credentials', 'self.subscription_id'], {}), '(self.credentials, self.subscription_id)\n', (2131, 2171), False, 'from azure.mgmt.resource import ResourceManagementClient\n'), ((2300, 2363), 'azure.mgmt.storage.StorageManagementClient', 'StorageManagementClient', (['self.credentials', 'self.subscription_id'], {}), '(self.credentials, self.subscription_id)\n', (2323, 2363), False, 'from azure.mgmt.storage import StorageManagementClient\n'), ((3072, 3125), 'azure.storage.blob.BlobServiceClient.from_connection_string', 'BlobServiceClient.from_connection_string', (['connect_str'], {}), '(connect_str)\n', (3112, 3125), False, 'from azure.storage.blob import BlobServiceClient\n')]
|
import pytest
from year_2020.day23.crab_cups import get_crab_cups
TEST_INPUT = "389125467"
@pytest.mark.parametrize("num_moves, expected", [(10, "92658374"), (100, "67384529")])
def test_get_crab_cups(num_moves, expected):
assert get_crab_cups(TEST_INPUT, num_moves=num_moves) == expected
@pytest.mark.slow
def test_get_crab_cups_part_two():
assert (
get_crab_cups(TEST_INPUT, num_moves=10_000_000, is_part_two=True)
== 149245887792
)
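# Note: the custom 'slow' marker must be registered with pytest (e.g. in
# pytest.ini or pyproject.toml) to avoid PytestUnknownMarkWarning; a minimal
# sketch:
#   [pytest]
#   markers =
#       slow: marks tests as slow to run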
|
[
"pytest.mark.parametrize",
"year_2020.day23.crab_cups.get_crab_cups"
] |
[((96, 185), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""num_moves, expected"""', "[(10, '92658374'), (100, '67384529')]"], {}), "('num_moves, expected', [(10, '92658374'), (100,\n '67384529')])\n", (119, 185), False, 'import pytest\n'), ((238, 284), 'year_2020.day23.crab_cups.get_crab_cups', 'get_crab_cups', (['TEST_INPUT'], {'num_moves': 'num_moves'}), '(TEST_INPUT, num_moves=num_moves)\n', (251, 284), False, 'from year_2020.day23.crab_cups import get_crab_cups\n'), ((373, 436), 'year_2020.day23.crab_cups.get_crab_cups', 'get_crab_cups', (['TEST_INPUT'], {'num_moves': '(10000000)', 'is_part_two': '(True)'}), '(TEST_INPUT, num_moves=10000000, is_part_two=True)\n', (386, 436), False, 'from year_2020.day23.crab_cups import get_crab_cups\n')]
|
from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings
import urlparse
class OsfMeetingsAdapter(DefaultAccountAdapter):
def get_login_redirect_url(self, request):
try:
refererUrl = request.environ['HTTP_REFERER']
nextUrl = urlparse.parse_qs(
urlparse.urlparse(refererUrl).query)['next'][0]
return nextUrl
except KeyError:
return settings.OSF_MEETINGS_HOME_URL
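# Worked example (hypothetical referer): with
#   HTTP_REFERER = 'https://example.org/login?next=/dashboard/'
# urlparse.urlparse(refererUrl).query == 'next=/dashboard/' and
# urlparse.parse_qs('next=/dashboard/') == {'next': ['/dashboard/']},
# so the method returns '/dashboard/'.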
|
[
"urlparse.urlparse"
] |
[((333, 362), 'urlparse.urlparse', 'urlparse.urlparse', (['refererUrl'], {}), '(refererUrl)\n', (350, 362), False, 'import urlparse\n')]
|
"""
This script is written and tested in Blender 2.83.1 & BlenderGIS 1.0
"""
import bpy, bmesh, json, os, re
from pathlib import Path
def load_data(data_file):
with open(data_file) as f:
data = json.load(f)
f.close()
return data
def clean_mesh(obj_name):
# Clean up the mesh by delete some rogue vertices
bpy.ops.object.mode_set(mode='EDIT', toggle=False)
obj = bpy.data.objects[obj_name]
me = obj.data
wm = obj.matrix_world
bpy.context.view_layer.objects.active = obj
bm = bmesh.from_edit_mesh(me)
bm.select_mode = {'VERT'}
for v in bm.verts:
global_v = wm @ v.co # calculate global coordinates for the vertex
v.select = ( global_v.x < -20 and global_v.y <-16)
bm.select_flush_mode()
me.update()
bpy.ops.mesh.delete()
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS', center='MEDIAN')
def add_geo_obj(shp_file):
# the objects & names in collection 'geo' will be referenced throughout the script
try:
bpy.ops.importgis.shapefile(filepath=shp_file,
fieldExtrudeName="base",
extrusionAxis='Z',
separateObjects=True,
fieldObjName="postcode"
)
except AttributeError:
print("Cannot seem to find Blender GIS addon. Make sure it's installed and enabled.")
for obj in bpy.data.collections['geo'].all_objects:
clean_mesh(obj.name)
bpy.ops.object.select_all(action='DESELECT')
bpy.data.objects['59'].select_set(True)
bpy.ops.object.delete()
def add_material(obj_name):
gradient_color0 = (0.05,0.05,0.05,1) # dark grey
gradient_color1 = (0.1,2,0,1) # green, also control emission strength, that's why green is > 1
bpy.context.view_layer.objects.active = bpy.data.objects[obj_name]
obj = bpy.data.objects[obj_name]
bpy.context.view_layer.objects.active = obj
mat = bpy.data.materials.new(name=obj_name)
obj.data.materials.append(mat)
mat.use_nodes = True
bpy.context.object.active_material.blend_method = 'BLEND'
nodes = mat.node_tree.nodes
links = mat.node_tree.links
output = nodes.get('Material Output')
output.location = (300,0)
bsdf = nodes.get('Principled BSDF')
bsdf.location = (0,0)
bsdf.inputs[18].default_value = 0.5 # alpha
bsdf.inputs[15].default_value = 1.0 # transmission
links.new(bsdf.outputs[0],output.inputs[0]) # BSDF to material surface
# add color ramp as input for main shader to get a color gradiant
color_ramp = nodes.new("ShaderNodeValToRGB")
color_ramp.location = (-300,0)
links.new(color_ramp.outputs[0],bsdf.inputs[0]) # color ramp to base color
links.new(color_ramp.outputs[0],bsdf.inputs[17]) # color ramp to emission color/strength
color_ramp.color_ramp.elements[0].color = gradient_color0
color_ramp.color_ramp.elements[1].color = gradient_color1
# the value node will be used for inserting keyframes
color_v = nodes.new("ShaderNodeValue")
color_v.location = (-600,0)
links.new(color_v.outputs[0],color_ramp.inputs[0]) # value node to ramp's color
def add_material_all(collection):
for obj in bpy.data.collections[collection].all_objects:
add_material(obj.name)
def add_shape_key(obj_name,max_height):
obj = bpy.data.objects[obj_name]
me = obj.data
bpy.context.view_layer.objects.active = obj
bpy.ops.object.shape_key_add(from_mix=False) # Base Key
bpy.ops.object.shape_key_add(from_mix=False) # Key 1
bpy.context.object.active_shape_key_index = 1
bpy.data.shape_keys["Key"].name = obj_name
bpy.ops.object.mode_set(mode='EDIT', toggle=False)
bm = bmesh.from_edit_mesh(me)
bm.select_mode = {'VERT'}
for v in bm.verts:
if v.co.z > 0: #since the base is at 0, this will effectively select top faces
v.co.z = max_height
bm.select_flush_mode()
me.update()
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
def add_shape_key_all(max_height,collection):
for obj in bpy.data.collections[collection].all_objects:
add_shape_key(obj.name,max_height=max_height)
def animate_obj_all(frame_step,data):
data_len = len(data['all']['date'])
bpy.context.scene.frame_end = data_len*frame_step
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
bpy.context.view_layer.objects.active = None
for keyframe_id in range(data_len):
for obj in bpy.data.collections['geo'].all_objects:
height = data[obj.name[:2]]['height'][keyframe_id]
# height values are scaled between 0 & 1, and are used for inserting keyframes
shapekey = bpy.data.shape_keys[obj.name].key_blocks["Key 1"]
shapekey.value = height
shapekey.keyframe_insert(data_path="value", frame=frame_step*keyframe_id)
def animate_material_all(frame_step,data):
data_len = len(data['all']['date'])
bpy.context.scene.frame_end = data_len*frame_step
for keyframe_id in range(data_len):
for mat in bpy.data.materials:
if mat.name in [obj.name for obj in bpy.data.collections['geo'].all_objects]:
color = data[mat.name]['color'][keyframe_id]
color_value = mat.node_tree.nodes["Value"].outputs[0]
color_value.default_value = color
color_value.keyframe_insert('default_value',frame=frame_step*keyframe_id)
def add_camera(lens):
cam = bpy.data.cameras.new("Camera")
cam.lens = lens
cam_obj = bpy.data.objects.new("Camera", cam)
bpy.context.scene.collection.objects.link(cam_obj)
def animate_camera(frame_step,data):
data_len = len(data['all']['date'])
camera = bpy.data.objects['Camera']
# pan down camera a bit at first, then a lot in the end
camera.location = (0,4,40)
camera.rotation_euler = (0,0,0)
camera.keyframe_insert(data_path="location", frame=0)
camera.keyframe_insert(data_path="rotation_euler", frame=0)
camera.location = (0,-4.6,40.17)
camera.rotation_euler = (0.175,0,0)
camera.keyframe_insert(data_path="location", frame=int(frame_step*data_len*0.5))
camera.keyframe_insert(data_path="rotation_euler", frame=int(frame_step*data_len*0.5))
camera.location = (0,-19.25,30.57)
camera.rotation_euler = (0.534,0,0)
camera.keyframe_insert(data_path="location", frame=int(frame_step*data_len*0.75))
camera.keyframe_insert(data_path="rotation_euler", frame=int(frame_step*data_len*0.75))
camera.location = (0,-22.69,24.64)
camera.rotation_euler = (0.698,0,0)
camera.keyframe_insert(data_path="location", frame=int(frame_step*data_len))
camera.keyframe_insert(data_path="rotation_euler", frame=int(frame_step*data_len))
def add_bg_plane(size):
# Adds a highly reflective background plane
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
bpy.ops.mesh.primitive_plane_add(size=size,enter_editmode=False,location=(0,0,0))
plane_mat = bpy.data.materials.new(name='plane_mat')
plane_mat.use_nodes = True
output = plane_mat.node_tree.nodes.get('Material Output')
bsdf = plane_mat.node_tree.nodes.get('Principled BSDF')
bsdf.inputs[0].default_value = (0.01,0.01,0.01,1) # base color, black
bsdf.inputs[7].default_value = 0.1 # roughness
plane_mat.node_tree.links.new(bsdf.outputs[0],output.inputs[0]) # bsdf to material surface
bpy.data.objects['Plane'].data.materials.append(plane_mat)
def animate_text(font_file,frame_step,data):
title_loc = (-38.9,24.5,0)
cap1_loc = (-29.784,-9.944,0)
cap2_loc = (-0.6316,-13.728,0)
cap3_loc = (-22.052,-15.814,0)
cap4_loc = (-3.2,-15.914,0)
foot_loc = (-30.4412,-16.75,0)
data_len=len(data['all']['date'])
title_curve = bpy.data.curves.new(type="FONT",name="title curve")
title_curve.extrude = 0.01
title_curve.font = bpy.data.fonts.load(font_file)
title_curve.body = f"""
Growth of Small-scale Solar PVs in Australia
Quantity & Output by Postcode"""
title_obj = bpy.data.objects.new("title", title_curve)
bpy.context.scene.collection.objects.link(title_obj)
title_obj.location = title_loc
title_obj.scale = (2,2,2)
footnote_curve = bpy.data.curves.new(type="FONT",name="footnote curve")
footnote_curve.extrude = 0.01
footnote_curve.font = bpy.data.fonts.load(font_file)
footnote_curve.body = f"""
Height represents install quantity, color represents output. Data Source: Clean Energy Regulator
"""
footnote_obj = bpy.data.objects.new("footnote", footnote_curve)
bpy.context.scene.collection.objects.link(footnote_obj)
footnote_obj.location = foot_loc
footnote_obj.scale = (0.7,0.7,0.7)
caption1_curve = bpy.data.curves.new(type="FONT",name="caption1")
caption1_curve.extrude = 0.01
caption1_curve.font = bpy.data.fonts.load(font_file)
caption1_curve.space_line = 1.6
caption1_obj = bpy.data.objects.new("caption1", caption1_curve)
bpy.context.scene.collection.objects.link(caption1_obj)
caption1_obj.location = cap1_loc
caption1_obj.scale = (1.1,1.2,1.2)
caption2_curve = bpy.data.curves.new(type="FONT",name="caption2")
caption2_curve.extrude = 0.01
caption2_curve.font = bpy.data.fonts.load(font_file)
caption2_obj = bpy.data.objects.new("caption2", caption2_curve)
bpy.context.scene.collection.objects.link(caption2_obj)
caption2_obj.location = cap2_loc
caption2_obj.scale = (2,2.2,2.2)
caption3_curve = bpy.data.curves.new(type="FONT",name="caption3")
caption3_curve.extrude = 0.01
caption3_curve.font = bpy.data.fonts.load(font_file)
caption3_curve.body = """Raising the total power output to"""
caption3_obj = bpy.data.objects.new("caption3", caption3_curve)
bpy.context.scene.collection.objects.link(caption3_obj)
caption3_obj.location = cap3_loc
caption3_obj.scale = (1.1,1.2,1.2)
caption4_curve = bpy.data.curves.new(type="FONT",name="caption4")
caption4_curve.extrude = 0.01
caption4_curve.font = bpy.data.fonts.load(font_file)
caption4_obj = bpy.data.objects.new("caption4", caption4_curve)
bpy.context.scene.collection.objects.link(caption4_obj)
caption4_obj.location = cap4_loc
caption4_obj.scale = (2,2.2,2.2)
# add white static material
font_mat = bpy.data.materials.new(name='font_mat')
font_mat.use_nodes = True
output = font_mat.node_tree.nodes.get('Material Output')
bsdf = font_mat.node_tree.nodes.get('Principled BSDF')
bsdf.inputs[17].default_value = (2,2,2,1) # emission color/strength
font_mat.node_tree.links.new(bsdf.outputs[0],output.inputs[0]) # bsdf to material surface
bpy.data.objects['title'].data.materials.append(font_mat)
bpy.data.objects['caption1'].data.materials.append(font_mat)
bpy.data.objects['footnote'].data.materials.append(font_mat)
# add green animated material
font_green_mat = bpy.data.materials.new(name='font_green_mat')
font_green_mat.use_nodes = True
output_green = font_green_mat.node_tree.nodes.get('Material Output')
bsdf_green = font_green_mat.node_tree.nodes.get('Principled BSDF')
font_green_mat.node_tree.links.new(bsdf_green.outputs[0],output_green.inputs[0]) # bsdf to material surface
color_ramp_font = font_green_mat.node_tree.nodes.new("ShaderNodeValToRGB")
color_ramp_font.location = (-300,0)
font_green_mat.node_tree.links.new(color_ramp_font.outputs[0],bsdf_green.inputs[0]) # ramp to base color
font_green_mat.node_tree.links.new(color_ramp_font.outputs[0],bsdf_green.inputs[17]) # ramp to emission color/strength
color_ramp_font.color_ramp.elements[0].color = (2,2,2,1) # white
color_ramp_font.color_ramp.elements[1].color = (0.1,2,0,1) # green
color_v_font = font_green_mat.node_tree.nodes.new("ShaderNodeValue")
color_v_font.location = (-600,0)
font_green_mat.node_tree.links.new(color_v_font.outputs[0],color_ramp_font.inputs[0]) # value to ramp's color
bpy.data.objects['title'].data.materials.append(font_mat)
bpy.data.objects['caption1'].data.materials.append(font_mat)
bpy.data.objects['caption3'].data.materials.append(font_mat)
bpy.data.objects['footnote'].data.materials.append(font_mat)
bpy.data.objects['caption2'].data.materials.append(font_green_mat)
bpy.data.objects['caption4'].data.materials.append(font_green_mat)
# animate green text, the text turn green linearly
mat_green = bpy.data.materials["font_green_mat"]
color_value = mat_green.node_tree.nodes["Value"].outputs[0]
color_value.default_value = 0
color_value.keyframe_insert('default_value',frame=0)
color_value.default_value = 0.95
color_value.keyframe_insert('default_value',frame=frame_step*data_len)
# update text with frames
def update(self):
caption1 = bpy.data.objects['caption1']
caption2 = bpy.data.objects['caption2']
caption4 = bpy.data.objects['caption4']
frame = bpy.context.scene.frame_current
data_index = int(frame/frame_step)
caption1.location = cap1_loc
caption1.data.body = \
f"""
By {data['all']['date'][data_index]}
The quantity of solar PVs has grown to
"""
caption2.location = cap2_loc
caption2.data.body = f"""{data['all']['install'][data_index]}"""
caption4.location = cap4_loc
caption4.data.body = f"""{data['all']['output'][data_index]} MW"""
if bpy.context.scene.frame_current in range(frame_step*data_len):
bpy.app.handlers.frame_change_post.append(update)
def build_scene(data_file,shp_file,font_file,frame_step,max_height):
data = load_data(data_file=data_file)
# Start scene by deleting all objects
bpy.ops.object.select_all(action='SELECT')
bpy.ops.object.delete(use_global=False)
# Add all objects & animate
add_geo_obj(shp_file=shp_file)
add_material_all(collection='geo')
add_shape_key_all(max_height,collection='geo')
animate_obj_all(frame_step,data)
add_material_all(collection='geo')
animate_material_all(frame_step,data)
add_camera(lens=18)
animate_camera(frame_step,data)
add_bg_plane(size=500)
animate_text(font_file,frame_step,data)
def update_render_setting():
# Tweak the rendering settings
bpy.context.scene.frame_start = 0
bpy.context.scene.render.engine = 'CYCLES'
bpy.context.scene.cycles.use_adaptive_sampling = True
bpy.context.scene.cycles.adaptive_threshold = 0.001
bpy.context.scene.cycles.use_animated_seed = True
bpy.context.scene.cycles.samples = 850
bpy.context.scene.cycles.sample_clamp_direct = 0.2
bpy.context.scene.cycles.sample_clamp_indirect = 10
bpy.context.scene.cycles.blur_glossy = 5
bpy.context.scene.cycles.max_bounces = 4
bpy.context.scene.world.light_settings.use_ambient_occlusion = True
bpy.context.scene.render.image_settings.color_depth = '16'
bpy.context.scene.render.tile_x = 256
bpy.context.scene.render.tile_y = 256
scene = bpy.data.scenes['Scene'].view_layers['View Layer']
scene.cycles.use_denoising = True
# Setup GPU
scene = bpy.context.scene
scene.cycles.device = 'GPU'
prefs = bpy.context.preferences
prefs.addons['cycles'].preferences.get_devices()
cprefs = prefs.addons['cycles'].preferences
print(cprefs)
# Attempt to set GPU device types if available
for compute_device_type in ('CUDA', 'OPENCL', 'NONE'):
try:
cprefs.compute_device_type = compute_device_type
print('Device found',compute_device_type)
break
except TypeError:
pass
# Enable all CPU and GPU devices
for device in cprefs.devices:
if not re.match('intel', device.name, re.I):
print('Activating',device)
device.use = True
if __name__ == '__main__':
frame_step = 4 # the steps between keyframes
max_height = 6
current_dir = Path(bpy.data.filepath).parent # this is where your blend file is saved
data_file = os.path.join(current_dir,'data.json')
shp_file = os.path.join(current_dir,'geo.shp')
# Download the free font at design.ubuntu.com/font/
font_file = os.path.join(current_dir.parent,'resource','UbuntuMono-Regular.ttf')
build_scene(data_file,shp_file,font_file,frame_step,max_height)
update_render_setting()
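# A common way to run this headlessly (assuming a saved .blend file, since the
# script resolves its data paths from bpy.data.filepath):
#   blender --background scene.blend --python this_script.py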
|
[
"bpy.ops.importgis.shapefile",
"pathlib.Path",
"bpy.data.materials.new",
"bpy.ops.mesh.primitive_plane_add",
"os.path.join",
"bpy.ops.mesh.delete",
"bpy.data.fonts.load",
"bpy.ops.object.shape_key_add",
"bpy.data.curves.new",
"bpy.ops.object.delete",
"re.match",
"bmesh.from_edit_mesh",
"bpy.data.objects.new",
"bpy.data.cameras.new",
"bpy.context.scene.collection.objects.link",
"bpy.ops.object.origin_set",
"json.load",
"bpy.ops.object.mode_set",
"bpy.app.handlers.frame_change_post.append",
"bpy.ops.object.select_all"
] |
[((340, 390), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""EDIT"""', 'toggle': '(False)'}), "(mode='EDIT', toggle=False)\n", (363, 390), False, 'import bpy, bmesh, json, os, re\n'), ((534, 558), 'bmesh.from_edit_mesh', 'bmesh.from_edit_mesh', (['me'], {}), '(me)\n', (554, 558), False, 'import bpy, bmesh, json, os, re\n'), ((794, 815), 'bpy.ops.mesh.delete', 'bpy.ops.mesh.delete', ([], {}), '()\n', (813, 815), False, 'import bpy, bmesh, json, os, re\n'), ((829, 881), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""OBJECT"""', 'toggle': '(False)'}), "(mode='OBJECT', toggle=False)\n", (852, 881), False, 'import bpy, bmesh, json, os, re\n'), ((886, 958), 'bpy.ops.object.origin_set', 'bpy.ops.object.origin_set', ([], {'type': '"""ORIGIN_CENTER_OF_MASS"""', 'center': '"""MEDIAN"""'}), "(type='ORIGIN_CENTER_OF_MASS', center='MEDIAN')\n", (911, 958), False, 'import bpy, bmesh, json, os, re\n'), ((1647, 1691), 'bpy.ops.object.select_all', 'bpy.ops.object.select_all', ([], {'action': '"""DESELECT"""'}), "(action='DESELECT')\n", (1672, 1691), False, 'import bpy, bmesh, json, os, re\n'), ((1740, 1763), 'bpy.ops.object.delete', 'bpy.ops.object.delete', ([], {}), '()\n', (1761, 1763), False, 'import bpy, bmesh, json, os, re\n'), ((2113, 2150), 'bpy.data.materials.new', 'bpy.data.materials.new', ([], {'name': 'obj_name'}), '(name=obj_name)\n', (2135, 2150), False, 'import bpy, bmesh, json, os, re\n'), ((3606, 3650), 'bpy.ops.object.shape_key_add', 'bpy.ops.object.shape_key_add', ([], {'from_mix': '(False)'}), '(from_mix=False)\n', (3634, 3650), False, 'import bpy, bmesh, json, os, re\n'), ((3666, 3710), 'bpy.ops.object.shape_key_add', 'bpy.ops.object.shape_key_add', ([], {'from_mix': '(False)'}), '(from_mix=False)\n', (3694, 3710), False, 'import bpy, bmesh, json, os, re\n'), ((3821, 3871), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""EDIT"""', 'toggle': '(False)'}), "(mode='EDIT', toggle=False)\n", (3844, 3871), False, 'import bpy, bmesh, json, os, re\n'), ((3882, 3906), 'bmesh.from_edit_mesh', 'bmesh.from_edit_mesh', (['me'], {}), '(me)\n', (3902, 3906), False, 'import bpy, bmesh, json, os, re\n'), ((4137, 4189), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""OBJECT"""', 'toggle': '(False)'}), "(mode='OBJECT', toggle=False)\n", (4160, 4189), False, 'import bpy, bmesh, json, os, re\n'), ((4502, 4554), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""OBJECT"""', 'toggle': '(False)'}), "(mode='OBJECT', toggle=False)\n", (4525, 4554), False, 'import bpy, bmesh, json, os, re\n'), ((5671, 5701), 'bpy.data.cameras.new', 'bpy.data.cameras.new', (['"""Camera"""'], {}), "('Camera')\n", (5691, 5701), False, 'import bpy, bmesh, json, os, re\n'), ((5736, 5771), 'bpy.data.objects.new', 'bpy.data.objects.new', (['"""Camera"""', 'cam'], {}), "('Camera', cam)\n", (5756, 5771), False, 'import bpy, bmesh, json, os, re\n'), ((5776, 5826), 'bpy.context.scene.collection.objects.link', 'bpy.context.scene.collection.objects.link', (['cam_obj'], {}), '(cam_obj)\n', (5817, 5826), False, 'import bpy, bmesh, json, os, re\n'), ((7042, 7094), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""OBJECT"""', 'toggle': '(False)'}), "(mode='OBJECT', toggle=False)\n", (7065, 7094), False, 'import bpy, bmesh, json, os, re\n'), ((7099, 7189), 'bpy.ops.mesh.primitive_plane_add', 'bpy.ops.mesh.primitive_plane_add', ([], {'size': 'size', 'enter_editmode': '(False)', 'location': '(0, 0, 0)'}), '(size=size, enter_editmode=False, location=\n (0, 0, 0))\n', (7131, 7189), False, 'import bpy, bmesh, json, os, re\n'), ((7198, 7238), 'bpy.data.materials.new', 'bpy.data.materials.new', ([], {'name': '"""plane_mat"""'}), "(name='plane_mat')\n", (7220, 7238), False, 'import bpy, bmesh, json, os, re\n'), ((7983, 8035), 'bpy.data.curves.new', 'bpy.data.curves.new', ([], {'type': '"""FONT"""', 'name': '"""title curve"""'}), "(type='FONT', name='title curve')\n", (8002, 8035), False, 'import bpy, bmesh, json, os, re\n'), ((8089, 8119), 'bpy.data.fonts.load', 'bpy.data.fonts.load', (['font_file'], {}), '(font_file)\n', (8108, 8119), False, 'import bpy, bmesh, json, os, re\n'), ((8281, 8323), 'bpy.data.objects.new', 'bpy.data.objects.new', (['"""title"""', 'title_curve'], {}), "('title', title_curve)\n", (8301, 8323), False, 'import bpy, bmesh, json, os, re\n'), ((8328, 8380), 'bpy.context.scene.collection.objects.link', 'bpy.context.scene.collection.objects.link', (['title_obj'], {}), '(title_obj)\n', (8369, 8380), False, 'import bpy, bmesh, json, os, re\n'), ((8468, 8523), 'bpy.data.curves.new', 'bpy.data.curves.new', ([], {'type': '"""FONT"""', 'name': '"""footnote curve"""'}), "(type='FONT', name='footnote curve')\n", (8487, 8523), False, 'import bpy, bmesh, json, os, re\n'), ((8583, 8613), 'bpy.data.fonts.load', 'bpy.data.fonts.load', (['font_file'], {}), '(font_file)\n', (8602, 8613), False, 'import bpy, bmesh, json, os, re\n'), ((8813, 8861), 'bpy.data.objects.new', 'bpy.data.objects.new', (['"""footnote"""', 'footnote_curve'], {}), "('footnote', footnote_curve)\n", (8833, 8861), False, 'import bpy, bmesh, json, os, re\n'), ((8866, 8921), 'bpy.context.scene.collection.objects.link', 'bpy.context.scene.collection.objects.link', (['footnote_obj'], {}), '(footnote_obj)\n', (8907, 8921), False, 'import bpy, bmesh, json, os, re\n'), ((9020, 9069), 'bpy.data.curves.new', 'bpy.data.curves.new', ([], {'type': '"""FONT"""', 'name': '"""caption1"""'}), "(type='FONT', name='caption1')\n", (9039, 9069), False, 'import bpy, bmesh, json, os, re\n'), ((9129, 9159), 'bpy.data.fonts.load', 'bpy.data.fonts.load', (['font_file'], {}), '(font_file)\n', (9148, 9159), False, 'import bpy, bmesh, json, os, re\n'), ((9215, 9263), 'bpy.data.objects.new', 'bpy.data.objects.new', (['"""caption1"""', 'caption1_curve'], {}), "('caption1', caption1_curve)\n", (9235, 9263), False, 'import bpy, bmesh, json, os, re\n'), ((9268, 9323), 'bpy.context.scene.collection.objects.link', 'bpy.context.scene.collection.objects.link', (['caption1_obj'], {}), '(caption1_obj)\n', (9309, 9323), False, 'import bpy, bmesh, json, os, re\n'), ((9422, 9471), 'bpy.data.curves.new', 'bpy.data.curves.new', ([], {'type': '"""FONT"""', 'name': '"""caption2"""'}), "(type='FONT', name='caption2')\n", (9441, 9471), False, 'import bpy, bmesh, json, os, re\n'), ((9531, 9561), 'bpy.data.fonts.load', 'bpy.data.fonts.load', (['font_file'], {}), '(font_file)\n', (9550, 9561), False, 'import bpy, bmesh, json, os, re\n'), ((9581, 9629), 'bpy.data.objects.new', 'bpy.data.objects.new', (['"""caption2"""', 'caption2_curve'], {}), "('caption2', caption2_curve)\n", (9601, 9629), False, 'import bpy, bmesh, json, os, re\n'), ((9634, 9689), 'bpy.context.scene.collection.objects.link', 'bpy.context.scene.collection.objects.link', (['caption2_obj'], {}), '(caption2_obj)\n', (9675, 9689), False, 'import bpy, bmesh, json, os, re\n'), ((9786, 9835), 'bpy.data.curves.new', 'bpy.data.curves.new', ([], {'type': '"""FONT"""', 'name': '"""caption3"""'}), "(type='FONT', name='caption3')\n", (9805, 9835), False, 'import bpy, bmesh, json, os, re\n'), ((9895, 9925), 'bpy.data.fonts.load', 'bpy.data.fonts.load', (['font_file'], {}), '(font_file)\n', (9914, 9925), False, 'import bpy, bmesh, json, os, re\n'), ((10011, 10059), 'bpy.data.objects.new', 'bpy.data.objects.new', (['"""caption3"""', 'caption3_curve'], {}), "('caption3', caption3_curve)\n", (10031, 10059), False, 'import bpy, bmesh, json, os, re\n'), ((10064, 10119), 'bpy.context.scene.collection.objects.link', 'bpy.context.scene.collection.objects.link', (['caption3_obj'], {}), '(caption3_obj)\n', (10105, 10119), False, 'import bpy, bmesh, json, os, re\n'), ((10218, 10267), 'bpy.data.curves.new', 'bpy.data.curves.new', ([], {'type': '"""FONT"""', 'name': '"""caption4"""'}), "(type='FONT', name='caption4')\n", (10237, 10267), False, 'import bpy, bmesh, json, os, re\n'), ((10327, 10357), 'bpy.data.fonts.load', 'bpy.data.fonts.load', (['font_file'], {}), '(font_file)\n', (10346, 10357), False, 'import bpy, bmesh, json, os, re\n'), ((10377, 10425), 'bpy.data.objects.new', 'bpy.data.objects.new', (['"""caption4"""', 'caption4_curve'], {}), "('caption4', caption4_curve)\n", (10397, 10425), False, 'import bpy, bmesh, json, os, re\n'), ((10430, 10485), 'bpy.context.scene.collection.objects.link', 'bpy.context.scene.collection.objects.link', (['caption4_obj'], {}), '(caption4_obj)\n', (10471, 10485), False, 'import bpy, bmesh, json, os, re\n'), ((10608, 10647), 'bpy.data.materials.new', 'bpy.data.materials.new', ([], {'name': '"""font_mat"""'}), "(name='font_mat')\n", (10630, 10647), False, 'import bpy, bmesh, json, os, re\n'), ((11213, 11258), 'bpy.data.materials.new', 'bpy.data.materials.new', ([], {'name': '"""font_green_mat"""'}), "(name='font_green_mat')\n", (11235, 11258), False, 'import bpy, bmesh, json, os, re\n'), ((14059, 14101), 'bpy.ops.object.select_all', 'bpy.ops.object.select_all', ([], {'action': '"""SELECT"""'}), "(action='SELECT')\n", (14084, 14101), False, 'import bpy, bmesh, json, os, re\n'), ((14106, 14145), 'bpy.ops.object.delete', 'bpy.ops.object.delete', ([], {'use_global': '(False)'}), '(use_global=False)\n', (14127, 14145), False, 'import bpy, bmesh, json, os, re\n'), ((16411, 16449), 'os.path.join', 'os.path.join', (['current_dir', '"""data.json"""'], {}), "(current_dir, 'data.json')\n", (16423, 16449), False, 'import bpy, bmesh, json, os, re\n'), ((16464, 16500), 'os.path.join', 'os.path.join', (['current_dir', '"""geo.shp"""'], {}), "(current_dir, 'geo.shp')\n", (16476, 16500), False, 'import bpy, bmesh, json, os, re\n'), ((16572, 16642), 'os.path.join', 'os.path.join', (['current_dir.parent', '"""resource"""', '"""UbuntuMono-Regular.ttf"""'], {}), "(current_dir.parent, 'resource', 'UbuntuMono-Regular.ttf')\n", (16584, 16642), False, 'import bpy, bmesh, json, os, re\n'), ((208, 220), 'json.load', 'json.load', (['f'], {}), '(f)\n', (217, 220), False, 'import bpy, bmesh, json, os, re\n'), ((1099, 1240), 'bpy.ops.importgis.shapefile', 'bpy.ops.importgis.shapefile', ([], {'filepath': 'shp_file', 'fieldExtrudeName': '"""base"""', 'extrusionAxis': '"""Z"""', 'separateObjects': '(True)', 'fieldObjName': '"""postcode"""'}), "(filepath=shp_file, fieldExtrudeName='base',\n extrusionAxis='Z', separateObjects=True, fieldObjName='postcode')\n", (1126, 1240), False, 'import bpy, bmesh, json, os, re\n'), ((13846, 13895), 'bpy.app.handlers.frame_change_post.append', 'bpy.app.handlers.frame_change_post.append', (['update'], {}), '(update)\n', (13887, 13895), False, 'import bpy, bmesh, json, os, re\n'), ((16323, 16346), 'pathlib.Path', 'Path', (['bpy.data.filepath'], {}), '(bpy.data.filepath)\n', (16327, 16346), False, 'from pathlib import Path\n'), ((16088, 16124), 're.match', 're.match', (['"""intel"""', 'device.name', 're.I'], {}), "('intel', device.name, re.I)\n", (16096, 16124), False, 'import bpy, bmesh, json, os, re\n')]
|
# encoding utf-8
import pandas as pd
import numpy as np
import statsmodels.api as sm
from statsmodels.tools import eval_measures
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
class Preprocessing:
def __init__(self, data_raw):
self.data_clean = data_raw
def run(self):
        # EDA showed outliers among the low-priced properties;
        # drop the bottom 1% for now (a rough cut, neither arbitrary nor exact)
THRESHOLD = 0.01
self.exclude_outlier(THRESHOLD)
        # Obvious outliers other than the ones above
self.exclude_idx([524, 1299])
        # Bring SalePrice closer to a normal distribution to improve the linear regression
self.convert_log(["SalePrice"])
        # Remove multicollinearity
self.create_adding_column("AllSF", ["GrLivArea", "TotalBsmtSF"])
self.create_adding_column("AllFlrsSF", ["1stFlrSF", "2ndFlrSF"])
def exclude_outlier(self, THRESHOLD):
low_row = round(self.data_clean.shape[0] * THRESHOLD)
low_ids = self.data_clean.iloc[:low_row]
low_ids = list(low_ids['Id'].unique())
self.data_clean = self.data_clean.query("Id not in @low_ids")
def exclude_idx(self, ids):
self.data_clean = self.data_clean.query("Id not in @ids")
def convert_log(self, columns):
for c in columns:
self.data_clean[c] = self.data_clean[c].apply(lambda x: np.log(x))
def create_adding_column(self, create, adding):
c1, c2 = adding
self.data_clean[create] = self.data_clean[c1] + self.data_clean[c2]
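# e.g. AllSF = GrLivArea + TotalBsmtSF collapses two strongly correlated area
# features into a single regressor, which removes the multicollinearity noted
# in run() above.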
class Glm:
def __init__(self, preprocessing, X_columns, y_column):
self.X = preprocessing.data_clean[X_columns]
self.y = preprocessing.data_clean[y_column]
def fit(self):
        TRAIN_SIZE = 0.8  # any value >= 0.7 is acceptable
        RANDOM_STATE = 0  # not tuned
x_train, x_test, y_train, y_test = \
self.train_test_split(TRAIN_SIZE, RANDOM_STATE)
x_train, x_test = self.normalization(x_train, x_test)
self.model = sm.OLS(y_train, sm.add_constant(x_train))
self.model = self.model.fit()
def train_test_split(self, TRAIN_SIZE, RANDOM_STATE):
x_train, x_test, y_train, y_test = train_test_split(self.X, self.y,
train_size=TRAIN_SIZE,
random_state=RANDOM_STATE)
return x_train, x_test, y_train, y_test
def normalization(self, x_train, x_test):
scaler = StandardScaler()
scaler.fit(x_train)
x_train = scaler.transform(x_train)
x_test = scaler.transform(x_test)
return x_train, x_test
def write_summary(self, write_path):
with open(write_path, "w") as f:
f.write(str(self.model.summary()))
def main():
data_raw = pd.read_csv("./../../data/house_prices/train.csv")
preprocessing = Preprocessing(data_raw)
preprocessing.run()
X_columns = ["OverallQual", "GarageArea", "YearBuilt", "AllSF",
"AllFlrsSF", "YearRemodAdd", "OverallCond"]
y_column = ["SalePrice"]
model = Glm(preprocessing, X_columns, y_column)
model.fit()
model.write_summary("./GLM_summary.txt")
if __name__ == "__main__":
main()
|
[
"sklearn.preprocessing.StandardScaler",
"numpy.log",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"statsmodels.api.add_constant"
] |
[((2743, 2793), 'pandas.read_csv', 'pd.read_csv', (['"""./../../data/house_prices/train.csv"""'], {}), "('./../../data/house_prices/train.csv')\n", (2754, 2793), True, 'import pandas as pd\n'), ((2112, 2199), 'sklearn.model_selection.train_test_split', 'train_test_split', (['self.X', 'self.y'], {'train_size': 'TRAIN_SIZE', 'random_state': 'RANDOM_STATE'}), '(self.X, self.y, train_size=TRAIN_SIZE, random_state=\n RANDOM_STATE)\n', (2128, 2199), False, 'from sklearn.model_selection import train_test_split\n'), ((2413, 2429), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (2427, 2429), False, 'from sklearn.preprocessing import StandardScaler\n'), ((1946, 1970), 'statsmodels.api.add_constant', 'sm.add_constant', (['x_train'], {}), '(x_train)\n', (1961, 1970), True, 'import statsmodels.api as sm\n'), ((1284, 1293), 'numpy.log', 'np.log', (['x'], {}), '(x)\n', (1290, 1293), True, 'import numpy as np\n')]
|