repo_name
stringlengths 7
94
| repo_path
stringlengths 4
237
| repo_head_hexsha
stringlengths 40
40
| content
stringlengths 10
680k
| apis
stringlengths 2
680k
|
---|---|---|---|---|
NHGmaniac/voctoconfig | playground.py | 55a803a5f9bc81b48eaa72ced1fddd402aa7a2e9 | #!/usr/bin/env python3
import signal
import logging
import sys
from gi.repository import GObject
GObject.threads_init()
import time
from lib.args import Args
from lib.loghandler import LogHandler
import lib.connection as Connection
def testCallback(args):
log = logging.getLogger("Test")
log.info(str(args))
class Voctoconfig(object):
def __init__(self):
self.log = logging.getLogger("Voctoconfig")
self.log.debug("Creating GObject Mainloop")
self.mainloop = GObject.MainLoop()
def run(self):
self.log.info("Running MainLoop")
try:
self.mainloop.run()
except KeyboardInterrupt:
self.log.info("Terminated via KeyboardInterrupt")
def quit(self):
self.log.info("Quitting MainLoop")
self.mainloop.quit()
def main():
docolor = (Args.color == 'always') or (Args.color == 'auto' and
sys.stderr.isatty())
loghandler = LogHandler(docolor, Args.timestamp)
logging.root.addHandler(loghandler)
if Args.verbose >= 2:
level = logging.DEBUG
elif Args.verbose == 1:
level = logging.INFO
else:
level = logging.WARNING
logging.root.setLevel(level)
logging.debug('setting SIGINT handler')
signal.signal(signal.SIGINT, signal.SIG_DFL)
Connection.establish(Args.host)
Connection.enterNonblockingMode()
Connection.on("message", testCallback)
mainloop = GObject.MainLoop()
mainloop.run()
while True:
logging.debug("mimimi...")
Connection.send("message", "test2")
time.sleep(10)
if __name__ == '__main__':
main() | [((102, 124), 'gi.repository.GObject.threads_init', 'GObject.threads_init', ([], {}), '()\n', (122, 124), False, 'from gi.repository import GObject\n'), ((273, 298), 'logging.getLogger', 'logging.getLogger', (['"""Test"""'], {}), "('Test')\n", (290, 298), False, 'import logging\n'), ((982, 1017), 'lib.loghandler.LogHandler', 'LogHandler', (['docolor', 'Args.timestamp'], {}), '(docolor, Args.timestamp)\n', (992, 1017), False, 'from lib.loghandler import LogHandler\n'), ((1022, 1057), 'logging.root.addHandler', 'logging.root.addHandler', (['loghandler'], {}), '(loghandler)\n', (1045, 1057), False, 'import logging\n'), ((1218, 1246), 'logging.root.setLevel', 'logging.root.setLevel', (['level'], {}), '(level)\n', (1239, 1246), False, 'import logging\n'), ((1251, 1290), 'logging.debug', 'logging.debug', (['"""setting SIGINT handler"""'], {}), "('setting SIGINT handler')\n", (1264, 1290), False, 'import logging\n'), ((1295, 1339), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal.SIG_DFL'], {}), '(signal.SIGINT, signal.SIG_DFL)\n', (1308, 1339), False, 'import signal\n'), ((1344, 1375), 'lib.connection.establish', 'Connection.establish', (['Args.host'], {}), '(Args.host)\n', (1364, 1375), True, 'import lib.connection as Connection\n'), ((1380, 1413), 'lib.connection.enterNonblockingMode', 'Connection.enterNonblockingMode', ([], {}), '()\n', (1411, 1413), True, 'import lib.connection as Connection\n'), ((1418, 1456), 'lib.connection.on', 'Connection.on', (['"""message"""', 'testCallback'], {}), "('message', testCallback)\n", (1431, 1456), True, 'import lib.connection as Connection\n'), ((1472, 1490), 'gi.repository.GObject.MainLoop', 'GObject.MainLoop', ([], {}), '()\n', (1488, 1490), False, 'from gi.repository import GObject\n'), ((396, 428), 'logging.getLogger', 'logging.getLogger', (['"""Voctoconfig"""'], {}), "('Voctoconfig')\n", (413, 428), False, 'import logging\n'), ((505, 523), 'gi.repository.GObject.MainLoop', 'GObject.MainLoop', ([], {}), '()\n', (521, 523), False, 'from gi.repository import GObject\n'), ((1534, 1560), 'logging.debug', 'logging.debug', (['"""mimimi..."""'], {}), "('mimimi...')\n", (1547, 1560), False, 'import logging\n'), ((1569, 1604), 'lib.connection.send', 'Connection.send', (['"""message"""', '"""test2"""'], {}), "('message', 'test2')\n", (1584, 1604), True, 'import lib.connection as Connection\n'), ((1613, 1627), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (1623, 1627), False, 'import time\n'), ((944, 963), 'sys.stderr.isatty', 'sys.stderr.isatty', ([], {}), '()\n', (961, 963), False, 'import sys\n')] |
Aceticia/tianshou | tianshou/utils/logger/tensorboard.py | 6377dc5006ba1111adac42472447b9de4a021c2d | import warnings
from typing import Any, Callable, Optional, Tuple
from tensorboard.backend.event_processing import event_accumulator
from torch.utils.tensorboard import SummaryWriter
from tianshou.utils.logger.base import LOG_DATA_TYPE, BaseLogger
class TensorboardLogger(BaseLogger):
"""A logger that relies on tensorboard SummaryWriter by default to visualize \
and log statistics.
:param SummaryWriter writer: the writer to log data.
:param int train_interval: the log interval in log_train_data(). Default to 1000.
:param int test_interval: the log interval in log_test_data(). Default to 1.
:param int update_interval: the log interval in log_update_data(). Default to 1000.
:param int save_interval: the save interval in save_data(). Default to 1 (save at
the end of each epoch).
"""
def __init__(
self,
writer: SummaryWriter,
train_interval: int = 1000,
test_interval: int = 1,
update_interval: int = 1000,
save_interval: int = 1,
) -> None:
super().__init__(train_interval, test_interval, update_interval)
self.save_interval = save_interval
self.last_save_step = -1
self.writer = writer
def write(self, step_type: str, step: int, data: LOG_DATA_TYPE) -> None:
for k, v in data.items():
self.writer.add_scalar(k, v, global_step=step)
def save_data(
self,
epoch: int,
env_step: int,
gradient_step: int,
save_checkpoint_fn: Optional[Callable[[int, int, int], None]] = None,
) -> None:
if save_checkpoint_fn and epoch - self.last_save_step >= self.save_interval:
self.last_save_step = epoch
save_checkpoint_fn(epoch, env_step, gradient_step)
self.write("save/epoch", epoch, {"save/epoch": epoch})
self.write("save/env_step", env_step, {"save/env_step": env_step})
self.write(
"save/gradient_step", gradient_step,
{"save/gradient_step": gradient_step}
)
def restore_data(self) -> Tuple[int, int, int]:
ea = event_accumulator.EventAccumulator(self.writer.log_dir)
ea.Reload()
try: # epoch / gradient_step
epoch = ea.scalars.Items("save/epoch")[-1].step
self.last_save_step = self.last_log_test_step = epoch
gradient_step = ea.scalars.Items("save/gradient_step")[-1].step
self.last_log_update_step = gradient_step
except KeyError:
epoch, gradient_step = 0, 0
try: # offline trainer doesn't have env_step
env_step = ea.scalars.Items("save/env_step")[-1].step
self.last_log_train_step = env_step
except KeyError:
env_step = 0
return epoch, env_step, gradient_step
class BasicLogger(TensorboardLogger):
"""BasicLogger has changed its name to TensorboardLogger in #427.
This class is for compatibility.
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
warnings.warn(
"Deprecated soon: BasicLogger has renamed to TensorboardLogger in #427."
)
super().__init__(*args, **kwargs)
| [((2143, 2198), 'tensorboard.backend.event_processing.event_accumulator.EventAccumulator', 'event_accumulator.EventAccumulator', (['self.writer.log_dir'], {}), '(self.writer.log_dir)\n', (2177, 2198), False, 'from tensorboard.backend.event_processing import event_accumulator\n'), ((3068, 3160), 'warnings.warn', 'warnings.warn', (['"""Deprecated soon: BasicLogger has renamed to TensorboardLogger in #427."""'], {}), "(\n 'Deprecated soon: BasicLogger has renamed to TensorboardLogger in #427.')\n", (3081, 3160), False, 'import warnings\n')] |
InsightGit/JetfuelGameEngine | PythonAPI/pythonwrappers/jetfuel/gui/menu.py | 3ea0bf2fb5e09aadf304b7b5a16882d72336c408 | # Jetfuel Game Engine- A SDL-based 2D game-engine
# Copyright (C) 2018 InfernoStudios
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ctypes import c_uint
from ctypes import c_int
from ctypes import c_void_p
from ctypes import c_bool
from ctypes import c_wchar_p
from jetfuel.draw.rectangleinterface import rectangle_interface
from jetfuel.draw.image import image
class menu(rectangle_interface):
def __init__(self, jetfuelsoloader, maxheight=None, columngap=None,
buttongap=None):
self._jetfuel = jetfuelsoloader.jetfuelso;
if(maxheight is not None and columngap is not None and
buttongap is not None):
self._jetfuel.Menu_new_from_heights_and_gaps.argtypes = [c_uint,
c_uint,
c_uint];
self._jetfuel.Menu_new_from_heights_and_gaps.restype = c_void_p;
self.drawableref = self._jetfuel.Menu_new_from_heights_and_gaps(
maxheight,
columngap,
buttongap);
else:
self._jetfuel.Menu_new.restype = c_void_p;
self.drawableref = self._jetfuel.Menu_new();
print("Constructed empty drawableref!");
def get_max_height(self):
self._jetfuel.Menu_get_max_height.argtypes = [c_void_p];
self._jetfuel.Menu_get_max_height.restype = c_uint;
return self._jetfuel.Menu_get_max_height(self.drawableref);
def set_max_height(self, maxheight):
self._jetfuel.Menu_set_max_height.argtypes = [c_void_p, c_uint];
self._jetfuel.Menu_set_max_height(self.drawableref, maxheight);
def get_column_gap(self):
self._jetfuel.Menu_get_column_gap.argtypes = [c_void_p];
self._jetfuel.Menu_get_column_gap.restype = c_uint;
return self._jetfuel.Menu_get_column_gap(self.drawableref);
def set_column_gap(self, columngap):
self._jetfuel.Menu_set_column_gap.argtypes = [c_void_p, c_uint];
self._jetfuel.Menu_set_column_height(self.drawableref, columngap);
def get_button_gap(self):
self._jetfuel.Menu_get_button_gap.argtypes = [c_void_p];
self._jetfuel.Menu_get_button_gap.restype = c_uint;
return self._jetfuel.Menu_get_column_gap(self.drawableref);
def set_button_gap(self, buttongap):
self._jetfuel.Menu_set_max_height.argtypes = [c_void_p, c_uint];
self._jetfuel.Menu_set_max_height(self.drawableref, buttongap);
def get_container_box_image(self, jetfuelsoloader):
self._jetfuel.Menu_get_container_box_image.argtypes = [c_void_p];
self._jetfuel.Menu_get_container_box_image.restype = c_void_p;
containerboximage = image(jetfuelsoloader);
self._jetfuel.Image_delete.argtypes = [c_void_p];
self._jetfuel.Image_delete(containerboximage.imageref);
containerboximage.imageref = self._jetfuel.Menu_get_container_box_image(
self.drawableref);
return containerboximage;
def set_container_box_image(self, image, borderwidth, borderheight):
self._jetfuel.Menu_set_container_box_image.argtypes = [c_void_p,
c_void_p, c_uint,
c_uint];
self._jetfuel.Menu_set_container_box_image(image.imageref, borderwidth,
borderheight);
def get_container_box_border_width(self):
self._jetfuel.Menu_get_container_box_border_width.argtypes = [c_void_p];
self._jetfuel.Menu_get_container_box_border_width.restype = c_uint;
return self._jetfuel.Menu_get_container_box_border_width(
self.drawableref);
def get_container_box_border_height(self):
self._jetfuel.Menu_get_container_box_border_height.argtypes = [c_void_p];
self._jetfuel.Menu_get_container_box_border_height.restype = c_uint;
return self._jetfuel.Menu_get_container_box_border_height(
self.drawableref);
def add_button(self, buttoncharsreplacement, uisactiontowatchfor,
messagetosenduponclick, messagebus):
self._jetfuel.Menu_add_button.argtypes = [c_void_p, c_void_p,
c_wchar_p, c_wchar_p,
c_void_p];
self._jetfuel.Menu_add_button.restype = c_bool;
return self._jetfuel.Menu_add_button(self.drawableref,
buttoncharsreplacement.buttoncharsref,
uisactiontowatchfor,
messagetosenduponclick,
messagebus.messagebusref);
def get_position_x(self):
self._jetfuel.Menu_get_position_x.argtypes = [c_void_p];
self._jetfuel.Menu_get_position_x.restype = c_int;
return self.Menu_get_position_x(self.drawableref);
def get_position_y(self):
self._jetfuel.Menu_get_position_y.argtypes = [c_void_p];
self._jetfuel.Menu_get_position_y.restype = c_int;
return self.Menu_get_position_y(self.drawableref);
def set_position(self, x, y):
self._jetfuel.Menu_set_position.argtypes = [c_void_p, c_int, c_int];
self._jetfuel.Menu_set_position(self.drawableref, x, y);
def get_rect_to_draw_width(self):
self._jetfuel.Menu_get_rect_to_draw_width.argtypes = [c_void_p];
self._jetfuel.Menu_get_rect_to_draw_width.restype = c_int;
return self.Menu_get_rect_to_draw_width(self.drawableref);
def get_rect_to_draw_height(self):
self._jetfuel.Menu_get_rect_to_draw_height.argtypes = [c_void_p];
self._jetfuel.Menu_get_rect_to_draw_height.restype = c_int;
return self.Menu_get_rect_to_draw_height(self.drawableref);
| [((3591, 3613), 'jetfuel.draw.image.image', 'image', (['jetfuelsoloader'], {}), '(jetfuelsoloader)\n', (3596, 3613), False, 'from jetfuel.draw.image import image\n')] |
ParikhKadam/google-research | latent_programmer/decomposition_transformer_attention/train.py | 00a282388e389e09ce29109eb050491c96cfab85 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# python3
"""Train seq-to-seq model on random supervised training tasks."""
# pytype: disable=wrong-arg-count
# pytype: disable=attribute-error
import collections
import functools
import json
import os
import random
import sys
import time
from absl import app
from absl import flags
from absl import logging
from flax import jax_utils
from flax import linen as nn
from flax import optim
from flax.metrics import tensorboard
from flax.training import checkpoints
from flax.training import common_utils
import jax
import jax.numpy as jnp
import numpy as np
import tensorflow.compat.v2 as tf
from latent_programmer import decode
from latent_programmer import models as base_models
from latent_programmer.decomposition_transformer_attention import decomposition_models as models
from latent_programmer.decomposition_transformer_attention import input_pipeline
from latent_programmer.tasks.robust_fill import dsl
from latent_programmer.tasks.robust_fill import tokens as dsl_tokens
sys.path.append('../../')
gfile = tf.io.gfile
FLAGS = flags.FLAGS
flags.DEFINE_integer('seed', 0, 'Fixed random seed for training.')
flags.DEFINE_float('lr', 1e-3, 'Learning rate.')
flags.DEFINE_float('weight_decay', 1e-1,
'Decay factor for AdamW-style weight decay.')
flags.DEFINE_integer('embedding_dim', 256, 'Embedding dimension.')
flags.DEFINE_integer('hidden_dim', 512, 'Hidden dimension.')
flags.DEFINE_integer('num_heads', 4, 'Number of layers.')
flags.DEFINE_integer('num_layers', 3, 'Number of Transformer heads.')
flags.DEFINE_boolean('slow_decode', True, 'Use slow decoding for prediction?')
flags.DEFINE_string('dataset_filepattern', None,
'Filepattern for TFRecord dataset.')
flags.DEFINE_integer('per_device_batch_size', 16,
'Number of program tasks in a batch.')
flags.DEFINE_integer('num_strings_per_task', 4,
'Number of input/output strings per task.')
flags.DEFINE_integer('max_program_length', 100,
'Maximum number of tokens in program.')
flags.DEFINE_integer('max_characters', 120,
'Maximum number of characters in input/output strings.')
flags.DEFINE_string('save_dir', None, 'Directory to save results to.')
flags.DEFINE_integer('num_train_steps', 2000000, 'Number of training steps.')
flags.DEFINE_integer('num_eval_steps', 10, 'Number of evaluation steps.')
flags.DEFINE_integer('log_freq', 1000, 'Number of steps between training logs.')
flags.DEFINE_integer('eval_freq', 2000, 'Number of steps between eval.')
flags.DEFINE_integer('predict_freq', 50000,
'Number of steps between prediction (beam search).')
flags.DEFINE_integer('checkpoint_freq', 50000,
'Number of steps between checkpoint saves.')
flags.DEFINE_integer('finetune_start_step', -1,
'Step the initial checkpoint should start at for '
'finetuning, or -1 if not finetuning.')
flags.DEFINE_bool('restore_checkpoints', True,
'Whether to restore from existing model checkpoints.')
flags.DEFINE_string('attention_mask_type', 'bos_full_attention',
'The kind of attention mask to use. Options are: baseline, '
'bos_to_bos, bos_full_attention')
flags.DEFINE_bool('use_relative_attention', True,
'Whether to use relative positonal embeddings.')
flags.DEFINE_bool('bos_special_attention', False,
'Whether to use special relative attention computation for '
'BOS tokens.')
_internal = False
if not _internal:
flags.DEFINE_string('xm_parameters', None,
'String specifying hyperparamter search.')
def create_learning_rate_scheduler(
base_learning_rate=0.5,
factors='constant * linear_warmup * rsqrt_normalized_decay',
warmup_steps=16000,
decay_factor=0.5,
steps_per_decay=50000,
steps_per_cycle=100000):
"""Creates learning rate schedule.
Interprets factors in the factors string which can consist of:
* constant: interpreted as the constant value,
* linear_warmup: interpreted as linear warmup until warmup_steps,
* rsqrt_decay: divide by square root of max(step, warmup_steps)
* decay_every: Every k steps decay the learning rate by decay_factor.
* cosine_decay: Cyclic cosine decay, uses steps_per_cycle parameter.
Args:
base_learning_rate: float, the starting constant for the lr schedule.
factors: a string with factors separated by '*' that defines the schedule.
warmup_steps: how many steps to warm up for in the warmup schedule.
decay_factor: The amount to decay the learning rate by.
steps_per_decay: How often to decay the learning rate.
steps_per_cycle: Steps per cycle when using cosine decay.
Returns:
A function learning_rate(step): float -> {'learning_rate': float}, the
step-dependent lr.
"""
factors = [n.strip() for n in factors.split('*')]
def step_fn(step):
"""Step to learning rate function."""
ret = 1.0
for name in factors:
if name == 'constant':
ret *= base_learning_rate
elif name == 'linear_warmup':
ret *= jnp.minimum(1.0, step / warmup_steps)
elif name == 'rsqrt_decay':
ret /= jnp.sqrt(jnp.maximum(1.0, step - warmup_steps))
elif name == 'rsqrt_normalized_decay':
ret *= jnp.sqrt(warmup_steps)
ret /= jnp.sqrt(jnp.maximum(step, warmup_steps))
elif name == 'decay_every':
ret *= (decay_factor**(step // steps_per_decay))
elif name == 'cosine_decay':
progress = jnp.maximum(0.0,
(step - warmup_steps) / float(steps_per_cycle))
ret *= jnp.maximum(0.0,
0.5 * (1.0 + jnp.cos(jnp.pi * (progress % 1.0))))
else:
raise ValueError('Unknown factor %s.' % name)
return jnp.asarray(ret, dtype=jnp.float32)
return step_fn
def compute_weighted_cross_entropy(logits, targets, weights=None):
"""Compute weighted cross entropy and entropy for log probs and targets.
Args:
logits: `[batch, length, num_classes]` float array.
targets: categorical targets `[batch, length]` int array.
weights: None or array of shape [batch, length, 1]
Returns:
Tuple of scalar loss and batch normalizing factor.
"""
if logits.ndim != targets.ndim + 1:
raise ValueError('Incorrect shapes. Got shape %s logits and %s targets' %
(str(logits.shape), str(targets.shape)))
onehot_targets = common_utils.onehot(targets, logits.shape[-1])
loss = -jnp.sum(onehot_targets * nn.log_softmax(logits), axis=-1)
normalizing_factor = jnp.prod(jnp.asarray(targets.shape))
if weights is not None:
loss = loss * weights
normalizing_factor = weights.sum()
return loss.sum(), normalizing_factor
def compute_weighted_accuracy(logits, targets, weights=None):
"""Compute weighted accuracy for log probs and targets.
Args:
logits: `[batch, length, num_classes]` float array.
targets: categorical targets `[batch, length]` int array.
weights: None or array of shape [batch, length, 1]
Returns:
Tuple of scalar accuracy and batch normalizing factor.
"""
if logits.ndim != targets.ndim + 1:
raise ValueError('Incorrect shapes. Got shape %s logits and %s targets' %
(str(logits.shape), str(targets.shape)))
acc = jnp.equal(jnp.argmax(logits, axis=-1), targets)
normalizing_factor = jnp.prod(jnp.asarray(targets.shape))
if weights is not None:
acc = acc * weights
normalizing_factor = weights.sum()
return acc.sum(), normalizing_factor
def compute_metrics(logits, targets, weights):
"""Compute summary metrics."""
loss, weight_sum = compute_weighted_cross_entropy(logits, targets, weights)
acc, _ = compute_weighted_accuracy(logits, targets, weights)
metrics = {
'loss': loss,
'accuracy': acc,
'denominator': weight_sum,
}
metrics = jax.lax.psum(metrics, 'batch')
return metrics
# Train / eval / decode step functions.
# -----------------------------------------------------------------------------
def train_step(optimizer,
inputs,
outputs,
programs,
learning_rate_fn,
config,
dropout_rng):
"""Train on batch of program tasks."""
# We handle PRNG splitting inside the top pmap, rather
# than handling it outside in the training loop - doing the
# latter can add some stalls to the devices.
dropout_rng, new_dropout_rng = jax.random.split(dropout_rng)
weights = jnp.where(programs > 0, 1, 0).astype(jnp.float32)
def loss_fn(params):
"""Loss function used for training."""
logits = models.DecomposeAttentionTransformer(config).apply(
{'params': params},
inputs,
outputs,
programs,
rngs={'dropout': dropout_rng})
loss, weight_sum = compute_weighted_cross_entropy(logits, programs, weights)
mean_loss = loss / weight_sum
return mean_loss, logits
step = optimizer.state.step
lr = learning_rate_fn(step)
grad_fn = jax.value_and_grad(loss_fn, has_aux=True)
(_, logits), grad = grad_fn(optimizer.target)
grad = jax.lax.pmean(grad, 'batch')
new_optimizer = optimizer.apply_gradient(grad, learning_rate=lr)
# Get metrics.
metrics = compute_metrics(logits, programs, weights)
metrics['learning_rate'] = lr
return new_optimizer, metrics, new_dropout_rng
def eval_step(params, inputs, outputs, programs, eos_token, config):
"""Collect metrics for evaluation during training."""
weights = jnp.where(
jnp.logical_and(programs > 0,
jnp.logical_and(programs != config.base_config.bos_token,
programs != eos_token)),
1, 0).astype(jnp.float32)
logits = models.DecomposeAttentionTransformer(config).apply(
{'params': params}, inputs, outputs, programs)
return compute_metrics(logits, programs, weights)
def initialize_cache(inputs, outputs, programs, max_decode_len, config):
"""Initialize a cache for a given input shape and max decode length."""
target_shape = (programs.shape[0], max_decode_len)
dtype = config.base_config.dtype
initial_variables = models.DecomposeAttentionTransformer(config).init(
jax.random.PRNGKey(0),
jnp.ones(inputs.shape, dtype),
jnp.ones(outputs.shape, dtype),
jnp.ones(target_shape, dtype))
return initial_variables['cache']
def predict_step(params,
inputs,
outputs,
cache,
beam_size,
eos_token,
max_decode_len,
config,
slow_decode=True):
"""Predict translation with fast decoding beam search on a batch."""
# Prepare transformer fast-decoder call for beam search: for beam search, we
# need to set up our decoder model to handle a batch size equal to
# batch_size * beam_size, where each batch item's data is expanded in-place
# rather than tiled.
flat_encoded = decode.flat_batch_beam_expand(
models.DecomposeAttentionTransformer(config).apply(
{'params': params},
inputs,
outputs,
method=models.DecomposeAttentionTransformer.encode),
beam_size)
encoded_padding_mask = jnp.where(outputs > 0, 1, 0).astype(jnp.float32)
flat_encoded_padding_mask = decode.flat_batch_beam_expand(
encoded_padding_mask, beam_size)
if slow_decode:
def tokens_ids_to_logits(flat_ids):
"""Token slice to logits from decoder model."""
# --> [batch * beam, 1, vocab]
flat_logits = models.DecomposeAttentionTransformer(config=config).apply(
{'params': params},
flat_ids,
flat_encoded,
flat_encoded_padding_mask,
method=models.DecomposeAttentionTransformer.decode)
return flat_logits
else:
def tokens_ids_to_logits(flat_ids, flat_cache):
"""Token slice to logits from decoder model."""
# --> [batch * beam, 1, vocab]
flat_logits, new_vars = models.DecomposeAttentionTransformer(
config=config).apply(
{'params': params, 'cache': flat_cache},
flat_ids,
flat_encoded,
flat_encoded_padding_mask,
mutable=['cache'],
method=models.DecomposeAttentionTransformer.decode)
new_flat_cache = new_vars['cache']
# Remove singleton sequence-length dimension:
# [batch * beam, 1, vocab] --> [batch * beam, vocab]
flat_logits = flat_logits.squeeze(axis=1)
return flat_logits, new_flat_cache
# Using the above-defined single-step decoder function, run a
# beam search over possible sequences given input encoding.
beam_seqs, _ = decode.beam_search(
inputs,
cache,
tokens_ids_to_logits,
beam_size=beam_size,
alpha=0.6,
bos_token=config.base_config.bos_token,
eos_token=eos_token,
max_decode_len=max_decode_len,
slow_decode=slow_decode)
# Beam search returns [n_batch, n_beam, n_length] with beam dimension
# sorted in increasing order of log-probability.
return beam_seqs
# Util functions for prediction
# -----------------------------------------------------------------------------
def pad_examples(x, desired_batch_size):
"""Expand batch to desired size by repeating last slice."""
batch_pad = desired_batch_size - x.shape[0]
tile_dims = [1] * len(x.shape)
tile_dims[0] = batch_pad
return np.concatenate([x, np.tile(x[-1], tile_dims)], axis=0)
def tohost(x):
"""Collect batches from all devices to host and flatten batch dimensions."""
n_device, n_batch, *remaining_dims = x.shape
return x.reshape((n_device * n_batch,) + tuple(remaining_dims))
def per_host_sum_pmap(in_tree):
"""Execute psum on in_tree's leaves over one device per host."""
host2devices = collections.defaultdict(list)
for d in jax.devices():
host2devices[d.host_id].append(d)
devices = [host2devices[k][0] for k in host2devices]
host_psum = jax.pmap(lambda x: jax.lax.psum(x, 'i'), 'i', devices=devices)
def pre_pmap(xs):
return jax.tree_map(lambda x: jnp.broadcast_to(x, (1,) + x.shape), xs)
def post_pmap(xs):
return jax.tree_map(lambda x: x[0], xs)
return post_pmap(host_psum(pre_pmap(in_tree)))
def eval_predicted(predicted, inputs, outputs, parse_beam_fn):
"""Evaluate predicted program beams."""
best_p, best_score = None, -1
# predicted shape [beam_size, length]
for beam in predicted[::-1]:
try:
p = parse_beam_fn(beam)
p_outs = [p(inp) for inp in inputs]
score = np.sum([p_out == out for p_out, out in zip(p_outs, outputs)])
if score > best_score:
best_p, best_score = p, score
except: # pylint: disable=bare-except
pass
if best_score >= len(inputs): # Found solution.
break
return best_p, best_score
def shorten(key):
splits = key.split('_')
return ''.join(s[0] for s in splits)
def main(_):
tf.enable_v2_behavior()
tf.random.set_seed(FLAGS.seed)
np.random.seed(FLAGS.seed)
random.seed(FLAGS.seed)
# BOS special attention only makes sense if we are using relative attention
# and it's not the baseline.
if FLAGS.bos_special_attention and (not FLAGS.use_relative_attention or
FLAGS.attention_mask_type == 'baseline'):
raise ValueError(
"bos_special_attention doesn't work when use_relative_attention={} and "
'attention_mask_type={}'.format(FLAGS.use_relative_attention,
FLAGS.attention_mask_type))
if not gfile.isdir(FLAGS.save_dir):
gfile.makedirs(FLAGS.save_dir)
hparam_str_dict = dict(seed=FLAGS.seed, lr=FLAGS.lr)
# Get hyperparmaters
if FLAGS.xm_parameters:
for key, value in json.loads(FLAGS.xm_parameters).items():
if key not in hparam_str_dict:
hparam_str_dict[key] = value
hparam_str = ','.join(['%s=%s' % (shorten(k), str(hparam_str_dict[k]))
for k in sorted(hparam_str_dict.keys())])
# Number of local devices for this host.
n_devices = jax.local_device_count()
if jax.host_id() == 0:
summary_writer = tensorboard.SummaryWriter(
os.path.join(FLAGS.save_dir, 'tb', hparam_str))
batch_size = FLAGS.per_device_batch_size * n_devices
io_shape = (FLAGS.per_device_batch_size,
FLAGS.num_strings_per_task,
FLAGS.max_characters)
program_shape = (FLAGS.per_device_batch_size, FLAGS.max_program_length)
# Setup DSL
# ---------------------------------------------------------------------------
# Build token tables.
id_char_table = {i+1: char for (i, char) in enumerate(dsl.CHARACTER)}
char_id_table = {char: id for id, char in id_char_table.items()}
id_token_table, token_id_table = dsl_tokens.build_token_tables()
io_vocab_size = len(char_id_table) + 1 # For padding.
program_vocab_size = len(token_id_table) + 1
bos_token = token_id_table[dsl.BOS]
eos_token = token_id_table[dsl.EOS]
# Parse io and program token sequences (for eval).
def decode_io(inputs, outputs):
"""Decode io examples tokens."""
def decode_str(s):
"""Decode string tokens."""
return ''.join([id_char_table[c_id] for c_id in s if c_id > 0])
inps, outs = [], []
for inp, out in zip(inputs, outputs):
inps.append(decode_str(inp))
outs.append(decode_str(out))
return inps, outs
def decode_program(program):
"""Decode program tokens."""
program = program[:np.argmax(program == eos_token) + 1].astype(np.int32)
program = program[program != bos_token]
try:
return dsl.decode_program(program.tolist(), id_token_table)
except: # pylint: disable=bare-except
return None # Program does not compile.
# Load Dataset
# ---------------------------------------------------------------------------
logging.info('Initializing dataset.')
if not FLAGS.dataset_filepattern:
raise ValueError('Must specify filepattern to dataset.')
# Training dataset.
logging.info('Loading dataset from %s', FLAGS.dataset_filepattern)
padded_shapes = (io_shape[1:], io_shape[1:], program_shape[1:])
logging.info('padded_shapes: %s', padded_shapes)
dataset = input_pipeline.create_dataset_from_tf_record(
FLAGS.dataset_filepattern, token_id_table, char_id_table)
dataset = dataset.padded_batch(
batch_size,
padded_shapes=padded_shapes,
drop_remainder=True)
# Split evaluation and training.
eval_ds = dataset.take(FLAGS.num_eval_steps)
# Decrease batch of predict dataset to handle beam search.
predict_ds = eval_ds.unbatch().padded_batch(
int(np.ceil(batch_size / 10)),
padded_shapes=padded_shapes)
train_ds = dataset.skip(FLAGS.num_eval_steps).repeat()
train_iter = train_ds.as_numpy_iterator()
# Build Model and Optimizer
# ---------------------------------------------------------------------------
use_dropout = False
base_config = base_models.TransformerConfig(
vocab_size=io_vocab_size,
output_vocab_size=program_vocab_size,
shift=True,
emb_dim=FLAGS.embedding_dim,
num_heads=FLAGS.num_heads,
num_layers=FLAGS.num_layers,
qkv_dim=FLAGS.embedding_dim,
mlp_dim=FLAGS.hidden_dim,
max_len=max(FLAGS.max_characters, FLAGS.max_program_length),
use_relative_attention=FLAGS.use_relative_attention,
deterministic=not use_dropout,
decode=False,
bos_token=bos_token)
train_config = models.DecomposeAttentionTransformerConfig(
base_config=base_config,
attention_mask_type=FLAGS.attention_mask_type,
bos_special_attention=FLAGS.bos_special_attention)
eval_config = models.DecomposeAttentionTransformerConfig(
base_config=base_config.replace(deterministic=not use_dropout),
attention_mask_type=FLAGS.attention_mask_type,
bos_special_attention=FLAGS.bos_special_attention)
predict_config = models.DecomposeAttentionTransformerConfig(
base_config=base_config.replace(
shift=False, deterministic=not use_dropout,
decode=not FLAGS.slow_decode),
attention_mask_type=FLAGS.attention_mask_type,
bos_special_attention=FLAGS.bos_special_attention)
rng = jax.random.PRNGKey(FLAGS.seed)
rng = jax.random.fold_in(rng, jax.host_id())
rng, init_rng = jax.random.split(rng)
m = models.DecomposeAttentionTransformer(eval_config)
initial_variables = jax.jit(m.init)(
{'params': init_rng, 'dropout': init_rng},
jnp.ones(io_shape, jnp.float32),
jnp.ones(io_shape, jnp.float32),
jnp.ones(program_shape, jnp.float32))
optimizer_def = optim.Adam(
FLAGS.lr,
beta1=0.9,
beta2=0.98,
eps=1e-9,
weight_decay=FLAGS.weight_decay)
optimizer = optimizer_def.create(initial_variables['params'])
del initial_variables # Don't keep a copy of the initial model.
start_step = 0
if FLAGS.restore_checkpoints:
# Restore unreplicated optimizer + model state from last checkpoint.
optimizer = checkpoints.restore_checkpoint(
os.path.join(FLAGS.save_dir, 'checkpoints', hparam_str), optimizer)
# Grab last step.
start_step = int(optimizer.state.step)
logging.info('Found model checkpointed at step %d.', start_step)
if FLAGS.finetune_start_step > 0:
logging.info('Checking that start_step (%s) == finetune_start_step (%s)',
start_step, FLAGS.finetune_start_step)
assert start_step == FLAGS.finetune_start_step
# Replicate optimizer.
optimizer = jax_utils.replicate(optimizer)
# TODO(jxihong): Implement fast decoding.
assert FLAGS.slow_decode, 'Fast decoding is not implemented yet.'
if FLAGS.finetune_start_step <= 0:
learning_rate_fn = create_learning_rate_scheduler(
base_learning_rate=FLAGS.lr)
else:
# Constant LR for finetuning.
learning_rate_fn = create_learning_rate_scheduler(
base_learning_rate=FLAGS.lr,
factors='constant')
p_train_step = jax.pmap(
functools.partial(
train_step,
learning_rate_fn=learning_rate_fn,
config=train_config),
axis_name='batch')
p_eval_step = jax.pmap(
functools.partial(eval_step,
eos_token=eos_token,
config=eval_config),
axis_name='batch')
p_init_cache = jax.pmap(
functools.partial(
initialize_cache,
max_decode_len=FLAGS.max_program_length,
config=predict_config),
axis_name='batch')
p_pred_step = jax.pmap(
functools.partial(
predict_step,
eos_token=eos_token,
max_decode_len=FLAGS.max_program_length,
config=predict_config,
slow_decode=FLAGS.slow_decode),
axis_name='batch',
static_broadcasted_argnums=(4,))
# Main Train Loop
# ---------------------------------------------------------------------------
dropout_rng = jax.random.split(rng, jax.local_device_count())
del rng
metrics_all = []
tick = time.time()
for step in range(start_step, FLAGS.num_train_steps):
inputs, outputs, programs = common_utils.shard(next(train_iter))
optimizer, metrics, dropout_rng = p_train_step(
optimizer, inputs, outputs, programs, dropout_rng=dropout_rng)
metrics_all.append(metrics)
is_last_step = step == FLAGS.num_train_steps - 1
# Save a Checkpoint
if (step % FLAGS.checkpoint_freq == 0 and step > 0) or is_last_step:
if jax.host_id() == 0:
# Save unreplicated optimizer + model state.
checkpoints.save_checkpoint(
os.path.join(FLAGS.save_dir, 'checkpoints', hparam_str),
jax_utils.unreplicate(optimizer),
step)
# Periodic metric handling.
# Training Metrics
if (step and step % FLAGS.log_freq == 0) or is_last_step:
logging.info('Gathering training metrics.')
metrics_all = common_utils.get_metrics(metrics_all)
lr = metrics_all.pop('learning_rate').mean()
metrics_sums = jax.tree_map(jnp.sum, metrics_all)
denominator = metrics_sums.pop('denominator')
summary = jax.tree_map(
lambda x: x / denominator, # pylint: disable=cell-var-from-loop
metrics_sums)
summary['learning_rate'] = lr
# Calculate (clipped) perplexity after averaging log-perplexities:
summary['perplexity'] = jnp.clip(jnp.exp(summary['loss']), a_max=1.0e4)
if jax.host_id() == 0:
logging.info('Train in step: %d, loss: %.4f', step, summary['loss'])
tock = time.time()
steps_per_sec = FLAGS.log_freq / (tock - tick)
tick = tock
summary_writer.scalar('train/steps per second', steps_per_sec, step)
for key, val in summary.items():
summary_writer.scalar('train/' + key, val, step)
summary_writer.flush()
# Reset metric accumulation for next evaluation cycle.
metrics_all = []
# Evaluation Metrics
if (step and step % FLAGS.eval_freq == 0) or is_last_step:
logging.info('Gathering evaluation metrics.')
t_evaluation_start = time.time()
eval_metrics = []
for batches in eval_ds.as_numpy_iterator():
inputs, outputs, programs = common_utils.shard(batches)
metrics = p_eval_step(optimizer.target, inputs, outputs, programs)
eval_metrics.append(metrics)
eval_metrics = common_utils.get_metrics(eval_metrics)
eval_metrics_sums = jax.tree_map(jnp.sum, eval_metrics)
eval_denominator = eval_metrics_sums.pop('denominator')
eval_summary = jax.tree_map(
lambda x: x / eval_denominator, # pylint: disable=cell-var-from-loop
eval_metrics_sums)
if jax.host_id() == 0:
logging.info('Evaluation time: %.4f s step %d, loss: %.4f.',
time.time()-t_evaluation_start, step, eval_summary['loss'])
for key, val in eval_summary.items():
summary_writer.scalar('eval/' + key, val, step)
summary_writer.flush()
# Beam search metrics.
if (step and step % FLAGS.predict_freq == 0) or is_last_step:
logging.info('Gathering beam search metrics.')
for beam_size in [1, 5, 10, 20, 50]:
t_inference_start = time.time()
pred_acc = 0
pred_denominator = 0
ios, targets, predictions, top_of_beams = [], [], [], []
for batches in predict_ds.as_numpy_iterator():
pred_batch = batches
# Handle final odd-sized batch by padding instead of dropping it.
cur_pred_batch_size = pred_batch[0].shape[0]
if cur_pred_batch_size % n_devices:
padded_size = int(
np.ceil(cur_pred_batch_size / n_devices) * n_devices)
# pylint: disable=cell-var-from-loop
pred_batch = jax.tree_map(
lambda x: pad_examples(x, padded_size), pred_batch)
inputs, outputs, programs = common_utils.shard(pred_batch)
cache = (p_init_cache(inputs, outputs, programs)
if not FLAGS.slow_decode else None)
predicted = p_pred_step(optimizer.target, inputs, outputs, cache,
beam_size)
predicted = tohost(predicted)
inputs, outputs, programs = map(tohost, (inputs, outputs, programs))
pred_denominator += programs.shape[0]
for i, beams in enumerate(predicted):
inps, outs = decode_io(inputs[i], outputs[i])
p, p_score = eval_predicted(
beams, inps, outs, parse_beam_fn=decode_program)
if p_score >= len(inps):
pred_acc += 1
ios.append(' ; '.join(map(str, zip(inps, outs))))
targets.append(decode_program(programs[i]).to_string())
try:
predictions.append(p.to_string())
except: # pylint: disable=bare-except
predictions.append('Did not compile')
logging.info('ios: %s', ios[-1])
logging.info('target: %s', targets[-1])
beams_log = []
for beam in beams:
try:
beams_log.append(decode_program(beam).to_string())
except: # pylint: disable=bare-except
beams_log.append('Did not compile')
logging.info('predicted beam: %s', '\n'.join(beams_log))
top_of_beam = []
for index, beam in enumerate(beams[:-5:-1]):
try:
decoded_program = decode_program(beam).to_string()
except: # pylint: disable=bare-except
decoded_program = 'Did not compile'
top_of_beam.append('index: {}, decoded: {}, tokens: {}'.format(
index, decoded_program, beam))
top_of_beams.append('\n\n'.join(top_of_beam))
all_pred_acc, all_pred_denominator = per_host_sum_pmap(
jax.tree_map(np.array, (pred_acc, pred_denominator)))
# Record beam search results as text summaries.
message = []
for n in np.random.choice(np.arange(len(predictions)), 8):
text = (f'ios: {ios[n]}\n\ntarget: {targets[n]}\n\n'
f'predicted: {predictions[n]}\n\n'
f'top of beam:\n\n{top_of_beams[n]}\n\n')
message.append(text)
# Write to tensorboard.
if jax.host_id() == 0:
slow_or_fast = 'slow' if FLAGS.slow_decode else 'fast'
logging.info(
'Prediction time, %s (beam %d): %.4f s, step %d, score %.4f',
slow_or_fast, beam_size, time.time() - t_inference_start, step,
all_pred_acc / all_pred_denominator)
summary_writer.scalar(
'predict-{}/score-{}'.format(slow_or_fast, beam_size),
all_pred_acc / all_pred_denominator, step)
summary_writer.text('samples-{}'.format(beam_size),
'\n------\n'.join(message), step)
summary_writer.flush()
if __name__ == '__main__':
app.run(main)
| [((1590, 1615), 'sys.path.append', 'sys.path.append', (['"""../../"""'], {}), "('../../')\n", (1605, 1615), False, 'import sys\n'), ((1658, 1724), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""seed"""', '(0)', '"""Fixed random seed for training."""'], {}), "('seed', 0, 'Fixed random seed for training.')\n", (1678, 1724), False, 'from absl import flags\n'), ((1725, 1774), 'absl.flags.DEFINE_float', 'flags.DEFINE_float', (['"""lr"""', '(0.001)', '"""Learning rate."""'], {}), "('lr', 0.001, 'Learning rate.')\n", (1743, 1774), False, 'from absl import flags\n'), ((1774, 1863), 'absl.flags.DEFINE_float', 'flags.DEFINE_float', (['"""weight_decay"""', '(0.1)', '"""Decay factor for AdamW-style weight decay."""'], {}), "('weight_decay', 0.1,\n 'Decay factor for AdamW-style weight decay.')\n", (1792, 1863), False, 'from absl import flags\n'), ((1880, 1946), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""embedding_dim"""', '(256)', '"""Embedding dimension."""'], {}), "('embedding_dim', 256, 'Embedding dimension.')\n", (1900, 1946), False, 'from absl import flags\n'), ((1947, 2007), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""hidden_dim"""', '(512)', '"""Hidden dimension."""'], {}), "('hidden_dim', 512, 'Hidden dimension.')\n", (1967, 2007), False, 'from absl import flags\n'), ((2008, 2065), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""num_heads"""', '(4)', '"""Number of layers."""'], {}), "('num_heads', 4, 'Number of layers.')\n", (2028, 2065), False, 'from absl import flags\n'), ((2066, 2135), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""num_layers"""', '(3)', '"""Number of Transformer heads."""'], {}), "('num_layers', 3, 'Number of Transformer heads.')\n", (2086, 2135), False, 'from absl import flags\n'), ((2136, 2214), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""slow_decode"""', '(True)', '"""Use slow decoding for prediction?"""'], {}), "('slow_decode', True, 'Use slow decoding for prediction?')\n", (2156, 2214), False, 'from absl import flags\n'), ((2216, 2305), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""dataset_filepattern"""', 'None', '"""Filepattern for TFRecord dataset."""'], {}), "('dataset_filepattern', None,\n 'Filepattern for TFRecord dataset.')\n", (2235, 2305), False, 'from absl import flags\n'), ((2322, 2414), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""per_device_batch_size"""', '(16)', '"""Number of program tasks in a batch."""'], {}), "('per_device_batch_size', 16,\n 'Number of program tasks in a batch.')\n", (2342, 2414), False, 'from absl import flags\n'), ((2432, 2527), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""num_strings_per_task"""', '(4)', '"""Number of input/output strings per task."""'], {}), "('num_strings_per_task', 4,\n 'Number of input/output strings per task.')\n", (2452, 2527), False, 'from absl import flags\n'), ((2545, 2636), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""max_program_length"""', '(100)', '"""Maximum number of tokens in program."""'], {}), "('max_program_length', 100,\n 'Maximum number of tokens in program.')\n", (2565, 2636), False, 'from absl import flags\n'), ((2654, 2758), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""max_characters"""', '(120)', '"""Maximum number of characters in input/output strings."""'], {}), "('max_characters', 120,\n 'Maximum number of characters in input/output strings.')\n", (2674, 2758), False, 'from absl import flags\n'), ((2777, 2847), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""save_dir"""', 'None', '"""Directory to save results to."""'], {}), "('save_dir', None, 'Directory to save results to.')\n", (2796, 2847), False, 'from absl import flags\n'), ((2848, 2925), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""num_train_steps"""', '(2000000)', '"""Number of training steps."""'], {}), "('num_train_steps', 2000000, 'Number of training steps.')\n", (2868, 2925), False, 'from absl import flags\n'), ((2926, 2999), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""num_eval_steps"""', '(10)', '"""Number of evaluation steps."""'], {}), "('num_eval_steps', 10, 'Number of evaluation steps.')\n", (2946, 2999), False, 'from absl import flags\n'), ((3000, 3085), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""log_freq"""', '(1000)', '"""Number of steps between training logs."""'], {}), "('log_freq', 1000, 'Number of steps between training logs.'\n )\n", (3020, 3085), False, 'from absl import flags\n'), ((3081, 3153), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""eval_freq"""', '(2000)', '"""Number of steps between eval."""'], {}), "('eval_freq', 2000, 'Number of steps between eval.')\n", (3101, 3153), False, 'from absl import flags\n'), ((3154, 3254), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""predict_freq"""', '(50000)', '"""Number of steps between prediction (beam search)."""'], {}), "('predict_freq', 50000,\n 'Number of steps between prediction (beam search).')\n", (3174, 3254), False, 'from absl import flags\n'), ((3272, 3367), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""checkpoint_freq"""', '(50000)', '"""Number of steps between checkpoint saves."""'], {}), "('checkpoint_freq', 50000,\n 'Number of steps between checkpoint saves.')\n", (3292, 3367), False, 'from absl import flags\n'), ((3385, 3529), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""finetune_start_step"""', '(-1)', '"""Step the initial checkpoint should start at for finetuning, or -1 if not finetuning."""'], {}), "('finetune_start_step', -1,\n 'Step the initial checkpoint should start at for finetuning, or -1 if not finetuning.'\n )\n", (3405, 3529), False, 'from absl import flags\n'), ((3566, 3671), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['"""restore_checkpoints"""', '(True)', '"""Whether to restore from existing model checkpoints."""'], {}), "('restore_checkpoints', True,\n 'Whether to restore from existing model checkpoints.')\n", (3583, 3671), False, 'from absl import flags\n'), ((3687, 3852), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""attention_mask_type"""', '"""bos_full_attention"""', '"""The kind of attention mask to use. Options are: baseline, bos_to_bos, bos_full_attention"""'], {}), "('attention_mask_type', 'bos_full_attention',\n 'The kind of attention mask to use. Options are: baseline, bos_to_bos, bos_full_attention'\n )\n", (3706, 3852), False, 'from absl import flags\n'), ((3888, 3990), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['"""use_relative_attention"""', '(True)', '"""Whether to use relative positonal embeddings."""'], {}), "('use_relative_attention', True,\n 'Whether to use relative positonal embeddings.')\n", (3905, 3990), False, 'from absl import flags\n'), ((4005, 4131), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['"""bos_special_attention"""', '(False)', '"""Whether to use special relative attention computation for BOS tokens."""'], {}), "('bos_special_attention', False,\n 'Whether to use special relative attention computation for BOS tokens.')\n", (4022, 4131), False, 'from absl import flags\n'), ((4207, 4296), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""xm_parameters"""', 'None', '"""String specifying hyperparamter search."""'], {}), "('xm_parameters', None,\n 'String specifying hyperparamter search.')\n", (4226, 4296), False, 'from absl import flags\n'), ((7127, 7173), 'flax.training.common_utils.onehot', 'common_utils.onehot', (['targets', 'logits.shape[-1]'], {}), '(targets, logits.shape[-1])\n', (7146, 7173), False, 'from flax.training import common_utils\n'), ((8564, 8594), 'jax.lax.psum', 'jax.lax.psum', (['metrics', '"""batch"""'], {}), "(metrics, 'batch')\n", (8576, 8594), False, 'import jax\n'), ((9159, 9188), 'jax.random.split', 'jax.random.split', (['dropout_rng'], {}), '(dropout_rng)\n', (9175, 9188), False, 'import jax\n'), ((9719, 9760), 'jax.value_and_grad', 'jax.value_and_grad', (['loss_fn'], {'has_aux': '(True)'}), '(loss_fn, has_aux=True)\n', (9737, 9760), False, 'import jax\n'), ((9818, 9846), 'jax.lax.pmean', 'jax.lax.pmean', (['grad', '"""batch"""'], {}), "(grad, 'batch')\n", (9831, 9846), False, 'import jax\n'), ((12015, 12077), 'latent_programmer.decode.flat_batch_beam_expand', 'decode.flat_batch_beam_expand', (['encoded_padding_mask', 'beam_size'], {}), '(encoded_padding_mask, beam_size)\n', (12044, 12077), False, 'from latent_programmer import decode\n'), ((13395, 13607), 'latent_programmer.decode.beam_search', 'decode.beam_search', (['inputs', 'cache', 'tokens_ids_to_logits'], {'beam_size': 'beam_size', 'alpha': '(0.6)', 'bos_token': 'config.base_config.bos_token', 'eos_token': 'eos_token', 'max_decode_len': 'max_decode_len', 'slow_decode': 'slow_decode'}), '(inputs, cache, tokens_ids_to_logits, beam_size=beam_size,\n alpha=0.6, bos_token=config.base_config.bos_token, eos_token=eos_token,\n max_decode_len=max_decode_len, slow_decode=slow_decode)\n', (13413, 13607), False, 'from latent_programmer import decode\n'), ((14514, 14543), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (14537, 14543), False, 'import collections\n'), ((14555, 14568), 'jax.devices', 'jax.devices', ([], {}), '()\n', (14566, 14568), False, 'import jax\n'), ((15633, 15656), 'tensorflow.compat.v2.enable_v2_behavior', 'tf.enable_v2_behavior', ([], {}), '()\n', (15654, 15656), True, 'import tensorflow.compat.v2 as tf\n'), ((15660, 15690), 'tensorflow.compat.v2.random.set_seed', 'tf.random.set_seed', (['FLAGS.seed'], {}), '(FLAGS.seed)\n', (15678, 15690), True, 'import tensorflow.compat.v2 as tf\n'), ((15693, 15719), 'numpy.random.seed', 'np.random.seed', (['FLAGS.seed'], {}), '(FLAGS.seed)\n', (15707, 15719), True, 'import numpy as np\n'), ((15722, 15745), 'random.seed', 'random.seed', (['FLAGS.seed'], {}), '(FLAGS.seed)\n', (15733, 15745), False, 'import random\n'), ((16766, 16790), 'jax.local_device_count', 'jax.local_device_count', ([], {}), '()\n', (16788, 16790), False, 'import jax\n'), ((17466, 17497), 'latent_programmer.tasks.robust_fill.tokens.build_token_tables', 'dsl_tokens.build_token_tables', ([], {}), '()\n', (17495, 17497), True, 'from latent_programmer.tasks.robust_fill import tokens as dsl_tokens\n'), ((18542, 18579), 'absl.logging.info', 'logging.info', (['"""Initializing dataset."""'], {}), "('Initializing dataset.')\n", (18554, 18579), False, 'from absl import logging\n'), ((18702, 18768), 'absl.logging.info', 'logging.info', (['"""Loading dataset from %s"""', 'FLAGS.dataset_filepattern'], {}), "('Loading dataset from %s', FLAGS.dataset_filepattern)\n", (18714, 18768), False, 'from absl import logging\n'), ((18837, 18885), 'absl.logging.info', 'logging.info', (['"""padded_shapes: %s"""', 'padded_shapes'], {}), "('padded_shapes: %s', padded_shapes)\n", (18849, 18885), False, 'from absl import logging\n'), ((18898, 19004), 'latent_programmer.decomposition_transformer_attention.input_pipeline.create_dataset_from_tf_record', 'input_pipeline.create_dataset_from_tf_record', (['FLAGS.dataset_filepattern', 'token_id_table', 'char_id_table'], {}), '(FLAGS.dataset_filepattern,\n token_id_table, char_id_table)\n', (18942, 19004), False, 'from latent_programmer.decomposition_transformer_attention import input_pipeline\n'), ((20156, 20330), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformerConfig', 'models.DecomposeAttentionTransformerConfig', ([], {'base_config': 'base_config', 'attention_mask_type': 'FLAGS.attention_mask_type', 'bos_special_attention': 'FLAGS.bos_special_attention'}), '(base_config=base_config,\n attention_mask_type=FLAGS.attention_mask_type, bos_special_attention=\n FLAGS.bos_special_attention)\n', (20198, 20330), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((20897, 20927), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['FLAGS.seed'], {}), '(FLAGS.seed)\n', (20915, 20927), False, 'import jax\n'), ((20993, 21014), 'jax.random.split', 'jax.random.split', (['rng'], {}), '(rng)\n', (21009, 21014), False, 'import jax\n'), ((21022, 21071), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['eval_config'], {}), '(eval_config)\n', (21058, 21071), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((21301, 21393), 'flax.optim.Adam', 'optim.Adam', (['FLAGS.lr'], {'beta1': '(0.9)', 'beta2': '(0.98)', 'eps': '(1e-09)', 'weight_decay': 'FLAGS.weight_decay'}), '(FLAGS.lr, beta1=0.9, beta2=0.98, eps=1e-09, weight_decay=FLAGS.\n weight_decay)\n', (21311, 21393), False, 'from flax import optim\n'), ((22201, 22231), 'flax.jax_utils.replicate', 'jax_utils.replicate', (['optimizer'], {}), '(optimizer)\n', (22220, 22231), False, 'from flax import jax_utils\n'), ((23679, 23690), 'time.time', 'time.time', ([], {}), '()\n', (23688, 23690), False, 'import time\n'), ((30635, 30648), 'absl.app.run', 'app.run', (['main'], {}), '(main)\n', (30642, 30648), False, 'from absl import app\n'), ((6479, 6514), 'jax.numpy.asarray', 'jnp.asarray', (['ret'], {'dtype': 'jnp.float32'}), '(ret, dtype=jnp.float32)\n', (6490, 6514), True, 'import jax.numpy as jnp\n'), ((7274, 7300), 'jax.numpy.asarray', 'jnp.asarray', (['targets.shape'], {}), '(targets.shape)\n', (7285, 7300), True, 'import jax.numpy as jnp\n'), ((8008, 8035), 'jax.numpy.argmax', 'jnp.argmax', (['logits'], {'axis': '(-1)'}), '(logits, axis=-1)\n', (8018, 8035), True, 'import jax.numpy as jnp\n'), ((8078, 8104), 'jax.numpy.asarray', 'jnp.asarray', (['targets.shape'], {}), '(targets.shape)\n', (8089, 8104), True, 'import jax.numpy as jnp\n'), ((10914, 10935), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['(0)'], {}), '(0)\n', (10932, 10935), False, 'import jax\n'), ((10943, 10972), 'jax.numpy.ones', 'jnp.ones', (['inputs.shape', 'dtype'], {}), '(inputs.shape, dtype)\n', (10951, 10972), True, 'import jax.numpy as jnp\n'), ((10980, 11010), 'jax.numpy.ones', 'jnp.ones', (['outputs.shape', 'dtype'], {}), '(outputs.shape, dtype)\n', (10988, 11010), True, 'import jax.numpy as jnp\n'), ((11018, 11047), 'jax.numpy.ones', 'jnp.ones', (['target_shape', 'dtype'], {}), '(target_shape, dtype)\n', (11026, 11047), True, 'import jax.numpy as jnp\n'), ((14867, 14899), 'jax.tree_map', 'jax.tree_map', (['(lambda x: x[0])', 'xs'], {}), '(lambda x: x[0], xs)\n', (14879, 14899), False, 'import jax\n'), ((16797, 16810), 'jax.host_id', 'jax.host_id', ([], {}), '()\n', (16808, 16810), False, 'import jax\n'), ((20960, 20973), 'jax.host_id', 'jax.host_id', ([], {}), '()\n', (20971, 20973), False, 'import jax\n'), ((21094, 21109), 'jax.jit', 'jax.jit', (['m.init'], {}), '(m.init)\n', (21101, 21109), False, 'import jax\n'), ((21166, 21197), 'jax.numpy.ones', 'jnp.ones', (['io_shape', 'jnp.float32'], {}), '(io_shape, jnp.float32)\n', (21174, 21197), True, 'import jax.numpy as jnp\n'), ((21205, 21236), 'jax.numpy.ones', 'jnp.ones', (['io_shape', 'jnp.float32'], {}), '(io_shape, jnp.float32)\n', (21213, 21236), True, 'import jax.numpy as jnp\n'), ((21244, 21280), 'jax.numpy.ones', 'jnp.ones', (['program_shape', 'jnp.float32'], {}), '(program_shape, jnp.float32)\n', (21252, 21280), True, 'import jax.numpy as jnp\n'), ((21867, 21931), 'absl.logging.info', 'logging.info', (['"""Found model checkpointed at step %d."""', 'start_step'], {}), "('Found model checkpointed at step %d.', start_step)\n", (21879, 21931), False, 'from absl import logging\n'), ((22670, 22760), 'functools.partial', 'functools.partial', (['train_step'], {'learning_rate_fn': 'learning_rate_fn', 'config': 'train_config'}), '(train_step, learning_rate_fn=learning_rate_fn, config=\n train_config)\n', (22687, 22760), False, 'import functools\n'), ((22845, 22914), 'functools.partial', 'functools.partial', (['eval_step'], {'eos_token': 'eos_token', 'config': 'eval_config'}), '(eval_step, eos_token=eos_token, config=eval_config)\n', (22862, 22914), False, 'import functools\n'), ((23022, 23125), 'functools.partial', 'functools.partial', (['initialize_cache'], {'max_decode_len': 'FLAGS.max_program_length', 'config': 'predict_config'}), '(initialize_cache, max_decode_len=FLAGS.max_program_length,\n config=predict_config)\n', (23039, 23125), False, 'import functools\n'), ((23211, 23363), 'functools.partial', 'functools.partial', (['predict_step'], {'eos_token': 'eos_token', 'max_decode_len': 'FLAGS.max_program_length', 'config': 'predict_config', 'slow_decode': 'FLAGS.slow_decode'}), '(predict_step, eos_token=eos_token, max_decode_len=FLAGS.\n max_program_length, config=predict_config, slow_decode=FLAGS.slow_decode)\n', (23228, 23363), False, 'import functools\n'), ((23614, 23638), 'jax.local_device_count', 'jax.local_device_count', ([], {}), '()\n', (23636, 23638), False, 'import jax\n'), ((9202, 9231), 'jax.numpy.where', 'jnp.where', (['(programs > 0)', '(1)', '(0)'], {}), '(programs > 0, 1, 0)\n', (9211, 9231), True, 'import jax.numpy as jnp\n'), ((10440, 10484), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['config'], {}), '(config)\n', (10476, 10484), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((10857, 10901), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['config'], {}), '(config)\n', (10893, 10901), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((11936, 11964), 'jax.numpy.where', 'jnp.where', (['(outputs > 0)', '(1)', '(0)'], {}), '(outputs > 0, 1, 0)\n', (11945, 11964), True, 'import jax.numpy as jnp\n'), ((14151, 14176), 'numpy.tile', 'np.tile', (['x[-1]', 'tile_dims'], {}), '(x[-1], tile_dims)\n', (14158, 14176), True, 'import numpy as np\n'), ((14696, 14716), 'jax.lax.psum', 'jax.lax.psum', (['x', '"""i"""'], {}), "(x, 'i')\n", (14708, 14716), False, 'import jax\n'), ((16873, 16919), 'os.path.join', 'os.path.join', (['FLAGS.save_dir', '"""tb"""', 'hparam_str'], {}), "(FLAGS.save_dir, 'tb', hparam_str)\n", (16885, 16919), False, 'import os\n'), ((19322, 19346), 'numpy.ceil', 'np.ceil', (['(batch_size / 10)'], {}), '(batch_size / 10)\n', (19329, 19346), True, 'import numpy as np\n'), ((21730, 21785), 'os.path.join', 'os.path.join', (['FLAGS.save_dir', '"""checkpoints"""', 'hparam_str'], {}), "(FLAGS.save_dir, 'checkpoints', hparam_str)\n", (21742, 21785), False, 'import os\n'), ((21976, 22092), 'absl.logging.info', 'logging.info', (['"""Checking that start_step (%s) == finetune_start_step (%s)"""', 'start_step', 'FLAGS.finetune_start_step'], {}), "('Checking that start_step (%s) == finetune_start_step (%s)',\n start_step, FLAGS.finetune_start_step)\n", (21988, 22092), False, 'from absl import logging\n'), ((24500, 24543), 'absl.logging.info', 'logging.info', (['"""Gathering training metrics."""'], {}), "('Gathering training metrics.')\n", (24512, 24543), False, 'from absl import logging\n'), ((24564, 24601), 'flax.training.common_utils.get_metrics', 'common_utils.get_metrics', (['metrics_all'], {}), '(metrics_all)\n', (24588, 24601), False, 'from flax.training import common_utils\n'), ((24674, 24708), 'jax.tree_map', 'jax.tree_map', (['jnp.sum', 'metrics_all'], {}), '(jnp.sum, metrics_all)\n', (24686, 24708), False, 'import jax\n'), ((24777, 24830), 'jax.tree_map', 'jax.tree_map', (['(lambda x: x / denominator)', 'metrics_sums'], {}), '(lambda x: x / denominator, metrics_sums)\n', (24789, 24830), False, 'import jax\n'), ((25673, 25718), 'absl.logging.info', 'logging.info', (['"""Gathering evaluation metrics."""'], {}), "('Gathering evaluation metrics.')\n", (25685, 25718), False, 'from absl import logging\n'), ((25746, 25757), 'time.time', 'time.time', ([], {}), '()\n', (25755, 25757), False, 'import time\n'), ((26031, 26069), 'flax.training.common_utils.get_metrics', 'common_utils.get_metrics', (['eval_metrics'], {}), '(eval_metrics)\n', (26055, 26069), False, 'from flax.training import common_utils\n'), ((26096, 26131), 'jax.tree_map', 'jax.tree_map', (['jnp.sum', 'eval_metrics'], {}), '(jnp.sum, eval_metrics)\n', (26108, 26131), False, 'import jax\n'), ((26215, 26278), 'jax.tree_map', 'jax.tree_map', (['(lambda x: x / eval_denominator)', 'eval_metrics_sums'], {}), '(lambda x: x / eval_denominator, eval_metrics_sums)\n', (26227, 26278), False, 'import jax\n'), ((26753, 26799), 'absl.logging.info', 'logging.info', (['"""Gathering beam search metrics."""'], {}), "('Gathering beam search metrics.')\n", (26765, 26799), False, 'from absl import logging\n'), ((7209, 7231), 'flax.linen.log_softmax', 'nn.log_softmax', (['logits'], {}), '(logits)\n', (7223, 7231), True, 'from flax import linen as nn\n'), ((9332, 9376), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['config'], {}), '(config)\n', (9368, 9376), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((11711, 11755), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', (['config'], {}), '(config)\n', (11747, 11755), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((14794, 14829), 'jax.numpy.broadcast_to', 'jnp.broadcast_to', (['x', '((1,) + x.shape)'], {}), '(x, (1,) + x.shape)\n', (14810, 14829), True, 'import jax.numpy as jnp\n'), ((16452, 16483), 'json.loads', 'json.loads', (['FLAGS.xm_parameters'], {}), '(FLAGS.xm_parameters)\n', (16462, 16483), False, 'import json\n'), ((24132, 24145), 'jax.host_id', 'jax.host_id', ([], {}), '()\n', (24143, 24145), False, 'import jax\n'), ((25038, 25062), 'jax.numpy.exp', 'jnp.exp', (["summary['loss']"], {}), "(summary['loss'])\n", (25045, 25062), True, 'import jax.numpy as jnp\n'), ((25087, 25100), 'jax.host_id', 'jax.host_id', ([], {}), '()\n', (25098, 25100), False, 'import jax\n'), ((25115, 25183), 'absl.logging.info', 'logging.info', (['"""Train in step: %d, loss: %.4f"""', 'step', "summary['loss']"], {}), "('Train in step: %d, loss: %.4f', step, summary['loss'])\n", (25127, 25183), False, 'from absl import logging\n'), ((25199, 25210), 'time.time', 'time.time', ([], {}), '()\n', (25208, 25210), False, 'import time\n'), ((25868, 25895), 'flax.training.common_utils.shard', 'common_utils.shard', (['batches'], {}), '(batches)\n', (25886, 25895), False, 'from flax.training import common_utils\n'), ((26348, 26361), 'jax.host_id', 'jax.host_id', ([], {}), '()\n', (26359, 26361), False, 'import jax\n'), ((26871, 26882), 'time.time', 'time.time', ([], {}), '()\n', (26880, 26882), False, 'import time\n'), ((5777, 5814), 'jax.numpy.minimum', 'jnp.minimum', (['(1.0)', '(step / warmup_steps)'], {}), '(1.0, step / warmup_steps)\n', (5788, 5814), True, 'import jax.numpy as jnp\n'), ((10276, 10361), 'jax.numpy.logical_and', 'jnp.logical_and', (['(programs != config.base_config.bos_token)', '(programs != eos_token)'], {}), '(programs != config.base_config.bos_token, programs != eos_token\n )\n', (10291, 10361), True, 'import jax.numpy as jnp\n'), ((12255, 12306), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', ([], {'config': 'config'}), '(config=config)\n', (12291, 12306), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((12693, 12744), 'latent_programmer.decomposition_transformer_attention.decomposition_models.DecomposeAttentionTransformer', 'models.DecomposeAttentionTransformer', ([], {'config': 'config'}), '(config=config)\n', (12729, 12744), True, 'from latent_programmer.decomposition_transformer_attention import decomposition_models as models\n'), ((24254, 24309), 'os.path.join', 'os.path.join', (['FLAGS.save_dir', '"""checkpoints"""', 'hparam_str'], {}), "(FLAGS.save_dir, 'checkpoints', hparam_str)\n", (24266, 24309), False, 'import os\n'), ((24323, 24355), 'flax.jax_utils.unreplicate', 'jax_utils.unreplicate', (['optimizer'], {}), '(optimizer)\n', (24344, 24355), False, 'from flax import jax_utils\n'), ((27557, 27587), 'flax.training.common_utils.shard', 'common_utils.shard', (['pred_batch'], {}), '(pred_batch)\n', (27575, 27587), False, 'from flax.training import common_utils\n'), ((29522, 29574), 'jax.tree_map', 'jax.tree_map', (['np.array', '(pred_acc, pred_denominator)'], {}), '(np.array, (pred_acc, pred_denominator))\n', (29534, 29574), False, 'import jax\n'), ((29972, 29985), 'jax.host_id', 'jax.host_id', ([], {}), '()\n', (29983, 29985), False, 'import jax\n'), ((26458, 26469), 'time.time', 'time.time', ([], {}), '()\n', (26467, 26469), False, 'import time\n'), ((28579, 28611), 'absl.logging.info', 'logging.info', (['"""ios: %s"""', 'ios[-1]'], {}), "('ios: %s', ios[-1])\n", (28591, 28611), False, 'from absl import logging\n'), ((28624, 28663), 'absl.logging.info', 'logging.info', (['"""target: %s"""', 'targets[-1]'], {}), "('target: %s', targets[-1])\n", (28636, 28663), False, 'from absl import logging\n'), ((5873, 5910), 'jax.numpy.maximum', 'jnp.maximum', (['(1.0)', '(step - warmup_steps)'], {}), '(1.0, step - warmup_steps)\n', (5884, 5910), True, 'import jax.numpy as jnp\n'), ((5972, 5994), 'jax.numpy.sqrt', 'jnp.sqrt', (['warmup_steps'], {}), '(warmup_steps)\n', (5980, 5994), True, 'import jax.numpy as jnp\n'), ((18178, 18209), 'numpy.argmax', 'np.argmax', (['(program == eos_token)'], {}), '(program == eos_token)\n', (18187, 18209), True, 'import numpy as np\n'), ((30196, 30207), 'time.time', 'time.time', ([], {}), '()\n', (30205, 30207), False, 'import time\n'), ((6019, 6050), 'jax.numpy.maximum', 'jnp.maximum', (['step', 'warmup_steps'], {}), '(step, warmup_steps)\n', (6030, 6050), True, 'import jax.numpy as jnp\n'), ((27309, 27349), 'numpy.ceil', 'np.ceil', (['(cur_pred_batch_size / n_devices)'], {}), '(cur_pred_batch_size / n_devices)\n', (27316, 27349), True, 'import numpy as np\n'), ((6365, 6399), 'jax.numpy.cos', 'jnp.cos', (['(jnp.pi * (progress % 1.0))'], {}), '(jnp.pi * (progress % 1.0))\n', (6372, 6399), True, 'import jax.numpy as jnp\n')] |
ziyixi/SeisScripts | plot/profile_interpolation/plot_profile.py | a484bc1747eae52b2441f0bfd47ac7e093150f1d | import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import click
import numba
def prepare_data(data_pd, parameter):
lon_set = set(data_pd["lon"])
lat_set = set(data_pd["lat"])
dep_set = set(data_pd["dep"])
lon_list = sorted(lon_set)
lat_list = sorted(lat_set)
dep_list = sorted(dep_set)
lon_mesh, lat_mesh, dep_mesh = np.meshgrid(
lon_list, lat_list, dep_list, indexing="ij")
dx, dy, dz = np.shape(lon_mesh)
value_mesh = np.zeros_like(lon_mesh)
x_mesh = np.zeros_like(lon_mesh)
y_mesh = np.zeros_like(lon_mesh)
z_mesh = np.zeros_like(lon_mesh)
r_mesh = np.zeros_like(lon_mesh)
for i in range(dx):
for j in range(dy):
for k in range(dz):
x_mesh[i, j, k], y_mesh[i, j, k], z_mesh[i, j, k], r_mesh[i, j, k] = lld2xyzr(
lat_mesh[i, j, k], lon_mesh[i, j, k], dep_mesh[i, j, k])
for index, row in data_pd.iterrows():
i = int(round((row.lon-lon_list[0])/(lon_list[1]-lon_list[0]), 0))
j = int(round((row.lat-lat_list[0])/(lat_list[1]-lat_list[0]), 0))
k = int(round((row.dep-dep_list[0])/(dep_list[1]-dep_list[0]), 0))
value_mesh[i, j, k] = row[parameter]
return x_mesh, y_mesh, z_mesh, value_mesh
def get_value(data_pd, lat, lon, dep, parameter):
return data_pd.loc[(data_pd.lat == lat) & (data_pd.lon == lon) & (data_pd.dep == dep)][parameter].values[0]
@numba.njit()
def lld2xyzr(lat, lon, dep):
R_EARTH_KM = 6371.0
r = (R_EARTH_KM-dep)/R_EARTH_KM
theta = 90-lat
phi = lon
z = r*cosd(theta)
h = r*sind(theta)
x = h*cosd(phi)
y = h*sind(phi)
return (x, y, z, r)
@numba.njit()
def cosd(x):
return np.cos(np.deg2rad(x))
@numba.njit()
def sind(x):
return np.sin(np.deg2rad(x))
# def get_value_func(x_mesh, y_mesh, z_mesh, value_mesh):
# value_func = RegularGridInterpolator(
# (x_mesh, y_mesh, z_mesh), value_mesh, method="nearest")
# return value_func
@numba.njit()
def interp_value(lat, lon, dep, x_mesh, y_mesh, z_mesh, value_mesh):
x, y, z, _ = lld2xyzr(lat, lon, dep)
distance2 = (x_mesh-x)**2+(y_mesh-y)**2+(z_mesh-z)**2
mindistance2 = np.min(distance2)
coors = np.where(distance2 == mindistance2)
value = value_mesh[coors[0][0], coors[1][0], coors[2][0]]
return value
def generate_vertical_profile_grids(lon_list, lat_list, dep_list, hnpts, vnpts):
lons = np.linspace(lon_list[0], lon_list[1], hnpts)
lats = np.linspace(lat_list[0], lat_list[1], hnpts)
deps = np.linspace(dep_list[0], dep_list[1], vnpts)
return lons, lats, deps
@click.command()
@click.option('--lon1', required=True, type=float, help="lon1")
@click.option('--lon2', required=True, type=float, help="lon2")
@click.option('--lat1', required=True, type=float, help="lat1")
@click.option('--lat2', required=True, type=float, help="lat2")
@click.option('--dep1', required=True, type=float, help="dep1")
@click.option('--dep2', required=True, type=float, help="dep2")
@click.option('--data', required=True, type=str, help="the pickle file")
@click.option('--parameter', required=True, type=str, help="physicial parameter to plot")
@click.option('--hnpts', required=True, type=int, help="horizontal npts")
@click.option('--vnpts', required=True, type=int, help="vertical npts")
def main(lon1, lon2, lat1, lat2, dep1, dep2, data, parameter, hnpts, vnpts):
lon_list = [lon1, lon2]
lat_list = [lat1, lat2]
dep_list = [dep1, dep2]
data_pd_raw = pd.read_pickle(data)
# data_pd is too big
minlon = min(lon1, lon2)
maxlon = max(lon1, lon2)
minlat = min(lat1, lat2)
maxlat = max(lat1, lat2)
mindep = min(dep1, dep2)
maxdep = max(dep1, dep2)
data_pd = data_pd_raw.loc[(data_pd_raw.lat <= maxlat) & (
data_pd_raw.lat >= minlat) & (data_pd_raw.lon < maxlon) & (data_pd_raw.lon > minlon) & (data_pd_raw.dep >= mindep) & (data_pd_raw.dep <= maxdep)]
x_mesh, y_mesh, z_mesh, value_mesh = prepare_data(data_pd, parameter)
lons_plot, lats_plot, deps_plot = generate_vertical_profile_grids(
lon_list, lat_list, dep_list, hnpts, vnpts)
values = np.zeros((hnpts, vnpts))
for ih in range(hnpts):
for iv in range(vnpts):
values[ih, iv] = interp_value(
lats_plot[ih], lons_plot[ih], deps_plot[iv], x_mesh, y_mesh, z_mesh, value_mesh)
# print(lats_plot[ih], lons_plot[ih], deps_plot[iv], values[ih, iv])
# plotting part
plt.figure()
mesh_plot_lat, mesh_plot_dep = np.meshgrid(
lats_plot, deps_plot, indexing="ij")
# get vmin and vmax
vmin_round = round(np.min(values), 2)
if(vmin_round < np.min(values)):
vmin = vmin_round
else:
vmin = vmin_round-0.01
vmax_round = round(np.max(values), 2)
if(vmax_round > np.max(values)):
vmax = vmax_round
else:
vmax = vmax_round+0.01
print(vmin, vmax, np.max(values), np.min(values), vmin_round, vmax_round)
plt.contourf(mesh_plot_lat, mesh_plot_dep,
values, 101, cmap=plt.cm.seismic_r)
v = np.arange(vmin, vmax, 0.01)
plt.colorbar(ticks=v, label="perturbation")
plt.gca().invert_yaxis()
plt.xlabel(
f"latitude(°) between (lon: {lon1}°, lat: {lat1}°) and (lon: {lon2}°, lat: {lat2}°)")
plt.ylabel("depth(km)")
plt.show()
if __name__ == "__main__":
main()
| [((1443, 1455), 'numba.njit', 'numba.njit', ([], {}), '()\n', (1453, 1455), False, 'import numba\n'), ((1691, 1703), 'numba.njit', 'numba.njit', ([], {}), '()\n', (1701, 1703), False, 'import numba\n'), ((1753, 1765), 'numba.njit', 'numba.njit', ([], {}), '()\n', (1763, 1765), False, 'import numba\n'), ((2009, 2021), 'numba.njit', 'numba.njit', ([], {}), '()\n', (2019, 2021), False, 'import numba\n'), ((2636, 2651), 'click.command', 'click.command', ([], {}), '()\n', (2649, 2651), False, 'import click\n'), ((2653, 2715), 'click.option', 'click.option', (['"""--lon1"""'], {'required': '(True)', 'type': 'float', 'help': '"""lon1"""'}), "('--lon1', required=True, type=float, help='lon1')\n", (2665, 2715), False, 'import click\n'), ((2717, 2779), 'click.option', 'click.option', (['"""--lon2"""'], {'required': '(True)', 'type': 'float', 'help': '"""lon2"""'}), "('--lon2', required=True, type=float, help='lon2')\n", (2729, 2779), False, 'import click\n'), ((2781, 2843), 'click.option', 'click.option', (['"""--lat1"""'], {'required': '(True)', 'type': 'float', 'help': '"""lat1"""'}), "('--lat1', required=True, type=float, help='lat1')\n", (2793, 2843), False, 'import click\n'), ((2845, 2907), 'click.option', 'click.option', (['"""--lat2"""'], {'required': '(True)', 'type': 'float', 'help': '"""lat2"""'}), "('--lat2', required=True, type=float, help='lat2')\n", (2857, 2907), False, 'import click\n'), ((2909, 2971), 'click.option', 'click.option', (['"""--dep1"""'], {'required': '(True)', 'type': 'float', 'help': '"""dep1"""'}), "('--dep1', required=True, type=float, help='dep1')\n", (2921, 2971), False, 'import click\n'), ((2973, 3035), 'click.option', 'click.option', (['"""--dep2"""'], {'required': '(True)', 'type': 'float', 'help': '"""dep2"""'}), "('--dep2', required=True, type=float, help='dep2')\n", (2985, 3035), False, 'import click\n'), ((3037, 3108), 'click.option', 'click.option', (['"""--data"""'], {'required': '(True)', 'type': 'str', 'help': '"""the pickle file"""'}), "('--data', required=True, type=str, help='the pickle file')\n", (3049, 3108), False, 'import click\n'), ((3110, 3203), 'click.option', 'click.option', (['"""--parameter"""'], {'required': '(True)', 'type': 'str', 'help': '"""physicial parameter to plot"""'}), "('--parameter', required=True, type=str, help=\n 'physicial parameter to plot')\n", (3122, 3203), False, 'import click\n'), ((3200, 3272), 'click.option', 'click.option', (['"""--hnpts"""'], {'required': '(True)', 'type': 'int', 'help': '"""horizontal npts"""'}), "('--hnpts', required=True, type=int, help='horizontal npts')\n", (3212, 3272), False, 'import click\n'), ((3274, 3344), 'click.option', 'click.option', (['"""--vnpts"""'], {'required': '(True)', 'type': 'int', 'help': '"""vertical npts"""'}), "('--vnpts', required=True, type=int, help='vertical npts')\n", (3286, 3344), False, 'import click\n'), ((369, 425), 'numpy.meshgrid', 'np.meshgrid', (['lon_list', 'lat_list', 'dep_list'], {'indexing': '"""ij"""'}), "(lon_list, lat_list, dep_list, indexing='ij')\n", (380, 425), True, 'import numpy as np\n'), ((452, 470), 'numpy.shape', 'np.shape', (['lon_mesh'], {}), '(lon_mesh)\n', (460, 470), True, 'import numpy as np\n'), ((488, 511), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', (501, 511), True, 'import numpy as np\n'), ((525, 548), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', (538, 548), True, 'import numpy as np\n'), ((562, 585), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', (575, 585), True, 'import numpy as np\n'), ((599, 622), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', (612, 622), True, 'import numpy as np\n'), ((636, 659), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', (649, 659), True, 'import numpy as np\n'), ((2209, 2226), 'numpy.min', 'np.min', (['distance2'], {}), '(distance2)\n', (2215, 2226), True, 'import numpy as np\n'), ((2239, 2274), 'numpy.where', 'np.where', (['(distance2 == mindistance2)'], {}), '(distance2 == mindistance2)\n', (2247, 2274), True, 'import numpy as np\n'), ((2448, 2492), 'numpy.linspace', 'np.linspace', (['lon_list[0]', 'lon_list[1]', 'hnpts'], {}), '(lon_list[0], lon_list[1], hnpts)\n', (2459, 2492), True, 'import numpy as np\n'), ((2504, 2548), 'numpy.linspace', 'np.linspace', (['lat_list[0]', 'lat_list[1]', 'hnpts'], {}), '(lat_list[0], lat_list[1], hnpts)\n', (2515, 2548), True, 'import numpy as np\n'), ((2560, 2604), 'numpy.linspace', 'np.linspace', (['dep_list[0]', 'dep_list[1]', 'vnpts'], {}), '(dep_list[0], dep_list[1], vnpts)\n', (2571, 2604), True, 'import numpy as np\n'), ((3524, 3544), 'pandas.read_pickle', 'pd.read_pickle', (['data'], {}), '(data)\n', (3538, 3544), True, 'import pandas as pd\n'), ((4172, 4196), 'numpy.zeros', 'np.zeros', (['(hnpts, vnpts)'], {}), '((hnpts, vnpts))\n', (4180, 4196), True, 'import numpy as np\n'), ((4503, 4515), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4513, 4515), True, 'import matplotlib.pyplot as plt\n'), ((4551, 4599), 'numpy.meshgrid', 'np.meshgrid', (['lats_plot', 'deps_plot'], {'indexing': '"""ij"""'}), "(lats_plot, deps_plot, indexing='ij')\n", (4562, 4599), True, 'import numpy as np\n'), ((5009, 5087), 'matplotlib.pyplot.contourf', 'plt.contourf', (['mesh_plot_lat', 'mesh_plot_dep', 'values', '(101)'], {'cmap': 'plt.cm.seismic_r'}), '(mesh_plot_lat, mesh_plot_dep, values, 101, cmap=plt.cm.seismic_r)\n', (5021, 5087), True, 'import matplotlib.pyplot as plt\n'), ((5114, 5141), 'numpy.arange', 'np.arange', (['vmin', 'vmax', '(0.01)'], {}), '(vmin, vmax, 0.01)\n', (5123, 5141), True, 'import numpy as np\n'), ((5146, 5189), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'ticks': 'v', 'label': '"""perturbation"""'}), "(ticks=v, label='perturbation')\n", (5158, 5189), True, 'import matplotlib.pyplot as plt\n'), ((5223, 5329), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['f"""latitude(°) between (lon: {lon1}°, lat: {lat1}°) and (lon: {lon2}°, lat: {lat2}°)"""'], {}), "(\n f'latitude(°) between (lon: {lon1}°, lat: {lat1}°) and (lon: {lon2}°, lat: {lat2}°)'\n )\n", (5233, 5329), True, 'import matplotlib.pyplot as plt\n'), ((5333, 5356), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""depth(km)"""'], {}), "('depth(km)')\n", (5343, 5356), True, 'import matplotlib.pyplot as plt\n'), ((5361, 5371), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5369, 5371), True, 'import matplotlib.pyplot as plt\n'), ((1735, 1748), 'numpy.deg2rad', 'np.deg2rad', (['x'], {}), '(x)\n', (1745, 1748), True, 'import numpy as np\n'), ((1797, 1810), 'numpy.deg2rad', 'np.deg2rad', (['x'], {}), '(x)\n', (1807, 1810), True, 'import numpy as np\n'), ((4658, 4672), 'numpy.min', 'np.min', (['values'], {}), '(values)\n', (4664, 4672), True, 'import numpy as np\n'), ((4697, 4711), 'numpy.min', 'np.min', (['values'], {}), '(values)\n', (4703, 4711), True, 'import numpy as np\n'), ((4804, 4818), 'numpy.max', 'np.max', (['values'], {}), '(values)\n', (4810, 4818), True, 'import numpy as np\n'), ((4843, 4857), 'numpy.max', 'np.max', (['values'], {}), '(values)\n', (4849, 4857), True, 'import numpy as np\n'), ((4949, 4963), 'numpy.max', 'np.max', (['values'], {}), '(values)\n', (4955, 4963), True, 'import numpy as np\n'), ((4965, 4979), 'numpy.min', 'np.min', (['values'], {}), '(values)\n', (4971, 4979), True, 'import numpy as np\n'), ((5194, 5203), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (5201, 5203), True, 'import matplotlib.pyplot as plt\n')] |
edpaget/flask-appconfig | tests/test_heroku.py | 5264719ac9229339070b219a4358a3203ffd05b0 | from flask import Flask
from flask_appconfig import HerokuConfig
def create_sample_app():
app = Flask('testapp')
HerokuConfig(app)
return app
def test_herokupostgres(monkeypatch):
monkeypatch.setenv('HEROKU_POSTGRESQL_ORANGE_URL', 'heroku-db-uri')
app = create_sample_app()
assert app.config['SQLALCHEMY_DATABASE_URI'] == 'heroku-db-uri'
| [((102, 118), 'flask.Flask', 'Flask', (['"""testapp"""'], {}), "('testapp')\n", (107, 118), False, 'from flask import Flask\n'), ((123, 140), 'flask_appconfig.HerokuConfig', 'HerokuConfig', (['app'], {}), '(app)\n', (135, 140), False, 'from flask_appconfig import HerokuConfig\n')] |
Dev-Jahn/cms | flask/util/logger.py | 84ea115bdb865daff83d069502f6f0dd105fc4f0 | import logging
"""
Formatter
"""
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d:%H:%M:%S')
"""
Set Flask logger
"""
logger = logging.getLogger('FLASK_LOG')
logger.setLevel(logging.DEBUG)
stream_log = logging.StreamHandler()
stream_log.setFormatter(formatter)
logger.addHandler(stream_log)
# if disabled
# logger.disabled = True
| [((50, 156), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {'datefmt': '"""%Y-%m-%d:%H:%M:%S"""'}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s',\n datefmt='%Y-%m-%d:%H:%M:%S')\n", (67, 156), False, 'import logging\n'), ((193, 223), 'logging.getLogger', 'logging.getLogger', (['"""FLASK_LOG"""'], {}), "('FLASK_LOG')\n", (210, 223), False, 'import logging\n'), ((268, 291), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (289, 291), False, 'import logging\n')] |
Krovatkin/NewsBlur | utils/backups/backup_psql.py | 2a5b52984c9d29c864eb80e9c60c658b1f25f7c5 | #!/usr/bin/python3
import os
import sys
import socket
CURRENT_DIR = os.path.dirname(__file__)
NEWSBLUR_DIR = ''.join([CURRENT_DIR, '/../../'])
sys.path.insert(0, NEWSBLUR_DIR)
os.environ['DJANGO_SETTINGS_MODULE'] = 'newsblur_web.settings'
import threading
class ProgressPercentage(object):
def __init__(self, filename):
self._filename = filename
self._size = float(os.path.getsize(filename))
self._seen_so_far = 0
self._lock = threading.Lock()
def __call__(self, bytes_amount):
# To simplify, assume this is hooked up to a single filename
with self._lock:
self._seen_so_far += bytes_amount
percentage = (self._seen_so_far / self._size) * 100
sys.stdout.write(
"\r%s %s / %s (%.2f%%)" % (
self._filename, self._seen_so_far, self._size,
percentage))
sys.stdout.flush()
import time
import boto3
from django.conf import settings
BACKUP_DIR = '/srv/newsblur/backup/'
s3 = boto3.client('s3', aws_access_key_id=settings.S3_ACCESS_KEY, aws_secret_access_key=settings.S3_SECRET)
hostname = socket.gethostname().replace('-','_')
s3_object_name = f'backup_{hostname}/backup_{hostname}_{time.strftime("%Y-%m-%d-%H-%M")}.sql'
path = os.listdir(BACKUP_DIR)[0]
full_path = os.path.join(BACKUP_DIR, path)
print('Uploading %s to %s on S3 bucket %s' % (full_path, s3_object_name, settings.S3_BACKUP_BUCKET))
s3.upload_file(full_path, settings.S3_BACKUP_BUCKET, s3_object_name, Callback=ProgressPercentage(full_path))
os.remove(full_path)
| [((69, 94), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (84, 94), False, 'import os\n'), ((144, 176), 'sys.path.insert', 'sys.path.insert', (['(0)', 'NEWSBLUR_DIR'], {}), '(0, NEWSBLUR_DIR)\n', (159, 176), False, 'import sys\n'), ((1037, 1143), 'boto3.client', 'boto3.client', (['"""s3"""'], {'aws_access_key_id': 'settings.S3_ACCESS_KEY', 'aws_secret_access_key': 'settings.S3_SECRET'}), "('s3', aws_access_key_id=settings.S3_ACCESS_KEY,\n aws_secret_access_key=settings.S3_SECRET)\n", (1049, 1143), False, 'import boto3\n'), ((1330, 1360), 'os.path.join', 'os.path.join', (['BACKUP_DIR', 'path'], {}), '(BACKUP_DIR, path)\n', (1342, 1360), False, 'import os\n'), ((1571, 1591), 'os.remove', 'os.remove', (['full_path'], {}), '(full_path)\n', (1580, 1591), False, 'import os\n'), ((1292, 1314), 'os.listdir', 'os.listdir', (['BACKUP_DIR'], {}), '(BACKUP_DIR)\n', (1302, 1314), False, 'import os\n'), ((467, 483), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (481, 483), False, 'import threading\n'), ((1153, 1173), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (1171, 1173), False, 'import socket\n'), ((1247, 1278), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d-%H-%M"""'], {}), "('%Y-%m-%d-%H-%M')\n", (1260, 1278), False, 'import time\n'), ((389, 414), 'os.path.getsize', 'os.path.getsize', (['filename'], {}), '(filename)\n', (404, 414), False, 'import os\n'), ((739, 849), 'sys.stdout.write', 'sys.stdout.write', (["('\\r%s %s / %s (%.2f%%)' % (self._filename, self._seen_so_far, self._size,\n percentage))"], {}), "('\\r%s %s / %s (%.2f%%)' % (self._filename, self.\n _seen_so_far, self._size, percentage))\n", (755, 849), False, 'import sys\n'), ((915, 933), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (931, 933), False, 'import sys\n')] |
Orange-OpenSource/xtesting-onap-tests | onap_tests/scenario/solution.py | ce4237f49089a91c81f5fad552f78fec384fd504 | #!/usr/bin/python
#
# This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# pylint: disable=missing-docstring
# pylint: disable=duplicate-code
import logging
import time
import onap_tests.components.aai as aai
import onap_tests.components.so as so
import onap_tests.components.sdnc as sdnc
import onap_tests.components.nbi as nbi
import onap_tests.utils.stack_checker as sc
import onap_tests.utils.utils as onap_utils
PROXY = onap_utils.get_config("general.proxy")
class Solution(object):
"""
VNF: Class to automate the instantiation of a VNF
It is assumed that the Design phase has been already done
The yaml template is available and stored in the template directory
TODO: automate the design phase
"""
__logger = logging.getLogger(__name__)
def __init__(self, **kwargs):
"""Initialize Solution object."""
super(Solution, self).__init__()
self.vnf_config = {}
self.components = {}
if "case" not in kwargs:
# by convention is VNF is not precised we set mrf
kwargs["case"] = "mrf"
self.vnf_config["vnf"] = kwargs["case"]
if "nbi" in kwargs:
self.vnf_config["nbi"] = kwargs["nbi"]
# can be useful to destroy resources, sdnc module name shall be given
if "sdnc_vnf_name" in kwargs:
self.vnf_config["sdnc_vnf_name"] = kwargs["sdnc_vnf_name"]
# Random part = 6 last char of the the vnf name
self.vnf_config["random_string"] = kwargs["sdnc_vnf_name"][-6:]
else:
self.vnf_config["random_string"] = (
onap_utils.random_string_generator())
self.vnf_config["sdnc_vnf_name"] = (
onap_utils.get_config("onap.service.name") + "_" +
kwargs["case"] + "_" + self.vnf_config["random_string"])
vnf_list = list(onap_utils.get_template_param(
self.vnf_config["vnf"],
"topology_template.node_templates"))
vf_module_list = list(onap_utils.get_template_param(
self.vnf_config["vnf"],
"topology_template.groups"))
# Class attributes for instance, vnf and module VF
self.service_infos = {}
self.vnf_infos = {'list': vnf_list}
self.module_infos = {'list': vf_module_list}
# retrieve infos from the configuration files
self.set_service_instance_var()
self.set_vnf_var()
self.set_module_var()
self.set_onap_components()
def set_service_instance_var(self):
"""
set service instance variables from the config file
"""
self.vnf_config["vnf_name"] = onap_utils.get_template_param(
self.vnf_config["vnf"], "metadata.name")
self.vnf_config["invariant_uuid"] = onap_utils.get_template_param(
self.vnf_config["vnf"], "metadata.invariantUUID")
self.vnf_config["uuid"] = onap_utils.get_template_param(
self.vnf_config["vnf"], "metadata.UUID")
def set_vnf_var(self):
"""
set vnf variables from the config file
"""
for i, elt in enumerate(self.vnf_infos['list']):
vnf_config = {}
self.__logger.info("get VNF %s info", elt)
vnf_config["vnf_customization_name"] = elt
vnf_config["vnf_model_name"] = onap_utils.get_template_param(
self.vnf_config["vnf"], "topology_template.node_templates." +
vnf_config["vnf_customization_name"] + ".metadata.name")
vnf_config["vnf_invariant_id"] = onap_utils.get_template_param(
self.vnf_config["vnf"], "topology_template.node_templates." +
vnf_config["vnf_customization_name"] +
".metadata.invariantUUID")
vnf_config["vnf_version_id"] = onap_utils.get_template_param(
self.vnf_config["vnf"], "topology_template.node_templates." +
vnf_config["vnf_customization_name"] + ".metadata.UUID")
vnf_config["vnf_customization_id"] = (
onap_utils.get_template_param(
self.vnf_config["vnf"],
"topology_template.node_templates." +
vnf_config["vnf_customization_name"] +
".metadata.customizationUUID"))
vnf_config["vnf_type"] = list(onap_utils.get_template_param(
self.vnf_config["vnf"], "topology_template.groups"))[i]
vnf_config["vnf_generic_name"] = (
self.vnf_config["vnf_name"] + "-service-instance-" +
self.vnf_config["random_string"])
vnf_config["vnf_generic_type"] = (
self.vnf_config["vnf_name"] + "/" +
vnf_config["vnf_customization_name"])
self.vnf_config[elt] = vnf_config
def set_module_var(self):
"""
set module variables from the config file
"""
for elt in self.vnf_infos['list']:
vf_config = {}
# we cannot be sure that the modules are in teh same order
# than the vnf
vf_index = onap_utils.get_vf_module_index(
self.module_infos['list'],
elt)
vnf_type = list(onap_utils.get_template_param(
self.vnf_config["vnf"],
"topology_template.groups"))[vf_index]
self.__logger.info("Complete Module info for VNF %s", elt)
vf_config["sdnc_vnf_type"] = onap_utils.get_template_param(
self.vnf_config["vnf"], "topology_template.groups." +
vnf_type +
".metadata.vfModuleModelName")
vnf_param = (self.vnf_config["vnf"] + "." +
str(elt) + ".vnf_parameters")
vf_config["vnf_parameters"] = onap_utils.get_config(vnf_param)
vf_config["module_invariant_id"] = onap_utils.get_template_param(
self.vnf_config["vnf"], "topology_template.groups." +
vnf_type + ".metadata.vfModuleModelInvariantUUID")
vf_config["module_name_version_id"] = (
onap_utils.get_template_param(
self.vnf_config["vnf"], "topology_template.groups." +
vnf_type + ".metadata.vfModuleModelUUID"))
vf_config["module_customization_id"] = (
onap_utils.get_template_param(
self.vnf_config["vnf"], "topology_template.groups." +
vnf_type + ".metadata.vfModuleModelCustomizationUUID"))
vf_config["module_version_id"] = onap_utils.get_template_param(
self.vnf_config["vnf"], "topology_template.groups." +
vnf_type + ".metadata.vfModuleModelUUID")
self.vnf_config[elt].update(vf_config)
def set_onap_components(self):
"""
Set ONAP component objects
"""
self.components["aai"] = aai.Aai(PROXY, self.__logger)
self.components["so"] = so.So(PROXY, self.__logger)
self.components["sdnc"] = sdnc.Sdnc(PROXY, self.__logger)
self.components["nbi"] = nbi.Nbi(PROXY, self.__logger)
def instantiate(self):
"""
Instantiate a VNF with ONAP
* Create the service instance (SO)
* Create the VNF instance (SO)
* preload the VNF in the SDNC
* Create the VF module instance (SO)
"""
instance_info = {"instance_id": ""}
vnf_info = {"vnf_id": ""}
module_info = {}
module_ref = {"instanceId": ""}
module_ok = False
check_vnf = False
self.__logger.info("Start the instantiation of the VNF")
instance_info = self.create_service_instance()
service_ok = self.components["aai"].check_service_instance(
self.vnf_config["vnf_name"],
instance_info["instance_id"])
if service_ok:
# create VNF instance(s)
for elt in self.vnf_infos['list']:
vnf_info = self.create_vnf_instance(elt)
self.__logger.info("Check vnf %s ....", elt)
vnf_ok = True
self.__logger.info("Check vnf %s ....", elt)
if not self.components["aai"].check_vnf_instance(
vnf_info["vnf_id"]):
vnf_ok = False
break
else:
# preload VNF(s) in SDNC
self.preload(elt)
time.sleep(10)
if vnf_ok:
# create VF module(s)
for elt in self.vnf_infos['list']:
module_info = self.create_module_instance(elt)
module_ok = True
module_ref = module_info['module_instance']
if not self.components["aai"].check_module_instance(
vnf_info["vnf_id"],
module_ref["requestReferences"]["instanceId"]):
module_ok = False
break
else:
# check VNF using OpenStack directly
check_vnf = self.check_vnf(
self.module_infos[elt]["module_instance_name"])
if check_vnf:
self.__logger.info("Stack successfully checked")
return {"status": module_ok,
"instance_id": instance_info,
"vnf_info": vnf_info,
"module_info": module_info,
"check_heat": check_vnf}
def clean(self):
"""
Clean VNF from ONAP
Args:
instance_id: The ID of the VNF service instance
vnf_id: The ID of the VNF instance
module_id: The ID of the VF module instance
"""
instance_id = self.service_infos['instance_id']
for elt in self.vnf_infos['list']:
vnf_id = self.vnf_infos[elt]["vnf_id"]
module_id = (self.module_infos[elt]["module_instance"]
["requestReferences"]["instanceId"])
self.clean_module(elt)
if not self.components["aai"].check_module_cleaned(vnf_id,
module_id):
return False
else:
self.clean_vnf(elt)
if not self.components["aai"].check_vnf_cleaned(vnf_id):
return False
else:
self.clean_instance(instance_id)
if self.components["aai"].check_service_instance_cleaned(
self.vnf_config["vnf_name"], instance_id):
self.__logger.debug("Instance still in AAI DB")
else:
return False
time.sleep(10)
self.clean_preload(elt)
return True
def create_service_instance(self):
"""
Create service instance
2 options to create the instance
* with SO
* with NBI
"""
instance_id = None
model_info = self.components["so"].get_service_model_info(
self.vnf_config['invariant_uuid'], self.vnf_config['uuid'])
if self.vnf_config["nbi"]:
self.__logger.info("1) Create Service instance from NBI")
self.__logger.info("***********************************")
request_info = self.components["nbi"].get_request_info()
service_payload = (
self.components["nbi"].get_nbi_service_order_payload())
nbi_info = self.components["nbi"].create_service_order_nbi(
service_payload)
time.sleep(5)
instance_id = (
self.components["nbi"].get_service_instance_id_from_order(
nbi_info["id"]))
else:
self.__logger.info("1) Create Service instance in SO")
self.__logger.info("********************************")
request_info = self.components["so"].get_request_info(
self.vnf_config["vnf"] + "-service-instance-" +
self.vnf_config['random_string'])
service_payload = self.components["so"].get_service_payload(
self.vnf_config["vnf"],
request_info,
model_info)
instance_id = self.components["so"].create_instance(
service_payload)
service_instance_info = {"instance_id": instance_id,
"request_info": request_info,
"service_payload": service_payload}
self.__logger.info("Service instance created: %s",
service_instance_info)
self.service_infos = service_instance_info
return service_instance_info
def create_vnf_instance(self, elt):
"""
Create VNF instance
Args:
* elt: the VNF
"""
vnf_id = None
self.__logger.info("2) Create VNF instance in SO")
self.__logger.info("****************************")
model_info = self.components["so"].get_vnf_model_info(
self.vnf_config[elt]['vnf_invariant_id'],
self.vnf_config[elt]['vnf_version_id'],
self.vnf_config[elt]['vnf_model_name'],
self.vnf_config[elt]['vnf_customization_id'],
self.vnf_config[elt]['vnf_customization_name'])
vnf_related_instance = self.components["so"].get_vnf_related_instance(
self.service_infos["instance_id"],
self.vnf_config['invariant_uuid'],
self.vnf_config['uuid'])
vnf_instance_name = (self.vnf_config["vnf"] + "-vnf-instance-" +
str(elt).replace(" ", "_") + ("_") +
self.vnf_config['random_string'])
request_info = self.components["so"].get_request_info(
vnf_instance_name)
vnf_payload = self.components["so"].get_vnf_payload(
self.vnf_config["vnf"],
request_info,
model_info,
vnf_related_instance)
# self.__logger.debug("VNF payload: %s", vnf_payload)
vnf_id = self.components["so"].create_vnf(
self.service_infos["instance_id"],
vnf_payload)
vnf_info = {"vnf_id": vnf_id,
"vnf_instance_name": vnf_instance_name,
"vnf_payload": vnf_payload,
"vnf_related_instance": vnf_related_instance}
self.__logger.info(">>>> SO vnf instance created %s", vnf_info)
self.vnf_infos[elt] = vnf_info
return vnf_info
def preload(self, elt):
"""
Preload VNF in SDNC
Args:
* elt: the VNF
"""
vnf_preload_infos = {}
self.__logger.info("3) Preload VNF %s in SDNC", elt)
self.__logger.info("*******************************")
vnf_name = (self.vnf_config["vnf"] +
"-vfmodule-instance-" +
str(elt).replace(" ", "_") + "_" +
self.vnf_config['random_string'])
vnf_topology_identifier = {
"generic-vnf-name": vnf_name,
"generic-vnf-type": (
self.vnf_config[elt]['vnf_generic_type']),
"service-type": self.service_infos["instance_id"],
"vnf-name": vnf_name,
"vnf-type": self.vnf_config[elt]['sdnc_vnf_type']}
sdnc_payload = self.components["sdnc"].get_preload_payload(
self.vnf_config[elt]['vnf_parameters'],
vnf_topology_identifier)
self.__logger.info("SDNC preload payload %s", sdnc_payload)
sdnc_preload = self.components["sdnc"].preload(sdnc_payload)
self.__logger.debug("SDNC preload answer: %s", sdnc_preload)
vnf_preload_infos[elt] = ({"sdnc_payload": sdnc_payload,
"sdnc_preload": sdnc_preload})
return vnf_preload_infos[elt]
def create_module_instance(self, elt):
"""
Create module instance
Args:
* instance_info: dict including the instance_id, the request_info and
the service payload
* vnf_info: dict including the vnf_id, vnf_related_instance and the
vnf payload
"""
module_info = {}
self.__logger.info("4) Create MODULE %s instance in SO", elt)
self.__logger.info("***************************************")
module_model_info = self.components["so"].get_module_model_info(
self.vnf_config[elt]['module_invariant_id'],
self.vnf_config[elt]['module_name_version_id'],
self.vnf_config[elt]['sdnc_vnf_type'],
self.vnf_config[elt]['module_customization_id'],
self.vnf_config[elt]['module_version_id'])
module_related_instance = (
self.components["so"].get_module_related_instance(
self.vnf_infos[elt]["vnf_id"],
self.vnf_config[elt]['vnf_invariant_id'],
self.vnf_config[elt]['vnf_version_id'],
self.vnf_config[elt]['vnf_model_name'],
self.vnf_config[elt]['vnf_customization_id'],
self.vnf_config[elt]['vnf_customization_name']))
module_instance_name = (self.vnf_config["vnf"] +
"-vfmodule-instance-" +
str(elt).replace(" ", "_") + "_" +
self.vnf_config['random_string'])
request_info = self.components["so"].get_request_info(
module_instance_name)
module_payload = self.components["so"].get_module_payload(
self.vnf_config["vnf"],
request_info,
module_model_info,
self.vnf_infos[elt]["vnf_related_instance"],
module_related_instance)
self.__logger.debug("Module payload %s", module_payload)
module_instance = self.components["so"].create_module(
self.service_infos["instance_id"],
self.vnf_infos[elt]["vnf_id"],
module_payload)
self.__logger.info(">>>> Module instance created: %s", module_instance)
module_info = (
{'module_instance': module_instance,
'module_instance_name': module_instance_name,
'module_payload': module_payload,
'module_model_info': module_model_info,
'module_related_instance': module_related_instance})
self.__logger.info("SO module vf(s) created: %s", module_info)
self.module_infos[elt] = module_info
return module_info
def check_vnf(self, stack_name):
"""
Check VNF stack has been properly started
"""
check_vnf = False
try:
my_stack_checker = sc.StackChecker()
if my_stack_checker.check_stack_is_complete(stack_name):
check_vnf = True
except Exception: # pylint: disable=broad-except
self.__logger.error("Impossible to find the stack %s in OpenStack",
stack_name)
return check_vnf
def clean_instance(self, instance_id):
"""
Clean VNF instance
Args:
* instance_id: The service instance of the VNF
"""
self.__logger.info(" Clean Service Instance ")
service_payload = self.components["so"].get_service_payload(
self.vnf_config["vnf"],
self.components["so"].get_request_info(
self.vnf_config['sdnc_vnf_name']),
self.components["so"].get_service_model_info(
self.vnf_config['invariant_uuid'],
self.vnf_config['uuid']))
self.components["so"].delete_instance(instance_id, service_payload)
def clean_vnf(self, elt):
"""
Clean VNF
Args:
* instance_id: The service instance of the VNF
* vnf_id:The VNF id of the VNF
"""
self.__logger.info(" Clean vnf Instance %s ", elt)
self.components["so"].delete_vnf(
self.service_infos["instance_id"],
self.vnf_infos[elt]["vnf_id"],
self.vnf_infos[elt]["vnf_payload"])
def clean_module(self, elt):
"""
Clean VNF Module
Args:
* instance_id: The service instance id of the VNF
* vnf_id:The VNF id of the VNF
* module_id: the VF module id of the VNF
"""
self.__logger.info(" Clean Module VF Instance %s ", elt)
instance_id = self.service_infos["instance_id"]
vnf_id = self.vnf_infos[elt]["vnf_id"]
module_id = (self.module_infos[elt]["module_instance"]
["requestReferences"]["instanceId"])
module_payload = self.module_infos[elt]["module_payload"]
self.components["so"].delete_module(
module_payload,
instance_id,
vnf_id,
module_id)
def clean_preload(self, elt):
"""
Clean VNF SDNC preload
"""
self.__logger.info(" Clean Preload of %s ", elt)
# if 1 of the expected preload clean is FAIL we return False
clean_preload = self.components["sdnc"].delete_preload(
self.module_infos[elt]["module_instance_name"],
self.vnf_config[elt]["sdnc_vnf_type"])
return clean_preload
def clean_all_preload(self):
"""
Clean VNF SDNC preload with the preload id
"""
self.__logger.info(" Clean Preload ")
for elt in self.vnf_infos['list']:
clean_preload = self.components["sdnc"].delete_preload(
self.module_infos[elt]["module_instance_name"],
self.vnf_config[elt]['sdnc_vnf_type'])
return clean_preload
def get_info(self):
"""
Get VNFs Info
"""
self.__logger.info("Class to manage VNFs")
self.__logger.info("VNF config: %s", self.vnf_config)
| [((620, 658), 'onap_tests.utils.utils.get_config', 'onap_utils.get_config', (['"""general.proxy"""'], {}), "('general.proxy')\n", (641, 658), True, 'import onap_tests.utils.utils as onap_utils\n'), ((950, 977), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (967, 977), False, 'import logging\n'), ((2905, 2975), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", '"""metadata.name"""'], {}), "(self.vnf_config['vnf'], 'metadata.name')\n", (2934, 2975), True, 'import onap_tests.utils.utils as onap_utils\n'), ((3035, 3114), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", '"""metadata.invariantUUID"""'], {}), "(self.vnf_config['vnf'], 'metadata.invariantUUID')\n", (3064, 3114), True, 'import onap_tests.utils.utils as onap_utils\n'), ((3164, 3234), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", '"""metadata.UUID"""'], {}), "(self.vnf_config['vnf'], 'metadata.UUID')\n", (3193, 3234), True, 'import onap_tests.utils.utils as onap_utils\n'), ((7251, 7280), 'onap_tests.components.aai.Aai', 'aai.Aai', (['PROXY', 'self.__logger'], {}), '(PROXY, self.__logger)\n', (7258, 7280), True, 'import onap_tests.components.aai as aai\n'), ((7314, 7341), 'onap_tests.components.so.So', 'so.So', (['PROXY', 'self.__logger'], {}), '(PROXY, self.__logger)\n', (7319, 7341), True, 'import onap_tests.components.so as so\n'), ((7377, 7408), 'onap_tests.components.sdnc.Sdnc', 'sdnc.Sdnc', (['PROXY', 'self.__logger'], {}), '(PROXY, self.__logger)\n', (7386, 7408), True, 'import onap_tests.components.sdnc as sdnc\n'), ((7443, 7472), 'onap_tests.components.nbi.Nbi', 'nbi.Nbi', (['PROXY', 'self.__logger'], {}), '(PROXY, self.__logger)\n', (7450, 7472), True, 'import onap_tests.components.nbi as nbi\n'), ((1835, 1871), 'onap_tests.utils.utils.random_string_generator', 'onap_utils.random_string_generator', ([], {}), '()\n', (1869, 1871), True, 'import onap_tests.utils.utils as onap_utils\n'), ((2092, 2185), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", '"""topology_template.node_templates"""'], {}), "(self.vnf_config['vnf'],\n 'topology_template.node_templates')\n", (2121, 2185), True, 'import onap_tests.utils.utils as onap_utils\n'), ((2241, 2326), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", '"""topology_template.groups"""'], {}), "(self.vnf_config['vnf'],\n 'topology_template.groups')\n", (2270, 2326), True, 'import onap_tests.utils.utils as onap_utils\n'), ((3596, 3754), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", "('topology_template.node_templates.' + vnf_config['vnf_customization_name'] +\n '.metadata.name')"], {}), "(self.vnf_config['vnf'], \n 'topology_template.node_templates.' + vnf_config[\n 'vnf_customization_name'] + '.metadata.name')\n", (3625, 3754), True, 'import onap_tests.utils.utils as onap_utils\n'), ((3826, 3993), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", "('topology_template.node_templates.' + vnf_config['vnf_customization_name'] +\n '.metadata.invariantUUID')"], {}), "(self.vnf_config['vnf'], \n 'topology_template.node_templates.' + vnf_config[\n 'vnf_customization_name'] + '.metadata.invariantUUID')\n", (3855, 3993), True, 'import onap_tests.utils.utils as onap_utils\n'), ((4080, 4238), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", "('topology_template.node_templates.' + vnf_config['vnf_customization_name'] +\n '.metadata.UUID')"], {}), "(self.vnf_config['vnf'], \n 'topology_template.node_templates.' + vnf_config[\n 'vnf_customization_name'] + '.metadata.UUID')\n", (4109, 4238), True, 'import onap_tests.utils.utils as onap_utils\n'), ((4333, 4504), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", "('topology_template.node_templates.' + vnf_config['vnf_customization_name'] +\n '.metadata.customizationUUID')"], {}), "(self.vnf_config['vnf'], \n 'topology_template.node_templates.' + vnf_config[\n 'vnf_customization_name'] + '.metadata.customizationUUID')\n", (4362, 4504), True, 'import onap_tests.utils.utils as onap_utils\n'), ((5408, 5470), 'onap_tests.utils.utils.get_vf_module_index', 'onap_utils.get_vf_module_index', (["self.module_infos['list']", 'elt'], {}), "(self.module_infos['list'], elt)\n", (5438, 5470), True, 'import onap_tests.utils.utils as onap_utils\n'), ((5777, 5907), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", "('topology_template.groups.' + vnf_type + '.metadata.vfModuleModelName')"], {}), "(self.vnf_config['vnf'], \n 'topology_template.groups.' + vnf_type + '.metadata.vfModuleModelName')\n", (5806, 5907), True, 'import onap_tests.utils.utils as onap_utils\n'), ((6111, 6143), 'onap_tests.utils.utils.get_config', 'onap_utils.get_config', (['vnf_param'], {}), '(vnf_param)\n', (6132, 6143), True, 'import onap_tests.utils.utils as onap_utils\n'), ((6194, 6337), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", "('topology_template.groups.' + vnf_type +\n '.metadata.vfModuleModelInvariantUUID')"], {}), "(self.vnf_config['vnf'], \n 'topology_template.groups.' + vnf_type +\n '.metadata.vfModuleModelInvariantUUID')\n", (6223, 6337), True, 'import onap_tests.utils.utils as onap_utils\n'), ((6434, 6564), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", "('topology_template.groups.' + vnf_type + '.metadata.vfModuleModelUUID')"], {}), "(self.vnf_config['vnf'], \n 'topology_template.groups.' + vnf_type + '.metadata.vfModuleModelUUID')\n", (6463, 6564), True, 'import onap_tests.utils.utils as onap_utils\n'), ((6675, 6822), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", "('topology_template.groups.' + vnf_type +\n '.metadata.vfModuleModelCustomizationUUID')"], {}), "(self.vnf_config['vnf'], \n 'topology_template.groups.' + vnf_type +\n '.metadata.vfModuleModelCustomizationUUID')\n", (6704, 6822), True, 'import onap_tests.utils.utils as onap_utils\n'), ((6904, 7034), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", "('topology_template.groups.' + vnf_type + '.metadata.vfModuleModelUUID')"], {}), "(self.vnf_config['vnf'], \n 'topology_template.groups.' + vnf_type + '.metadata.vfModuleModelUUID')\n", (6933, 7034), True, 'import onap_tests.utils.utils as onap_utils\n'), ((11272, 11286), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (11282, 11286), False, 'import time\n'), ((12176, 12189), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (12186, 12189), False, 'import time\n'), ((19518, 19535), 'onap_tests.utils.stack_checker.StackChecker', 'sc.StackChecker', ([], {}), '()\n', (19533, 19535), True, 'import onap_tests.utils.stack_checker as sc\n'), ((8844, 8858), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (8854, 8858), False, 'import time\n'), ((4624, 4709), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", '"""topology_template.groups"""'], {}), "(self.vnf_config['vnf'],\n 'topology_template.groups')\n", (4653, 4709), True, 'import onap_tests.utils.utils as onap_utils\n'), ((5535, 5620), 'onap_tests.utils.utils.get_template_param', 'onap_utils.get_template_param', (["self.vnf_config['vnf']", '"""topology_template.groups"""'], {}), "(self.vnf_config['vnf'],\n 'topology_template.groups')\n", (5564, 5620), True, 'import onap_tests.utils.utils as onap_utils\n'), ((1940, 1982), 'onap_tests.utils.utils.get_config', 'onap_utils.get_config', (['"""onap.service.name"""'], {}), "('onap.service.name')\n", (1961, 1982), True, 'import onap_tests.utils.utils as onap_utils\n')] |
dominic-dev/pyformsd | tutorials/Controls4Docs/ControlEventsGraph.py | 23e31ceff2943bc0f7286d25dd14450a14b986af | #!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = "Ricardo Ribeiro"
__credits__ = ["Ricardo Ribeiro"]
__license__ = "MIT"
__version__ = "0.0"
__maintainer__ = "Ricardo Ribeiro"
__email__ = "[email protected]"
__status__ = "Development"
from __init__ import *
import random, time
from PyQt4 import QtCore
class SimpleExample(BaseWidget):
def __init__(self):
super(SimpleExample,self).__init__('Simple example')
#Definition of the forms fields
self._control0 = ControlEventsGraph('Check me')
self._control1 = ControlEventsGraph('Check me')
self._control2 = ControlEventsGraph('Check me')
self._control3 = ControlEventsGraph('Check me')
self._txt = ControlText('Time')
self._btn = ControlButton('Click')
self._btn1 = ControlButton('Click 1')
self._save = ControlButton('Save button')
self._load = ControlButton('Load button')
self.formset = [
('_btn','_btn1'),
('_control0','_control1'),
('_control2','_control3'),
'_txt',
('_save','_load')]
self._btn.value = self.__btn
self._btn1.value = self.__btn1
self._save.value = self.save_window
self._load.value = self.load_window
self._start = time.time()
self.INTERVAL = 500
self.N_TRACKS = 8
def __btn(self):
for i in range(40):
s = random.randint( 0, 10000 )
o = random.randint( 0, 1000 )
self._control0.add_event( s, s+o, track=random.randint(0,self.N_TRACKS) )
#self._control0.add_event( random.randint(0, 10000), s+o, track=random.randint(0,self.N_TRACKS), color="#00FFDD")
self._control0.value = 5000
def __addEvent0(self):
b = self._control0.value
e = b+self.INTERVAL
self._control0.add_event( b, e, track=random.randint(0,self.N_TRACKS) )
self._control0.value = e
self._txt.value = str(time.time() - self._start)
def __addEvent1(self):
b = self._control1.value
e = b+self.INTERVAL
self._control1.add_event( b, e, track=random.randint(0,self.N_TRACKS) )
self._control1.value = e
def __addEvent2(self):
b = self._control2.value
e = b+self.INTERVAL
self._control2.add_event( b, e, track=random.randint(0,self.N_TRACKS) )
self._control2.value = e
def __addEvent3(self):
b = self._control3.value
e = b+self.INTERVAL
self._control3.add_event( b, e, track=random.randint(0,self.N_TRACKS) )
self._control3.value = e
def __btn1(self):
self._start = time.time()
timer = QtCore.QTimer(self.form)
timer.timeout.connect(self.__addEvent0)
timer.start(self.INTERVAL)
timer = QtCore.QTimer(self.form)
timer.timeout.connect(self.__addEvent1)
timer.start(self.INTERVAL)
timer = QtCore.QTimer(self.form)
timer.timeout.connect(self.__addEvent2)
timer.start(self.INTERVAL)
timer = QtCore.QTimer(self.form)
timer.timeout.connect(self.__addEvent3)
timer.start(self.INTERVAL)
##################################################################################################################
##################################################################################################################
##################################################################################################################
#Execute the application
if __name__ == "__main__": pyforms.start_app( SimpleExample )
| [((1188, 1199), 'time.time', 'time.time', ([], {}), '()\n', (1197, 1199), False, 'import random, time\n'), ((2379, 2390), 'time.time', 'time.time', ([], {}), '()\n', (2388, 2390), False, 'import random, time\n'), ((2404, 2428), 'PyQt4.QtCore.QTimer', 'QtCore.QTimer', (['self.form'], {}), '(self.form)\n', (2417, 2428), False, 'from PyQt4 import QtCore\n'), ((2511, 2535), 'PyQt4.QtCore.QTimer', 'QtCore.QTimer', (['self.form'], {}), '(self.form)\n', (2524, 2535), False, 'from PyQt4 import QtCore\n'), ((2618, 2642), 'PyQt4.QtCore.QTimer', 'QtCore.QTimer', (['self.form'], {}), '(self.form)\n', (2631, 2642), False, 'from PyQt4 import QtCore\n'), ((2725, 2749), 'PyQt4.QtCore.QTimer', 'QtCore.QTimer', (['self.form'], {}), '(self.form)\n', (2738, 2749), False, 'from PyQt4 import QtCore\n'), ((1300, 1324), 'random.randint', 'random.randint', (['(0)', '(10000)'], {}), '(0, 10000)\n', (1314, 1324), False, 'import random, time\n'), ((1334, 1357), 'random.randint', 'random.randint', (['(0)', '(1000)'], {}), '(0, 1000)\n', (1348, 1357), False, 'import random, time\n'), ((1700, 1732), 'random.randint', 'random.randint', (['(0)', 'self.N_TRACKS'], {}), '(0, self.N_TRACKS)\n', (1714, 1732), False, 'import random, time\n'), ((1786, 1797), 'time.time', 'time.time', ([], {}), '()\n', (1795, 1797), False, 'import random, time\n'), ((1927, 1959), 'random.randint', 'random.randint', (['(0)', 'self.N_TRACKS'], {}), '(0, self.N_TRACKS)\n', (1941, 1959), False, 'import random, time\n'), ((2102, 2134), 'random.randint', 'random.randint', (['(0)', 'self.N_TRACKS'], {}), '(0, self.N_TRACKS)\n', (2116, 2134), False, 'import random, time\n'), ((2277, 2309), 'random.randint', 'random.randint', (['(0)', 'self.N_TRACKS'], {}), '(0, self.N_TRACKS)\n', (2291, 2309), False, 'import random, time\n'), ((1404, 1436), 'random.randint', 'random.randint', (['(0)', 'self.N_TRACKS'], {}), '(0, self.N_TRACKS)\n', (1418, 1436), False, 'import random, time\n')] |
lioncorpo/sfm.lion-judge-corporation | annotation_gui_gcp/orthophoto_view.py | 95fb11bff263c3faab62269cc907eec18b527e22 | from typing import Tuple
import numpy as np
import rasterio.warp
from opensfm import features
from .orthophoto_manager import OrthoPhotoManager
from .view import View
class OrthoPhotoView(View):
def __init__(
self,
main_ui,
path: str,
init_lat: float,
init_lon: float,
is_geo_reference: bool = False,
):
"""[summary]
Args:
main_ui (GUI.Gui)
path (str): path containing geotiffs
"""
self.image_manager = OrthoPhotoManager(path, 100.0)
self.images_in_list = self.image_manager.image_keys
self.zoom_window_size_px = 500
self.is_geo_reference = is_geo_reference
self.size = 50 # TODO add widget for zoom level
super(OrthoPhotoView, self).__init__(main_ui, False)
self.refocus(init_lat, init_lon)
self.populate_image_list()
if self.images_in_list:
self.bring_new_image(self.images_in_list[0])
self.set_title()
def get_image(self, new_image):
crop, image_window, geot = self.image_manager.read_image_around_latlon(
new_image, self.center_lat, self.center_lon, self.size
)
self.image_window = image_window
self.geot = geot
return crop
def get_candidate_images(self):
return self.image_manager.get_candidate_images(
self.center_lat, self.center_lon, self.size
)
def pixel_to_latlon(self, x: float, y: float):
"""
From pixels (in the viewing window) to latlon
"""
if not self.is_geo_reference:
return None
# Pixel to whatever crs the image is in
# pyre-fixme[16]: `OrthoPhotoView` has no attribute `geot`.
x, y = self.geot.xy(y, x)
# And then to WSG84 (lat/lon)
lons, lats = rasterio.warp.transform(self.geot.crs, "EPSG:4326", [x], [y])
return lats[0], lons[0]
def gcp_to_pixel_coordinates(self, x: float, y: float) -> Tuple[float, float]:
"""
Transforms from normalized coordinates (in the whole geotiff) to
pixels (in the viewing window)
"""
h, w = self.image_manager.get_image_size(self.current_image)
px = features.denormalized_image_coordinates(np.array([[x, y]]), w, h)[0]
# pyre-fixme[16]: `OrthoPhotoView` has no attribute `image_window`.
x = px[0] - self.image_window.col_off
y = px[1] - self.image_window.row_off
# pyre-fixme[7]: Expected `Tuple[float, float]` but got `List[typing.Any]`.
return [x, y]
def pixel_to_gcp_coordinates(self, x: float, y: float) -> Tuple[float, float]:
"""
Transforms from pixels (in the viewing window) to normalized coordinates
(in the whole geotiff)
"""
# pyre-fixme[16]: `OrthoPhotoView` has no attribute `image_window`.
x += self.image_window.col_off
y += self.image_window.row_off
h, w = self.image_manager.get_image_size(self.current_image)
coords = features.normalized_image_coordinates(np.array([[x, y]]), w, h)[0]
return coords.tolist()
def refocus(self, lat, lon):
self.center_lat = lat
self.center_lon = lon
self.populate_image_list()
if self.images_in_list:
if self.current_image not in self.images_in_list:
self.bring_new_image(self.images_in_list[0])
else:
self.bring_new_image(self.current_image)
self.set_title()
def bring_new_image(self, new_image):
super(OrthoPhotoView, self).bring_new_image(new_image, force=True)
xlim = self.ax.get_xlim()
ylim = self.ax.get_ylim()
artists = self.ax.plot(np.mean(xlim), np.mean(ylim), "rx")
self.plt_artists.extend(artists)
self.canvas.draw_idle()
def set_title(self):
lat, lon = self.center_lat, self.center_lon
if self.images_in_list:
t = "Images covering lat:{:.4f}, lon:{:.4f}".format(lat, lon)
shot = self.current_image
seq_ix = self.images_in_list.index(shot)
title = f"{t} [{seq_ix+1}/{len(self.images_in_list)}]: {shot}"
else:
title = f"No orthophotos around {lat}, {lon}"
self.current_image = None
self.ax.clear()
self.ax.axis("off")
self.canvas.draw_idle()
self.window.title(title)
| [((3748, 3761), 'numpy.mean', 'np.mean', (['xlim'], {}), '(xlim)\n', (3755, 3761), True, 'import numpy as np\n'), ((3763, 3776), 'numpy.mean', 'np.mean', (['ylim'], {}), '(ylim)\n', (3770, 3776), True, 'import numpy as np\n'), ((2286, 2304), 'numpy.array', 'np.array', (['[[x, y]]'], {}), '([[x, y]])\n', (2294, 2304), True, 'import numpy as np\n'), ((3087, 3105), 'numpy.array', 'np.array', (['[[x, y]]'], {}), '([[x, y]])\n', (3095, 3105), True, 'import numpy as np\n')] |
mail2nsrajesh/tempest | tempest/tests/lib/services/compute/test_security_group_default_rules_client.py | 1a3b3dc50b418d3a15839830d7d1ff88c8c76cff | # Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib.services.compute import security_group_default_rules_client
from tempest.tests.lib import fake_auth_provider
from tempest.tests.lib.services import base
class TestSecurityGroupDefaultRulesClient(base.BaseServiceTest):
FAKE_RULE = {
"from_port": 80,
"id": 1,
"ip_protocol": "TCP",
"ip_range": {
"cidr": "10.10.10.0/24"
},
"to_port": 80
}
def setUp(self):
super(TestSecurityGroupDefaultRulesClient, self).setUp()
fake_auth = fake_auth_provider.FakeAuthProvider()
self.client = (security_group_default_rules_client.
SecurityGroupDefaultRulesClient(fake_auth, 'compute',
'regionOne'))
def _test_list_security_group_default_rules(self, bytes_body=False):
self.check_service_client_function(
self.client.list_security_group_default_rules,
'tempest.lib.common.rest_client.RestClient.get',
{"security_group_default_rules": [self.FAKE_RULE]},
to_utf=bytes_body)
def test_list_security_group_default_rules_with_str_body(self):
self._test_list_security_group_default_rules()
def test_list_security_group_default_rules_with_bytes_body(self):
self._test_list_security_group_default_rules(bytes_body=True)
def _test_show_security_group_default_rule(self, bytes_body=False):
self.check_service_client_function(
self.client.show_security_group_default_rule,
'tempest.lib.common.rest_client.RestClient.get',
{"security_group_default_rule": self.FAKE_RULE},
to_utf=bytes_body,
security_group_default_rule_id=1)
def test_show_security_group_default_rule_with_str_body(self):
self._test_show_security_group_default_rule()
def test_show_security_group_default_rule_with_bytes_body(self):
self._test_show_security_group_default_rule(bytes_body=True)
def _test_create_security_default_group_rule(self, bytes_body=False):
request_body = {
"to_port": 80,
"from_port": 80,
"ip_protocol": "TCP",
"cidr": "10.10.10.0/24"
}
self.check_service_client_function(
self.client.create_security_default_group_rule,
'tempest.lib.common.rest_client.RestClient.post',
{"security_group_default_rule": self.FAKE_RULE},
to_utf=bytes_body, **request_body)
def test_create_security_default_group_rule_with_str_body(self):
self._test_create_security_default_group_rule()
def test_create_security_default_group_rule_with_bytes_body(self):
self._test_create_security_default_group_rule(bytes_body=True)
def test_delete_security_group_default_rule(self):
self.check_service_client_function(
self.client.delete_security_group_default_rule,
'tempest.lib.common.rest_client.RestClient.delete',
{}, status=204, security_group_default_rule_id=1)
| [((1162, 1199), 'tempest.tests.lib.fake_auth_provider.FakeAuthProvider', 'fake_auth_provider.FakeAuthProvider', ([], {}), '()\n', (1197, 1199), False, 'from tempest.tests.lib import fake_auth_provider\n'), ((1223, 1329), 'tempest.lib.services.compute.security_group_default_rules_client.SecurityGroupDefaultRulesClient', 'security_group_default_rules_client.SecurityGroupDefaultRulesClient', (['fake_auth', '"""compute"""', '"""regionOne"""'], {}), "(fake_auth,\n 'compute', 'regionOne')\n", (1290, 1329), False, 'from tempest.lib.services.compute import security_group_default_rules_client\n')] |
itsyaboyrocket/pirates | pirates/leveleditor/worldData/interior_spanish_npc_b.py | 6ca1e7d571c670b0d976f65e608235707b5737e3 | # uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.leveleditor.worldData.interior_spanish_npc_b
from pandac.PandaModules import Point3, VBase3, Vec4, Vec3
objectStruct = {'Objects': {'1153420207.67dzlu01': {'Type': 'Building Interior', 'Name': '', 'Instanced': True, 'Objects': {'1165347933.66kmuller': {'Type': 'Log_Stack', 'DisableCollision': True, 'Hpr': VBase3(139.803, 0.0, 0.0), 'Pos': Point3(2.978, 25.796, 0.048), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/Log_stack_a'}}, '1166138034.99kmuller': {'Type': 'Log_Stack', 'DisableCollision': True, 'Hpr': VBase3(179.29, 0.0, 0.0), 'Pos': Point3(9.307, 24.592, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/Log_stack_b'}}, '1166138092.34kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Hpr': VBase3(-90.005, 0.0, 0.0), 'Pos': Point3(18.672, 15.355, 0.009), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/cabinet_spanish_low'}}, '1166138151.37kmuller': {'Type': 'Pots', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(18.938, 13.997, 2.735), 'Scale': VBase3(1.464, 1.464, 1.464), 'Visual': {'Model': 'models/props/pot_A'}}, '1166138161.79kmuller': {'Type': 'Pots', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(18.511, 15.482, 3.364), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/pot_B'}}, '1166138390.93kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Holiday': '', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-0.303, 0.276, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Color': (0.75, 0.9300000071525574, 1.0, 1.0), 'Model': 'models/props/table_bar_round'}}, '1166138443.79kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Hpr': VBase3(-134.164, 0.0, 0.0), 'Pos': Point3(4.61, -3.84, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/chair_bank'}}, '1166138454.85kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Hpr': VBase3(54.358, 0.0, 0.0), 'Pos': Point3(-6.565, 0.327, 0.038), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/chair_bar'}}, '1166138510.96kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Hpr': VBase3(162.38, 0.0, 0.0), 'Pos': Point3(-3.36, -6.982, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/chair_bank'}}, '1166138524.92kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Hpr': VBase3(80.452, 0.0, 0.0), 'Pos': Point3(5.079, 5.725, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/chair_bar'}}, '1166138537.42kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Hpr': VBase3(25.255, 0.0, 0.0), 'Pos': Point3(-1.381, 6.177, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/chair_bank'}}, '1166138621.31kmuller': {'Type': 'Jugs_and_Jars', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(0.672, -2.129, 3.008), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/bottle_green'}}, '1166138646.6kmuller': {'Type': 'Jugs_and_Jars', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-0.184, 1.377, 3.061), 'Scale': VBase3(1.429, 1.429, 1.429), 'Visual': {'Model': 'models/props/waterpitcher'}}, '1166138674.59kmuller': {'Type': 'Baskets', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(1.112, 0.235, 2.971), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/basket'}}, '1166138708.48kmuller': {'Type': 'Food', 'DisableCollision': False, 'Holiday': '', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(19.066, 23.998, 3.071), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/sausage'}}, '1166138742.6kmuller': {'Type': 'Food', 'DisableCollision': False, 'Hpr': VBase3(0.0, -4.607, 0.0), 'Pos': Point3(12.569, 24.56, 2.688), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/garlicString'}}, '1166138817.45kmuller': {'Type': 'Bucket', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(17.053, 10.72, 0.006), 'Scale': VBase3(0.665, 0.665, 0.665), 'Visual': {'Model': 'models/props/washtub'}}, '1166138973.9kmuller': {'Type': 'Tools', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(18.741, 7.367, 0.02), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/butter_churn'}}, '1166139009.4kmuller': {'Type': 'Tools', 'DisableCollision': False, 'Hpr': VBase3(-2.549, 12.708, -168.558), 'Pos': Point3(-7.195, -29.635, 4.369), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.5, 0.5, 0.5, 1.0), 'Model': 'models/props/broom'}}, '1166139125.65kmuller': {'Type': 'Furniture - Fancy', 'DisableCollision': True, 'Hpr': VBase3(179.014, 0.0, 0.0), 'Pos': Point3(-16.599, -28.46, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/cabinet_fancy_tall'}}, '1166139259.49kmuller': {'Type': 'Mortar_Pestle', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(19.246, 16.431, 3.391), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/mortar_pestle_stone'}}, '1166139339.62kmuller': {'Type': 'Prop_Groups', 'DisableCollision': True, 'Hpr': VBase3(57.552, 0.0, 0.0), 'Pos': Point3(15.438, -23.688, 0.048), 'Scale': VBase3(0.879, 0.879, 0.879), 'Visual': {'Color': (0.699999988079071, 0.699999988079071, 0.699999988079071, 1.0), 'Model': 'models/props/prop_group_G'}}, '1166139450.46kmuller': {'Type': 'Trunks', 'DisableCollision': True, 'Hpr': VBase3(-175.386, 0.0, 0.0), 'Pos': Point3(-11.623, -28.323, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/Trunk_rounded_2'}}, '1166139482.6kmuller': {'Type': 'Trunks', 'DisableCollision': False, 'Hpr': VBase3(-100.398, 0.0, 0.0), 'Pos': Point3(17.54, -12.363, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/Trunk_square'}}, '1166139534.14kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Hpr': VBase3(88.8, 0.0, 0.0), 'Pos': Point3(-19.032, -8.401, 0.172), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/bench_bank'}}, '1166139664.39kmuller': {'Type': 'Bucket', 'DisableCollision': True, 'Hpr': VBase3(-38.995, 0.0, 0.0), 'Pos': Point3(4.278, 24.282, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/bucket_handles'}}, '1166139726.17kmuller': {'Type': 'Light_Fixtures', 'DisableCollision': False, 'Hpr': VBase3(-56.33, 0.0, 0.0), 'Pos': Point3(20.726, 15.931, 4.923), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/candle_holder'}}, '1166139823.07kmuller': {'Type': 'Pan', 'DisableCollision': False, 'Hpr': VBase3(-45.198, -0.006, 0.006), 'Pos': Point3(21.602, 17.485, 4.688), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/pan'}}, '1166139883.79kmuller': {'Type': 'Jugs_and_Jars', 'DisableCollision': False, 'Hpr': VBase3(2.971, 0.0, 0.0), 'Pos': Point3(21.796, 18.912, 4.7), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/largejug_B'}}, '1166140032.53kmuller': {'Type': 'Wall_Hangings', 'DisableCollision': False, 'Hpr': VBase3(0.0, 0.0, 0.0), 'Pos': Point3(-2.651, 29.91, 7.991), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/Map_01'}}, '1166143136.15kmuller': {'Type': 'Light_Fixtures', 'DisableCollision': False, 'Hpr': VBase3(87.919, 0.0, 0.0), 'Pos': Point3(-19.128, 10.233, 7.623), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/lamp_candle'}}, '1166143173.57kmuller': {'Type': 'Light_Fixtures', 'DisableCollision': False, 'Hpr': VBase3(87.919, 0.0, 0.0), 'Pos': Point3(-19.101, -8.222, 7.695), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/lamp_candle'}}, '1166143204.95kmuller': {'Type': 'Light_Fixtures', 'DisableCollision': False, 'Hpr': VBase3(-90.159, 0.0, 0.0), 'Pos': Point3(18.91, 9.923, 7.471), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/lamp_candle'}}, '1166143219.04kmuller': {'Type': 'Light_Fixtures', 'DisableCollision': False, 'Holiday': '', 'Hpr': VBase3(-90.159, 0.0, 0.0), 'Pos': Point3(19.055, -9.027, 7.695), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/lamp_candle'}}, '1166143244.09kmuller': {'Type': 'Light_Fixtures', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-0.798, 10.488, 17.608), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/chandelier_jail'}}, '1166143275.89kmuller': {'Type': 'Light_Fixtures', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-0.592, -10.927, 17.594), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/chandelier_jail'}}, '1167972216.85kmuller': {'Type': 'Furniture', 'DisableCollision': True, 'Hpr': VBase3(44.958, 0.0, 0.0), 'Pos': Point3(-16.331, 26.168, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/bookshelf_spanish'}}, '1167972409.16kmuller': {'Type': 'Tools', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-19.259, 21.62, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/butter_churn'}}, '1176423441.61dzlu': {'Type': 'Light - Dynamic', 'Attenuation': '0.005', 'ConeAngle': '97.7273', 'DropOff': '6.8182', 'FlickRate': 0.5, 'Flickering': False, 'Hpr': VBase3(6.993, -61.677, 8.03), 'Intensity': '0.4242', 'LightType': 'SPOT', 'Pos': Point3(2.574, -18.447, 27.908), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.8700000047683716, 1.0, 1.0, 1.0), 'Model': 'models/props/light_tool_bulb'}}, '1176423539.22dzlu': {'Type': 'Light - Dynamic', 'Attenuation': '0.005', 'ConeAngle': '64.3182', 'DropOff': '39.5455', 'FlickRate': 0.5, 'Flickering': False, 'Hpr': VBase3(5.763, -56.906, 6.972), 'Intensity': '0.4848', 'LightType': 'SPOT', 'Pos': Point3(-1.976, 15.649, 24.802), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.8700000047683716, 1.0, 1.0, 1.0), 'Model': 'models/props/light_tool_bulb'}}, '1176423736.28dzlu': {'Type': 'Light - Dynamic', 'Attenuation': '0.005', 'ConeAngle': '60.0000', 'DropOff': '0.0000', 'FlickRate': 0.5, 'Flickering': True, 'Hpr': VBase3(0.0, 1.848, 0.0), 'Intensity': '0.5152', 'LightType': 'POINT', 'Pos': Point3(-0.034, -10.675, 13.873), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.95, 0.78, 0.64, 1.0), 'Model': 'models/props/light_tool_bulb'}}, '1176424160.2dzlu': {'Type': 'Light - Dynamic', 'Attenuation': '0.005', 'ConeAngle': '60.0000', 'DropOff': '0.0000', 'FlickRate': 0.5, 'Flickering': False, 'Hpr': VBase3(0.0, 1.848, 0.0), 'Intensity': '0.6061', 'LightType': 'POINT', 'Pos': Point3(-0.105, 11.422, 13.384), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.95, 0.78, 0.64, 1.0), 'Model': 'models/props/light_tool_bulb'}}, '1185496415.31kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(4.727, 26.813, -0.119), 'Scale': VBase3(2.057, 1.302, 1.198), 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_cube'}}, '1185496487.15kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Hpr': VBase3(45.263, 0.0, 0.0), 'Pos': Point3(-15.061, 24.578, -0.449), 'Scale': VBase3(1.603, 1.0, 1.891), 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}}, '1185496538.15kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-15.225, -28.682, -0.316), 'Scale': VBase3(2.053, 0.567, 2.235), 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_cube'}}, '1185496598.36kmuller': {'Type': 'Barrel', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(8.521, -28.523, 0.0), 'Scale': VBase3(0.77, 0.77, 0.77), 'Visual': {'Color': (0.47999998927116394, 0.44999998807907104, 0.4099999964237213, 1.0), 'Model': 'models/props/barrel_grey'}}, '1185496634.87kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Hpr': VBase3(-105.442, 0.0, 0.0), 'Pos': Point3(6.902, -26.349, -0.415), 'Scale': VBase3(0.856, 1.0, 1.451), 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}}, '1185496663.32kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Hpr': VBase3(-134.387, 0.0, 0.0), 'Pos': Point3(11.183, -19.168, -0.394), 'Scale': VBase3(0.955, 1.0, 1.0), 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}}, '1185496695.84kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Hpr': VBase3(177.474, 0.0, 0.0), 'Pos': Point3(18.836, -16.153, -1.477), 'Scale': VBase3(0.944, 1.0, 1.196), 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}}, '1192813036.19akelts': {'Type': 'Effect Node', 'EffectName': 'torch_effect', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(16.066, 27.69, 0.728), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1228171574.52kmuller': {'Type': 'Door Locator Node', 'Name': 'door_locator', 'Hpr': VBase3(-1.084, 0.0, 0.0), 'Pos': Point3(0.226, -30.04, -0.042), 'Scale': VBase3(1.0, 1.0, 1.0)}, '1228171636.05kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(90.0, 0.0, 0.0), 'Pos': Point3(-19.562, -12.628, 9.043), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1228171658.06kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(90.0, 0.0, 0.0), 'Pos': Point3(-19.497, -4.055, 8.9), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1228171680.97kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(90.0, 0.0, 0.0), 'Pos': Point3(-19.522, 13.075, 8.571), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1228171681.0kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(90.0, 0.0, 0.0), 'Pos': Point3(-19.48, 6.987, 8.709), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1228171718.55kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(90.0, 0.0, 0.0), 'Pos': Point3(-23.464, 2.055, 9.623), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoBow_winter08'}}, '1228171851.33kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-90.0, 0.0, 0.0), 'Pos': Point3(19.558, 12.771, 8.257), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1228171851.36kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-90.0, 0.0, 0.0), 'Pos': Point3(19.6, 6.683, 8.394), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1228171851.37kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-90.0, 0.0, 0.0), 'Pos': Point3(19.605, -5.139, 8.562), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1228171851.39kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-90.0, 0.0, 0.0), 'Pos': Point3(19.519, -12.932, 8.729), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1228171985.95kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-90.0, 0.0, 0.0), 'Pos': Point3(23.294, 2.108, 9.247), 'Scale': VBase3(1.749, 1.749, 1.749), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoBow_winter08'}}, '1228172029.81kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-22.915, 0.0, 0.0), 'Pos': Point3(-14.676, 27.506, 8.319), 'Scale': VBase3(0.745, 0.745, 0.745), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoGift03_winter08'}}, '1228172067.47kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(97.294, 0.0, 0.0), 'Pos': Point3(17.725, -11.752, 1.974), 'Scale': VBase3(0.877, 0.877, 0.877), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoGift03_winter08'}}, '1228172094.37kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': '', 'Hpr': VBase3(20.62, 0.0, 0.0), 'Pos': Point3(17.402, -13.417, 1.908), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoGift02_winter08'}}, '1228172137.52kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(22.222, 0.0, 0.0), 'Pos': Point3(-14.48, 27.114, 2.476), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoGift03_winter08'}}, '1228172150.87kmuller': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(43.198, 0.0, 0.0), 'Pos': Point3(-15.74, 26.194, 4.277), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoGift04_winter08'}}, '1257805377.33caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(29.215, 0.0, 0.0), 'Pos': Point3(-17.989, 24.828, 8.291), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoGift01_winter08'}}, '1257805389.23caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-80.692, 0.0, 0.0), 'Pos': Point3(-16.187, 26.439, 8.319), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoGift04_winter08'}}, '1257805548.61caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(179.828, 0.0, 0.0), 'Pos': Point3(0.134, -29.849, 16.921), 'Scale': VBase3(1.647, 1.647, 1.647), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoRibbon_winter08'}}, '1257805573.24caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-179.622, 0.0, 0.0), 'Pos': Point3(13.583, -29.761, 16.921), 'Scale': VBase3(1.647, 1.647, 1.647), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoRibbon_winter08'}}, '1257805604.96caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-3.461, -2.873, 38.03), 'Pos': Point3(1.516, -29.874, 17.264), 'Scale': VBase3(3.099, 3.099, 3.099), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257805629.21caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(178.92, 6.382, 0.0), 'Pos': Point3(-13.08, -29.713, 16.646), 'Scale': VBase3(1.795, 1.795, 1.795), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoBow_winter08'}}, '1257805691.46caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-178.182, 2.38, 35.723), 'Pos': Point3(-1.065, -29.816, 17.292), 'Scale': VBase3(3.099, 3.099, 3.099), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257805757.37caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(178.92, 6.382, 0.0), 'Pos': Point3(0.206, -29.526, 16.511), 'Scale': VBase3(1.795, 1.795, 1.795), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoBow_winter08'}}, '1257805801.97caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(178.92, 6.382, 0.0), 'Pos': Point3(13.537, -29.768, 16.596), 'Scale': VBase3(1.795, 1.795, 1.795), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoBow_winter08'}}, '1257891327.63caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(40.405, 0.0, 0.0), 'Pos': Point3(-1.49, 0.401, 2.948), 'Scale': VBase3(0.743, 0.743, 0.743), 'VisSize': '', 'Visual': {'Color': (0.6000000238418579, 1.0, 0.800000011920929, 1.0), 'Model': 'models/props/pir_m_prp_hol_decoGift01_winter08'}}, '1257891346.66caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-180.0, -89.326, -179.539), 'Pos': Point3(-2.572, 0.139, 2.984), 'Scale': VBase3(0.929, 0.929, 0.929), 'VisSize': '', 'Visual': {'Color': (0.800000011920929, 0.800000011920929, 1.0, 1.0), 'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257891403.07caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-2.297, 1.647, 2.948), 'Scale': VBase3(0.515, 0.515, 0.515), 'VisSize': '', 'Visual': {'Color': (0.800000011920929, 0.800000011920929, 1.0, 1.0), 'Model': 'models/props/pir_m_prp_hol_decoGift01_winter08'}}, '1257891450.24caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(180.0, -89.326, 138.895), 'Pos': Point3(-2.13, -0.697, 2.993), 'Scale': VBase3(0.929, 0.929, 0.929), 'VisSize': '', 'Visual': {'Color': (0.800000011920929, 0.800000011920929, 1.0, 1.0), 'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}}, 'Visual': {'Model': 'models/buildings/interior_spanish_npc'}}}, 'Node Links': [], 'Layers': {}, 'ObjectIds': {'1153420207.67dzlu01': '["Objects"]["1153420207.67dzlu01"]', '1165347933.66kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1165347933.66kmuller"]', '1166138034.99kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138034.99kmuller"]', '1166138092.34kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138092.34kmuller"]', '1166138151.37kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138151.37kmuller"]', '1166138161.79kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138161.79kmuller"]', '1166138390.93kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138390.93kmuller"]', '1166138443.79kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138443.79kmuller"]', '1166138454.85kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138454.85kmuller"]', '1166138510.96kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138510.96kmuller"]', '1166138524.92kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138524.92kmuller"]', '1166138537.42kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138537.42kmuller"]', '1166138621.31kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138621.31kmuller"]', '1166138646.6kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138646.6kmuller"]', '1166138674.59kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138674.59kmuller"]', '1166138708.48kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138708.48kmuller"]', '1166138742.6kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138742.6kmuller"]', '1166138817.45kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138817.45kmuller"]', '1166138973.9kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166138973.9kmuller"]', '1166139009.4kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166139009.4kmuller"]', '1166139125.65kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166139125.65kmuller"]', '1166139259.49kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166139259.49kmuller"]', '1166139339.62kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166139339.62kmuller"]', '1166139450.46kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166139450.46kmuller"]', '1166139482.6kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166139482.6kmuller"]', '1166139534.14kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166139534.14kmuller"]', '1166139664.39kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166139664.39kmuller"]', '1166139726.17kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166139726.17kmuller"]', '1166139823.07kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166139823.07kmuller"]', '1166139883.79kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166139883.79kmuller"]', '1166140032.53kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166140032.53kmuller"]', '1166143136.15kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166143136.15kmuller"]', '1166143173.57kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166143173.57kmuller"]', '1166143204.95kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166143204.95kmuller"]', '1166143219.04kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166143219.04kmuller"]', '1166143244.09kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166143244.09kmuller"]', '1166143275.89kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1166143275.89kmuller"]', '1167972216.85kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1167972216.85kmuller"]', '1167972409.16kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1167972409.16kmuller"]', '1176423441.61dzlu': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1176423441.61dzlu"]', '1176423539.22dzlu': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1176423539.22dzlu"]', '1176423736.28dzlu': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1176423736.28dzlu"]', '1176424160.2dzlu': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1176424160.2dzlu"]', '1185496415.31kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1185496415.31kmuller"]', '1185496487.15kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1185496487.15kmuller"]', '1185496538.15kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1185496538.15kmuller"]', '1185496598.36kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1185496598.36kmuller"]', '1185496634.87kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1185496634.87kmuller"]', '1185496663.32kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1185496663.32kmuller"]', '1185496695.84kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1185496695.84kmuller"]', '1192813036.19akelts': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1192813036.19akelts"]', '1228171574.52kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228171574.52kmuller"]', '1228171636.05kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228171636.05kmuller"]', '1228171658.06kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228171658.06kmuller"]', '1228171680.97kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228171680.97kmuller"]', '1228171681.0kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228171681.0kmuller"]', '1228171718.55kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228171718.55kmuller"]', '1228171851.33kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228171851.33kmuller"]', '1228171851.36kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228171851.36kmuller"]', '1228171851.37kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228171851.37kmuller"]', '1228171851.39kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228171851.39kmuller"]', '1228171985.95kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228171985.95kmuller"]', '1228172029.81kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228172029.81kmuller"]', '1228172067.47kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228172067.47kmuller"]', '1228172094.37kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228172094.37kmuller"]', '1228172137.52kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228172137.52kmuller"]', '1228172150.87kmuller': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1228172150.87kmuller"]', '1257805377.33caoconno': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1257805377.33caoconno"]', '1257805389.23caoconno': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1257805389.23caoconno"]', '1257805548.61caoconno': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1257805548.61caoconno"]', '1257805573.24caoconno': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1257805573.24caoconno"]', '1257805604.96caoconno': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1257805604.96caoconno"]', '1257805629.21caoconno': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1257805629.21caoconno"]', '1257805691.46caoconno': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1257805691.46caoconno"]', '1257805757.37caoconno': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1257805757.37caoconno"]', '1257805801.97caoconno': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1257805801.97caoconno"]', '1257891327.63caoconno': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1257891327.63caoconno"]', '1257891346.66caoconno': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1257891346.66caoconno"]', '1257891403.07caoconno': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1257891403.07caoconno"]', '1257891450.24caoconno': '["Objects"]["1153420207.67dzlu01"]["Objects"]["1257891450.24caoconno"]'}}
extraInfo = {'camPos': Point3(0, -14, 0), 'camHpr': VBase3(0, 0, 0), 'focalLength': 0.852765381336, 'skyState': -1, 'fog': 0} | [((29376, 29393), 'pandac.PandaModules.Point3', 'Point3', (['(0)', '(-14)', '(0)'], {}), '(0, -14, 0)\n', (29382, 29393), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((29405, 29420), 'pandac.PandaModules.VBase3', 'VBase3', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (29411, 29420), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((499, 524), 'pandac.PandaModules.VBase3', 'VBase3', (['(139.803)', '(0.0)', '(0.0)'], {}), '(139.803, 0.0, 0.0)\n', (505, 524), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((533, 561), 'pandac.PandaModules.Point3', 'Point3', (['(2.978)', '(25.796)', '(0.048)'], {}), '(2.978, 25.796, 0.048)\n', (539, 561), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((572, 593), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (578, 593), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((724, 748), 'pandac.PandaModules.VBase3', 'VBase3', (['(179.29)', '(0.0)', '(0.0)'], {}), '(179.29, 0.0, 0.0)\n', (730, 748), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((757, 783), 'pandac.PandaModules.Point3', 'Point3', (['(9.307)', '(24.592)', '(0.0)'], {}), '(9.307, 24.592, 0.0)\n', (763, 783), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((794, 815), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (800, 815), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((947, 972), 'pandac.PandaModules.VBase3', 'VBase3', (['(-90.005)', '(0.0)', '(0.0)'], {}), '(-90.005, 0.0, 0.0)\n', (953, 972), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((981, 1010), 'pandac.PandaModules.Point3', 'Point3', (['(18.672)', '(15.355)', '(0.009)'], {}), '(18.672, 15.355, 0.009)\n', (987, 1010), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((1021, 1042), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (1027, 1042), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((1177, 1198), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (1183, 1198), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((1207, 1236), 'pandac.PandaModules.Point3', 'Point3', (['(18.938)', '(13.997)', '(2.735)'], {}), '(18.938, 13.997, 2.735)\n', (1213, 1236), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((1247, 1274), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.464)', '(1.464)', '(1.464)'], {}), '(1.464, 1.464, 1.464)\n', (1253, 1274), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((1395, 1416), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (1401, 1416), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((1425, 1454), 'pandac.PandaModules.Point3', 'Point3', (['(18.511)', '(15.482)', '(3.364)'], {}), '(18.511, 15.482, 3.364)\n', (1431, 1454), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((1465, 1486), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (1471, 1486), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((1627, 1648), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (1633, 1648), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((1657, 1683), 'pandac.PandaModules.Point3', 'Point3', (['(-0.303)', '(0.276)', '(0.0)'], {}), '(-0.303, 0.276, 0.0)\n', (1663, 1683), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((1694, 1715), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (1700, 1715), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((1913, 1939), 'pandac.PandaModules.VBase3', 'VBase3', (['(-134.164)', '(0.0)', '(0.0)'], {}), '(-134.164, 0.0, 0.0)\n', (1919, 1939), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((1948, 1972), 'pandac.PandaModules.Point3', 'Point3', (['(4.61)', '(-3.84)', '(0.0)'], {}), '(4.61, -3.84, 0.0)\n', (1954, 1972), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((1983, 2004), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (1989, 2004), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((2135, 2159), 'pandac.PandaModules.VBase3', 'VBase3', (['(54.358)', '(0.0)', '(0.0)'], {}), '(54.358, 0.0, 0.0)\n', (2141, 2159), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((2168, 2196), 'pandac.PandaModules.Point3', 'Point3', (['(-6.565)', '(0.327)', '(0.038)'], {}), '(-6.565, 0.327, 0.038)\n', (2174, 2196), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((2207, 2228), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (2213, 2228), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((2358, 2382), 'pandac.PandaModules.VBase3', 'VBase3', (['(162.38)', '(0.0)', '(0.0)'], {}), '(162.38, 0.0, 0.0)\n', (2364, 2382), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((2391, 2417), 'pandac.PandaModules.Point3', 'Point3', (['(-3.36)', '(-6.982)', '(0.0)'], {}), '(-3.36, -6.982, 0.0)\n', (2397, 2417), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((2428, 2449), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (2434, 2449), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((2580, 2604), 'pandac.PandaModules.VBase3', 'VBase3', (['(80.452)', '(0.0)', '(0.0)'], {}), '(80.452, 0.0, 0.0)\n', (2586, 2604), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((2613, 2638), 'pandac.PandaModules.Point3', 'Point3', (['(5.079)', '(5.725)', '(0.0)'], {}), '(5.079, 5.725, 0.0)\n', (2619, 2638), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((2649, 2670), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (2655, 2670), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((2800, 2824), 'pandac.PandaModules.VBase3', 'VBase3', (['(25.255)', '(0.0)', '(0.0)'], {}), '(25.255, 0.0, 0.0)\n', (2806, 2824), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((2833, 2859), 'pandac.PandaModules.Point3', 'Point3', (['(-1.381)', '(6.177)', '(0.0)'], {}), '(-1.381, 6.177, 0.0)\n', (2839, 2859), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((2870, 2891), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (2876, 2891), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3026, 3047), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (3032, 3047), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3056, 3084), 'pandac.PandaModules.Point3', 'Point3', (['(0.672)', '(-2.129)', '(3.008)'], {}), '(0.672, -2.129, 3.008)\n', (3062, 3084), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3095, 3116), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (3101, 3116), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3252, 3273), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (3258, 3273), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3282, 3310), 'pandac.PandaModules.Point3', 'Point3', (['(-0.184)', '(1.377)', '(3.061)'], {}), '(-0.184, 1.377, 3.061)\n', (3288, 3310), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3321, 3348), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.429)', '(1.429)', '(1.429)'], {}), '(1.429, 1.429, 1.429)\n', (3327, 3348), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3479, 3500), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (3485, 3500), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3509, 3536), 'pandac.PandaModules.Point3', 'Point3', (['(1.112)', '(0.235)', '(2.971)'], {}), '(1.112, 0.235, 2.971)\n', (3515, 3536), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3547, 3568), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (3553, 3568), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3705, 3726), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (3711, 3726), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3735, 3764), 'pandac.PandaModules.Point3', 'Point3', (['(19.066)', '(23.998)', '(3.071)'], {}), '(19.066, 23.998, 3.071)\n', (3741, 3764), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3775, 3796), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (3781, 3796), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3933, 3957), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.0)', '(-4.607)', '(0.0)'], {}), '(0.0, -4.607, 0.0)\n', (3939, 3957), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((3966, 3994), 'pandac.PandaModules.Point3', 'Point3', (['(12.569)', '(24.56)', '(2.688)'], {}), '(12.569, 24.56, 2.688)\n', (3972, 3994), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((4005, 4026), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (4011, 4026), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((4156, 4177), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (4162, 4177), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((4186, 4214), 'pandac.PandaModules.Point3', 'Point3', (['(17.053)', '(10.72)', '(0.006)'], {}), '(17.053, 10.72, 0.006)\n', (4192, 4214), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((4225, 4252), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.665)', '(0.665)', '(0.665)'], {}), '(0.665, 0.665, 0.665)\n', (4231, 4252), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((4375, 4396), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (4381, 4396), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((4405, 4432), 'pandac.PandaModules.Point3', 'Point3', (['(18.741)', '(7.367)', '(0.02)'], {}), '(18.741, 7.367, 0.02)\n', (4411, 4432), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((4443, 4464), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (4449, 4464), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((4592, 4624), 'pandac.PandaModules.VBase3', 'VBase3', (['(-2.549)', '(12.708)', '(-168.558)'], {}), '(-2.549, 12.708, -168.558)\n', (4598, 4624), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((4633, 4663), 'pandac.PandaModules.Point3', 'Point3', (['(-7.195)', '(-29.635)', '(4.369)'], {}), '(-7.195, -29.635, 4.369)\n', (4639, 4663), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((4674, 4695), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (4680, 4695), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((4859, 4884), 'pandac.PandaModules.VBase3', 'VBase3', (['(179.014)', '(0.0)', '(0.0)'], {}), '(179.014, 0.0, 0.0)\n', (4865, 4884), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((4893, 4921), 'pandac.PandaModules.Point3', 'Point3', (['(-16.599)', '(-28.46)', '(0.0)'], {}), '(-16.599, -28.46, 0.0)\n', (4899, 4921), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((4932, 4953), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (4938, 4953), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((5096, 5117), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (5102, 5117), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((5126, 5155), 'pandac.PandaModules.Point3', 'Point3', (['(19.246)', '(16.431)', '(3.391)'], {}), '(19.246, 16.431, 3.391)\n', (5132, 5155), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((5166, 5187), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (5172, 5187), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((5328, 5352), 'pandac.PandaModules.VBase3', 'VBase3', (['(57.552)', '(0.0)', '(0.0)'], {}), '(57.552, 0.0, 0.0)\n', (5334, 5352), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((5361, 5391), 'pandac.PandaModules.Point3', 'Point3', (['(15.438)', '(-23.688)', '(0.048)'], {}), '(15.438, -23.688, 0.048)\n', (5367, 5391), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((5402, 5429), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.879)', '(0.879)', '(0.879)'], {}), '(0.879, 0.879, 0.879)\n', (5408, 5429), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((5631, 5657), 'pandac.PandaModules.VBase3', 'VBase3', (['(-175.386)', '(0.0)', '(0.0)'], {}), '(-175.386, 0.0, 0.0)\n', (5637, 5657), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((5666, 5695), 'pandac.PandaModules.Point3', 'Point3', (['(-11.623)', '(-28.323)', '(0.0)'], {}), '(-11.623, -28.323, 0.0)\n', (5672, 5695), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((5706, 5727), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (5712, 5727), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((5859, 5885), 'pandac.PandaModules.VBase3', 'VBase3', (['(-100.398)', '(0.0)', '(0.0)'], {}), '(-100.398, 0.0, 0.0)\n', (5865, 5885), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((5894, 5921), 'pandac.PandaModules.Point3', 'Point3', (['(17.54)', '(-12.363)', '(0.0)'], {}), '(17.54, -12.363, 0.0)\n', (5900, 5921), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((5932, 5953), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (5938, 5953), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((6086, 6108), 'pandac.PandaModules.VBase3', 'VBase3', (['(88.8)', '(0.0)', '(0.0)'], {}), '(88.8, 0.0, 0.0)\n', (6092, 6108), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((6117, 6147), 'pandac.PandaModules.Point3', 'Point3', (['(-19.032)', '(-8.401)', '(0.172)'], {}), '(-19.032, -8.401, 0.172)\n', (6123, 6147), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((6158, 6179), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (6164, 6179), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((6306, 6331), 'pandac.PandaModules.VBase3', 'VBase3', (['(-38.995)', '(0.0)', '(0.0)'], {}), '(-38.995, 0.0, 0.0)\n', (6312, 6331), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((6340, 6366), 'pandac.PandaModules.Point3', 'Point3', (['(4.278)', '(24.282)', '(0.0)'], {}), '(4.278, 24.282, 0.0)\n', (6346, 6366), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((6377, 6398), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (6383, 6398), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((6538, 6562), 'pandac.PandaModules.VBase3', 'VBase3', (['(-56.33)', '(0.0)', '(0.0)'], {}), '(-56.33, 0.0, 0.0)\n', (6544, 6562), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((6571, 6600), 'pandac.PandaModules.Point3', 'Point3', (['(20.726)', '(15.931)', '(4.923)'], {}), '(20.726, 15.931, 4.923)\n', (6577, 6600), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((6611, 6632), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (6617, 6632), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((6760, 6790), 'pandac.PandaModules.VBase3', 'VBase3', (['(-45.198)', '(-0.006)', '(0.006)'], {}), '(-45.198, -0.006, 0.006)\n', (6766, 6790), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((6799, 6828), 'pandac.PandaModules.Point3', 'Point3', (['(21.602)', '(17.485)', '(4.688)'], {}), '(21.602, 17.485, 4.688)\n', (6805, 6828), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((6839, 6860), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (6845, 6860), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((6988, 7011), 'pandac.PandaModules.VBase3', 'VBase3', (['(2.971)', '(0.0)', '(0.0)'], {}), '(2.971, 0.0, 0.0)\n', (6994, 7011), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7020, 7047), 'pandac.PandaModules.Point3', 'Point3', (['(21.796)', '(18.912)', '(4.7)'], {}), '(21.796, 18.912, 4.7)\n', (7026, 7047), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7058, 7079), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (7064, 7079), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7214, 7235), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (7220, 7235), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7244, 7272), 'pandac.PandaModules.Point3', 'Point3', (['(-2.651)', '(29.91)', '(7.991)'], {}), '(-2.651, 29.91, 7.991)\n', (7250, 7272), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7283, 7304), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (7289, 7304), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7436, 7460), 'pandac.PandaModules.VBase3', 'VBase3', (['(87.919)', '(0.0)', '(0.0)'], {}), '(87.919, 0.0, 0.0)\n', (7442, 7460), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7469, 7499), 'pandac.PandaModules.Point3', 'Point3', (['(-19.128)', '(10.233)', '(7.623)'], {}), '(-19.128, 10.233, 7.623)\n', (7475, 7499), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7510, 7531), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (7516, 7531), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7668, 7692), 'pandac.PandaModules.VBase3', 'VBase3', (['(87.919)', '(0.0)', '(0.0)'], {}), '(87.919, 0.0, 0.0)\n', (7674, 7692), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7701, 7731), 'pandac.PandaModules.Point3', 'Point3', (['(-19.101)', '(-8.222)', '(7.695)'], {}), '(-19.101, -8.222, 7.695)\n', (7707, 7731), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7742, 7763), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (7748, 7763), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7900, 7925), 'pandac.PandaModules.VBase3', 'VBase3', (['(-90.159)', '(0.0)', '(0.0)'], {}), '(-90.159, 0.0, 0.0)\n', (7906, 7925), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7934, 7961), 'pandac.PandaModules.Point3', 'Point3', (['(18.91)', '(9.923)', '(7.471)'], {}), '(18.91, 9.923, 7.471)\n', (7940, 7961), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((7972, 7993), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (7978, 7993), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((8145, 8170), 'pandac.PandaModules.VBase3', 'VBase3', (['(-90.159)', '(0.0)', '(0.0)'], {}), '(-90.159, 0.0, 0.0)\n', (8151, 8170), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((8179, 8208), 'pandac.PandaModules.Point3', 'Point3', (['(19.055)', '(-9.027)', '(7.695)'], {}), '(19.055, -9.027, 7.695)\n', (8185, 8208), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((8219, 8240), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (8225, 8240), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((8392, 8413), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (8398, 8413), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((8422, 8452), 'pandac.PandaModules.Point3', 'Point3', (['(-0.798)', '(10.488)', '(17.608)'], {}), '(-0.798, 10.488, 17.608)\n', (8428, 8452), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((8463, 8484), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (8469, 8484), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((8625, 8646), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (8631, 8646), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((8655, 8686), 'pandac.PandaModules.Point3', 'Point3', (['(-0.592)', '(-10.927)', '(17.594)'], {}), '(-0.592, -10.927, 17.594)\n', (8661, 8686), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((8697, 8718), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (8703, 8718), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((8853, 8877), 'pandac.PandaModules.VBase3', 'VBase3', (['(44.958)', '(0.0)', '(0.0)'], {}), '(44.958, 0.0, 0.0)\n', (8859, 8877), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((8886, 8914), 'pandac.PandaModules.Point3', 'Point3', (['(-16.331)', '(26.168)', '(0.0)'], {}), '(-16.331, 26.168, 0.0)\n', (8892, 8914), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((8925, 8946), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (8931, 8946), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((9080, 9101), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (9086, 9101), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((9110, 9137), 'pandac.PandaModules.Point3', 'Point3', (['(-19.259)', '(21.62)', '(0.0)'], {}), '(-19.259, 21.62, 0.0)\n', (9116, 9137), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((9148, 9169), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (9154, 9169), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((9386, 9414), 'pandac.PandaModules.VBase3', 'VBase3', (['(6.993)', '(-61.677)', '(8.03)'], {}), '(6.993, -61.677, 8.03)\n', (9392, 9414), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((9467, 9497), 'pandac.PandaModules.Point3', 'Point3', (['(2.574)', '(-18.447)', '(27.908)'], {}), '(2.574, -18.447, 27.908)\n', (9473, 9497), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((9508, 9529), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (9514, 9529), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((9796, 9825), 'pandac.PandaModules.VBase3', 'VBase3', (['(5.763)', '(-56.906)', '(6.972)'], {}), '(5.763, -56.906, 6.972)\n', (9802, 9825), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((9878, 9908), 'pandac.PandaModules.Point3', 'Point3', (['(-1.976)', '(15.649)', '(24.802)'], {}), '(-1.976, 15.649, 24.802)\n', (9884, 9908), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((9919, 9940), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (9925, 9940), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((10205, 10228), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.0)', '(1.848)', '(0.0)'], {}), '(0.0, 1.848, 0.0)\n', (10211, 10228), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((10282, 10313), 'pandac.PandaModules.Point3', 'Point3', (['(-0.034)', '(-10.675)', '(13.873)'], {}), '(-0.034, -10.675, 13.873)\n', (10288, 10313), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((10324, 10345), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (10330, 10345), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((10598, 10621), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.0)', '(1.848)', '(0.0)'], {}), '(0.0, 1.848, 0.0)\n', (10604, 10621), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((10675, 10705), 'pandac.PandaModules.Point3', 'Point3', (['(-0.105)', '(11.422)', '(13.384)'], {}), '(-0.105, 11.422, 13.384)\n', (10681, 10705), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((10716, 10737), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (10722, 10737), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((10915, 10936), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (10921, 10936), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((10945, 10974), 'pandac.PandaModules.Point3', 'Point3', (['(4.727)', '(26.813)', '(-0.119)'], {}), '(4.727, 26.813, -0.119)\n', (10951, 10974), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((10985, 11012), 'pandac.PandaModules.VBase3', 'VBase3', (['(2.057)', '(1.302)', '(1.198)'], {}), '(2.057, 1.302, 1.198)\n', (10991, 11012), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((11169, 11193), 'pandac.PandaModules.VBase3', 'VBase3', (['(45.263)', '(0.0)', '(0.0)'], {}), '(45.263, 0.0, 0.0)\n', (11175, 11193), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((11202, 11233), 'pandac.PandaModules.Point3', 'Point3', (['(-15.061)', '(24.578)', '(-0.449)'], {}), '(-15.061, 24.578, -0.449)\n', (11208, 11233), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((11244, 11269), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.603)', '(1.0)', '(1.891)'], {}), '(1.603, 1.0, 1.891)\n', (11250, 11269), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((11427, 11448), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (11433, 11448), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((11457, 11489), 'pandac.PandaModules.Point3', 'Point3', (['(-15.225)', '(-28.682)', '(-0.316)'], {}), '(-15.225, -28.682, -0.316)\n', (11463, 11489), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((11500, 11527), 'pandac.PandaModules.VBase3', 'VBase3', (['(2.053)', '(0.567)', '(2.235)'], {}), '(2.053, 0.567, 2.235)\n', (11506, 11527), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((11673, 11694), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (11679, 11694), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((11703, 11730), 'pandac.PandaModules.Point3', 'Point3', (['(8.521)', '(-28.523)', '(0.0)'], {}), '(8.521, -28.523, 0.0)\n', (11709, 11730), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((11741, 11765), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.77)', '(0.77)', '(0.77)'], {}), '(0.77, 0.77, 0.77)\n', (11747, 11765), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((11983, 12009), 'pandac.PandaModules.VBase3', 'VBase3', (['(-105.442)', '(0.0)', '(0.0)'], {}), '(-105.442, 0.0, 0.0)\n', (11989, 12009), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((12018, 12048), 'pandac.PandaModules.Point3', 'Point3', (['(6.902)', '(-26.349)', '(-0.415)'], {}), '(6.902, -26.349, -0.415)\n', (12024, 12048), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((12059, 12084), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.856)', '(1.0)', '(1.451)'], {}), '(0.856, 1.0, 1.451)\n', (12065, 12084), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((12242, 12268), 'pandac.PandaModules.VBase3', 'VBase3', (['(-134.387)', '(0.0)', '(0.0)'], {}), '(-134.387, 0.0, 0.0)\n', (12248, 12268), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((12277, 12308), 'pandac.PandaModules.Point3', 'Point3', (['(11.183)', '(-19.168)', '(-0.394)'], {}), '(11.183, -19.168, -0.394)\n', (12283, 12308), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((12319, 12342), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.955)', '(1.0)', '(1.0)'], {}), '(0.955, 1.0, 1.0)\n', (12325, 12342), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((12500, 12525), 'pandac.PandaModules.VBase3', 'VBase3', (['(177.474)', '(0.0)', '(0.0)'], {}), '(177.474, 0.0, 0.0)\n', (12506, 12525), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((12534, 12565), 'pandac.PandaModules.Point3', 'Point3', (['(18.836)', '(-16.153)', '(-1.477)'], {}), '(18.836, -16.153, -1.477)\n', (12540, 12565), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((12576, 12601), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.944)', '(1.0)', '(1.196)'], {}), '(0.944, 1.0, 1.196)\n', (12582, 12601), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((12755, 12776), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (12761, 12776), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((12785, 12813), 'pandac.PandaModules.Point3', 'Point3', (['(16.066)', '(27.69)', '(0.728)'], {}), '(16.066, 27.69, 0.728)\n', (12791, 12813), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((12824, 12845), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (12830, 12845), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((13002, 13026), 'pandac.PandaModules.VBase3', 'VBase3', (['(-1.084)', '(0.0)', '(0.0)'], {}), '(-1.084, 0.0, 0.0)\n', (13008, 13026), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((13035, 13064), 'pandac.PandaModules.Point3', 'Point3', (['(0.226)', '(-30.04)', '(-0.042)'], {}), '(0.226, -30.04, -0.042)\n', (13041, 13064), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((13075, 13096), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (13081, 13096), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((13206, 13228), 'pandac.PandaModules.VBase3', 'VBase3', (['(90.0)', '(0.0)', '(0.0)'], {}), '(90.0, 0.0, 0.0)\n', (13212, 13228), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((13237, 13268), 'pandac.PandaModules.Point3', 'Point3', (['(-19.562)', '(-12.628)', '(9.043)'], {}), '(-19.562, -12.628, 9.043)\n', (13243, 13268), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((13279, 13300), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (13285, 13300), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((13494, 13516), 'pandac.PandaModules.VBase3', 'VBase3', (['(90.0)', '(0.0)', '(0.0)'], {}), '(90.0, 0.0, 0.0)\n', (13500, 13516), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((13525, 13553), 'pandac.PandaModules.Point3', 'Point3', (['(-19.497)', '(-4.055)', '(8.9)'], {}), '(-19.497, -4.055, 8.9)\n', (13531, 13553), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((13564, 13585), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (13570, 13585), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((13779, 13801), 'pandac.PandaModules.VBase3', 'VBase3', (['(90.0)', '(0.0)', '(0.0)'], {}), '(90.0, 0.0, 0.0)\n', (13785, 13801), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((13810, 13840), 'pandac.PandaModules.Point3', 'Point3', (['(-19.522)', '(13.075)', '(8.571)'], {}), '(-19.522, 13.075, 8.571)\n', (13816, 13840), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((13851, 13872), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (13857, 13872), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((14065, 14087), 'pandac.PandaModules.VBase3', 'VBase3', (['(90.0)', '(0.0)', '(0.0)'], {}), '(90.0, 0.0, 0.0)\n', (14071, 14087), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((14096, 14124), 'pandac.PandaModules.Point3', 'Point3', (['(-19.48)', '(6.987)', '(8.709)'], {}), '(-19.48, 6.987, 8.709)\n', (14102, 14124), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((14135, 14156), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (14141, 14156), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((14350, 14372), 'pandac.PandaModules.VBase3', 'VBase3', (['(90.0)', '(0.0)', '(0.0)'], {}), '(90.0, 0.0, 0.0)\n', (14356, 14372), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((14381, 14410), 'pandac.PandaModules.Point3', 'Point3', (['(-23.464)', '(2.055)', '(9.623)'], {}), '(-23.464, 2.055, 9.623)\n', (14387, 14410), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((14421, 14442), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (14427, 14442), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((14635, 14658), 'pandac.PandaModules.VBase3', 'VBase3', (['(-90.0)', '(0.0)', '(0.0)'], {}), '(-90.0, 0.0, 0.0)\n', (14641, 14658), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((14667, 14696), 'pandac.PandaModules.Point3', 'Point3', (['(19.558)', '(12.771)', '(8.257)'], {}), '(19.558, 12.771, 8.257)\n', (14673, 14696), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((14707, 14728), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (14713, 14728), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((14922, 14945), 'pandac.PandaModules.VBase3', 'VBase3', (['(-90.0)', '(0.0)', '(0.0)'], {}), '(-90.0, 0.0, 0.0)\n', (14928, 14945), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((14954, 14980), 'pandac.PandaModules.Point3', 'Point3', (['(19.6)', '(6.683)', '(8.394)'], {}), '(19.6, 6.683, 8.394)\n', (14960, 14980), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((14991, 15012), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (14997, 15012), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((15206, 15229), 'pandac.PandaModules.VBase3', 'VBase3', (['(-90.0)', '(0.0)', '(0.0)'], {}), '(-90.0, 0.0, 0.0)\n', (15212, 15229), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((15238, 15267), 'pandac.PandaModules.Point3', 'Point3', (['(19.605)', '(-5.139)', '(8.562)'], {}), '(19.605, -5.139, 8.562)\n', (15244, 15267), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((15278, 15299), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (15284, 15299), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((15493, 15516), 'pandac.PandaModules.VBase3', 'VBase3', (['(-90.0)', '(0.0)', '(0.0)'], {}), '(-90.0, 0.0, 0.0)\n', (15499, 15516), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((15525, 15555), 'pandac.PandaModules.Point3', 'Point3', (['(19.519)', '(-12.932)', '(8.729)'], {}), '(19.519, -12.932, 8.729)\n', (15531, 15555), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((15566, 15587), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (15572, 15587), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((15781, 15804), 'pandac.PandaModules.VBase3', 'VBase3', (['(-90.0)', '(0.0)', '(0.0)'], {}), '(-90.0, 0.0, 0.0)\n', (15787, 15804), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((15813, 15841), 'pandac.PandaModules.Point3', 'Point3', (['(23.294)', '(2.108)', '(9.247)'], {}), '(23.294, 2.108, 9.247)\n', (15819, 15841), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((15852, 15879), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.749)', '(1.749)', '(1.749)'], {}), '(1.749, 1.749, 1.749)\n', (15858, 15879), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((16072, 16097), 'pandac.PandaModules.VBase3', 'VBase3', (['(-22.915)', '(0.0)', '(0.0)'], {}), '(-22.915, 0.0, 0.0)\n', (16078, 16097), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((16106, 16136), 'pandac.PandaModules.Point3', 'Point3', (['(-14.676)', '(27.506)', '(8.319)'], {}), '(-14.676, 27.506, 8.319)\n', (16112, 16136), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((16147, 16174), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.745)', '(0.745)', '(0.745)'], {}), '(0.745, 0.745, 0.745)\n', (16153, 16174), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((16370, 16394), 'pandac.PandaModules.VBase3', 'VBase3', (['(97.294)', '(0.0)', '(0.0)'], {}), '(97.294, 0.0, 0.0)\n', (16376, 16394), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((16403, 16433), 'pandac.PandaModules.Point3', 'Point3', (['(17.725)', '(-11.752)', '(1.974)'], {}), '(17.725, -11.752, 1.974)\n', (16409, 16433), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((16444, 16471), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.877)', '(0.877)', '(0.877)'], {}), '(0.877, 0.877, 0.877)\n', (16450, 16471), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((16653, 16676), 'pandac.PandaModules.VBase3', 'VBase3', (['(20.62)', '(0.0)', '(0.0)'], {}), '(20.62, 0.0, 0.0)\n', (16659, 16676), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((16685, 16715), 'pandac.PandaModules.Point3', 'Point3', (['(17.402)', '(-13.417)', '(1.908)'], {}), '(17.402, -13.417, 1.908)\n', (16691, 16715), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((16726, 16747), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (16732, 16747), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((16943, 16967), 'pandac.PandaModules.VBase3', 'VBase3', (['(22.222)', '(0.0)', '(0.0)'], {}), '(22.222, 0.0, 0.0)\n', (16949, 16967), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((16976, 17005), 'pandac.PandaModules.Point3', 'Point3', (['(-14.48)', '(27.114)', '(2.476)'], {}), '(-14.48, 27.114, 2.476)\n', (16982, 17005), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((17016, 17037), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (17022, 17037), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((17233, 17257), 'pandac.PandaModules.VBase3', 'VBase3', (['(43.198)', '(0.0)', '(0.0)'], {}), '(43.198, 0.0, 0.0)\n', (17239, 17257), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((17266, 17295), 'pandac.PandaModules.Point3', 'Point3', (['(-15.74)', '(26.194)', '(4.277)'], {}), '(-15.74, 26.194, 4.277)\n', (17272, 17295), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((17306, 17327), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (17312, 17327), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((17524, 17548), 'pandac.PandaModules.VBase3', 'VBase3', (['(29.215)', '(0.0)', '(0.0)'], {}), '(29.215, 0.0, 0.0)\n', (17530, 17548), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((17557, 17587), 'pandac.PandaModules.Point3', 'Point3', (['(-17.989)', '(24.828)', '(8.291)'], {}), '(-17.989, 24.828, 8.291)\n', (17563, 17587), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((17598, 17619), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (17604, 17619), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((17816, 17841), 'pandac.PandaModules.VBase3', 'VBase3', (['(-80.692)', '(0.0)', '(0.0)'], {}), '(-80.692, 0.0, 0.0)\n', (17822, 17841), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((17850, 17880), 'pandac.PandaModules.Point3', 'Point3', (['(-16.187)', '(26.439)', '(8.319)'], {}), '(-16.187, 26.439, 8.319)\n', (17856, 17880), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((17891, 17912), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.0)', '(1.0)', '(1.0)'], {}), '(1.0, 1.0, 1.0)\n', (17897, 17912), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((18109, 18134), 'pandac.PandaModules.VBase3', 'VBase3', (['(179.828)', '(0.0)', '(0.0)'], {}), '(179.828, 0.0, 0.0)\n', (18115, 18134), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((18143, 18173), 'pandac.PandaModules.Point3', 'Point3', (['(0.134)', '(-29.849)', '(16.921)'], {}), '(0.134, -29.849, 16.921)\n', (18149, 18173), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((18184, 18211), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.647)', '(1.647)', '(1.647)'], {}), '(1.647, 1.647, 1.647)\n', (18190, 18211), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((18408, 18434), 'pandac.PandaModules.VBase3', 'VBase3', (['(-179.622)', '(0.0)', '(0.0)'], {}), '(-179.622, 0.0, 0.0)\n', (18414, 18434), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((18443, 18474), 'pandac.PandaModules.Point3', 'Point3', (['(13.583)', '(-29.761)', '(16.921)'], {}), '(13.583, -29.761, 16.921)\n', (18449, 18474), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((18485, 18512), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.647)', '(1.647)', '(1.647)'], {}), '(1.647, 1.647, 1.647)\n', (18491, 18512), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((18709, 18738), 'pandac.PandaModules.VBase3', 'VBase3', (['(-3.461)', '(-2.873)', '(38.03)'], {}), '(-3.461, -2.873, 38.03)\n', (18715, 18738), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((18747, 18777), 'pandac.PandaModules.Point3', 'Point3', (['(1.516)', '(-29.874)', '(17.264)'], {}), '(1.516, -29.874, 17.264)\n', (18753, 18777), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((18788, 18815), 'pandac.PandaModules.VBase3', 'VBase3', (['(3.099)', '(3.099)', '(3.099)'], {}), '(3.099, 3.099, 3.099)\n', (18794, 18815), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((19011, 19037), 'pandac.PandaModules.VBase3', 'VBase3', (['(178.92)', '(6.382)', '(0.0)'], {}), '(178.92, 6.382, 0.0)\n', (19017, 19037), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((19046, 19077), 'pandac.PandaModules.Point3', 'Point3', (['(-13.08)', '(-29.713)', '(16.646)'], {}), '(-13.08, -29.713, 16.646)\n', (19052, 19077), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((19088, 19115), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.795)', '(1.795)', '(1.795)'], {}), '(1.795, 1.795, 1.795)\n', (19094, 19115), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((19309, 19339), 'pandac.PandaModules.VBase3', 'VBase3', (['(-178.182)', '(2.38)', '(35.723)'], {}), '(-178.182, 2.38, 35.723)\n', (19315, 19339), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((19348, 19379), 'pandac.PandaModules.Point3', 'Point3', (['(-1.065)', '(-29.816)', '(17.292)'], {}), '(-1.065, -29.816, 17.292)\n', (19354, 19379), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((19390, 19417), 'pandac.PandaModules.VBase3', 'VBase3', (['(3.099)', '(3.099)', '(3.099)'], {}), '(3.099, 3.099, 3.099)\n', (19396, 19417), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((19613, 19639), 'pandac.PandaModules.VBase3', 'VBase3', (['(178.92)', '(6.382)', '(0.0)'], {}), '(178.92, 6.382, 0.0)\n', (19619, 19639), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((19648, 19678), 'pandac.PandaModules.Point3', 'Point3', (['(0.206)', '(-29.526)', '(16.511)'], {}), '(0.206, -29.526, 16.511)\n', (19654, 19678), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((19689, 19716), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.795)', '(1.795)', '(1.795)'], {}), '(1.795, 1.795, 1.795)\n', (19695, 19716), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((19910, 19936), 'pandac.PandaModules.VBase3', 'VBase3', (['(178.92)', '(6.382)', '(0.0)'], {}), '(178.92, 6.382, 0.0)\n', (19916, 19936), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((19945, 19976), 'pandac.PandaModules.Point3', 'Point3', (['(13.537)', '(-29.768)', '(16.596)'], {}), '(13.537, -29.768, 16.596)\n', (19951, 19976), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((19987, 20014), 'pandac.PandaModules.VBase3', 'VBase3', (['(1.795)', '(1.795)', '(1.795)'], {}), '(1.795, 1.795, 1.795)\n', (19993, 20014), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((20208, 20232), 'pandac.PandaModules.VBase3', 'VBase3', (['(40.405)', '(0.0)', '(0.0)'], {}), '(40.405, 0.0, 0.0)\n', (20214, 20232), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((20241, 20268), 'pandac.PandaModules.Point3', 'Point3', (['(-1.49)', '(0.401)', '(2.948)'], {}), '(-1.49, 0.401, 2.948)\n', (20247, 20268), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((20279, 20306), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.743)', '(0.743)', '(0.743)'], {}), '(0.743, 0.743, 0.743)\n', (20285, 20306), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((20563, 20596), 'pandac.PandaModules.VBase3', 'VBase3', (['(-180.0)', '(-89.326)', '(-179.539)'], {}), '(-180.0, -89.326, -179.539)\n', (20569, 20596), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((20605, 20633), 'pandac.PandaModules.Point3', 'Point3', (['(-2.572)', '(0.139)', '(2.984)'], {}), '(-2.572, 0.139, 2.984)\n', (20611, 20633), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((20644, 20671), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.929)', '(0.929)', '(0.929)'], {}), '(0.929, 0.929, 0.929)\n', (20650, 20671), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((20926, 20947), 'pandac.PandaModules.Point3', 'Point3', (['(0.0)', '(0.0)', '(0.0)'], {}), '(0.0, 0.0, 0.0)\n', (20932, 20947), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((20956, 20984), 'pandac.PandaModules.Point3', 'Point3', (['(-2.297)', '(1.647)', '(2.948)'], {}), '(-2.297, 1.647, 2.948)\n', (20962, 20984), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((20995, 21022), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.515)', '(0.515)', '(0.515)'], {}), '(0.515, 0.515, 0.515)\n', (21001, 21022), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((21278, 21309), 'pandac.PandaModules.VBase3', 'VBase3', (['(180.0)', '(-89.326)', '(138.895)'], {}), '(180.0, -89.326, 138.895)\n', (21284, 21309), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((21318, 21346), 'pandac.PandaModules.Point3', 'Point3', (['(-2.13)', '(-0.697)', '(2.993)'], {}), '(-2.13, -0.697, 2.993)\n', (21324, 21346), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n'), ((21357, 21384), 'pandac.PandaModules.VBase3', 'VBase3', (['(0.929)', '(0.929)', '(0.929)'], {}), '(0.929, 0.929, 0.929)\n', (21363, 21384), False, 'from pandac.PandaModules import Point3, VBase3, Vec4, Vec3\n')] |
Light-Lens/PassGen | main.py | 8f4f2ef08299d6243b939d0f08ac75bde3cabf5e | # PassGen
# These imports will be used for this project.
from colorama import Fore, Style
from colorama import init
import datetime
import string
import random
import sys
import os
# Initilaze File organizer.
os.system('title PassGen')
init(autoreset = True)
# Create Log Functions.
class LOG:
def INFO_LOG(message):
CurrentTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print(f"{CurrentTime} - INFO: {message}")
def STATUS_LOG(message):
CurrentTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print(f"{CurrentTime} - STATUS: {message}")
def ERROR_LOG(message):
CurrentTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print(Fore.RED + Style.BRIGHT + f"{CurrentTime} - ERROR: {message}")
def WARN_LOG(message):
CurrentTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print(Fore.YELLOW + Style.BRIGHT + f"{CurrentTime} - WARNING: {message}")
# This will Generate a Strong Password for the User!
def Generate(PassLen):
JoinChars = [] # Create an Empty List.
# Split the List of these String Operations, and Join them to JoinChars List.
JoinChars.extend(list(string.ascii_letters))
JoinChars.extend(list(string.digits))
JoinChars.extend(list(string.punctuation))
random.shuffle(JoinChars) # Shuffle the List.
# Get the random passoword.
return "".join(JoinChars[0:PassLen])
# Code Logic here.
LOG.WARN_LOG("Initialized PassGen!")
LOG.STATUS_LOG("Generating a Random Password for You.")
Password = Generate(random.randint(5, 17))
LOG.INFO_LOG(f"Your Password is: {Password}")
with open("Password.log", "a") as File: File.write(f"{Password}\n")
if (len(sys.argv) == 1) or (len(sys.argv) > 1 and sys.argv[1].lower() != "-o"):
os.system("start Password.log")
sys.exit() # Exiting the program successfully.
| [((221, 247), 'os.system', 'os.system', (['"""title PassGen"""'], {}), "('title PassGen')\n", (230, 247), False, 'import os\n'), ((249, 269), 'colorama.init', 'init', ([], {'autoreset': '(True)'}), '(autoreset=True)\n', (253, 269), False, 'from colorama import init\n'), ((1796, 1806), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1804, 1806), False, 'import sys\n'), ((1282, 1307), 'random.shuffle', 'random.shuffle', (['JoinChars'], {}), '(JoinChars)\n', (1296, 1307), False, 'import random\n'), ((1539, 1560), 'random.randint', 'random.randint', (['(5)', '(17)'], {}), '(5, 17)\n', (1553, 1560), False, 'import random\n'), ((1761, 1792), 'os.system', 'os.system', (['"""start Password.log"""'], {}), "('start Password.log')\n", (1770, 1792), False, 'import os\n'), ((353, 376), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (374, 376), False, 'import datetime\n'), ((498, 521), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (519, 521), False, 'import datetime\n'), ((644, 667), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (665, 667), False, 'import datetime\n'), ((814, 837), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (835, 837), False, 'import datetime\n')] |
iotexpert/docmgr | memos/memos/models/Memo.py | 735c7bcbaeb73bc44efecffb175f268f2438ac3a | """
The model file for a Memo
"""
import re
import os
import shutil
import json
from datetime import datetime
from flask import current_app
from memos import db
from memos.models.User import User
from memos.models.MemoState import MemoState
from memos.models.MemoFile import MemoFile
from memos.models.MemoSignature import MemoSignature
from memos.models.MemoReference import MemoReference
from memos.models.MemoHistory import MemoHistory
from memos.models.MemoActivity import MemoActivity
from memos.revletter import b10_to_rev, rev_to_b10
class Memo(db.Model):
"""This class is the single interface to a "memo" and all of the "memos"
"""
id = db.Column(db.Integer, primary_key=True)
number = db.Column(db.Integer) # Memo Number
version = db.Column(db.String) # A,B,..Z,AA,AB,...AZ,BA
confidential = db.Column(db.Boolean, default=False) # if true only author, signer, distribution can read
distribution = db.Column(db.String(128), default='') # user names on the distribution
keywords = db.Column(db.String(128), default='') # any keyword
title = db.Column(db.String(128), nullable=False, default='') # The title of the memo
num_files = db.Column(db.Integer, default=0) # The number of files attached to the memo
action_date = db.Column(db.DateTime, nullable=False, default=datetime.utcnow) # The last time anything happened
create_date = db.Column(db.DateTime) # when the memo was created
submit_date = db.Column(db.DateTime) # when the memo was most recently submitted (from created)
active_date = db.Column(db.DateTime) # when the memo was moved to active state (from submitted)
obsolete_date = db.Column(db.DateTime) # when the memo was moved to obsolete state (from active)
user_id = db.Column(db.String(120), db.ForeignKey('user.username'),nullable=False) # The key of the user who owns the memo
_signers = db.Column(db.String(128),default='') # the hidden list of signer usernames
_references = db.Column(db.String(128),default='') # The hidden list of references
memo_state = db.Column(db.Enum(MemoState)) # Draft, Signoff, Active, Obsolete
def __init__(self, **kwargs):
super().__init__(**kwargs)
# do custom initialization here
def __repr__(self):
return f"{self.user.username}-{self.number}{self.version}"
def __str__(self):
return f"{self.user.username}-{self.number}{self.version}"
########################################
# Permission Functions
########################################
@staticmethod
def can_create(owner=None, delegate=None):
"""Will return true if the delegate can create a memo for the owner"""
if owner is None:
return False
if delegate is None:
delegate = owner
return owner.is_delegate(delegate=delegate)
def can_revise(self, delegate=None):
"""Is the delgate allowed to update "this" memo?"""
if delegate is None:
return False
if not self.user.is_delegate(delegate):
return False
if self.memo_state == MemoState.Active or self.memo_state == MemoState.Obsolete:
return True
def can_sign(self, signer=None, delegate=None):
"""Can this memo be signed by delegate for the signers"""
if signer is None or delegate is None:
return False
if self.memo_state != MemoState.Signoff:
return False
if not signer.is_delegate(delegate=delegate):
return False
# The list of signers and if they have signed are kept in the MemoSignature table
status = MemoSignature.is_signer(self.id,signer)
return status['is_signer'] and not status['status']
def can_unsign(self, signer=None, delegate=None):
"""Can this memo be unsigned by delegate for the signer """
if signer is None or delegate is None:
return False
if self.memo_state != MemoState.Signoff:
return False
if not signer.is_delegate(delegate=delegate):
return False
status = MemoSignature.is_signer(self.id,signer)
return status['is_signer'] and status['status']
def can_obsolete(self, delegate=None):
""" Can this memo be obsoleted by the delegate? Only active memos can be obsoleted """
if delegate is None:
return False
if not self.user.is_delegate(delegate):
return False
if self.memo_state == MemoState.Active:
return True
return False
def can_cancel(self, delegate=None):
""" can this memo be cancled by the delegate. Only drafts memos can be canceled"""
if delegate is None:
return False
if self.memo_state != MemoState.Draft:
return False
if not self.user.is_delegate(delegate=delegate):
return False
return True
def can_reject(self, signer=None, delegate=None):
""" can this memo be rejected by the delegate. Only memos in signoff can be rejected"""
if signer is None or delegate is None:
return False
if self.memo_state != MemoState.Signoff:
return False
if not signer.is_delegate(delegate):
return False
status = MemoSignature.is_signer(memo_id=self.id,signer=signer)
# if you are a signer you can reject.. even if you have already signed
return status['is_signer']
def has_access(self, user=None):
"""This function will return True of the "username" has access to self"""
# if it is not confidential than anyone can access
if self.confidential == False:
return True
# at this point we know it is confidential so ... they must provide a username
if user is None:
return False
# you alway have access to your own memo's
if self.user.username == user.username:
return True
if user.admin:
return True
if user.readAll:
return True
# if the username is in the distribution list then provide access TODO: ARH do something better
if user.username in re.split('\s|\,|\t|\;|\:',self.distribution):
return True
return False
########################################
# ??? Functions
########################################
def get_fullpath(self):
""" This function gives the os path to a file """
path = os.path.join(current_app.root_path,"static","memos",f"{self.user_id}",f"{self.number}",f"{self.version}")
return path
def get_relpath(self):
""" Return the relative path of this memo """
path = os.path.join("/static","memos",f"{self.user_id}",f"{self.number}",f"{self.version}")
return path
def get_files(self):
""" Return a list of the files attached to this memo"""
memo_list = MemoFile.query.filter_by(memo_id=self.id).all()
return memo_list
def saveJson(self):
""" Create the JSON file which is a copy of all of the meta data """
js = {}
js['title']=self.title
js['number']=self.number
js['version']=self.version
js['confidential']=self.confidential
js['distribution']=self.distribution
js['keywords']=self.keywords
js['userid']=self.user_id
js['memo_state']=f"{self.memo_state}"
js['keywords']= self.keywords
js['signers']=self.signers['signers']
js['references']= self.references['ref_string']
js['files']=[]
for file in self.get_files():
js['files'].append(file.filename)
path = os.path.join(self.get_fullpath())
#current_app.logger.info(f"Making Directory {path}")
os.makedirs(path,exist_ok=True)
#current_app.logger.info(f"Making Succeeded {path}")
path = os.path.join(path,f"meta-{self.user_id}-{self.number}-{self.version}.json")
f = open(path,"w")
json.dump(js,f)
f.close()
@property
def signers(self):
# get the signers from the signing table and turn it back to a string and a list
siglist = MemoSignature.get_signers(self)
for sig in siglist:
sig.signer = User.find(username=sig.signer_id)
sig.delegate = User.find(username=sig.delegate_id)
return {'signers':self._signers,'siglist':siglist}
@signers.setter
def signers(self,signer_names):
self._signers = signer_names
MemoSignature.delete_signers(self)
users = User.valid_usernames(signer_names)
for signer in users['valid_users']:
MemoSignature.add_signer(memo=self,signer=signer)
######################################################################
# References
######################################################################
@staticmethod
def parse_reference(reference):
parts = re.split(r'-',reference)
if len(parts) == 2:
parts.append(None)
return parts
@staticmethod
def valid_references(references):
current_app.logger.info(f'references ={references}')
valid_memos = []
valid_refs = []
invalid = []
for memo_ref in re.split(r'\s|\,|\t|\;|\:',references):
if memo_ref == '':
continue
parts = Memo.parse_reference(memo_ref)
if len(parts) > 3 or len(parts) < 2:
invalid.append(memo_ref)
current_app.logger.info(f"INVALID length append {memo_ref} valid={valid_memos} invalid {invalid}")
continue
username = parts[0]
memo_number = parts[1]
memo_version = parts[2]
memo = Memo.find(username=username,memo_number=memo_number,memo_version=memo_version)
current_app.logger.info(f"Memo = {memo}")
if memo != None and (memo.memo_state == MemoState.Active or memo.memo_state == MemoState.Obsolete):
valid_memos.append(memo)
valid_refs.append(memo_ref)
else:
invalid.append(memo_ref)
rval = {'valid_refs':valid_refs, 'valid_memos' : valid_memos,'invalid':invalid}
return rval
@property
def references(self):
# this function will return a list of refeference objects + a string of the references
refs = MemoReference.get_refs(self)
rval = []
for ref in refs:
userid=ref[0]
memo = Memo.find(username=userid,memo_number=ref[1],memo_version=ref[2])
if ref[2] == None:
refstring=f"{userid}-{ref[1]}"
else:
refstring=f"{userid}-{ref[1]}-{ref[2]}"
rval.append((refstring,memo))
return {'reflist':rval,'ref_string':self._references}
@references.setter
def references(self,references):
self._references = references
refs = Memo.valid_references(references)
for i in range(len(refs['valid_refs'])):
parsed_ref = Memo.parse_reference(refs['valid_refs'][i])
user = User.find(username=parsed_ref[0])
MemoReference.add_ref(self.id,ref_user_id=user.username,ref_memo_number=parsed_ref[1],ref_memo_version=parsed_ref[2])
@property
def backrefs(self):
return MemoReference.get_back_refs(self)
######################################################################
#
######################################################################
def get_next_version(self):
memo = Memo.query.join(User).filter(Memo.number == self.number)\
.order_by(Memo.version.desc()).first()
current_app.logger.info(f"get_next_version {memo.id} {memo.number} {memo.version}")
if memo:
return b10_to_rev(rev_to_b10(memo.version)+1)
return b10_to_rev(1) # also known as 'A'
def save(self):
db.session.add(self)
db.session.commit()
self.saveJson()
################################################################################
# functions used to process the state
# these function would classiavally be called private
################################################################################
def obsolete_previous(self,acting=None):
prev_list = Memo.query.join(User).filter(Memo.number == self.number,Memo.version != self.version).all()
for memo in prev_list:
if memo.memo_state == MemoState.Active:
memo.memo_state = MemoState.Obsolete
MemoHistory.activity(memo=memo,memo_activity=MemoActivity.Obsolete,user=acting)
memo.save()
# This function is called when:
# 1- a valid draft is created
# 2- a signature happens
# 3- an unsign happens
def process_state(self,acting=None):
if self.memo_state == MemoState.Draft:
if MemoSignature.status(self.id) == False:
self.memo_state = MemoState.Signoff
self.submit_date = datetime.utcnow()
MemoHistory.activity(memo=self,memo_activity=MemoActivity.Signoff,user=acting)
self.notify_signers(f"memo {self.user.username}-{self.number}-{self.version} has gone into signoff")
else:
self.memo_state = MemoState.Active
self.active_date = datetime.utcnow()
MemoHistory.activity(memo=self,memo_activity=MemoActivity.Activate,user=acting)
self.obsolete_previous(acting=acting)
self.notify_distribution(f"memo {self.user.username}-{self.number}-{self.version} has been published")
if self.memo_state == MemoState.Signoff:
if MemoSignature.status(self.id):
self.memo_state = MemoState.Active
self.active_date = datetime.utcnow()
self.notify_distribution(f"memo {self.user.username}-{self.number}-{self.version} has been published")
MemoHistory.activity(memo=self,memo_activity=MemoActivity.Activate,user=acting)
self.obsolete_previous(acting=acting)
else:
current_app.logger.info(f"Signatures Still Required")
self.action_date = datetime.utcnow()
self.save()
# TODO: ARH
def notify_distribution(self,message):
current_app.logger.info(F"Notify Distribution {self.distribution} {message}")
# TODO: ARH
def notify_signers(self,message):
current_app.logger.info(F"Notify signers {message}")
################################################################################
# State machine functions called by the viewcontroller
################################################################################
# Owner Function
@staticmethod
def create_revise(owner=None,delegate=None,memo_number=None):
""" This function will return None or a new Memo if the owner/delgate and revise this memo """
assert owner != None and delegate != None
if owner == None or delegate == None:
return None
if owner.is_delegate(delegate) != True:
return None
memo = Memo.query.join(User).filter(User.username==owner.username,Memo.number==memo_number).order_by(Memo.version.desc()).first()
# create a new memo (i.e. not a new version of an existing memo)
if memo_number == None or memo==None:
memo_number = Memo.get_next_number(owner)
new_memo = Memo(number = memo_number,\
version = 'A',\
confidential = False,\
distribution = '',\
keywords = '',\
title = '',\
num_files = 0,\
user_id = owner.username,\
memo_state = MemoState.Draft,\
action_date = datetime.utcnow(),\
create_date = datetime.utcnow(),\
signers = '' )
new_memo.save()
MemoHistory.activity(memo=new_memo,memo_activity=MemoActivity.Create,user=delegate)
current_app.logger.info(f"Creating new memo {new_memo}")
return new_memo
if memo.memo_state == MemoState.Draft:
current_app.logger.info(f"Found a draft memo {memo}")
return memo
# revise an existing memo
new_memo = Memo(number = memo_number,\
version = memo.get_next_version(),\
confidential = memo.confidential,\
distribution = memo.distribution,\
keywords = memo.keywords,\
title = memo.title,\
num_files = 0,\
user_id = memo.user_id,\
memo_state = MemoState.Draft,\
action_date = datetime.utcnow(),\
create_date = datetime.utcnow(),\
)
new_memo.save()
new_memo.references = memo.references['ref_string'] # cannot be done until there is an id assigned by the save
new_memo.signers = memo._signers # cannot be done until there is an id assigned by the save
new_memo.save()
MemoHistory.activity(memo=new_memo,memo_activity=MemoActivity.Create,user=delegate)
return new_memo
# signer function
def sign(self,signer=None,delegate=None):
current_app.logger.info(f"signer = {signer} delegate={delegate}")
if not self.can_sign(signer,delegate):
current_app.logger.info("NOT!!@ allowed to sign")
return False
current_app.logger.info("allowed to sign")
MemoSignature.sign(self.id,signer,delegate)
MemoHistory.activity(memo=self,user=delegate,memo_activity=MemoActivity.Sign)
self.process_state(acting=delegate)
return True
# signer function
def unsign(self,signer=None,delegate=None):
if not self.can_unsign(signer,delegate):
return False
MemoSignature.unsign(self.id,signer,delegate)
MemoHistory.activity(memo=self,user=delegate,memo_activity=MemoActivity.Unsign)
self.process_state(acting=delegate)
return True
# Owner Function
def obsolete(self,delegate=None):
current_app.logger.info(f"Obsolete: {self} Delegate={delegate}")
if not self.can_obsolete(delegate=delegate):
return False
self.memo_state = MemoState.Obsolete
self.action_date = datetime.utcnow()
self.obsolete_date = datetime.utcnow()
MemoHistory.activity(memo=self,user=delegate,memo_activity=MemoActivity.Obsolete)
self.save()
return True
# Owner Function
def cancel(self,delegate=None):
current_app.logger.info(f"Cancel: {self} Delegate={delegate}")
memostring = f"{self}"
if not self.can_cancel(delegate=delegate):
return False
MemoFile.delete(self)
# delete all of the files in that directory & the directory
shutil.rmtree(self.get_fullpath())
MemoReference.delete(self)
MemoSignature.delete_signers(self)
MemoHistory.activity(memo=self,user=delegate,memo_activity=MemoActivity.Cancel)
db.session.delete(self)
db.session.commit()
current_app.logger.info(f"Canceling")
return True
# signer function
def reject(self,signer=None,delegate=None):
current_app.logger.info(f"signer = {signer} delegate={delegate}")
if not self.can_reject(signer,delegate):
return False
self.memo_state = MemoState.Draft
self.action_date = datetime.utcnow()
self.submit_date = None
self.active_date = None
self.obsolete_date = None
MemoHistory.activity(memo=self,memo_activity=MemoActivity.Reject,user=delegate)
MemoSignature.unsign_all(self)
self.save()
self.notify_signers(f"Memo {self.user.username}-{self.number}-{self.version} has been rejected for {signer.username} by {delegate.username}")
return True
################################################################################
# End of State machine functions
################################################################################
@staticmethod
def find(memo_id=None,username=None,memo_number=None,memo_version=None):
if memo_id != None:
return Memo.query.filter_by(id=memo_id).first()
current_app.logger.debug(f"FIND: Looking for {username}/{memo_number}/{memo_version}")
memoQry = Memo.query.filter_by(user_id=username,number=memo_number)
if memo_version != None:
memoQry.filter_by(version=memo_version)
memo = memoQry.first()
current_app.logger.debug(f"Found Memo id={memo}")
return memo
@staticmethod
def get_memo_list(username=None,memo_number=None,memo_version=None,page=1,pagesize=None):
if memo_version:
memo_list = Memo.query.join(User).filter(User.username==username,\
Memo.number==memo_number,\
Memo.version==memo_version)\
.paginate(page = page,per_page=pagesize)
elif memo_number:
memo_list = Memo.query.join(User).filter(User.username==username,Memo.number==memo_number)\
.order_by(Memo.action_date.desc()).paginate(page = page,per_page=pagesize)
elif username:
memo_list = Memo.query.join(User).filter(User.username==username,Memo.memo_state == MemoState.Active)\
.order_by(Memo.action_date.desc()).paginate(page = page,per_page=pagesize)
else:
memo_list = Memo.query.join(User).filter(Memo.memo_state == MemoState.Active)\
.order_by(Memo.action_date.desc()).paginate(page = page,per_page=pagesize)
return memo_list
@staticmethod
def search(title=None,keywords=None,page=1,pagesize=None):
current_app.logger.info(f"Search title={title}")
if title != None:
memo_list = Memo.query.filter(Memo.title.like(f"%{title}%")).order_by(Memo.action_date.desc()).paginate(page = page,per_page=pagesize)
if keywords != None:
memo_list = Memo.query.filter(Memo.keywords.like(f"%{keywords}%")).order_by(Memo.action_date.desc()).paginate(page = page,per_page=pagesize)
return memo_list
@staticmethod
def get_next_number(user=None):
assert user!=None
memo_list = Memo.query.join(User).filter(User.username==user.username)\
.order_by(Memo.number.desc()).first()
if memo_list == None:
return 1
return memo_list.number+1
@staticmethod
def get_inbox(user=None,page=1,pagesize=None):
assert user!=None,"User must not be none"
if user == None:
return None
msigs = MemoSignature.get_signatures(user,signed=False)
memolist = Memo.query.join(User).filter(Memo.memo_state==MemoState.Signoff,Memo.id.in_(msigs)).order_by(Memo.action_date.desc()).paginate(page = page,per_page=pagesize)
current_app.logger.info(f"Inbox for {user.username} = Items={len(memolist.items)} {memolist}")
return memolist
@staticmethod
def get_drafts(user=None,page=1,pagesize=None):
assert user!=None,"User must not be none"
if user == None:
return None
memolist = Memo.query.join(User).filter(Memo.memo_state==MemoState.Draft,User.username==user.username).order_by(Memo.action_date.desc()).paginate(page = page,per_page=pagesize)
return memolist | [((661, 700), 'memos.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (670, 700), False, 'from memos import db\n'), ((714, 735), 'memos.db.Column', 'db.Column', (['db.Integer'], {}), '(db.Integer)\n', (723, 735), False, 'from memos import db\n'), ((801, 821), 'memos.db.Column', 'db.Column', (['db.String'], {}), '(db.String)\n', (810, 821), False, 'from memos import db\n'), ((903, 939), 'memos.db.Column', 'db.Column', (['db.Boolean'], {'default': '(False)'}), '(db.Boolean, default=False)\n', (912, 939), False, 'from memos import db\n'), ((1312, 1344), 'memos.db.Column', 'db.Column', (['db.Integer'], {'default': '(0)'}), '(db.Integer, default=0)\n', (1321, 1344), False, 'from memos import db\n'), ((1430, 1493), 'memos.db.Column', 'db.Column', (['db.DateTime'], {'nullable': '(False)', 'default': 'datetime.utcnow'}), '(db.DateTime, nullable=False, default=datetime.utcnow)\n', (1439, 1493), False, 'from memos import db\n'), ((1547, 1569), 'memos.db.Column', 'db.Column', (['db.DateTime'], {}), '(db.DateTime)\n', (1556, 1569), False, 'from memos import db\n'), ((1619, 1641), 'memos.db.Column', 'db.Column', (['db.DateTime'], {}), '(db.DateTime)\n', (1628, 1641), False, 'from memos import db\n'), ((1723, 1745), 'memos.db.Column', 'db.Column', (['db.DateTime'], {}), '(db.DateTime)\n', (1732, 1745), False, 'from memos import db\n'), ((1828, 1850), 'memos.db.Column', 'db.Column', (['db.DateTime'], {}), '(db.DateTime)\n', (1837, 1850), False, 'from memos import db\n'), ((1038, 1052), 'memos.db.String', 'db.String', (['(128)'], {}), '(128)\n', (1047, 1052), False, 'from memos import db\n'), ((1139, 1153), 'memos.db.String', 'db.String', (['(128)'], {}), '(128)\n', (1148, 1153), False, 'from memos import db\n'), ((1222, 1236), 'memos.db.String', 'db.String', (['(128)'], {}), '(128)\n', (1231, 1236), False, 'from memos import db\n'), ((1939, 1953), 'memos.db.String', 'db.String', (['(120)'], {}), '(120)\n', (1948, 1953), False, 'from memos import db\n'), ((1955, 1985), 'memos.db.ForeignKey', 'db.ForeignKey', (['"""user.username"""'], {}), "('user.username')\n", (1968, 1985), False, 'from memos import db\n'), ((2074, 2088), 'memos.db.String', 'db.String', (['(128)'], {}), '(128)\n', (2083, 2088), False, 'from memos import db\n'), ((2199, 2213), 'memos.db.String', 'db.String', (['(128)'], {}), '(128)\n', (2208, 2213), False, 'from memos import db\n'), ((2314, 2332), 'memos.db.Enum', 'db.Enum', (['MemoState'], {}), '(MemoState)\n', (2321, 2332), False, 'from memos import db\n'), ((3945, 3985), 'memos.models.MemoSignature.MemoSignature.is_signer', 'MemoSignature.is_signer', (['self.id', 'signer'], {}), '(self.id, signer)\n', (3968, 3985), False, 'from memos.models.MemoSignature import MemoSignature\n'), ((4413, 4453), 'memos.models.MemoSignature.MemoSignature.is_signer', 'MemoSignature.is_signer', (['self.id', 'signer'], {}), '(self.id, signer)\n', (4436, 4453), False, 'from memos.models.MemoSignature import MemoSignature\n'), ((5625, 5680), 'memos.models.MemoSignature.MemoSignature.is_signer', 'MemoSignature.is_signer', ([], {'memo_id': 'self.id', 'signer': 'signer'}), '(memo_id=self.id, signer=signer)\n', (5648, 5680), False, 'from memos.models.MemoSignature import MemoSignature\n'), ((6837, 6951), 'os.path.join', 'os.path.join', (['current_app.root_path', '"""static"""', '"""memos"""', 'f"""{self.user_id}"""', 'f"""{self.number}"""', 'f"""{self.version}"""'], {}), "(current_app.root_path, 'static', 'memos', f'{self.user_id}',\n f'{self.number}', f'{self.version}')\n", (6849, 6951), False, 'import os\n'), ((7060, 7152), 'os.path.join', 'os.path.join', (['"""/static"""', '"""memos"""', 'f"""{self.user_id}"""', 'f"""{self.number}"""', 'f"""{self.version}"""'], {}), "('/static', 'memos', f'{self.user_id}', f'{self.number}',\n f'{self.version}')\n", (7072, 7152), False, 'import os\n'), ((8138, 8170), 'os.makedirs', 'os.makedirs', (['path'], {'exist_ok': '(True)'}), '(path, exist_ok=True)\n', (8149, 8170), False, 'import os\n'), ((8247, 8323), 'os.path.join', 'os.path.join', (['path', 'f"""meta-{self.user_id}-{self.number}-{self.version}.json"""'], {}), "(path, f'meta-{self.user_id}-{self.number}-{self.version}.json')\n", (8259, 8323), False, 'import os\n'), ((8358, 8374), 'json.dump', 'json.dump', (['js', 'f'], {}), '(js, f)\n', (8367, 8374), False, 'import json\n'), ((8537, 8568), 'memos.models.MemoSignature.MemoSignature.get_signers', 'MemoSignature.get_signers', (['self'], {}), '(self)\n', (8562, 8568), False, 'from memos.models.MemoSignature import MemoSignature\n'), ((8880, 8914), 'memos.models.MemoSignature.MemoSignature.delete_signers', 'MemoSignature.delete_signers', (['self'], {}), '(self)\n', (8908, 8914), False, 'from memos.models.MemoSignature import MemoSignature\n'), ((8932, 8966), 'memos.models.User.User.valid_usernames', 'User.valid_usernames', (['signer_names'], {}), '(signer_names)\n', (8952, 8966), False, 'from memos.models.User import User\n'), ((9301, 9325), 're.split', 're.split', (['"""-"""', 'reference'], {}), "('-', reference)\n", (9309, 9325), False, 'import re\n'), ((9483, 9535), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""references ={references}"""'], {}), "(f'references ={references}')\n", (9506, 9535), False, 'from flask import current_app\n'), ((9630, 9673), 're.split', 're.split', (['"""\\\\s|\\\\,|\\\\t|\\\\;|\\\\:"""', 'references'], {}), "('\\\\s|\\\\,|\\\\t|\\\\;|\\\\:', references)\n", (9638, 9673), False, 'import re\n'), ((10799, 10827), 'memos.models.MemoReference.MemoReference.get_refs', 'MemoReference.get_refs', (['self'], {}), '(self)\n', (10821, 10827), False, 'from memos.models.MemoReference import MemoReference\n'), ((11746, 11779), 'memos.models.MemoReference.MemoReference.get_back_refs', 'MemoReference.get_back_refs', (['self'], {}), '(self)\n', (11773, 11779), False, 'from memos.models.MemoReference import MemoReference\n'), ((12100, 12188), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""get_next_version {memo.id} {memo.number} {memo.version}"""'], {}), "(\n f'get_next_version {memo.id} {memo.number} {memo.version}')\n", (12123, 12188), False, 'from flask import current_app\n'), ((12275, 12288), 'memos.revletter.b10_to_rev', 'b10_to_rev', (['(1)'], {}), '(1)\n', (12285, 12288), False, 'from memos.revletter import b10_to_rev, rev_to_b10\n'), ((12338, 12358), 'memos.db.session.add', 'db.session.add', (['self'], {}), '(self)\n', (12352, 12358), False, 'from memos import db\n'), ((12367, 12386), 'memos.db.session.commit', 'db.session.commit', ([], {}), '()\n', (12384, 12386), False, 'from memos import db\n'), ((14677, 14694), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (14692, 14694), False, 'from datetime import datetime\n'), ((14784, 14861), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""Notify Distribution {self.distribution} {message}"""'], {}), "(f'Notify Distribution {self.distribution} {message}')\n", (14807, 14861), False, 'from flask import current_app\n'), ((14925, 14977), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""Notify signers {message}"""'], {}), "(f'Notify signers {message}')\n", (14948, 14977), False, 'from flask import current_app\n'), ((17922, 18012), 'memos.models.MemoHistory.MemoHistory.activity', 'MemoHistory.activity', ([], {'memo': 'new_memo', 'memo_activity': 'MemoActivity.Create', 'user': 'delegate'}), '(memo=new_memo, memo_activity=MemoActivity.Create, user\n =delegate)\n', (17942, 18012), False, 'from memos.models.MemoHistory import MemoHistory\n'), ((18104, 18169), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""signer = {signer} delegate={delegate}"""'], {}), "(f'signer = {signer} delegate={delegate}')\n", (18127, 18169), False, 'from flask import current_app\n'), ((18321, 18363), 'flask.current_app.logger.info', 'current_app.logger.info', (['"""allowed to sign"""'], {}), "('allowed to sign')\n", (18344, 18363), False, 'from flask import current_app\n'), ((18372, 18417), 'memos.models.MemoSignature.MemoSignature.sign', 'MemoSignature.sign', (['self.id', 'signer', 'delegate'], {}), '(self.id, signer, delegate)\n', (18390, 18417), False, 'from memos.models.MemoSignature import MemoSignature\n'), ((18424, 18503), 'memos.models.MemoHistory.MemoHistory.activity', 'MemoHistory.activity', ([], {'memo': 'self', 'user': 'delegate', 'memo_activity': 'MemoActivity.Sign'}), '(memo=self, user=delegate, memo_activity=MemoActivity.Sign)\n', (18444, 18503), False, 'from memos.models.MemoHistory import MemoHistory\n'), ((18738, 18785), 'memos.models.MemoSignature.MemoSignature.unsign', 'MemoSignature.unsign', (['self.id', 'signer', 'delegate'], {}), '(self.id, signer, delegate)\n', (18758, 18785), False, 'from memos.models.MemoSignature import MemoSignature\n'), ((18792, 18878), 'memos.models.MemoHistory.MemoHistory.activity', 'MemoHistory.activity', ([], {'memo': 'self', 'user': 'delegate', 'memo_activity': 'MemoActivity.Unsign'}), '(memo=self, user=delegate, memo_activity=MemoActivity.\n Unsign)\n', (18812, 18878), False, 'from memos.models.MemoHistory import MemoHistory\n'), ((19023, 19087), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""Obsolete: {self} Delegate={delegate}"""'], {}), "(f'Obsolete: {self} Delegate={delegate}')\n", (19046, 19087), False, 'from flask import current_app\n'), ((19256, 19273), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (19271, 19273), False, 'from datetime import datetime\n'), ((19303, 19320), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (19318, 19320), False, 'from datetime import datetime\n'), ((19329, 19417), 'memos.models.MemoHistory.MemoHistory.activity', 'MemoHistory.activity', ([], {'memo': 'self', 'user': 'delegate', 'memo_activity': 'MemoActivity.Obsolete'}), '(memo=self, user=delegate, memo_activity=MemoActivity.\n Obsolete)\n', (19349, 19417), False, 'from memos.models.MemoHistory import MemoHistory\n'), ((19513, 19575), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""Cancel: {self} Delegate={delegate}"""'], {}), "(f'Cancel: {self} Delegate={delegate}')\n", (19536, 19575), False, 'from flask import current_app\n'), ((19723, 19744), 'memos.models.MemoFile.MemoFile.delete', 'MemoFile.delete', (['self'], {}), '(self)\n', (19738, 19744), False, 'from memos.models.MemoFile import MemoFile\n'), ((19882, 19908), 'memos.models.MemoReference.MemoReference.delete', 'MemoReference.delete', (['self'], {}), '(self)\n', (19902, 19908), False, 'from memos.models.MemoReference import MemoReference\n'), ((19917, 19951), 'memos.models.MemoSignature.MemoSignature.delete_signers', 'MemoSignature.delete_signers', (['self'], {}), '(self)\n', (19945, 19951), False, 'from memos.models.MemoSignature import MemoSignature\n'), ((19960, 20046), 'memos.models.MemoHistory.MemoHistory.activity', 'MemoHistory.activity', ([], {'memo': 'self', 'user': 'delegate', 'memo_activity': 'MemoActivity.Cancel'}), '(memo=self, user=delegate, memo_activity=MemoActivity.\n Cancel)\n', (19980, 20046), False, 'from memos.models.MemoHistory import MemoHistory\n'), ((20049, 20072), 'memos.db.session.delete', 'db.session.delete', (['self'], {}), '(self)\n', (20066, 20072), False, 'from memos import db\n'), ((20081, 20100), 'memos.db.session.commit', 'db.session.commit', ([], {}), '()\n', (20098, 20100), False, 'from memos import db\n'), ((20116, 20153), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""Canceling"""'], {}), "(f'Canceling')\n", (20139, 20153), False, 'from flask import current_app\n'), ((20259, 20324), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""signer = {signer} delegate={delegate}"""'], {}), "(f'signer = {signer} delegate={delegate}')\n", (20282, 20324), False, 'from flask import current_app\n'), ((20486, 20503), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (20501, 20503), False, 'from datetime import datetime\n'), ((20610, 20696), 'memos.models.MemoHistory.MemoHistory.activity', 'MemoHistory.activity', ([], {'memo': 'self', 'memo_activity': 'MemoActivity.Reject', 'user': 'delegate'}), '(memo=self, memo_activity=MemoActivity.Reject, user=\n delegate)\n', (20630, 20696), False, 'from memos.models.MemoHistory import MemoHistory\n'), ((20698, 20728), 'memos.models.MemoSignature.MemoSignature.unsign_all', 'MemoSignature.unsign_all', (['self'], {}), '(self)\n', (20722, 20728), False, 'from memos.models.MemoSignature import MemoSignature\n'), ((21336, 21427), 'flask.current_app.logger.debug', 'current_app.logger.debug', (['f"""FIND: Looking for {username}/{memo_number}/{memo_version}"""'], {}), "(\n f'FIND: Looking for {username}/{memo_number}/{memo_version}')\n", (21360, 21427), False, 'from flask import current_app\n'), ((21641, 21690), 'flask.current_app.logger.debug', 'current_app.logger.debug', (['f"""Found Memo id={memo}"""'], {}), "(f'Found Memo id={memo}')\n", (21665, 21690), False, 'from flask import current_app\n'), ((22966, 23014), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""Search title={title}"""'], {}), "(f'Search title={title}')\n", (22989, 23014), False, 'from flask import current_app\n'), ((23954, 24002), 'memos.models.MemoSignature.MemoSignature.get_signatures', 'MemoSignature.get_signatures', (['user'], {'signed': '(False)'}), '(user, signed=False)\n', (23982, 24002), False, 'from memos.models.MemoSignature import MemoSignature\n'), ((6539, 6588), 're.split', 're.split', (['"""\\\\s|\\\\,|\t|\\\\;|\\\\:"""', 'self.distribution'], {}), "('\\\\s|\\\\,|\\t|\\\\;|\\\\:', self.distribution)\n", (6547, 6588), False, 'import re\n'), ((8622, 8655), 'memos.models.User.User.find', 'User.find', ([], {'username': 'sig.signer_id'}), '(username=sig.signer_id)\n', (8631, 8655), False, 'from memos.models.User import User\n'), ((8683, 8718), 'memos.models.User.User.find', 'User.find', ([], {'username': 'sig.delegate_id'}), '(username=sig.delegate_id)\n', (8692, 8718), False, 'from memos.models.User import User\n'), ((9024, 9074), 'memos.models.MemoSignature.MemoSignature.add_signer', 'MemoSignature.add_signer', ([], {'memo': 'self', 'signer': 'signer'}), '(memo=self, signer=signer)\n', (9048, 9074), False, 'from memos.models.MemoSignature import MemoSignature\n'), ((10221, 10262), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""Memo = {memo}"""'], {}), "(f'Memo = {memo}')\n", (10244, 10262), False, 'from flask import current_app\n'), ((11528, 11561), 'memos.models.User.User.find', 'User.find', ([], {'username': 'parsed_ref[0]'}), '(username=parsed_ref[0])\n', (11537, 11561), False, 'from memos.models.User import User\n'), ((11574, 11699), 'memos.models.MemoReference.MemoReference.add_ref', 'MemoReference.add_ref', (['self.id'], {'ref_user_id': 'user.username', 'ref_memo_number': 'parsed_ref[1]', 'ref_memo_version': 'parsed_ref[2]'}), '(self.id, ref_user_id=user.username, ref_memo_number=\n parsed_ref[1], ref_memo_version=parsed_ref[2])\n', (11595, 11699), False, 'from memos.models.MemoReference import MemoReference\n'), ((14148, 14177), 'memos.models.MemoSignature.MemoSignature.status', 'MemoSignature.status', (['self.id'], {}), '(self.id)\n', (14168, 14177), False, 'from memos.models.MemoSignature import MemoSignature\n'), ((16590, 16680), 'memos.models.MemoHistory.MemoHistory.activity', 'MemoHistory.activity', ([], {'memo': 'new_memo', 'memo_activity': 'MemoActivity.Create', 'user': 'delegate'}), '(memo=new_memo, memo_activity=MemoActivity.Create, user\n =delegate)\n', (16610, 16680), False, 'from memos.models.MemoHistory import MemoHistory\n'), ((16699, 16755), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""Creating new memo {new_memo}"""'], {}), "(f'Creating new memo {new_memo}')\n", (16722, 16755), False, 'from flask import current_app\n'), ((16860, 16913), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""Found a draft memo {memo}"""'], {}), "(f'Found a draft memo {memo}')\n", (16883, 16913), False, 'from flask import current_app\n'), ((18229, 18278), 'flask.current_app.logger.info', 'current_app.logger.info', (['"""NOT!!@ allowed to sign"""'], {}), "('NOT!!@ allowed to sign')\n", (18252, 18278), False, 'from flask import current_app\n'), ((7275, 7316), 'memos.models.MemoFile.MemoFile.query.filter_by', 'MemoFile.query.filter_by', ([], {'memo_id': 'self.id'}), '(memo_id=self.id)\n', (7299, 7316), False, 'from memos.models.MemoFile import MemoFile\n'), ((9883, 9986), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""INVALID length append {memo_ref} valid={valid_memos} invalid {invalid}"""'], {}), "(\n f'INVALID length append {memo_ref} valid={valid_memos} invalid {invalid}')\n", (9906, 9986), False, 'from flask import current_app\n'), ((12994, 13080), 'memos.models.MemoHistory.MemoHistory.activity', 'MemoHistory.activity', ([], {'memo': 'memo', 'memo_activity': 'MemoActivity.Obsolete', 'user': 'acting'}), '(memo=memo, memo_activity=MemoActivity.Obsolete, user=\n acting)\n', (13014, 13080), False, 'from memos.models.MemoHistory import MemoHistory\n'), ((13332, 13361), 'memos.models.MemoSignature.MemoSignature.status', 'MemoSignature.status', (['self.id'], {}), '(self.id)\n', (13352, 13361), False, 'from memos.models.MemoSignature import MemoSignature\n'), ((13459, 13476), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (13474, 13476), False, 'from datetime import datetime\n'), ((13493, 13578), 'memos.models.MemoHistory.MemoHistory.activity', 'MemoHistory.activity', ([], {'memo': 'self', 'memo_activity': 'MemoActivity.Signoff', 'user': 'acting'}), '(memo=self, memo_activity=MemoActivity.Signoff, user=acting\n )\n', (13513, 13578), False, 'from memos.models.MemoHistory import MemoHistory\n'), ((13793, 13810), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (13808, 13810), False, 'from datetime import datetime\n'), ((13827, 13913), 'memos.models.MemoHistory.MemoHistory.activity', 'MemoHistory.activity', ([], {'memo': 'self', 'memo_activity': 'MemoActivity.Activate', 'user': 'acting'}), '(memo=self, memo_activity=MemoActivity.Activate, user=\n acting)\n', (13847, 13913), False, 'from memos.models.MemoHistory import MemoHistory\n'), ((14265, 14282), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (14280, 14282), False, 'from datetime import datetime\n'), ((14418, 14504), 'memos.models.MemoHistory.MemoHistory.activity', 'MemoHistory.activity', ([], {'memo': 'self', 'memo_activity': 'MemoActivity.Activate', 'user': 'acting'}), '(memo=self, memo_activity=MemoActivity.Activate, user=\n acting)\n', (14438, 14504), False, 'from memos.models.MemoHistory import MemoHistory\n'), ((14587, 14640), 'flask.current_app.logger.info', 'current_app.logger.info', (['f"""Signatures Still Required"""'], {}), "(f'Signatures Still Required')\n", (14610, 14640), False, 'from flask import current_app\n'), ((17513, 17530), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (17528, 17530), False, 'from datetime import datetime\n'), ((17575, 17592), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (17590, 17592), False, 'from datetime import datetime\n'), ((12231, 12255), 'memos.revletter.rev_to_b10', 'rev_to_b10', (['memo.version'], {}), '(memo.version)\n', (12241, 12255), False, 'from memos.revletter import b10_to_rev, rev_to_b10\n'), ((16412, 16429), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (16427, 16429), False, 'from datetime import datetime\n'), ((16474, 16491), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (16489, 16491), False, 'from datetime import datetime\n')] |
mitodl/open-discussions | course_catalog/etl/conftest.py | ab6e9fac70b8a1222a84e78ba778a7a065c20541 | """Common ETL test fixtures"""
import json
import pytest
@pytest.fixture(autouse=True)
def mitx_settings(settings):
"""Test settings for MITx import"""
settings.EDX_API_CLIENT_ID = "fake-client-id"
settings.EDX_API_CLIENT_SECRET = "fake-client-secret"
settings.EDX_API_ACCESS_TOKEN_URL = "http://localhost/fake/access/token/url"
settings.EDX_API_URL = "http://localhost/fake/api/url"
settings.MITX_BASE_URL = "http://localhost/fake/base/url"
settings.MITX_ALT_URL = "http://localhost/fake/alt/url"
return settings
@pytest.fixture(autouse=True)
def oll_settings(settings):
"""Test settings for MITx import"""
settings.OLL_API_CLIENT_ID = "fake-client-id"
settings.OLL_API_CLIENT_SECRET = "fake-client-secret"
settings.OLL_API_ACCESS_TOKEN_URL = "http://localhost/fake/access/token/url"
settings.OLL_API_URL = "http://localhost/fake/api/url"
settings.OLL_BASE_URL = "http://localhost/fake/base/url"
settings.OLL_ALT_URL = "http://localhost/fake/alt/url"
return settings
@pytest.fixture
def mitx_course_data():
"""Catalog data fixture"""
with open("./test_json/test_mitx_course.json", "r") as f:
yield json.loads(f.read())
@pytest.fixture
def non_mitx_course_data():
"""Catalog data fixture"""
with open("./test_json/test_non_mitx_course.json", "r") as f:
yield json.loads(f.read())
| [((61, 89), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (75, 89), False, 'import pytest\n'), ((552, 580), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (566, 580), False, 'import pytest\n')] |
PageotD/juliaset | juliaset/juliaset.py | 7c1f98020eeff291fcf040cfcdf25a89e72f46a9 | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import random
class JuliaSet:
def __init__(self):
"""
Constructor of the JuliaSet class
:param size: size in pixels (for both width and height)
:param dpi: dots per inch (default 300)
"""
# Initialize image related parameters
self.size = 256
self.dpi = 300
self.norm = True
self.mirror = False
# Initialize process related parameters
self.escrad = 3
self.niter = 250
def param(self, **kwargs):
"""
Get parameters from input dictionary and set attributes.
:param kwargs: a dictionary in the form
`{'arg1':value, ..., 'argN': value}`
"""
# Check if kwargs in not empty
if kwargs is not None:
# Image related parameters
if 'size' in kwargs:
self.size = kwargs.pop('size', 256)
if 'dpi' in kwargs:
self.dpi = kwargs.pop('dpi', 300)
if 'norm' in kwargs:
self.norm = kwargs.pop('norm', True)
if 'mirror' in kwargs:
self.mirror = kwargs.pop('mirror', False)
# Process related parameters
if 'escrad' in kwargs:
self.escrad = kwargs.pop('escrad', 3)
if 'niter' in kwargs:
self.niter = kwargs.pop('niter', 250)
# If kwargs is not empty there is some invalid keywords
if kwargs:
print("{} are invalid keyword arguments!".format(kwargs.keys()))
def run(self, show=False, fname='juilaset-output'):
"""
Run the Julia set generator
:param mirror: if True the julia is mirrored horizontally and
vertically; each mirror is concatenate with the original
to produce a new image
:param norm: if true the Julia set is normalized by its
absolute maximum value.
:param show: if show is `False` th eoutput image will be
written as a PNG file named `fname`
:param fname: Name of the output PNG file to write on disk
"""
# Get a complex value among a list of best Julia sets
cpxNum = self.getComplexValue()
# Get the target area
# For more randomness, the target area is a random
# subset of a wide one defined with x[-1.5, 1.5] and
# y[-1.5, 1.5]
xrng, yrng = self.getTargetArea()
# Process
julia = self.processJulia(cpxNum, xrng, yrng)
# Normalization
if(self.norm):
julia /= np.amax(np.abs(julia))
# Mirroring
if(self.mirror):
# Horizontal mirroring and concatenate
juliamirror = np.flip(julia, axis=1)
julia = np.concatenate((julia, juliamirror), axis=1)
# Vertical mirroring and concatenate
juliamirror = np.flip(julia, axis=0)
julia = np.concatenate((julia, juliamirror), axis=0)
# Plot the output with a random colormap using matplotlib
self.plotJuliaSet(julia, show=show, fname=fname)
def getComplexValue(self):
"""
Random choice in a list of best complex values for Julia
sets (real, imag).
:return cpxNum: a semi-random complex value
"""
# Define the list of best complex values
cpxList = [
(-0.10, 0.650), (0.00, 0.80), (0.370, 0.100),
(0.355, 0.355), (-0.54, 0.54), (0.340, -0.05),
(0.37, 0.10), (0.355, 0.355)
]
# Randomly choose one
cpxTmp = random.choice(cpxList)
# Manipulate the base value slightly to make it a little more unique
cpxNum = self.twearkComplex(cpxTmp)
return cpxNum
def twearkComplex(self, cpxTmp):
"""
Manipulate the base value slightly to make it a little more unique.
:param cpxTmp: complex value to modify
:param cpxNum: a slightly manipulate version of the input
"""
# Get the signs for the imaginary parts
isign = random.randrange(-1, 1, 2)
# Get a value variation for for real and imaginary parts
# The possible variation range is fixed at +/- 2% to stay
# In the neightborhood of the initial value
rsigma = random.uniform(0.98, 1.02)
isigma = random.uniform(0.98, 1.02)
# Apply modification and return the new complex value
realPart = cpxTmp[0] * rsigma
imagPart = cpxTmp[1] * isigma * isign
return complex(realPart, imagPart)
def getTargetArea(self):
"""
For more randomness, the target area is a random
subset of a wide one defined with x[-1.5, 1.5] and
y[-1.5, 1.5]
:return xrng, yrng: tuples containing (xmin, xmax)
and (ymin, ymax)
"""
# Randomly choose the center of the target area
# Possible values are in [-1.0, 1.0] to stay in an
# area where there are always pieces of fractals
xctr = random.uniform(-1.0,1.0)
yctr = random.uniform(-1.0,1.0)
# Extend around the center
xrng = (xctr-0.5, xctr+0.5)
yrng = (yctr-0.5, yctr+0.5)
return xrng, yrng
def processJulia(self, cpxNum, xrng, yrng):
"""
Calculate the Julia set for the given input parameters.
:param cpxNum: complex value acting as a seed for the Julia set
:param xrng: range of values (min, max) for the x-axis
:param yrng: range of values (min, max) for the y-axis
:param escrad: escape radius
:param niter: maximum number of iterations
"""
# Initialize numpy array of dimensions (size, size) with zeros
julia = np.ones((self.size, self.size), dtype=np.float32)
# Calculate the width (equal to height) of the image since the
# image is defined as a square
width = xrng[1] - xrng[0] # xmax - xmin = ymax - ymin
# Randomly choose the sign of the shade
#ssign = random.randrange(-1, 1, 2)
ssign = -1.
# Loop over x range
for ix in range(self.size):
# Get the pixel position in the complex plane
# For the real part
realPart = float(ix) / self.size * width + xrng[0]
# Loop over y range
for iy in range(self.size):
# Get the pixel position in the complex plane
# For the imaginary part
imagPart = float(iy) / self.size * width + yrng[0]
# Build the complex
cpxTmp = complex(realPart, imagPart)
# Initialize iteration counter
it = 0
# Loop over iterations
while(np.abs(cpxTmp) <= self.escrad**2 and it < self.niter):
# Quadratic polynomial
cpxTmp = cpxTmp**2 + cpxNum
# Increment iteration counter
it += 1
# Calculate the shade (a cool thing find somewhere on the net)
shade = 1. - np.sqrt(it/self.niter)
# Fill the outpout array
julia[ix][iy] = ssign * shade
return julia
def plotJuliaSet(self, julia, fname='juilaset-output', show=False):
"""
Plot the output Julia set and show it in matplotlib window or
write it on disk as a png file.
:param julia: the Julia set
:param show: if show is `False` th eoutput image will be
written as a PNG file named `fname`
:param fname: Name of the output PNG file to write on disk
"""
# List of beautiful colormap for Julia sets
cmapList = [
cm.Blues, cm.Greens, cm.Purples, cm.hot, cm.inferno,
cm.binary, cm.rainbow, cm.twilight_shifted, cm.plasma
]
# Randomly chose one colormap
cmapName = random.choice(cmapList)
# Plot the image with a gaussian interpolation
fig = plt.gcf()
fig.set_size_inches(3., 3.)
plt.imshow(julia, interpolation='gaussian', cmap=cmapName)
# Disable axis
plt.axis('off')
if(show):
plt.show()
else:
# Write on disk
fig.savefig(fname+".png", dpi=self.dpi, pad_inches=0.05, bbox_inches='tight')
def julia(**kwargs):
"""
temp
"""
# Initialize Julia Set instance
juliaInstance = JuliaSet()
# If kwargs not empty update the attributes
if kwargs is not None:
juliaInstance.param(**kwargs)
return juliaInstance
if __name__ == "__main__":
# execute only if run as a script
genJuliaSet = JuliaSet()
genJuliaSet.param()
genJuliaSet.run()
| [((3661, 3683), 'random.choice', 'random.choice', (['cpxList'], {}), '(cpxList)\n', (3674, 3683), False, 'import random\n'), ((4146, 4172), 'random.randrange', 'random.randrange', (['(-1)', '(1)', '(2)'], {}), '(-1, 1, 2)\n', (4162, 4172), False, 'import random\n'), ((4374, 4400), 'random.uniform', 'random.uniform', (['(0.98)', '(1.02)'], {}), '(0.98, 1.02)\n', (4388, 4400), False, 'import random\n'), ((4418, 4444), 'random.uniform', 'random.uniform', (['(0.98)', '(1.02)'], {}), '(0.98, 1.02)\n', (4432, 4444), False, 'import random\n'), ((5105, 5130), 'random.uniform', 'random.uniform', (['(-1.0)', '(1.0)'], {}), '(-1.0, 1.0)\n', (5119, 5130), False, 'import random\n'), ((5145, 5170), 'random.uniform', 'random.uniform', (['(-1.0)', '(1.0)'], {}), '(-1.0, 1.0)\n', (5159, 5170), False, 'import random\n'), ((5817, 5866), 'numpy.ones', 'np.ones', (['(self.size, self.size)'], {'dtype': 'np.float32'}), '((self.size, self.size), dtype=np.float32)\n', (5824, 5866), True, 'import numpy as np\n'), ((8018, 8041), 'random.choice', 'random.choice', (['cmapList'], {}), '(cmapList)\n', (8031, 8041), False, 'import random\n'), ((8112, 8121), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (8119, 8121), True, 'import matplotlib.pyplot as plt\n'), ((8166, 8224), 'matplotlib.pyplot.imshow', 'plt.imshow', (['julia'], {'interpolation': '"""gaussian"""', 'cmap': 'cmapName'}), "(julia, interpolation='gaussian', cmap=cmapName)\n", (8176, 8224), True, 'import matplotlib.pyplot as plt\n'), ((8265, 8280), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (8273, 8280), True, 'import matplotlib.pyplot as plt\n'), ((2795, 2817), 'numpy.flip', 'np.flip', (['julia'], {'axis': '(1)'}), '(julia, axis=1)\n', (2802, 2817), True, 'import numpy as np\n'), ((2838, 2882), 'numpy.concatenate', 'np.concatenate', (['(julia, juliamirror)'], {'axis': '(1)'}), '((julia, juliamirror), axis=1)\n', (2852, 2882), True, 'import numpy as np\n'), ((2958, 2980), 'numpy.flip', 'np.flip', (['julia'], {'axis': '(0)'}), '(julia, axis=0)\n', (2965, 2980), True, 'import numpy as np\n'), ((3001, 3045), 'numpy.concatenate', 'np.concatenate', (['(julia, juliamirror)'], {'axis': '(0)'}), '((julia, juliamirror), axis=0)\n', (3015, 3045), True, 'import numpy as np\n'), ((8312, 8322), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (8320, 8322), True, 'import matplotlib.pyplot as plt\n'), ((2657, 2670), 'numpy.abs', 'np.abs', (['julia'], {}), '(julia)\n', (2663, 2670), True, 'import numpy as np\n'), ((7187, 7211), 'numpy.sqrt', 'np.sqrt', (['(it / self.niter)'], {}), '(it / self.niter)\n', (7194, 7211), True, 'import numpy as np\n'), ((6838, 6852), 'numpy.abs', 'np.abs', (['cpxTmp'], {}), '(cpxTmp)\n', (6844, 6852), True, 'import numpy as np\n')] |
ShivanS93/VAtest_withOKN | eye_detection.py | 8da76f4c3ff526c9e16268194accfdc6221b0a66 | #!python3
# eye_detection.py - detect eyes using webcam
# tutorial: https://www.roytuts.com/real-time-eye-detection-in-webcam-using-python-3/
import cv2
import math
import numpy as np
def main():
faceCascade = cv2.CascadeClassifier("haarcascade_frontalface_alt.xml")
eyeCascade = cv2.CascadeClassifier("haarcascade_eye.xml")
# grab the reference to the webcam
# try:
vs = cv2.VideoCapture(0)
print(vs)
while True:
ret, frame = vs.read()
if frame is None:
break
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
gray = cv2.equalizeHist(gray)
faces = faceCascade.detectMultiScale(frame)
for (x, y, w, h) in faces:
roi_gray = gray[y : y + h, x : x + w]
roi_color = frame[y : y + h, x : x + w]
eyes = eyeCascade.detectMultiScale(roi_gray)
for (ex, ey, ew, eh) in eyes:
cv2.rectangle(roi_color, (ex, ey), (ex + ew, ey + eh), (0, 0, 255), 2)
cv2.imshow("Video", frame)
key = cv2.waitKey(1) & 0xFF
if key == ord("q") or key == 27:
break
cv2.destroyAllWindows()
if __name__ == "__main__":
main()
| [((218, 274), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""haarcascade_frontalface_alt.xml"""'], {}), "('haarcascade_frontalface_alt.xml')\n", (239, 274), False, 'import cv2\n'), ((292, 336), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""haarcascade_eye.xml"""'], {}), "('haarcascade_eye.xml')\n", (313, 336), False, 'import cv2\n'), ((397, 416), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (413, 416), False, 'import cv2\n'), ((1148, 1171), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1169, 1171), False, 'import cv2\n'), ((540, 579), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2GRAY'], {}), '(frame, cv2.COLOR_BGR2GRAY)\n', (552, 579), False, 'import cv2\n'), ((595, 617), 'cv2.equalizeHist', 'cv2.equalizeHist', (['gray'], {}), '(gray)\n', (611, 617), False, 'import cv2\n'), ((1008, 1034), 'cv2.imshow', 'cv2.imshow', (['"""Video"""', 'frame'], {}), "('Video', frame)\n", (1018, 1034), False, 'import cv2\n'), ((924, 994), 'cv2.rectangle', 'cv2.rectangle', (['roi_color', '(ex, ey)', '(ex + ew, ey + eh)', '(0, 0, 255)', '(2)'], {}), '(roi_color, (ex, ey), (ex + ew, ey + eh), (0, 0, 255), 2)\n', (937, 994), False, 'import cv2\n'), ((1053, 1067), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1064, 1067), False, 'import cv2\n')] |
lmdu/bioinfo | scripts/make_gene_table.py | 4542b0718410d15f3956c6545d9824a16608e02b | #!/usr/bin/env python
descripts = {}
with open('macaca_genes.txt') as fh:
fh.readline()
for line in fh:
cols = line.strip('\n').split('\t')
if cols[1]:
descripts[cols[0]] = cols[1].split('[')[0].strip()
else:
descripts[cols[0]] = cols[1]
with open('gene_info.txt') as fh:
for line in fh:
cols = line.strip().split('\t')
cols.append(descripts[cols[1]])
print "\t".join(cols)
| [] |
nussl/cookiecutter | {{cookiecutter.repo_name}}/src/mix_with_scaper.py | 5df8512592778ea7155b05e3e4b54676227968b0 | import gin
from scaper import Scaper, generate_from_jams
import copy
import logging
import p_tqdm
import nussl
import os
import numpy as np
def _reset_event_spec(sc):
sc.reset_fg_event_spec()
sc.reset_bg_event_spec()
def check_mixture(path_to_mix):
mix_signal = nussl.AudioSignal(path_to_mix)
if mix_signal.rms() < .01:
return False
return True
def make_one_mixture(sc, path_to_file, num_sources,
event_parameters, allow_repeated_label):
"""
Creates a single mixture, incoherent. Instantiates according to
the event parameters for each source.
"""
check = False
while not check:
for j in range(num_sources):
sc.add_event(**event_parameters)
sc.generate(
path_to_file,
path_to_file.replace('.wav', '.jams'),
no_audio=False,
allow_repeated_label=allow_repeated_label,
save_isolated_events=True,
)
_reset_event_spec(sc)
check = check_mixture(path_to_file)
def instantiate_and_get_event_spec(sc, master_label, event_parameters):
_reset_event_spec(sc)
_event_parameters = copy.deepcopy(event_parameters)
_event_parameters['label'] = ('const', master_label)
sc.add_event(**_event_parameters)
event = sc._instantiate_event(sc.fg_spec[-1])
_reset_event_spec(sc)
return sc, event
def make_one_mixture_coherent(sc, path_to_file, labels, event_parameters,
allow_repeated_label):
check = False
while not check:
sc, event = instantiate_and_get_event_spec(
sc, labels[0], event_parameters)
for label in labels:
try:
sc.add_event(
label=('const', label),
source_file=('const', event.source_file.replace(labels[0], label)),
source_time=('const', event.source_time),
event_time=('const', 0),
event_duration=('const', sc.duration),
snr=event_parameters['snr'],
pitch_shift=('const', event.pitch_shift),
time_stretch=('const', event.time_stretch)
)
except:
logging.exception(
f"Got an error for {label} @ {_source_file}. Moving on...")
sc.generate(
path_to_file,
path_to_file.replace('.wav', '.jams'),
no_audio=False,
allow_repeated_label=allow_repeated_label,
save_isolated_events=True,
)
sc.fg_spec = []
check = check_mixture(path_to_file)
@gin.configurable
def make_scaper_datasets(scopes=['train', 'val']):
for scope in scopes:
with gin.config_scope(scope):
mix_with_scaper()
@gin.configurable
def mix_with_scaper(num_mixtures, foreground_path, background_path,
scene_duration, sample_rate, target_folder,
event_parameters, num_sources=None, labels=None,
coherent=False, allow_repeated_label=False,
ref_db=-40, bitdepth=16, seed=0, num_workers=1):
nussl.utils.seed(seed)
os.makedirs(target_folder, exist_ok=True)
scaper_seed = np.random.randint(100)
logging.info('Starting mixing.')
if num_sources is None and labels is None:
raise ValueError("One of labels or num_sources must be set!")
if coherent and labels is None:
raise ValueError("Coherent mixing requires explicit labels!")
generators = []
if background_path is None:
background_path = foreground_path
for i in range(num_mixtures):
sc = Scaper(
scene_duration,
fg_path=foreground_path,
bg_path=background_path,
random_state=scaper_seed,
)
sc.ref_db = ref_db
sc.sr = sample_rate
sc.bitdepth = bitdepth
generators.append(sc)
scaper_seed += 1
mix_func = make_one_mixture_coherent if coherent else make_one_mixture
def arg_tuple(i):
_args = (
generators[i],
os.path.join(target_folder, f'{i:08d}.wav'),
labels if coherent else num_sources,
event_parameters,
allow_repeated_label
)
return _args
args = [arg_tuple(i) for i in range(num_mixtures)]
# do one by itself for testing
mix_func(*args[0])
args = list(zip(*args[1:]))
args = [list(a) for a in args]
# now do the rest in parallel
p_tqdm.p_map(mix_func, *args, num_cpus=num_workers)
| [((276, 306), 'nussl.AudioSignal', 'nussl.AudioSignal', (['path_to_mix'], {}), '(path_to_mix)\n', (293, 306), False, 'import nussl\n'), ((1177, 1208), 'copy.deepcopy', 'copy.deepcopy', (['event_parameters'], {}), '(event_parameters)\n', (1190, 1208), False, 'import copy\n'), ((3193, 3215), 'nussl.utils.seed', 'nussl.utils.seed', (['seed'], {}), '(seed)\n', (3209, 3215), False, 'import nussl\n'), ((3220, 3261), 'os.makedirs', 'os.makedirs', (['target_folder'], {'exist_ok': '(True)'}), '(target_folder, exist_ok=True)\n', (3231, 3261), False, 'import os\n'), ((3281, 3303), 'numpy.random.randint', 'np.random.randint', (['(100)'], {}), '(100)\n', (3298, 3303), True, 'import numpy as np\n'), ((3308, 3340), 'logging.info', 'logging.info', (['"""Starting mixing."""'], {}), "('Starting mixing.')\n", (3320, 3340), False, 'import logging\n'), ((4575, 4626), 'p_tqdm.p_map', 'p_tqdm.p_map', (['mix_func', '*args'], {'num_cpus': 'num_workers'}), '(mix_func, *args, num_cpus=num_workers)\n', (4587, 4626), False, 'import p_tqdm\n'), ((3710, 3812), 'scaper.Scaper', 'Scaper', (['scene_duration'], {'fg_path': 'foreground_path', 'bg_path': 'background_path', 'random_state': 'scaper_seed'}), '(scene_duration, fg_path=foreground_path, bg_path=background_path,\n random_state=scaper_seed)\n', (3716, 3812), False, 'from scaper import Scaper, generate_from_jams\n'), ((2777, 2800), 'gin.config_scope', 'gin.config_scope', (['scope'], {}), '(scope)\n', (2793, 2800), False, 'import gin\n'), ((4166, 4209), 'os.path.join', 'os.path.join', (['target_folder', 'f"""{i:08d}.wav"""'], {}), "(target_folder, f'{i:08d}.wav')\n", (4178, 4209), False, 'import os\n'), ((2269, 2346), 'logging.exception', 'logging.exception', (['f"""Got an error for {label} @ {_source_file}. Moving on..."""'], {}), "(f'Got an error for {label} @ {_source_file}. Moving on...')\n", (2286, 2346), False, 'import logging\n')] |
DaneRosa/adventure-cards | adventure-cards/package/main.py | 0685feeec8b56627795e685ff4fffad187881e1c | import json
def hydrateCards(rawDeckDataPath):
pack = []
rawDeckData = json.load(open(rawDeckDataPath,))
for index, item in enumerate(rawDeckData):
deck = []
# print(index,item)
for i in rawDeckData[item]:
card ={
f'{index}':
{
"name": "",
"type": "",
"level": None,
"spell_name": "",
"creature_name": "",
"artifact_name": "",
"enchantment_name": "",
"spell_magnifier": "",
"spell_type": "",
"name_modifier": "",
"creature_modifier": "",
"mythic_creature_modifier": "",
"location": "",
"mythic_location": ""
}
}
nameSplit = i[0].split()
card[f'{index}']['name'] = i[0]
card[f'{index}']['type']= i[1]
card[f'{index}']['level']=i[2]
if i[1] == 'spell':
if len(nameSplit) == 1:
card[f'{index}']['spell_name']= i[0]
elif len(nameSplit) == 2:
card[f'{index}']['spell_type']= nameSplit[0]
card[f'{index}']['spell_name']= nameSplit[1]
elif len(nameSplit) == 3:
card[f'{index}']['spell_magnifier']=nameSplit[0]
card[f'{index}']['spell_type']=nameSplit[1]
card[f'{index}']['spell_name']=nameSplit[2]
elif i[1] == 'artifact':
if 'Divine Robe' or 'Ghost Wand' in i[0]:
if 'Divine Robe' in i[0]:
i[0] = i[0].replace('Divine Robe', 'DivineRobe')
if 'Ghost Wand' in i[0]:
i[0] = i[0].replace('Ghost Wand', 'GhostWand')
nameSplit = i[0].split()
card[f'{index}']['name'] = i[0]
if len(nameSplit) == 1:
card[f'{index}']['artifact_name']= i[0]
elif len(nameSplit) == 2:
card[f'{index}']['artifact_name']= nameSplit[1]
card[f'{index}']['spell_type']= nameSplit[0]
elif len(nameSplit) == 3:
card[f'{index}']['artifact_name']= nameSplit[2]
card[f'{index}']['spell_magnifier']= nameSplit[0]
card[f'{index}']['spell_type']= nameSplit[1]
elif i[1] == 'enchantment':
if len(nameSplit) == 1:
card[f'{index}']['enchantment_name']= i[0]
if len(nameSplit) == 2:
card[f'{index}']['enchantment_name']= nameSplit[1]
card[f'{index}']['spell_type']= nameSplit[0]
if len(nameSplit) == 3:
card[f'{index}']['enchantment_name']=nameSplit[2]
card[f'{index}']['spell_type']=nameSplit[1]
card[f'{index}']['spell_magnifier']=nameSplit[0]
elif i[1] == 'monster':
card[f'{index}']['type']= 'creature'
if len(nameSplit) == 1:
card[f'{index}']['creature_name']= nameSplit[0]
if len(nameSplit) == 3:
card[f'{index}']['creature_name']= nameSplit[2]
card[f'{index}']['creature_modifier']= nameSplit[1]
card[f'{index}']['name_modifier']= nameSplit[0]
if len(nameSplit) >3:
keyword = 'of'
before_keyword, keyword, after_keyword = i[0].partition(keyword)
if i[2] == 2:
card[f'{index}']['creature_name']= nameSplit[2]
card[f'{index}']['creature_modifier']= nameSplit[1]
card[f'{index}']['name_modifier']= nameSplit[0]
card[f'{index}']['location']= nameSplit[2] = keyword + after_keyword
elif i[2] == 3:
card[f'{index}']['creature_name']= nameSplit[2]
card[f'{index}']['mythic_creature_modifier']= nameSplit[1]
card[f'{index}']['name_modifier']= nameSplit[0]
card[f'{index}']['mythic_location']= keyword + after_keyword
deck.append(card[f'{index}'])
index +=1
if len(deck) == 45:
break
pack.append(deck)
return(pack) | [] |
huynhtnhut97/keras-video-classifier | demo/cnn_predict.py | 3ea6a8d671f3bd3cc8eddef64ad75abc2a2d593a | import numpy as np
from keras import backend as K
import os
import sys
K.set_image_dim_ordering('tf')
def patch_path(path):
return os.path.join(os.path.dirname(__file__), path)
def main():
sys.path.append(patch_path('..'))
data_dir_path = patch_path('very_large_data')
model_dir_path = patch_path('models/UCF-101')
from keras_video_classifier.library.convolutional import CnnVideoClassifier
from keras_video_classifier.library.utility.ucf.UCF101_loader import load_ucf, scan_ucf_with_labels
config_file_path = CnnVideoClassifier.get_config_file_path(model_dir_path)
weight_file_path = CnnVideoClassifier.get_weight_file_path(model_dir_path)
np.random.seed(42)
load_ucf(data_dir_path)
predictor = CnnVideoClassifier()
predictor.load_model(config_file_path, weight_file_path)
videos = scan_ucf_with_labels(data_dir_path, [label for (label, label_index) in predictor.labels.items()])
video_file_path_list = np.array([file_path for file_path in videos.keys()])
np.random.shuffle(video_file_path_list)
for video_file_path in video_file_path_list:
label = videos[video_file_path]
predicted_label = predictor.predict(video_file_path)
print('predicted: ' + predicted_label + ' actual: ' + label)
if __name__ == '__main__':
main() | [((72, 102), 'keras.backend.set_image_dim_ordering', 'K.set_image_dim_ordering', (['"""tf"""'], {}), "('tf')\n", (96, 102), True, 'from keras import backend as K\n'), ((545, 600), 'keras_video_classifier.library.convolutional.CnnVideoClassifier.get_config_file_path', 'CnnVideoClassifier.get_config_file_path', (['model_dir_path'], {}), '(model_dir_path)\n', (584, 600), False, 'from keras_video_classifier.library.convolutional import CnnVideoClassifier\n'), ((624, 679), 'keras_video_classifier.library.convolutional.CnnVideoClassifier.get_weight_file_path', 'CnnVideoClassifier.get_weight_file_path', (['model_dir_path'], {}), '(model_dir_path)\n', (663, 679), False, 'from keras_video_classifier.library.convolutional import CnnVideoClassifier\n'), ((685, 703), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (699, 703), True, 'import numpy as np\n'), ((709, 732), 'keras_video_classifier.library.utility.ucf.UCF101_loader.load_ucf', 'load_ucf', (['data_dir_path'], {}), '(data_dir_path)\n', (717, 732), False, 'from keras_video_classifier.library.utility.ucf.UCF101_loader import load_ucf, scan_ucf_with_labels\n'), ((750, 770), 'keras_video_classifier.library.convolutional.CnnVideoClassifier', 'CnnVideoClassifier', ([], {}), '()\n', (768, 770), False, 'from keras_video_classifier.library.convolutional import CnnVideoClassifier\n'), ((1029, 1068), 'numpy.random.shuffle', 'np.random.shuffle', (['video_file_path_list'], {}), '(video_file_path_list)\n', (1046, 1068), True, 'import numpy as np\n'), ((151, 176), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (166, 176), False, 'import os\n')] |
EuroPython/djep | pyconde/context_processors.py | afcccbdda483e5f6962ac97f0dc4c4c5ea67fd21 | from django.conf import settings
def less_settings(request):
return {
'use_dynamic_less_in_debug': getattr(settings, 'LESS_USE_DYNAMIC_IN_DEBUG', True)
}
| [] |
tuomijal/pmdarima | pmdarima/preprocessing/endog/boxcox.py | 5bf84a2a5c42b81b949bd252ad3d4c6c311343f8 | # -*- coding: utf-8 -*-
from scipy import stats
import numpy as np
import warnings
from ...compat import check_is_fitted, pmdarima as pm_compat
from .base import BaseEndogTransformer
__all__ = ['BoxCoxEndogTransformer']
class BoxCoxEndogTransformer(BaseEndogTransformer):
r"""Apply the Box-Cox transformation to an endogenous array
The Box-Cox transformation is applied to non-normal data to coerce it more
towards a normal distribution. It's specified as::
(((y + lam2) ** lam1) - 1) / lam1, if lmbda != 0, else
log(y + lam2)
Parameters
----------
lmbda : float or None, optional (default=None)
The lambda value for the Box-Cox transformation, if known. If not
specified, it will be estimated via MLE.
lmbda2 : float, optional (default=0.)
The value to add to ``y`` to make it non-negative. If, after adding
``lmbda2``, there are still negative values, a ValueError will be
raised.
neg_action : str, optional (default="raise")
How to respond if any values in ``y <= 0`` after adding ``lmbda2``.
One of ('raise', 'warn', 'ignore'). If anything other than 'raise',
values <= 0 will be truncated to the value of ``floor``.
floor : float, optional (default=1e-16)
A positive value that truncate values to if there are values in ``y``
that are zero or negative and ``neg_action`` is not 'raise'. Note that
if values are truncated, invertibility will not be preserved, and the
transformed array may not be perfectly inverse-transformed.
"""
def __init__(self, lmbda=None, lmbda2=0, neg_action="raise", floor=1e-16):
self.lmbda = lmbda
self.lmbda2 = lmbda2
self.neg_action = neg_action
self.floor = floor
def fit(self, y, X=None, **kwargs): # TODO: kwargs go away
"""Fit the transformer
Learns the value of ``lmbda``, if not specified in the constructor.
If defined in the constructor, is not re-learned.
Parameters
----------
y : array-like or None, shape=(n_samples,)
The endogenous (time-series) array.
X : array-like or None, shape=(n_samples, n_features), optional
The exogenous array of additional covariates. Not used for
endogenous transformers. Default is None, and non-None values will
serve as pass-through arrays.
"""
lam1 = self.lmbda
lam2 = self.lmbda2
# Temporary shim until we remove `exogenous` support completely
X, _ = pm_compat.get_X(X, **kwargs)
if lam2 < 0:
raise ValueError("lmbda2 must be a non-negative scalar value")
if lam1 is None:
y, _ = self._check_y_X(y, X)
_, lam1 = stats.boxcox(y + lam2, lmbda=None, alpha=None)
self.lam1_ = lam1
self.lam2_ = lam2
return self
def transform(self, y, X=None, **kwargs):
"""Transform the new array
Apply the Box-Cox transformation to the array after learning the
lambda parameter.
Parameters
----------
y : array-like or None, shape=(n_samples,)
The endogenous (time-series) array.
X : array-like or None, shape=(n_samples, n_features), optional
The exogenous array of additional covariates. Not used for
endogenous transformers. Default is None, and non-None values will
serve as pass-through arrays.
Returns
-------
y_transform : array-like or None
The Box-Cox transformed y array
X : array-like or None
The X array
"""
check_is_fitted(self, "lam1_")
# Temporary shim until we remove `exogenous` support completely
X, _ = pm_compat.get_X(X, **kwargs)
lam1 = self.lam1_
lam2 = self.lam2_
y, exog = self._check_y_X(y, X)
y += lam2
neg_mask = y <= 0.
if neg_mask.any():
action = self.neg_action
msg = "Negative or zero values present in y"
if action == "raise":
raise ValueError(msg)
elif action == "warn":
warnings.warn(msg, UserWarning)
y[neg_mask] = self.floor
if lam1 == 0:
return np.log(y), exog
return (y ** lam1 - 1) / lam1, exog
def inverse_transform(self, y, X=None, **kwargs): # TODO: kwargs go away
"""Inverse transform a transformed array
Inverse the Box-Cox transformation on the transformed array. Note that
if truncation happened in the ``transform`` method, invertibility will
not be preserved, and the transformed array may not be perfectly
inverse-transformed.
Parameters
----------
y : array-like or None, shape=(n_samples,)
The transformed endogenous (time-series) array.
X : array-like or None, shape=(n_samples, n_features), optional
The exogenous array of additional covariates. Not used for
endogenous transformers. Default is None, and non-None values will
serve as pass-through arrays.
Returns
-------
y : array-like or None
The inverse-transformed y array
X : array-like or None
The inverse-transformed X array
"""
check_is_fitted(self, "lam1_")
# Temporary shim until we remove `exogenous` support completely
X, _ = pm_compat.get_X(X, **kwargs)
lam1 = self.lam1_
lam2 = self.lam2_
y, exog = self._check_y_X(y, X)
if lam1 == 0:
return np.exp(y) - lam2, exog
numer = y * lam1 # remove denominator
numer += 1. # add 1 back to it
de_exp = numer ** (1. / lam1) # de-exponentiate
return de_exp - lam2, exog
| [((2802, 2848), 'scipy.stats.boxcox', 'stats.boxcox', (['(y + lam2)'], {'lmbda': 'None', 'alpha': 'None'}), '(y + lam2, lmbda=None, alpha=None)\n', (2814, 2848), False, 'from scipy import stats\n'), ((4344, 4353), 'numpy.log', 'np.log', (['y'], {}), '(y)\n', (4350, 4353), True, 'import numpy as np\n'), ((4233, 4264), 'warnings.warn', 'warnings.warn', (['msg', 'UserWarning'], {}), '(msg, UserWarning)\n', (4246, 4264), False, 'import warnings\n'), ((5695, 5704), 'numpy.exp', 'np.exp', (['y'], {}), '(y)\n', (5701, 5704), True, 'import numpy as np\n')] |
ashishdhngr/baserow | backend/src/baserow/api/user/registries.py | b098678d2165eb7c42930ee24dc6753a3cb520c3 | from baserow.core.registry import Instance, Registry
class UserDataType(Instance):
"""
The user data type can be used to inject an additional payload to the API
JWT response. This is the response when a user authenticates or refreshes his
token. The returned dict of the `get_user_data` method is added to the payload
under the key containing the type name.
Example:
class TestUserDataType(UserDataType):
type = "test"
def get_user_data(user, request):
return {"test": "value"}
user_data_registry.register(TestUserDataType())
Will result into the following response when the user authenticates:
{
"token": "eyJ....",
"user: {
"id": 1,
...
},
"test": {
"test": "value"
}
}
"""
def get_user_data(self, user, request) -> dict:
"""
Should return a dict containing the additional information that must be added
to the response payload after the user authenticates.
:param user: The related user that just authenticated.
:type user: User
:param request: The request when the user authenticated.
:type request: Request
:return: a dict containing the user data that must be added to the response.
"""
raise NotImplementedError(
"The get_user_data must be implemented and should return a dict."
)
class UserDataRegistry(Registry):
name = "api_user_data"
def get_all_user_data(self, user, request) -> dict:
"""
Collects the additional user data of all the registered user data type
instances.
:param user: The user that just authenticated.
:type user: User
:param request: The request when the user authenticated.
:type request: Request
:return: a dict containing all additional user data payload for all the
registered instances.
"""
return {
key: value.get_user_data(user, request)
for key, value in self.registry.items()
}
user_data_registry = UserDataRegistry()
| [] |
aklsh/EE2703 | Week 2/code.py | 546b70c9adac4a4de294d83affbb74e480c2f65d | '''
-------------------------------------
Assignment 2 - EE2703 (Jan-May 2020)
Done by Akilesh Kannan (EE18B122)
Created on 18/01/20
Last Modified on 04/02/20
-------------------------------------
'''
# importing necessary libraries
import sys
import cmath
import numpy as np
import pandas as pd
# To improve readability
CIRCUIT_START = ".circuit"
CIRCUIT_END = ".end"
RESISTOR = "R"
CAPACITOR = "C"
INDUCTOR = "L"
IVS = "V"
ICS = "I"
VCVS = "E"
VCCS = "G"
CCVS = "H"
CCCS = "F"
PI = np.pi
# Classes for each circuit component
class resistor:
def __init__(self, name, n1, n2, val):
self.name = name
self.value = enggToMath(val)
self.node1 = n1
self.node2 = n2
class inductor:
def __init__(self, name, n1, n2, val):
self.name = name
self.value = enggToMath(val)
self.node1 = n1
self.node2 = n2
class capacitor:
def __init__(self, name, n1, n2, val):
self.name = name
self.value = enggToMath(val)
self.node1 = n1
self.node2 = n2
class voltageSource:
def __init__(self, name, n1, n2, val, phase=0):
self.name = name
self.value = enggToMath(val)
self.node1 = n1
self.node2 = n2
self.phase = float(phase)
class currentSource:
def __init__(self, name, n1, n2, val, phase=0):
self.name = name
self.value = enggToMath(val)
self.node1 = n1
self.node2 = n2
self.phase = float(phase)
class vcvs:
def __init__(self, name, n1, n2, n3, n4, val):
self.name = name
self.value = enggToMath(val)
self.node1 = n1
self.node2 = n2
self.node3 = n3
self.node4 = n4
class vccs:
def __init__(self, name, n1, n2, n3, n4, val):
self.name = name
self.value = enggToMath(val)
self.node1 = n1
self.node2 = n2
self.node3 = n3
self.node4 = n4
class ccvs:
def __init__(self, name, n1, n2, vName, val):
self.name = name
self.value = enggToMath(val)
self.node1 = n1
self.node2 = n2
self.vSource = vName
class cccs:
def __init__(self, name, n1, n2, vName, val):
self.name = name
self.value = enggToMath(val)
self.node1 = n1
self.node2 = n2
self.vSource = vName
# Convert a number in engineer's format to math
def enggToMath(enggNumber):
try:
return float(enggNumber)
except:
lenEnggNumber = len(enggNumber)
# Kilo
if enggNumber[lenEnggNumber-1] == 'k':
base = int(enggNumber[0:lenEnggNumber-1])
return base*1e3
# Milli
elif enggNumber[lenEnggNumber-1] == 'm':
base = int(enggNumber[0:lenEnggNumber-1])
return base*1e-3
# Micro
elif enggNumber[lenEnggNumber-1] == 'u':
base = int(enggNumber[0:lenEnggNumber-1])
return base*1e-6
# Nano
elif enggNumber[lenEnggNumber-1] == 'n':
base = int(enggNumber[0:lenEnggNumber-1])
return base*1e-9
# Mega
elif enggNumber[lenEnggNumber-1] == 'M':
base = int(enggNumber[0:lenEnggNumber-1])
return base*1e6
else:
sys.exit("Please check the component values given. Supported engineer units are: M, k, m, u, n\nYou can also enter values in exponential format (eg. 1e3 = 1000).")
if __name__ == "__main__":
# checking number of command line arguments
if len(sys.argv)!=2 :
sys.exit("Invalid number of arguments!")
else:
try:
circuitFile = sys.argv[1]
circuitFreq = 1e-100
circuitComponents = { RESISTOR: [], CAPACITOR: [], INDUCTOR: [], IVS: [], ICS: [], VCVS: [], VCCS: [], CCVS: [], CCCS: [] }
circuitNodes = []
# checking if given netlist file is of correct type
if (not circuitFile.endswith(".netlist")):
print("Wrong file type!")
else:
netlistFileLines = []
with open (circuitFile, "r") as f:
for line in f.readlines():
netlistFileLines.append(line.split('#')[0].split('\n')[0])
# Getting frequency, if any
if(line[:3] == '.ac'):
circuitFreq = float(line.split()[2])
# Setting Angular Frequency w
w = 2*PI*circuitFreq
try:
# Finding the location of the identifiers
identifier1 = netlistFileLines.index(CIRCUIT_START)
identifier2 = netlistFileLines.index(CIRCUIT_END)
circuitBody = netlistFileLines[identifier1+1:identifier2]
for line in circuitBody:
# Extracting the data from the line
lineTokens = line.split()
# Appending new nodes to a list
try:
if lineTokens[1] not in circuitNodes:
circuitNodes.append(lineTokens[1])
if lineTokens[2] not in circuitNodes:
circuitNodes.append(lineTokens[2])
except IndexError:
continue
# Resistor
if lineTokens[0][0] == RESISTOR:
circuitComponents[RESISTOR].append(resistor(lineTokens[0], lineTokens[1], lineTokens[2], lineTokens[3]))
# Capacitor
elif lineTokens[0][0] == CAPACITOR:
circuitComponents[CAPACITOR].append(capacitor(lineTokens[0], lineTokens[1], lineTokens[2], lineTokens[3]))
# Inductor
elif lineTokens[0][0] == INDUCTOR:
circuitComponents[INDUCTOR].append(inductor(lineTokens[0], lineTokens[1], lineTokens[2], lineTokens[3]))
# Voltage Source
elif lineTokens[0][0] == IVS:
if len(lineTokens) == 5: # DC Source
circuitComponents[IVS].append(voltageSource(lineTokens[0], lineTokens[1], lineTokens[2], float(lineTokens[4])))
elif len(lineTokens) == 6: # AC Source
if circuitFreq == 1e-100:
sys.exit("Frequency of AC Source not specified!!")
circuitComponents[IVS].append(voltageSource(lineTokens[0], lineTokens[1], lineTokens[2], float(lineTokens[4])/2, lineTokens[5]))
# Current Source
elif lineTokens[0][0] == ICS:
if len(lineTokens) == 5: # DC Source
circuitComponents[ICS].append(currentSource(lineTokens[0], lineTokens[1], lineTokens[2], float(lineTokens[4])))
elif len(lineTokens) == 6: # AC Source
if circuitFreq == 1e-100:
sys.exit("Frequency of AC Source not specified!!")
circuitComponents[ICS].append(currentSource(lineTokens[0], lineTokens[1], lineTokens[2], float(lineTokens[4])/2, lineTokens[5]))
# VCVS
elif lineTokens[0][0] == VCVS:
circuitComponents[VCVS].append(vcvs(lineTokens[0], lineTokens[1], lineTokens[2], lineTokens[3], lineTokens[4], lineTokens[5]))
# VCCS
elif lineTokens[0][0] == VCCS:
circuitComponents[VCCS].append(vcvs(lineTokens[0], lineTokens[1], lineTokens[2], lineTokens[3], lineTokens[4], lineTokens[5]))
# CCVS
elif lineTokens[0][0] == CCVS:
circuitComponents[CCVS].append(ccvs(lineTokens[0], lineTokens[1], lineTokens[2], lineTokens[3], lineTokens[4]))
# CCCS
elif lineTokens[0][0] == CCCS:
circuitComponents[CCCS].append(cccs(lineTokens[0], lineTokens[1], lineTokens[2], lineTokens[3], lineTokens[4]))
# Erroneous Component Name
else:
sys.exit("Wrong Component Given. ABORT!")
try:
circuitNodes.remove('GND')
circuitNodes = ['GND'] + circuitNodes
except:
sys.exit("No ground node specified in the circuit!!")
# Creating a dictionary with node names and their numbers (to reduce the time taken by later parts of the program)
nodeNumbers = {circuitNodes[i]:i for i in range(len(circuitNodes))}
numNodes = len(circuitNodes)
numVS = len(circuitComponents[IVS])+len(circuitComponents[VCVS])+len(circuitComponents[CCVS])
# Creating Matrices M and b
matrixM = np.zeros((numNodes+numVS, numNodes+numVS), np.complex)
matrixB = np.zeros((numNodes+numVS,), np.complex)
# GND Equation
matrixM[0][0] = 1.0
# Resistor Equations
for r in circuitComponents[RESISTOR]:
if r.node1 != 'GND':
matrixM[nodeNumbers[r.node1]][nodeNumbers[r.node1]] += 1/r.value
matrixM[nodeNumbers[r.node1]][nodeNumbers[r.node2]] -= 1/r.value
if r.node2 != 'GND':
matrixM[nodeNumbers[r.node2]][nodeNumbers[r.node1]] -= 1/r.value
matrixM[nodeNumbers[r.node2]][nodeNumbers[r.node2]] += 1/r.value
# Capacitor Equations
for c in circuitComponents[CAPACITOR]:
if c.node1 != 'GND':
matrixM[nodeNumbers[c.node1]][nodeNumbers[c.node1]] += complex(0, w*c.value)
matrixM[nodeNumbers[c.node1]][nodeNumbers[c.node2]] -= complex(0, w*c.value)
if c.node2 != 'GND':
matrixM[nodeNumbers[c.node2]][nodeNumbers[c.node1]] -= complex(0, w*c.value)
matrixM[nodeNumbers[c.node2]][nodeNumbers[c.node2]] += complex(0, w*c.value)
# Inductor Equations
for l in circuitComponents[INDUCTOR]:
if l.node1 != 'GND':
matrixM[nodeNumbers[l.node1]][nodeNumbers[l.node1]] += complex(0, -1.0/(w*l.value))
matrixM[nodeNumbers[l.node1]][nodeNumbers[l.node2]] -= complex(0, -1.0/(w*l.value))
if l.node2 != 'GND':
matrixM[nodeNumbers[l.node2]][nodeNumbers[l.node1]] -= complex(0, -1.0/(w*l.value))
matrixM[nodeNumbers[l.node2]][nodeNumbers[l.node2]] += complex(0, -1.0/(w*l.value))
# Voltage Source Equations
for i in range(len(circuitComponents[IVS])):
# Equation accounting for current through the source
if circuitComponents[IVS][i].node1 != 'GND':
matrixM[nodeNumbers[circuitComponents[IVS][i].node1]][numNodes+i] = 1.0
if circuitComponents[IVS][i].node2 != 'GND':
matrixM[nodeNumbers[circuitComponents[IVS][i].node2]][numNodes+i] = -1.0
# Auxiliary Equations
matrixM[numNodes+i][nodeNumbers[circuitComponents[IVS][i].node1]] = -1.0
matrixM[numNodes+i][nodeNumbers[circuitComponents[IVS][i].node2]] = +1.0
matrixB[numNodes+i] = cmath.rect(circuitComponents[IVS][i].value, circuitComponents[IVS][i].phase*PI/180)
# Current Source Equations
for i in circuitComponents[ICS]:
if i.node1 != 'GND':
matrixB[nodeNumbers[i.node1]] = -1*i.value
if i.node2 != 'GND':
matrixB[nodeNumbers[i.node2]] = i.value
# VCVS Equations
for i in range(len(circuitComponents[VCVS])):
# Equation accounting for current through the source
if circuitComponents[VCVS][i].node1 != 'GND':
matrixM[nodeNumbers[circuitComponents[VCVS][i].node1]][numNodes+len(circuitComponents[IVS])+i] = 1.0
if circuitComponents[VCVS][i].node2 != 'GND':
matrixM[nodeNumbers[circuitComponents[VCVS][i].node2]][numNodes+len(circuitComponents[IVS])+i] = -1.0
matrixM[numNodes+len(circuitComponents[IVS])+i][nodeNumbers[circuitComponents[VCVS][i].node1]] = 1.0
matrixM[numNodes+len(circuitComponents[IVS])+i][nodeNumbers[circuitComponents[VCVS][i].node2]] = -1.0
matrixM[numNodes+len(circuitComponents[IVS])+i][nodeNumbers[circuitComponents[VCVS][i].node3]] = -1.0*circuitComponents[VCVS][i].value
matrixM[numNodes+len(circuitComponents[IVS])+i][nodeNumbers[circuitComponents[VCVS][i].node4]] = 1.0*circuitComponents[VCVS][i].value
# CCVS Equations
for i in range(len(circuitComponents[CCVS])):
# Equation accounting for current through the source
if circuitComponents[VCVS][i].node1 != 'GND':
matrixM[nodeNumbers[circuitComponents[CCVS][i].node1]][numNodes+len(circuitComponents[IVS])+len(circuitComponents[VCVS])+i] = 1.0
if circuitComponents[VCVS][i].node2 != 'GND':
matrixM[nodeNumbers[circuitComponents[VCVS][i].node2]][numNodes+len(circuitComponents[IVS])+len(circuitComponents[VCVS])+i] = -1.0
matrixM[numNodes+len(circuitComponents[IVS])+len(circuitComponents[VCVS])+i][nodeNumbers[circuitComponents[CCVS][i].node1]] = 1.0
matrixM[numNodes+len(circuitComponents[IVS])+len(circuitComponents[VCVS])+i][nodeNumbers[circuitComponents[CCVS][i].node2]] = -1.0
matrixM[numNodes+len(circuitComponents[IVS])+len(circuitComponents[VCVS])+i][numNodes+len(circuitComponents[IVS])+len(circuitComponents[VCVS])+i] = -1.0*circuitComponents[CCVS][i].value
# VCCS Equations
for vccs in circuitComponents[VCCS]:
if vccs.node1 != 'GND':
matrixM[nodeNumbers[vccs.node1]][nodeNumbers[vccs.node4]]+=vccs.value
matrixM[nodeNumbers[vccs.node1]][nodeNumbers[vccs.node3]]-=vccs.value
if vccs.node2 != 'GND':
matrixM[nodeNumbers[vccs.node2]][nodeNumbers[vccs.node4]]-=vccs.value
matrixM[nodeNumbers[vccs.node3]][nodeNumbers[vccs.node3]]+=vccs.value
# CCCS Equations
for cccs in circuitComponents[CCCS]:
def getIndexIVS(vName):
for i in range(len(circuitComponents[IVS])):
if circuitComponents[IVS][i].name == vName:
return i
if cccs.node1 != 'GND':
matrixM[nodeNumbers[cccs.node1]][numNodes+getIndexIVS(cccs.vSource)]-=cccs.value
if cccs.node2 != 'GND':
matrixM[nodeNumbers[cccs.node2]][numNodes+getIndexIVS(cccs.vSource)]+=cccs.value
try:
x = np.linalg.solve(matrixM, matrixB)
circuitCurrents = []
# Formatting Output Data
for v in circuitComponents[IVS]:
circuitCurrents.append("current in "+v.name)
for v in circuitComponents[VCVS]:
circuitCurrents.append("current in "+v.name)
for v in circuitComponents[CCVS]:
circuitCurrents.append("current in "+v.name)
# Printing output in table format
print(pd.DataFrame(x, circuitNodes+circuitCurrents, columns=['Voltage / Current']))
print("The values given above are AMPLITUDE values and NOT RMS values.")
except np.linalg.LinAlgError:
sys.exit("Singular Matrix Formed! Please check if you have entered the circuit definition correctly!")
except ValueError:
sys.exit("Netlist does not abide to given format!")
except FileNotFoundError:
sys.exit("Given file does not exist!")
| [((3532, 3572), 'sys.exit', 'sys.exit', (['"""Invalid number of arguments!"""'], {}), "('Invalid number of arguments!')\n", (3540, 3572), False, 'import sys\n'), ((17158, 17196), 'sys.exit', 'sys.exit', (['"""Given file does not exist!"""'], {}), "('Given file does not exist!')\n", (17166, 17196), False, 'import sys\n'), ((9221, 9279), 'numpy.zeros', 'np.zeros', (['(numNodes + numVS, numNodes + numVS)', 'np.complex'], {}), '((numNodes + numVS, numNodes + numVS), np.complex)\n', (9229, 9279), True, 'import numpy as np\n'), ((9306, 9347), 'numpy.zeros', 'np.zeros', (['(numNodes + numVS,)', 'np.complex'], {}), '((numNodes + numVS,), np.complex)\n', (9314, 9347), True, 'import numpy as np\n'), ((12044, 12135), 'cmath.rect', 'cmath.rect', (['circuitComponents[IVS][i].value', '(circuitComponents[IVS][i].phase * PI / 180)'], {}), '(circuitComponents[IVS][i].value, circuitComponents[IVS][i].phase *\n PI / 180)\n', (12054, 12135), False, 'import cmath\n'), ((16045, 16078), 'numpy.linalg.solve', 'np.linalg.solve', (['matrixM', 'matrixB'], {}), '(matrixM, matrixB)\n', (16060, 16078), True, 'import numpy as np\n'), ((17060, 17111), 'sys.exit', 'sys.exit', (['"""Netlist does not abide to given format!"""'], {}), "('Netlist does not abide to given format!')\n", (17068, 17111), False, 'import sys\n'), ((8703, 8756), 'sys.exit', 'sys.exit', (['"""No ground node specified in the circuit!!"""'], {}), "('No ground node specified in the circuit!!')\n", (8711, 8756), False, 'import sys\n'), ((16653, 16731), 'pandas.DataFrame', 'pd.DataFrame', (['x', '(circuitNodes + circuitCurrents)'], {'columns': "['Voltage / Current']"}), "(x, circuitNodes + circuitCurrents, columns=['Voltage / Current'])\n", (16665, 16731), True, 'import pandas as pd\n'), ((16902, 17014), 'sys.exit', 'sys.exit', (['"""Singular Matrix Formed! Please check if you have entered the circuit definition correctly!"""'], {}), "(\n 'Singular Matrix Formed! Please check if you have entered the circuit definition correctly!'\n )\n", (16910, 17014), False, 'import sys\n'), ((3258, 3434), 'sys.exit', 'sys.exit', (['"""Please check the component values given. Supported engineer units are: M, k, m, u, n\nYou can also enter values in exponential format (eg. 1e3 = 1000)."""'], {}), '(\n """Please check the component values given. Supported engineer units are: M, k, m, u, n\nYou can also enter values in exponential format (eg. 1e3 = 1000)."""\n )\n', (3266, 3434), False, 'import sys\n'), ((6539, 6589), 'sys.exit', 'sys.exit', (['"""Frequency of AC Source not specified!!"""'], {}), "('Frequency of AC Source not specified!!')\n", (6547, 6589), False, 'import sys\n'), ((7216, 7266), 'sys.exit', 'sys.exit', (['"""Frequency of AC Source not specified!!"""'], {}), "('Frequency of AC Source not specified!!')\n", (7224, 7266), False, 'import sys\n'), ((8471, 8512), 'sys.exit', 'sys.exit', (['"""Wrong Component Given. ABORT!"""'], {}), "('Wrong Component Given. ABORT!')\n", (8479, 8512), False, 'import sys\n')] |
M507/Guessing-passwords-using-machine-learning | Lib/Co.py | da90cfa30ce2e7a5e08ee528f594fa047ecea75c | import subprocess
import os.path
"""
Stylish input()
"""
def s_input(string):
return input(string+">").strip("\n")
"""
Execute command locally
"""
def execute_command(command):
if len(command) > 0:
print(command)
proc = subprocess.Popen(command.split(" "), stdout=subprocess.PIPE, cwd="/tmp")
return proc
"""
Get all subdirectories of a directory.
"""
def getSubs(dirname):
dirs = [d for d in os.listdir(dirname) if os.path.isdir(os.path.join(dirname, d))]
# subdirectories = [dirname + "/" + subDirName for subDirName in subdirectories]
subdirectories = []
for dir in dirs:
subdirectories.append(dirname + '/' + dir)
return subdirectories
"""
Rocket science
"""
def answer(string):
a = input(string)
if a == "Y" or a == 'y' or a == 'Yes' or a == 'yes':
return True
else:
return False
| [] |
rachelbrown347/CS294-26_code | project3_code/part_0/main.py | 72a20a9ab75345091d2a743b13857d7a88adf9be | import numpy as np
import matplotlib.pyplot as plt
from skimage.exposure import rescale_intensity
from unsharp import *
# Load file and normalize to 0-1
fname = 'iguana.jpg'
im = plt.imread(fname)
if im.mean() >= 1:
im = im/255.
sigma = 5
amplitude = 1.5
imsharp = unsharp_mask(im, sigma, amplitude)
imsharp = rescale_intensity(imsharp, in_range=(0, 1), out_range=(0,1))
new_fname = fname[:-4]+'_sharp.jpg'
plt.imsave(new_fname, imsharp) | [((182, 199), 'matplotlib.pyplot.imread', 'plt.imread', (['fname'], {}), '(fname)\n', (192, 199), True, 'import matplotlib.pyplot as plt\n'), ((318, 379), 'skimage.exposure.rescale_intensity', 'rescale_intensity', (['imsharp'], {'in_range': '(0, 1)', 'out_range': '(0, 1)'}), '(imsharp, in_range=(0, 1), out_range=(0, 1))\n', (335, 379), False, 'from skimage.exposure import rescale_intensity\n'), ((416, 446), 'matplotlib.pyplot.imsave', 'plt.imsave', (['new_fname', 'imsharp'], {}), '(new_fname, imsharp)\n', (426, 446), True, 'import matplotlib.pyplot as plt\n')] |
MERegistro/meregistro | meregistro/apps/registro/models/EstablecimientoDomicilio.py | 6cde3cab2bd1a8e3084fa38147de377d229391e3 | # -*- coding: utf-8 -*-
from django.db import models
from apps.registro.models.TipoDomicilio import TipoDomicilio
from apps.registro.models.Localidad import Localidad
from apps.registro.models.Establecimiento import Establecimiento
from django.core.exceptions import ValidationError
from apps.seguridad.audit import audit
@audit
class EstablecimientoDomicilio(models.Model):
TIPO_POSTAL = 'Postal'
TIPO_INSTITUCIONAL = 'Institucional'
establecimiento = models.ForeignKey(Establecimiento, related_name='domicilios')
tipo_domicilio = models.ForeignKey(TipoDomicilio)
localidad = models.ForeignKey(Localidad, related_name='domicilios_establecimientos')
calle = models.CharField(max_length=100)
altura = models.CharField(max_length=15)
referencia = models.CharField(max_length=255, null=True, blank=True)
cp = models.CharField(max_length=20)
class Meta:
app_label = 'registro'
db_table = 'registro_establecimiento_domicilio'
def __unicode__(self):
if self.cp:
cp = " (CP: " + self.cp + ")"
else:
cp = ""
return "%s %s - %s %s" % (self.calle, self.altura, self.localidad.nombre, cp)
def __init__(self, *args, **kwargs):
super(EstablecimientoDomicilio, self).__init__(*args, **kwargs)
| [((462, 523), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Establecimiento'], {'related_name': '"""domicilios"""'}), "(Establecimiento, related_name='domicilios')\n", (479, 523), False, 'from django.db import models\n'), ((542, 574), 'django.db.models.ForeignKey', 'models.ForeignKey', (['TipoDomicilio'], {}), '(TipoDomicilio)\n', (559, 574), False, 'from django.db import models\n'), ((588, 660), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Localidad'], {'related_name': '"""domicilios_establecimientos"""'}), "(Localidad, related_name='domicilios_establecimientos')\n", (605, 660), False, 'from django.db import models\n'), ((670, 702), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (686, 702), False, 'from django.db import models\n'), ((713, 744), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(15)'}), '(max_length=15)\n', (729, 744), False, 'from django.db import models\n'), ((759, 814), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'null': '(True)', 'blank': '(True)'}), '(max_length=255, null=True, blank=True)\n', (775, 814), False, 'from django.db import models\n'), ((821, 852), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (837, 852), False, 'from django.db import models\n')] |
timothyyu/p4e-prac | python_for_everybody/py2_p4i_old/6.5findslicestringextract.py | f978b71ce147b6e9058372929f2666c2e67d0741 | # 6.5 Write code using find() and string slicing (see section 6.10) to extract
# the number at the end of the line below.
# Convert the extracted value to a floating point number and print it out.
text = "X-DSPAM-Confidence: 0.8475";
pos = text.find(':')
text = float(text[pos+1:])
print text | [] |
Petr-By/qtpyvis | tools/lucid/engine.py | 0b9a151ee6b9a56b486c2bece9c1f03414629efc | import logging
logger = logging.getLogger(__name__)
print(f"!!!!!!!!!! getEffectiveLevel: {logger.getEffectiveLevel()} !!!!!!!!!!!!!")
from dltb.base.observer import Observable, change
from network import Network, loader
from network.lucid import Network as LucidNetwork
# lucid.modelzoo.vision_models:
# A module providinge the pretrained networks by name, e.g.
# models.AlexNet
import lucid.modelzoo.vision_models as models
import lucid.modelzoo.nets_factory as nets
from lucid.modelzoo.vision_base import Model as LucidModel
import lucid.optvis.objectives as objectives
import lucid.optvis.param as param
import lucid.optvis.render as render
import lucid.optvis.transform as transform
class Engine(Observable, method='engine_changed',
changes={'engine_changed', 'model_changed', 'unit_changed'}):
"""The Engine is a wrapper around the lucid module.
Attributes
----------
_network: LucidNetwork
The currently selected lucid network. None if no model
is selected.
_model: LucidModel
The currently selected lucid model. None if no model is
selected.
"""
def __init__(self):
super().__init__()
self._network = None
self._model = None
self._layer = None
self._unit = None
self.image = None
self.running = False
@property
def model(self) -> LucidModel:
"""The currently selected lucid model. None if no model is
selected.
"""
return self._model
@property
def model_name(self) -> str:
"""The name of the currently selected lucid model. None if
no model is selected.
"""
return None if self._network is None else self._network.name
@change
def load_model(self, name: str) -> LucidModel:
"""Load the Lucid model with the given name.
Returns
-------
model: LucidModel
A reference to the LucidModel.
"""
logger.info(f"load_model({name})")
try:
#self._network = LucidNetwork(name=name)
self._network = loader.load_lucid(name)
self._model = self._network.model
except KeyError as e:
self._network = None
self._model = None
logger.info(f"NAME={name}/{self.model_name} : {self._model}")
self._layer = None
self._unit = None
self.change(model_changed=True, unit_changed=True)
return self._model
@change
def set_layer(self, name: str, unit: int=0) -> None:
"""Set the currently selected layer.
Arguments
---------
name: str
The name of the layer.
unit: int
The index of the unit in the layer.
"""
if name == self.layer:
return
if self._model is None:
return
try:
self._layer = next(x for x in self._model.layers
if x['name'] == name)
self._unit = unit
except StopIteration: # name not in layer list
self._layer = None
self._unit = None
self.change(unit_changed=True)
@property
def layer(self) -> str:
"""The name of the currently selected layer.
"""
return None if self._layer is None else self._layer['name']
@layer.setter
def layer(self, name: str) -> None:
"""Set the currently selected layer.
"""
self.set_layer(name)
@property
def layer_type(self) -> str:
"""The type of the currently selected layer.
"""
return None if self._layer is None else self._layer['type']
@property
def layer_units(self) -> int:
"""The number of units in the currently selected layer.
"""
return None if self._layer is None else self._layer['size']
@change
def _set_unit(self, unit: int) -> None:
if unit == self.unit:
return
if unit is None:
self._unit = None
self.change(unit_changed=True)
elif self._layer is None:
raise ValueError('Setting unit failed as no layer is selected')
elif not 0 <= unit < self._layer['size']:
raise ValueError(f"Invalid unit {unit} for current layer"
f" of size {self._layer['size']}")
else:
self._unit = unit
self.change(unit_changed=True)
@property
def unit(self) -> int:
"""The index of the currently selected unit or None if no
unit is selected.
"""
return None if self._unit is None else self._unit
@unit.setter
def unit(self, unit: int) -> None:
"""The index of the currently selected unit or None if no
unit is selected.
"""
self._set_unit(unit)
@property
def layer_id(self) -> str:
"""The id of the currently selected layer or None if no
unit is selected.
"""
if self._layer is None:
return None
if self._layer['type'] == 'conv':
return self._layer['name'] + '_pre_relu'
return self._layer['name']
@property
def unit_id(self) -> str:
"""The id of the currently selected unit or None if no
unit is selected.
"""
return (None if self._layer is None
else self.layer_id + ':' + str(self._unit))
def _doRun(self, running: bool=True) -> None:
self.running = running
self.notify_observers(EngineChange(engine_changed=True))
def start(self):
self.image = None
self._doRun(True)
obj = objectives.channel(self.layer_id, self.unit)
self.image = render.render_vis(self.model, obj)
#self.image = render.render_vis(self.model, self.unit_id)
self._doRun(False)
def stop(self):
self._doRun(False)
def start_multi(self):
self.image = None
self._doRun(True)
logger.info("!!! running all:")
for unit in range(self.layer_units):
self.unit = unit
self.notify_observers(EngineChange(unit_changed=True))
logger.info(f"!!! running unit {unit}")
obj = objectives.channel(self.layer_id, unit)
self.image = render.render_vis(self.model, obj)
if not self.running:
break
self._doRun(True)
self._doRun(False)
# FIXME[old]: this is too make old code happy. New code should use
# Engine.Change and Engine.Observer directly.
EngineChange = Engine.Change
EngineObserver = Engine.Observer
| [((24, 51), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (41, 51), False, 'import logging\n'), ((5696, 5740), 'lucid.optvis.objectives.channel', 'objectives.channel', (['self.layer_id', 'self.unit'], {}), '(self.layer_id, self.unit)\n', (5714, 5740), True, 'import lucid.optvis.objectives as objectives\n'), ((5762, 5796), 'lucid.optvis.render.render_vis', 'render.render_vis', (['self.model', 'obj'], {}), '(self.model, obj)\n', (5779, 5796), True, 'import lucid.optvis.render as render\n'), ((2132, 2155), 'network.loader.load_lucid', 'loader.load_lucid', (['name'], {}), '(name)\n', (2149, 2155), False, 'from network import Network, loader\n'), ((6271, 6310), 'lucid.optvis.objectives.channel', 'objectives.channel', (['self.layer_id', 'unit'], {}), '(self.layer_id, unit)\n', (6289, 6310), True, 'import lucid.optvis.objectives as objectives\n'), ((6336, 6370), 'lucid.optvis.render.render_vis', 'render.render_vis', (['self.model', 'obj'], {}), '(self.model, obj)\n', (6353, 6370), True, 'import lucid.optvis.render as render\n')] |
natamelo/synapse | synapse/storage/events.py | 3d870ecfc5353e455917166cb5c2bb8ba48a6ebd | # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import logging
from collections import Counter as c_counter, OrderedDict, deque, namedtuple
from functools import wraps
from six import iteritems, text_type
from six.moves import range
from canonicaljson import json
from prometheus_client import Counter, Histogram
from twisted.internet import defer
import synapse.metrics
from synapse.api.constants import EventTypes
from synapse.api.errors import SynapseError
from synapse.events import EventBase # noqa: F401
from synapse.events.snapshot import EventContext # noqa: F401
from synapse.metrics import BucketCollector
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.state import StateResolutionStore
from synapse.storage.background_updates import BackgroundUpdateStore
from synapse.storage.event_federation import EventFederationStore
from synapse.storage.events_worker import EventsWorkerStore
from synapse.storage.state import StateGroupWorkerStore
from synapse.types import RoomStreamToken, get_domain_from_id
from synapse.util import batch_iter
from synapse.util.async_helpers import ObservableDeferred
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
from synapse.util.frozenutils import frozendict_json_encoder
from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable
from synapse.util.logutils import log_function
from synapse.util.metrics import Measure
logger = logging.getLogger(__name__)
persist_event_counter = Counter("synapse_storage_events_persisted_events", "")
event_counter = Counter(
"synapse_storage_events_persisted_events_sep",
"",
["type", "origin_type", "origin_entity"],
)
# The number of times we are recalculating the current state
state_delta_counter = Counter("synapse_storage_events_state_delta", "")
# The number of times we are recalculating state when there is only a
# single forward extremity
state_delta_single_event_counter = Counter(
"synapse_storage_events_state_delta_single_event", ""
)
# The number of times we are reculating state when we could have resonably
# calculated the delta when we calculated the state for an event we were
# persisting.
state_delta_reuse_delta_counter = Counter(
"synapse_storage_events_state_delta_reuse_delta", ""
)
# The number of forward extremities for each new event.
forward_extremities_counter = Histogram(
"synapse_storage_events_forward_extremities_persisted",
"Number of forward extremities for each new event",
buckets=(1, 2, 3, 5, 7, 10, 15, 20, 50, 100, 200, 500, "+Inf"),
)
# The number of stale forward extremities for each new event. Stale extremities
# are those that were in the previous set of extremities as well as the new.
stale_forward_extremities_counter = Histogram(
"synapse_storage_events_stale_forward_extremities_persisted",
"Number of unchanged forward extremities for each new event",
buckets=(0, 1, 2, 3, 5, 7, 10, 15, 20, 50, 100, 200, 500, "+Inf"),
)
def encode_json(json_object):
"""
Encode a Python object as JSON and return it in a Unicode string.
"""
out = frozendict_json_encoder.encode(json_object)
if isinstance(out, bytes):
out = out.decode("utf8")
return out
class _EventPeristenceQueue(object):
"""Queues up events so that they can be persisted in bulk with only one
concurrent transaction per room.
"""
_EventPersistQueueItem = namedtuple(
"_EventPersistQueueItem", ("events_and_contexts", "backfilled", "deferred")
)
def __init__(self):
self._event_persist_queues = {}
self._currently_persisting_rooms = set()
def add_to_queue(self, room_id, events_and_contexts, backfilled):
"""Add events to the queue, with the given persist_event options.
NB: due to the normal usage pattern of this method, it does *not*
follow the synapse logcontext rules, and leaves the logcontext in
place whether or not the returned deferred is ready.
Args:
room_id (str):
events_and_contexts (list[(EventBase, EventContext)]):
backfilled (bool):
Returns:
defer.Deferred: a deferred which will resolve once the events are
persisted. Runs its callbacks *without* a logcontext.
"""
queue = self._event_persist_queues.setdefault(room_id, deque())
if queue:
# if the last item in the queue has the same `backfilled` setting,
# we can just add these new events to that item.
end_item = queue[-1]
if end_item.backfilled == backfilled:
end_item.events_and_contexts.extend(events_and_contexts)
return end_item.deferred.observe()
deferred = ObservableDeferred(defer.Deferred(), consumeErrors=True)
queue.append(
self._EventPersistQueueItem(
events_and_contexts=events_and_contexts,
backfilled=backfilled,
deferred=deferred,
)
)
return deferred.observe()
def handle_queue(self, room_id, per_item_callback):
"""Attempts to handle the queue for a room if not already being handled.
The given callback will be invoked with for each item in the queue,
of type _EventPersistQueueItem. The per_item_callback will continuously
be called with new items, unless the queue becomnes empty. The return
value of the function will be given to the deferreds waiting on the item,
exceptions will be passed to the deferreds as well.
This function should therefore be called whenever anything is added
to the queue.
If another callback is currently handling the queue then it will not be
invoked.
"""
if room_id in self._currently_persisting_rooms:
return
self._currently_persisting_rooms.add(room_id)
@defer.inlineCallbacks
def handle_queue_loop():
try:
queue = self._get_drainining_queue(room_id)
for item in queue:
try:
ret = yield per_item_callback(item)
except Exception:
with PreserveLoggingContext():
item.deferred.errback()
else:
with PreserveLoggingContext():
item.deferred.callback(ret)
finally:
queue = self._event_persist_queues.pop(room_id, None)
if queue:
self._event_persist_queues[room_id] = queue
self._currently_persisting_rooms.discard(room_id)
# set handle_queue_loop off in the background
run_as_background_process("persist_events", handle_queue_loop)
def _get_drainining_queue(self, room_id):
queue = self._event_persist_queues.setdefault(room_id, deque())
try:
while True:
yield queue.popleft()
except IndexError:
# Queue has been drained.
pass
_EventCacheEntry = namedtuple("_EventCacheEntry", ("event", "redacted_event"))
def _retry_on_integrity_error(func):
"""Wraps a database function so that it gets retried on IntegrityError,
with `delete_existing=True` passed in.
Args:
func: function that returns a Deferred and accepts a `delete_existing` arg
"""
@wraps(func)
@defer.inlineCallbacks
def f(self, *args, **kwargs):
try:
res = yield func(self, *args, **kwargs)
except self.database_engine.module.IntegrityError:
logger.exception("IntegrityError, retrying.")
res = yield func(self, *args, delete_existing=True, **kwargs)
defer.returnValue(res)
return f
# inherits from EventFederationStore so that we can call _update_backward_extremities
# and _handle_mult_prev_events (though arguably those could both be moved in here)
class EventsStore(
StateGroupWorkerStore,
EventFederationStore,
EventsWorkerStore,
BackgroundUpdateStore,
):
def __init__(self, db_conn, hs):
super(EventsStore, self).__init__(db_conn, hs)
self._event_persist_queue = _EventPeristenceQueue()
self._state_resolution_handler = hs.get_state_resolution_handler()
# Collect metrics on the number of forward extremities that exist.
# Counter of number of extremities to count
self._current_forward_extremities_amount = c_counter()
BucketCollector(
"synapse_forward_extremities",
lambda: self._current_forward_extremities_amount,
buckets=[1, 2, 3, 5, 7, 10, 15, 20, 50, 100, 200, 500, "+Inf"],
)
# Read the extrems every 60 minutes
def read_forward_extremities():
# run as a background process to make sure that the database transactions
# have a logcontext to report to
return run_as_background_process(
"read_forward_extremities", self._read_forward_extremities
)
hs.get_clock().looping_call(read_forward_extremities, 60 * 60 * 1000)
@defer.inlineCallbacks
def _read_forward_extremities(self):
def fetch(txn):
txn.execute(
"""
select count(*) c from event_forward_extremities
group by room_id
"""
)
return txn.fetchall()
res = yield self.runInteraction("read_forward_extremities", fetch)
self._current_forward_extremities_amount = c_counter(list(x[0] for x in res))
@defer.inlineCallbacks
def persist_events(self, events_and_contexts, backfilled=False):
"""
Write events to the database
Args:
events_and_contexts: list of tuples of (event, context)
backfilled (bool): Whether the results are retrieved from federation
via backfill or not. Used to determine if they're "new" events
which might update the current state etc.
Returns:
Deferred[int]: the stream ordering of the latest persisted event
"""
partitioned = {}
for event, ctx in events_and_contexts:
partitioned.setdefault(event.room_id, []).append((event, ctx))
deferreds = []
for room_id, evs_ctxs in iteritems(partitioned):
d = self._event_persist_queue.add_to_queue(
room_id, evs_ctxs, backfilled=backfilled
)
deferreds.append(d)
for room_id in partitioned:
self._maybe_start_persisting(room_id)
yield make_deferred_yieldable(
defer.gatherResults(deferreds, consumeErrors=True)
)
max_persisted_id = yield self._stream_id_gen.get_current_token()
defer.returnValue(max_persisted_id)
@defer.inlineCallbacks
@log_function
def persist_event(self, event, context, backfilled=False):
"""
Args:
event (EventBase):
context (EventContext):
backfilled (bool):
Returns:
Deferred: resolves to (int, int): the stream ordering of ``event``,
and the stream ordering of the latest persisted event
"""
deferred = self._event_persist_queue.add_to_queue(
event.room_id, [(event, context)], backfilled=backfilled
)
self._maybe_start_persisting(event.room_id)
yield make_deferred_yieldable(deferred)
max_persisted_id = yield self._stream_id_gen.get_current_token()
defer.returnValue((event.internal_metadata.stream_ordering, max_persisted_id))
def _maybe_start_persisting(self, room_id):
@defer.inlineCallbacks
def persisting_queue(item):
with Measure(self._clock, "persist_events"):
yield self._persist_events(
item.events_and_contexts, backfilled=item.backfilled
)
self._event_persist_queue.handle_queue(room_id, persisting_queue)
@_retry_on_integrity_error
@defer.inlineCallbacks
def _persist_events(
self, events_and_contexts, backfilled=False, delete_existing=False
):
"""Persist events to db
Args:
events_and_contexts (list[(EventBase, EventContext)]):
backfilled (bool):
delete_existing (bool):
Returns:
Deferred: resolves when the events have been persisted
"""
if not events_and_contexts:
return
if backfilled:
stream_ordering_manager = self._backfill_id_gen.get_next_mult(
len(events_and_contexts)
)
else:
stream_ordering_manager = self._stream_id_gen.get_next_mult(
len(events_and_contexts)
)
with stream_ordering_manager as stream_orderings:
for (event, context), stream in zip(events_and_contexts, stream_orderings):
event.internal_metadata.stream_ordering = stream
chunks = [
events_and_contexts[x : x + 100]
for x in range(0, len(events_and_contexts), 100)
]
for chunk in chunks:
# We can't easily parallelize these since different chunks
# might contain the same event. :(
# NB: Assumes that we are only persisting events for one room
# at a time.
# map room_id->list[event_ids] giving the new forward
# extremities in each room
new_forward_extremeties = {}
# map room_id->(type,state_key)->event_id tracking the full
# state in each room after adding these events.
# This is simply used to prefill the get_current_state_ids
# cache
current_state_for_room = {}
# map room_id->(to_delete, to_insert) where to_delete is a list
# of type/state keys to remove from current state, and to_insert
# is a map (type,key)->event_id giving the state delta in each
# room
state_delta_for_room = {}
if not backfilled:
with Measure(self._clock, "_calculate_state_and_extrem"):
# Work out the new "current state" for each room.
# We do this by working out what the new extremities are and then
# calculating the state from that.
events_by_room = {}
for event, context in chunk:
events_by_room.setdefault(event.room_id, []).append(
(event, context)
)
for room_id, ev_ctx_rm in iteritems(events_by_room):
latest_event_ids = yield self.get_latest_event_ids_in_room(
room_id
)
new_latest_event_ids = yield self._calculate_new_extremities(
room_id, ev_ctx_rm, latest_event_ids
)
latest_event_ids = set(latest_event_ids)
if new_latest_event_ids == latest_event_ids:
# No change in extremities, so no change in state
continue
# there should always be at least one forward extremity.
# (except during the initial persistence of the send_join
# results, in which case there will be no existing
# extremities, so we'll `continue` above and skip this bit.)
assert new_latest_event_ids, "No forward extremities left!"
new_forward_extremeties[room_id] = new_latest_event_ids
len_1 = (
len(latest_event_ids) == 1
and len(new_latest_event_ids) == 1
)
if len_1:
all_single_prev_not_state = all(
len(event.prev_event_ids()) == 1
and not event.is_state()
for event, ctx in ev_ctx_rm
)
# Don't bother calculating state if they're just
# a long chain of single ancestor non-state events.
if all_single_prev_not_state:
continue
state_delta_counter.inc()
if len(new_latest_event_ids) == 1:
state_delta_single_event_counter.inc()
# This is a fairly handwavey check to see if we could
# have guessed what the delta would have been when
# processing one of these events.
# What we're interested in is if the latest extremities
# were the same when we created the event as they are
# now. When this server creates a new event (as opposed
# to receiving it over federation) it will use the
# forward extremities as the prev_events, so we can
# guess this by looking at the prev_events and checking
# if they match the current forward extremities.
for ev, _ in ev_ctx_rm:
prev_event_ids = set(ev.prev_event_ids())
if latest_event_ids == prev_event_ids:
state_delta_reuse_delta_counter.inc()
break
logger.info("Calculating state delta for room %s", room_id)
with Measure(
self._clock, "persist_events.get_new_state_after_events"
):
res = yield self._get_new_state_after_events(
room_id,
ev_ctx_rm,
latest_event_ids,
new_latest_event_ids,
)
current_state, delta_ids = res
# If either are not None then there has been a change,
# and we need to work out the delta (or use that
# given)
if delta_ids is not None:
# If there is a delta we know that we've
# only added or replaced state, never
# removed keys entirely.
state_delta_for_room[room_id] = ([], delta_ids)
elif current_state is not None:
with Measure(
self._clock, "persist_events.calculate_state_delta"
):
delta = yield self._calculate_state_delta(
room_id, current_state
)
state_delta_for_room[room_id] = delta
# If we have the current_state then lets prefill
# the cache with it.
if current_state is not None:
current_state_for_room[room_id] = current_state
yield self.runInteraction(
"persist_events",
self._persist_events_txn,
events_and_contexts=chunk,
backfilled=backfilled,
delete_existing=delete_existing,
state_delta_for_room=state_delta_for_room,
new_forward_extremeties=new_forward_extremeties,
)
persist_event_counter.inc(len(chunk))
if not backfilled:
# backfilled events have negative stream orderings, so we don't
# want to set the event_persisted_position to that.
synapse.metrics.event_persisted_position.set(
chunk[-1][0].internal_metadata.stream_ordering
)
for event, context in chunk:
if context.app_service:
origin_type = "local"
origin_entity = context.app_service.id
elif self.hs.is_mine_id(event.sender):
origin_type = "local"
origin_entity = "*client*"
else:
origin_type = "remote"
origin_entity = get_domain_from_id(event.sender)
event_counter.labels(event.type, origin_type, origin_entity).inc()
for room_id, new_state in iteritems(current_state_for_room):
self.get_current_state_ids.prefill((room_id,), new_state)
for room_id, latest_event_ids in iteritems(new_forward_extremeties):
self.get_latest_event_ids_in_room.prefill(
(room_id,), list(latest_event_ids)
)
@defer.inlineCallbacks
def _calculate_new_extremities(self, room_id, event_contexts, latest_event_ids):
"""Calculates the new forward extremities for a room given events to
persist.
Assumes that we are only persisting events for one room at a time.
"""
# we're only interested in new events which aren't outliers and which aren't
# being rejected.
new_events = [
event
for event, ctx in event_contexts
if not event.internal_metadata.is_outlier()
and not ctx.rejected
and not event.internal_metadata.is_soft_failed()
]
latest_event_ids = set(latest_event_ids)
# start with the existing forward extremities
result = set(latest_event_ids)
# add all the new events to the list
result.update(event.event_id for event in new_events)
# Now remove all events which are prev_events of any of the new events
result.difference_update(
e_id for event in new_events for e_id in event.prev_event_ids()
)
# Remove any events which are prev_events of any existing events.
existing_prevs = yield self._get_events_which_are_prevs(result)
result.difference_update(existing_prevs)
# Finally handle the case where the new events have soft-failed prev
# events. If they do we need to remove them and their prev events,
# otherwise we end up with dangling extremities.
existing_prevs = yield self._get_prevs_before_rejected(
e_id for event in new_events for e_id in event.prev_event_ids()
)
result.difference_update(existing_prevs)
# We only update metrics for events that change forward extremities
# (e.g. we ignore backfill/outliers/etc)
if result != latest_event_ids:
forward_extremities_counter.observe(len(result))
stale = latest_event_ids & result
stale_forward_extremities_counter.observe(len(stale))
defer.returnValue(result)
@defer.inlineCallbacks
def _get_events_which_are_prevs(self, event_ids):
"""Filter the supplied list of event_ids to get those which are prev_events of
existing (non-outlier/rejected) events.
Args:
event_ids (Iterable[str]): event ids to filter
Returns:
Deferred[List[str]]: filtered event ids
"""
results = []
def _get_events_which_are_prevs_txn(txn, batch):
sql = """
SELECT prev_event_id, internal_metadata
FROM event_edges
INNER JOIN events USING (event_id)
LEFT JOIN rejections USING (event_id)
LEFT JOIN event_json USING (event_id)
WHERE
prev_event_id IN (%s)
AND NOT events.outlier
AND rejections.event_id IS NULL
""" % (
",".join("?" for _ in batch),
)
txn.execute(sql, batch)
results.extend(r[0] for r in txn if not json.loads(r[1]).get("soft_failed"))
for chunk in batch_iter(event_ids, 100):
yield self.runInteraction(
"_get_events_which_are_prevs", _get_events_which_are_prevs_txn, chunk
)
defer.returnValue(results)
@defer.inlineCallbacks
def _get_prevs_before_rejected(self, event_ids):
"""Get soft-failed ancestors to remove from the extremities.
Given a set of events, find all those that have been soft-failed or
rejected. Returns those soft failed/rejected events and their prev
events (whether soft-failed/rejected or not), and recurses up the
prev-event graph until it finds no more soft-failed/rejected events.
This is used to find extremities that are ancestors of new events, but
are separated by soft failed events.
Args:
event_ids (Iterable[str]): Events to find prev events for. Note
that these must have already been persisted.
Returns:
Deferred[set[str]]
"""
# The set of event_ids to return. This includes all soft-failed events
# and their prev events.
existing_prevs = set()
def _get_prevs_before_rejected_txn(txn, batch):
to_recursively_check = batch
while to_recursively_check:
sql = """
SELECT
event_id, prev_event_id, internal_metadata,
rejections.event_id IS NOT NULL
FROM event_edges
INNER JOIN events USING (event_id)
LEFT JOIN rejections USING (event_id)
LEFT JOIN event_json USING (event_id)
WHERE
event_id IN (%s)
AND NOT events.outlier
""" % (
",".join("?" for _ in to_recursively_check),
)
txn.execute(sql, to_recursively_check)
to_recursively_check = []
for event_id, prev_event_id, metadata, rejected in txn:
if prev_event_id in existing_prevs:
continue
soft_failed = json.loads(metadata).get("soft_failed")
if soft_failed or rejected:
to_recursively_check.append(prev_event_id)
existing_prevs.add(prev_event_id)
for chunk in batch_iter(event_ids, 100):
yield self.runInteraction(
"_get_prevs_before_rejected", _get_prevs_before_rejected_txn, chunk
)
defer.returnValue(existing_prevs)
@defer.inlineCallbacks
def _get_new_state_after_events(
self, room_id, events_context, old_latest_event_ids, new_latest_event_ids
):
"""Calculate the current state dict after adding some new events to
a room
Args:
room_id (str):
room to which the events are being added. Used for logging etc
events_context (list[(EventBase, EventContext)]):
events and contexts which are being added to the room
old_latest_event_ids (iterable[str]):
the old forward extremities for the room.
new_latest_event_ids (iterable[str]):
the new forward extremities for the room.
Returns:
Deferred[tuple[dict[(str,str), str]|None, dict[(str,str), str]|None]]:
Returns a tuple of two state maps, the first being the full new current
state and the second being the delta to the existing current state.
If both are None then there has been no change.
If there has been a change then we only return the delta if its
already been calculated. Conversely if we do know the delta then
the new current state is only returned if we've already calculated
it.
"""
# map from state_group to ((type, key) -> event_id) state map
state_groups_map = {}
# Map from (prev state group, new state group) -> delta state dict
state_group_deltas = {}
for ev, ctx in events_context:
if ctx.state_group is None:
# This should only happen for outlier events.
if not ev.internal_metadata.is_outlier():
raise Exception(
"Context for new event %s has no state "
"group" % (ev.event_id,)
)
continue
if ctx.state_group in state_groups_map:
continue
# We're only interested in pulling out state that has already
# been cached in the context. We'll pull stuff out of the DB later
# if necessary.
current_state_ids = ctx.get_cached_current_state_ids()
if current_state_ids is not None:
state_groups_map[ctx.state_group] = current_state_ids
if ctx.prev_group:
state_group_deltas[(ctx.prev_group, ctx.state_group)] = ctx.delta_ids
# We need to map the event_ids to their state groups. First, let's
# check if the event is one we're persisting, in which case we can
# pull the state group from its context.
# Otherwise we need to pull the state group from the database.
# Set of events we need to fetch groups for. (We know none of the old
# extremities are going to be in events_context).
missing_event_ids = set(old_latest_event_ids)
event_id_to_state_group = {}
for event_id in new_latest_event_ids:
# First search in the list of new events we're adding.
for ev, ctx in events_context:
if event_id == ev.event_id and ctx.state_group is not None:
event_id_to_state_group[event_id] = ctx.state_group
break
else:
# If we couldn't find it, then we'll need to pull
# the state from the database
missing_event_ids.add(event_id)
if missing_event_ids:
# Now pull out the state groups for any missing events from DB
event_to_groups = yield self._get_state_group_for_events(missing_event_ids)
event_id_to_state_group.update(event_to_groups)
# State groups of old_latest_event_ids
old_state_groups = set(
event_id_to_state_group[evid] for evid in old_latest_event_ids
)
# State groups of new_latest_event_ids
new_state_groups = set(
event_id_to_state_group[evid] for evid in new_latest_event_ids
)
# If they old and new groups are the same then we don't need to do
# anything.
if old_state_groups == new_state_groups:
defer.returnValue((None, None))
if len(new_state_groups) == 1 and len(old_state_groups) == 1:
# If we're going from one state group to another, lets check if
# we have a delta for that transition. If we do then we can just
# return that.
new_state_group = next(iter(new_state_groups))
old_state_group = next(iter(old_state_groups))
delta_ids = state_group_deltas.get((old_state_group, new_state_group), None)
if delta_ids is not None:
# We have a delta from the existing to new current state,
# so lets just return that. If we happen to already have
# the current state in memory then lets also return that,
# but it doesn't matter if we don't.
new_state = state_groups_map.get(new_state_group)
defer.returnValue((new_state, delta_ids))
# Now that we have calculated new_state_groups we need to get
# their state IDs so we can resolve to a single state set.
missing_state = new_state_groups - set(state_groups_map)
if missing_state:
group_to_state = yield self._get_state_for_groups(missing_state)
state_groups_map.update(group_to_state)
if len(new_state_groups) == 1:
# If there is only one state group, then we know what the current
# state is.
defer.returnValue((state_groups_map[new_state_groups.pop()], None))
# Ok, we need to defer to the state handler to resolve our state sets.
state_groups = {sg: state_groups_map[sg] for sg in new_state_groups}
events_map = {ev.event_id: ev for ev, _ in events_context}
# We need to get the room version, which is in the create event.
# Normally that'd be in the database, but its also possible that we're
# currently trying to persist it.
room_version = None
for ev, _ in events_context:
if ev.type == EventTypes.Create and ev.state_key == "":
room_version = ev.content.get("room_version", "1")
break
if not room_version:
room_version = yield self.get_room_version(room_id)
logger.debug("calling resolve_state_groups from preserve_events")
res = yield self._state_resolution_handler.resolve_state_groups(
room_id,
room_version,
state_groups,
events_map,
state_res_store=StateResolutionStore(self),
)
defer.returnValue((res.state, None))
@defer.inlineCallbacks
def _calculate_state_delta(self, room_id, current_state):
"""Calculate the new state deltas for a room.
Assumes that we are only persisting events for one room at a time.
Returns:
tuple[list, dict] (to_delete, to_insert): where to_delete are the
type/state_keys to remove from current_state_events and `to_insert`
are the updates to current_state_events.
"""
existing_state = yield self.get_current_state_ids(room_id)
to_delete = [key for key in existing_state if key not in current_state]
to_insert = {
key: ev_id
for key, ev_id in iteritems(current_state)
if ev_id != existing_state.get(key)
}
defer.returnValue((to_delete, to_insert))
@log_function
def _persist_events_txn(
self,
txn,
events_and_contexts,
backfilled,
delete_existing=False,
state_delta_for_room={},
new_forward_extremeties={},
):
"""Insert some number of room events into the necessary database tables.
Rejected events are only inserted into the events table, the events_json table,
and the rejections table. Things reading from those table will need to check
whether the event was rejected.
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
events_and_contexts (list[(EventBase, EventContext)]):
events to persist
backfilled (bool): True if the events were backfilled
delete_existing (bool): True to purge existing table rows for the
events from the database. This is useful when retrying due to
IntegrityError.
state_delta_for_room (dict[str, (list, dict)]):
The current-state delta for each room. For each room, a tuple
(to_delete, to_insert), being a list of type/state keys to be
removed from the current state, and a state set to be added to
the current state.
new_forward_extremeties (dict[str, list[str]]):
The new forward extremities for each room. For each room, a
list of the event ids which are the forward extremities.
"""
all_events_and_contexts = events_and_contexts
min_stream_order = events_and_contexts[0][0].internal_metadata.stream_ordering
max_stream_order = events_and_contexts[-1][0].internal_metadata.stream_ordering
self._update_current_state_txn(txn, state_delta_for_room, min_stream_order)
self._update_forward_extremities_txn(
txn,
new_forward_extremities=new_forward_extremeties,
max_stream_order=max_stream_order,
)
# Ensure that we don't have the same event twice.
events_and_contexts = self._filter_events_and_contexts_for_duplicates(
events_and_contexts
)
self._update_room_depths_txn(
txn, events_and_contexts=events_and_contexts, backfilled=backfilled
)
# _update_outliers_txn filters out any events which have already been
# persisted, and returns the filtered list.
events_and_contexts = self._update_outliers_txn(
txn, events_and_contexts=events_and_contexts
)
# From this point onwards the events are only events that we haven't
# seen before.
if delete_existing:
# For paranoia reasons, we go and delete all the existing entries
# for these events so we can reinsert them.
# This gets around any problems with some tables already having
# entries.
self._delete_existing_rows_txn(txn, events_and_contexts=events_and_contexts)
self._store_event_txn(txn, events_and_contexts=events_and_contexts)
# Insert into event_to_state_groups.
self._store_event_state_mappings_txn(txn, events_and_contexts)
# We want to store event_auth mappings for rejected events, as they're
# used in state res v2.
# This is only necessary if the rejected event appears in an accepted
# event's auth chain, but its easier for now just to store them (and
# it doesn't take much storage compared to storing the entire event
# anyway).
self._simple_insert_many_txn(
txn,
table="event_auth",
values=[
{
"event_id": event.event_id,
"room_id": event.room_id,
"auth_id": auth_id,
}
for event, _ in events_and_contexts
for auth_id in event.auth_event_ids()
if event.is_state()
],
)
# _store_rejected_events_txn filters out any events which were
# rejected, and returns the filtered list.
events_and_contexts = self._store_rejected_events_txn(
txn, events_and_contexts=events_and_contexts
)
# From this point onwards the events are only ones that weren't
# rejected.
self._update_metadata_tables_txn(
txn,
events_and_contexts=events_and_contexts,
all_events_and_contexts=all_events_and_contexts,
backfilled=backfilled,
)
def _update_current_state_txn(self, txn, state_delta_by_room, stream_id):
for room_id, current_state_tuple in iteritems(state_delta_by_room):
to_delete, to_insert = current_state_tuple
# First we add entries to the current_state_delta_stream. We
# do this before updating the current_state_events table so
# that we can use it to calculate the `prev_event_id`. (This
# allows us to not have to pull out the existing state
# unnecessarily).
#
# The stream_id for the update is chosen to be the minimum of the stream_ids
# for the batch of the events that we are persisting; that means we do not
# end up in a situation where workers see events before the
# current_state_delta updates.
#
sql = """
INSERT INTO current_state_delta_stream
(stream_id, room_id, type, state_key, event_id, prev_event_id)
SELECT ?, ?, ?, ?, ?, (
SELECT event_id FROM current_state_events
WHERE room_id = ? AND type = ? AND state_key = ?
)
"""
txn.executemany(
sql,
(
(
stream_id,
room_id,
etype,
state_key,
None,
room_id,
etype,
state_key,
)
for etype, state_key in to_delete
# We sanity check that we're deleting rather than updating
if (etype, state_key) not in to_insert
),
)
txn.executemany(
sql,
(
(
stream_id,
room_id,
etype,
state_key,
ev_id,
room_id,
etype,
state_key,
)
for (etype, state_key), ev_id in iteritems(to_insert)
),
)
# Now we actually update the current_state_events table
txn.executemany(
"DELETE FROM current_state_events"
" WHERE room_id = ? AND type = ? AND state_key = ?",
(
(room_id, etype, state_key)
for etype, state_key in itertools.chain(to_delete, to_insert)
),
)
self._simple_insert_many_txn(
txn,
table="current_state_events",
values=[
{
"event_id": ev_id,
"room_id": room_id,
"type": key[0],
"state_key": key[1],
}
for key, ev_id in iteritems(to_insert)
],
)
txn.call_after(
self._curr_state_delta_stream_cache.entity_has_changed,
room_id,
stream_id,
)
# Invalidate the various caches
# Figure out the changes of membership to invalidate the
# `get_rooms_for_user` cache.
# We find out which membership events we may have deleted
# and which we have added, then we invlidate the caches for all
# those users.
members_changed = set(
state_key
for ev_type, state_key in itertools.chain(to_delete, to_insert)
if ev_type == EventTypes.Member
)
for member in members_changed:
txn.call_after(
self.get_rooms_for_user_with_stream_ordering.invalidate, (member,)
)
self._invalidate_state_caches_and_stream(txn, room_id, members_changed)
def _update_forward_extremities_txn(
self, txn, new_forward_extremities, max_stream_order
):
for room_id, new_extrem in iteritems(new_forward_extremities):
self._simple_delete_txn(
txn, table="event_forward_extremities", keyvalues={"room_id": room_id}
)
txn.call_after(self.get_latest_event_ids_in_room.invalidate, (room_id,))
self._simple_insert_many_txn(
txn,
table="event_forward_extremities",
values=[
{"event_id": ev_id, "room_id": room_id}
for room_id, new_extrem in iteritems(new_forward_extremities)
for ev_id in new_extrem
],
)
# We now insert into stream_ordering_to_exterm a mapping from room_id,
# new stream_ordering to new forward extremeties in the room.
# This allows us to later efficiently look up the forward extremeties
# for a room before a given stream_ordering
self._simple_insert_many_txn(
txn,
table="stream_ordering_to_exterm",
values=[
{
"room_id": room_id,
"event_id": event_id,
"stream_ordering": max_stream_order,
}
for room_id, new_extrem in iteritems(new_forward_extremities)
for event_id in new_extrem
],
)
@classmethod
def _filter_events_and_contexts_for_duplicates(cls, events_and_contexts):
"""Ensure that we don't have the same event twice.
Pick the earliest non-outlier if there is one, else the earliest one.
Args:
events_and_contexts (list[(EventBase, EventContext)]):
Returns:
list[(EventBase, EventContext)]: filtered list
"""
new_events_and_contexts = OrderedDict()
for event, context in events_and_contexts:
prev_event_context = new_events_and_contexts.get(event.event_id)
if prev_event_context:
if not event.internal_metadata.is_outlier():
if prev_event_context[0].internal_metadata.is_outlier():
# To ensure correct ordering we pop, as OrderedDict is
# ordered by first insertion.
new_events_and_contexts.pop(event.event_id, None)
new_events_and_contexts[event.event_id] = (event, context)
else:
new_events_and_contexts[event.event_id] = (event, context)
return list(new_events_and_contexts.values())
def _update_room_depths_txn(self, txn, events_and_contexts, backfilled):
"""Update min_depth for each room
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
events_and_contexts (list[(EventBase, EventContext)]): events
we are persisting
backfilled (bool): True if the events were backfilled
"""
depth_updates = {}
for event, context in events_and_contexts:
# Remove the any existing cache entries for the event_ids
txn.call_after(self._invalidate_get_event_cache, event.event_id)
if not backfilled:
txn.call_after(
self._events_stream_cache.entity_has_changed,
event.room_id,
event.internal_metadata.stream_ordering,
)
if not event.internal_metadata.is_outlier() and not context.rejected:
depth_updates[event.room_id] = max(
event.depth, depth_updates.get(event.room_id, event.depth)
)
for room_id, depth in iteritems(depth_updates):
self._update_min_depth_for_room_txn(txn, room_id, depth)
def _update_outliers_txn(self, txn, events_and_contexts):
"""Update any outliers with new event info.
This turns outliers into ex-outliers (unless the new event was
rejected).
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
events_and_contexts (list[(EventBase, EventContext)]): events
we are persisting
Returns:
list[(EventBase, EventContext)] new list, without events which
are already in the events table.
"""
txn.execute(
"SELECT event_id, outlier FROM events WHERE event_id in (%s)"
% (",".join(["?"] * len(events_and_contexts)),),
[event.event_id for event, _ in events_and_contexts],
)
have_persisted = {event_id: outlier for event_id, outlier in txn}
to_remove = set()
for event, context in events_and_contexts:
if event.event_id not in have_persisted:
continue
to_remove.add(event)
if context.rejected:
# If the event is rejected then we don't care if the event
# was an outlier or not.
continue
outlier_persisted = have_persisted[event.event_id]
if not event.internal_metadata.is_outlier() and outlier_persisted:
# We received a copy of an event that we had already stored as
# an outlier in the database. We now have some state at that
# so we need to update the state_groups table with that state.
# insert into event_to_state_groups.
try:
self._store_event_state_mappings_txn(txn, ((event, context),))
except Exception:
logger.exception("")
raise
metadata_json = encode_json(event.internal_metadata.get_dict())
sql = (
"UPDATE event_json SET internal_metadata = ?" " WHERE event_id = ?"
)
txn.execute(sql, (metadata_json, event.event_id))
# Add an entry to the ex_outlier_stream table to replicate the
# change in outlier status to our workers.
stream_order = event.internal_metadata.stream_ordering
state_group_id = context.state_group
self._simple_insert_txn(
txn,
table="ex_outlier_stream",
values={
"event_stream_ordering": stream_order,
"event_id": event.event_id,
"state_group": state_group_id,
},
)
sql = "UPDATE events SET outlier = ?" " WHERE event_id = ?"
txn.execute(sql, (False, event.event_id))
# Update the event_backward_extremities table now that this
# event isn't an outlier any more.
self._update_backward_extremeties(txn, [event])
return [ec for ec in events_and_contexts if ec[0] not in to_remove]
@classmethod
def _delete_existing_rows_txn(cls, txn, events_and_contexts):
if not events_and_contexts:
# nothing to do here
return
logger.info("Deleting existing")
for table in (
"events",
"event_auth",
"event_json",
"event_edges",
"event_forward_extremities",
"event_reference_hashes",
"event_search",
"event_to_state_groups",
"guest_access",
"history_visibility",
"local_invites",
"room_names",
"state_events",
"rejections",
"redactions",
"room_memberships",
"topics",
):
txn.executemany(
"DELETE FROM %s WHERE event_id = ?" % (table,),
[(ev.event_id,) for ev, _ in events_and_contexts],
)
for table in ("event_push_actions",):
txn.executemany(
"DELETE FROM %s WHERE room_id = ? AND event_id = ?" % (table,),
[(ev.room_id, ev.event_id) for ev, _ in events_and_contexts],
)
def _store_event_txn(self, txn, events_and_contexts):
"""Insert new events into the event and event_json tables
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
events_and_contexts (list[(EventBase, EventContext)]): events
we are persisting
"""
if not events_and_contexts:
# nothing to do here
return
def event_dict(event):
d = event.get_dict()
d.pop("redacted", None)
d.pop("redacted_because", None)
return d
self._simple_insert_many_txn(
txn,
table="event_json",
values=[
{
"event_id": event.event_id,
"room_id": event.room_id,
"internal_metadata": encode_json(
event.internal_metadata.get_dict()
),
"json": encode_json(event_dict(event)),
"format_version": event.format_version,
}
for event, _ in events_and_contexts
],
)
self._simple_insert_many_txn(
txn,
table="events",
values=[
{
"stream_ordering": event.internal_metadata.stream_ordering,
"topological_ordering": event.depth,
"depth": event.depth,
"event_id": event.event_id,
"room_id": event.room_id,
"type": event.type,
"processed": True,
"outlier": event.internal_metadata.is_outlier(),
"origin_server_ts": int(event.origin_server_ts),
"received_ts": self._clock.time_msec(),
"sender": event.sender,
"contains_url": (
"url" in event.content
and isinstance(event.content["url"], text_type)
),
}
for event, _ in events_and_contexts
],
)
def _store_rejected_events_txn(self, txn, events_and_contexts):
"""Add rows to the 'rejections' table for received events which were
rejected
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
events_and_contexts (list[(EventBase, EventContext)]): events
we are persisting
Returns:
list[(EventBase, EventContext)] new list, without the rejected
events.
"""
# Remove the rejected events from the list now that we've added them
# to the events table and the events_json table.
to_remove = set()
for event, context in events_and_contexts:
if context.rejected:
# Insert the event_id into the rejections table
self._store_rejections_txn(txn, event.event_id, context.rejected)
to_remove.add(event)
return [ec for ec in events_and_contexts if ec[0] not in to_remove]
def _update_metadata_tables_txn(
self, txn, events_and_contexts, all_events_and_contexts, backfilled
):
"""Update all the miscellaneous tables for new events
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
events_and_contexts (list[(EventBase, EventContext)]): events
we are persisting
all_events_and_contexts (list[(EventBase, EventContext)]): all
events that we were going to persist. This includes events
we've already persisted, etc, that wouldn't appear in
events_and_context.
backfilled (bool): True if the events were backfilled
"""
# Insert all the push actions into the event_push_actions table.
self._set_push_actions_for_event_and_users_txn(
txn,
events_and_contexts=events_and_contexts,
all_events_and_contexts=all_events_and_contexts,
)
if not events_and_contexts:
# nothing to do here
return
for event, context in events_and_contexts:
if event.type == EventTypes.Redaction and event.redacts is not None:
# Remove the entries in the event_push_actions table for the
# redacted event.
self._remove_push_actions_for_event_id_txn(
txn, event.room_id, event.redacts
)
# Remove from relations table.
self._handle_redaction(txn, event.redacts)
# Update the event_forward_extremities, event_backward_extremities and
# event_edges tables.
self._handle_mult_prev_events(
txn, events=[event for event, _ in events_and_contexts]
)
for event, _ in events_and_contexts:
if event.type == EventTypes.Name:
# Insert into the room_names and event_search tables.
self._store_room_name_txn(txn, event)
elif event.type == EventTypes.Topic:
# Insert into the topics table and event_search table.
self._store_room_topic_txn(txn, event)
elif event.type == EventTypes.Message:
# Insert into the event_search table.
self._store_room_message_txn(txn, event)
elif event.type == EventTypes.Redaction:
# Insert into the redactions table.
self._store_redaction(txn, event)
elif event.type == EventTypes.RoomHistoryVisibility:
# Insert into the event_search table.
self._store_history_visibility_txn(txn, event)
elif event.type == EventTypes.GuestAccess:
# Insert into the event_search table.
self._store_guest_access_txn(txn, event)
self._handle_event_relations(txn, event)
# Insert into the room_memberships table.
self._store_room_members_txn(
txn,
[
event
for event, _ in events_and_contexts
if event.type == EventTypes.Member
],
backfilled=backfilled,
)
# Insert event_reference_hashes table.
self._store_event_reference_hashes_txn(
txn, [event for event, _ in events_and_contexts]
)
state_events_and_contexts = [
ec for ec in events_and_contexts if ec[0].is_state()
]
state_values = []
for event, context in state_events_and_contexts:
vals = {
"event_id": event.event_id,
"room_id": event.room_id,
"type": event.type,
"state_key": event.state_key,
}
# TODO: How does this work with backfilling?
if hasattr(event, "replaces_state"):
vals["prev_state"] = event.replaces_state
state_values.append(vals)
self._simple_insert_many_txn(txn, table="state_events", values=state_values)
# Prefill the event cache
self._add_to_cache(txn, events_and_contexts)
def _add_to_cache(self, txn, events_and_contexts):
to_prefill = []
rows = []
N = 200
for i in range(0, len(events_and_contexts), N):
ev_map = {e[0].event_id: e[0] for e in events_and_contexts[i : i + N]}
if not ev_map:
break
sql = (
"SELECT "
" e.event_id as event_id, "
" r.redacts as redacts,"
" rej.event_id as rejects "
" FROM events as e"
" LEFT JOIN rejections as rej USING (event_id)"
" LEFT JOIN redactions as r ON e.event_id = r.redacts"
" WHERE e.event_id IN (%s)"
) % (",".join(["?"] * len(ev_map)),)
txn.execute(sql, list(ev_map))
rows = self.cursor_to_dict(txn)
for row in rows:
event = ev_map[row["event_id"]]
if not row["rejects"] and not row["redacts"]:
to_prefill.append(
_EventCacheEntry(event=event, redacted_event=None)
)
def prefill():
for cache_entry in to_prefill:
self._get_event_cache.prefill((cache_entry[0].event_id,), cache_entry)
txn.call_after(prefill)
def _store_redaction(self, txn, event):
# invalidate the cache for the redacted event
txn.call_after(self._invalidate_get_event_cache, event.redacts)
txn.execute(
"INSERT INTO redactions (event_id, redacts) VALUES (?,?)",
(event.event_id, event.redacts),
)
@defer.inlineCallbacks
def count_daily_messages(self):
"""
Returns an estimate of the number of messages sent in the last day.
If it has been significantly less or more than one day since the last
call to this function, it will return None.
"""
def _count_messages(txn):
sql = """
SELECT COALESCE(COUNT(*), 0) FROM events
WHERE type = 'm.room.message'
AND stream_ordering > ?
"""
txn.execute(sql, (self.stream_ordering_day_ago,))
count, = txn.fetchone()
return count
ret = yield self.runInteraction("count_messages", _count_messages)
defer.returnValue(ret)
@defer.inlineCallbacks
def count_daily_sent_messages(self):
def _count_messages(txn):
# This is good enough as if you have silly characters in your own
# hostname then thats your own fault.
like_clause = "%:" + self.hs.hostname
sql = """
SELECT COALESCE(COUNT(*), 0) FROM events
WHERE type = 'm.room.message'
AND sender LIKE ?
AND stream_ordering > ?
"""
txn.execute(sql, (like_clause, self.stream_ordering_day_ago))
count, = txn.fetchone()
return count
ret = yield self.runInteraction("count_daily_sent_messages", _count_messages)
defer.returnValue(ret)
@defer.inlineCallbacks
def count_daily_active_rooms(self):
def _count(txn):
sql = """
SELECT COALESCE(COUNT(DISTINCT room_id), 0) FROM events
WHERE type = 'm.room.message'
AND stream_ordering > ?
"""
txn.execute(sql, (self.stream_ordering_day_ago,))
count, = txn.fetchone()
return count
ret = yield self.runInteraction("count_daily_active_rooms", _count)
defer.returnValue(ret)
def get_current_backfill_token(self):
"""The current minimum token that backfilled events have reached"""
return -self._backfill_id_gen.get_current_token()
def get_current_events_token(self):
"""The current maximum token that events have reached"""
return self._stream_id_gen.get_current_token()
def get_all_new_forward_event_rows(self, last_id, current_id, limit):
if last_id == current_id:
return defer.succeed([])
def get_all_new_forward_event_rows(txn):
sql = (
"SELECT e.stream_ordering, e.event_id, e.room_id, e.type,"
" state_key, redacts, relates_to_id"
" FROM events AS e"
" LEFT JOIN redactions USING (event_id)"
" LEFT JOIN state_events USING (event_id)"
" LEFT JOIN event_relations USING (event_id)"
" WHERE ? < stream_ordering AND stream_ordering <= ?"
" ORDER BY stream_ordering ASC"
" LIMIT ?"
)
txn.execute(sql, (last_id, current_id, limit))
new_event_updates = txn.fetchall()
if len(new_event_updates) == limit:
upper_bound = new_event_updates[-1][0]
else:
upper_bound = current_id
sql = (
"SELECT event_stream_ordering, e.event_id, e.room_id, e.type,"
" state_key, redacts, relates_to_id"
" FROM events AS e"
" INNER JOIN ex_outlier_stream USING (event_id)"
" LEFT JOIN redactions USING (event_id)"
" LEFT JOIN state_events USING (event_id)"
" LEFT JOIN event_relations USING (event_id)"
" WHERE ? < event_stream_ordering"
" AND event_stream_ordering <= ?"
" ORDER BY event_stream_ordering DESC"
)
txn.execute(sql, (last_id, upper_bound))
new_event_updates.extend(txn)
return new_event_updates
return self.runInteraction(
"get_all_new_forward_event_rows", get_all_new_forward_event_rows
)
def get_all_new_backfill_event_rows(self, last_id, current_id, limit):
if last_id == current_id:
return defer.succeed([])
def get_all_new_backfill_event_rows(txn):
sql = (
"SELECT -e.stream_ordering, e.event_id, e.room_id, e.type,"
" state_key, redacts, relates_to_id"
" FROM events AS e"
" LEFT JOIN redactions USING (event_id)"
" LEFT JOIN state_events USING (event_id)"
" LEFT JOIN event_relations USING (event_id)"
" WHERE ? > stream_ordering AND stream_ordering >= ?"
" ORDER BY stream_ordering ASC"
" LIMIT ?"
)
txn.execute(sql, (-last_id, -current_id, limit))
new_event_updates = txn.fetchall()
if len(new_event_updates) == limit:
upper_bound = new_event_updates[-1][0]
else:
upper_bound = current_id
sql = (
"SELECT -event_stream_ordering, e.event_id, e.room_id, e.type,"
" state_key, redacts, relates_to_id"
" FROM events AS e"
" INNER JOIN ex_outlier_stream USING (event_id)"
" LEFT JOIN redactions USING (event_id)"
" LEFT JOIN state_events USING (event_id)"
" LEFT JOIN event_relations USING (event_id)"
" WHERE ? > event_stream_ordering"
" AND event_stream_ordering >= ?"
" ORDER BY event_stream_ordering DESC"
)
txn.execute(sql, (-last_id, -upper_bound))
new_event_updates.extend(txn.fetchall())
return new_event_updates
return self.runInteraction(
"get_all_new_backfill_event_rows", get_all_new_backfill_event_rows
)
@cached(num_args=5, max_entries=10)
def get_all_new_events(
self,
last_backfill_id,
last_forward_id,
current_backfill_id,
current_forward_id,
limit,
):
"""Get all the new events that have arrived at the server either as
new events or as backfilled events"""
have_backfill_events = last_backfill_id != current_backfill_id
have_forward_events = last_forward_id != current_forward_id
if not have_backfill_events and not have_forward_events:
return defer.succeed(AllNewEventsResult([], [], [], [], []))
def get_all_new_events_txn(txn):
sql = (
"SELECT e.stream_ordering, e.event_id, e.room_id, e.type,"
" state_key, redacts"
" FROM events AS e"
" LEFT JOIN redactions USING (event_id)"
" LEFT JOIN state_events USING (event_id)"
" WHERE ? < stream_ordering AND stream_ordering <= ?"
" ORDER BY stream_ordering ASC"
" LIMIT ?"
)
if have_forward_events:
txn.execute(sql, (last_forward_id, current_forward_id, limit))
new_forward_events = txn.fetchall()
if len(new_forward_events) == limit:
upper_bound = new_forward_events[-1][0]
else:
upper_bound = current_forward_id
sql = (
"SELECT event_stream_ordering, event_id, state_group"
" FROM ex_outlier_stream"
" WHERE ? > event_stream_ordering"
" AND event_stream_ordering >= ?"
" ORDER BY event_stream_ordering DESC"
)
txn.execute(sql, (last_forward_id, upper_bound))
forward_ex_outliers = txn.fetchall()
else:
new_forward_events = []
forward_ex_outliers = []
sql = (
"SELECT -e.stream_ordering, e.event_id, e.room_id, e.type,"
" state_key, redacts"
" FROM events AS e"
" LEFT JOIN redactions USING (event_id)"
" LEFT JOIN state_events USING (event_id)"
" WHERE ? > stream_ordering AND stream_ordering >= ?"
" ORDER BY stream_ordering DESC"
" LIMIT ?"
)
if have_backfill_events:
txn.execute(sql, (-last_backfill_id, -current_backfill_id, limit))
new_backfill_events = txn.fetchall()
if len(new_backfill_events) == limit:
upper_bound = new_backfill_events[-1][0]
else:
upper_bound = current_backfill_id
sql = (
"SELECT -event_stream_ordering, event_id, state_group"
" FROM ex_outlier_stream"
" WHERE ? > event_stream_ordering"
" AND event_stream_ordering >= ?"
" ORDER BY event_stream_ordering DESC"
)
txn.execute(sql, (-last_backfill_id, -upper_bound))
backward_ex_outliers = txn.fetchall()
else:
new_backfill_events = []
backward_ex_outliers = []
return AllNewEventsResult(
new_forward_events,
new_backfill_events,
forward_ex_outliers,
backward_ex_outliers,
)
return self.runInteraction("get_all_new_events", get_all_new_events_txn)
def purge_history(self, room_id, token, delete_local_events):
"""Deletes room history before a certain point
Args:
room_id (str):
token (str): A topological token to delete events before
delete_local_events (bool):
if True, we will delete local events as well as remote ones
(instead of just marking them as outliers and deleting their
state groups).
"""
return self.runInteraction(
"purge_history",
self._purge_history_txn,
room_id,
token,
delete_local_events,
)
def _purge_history_txn(self, txn, room_id, token_str, delete_local_events):
token = RoomStreamToken.parse(token_str)
# Tables that should be pruned:
# event_auth
# event_backward_extremities
# event_edges
# event_forward_extremities
# event_json
# event_push_actions
# event_reference_hashes
# event_search
# event_to_state_groups
# events
# rejections
# room_depth
# state_groups
# state_groups_state
# we will build a temporary table listing the events so that we don't
# have to keep shovelling the list back and forth across the
# connection. Annoyingly the python sqlite driver commits the
# transaction on CREATE, so let's do this first.
#
# furthermore, we might already have the table from a previous (failed)
# purge attempt, so let's drop the table first.
txn.execute("DROP TABLE IF EXISTS events_to_purge")
txn.execute(
"CREATE TEMPORARY TABLE events_to_purge ("
" event_id TEXT NOT NULL,"
" should_delete BOOLEAN NOT NULL"
")"
)
# First ensure that we're not about to delete all the forward extremeties
txn.execute(
"SELECT e.event_id, e.depth FROM events as e "
"INNER JOIN event_forward_extremities as f "
"ON e.event_id = f.event_id "
"AND e.room_id = f.room_id "
"WHERE f.room_id = ?",
(room_id,),
)
rows = txn.fetchall()
max_depth = max(row[1] for row in rows)
if max_depth < token.topological:
# We need to ensure we don't delete all the events from the database
# otherwise we wouldn't be able to send any events (due to not
# having any backwards extremeties)
raise SynapseError(
400, "topological_ordering is greater than forward extremeties"
)
logger.info("[purge] looking for events to delete")
should_delete_expr = "state_key IS NULL"
should_delete_params = ()
if not delete_local_events:
should_delete_expr += " AND event_id NOT LIKE ?"
# We include the parameter twice since we use the expression twice
should_delete_params += ("%:" + self.hs.hostname, "%:" + self.hs.hostname)
should_delete_params += (room_id, token.topological)
# Note that we insert events that are outliers and aren't going to be
# deleted, as nothing will happen to them.
txn.execute(
"INSERT INTO events_to_purge"
" SELECT event_id, %s"
" FROM events AS e LEFT JOIN state_events USING (event_id)"
" WHERE (NOT outlier OR (%s)) AND e.room_id = ? AND topological_ordering < ?"
% (should_delete_expr, should_delete_expr),
should_delete_params,
)
# We create the indices *after* insertion as that's a lot faster.
# create an index on should_delete because later we'll be looking for
# the should_delete / shouldn't_delete subsets
txn.execute(
"CREATE INDEX events_to_purge_should_delete"
" ON events_to_purge(should_delete)"
)
# We do joins against events_to_purge for e.g. calculating state
# groups to purge, etc., so lets make an index.
txn.execute("CREATE INDEX events_to_purge_id" " ON events_to_purge(event_id)")
txn.execute("SELECT event_id, should_delete FROM events_to_purge")
event_rows = txn.fetchall()
logger.info(
"[purge] found %i events before cutoff, of which %i can be deleted",
len(event_rows),
sum(1 for e in event_rows if e[1]),
)
logger.info("[purge] Finding new backward extremities")
# We calculate the new entries for the backward extremeties by finding
# events to be purged that are pointed to by events we're not going to
# purge.
txn.execute(
"SELECT DISTINCT e.event_id FROM events_to_purge AS e"
" INNER JOIN event_edges AS ed ON e.event_id = ed.prev_event_id"
" LEFT JOIN events_to_purge AS ep2 ON ed.event_id = ep2.event_id"
" WHERE ep2.event_id IS NULL"
)
new_backwards_extrems = txn.fetchall()
logger.info("[purge] replacing backward extremities: %r", new_backwards_extrems)
txn.execute(
"DELETE FROM event_backward_extremities WHERE room_id = ?", (room_id,)
)
# Update backward extremeties
txn.executemany(
"INSERT INTO event_backward_extremities (room_id, event_id)"
" VALUES (?, ?)",
[(room_id, event_id) for event_id, in new_backwards_extrems],
)
logger.info("[purge] finding redundant state groups")
# Get all state groups that are referenced by events that are to be
# deleted. We then go and check if they are referenced by other events
# or state groups, and if not we delete them.
txn.execute(
"""
SELECT DISTINCT state_group FROM events_to_purge
INNER JOIN event_to_state_groups USING (event_id)
"""
)
referenced_state_groups = set(sg for sg, in txn)
logger.info(
"[purge] found %i referenced state groups", len(referenced_state_groups)
)
logger.info("[purge] finding state groups that can be deleted")
_ = self._find_unreferenced_groups_during_purge(txn, referenced_state_groups)
state_groups_to_delete, remaining_state_groups = _
logger.info(
"[purge] found %i state groups to delete", len(state_groups_to_delete)
)
logger.info(
"[purge] de-delta-ing %i remaining state groups",
len(remaining_state_groups),
)
# Now we turn the state groups that reference to-be-deleted state
# groups to non delta versions.
for sg in remaining_state_groups:
logger.info("[purge] de-delta-ing remaining state group %s", sg)
curr_state = self._get_state_groups_from_groups_txn(txn, [sg])
curr_state = curr_state[sg]
self._simple_delete_txn(
txn, table="state_groups_state", keyvalues={"state_group": sg}
)
self._simple_delete_txn(
txn, table="state_group_edges", keyvalues={"state_group": sg}
)
self._simple_insert_many_txn(
txn,
table="state_groups_state",
values=[
{
"state_group": sg,
"room_id": room_id,
"type": key[0],
"state_key": key[1],
"event_id": state_id,
}
for key, state_id in iteritems(curr_state)
],
)
logger.info("[purge] removing redundant state groups")
txn.executemany(
"DELETE FROM state_groups_state WHERE state_group = ?",
((sg,) for sg in state_groups_to_delete),
)
txn.executemany(
"DELETE FROM state_groups WHERE id = ?",
((sg,) for sg in state_groups_to_delete),
)
logger.info("[purge] removing events from event_to_state_groups")
txn.execute(
"DELETE FROM event_to_state_groups "
"WHERE event_id IN (SELECT event_id from events_to_purge)"
)
for event_id, _ in event_rows:
txn.call_after(self._get_state_group_for_event.invalidate, (event_id,))
# Delete all remote non-state events
for table in (
"events",
"event_json",
"event_auth",
"event_edges",
"event_forward_extremities",
"event_reference_hashes",
"event_search",
"rejections",
):
logger.info("[purge] removing events from %s", table)
txn.execute(
"DELETE FROM %s WHERE event_id IN ("
" SELECT event_id FROM events_to_purge WHERE should_delete"
")" % (table,)
)
# event_push_actions lacks an index on event_id, and has one on
# (room_id, event_id) instead.
for table in ("event_push_actions",):
logger.info("[purge] removing events from %s", table)
txn.execute(
"DELETE FROM %s WHERE room_id = ? AND event_id IN ("
" SELECT event_id FROM events_to_purge WHERE should_delete"
")" % (table,),
(room_id,),
)
# Mark all state and own events as outliers
logger.info("[purge] marking remaining events as outliers")
txn.execute(
"UPDATE events SET outlier = ?"
" WHERE event_id IN ("
" SELECT event_id FROM events_to_purge "
" WHERE NOT should_delete"
")",
(True,),
)
# synapse tries to take out an exclusive lock on room_depth whenever it
# persists events (because upsert), and once we run this update, we
# will block that for the rest of our transaction.
#
# So, let's stick it at the end so that we don't block event
# persistence.
#
# We do this by calculating the minimum depth of the backwards
# extremities. However, the events in event_backward_extremities
# are ones we don't have yet so we need to look at the events that
# point to it via event_edges table.
txn.execute(
"""
SELECT COALESCE(MIN(depth), 0)
FROM event_backward_extremities AS eb
INNER JOIN event_edges AS eg ON eg.prev_event_id = eb.event_id
INNER JOIN events AS e ON e.event_id = eg.event_id
WHERE eb.room_id = ?
""",
(room_id,),
)
min_depth, = txn.fetchone()
logger.info("[purge] updating room_depth to %d", min_depth)
txn.execute(
"UPDATE room_depth SET min_depth = ? WHERE room_id = ?",
(min_depth, room_id),
)
# finally, drop the temp table. this will commit the txn in sqlite,
# so make sure to keep this actually last.
txn.execute("DROP TABLE events_to_purge")
logger.info("[purge] done")
def _find_unreferenced_groups_during_purge(self, txn, state_groups):
"""Used when purging history to figure out which state groups can be
deleted and which need to be de-delta'ed (due to one of its prev groups
being scheduled for deletion).
Args:
txn
state_groups (set[int]): Set of state groups referenced by events
that are going to be deleted.
Returns:
tuple[set[int], set[int]]: The set of state groups that can be
deleted and the set of state groups that need to be de-delta'ed
"""
# Graph of state group -> previous group
graph = {}
# Set of events that we have found to be referenced by events
referenced_groups = set()
# Set of state groups we've already seen
state_groups_seen = set(state_groups)
# Set of state groups to handle next.
next_to_search = set(state_groups)
while next_to_search:
# We bound size of groups we're looking up at once, to stop the
# SQL query getting too big
if len(next_to_search) < 100:
current_search = next_to_search
next_to_search = set()
else:
current_search = set(itertools.islice(next_to_search, 100))
next_to_search -= current_search
# Check if state groups are referenced
sql = """
SELECT DISTINCT state_group FROM event_to_state_groups
LEFT JOIN events_to_purge AS ep USING (event_id)
WHERE state_group IN (%s) AND ep.event_id IS NULL
""" % (
",".join("?" for _ in current_search),
)
txn.execute(sql, list(current_search))
referenced = set(sg for sg, in txn)
referenced_groups |= referenced
# We don't continue iterating up the state group graphs for state
# groups that are referenced.
current_search -= referenced
rows = self._simple_select_many_txn(
txn,
table="state_group_edges",
column="prev_state_group",
iterable=current_search,
keyvalues={},
retcols=("prev_state_group", "state_group"),
)
prevs = set(row["state_group"] for row in rows)
# We don't bother re-handling groups we've already seen
prevs -= state_groups_seen
next_to_search |= prevs
state_groups_seen |= prevs
for row in rows:
# Note: Each state group can have at most one prev group
graph[row["state_group"]] = row["prev_state_group"]
to_delete = state_groups_seen - referenced_groups
to_dedelta = set()
for sg in referenced_groups:
prev_sg = graph.get(sg)
if prev_sg and prev_sg in to_delete:
to_dedelta.add(sg)
return to_delete, to_dedelta
@defer.inlineCallbacks
def is_event_after(self, event_id1, event_id2):
"""Returns True if event_id1 is after event_id2 in the stream
"""
to_1, so_1 = yield self._get_event_ordering(event_id1)
to_2, so_2 = yield self._get_event_ordering(event_id2)
defer.returnValue((to_1, so_1) > (to_2, so_2))
@cachedInlineCallbacks(max_entries=5000)
def _get_event_ordering(self, event_id):
res = yield self._simple_select_one(
table="events",
retcols=["topological_ordering", "stream_ordering"],
keyvalues={"event_id": event_id},
allow_none=True,
)
if not res:
raise SynapseError(404, "Could not find event %s" % (event_id,))
defer.returnValue(
(int(res["topological_ordering"]), int(res["stream_ordering"]))
)
def get_all_updated_current_state_deltas(self, from_token, to_token, limit):
def get_all_updated_current_state_deltas_txn(txn):
sql = """
SELECT stream_id, room_id, type, state_key, event_id
FROM current_state_delta_stream
WHERE ? < stream_id AND stream_id <= ?
ORDER BY stream_id ASC LIMIT ?
"""
txn.execute(sql, (from_token, to_token, limit))
return txn.fetchall()
return self.runInteraction(
"get_all_updated_current_state_deltas",
get_all_updated_current_state_deltas_txn,
)
AllNewEventsResult = namedtuple(
"AllNewEventsResult",
[
"new_forward_events",
"new_backfill_events",
"forward_ex_outliers",
"backward_ex_outliers",
],
)
| [((2138, 2165), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (2155, 2165), False, 'import logging\n'), ((2191, 2245), 'prometheus_client.Counter', 'Counter', (['"""synapse_storage_events_persisted_events"""', '""""""'], {}), "('synapse_storage_events_persisted_events', '')\n", (2198, 2245), False, 'from prometheus_client import Counter, Histogram\n'), ((2262, 2366), 'prometheus_client.Counter', 'Counter', (['"""synapse_storage_events_persisted_events_sep"""', '""""""', "['type', 'origin_type', 'origin_entity']"], {}), "('synapse_storage_events_persisted_events_sep', '', ['type',\n 'origin_type', 'origin_entity'])\n", (2269, 2366), False, 'from prometheus_client import Counter, Histogram\n'), ((2462, 2511), 'prometheus_client.Counter', 'Counter', (['"""synapse_storage_events_state_delta"""', '""""""'], {}), "('synapse_storage_events_state_delta', '')\n", (2469, 2511), False, 'from prometheus_client import Counter, Histogram\n'), ((2645, 2707), 'prometheus_client.Counter', 'Counter', (['"""synapse_storage_events_state_delta_single_event"""', '""""""'], {}), "('synapse_storage_events_state_delta_single_event', '')\n", (2652, 2707), False, 'from prometheus_client import Counter, Histogram\n'), ((2911, 2972), 'prometheus_client.Counter', 'Counter', (['"""synapse_storage_events_state_delta_reuse_delta"""', '""""""'], {}), "('synapse_storage_events_state_delta_reuse_delta', '')\n", (2918, 2972), False, 'from prometheus_client import Counter, Histogram\n'), ((3066, 3255), 'prometheus_client.Histogram', 'Histogram', (['"""synapse_storage_events_forward_extremities_persisted"""', '"""Number of forward extremities for each new event"""'], {'buckets': "(1, 2, 3, 5, 7, 10, 15, 20, 50, 100, 200, 500, '+Inf')"}), "('synapse_storage_events_forward_extremities_persisted',\n 'Number of forward extremities for each new event', buckets=(1, 2, 3, 5,\n 7, 10, 15, 20, 50, 100, 200, 500, '+Inf'))\n", (3075, 3255), False, 'from prometheus_client import Counter, Histogram\n'), ((3457, 3666), 'prometheus_client.Histogram', 'Histogram', (['"""synapse_storage_events_stale_forward_extremities_persisted"""', '"""Number of unchanged forward extremities for each new event"""'], {'buckets': "(0, 1, 2, 3, 5, 7, 10, 15, 20, 50, 100, 200, 500, '+Inf')"}), "('synapse_storage_events_stale_forward_extremities_persisted',\n 'Number of unchanged forward extremities for each new event', buckets=(\n 0, 1, 2, 3, 5, 7, 10, 15, 20, 50, 100, 200, 500, '+Inf'))\n", (3466, 3666), False, 'from prometheus_client import Counter, Histogram\n'), ((7841, 7900), 'collections.namedtuple', 'namedtuple', (['"""_EventCacheEntry"""', "('event', 'redacted_event')"], {}), "('_EventCacheEntry', ('event', 'redacted_event'))\n", (7851, 7900), False, 'from collections import Counter as c_counter, OrderedDict, deque, namedtuple\n'), ((87472, 87602), 'collections.namedtuple', 'namedtuple', (['"""AllNewEventsResult"""', "['new_forward_events', 'new_backfill_events', 'forward_ex_outliers',\n 'backward_ex_outliers']"], {}), "('AllNewEventsResult', ['new_forward_events',\n 'new_backfill_events', 'forward_ex_outliers', 'backward_ex_outliers'])\n", (87482, 87602), False, 'from collections import Counter as c_counter, OrderedDict, deque, namedtuple\n'), ((3801, 3844), 'synapse.util.frozenutils.frozendict_json_encoder.encode', 'frozendict_json_encoder.encode', (['json_object'], {}), '(json_object)\n', (3831, 3844), False, 'from synapse.util.frozenutils import frozendict_json_encoder\n'), ((4114, 4205), 'collections.namedtuple', 'namedtuple', (['"""_EventPersistQueueItem"""', "('events_and_contexts', 'backfilled', 'deferred')"], {}), "('_EventPersistQueueItem', ('events_and_contexts', 'backfilled',\n 'deferred'))\n", (4124, 4205), False, 'from collections import Counter as c_counter, OrderedDict, deque, namedtuple\n'), ((8167, 8178), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (8172, 8178), False, 'from functools import wraps\n'), ((67894, 67928), 'synapse.util.caches.descriptors.cached', 'cached', ([], {'num_args': '(5)', 'max_entries': '(10)'}), '(num_args=5, max_entries=10)\n', (67900, 67928), False, 'from synapse.util.caches.descriptors import cached, cachedInlineCallbacks\n'), ((86284, 86323), 'synapse.util.caches.descriptors.cachedInlineCallbacks', 'cachedInlineCallbacks', ([], {'max_entries': '(5000)'}), '(max_entries=5000)\n', (86305, 86323), False, 'from synapse.util.caches.descriptors import cached, cachedInlineCallbacks\n'), ((7480, 7542), 'synapse.metrics.background_process_metrics.run_as_background_process', 'run_as_background_process', (['"""persist_events"""', 'handle_queue_loop'], {}), "('persist_events', handle_queue_loop)\n", (7505, 7542), False, 'from synapse.metrics.background_process_metrics import run_as_background_process\n'), ((8504, 8526), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['res'], {}), '(res)\n', (8521, 8526), False, 'from twisted.internet import defer\n'), ((9244, 9255), 'collections.Counter', 'c_counter', ([], {}), '()\n', (9253, 9255), True, 'from collections import Counter as c_counter, OrderedDict, deque, namedtuple\n'), ((9265, 9435), 'synapse.metrics.BucketCollector', 'BucketCollector', (['"""synapse_forward_extremities"""', '(lambda : self._current_forward_extremities_amount)'], {'buckets': "[1, 2, 3, 5, 7, 10, 15, 20, 50, 100, 200, 500, '+Inf']"}), "('synapse_forward_extremities', lambda : self.\n _current_forward_extremities_amount, buckets=[1, 2, 3, 5, 7, 10, 15, 20,\n 50, 100, 200, 500, '+Inf'])\n", (9280, 9435), False, 'from synapse.metrics import BucketCollector\n'), ((11126, 11148), 'six.iteritems', 'iteritems', (['partitioned'], {}), '(partitioned)\n', (11135, 11148), False, 'from six import iteritems, text_type\n'), ((11592, 11627), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['max_persisted_id'], {}), '(max_persisted_id)\n', (11609, 11627), False, 'from twisted.internet import defer\n'), ((12360, 12438), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['(event.internal_metadata.stream_ordering, max_persisted_id)'], {}), '((event.internal_metadata.stream_ordering, max_persisted_id))\n', (12377, 12438), False, 'from twisted.internet import defer\n'), ((24714, 24739), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['result'], {}), '(result)\n', (24731, 24739), False, 'from twisted.internet import defer\n'), ((25825, 25851), 'synapse.util.batch_iter', 'batch_iter', (['event_ids', '(100)'], {}), '(event_ids, 100)\n', (25835, 25851), False, 'from synapse.util import batch_iter\n'), ((26001, 26027), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['results'], {}), '(results)\n', (26018, 26027), False, 'from twisted.internet import defer\n'), ((28210, 28236), 'synapse.util.batch_iter', 'batch_iter', (['event_ids', '(100)'], {}), '(event_ids, 100)\n', (28220, 28236), False, 'from synapse.util import batch_iter\n'), ((28384, 28417), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['existing_prevs'], {}), '(existing_prevs)\n', (28401, 28417), False, 'from twisted.internet import defer\n'), ((35202, 35238), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['(res.state, None)'], {}), '((res.state, None))\n', (35219, 35238), False, 'from twisted.internet import defer\n'), ((36016, 36057), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['(to_delete, to_insert)'], {}), '((to_delete, to_insert))\n', (36033, 36057), False, 'from twisted.internet import defer\n'), ((40778, 40808), 'six.iteritems', 'iteritems', (['state_delta_by_room'], {}), '(state_delta_by_room)\n', (40787, 40808), False, 'from six import iteritems, text_type\n'), ((44898, 44932), 'six.iteritems', 'iteritems', (['new_forward_extremities'], {}), '(new_forward_extremities)\n', (44907, 44932), False, 'from six import iteritems, text_type\n'), ((46641, 46654), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (46652, 46654), False, 'from collections import Counter as c_counter, OrderedDict, deque, namedtuple\n'), ((48515, 48539), 'six.iteritems', 'iteritems', (['depth_updates'], {}), '(depth_updates)\n', (48524, 48539), False, 'from six import iteritems, text_type\n'), ((62541, 62563), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['ret'], {}), '(ret)\n', (62558, 62563), False, 'from twisted.internet import defer\n'), ((63296, 63318), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['ret'], {}), '(ret)\n', (63313, 63318), False, 'from twisted.internet import defer\n'), ((63816, 63838), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['ret'], {}), '(ret)\n', (63833, 63838), False, 'from twisted.internet import defer\n'), ((72296, 72328), 'synapse.types.RoomStreamToken.parse', 'RoomStreamToken.parse', (['token_str'], {}), '(token_str)\n', (72317, 72328), False, 'from synapse.types import RoomStreamToken, get_domain_from_id\n'), ((86231, 86277), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['((to_1, so_1) > (to_2, so_2))'], {}), '((to_1, so_1) > (to_2, so_2))\n', (86248, 86277), False, 'from twisted.internet import defer\n'), ((5066, 5073), 'collections.deque', 'deque', ([], {}), '()\n', (5071, 5073), False, 'from collections import Counter as c_counter, OrderedDict, deque, namedtuple\n'), ((5479, 5495), 'twisted.internet.defer.Deferred', 'defer.Deferred', ([], {}), '()\n', (5493, 5495), False, 'from twisted.internet import defer\n'), ((7653, 7660), 'collections.deque', 'deque', ([], {}), '()\n', (7658, 7660), False, 'from collections import Counter as c_counter, OrderedDict, deque, namedtuple\n'), ((9708, 9798), 'synapse.metrics.background_process_metrics.run_as_background_process', 'run_as_background_process', (['"""read_forward_extremities"""', 'self._read_forward_extremities'], {}), "('read_forward_extremities', self.\n _read_forward_extremities)\n", (9733, 9798), False, 'from synapse.metrics.background_process_metrics import run_as_background_process\n'), ((12244, 12277), 'synapse.util.logcontext.make_deferred_yieldable', 'make_deferred_yieldable', (['deferred'], {}), '(deferred)\n', (12267, 12277), False, 'from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable\n'), ((32637, 32668), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['(None, None)'], {}), '((None, None))\n', (32654, 32668), False, 'from twisted.internet import defer\n'), ((64305, 64322), 'twisted.internet.defer.succeed', 'defer.succeed', (['[]'], {}), '([])\n', (64318, 64322), False, 'from twisted.internet import defer\n'), ((66151, 66168), 'twisted.internet.defer.succeed', 'defer.succeed', (['[]'], {}), '([])\n', (66164, 66168), False, 'from twisted.internet import defer\n'), ((74182, 74259), 'synapse.api.errors.SynapseError', 'SynapseError', (['(400)', '"""topological_ordering is greater than forward extremeties"""'], {}), "(400, 'topological_ordering is greater than forward extremeties')\n", (74194, 74259), False, 'from synapse.api.errors import SynapseError\n'), ((86631, 86689), 'synapse.api.errors.SynapseError', 'SynapseError', (['(404)', "('Could not find event %s' % (event_id,))"], {}), "(404, 'Could not find event %s' % (event_id,))\n", (86643, 86689), False, 'from synapse.api.errors import SynapseError\n'), ((11448, 11498), 'twisted.internet.defer.gatherResults', 'defer.gatherResults', (['deferreds'], {'consumeErrors': '(True)'}), '(deferreds, consumeErrors=True)\n', (11467, 11498), False, 'from twisted.internet import defer\n'), ((12572, 12610), 'synapse.util.metrics.Measure', 'Measure', (['self._clock', '"""persist_events"""'], {}), "(self._clock, 'persist_events')\n", (12579, 12610), False, 'from synapse.util.metrics import Measure\n'), ((22314, 22347), 'six.iteritems', 'iteritems', (['current_state_for_room'], {}), '(current_state_for_room)\n', (22323, 22347), False, 'from six import iteritems, text_type\n'), ((22477, 22511), 'six.iteritems', 'iteritems', (['new_forward_extremeties'], {}), '(new_forward_extremeties)\n', (22486, 22511), False, 'from six import iteritems, text_type\n'), ((33523, 33564), 'twisted.internet.defer.returnValue', 'defer.returnValue', (['(new_state, delta_ids)'], {}), '((new_state, delta_ids))\n', (33540, 33564), False, 'from twisted.internet import defer\n'), ((35924, 35948), 'six.iteritems', 'iteritems', (['current_state'], {}), '(current_state)\n', (35933, 35948), False, 'from six import iteritems, text_type\n'), ((35155, 35181), 'synapse.state.StateResolutionStore', 'StateResolutionStore', (['self'], {}), '(self)\n', (35175, 35181), False, 'from synapse.state import StateResolutionStore\n'), ((84177, 84214), 'itertools.islice', 'itertools.islice', (['next_to_search', '(100)'], {}), '(next_to_search, 100)\n', (84193, 84214), False, 'import itertools\n'), ((15059, 15110), 'synapse.util.metrics.Measure', 'Measure', (['self._clock', '"""_calculate_state_and_extrem"""'], {}), "(self._clock, '_calculate_state_and_extrem')\n", (15066, 15110), False, 'from synapse.util.metrics import Measure\n'), ((15643, 15668), 'six.iteritems', 'iteritems', (['events_by_room'], {}), '(events_by_room)\n', (15652, 15668), False, 'from six import iteritems, text_type\n'), ((42890, 42910), 'six.iteritems', 'iteritems', (['to_insert'], {}), '(to_insert)\n', (42899, 42910), False, 'from six import iteritems, text_type\n'), ((43273, 43310), 'itertools.chain', 'itertools.chain', (['to_delete', 'to_insert'], {}), '(to_delete, to_insert)\n', (43288, 43310), False, 'import itertools\n'), ((44387, 44424), 'itertools.chain', 'itertools.chain', (['to_delete', 'to_insert'], {}), '(to_delete, to_insert)\n', (44402, 44424), False, 'import itertools\n'), ((45380, 45414), 'six.iteritems', 'iteritems', (['new_forward_extremities'], {}), '(new_forward_extremities)\n', (45389, 45414), False, 'from six import iteritems, text_type\n'), ((46100, 46134), 'six.iteritems', 'iteritems', (['new_forward_extremities'], {}), '(new_forward_extremities)\n', (46109, 46134), False, 'from six import iteritems, text_type\n'), ((7088, 7112), 'synapse.util.logcontext.PreserveLoggingContext', 'PreserveLoggingContext', ([], {}), '()\n', (7110, 7112), False, 'from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable\n'), ((22150, 22182), 'synapse.types.get_domain_from_id', 'get_domain_from_id', (['event.sender'], {}), '(event.sender)\n', (22168, 22182), False, 'from synapse.types import RoomStreamToken, get_domain_from_id\n'), ((27975, 27995), 'canonicaljson.json.loads', 'json.loads', (['metadata'], {}), '(metadata)\n', (27985, 27995), False, 'from canonicaljson import json\n'), ((43733, 43753), 'six.iteritems', 'iteritems', (['to_insert'], {}), '(to_insert)\n', (43742, 43753), False, 'from six import iteritems, text_type\n'), ((79298, 79319), 'six.iteritems', 'iteritems', (['curr_state'], {}), '(curr_state)\n', (79307, 79319), False, 'from six import iteritems, text_type\n'), ((6955, 6979), 'synapse.util.logcontext.PreserveLoggingContext', 'PreserveLoggingContext', ([], {}), '()\n', (6977, 6979), False, 'from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable\n'), ((19072, 19137), 'synapse.util.metrics.Measure', 'Measure', (['self._clock', '"""persist_events.get_new_state_after_events"""'], {}), "(self._clock, 'persist_events.get_new_state_after_events')\n", (19079, 19137), False, 'from synapse.util.metrics import Measure\n'), ((25766, 25782), 'canonicaljson.json.loads', 'json.loads', (['r[1]'], {}), '(r[1])\n', (25776, 25782), False, 'from canonicaljson import json\n'), ((20209, 20269), 'synapse.util.metrics.Measure', 'Measure', (['self._clock', '"""persist_events.calculate_state_delta"""'], {}), "(self._clock, 'persist_events.calculate_state_delta')\n", (20216, 20269), False, 'from synapse.util.metrics import Measure\n')] |
premm1983/Spinnaker | dev/buildtool/metrics.py | 535f78b8f5402eea942c260cb9ca26682772a3e6 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Metrics support manager."""
import logging
from buildtool import in_memory_metrics
from buildtool import prometheus_metrics
from buildtool import stackdriver_metrics
from buildtool.util import add_parser_argument
class MetricsManager(object):
"""Acts as factory for specialized BaseMetricsRegistry singleton."""
__metrics_registry = None
@staticmethod
def singleton():
"""Returns the BaseMetricsRegistry once startup_metrics is called."""
if MetricsManager.__metrics_registry is None:
raise Exception('startup_metrics was not called.')
return MetricsManager.__metrics_registry
@staticmethod
def init_argument_parser(parser, defaults):
"""Init argparser with metrics-related options."""
in_memory_metrics.init_argument_parser(parser, defaults)
prometheus_metrics.init_argument_parser(parser, defaults)
stackdriver_metrics.init_argument_parser(parser, defaults)
add_parser_argument(
parser, 'metric_name_scope', defaults, 'buildtool',
help='scope prefix for metrics generated by this tool')
add_parser_argument(
parser, 'monitoring_enabled', defaults, False, type=bool,
help='Enable monitoring to stackdriver.')
add_parser_argument(
parser, 'monitoring_flush_frequency', defaults, 5,
help='Frequency at which to push metrics in seconds.')
add_parser_argument(
parser, 'monitoring_system', defaults, 'file',
choices=['file', 'prometheus', 'stackdriver'],
help='Where to store metrics.')
@staticmethod
def startup_metrics(options):
"""Startup metrics module with concrete system."""
monitoring_systems = {
'file': in_memory_metrics.InMemoryMetricsRegistry,
'prometheus': prometheus_metrics.PrometheusMetricsRegistry,
'stackdriver': stackdriver_metrics.StackdriverMetricsRegistry
}
klas = monitoring_systems[options.monitoring_system]
logging.info('Initializing monitoring with systme="%s"', klas.__name__)
MetricsManager.__metrics_registry = klas(options)
if options.monitoring_enabled and options.monitoring_flush_frequency > 0:
MetricsManager.__metrics_registry.start_pusher_thread()
return MetricsManager.__metrics_registry
@staticmethod
def shutdown_metrics():
"""Write final metrics out to metrics server."""
registry = MetricsManager.singleton()
registry.stop_pusher_thread()
registry.flush_updated_metrics()
registry.flush_final_metrics()
| [((1330, 1386), 'buildtool.in_memory_metrics.init_argument_parser', 'in_memory_metrics.init_argument_parser', (['parser', 'defaults'], {}), '(parser, defaults)\n', (1368, 1386), False, 'from buildtool import in_memory_metrics\n'), ((1391, 1448), 'buildtool.prometheus_metrics.init_argument_parser', 'prometheus_metrics.init_argument_parser', (['parser', 'defaults'], {}), '(parser, defaults)\n', (1430, 1448), False, 'from buildtool import prometheus_metrics\n'), ((1453, 1511), 'buildtool.stackdriver_metrics.init_argument_parser', 'stackdriver_metrics.init_argument_parser', (['parser', 'defaults'], {}), '(parser, defaults)\n', (1493, 1511), False, 'from buildtool import stackdriver_metrics\n'), ((1516, 1647), 'buildtool.util.add_parser_argument', 'add_parser_argument', (['parser', '"""metric_name_scope"""', 'defaults', '"""buildtool"""'], {'help': '"""scope prefix for metrics generated by this tool"""'}), "(parser, 'metric_name_scope', defaults, 'buildtool',\n help='scope prefix for metrics generated by this tool')\n", (1535, 1647), False, 'from buildtool.util import add_parser_argument\n'), ((1665, 1789), 'buildtool.util.add_parser_argument', 'add_parser_argument', (['parser', '"""monitoring_enabled"""', 'defaults', '(False)'], {'type': 'bool', 'help': '"""Enable monitoring to stackdriver."""'}), "(parser, 'monitoring_enabled', defaults, False, type=\n bool, help='Enable monitoring to stackdriver.')\n", (1684, 1789), False, 'from buildtool.util import add_parser_argument\n'), ((1806, 1936), 'buildtool.util.add_parser_argument', 'add_parser_argument', (['parser', '"""monitoring_flush_frequency"""', 'defaults', '(5)'], {'help': '"""Frequency at which to push metrics in seconds."""'}), "(parser, 'monitoring_flush_frequency', defaults, 5, help\n ='Frequency at which to push metrics in seconds.')\n", (1825, 1936), False, 'from buildtool.util import add_parser_argument\n'), ((1953, 2103), 'buildtool.util.add_parser_argument', 'add_parser_argument', (['parser', '"""monitoring_system"""', 'defaults', '"""file"""'], {'choices': "['file', 'prometheus', 'stackdriver']", 'help': '"""Where to store metrics."""'}), "(parser, 'monitoring_system', defaults, 'file', choices=\n ['file', 'prometheus', 'stackdriver'], help='Where to store metrics.')\n", (1972, 2103), False, 'from buildtool.util import add_parser_argument\n'), ((2519, 2590), 'logging.info', 'logging.info', (['"""Initializing monitoring with systme="%s\\""""', 'klas.__name__'], {}), '(\'Initializing monitoring with systme="%s"\', klas.__name__)\n', (2531, 2590), False, 'import logging\n')] |
hythloday/pants | src/python/pants/backend/android/tasks/aapt_builder.py | 107e9b0957f6949ac4bd535fbef8d2d8cba05c5c | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
print_function, unicode_literals)
import os
import subprocess
from twitter.common import log
from pants.backend.android.targets.android_binary import AndroidBinary
from pants.backend.android.targets.android_resources import AndroidResources
from pants.backend.android.tasks.aapt_task import AaptTask
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnit
from pants.util.dirutil import safe_mkdir
class AaptBuilder(AaptTask):
"""Build an android bundle with compiled code and assets.
This class gathers compiled classes (an Android dex archive) and packages it with the
target's resource files. The output is an unsigned .apk, an Android application package file.
"""
@classmethod
def product_types(cls):
return ['apk']
@staticmethod
def is_app(target):
return isinstance(target, AndroidBinary)
def __init__(self, *args, **kwargs):
super(AaptBuilder, self).__init__(*args, **kwargs)
def prepare(self, round_manager):
round_manager.require_data('dex')
def render_args(self, target, resource_dir, inputs):
args = []
# Glossary of used aapt flags. Aapt handles a ton of action, this will continue to expand.
# : 'package' is the main aapt operation (see class docstring for more info).
# : '-M' is the AndroidManifest.xml of the project.
# : '-S' points to the resource_dir to "spider" down while collecting resources.
# : '-I' packages to add to base "include" set, here the android.jar of the target-sdk.
# : '--ignored-assets' patterns for the aapt to skip. This is the default w/ 'BUILD*' added.
# : '-F' The name and location of the .apk file to output
# : additional positional arguments are treated as input directories to gather files from.
args.extend([self.aapt_tool(target.build_tools_version)])
args.extend(['package', '-M', target.manifest])
args.extend(['-S'])
args.extend(resource_dir)
args.extend(['-I', self.android_jar_tool(target.target_sdk)])
args.extend(['--ignore-assets', self.ignored_assets])
args.extend(['-F', os.path.join(self.workdir, target.app_name + '-unsigned.apk')])
args.extend(inputs)
log.debug('Executing: {0}'.format(args))
return args
def execute(self):
safe_mkdir(self.workdir)
# TODO(mateor) map stderr and stdout to workunit streams (see CR 859)
with self.context.new_workunit(name='apk-bundle', labels=[WorkUnit.MULTITOOL]):
targets = self.context.targets(self.is_app)
with self.invalidated(targets) as invalidation_check:
invalid_targets = []
for vt in invalidation_check.invalid_vts:
invalid_targets.extend(vt.targets)
for target in invalid_targets:
# 'input_dirs' is the folder containing the Android dex file
input_dirs = []
# 'gen_out' holds resource folders (e.g. 'res')
gen_out = []
mapping = self.context.products.get('dex')
for basedir in mapping.get(target):
input_dirs.append(basedir)
def gather_resources(target):
"""Gather the 'resource_dir' of the target"""
if isinstance(target, AndroidResources):
gen_out.append(os.path.join(get_buildroot(), target.resource_dir))
target.walk(gather_resources)
process = subprocess.Popen(self.render_args(target, gen_out, input_dirs))
result = process.wait()
if result != 0:
raise TaskError('Android aapt tool exited non-zero ({code})'.format(code=result))
for target in targets:
self.context.products.get('apk').add(target, self.workdir).append(target.app_name + "-unsigned.apk")
| [((2585, 2609), 'pants.util.dirutil.safe_mkdir', 'safe_mkdir', (['self.workdir'], {}), '(self.workdir)\n', (2595, 2609), False, 'from pants.util.dirutil import safe_mkdir\n'), ((2410, 2471), 'os.path.join', 'os.path.join', (['self.workdir', "(target.app_name + '-unsigned.apk')"], {}), "(self.workdir, target.app_name + '-unsigned.apk')\n", (2422, 2471), False, 'import os\n'), ((3551, 3566), 'pants.base.build_environment.get_buildroot', 'get_buildroot', ([], {}), '()\n', (3564, 3566), False, 'from pants.base.build_environment import get_buildroot\n')] |
kiss2u/google-research | fat/fat_bert_nq/ppr/apr_lib.py | 2cd66234656f9e2f4218ed90a2d8aa9cf3139093 | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file contains a class which acts as a wrapper around the PPR algorithm.
This class has the following functionality:
1. Load the KB graph,
2. Given list of seed entities, get topk entities from PPR.
3. Get unique facts between all extracted entities.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from fat.fat_bert_nq.ppr.apr_algo import csr_personalized_pagerank
from fat.fat_bert_nq.ppr.apr_algo import csr_topk_fact_extractor
from fat.fat_bert_nq.ppr.kb_csr_io import CsrData
flags = tf.flags
FLAGS = flags.FLAGS
flags.DEFINE_bool(
'verbose_logging', False,
'If true, all of the warnings related to data processing will be printed. '
'A number of warnings are expected for a normal NQ evaluation.')
class ApproximatePageRank(object):
"""APR main lib which is used to wrap functions around ppr algo."""
def __init__(self):
self.data = CsrData()
self.data.load_csr_data(
full_wiki=FLAGS.full_wiki, files_dir=FLAGS.apr_files_dir)
def get_topk_extracted_ent(self, seeds, alpha, topk):
"""Extract topk entities given seeds.
Args:
seeds: An Ex1 vector with weight on every seed entity
alpha: probability for PPR
topk: max top entities to extract
Returns:
extracted_ents: list of selected entities
extracted_scores: list of scores of selected entities
"""
ppr_scores = csr_personalized_pagerank(seeds, self.data.adj_mat_t_csr,
alpha)
sorted_idx = np.argsort(ppr_scores)[::-1]
extracted_ents = sorted_idx[:topk]
extracted_scores = ppr_scores[sorted_idx[:topk]]
# Check for really low values
# Get idx of First value < 1e-6, limit extracted ents till there
zero_idx = np.where(ppr_scores[extracted_ents] < 1e-6)[0]
if zero_idx.shape[0] > 0:
extracted_ents = extracted_ents[:zero_idx[0]]
return extracted_ents, extracted_scores
def get_facts(self, entities, topk, alpha, seed_weighting=True):
"""Get subgraph describing a neighbourhood around given entities.
Args:
entities: A list of Wikidata entities
topk: Max entities to extract from PPR
alpha: Node probability for PPR
seed_weighting: Boolean for performing weighting seeds by freq in passage
Returns:
unique_facts: A list of unique facts around the seeds.
"""
if FLAGS.verbose_logging:
tf.logging.info('Getting subgraph')
entity_ids = [
int(self.data.ent2id[x]) for x in entities if x in self.data.ent2id
]
if FLAGS.verbose_logging:
tf.logging.info(
str([self.data.entity_names['e'][str(x)]['name'] for x in entity_ids
]))
freq_dict = {x: entity_ids.count(x) for x in entity_ids}
seed = np.zeros((self.data.adj_mat.shape[0], 1))
if not seed_weighting:
seed[entity_ids] = 1. / len(set(entity_ids))
else:
for x, y in freq_dict.items():
seed[x] = y
seed = seed / seed.sum()
extracted_ents, extracted_scores = self.get_topk_extracted_ent(
seed, alpha, topk)
if FLAGS.verbose_logging:
tf.logging.info('Extracted ents: ')
tf.logging.info(
str([
self.data.entity_names['e'][str(x)]['name']
for x in extracted_ents
]))
facts = csr_topk_fact_extractor(self.data.adj_mat_t_csr, self.data.rel_dict,
freq_dict, self.data.entity_names,
extracted_ents, extracted_scores)
if FLAGS.verbose_logging:
tf.logging.info('Extracted facts: ')
tf.logging.info(str(facts))
# Extract 1 unique fact per pair of entities (fact with highest score)
# Sort by scores
unique_facts = {}
for (sub, obj, rel, score) in facts:
fwd_dir = (sub, obj)
rev_dir = (obj, sub)
if fwd_dir in unique_facts and score > unique_facts[fwd_dir][1]:
unique_facts[fwd_dir] = (rel, score)
elif rev_dir in unique_facts and score > unique_facts[rev_dir][1]:
unique_facts[fwd_dir] = (rel, score)
del unique_facts[rev_dir] # Remove existing entity pair
else:
unique_facts[(sub, obj)] = (rel, score)
unique_facts = list(unique_facts.items())
return unique_facts
| [((1590, 1599), 'fat.fat_bert_nq.ppr.kb_csr_io.CsrData', 'CsrData', ([], {}), '()\n', (1597, 1599), False, 'from fat.fat_bert_nq.ppr.kb_csr_io import CsrData\n'), ((2084, 2148), 'fat.fat_bert_nq.ppr.apr_algo.csr_personalized_pagerank', 'csr_personalized_pagerank', (['seeds', 'self.data.adj_mat_t_csr', 'alpha'], {}), '(seeds, self.data.adj_mat_t_csr, alpha)\n', (2109, 2148), False, 'from fat.fat_bert_nq.ppr.apr_algo import csr_personalized_pagerank\n'), ((3459, 3500), 'numpy.zeros', 'np.zeros', (['(self.data.adj_mat.shape[0], 1)'], {}), '((self.data.adj_mat.shape[0], 1))\n', (3467, 3500), True, 'import numpy as np\n'), ((4007, 4148), 'fat.fat_bert_nq.ppr.apr_algo.csr_topk_fact_extractor', 'csr_topk_fact_extractor', (['self.data.adj_mat_t_csr', 'self.data.rel_dict', 'freq_dict', 'self.data.entity_names', 'extracted_ents', 'extracted_scores'], {}), '(self.data.adj_mat_t_csr, self.data.rel_dict,\n freq_dict, self.data.entity_names, extracted_ents, extracted_scores)\n', (4030, 4148), False, 'from fat.fat_bert_nq.ppr.apr_algo import csr_topk_fact_extractor\n'), ((2209, 2231), 'numpy.argsort', 'np.argsort', (['ppr_scores'], {}), '(ppr_scores)\n', (2219, 2231), True, 'import numpy as np\n'), ((2449, 2493), 'numpy.where', 'np.where', (['(ppr_scores[extracted_ents] < 1e-06)'], {}), '(ppr_scores[extracted_ents] < 1e-06)\n', (2457, 2493), True, 'import numpy as np\n'), ((3099, 3134), 'tensorflow.logging.info', 'tf.logging.info', (['"""Getting subgraph"""'], {}), "('Getting subgraph')\n", (3114, 3134), True, 'import tensorflow as tf\n'), ((3809, 3844), 'tensorflow.logging.info', 'tf.logging.info', (['"""Extracted ents: """'], {}), "('Extracted ents: ')\n", (3824, 3844), True, 'import tensorflow as tf\n'), ((4253, 4289), 'tensorflow.logging.info', 'tf.logging.info', (['"""Extracted facts: """'], {}), "('Extracted facts: ')\n", (4268, 4289), True, 'import tensorflow as tf\n')] |
evanlynch/optimal-gardening | src/optimal_gardening.py | 447ca8575efac1ad5cdd975091f3cbb67721e167 | import os
import sys
import time
from IPython.display import Image
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sb
sb.set_style("dark")
#### Initial Setup ####
#plant info
plant_info = pd.read_csv('../data/plant_data.csv')
plant_info.index.name = 'plant_index'
plants = plant_info.name.to_numpy()
plant_index = plant_info.index.to_numpy()
num_plants = len(plants)
plant_sun_req = plant_info.sun.to_numpy()
perennials = plant_info[plant_info.perennial==1].index.to_list()
problem_plants = plant_info[plant_info.problem_plant==1].index.to_list()
#calculate weighted average preference for each plant
family = ['evan','gina','liesse','lizzie','jack']
plant_info['avg_pref'] = np.average(plant_info[family],axis=1,weights=[.5,.5,0,0,0])
plant_info.drop(family,axis=1,inplace=True)
preferences = plant_info.avg_pref.to_numpy()
#bed info
bed_info = pd.read_csv('../data/bed_data.csv')
bed_info.index.name = 'bed_index'
beds = bed_info.bed.to_numpy()
bed_index = bed_info.index.to_numpy()
bed_sun_req = bed_info.sun.to_numpy()
num_beds = len(beds)
#time dimension
num_years = 3
years = np.array(range(1,num_years+1))
year_index = np.array(range(num_years))
#for keeping track of what axis is which
plant_axis = 0
bed_axis = 1
year_axis = 2
##### Constraints #####
#initialize sun constraint. 1 where plant can feasibly be planted in bed. 0 where sun requirements do not match.
sun_constraint = np.ones(shape=(num_plants,num_beds,num_years))
for p in plant_index:
for b in bed_index:
p_sun = plant_sun_req[p]
b_sun = bed_sun_req[b]
if p_sun != b_sun:
sun_constraint[p,b,:] = 0
def enforce_sun_constraint(plan,sun_constraint):
"""
Force plan to be 0 where sun requirements for plant and bed do not match.
"""
return plan*sun_constraint
def enforce_perennial_constraint(plan,plant,bed,year,perennials):
"""Forward fill plan for perennial plants. If 1 in a given bed/year, it will be 1 in same bed thereafter."""
perennial_plan = plan.copy()
#what was planted the year before
plant_last_year = perennial_plan[:,bed,year-1].argmax()
#if the plant is a perennial, plant it this year and every year thereafter
if plant in perennials:
perennial_plan[:,bed,year:] = 0 # zeros out anything else that may have been planted in bed in current and subsequent years during a previous make_neighbor call
perennial_plan[plant,bed,year:] = 1 #sets plant to 1 in bed every year after the current year
#if what was planted already in this bed was a perennial, remove it from previous years
elif plant_last_year in perennials:
perennial_plan[plant_last_year,bed,:year] = 0
return perennial_plan
def enforce_disease_constraint(plan,problem_plants):
"""Creates a mask to determine if the same veg was planted in the same bed over multiple years.
Multiplies the plan for problem plants by 0 in subsequent years where we planned to put them in the same bed
"""
disease_plan = plan.copy()
#mask to determine cases where same thing was planted in the same bed yoy
same_veg_in_bed_yoy = disease_plan.cumsum(axis=year_axis)>1
#multiply plan for specific problem plants by 0
disease_plan[problem_plants] = disease_plan[problem_plants]*(abs(1-same_veg_in_bed_yoy)[problem_plants])
return disease_plan
##### Objectives #####
#the most satisfied you could be (planting fruit or vegetable with highest preference in all beds every year)
max_yums = num_beds*num_years*np.max(preferences)
def compute_yummy_score(plan,preferences,max_yums):
"""Takes the weighted average of the preferences of each plant, weighted by the total qty of plants
in the current plan for each plant. Maximization encourages plants with higher preferences to be planted in higher quantities."""
plan_yummy = plan.copy()
plan_by_plant = plan_yummy.sum(axis=(bed_axis,year_axis))
yums = round(np.dot(preferences,plan_by_plant)/max_yums*100,1)
return yums
def compute_variety_score(plan,num_plants):
"""Sums the number of unique plants that are actually planted in the garden. Counts the number of plants that are being planted across all beds.
Then counts the number of plants with non-zero planting plan.
Maximization encourages more unique plants to be planted."""
plan_variety = plan.copy()
num_plants_in_plan = (plan_variety.sum(axis=(bed_axis,year_axis)) > 0).sum()
variety_score = round(num_plants_in_plan/num_plants*100,1)
return variety_score
#### Analysis & Visualization ####
def visualize_garden(bed_info):
garden_layout = bed_info.sun.map({'Full sun':1,'Partial sun':2,'Partial shade':3}).to_numpy().reshape(14,3)
palette = ["#ffa200","#fcbd53","#ffd58f"]
f, ax = plt.subplots(figsize=(10, 6))
ax = sb.heatmap(garden_layout,linewidths=5,linecolor='white',cmap=sb.color_palette(palette),cbar=False)
ax.xaxis.set_ticklabels([])
ax.yaxis.set_ticklabels([])
plt.rcParams.update({'font.size': 13})
return ax
def visualize_plan(bed_info,bed_index,years):
for year in years:
garden_viz = visualize_garden(bed_info)
garden_viz.set_title(f'Year {year}')
for bed in bed_index:
x = bed_info.iloc[bed].x
y = bed_info.iloc[bed].y
plt.text(x + 0.5, y + 0.5, bed_info.loc[(bed_info.x==x)&(bed_info.y==y)][f'year_{year}'].iloc[0],
horizontalalignment='center',verticalalignment='center')
def annual_bed_plan(best_plan,bed_info,plant_info,bed_index,year_index):
for t in year_index:
bed_plan = []
for b in bed_index:
plant_idx = np.argmax(best_plan[:,b,t])
plant = plant_info.iloc[plant_idx]['name']
bed_plan.append(plant)
bed_info[f'year_{t+1}'] = pd.Series(bed_plan)
return bed_info
def visualize_obj_iters(current_plan_obj_values):
objectives = []
yummy_scores = []
variety_scores = []
for i in current_plan_obj_values:
objectives.append(i[1]['objective'])
yummy_scores.append(i[1]['yummy_score'])
variety_scores.append(i[1]['variety_score'])
df = pd.DataFrame([objectives,yummy_scores,variety_scores]).T#,yummy_scores,variety_scores]).T
df.columns = ['obj_value','yummy_scores','variety_scores']#,'yummy_score','variety_score']
df.reset_index(inplace=True)
df = df.melt(id_vars=['index'],var_name='objective')
fig, ax = plt.subplots(figsize=(20,8))
sb.scatterplot(data=df,x='index',y='value',hue='objective',edgecolor=None,s=5)
plt.legend(bbox_to_anchor=(1.05, 1), loc='upper left', borderaxespad=0)
ax.set_title('Objective Values of Current Solution by Iteration')
# ax2 = plt.twinx()
# sb.scatterplot(data=df.drop_duplicates(['index','total_plants']),x='index',y='objective',edgecolor=None,ax=ax2,color='black',s=5) | [((160, 180), 'seaborn.set_style', 'sb.set_style', (['"""dark"""'], {}), "('dark')\n", (172, 180), True, 'import seaborn as sb\n'), ((232, 269), 'pandas.read_csv', 'pd.read_csv', (['"""../data/plant_data.csv"""'], {}), "('../data/plant_data.csv')\n", (243, 269), True, 'import pandas as pd\n'), ((721, 788), 'numpy.average', 'np.average', (['plant_info[family]'], {'axis': '(1)', 'weights': '[0.5, 0.5, 0, 0, 0]'}), '(plant_info[family], axis=1, weights=[0.5, 0.5, 0, 0, 0])\n', (731, 788), True, 'import numpy as np\n'), ((892, 927), 'pandas.read_csv', 'pd.read_csv', (['"""../data/bed_data.csv"""'], {}), "('../data/bed_data.csv')\n", (903, 927), True, 'import pandas as pd\n'), ((1441, 1489), 'numpy.ones', 'np.ones', ([], {'shape': '(num_plants, num_beds, num_years)'}), '(shape=(num_plants, num_beds, num_years))\n', (1448, 1489), True, 'import numpy as np\n'), ((3568, 3587), 'numpy.max', 'np.max', (['preferences'], {}), '(preferences)\n', (3574, 3587), True, 'import numpy as np\n'), ((4826, 4855), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(10, 6)'}), '(figsize=(10, 6))\n', (4838, 4855), True, 'import matplotlib.pyplot as plt\n'), ((5032, 5070), 'matplotlib.pyplot.rcParams.update', 'plt.rcParams.update', (["{'font.size': 13}"], {}), "({'font.size': 13})\n", (5051, 5070), True, 'import matplotlib.pyplot as plt\n'), ((6523, 6552), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(20, 8)'}), '(figsize=(20, 8))\n', (6535, 6552), True, 'import matplotlib.pyplot as plt\n'), ((6556, 6644), 'seaborn.scatterplot', 'sb.scatterplot', ([], {'data': 'df', 'x': '"""index"""', 'y': '"""value"""', 'hue': '"""objective"""', 'edgecolor': 'None', 's': '(5)'}), "(data=df, x='index', y='value', hue='objective', edgecolor=\n None, s=5)\n", (6570, 6644), True, 'import seaborn as sb\n'), ((6639, 6710), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'bbox_to_anchor': '(1.05, 1)', 'loc': '"""upper left"""', 'borderaxespad': '(0)'}), "(bbox_to_anchor=(1.05, 1), loc='upper left', borderaxespad=0)\n", (6649, 6710), True, 'import matplotlib.pyplot as plt\n'), ((5873, 5892), 'pandas.Series', 'pd.Series', (['bed_plan'], {}), '(bed_plan)\n', (5882, 5892), True, 'import pandas as pd\n'), ((6233, 6289), 'pandas.DataFrame', 'pd.DataFrame', (['[objectives, yummy_scores, variety_scores]'], {}), '([objectives, yummy_scores, variety_scores])\n', (6245, 6289), True, 'import pandas as pd\n'), ((4926, 4951), 'seaborn.color_palette', 'sb.color_palette', (['palette'], {}), '(palette)\n', (4942, 4951), True, 'import seaborn as sb\n'), ((5365, 5534), 'matplotlib.pyplot.text', 'plt.text', (['(x + 0.5)', '(y + 0.5)', "bed_info.loc[(bed_info.x == x) & (bed_info.y == y)][f'year_{year}'].iloc[0]"], {'horizontalalignment': '"""center"""', 'verticalalignment': '"""center"""'}), "(x + 0.5, y + 0.5, bed_info.loc[(bed_info.x == x) & (bed_info.y ==\n y)][f'year_{year}'].iloc[0], horizontalalignment='center',\n verticalalignment='center')\n", (5373, 5534), True, 'import matplotlib.pyplot as plt\n'), ((5721, 5754), 'numpy.argmax', 'np.argmax', (['best_plan[:, (b), (t)]'], {}), '(best_plan[:, (b), (t)])\n', (5730, 5754), True, 'import numpy as np\n'), ((3988, 4022), 'numpy.dot', 'np.dot', (['preferences', 'plan_by_plant'], {}), '(preferences, plan_by_plant)\n', (3994, 4022), True, 'import numpy as np\n')] |
Daulbaev/adversarial-library | adv_lib/utils/attack_utils.py | 6f979a511ad78908374cd55855a9e2c5a874be7d | import warnings
from collections import OrderedDict
from distutils.version import LooseVersion
from functools import partial
from inspect import isclass
from typing import Callable, Optional, Dict, Union
import numpy as np
import torch
import tqdm
from torch import Tensor, nn
from torch.nn import functional as F
from adv_lib.distances.lp_norms import l0_distances, l1_distances, l2_distances, linf_distances
from adv_lib.utils import ForwardCounter, BackwardCounter, predict_inputs
def generate_random_targets(labels: Tensor, num_classes: int) -> Tensor:
"""
Generates one random target in (num_classes - 1) possibilities for each label that is different from the original
label.
Parameters
----------
labels: Tensor
Original labels. Generated targets will be different from labels.
num_classes: int
Number of classes to generate the random targets from.
Returns
-------
targets: Tensor
Random target for each label. Has the same shape as labels.
"""
random = torch.rand(len(labels), num_classes, device=labels.device, dtype=torch.float)
random.scatter_(1, labels.unsqueeze(-1), 0)
return random.argmax(1)
def get_all_targets(labels: Tensor, num_classes: int):
"""
Generates all possible targets that are different from the original labels.
Parameters
----------
labels: Tensor
Original labels. Generated targets will be different from labels.
num_classes: int
Number of classes to generate the random targets from.
Returns
-------
targets: Tensor
Random targets for each label. shape: (len(labels), num_classes - 1).
"""
all_possible_targets = torch.zeros(len(labels), num_classes - 1, dtype=torch.long)
all_classes = set(range(num_classes))
for i in range(len(labels)):
this_label = labels[i].item()
other_labels = list(all_classes.difference({this_label}))
all_possible_targets[i] = torch.tensor(other_labels)
return all_possible_targets
def run_attack(model: nn.Module,
inputs: Tensor,
labels: Tensor,
attack: Callable,
targets: Optional[Tensor] = None,
batch_size: Optional[int] = None) -> dict:
device = next(model.parameters()).device
to_device = lambda tensor: tensor.to(device)
targeted, adv_labels = False, labels
if targets is not None:
targeted, adv_labels = True, targets
batch_size = batch_size or len(inputs)
# run attack only on non already adversarial samples
already_adv = []
chunks = [tensor.split(batch_size) for tensor in [inputs, adv_labels]]
for (inputs_chunk, label_chunk) in zip(*chunks):
batch_chunk_d, label_chunk_d = [to_device(tensor) for tensor in [inputs_chunk, label_chunk]]
preds = model(batch_chunk_d).argmax(1)
is_adv = (preds == label_chunk_d) if targeted else (preds != label_chunk_d)
already_adv.append(is_adv.cpu())
not_adv = ~torch.cat(already_adv, 0)
start, end = torch.cuda.Event(enable_timing=True), torch.cuda.Event(enable_timing=True)
forward_counter, backward_counter = ForwardCounter(), BackwardCounter()
model.register_forward_pre_hook(forward_counter)
if LooseVersion(torch.__version__) >= LooseVersion('1.8'):
model.register_full_backward_hook(backward_counter)
else:
model.register_backward_hook(backward_counter)
average_forwards, average_backwards = [], [] # number of forward and backward calls per sample
advs_chunks = []
chunks = [tensor.split(batch_size) for tensor in [inputs[not_adv], adv_labels[not_adv]]]
total_time = 0
for (inputs_chunk, label_chunk) in tqdm.tqdm(zip(*chunks), ncols=80, total=len(chunks[0])):
batch_chunk_d, label_chunk_d = [to_device(tensor.clone()) for tensor in [inputs_chunk, label_chunk]]
start.record()
advs_chunk_d = attack(model, batch_chunk_d, label_chunk_d, targeted=targeted)
# performance monitoring
end.record()
torch.cuda.synchronize()
total_time += (start.elapsed_time(end)) / 1000 # times for cuda Events are in milliseconds
average_forwards.append(forward_counter.num_samples_called / len(batch_chunk_d))
average_backwards.append(backward_counter.num_samples_called / len(batch_chunk_d))
forward_counter.reset(), backward_counter.reset()
advs_chunks.append(advs_chunk_d.cpu())
if isinstance(attack, partial) and (callback := attack.keywords.get('callback')) is not None:
callback.reset_windows()
adv_inputs = inputs.clone()
adv_inputs[not_adv] = torch.cat(advs_chunks, 0)
data = {
'inputs': inputs,
'labels': labels,
'targets': adv_labels if targeted else None,
'adv_inputs': adv_inputs,
'time': total_time,
'num_forwards': sum(average_forwards) / len(chunks[0]),
'num_backwards': sum(average_backwards) / len(chunks[0]),
}
return data
_default_metrics = OrderedDict([
('linf', linf_distances),
('l0', l0_distances),
('l1', l1_distances),
('l2', l2_distances),
])
def compute_attack_metrics(model: nn.Module,
attack_data: Dict[str, Union[Tensor, float]],
batch_size: Optional[int] = None,
metrics: Dict[str, Callable] = _default_metrics) -> Dict[str, Union[Tensor, float]]:
inputs, labels, targets, adv_inputs = map(attack_data.get, ['inputs', 'labels', 'targets', 'adv_inputs'])
if adv_inputs.min() < 0 or adv_inputs.max() > 1:
warnings.warn('Values of produced adversarials are not in the [0, 1] range -> Clipping to [0, 1].')
adv_inputs.clamp_(min=0, max=1)
device = next(model.parameters()).device
to_device = lambda tensor: tensor.to(device)
batch_size = batch_size or len(inputs)
chunks = [tensor.split(batch_size) for tensor in [inputs, labels, adv_inputs]]
all_predictions = [[] for _ in range(6)]
distances = {k: [] for k in metrics.keys()}
metrics = {k: v().to(device) if (isclass(v.func) if isinstance(v, partial) else False) else v for k, v in
metrics.items()}
append = lambda list, data: list.append(data.cpu())
for inputs_chunk, labels_chunk, adv_chunk in zip(*chunks):
inputs_chunk, adv_chunk = map(to_device, [inputs_chunk, adv_chunk])
clean_preds, adv_preds = [predict_inputs(model, chunk.to(device)) for chunk in [inputs_chunk, adv_chunk]]
list(map(append, all_predictions, [*clean_preds, *adv_preds]))
for metric, metric_func in metrics.items():
distances[metric].append(metric_func(adv_chunk, inputs_chunk).detach().cpu())
logits, probs, preds, logits_adv, probs_adv, preds_adv = [torch.cat(l) for l in all_predictions]
for metric in metrics.keys():
distances[metric] = torch.cat(distances[metric], 0)
accuracy_orig = (preds == labels).float().mean().item()
if targets is not None:
success = (preds_adv == targets)
labels = targets
else:
success = (preds_adv != labels)
prob_orig = probs.gather(1, labels.unsqueeze(1)).squeeze(1)
prob_adv = probs_adv.gather(1, labels.unsqueeze(1)).squeeze(1)
labels_infhot = torch.zeros_like(logits_adv).scatter_(1, labels.unsqueeze(1), float('inf'))
real = logits_adv.gather(1, labels.unsqueeze(1)).squeeze(1)
other = (logits_adv - labels_infhot).max(1).values
diff_vs_max_adv = (real - other)
nll = F.cross_entropy(logits, labels, reduction='none')
nll_adv = F.cross_entropy(logits_adv, labels, reduction='none')
data = {
'time': attack_data['time'],
'num_forwards': attack_data['num_forwards'],
'num_backwards': attack_data['num_backwards'],
'targeted': targets is not None,
'preds': preds,
'adv_preds': preds_adv,
'accuracy_orig': accuracy_orig,
'success': success,
'probs_orig': prob_orig,
'probs_adv': prob_adv,
'logit_diff_adv': diff_vs_max_adv,
'nll': nll,
'nll_adv': nll_adv,
'distances': distances,
}
return data
def print_metrics(metrics: dict) -> None:
np.set_printoptions(formatter={'float': '{:0.3f}'.format}, threshold=16, edgeitems=3,
linewidth=120) # To print arrays with less precision
print('Original accuracy: {:.2%}'.format(metrics['accuracy_orig']))
print('Attack done in: {:.2f}s with {:.4g} forwards and {:.4g} backwards.'.format(
metrics['time'], metrics['num_forwards'], metrics['num_backwards']))
success = metrics['success'].numpy()
fail = bool(success.mean() != 1)
print('Attack success: {:.2%}'.format(success.mean()) + fail * ' - {}'.format(success))
for distance, values in metrics['distances'].items():
data = values.numpy()
print('{}: {} - Average: {:.3f} - Median: {:.3f}'.format(distance, data, data.mean(), np.median(data)) +
fail * ' | Avg over success: {:.3f}'.format(data[success].mean()))
attack_type = 'targets' if metrics['targeted'] else 'correct'
print('Logit({} class) - max_Logit(other classes): {} - Average: {:.2f}'.format(
attack_type, metrics['logit_diff_adv'].numpy(), metrics['logit_diff_adv'].numpy().mean()))
print('NLL of target/pred class: {:.3f}'.format(metrics['nll_adv'].numpy().mean()))
| [((5060, 5169), 'collections.OrderedDict', 'OrderedDict', (["[('linf', linf_distances), ('l0', l0_distances), ('l1', l1_distances), (\n 'l2', l2_distances)]"], {}), "([('linf', linf_distances), ('l0', l0_distances), ('l1',\n l1_distances), ('l2', l2_distances)])\n", (5071, 5169), False, 'from collections import OrderedDict\n'), ((4679, 4704), 'torch.cat', 'torch.cat', (['advs_chunks', '(0)'], {}), '(advs_chunks, 0)\n', (4688, 4704), False, 'import torch\n'), ((7563, 7612), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['logits', 'labels'], {'reduction': '"""none"""'}), "(logits, labels, reduction='none')\n", (7578, 7612), True, 'from torch.nn import functional as F\n'), ((7627, 7680), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['logits_adv', 'labels'], {'reduction': '"""none"""'}), "(logits_adv, labels, reduction='none')\n", (7642, 7680), True, 'from torch.nn import functional as F\n'), ((8263, 8367), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'formatter': "{'float': '{:0.3f}'.format}", 'threshold': '(16)', 'edgeitems': '(3)', 'linewidth': '(120)'}), "(formatter={'float': '{:0.3f}'.format}, threshold=16,\n edgeitems=3, linewidth=120)\n", (8282, 8367), True, 'import numpy as np\n'), ((1980, 2006), 'torch.tensor', 'torch.tensor', (['other_labels'], {}), '(other_labels)\n', (1992, 2006), False, 'import torch\n'), ((3023, 3048), 'torch.cat', 'torch.cat', (['already_adv', '(0)'], {}), '(already_adv, 0)\n', (3032, 3048), False, 'import torch\n'), ((3067, 3103), 'torch.cuda.Event', 'torch.cuda.Event', ([], {'enable_timing': '(True)'}), '(enable_timing=True)\n', (3083, 3103), False, 'import torch\n'), ((3105, 3141), 'torch.cuda.Event', 'torch.cuda.Event', ([], {'enable_timing': '(True)'}), '(enable_timing=True)\n', (3121, 3141), False, 'import torch\n'), ((3182, 3198), 'adv_lib.utils.ForwardCounter', 'ForwardCounter', ([], {}), '()\n', (3196, 3198), False, 'from adv_lib.utils import ForwardCounter, BackwardCounter, predict_inputs\n'), ((3200, 3217), 'adv_lib.utils.BackwardCounter', 'BackwardCounter', ([], {}), '()\n', (3215, 3217), False, 'from adv_lib.utils import ForwardCounter, BackwardCounter, predict_inputs\n'), ((3278, 3309), 'distutils.version.LooseVersion', 'LooseVersion', (['torch.__version__'], {}), '(torch.__version__)\n', (3290, 3309), False, 'from distutils.version import LooseVersion\n'), ((3313, 3332), 'distutils.version.LooseVersion', 'LooseVersion', (['"""1.8"""'], {}), "('1.8')\n", (3325, 3332), False, 'from distutils.version import LooseVersion\n'), ((4070, 4094), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (4092, 4094), False, 'import torch\n'), ((5649, 5758), 'warnings.warn', 'warnings.warn', (['"""Values of produced adversarials are not in the [0, 1] range -> Clipping to [0, 1]."""'], {}), "(\n 'Values of produced adversarials are not in the [0, 1] range -> Clipping to [0, 1].'\n )\n", (5662, 5758), False, 'import warnings\n'), ((6831, 6843), 'torch.cat', 'torch.cat', (['l'], {}), '(l)\n', (6840, 6843), False, 'import torch\n'), ((6932, 6963), 'torch.cat', 'torch.cat', (['distances[metric]', '(0)'], {}), '(distances[metric], 0)\n', (6941, 6963), False, 'import torch\n'), ((7321, 7349), 'torch.zeros_like', 'torch.zeros_like', (['logits_adv'], {}), '(logits_adv)\n', (7337, 7349), False, 'import torch\n'), ((6140, 6155), 'inspect.isclass', 'isclass', (['v.func'], {}), '(v.func)\n', (6147, 6155), False, 'from inspect import isclass\n'), ((9015, 9030), 'numpy.median', 'np.median', (['data'], {}), '(data)\n', (9024, 9030), True, 'import numpy as np\n')] |
Summer0328/ChangeDet_DL-1 | thawSlumpChangeDet/polygons_compare.py | f2474ee4200d9ad093c0e5a27a94bfbd3bd038e7 | #!/usr/bin/env python
# Filename: polygons_cd
"""
introduction: compare two polygons in to shape file
authors: Huang Lingcao
email:[email protected]
add time: 26 February, 2020
"""
import sys,os
from optparse import OptionParser
# added path of DeeplabforRS
sys.path.insert(0, os.path.expanduser('~/codes/PycharmProjects/DeeplabforRS'))
import basic_src.io_function as io_function
import basic_src.basic as basic
import basic_src.map_projection as map_projection
import parameters
import polygons_cd_multi
import polygons_cd
def main(options, args):
old_shp_path = args[0]
new_shp_path = args[1]
# check files do exist
assert io_function.is_file_exist(new_shp_path)
assert io_function.is_file_exist(old_shp_path)
# check projection of the shape file, should be the same
old_shp_proj4 = map_projection.get_raster_or_vector_srs_info_proj4(old_shp_path)
new_shp_proj4 = map_projection.get_raster_or_vector_srs_info_proj4(new_shp_path)
if old_shp_proj4 != new_shp_proj4:
raise ValueError('error, projection insistence between %s and %s' % (old_shp_proj4, new_shp_proj4))
main_shp_name = polygons_cd_multi.get_main_shp_name(old_shp_path,new_shp_path)
# conduct change detection
if options.output is not None:
main_shp_name = options.output
# get expanding and shrinking parts
output_path_expand = 'expand_' + main_shp_name
output_path_shrink = 'shrink_' + main_shp_name
polygons_cd.polygons_change_detection(old_shp_path, new_shp_path, output_path_expand,output_path_shrink)
if __name__ == "__main__":
usage = "usage: %prog [options] old_shape_file new_shape_file "
parser = OptionParser(usage=usage, version="1.0 2020-02-26")
parser.description = 'Introduction: compare two groups of polygons '
parser.add_option("-p", "--para",
action="store", dest="para_file",
help="the parameters file")
parser.add_option('-o', '--output',
action="store", dest = 'output',
help='the path to save the change detection results')
(options, args) = parser.parse_args()
if len(sys.argv) < 2:
parser.print_help()
sys.exit(2)
# # set parameters files
# if options.para_file is None:
# print('error, no parameters file')
# parser.print_help()
# sys.exit(2)
# else:
# parameters.set_saved_parafile_path(options.para_file)
basic.setlogfile('polygons_changeDetection.log')
main(options, args)
| [((286, 344), 'os.path.expanduser', 'os.path.expanduser', (['"""~/codes/PycharmProjects/DeeplabforRS"""'], {}), "('~/codes/PycharmProjects/DeeplabforRS')\n", (304, 344), False, 'import sys, os\n'), ((655, 694), 'basic_src.io_function.is_file_exist', 'io_function.is_file_exist', (['new_shp_path'], {}), '(new_shp_path)\n', (680, 694), True, 'import basic_src.io_function as io_function\n'), ((706, 745), 'basic_src.io_function.is_file_exist', 'io_function.is_file_exist', (['old_shp_path'], {}), '(old_shp_path)\n', (731, 745), True, 'import basic_src.io_function as io_function\n'), ((828, 892), 'basic_src.map_projection.get_raster_or_vector_srs_info_proj4', 'map_projection.get_raster_or_vector_srs_info_proj4', (['old_shp_path'], {}), '(old_shp_path)\n', (878, 892), True, 'import basic_src.map_projection as map_projection\n'), ((913, 977), 'basic_src.map_projection.get_raster_or_vector_srs_info_proj4', 'map_projection.get_raster_or_vector_srs_info_proj4', (['new_shp_path'], {}), '(new_shp_path)\n', (963, 977), True, 'import basic_src.map_projection as map_projection\n'), ((1147, 1210), 'polygons_cd_multi.get_main_shp_name', 'polygons_cd_multi.get_main_shp_name', (['old_shp_path', 'new_shp_path'], {}), '(old_shp_path, new_shp_path)\n', (1182, 1210), False, 'import polygons_cd_multi\n'), ((1463, 1572), 'polygons_cd.polygons_change_detection', 'polygons_cd.polygons_change_detection', (['old_shp_path', 'new_shp_path', 'output_path_expand', 'output_path_shrink'], {}), '(old_shp_path, new_shp_path,\n output_path_expand, output_path_shrink)\n', (1500, 1572), False, 'import polygons_cd\n'), ((1679, 1730), 'optparse.OptionParser', 'OptionParser', ([], {'usage': 'usage', 'version': '"""1.0 2020-02-26"""'}), "(usage=usage, version='1.0 2020-02-26')\n", (1691, 1730), False, 'from optparse import OptionParser\n'), ((2482, 2530), 'basic_src.basic.setlogfile', 'basic.setlogfile', (['"""polygons_changeDetection.log"""'], {}), "('polygons_changeDetection.log')\n", (2498, 2530), True, 'import basic_src.basic as basic\n'), ((2226, 2237), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (2234, 2237), False, 'import sys, os\n')] |
brotich/andela_bootcamp_X | andela_labs/Car Class Lab (OOP)/car.py | 19fc5bb66d3c930d4e6b9afeb45abc00bbc4c2ea | class Car(object):
"""
Car class that can be used to instantiate various vehicles.
It takes in arguments that depict the type, model, and name
of the vehicle
"""
def __init__(self, name="General", model="GM", car_type="saloon"):
num_of_wheels = 4
num_of_doors = 4
if car_type == "trailer":
num_of_wheels = 8
if name == "Porshe" or name == "Koenigsegg":
num_of_doors = 2
self.name = name
self.model = model
self.type = car_type
self.num_of_doors = num_of_doors
self.num_of_wheels = num_of_wheels
self.speed = 0
def drive(self, gear):
if self.type == "trailer":
self.speed = gear * 77 / 7
elif self.type == "saloon":
self.speed = gear * 1000 / 3
return self
def is_saloon(self):
return self.type == 'saloon'
| [] |
fcsiba/Smart-Cart | CV Model/Model - JupyterNotebook/mrcnn/tfliteconverter.py | 7d45b9f2a2be2015936c2a61068b2fd8b6c95fe5 | import tensorflow as tf
# Convert the model.
converter = tf.lite.TFLiteConverter.from_saved_model('model.py')
tflite_model = converter.convert()
open("trash_ai.tflite", "wb").write(tflite_model) | [((58, 110), 'tensorflow.lite.TFLiteConverter.from_saved_model', 'tf.lite.TFLiteConverter.from_saved_model', (['"""model.py"""'], {}), "('model.py')\n", (98, 110), True, 'import tensorflow as tf\n')] |
shivamsinghal212/Url-Shortener | basicapp/cron.py | 4127a993272744f6f8592415314c8e8514d43153 | from django_cron import CronJobBase, Schedule
from .models import Link
from django.utils import timezone
class MyCronJob(CronJobBase):
RUN_EVERY_MINS = 1 # every 2 hours
schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
code = 'basicapp.cron' # a unique code
def do(self):
current_time = timezone.now()
links = Link.objects.all()
for obj in links:
print("Checking last hit date for: ", obj.shortenURL)
delta = current_time - obj.last_hit
if delta.days > 2:
print('link is older than 2 days, DELETING!')
obj.delete()
else:
print('link was recently hit, Wont Delete.')
| [((191, 230), 'django_cron.Schedule', 'Schedule', ([], {'run_every_mins': 'RUN_EVERY_MINS'}), '(run_every_mins=RUN_EVERY_MINS)\n', (199, 230), False, 'from django_cron import CronJobBase, Schedule\n'), ((319, 333), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (331, 333), False, 'from django.utils import timezone\n')] |
Smylers/WeasyPrint | weasyprint/tests/test_stacking.py | 25ce91a34755386b3350d898aa1638c349723b57 | # coding: utf8
"""
weasyprint.tests.stacking
-------------------------
:copyright: Copyright 2011-2012 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
from ..stacking import StackingContext
from .test_boxes import serialize
from .test_layout import parse
from .testing_utils import assert_no_logs
def to_lists(page):
html, = page.children
return serialize_stacking(StackingContext.from_box(html, page))
def serialize_box(box):
return '%s %s' % (box.element_tag, box.sourceline)
def serialize_stacking(context):
return (
serialize_box(context.box),
[serialize_box(b) for b in context.blocks_and_cells],
[serialize_stacking(c) for c in context.zero_z_contexts],
)
@assert_no_logs
def test_nested():
page, = parse('''\
<p id=lorem></p>
<div style="position: relative">
<p id=lipsum></p>
</p>
''')
assert to_lists(page) == (
'html 1',
['body 1', 'p 1'],
[(
'div 2',
['p 3'],
[])])
page, = parse('''\
<div style="position: relative">
<p style="position: relative"></p>
</div>
''')
assert to_lists(page) == (
'html 1',
['body 1'],
[('div 1', [], []), # In this order
('p 2', [], [])])
@assert_no_logs
def test_image_contexts():
page, = parse('''
<body>Some text: <img style="position: relative" src=pattern.png>''')
html, = page.children
context = StackingContext.from_box(html, page)
# The image is *not* in this context:
assert serialize([context.box]) == [
('html', 'Block', [
('body', 'Block', [
('body', 'Line', [
('body', 'Text', 'Some text: ')])])])]
# ... but in a sub-context:
assert serialize(c.box for c in context.zero_z_contexts) == [
('img', 'InlineReplaced', '<replaced>')]
| [] |
industrydive/sourcelist | django-magic-link/customers/views.py | 9db4ec5c9cb9246a644615ca401a3c8f8d560b6e | from django.shortcuts import render
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from sesame import utils
from django.core.mail import send_mail
def login_page(request):
if request.method == "POST":
email = request.POST.get("emailId")
user = User.objects.get(email=email)
login_token = utils.get_query_string(user)
login_link = "http://127.0.0.1:8000/customers/{}".format(login_token)
html_message = """
<p>Hi there,</p>
<p>Here is your <a href="{}">magic link</a> </p>
<p>Thanks,</p>
<p>Django Admin</p>
""".format(login_link)
send_mail(
'Django Magic Link',
html_message,
'[email protected]',
[email],
fail_silently=False,
html_message = html_message
)
return render(request, "login.html", context={"message":"Please check your email for magic link."})
return render(request, "login.html")
@login_required
def customers_home_page(request):
return render(request, "customers/index.html") | [((1012, 1041), 'django.shortcuts.render', 'render', (['request', '"""login.html"""'], {}), "(request, 'login.html')\n", (1018, 1041), False, 'from django.shortcuts import render\n'), ((1104, 1143), 'django.shortcuts.render', 'render', (['request', '"""customers/index.html"""'], {}), "(request, 'customers/index.html')\n", (1110, 1143), False, 'from django.shortcuts import render\n'), ((321, 350), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'email': 'email'}), '(email=email)\n', (337, 350), False, 'from django.contrib.auth.models import User\n'), ((373, 401), 'sesame.utils.get_query_string', 'utils.get_query_string', (['user'], {}), '(user)\n', (395, 401), False, 'from sesame import utils\n'), ((681, 812), 'django.core.mail.send_mail', 'send_mail', (['"""Django Magic Link"""', 'html_message', '"""[email protected]"""', '[email]'], {'fail_silently': '(False)', 'html_message': 'html_message'}), "('Django Magic Link', html_message, '[email protected]', [\n email], fail_silently=False, html_message=html_message)\n", (690, 812), False, 'from django.core.mail import send_mail\n'), ((907, 1004), 'django.shortcuts.render', 'render', (['request', '"""login.html"""'], {'context': "{'message': 'Please check your email for magic link.'}"}), "(request, 'login.html', context={'message':\n 'Please check your email for magic link.'})\n", (913, 1004), False, 'from django.shortcuts import render\n')] |
dataiku/dss-plugin-nlp-analysis | python-lib/config/dss_parameter.py | ff9dce56500dc8f28f83158afbdf7db01074ee38 | from .custom_check import CustomCheck, CustomCheckError
from typing import Any, List
import logging
logger = logging.getLogger(__name__)
class DSSParameterError(Exception):
"""Exception raised when at least one CustomCheck fails."""
pass
class DSSParameter:
"""Object related to one parameter. It is mainly used for checks to run in backend for custom forms.
Attributes:
name(str): Name of the parameter
value(Any): Value of the parameter
checks(list[dict], optional): Checks to run on provided value
required(bool, optional): Whether the value can be None
"""
def __init__(
self, name: str, value: Any, checks: List[dict] = None, required: bool = False
):
"""Initialization method for the DSSParameter class
Args:
name(str): Name of the parameter
value(Any): Value of the parameter
checks(list[dict], optional): Checks to run on provided value
required(bool, optional): Whether the value can be None
"""
if checks is None:
checks = []
self.name = name
self.value = value
self.checks = [CustomCheck(**check) for check in checks]
if required:
self.checks.append(CustomCheck(type="exists"))
self.run_checks()
def run_checks(self):
"""Runs all checks provided for this parameter"""
errors = []
for check in self.checks:
try:
check.run(self.value)
except CustomCheckError as err:
errors.append(err)
if errors:
self.handle_failure(errors)
self.handle_success()
def handle_failure(self, errors: List[CustomCheckError]):
"""Is called when at least one test fails. It will raise an Exception with understandable text
Args:
errors(list[CustomCheckError]: Errors met when running checks
Raises:
DSSParameterError: Raises if at least on check fails
"""
raise DSSParameterError(self.format_failure_message(errors))
def format_failure_message(self, errors: List[CustomCheckError]) -> str:
"""Format failure text
Args:
errors(list[CustomCheckError]: Errors met when running checks
Returns:
str: Formatted error message
"""
return """
Error for parameter \"{name}\" :
{errors}
""".format(
name=self.name, errors="\n".join(["\t {}".format(e) for e in errors])
)
def handle_success(self):
"""Called if all checks are successful. Prints a success message"""
self.print_success_message()
def print_success_message(self):
"""Formats the succee message"""
logger.info("All checks have been successfully done for {}.".format(self.name))
def __repr__(self):
return "DSSParameter(name={}, value={})".format(self.name, self.value)
def __str__(self):
return "DSSParameter(name={}, value={})".format(self.name, self.value)
| [((111, 138), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (128, 138), False, 'import logging\n')] |
mstarikov/transitfeed | misc/import_ch_zurich.py | c018d7b14f6fccaa670629c00c83a390b5461fc1 | #!/usr/bin/python2.4
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""imports Zurich timetables, converting them from DIVA export format
to Google Transit format."""
from __future__ import print_function
# This was written before transitfeed.py and we haven't yet found the
# motivation to port it. Please see the examples directory for better
# examples.
try:
from io import StringIO as cStringIO
except ImportError:
import cStringIO
import csv
import datetime
import optparse
import sys
import urllib
import zipfile
# Zurich tram lines
TRAM_LINES = {'2': ['FF3300', 'FFFFFF'],
'3': ['009933', 'FFFFFF'],
'4': ['333399', 'FFFFFF'],
'5': ['996600', 'FFFFFF'],
'6': ['CC9933', 'FFFFFF'],
'7': ['000000', 'FFFFFF'],
'8': ['99CC00', '000000'],
'9': ['333399', 'FFFFFF'],
'10': ['FF6699', 'FFFFFF'],
'11': ['009933', 'FFFFFF'],
'12': ['FFFFFF', '000000'],
'13': ['FFCC33', '000000'],
'14': ['3399CC', 'FFFFFF'],
'15': ['FF3300', 'FFFFFF']}
# Terms that indicate points of interest. Used to split station names
# to (name, city).
POI_TERMS = {'Bahnhof': 1, 'Dorfzentrum': 1, 'Schiffstation': 1,
'Station': 1, u'Zentrum': 1,
'Dorfplatz': 1, 'Zentrum/Bahnhof': 1, 'Dorf': 1}
# Maps station names to (name, city). Used as exception list where our
# simple heuristcs doesn't work.
SPECIAL_NAMES = {
'Freienbach SOB, Bahnhof': ('Freienbach SOB', 'Freienbach'),
'Herrliberg-Feldmeilen,Bhf West': ('Bahnhof West', 'Herrliberg-Feldmeilen'),
'Neue Forch': ('Neue Forch', u'Z\u00fcrich'),
'Oberrieden Dorf Bahnhof': ('Oberrieden Dorf', 'Oberrieden'),
'Spital Zollikerberg': ('Spital', 'Zollikerberg'),
'Triemli': ('Triemli', u'Z\u00fcrich'),
'Zentrum Glatt': ('Zentrum Glatt', 'Wallisellen'),
}
# Cities whose names we want to prettify/correct at import time.
SPECIAL_CITIES = {
'Affoltern a. A.': 'Affoltern am Albis',
'Wangen b. D.': 'Wangen'
}
def read_csv(s, cols):
csv_dialect = csv.Sniffer().sniff(s[0])
reader = csv.reader(s, csv_dialect)
header = next(reader)
col_index = [-1] * len(cols)
for i in range(len(cols)):
if cols[i] in header:
col_index[i] = header.index(cols[i])
for row in reader:
result = [None] * len(cols)
for i in range(len(cols)):
ci = col_index[i]
if ci >= 0:
result[i] = row[ci].decode('iso-8859-1').strip()
yield result
def convert_c_h1903(x, y):
"Converts coordinates from the 1903 Swiss national grid system to WGS-84."
yb = (x - 600000.0) / 1e6;
xb = (y - 200000.0) / 1e6;
lam = 2.6779094 \
+ 4.728982 * yb \
+ 0.791484 * yb * xb \
+ 0.1306 * yb * xb * xb \
- 0.0436 * yb * yb * yb
phi = 16.9023892 \
+ 3.238372 * xb \
- 0.270978 * yb * yb \
- 0.002582 * xb * xb \
- 0.0447 * yb * yb * xb \
- 0.0140 * xb * xb * xb
return phi * 100.0 / 36.0, lam * 100.0 / 36.0
def encode_for_csv(x):
"Encodes one value for CSV."
k = x.encode('utf-8')
if ',' in k or '"' in k:
return '"%s"' % k.replace('"', '""')
else:
return k
def write_row(stream, values):
"writes one row of comma-separated values to stream."
stream.write(','.join([encode_for_csv(val) for val in values]))
stream.write('\n')
class Station:
pass
class Route:
pass
class Pattern:
pass
class Trip:
pass
# https://developers.google.com/transit/gtfs/
TYPE_TRAM = 0
TYPE_BUS = 3
class Divaimporter:
def __init__(self, coord_converter, drop_unadvertised_lines):
self.coord_converter = coord_converter
self.stations = {} # id --> Station
self.routes = {} # id --> Route
self.patterns = {} # id --> Pattern
self.services = {} # id --> [date, date, ...] (sorted)
self.pickup_type = {} # (trip_id, stop_seq) --> '0'=normal/'1'=no pickup
self.drop_off_type = {} # (trip_id, stop_seq) --> '0'/'1', '1'=no drop-off
self.trips = {} # id --> Trip
self.goodTrips = {}
self._drop_unadvertised_lines = drop_unadvertised_lines
@staticmethod
def demangle_name(name):
"Applies some simple heuristics to split names into (city, name)."
# Handle special cases where our heuristcs doesn't work.
# Example:"Triemli" --> ("Triemli", "Zurich").
if name in SPECIAL_NAMES:
return SPECIAL_NAMES[name]
# Expand abbreviations.
for abbrev, expanded in [('str.', 'strasse'),
('Schiffst.', 'Schiffstation')]:
suffix_pos = name.rfind(abbrev)
if suffix_pos > 0:
name = name[:suffix_pos] + expanded
# end for
names = name.split(", ", 1)
if len(names) == 2:
if names[1] in POI_TERMS:
nam = u'%s %s' % (names[0], names[1])
else:
nam = names[1]
city = names[0]
else:
# "Zurich Enge": First word of station name designates the city
nam = names[0]
city = nam.split(' ')[0]
return nam, SPECIAL_CITIES.get(city, city)
def import_feeds(self, inpath):
inzip = zipfile.ZipFile(inpath, mode="r")
read = lambda name, prefix="": (prefix + inzip.read(name)).splitlines()
# The advertised lines file has no column headers.
self.import_stations(read('rec_ort.mdv'), read('bedienendeLinien_google.csv',
"ORT_NR;LI_NR;;;;"))
self.import_routes(read('rec_lin_ber.mdv'))
self.import_patterns(read('lid_verlauf.mdv'))
self.import_services(read('tagesart_merkmal.mdv'),
read('firmenkalender.mdv'))
self.import_traffic_restrictions(read('vb_regio.mdv'))
self.import_boarding(read('bedverb.mdv'))
self.import_stop_times(read('lid_fahrzeitart.mdv'))
self.import_trips(read('rec_frt.mdv'))
def import_stations(self, station_file, adv_file):
"imports the rec_ort.mdv file."
for id, name, x, y, uic_code in \
read_csv(station_file, ['ORT_NR', 'ORT_NAME',
'ORT_POS_X', 'ORT_POS_Y', 'ORT_NR_NATIONAL']):
station = Station()
station.id = id
station.position = self.coord_converter(float(x), float(y))
station.uic_code = ''
if uic_code and len(uic_code) == 7 and uic_code[:2] == '85':
station.uic_code = uic_code
station.name, station.city = self.demangle_name(name)
station.country = 'CH'
station.url = 'http://fahrplan.zvv.ch/?to.0=' + \
urllib.quote(name.encode('iso-8859-1'))
station.advertised_lines = set()
self.stations[id] = station
for station_id, line_id in read_csv(adv_file, ['ORT_NR', 'LI_NR']):
if station_id in self.stations:
# Line ids in this file have leading zeroes, remove.
self.stations[station_id].advertised_lines.add(line_id.lstrip("0"))
else:
print("Warning, advertised lines file references " \
"unknown station, id " + station_id)
def import_routes(self, s):
"imports the rec_lin_ber.mdv file."
# the line id is really qualified with an area_id (BEREICH_NR), but the
# table of advertised lines does not include area. Fortunately, it seems
# that line ids are unique across all areas, so we can just throw it away.
for line_id, name in \
read_csv(s, ['LI_NR', 'LINIEN_BEZ_DRUCK']):
route = Route()
route.id = line_id
route.name = name
route.color = "FFFFFF"
route.color_text = "000000"
if name in TRAM_LINES:
route.type = TYPE_TRAM
route.color = TRAM_LINES[name][0]
route.color_text = TRAM_LINES[name][1]
else:
route.type = TYPE_BUS
if route.name[0:1] == "N":
route.color = "000000"
route.color_text = "FFFF00"
self.routes[route.id] = route
def import_patterns(self, s):
"imports the lid_verlauf.mdv file."
for line, strli, direction, seq, station_id in \
read_csv(s, ['LI_NR', 'STR_LI_VAR', 'LI_RI_NR', 'LI_LFD_NR', 'ORT_NR']):
pattern_id = u'Pat.%s.%s.%s' % (line, strli, direction)
pattern = self.patterns.get(pattern_id, None)
if not pattern:
pattern = Pattern()
pattern.id = pattern_id
pattern.stops = []
pattern.stoptimes = {}
self.patterns[pattern_id] = pattern
seq = int(seq) - 1
if len(pattern.stops) <= seq:
pattern.stops.extend([None] * (seq - len(pattern.stops) + 1))
pattern.stops[seq] = station_id
def import_boarding(self, drop_off_file):
"Reads the bedverb.mdv file."
for trip_id, seq, code in \
read_csv(drop_off_file, ['FRT_FID', 'LI_LFD_NR', 'BEDVERB_CODE']):
key = (trip_id, int(seq) - 1)
if code == 'A':
self.pickup_type[key] = '1' # '1' = no pick-up
elif code == 'E':
self.drop_off_type[key] = '1' # '1' = no drop-off
elif code == 'B':
# 'B' just means that rider needs to push a button to have the driver
# stop. We don't encode this for now.
pass
else:
raise ValueError('Unexpected code in bedverb.mdv; '
'FRT_FID=%s BEDVERB_CODE=%s' % (trip_id, code))
def import_services(self, daytype_file, days_file):
daytypes = {} # 'j06' --> {20060713:1, 20060714:1, ...}
schedules = {} # {'j06':1, 'p27':1}
for schedule, daytype, date in \
read_csv(days_file, ['FPL_KUERZEL', 'TAGESART_NR', 'BETRIEBSTAG']):
schedule = schedule.strip()
daytypes.setdefault('%s.%s' % (schedule, daytype), {})[int(date)] = 1
schedules[schedule] = 1
schedules = schedules.keys()
service_days = {} # 'Cj06.H9' --> {20060713:1, 20060714:1, ...}
for daytype, service_id in \
read_csv(daytype_file, ['TAGESART_NR', 'TAGESMERKMAL_NR']):
for schedule in schedules:
service = 'C%s.%s' % (schedule, service_id)
for date in daytypes['%s.%s' % (schedule, daytype)].iterkeys():
service_days.setdefault(service, {})[date] = 1
for k in service_days.iterkeys():
self.services[k] = service_days[k].keys()
self.services[k].sort()
def import_traffic_restrictions(self, restrictions_file):
"Reads the vb_regio.mdv file."
ParseDate = lambda x: datetime.date(int(x[:4]), int(x[4:6]), int(x[6:8]))
MonthNr = lambda x: int(x[:4]) * 12 + int(x[4:6])
for schedule, id, bitmask, start_date, end_date in \
read_csv(restrictions_file,
['FPL_KUERZEL', 'VB', 'VB_DATUM', 'DATUM_VON', 'DATUM_BIS']):
id = u"VB%s.%s" % (schedule, id)
bitmask = bitmask.strip()
dates = {}
# This is ugly as hell, I know. I briefly explain what I do:
# 8 characters in the bitmask equal a month ( 8 * 4bits = 32, no month has
# more than 31 days, so it's ok).
# Then I check if the current day of the month is in the bitmask (by
# shifting the bit by x days and comparing it to the bitmask).
# If so I calculate back what year month and actual day I am in
# (very disgusting) and mark that date...
for i in range(MonthNr(end_date) - MonthNr(start_date) + 1):
mask = int(bitmask[i * 8:i * 8 + 8], 16)
for d in range(32):
if 1 << d & mask:
year = int(start_date[0:4]) + ((int(start_date[4:6]) + i - 1)) / 12
month = ((int(start_date[4:6]) + i - 1) % 12) + 1
day = d + 1
cur_date = str(year) + ("0" + str(month))[-2:] + ("0" + str(day))[-2:]
dates[int(cur_date)] = 1
self.services[id] = dates.keys()
self.services[id].sort()
def import_stop_times(self, stoptimes_file):
"imports the lid_fahrzeitart.mdv file."
for line, strli, direction, seq, stoptime_id, drive_secs, wait_secs in \
read_csv(stoptimes_file,
['LI_NR', 'STR_LI_VAR', 'LI_RI_NR', 'LI_LFD_NR',
'FGR_NR', 'FZT_REL', 'HZEIT']):
pattern = self.patterns[u'Pat.%s.%s.%s' % (line, strli, direction)]
stoptimes = pattern.stoptimes.setdefault(stoptime_id, [])
seq = int(seq) - 1
drive_secs = int(drive_secs)
wait_secs = int(wait_secs)
assert len(stoptimes) == seq # fails if seq not in order
stoptimes.append((drive_secs, wait_secs))
def import_trips(self, trips_file):
"imports the rec_frt.mdv file."
for trip_id, trip_starttime, line, strli, direction, \
stoptime_id, schedule_id, daytype_id, restriction_id, \
dest_station_id, dest_stop_id, trip_type in \
read_csv(trips_file,
['FRT_FID', 'FRT_START', 'LI_NR', 'STR_LI_VAR', 'LI_RI_NR',
'FGR_NR', 'FPL_KUERZEL', 'TAGESMERKMAL_NR', 'VB',
'FRT_HP_AUS', 'HALTEPUNKT_NR_ZIEL', 'FAHRTART_NR']):
if trip_type != '1':
print("skipping Trip ", trip_id, line, direction, \
dest_station_id, trip_type)
continue # 1=normal, 2=empty, 3=from depot, 4=to depot, 5=other
trip = Trip()
# The trip_id (FRT_FID) field is not unique in the vbz data, as of Dec 2009
# to prevent overwritingimported trips when we key them by trip.id
# we should make trip.id unique, by combining trip_id and line
trip.id = ("%s_%s") % (trip_id, line)
trip.starttime = int(trip_starttime)
trip.route = self.routes[line]
dest_station = self.stations[dest_station_id]
pattern_id = u'Pat.%s.%s.%s' % (line, strli, direction)
trip.pattern = self.patterns[pattern_id]
trip.stoptimes = trip.pattern.stoptimes[stoptime_id]
if restriction_id:
service_id = u'VB%s.%s' % (schedule_id, restriction_id)
else:
service_id = u'C%s.%s' % (schedule_id, daytype_id)
trip.service_id = service_id
assert len(self.services[service_id]) > 0
assert not trip.id in self.trips
self.trips[trip.id] = trip
def write(self, outpath):
"writes a .zip file in Google Transit format."
out = zipfile.ZipFile(outpath, mode="w", compression=zipfile.ZIP_DEFLATED)
for filename, func in [('agency.txt', self.write_agency),
('calendar.txt', self.write_calendar),
('calendar_dates.txt', self.write_calendarDates),
('routes.txt', self.write_routes),
('trips.txt', self.write_trips),
('stops.txt', self.write_stations),
('stop_times.txt', self.write_stop_times)]:
s = cStringIO.StringIO()
func(s)
out.writestr(filename, s.getvalue())
out.close()
@staticmethod
def write_agency(out):
out.write('agency_name,agency_url,agency_lang,agency_timezone\n')
out.write('VBZ,http://www.vbz.ch/,de,Europe/Zurich\n')
def write_routes(self, out):
out.write('route_id,route_short_name,route_long_name,route_type,'
'route_color,route_text_color\n')
k = [(r.id, r) for r in self.routes.itervalues()]
k.sort()
for id, route in k:
name = encode_for_csv(route.name)
out.write('%s,%s,%s,%s,%s,%s\n' % (
id, name, name, route.type, route.color, route.color_text))
def write_stations(self, out):
out.write('stop_id,stop_uic_code,stop_name,stop_city,stop_country,'
'stop_lat,stop_lon,stop_url\n')
stations = [(s.id, s) for s in self.stations.itervalues()]
stations.sort()
for id, s in stations:
write_row(out,
[id, s.uic_code, s.name, s.city, s.country,
str(s.position[0]), str(s.position[1]), s.url])
def write_calendar(self, out):
out.write('service_id,monday,tuesday,wednesday,thursday,'
'friday,saturday,sunday,start_date,end_date\n')
for service_id, service in self.services.iteritems():
out.write('%s,0,0,0,0,0,0,0,%d,%d\n' %
(encode_for_csv(service_id), service[0], service[-1]))
def write_calendarDates(self, out):
out.write('service_id,date,exception_type\n')
for service_id, service in self.services.iteritems():
encoded_service_id = encode_for_csv(service_id)
for date in service:
out.write('%s,%d,1\n' % (encoded_service_id, date))
def write_trips(self, out):
out.write('trip_id,route_id,service_id,trip_headsign\n')
trips = [(t.id, t) for t in self.trips.itervalues()]
trips.sort()
for (trip_id, trip) in trips:
if (not len(trip.pattern.stops)) or (None in trip.pattern.stops):
print("*** Skipping bad trip: ", [trip.id])
continue
self.goodTrips[trip_id] = True
headsign = self.stations[trip.pattern.stops[-1]].name
write_row(out, [trip.id, trip.route.id, trip.service_id, headsign])
@staticmethod
def format_time(t):
return "%02d:%02d:%02d" % (t / 3600, (t % 3600) / 60, t % 60)
def write_stop_times(self, out):
out.write('trip_id,stop_sequence,stop_id,arrival_time,departure_time,'
'pickup_type,drop_off_type\n')
trips = [(t.id, t) for t in self.trips.itervalues()]
trips.sort()
for (trip_id, trip) in trips:
if trip_id not in self.goodTrips:
continue
assert len(trip.stoptimes) == len(trip.pattern.stops)
time = trip.starttime
for seq in range(len(trip.stoptimes)):
drive_time, wait_time = trip.stoptimes[seq]
time += drive_time
station = self.stations[trip.pattern.stops[seq]]
if not self._drop_unadvertised_lines or \
trip.route.id in station.advertised_lines:
write_row(out, [trip.id, str(seq + 1), station.id,
self.format_time(time),
self.format_time(time + wait_time),
self.pickup_type.get((trip.id, seq), '0'),
self.drop_off_type.get((trip.id, seq), '0')])
time += wait_time
def main(argv):
# It's hard to replicate the old behavior of --drop_unadvertised_lines, so we
# don't. Instead, there are only two options without arguments:
# nothing drop
# --nodrop_unadvertised_lines do not drop
# --drop_unadvertised_lines drop
opt_parser = optparse.OptionParser()
# drop_unadvertised_lines: Only export the departures of lines that
# are advertised at the station in question. This is used to remove
# depot trips etc, to not confuse the data in schedule bubbles. Use
# --nodrop_unadvertised_lines to disable that.
opt_parser.add_option('--drop_unadvertised_lines', action='store_true',
dest='drop_unadvertised_lines', default=True)
opt_parser.add_option('--nodrop_unadvertised_lines', action='store_false',
dest='drop_unadvertised_lines')
opt_parser.add_option('--in_file', action='store', type='string')
opt_parser.add_option('--out_file', action='store', type='string')
options, unused_arguments = opt_parser.parse_args(argv[1:])
if options.in_file is None:
raise SystemExit('Please provide a value to the --in_file flag.')
if options.out_file is None:
raise SystemExit('Please provide a value to the --out_file flag.')
importer = Divaimporter(convert_c_h1903, options.drop_unadvertised_lines)
importer.Import(options.in_file)
importer.write(options.out_file)
print('Wrote output to', options.out_file)
if __name__ == '__main__':
main(sys.argv)
| [((2709, 2735), 'csv.reader', 'csv.reader', (['s', 'csv_dialect'], {}), '(s, csv_dialect)\n', (2719, 2735), False, 'import csv\n'), ((20623, 20646), 'optparse.OptionParser', 'optparse.OptionParser', ([], {}), '()\n', (20644, 20646), False, 'import optparse\n'), ((5975, 6008), 'zipfile.ZipFile', 'zipfile.ZipFile', (['inpath'], {'mode': '"""r"""'}), "(inpath, mode='r')\n", (5990, 6008), False, 'import zipfile\n'), ((15977, 16045), 'zipfile.ZipFile', 'zipfile.ZipFile', (['outpath'], {'mode': '"""w"""', 'compression': 'zipfile.ZIP_DEFLATED'}), "(outpath, mode='w', compression=zipfile.ZIP_DEFLATED)\n", (15992, 16045), False, 'import zipfile\n'), ((2670, 2683), 'csv.Sniffer', 'csv.Sniffer', ([], {}), '()\n', (2681, 2683), False, 'import csv\n'), ((16551, 16571), 'cStringIO.StringIO', 'cStringIO.StringIO', ([], {}), '()\n', (16569, 16571), False, 'import cStringIO\n')] |
rotsee/protokollen | modules/documents.py | a001a1db86df57adcf5c53c95c4c2fae426340f1 | # -*- coding: utf-8 -*-
"""This module contains classes for documents, and lists of documents.
Documents are defined by the document rules in settings.py
A file can contain one or more document. However, a document can
not be constructed from more than one file. This is a limitation,
obvious in cases like Gotlands kommun, where meeting minutes are
split up in a large number of files.
"""
import settings
from modules.utils import make_unicode, last_index
from modules.extractors.documentBase import ExtractionNotAllowed
document_headers = {
"Content-Type": "text/plain",
"Content-Disposition": "attachment",
"Cache-Control": "public"
}
class DocumentList(object):
"""Contains a list of documents, extracted from a file.
"""
def __init__(self, extractor):
"""Create a list of documents, using `extractor`
"""
self._documents = []
page_types_and_dates = []
"""Keep track of documents by type and date, to be able to merge
documents depending on `settings.document_type_settings`
"""
# Loop through pages, and add pages of the same type and date together
last_page_type = None
last_page_date = None
documents = []
try:
for page in extractor.get_next_page():
temp_doc = Document(page, extractor)
if (len(documents) > 0 and
temp_doc.type_ == last_page_type and
temp_doc.date == last_page_date):
documents[-1].merge_with(temp_doc)
else:
documents.append(temp_doc)
page_types_and_dates.append((temp_doc.type_, temp_doc.date))
last_page_type = temp_doc.type_
last_page_date = temp_doc.date
except ExtractionNotAllowed:
raise ExtractionNotAllowed
# merge documents, if disallow_infixes == True
doc_settings = settings.document_type_settings
disallow_infixes = [d for d in doc_settings
if doc_settings[d]["disallow_infixes"] is True]
"""Document types that disallow holes"""
num_docs = len(page_types_and_dates)
i = 0
while i < num_docs:
(type_, date) = page_types_and_dates[i]
last_match = last_index(page_types_and_dates, (type_, date))
if type_ in disallow_infixes and last_match > i:
num_docs_to_merge = last_match - i + 1
new_doc = documents.pop(0)
for j in range(i, last_match):
new_doc.merge_with(documents.pop(0))
self._documents.append(new_doc)
i += num_docs_to_merge
else:
doc_to_merge = documents.pop(0)
self._documents.append(doc_to_merge)
i += 1
def get_next_document(self):
for document in self._documents:
yield document
def __len__(self):
"""len is the number of documents"""
return len(self._documents)
class Document(object):
"""Represents a single document
"""
text = ""
header = ""
date = None
type_ = None
def __init__(self, page, extractor):
"""Create a document stub from a page. Use add_page
to keep extending this document.
"""
self.text = page.get_text()
self.header = page.get_header() or extractor.get_header()
self.date = page.get_date() or extractor.get_date()
self.type_ = self.get_document_type()
self.date = page.get_date() or extractor.get_date()
def append_page(self, page):
"""Append content from a page to this document.
"""
pass
def append_text(self, text):
"""Append content to this document.
"""
self.text += text
def merge_with(self, document):
"""Merge this document with another one"""
try:
self.text += document.text
except UnicodeDecodeError:
self.text = make_unicode(self.text) + make_unicode(document.text)
def __len__(self):
"""len is the length of the total plaintext"""
return len(self.text)
def get_document_type(self):
"""
Return the first matching document type, based on this
header text.
"""
for document_type in settings.document_rules:
if self.parse_rules(document_type[1], self.header):
return document_type[0]
return None
def parse_rules(self, tuple_, header):
"""Parse document rules. See settings.py for syntax"""
rule_key = tuple_[0].upper()
rule_val = tuple_[1]
header = header.upper()
# --------- Logical separators --------
if rule_key == "AND":
hit = True
for rule in rule_val:
hit = hit and self.parse_rules(rule, header)
return hit
elif rule_key == "OR":
hit = False
for rule in rule_val:
hit = hit or self.parse_rules(rule, header)
return hit
elif rule_key == "NOT":
hit = not self.parse_rules(rule_val, header)
return hit
# -------------- Rules ----------------
elif rule_key == "HEADER_CONTAINS":
try:
pos = make_unicode(header).find(rule_val.upper())
except UnicodeDecodeError:
pos = -1
return pos > -1
if __name__ == "__main__":
print "This module is only intended to be called from other scripts."
import sys
sys.exit()
| [] |
saratkumar/galaxy | tools/amp_segment/ina_speech_segmenter.py | 35cd0987239c1b006d6eaf70b4a03a58fb857a12 | #!/usr/bin/env python3
import os
import os.path
import shutil
import subprocess
import sys
import tempfile
import uuid
import mgm_utils
def main():
(root_dir, input_file, json_file) = sys.argv[1:4]
tmpName = str(uuid.uuid4())
tmpdir = "/tmp"
temp_input_file = f"{tmpdir}/{tmpName}.dat"
temp_output_file = f"{tmpdir}/{tmpName}.json"
shutil.copy(input_file, temp_input_file)
sif = mgm_utils.get_sif_dir(root_dir) + "/ina_segmentation.sif"
r = subprocess.run(["singularity", "run", sif, temp_input_file, temp_output_file])
shutil.copy(temp_output_file, json_file)
if os.path.exists(temp_input_file):
os.remove(temp_input_file)
if os.path.exists(temp_output_file):
os.remove(temp_output_file)
exit(r.returncode)
if __name__ == "__main__":
main()
| [((341, 381), 'shutil.copy', 'shutil.copy', (['input_file', 'temp_input_file'], {}), '(input_file, temp_input_file)\n', (352, 381), False, 'import shutil\n'), ((453, 531), 'subprocess.run', 'subprocess.run', (["['singularity', 'run', sif, temp_input_file, temp_output_file]"], {}), "(['singularity', 'run', sif, temp_input_file, temp_output_file])\n", (467, 531), False, 'import subprocess\n'), ((535, 575), 'shutil.copy', 'shutil.copy', (['temp_output_file', 'json_file'], {}), '(temp_output_file, json_file)\n', (546, 575), False, 'import shutil\n'), ((581, 612), 'os.path.exists', 'os.path.exists', (['temp_input_file'], {}), '(temp_input_file)\n', (595, 612), False, 'import os\n'), ((648, 680), 'os.path.exists', 'os.path.exists', (['temp_output_file'], {}), '(temp_output_file)\n', (662, 680), False, 'import os\n'), ((217, 229), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (227, 229), False, 'import uuid\n'), ((390, 421), 'mgm_utils.get_sif_dir', 'mgm_utils.get_sif_dir', (['root_dir'], {}), '(root_dir)\n', (411, 421), False, 'import mgm_utils\n'), ((616, 642), 'os.remove', 'os.remove', (['temp_input_file'], {}), '(temp_input_file)\n', (625, 642), False, 'import os\n'), ((684, 711), 'os.remove', 'os.remove', (['temp_output_file'], {}), '(temp_output_file)\n', (693, 711), False, 'import os\n')] |
jhill1/thetis | test/tracerEq/test_steady_adv-diff_mms.py | 1be5d28d5d0d7248f2bbce4986b3e886116e103a | """
Testing 3D tracer advection-diffusion equation with method of manufactured solution (MMS).
"""
from thetis import *
import numpy
from scipy import stats
import pytest
class Setup1:
"""
Constant bathymetry and u velocty, zero diffusivity, non-trivial tracer
"""
def bath(self, x, y, lx, ly):
return Constant(40.0)
def elev(self, x, y, lx, ly):
return Constant(0.0)
def uv(self, x, y, z, lx, ly):
return as_vector(
[
Constant(1.0),
Constant(0.0),
Constant(0),
])
def w(self, x, y, z, lx, ly):
return as_vector(
[
Constant(0),
Constant(0),
Constant(0),
])
def kappa(self, x, y, z, lx, ly):
return Constant(0.0)
def tracer(self, x, y, z, lx, ly):
return sin(0.2*pi*(3.0*x + 1.0*y)/lx)
def residual(self, x, y, z, lx, ly):
return 0.6*pi*cos(0.2*pi*(3.0*x + 1.0*y)/lx)/lx
class Setup2:
"""
Constant bathymetry, zero velocity, constant kappa, x-varying T
"""
def bath(self, x, y, lx, ly):
return Constant(40.0)
def elev(self, x, y, lx, ly):
return Constant(0.0)
def uv(self, x, y, z, lx, ly):
return as_vector(
[
Constant(1.0),
Constant(0.0),
Constant(0),
])
def w(self, x, y, z, lx, ly):
return as_vector(
[
Constant(0),
Constant(0),
Constant(0),
])
def kappa(self, x, y, z, lx, ly):
return Constant(50.0)
def tracer(self, x, y, z, lx, ly):
return sin(3*pi*x/lx)
def residual(self, x, y, z, lx, ly):
return 3.0*pi*cos(3*pi*x/lx)/lx - 450.0*pi**2*sin(3*pi*x/lx)/lx**2
class Setup3:
"""
Constant bathymetry, zero kappa, non-trivial velocity and T
"""
def bath(self, x, y, lx, ly):
return Constant(40.0)
def elev(self, x, y, lx, ly):
return Constant(0.0)
def uv(self, x, y, z, lx, ly):
return as_vector(
[
sin(pi*z/40)*sin(pi*(y/ly + 2*x/lx)),
sin(pi*z/40)*sin(pi*(0.3*y/ly + 0.3*x/lx)),
Constant(0),
])
def w(self, x, y, z, lx, ly):
return as_vector(
[
Constant(0),
Constant(0),
12.0*cos(pi*z/40)*cos(pi*(0.3*y/ly + 0.3*x/lx))/ly + 12.0*cos(pi*(0.3*y/ly + 0.3*x/lx))/ly + 80*cos(pi*z/40)*cos(pi*(y/ly + 2*x/lx))/lx + 80*cos(pi*(y/ly + 2*x/lx))/lx,
])
def kappa(self, x, y, z, lx, ly):
return Constant(0.0)
def tracer(self, x, y, z, lx, ly):
return (0.8*cos(0.0125*pi*z) + 0.2)*cos(pi*(0.75*y/ly + 1.5*x/lx))
def residual(self, x, y, z, lx, ly):
return (-0.3*pi*sin(pi*z/40)*cos(pi*(0.3*y/ly + 0.3*x/lx))/ly - 2*pi*sin(pi*z/40)*cos(pi*(y/ly + 2*x/lx))/lx)*(0.8*cos(0.0125*pi*z) + 0.2)*cos(pi*(0.75*y/ly + 1.5*x/lx)) - 0.01*pi*(12.0*cos(pi*z/40)*cos(pi*(0.3*y/ly + 0.3*x/lx))/ly + 12.0*cos(pi*(0.3*y/ly + 0.3*x/lx))/ly + 80*cos(pi*z/40)*cos(pi*(y/ly + 2*x/lx))/lx + 80*cos(pi*(y/ly + 2*x/lx))/lx)*sin(0.0125*pi*z)*cos(pi*(0.75*y/ly + 1.5*x/lx)) - 0.75*pi*(0.8*cos(0.0125*pi*z) + 0.2)*sin(pi*z/40)*sin(pi*(0.3*y/ly + 0.3*x/lx))*sin(pi*(0.75*y/ly + 1.5*x/lx))/ly + 0.3*pi*(0.8*cos(0.0125*pi*z) + 0.2)*sin(pi*z/40)*cos(pi*(0.3*y/ly + 0.3*x/lx))*cos(pi*(0.75*y/ly + 1.5*x/lx))/ly - 1.5*pi*(0.8*cos(0.0125*pi*z) + 0.2)*sin(pi*z/40)*sin(pi*(0.75*y/ly + 1.5*x/lx))*sin(pi*(y/ly + 2*x/lx))/lx + 2*pi*(0.8*cos(0.0125*pi*z) + 0.2)*sin(pi*z/40)*cos(pi*(0.75*y/ly + 1.5*x/lx))*cos(pi*(y/ly + 2*x/lx))/lx
class Setup4:
"""
Constant bathymetry, constant kappa, non-trivial velocity and T
"""
def bath(self, x, y, lx, ly):
return Constant(40.0)
def elev(self, x, y, lx, ly):
return Constant(0.0)
def uv(self, x, y, z, lx, ly):
return as_vector(
[
sin(pi*z/40)*sin(pi*(y/ly + 2*x/lx)),
sin(pi*z/40)*sin(pi*(0.3*y/ly + 0.3*x/lx)),
Constant(0),
])
def w(self, x, y, z, lx, ly):
return as_vector(
[
Constant(0),
Constant(0),
12.0*cos(pi*z/40)*cos(pi*(0.3*y/ly + 0.3*x/lx))/ly + 12.0*cos(pi*(0.3*y/ly + 0.3*x/lx))/ly + 80*cos(pi*z/40)*cos(pi*(y/ly + 2*x/lx))/lx + 80*cos(pi*(y/ly + 2*x/lx))/lx,
])
def kappa(self, x, y, z, lx, ly):
return Constant(50.0)
def tracer(self, x, y, z, lx, ly):
return (0.8*cos(0.0125*pi*z) + 0.2)*cos(pi*(0.75*y/ly + 1.5*x/lx))
def residual(self, x, y, z, lx, ly):
return (-0.3*pi*sin(pi*z/40)*cos(pi*(0.3*y/ly + 0.3*x/lx))/ly - 2*pi*sin(pi*z/40)*cos(pi*(y/ly + 2*x/lx))/lx)*(0.8*cos(0.0125*pi*z) + 0.2)*cos(pi*(0.75*y/ly + 1.5*x/lx)) - 0.01*pi*(12.0*cos(pi*z/40)*cos(pi*(0.3*y/ly + 0.3*x/lx))/ly + 12.0*cos(pi*(0.3*y/ly + 0.3*x/lx))/ly + 80*cos(pi*z/40)*cos(pi*(y/ly + 2*x/lx))/lx + 80*cos(pi*(y/ly + 2*x/lx))/lx)*sin(0.0125*pi*z)*cos(pi*(0.75*y/ly + 1.5*x/lx)) - 0.75*pi*(0.8*cos(0.0125*pi*z) + 0.2)*sin(pi*z/40)*sin(pi*(0.3*y/ly + 0.3*x/lx))*sin(pi*(0.75*y/ly + 1.5*x/lx))/ly + 0.3*pi*(0.8*cos(0.0125*pi*z) + 0.2)*sin(pi*z/40)*cos(pi*(0.3*y/ly + 0.3*x/lx))*cos(pi*(0.75*y/ly + 1.5*x/lx))/ly - 28.125*pi**2*(0.8*cos(0.0125*pi*z) + 0.2)*cos(pi*(0.75*y/ly + 1.5*x/lx))/ly**2 - 1.5*pi*(0.8*cos(0.0125*pi*z) + 0.2)*sin(pi*z/40)*sin(pi*(0.75*y/ly + 1.5*x/lx))*sin(pi*(y/ly + 2*x/lx))/lx + 2*pi*(0.8*cos(0.0125*pi*z) + 0.2)*sin(pi*z/40)*cos(pi*(0.75*y/ly + 1.5*x/lx))*cos(pi*(y/ly + 2*x/lx))/lx - 112.5*pi**2*(0.8*cos(0.0125*pi*z) + 0.2)*cos(pi*(0.75*y/ly + 1.5*x/lx))/(lx*ly) - 112.5*pi**2*(0.8*cos(0.0125*pi*z) + 0.2)*cos(pi*(0.75*y/ly + 1.5*x/lx))/lx**2
def run(setup, refinement, order, do_export=True, **options):
"""Run single test and return L2 error"""
print_output('--- running {:} refinement {:}'.format(setup.__name__, refinement))
# domain dimensions
lx = 15e3
ly = 10e3
area = lx*ly
t_end = 200.0
setup_obj = setup()
# mesh
n_layers = 4*refinement
nx = 4*refinement
ny = 4*refinement
mesh2d = RectangleMesh(nx, ny, lx, ly)
# outputs
outputdir = 'outputs'
if do_export:
out_t = File(os.path.join(outputdir, 'T.pvd'))
# bathymetry
x_2d, y_2d = SpatialCoordinate(mesh2d)
p1_2d = get_functionspace(mesh2d, 'CG', 1)
bathymetry_2d = Function(p1_2d, name='Bathymetry')
bathymetry_2d.project(setup_obj.bath(x_2d, y_2d, lx, ly))
solver_obj = solver.FlowSolver(mesh2d, bathymetry_2d, n_layers)
solver_obj.options.element_family = 'dg-dg'
solver_obj.options.polynomial_degree = order
solver_obj.options.horizontal_velocity_scale = Constant(1.0)
solver_obj.options.use_bottom_friction = False
solver_obj.options.no_exports = not do_export
solver_obj.options.output_directory = outputdir
solver_obj.options.simulation_end_time = t_end
solver_obj.options.fields_to_export = ['salt_3d', 'uv_3d', 'w_3d']
solver_obj.options.horizontal_viscosity_scale = Constant(50.0)
solver_obj.options.update(options)
solver_obj.create_function_spaces()
# functions for source terms
x, y, z = SpatialCoordinate(solver_obj.mesh)
solver_obj.options.salinity_source_3d = setup_obj.residual(x, y, z, lx, ly)
# diffusivuty
solver_obj.options.horizontal_diffusivity = setup_obj.kappa(x, y, z, lx, ly)
# analytical solution
trac_ana = setup_obj.tracer(x, y, z, lx, ly)
bnd_salt = {'value': trac_ana}
solver_obj.bnd_functions['salt'] = {1: bnd_salt, 2: bnd_salt,
3: bnd_salt, 4: bnd_salt}
# NOTE use symmetic uv condition to get correct w
bnd_mom = {'symm': None}
solver_obj.bnd_functions['momentum'] = {1: bnd_mom, 2: bnd_mom,
3: bnd_mom, 4: bnd_mom}
solver_obj.create_equations()
dt = solver_obj.dt
# elevation field
solver_obj.fields.elev_2d.project(setup_obj.elev(x_2d, y_2d, lx, ly))
# update mesh and fields
solver_obj.mesh_updater.update_mesh_coordinates()
# salinity field
solver_obj.fields.salt_3d.project(setup_obj.tracer(x, y, z, lx, ly))
# velocity field
solver_obj.fields.uv_3d.project(setup_obj.uv(x, y, z, lx, ly))
solver_obj.w_solver.solve()
if do_export:
out_t.write(trac_ana)
solver_obj.export()
# solve salinity advection-diffusion equation with residual source term
ti = solver_obj.timestepper
ti.timesteppers.salt_expl.initialize(ti.fields.salt_3d)
t = 0
while t < t_end - 1e-5:
ti.timesteppers.salt_expl.advance(t)
if ti.options.use_limiter_for_tracers:
ti.solver.tracer_limiter.apply(ti.fields.salt_3d)
t += dt
if do_export:
out_t.write(trac_ana)
solver_obj.export()
l2_err = errornorm(trac_ana, solver_obj.fields.salt_3d)/numpy.sqrt(area)
print_output('L2 error {:.12f}'.format(l2_err))
return l2_err
def run_convergence(setup, ref_list, order, do_export=False, save_plot=False, **options):
"""Runs test for a list of refinements and computes error convergence rate"""
l2_err = []
for r in ref_list:
l2_err.append(run(setup, r, order, do_export=do_export, **options))
x_log = numpy.log10(numpy.array(ref_list, dtype=float)**-1)
y_log = numpy.log10(numpy.array(l2_err))
def check_convergence(x_log, y_log, expected_slope, field_str, save_plot):
slope_rtol = 0.2
slope, intercept, r_value, p_value, std_err = stats.linregress(x_log, y_log)
setup_name = setup.__name__
if save_plot:
import matplotlib.pyplot as plt
fig, ax = plt.subplots(figsize=(5, 5))
# plot points
ax.plot(x_log, y_log, 'k.')
x_min = x_log.min()
x_max = x_log.max()
offset = 0.05*(x_max - x_min)
npoints = 50
xx = numpy.linspace(x_min - offset, x_max + offset, npoints)
yy = intercept + slope*xx
# plot line
ax.plot(xx, yy, linestyle='--', linewidth=0.5, color='k')
ax.text(xx[2*int(npoints/3)], yy[2*int(npoints/3)], '{:4.2f}'.format(slope),
verticalalignment='top',
horizontalalignment='left')
ax.set_xlabel('log10(dx)')
ax.set_ylabel('log10(L2 error)')
ax.set_title('tracer adv-diff MMS DG p={:}'.format(order))
ref_str = 'ref-' + '-'.join([str(r) for r in ref_list])
order_str = 'o{:}'.format(order)
imgfile = '_'.join(['convergence', setup_name, field_str, ref_str, order_str])
imgfile += '.png'
img_dir = create_directory('plots')
imgfile = os.path.join(img_dir, imgfile)
print_output('saving figure {:}'.format(imgfile))
plt.savefig(imgfile, dpi=200, bbox_inches='tight')
if expected_slope is not None:
err_msg = '{:}: Wrong convergence rate {:.4f}, expected {:.4f}'.format(setup_name, slope, expected_slope)
assert abs(slope - expected_slope)/expected_slope < slope_rtol, err_msg
print_output('{:}: convergence rate {:.4f} PASSED'.format(setup_name, slope))
else:
print_output('{:}: {:} convergence rate {:.4f}'.format(setup_name, field_str, slope))
return slope
check_convergence(x_log, y_log, order+1, 'tracer', save_plot)
# ---------------------------
# standard tests for pytest
# ---------------------------
@pytest.fixture(params=['LeapFrog', 'SSPRK22'])
def timestepper_type(request):
return request.param
@pytest.fixture(params=[Setup1,
Setup2,
Setup3,
Setup4],
ids=['setup1', 'setup2', 'setup3', 'setup4'])
def setup(request):
return request.param
def test_convergence(setup, timestepper_type):
run_convergence(setup, [1, 2, 3], 1, save_plot=False,
timestepper_type=timestepper_type)
if __name__ == '__main__':
run_convergence(Setup4, [1, 2, 3], 1, save_plot=True, timestepper_type='SSPRK22')
| [((11695, 11741), 'pytest.fixture', 'pytest.fixture', ([], {'params': "['LeapFrog', 'SSPRK22']"}), "(params=['LeapFrog', 'SSPRK22'])\n", (11709, 11741), False, 'import pytest\n'), ((11801, 11906), 'pytest.fixture', 'pytest.fixture', ([], {'params': '[Setup1, Setup2, Setup3, Setup4]', 'ids': "['setup1', 'setup2', 'setup3', 'setup4']"}), "(params=[Setup1, Setup2, Setup3, Setup4], ids=['setup1',\n 'setup2', 'setup3', 'setup4'])\n", (11815, 11906), False, 'import pytest\n'), ((9044, 9060), 'numpy.sqrt', 'numpy.sqrt', (['area'], {}), '(area)\n', (9054, 9060), False, 'import numpy\n'), ((9509, 9528), 'numpy.array', 'numpy.array', (['l2_err'], {}), '(l2_err)\n', (9520, 9528), False, 'import numpy\n'), ((9689, 9719), 'scipy.stats.linregress', 'stats.linregress', (['x_log', 'y_log'], {}), '(x_log, y_log)\n', (9705, 9719), False, 'from scipy import stats\n'), ((9445, 9479), 'numpy.array', 'numpy.array', (['ref_list'], {'dtype': 'float'}), '(ref_list, dtype=float)\n', (9456, 9479), False, 'import numpy\n'), ((9844, 9872), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(5, 5)'}), '(figsize=(5, 5))\n', (9856, 9872), True, 'import matplotlib.pyplot as plt\n'), ((10087, 10142), 'numpy.linspace', 'numpy.linspace', (['(x_min - offset)', '(x_max + offset)', 'npoints'], {}), '(x_min - offset, x_max + offset, npoints)\n', (10101, 10142), False, 'import numpy\n'), ((11021, 11071), 'matplotlib.pyplot.savefig', 'plt.savefig', (['imgfile'], {'dpi': '(200)', 'bbox_inches': '"""tight"""'}), "(imgfile, dpi=200, bbox_inches='tight')\n", (11032, 11071), True, 'import matplotlib.pyplot as plt\n')] |
GaretJax/csat | csat/django/fields.py | db63441136369436140a91c9657444353c8944e6 | from lxml import etree
from django import forms
from django.db import models
class XMLFileField(models.FileField):
def __init__(self, *args, **kwargs):
self.schema = kwargs.pop('schema')
super(XMLFileField, self).__init__(*args, **kwargs)
def clean(self, *args, **kwargs):
data = super(XMLFileField, self).clean(*args, **kwargs)
with data as fh:
doc = etree.parse(fh)
with open(self.schema) as fh:
schema = etree.XMLSchema(etree.parse(fh))
if not schema.validate(doc):
raise forms.ValidationError('The XML file failed to validate '
'against the supplied schema.')
return data
| [((409, 424), 'lxml.etree.parse', 'etree.parse', (['fh'], {}), '(fh)\n', (420, 424), False, 'from lxml import etree\n'), ((574, 664), 'django.forms.ValidationError', 'forms.ValidationError', (['"""The XML file failed to validate against the supplied schema."""'], {}), "(\n 'The XML file failed to validate against the supplied schema.')\n", (595, 664), False, 'from django import forms\n'), ((501, 516), 'lxml.etree.parse', 'etree.parse', (['fh'], {}), '(fh)\n', (512, 516), False, 'from lxml import etree\n')] |
RishabhSehgal/keras_cv_attention_models | keras_cv_attention_models/yolox/yolox.py | c1e20e45815339d70a987ec7dd9e6f926b4eb21d | import tensorflow as tf
from tensorflow import keras
from keras_cv_attention_models.attention_layers import (
activation_by_name,
batchnorm_with_activation,
conv2d_no_bias,
depthwise_conv2d_no_bias,
add_pre_post_process,
)
from keras_cv_attention_models import model_surgery
from keras_cv_attention_models.download_and_load import reload_model_weights
from keras_cv_attention_models.coco.eval_func import DecodePredictions
PRETRAINED_DICT = {
"yolox_nano": {"coco": "7c97d60d4cc9d54321176f844acee627"},
"yolox_tiny": {"coco": "f9b51ff24290090c86a10a45f811140b"},
"yolox_s": {"coco": "a989f5a808ddc4a8242157a6a3e64977"},
"yolox_m": {"coco": "5c2333d2f12b2f48e3ec8555b29d242f"},
"yolox_l": {"coco": "a07c48994b7a67dba421025ef39b858b"},
"yolox_x": {"coco": "de9741d3f67f50c54856bcae0f07b7ef"},
}
""" CSPDarknet backbone """
BATCH_NORM_EPSILON = 1e-3
BATCH_NORM_MOMENTUM = 0.03
def conv_dw_pw_block(inputs, filters, kernel_size=1, strides=1, use_depthwise_conv=False, activation="swish", name=""):
nn = inputs
if use_depthwise_conv:
nn = depthwise_conv2d_no_bias(nn, kernel_size, strides, padding="SAME", name=name)
nn = batchnorm_with_activation(nn, activation=activation, epsilon=BATCH_NORM_EPSILON, momentum=BATCH_NORM_MOMENTUM, name=name + "dw_")
kernel_size, strides = 1, 1
nn = conv2d_no_bias(nn, filters, kernel_size, strides, padding="SAME", name=name)
nn = batchnorm_with_activation(nn, activation=activation, epsilon=BATCH_NORM_EPSILON, momentum=BATCH_NORM_MOMENTUM, name=name)
return nn
def csp_block(inputs, expansion=0.5, use_shortcut=True, use_depthwise_conv=False, activation="swish", name=""):
input_channels = inputs.shape[-1]
nn = conv_dw_pw_block(inputs, int(input_channels * expansion), activation=activation, name=name + "1_")
nn = conv_dw_pw_block(nn, input_channels, kernel_size=3, strides=1, use_depthwise_conv=use_depthwise_conv, activation=activation, name=name + "2_")
if use_shortcut:
nn = keras.layers.Add()([inputs, nn])
return nn
def csp_stack(inputs, depth, out_channels=-1, expansion=0.5, use_shortcut=True, use_depthwise_conv=False, activation="swish", name=""):
out_channels = inputs.shape[-1] if out_channels == -1 else out_channels
hidden_channels = int(out_channels * expansion)
short = conv_dw_pw_block(inputs, hidden_channels, kernel_size=1, activation=activation, name=name + "short_")
deep = conv_dw_pw_block(inputs, hidden_channels, kernel_size=1, activation=activation, name=name + "deep_")
for id in range(depth):
block_name = name + "block{}_".format(id + 1)
deep = csp_block(deep, 1, use_shortcut=use_shortcut, use_depthwise_conv=use_depthwise_conv, activation=activation, name=block_name)
out = tf.concat([deep, short], axis=-1)
out = conv_dw_pw_block(out, out_channels, kernel_size=1, activation=activation, name=name + "output_")
return out
def spatial_pyramid_pooling(inputs, pool_sizes=(5, 9, 13), activation="swish", name=""):
input_channels = inputs.shape[-1]
nn = conv_dw_pw_block(inputs, input_channels // 2, kernel_size=1, activation=activation, name=name + "1_")
pp = [keras.layers.MaxPooling2D(pool_size=ii, strides=1, padding="SAME")(nn) for ii in pool_sizes]
nn = tf.concat([nn, *pp], axis=-1)
nn = conv_dw_pw_block(nn, input_channels, kernel_size=1, activation=activation, name=name + "2_")
return nn
def focus_stem(inputs, filters, kernel_size=3, strides=1, padding="valid", activation="swish", name=""):
if padding.lower() == "same": # Handling odd input_shape
inputs = tf.pad(inputs, [[0, 0], [0, 1], [0, 1], [0, 0]])
patch_top_left = inputs[:, :-1:2, :-1:2]
patch_top_right = inputs[:, :-1:2, 1::2]
patch_bottom_left = inputs[:, 1::2, :-1:2]
patch_bottom_right = inputs[:, 1::2, 1::2]
else:
patch_top_left = inputs[:, ::2, ::2]
patch_top_right = inputs[:, ::2, 1::2]
patch_bottom_left = inputs[:, 1::2, ::2]
patch_bottom_right = inputs[:, 1::2, 1::2]
nn = tf.concat([patch_top_left, patch_bottom_left, patch_top_right, patch_bottom_right], axis=-1)
nn = conv_dw_pw_block(nn, filters, kernel_size=kernel_size, strides=strides, activation=activation, name=name)
return nn
def CSPDarknet(width_mul=1, depth_mul=1, out_features=[-3, -2, -1], use_depthwise_conv=False, input_shape=(512, 512, 3), activation="swish", model_name=""):
base_channels, base_depth = int(width_mul * 64), max(round(depth_mul * 3), 1)
inputs = keras.layers.Input(input_shape)
""" Stem """
nn = focus_stem(inputs, base_channels, activation=activation, name="stem_")
features = [nn]
""" dark blocks """
depthes = [base_depth, base_depth * 3, base_depth * 3, base_depth]
channels = [base_channels * 2, base_channels * 4, base_channels * 8, base_channels * 16]
use_spps = [False, False, False, True]
use_shortcuts = [True, True, True, False]
for id, (channel, depth, use_spp, use_shortcut) in enumerate(zip(channels, depthes, use_spps, use_shortcuts)):
stack_name = "stack{}_".format(id + 1)
nn = conv_dw_pw_block(nn, channel, kernel_size=3, strides=2, use_depthwise_conv=use_depthwise_conv, activation=activation, name=stack_name)
if use_spp:
nn = spatial_pyramid_pooling(nn, activation=activation, name=stack_name + "spp_")
# nn = SPPBottleneck(base_channels * 16, base_channels * 16, activation=act)
nn = csp_stack(nn, depth, use_shortcut=use_shortcut, use_depthwise_conv=use_depthwise_conv, activation=activation, name=stack_name)
features.append(nn)
nn = [features[ii] for ii in out_features]
model = keras.models.Model(inputs, nn, name=model_name)
return model
""" path aggregation fpn """
def upsample_merge(inputs, csp_depth, use_depthwise_conv=False, activation="swish", name=""):
# print(f">>>> upsample_merge inputs: {[ii.shape for ii in inputs] = }")
target_channel = inputs[-1].shape[-1]
fpn_out = conv_dw_pw_block(inputs[0], target_channel, activation=activation, name=name + "fpn_")
# inputs[0] = keras.layers.UpSampling2D(size=(2, 2), interpolation="nearest", name=name + "up")(fpn_out)
inputs[0] = tf.image.resize(fpn_out, tf.shape(inputs[-1])[1:-1], method="nearest")
nn = tf.concat(inputs, axis=-1)
nn = csp_stack(nn, csp_depth, target_channel, 0.5, False, use_depthwise_conv, activation=activation, name=name)
return fpn_out, nn
def downsample_merge(inputs, csp_depth, use_depthwise_conv=False, activation="swish", name=""):
# print(f">>>> downsample_merge inputs: {[ii.shape for ii in inputs] = }")
inputs[0] = conv_dw_pw_block(inputs[0], inputs[-1].shape[-1], 3, 2, use_depthwise_conv, activation=activation, name=name + "down_")
nn = tf.concat(inputs, axis=-1)
nn = csp_stack(nn, csp_depth, nn.shape[-1], 0.5, False, use_depthwise_conv, activation=activation, name=name)
return nn
def path_aggregation_fpn(features, depth_mul=1, use_depthwise_conv=False, activation="swish", name=""):
# p5 ─> fpn_out0 ───────────> pan_out0
# ↓ ↑
# p4 ─> f_out0 ─> fpn_out1 ─> pan_out1
# ↓ ↑
# p3 ───────────> pan_out2 ──────┘
csp_depth = max(round(depth_mul * 3), 1)
p3, p4, p5 = features # p3: [64, 64, 256], p4: [32, 32, 512], p5: [16, 16, 1024]
# fpn_out0: [16, 16, 512], f_out0: [32, 32, 512]
fpn_out0, f_out0 = upsample_merge([p5, p4], csp_depth, use_depthwise_conv=use_depthwise_conv, activation=activation, name=name + "c3p4_")
# fpn_out1: [32, 32, 256], pan_out2: [64, 64, 256]
fpn_out1, pan_out2 = upsample_merge([f_out0, p3], csp_depth, use_depthwise_conv=use_depthwise_conv, activation=activation, name=name + "c3p3_")
# pan_out1: [32, 32, 512]
pan_out1 = downsample_merge([pan_out2, fpn_out1], csp_depth, use_depthwise_conv=use_depthwise_conv, activation=activation, name=name + "c3n3_")
# pan_out0: [16, 16, 1024]
pan_out0 = downsample_merge([pan_out1, fpn_out0], csp_depth, use_depthwise_conv=use_depthwise_conv, activation=activation, name=name + "c3n4_")
return [pan_out2, pan_out1, pan_out0]
""" YOLOXHead """
def yolox_head_single(inputs, out_channels, num_classes=80, num_anchors=1, use_depthwise_conv=False, use_object_scores=True, activation="swish", name=""):
bias_init = tf.constant_initializer(-tf.math.log((1 - 0.01) / 0.01).numpy())
# stem
stem = conv_dw_pw_block(inputs, out_channels, activation=activation, name=name + "stem_")
# cls_convs, cls_preds
cls_nn = conv_dw_pw_block(stem, out_channels, kernel_size=3, use_depthwise_conv=use_depthwise_conv, activation=activation, name=name + "cls_1_")
cls_nn = conv_dw_pw_block(cls_nn, out_channels, kernel_size=3, use_depthwise_conv=use_depthwise_conv, activation=activation, name=name + "cls_2_")
cls_out = keras.layers.Conv2D(num_classes * num_anchors, kernel_size=1, bias_initializer=bias_init, name=name + "class_out")(cls_nn)
cls_out = activation_by_name(cls_out, "sigmoid", name=name + "class_out_")
cls_out = keras.layers.Reshape([-1, num_classes], name=name + "class_out_reshape")(cls_out)
# reg_convs, reg_preds
reg_nn = conv_dw_pw_block(stem, out_channels, kernel_size=3, use_depthwise_conv=use_depthwise_conv, activation=activation, name=name + "reg_1_")
reg_nn = conv_dw_pw_block(reg_nn, out_channels, kernel_size=3, use_depthwise_conv=use_depthwise_conv, activation=activation, name=name + "reg_2_")
reg_out = keras.layers.Conv2D(4 * num_anchors, kernel_size=1, name=name + "regression_out")(reg_nn)
reg_out = keras.layers.Reshape([-1, 4], name=name + "regression_out_reshape")(reg_out)
# obj_preds
if use_object_scores:
obj_out = keras.layers.Conv2D(1 * num_anchors, kernel_size=1, bias_initializer=bias_init, name=name + "object_out")(reg_nn)
obj_out = activation_by_name(obj_out, "sigmoid", name=name + "object_out_")
obj_out = keras.layers.Reshape([-1, 1], name=name + "object_out_reshape")(obj_out)
return tf.concat([reg_out, cls_out, obj_out], axis=-1)
else:
return tf.concat([reg_out, cls_out], axis=-1)
def yolox_head(inputs, width_mul=1.0, num_classes=80, num_anchors=1, use_depthwise_conv=False, use_object_scores=True, activation="swish", name=""):
out_channel = int(256 * width_mul)
outputs = []
for id, input in enumerate(inputs):
cur_name = name + "{}_".format(id + 1)
out = yolox_head_single(input, out_channel, num_classes, num_anchors, use_depthwise_conv, use_object_scores, activation=activation, name=cur_name)
outputs.append(out)
# outputs = tf.concat([keras.layers.Reshape([-1, ii.shape[-1]])(ii) for ii in outputs], axis=1)
outputs = tf.concat(outputs, axis=1)
return outputs
""" YOLOX models """
def YOLOX(
backbone=None,
features_pick=[-3, -2, -1],
depth_mul=1,
width_mul=-1, # -1 means: `min([ii.shape[-1] for ii in features]) / 256` for custom backbones.
use_depthwise_conv=False,
use_anchor_free_mode=True,
num_anchors="auto", # "auto" means 1 if use_anchor_free_mode else 9
use_object_scores="auto", # "auto" means same with use_anchor_free_mode
input_shape=(640, 640, 3),
num_classes=80,
activation="swish",
freeze_backbone=False,
pretrained=None,
model_name="yolox",
pyramid_levels_min=3, # Init anchors for model prediction.
anchor_scale="auto", # Init anchors for model prediction. "auto" means 1 if use_anchor_free_mode else 4
rescale_mode="raw", # For decode predictions, raw means input value in range [0, 255].
kwargs=None, # Not using, recieving parameter
):
if backbone is None:
width_mul = width_mul if width_mul > 0 else 1
backbone = CSPDarknet(width_mul, depth_mul, features_pick, use_depthwise_conv, input_shape, activation=activation, model_name="darknet")
features = backbone.outputs
else:
if isinstance(features_pick[0], str):
features = [backbone.get_layer(layer_name) for layer_name in features_pick]
else:
features = model_surgery.get_pyramide_feture_layers(backbone)
features = [features[id] for id in features_pick]
print(">>>> features:", {ii.name: ii.output_shape for ii in features})
features = [ii.output for ii in features]
width_mul = width_mul if width_mul > 0 else min([ii.shape[-1] for ii in features]) / 256
print(">>>> width_mul:", width_mul)
if freeze_backbone:
backbone.trainable = False
else:
backbone.trainable = True
inputs = backbone.inputs[0]
use_object_scores = use_anchor_free_mode if use_object_scores == "auto" else use_object_scores
num_anchors = (1 if use_anchor_free_mode else 9) if num_anchors == "auto" else num_anchors
fpn_features = path_aggregation_fpn(features, depth_mul=depth_mul, use_depthwise_conv=use_depthwise_conv, activation=activation, name="pafpn_")
outputs = yolox_head(fpn_features, width_mul, num_classes, num_anchors, use_depthwise_conv, use_object_scores, activation=activation, name="head_")
outputs = keras.layers.Activation("linear", dtype="float32", name="outputs_fp32")(outputs)
model = keras.models.Model(inputs, outputs, name=model_name)
reload_model_weights(model, PRETRAINED_DICT, "yolox", pretrained)
# AA = {"aspect_ratios": anchor_aspect_ratios, "num_scales": anchor_num_scales, "anchor_scale": anchor_scale, "grid_zero_start": anchor_grid_zero_start}
pyramid_levels = [pyramid_levels_min, pyramid_levels_min + len(features_pick) - 1] # -> [3, 5]
anchor_scale = (1 if use_anchor_free_mode else 4) if anchor_scale == "auto" else anchor_scale
post_process = DecodePredictions(backbone.input_shape[1:], pyramid_levels, anchor_scale, use_anchor_free_mode, use_object_scores)
add_pre_post_process(model, rescale_mode=rescale_mode, post_process=post_process)
return model
def YOLOXNano(input_shape=(416, 416, 3), freeze_backbone=False, num_classes=80, backbone=None, activation="swish", pretrained="coco", **kwargs):
return YOLOX(**locals(), depth_mul=0.33, width_mul=0.25, use_depthwise_conv=True, model_name=kwargs.pop("model_name", "yolox_nano"), **kwargs)
def YOLOXTiny(input_shape=(416, 416, 3), freeze_backbone=False, num_classes=80, backbone=None, activation="swish", pretrained="coco", **kwargs):
return YOLOX(**locals(), depth_mul=0.33, width_mul=0.375, model_name=kwargs.pop("model_name", "yolox_tiny"), **kwargs)
def YOLOXS(input_shape=(640, 640, 3), freeze_backbone=False, num_classes=80, backbone=None, activation="swish", pretrained="coco", **kwargs):
return YOLOX(**locals(), depth_mul=0.33, width_mul=0.5, model_name=kwargs.pop("model_name", "yolox_s"), **kwargs)
def YOLOXM(input_shape=(640, 640, 3), freeze_backbone=False, num_classes=80, backbone=None, activation="swish", pretrained="coco", **kwargs):
return YOLOX(**locals(), depth_mul=0.67, width_mul=0.75, model_name=kwargs.pop("model_name", "yolox_m"), **kwargs)
def YOLOXL(input_shape=(640, 640, 3), freeze_backbone=False, num_classes=80, backbone=None, activation="swish", pretrained="coco", **kwargs):
return YOLOX(**locals(), depth_mul=1.0, width_mul=1.0, model_name=kwargs.pop("model_name", "yolox_l"), **kwargs)
def YOLOXX(input_shape=(640, 640, 3), freeze_backbone=False, num_classes=80, backbone=None, activation="swish", pretrained="coco", **kwargs):
return YOLOX(**locals(), depth_mul=1.33, width_mul=1.25, model_name=kwargs.pop("model_name", "yolox_x"), **kwargs)
| [((1365, 1441), 'keras_cv_attention_models.attention_layers.conv2d_no_bias', 'conv2d_no_bias', (['nn', 'filters', 'kernel_size', 'strides'], {'padding': '"""SAME"""', 'name': 'name'}), "(nn, filters, kernel_size, strides, padding='SAME', name=name)\n", (1379, 1441), False, 'from keras_cv_attention_models.attention_layers import activation_by_name, batchnorm_with_activation, conv2d_no_bias, depthwise_conv2d_no_bias, add_pre_post_process\n'), ((1451, 1577), 'keras_cv_attention_models.attention_layers.batchnorm_with_activation', 'batchnorm_with_activation', (['nn'], {'activation': 'activation', 'epsilon': 'BATCH_NORM_EPSILON', 'momentum': 'BATCH_NORM_MOMENTUM', 'name': 'name'}), '(nn, activation=activation, epsilon=\n BATCH_NORM_EPSILON, momentum=BATCH_NORM_MOMENTUM, name=name)\n', (1476, 1577), False, 'from keras_cv_attention_models.attention_layers import activation_by_name, batchnorm_with_activation, conv2d_no_bias, depthwise_conv2d_no_bias, add_pre_post_process\n'), ((2806, 2839), 'tensorflow.concat', 'tf.concat', (['[deep, short]'], {'axis': '(-1)'}), '([deep, short], axis=-1)\n', (2815, 2839), True, 'import tensorflow as tf\n'), ((3314, 3343), 'tensorflow.concat', 'tf.concat', (['[nn, *pp]'], {'axis': '(-1)'}), '([nn, *pp], axis=-1)\n', (3323, 3343), True, 'import tensorflow as tf\n'), ((4106, 4202), 'tensorflow.concat', 'tf.concat', (['[patch_top_left, patch_bottom_left, patch_top_right, patch_bottom_right]'], {'axis': '(-1)'}), '([patch_top_left, patch_bottom_left, patch_top_right,\n patch_bottom_right], axis=-1)\n', (4115, 4202), True, 'import tensorflow as tf\n'), ((4582, 4613), 'tensorflow.keras.layers.Input', 'keras.layers.Input', (['input_shape'], {}), '(input_shape)\n', (4600, 4613), False, 'from tensorflow import keras\n'), ((5747, 5794), 'tensorflow.keras.models.Model', 'keras.models.Model', (['inputs', 'nn'], {'name': 'model_name'}), '(inputs, nn, name=model_name)\n', (5765, 5794), False, 'from tensorflow import keras\n'), ((6365, 6391), 'tensorflow.concat', 'tf.concat', (['inputs'], {'axis': '(-1)'}), '(inputs, axis=-1)\n', (6374, 6391), True, 'import tensorflow as tf\n'), ((6853, 6879), 'tensorflow.concat', 'tf.concat', (['inputs'], {'axis': '(-1)'}), '(inputs, axis=-1)\n', (6862, 6879), True, 'import tensorflow as tf\n'), ((9088, 9152), 'keras_cv_attention_models.attention_layers.activation_by_name', 'activation_by_name', (['cls_out', '"""sigmoid"""'], {'name': "(name + 'class_out_')"}), "(cls_out, 'sigmoid', name=name + 'class_out_')\n", (9106, 9152), False, 'from keras_cv_attention_models.attention_layers import activation_by_name, batchnorm_with_activation, conv2d_no_bias, depthwise_conv2d_no_bias, add_pre_post_process\n'), ((10840, 10866), 'tensorflow.concat', 'tf.concat', (['outputs'], {'axis': '(1)'}), '(outputs, axis=1)\n', (10849, 10866), True, 'import tensorflow as tf\n'), ((13329, 13381), 'tensorflow.keras.models.Model', 'keras.models.Model', (['inputs', 'outputs'], {'name': 'model_name'}), '(inputs, outputs, name=model_name)\n', (13347, 13381), False, 'from tensorflow import keras\n'), ((13386, 13451), 'keras_cv_attention_models.download_and_load.reload_model_weights', 'reload_model_weights', (['model', 'PRETRAINED_DICT', '"""yolox"""', 'pretrained'], {}), "(model, PRETRAINED_DICT, 'yolox', pretrained)\n", (13406, 13451), False, 'from keras_cv_attention_models.download_and_load import reload_model_weights\n'), ((13827, 13945), 'keras_cv_attention_models.coco.eval_func.DecodePredictions', 'DecodePredictions', (['backbone.input_shape[1:]', 'pyramid_levels', 'anchor_scale', 'use_anchor_free_mode', 'use_object_scores'], {}), '(backbone.input_shape[1:], pyramid_levels, anchor_scale,\n use_anchor_free_mode, use_object_scores)\n', (13844, 13945), False, 'from keras_cv_attention_models.coco.eval_func import DecodePredictions\n'), ((13946, 14032), 'keras_cv_attention_models.attention_layers.add_pre_post_process', 'add_pre_post_process', (['model'], {'rescale_mode': 'rescale_mode', 'post_process': 'post_process'}), '(model, rescale_mode=rescale_mode, post_process=\n post_process)\n', (13966, 14032), False, 'from keras_cv_attention_models.attention_layers import activation_by_name, batchnorm_with_activation, conv2d_no_bias, depthwise_conv2d_no_bias, add_pre_post_process\n'), ((1099, 1176), 'keras_cv_attention_models.attention_layers.depthwise_conv2d_no_bias', 'depthwise_conv2d_no_bias', (['nn', 'kernel_size', 'strides'], {'padding': '"""SAME"""', 'name': 'name'}), "(nn, kernel_size, strides, padding='SAME', name=name)\n", (1123, 1176), False, 'from keras_cv_attention_models.attention_layers import activation_by_name, batchnorm_with_activation, conv2d_no_bias, depthwise_conv2d_no_bias, add_pre_post_process\n'), ((1190, 1324), 'keras_cv_attention_models.attention_layers.batchnorm_with_activation', 'batchnorm_with_activation', (['nn'], {'activation': 'activation', 'epsilon': 'BATCH_NORM_EPSILON', 'momentum': 'BATCH_NORM_MOMENTUM', 'name': "(name + 'dw_')"}), "(nn, activation=activation, epsilon=\n BATCH_NORM_EPSILON, momentum=BATCH_NORM_MOMENTUM, name=name + 'dw_')\n", (1215, 1324), False, 'from keras_cv_attention_models.attention_layers import activation_by_name, batchnorm_with_activation, conv2d_no_bias, depthwise_conv2d_no_bias, add_pre_post_process\n'), ((3646, 3694), 'tensorflow.pad', 'tf.pad', (['inputs', '[[0, 0], [0, 1], [0, 1], [0, 0]]'], {}), '(inputs, [[0, 0], [0, 1], [0, 1], [0, 0]])\n', (3652, 3694), True, 'import tensorflow as tf\n'), ((8951, 9069), 'tensorflow.keras.layers.Conv2D', 'keras.layers.Conv2D', (['(num_classes * num_anchors)'], {'kernel_size': '(1)', 'bias_initializer': 'bias_init', 'name': "(name + 'class_out')"}), "(num_classes * num_anchors, kernel_size=1,\n bias_initializer=bias_init, name=name + 'class_out')\n", (8970, 9069), False, 'from tensorflow import keras\n'), ((9167, 9239), 'tensorflow.keras.layers.Reshape', 'keras.layers.Reshape', (['[-1, num_classes]'], {'name': "(name + 'class_out_reshape')"}), "([-1, num_classes], name=name + 'class_out_reshape')\n", (9187, 9239), False, 'from tensorflow import keras\n'), ((9591, 9676), 'tensorflow.keras.layers.Conv2D', 'keras.layers.Conv2D', (['(4 * num_anchors)'], {'kernel_size': '(1)', 'name': "(name + 'regression_out')"}), "(4 * num_anchors, kernel_size=1, name=name +\n 'regression_out')\n", (9610, 9676), False, 'from tensorflow import keras\n'), ((9695, 9762), 'tensorflow.keras.layers.Reshape', 'keras.layers.Reshape', (['[-1, 4]'], {'name': "(name + 'regression_out_reshape')"}), "([-1, 4], name=name + 'regression_out_reshape')\n", (9715, 9762), False, 'from tensorflow import keras\n'), ((9965, 10030), 'keras_cv_attention_models.attention_layers.activation_by_name', 'activation_by_name', (['obj_out', '"""sigmoid"""'], {'name': "(name + 'object_out_')"}), "(obj_out, 'sigmoid', name=name + 'object_out_')\n", (9983, 10030), False, 'from keras_cv_attention_models.attention_layers import activation_by_name, batchnorm_with_activation, conv2d_no_bias, depthwise_conv2d_no_bias, add_pre_post_process\n'), ((10137, 10184), 'tensorflow.concat', 'tf.concat', (['[reg_out, cls_out, obj_out]'], {'axis': '(-1)'}), '([reg_out, cls_out, obj_out], axis=-1)\n', (10146, 10184), True, 'import tensorflow as tf\n'), ((10210, 10248), 'tensorflow.concat', 'tf.concat', (['[reg_out, cls_out]'], {'axis': '(-1)'}), '([reg_out, cls_out], axis=-1)\n', (10219, 10248), True, 'import tensorflow as tf\n'), ((13236, 13307), 'tensorflow.keras.layers.Activation', 'keras.layers.Activation', (['"""linear"""'], {'dtype': '"""float32"""', 'name': '"""outputs_fp32"""'}), "('linear', dtype='float32', name='outputs_fp32')\n", (13259, 13307), False, 'from tensorflow import keras\n'), ((2033, 2051), 'tensorflow.keras.layers.Add', 'keras.layers.Add', ([], {}), '()\n', (2049, 2051), False, 'from tensorflow import keras\n'), ((3212, 3278), 'tensorflow.keras.layers.MaxPooling2D', 'keras.layers.MaxPooling2D', ([], {'pool_size': 'ii', 'strides': '(1)', 'padding': '"""SAME"""'}), "(pool_size=ii, strides=1, padding='SAME')\n", (3237, 3278), False, 'from tensorflow import keras\n'), ((6310, 6330), 'tensorflow.shape', 'tf.shape', (['inputs[-1]'], {}), '(inputs[-1])\n', (6318, 6330), True, 'import tensorflow as tf\n'), ((9833, 9943), 'tensorflow.keras.layers.Conv2D', 'keras.layers.Conv2D', (['(1 * num_anchors)'], {'kernel_size': '(1)', 'bias_initializer': 'bias_init', 'name': "(name + 'object_out')"}), "(1 * num_anchors, kernel_size=1, bias_initializer=\n bias_init, name=name + 'object_out')\n", (9852, 9943), False, 'from tensorflow import keras\n'), ((10049, 10112), 'tensorflow.keras.layers.Reshape', 'keras.layers.Reshape', (['[-1, 1]'], {'name': "(name + 'object_out_reshape')"}), "([-1, 1], name=name + 'object_out_reshape')\n", (10069, 10112), False, 'from tensorflow import keras\n'), ((12208, 12258), 'keras_cv_attention_models.model_surgery.get_pyramide_feture_layers', 'model_surgery.get_pyramide_feture_layers', (['backbone'], {}), '(backbone)\n', (12248, 12258), False, 'from keras_cv_attention_models import model_surgery\n'), ((8463, 8493), 'tensorflow.math.log', 'tf.math.log', (['((1 - 0.01) / 0.01)'], {}), '((1 - 0.01) / 0.01)\n', (8474, 8493), True, 'import tensorflow as tf\n')] |
mrod0101/opentrons | robot-server/tests/service/json_api/test_response.py | 6450edb0421f1c2484c292f8583602d8f6fd13b8 | from pytest import raises
from pydantic import ValidationError
from robot_server.service.json_api.response import (
ResponseDataModel,
ResponseModel,
MultiResponseModel,
)
from tests.service.helpers import ItemResponseModel
def test_attributes_as_dict() -> None:
MyResponse = ResponseModel[ResponseDataModel, None]
obj_to_validate = {
"data": {"id": "123"},
"links": None,
}
my_response_object = MyResponse(**obj_to_validate)
assert my_response_object.dict() == {
"links": None,
"data": {
"id": "123",
},
}
def test_attributes_as_item_model() -> None:
ItemResponse = ResponseModel[ItemResponseModel, None]
obj_to_validate = {
"links": None,
"data": {"id": "123", "name": "apple", "quantity": 10, "price": 1.20},
}
my_response_obj = ItemResponse(**obj_to_validate)
assert my_response_obj.dict() == {
"links": None,
"data": {
"id": "123",
"name": "apple",
"quantity": 10,
"price": 1.20,
},
}
def test_list_item_model() -> None:
ItemResponse = MultiResponseModel[ItemResponseModel, None]
obj_to_validate = {
"links": None,
"data": [
{"id": "123", "name": "apple", "quantity": 10, "price": 1.20},
{"id": "321", "name": "banana", "quantity": 20, "price": 2.34},
],
}
my_response_obj = ItemResponse(**obj_to_validate)
assert my_response_obj.dict() == {
"links": None,
"data": [
{
"id": "123",
"name": "apple",
"quantity": 10,
"price": 1.20,
},
{
"id": "321",
"name": "banana",
"quantity": 20,
"price": 2.34,
},
],
}
def test_attributes_as_item_model_empty_dict() -> None:
ItemResponse = ResponseModel[ItemResponseModel, None]
obj_to_validate = {
"links": None,
"data": {
"id": "123",
},
}
with raises(ValidationError) as e:
ItemResponse(**obj_to_validate)
assert e.value.errors() == [
{
"loc": ("data", "name"),
"msg": "field required",
"type": "value_error.missing",
},
{
"loc": ("data", "quantity"),
"msg": "field required",
"type": "value_error.missing",
},
{
"loc": ("data", "price"),
"msg": "field required",
"type": "value_error.missing",
},
]
def test_response_constructed_with_resource_object() -> None:
ItemResponse = ResponseModel[ItemResponseModel, None]
item = ItemResponseModel(id="abc123", name="pear", price=1.2, quantity=10)
data = item.dict()
assert ItemResponse(data=data, links=None).dict() == {
"links": None,
"data": {
"id": "abc123",
"name": "pear",
"price": 1.2,
"quantity": 10,
},
}
def test_response_constructed_with_resource_object_list() -> None:
ItemResponse = MultiResponseModel[ItemResponseModel, None]
items = [
ItemResponseModel(id="1", name="apple", price=1.5, quantity=3),
ItemResponseModel(id="2", name="pear", price=1.2, quantity=10),
ItemResponseModel(id="3", name="orange", price=2.2, quantity=5),
]
response = ItemResponse(data=items, links=None)
assert response.dict() == {
"links": None,
"data": [
{
"id": "1",
"name": "apple",
"price": 1.5,
"quantity": 3,
},
{
"id": "2",
"name": "pear",
"price": 1.2,
"quantity": 10,
},
{
"id": "3",
"name": "orange",
"price": 2.2,
"quantity": 5,
},
],
}
| [((2783, 2850), 'tests.service.helpers.ItemResponseModel', 'ItemResponseModel', ([], {'id': '"""abc123"""', 'name': '"""pear"""', 'price': '(1.2)', 'quantity': '(10)'}), "(id='abc123', name='pear', price=1.2, quantity=10)\n", (2800, 2850), False, 'from tests.service.helpers import ItemResponseModel\n'), ((2121, 2144), 'pytest.raises', 'raises', (['ValidationError'], {}), '(ValidationError)\n', (2127, 2144), False, 'from pytest import raises\n'), ((3256, 3318), 'tests.service.helpers.ItemResponseModel', 'ItemResponseModel', ([], {'id': '"""1"""', 'name': '"""apple"""', 'price': '(1.5)', 'quantity': '(3)'}), "(id='1', name='apple', price=1.5, quantity=3)\n", (3273, 3318), False, 'from tests.service.helpers import ItemResponseModel\n'), ((3328, 3390), 'tests.service.helpers.ItemResponseModel', 'ItemResponseModel', ([], {'id': '"""2"""', 'name': '"""pear"""', 'price': '(1.2)', 'quantity': '(10)'}), "(id='2', name='pear', price=1.2, quantity=10)\n", (3345, 3390), False, 'from tests.service.helpers import ItemResponseModel\n'), ((3400, 3463), 'tests.service.helpers.ItemResponseModel', 'ItemResponseModel', ([], {'id': '"""3"""', 'name': '"""orange"""', 'price': '(2.2)', 'quantity': '(5)'}), "(id='3', name='orange', price=2.2, quantity=5)\n", (3417, 3463), False, 'from tests.service.helpers import ItemResponseModel\n')] |
MichaelMcFarland98/cse210-project | stickmanZ/__main__.py | 9e5a45a75f465fe123e33712d3c19dd88e98246a |
from game.game_view import GameView
from game.menu_view import menu_view
from game import constants
import arcade
SCREEN_WIDTH = constants.SCREEN_WIDTH
SCREEN_HEIGHT = constants.SCREEN_HEIGHT
SCREEN_TITLE = constants.SCREEN_TITLE
window = arcade.Window(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
start_view = menu_view()
window.show_view(start_view)
arcade.run()
| [((243, 299), 'arcade.Window', 'arcade.Window', (['SCREEN_WIDTH', 'SCREEN_HEIGHT', 'SCREEN_TITLE'], {}), '(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)\n', (256, 299), False, 'import arcade\n'), ((313, 324), 'game.menu_view.menu_view', 'menu_view', ([], {}), '()\n', (322, 324), False, 'from game.menu_view import menu_view\n'), ((354, 366), 'arcade.run', 'arcade.run', ([], {}), '()\n', (364, 366), False, 'import arcade\n')] |
congnt95/neutron | neutron/db/migration/alembic_migrations/versions/mitaka/contract/c6c112992c9_rbac_qos_policy.py | 6a73a362c5ff5b7c28c15a49f47a9900c0d2b4e1 | # Copyright 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from alembic import op
from oslo_utils import uuidutils
import sqlalchemy as sa
from neutron.db import rbac_db_models
"""rbac_qos_policy
Revision ID: c6c112992c9
Revises: 8a6d8bdae39
Create Date: 2015-11-25 18:45:03.831359
"""
# revision identifiers, used by Alembic.
revision = 'c6c112992c9'
down_revision = 'e3278ee65050'
depends_on = ('15e43b934f81',)
qos_rbacs = sa.Table(
'qospolicyrbacs', sa.MetaData(),
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('tenant_id', sa.String(length=255),
nullable=True),
sa.Column('target_tenant', sa.String(length=255),
nullable=False),
sa.Column('action', sa.String(length=255), nullable=False),
sa.Column('object_id', sa.String(length=36), nullable=False))
# A simple model of the qos_policies table with only the fields needed for
# the migration.
qos_policy = sa.Table('qos_policies', sa.MetaData(),
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('tenant_id',
sa.String(length=255)),
sa.Column('shared', sa.Boolean(), nullable=False))
def upgrade():
op.bulk_insert(qos_rbacs, get_values())
op.drop_column('qos_policies', 'shared')
def get_values():
session = sa.orm.Session(bind=op.get_bind())
values = []
for row in session.query(qos_policy).filter(qos_policy.c.shared).all():
values.append({'id': uuidutils.generate_uuid(), 'object_id': row[0],
'tenant_id': row[1], 'target_tenant': '*',
'action': rbac_db_models.ACCESS_SHARED})
session.commit()
return values
| [((1021, 1034), 'sqlalchemy.MetaData', 'sa.MetaData', ([], {}), '()\n', (1032, 1034), True, 'import sqlalchemy as sa\n'), ((1521, 1534), 'sqlalchemy.MetaData', 'sa.MetaData', ([], {}), '()\n', (1532, 1534), True, 'import sqlalchemy as sa\n'), ((1852, 1892), 'alembic.op.drop_column', 'op.drop_column', (['"""qos_policies"""', '"""shared"""'], {}), "('qos_policies', 'shared')\n", (1866, 1892), False, 'from alembic import op\n'), ((1056, 1076), 'sqlalchemy.String', 'sa.String', ([], {'length': '(36)'}), '(length=36)\n', (1065, 1076), True, 'import sqlalchemy as sa\n'), ((1122, 1143), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (1131, 1143), True, 'import sqlalchemy as sa\n'), ((1206, 1227), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (1215, 1227), True, 'import sqlalchemy as sa\n'), ((1284, 1305), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (1293, 1305), True, 'import sqlalchemy as sa\n'), ((1351, 1371), 'sqlalchemy.String', 'sa.String', ([], {'length': '(36)'}), '(length=36)\n', (1360, 1371), True, 'import sqlalchemy as sa\n'), ((1574, 1594), 'sqlalchemy.String', 'sa.String', ([], {'length': '(36)'}), '(length=36)\n', (1583, 1594), True, 'import sqlalchemy as sa\n'), ((1690, 1711), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (1699, 1711), True, 'import sqlalchemy as sa\n'), ((1756, 1768), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1766, 1768), True, 'import sqlalchemy as sa\n'), ((1947, 1960), 'alembic.op.get_bind', 'op.get_bind', ([], {}), '()\n', (1958, 1960), False, 'from alembic import op\n'), ((2083, 2108), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (2106, 2108), False, 'from oslo_utils import uuidutils\n')] |
ericchou1/network-devops-kafka-up-and-running | chapter5/ch5_gcp_subscriber.py | c128cf7359ba40c3005a02d3033b16b67c196779 | from concurrent.futures import TimeoutError
from google.cloud import pubsub_v1
project_id = "pubsub-testing-331300"
subscription_id = "test-sub"
# Number of seconds the subscriber should listen for messages
timeout = 5.0
subscriber = pubsub_v1.SubscriberClient()
# The `subscription_path` method creates a fully qualified identifier
# in the form `projects/{project_id}/subscriptions/{subscription_id}`
subscription_path = subscriber.subscription_path(project_id, subscription_id)
def callback(message: pubsub_v1.subscriber.message.Message) -> None:
print(f"Received {message}.")
message.ack()
streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)
print(f"Listening for messages on {subscription_path}..\n")
# Wrap subscriber in a 'with' block to automatically call close() when done.
with subscriber:
try:
# When `timeout` is not set, result() will block indefinitely,
# unless an exception is encountered first.
streaming_pull_future.result(timeout=timeout)
except TimeoutError:
streaming_pull_future.cancel() # Trigger the shutdown.
streaming_pull_future.result() # Block until the shutdown is complete.
| [((236, 264), 'google.cloud.pubsub_v1.SubscriberClient', 'pubsub_v1.SubscriberClient', ([], {}), '()\n', (262, 264), False, 'from google.cloud import pubsub_v1\n')] |
VaibhavBhujade/Blockchain-ERP-interoperability | odoo-13.0/addons/google_drive/models/res_config_settings.py | b5190a037fb6615386f7cbad024d51b0abd4ba03 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
class ResConfigSettings(models.TransientModel):
_inherit = "res.config.settings"
google_drive_authorization_code = fields.Char(string='Authorization Code', config_parameter='google_drive_authorization_code')
google_drive_uri = fields.Char(compute='_compute_drive_uri', string='URI', help="The URL to generate the authorization code from Google")
is_google_drive_token_generated = fields.Boolean(string='Refresh Token Generated')
@api.depends('google_drive_authorization_code')
def _compute_drive_uri(self):
google_drive_uri = self.env['google.service']._get_google_token_uri('drive', scope=self.env['google.drive.config'].get_google_scope())
for config in self:
config.google_drive_uri = google_drive_uri
def get_values(self):
res = super(ResConfigSettings, self).get_values()
refresh_token = self.env['ir.config_parameter'].sudo().get_param('google_drive_refresh_token', False)
res.update(is_google_drive_token_generated=bool(refresh_token))
return res
def confirm_setup_token(self):
params = self.env['ir.config_parameter'].sudo()
authorization_code_before = params.get_param('google_drive_authorization_code')
authorization_code = self.google_drive_authorization_code
if authorization_code != authorization_code_before:
refresh_token = (
self.env['google.service'].generate_refresh_token('drive', authorization_code)
if authorization_code else False
)
params.set_param('google_drive_refresh_token', refresh_token)
def action_setup_token(self):
self.ensure_one()
template = self.env.ref('google_drive.google_drive_auth_code_wizard')
return {
'name': _('Set up refresh token'),
'type': 'ir.actions.act_window',
'res_model': 'res.config.settings',
'views': [(template.id, 'form')],
'target': 'new',
}
| [((266, 363), 'odoo.fields.Char', 'fields.Char', ([], {'string': '"""Authorization Code"""', 'config_parameter': '"""google_drive_authorization_code"""'}), "(string='Authorization Code', config_parameter=\n 'google_drive_authorization_code')\n", (277, 363), False, 'from odoo import api, fields, models, _\n'), ((382, 505), 'odoo.fields.Char', 'fields.Char', ([], {'compute': '"""_compute_drive_uri"""', 'string': '"""URI"""', 'help': '"""The URL to generate the authorization code from Google"""'}), "(compute='_compute_drive_uri', string='URI', help=\n 'The URL to generate the authorization code from Google')\n", (393, 505), False, 'from odoo import api, fields, models, _\n'), ((539, 587), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'string': '"""Refresh Token Generated"""'}), "(string='Refresh Token Generated')\n", (553, 587), False, 'from odoo import api, fields, models, _\n'), ((594, 640), 'odoo.api.depends', 'api.depends', (['"""google_drive_authorization_code"""'], {}), "('google_drive_authorization_code')\n", (605, 640), False, 'from odoo import api, fields, models, _\n'), ((1931, 1956), 'odoo._', '_', (['"""Set up refresh token"""'], {}), "('Set up refresh token')\n", (1932, 1956), False, 'from odoo import api, fields, models, _\n')] |
sanger640/attMPTI | dataloaders/loader.py | a2784b784e0900f3603baa3779631da67bcd0562 | """ Data Loader for Generating Tasks
Author: Zhao Na, 2020
"""
import os
import random
import math
import glob
import numpy as np
import h5py as h5
import transforms3d
from itertools import combinations
import torch
from torch.utils.data import Dataset
def sample_K_pointclouds(data_path, num_point, pc_attribs, pc_augm, pc_augm_config,
scan_names, sampled_class, sampled_classes, is_support=False):
'''sample K pointclouds and the corresponding labels for one class (one_way)'''
ptclouds = []
labels = []
for scan_name in scan_names:
ptcloud, label = sample_pointcloud(data_path, num_point, pc_attribs, pc_augm, pc_augm_config,
scan_name, sampled_classes, sampled_class, support=is_support)
ptclouds.append(ptcloud)
labels.append(label)
ptclouds = np.stack(ptclouds, axis=0)
labels = np.stack(labels, axis=0)
return ptclouds, labels
def sample_pointcloud(data_path, num_point, pc_attribs, pc_augm, pc_augm_config, scan_name,
sampled_classes, sampled_class=0, support=False, random_sample=False):
sampled_classes = list(sampled_classes)
data = np.load(os.path.join(data_path, 'data', '%s.npy' %scan_name))
N = data.shape[0] #number of points in this scan
if random_sample:
sampled_point_inds = np.random.choice(np.arange(N), num_point, replace=(N < num_point))
else:
# If this point cloud is for support/query set, make sure that the sampled points contain target class
valid_point_inds = np.nonzero(data[:,6] == sampled_class)[0] # indices of points belonging to the sampled class
if N < num_point:
sampled_valid_point_num = len(valid_point_inds)
else:
valid_ratio = len(valid_point_inds)/float(N)
sampled_valid_point_num = int(valid_ratio*num_point)
sampled_valid_point_inds = np.random.choice(valid_point_inds, sampled_valid_point_num, replace=False)
sampled_other_point_inds = np.random.choice(np.arange(N), num_point-sampled_valid_point_num,
replace=(N<num_point))
sampled_point_inds = np.concatenate([sampled_valid_point_inds, sampled_other_point_inds])
data = data[sampled_point_inds]
xyz = data[:, 0:3]
rgb = data[:, 3:6]
labels = data[:,6].astype(np.int)
xyz_min = np.amin(xyz, axis=0)
xyz -= xyz_min
if pc_augm:
xyz = augment_pointcloud(xyz, pc_augm_config)
if 'XYZ' in pc_attribs:
xyz_min = np.amin(xyz, axis=0)
XYZ = xyz - xyz_min
xyz_max = np.amax(XYZ, axis=0)
XYZ = XYZ/xyz_max
ptcloud = []
if 'xyz' in pc_attribs: ptcloud.append(xyz)
if 'rgb' in pc_attribs: ptcloud.append(rgb/255.)
if 'XYZ' in pc_attribs: ptcloud.append(XYZ)
ptcloud = np.concatenate(ptcloud, axis=1)
if support:
groundtruth = labels==sampled_class
else:
groundtruth = np.zeros_like(labels)
for i, label in enumerate(labels):
if label in sampled_classes:
groundtruth[i] = sampled_classes.index(label)+1
return ptcloud, groundtruth
def augment_pointcloud(P, pc_augm_config):
"""" Augmentation on XYZ and jittering of everything """
M = transforms3d.zooms.zfdir2mat(1)
if pc_augm_config['scale'] > 1:
s = random.uniform(1 / pc_augm_config['scale'], pc_augm_config['scale'])
M = np.dot(transforms3d.zooms.zfdir2mat(s), M)
if pc_augm_config['rot'] == 1:
angle = random.uniform(0, 2 * math.pi)
M = np.dot(transforms3d.axangles.axangle2mat([0, 0, 1], angle), M) # z=upright assumption
if pc_augm_config['mirror_prob'] > 0: # mirroring x&y, not z
if random.random() < pc_augm_config['mirror_prob'] / 2:
M = np.dot(transforms3d.zooms.zfdir2mat(-1, [1, 0, 0]), M)
if random.random() < pc_augm_config['mirror_prob'] / 2:
M = np.dot(transforms3d.zooms.zfdir2mat(-1, [0, 1, 0]), M)
P[:, :3] = np.dot(P[:, :3], M.T)
if pc_augm_config['jitter']:
sigma, clip = 0.01, 0.05 # https://github.com/charlesq34/pointnet/blob/master/provider.py#L74
P = P + np.clip(sigma * np.random.randn(*P.shape), -1 * clip, clip).astype(np.float32)
return P
class MyDataset(Dataset):
def __init__(self, data_path, dataset_name, cvfold=0, num_episode=50000, n_way=3, k_shot=5, n_queries=1,
phase=None, mode='train', num_point=4096, pc_attribs='xyz', pc_augm=False, pc_augm_config=None):
super(MyDataset).__init__()
self.data_path = data_path
self.n_way = n_way
self.k_shot = k_shot
self.n_queries = n_queries
self.num_episode = num_episode
self.phase = phase
self.mode = mode
self.num_point = num_point
self.pc_attribs = pc_attribs
self.pc_augm = pc_augm
self.pc_augm_config = pc_augm_config
if dataset_name == 's3dis':
from dataloaders.s3dis import S3DISDataset
self.dataset = S3DISDataset(cvfold, data_path)
elif dataset_name == 'scannet':
from dataloaders.scannet import ScanNetDataset
self.dataset = ScanNetDataset(cvfold, data_path)
else:
raise NotImplementedError('Unknown dataset %s!' % dataset_name)
if mode == 'train':
self.classes = np.array(self.dataset.train_classes)
elif mode == 'test':
self.classes = np.array(self.dataset.test_classes)
else:
raise NotImplementedError('Unkown mode %s! [Options: train/test]' % mode)
print('MODE: {0} | Classes: {1}'.format(mode, self.classes))
self.class2scans = self.dataset.class2scans
def __len__(self):
return self.num_episode
def __getitem__(self, index, n_way_classes=None):
if n_way_classes is not None:
sampled_classes = np.array(n_way_classes)
else:
sampled_classes = np.random.choice(self.classes, self.n_way, replace=False)
support_ptclouds, support_masks, query_ptclouds, query_labels = self.generate_one_episode(sampled_classes)
if self.mode == 'train' and self.phase == 'metatrain':
remain_classes = list(set(self.classes) - set(sampled_classes))
try:
sampled_valid_classes = np.random.choice(np.array(remain_classes), self.n_way, replace=False)
except:
raise NotImplementedError('Error! The number remaining classes is less than %d_way' %self.n_way)
valid_support_ptclouds, valid_support_masks, valid_query_ptclouds, \
valid_query_labels = self.generate_one_episode(sampled_valid_classes)
return support_ptclouds.astype(np.float32), \
support_masks.astype(np.int32), \
query_ptclouds.astype(np.float32), \
query_labels.astype(np.int64), \
valid_support_ptclouds.astype(np.float32), \
valid_support_masks.astype(np.int32), \
valid_query_ptclouds.astype(np.float32), \
valid_query_labels.astype(np.int64)
else:
return support_ptclouds.astype(np.float32), \
support_masks.astype(np.int32), \
query_ptclouds.astype(np.float32), \
query_labels.astype(np.int64), \
sampled_classes.astype(np.int32)
def generate_one_episode(self, sampled_classes):
support_ptclouds = []
support_masks = []
query_ptclouds = []
query_labels = []
black_list = [] # to store the sampled scan names, in order to prevent sampling one scan several times...
for sampled_class in sampled_classes:
all_scannames = self.class2scans[sampled_class].copy()
if len(black_list) != 0:
all_scannames = [x for x in all_scannames if x not in black_list]
selected_scannames = np.random.choice(all_scannames, self.k_shot+self.n_queries, replace=False)
black_list.extend(selected_scannames)
query_scannames = selected_scannames[:self.n_queries]
support_scannames = selected_scannames[self.n_queries:]
query_ptclouds_one_way, query_labels_one_way = sample_K_pointclouds(self.data_path, self.num_point,
self.pc_attribs, self.pc_augm,
self.pc_augm_config,
query_scannames,
sampled_class,
sampled_classes,
is_support=False)
support_ptclouds_one_way, support_masks_one_way = sample_K_pointclouds(self.data_path, self.num_point,
self.pc_attribs, self.pc_augm,
self.pc_augm_config,
support_scannames,
sampled_class,
sampled_classes,
is_support=True)
query_ptclouds.append(query_ptclouds_one_way)
query_labels.append(query_labels_one_way)
support_ptclouds.append(support_ptclouds_one_way)
support_masks.append(support_masks_one_way)
support_ptclouds = np.stack(support_ptclouds, axis=0)
support_masks = np.stack(support_masks, axis=0)
query_ptclouds = np.concatenate(query_ptclouds, axis=0)
query_labels = np.concatenate(query_labels, axis=0)
return support_ptclouds, support_masks, query_ptclouds, query_labels
def batch_train_task_collate(batch):
task_train_support_ptclouds, task_train_support_masks, task_train_query_ptclouds, task_train_query_labels, \
task_valid_support_ptclouds, task_valid_support_masks, task_valid_query_ptclouds, task_valid_query_labels = list(zip(*batch))
task_train_support_ptclouds = np.stack(task_train_support_ptclouds)
task_train_support_masks = np.stack(task_train_support_masks)
task_train_query_ptclouds = np.stack(task_train_query_ptclouds)
task_train_query_labels = np.stack(task_train_query_labels)
task_valid_support_ptclouds = np.stack(task_valid_support_ptclouds)
task_valid_support_masks = np.stack(task_valid_support_masks)
task_valid_query_ptclouds = np.array(task_valid_query_ptclouds)
task_valid_query_labels = np.stack(task_valid_query_labels)
data = [torch.from_numpy(task_train_support_ptclouds).transpose(3,4), torch.from_numpy(task_train_support_masks),
torch.from_numpy(task_train_query_ptclouds).transpose(2,3), torch.from_numpy(task_train_query_labels),
torch.from_numpy(task_valid_support_ptclouds).transpose(3,4), torch.from_numpy(task_valid_support_masks),
torch.from_numpy(task_valid_query_ptclouds).transpose(2,3), torch.from_numpy(task_valid_query_labels)]
return data
################################################ Static Testing Dataset ################################################
class MyTestDataset(Dataset):
def __init__(self, data_path, dataset_name, cvfold=0, num_episode_per_comb=100, n_way=3, k_shot=5, n_queries=1,
num_point=4096, pc_attribs='xyz', mode='valid'):
super(MyTestDataset).__init__()
dataset = MyDataset(data_path, dataset_name, cvfold=cvfold, n_way=n_way, k_shot=k_shot, n_queries=n_queries,
mode='test', num_point=num_point, pc_attribs=pc_attribs, pc_augm=False)
self.classes = dataset.classes
if mode == 'valid':
test_data_path = os.path.join(data_path, 'S_%d_N_%d_K_%d_episodes_%d_pts_%d' % (
cvfold, n_way, k_shot, num_episode_per_comb, num_point))
elif mode == 'test':
test_data_path = os.path.join(data_path, 'S_%d_N_%d_K_%d_test_episodes_%d_pts_%d' % (
cvfold, n_way, k_shot, num_episode_per_comb, num_point))
else:
raise NotImplementedError('Mode (%s) is unknown!' %mode)
if os.path.exists(test_data_path):
self.file_names = glob.glob(os.path.join(test_data_path, '*.h5'))
self.num_episode = len(self.file_names)
else:
print('Test dataset (%s) does not exist...\n Constructing...' %test_data_path)
os.mkdir(test_data_path)
class_comb = list(combinations(self.classes, n_way)) # [(),(),(),...]
self.num_episode = len(class_comb) * num_episode_per_comb
episode_ind = 0
self.file_names = []
for sampled_classes in class_comb:
sampled_classes = list(sampled_classes)
for i in range(num_episode_per_comb):
data = dataset.__getitem__(episode_ind, sampled_classes)
out_filename = os.path.join(test_data_path, '%d.h5' % episode_ind)
write_episode(out_filename, data)
self.file_names.append(out_filename)
episode_ind += 1
def __len__(self):
return self.num_episode
def __getitem__(self, index):
file_name = self.file_names[index]
return read_episode(file_name)
def batch_test_task_collate(batch):
batch_support_ptclouds, batch_support_masks, batch_query_ptclouds, batch_query_labels, batch_sampled_classes = batch[0]
data = [torch.from_numpy(batch_support_ptclouds).transpose(2,3), torch.from_numpy(batch_support_masks),
torch.from_numpy(batch_query_ptclouds).transpose(1,2), torch.from_numpy(batch_query_labels.astype(np.int64))]
return data, batch_sampled_classes
def write_episode(out_filename, data):
support_ptclouds, support_masks, query_ptclouds, query_labels, sampled_classes = data
data_file = h5.File(out_filename, 'w')
data_file.create_dataset('support_ptclouds', data=support_ptclouds, dtype='float32')
data_file.create_dataset('support_masks', data=support_masks, dtype='int32')
data_file.create_dataset('query_ptclouds', data=query_ptclouds, dtype='float32')
data_file.create_dataset('query_labels', data=query_labels, dtype='int64')
data_file.create_dataset('sampled_classes', data=sampled_classes, dtype='int32')
data_file.close()
print('\t {0} saved! | classes: {1}'.format(out_filename, sampled_classes))
def read_episode(file_name):
data_file = h5.File(file_name, 'r')
support_ptclouds = data_file['support_ptclouds'][:]
support_masks = data_file['support_masks'][:]
query_ptclouds = data_file['query_ptclouds'][:]
query_labels = data_file['query_labels'][:]
sampled_classes = data_file['sampled_classes'][:]
return support_ptclouds, support_masks, query_ptclouds, query_labels, sampled_classes
################################################ Pre-train Dataset ################################################
class MyPretrainDataset(Dataset):
def __init__(self, data_path, classes, class2scans, mode='train', num_point=4096, pc_attribs='xyz',
pc_augm=False, pc_augm_config=None):
super(MyPretrainDataset).__init__()
self.data_path = data_path
self.classes = classes
self.num_point = num_point
self.pc_attribs = pc_attribs
self.pc_augm = pc_augm
self.pc_augm_config = pc_augm_config
train_block_names = []
all_block_names = []
for k, v in sorted(class2scans.items()):
all_block_names.extend(v)
n_blocks = len(v)
n_test_blocks = int(n_blocks * 0.1)
n_train_blocks = n_blocks - n_test_blocks
train_block_names.extend(v[:n_train_blocks])
if mode == 'train':
self.block_names = list(set(train_block_names))
elif mode == 'test':
self.block_names = list(set(all_block_names) - set(train_block_names))
else:
raise NotImplementedError('Mode is unknown!')
print('[Pretrain Dataset] Mode: {0} | Num_blocks: {1}'.format(mode, len(self.block_names)))
def __len__(self):
return len(self.block_names)
def __getitem__(self, index):
block_name = self.block_names[index]
ptcloud, label = sample_pointcloud(self.data_path, self.num_point, self.pc_attribs, self.pc_augm,
self.pc_augm_config, block_name, self.classes, random_sample=True)
return torch.from_numpy(ptcloud.transpose().astype(np.float32)), torch.from_numpy(label.astype(np.int64)) | [((868, 894), 'numpy.stack', 'np.stack', (['ptclouds'], {'axis': '(0)'}), '(ptclouds, axis=0)\n', (876, 894), True, 'import numpy as np\n'), ((908, 932), 'numpy.stack', 'np.stack', (['labels'], {'axis': '(0)'}), '(labels, axis=0)\n', (916, 932), True, 'import numpy as np\n'), ((2424, 2444), 'numpy.amin', 'np.amin', (['xyz'], {'axis': '(0)'}), '(xyz, axis=0)\n', (2431, 2444), True, 'import numpy as np\n'), ((2875, 2906), 'numpy.concatenate', 'np.concatenate', (['ptcloud'], {'axis': '(1)'}), '(ptcloud, axis=1)\n', (2889, 2906), True, 'import numpy as np\n'), ((3317, 3348), 'transforms3d.zooms.zfdir2mat', 'transforms3d.zooms.zfdir2mat', (['(1)'], {}), '(1)\n', (3345, 3348), False, 'import transforms3d\n'), ((4053, 4074), 'numpy.dot', 'np.dot', (['P[:, :3]', 'M.T'], {}), '(P[:, :3], M.T)\n', (4059, 4074), True, 'import numpy as np\n'), ((10645, 10682), 'numpy.stack', 'np.stack', (['task_train_support_ptclouds'], {}), '(task_train_support_ptclouds)\n', (10653, 10682), True, 'import numpy as np\n'), ((10714, 10748), 'numpy.stack', 'np.stack', (['task_train_support_masks'], {}), '(task_train_support_masks)\n', (10722, 10748), True, 'import numpy as np\n'), ((10781, 10816), 'numpy.stack', 'np.stack', (['task_train_query_ptclouds'], {}), '(task_train_query_ptclouds)\n', (10789, 10816), True, 'import numpy as np\n'), ((10847, 10880), 'numpy.stack', 'np.stack', (['task_train_query_labels'], {}), '(task_train_query_labels)\n', (10855, 10880), True, 'import numpy as np\n'), ((10915, 10952), 'numpy.stack', 'np.stack', (['task_valid_support_ptclouds'], {}), '(task_valid_support_ptclouds)\n', (10923, 10952), True, 'import numpy as np\n'), ((10984, 11018), 'numpy.stack', 'np.stack', (['task_valid_support_masks'], {}), '(task_valid_support_masks)\n', (10992, 11018), True, 'import numpy as np\n'), ((11051, 11086), 'numpy.array', 'np.array', (['task_valid_query_ptclouds'], {}), '(task_valid_query_ptclouds)\n', (11059, 11086), True, 'import numpy as np\n'), ((11117, 11150), 'numpy.stack', 'np.stack', (['task_valid_query_labels'], {}), '(task_valid_query_labels)\n', (11125, 11150), True, 'import numpy as np\n'), ((14578, 14604), 'h5py.File', 'h5.File', (['out_filename', '"""w"""'], {}), "(out_filename, 'w')\n", (14585, 14604), True, 'import h5py as h5\n'), ((15174, 15197), 'h5py.File', 'h5.File', (['file_name', '"""r"""'], {}), "(file_name, 'r')\n", (15181, 15197), True, 'import h5py as h5\n'), ((1212, 1265), 'os.path.join', 'os.path.join', (['data_path', '"""data"""', "('%s.npy' % scan_name)"], {}), "(data_path, 'data', '%s.npy' % scan_name)\n", (1224, 1265), False, 'import os\n'), ((1939, 2013), 'numpy.random.choice', 'np.random.choice', (['valid_point_inds', 'sampled_valid_point_num'], {'replace': '(False)'}), '(valid_point_inds, sampled_valid_point_num, replace=False)\n', (1955, 2013), True, 'import numpy as np\n'), ((2219, 2287), 'numpy.concatenate', 'np.concatenate', (['[sampled_valid_point_inds, sampled_other_point_inds]'], {}), '([sampled_valid_point_inds, sampled_other_point_inds])\n', (2233, 2287), True, 'import numpy as np\n'), ((2580, 2600), 'numpy.amin', 'np.amin', (['xyz'], {'axis': '(0)'}), '(xyz, axis=0)\n', (2587, 2600), True, 'import numpy as np\n'), ((2647, 2667), 'numpy.amax', 'np.amax', (['XYZ'], {'axis': '(0)'}), '(XYZ, axis=0)\n', (2654, 2667), True, 'import numpy as np\n'), ((3000, 3021), 'numpy.zeros_like', 'np.zeros_like', (['labels'], {}), '(labels)\n', (3013, 3021), True, 'import numpy as np\n'), ((3397, 3465), 'random.uniform', 'random.uniform', (["(1 / pc_augm_config['scale'])", "pc_augm_config['scale']"], {}), "(1 / pc_augm_config['scale'], pc_augm_config['scale'])\n", (3411, 3465), False, 'import random\n'), ((3572, 3602), 'random.uniform', 'random.uniform', (['(0)', '(2 * math.pi)'], {}), '(0, 2 * math.pi)\n', (3586, 3602), False, 'import random\n'), ((10035, 10069), 'numpy.stack', 'np.stack', (['support_ptclouds'], {'axis': '(0)'}), '(support_ptclouds, axis=0)\n', (10043, 10069), True, 'import numpy as np\n'), ((10094, 10125), 'numpy.stack', 'np.stack', (['support_masks'], {'axis': '(0)'}), '(support_masks, axis=0)\n', (10102, 10125), True, 'import numpy as np\n'), ((10151, 10189), 'numpy.concatenate', 'np.concatenate', (['query_ptclouds'], {'axis': '(0)'}), '(query_ptclouds, axis=0)\n', (10165, 10189), True, 'import numpy as np\n'), ((10213, 10249), 'numpy.concatenate', 'np.concatenate', (['query_labels'], {'axis': '(0)'}), '(query_labels, axis=0)\n', (10227, 10249), True, 'import numpy as np\n'), ((11226, 11268), 'torch.from_numpy', 'torch.from_numpy', (['task_train_support_masks'], {}), '(task_train_support_masks)\n', (11242, 11268), False, 'import torch\n'), ((11342, 11383), 'torch.from_numpy', 'torch.from_numpy', (['task_train_query_labels'], {}), '(task_train_query_labels)\n', (11358, 11383), False, 'import torch\n'), ((11459, 11501), 'torch.from_numpy', 'torch.from_numpy', (['task_valid_support_masks'], {}), '(task_valid_support_masks)\n', (11475, 11501), False, 'import torch\n'), ((11575, 11616), 'torch.from_numpy', 'torch.from_numpy', (['task_valid_query_labels'], {}), '(task_valid_query_labels)\n', (11591, 11616), False, 'import torch\n'), ((12836, 12866), 'os.path.exists', 'os.path.exists', (['test_data_path'], {}), '(test_data_path)\n', (12850, 12866), False, 'import os\n'), ((14230, 14267), 'torch.from_numpy', 'torch.from_numpy', (['batch_support_masks'], {}), '(batch_support_masks)\n', (14246, 14267), False, 'import torch\n'), ((1388, 1400), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (1397, 1400), True, 'import numpy as np\n'), ((1586, 1627), 'numpy.nonzero', 'np.nonzero', (['(data[:, (6)] == sampled_class)'], {}), '(data[:, (6)] == sampled_class)\n', (1596, 1627), True, 'import numpy as np\n'), ((2066, 2078), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (2075, 2078), True, 'import numpy as np\n'), ((3485, 3516), 'transforms3d.zooms.zfdir2mat', 'transforms3d.zooms.zfdir2mat', (['s'], {}), '(s)\n', (3513, 3516), False, 'import transforms3d\n'), ((3622, 3673), 'transforms3d.axangles.axangle2mat', 'transforms3d.axangles.axangle2mat', (['[0, 0, 1]', 'angle'], {}), '([0, 0, 1], angle)\n', (3655, 3673), False, 'import transforms3d\n'), ((3779, 3794), 'random.random', 'random.random', ([], {}), '()\n', (3792, 3794), False, 'import random\n'), ((3914, 3929), 'random.random', 'random.random', ([], {}), '()\n', (3927, 3929), False, 'import random\n'), ((5091, 5122), 'dataloaders.s3dis.S3DISDataset', 'S3DISDataset', (['cvfold', 'data_path'], {}), '(cvfold, data_path)\n', (5103, 5122), False, 'from dataloaders.s3dis import S3DISDataset\n'), ((5429, 5465), 'numpy.array', 'np.array', (['self.dataset.train_classes'], {}), '(self.dataset.train_classes)\n', (5437, 5465), True, 'import numpy as np\n'), ((5959, 5982), 'numpy.array', 'np.array', (['n_way_classes'], {}), '(n_way_classes)\n', (5967, 5982), True, 'import numpy as np\n'), ((6027, 6084), 'numpy.random.choice', 'np.random.choice', (['self.classes', 'self.n_way'], {'replace': '(False)'}), '(self.classes, self.n_way, replace=False)\n', (6043, 6084), True, 'import numpy as np\n'), ((8089, 8165), 'numpy.random.choice', 'np.random.choice', (['all_scannames', '(self.k_shot + self.n_queries)'], {'replace': '(False)'}), '(all_scannames, self.k_shot + self.n_queries, replace=False)\n', (8105, 8165), True, 'import numpy as np\n'), ((12332, 12455), 'os.path.join', 'os.path.join', (['data_path', "('S_%d_N_%d_K_%d_episodes_%d_pts_%d' % (cvfold, n_way, k_shot,\n num_episode_per_comb, num_point))"], {}), "(data_path, 'S_%d_N_%d_K_%d_episodes_%d_pts_%d' % (cvfold,\n n_way, k_shot, num_episode_per_comb, num_point))\n", (12344, 12455), False, 'import os\n'), ((13115, 13139), 'os.mkdir', 'os.mkdir', (['test_data_path'], {}), '(test_data_path)\n', (13123, 13139), False, 'import os\n'), ((3855, 3898), 'transforms3d.zooms.zfdir2mat', 'transforms3d.zooms.zfdir2mat', (['(-1)', '[1, 0, 0]'], {}), '(-1, [1, 0, 0])\n', (3883, 3898), False, 'import transforms3d\n'), ((3990, 4033), 'transforms3d.zooms.zfdir2mat', 'transforms3d.zooms.zfdir2mat', (['(-1)', '[0, 1, 0]'], {}), '(-1, [0, 1, 0])\n', (4018, 4033), False, 'import transforms3d\n'), ((5249, 5282), 'dataloaders.scannet.ScanNetDataset', 'ScanNetDataset', (['cvfold', 'data_path'], {}), '(cvfold, data_path)\n', (5263, 5282), False, 'from dataloaders.scannet import ScanNetDataset\n'), ((5522, 5557), 'numpy.array', 'np.array', (['self.dataset.test_classes'], {}), '(self.dataset.test_classes)\n', (5530, 5557), True, 'import numpy as np\n'), ((11164, 11209), 'torch.from_numpy', 'torch.from_numpy', (['task_train_support_ptclouds'], {}), '(task_train_support_ptclouds)\n', (11180, 11209), False, 'import torch\n'), ((11282, 11325), 'torch.from_numpy', 'torch.from_numpy', (['task_train_query_ptclouds'], {}), '(task_train_query_ptclouds)\n', (11298, 11325), False, 'import torch\n'), ((11397, 11442), 'torch.from_numpy', 'torch.from_numpy', (['task_valid_support_ptclouds'], {}), '(task_valid_support_ptclouds)\n', (11413, 11442), False, 'import torch\n'), ((11515, 11558), 'torch.from_numpy', 'torch.from_numpy', (['task_valid_query_ptclouds'], {}), '(task_valid_query_ptclouds)\n', (11531, 11558), False, 'import torch\n'), ((12563, 12691), 'os.path.join', 'os.path.join', (['data_path', "('S_%d_N_%d_K_%d_test_episodes_%d_pts_%d' % (cvfold, n_way, k_shot,\n num_episode_per_comb, num_point))"], {}), "(data_path, 'S_%d_N_%d_K_%d_test_episodes_%d_pts_%d' % (cvfold,\n n_way, k_shot, num_episode_per_comb, num_point))\n", (12575, 12691), False, 'import os\n'), ((12908, 12944), 'os.path.join', 'os.path.join', (['test_data_path', '"""*.h5"""'], {}), "(test_data_path, '*.h5')\n", (12920, 12944), False, 'import os\n'), ((13171, 13204), 'itertools.combinations', 'combinations', (['self.classes', 'n_way'], {}), '(self.classes, n_way)\n', (13183, 13204), False, 'from itertools import combinations\n'), ((14173, 14213), 'torch.from_numpy', 'torch.from_numpy', (['batch_support_ptclouds'], {}), '(batch_support_ptclouds)\n', (14189, 14213), False, 'import torch\n'), ((14281, 14319), 'torch.from_numpy', 'torch.from_numpy', (['batch_query_ptclouds'], {}), '(batch_query_ptclouds)\n', (14297, 14319), False, 'import torch\n'), ((6415, 6439), 'numpy.array', 'np.array', (['remain_classes'], {}), '(remain_classes)\n', (6423, 6439), True, 'import numpy as np\n'), ((13625, 13676), 'os.path.join', 'os.path.join', (['test_data_path', "('%d.h5' % episode_ind)"], {}), "(test_data_path, '%d.h5' % episode_ind)\n", (13637, 13676), False, 'import os\n'), ((4244, 4269), 'numpy.random.randn', 'np.random.randn', (['*P.shape'], {}), '(*P.shape)\n', (4259, 4269), True, 'import numpy as np\n')] |
grayfallstown/greendoge-blockchain | greendoge/types/condition_with_args.py | 31e325913374d694dc0859140d006a642e7f95ac | from dataclasses import dataclass
from typing import List
from greendoge.types.condition_opcodes import ConditionOpcode
from greendoge.util.streamable import Streamable, streamable
@dataclass(frozen=True)
@streamable
class ConditionWithArgs(Streamable):
"""
This structure is used to store parsed CLVM conditions
Conditions in CLVM have either format of (opcode, var1) or (opcode, var1, var2)
"""
opcode: ConditionOpcode
vars: List[bytes]
| [((185, 207), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (194, 207), False, 'from dataclasses import dataclass\n')] |
pp81381/home-assistant | homeassistant/components/hunterdouglas_powerview/entity.py | 23e362faf387c1535be0abab81b30d8e4631df4b | """The nexia integration base entity."""
from aiopvapi.resources.shade import ATTR_TYPE
from homeassistant.const import ATTR_MODEL, ATTR_SW_VERSION
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
DEVICE_FIRMWARE,
DEVICE_MAC_ADDRESS,
DEVICE_MODEL,
DEVICE_NAME,
DEVICE_SERIAL_NUMBER,
DOMAIN,
FIRMWARE,
FIRMWARE_BUILD,
FIRMWARE_REVISION,
FIRMWARE_SUB_REVISION,
MANUFACTURER,
)
class HDEntity(CoordinatorEntity):
"""Base class for hunter douglas entities."""
def __init__(self, coordinator, device_info, room_name, unique_id):
"""Initialize the entity."""
super().__init__(coordinator)
self._room_name = room_name
self._unique_id = unique_id
self._device_info = device_info
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def device_info(self) -> DeviceInfo:
"""Return the device_info of the device."""
firmware = self._device_info[DEVICE_FIRMWARE]
sw_version = f"{firmware[FIRMWARE_REVISION]}.{firmware[FIRMWARE_SUB_REVISION]}.{firmware[FIRMWARE_BUILD]}"
return DeviceInfo(
identifiers={(DOMAIN, self._device_info[DEVICE_SERIAL_NUMBER])},
connections={
(dr.CONNECTION_NETWORK_MAC, self._device_info[DEVICE_MAC_ADDRESS])
},
name=self._device_info[DEVICE_NAME],
suggested_area=self._room_name,
model=self._device_info[DEVICE_MODEL],
sw_version=sw_version,
manufacturer=MANUFACTURER,
)
class ShadeEntity(HDEntity):
"""Base class for hunter douglas shade entities."""
def __init__(self, coordinator, device_info, room_name, shade, shade_name):
"""Initialize the shade."""
super().__init__(coordinator, device_info, room_name, shade.id)
self._shade_name = shade_name
self._shade = shade
@property
def device_info(self) -> DeviceInfo:
"""Return the device_info of the device."""
device_info = DeviceInfo(
identifiers={(DOMAIN, self._shade.id)},
name=self._shade_name,
suggested_area=self._room_name,
manufacturer=MANUFACTURER,
model=str(self._shade.raw_data[ATTR_TYPE]),
via_device=(DOMAIN, self._device_info[DEVICE_SERIAL_NUMBER]),
)
for shade in self._shade.shade_types:
if shade.shade_type == device_info[ATTR_MODEL]:
device_info[ATTR_MODEL] = shade.description
break
if FIRMWARE not in self._shade.raw_data:
return device_info
firmware = self._shade.raw_data[FIRMWARE]
sw_version = f"{firmware[FIRMWARE_REVISION]}.{firmware[FIRMWARE_SUB_REVISION]}.{firmware[FIRMWARE_BUILD]}"
device_info[ATTR_SW_VERSION] = sw_version
return device_info
| [((1314, 1646), 'homeassistant.helpers.entity.DeviceInfo', 'DeviceInfo', ([], {'identifiers': '{(DOMAIN, self._device_info[DEVICE_SERIAL_NUMBER])}', 'connections': '{(dr.CONNECTION_NETWORK_MAC, self._device_info[DEVICE_MAC_ADDRESS])}', 'name': 'self._device_info[DEVICE_NAME]', 'suggested_area': 'self._room_name', 'model': 'self._device_info[DEVICE_MODEL]', 'sw_version': 'sw_version', 'manufacturer': 'MANUFACTURER'}), '(identifiers={(DOMAIN, self._device_info[DEVICE_SERIAL_NUMBER])},\n connections={(dr.CONNECTION_NETWORK_MAC, self._device_info[\n DEVICE_MAC_ADDRESS])}, name=self._device_info[DEVICE_NAME],\n suggested_area=self._room_name, model=self._device_info[DEVICE_MODEL],\n sw_version=sw_version, manufacturer=MANUFACTURER)\n', (1324, 1646), False, 'from homeassistant.helpers.entity import DeviceInfo\n')] |
daxter-army/key-cast | keycast_env/lib/python3.8/site-packages/Xlib/ext/res.py | cadc88c6760839b37b7fef969294800d4c38fb1b | # Xlib.ext.res -- X-Resource extension module
#
# Copyright (C) 2021 Aleksei Bavshin <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street,
# Fifth Floor,
# Boston, MA 02110-1301 USA
"""X-Resource extension allows a client to query the X server about its usage
of various resources.
For detailed description see any of the following documents.
Protocol specification:
https://www.x.org/releases/current/doc/resourceproto/resproto.txt
XCB Protocol specification:
https://cgit.freedesktop.org/xcb/proto/tree/src/res.xml
"""
from Xlib.protocol import rq
RES_MAJOR_VERSION = 1
RES_MINOR_VERSION = 2
extname = "X-Resource"
# v1.0
ResQueryVersion = 0
ResQueryClients = 1
ResQueryClientResources = 2
ResQueryClientPixmapBytes = 3
# v1.2
ResQueryClientIds = 4
ResQueryResourceBytes = 5
class QueryVersion(rq.ReplyRequest):
_request = rq.Struct(
rq.Card8("opcode"),
rq.Opcode(ResQueryVersion),
rq.RequestLength(),
rq.Card8("client_major"),
rq.Card8("client_minor"),
rq.Pad(2))
_reply = rq.Struct(
rq.ReplyCode(),
rq.Pad(1),
rq.Card16("sequence_number"),
rq.ReplyLength(),
rq.Card16("server_major"),
rq.Card16("server_minor"),
rq.Pad(20))
def query_version(self, client_major=RES_MAJOR_VERSION,
client_minor=RES_MINOR_VERSION):
""" Query the protocol version supported by the X server.
The client sends the highest supported version to the server and the
server sends the highest version it supports, but no higher than the
requested version."""
return QueryVersion(
display=self.display,
opcode=self.display.get_extension_major(extname),
client_major=client_major,
client_minor=client_minor)
Client = rq.Struct(
rq.Card32("resource_base"),
rq.Card32("resource_mask"))
class QueryClients(rq.ReplyRequest):
_request = rq.Struct(
rq.Card8("opcode"),
rq.Opcode(ResQueryClients),
rq.RequestLength())
_reply = rq.Struct(
rq.ReplyCode(),
rq.Pad(1),
rq.Card16("sequence_number"),
rq.ReplyLength(),
rq.LengthOf("clients", 4),
rq.Pad(20),
rq.List("clients", Client))
def query_clients(self):
"""Request the list of all currently connected clients."""
return QueryClients(
display=self.display,
opcode=self.display.get_extension_major(extname))
Type = rq.Struct(
rq.Card32("resource_type"),
rq.Card32("count"))
class QueryClientResources(rq.ReplyRequest):
_request = rq.Struct(
rq.Card8("opcode"),
rq.Opcode(ResQueryClientResources),
rq.RequestLength(),
rq.Card32("client"))
_reply = rq.Struct(
rq.ReplyCode(),
rq.Pad(1),
rq.Card16("sequence_number"),
rq.ReplyLength(),
rq.LengthOf("types", 4),
rq.Pad(20),
rq.List("types", Type))
def query_client_resources(self, client):
"""Request the number of resources owned by a client.
The server will return the counts of each type of resource.
"""
return QueryClientResources(
display=self.display,
opcode=self.display.get_extension_major(extname),
client=client)
class QueryClientPixmapBytes(rq.ReplyRequest):
_request = rq.Struct(
rq.Card8("opcode"),
rq.Opcode(ResQueryClientPixmapBytes),
rq.RequestLength(),
rq.Card32("client"))
_reply = rq.Struct(
rq.ReplyCode(),
rq.Pad(1),
rq.Card16("sequence_number"),
rq.ReplyLength(),
rq.Card32("bytes"),
rq.Card32("bytes_overflow"),
rq.Pad(16))
def query_client_pixmap_bytes(self, client):
"""Query the pixmap usage of some client.
The returned number is a sum of memory usage of each pixmap that can be
attributed to the given client.
"""
return QueryClientPixmapBytes(
display=self.display,
opcode=self.display.get_extension_major(extname),
client=client)
class SizeOf(rq.LengthOf):
"""A SizeOf stores the size in bytes of some other Field whose size
may vary, e.g. List
"""
def __init__(self, name, size, item_size):
rq.LengthOf.__init__(self, name, size)
self.item_size = item_size
def parse_value(self, length, display):
return length // self.item_size
ClientXIDMask = 1 << 0
LocalClientPIDMask = 1 << 1
ClientIdSpec = rq.Struct(
rq.Card32("client"),
rq.Card32("mask"))
ClientIdValue = rq.Struct(
rq.Object("spec", ClientIdSpec),
SizeOf("value", 4, 4),
rq.List("value", rq.Card32Obj))
class QueryClientIds(rq.ReplyRequest):
_request = rq.Struct(
rq.Card8("opcode"),
rq.Opcode(ResQueryClientIds),
rq.RequestLength(),
rq.LengthOf("specs", 4),
rq.List("specs", ClientIdSpec))
_reply = rq.Struct(
rq.ReplyCode(),
rq.Pad(1),
rq.Card16("sequence_number"),
rq.ReplyLength(),
rq.LengthOf("ids", 4),
rq.Pad(20),
rq.List("ids", ClientIdValue))
def query_client_ids(self, specs):
"""Request to identify a given set of clients with some identification method.
The request sends a list of specifiers that select clients and
identification methods to server. The server then tries to identify the
chosen clients using the identification methods specified for each client.
The server returns IDs for those clients that were successfully identified.
"""
return QueryClientIds(
display=self.display,
opcode=self.display.get_extension_major(extname),
specs=specs)
ResourceIdSpec = rq.Struct(
rq.Card32("resource"),
rq.Card32("type"))
ResourceSizeSpec = rq.Struct(
# inline struct ResourceIdSpec to work around
# a parser bug with nested objects
rq.Card32("resource"),
rq.Card32("type"),
rq.Card32("bytes"),
rq.Card32("ref_count"),
rq.Card32("use_count"))
ResourceSizeValue = rq.Struct(
rq.Object("size", ResourceSizeSpec),
rq.LengthOf("cross_references", 4),
rq.List("cross_references", ResourceSizeSpec))
class QueryResourceBytes(rq.ReplyRequest):
_request = rq.Struct(
rq.Card8("opcode"),
rq.Opcode(ResQueryResourceBytes),
rq.RequestLength(),
rq.Card32("client"),
rq.LengthOf("specs", 4),
rq.List("specs", ResourceIdSpec))
_reply = rq.Struct(
rq.ReplyCode(),
rq.Pad(1),
rq.Card16("sequence_number"),
rq.ReplyLength(),
rq.LengthOf("sizes", 4),
rq.Pad(20),
rq.List("sizes", ResourceSizeValue))
def query_resource_bytes(self, client, specs):
"""Query the sizes of resources from X server.
The request sends a list of specifiers that selects resources for size
calculation. The server tries to calculate the sizes of chosen resources
and returns an estimate for a resource only if the size could be determined
"""
return QueryResourceBytes(
display=self.display,
opcode=self.display.get_extension_major(extname),
client=client,
specs=specs)
def init(disp, info):
disp.extension_add_method("display", "res_query_version", query_version)
disp.extension_add_method("display", "res_query_clients", query_clients)
disp.extension_add_method("display", "res_query_client_resources",
query_client_resources)
disp.extension_add_method("display", "res_query_client_pixmap_bytes",
query_client_pixmap_bytes)
disp.extension_add_method("display", "res_query_client_ids",
query_client_ids)
disp.extension_add_method("display", "res_query_resource_bytes",
query_resource_bytes)
| [((2634, 2660), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""resource_base"""'], {}), "('resource_base')\n", (2643, 2660), False, 'from Xlib.protocol import rq\n'), ((2671, 2697), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""resource_mask"""'], {}), "('resource_mask')\n", (2680, 2697), False, 'from Xlib.protocol import rq\n'), ((3383, 3409), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""resource_type"""'], {}), "('resource_type')\n", (3392, 3409), False, 'from Xlib.protocol import rq\n'), ((3420, 3438), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""count"""'], {}), "('count')\n", (3429, 3438), False, 'from Xlib.protocol import rq\n'), ((5582, 5601), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""client"""'], {}), "('client')\n", (5591, 5601), False, 'from Xlib.protocol import rq\n'), ((5612, 5629), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""mask"""'], {}), "('mask')\n", (5621, 5629), False, 'from Xlib.protocol import rq\n'), ((5672, 5703), 'Xlib.protocol.rq.Object', 'rq.Object', (['"""spec"""', 'ClientIdSpec'], {}), "('spec', ClientIdSpec)\n", (5681, 5703), False, 'from Xlib.protocol import rq\n'), ((5746, 5776), 'Xlib.protocol.rq.List', 'rq.List', (['"""value"""', 'rq.Card32Obj'], {}), "('value', rq.Card32Obj)\n", (5753, 5776), False, 'from Xlib.protocol import rq\n'), ((6933, 6954), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""resource"""'], {}), "('resource')\n", (6942, 6954), False, 'from Xlib.protocol import rq\n'), ((6965, 6982), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""type"""'], {}), "('type')\n", (6974, 6982), False, 'from Xlib.protocol import rq\n'), ((7127, 7148), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""resource"""'], {}), "('resource')\n", (7136, 7148), False, 'from Xlib.protocol import rq\n'), ((7159, 7176), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""type"""'], {}), "('type')\n", (7168, 7176), False, 'from Xlib.protocol import rq\n'), ((7187, 7205), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""bytes"""'], {}), "('bytes')\n", (7196, 7205), False, 'from Xlib.protocol import rq\n'), ((7216, 7238), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""ref_count"""'], {}), "('ref_count')\n", (7225, 7238), False, 'from Xlib.protocol import rq\n'), ((7249, 7271), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""use_count"""'], {}), "('use_count')\n", (7258, 7271), False, 'from Xlib.protocol import rq\n'), ((7318, 7353), 'Xlib.protocol.rq.Object', 'rq.Object', (['"""size"""', 'ResourceSizeSpec'], {}), "('size', ResourceSizeSpec)\n", (7327, 7353), False, 'from Xlib.protocol import rq\n'), ((7364, 7398), 'Xlib.protocol.rq.LengthOf', 'rq.LengthOf', (['"""cross_references"""', '(4)'], {}), "('cross_references', 4)\n", (7375, 7398), False, 'from Xlib.protocol import rq\n'), ((7409, 7454), 'Xlib.protocol.rq.List', 'rq.List', (['"""cross_references"""', 'ResourceSizeSpec'], {}), "('cross_references', ResourceSizeSpec)\n", (7416, 7454), False, 'from Xlib.protocol import rq\n'), ((1590, 1608), 'Xlib.protocol.rq.Card8', 'rq.Card8', (['"""opcode"""'], {}), "('opcode')\n", (1598, 1608), False, 'from Xlib.protocol import rq\n'), ((1623, 1649), 'Xlib.protocol.rq.Opcode', 'rq.Opcode', (['ResQueryVersion'], {}), '(ResQueryVersion)\n', (1632, 1649), False, 'from Xlib.protocol import rq\n'), ((1664, 1682), 'Xlib.protocol.rq.RequestLength', 'rq.RequestLength', ([], {}), '()\n', (1680, 1682), False, 'from Xlib.protocol import rq\n'), ((1697, 1721), 'Xlib.protocol.rq.Card8', 'rq.Card8', (['"""client_major"""'], {}), "('client_major')\n", (1705, 1721), False, 'from Xlib.protocol import rq\n'), ((1736, 1760), 'Xlib.protocol.rq.Card8', 'rq.Card8', (['"""client_minor"""'], {}), "('client_minor')\n", (1744, 1760), False, 'from Xlib.protocol import rq\n'), ((1775, 1784), 'Xlib.protocol.rq.Pad', 'rq.Pad', (['(2)'], {}), '(2)\n', (1781, 1784), False, 'from Xlib.protocol import rq\n'), ((1824, 1838), 'Xlib.protocol.rq.ReplyCode', 'rq.ReplyCode', ([], {}), '()\n', (1836, 1838), False, 'from Xlib.protocol import rq\n'), ((1853, 1862), 'Xlib.protocol.rq.Pad', 'rq.Pad', (['(1)'], {}), '(1)\n', (1859, 1862), False, 'from Xlib.protocol import rq\n'), ((1877, 1905), 'Xlib.protocol.rq.Card16', 'rq.Card16', (['"""sequence_number"""'], {}), "('sequence_number')\n", (1886, 1905), False, 'from Xlib.protocol import rq\n'), ((1920, 1936), 'Xlib.protocol.rq.ReplyLength', 'rq.ReplyLength', ([], {}), '()\n', (1934, 1936), False, 'from Xlib.protocol import rq\n'), ((1951, 1976), 'Xlib.protocol.rq.Card16', 'rq.Card16', (['"""server_major"""'], {}), "('server_major')\n", (1960, 1976), False, 'from Xlib.protocol import rq\n'), ((1991, 2016), 'Xlib.protocol.rq.Card16', 'rq.Card16', (['"""server_minor"""'], {}), "('server_minor')\n", (2000, 2016), False, 'from Xlib.protocol import rq\n'), ((2031, 2041), 'Xlib.protocol.rq.Pad', 'rq.Pad', (['(20)'], {}), '(20)\n', (2037, 2041), False, 'from Xlib.protocol import rq\n'), ((2781, 2799), 'Xlib.protocol.rq.Card8', 'rq.Card8', (['"""opcode"""'], {}), "('opcode')\n", (2789, 2799), False, 'from Xlib.protocol import rq\n'), ((2814, 2840), 'Xlib.protocol.rq.Opcode', 'rq.Opcode', (['ResQueryClients'], {}), '(ResQueryClients)\n', (2823, 2840), False, 'from Xlib.protocol import rq\n'), ((2855, 2873), 'Xlib.protocol.rq.RequestLength', 'rq.RequestLength', ([], {}), '()\n', (2871, 2873), False, 'from Xlib.protocol import rq\n'), ((2913, 2927), 'Xlib.protocol.rq.ReplyCode', 'rq.ReplyCode', ([], {}), '()\n', (2925, 2927), False, 'from Xlib.protocol import rq\n'), ((2942, 2951), 'Xlib.protocol.rq.Pad', 'rq.Pad', (['(1)'], {}), '(1)\n', (2948, 2951), False, 'from Xlib.protocol import rq\n'), ((2966, 2994), 'Xlib.protocol.rq.Card16', 'rq.Card16', (['"""sequence_number"""'], {}), "('sequence_number')\n", (2975, 2994), False, 'from Xlib.protocol import rq\n'), ((3009, 3025), 'Xlib.protocol.rq.ReplyLength', 'rq.ReplyLength', ([], {}), '()\n', (3023, 3025), False, 'from Xlib.protocol import rq\n'), ((3040, 3065), 'Xlib.protocol.rq.LengthOf', 'rq.LengthOf', (['"""clients"""', '(4)'], {}), "('clients', 4)\n", (3051, 3065), False, 'from Xlib.protocol import rq\n'), ((3080, 3090), 'Xlib.protocol.rq.Pad', 'rq.Pad', (['(20)'], {}), '(20)\n', (3086, 3090), False, 'from Xlib.protocol import rq\n'), ((3105, 3131), 'Xlib.protocol.rq.List', 'rq.List', (['"""clients"""', 'Client'], {}), "('clients', Client)\n", (3112, 3131), False, 'from Xlib.protocol import rq\n'), ((3530, 3548), 'Xlib.protocol.rq.Card8', 'rq.Card8', (['"""opcode"""'], {}), "('opcode')\n", (3538, 3548), False, 'from Xlib.protocol import rq\n'), ((3563, 3597), 'Xlib.protocol.rq.Opcode', 'rq.Opcode', (['ResQueryClientResources'], {}), '(ResQueryClientResources)\n', (3572, 3597), False, 'from Xlib.protocol import rq\n'), ((3612, 3630), 'Xlib.protocol.rq.RequestLength', 'rq.RequestLength', ([], {}), '()\n', (3628, 3630), False, 'from Xlib.protocol import rq\n'), ((3645, 3664), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""client"""'], {}), "('client')\n", (3654, 3664), False, 'from Xlib.protocol import rq\n'), ((3704, 3718), 'Xlib.protocol.rq.ReplyCode', 'rq.ReplyCode', ([], {}), '()\n', (3716, 3718), False, 'from Xlib.protocol import rq\n'), ((3733, 3742), 'Xlib.protocol.rq.Pad', 'rq.Pad', (['(1)'], {}), '(1)\n', (3739, 3742), False, 'from Xlib.protocol import rq\n'), ((3757, 3785), 'Xlib.protocol.rq.Card16', 'rq.Card16', (['"""sequence_number"""'], {}), "('sequence_number')\n", (3766, 3785), False, 'from Xlib.protocol import rq\n'), ((3800, 3816), 'Xlib.protocol.rq.ReplyLength', 'rq.ReplyLength', ([], {}), '()\n', (3814, 3816), False, 'from Xlib.protocol import rq\n'), ((3831, 3854), 'Xlib.protocol.rq.LengthOf', 'rq.LengthOf', (['"""types"""', '(4)'], {}), "('types', 4)\n", (3842, 3854), False, 'from Xlib.protocol import rq\n'), ((3869, 3879), 'Xlib.protocol.rq.Pad', 'rq.Pad', (['(20)'], {}), '(20)\n', (3875, 3879), False, 'from Xlib.protocol import rq\n'), ((3894, 3916), 'Xlib.protocol.rq.List', 'rq.List', (['"""types"""', 'Type'], {}), "('types', Type)\n", (3901, 3916), False, 'from Xlib.protocol import rq\n'), ((4352, 4370), 'Xlib.protocol.rq.Card8', 'rq.Card8', (['"""opcode"""'], {}), "('opcode')\n", (4360, 4370), False, 'from Xlib.protocol import rq\n'), ((4385, 4421), 'Xlib.protocol.rq.Opcode', 'rq.Opcode', (['ResQueryClientPixmapBytes'], {}), '(ResQueryClientPixmapBytes)\n', (4394, 4421), False, 'from Xlib.protocol import rq\n'), ((4436, 4454), 'Xlib.protocol.rq.RequestLength', 'rq.RequestLength', ([], {}), '()\n', (4452, 4454), False, 'from Xlib.protocol import rq\n'), ((4469, 4488), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""client"""'], {}), "('client')\n", (4478, 4488), False, 'from Xlib.protocol import rq\n'), ((4528, 4542), 'Xlib.protocol.rq.ReplyCode', 'rq.ReplyCode', ([], {}), '()\n', (4540, 4542), False, 'from Xlib.protocol import rq\n'), ((4557, 4566), 'Xlib.protocol.rq.Pad', 'rq.Pad', (['(1)'], {}), '(1)\n', (4563, 4566), False, 'from Xlib.protocol import rq\n'), ((4581, 4609), 'Xlib.protocol.rq.Card16', 'rq.Card16', (['"""sequence_number"""'], {}), "('sequence_number')\n", (4590, 4609), False, 'from Xlib.protocol import rq\n'), ((4624, 4640), 'Xlib.protocol.rq.ReplyLength', 'rq.ReplyLength', ([], {}), '()\n', (4638, 4640), False, 'from Xlib.protocol import rq\n'), ((4655, 4673), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""bytes"""'], {}), "('bytes')\n", (4664, 4673), False, 'from Xlib.protocol import rq\n'), ((4688, 4715), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""bytes_overflow"""'], {}), "('bytes_overflow')\n", (4697, 4715), False, 'from Xlib.protocol import rq\n'), ((4730, 4740), 'Xlib.protocol.rq.Pad', 'rq.Pad', (['(16)'], {}), '(16)\n', (4736, 4740), False, 'from Xlib.protocol import rq\n'), ((5322, 5360), 'Xlib.protocol.rq.LengthOf.__init__', 'rq.LengthOf.__init__', (['self', 'name', 'size'], {}), '(self, name, size)\n', (5342, 5360), False, 'from Xlib.protocol import rq\n'), ((5862, 5880), 'Xlib.protocol.rq.Card8', 'rq.Card8', (['"""opcode"""'], {}), "('opcode')\n", (5870, 5880), False, 'from Xlib.protocol import rq\n'), ((5895, 5923), 'Xlib.protocol.rq.Opcode', 'rq.Opcode', (['ResQueryClientIds'], {}), '(ResQueryClientIds)\n', (5904, 5923), False, 'from Xlib.protocol import rq\n'), ((5938, 5956), 'Xlib.protocol.rq.RequestLength', 'rq.RequestLength', ([], {}), '()\n', (5954, 5956), False, 'from Xlib.protocol import rq\n'), ((5971, 5994), 'Xlib.protocol.rq.LengthOf', 'rq.LengthOf', (['"""specs"""', '(4)'], {}), "('specs', 4)\n", (5982, 5994), False, 'from Xlib.protocol import rq\n'), ((6009, 6039), 'Xlib.protocol.rq.List', 'rq.List', (['"""specs"""', 'ClientIdSpec'], {}), "('specs', ClientIdSpec)\n", (6016, 6039), False, 'from Xlib.protocol import rq\n'), ((6079, 6093), 'Xlib.protocol.rq.ReplyCode', 'rq.ReplyCode', ([], {}), '()\n', (6091, 6093), False, 'from Xlib.protocol import rq\n'), ((6108, 6117), 'Xlib.protocol.rq.Pad', 'rq.Pad', (['(1)'], {}), '(1)\n', (6114, 6117), False, 'from Xlib.protocol import rq\n'), ((6132, 6160), 'Xlib.protocol.rq.Card16', 'rq.Card16', (['"""sequence_number"""'], {}), "('sequence_number')\n", (6141, 6160), False, 'from Xlib.protocol import rq\n'), ((6175, 6191), 'Xlib.protocol.rq.ReplyLength', 'rq.ReplyLength', ([], {}), '()\n', (6189, 6191), False, 'from Xlib.protocol import rq\n'), ((6206, 6227), 'Xlib.protocol.rq.LengthOf', 'rq.LengthOf', (['"""ids"""', '(4)'], {}), "('ids', 4)\n", (6217, 6227), False, 'from Xlib.protocol import rq\n'), ((6242, 6252), 'Xlib.protocol.rq.Pad', 'rq.Pad', (['(20)'], {}), '(20)\n', (6248, 6252), False, 'from Xlib.protocol import rq\n'), ((6267, 6296), 'Xlib.protocol.rq.List', 'rq.List', (['"""ids"""', 'ClientIdValue'], {}), "('ids', ClientIdValue)\n", (6274, 6296), False, 'from Xlib.protocol import rq\n'), ((7544, 7562), 'Xlib.protocol.rq.Card8', 'rq.Card8', (['"""opcode"""'], {}), "('opcode')\n", (7552, 7562), False, 'from Xlib.protocol import rq\n'), ((7577, 7609), 'Xlib.protocol.rq.Opcode', 'rq.Opcode', (['ResQueryResourceBytes'], {}), '(ResQueryResourceBytes)\n', (7586, 7609), False, 'from Xlib.protocol import rq\n'), ((7624, 7642), 'Xlib.protocol.rq.RequestLength', 'rq.RequestLength', ([], {}), '()\n', (7640, 7642), False, 'from Xlib.protocol import rq\n'), ((7657, 7676), 'Xlib.protocol.rq.Card32', 'rq.Card32', (['"""client"""'], {}), "('client')\n", (7666, 7676), False, 'from Xlib.protocol import rq\n'), ((7691, 7714), 'Xlib.protocol.rq.LengthOf', 'rq.LengthOf', (['"""specs"""', '(4)'], {}), "('specs', 4)\n", (7702, 7714), False, 'from Xlib.protocol import rq\n'), ((7729, 7761), 'Xlib.protocol.rq.List', 'rq.List', (['"""specs"""', 'ResourceIdSpec'], {}), "('specs', ResourceIdSpec)\n", (7736, 7761), False, 'from Xlib.protocol import rq\n'), ((7801, 7815), 'Xlib.protocol.rq.ReplyCode', 'rq.ReplyCode', ([], {}), '()\n', (7813, 7815), False, 'from Xlib.protocol import rq\n'), ((7830, 7839), 'Xlib.protocol.rq.Pad', 'rq.Pad', (['(1)'], {}), '(1)\n', (7836, 7839), False, 'from Xlib.protocol import rq\n'), ((7854, 7882), 'Xlib.protocol.rq.Card16', 'rq.Card16', (['"""sequence_number"""'], {}), "('sequence_number')\n", (7863, 7882), False, 'from Xlib.protocol import rq\n'), ((7897, 7913), 'Xlib.protocol.rq.ReplyLength', 'rq.ReplyLength', ([], {}), '()\n', (7911, 7913), False, 'from Xlib.protocol import rq\n'), ((7928, 7951), 'Xlib.protocol.rq.LengthOf', 'rq.LengthOf', (['"""sizes"""', '(4)'], {}), "('sizes', 4)\n", (7939, 7951), False, 'from Xlib.protocol import rq\n'), ((7966, 7976), 'Xlib.protocol.rq.Pad', 'rq.Pad', (['(20)'], {}), '(20)\n', (7972, 7976), False, 'from Xlib.protocol import rq\n'), ((7991, 8026), 'Xlib.protocol.rq.List', 'rq.List', (['"""sizes"""', 'ResourceSizeValue'], {}), "('sizes', ResourceSizeValue)\n", (7998, 8026), False, 'from Xlib.protocol import rq\n')] |
scwatts/rubra | rubra/cmdline_args.py | 0be2c1e8d56badf134954baab9705f3aeb38d426 | # Process the unix command line of the pipeline.
import argparse
from version import rubra_version
def get_cmdline_args():
return parser.parse_args()
parser = argparse.ArgumentParser(
description='A bioinformatics pipeline system.')
parser.add_argument(
'pipeline',
metavar='PIPELINE_FILE',
type=str,
help='Your Ruffus pipeline stages (a Python module)')
parser.add_argument(
'--config',
metavar='CONFIG_FILE',
type=str,
nargs='+',
required=True,
help='One or more configuration files (Python modules)')
parser.add_argument(
'--verbose',
type=int,
choices=(0, 1, 2),
required=False,
default=1,
help='Output verbosity level: 0 = quiet; 1 = normal; \
2 = chatty (default is 1)')
parser.add_argument(
'--style',
type=str,
choices=('print', 'run', 'flowchart', 'touchfiles'),
required=False,
default='print',
help='Pipeline behaviour: print; run; touchfiles; flowchart (default is print)')
parser.add_argument(
'--force',
metavar='TASKNAME',
type=str,
required=False,
default=[],
nargs='+',
help='tasks which are forced to be out of date regardless of timestamps')
parser.add_argument(
'--end',
metavar='TASKNAME',
type=str,
required=False,
help='end points (tasks) for the pipeline')
parser.add_argument(
'--rebuild',
type=str,
choices=('fromstart', 'fromend'),
required=False,
default='fromstart',
help='rebuild outputs by working back from end tasks or forwards \
from start tasks (default is fromstart)')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + rubra_version)
| [((166, 238), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""A bioinformatics pipeline system."""'}), "(description='A bioinformatics pipeline system.')\n", (189, 238), False, 'import argparse\n')] |
KH241/Geohashing | main.py | d5d51278776c4dc0e3d6e6c39cbd31c1f4442fc1 | import webbrowser
import config
from Generator import Generator
def main():
generator = Generator()
latitude, longitude = generator.getCoordinates()
webbrowser.open(config.api_request.format(latitude, longitude))
if __name__ == '__main__':
main()
| [((94, 105), 'Generator.Generator', 'Generator', ([], {}), '()\n', (103, 105), False, 'from Generator import Generator\n'), ((180, 226), 'config.api_request.format', 'config.api_request.format', (['latitude', 'longitude'], {}), '(latitude, longitude)\n', (205, 226), False, 'import config\n')] |
WAvdBeek/CoAPthon3 | knx-test.py | 5aa9d6a6d9a2903d86b113da538df9bd970e6b44 | #!/usr/bin/env python
import getopt
import socket
import sys
import cbor
#from cbor2 import dumps, loads
import json
import time
import traceback
from coapthon.client.helperclient import HelperClient
from coapthon.utils import parse_uri
from coapthon import defines
client = None
paths = {}
paths_extend = {}
my_base = ""
def usage(): # pragma: no cover
print("Command:\tknxcoapclient.py -o -p [-P]")
print("Options:")
print("\t-o, --operation=\tGET|GETNONE|PUT|POST|DELETE|DISCOVER|OBSERVE")
print("\t-p, --path=\t\t\tPath of the request")
print("\t-P, --payload=\t\tPayload of the request")
print("\t-c, --contenttype=\t\tcontenttype of the request")
print("\t-f, --payload-file=\t\tFile with payload of the request")
def get_url(line):
data = line.split(">")
url = data[0]
return url[1:]
def get_ct(line):
tagvalues = line.split(";")
for tag in tagvalues:
if tag.startswith("ct"):
ct_value_all = tag.split("=")
ct_value = ct_value_all[1].split(",")
return ct_value[0]
return ""
def get_base(url):
# python3 knxcoapclient.py -o GET -p coap://[fe80::6513:3050:71a7:5b98]:63914/a -c 50
my_url = url.replace("coap://","")
mybase = my_url.split("/")
return mybase[0]
def get_base_from_link(payload):
print("get_base_from_link\n")
global paths
global paths_extend
lines = payload.splitlines()
# add the
if len(paths) == 0:
my_base = get_base(get_url(lines[0]))
return my_base
def get_sn(my_base):
print("Get SN :");
sn = execute_get("coap://"+my_base+"/dev/sn", 60)
json_data = cbor.loads(sn.payload)
#print ("SN : ", json_data)
return json_data
def install(my_base):
sn = get_sn(my_base)
print (" SN : ", sn)
iid = "5" # installation id
if "000001" == sn :
# sensor, e.g sending
print ("--------------------")
print ("Installing SN: ", sn)
content = { 2: "reset"}
print("reset :", content);
execute_post("coap://"+my_base+"/.well-known/knx", 60, 60, content)
content = True
print("set PM :", content);
execute_put("coap://"+my_base+"/dev/pm", 60, 60, content)
content = 1
print("set IA :", content);
execute_put("coap://"+my_base+"/dev/ia", 60, 60, content)
content = iid
execute_put("coap://"+my_base+"/dev/iid", 60, 60, content)
content = { 2: "startLoading"}
print("lsm :", content);
execute_post("coap://"+my_base+"/a/lsm", 60, 60, content)
execute_get("coap://"+my_base+"/a/lsm", 60)
# group object table
# id (0)= 1
# url (11)= /p/light
# ga (7 )= 1
# cflags (8) = ["r" ] ; read = 1, write = 2, transmit = 3 update = 4
content = [ {0: 1, 11: "p/push", 7:[1], 8: [2] } ]
execute_post("coap://"+my_base+"/fp/g", 60, 60, content)
execute_get("coap://"+my_base+"/fp/g", 40)
# recipient table
# id (0)= 1
# ia (12)
# url (11)= .knx
# ga (7 )= 1
# cflags (8) = ["r" ] ; read = 1, write = 2, transmit = 3 update = 4
content = [ {0: 1, 11: "/p/push", 7:[1], 12 :"blah.blah" } ]
execute_post("coap://"+my_base+"/fp/r", 60, 60, content)
content = False
print("set PM :", content);
execute_put("coap://"+my_base+"/dev/pm", 60, 60, content)
content = { 2: "loadComplete"}
print("lsm :", content);
execute_post("coap://"+my_base+"/a/lsm", 60, 60, content)
execute_get("coap://"+my_base+"/a/lsm", 60)
if "000002" == sn :
# actuator ==> receipient
# should use /fp/r
print ("--------------------")
print ("installing SN: ", sn)
content = True
print("set PM :", content);
execute_put("coap://"+my_base+"/dev/pm", 60, 60, content)
content = 2
print("set IA :", content);
execute_put("coap://"+my_base+"/dev/ia", 60, 60, content)
content = iid
execute_put("coap://"+my_base+"/dev/iid", 60, 60, content)
content = { 2: "startLoading"}
print("lsm :", content);
execute_post("coap://"+my_base+"/a/lsm", 60, 60, content)
execute_get("coap://"+my_base+"/a/lsm", 60)
# group object table
# id (0)= 1
# url (11)= /p/light
# ga (7 )= 1
# cflags (8) = ["r" ] ; read = 1, write = 2, transmit = 3 update = 4
content = [ { 0: 1, 11: "/p/light", 7:[1], 8: [1] } ]
execute_post("coap://"+my_base+"/fp/g", 60, 60, content)
execute_get("coap://"+my_base+"/fp/g", 40)
# publisher table
# id (0)= 1
# ia (12)
# url (11)= .knx
# ga (7 )= 1
# cflags (8) = ["r" ] ; read = 1, write = 2, transmit = 3 update = 4
content = [ {0: 1, 11: ".knx", 7:[1], 12 :"blah.blah" } ]
execute_post("coap://"+my_base+"/fp/p", 60, 60, content)
content = False
print("set PM :", content);
execute_put("coap://"+my_base+"/dev/pm", 60, 60, content)
content = { 2: "loadComplete"}
print("lsm :", content);
execute_post("coap://"+my_base+"/a/lsm", 60, 60, content)
execute_get("coap://"+my_base+"/a/lsm", 60)
# do a post
content = {"sia": 5678, "st": 55, "ga": 1, "value": 100 }
content = { 4: 5678, "st": 55, 7: 1, "value": 100 }
# st ga value (1)
#content = { 5: { 6: 1, 7: 1, 1: True } }
#execute_post("coap://"+my_base+"/.knx", 60, 60, content)
content = {4: 5678, 5: { 6: 1, 7: 1, 1: False } }
#execute_post("coap://"+my_base+"/.knx", 60, 60, content)
#execute_post("coap://[FF02::FD]:5683/.knx", 60, 60, content)
# no json tags as strings
def do_sequence_dev(my_base):
print("===================")
print("Get SN :");
sn = execute_get("coap://"+my_base+"/dev/sn", 60)
sn = get_sn(my_base)
print (" SN : ", sn)
print("===================")
print("Get HWT :");
execute_get("coap://"+my_base+"/dev/hwt", 60)
print("===================")
print("Get HWV :");
execute_get("coap://"+my_base+"/dev/hwv", 60)
print("===================")
print("Get FWV :");
execute_get("coap://"+my_base+"/dev/fwv", 60)
print("===================")
print("Get Model :");
execute_get("coap://"+my_base+"/dev/model", 60)
print("===================")
content = True
print("set PM :", content);
execute_put("coap://"+my_base+"/dev/pm", 60, 60, content)
execute_get("coap://"+my_base+"/dev/pm", 60)
content = False
print("set PM :", content);
execute_put("coap://"+my_base+"/dev/pm", 60, 60, content)
execute_get("coap://"+my_base+"/dev/pm", 60)
print("===================")
content = 44
print("set IA :", content);
execute_put("coap://"+my_base+"/dev/ia", 60, 60, content)
execute_get("coap://"+my_base+"/dev/ia", 60)
print("===================")
content = "my host name"
print("set hostname :", content);
execute_put("coap://"+my_base+"/dev/hostname", 60, 60, content)
execute_get("coap://"+my_base+"/dev/hostname", 60)
print("===================")
content = " iid xxx"
print("set iid :", content);
execute_put("coap://"+my_base+"/dev/iid", 60, 60, content)
execute_get("coap://"+my_base+"/dev/iid", 60)
# id ==> 0
# href ==> 11
# ga ==> 7
# cflag ==> 8
def do_sequence_fp_g_int(my_base):
# url, content, accept, contents
content = [ {0: 1, 11: "xxxx1", 8: [1,2,3,4,5], 7:[2222,3333]} ]
execute_post("coap://"+my_base+"/fp/g", 60, 60, content)
execute_get("coap://"+my_base+"/fp/g/1", 60)
execute_get("coap://"+my_base+"/fp/g", 40)
content = [ {0: 2, 11: "xxxxyyy2", 8: [1,4,5], 7:[44,55,33]}, {0: 3, 1: "xxxxyyy3", 8: [1,4,5], 7:[44,55,33]} ]
execute_post("coap://"+my_base+"/fp/g", 60, 60, content)
execute_get("coap://"+my_base+"/fp/g/2", 60)
execute_get("coap://"+my_base+"/fp/g/3", 60)
execute_get("coap://"+my_base+"/fp/g", 40)
execute_del("coap://"+my_base+"/fp/g/3", 60)
execute_get("coap://"+my_base+"/fp/g/3", 60)
execute_get("coap://"+my_base+"/fp/g", 40)
def do_sequence_fp_g(my_base):
# url, content, accept, contents
content = [ {"id": 1, "href": "xxxx1", "cflag": [1,2,3,4,5], "ga":[2222,3333]} ]
execute_post("coap://"+my_base+"/fp/g", 60, 60, content)
execute_get("coap://"+my_base+"/fp/g/1", 60)
execute_get("coap://"+my_base+"/fp/g", 40)
content = [ {"id": 2, "href": "xxxxyyy2", "cflag": [1,4,5], "ga":[44,55,33]}, {"id": 3, "href": "xxxxyyy3", "cflag": [1,4,5], "ga":[44,55,33]} ]
execute_post("coap://"+my_base+"/fp/g", 60, 60, content)
execute_get("coap://"+my_base+"/fp/g/2", 60)
execute_get("coap://"+my_base+"/fp/g/3", 60)
execute_get("coap://"+my_base+"/fp/g", 40)
execute_del("coap://"+my_base+"/fp/g/3", 60)
execute_get("coap://"+my_base+"/fp/g/3", 60)
execute_get("coap://"+my_base+"/fp/g", 40)
# id ==> 0
# ia ==> 12
# path ==> 112
# url ==> 10
# ga ==> 7
def do_sequence_fp_p_int(my_base):
# url, content, accept, contents
content = [ {0: 1, 12: "Ia.IA1", 112: "path1", 7:[2222,3333]} ]
execute_post("coap://"+my_base+"/fp/p", 60, 60, content)
execute_get("coap://"+my_base+"/fp/p/1", 60)
# 40 == application-link format
execute_get("coap://"+my_base+"/fp/p", 40)
content = [ {0: 2, 12: "xxxxyyyia2", 112: "path2", 7:[44,55,33]},
{0: 3, 12: "xxxxyyyia3", 112: "path3", 7:[44,55,33]} ]
execute_post("coap://"+my_base+"/fp/p", 60, 60, content)
execute_get("coap://"+my_base+"/fp/p/2", 60)
execute_get("coap://"+my_base+"/fp/p/3", 60)
execute_get("coap://"+my_base+"/fp/p", 40)
execute_del("coap://"+my_base+"/fp/p/3", 60)
execute_get("coap://"+my_base+"/fp/p/3", 60)
execute_get("coap://"+my_base+"/fp/p", 40)
def do_sequence_fp_p(my_base):
# url, content, accept, contents
content = [ {"id": 1, "ia": "Ia.IA1", "path": "path1", "ga":[2222,3333]} ]
execute_post("coap://"+my_base+"/fp/p", 60, 60, content)
execute_get("coap://"+my_base+"/fp/p/1", 60)
# 40 == application-link format
execute_get("coap://"+my_base+"/fp/p", 40)
content = [ {"id": 2, "ia": "xxxxyyyia2", "path": "path2","ga":[44,55,33]}, {"id": 3, "ia": "xxxxyyyia3", "path": "path3","ga":[44,55,33]} ]
execute_post("coap://"+my_base+"/fp/p", 60, 60, content)
execute_get("coap://"+my_base+"/fp/p/2", 60)
execute_get("coap://"+my_base+"/fp/p/3", 60)
execute_get("coap://"+my_base+"/fp/p", 40)
execute_del("coap://"+my_base+"/fp/p/3", 60)
execute_get("coap://"+my_base+"/fp/p/3", 60)
execute_get("coap://"+my_base+"/fp/p", 40)
# id ==> 0
# ia ==> 12
# path ==> 112
# url ==> 10
# ga ==> 7
def do_sequence_fp_r_int(my_base):
# url, content, accept, contents
content = [ { 0: 1, 12: "r-Ia.IA1", 112: "r-path1", 7:[2222,3333]} ]
execute_post("coap://"+my_base+"/fp/r", 60, 60, content)
execute_get("coap://"+my_base+"/fp/r/1", 60)
execute_get("coap://"+my_base+"/fp/r", 40)
content = [ { 0: 2, 12: "r-Ia.IA2", 10: "url2", 112: "r-path2", 7:[44,55,33]},
{0: 3, 12: "r-Ia.IA3", 112: "r-path3", 7:[44,55,33]} ]
execute_post("coap://"+my_base+"/fp/r", 60, 60, content)
execute_get("coap://"+my_base+"/fp/r/2", 60)
execute_get("coap://"+my_base+"/fp/r/3", 60)
execute_get("coap://"+my_base+"/fp/r", 40)
execute_del("coap://"+my_base+"/fp/r/3", 60)
execute_get("coap://"+my_base+"/fp/r/3", 60)
execute_get("coap://"+my_base+"/fp/r", 40)
def do_sequence_fp_r(my_base):
# url, content, accept, contents
content = [ {"id": 1, "ia": "r-Ia.IA1", "path": "r-path1", "ga":[2222,3333]} ]
execute_post("coap://"+my_base+"/fp/r", 60, 60, content)
execute_get("coap://"+my_base+"/fp/r/1", 60)
execute_get("coap://"+my_base+"/fp/r", 40)
content = [ {"id": 2, "ia": "r-Ia.IA2", "path": "r-path2", "ga":[44,55,33]}, {"id": 3, "ia": "r-Ia.IA3", "path": "r-path3", "ga":[44,55,33]} ]
execute_post("coap://"+my_base+"/fp/r", 60, 60, content)
execute_get("coap://"+my_base+"/fp/r/2", 60)
execute_get("coap://"+my_base+"/fp/r/3", 60)
execute_get("coap://"+my_base+"/fp/r", 40)
execute_del("coap://"+my_base+"/fp/r/3", 60)
execute_get("coap://"+my_base+"/fp/r/3", 60)
execute_get("coap://"+my_base+"/fp/r", 40)
# cmd ==> 2
def do_sequence_lsm_int(my_base):
# url, content, accept, contents
execute_get("coap://"+my_base+"/a/lsm", 60)
content = {2 : "startLoading"}
execute_post("coap://"+my_base+"/a/lsm", 60, 60, content)
execute_get("coap://"+my_base+"/a/lsm", 60)
content = {2 : "loadComplete"}
execute_post("coap://"+my_base+"/a/lsm", 60, 60, content)
execute_get("coap://"+my_base+"/a/lsm", 60)
content = {2 : "unload"}
execute_post("coap://"+my_base+"/a/lsm", 60, 60, content)
execute_get("coap://"+my_base+"/a/lsm", 60)
def do_sequence_lsm(my_base):
# url, content, accept, contents
execute_get("coap://"+my_base+"/a/lsm", 60)
content = {"cmd": "startLoading"}
execute_post("coap://"+my_base+"/a/lsm", 60, 60, content)
execute_get("coap://"+my_base+"/a/lsm", 60)
content = {"cmd": "loadComplete"}
execute_post("coap://"+my_base+"/a/lsm", 60, 60, content)
execute_get("coap://"+my_base+"/a/lsm", 60)
content = {"cmd": "unload"}
execute_post("coap://"+my_base+"/a/lsm", 60, 60, content)
execute_get("coap://"+my_base+"/a/lsm", 60)
# ./knx resource
# sia ==> 4
# ga ==> 7
# st 6
def do_sequence_knx_knx_int(my_base):
# url, content, accept, contents
execute_get("coap://"+my_base+"/.knx", 60)
content = {"value": { 4 : 5, 7: 7777 , 6 : "rp"}}
execute_post("coap://"+my_base+"/.knx", 60, 60, content)
execute_get("coap://"+my_base+"/.knx", 60)
# ./knx resource
def do_sequence_knx_knx(my_base):
# url, content, accept, contents
execute_get("coap://"+my_base+"/.knx", 60)
content = {"value": { "sia" : 5, "ga": 7, "st": "rp"}}
execute_post("coap://"+my_base+"/.knx", 60, 60, content)
execute_get("coap://"+my_base+"/.knx", 60)
def do_sequence_knx_spake(my_base):
# url, content, accept, contents
# sequence:
# - parameter exchange: 15 (rnd)- return value
# - credential exchange: 10 - return value
# - pase verification exchange: 14 - no return value
content = { 15: b"a-15-sdfsdred"}
execute_post("coap://"+my_base+"/.well-known/knx/spake", 60, 60, content)
# pa
content = { 10: b"s10dfsdfsfs" }
execute_post("coap://"+my_base+"/.well-known/knx/spake", 60, 60, content)
# ca
content = { 14: b"a15sdfsdred"}
execute_post("coap://"+my_base+"/.well-known/knx/spake", 60, 60, content)
# expecting return
def do_sequence_knx_idevid(my_base):
# url, content, accept, contents
execute_get("coap://"+my_base+"/.well-known/knx/idevid", 282)
def do_sequence_knx_ldevid(my_base):
# url, content, accept, contents
execute_get("coap://"+my_base+"/.well-known/knx/ldevid", 282)
def do_sequence_knx_osn(my_base):
# url, content, accept, contents
execute_get("coap://"+my_base+"/.well-known/knx/osn", 60)
def do_sequence_knx_crc(my_base):
# url, content, accept, contents
execute_get("coap://"+my_base+"/.well-known/knx/crc", 60)
def do_sequence_oscore(my_base):
# url, content, accept, contents
execute_get("coap://"+my_base+"/f/oscore", 40)
execute_get("coap://"+my_base+"/p/oscore/replwdo", 60)
content = 105
execute_put("coap://"+my_base+"/p/oscore/replwdo", 60, 60, content)
execute_get("coap://"+my_base+"/p/oscore/replwdo", 60)
execute_get("coap://"+my_base+"/p/oscore/osndelay", 60)
content = 1050
execute_put("coap://"+my_base+"/p/oscore/osndelay", 60, 60, content)
execute_get("coap://"+my_base+"/p/oscore/osndelay", 60)
def do_sequence_core_knx(my_base):
# url, content, accept, contents
execute_get("coap://"+my_base+"/.well-known/knx", 60)
content = { 1 : 5, 2: "reset"}
execute_post("coap://"+my_base+"/.well-known/knx", 60, 60, content)
def do_sequence_a_sen(my_base):
# url, content, accept, contents
content = {2: "reset"}
execute_post("coap://"+my_base+"/a/sen", 60, 60, content)
def do_sequence_auth(my_base):
# url, content, accept, contents
execute_get("coap://"+my_base+"/auth", 40)
def do_sequence_auth_at(my_base):
# url, content, accept, contents
execute_get("coap://"+my_base+"/auth/at", 40)
#
content = {0: b"id", 1 : 20, 2:b"ms",3:"hkdf", 4:"alg", 5:b"salt", 6:b"contextId"}
execute_post("coap://"+my_base+"/auth/at", 60, 60, content)
content = {0: b"id2", 1 : 20, 2:b"ms",3:"hkdf", 4:"alg", 5:b"salt", 6:b"contextId2"}
execute_post("coap://"+my_base+"/auth/at", 60, 60, content)
execute_get("coap://"+my_base+"/auth/at", 40)
execute_get("coap://"+my_base+"/auth/at/id", 60)
execute_del("coap://"+my_base+"/auth/at/id", 60)
def do_sequence_f(my_base):
# url, content, accept, contents
execute_get("coap://"+my_base+"/f", 40)
# note this one is a bit dirty hard coded...
execute_get("coap://"+my_base+"/f/417", 40)
execute_get("coap://"+my_base+"/.well-known/core", 40)
def do_sequence(my_base):
#sn = get_sn(my_base)
install(my_base)
return
do_sequence_dev(my_base)
#return
do_sequence_fp_g_int(my_base)
#do_sequence_fp_g(my_base)
do_sequence_fp_p_int(my_base)
#do_sequence_fp_p(my_base)
do_sequence_fp_r_int(my_base)
#do_sequence_fp_r(my_base)
do_sequence_lsm_int(my_base)
#do_sequence_lsm(my_base)
do_sequence_lsm_int(my_base)
# .knx
do_sequence_knx_knx_int(my_base)
#do_sequence_knx_knx(my_base)
do_sequence_knx_spake(my_base)
do_sequence_knx_idevid(my_base)
do_sequence_knx_ldevid(my_base)
do_sequence_knx_crc(my_base)
do_sequence_knx_osn(my_base)
do_sequence_oscore(my_base)
do_sequence_core_knx(my_base)
do_sequence_a_sen(my_base)
do_sequence_auth(my_base)
do_sequence_auth_at(my_base)
do_sequence_f(my_base)
def client_callback_discovery(response, checkdata=None):
print(" --- Discovery Callback ---")
global my_base
if response is not None:
print ("response code:",response.code)
print ("response type:",response.content_type)
if response.code > 100:
print("+++returned error+++")
return
if response.content_type == defines.Content_types["application/link-format"]:
print (response.payload.decode())
my_base = get_base_from_link(response.payload.decode())
do_sequence(my_base)
def code2string(code):
if code == 68:
return "(Changed)"
if code == 69:
return "(Content)"
if code == 132:
return "(Not Found)"
if code == 133:
return "(METHOD_NOT_ALLOWED)"
if code == 160:
return "(INTERNAL_SERVER_ERROR)"
return ""
def client_callback(response, checkdata=None):
print(" --- Callback ---")
if response is not None:
print ("response code:",response.code, code2string(response.code))
print ("response type:",response.content_type)
if response.code > 100:
print("+++returned error+++")
return
#print(response.pretty_print())
if response.content_type == defines.Content_types["text/plain"]:
if response.payload is not None:
print (type(response.payload), len(response.payload))
print ("=========")
print (response.payload)
print ("=========")
else:
print ("payload: none")
elif response.content_type == defines.Content_types["application/cbor"]:
print (type(response.payload), len(response.payload))
print ("=========")
print (response.payload)
print ("=========")
#json_data = loads(response.payload)
#print(json_data)
#print ("=========")
json_string = ""
try:
json_data = cbor.loads(response.payload)
json_string = json.dumps(json_data, indent=2, sort_keys=True)
except:
print("error in cbor..")
print (json_string)
print ("===+++===")
if checkdata is not None:
check_data = cbor.loads(checkdata)
check_string = json.dumps(check_data, indent=2, sort_keys=True)
print(" check: ")
print (check_string)
if check_string == json_string:
print(" =+++===> OK ")
else:
print(" =+++===> NOT OK ")
print (json_string)
elif response.content_type == defines.Content_types["application/vnd.ocf+cbor"]:
print ("application/vnd.ocf+cbor")
try:
print (type(response.payload), len(response.payload))
print ("=========")
print (response.payload)
print ("=========")
json_data = cbor.loads(response.payload)
print (json_data)
print ("---------")
except:
traceback.print_exc()
json_string = json.dumps(json_data, indent=2, sort_keys=True)
print (json_string)
elif response.content_type == defines.Content_types["application/link-format"]:
print (response.payload.decode())
else:
if response.payload is not None:
print ("type, len", type(response.payload), len(response.payload))
print (response.payload)
#else:
# print (" not handled: ", response)
else:
print (" Response : None")
#check = True
#while check:
# chosen = eval(input("Stop observing? [y/N]: "))
# if chosen != "" and not (chosen == "n" or chosen == "N" or chosen == "y" or chosen == "Y"):
# print("Unrecognized choose.")
# continue
def client_callback_observe(response): # pragma: no cover
global client
print("Callback_observe")
check = True
while check:
chosen = eval(input("Stop observing? [y/N]: "))
if chosen != "" and not (chosen == "n" or chosen == "N" or chosen == "y" or chosen == "Y"):
print("Unrecognized choose.")
continue
elif chosen == "y" or chosen == "Y":
while True:
rst = eval(input("Send RST message? [Y/n]: "))
if rst != "" and not (rst == "n" or rst == "N" or rst == "y" or rst == "Y"):
print("Unrecognized choose.")
continue
elif rst == "" or rst == "y" or rst == "Y":
client.cancel_observing(response, True)
else:
client.cancel_observing(response, False)
check = False
break
else:
break
def execute_get(mypath, ct_value):
print ("---------------------------")
print ("execute_get: ", ct_value, mypath)
print (type(mypath))
if (mypath is None or len(mypath) < 5):
return
if mypath.startswith("coap://") == False:
print(" not executing: ", mypath);
return;
ct = {}
ct['accept'] = ct_value
host, port, path = parse_uri(mypath)
try:
tmp = socket.gethostbyname(host)
host = tmp
except socket.gaierror:
pass
nclient = HelperClient(server=(host, port))
response = nclient.get(path, None, None, **ct)
client_callback(response)
nclient.stop()
return response
def execute_del(mypath, ct_value):
print ("---------------------------")
print ("execute_del: ", ct_value, mypath)
do_exit = False
ct = {}
ct['accept'] = ct_value
ct['content_type'] = ct_value
if mypath.startswith("coap://") == False:
print(" not executing: ", mypath);
return;
host, port, path = parse_uri(mypath)
try:
tmp = socket.gethostbyname(host)
host = tmp
except socket.gaierror:
pass
nclient = HelperClient(server=(host, port))
nclientcheck = HelperClient(server=(host, port))
payload = 0
response = nclient.delete(path, None, None, **ct)
client_callback(response)
#nclient.stop()
#sys.exit(2)
print ("=======")
def execute_put(mypath, ct_value, accept, content):
print ("---------------------------")
print ("execute_put: ", ct_value, mypath)
do_exit = False
ct = {}
ct['accept'] = accept
ct['content_type'] = ct_value
if mypath.startswith("coap://") == False:
print(" not executing: ", mypath);
return
host, port, path = parse_uri(mypath)
try:
tmp = socket.gethostbyname(host)
host = tmp
except socket.gaierror:
pass
nclient = HelperClient(server=(host, port))
nclientcheck = HelperClient(server=(host, port))
payload = 0
if accept == 60:
payload = cbor.dumps(content)
else:
payload = content
print ("payload: ", payload)
response = nclient.put(path, payload, None, None , None, **ct)
client_callback(response)
nclient.stop()
def execute_post(mypath, ct_value, accept, content):
print ("---------------------------")
print ("execute_post: ", ct_value, mypath)
print (content)
print (" ---------------------")
do_exit = False
ct = {}
ct['accept'] = accept
ct['content_type'] = ct_value
if mypath.startswith("coap://") == False:
print(" not executing: ", mypath);
return
host, port, path = parse_uri(mypath)
try:
tmp = socket.gethostbyname(host)
host = tmp
except socket.gaierror:
pass
nclient = HelperClient(server=(host, port))
#nclientcheck = HelperClient(server=(host, port))
payload = 0
if accept == 60:
#print(" content :", content)
payload = cbor.dumps(content)
else:
payload = content
response = nclient.post(path, payload, None, None , None, **ct)
client_callback(response)
nclient.stop()
def main(): # pragma: no cover
global client
op = None
path = None
payload = None
content_type = None
#ct = {'content_type': defines.Content_types["application/link-format"]}
ct = {}
ct['accept'] = 40
try:
opts, args = getopt.getopt(sys.argv[1:], "ho:p:P:f:c:", ["help", "operation=", "path=", "payload=",
"payload_file=","content-type"])
except getopt.GetoptError as err:
# print help information and exit:
print((str(err))) # will print something like "option -a not recognized"
usage()
sys.exit(2)
for o, a in opts:
if o in ("-o", "--operation"):
op = a
elif o in ("-p", "--path"):
path = a
elif o in ("-P", "--payload"):
payload = a
elif o in ("-c", "--content-type"):
ct['accept'] = a
print ("content type request : ", ct)
elif o in ("-f", "--payload-file"):
with open(a, 'r') as f:
payload = f.read()
elif o in ("-h", "--help"):
usage()
sys.exit()
else:
usage()
sys.exit(2)
if op is None:
print("Operation must be specified")
usage()
sys.exit(2)
if path is None:
print("Path must be specified")
usage()
sys.exit(2)
if not path.startswith("coap://"):
print("Path must be conform to coap://host[:port]/path")
usage()
sys.exit(2)
host, port, path = parse_uri(path)
try:
tmp = socket.gethostbyname(host)
host = tmp
except socket.gaierror:
pass
client = HelperClient(server=(host, port))
if op == "GET":
if path is None:
print("Path cannot be empty for a GET request")
usage()
sys.exit(2)
response = client.get(path, None, None, **ct)
print((response.pretty_print()))
if response.content_type == defines.Content_types["application/json"]:
json_data = json.loads(response.payload)
json_string = json.dumps(json_data, indent=2, sort_keys=True)
print ("JSON ::")
print (json_string)
if response.content_type == defines.Content_types["application/cbor"]:
json_data = cbor.loads(response.payload)
json_string = json.dumps(json_data, indent=2, sort_keys=True)
print ("JSON ::")
print (json_string)
if response.content_type == defines.Content_types["application/link-format"]:
#json_data = cbor.loads(response.payload)
#json_string = json.dumps(json_data, indent=2, sort_keys=True)
#print ("JSON ::")
print (response.payload.decode())
print ("\n\n")
if response.content_type == defines.Content_types["application/vnd.ocf+cbor"]:
json_data = cbor.loads(response.payload)
json_string = json.dumps(json_data, indent=2, sort_keys=True)
print ("JSON ::")
print (json_string)
client.stop()
elif op == "GETNONE":
if path is None:
print("Path cannot be empty for a GET-None request")
usage()
sys.exit(2)
response = client.get_non(path, None, None, **ct)
print((response.pretty_print()))
if response.content_type == defines.Content_types["application/json"]:
json_data = json.loads(response.payload)
json_string = json.dumps(json_data, indent=2, sort_keys=True)
print ("JSON ::")
print (json_string)
if response.content_type == defines.Content_types["application/cbor"]:
json_data = cbor.loads(response.payload)
json_string = json.dumps(json_data, indent=2, sort_keys=True)
print ("JSON ::")
print (json_string)
if response.content_type == defines.Content_types["application/vnd.ocf+cbor"]:
json_data = cbor.loads(response.payload)
json_string = json.dumps(json_data, indent=2, sort_keys=True)
print ("JSON ::")
print (json_string)
client.stop()
elif op == "OBSERVE":
if path is None:
print("Path cannot be empty for a GET request")
usage()
sys.exit(2)
client.observe(path, client_callback_observe)
elif op == "DELETE":
if path is None:
print("Path cannot be empty for a DELETE request")
usage()
sys.exit(2)
response = client.delete(path)
print((response.pretty_print()))
client.stop()
elif op == "POST":
if path is None:
print("Path cannot be empty for a POST request")
usage()
sys.exit(2)
if payload is None:
print("Payload cannot be empty for a POST request")
usage()
sys.exit(2)
print ( "payload for POST (ascii):", payload )
print (ct['accept'] )
if ct['accept'] == str(defines.Content_types["application/cbor"]):
json_data = json.loads(payload)
cbor_data = cbor.dumps(json_data)
payload = bytes(cbor_data)
if ct['accept'] == str(defines.Content_types["application/vnd.ocf+cbor"]):
json_data = json.loads(payload)
cbor_data = cbor.loads(json_data)
payload = cbor_data
response = client.post(path, payload, None, None, **ct)
print((response.pretty_print()))
if response.content_type == defines.Content_types["application/cbor"]:
json_data = cbor.loads(response.payload)
json_string = json.dumps(json_data, indent=2, sort_keys=True)
print (json_string)
if response.content_type == defines.Content_types["application/vnd.ocf+cbor"]:
json_data = cbor.loads(response.payload)
json_string = json.dumps(json_data, indent=2, sort_keys=True)
print (json_string)
client.stop()
elif op == "PUT":
if path is None:
print("Path cannot be empty for a PUT request")
usage()
sys.exit(2)
if payload is None:
print("Payload cannot be empty for a PUT request")
usage()
sys.exit(2)
response = client.put(path, payload)
print((response.pretty_print()))
client.stop()
elif op == "DISCOVER":
#response = client.discover( path, client_callback, None, **ct)
response = client.discover( path, None, None, **ct)
if response is not None:
print(response.pretty_print())
if response.content_type == defines.Content_types["application/cbor"]:
json_data = cbor.loads(response.payload)
json_string = json.dumps(json_data, indent=2, sort_keys=True)
print (json_string)
if response.content_type == defines.Content_types["application/vnd.ocf+cbor"]:
json_data = cbor.loads(response.payload)
json_string = json.dumps(json_data, indent=2, sort_keys=True)
print (json_string)
if response.content_type == defines.Content_types["application/link-format"]:
#json_data = cbor.loads(response.payload)
#json_string = json.dumps(json_data, indent=2, sort_keys=True)
print (response.payload.decode())
# do_get(response.payload.decode(), client)
client_callback_discovery(response)
counter = 2
try:
while counter > 0:
time.sleep(1)
counter = counter - 1
#client.stop()
except KeyboardInterrupt:
print("Client Shutdown")
#client.stop()
#execute_list()
client.stop()
else:
print("Operation not recognized")
usage()
sys.exit(2)
if __name__ == '__main__': # pragma: no cover
main()
| [((1660, 1682), 'cbor.loads', 'cbor.loads', (['sn.payload'], {}), '(sn.payload)\n', (1670, 1682), False, 'import cbor\n'), ((23947, 23964), 'coapthon.utils.parse_uri', 'parse_uri', (['mypath'], {}), '(mypath)\n', (23956, 23964), False, 'from coapthon.utils import parse_uri\n'), ((24095, 24128), 'coapthon.client.helperclient.HelperClient', 'HelperClient', ([], {'server': '(host, port)'}), '(server=(host, port))\n', (24107, 24128), False, 'from coapthon.client.helperclient import HelperClient\n'), ((24633, 24650), 'coapthon.utils.parse_uri', 'parse_uri', (['mypath'], {}), '(mypath)\n', (24642, 24650), False, 'from coapthon.utils import parse_uri\n'), ((24781, 24814), 'coapthon.client.helperclient.HelperClient', 'HelperClient', ([], {'server': '(host, port)'}), '(server=(host, port))\n', (24793, 24814), False, 'from coapthon.client.helperclient import HelperClient\n'), ((24836, 24869), 'coapthon.client.helperclient.HelperClient', 'HelperClient', ([], {'server': '(host, port)'}), '(server=(host, port))\n', (24848, 24869), False, 'from coapthon.client.helperclient import HelperClient\n'), ((25438, 25455), 'coapthon.utils.parse_uri', 'parse_uri', (['mypath'], {}), '(mypath)\n', (25447, 25455), False, 'from coapthon.utils import parse_uri\n'), ((25586, 25619), 'coapthon.client.helperclient.HelperClient', 'HelperClient', ([], {'server': '(host, port)'}), '(server=(host, port))\n', (25598, 25619), False, 'from coapthon.client.helperclient import HelperClient\n'), ((25641, 25674), 'coapthon.client.helperclient.HelperClient', 'HelperClient', ([], {'server': '(host, port)'}), '(server=(host, port))\n', (25653, 25674), False, 'from coapthon.client.helperclient import HelperClient\n'), ((26420, 26437), 'coapthon.utils.parse_uri', 'parse_uri', (['mypath'], {}), '(mypath)\n', (26429, 26437), False, 'from coapthon.utils import parse_uri\n'), ((26568, 26601), 'coapthon.client.helperclient.HelperClient', 'HelperClient', ([], {'server': '(host, port)'}), '(server=(host, port))\n', (26580, 26601), False, 'from coapthon.client.helperclient import HelperClient\n'), ((28524, 28539), 'coapthon.utils.parse_uri', 'parse_uri', (['path'], {}), '(path)\n', (28533, 28539), False, 'from coapthon.utils import parse_uri\n'), ((28663, 28696), 'coapthon.client.helperclient.HelperClient', 'HelperClient', ([], {'server': '(host, port)'}), '(server=(host, port))\n', (28675, 28696), False, 'from coapthon.client.helperclient import HelperClient\n'), ((23990, 24016), 'socket.gethostbyname', 'socket.gethostbyname', (['host'], {}), '(host)\n', (24010, 24016), False, 'import socket\n'), ((24676, 24702), 'socket.gethostbyname', 'socket.gethostbyname', (['host'], {}), '(host)\n', (24696, 24702), False, 'import socket\n'), ((25481, 25507), 'socket.gethostbyname', 'socket.gethostbyname', (['host'], {}), '(host)\n', (25501, 25507), False, 'import socket\n'), ((25735, 25754), 'cbor.dumps', 'cbor.dumps', (['content'], {}), '(content)\n', (25745, 25754), False, 'import cbor\n'), ((26463, 26489), 'socket.gethostbyname', 'socket.gethostbyname', (['host'], {}), '(host)\n', (26483, 26489), False, 'import socket\n'), ((26756, 26775), 'cbor.dumps', 'cbor.dumps', (['content'], {}), '(content)\n', (26766, 26775), False, 'import cbor\n'), ((27203, 27327), 'getopt.getopt', 'getopt.getopt', (['sys.argv[1:]', '"""ho:p:P:f:c:"""', "['help', 'operation=', 'path=', 'payload=', 'payload_file=', 'content-type']"], {}), "(sys.argv[1:], 'ho:p:P:f:c:', ['help', 'operation=', 'path=',\n 'payload=', 'payload_file=', 'content-type'])\n", (27216, 27327), False, 'import getopt\n'), ((28249, 28260), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (28257, 28260), False, 'import sys\n'), ((28347, 28358), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (28355, 28358), False, 'import sys\n'), ((28488, 28499), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (28496, 28499), False, 'import sys\n'), ((28563, 28589), 'socket.gethostbyname', 'socket.gethostbyname', (['host'], {}), '(host)\n', (28583, 28589), False, 'import socket\n'), ((27573, 27584), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (27581, 27584), False, 'import sys\n'), ((28834, 28845), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (28842, 28845), False, 'import sys\n'), ((29044, 29072), 'json.loads', 'json.loads', (['response.payload'], {}), '(response.payload)\n', (29054, 29072), False, 'import json\n'), ((29099, 29146), 'json.dumps', 'json.dumps', (['json_data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(json_data, indent=2, sort_keys=True)\n', (29109, 29146), False, 'import json\n'), ((29312, 29340), 'cbor.loads', 'cbor.loads', (['response.payload'], {}), '(response.payload)\n', (29322, 29340), False, 'import cbor\n'), ((29367, 29414), 'json.dumps', 'json.dumps', (['json_data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(json_data, indent=2, sort_keys=True)\n', (29377, 29414), False, 'import json\n'), ((29933, 29961), 'cbor.loads', 'cbor.loads', (['response.payload'], {}), '(response.payload)\n', (29943, 29961), False, 'import cbor\n'), ((29988, 30035), 'json.dumps', 'json.dumps', (['json_data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(json_data, indent=2, sort_keys=True)\n', (29998, 30035), False, 'import json\n'), ((30268, 30279), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (30276, 30279), False, 'import sys\n'), ((30482, 30510), 'json.loads', 'json.loads', (['response.payload'], {}), '(response.payload)\n', (30492, 30510), False, 'import json\n'), ((30537, 30584), 'json.dumps', 'json.dumps', (['json_data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(json_data, indent=2, sort_keys=True)\n', (30547, 30584), False, 'import json\n'), ((30750, 30778), 'cbor.loads', 'cbor.loads', (['response.payload'], {}), '(response.payload)\n', (30760, 30778), False, 'import cbor\n'), ((30805, 30852), 'json.dumps', 'json.dumps', (['json_data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(json_data, indent=2, sort_keys=True)\n', (30815, 30852), False, 'import json\n'), ((31026, 31054), 'cbor.loads', 'cbor.loads', (['response.payload'], {}), '(response.payload)\n', (31036, 31054), False, 'import cbor\n'), ((31081, 31128), 'json.dumps', 'json.dumps', (['json_data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(json_data, indent=2, sort_keys=True)\n', (31091, 31128), False, 'import json\n'), ((20603, 20631), 'cbor.loads', 'cbor.loads', (['response.payload'], {}), '(response.payload)\n', (20613, 20631), False, 'import cbor\n'), ((20662, 20709), 'json.dumps', 'json.dumps', (['json_data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(json_data, indent=2, sort_keys=True)\n', (20672, 20709), False, 'import json\n'), ((20901, 20922), 'cbor.loads', 'cbor.loads', (['checkdata'], {}), '(checkdata)\n', (20911, 20922), False, 'import cbor\n'), ((20953, 21001), 'json.dumps', 'json.dumps', (['check_data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(check_data, indent=2, sort_keys=True)\n', (20963, 21001), False, 'import json\n'), ((21822, 21869), 'json.dumps', 'json.dumps', (['json_data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(json_data, indent=2, sort_keys=True)\n', (21832, 21869), False, 'import json\n'), ((31356, 31367), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (31364, 31367), False, 'import sys\n'), ((21639, 21667), 'cbor.loads', 'cbor.loads', (['response.payload'], {}), '(response.payload)\n', (21649, 21667), False, 'import cbor\n'), ((31576, 31587), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (31584, 31587), False, 'import sys\n'), ((21774, 21795), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (21793, 21795), False, 'import traceback\n'), ((31831, 31842), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (31839, 31842), False, 'import sys\n'), ((31967, 31978), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (31975, 31978), False, 'import sys\n'), ((32163, 32182), 'json.loads', 'json.loads', (['payload'], {}), '(payload)\n', (32173, 32182), False, 'import json\n'), ((32207, 32228), 'cbor.dumps', 'cbor.dumps', (['json_data'], {}), '(json_data)\n', (32217, 32228), False, 'import cbor\n'), ((32375, 32394), 'json.loads', 'json.loads', (['payload'], {}), '(payload)\n', (32385, 32394), False, 'import json\n'), ((32419, 32440), 'cbor.loads', 'cbor.loads', (['json_data'], {}), '(json_data)\n', (32429, 32440), False, 'import cbor\n'), ((32703, 32731), 'cbor.loads', 'cbor.loads', (['response.payload'], {}), '(response.payload)\n', (32713, 32731), False, 'import cbor\n'), ((32758, 32805), 'json.dumps', 'json.dumps', (['json_data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(json_data, indent=2, sort_keys=True)\n', (32768, 32805), False, 'import json\n'), ((32949, 32977), 'cbor.loads', 'cbor.loads', (['response.payload'], {}), '(response.payload)\n', (32959, 32977), False, 'import cbor\n'), ((33004, 33051), 'json.dumps', 'json.dumps', (['json_data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(json_data, indent=2, sort_keys=True)\n', (33014, 33051), False, 'import json\n'), ((28091, 28101), 'sys.exit', 'sys.exit', ([], {}), '()\n', (28099, 28101), False, 'import sys\n'), ((28148, 28159), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (28156, 28159), False, 'import sys\n'), ((33245, 33256), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (33253, 33256), False, 'import sys\n'), ((33380, 33391), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (33388, 33391), False, 'import sys\n'), ((35032, 35043), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (35040, 35043), False, 'import sys\n'), ((33846, 33874), 'cbor.loads', 'cbor.loads', (['response.payload'], {}), '(response.payload)\n', (33856, 33874), False, 'import cbor\n'), ((33905, 33952), 'json.dumps', 'json.dumps', (['json_data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(json_data, indent=2, sort_keys=True)\n', (33915, 33952), False, 'import json\n'), ((34108, 34136), 'cbor.loads', 'cbor.loads', (['response.payload'], {}), '(response.payload)\n', (34118, 34136), False, 'import cbor\n'), ((34167, 34214), 'json.dumps', 'json.dumps', (['json_data'], {'indent': '(2)', 'sort_keys': '(True)'}), '(json_data, indent=2, sort_keys=True)\n', (34177, 34214), False, 'import json\n'), ((34726, 34739), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (34736, 34739), False, 'import time\n')] |
alexsigaras/SWIM | SWIM-Executables/Windows/pyinstaller-2.0 for windows/PyInstaller/hooks/hook-PyQt4.phonon.py | 1a35df8acb26bdcb307a1b8f60e9feba68ed1715 | hiddenimports = ['sip', 'PyQt4.QtGui', 'PyQt4._qt']
from PyInstaller.hooks.hookutils import qt4_plugins_binaries
def hook(mod):
mod.binaries.extend(qt4_plugins_binaries('phonon_backend'))
return mod
| [((155, 193), 'PyInstaller.hooks.hookutils.qt4_plugins_binaries', 'qt4_plugins_binaries', (['"""phonon_backend"""'], {}), "('phonon_backend')\n", (175, 193), False, 'from PyInstaller.hooks.hookutils import qt4_plugins_binaries\n')] |
zlopez101/PyTradier | PyTradier/data.py | 83397cf38bd636c471993b57fb71a12885affcb7 | from PyTradier.base import BasePyTradier
from typing import Union
from datetime import datetime
class MarketData(BasePyTradier):
"""All Methods currently only support string API calls, no datetime, bools, etc
"""
def quotes(self, symbols: Union[str, list], greeks: bool = False) -> dict:
"""Get a list of symbols using a keyword lookup on the symbols description. Results are in descending order by average volume of the security. This can be used for simple search functions
:param symbols: Comma-delimited list of symbols (equity or option)
:type symbols: Union[str, list]
:param greeks: Add greeks and volatility information (option only), defaults to False
:type greeks: bool, optional
:return: quotes for requested symbols
:rtype: dict
"""
symbols = self._symbol_prep(symbols)
return self._get(
"/v1/markets/quotes",
params=self.create_params(locals()),
dict_args=("quotes", "quotes"),
)
def option_chain(
self,
symbol: str,
expiration: Union[str, datetime],
greeks: Union[str, bool] = "false",
) -> dict:
"""Get all quotes in an option chain. Greek and IV data is included courtesy of ORATS. Please check out their APIs for more in-depth options data.
:param symbol: Underlying symbol of the chain
:type symbol: str
:param expiration: Expiration for the chain
:type expiration: Union[str, datetime]
:param greeks: Add greeks and volatility information, defaults to "false"
:type greeks: Union[str, bool], optional
:return: Get all quotes in an option chain
:rtype: dict
"""
return self._get(
"/v1/markets/options/chains",
params=self.create_params(locals()),
dict_args=("options", "option"),
)
def option_strike(self, symbol: str, expiration: Union[str, datetime]) -> list:
"""Get an options strike prices for a specified expiration date.
:param symbol: Underlying symbol of the chain
:type symbol: str
:param expiration: Expiration for the chain
:type expiration: Union[str, datetime]
:return: [description]
:rtype: list
"""
return self._get(
"/v1/markets/options/strikes", params=self.create_params(locals())
)
def option_lookup(self, underlying: str) -> dict:
"""Get all options symbols for the given underlying. This will include additional option roots (ex. SPXW, RUTW) if applicable.
:param underlying: Underlying symbol of the chain
:type underlying: str
:return: dict {"rootSymbol": underlying, "options": [list of option symbols]}
:rtype: dict
"""
return self._get(
"/v1/markets/options/lookup", params=self.create_params(locals())
)
def option_expirations(
self,
symbol: str,
includeAllRoots: Union[str, bool] = "",
strikes: Union[str, bool] = "",
) -> list:
"""Get expiration dates for a particular underlying.
Note that some underlying securities use a different symbol for their weekly options (RUT/RUTW, SPX/SPXW). To make sure you see all expirations, make sure to send the includeAllRoots parameter. This will also ensure any unique options due to corporate actions (AAPL1) are returned.
:param symbol: Underlying symbol of the chain
:type symbol: str
:param includeAllRoots: Send expirations related to all option roots, defaults to ''
:type includeAllRoots: Union[str, bool], optional
:param strikes: Add strike prices to each expiration, defaults to ''
:type strikes: Union[str, bool], optional
:return: list of expiration dates as str %Y-%m-%d
:rtype: list
"""
response = self._get(
"/v1/markets/options/expirations", params=self.create_params(locals())
)
return response
def historic_quotes(
self, symbol: str, interval: str = "daily", start: str = None, end: str = None
) -> list:
"""Get historical pricing for a security. This data will usually cover the entire lifetime of the company if sending reasonable start/end times. You can fetch historical pricing for options by passing the OCC option symbol (ex. AAPL220617C00270000) as the symbol.
:param symbol: Symbol to query
:type symbol: str
:param interval: Interval of time per timesale. One of: daily, weekly, monthly, defaults to "daily"
:type interval: str, optional
:param start: Start date represented as YYYY-MM-DD, defaults to None
:type start: str, optional
:param end: End date represented as YYYY-MM-DD, defaults to None
:type end: str, optional
:return: [description]
:rtype: list
"""
return self._get(
"/v1/markets/history",
params=self.create_params(locals()),
dict_args=("history", "day"),
)
def time_and_sales(
self, symbol: str, start: str, end: str, interval: str = "1min"
) -> list:
"""Time and Sales (timesales) is typically used for charting purposes. It captures pricing across a time slice at predefined intervals.
Tick data is also available through this endpoint. This results in a very large data set for high-volume symbols, so the time slice needs to be much smaller to keep downloads time reasonable.`
:param symbol: A single security symbol.
:type symbol: str
:param start: Start date/time for timesales range represented as YYYY-MM-DD HH:MM
:type start: str
:param end: Start date/time for timesales range represented as YYYY-MM-DD HH:MM
:type end: str
:param interval: Interval of time per timesale. One of: tick, 1min, 5min, 15min, defaults to "1min"
:type interval: str, optional
:return: list of dictionaries containing keys of ['time', 'timestamp', 'price', 'open', 'high', 'close', low', 'volume', 'vwap']
:rtype: list
"""
return self._get(
"/v1/markets/timesales",
params=self.create_params(locals()),
dict_args=("series", "data"),
)
if __name__ == "__main__":
from utils import printer
data = MarketData()
symbol = "AAPL"
response = data.option_lookup(symbol)
# response = data.option_strike(symbol, dates[0])
printer(response)
| [((6561, 6578), 'utils.printer', 'printer', (['response'], {}), '(response)\n', (6568, 6578), False, 'from utils import printer\n')] |
Axel-Jacobsen/pyjoulescope_ui | joulescope_ui/meter_widget.py | 7d296b1ead0d36c6524dc399372f7888a340e9fa | # Copyright 2018 Jetperch LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from PySide2 import QtCore, QtWidgets
from . import joulescope_rc
from .meter_value_widget import MeterValueWidget
import logging
log = logging.getLogger(__name__)
FIELDS = [
('current', 'A', 'Amps'),
('voltage', 'V', 'Volts'),
('power', 'W', 'Watts'),
('energy', 'J', 'Joules'),
]
class MeterWidget(QtWidgets.QWidget):
def __init__(self, *args, **kwargs):
QtWidgets.QWidget.__init__(self, *args, **kwargs)
self.verticalLayout = QtWidgets.QVBoxLayout(self)
self.verticalLayout.setObjectName("verticalLayout")
self.verticalLayout.setSpacing(0)
self.controlWidget = QtWidgets.QWidget(self)
self.controlLayout = QtWidgets.QHBoxLayout(self.controlWidget)
self.verticalLayout.addWidget(self.controlWidget)
self.accumulateButton = QtWidgets.QPushButton(self.controlWidget)
self.accumulateButton.setCheckable(True)
self.accumulateButton.setObjectName("accumulateButton")
self.controlLayout.addWidget(self.accumulateButton)
self.accumulateButton.toggled.connect(self.on_accumulate_toggled)
self.controlSpacer = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.controlLayout.addItem(self.controlSpacer)
self.values = {}
for name, units_short, units_long in FIELDS:
w = MeterValueWidget(self)
w.setStyleSheet("QWidget { background-color : black; color : green; }")
w.configure(name.capitalize(), units_short, units_long)
self.values[name] = w
w.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.addWidget(w)
self.values['energy'].configure_energy()
self.sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
self.sizePolicy.setHorizontalStretch(0)
self.sizePolicy.setVerticalStretch(0)
self.setSizePolicy(self.sizePolicy)
self.retranslateUi()
@QtCore.Slot(bool)
def on_accumulate_toggled(self, checked):
self.values['current'].accumulate_enable = checked
self.values['voltage'].accumulate_enable = checked
self.values['power'].accumulate_enable = checked
def update(self, statistics):
"""Update the multimeter display
:param statistics: The statistics data structure
"""
for name, field in statistics['signals'].items():
d = field['statistics']
self.values[name].update_value(mean=d['μ'], variance=d['σ2'], v_min=d['min'], v_max=d['max'])
energy = statistics['accumulators']['energy']['value']
charge = statistics['accumulators']['charge']['value']
self.values['energy'].update_energy(energy, charge)
def retranslateUi(self):
_translate = QtCore.QCoreApplication.translate
self.accumulateButton.setText(_translate("meter_widget", "Accumulate"))
| [((713, 740), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (730, 740), False, 'import logging\n'), ((2592, 2609), 'PySide2.QtCore.Slot', 'QtCore.Slot', (['bool'], {}), '(bool)\n', (2603, 2609), False, 'from PySide2 import QtCore, QtWidgets\n'), ((967, 1016), 'PySide2.QtWidgets.QWidget.__init__', 'QtWidgets.QWidget.__init__', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (993, 1016), False, 'from PySide2 import QtCore, QtWidgets\n'), ((1047, 1074), 'PySide2.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self'], {}), '(self)\n', (1068, 1074), False, 'from PySide2 import QtCore, QtWidgets\n'), ((1207, 1230), 'PySide2.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self'], {}), '(self)\n', (1224, 1230), False, 'from PySide2 import QtCore, QtWidgets\n'), ((1260, 1301), 'PySide2.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.controlWidget'], {}), '(self.controlWidget)\n', (1281, 1301), False, 'from PySide2 import QtCore, QtWidgets\n'), ((1393, 1434), 'PySide2.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.controlWidget'], {}), '(self.controlWidget)\n', (1414, 1434), False, 'from PySide2 import QtCore, QtWidgets\n'), ((1712, 1810), 'PySide2.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(40)', '(20)', 'QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Minimum'], {}), '(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.\n QSizePolicy.Minimum)\n', (1733, 1810), False, 'from PySide2 import QtCore, QtWidgets\n'), ((2331, 2423), 'PySide2.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Expanding'], {}), '(QtWidgets.QSizePolicy.Expanding, QtWidgets.\n QSizePolicy.Expanding)\n', (2352, 2423), False, 'from PySide2 import QtCore, QtWidgets\n')] |
bbonf/rpyc | rpyc/core/service.py | 2c66dd6936a0d9e6e36c1ba0cda1139676acf95c | """
Services are the heart of RPyC: each side of the connection exposes a *service*,
which define the capabilities available to the other side.
Note that the services by both parties need not be symmetric, e.g., one side may
exposed *service A*, while the other may expose *service B*. As long as the two
can interoperate, you're good to go.
"""
from functools import partial
from rpyc.lib import hybridmethod
from rpyc.lib.compat import execute, is_py3k
from rpyc.core.protocol import Connection
class Service(object):
"""The service base-class. Derive from this class to implement custom RPyC
services:
* The name of the class implementing the ``Foo`` service should match the
pattern ``FooService`` (suffixed by the word 'Service') ::
class FooService(Service):
pass
FooService.get_service_name() # 'FOO'
FooService.get_service_aliases() # ['FOO']
* To supply a different name or aliases, use the ``ALIASES`` class attribute ::
class Foobar(Service):
ALIASES = ["foo", "bar", "lalaland"]
Foobar.get_service_name() # 'FOO'
Foobar.get_service_aliases() # ['FOO', 'BAR', 'LALALAND']
* Override :func:`on_connect` to perform custom initialization
* Override :func:`on_disconnect` to perform custom finalization
* To add exposed methods or attributes, simply define them normally,
but prefix their name by ``exposed_``, e.g. ::
class FooService(Service):
def exposed_add(self, x, y):
return x + y
* All other names (not prefixed by ``exposed_``) are local (not accessible
to the other party)
.. note::
You can override ``_rpyc_getattr``, ``_rpyc_setattr`` and ``_rpyc_delattr``
to change attribute lookup -- but beware of possible **security implications!**
"""
__slots__ = ()
ALIASES = ()
_protocol = Connection
def on_connect(self, conn):
"""called when the connection is established"""
pass
def on_disconnect(self, conn):
"""called when the connection had already terminated for cleanup
(must not perform any IO on the connection)"""
pass
# Using default defined in 'protocol.Connection._access_attr' for:
# def _rpyc_getattr(self, name):
def _rpyc_delattr(self, name):
raise AttributeError("access denied")
def _rpyc_setattr(self, name, value):
raise AttributeError("access denied")
@classmethod
def get_service_aliases(cls):
"""returns a list of the aliases of this service"""
if cls.ALIASES:
return tuple(str(n).upper() for n in cls.ALIASES)
name = cls.__name__.upper()
if name.endswith("SERVICE"):
name = name[:-7]
return (name,)
@classmethod
def get_service_name(cls):
"""returns the canonical name of the service (which is its first
alias)"""
return cls.get_service_aliases()[0]
exposed_get_service_aliases = get_service_aliases
exposed_get_service_name = get_service_name
@hybridmethod
def _connect(self, channel, config={}):
"""Setup a connection via the given channel."""
if isinstance(self, type): # autovivify if accessed as class method
self = self()
# Note that we are here passing in `self` as root object for backward
# compatibility and convenience. You could pass in a different root if
# you wanted:
conn = self._protocol(self, channel, config)
self.on_connect(conn)
return conn
class VoidService(Service):
"""void service - an do-nothing service"""
__slots__ = ()
class ModuleNamespace(object):
"""used by the :class:`SlaveService` to implement the magical
'module namespace'"""
__slots__ = ["__getmodule", "__cache", "__weakref__"]
def __init__(self, getmodule):
self.__getmodule = getmodule
self.__cache = {}
def __contains__(self, name):
try:
self[name]
except ImportError:
return False
else:
return True
def __getitem__(self, name):
if type(name) is tuple:
name = ".".join(name)
if name not in self.__cache:
self.__cache[name] = self.__getmodule(name)
return self.__cache[name]
def __getattr__(self, name):
return self[name]
class Slave(object):
__slots__ = ["_conn", "namespace"]
def __init__(self):
self._conn = None
self.namespace = {}
def execute(self, text):
"""execute arbitrary code (using ``exec``)"""
execute(text, self.namespace)
def eval(self, text):
"""evaluate arbitrary code (using ``eval``)"""
return eval(text, self.namespace)
def getmodule(self, name):
"""imports an arbitrary module"""
return __import__(name, None, None, "*")
def getconn(self):
"""returns the local connection instance to the other side"""
return self._conn
class SlaveService(Slave, Service):
"""The SlaveService allows the other side to perform arbitrary imports and
execution arbitrary code on the server. This is provided for compatibility
with the classic RPyC (2.6) modus operandi.
This service is very useful in local, secure networks, but it exposes
a **major security risk** otherwise."""
__slots__ = ()
def on_connect(self, conn):
self._conn = conn
self._conn._config.update(dict(
allow_all_attrs = True,
allow_pickle = True,
allow_getattr = True,
allow_setattr = True,
allow_delattr = True,
allow_exposed_attrs = False,
import_custom_exceptions = True,
instantiate_custom_exceptions = True,
instantiate_oldstyle_exceptions = True,
))
super(SlaveService, self).on_connect(conn)
class FakeSlaveService(VoidService):
"""VoidService that can be used for connecting to peers that operate a
:class:`MasterService`, :class:`ClassicService`, or the old
``SlaveService`` (pre v3.5) without exposing any functionality to them."""
__slots__ = ()
exposed_namespace = None
exposed_execute = None
exposed_eval = None
exposed_getmodule = None
exposed_getconn = None
class MasterService(Service):
"""Peer for a new-style (>=v3.5) :class:`SlaveService`. Use this service
if you want to connect to a ``SlaveService`` without exposing any
functionality to them."""
__slots__ = ()
def on_connect(self, conn):
super(MasterService, self).on_connect(conn)
self._install(conn, conn.root)
@staticmethod
def _install(conn, slave):
modules = ModuleNamespace(slave.getmodule)
builtin = modules.builtins if is_py3k else modules.__builtin__
conn.modules = modules
conn.eval = slave.eval
conn.execute = slave.execute
conn.namespace = slave.namespace
conn.builtin = builtin
conn.builtins = builtin
from rpyc.utils.classic import teleport_function
conn.teleport = partial(teleport_function, conn)
class ClassicService(MasterService, SlaveService):
"""Full duplex master/slave service, i.e. both parties have full control
over the other. Must be used by both parties."""
__slots__ = ()
class ClassicClient(MasterService, FakeSlaveService):
"""MasterService that can be used for connecting to peers that operate a
:class:`MasterService`, :class:`ClassicService` without exposing any
functionality to them."""
__slots__ = ()
| [((4665, 4694), 'rpyc.lib.compat.execute', 'execute', (['text', 'self.namespace'], {}), '(text, self.namespace)\n', (4672, 4694), False, 'from rpyc.lib.compat import execute, is_py3k\n'), ((7188, 7220), 'functools.partial', 'partial', (['teleport_function', 'conn'], {}), '(teleport_function, conn)\n', (7195, 7220), False, 'from functools import partial\n')] |
altenia/taskmator | tests/task/manager_test.py | 4090d414125614a57649c5c92a017c12a231a2ef | import unittest
from testbase import TaskmatorTestBase
from taskmator.task import core, util
from taskmator import context
class ManagerTest(TaskmatorTestBase):
def testManager(self):
print ("Pending")
def main():
unittest.main()
if __name__ == '__main__':
unittest.main()
| [((291, 306), 'unittest.main', 'unittest.main', ([], {}), '()\n', (304, 306), False, 'import unittest\n'), ((242, 257), 'unittest.main', 'unittest.main', ([], {}), '()\n', (255, 257), False, 'import unittest\n')] |
tbarbette/core | tests/components/zwave_js/test_discovery.py | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | """Test discovery of entities for device-specific schemas for the Z-Wave JS integration."""
async def test_iblinds_v2(hass, client, iblinds_v2, integration):
"""Test that an iBlinds v2.0 multilevel switch value is discovered as a cover."""
node = iblinds_v2
assert node.device_class.specific.label == "Unused"
state = hass.states.get("light.window_blind_controller")
assert not state
state = hass.states.get("cover.window_blind_controller")
assert state
async def test_ge_12730(hass, client, ge_12730, integration):
"""Test GE 12730 Fan Controller v2.0 multilevel switch is discovered as a fan."""
node = ge_12730
assert node.device_class.specific.label == "Multilevel Power Switch"
state = hass.states.get("light.in_wall_smart_fan_control")
assert not state
state = hass.states.get("fan.in_wall_smart_fan_control")
assert state
async def test_inovelli_lzw36(hass, client, inovelli_lzw36, integration):
"""Test LZW36 Fan Controller multilevel switch endpoint 2 is discovered as a fan."""
node = inovelli_lzw36
assert node.device_class.specific.label == "Unused"
state = hass.states.get("light.family_room_combo")
assert state.state == "off"
state = hass.states.get("fan.family_room_combo_2")
assert state
| [] |
cowboygneox/boto3_type_annotations | boto3_type_annotations/boto3_type_annotations/guardduty/client.py | 450dce1de4e066b939de7eac2ec560ed1a7ddaa2 | from typing import Optional
from botocore.client import BaseClient
from typing import Dict
from typing import Union
from botocore.paginate import Paginator
from botocore.waiter import Waiter
from typing import List
class Client(BaseClient):
def accept_invitation(self, DetectorId: str, InvitationId: str, MasterId: str) -> Dict:
pass
def archive_findings(self, DetectorId: str, FindingIds: List) -> Dict:
pass
def can_paginate(self, operation_name: str = None):
pass
def create_detector(self, Enable: bool, ClientToken: str = None, FindingPublishingFrequency: str = None) -> Dict:
pass
def create_filter(self, DetectorId: str, FindingCriteria: Dict, Name: str, Action: str = None, ClientToken: str = None, Description: str = None, Rank: int = None) -> Dict:
pass
def create_ip_set(self, Activate: bool, DetectorId: str, Format: str, Location: str, Name: str, ClientToken: str = None) -> Dict:
pass
def create_members(self, AccountDetails: List, DetectorId: str) -> Dict:
pass
def create_sample_findings(self, DetectorId: str, FindingTypes: List = None) -> Dict:
pass
def create_threat_intel_set(self, Activate: bool, DetectorId: str, Format: str, Location: str, Name: str, ClientToken: str = None) -> Dict:
pass
def decline_invitations(self, AccountIds: List) -> Dict:
pass
def delete_detector(self, DetectorId: str) -> Dict:
pass
def delete_filter(self, DetectorId: str, FilterName: str) -> Dict:
pass
def delete_invitations(self, AccountIds: List) -> Dict:
pass
def delete_ip_set(self, DetectorId: str, IpSetId: str) -> Dict:
pass
def delete_members(self, AccountIds: List, DetectorId: str) -> Dict:
pass
def delete_threat_intel_set(self, DetectorId: str, ThreatIntelSetId: str) -> Dict:
pass
def disassociate_from_master_account(self, DetectorId: str) -> Dict:
pass
def disassociate_members(self, AccountIds: List, DetectorId: str) -> Dict:
pass
def generate_presigned_url(self, ClientMethod: str = None, Params: Dict = None, ExpiresIn: int = None, HttpMethod: str = None):
pass
def get_detector(self, DetectorId: str) -> Dict:
pass
def get_filter(self, DetectorId: str, FilterName: str) -> Dict:
pass
def get_findings(self, DetectorId: str, FindingIds: List, SortCriteria: Dict = None) -> Dict:
pass
def get_findings_statistics(self, DetectorId: str, FindingStatisticTypes: List, FindingCriteria: Dict = None) -> Dict:
pass
def get_invitations_count(self) -> Dict:
pass
def get_ip_set(self, DetectorId: str, IpSetId: str) -> Dict:
pass
def get_master_account(self, DetectorId: str) -> Dict:
pass
def get_members(self, AccountIds: List, DetectorId: str) -> Dict:
pass
def get_paginator(self, operation_name: str = None) -> Paginator:
pass
def get_threat_intel_set(self, DetectorId: str, ThreatIntelSetId: str) -> Dict:
pass
def get_waiter(self, waiter_name: str = None) -> Waiter:
pass
def invite_members(self, AccountIds: List, DetectorId: str, DisableEmailNotification: bool = None, Message: str = None) -> Dict:
pass
def list_detectors(self, MaxResults: int = None, NextToken: str = None) -> Dict:
pass
def list_filters(self, DetectorId: str, MaxResults: int = None, NextToken: str = None) -> Dict:
pass
def list_findings(self, DetectorId: str, FindingCriteria: Dict = None, MaxResults: int = None, NextToken: str = None, SortCriteria: Dict = None) -> Dict:
pass
def list_invitations(self, MaxResults: int = None, NextToken: str = None) -> Dict:
pass
def list_ip_sets(self, DetectorId: str, MaxResults: int = None, NextToken: str = None) -> Dict:
pass
def list_members(self, DetectorId: str, MaxResults: int = None, NextToken: str = None, OnlyAssociated: str = None) -> Dict:
pass
def list_threat_intel_sets(self, DetectorId: str, MaxResults: int = None, NextToken: str = None) -> Dict:
pass
def start_monitoring_members(self, AccountIds: List, DetectorId: str) -> Dict:
pass
def stop_monitoring_members(self, AccountIds: List, DetectorId: str) -> Dict:
pass
def unarchive_findings(self, DetectorId: str, FindingIds: List) -> Dict:
pass
def update_detector(self, DetectorId: str, Enable: bool = None, FindingPublishingFrequency: str = None) -> Dict:
pass
def update_filter(self, DetectorId: str, FilterName: str, Action: str = None, Description: str = None, FindingCriteria: Dict = None, Rank: int = None) -> Dict:
pass
def update_findings_feedback(self, DetectorId: str, Feedback: str, FindingIds: List, Comments: str = None) -> Dict:
pass
def update_ip_set(self, DetectorId: str, IpSetId: str, Activate: bool = None, Location: str = None, Name: str = None) -> Dict:
pass
def update_threat_intel_set(self, DetectorId: str, ThreatIntelSetId: str, Activate: bool = None, Location: str = None, Name: str = None) -> Dict:
pass
| [] |
ChenYi015/Raven | test/workload/tpch_loop_workload_test.py | e732e03f8dd118ed805a143fc6916f0e5fc53c2c | # Copyright 2021 Raven Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from queue import Queue
from threading import Thread
from benchmark.workload.tpch import TpchLoopWorkload
def print_queries(queue: Queue):
while True:
query = queue.get()
print(query)
if __name__ == '__main__':
workload = TpchLoopWorkload()
print(workload)
queue = Queue()
generate_thread = Thread(
target=workload.generate_one_loop_queries,
args=(queue,),
name='QueryGenerator'
)
generate_thread.start()
print_thread = Thread(
target=print_queries,
args=(queue,),
name='QueryPrinter'
)
print_thread.start()
| [((849, 867), 'benchmark.workload.tpch.TpchLoopWorkload', 'TpchLoopWorkload', ([], {}), '()\n', (865, 867), False, 'from benchmark.workload.tpch import TpchLoopWorkload\n'), ((901, 908), 'queue.Queue', 'Queue', ([], {}), '()\n', (906, 908), False, 'from queue import Queue\n'), ((932, 1024), 'threading.Thread', 'Thread', ([], {'target': 'workload.generate_one_loop_queries', 'args': '(queue,)', 'name': '"""QueryGenerator"""'}), "(target=workload.generate_one_loop_queries, args=(queue,), name=\n 'QueryGenerator')\n", (938, 1024), False, 'from threading import Thread\n'), ((1098, 1162), 'threading.Thread', 'Thread', ([], {'target': 'print_queries', 'args': '(queue,)', 'name': '"""QueryPrinter"""'}), "(target=print_queries, args=(queue,), name='QueryPrinter')\n", (1104, 1162), False, 'from threading import Thread\n')] |
wendy006/Web-Dev-Course | Final-Project/server/art/serializers.py | 2f0cfddb7ab4db88ffb4483c7cd4a00abf36c720 | from rest_framework import serializers
from .models import *
class CollectionSerializer(serializers.ModelSerializer):
class Meta:
model = Collection
fields = ('collectionID', 'name', 'display_name', 'description', 'img_url')
class ArtSerializer(serializers.ModelSerializer):
img_url = serializers.ReadOnlyField()
thumb_url = serializers.ReadOnlyField()
class Meta:
model = Art
fields = ('artID', 'title', 'filename', 'rarity', 'collection', 'img_url', 'thumb_url')
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password', 'coins', 'art')
extra_kwargs = {
'password': {'write_only': True}
}
def create(self, validated_data):
password = validated_data.pop('password', None)
instance = self.Meta.model(**validated_data)
if password is not None:
instance.set_password(password)
instance.save()
return instance
class OwnSerializer(serializers.ModelSerializer):
duplicates = serializers.ReadOnlyField()
class Meta:
model = Own
fields = ('ownID', 'user', 'art', 'duplicates')
class SaleSerializer(serializers.ModelSerializer):
class Meta:
model = Sale
fields = ('saleID', 'seller', 'buyer', 'ownership', 'art', 'price', 'available', 'sold', 'postDate', 'purchaseDate') | [((320, 347), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (345, 347), False, 'from rest_framework import serializers\n'), ((365, 392), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (390, 392), False, 'from rest_framework import serializers\n'), ((1134, 1161), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (1159, 1161), False, 'from rest_framework import serializers\n')] |
joezqren/google-cloud-cpp | google/cloud/google_cloud_cpp_common_unit_tests.bzl | 325d312b0a21569f3c57515aec7d91f3540d3b48 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# DO NOT EDIT -- GENERATED BY CMake -- Change the CMakeLists.txt file if needed
"""Automatically generated unit tests list - DO NOT EDIT."""
google_cloud_cpp_common_unit_tests = [
"common_options_test.cc",
"future_generic_test.cc",
"future_generic_then_test.cc",
"future_void_test.cc",
"future_void_then_test.cc",
"iam_bindings_test.cc",
"internal/algorithm_test.cc",
"internal/api_client_header_test.cc",
"internal/backoff_policy_test.cc",
"internal/base64_transforms_test.cc",
"internal/big_endian_test.cc",
"internal/compiler_info_test.cc",
"internal/credentials_impl_test.cc",
"internal/env_test.cc",
"internal/filesystem_test.cc",
"internal/format_time_point_test.cc",
"internal/future_impl_test.cc",
"internal/invoke_result_test.cc",
"internal/log_impl_test.cc",
"internal/pagination_range_test.cc",
"internal/parse_rfc3339_test.cc",
"internal/random_test.cc",
"internal/retry_policy_test.cc",
"internal/status_payload_keys_test.cc",
"internal/strerror_test.cc",
"internal/throw_delegate_test.cc",
"internal/tuple_test.cc",
"internal/type_list_test.cc",
"internal/user_agent_prefix_test.cc",
"internal/utility_test.cc",
"kms_key_name_test.cc",
"log_test.cc",
"options_test.cc",
"polling_policy_test.cc",
"project_test.cc",
"status_or_test.cc",
"status_test.cc",
"stream_range_test.cc",
"terminate_handler_test.cc",
"tracing_options_test.cc",
]
| [] |
codacy-badger/politico-api | api/tests/ver1/test_base.py | 10d926bf34f12631cb19bb9c82ccded36557c790 | import unittest
from api import create_app
class TestBase(unittest.TestCase):
"""Default super class for api ver 1 tests"""
# setup testing
def setUp(self):
self.app = create_app('testing')
self.client = self.app.test_client()
self.item_list = []
# deconstructs test elements
def tearDown(self):
self.app = None
self.item_list.clear()
| [((190, 211), 'api.create_app', 'create_app', (['"""testing"""'], {}), "('testing')\n", (200, 211), False, 'from api import create_app\n')] |
CMPUT404-Project-Group/CMPUT404-Group-Project | socialdistribution/app/templatetags/filters.py | e541cc609f260d7221fe0be8975c5b2444d74af0 | from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import SafeString
import markdown
import urllib
register = template.Library()
@register.filter
def strip_space(value):
return value.replace(' ', '')
@register.filter
@stringfilter
def commonmark(value):
return markdown.Markdown().convert(value)
@register.filter(name="getID")
def get_ID(value):
if not type(value) is str:
return value
return value.split('/')[-1]
@register.filter(name="getNav")
def get_nav(value):
return value.split('/')[-2]
@register.filter(name="encode_url")
def encode_url(value):
return urllib.parse.quote(value)
@register.filter
def get_post_id(url):
"""
gets the post id from the comment page url
"""
return urllib.parse.urlparse(url.get_full_path()).path.rsplit('/', 1)[0] | [((173, 191), 'django.template.Library', 'template.Library', ([], {}), '()\n', (189, 191), False, 'from django import template\n'), ((660, 685), 'urllib.parse.quote', 'urllib.parse.quote', (['value'], {}), '(value)\n', (678, 685), False, 'import urllib\n'), ((334, 353), 'markdown.Markdown', 'markdown.Markdown', ([], {}), '()\n', (351, 353), False, 'import markdown\n')] |
antopen/alipay-sdk-python-all | alipay/aop/api/domain/MetroOdItem.py | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.CloudbusUserInfo import CloudbusUserInfo
class MetroOdItem(object):
def __init__(self):
self._dest_geo = None
self._od = None
self._time = None
self._user_info = None
self._week_od = None
self._work_od = None
@property
def dest_geo(self):
return self._dest_geo
@dest_geo.setter
def dest_geo(self, value):
self._dest_geo = value
@property
def od(self):
return self._od
@od.setter
def od(self, value):
self._od = value
@property
def time(self):
return self._time
@time.setter
def time(self, value):
self._time = value
@property
def user_info(self):
return self._user_info
@user_info.setter
def user_info(self, value):
if isinstance(value, CloudbusUserInfo):
self._user_info = value
else:
self._user_info = CloudbusUserInfo.from_alipay_dict(value)
@property
def week_od(self):
return self._week_od
@week_od.setter
def week_od(self, value):
self._week_od = value
@property
def work_od(self):
return self._work_od
@work_od.setter
def work_od(self, value):
self._work_od = value
def to_alipay_dict(self):
params = dict()
if self.dest_geo:
if hasattr(self.dest_geo, 'to_alipay_dict'):
params['dest_geo'] = self.dest_geo.to_alipay_dict()
else:
params['dest_geo'] = self.dest_geo
if self.od:
if hasattr(self.od, 'to_alipay_dict'):
params['od'] = self.od.to_alipay_dict()
else:
params['od'] = self.od
if self.time:
if hasattr(self.time, 'to_alipay_dict'):
params['time'] = self.time.to_alipay_dict()
else:
params['time'] = self.time
if self.user_info:
if hasattr(self.user_info, 'to_alipay_dict'):
params['user_info'] = self.user_info.to_alipay_dict()
else:
params['user_info'] = self.user_info
if self.week_od:
if hasattr(self.week_od, 'to_alipay_dict'):
params['week_od'] = self.week_od.to_alipay_dict()
else:
params['week_od'] = self.week_od
if self.work_od:
if hasattr(self.work_od, 'to_alipay_dict'):
params['work_od'] = self.work_od.to_alipay_dict()
else:
params['work_od'] = self.work_od
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = MetroOdItem()
if 'dest_geo' in d:
o.dest_geo = d['dest_geo']
if 'od' in d:
o.od = d['od']
if 'time' in d:
o.time = d['time']
if 'user_info' in d:
o.user_info = d['user_info']
if 'week_od' in d:
o.week_od = d['week_od']
if 'work_od' in d:
o.work_od = d['work_od']
return o
| [((1063, 1103), 'alipay.aop.api.domain.CloudbusUserInfo.CloudbusUserInfo.from_alipay_dict', 'CloudbusUserInfo.from_alipay_dict', (['value'], {}), '(value)\n', (1096, 1103), False, 'from alipay.aop.api.domain.CloudbusUserInfo import CloudbusUserInfo\n')] |
vsalat/djangocms-redirect | djangocms_redirect/migrations/0003_auto_20190810_1009.py | a2577f08430b6b65ae4a51293f861b697bf4ab9d | # Generated by Django 2.2.4 on 2019-08-10 08:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('djangocms_redirect', '0002_auto_20170321_1807'),
]
operations = [
migrations.AddField(
model_name='redirect',
name='catchall_redirect',
field=models.BooleanField(default=False, help_text='If selected all the pages starting with the given string will be redirected to the given redirect path', verbose_name='Catchall redirect'),
),
migrations.AddField(
model_name='redirect',
name='subpath_match',
field=models.BooleanField(default=False, help_text='If selected all the pages starting with the given string will be redirected by replacing the matching subpath with the provided redirect path.', verbose_name='Subpath match'),
),
]
| [((358, 552), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': '"""If selected all the pages starting with the given string will be redirected to the given redirect path"""', 'verbose_name': '"""Catchall redirect"""'}), "(default=False, help_text=\n 'If selected all the pages starting with the given string will be redirected to the given redirect path'\n , verbose_name='Catchall redirect')\n", (377, 552), False, 'from django.db import migrations, models\n'), ((671, 901), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': '"""If selected all the pages starting with the given string will be redirected by replacing the matching subpath with the provided redirect path."""', 'verbose_name': '"""Subpath match"""'}), "(default=False, help_text=\n 'If selected all the pages starting with the given string will be redirected by replacing the matching subpath with the provided redirect path.'\n , verbose_name='Subpath match')\n", (690, 901), False, 'from django.db import migrations, models\n')] |
nicholaschiang/dl-datasheets | octopart/scrape_octopart.py | 1c5ab2545a85c1ea7643fc655005259544617d90 | #! /usr/bin/env python
import sys
import json
import urllib
import urllib2
import time
import argparse
import re
# Category ID for Discrete Semiconductors > Transistors > BJTs
TRANSISTOR_ID = b814751e89ff63d3
def find_total_hits(search_query):
"""
Function: find_total_hits
--------------------
Returns the number of hits that correspond to the search query.
"""
url = "http://octopart.com/api/v3/categories/"
# NOTE: Use your API key here (https://octopart.com/api/register)
url += "?apikey=09b32c6c"
args = [
('q', search_query),
('start', 0),
('limit', 1), #change to increase number of datasheets
('include[]','datasheets')
]
url += '&' + urllib.urlencode(args)
data = urllib.urlopen(url).read() # perform a SearchRequest
search_response = json.loads(data) # Grab the SearchResponse
# return number of hits
return search_response['hits']
def download_datasheets(search_query):
"""
Function: download_datasheets
--------------------
Uses the OctoPart API to download all datasheets associated with a given
set of search keywords.
"""
MAX_RESULTS = 100
counter = 0
total_hits = find_total_hits(search_query)
# print number of hits
print "[info] Search Response Hits: %s" % (total_hits)
# Calculate how many multiples of 100s of hits there are
num_hundreds = total_hits / MAX_RESULTS
print "[info] Performing %s iterations of %s results." % (num_hundreds, MAX_RESULTS)
for i in range(num_hundreds+1):
url = "http://octopart.com/api/v3/parts/search"
# NOTE: Use your API key here (https://octopart.com/api/register)
url += "?apikey=09b32c6c"
args = [
('q', search_query),
('start', (i * MAX_RESULTS)),
('limit', MAX_RESULTS), # change to edit number of datasheets
('include[]','datasheets')
# ('include[]','specs'),
# ('include[]','descriptions')
]
url += '&' + urllib.urlencode(args)
data = urllib.urlopen(url).read() # perform a SearchRequest
search_response = json.loads(data) # Grab the SearchResponse
# Iterate through the SearchResults in the SearchResponse
if not search_response.get('results'):
print "[error] no results returned in outer loop: " + str(i)
continue
for result in search_response['results']:
part = result['item'] # Grab the Part in the SearchResult
print ("[info] %s_%s..." % (part['brand']['name'].replace(" ", ""), part['mpn'])),
sys.stdout.flush()
# Iterate through list of datasheets for the given part
for datasheet in part['datasheets']:
# Grab the Datasheet URL
pdflink = datasheet['url']
if pdflink is not None:
# Download the PDF
try:
response = urllib2.urlopen(pdflink)
except urllib2.HTTPError, err:
if err.code == 404:
print "[error] Page not found!...",
elif err.code == 403:
print "[error] Access Denied!...",
else:
print "[error] HTTP Error code ", err.code,
continue; # advance to next datasheet rather than crashing
try:
filename = re.search('([^/]*)\.[^.]*$', datasheet['url']).group(1)
except AttributeError:
continue; # skip to next datasheet rather than crashing
file = open("../datasheets/%s.pdf" % filename, 'w')
file.write(response.read())
file.close()
counter += 1 # Increment the counter of files downloaded
# NOTE: Not sure if this is necessary. Just a precaution.
time.sleep(0.4) # Limit ourselves to 3 HTTP Requests/second
print("DONE")
print("[info] %s Parts Completed." % MAX_RESULTS)
print("[info] COMPLETED: %s datasheets for the query were downloaded." % counter)
def parse_args():
"""
Function: parse_args
--------------------
Parse the arguments for the Octopart Datasheet Scraper
"""
# Define what commandline arguments can be accepted
parser = argparse.ArgumentParser()
parser.add_argument('query',metavar="\"SEARCH_KEYWORDS\"",
help="keywords to query in quotes (required)")
parser.add_argument('--version', action='version', version='%(prog)s 0.1.0')
args = parser.parse_args()
return args.query
# Main Function
if __name__ == "__main__":
reload(sys)
sys.setdefaultencoding('utf-8')
search_query = parse_args() # Parse commandline arguments
start_time = time.time()
print "[info] Download datasheets for %s" % search_query
download_datasheets(search_query)
finish_time = time.time()
print '[info] Took', finish_time - start_time, 'sec total.'
| [] |
foglamp/FogLAMP | extras/python/fogbench/__main__.py | 918dff88b440e6ad580efdaa5f0fbdf4143a73d4 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# FOGLAMP_BEGIN
# See: http://foglamp.readthedocs.io/
# FOGLAMP_END
""" fogbench -- a Python script used to test FogLAMP.
The objective is to simulate payloads for input, REST and other requests against one or
more FogLAMP instances. This version of fogbench is meant to test the CoAP and HTTP plugins
interface of FogLAMP southbound services.
fogbench
[IN] -h --help Print this help
-i --interval The interval in seconds between each iteration (default: 0)
[IN] -k --keep Do not delete (keep) the running sample (default: no)
[IN] -o --output Set the output file for statistics
[IN] -p --payload Type of payload and protocol (default: coap)
[IN] -t --template Set the template to use
[IN] -v --version Display the version and exit
[IN] -H --host The FogLAMP host (default: localhost)
-I --iterations The number of iterations of the test (default: 1)
[IN] -O --occurrences The number of occurrences of the template (default: 1)
[IN] -P --port The FogLAMP port. Default depends on payload and protocol
[IN] -S --statistic The type of statistics to collect
Example:
$ cd $FOGLAMP_ROOT/bin
$ ./fogbench
Help:
$ ./fogbench -h
* Create reading objects from given template, as per the json file name specified with -t
* Save those objects to the file, as per the file name specified with -o
* Read those objects
* Send those to CoAP or HTTP south plugin server, on specific host and port
.. todo::
* Try generators
"""
import sys
import os
import random
import json
from datetime import datetime, timezone
import argparse
import collections
import asyncio
import aiohttp
from .exceptions import *
__author__ = "Praveen Garg"
__copyright__ = "Copyright (c) 2017 OSIsoft, LLC"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"
_FOGBENCH_VERSION = u"0.1.1"
_start_time = []
_end_time = []
_tot_msgs_transferred = []
_tot_byte_transferred = []
_num_iterated = 0
"""Statistics to be collected"""
# _logger = logger.setup(__name__)
def local_timestamp():
"""
:return: str - current time stamp with microseconds and machine timezone info
:example '2018-05-08 14:06:40.517313+05:30'
"""
return str(datetime.now(timezone.utc).astimezone())
def read_templates():
templates = []
return templates
def parse_template_and_prepare_json(_template_file,
_write_to_file=None, _occurrences=1):
# template_file = os.path.join(os.path.dirname(__file__), "templates/" + _template_file)
with open(_template_file) as data_file:
data = json.load(data_file)
supported_format_types = ["number", "enum"]
for _ in range(_occurrences):
readings_ = _prepare_sensor_reading(data, supported_format_types)
for r in readings_:
_write_readings_to_file(_write_to_file, r)
def _write_readings_to_file(to_file, r):
with open(to_file, 'a') as the_file:
json.dump(r, the_file)
the_file.write(os.linesep)
def _prepare_sensor_reading(data, supported_format_types):
readings = []
for d in data:
x_sensor_values = dict()
_sensor_value_object_formats = d["sensor_values"]
for fmt in _sensor_value_object_formats:
if fmt["type"] not in supported_format_types:
raise InvalidSensorValueObjectTemplateFormat(u"Invalid format, "
u"Can not parse type {}".format(fmt["type"]))
if fmt["type"] == "number":
# check float precision if any
precision = fmt.get("precision", None)
min_val = fmt.get("min", None)
max_val = fmt.get("max", None)
if min_val is None or max_val is None:
raise InvalidSensorValueObjectTemplateFormat(u"Invalid format, "
u"Min and Max values must be defined for type number.")
# print(precision)
# print(min_val)
# print(max_val)
reading = round(random.uniform(min_val, max_val), precision)
elif fmt["type"] == "enum":
reading = random.choice(fmt["list"])
# print(fmt["name"], reading)
x_sensor_values[fmt["name"]] = reading
# print(d["name"])
sensor_value_object = dict()
sensor_value_object["asset"] = d['name']
sensor_value_object["readings"] = x_sensor_values
sensor_value_object["timestamp"] = "{!s}".format(local_timestamp())
# print(json.dumps(sensor_value_object))
ord_dict = collections.OrderedDict(sorted(sensor_value_object.items()))
readings.append(ord_dict)
return readings
def read_out_file(_file=None, _keep=False, _iterations=1, _interval=0, send_to='coap'):
global _start_time
global _end_time
global _tot_msgs_transferred
global _tot_byte_transferred
global _num_iterated
# from pprint import pprint
import time
# _file = os.path.join(os.path.dirname(__file__), "out/{}".format(outfile))
with open(_file) as f:
readings_list = [json.loads(line) for line in f]
loop = asyncio.get_event_loop()
while _iterations > 0:
# Pre-calculate the messages and size
msg_transferred_itr = 0 # Messages transferred in every iteration
byte_transferred_itr = 0 # Bytes transferred in every iteration
for r in readings_list:
msg_transferred_itr += 1
byte_transferred_itr += sys.getsizeof(r)
if send_to == 'coap':
_start_time.append(datetime.now())
for r in readings_list:
is_sent = loop.run_until_complete(send_to_coap(r))
if not is_sent:
break
elif send_to == 'http':
_start_time.append(datetime.now())
loop.run_until_complete(send_to_http(readings_list))
_end_time.append(datetime.now()) # End time of every iteration
_tot_msgs_transferred.append(msg_transferred_itr)
_tot_byte_transferred.append(byte_transferred_itr)
_iterations -= 1
_num_iterated += 1
if _iterations != 0:
# print(u"Iteration {} completed, waiting for {} seconds".format(_iterations, _interval))
time.sleep(_interval)
if not _keep:
os.remove(_file)
async def send_to_coap(payload):
"""
POST request to:
localhost
port 5683 (official IANA assigned CoAP port),
URI "/other/sensor-values".
"""
from aiocoap import Context, Message
from aiocoap.numbers.codes import Code
from cbor2 import dumps
context = await Context.create_client_context()
request = Message(payload=dumps(payload), code=Code.POST)
request.opt.uri_host = arg_host
request.opt.uri_port = arg_port
request.opt.uri_path = ("other", "sensor-values")
response = await context.request(request).response
str_res = str(response.code)
status_code = str_res[:4] # or str_res.split()[0]
if status_code == "4.00" or status_code == "5.00":
print("Error: ", str_res)
return False
return True
async def send_to_http(payload):
"""
POST request to:
host localhost
port 6683 (default HTTP south plugin port),
uri sensor-reading
"""
headers = {'content-type': 'application/json'}
url = 'http://{}:{}/sensor-reading'.format(arg_host, arg_port)
async with aiohttp.ClientSession() as session:
async with session.post(url, data=json.dumps(payload), headers=headers) as resp:
await resp.text()
status_code = resp.status
if status_code in range(400, 500):
print("Bad request error | code:{}, reason: {}".format(status_code, resp.reason))
return False
if status_code in range(500, 600):
print("Server error | code:{}, reason: {}".format(status_code, resp.reason))
return False
return True
def get_statistics(_stats_type=None, _out_file=None):
stat = ''
global _start_time
global _end_time
global _tot_msgs_transferred
global _tot_byte_transferred
global _num_iterated
if _stats_type == 'total':
stat += u"Total Statistics:\n"
stat += (u"\nStart Time: {}".format(datetime.strftime(_start_time[0], "%Y-%m-%d %H:%M:%S.%f")))
stat += (u"\nEnd Time: {}\n".format(datetime.strftime(_end_time[-1], "%Y-%m-%d %H:%M:%S.%f")))
stat += (u"\nTotal Messages Transferred: {}".format(sum(_tot_msgs_transferred)))
stat += (u"\nTotal Bytes Transferred: {}\n".format(sum(_tot_byte_transferred)))
stat += (u"\nTotal Iterations: {}".format(_num_iterated))
stat += (u"\nTotal Messages per Iteration: {}".format(sum(_tot_msgs_transferred)/_num_iterated))
stat += (u"\nTotal Bytes per Iteration: {}\n".format(sum(_tot_byte_transferred)/_num_iterated))
_msg_rate = []
_byte_rate = []
for itr in range(_num_iterated):
time_taken = _end_time[itr] - _start_time[itr]
_msg_rate.append(_tot_msgs_transferred[itr]/(time_taken.seconds+time_taken.microseconds/1E6))
_byte_rate.append(_tot_byte_transferred[itr] / (time_taken.seconds+time_taken.microseconds/1E6))
stat += (u"\nMin messages/second: {}".format(min(_msg_rate)))
stat += (u"\nMax messages/second: {}".format(max(_msg_rate)))
stat += (u"\nAvg messages/second: {}\n".format(sum(_msg_rate)/_num_iterated))
stat += (u"\nMin Bytes/second: {}".format(min(_byte_rate)))
stat += (u"\nMax Bytes/second: {}".format(max(_byte_rate)))
stat += (u"\nAvg Bytes/second: {}".format(sum(_byte_rate)/_num_iterated))
if _out_file:
with open(_out_file, 'w') as f:
f.write(stat)
else:
print(stat)
# should we also show total time diff? end_time - start_time
def check_server(payload_type='coap'):
template_str = ">>> Make sure south {} plugin service is running \n & listening on specified host and port \n"
if payload_type == 'coap':
print(template_str.format("CoAP"))
elif payload_type == 'http':
print(template_str.format("HTTP"))
parser = argparse.ArgumentParser(prog='fogbench')
parser.description = '%(prog)s -- a Python script used to test FogLAMP (simulate payloads)'
parser.epilog = 'The initial version of %(prog)s is meant to test the south plugin interface of ' \
'FogLAMP using CoAP or HTTP'
parser.add_argument('-v', '--version', action='version', version='%(prog)s {0!s}'.format(_FOGBENCH_VERSION))
parser.add_argument('-k', '--keep', default=False, choices=['y', 'yes', 'n', 'no'],
help='Do not delete the running sample (default: no)')
parser.add_argument('-t', '--template', required=True, help='Set the template file, json extension')
parser.add_argument('-o', '--output', default=None, help='Set the statistics output file')
parser.add_argument('-p', '--payload', default='coap', choices=['coap', 'http'], help='Type of payload '
'and protocol (default: coap)')
parser.add_argument('-I', '--iterations', help='The number of iterations of the test (default: 1)')
parser.add_argument('-O', '--occurrences', help='The number of occurrences of the template (default: 1)')
parser.add_argument('-H', '--host', help='Server host address (default: localhost)')
parser.add_argument('-P', '--port', help='The FogLAMP port. (default: 5683)')
parser.add_argument('-i', '--interval', default=0, help='The interval in seconds for each iteration (default: 0)')
parser.add_argument('-S', '--statistics', default='total', choices=['total'], help='The type of statistics to collect '
'(default: total)')
namespace = parser.parse_args(sys.argv[1:])
infile = '{0}'.format(namespace.template if namespace.template else '')
statistics_file = os.path.join(os.path.dirname(__file__), "out/{}".format(namespace.output)) if namespace.output else None
keep_the_file = True if namespace.keep in ['y', 'yes'] else False
# iterations and occurrences
arg_iterations = int(namespace.iterations) if namespace.iterations else 1
arg_occurrences = int(namespace.occurrences) if namespace.occurrences else 1
# interval between each iteration
arg_interval = int(namespace.interval) if namespace.interval else 0
arg_stats_type = '{0}'.format(namespace.statistics) if namespace.statistics else 'total'
if namespace.payload:
arg_payload_protocol = namespace.payload
arg_host = '{0}'.format(namespace.host) if namespace.host else 'localhost'
default_port = 6683 if arg_payload_protocol == 'http' else 5683
arg_port = int(namespace.port) if namespace.port else default_port
check_server(arg_payload_protocol)
sample_file = os.path.join("/tmp", "foglamp_running_sample.{}".format(os.getpid()))
parse_template_and_prepare_json(_template_file=infile, _write_to_file=sample_file, _occurrences=arg_occurrences)
read_out_file(_file=sample_file, _keep=keep_the_file, _iterations=arg_iterations, _interval=arg_interval,
send_to=arg_payload_protocol)
get_statistics(_stats_type=arg_stats_type, _out_file=statistics_file)
# TODO: Change below per local_timestamp() values
""" Expected output from given template
{
"timestamp" : "2017-08-04T06:59:57.503Z",
"asset" : "TI sensorTag/luxometer",
"sensor_values" : { "lux" : 49 }
}
{
"timestamp" : "2017-08-04T06:59:57.863Z",
"asset" : "TI sensorTag/pressure",
"sensor_values" : { "pressure" : 1021.2 }
}
{
"timestamp" : "2017-08-04T06:59:58.863Z",
"asset" : "TI sensorTag/humidity",
"sensor_values" : { "humidity" : 71.2, "temperature" : 18.6 }
}
{
"timestamp" : "2017-08-04T06:59:59.863Z",
"asset" : "TI sensorTag/temperature",
"sensor_values" : { "object" : 18.2, "ambient" : 21.6 }
}
{
"timestamp" : "2017-08-04T07:00:00.863Z",
"asset" : "TI sensorTag/accelerometer",
"sensor_values" : { "x" : 1.2, "y" : 0.0, "z" : -0.6 }
}
{
"timestamp" : "2017-08-04T07:00:01.863Z",
"asset" : "TI sensorTag/gyroscope",
"sensor_values" : { "x" : 101.2, "y" : 46.2, "z" : -12.6 }
}
{
"timestamp" : "2017-08-04T07:00:02.863Z",
"asset" : "TI sensorTag/magnetometer",
"sensor_values" : { "x" : 101.2, "y" : 46.2, "z" : -12.6 }
}
{
"timestamp" : "2017-08-04T07:00:03.863Z",
"asset" : "mouse",
"sensor_values" : { "button" : "down" }
}
{
"timestamp" : "2017-08-04T07:00:04.863Z",
"asset" : "wall clock",
"sensor_values" : { "tick" : "tock" }
}
"""
| [((10456, 10496), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""fogbench"""'}), "(prog='fogbench')\n", (10479, 10496), False, 'import argparse\n'), ((5351, 5375), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (5373, 5375), False, 'import asyncio\n'), ((2696, 2716), 'json.load', 'json.load', (['data_file'], {}), '(data_file)\n', (2705, 2716), False, 'import json\n'), ((3057, 3079), 'json.dump', 'json.dump', (['r', 'the_file'], {}), '(r, the_file)\n', (3066, 3079), False, 'import json\n'), ((6539, 6555), 'os.remove', 'os.remove', (['_file'], {}), '(_file)\n', (6548, 6555), False, 'import os\n'), ((6861, 6892), 'aiocoap.Context.create_client_context', 'Context.create_client_context', ([], {}), '()\n', (6890, 6892), False, 'from aiocoap import Context, Message\n'), ((7652, 7675), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (7673, 7675), False, 'import aiohttp\n'), ((12274, 12299), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (12289, 12299), False, 'import os\n'), ((13188, 13199), 'os.getpid', 'os.getpid', ([], {}), '()\n', (13197, 13199), False, 'import os\n'), ((5307, 5323), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (5317, 5323), False, 'import json\n'), ((5705, 5721), 'sys.getsizeof', 'sys.getsizeof', (['r'], {}), '(r)\n', (5718, 5721), False, 'import sys\n'), ((6131, 6145), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6143, 6145), False, 'from datetime import datetime, timezone\n'), ((6490, 6511), 'time.sleep', 'time.sleep', (['_interval'], {}), '(_interval)\n', (6500, 6511), False, 'import time\n'), ((6924, 6938), 'cbor2.dumps', 'dumps', (['payload'], {}), '(payload)\n', (6929, 6938), False, 'from cbor2 import dumps\n'), ((8531, 8588), 'datetime.datetime.strftime', 'datetime.strftime', (['_start_time[0]', '"""%Y-%m-%d %H:%M:%S.%f"""'], {}), "(_start_time[0], '%Y-%m-%d %H:%M:%S.%f')\n", (8548, 8588), False, 'from datetime import datetime, timezone\n'), ((8637, 8693), 'datetime.datetime.strftime', 'datetime.strftime', (['_end_time[-1]', '"""%Y-%m-%d %H:%M:%S.%f"""'], {}), "(_end_time[-1], '%Y-%m-%d %H:%M:%S.%f')\n", (8654, 8693), False, 'from datetime import datetime, timezone\n'), ((2309, 2335), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (2321, 2335), False, 'from datetime import datetime, timezone\n'), ((5784, 5798), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5796, 5798), False, 'from datetime import datetime, timezone\n'), ((4235, 4267), 'random.uniform', 'random.uniform', (['min_val', 'max_val'], {}), '(min_val, max_val)\n', (4249, 4267), False, 'import random\n'), ((4346, 4372), 'random.choice', 'random.choice', (["fmt['list']"], {}), "(fmt['list'])\n", (4359, 4372), False, 'import random\n'), ((6024, 6038), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6036, 6038), False, 'from datetime import datetime, timezone\n'), ((7730, 7749), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (7740, 7749), False, 'import json\n')] |
paulineollitrault/qiskit-ignis | qiskit/ignis/mitigation/measurement/filters.py | 99f24ea6533cd284be4c44a48d43e54f62f05674 | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# pylint: disable=cell-var-from-loop,invalid-name
"""
Measurement correction filters.
"""
from typing import List, Union
from copy import deepcopy
from scipy.optimize import minimize
import scipy.linalg as la
import numpy as np
import qiskit
from qiskit import QiskitError
from qiskit.tools import parallel_map
from qiskit.ignis.verification.tomography import count_keys
class MeasurementFilter():
"""
Measurement error mitigation filter.
Produced from a measurement calibration fitter and can be applied
to data.
"""
def __init__(self,
cal_matrix: np.matrix,
state_labels: list):
"""
Initialize a measurement error mitigation filter using the cal_matrix
from a measurement calibration fitter.
Args:
cal_matrix: the calibration matrix for applying the correction
state_labels: the states for the ordering of the cal matrix
"""
self._cal_matrix = cal_matrix
self._state_labels = state_labels
@property
def cal_matrix(self):
"""Return cal_matrix."""
return self._cal_matrix
@property
def state_labels(self):
"""return the state label ordering of the cal matrix"""
return self._state_labels
@state_labels.setter
def state_labels(self, new_state_labels):
"""set the state label ordering of the cal matrix"""
self._state_labels = new_state_labels
@cal_matrix.setter
def cal_matrix(self, new_cal_matrix):
"""Set cal_matrix."""
self._cal_matrix = new_cal_matrix
def apply(self,
raw_data,
method='least_squares'):
"""Apply the calibration matrix to results.
Args:
raw_data (dict or list): The data to be corrected. Can be in a number of forms:
Form 1: a counts dictionary from results.get_counts
Form 2: a list of counts of `length==len(state_labels)`
Form 3: a list of counts of `length==M*len(state_labels)` where M is an
integer (e.g. for use with the tomography data)
Form 4: a qiskit Result
method (str): fitting method. If `None`, then least_squares is used.
``pseudo_inverse``: direct inversion of the A matrix
``least_squares``: constrained to have physical probabilities
Returns:
dict or list: The corrected data in the same form as `raw_data`
Raises:
QiskitError: if `raw_data` is not an integer multiple
of the number of calibrated states.
"""
# check forms of raw_data
if isinstance(raw_data, dict):
# counts dictionary
for data_label in raw_data.keys():
if data_label not in self._state_labels:
raise QiskitError("Unexpected state label '" + data_label +
"', verify the fitter's state labels "
"correspond to the input data")
data_format = 0
# convert to form2
raw_data2 = [np.zeros(len(self._state_labels), dtype=float)]
for stateidx, state in enumerate(self._state_labels):
raw_data2[0][stateidx] = raw_data.get(state, 0)
elif isinstance(raw_data, list):
size_ratio = len(raw_data)/len(self._state_labels)
if len(raw_data) == len(self._state_labels):
data_format = 1
raw_data2 = [raw_data]
elif int(size_ratio) == size_ratio:
data_format = 2
size_ratio = int(size_ratio)
# make the list into chunks the size of state_labels for easier
# processing
raw_data2 = np.zeros([size_ratio, len(self._state_labels)])
for i in range(size_ratio):
raw_data2[i][:] = raw_data[
i * len(self._state_labels):(i + 1)*len(
self._state_labels)]
else:
raise QiskitError("Data list is not an integer multiple "
"of the number of calibrated states")
elif isinstance(raw_data, qiskit.result.result.Result):
# extract out all the counts, re-call the function with the
# counts and push back into the new result
new_result = deepcopy(raw_data)
new_counts_list = parallel_map(
self._apply_correction,
[resultidx for resultidx, _ in enumerate(raw_data.results)],
task_args=(raw_data, method))
for resultidx, new_counts in new_counts_list:
new_result.results[resultidx].data.counts = new_counts
return new_result
else:
raise QiskitError("Unrecognized type for raw_data.")
if method == 'pseudo_inverse':
pinv_cal_mat = la.pinv(self._cal_matrix)
# Apply the correction
for data_idx, _ in enumerate(raw_data2):
if method == 'pseudo_inverse':
raw_data2[data_idx] = np.dot(
pinv_cal_mat, raw_data2[data_idx])
elif method == 'least_squares':
nshots = sum(raw_data2[data_idx])
def fun(x):
return sum(
(raw_data2[data_idx] - np.dot(self._cal_matrix, x))**2)
x0 = np.random.rand(len(self._state_labels))
x0 = x0 / sum(x0)
cons = ({'type': 'eq', 'fun': lambda x: nshots - sum(x)})
bnds = tuple((0, nshots) for x in x0)
res = minimize(fun, x0, method='SLSQP',
constraints=cons, bounds=bnds, tol=1e-6)
raw_data2[data_idx] = res.x
else:
raise QiskitError("Unrecognized method.")
if data_format == 2:
# flatten back out the list
raw_data2 = raw_data2.flatten()
elif data_format == 0:
# convert back into a counts dictionary
new_count_dict = {}
for stateidx, state in enumerate(self._state_labels):
if raw_data2[0][stateidx] != 0:
new_count_dict[state] = raw_data2[0][stateidx]
raw_data2 = new_count_dict
else:
# TODO: should probably change to:
# raw_data2 = raw_data2[0].tolist()
raw_data2 = raw_data2[0]
return raw_data2
def _apply_correction(self, resultidx, raw_data, method):
"""Wrapper to call apply with a counts dictionary."""
new_counts = self.apply(
raw_data.get_counts(resultidx), method=method)
return resultidx, new_counts
class TensoredFilter():
"""
Tensored measurement error mitigation filter.
Produced from a tensored measurement calibration fitter and can be applied
to data.
"""
def __init__(self,
cal_matrices: np.matrix,
substate_labels_list: list,
mit_pattern: list):
"""
Initialize a tensored measurement error mitigation filter using
the cal_matrices from a tensored measurement calibration fitter.
A simple usage this class is explained [here]
(https://qiskit.org/documentation/tutorials/noise/3_measurement_error_mitigation.html).
Args:
cal_matrices: the calibration matrices for applying the correction.
substate_labels_list: for each calibration matrix
a list of the states (as strings, states in the subspace)
mit_pattern: for each calibration matrix
a list of the logical qubit indices (as int, states in the subspace)
"""
self._cal_matrices = cal_matrices
self._qubit_list_sizes = []
self._indices_list = []
self._substate_labels_list = []
self.substate_labels_list = substate_labels_list
self._mit_pattern = mit_pattern
@property
def cal_matrices(self):
"""Return cal_matrices."""
return self._cal_matrices
@cal_matrices.setter
def cal_matrices(self, new_cal_matrices):
"""Set cal_matrices."""
self._cal_matrices = deepcopy(new_cal_matrices)
@property
def substate_labels_list(self):
"""Return _substate_labels_list"""
return self._substate_labels_list
@substate_labels_list.setter
def substate_labels_list(self, new_substate_labels_list):
"""Return _substate_labels_list"""
self._substate_labels_list = new_substate_labels_list
# get the number of qubits in each subspace
self._qubit_list_sizes = []
for _, substate_label_list in enumerate(self._substate_labels_list):
self._qubit_list_sizes.append(
int(np.log2(len(substate_label_list))))
# get the indices in the calibration matrix
self._indices_list = []
for _, sub_labels in enumerate(self._substate_labels_list):
self._indices_list.append(
{lab: ind for ind, lab in enumerate(sub_labels)})
@property
def qubit_list_sizes(self):
"""Return _qubit_list_sizes."""
return self._qubit_list_sizes
@property
def nqubits(self):
"""Return the number of qubits. See also MeasurementFilter.apply() """
return sum(self._qubit_list_sizes)
def apply(self,
raw_data: Union[qiskit.result.result.Result, dict],
method: str = 'least_squares',
meas_layout: List[int] = None):
"""
Apply the calibration matrices to results.
Args:
raw_data (dict or Result): The data to be corrected. Can be in one of two forms:
* A counts dictionary from results.get_counts
* A Qiskit Result
method (str): fitting method. The following methods are supported:
* 'pseudo_inverse': direct inversion of the cal matrices.
Mitigated counts can contain negative values
and the sum of counts would not equal to the shots.
Mitigation is conducted qubit wise:
For each qubit, mitigate the whole counts using the calibration matrices
which affect the corresponding qubit.
For example, assume we are mitigating the 3rd bit of the 4-bit counts
using '2\times 2' calibration matrix `A_3`.
When mitigating the count of '0110' in this step,
the following formula is applied:
`count['0110'] = A_3^{-1}[1, 0]*count['0100'] + A_3^{-1}[1, 1]*count['0110']`.
The total time complexity of this method is `O(m2^{n + t})`,
where `n` is the size of calibrated qubits,
`m` is the number of sets in `mit_pattern`,
and `t` is the size of largest set of mit_pattern.
If the `mit_pattern` is shaped like `[[0], [1], [2], ..., [n-1]]`,
which corresponds to the tensor product noise model without cross-talk,
then the time complexity would be `O(n2^n)`.
If the `mit_pattern` is shaped like `[[0, 1, 2, ..., n-1]]`,
which exactly corresponds to the complete error mitigation,
then the time complexity would be `O(2^(n+n)) = O(4^n)`.
* 'least_squares': constrained to have physical probabilities.
Instead of directly applying inverse calibration matrices,
this method solve a constrained optimization problem to find
the closest probability vector to the result from 'pseudo_inverse' method.
Sequential least square quadratic programming (SLSQP) is used
in the internal process.
Every updating step in SLSQP takes `O(m2^{n+t})` time.
Since this method is using the SLSQP optimization over
the vector with lenght `2^n`, the mitigation for 8 bit counts
with the `mit_pattern = [[0], [1], [2], ..., [n-1]]` would
take 10 seconds or more.
* If `None`, 'least_squares' is used.
meas_layout (list of int): the mapping from classical registers to qubits
* If you measure qubit `2` to clbit `0`, `0` to `1`, and `1` to `2`,
the list becomes `[2, 0, 1]`
* If `None`, flatten(mit_pattern) is used.
Returns:
dict or Result: The corrected data in the same form as raw_data
Raises:
QiskitError: if raw_data is not in a one of the defined forms.
"""
all_states = count_keys(self.nqubits)
num_of_states = 2**self.nqubits
if meas_layout is None:
meas_layout = []
for qubits in self._mit_pattern:
meas_layout += qubits
# check forms of raw_data
if isinstance(raw_data, dict):
# counts dictionary
# convert to list
raw_data2 = [np.zeros(num_of_states, dtype=float)]
for state, count in raw_data.items():
stateidx = int(state, 2)
raw_data2[0][stateidx] = count
elif isinstance(raw_data, qiskit.result.result.Result):
# extract out all the counts, re-call the function with the
# counts and push back into the new result
new_result = deepcopy(raw_data)
new_counts_list = parallel_map(
self._apply_correction,
[resultidx for resultidx, _ in enumerate(raw_data.results)],
task_args=(raw_data, method, meas_layout))
for resultidx, new_counts in new_counts_list:
new_result.results[resultidx].data.counts = new_counts
return new_result
else:
raise QiskitError("Unrecognized type for raw_data.")
if method == 'pseudo_inverse':
pinv_cal_matrices = []
for cal_mat in self._cal_matrices:
pinv_cal_matrices.append(la.pinv(cal_mat))
meas_layout = meas_layout[::-1] # reverse endian
qubits_to_clbits = [-1 for _ in range(max(meas_layout) + 1)]
for i, qubit in enumerate(meas_layout):
qubits_to_clbits[qubit] = i
# Apply the correction
for data_idx, _ in enumerate(raw_data2):
if method == 'pseudo_inverse':
for pinv_cal_mat, pos_qubits, indices in zip(pinv_cal_matrices,
self._mit_pattern,
self._indices_list):
inv_mat_dot_x = np.zeros([num_of_states], dtype=float)
pos_clbits = [qubits_to_clbits[qubit] for qubit in pos_qubits]
for state_idx, state in enumerate(all_states):
first_index = self.compute_index_of_cal_mat(state, pos_clbits, indices)
for i in range(len(pinv_cal_mat)): # i is index of pinv_cal_mat
source_state = self.flip_state(state, i, pos_clbits)
second_index = self.compute_index_of_cal_mat(source_state,
pos_clbits,
indices)
inv_mat_dot_x[state_idx] += pinv_cal_mat[first_index, second_index]\
* raw_data2[data_idx][int(source_state, 2)]
raw_data2[data_idx] = inv_mat_dot_x
elif method == 'least_squares':
def fun(x):
mat_dot_x = deepcopy(x)
for cal_mat, pos_qubits, indices in zip(self._cal_matrices,
self._mit_pattern,
self._indices_list):
res_mat_dot_x = np.zeros([num_of_states], dtype=float)
pos_clbits = [qubits_to_clbits[qubit] for qubit in pos_qubits]
for state_idx, state in enumerate(all_states):
second_index = self.compute_index_of_cal_mat(state, pos_clbits, indices)
for i in range(len(cal_mat)):
target_state = self.flip_state(state, i, pos_clbits)
first_index =\
self.compute_index_of_cal_mat(target_state, pos_clbits, indices)
res_mat_dot_x[int(target_state, 2)]\
+= cal_mat[first_index, second_index] * mat_dot_x[state_idx]
mat_dot_x = res_mat_dot_x
return sum((raw_data2[data_idx] - mat_dot_x) ** 2)
x0 = np.random.rand(num_of_states)
x0 = x0 / sum(x0)
nshots = sum(raw_data2[data_idx])
cons = ({'type': 'eq', 'fun': lambda x: nshots - sum(x)})
bnds = tuple((0, nshots) for x in x0)
res = minimize(fun, x0, method='SLSQP',
constraints=cons, bounds=bnds, tol=1e-6)
raw_data2[data_idx] = res.x
else:
raise QiskitError("Unrecognized method.")
# convert back into a counts dictionary
new_count_dict = {}
for state_idx, state in enumerate(all_states):
if raw_data2[0][state_idx] != 0:
new_count_dict[state] = raw_data2[0][state_idx]
return new_count_dict
def flip_state(self, state: str, mat_index: int, flip_poses: List[int]) -> str:
"""Flip the state according to the chosen qubit positions"""
flip_poses = [pos for i, pos in enumerate(flip_poses) if (mat_index >> i) & 1]
flip_poses = sorted(flip_poses)
new_state = ""
pos = 0
for flip_pos in flip_poses:
new_state += state[pos:flip_pos]
new_state += str(int(state[flip_pos], 2) ^ 1) # flip the state
pos = flip_pos + 1
new_state += state[pos:]
return new_state
def compute_index_of_cal_mat(self, state: str, pos_qubits: List[int], indices: dict) -> int:
"""Return the index of (pseudo inverse) calibration matrix for the input quantum state"""
sub_state = ""
for pos in pos_qubits:
sub_state += state[pos]
return indices[sub_state]
def _apply_correction(self,
resultidx: int,
raw_data: qiskit.result.result.Result,
method: str,
meas_layout: List[int]):
"""Wrapper to call apply with a counts dictionary."""
new_counts = self.apply(
raw_data.get_counts(resultidx), method=method, meas_layout=meas_layout)
return resultidx, new_counts
| [((8879, 8905), 'copy.deepcopy', 'deepcopy', (['new_cal_matrices'], {}), '(new_cal_matrices)\n', (8887, 8905), False, 'from copy import deepcopy\n'), ((13514, 13538), 'qiskit.ignis.verification.tomography.count_keys', 'count_keys', (['self.nqubits'], {}), '(self.nqubits)\n', (13524, 13538), False, 'from qiskit.ignis.verification.tomography import count_keys\n'), ((5529, 5554), 'scipy.linalg.pinv', 'la.pinv', (['self._cal_matrix'], {}), '(self._cal_matrix)\n', (5536, 5554), True, 'import scipy.linalg as la\n'), ((5718, 5759), 'numpy.dot', 'np.dot', (['pinv_cal_mat', 'raw_data2[data_idx]'], {}), '(pinv_cal_mat, raw_data2[data_idx])\n', (5724, 5759), True, 'import numpy as np\n'), ((13885, 13921), 'numpy.zeros', 'np.zeros', (['num_of_states'], {'dtype': 'float'}), '(num_of_states, dtype=float)\n', (13893, 13921), True, 'import numpy as np\n'), ((14279, 14297), 'copy.deepcopy', 'deepcopy', (['raw_data'], {}), '(raw_data)\n', (14287, 14297), False, 'from copy import deepcopy\n'), ((14713, 14759), 'qiskit.QiskitError', 'QiskitError', (['"""Unrecognized type for raw_data."""'], {}), "('Unrecognized type for raw_data.')\n", (14724, 14759), False, 'from qiskit import QiskitError\n'), ((3400, 3525), 'qiskit.QiskitError', 'QiskitError', (['("Unexpected state label \'" + data_label +\n "\', verify the fitter\'s state labels correspond to the input data")'], {}), '("Unexpected state label \'" + data_label +\n "\', verify the fitter\'s state labels correspond to the input data")\n', (3411, 3525), False, 'from qiskit import QiskitError\n'), ((4994, 5012), 'copy.deepcopy', 'deepcopy', (['raw_data'], {}), '(raw_data)\n', (5002, 5012), False, 'from copy import deepcopy\n'), ((5415, 5461), 'qiskit.QiskitError', 'QiskitError', (['"""Unrecognized type for raw_data."""'], {}), "('Unrecognized type for raw_data.')\n", (5426, 5461), False, 'from qiskit import QiskitError\n'), ((6262, 6337), 'scipy.optimize.minimize', 'minimize', (['fun', 'x0'], {'method': '"""SLSQP"""', 'constraints': 'cons', 'bounds': 'bnds', 'tol': '(1e-06)'}), "(fun, x0, method='SLSQP', constraints=cons, bounds=bnds, tol=1e-06)\n", (6270, 6337), False, 'from scipy.optimize import minimize\n'), ((6453, 6488), 'qiskit.QiskitError', 'QiskitError', (['"""Unrecognized method."""'], {}), "('Unrecognized method.')\n", (6464, 6488), False, 'from qiskit import QiskitError\n'), ((14923, 14939), 'scipy.linalg.pinv', 'la.pinv', (['cal_mat'], {}), '(cal_mat)\n', (14930, 14939), True, 'import scipy.linalg as la\n'), ((15560, 15598), 'numpy.zeros', 'np.zeros', (['[num_of_states]'], {'dtype': 'float'}), '([num_of_states], dtype=float)\n', (15568, 15598), True, 'import numpy as np\n'), ((17793, 17822), 'numpy.random.rand', 'np.random.rand', (['num_of_states'], {}), '(num_of_states)\n', (17807, 17822), True, 'import numpy as np\n'), ((18057, 18132), 'scipy.optimize.minimize', 'minimize', (['fun', 'x0'], {'method': '"""SLSQP"""', 'constraints': 'cons', 'bounds': 'bnds', 'tol': '(1e-06)'}), "(fun, x0, method='SLSQP', constraints=cons, bounds=bnds, tol=1e-06)\n", (18065, 18132), False, 'from scipy.optimize import minimize\n'), ((18248, 18283), 'qiskit.QiskitError', 'QiskitError', (['"""Unrecognized method."""'], {}), "('Unrecognized method.')\n", (18259, 18283), False, 'from qiskit import QiskitError\n'), ((4652, 4743), 'qiskit.QiskitError', 'QiskitError', (['"""Data list is not an integer multiple of the number of calibrated states"""'], {}), "(\n 'Data list is not an integer multiple of the number of calibrated states')\n", (4663, 4743), False, 'from qiskit import QiskitError\n'), ((16603, 16614), 'copy.deepcopy', 'deepcopy', (['x'], {}), '(x)\n', (16611, 16614), False, 'from copy import deepcopy\n'), ((16895, 16933), 'numpy.zeros', 'np.zeros', (['[num_of_states]'], {'dtype': 'float'}), '([num_of_states], dtype=float)\n', (16903, 16933), True, 'import numpy as np\n'), ((5984, 6011), 'numpy.dot', 'np.dot', (['self._cal_matrix', 'x'], {}), '(self._cal_matrix, x)\n', (5990, 6011), True, 'import numpy as np\n')] |
fcharlier/AdventOfCode | 2017/adv2017-1.py | 6b2765da9e4d6f6b1f201897bb56043482a65bb2 | #!/usr/bin/python
def meh(captcha):
"""Returns the sum of the digits which match the next one in the captcha
input string.
>>> meh('1122')
3
>>> meh('1111')
4
>>> meh('1234')
0
>>> meh('91212129')
9
"""
result = 0
for n in range(len(captcha)):
if captcha[n] == captcha[(n + 1) % len(captcha)]:
result += int(captcha[n])
return result
def meh2(captcha):
"""Returns the sum of the digits which match the next one in the captcha
input string.
>>> meh2('1212')
6
>>> meh2('1221')
0
>>> meh2('123425')
4
>>> meh2('123123')
12
>>> meh2('12131415')
4
"""
result = 0
for n in range(len(captcha)):
if captcha[n] == captcha[(n + len(captcha) / 2) % len(captcha)]:
result += int(captcha[n])
return result
if __name__ == '__main__':
input = '57276274387944537823652626177853384411146325384494935924454336611953119173638191671326254832624841593421667683474349154668177743437745965461678636631863541462893547616877914914662358836365421198516263335926544716331814125295712581158399321372683742773423626286669759415959391374744214595682795818615532673877868424196926497731144319736445141728123322962547288572434564178492753681842244888368542423832228211172842456231275738182764232265933625119312598161192193214898949267765417468348935134618964683127194391796165368145548814473129857697989322621368744725685183346825333247866734735894493395218781464346951777873929898961358796274889826894529599645442657423438562423853247543621565468819799931598754753467593832328147439341586125262733737128386961596394728159719292787597426898945198788211417854662948358422729471312456437778978749753927251431677533575752312447488337156956217451965643454445329758327129966657189332824969141448538681979632611199385896965946849725421978137753366252459914913637858783146735469758716752765718189175583956476935185985918536318424248425426398158278111751711911227818826766177996223718837428972784328925743869885232266127727865267881592395643836999244218345184474613129823933659422223685422732186536199153988717455568523781673393698356967355875123554797755491181791593156433735591529495984256519631187849654633243225118132152549712643273819314433877592644693826861523243946998615722951182474773173215527598949553185313259992227879964482121769617218685394776778423378182462422788277997523913176326468957342296368178321958626168785578977414537368686438348124283789748775163821457641135163495649331144436157836647912852483177542224864952271874645274572426458614384917923623627532487625396914111582754953944965462576624728896917137599778828769958626788685374749661741223741834844643725486925886933118382649581481351844943368484853956759877215252766294896496444835264357169642341291412768946589781812493421379575569593678354241223363739129813633236996588711791919421574583924743119867622229659211793468744163297478952475933163259769578345894367855534294493613767564497137369969315192443795512585'
print meh(input)
print meh2(input)
| [] |
Roy027/pymatgen | pymatgen/analysis/graphs.py | a4aa91d011033c1151b82335abd080e2b1a310d5 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
Module for graph representations of crystals.
"""
import copy
import logging
import os.path
import subprocess
import warnings
from collections import defaultdict, namedtuple
from itertools import combinations
from operator import itemgetter
import networkx as nx
import networkx.algorithms.isomorphism as iso
import numpy as np
from monty.json import MSONable
from monty.os.path import which
from networkx.drawing.nx_agraph import write_dot
from networkx.readwrite import json_graph
from scipy.spatial import KDTree
from scipy.stats import describe
from pymatgen.core import Lattice, Molecule, PeriodicSite, Structure
from pymatgen.core.structure import FunctionalGroups
from pymatgen.util.coord import lattice_points_in_supercell
from pymatgen.vis.structure_vtk import EL_COLORS
try:
import igraph
IGRAPH_AVAILABLE = True
except ImportError:
IGRAPH_AVAILABLE = False
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
__author__ = "Matthew Horton, Evan Spotte-Smith, Samuel Blau"
__version__ = "0.1"
__maintainer__ = "Matthew Horton"
__email__ = "[email protected]"
__status__ = "Production"
__date__ = "August 2017"
ConnectedSite = namedtuple("ConnectedSite", "site, jimage, index, weight, dist")
def _compare(g1, g2, i1, i2):
"""
Helper function called by isomorphic to ensure comparison of node identities.
"""
return g1.vs[i1]["species"] == g2.vs[i2]["species"]
def _igraph_from_nxgraph(graph):
"""
Helper function that converts a networkx graph object into an igraph graph object.
"""
nodes = graph.nodes(data=True)
new_igraph = igraph.Graph()
for node in nodes:
new_igraph.add_vertex(name=str(node[0]), species=node[1]["specie"], coords=node[1]["coords"])
new_igraph.add_edges([(str(edge[0]), str(edge[1])) for edge in graph.edges()])
return new_igraph
def _isomorphic(frag1, frag2):
"""
Internal function to check if two graph objects are isomorphic, using igraph if
if is available and networkx if it is not.
"""
f1_nodes = frag1.nodes(data=True)
f2_nodes = frag2.nodes(data=True)
if len(f1_nodes) != len(f2_nodes):
return False
f2_edges = frag2.edges()
if len(f2_edges) != len(f2_edges):
return False
f1_comp_dict = {}
f2_comp_dict = {}
for node in f1_nodes:
if node[1]["specie"] not in f1_comp_dict:
f1_comp_dict[node[1]["specie"]] = 1
else:
f1_comp_dict[node[1]["specie"]] += 1
for node in f2_nodes:
if node[1]["specie"] not in f2_comp_dict:
f2_comp_dict[node[1]["specie"]] = 1
else:
f2_comp_dict[node[1]["specie"]] += 1
if f1_comp_dict != f2_comp_dict:
return False
if IGRAPH_AVAILABLE:
ifrag1 = _igraph_from_nxgraph(frag1)
ifrag2 = _igraph_from_nxgraph(frag2)
return ifrag1.isomorphic_vf2(ifrag2, node_compat_fn=_compare)
nm = iso.categorical_node_match("specie", "ERROR")
return nx.is_isomorphic(frag1.to_undirected(), frag2.to_undirected(), node_match=nm)
class StructureGraph(MSONable):
"""
This is a class for annotating a Structure with
bond information, stored in the form of a graph. A "bond" does
not necessarily have to be a chemical bond, but can store any
kind of information that connects two Sites.
"""
def __init__(self, structure, graph_data=None):
"""
If constructing this class manually, use the `with_empty_graph`
method or `with_local_env_strategy` method (using an algorithm
provided by the `local_env` module, such as O'Keeffe).
This class that contains connection information:
relationships between sites represented by a Graph structure,
and an associated structure object.
This class uses the NetworkX package to store and operate
on the graph itself, but contains a lot of helper methods
to make associating a graph with a given crystallographic
structure easier.
Use cases for this include storing bonding information,
NMR J-couplings, Heisenberg exchange parameters, etc.
For periodic graphs, class stores information on the graph
edges of what lattice image the edge belongs to.
:param structure: a Structure object
:param graph_data: dict containing graph information in
dict format (not intended to be constructed manually,
see as_dict method for format)
"""
if isinstance(structure, StructureGraph):
# just make a copy from input
graph_data = structure.as_dict()["graphs"]
self.structure = structure
self.graph = nx.readwrite.json_graph.adjacency_graph(graph_data)
# tidy up edge attr dicts, reading to/from json duplicates
# information
for u, v, k, d in self.graph.edges(keys=True, data=True):
if "id" in d:
del d["id"]
if "key" in d:
del d["key"]
# ensure images are tuples (conversion to lists happens
# when serializing back from json), it's important images
# are hashable/immutable
if "to_jimage" in d:
d["to_jimage"] = tuple(d["to_jimage"])
if "from_jimage" in d:
d["from_jimage"] = tuple(d["from_jimage"])
@classmethod
def with_empty_graph(cls, structure, name="bonds", edge_weight_name=None, edge_weight_units=None):
"""
Constructor for StructureGraph, returns a StructureGraph
object with an empty graph (no edges, only nodes defined
that correspond to Sites in Structure).
:param structure (Structure):
:param name (str): name of graph, e.g. "bonds"
:param edge_weight_name (str): name of edge weights,
e.g. "bond_length" or "exchange_constant"
:param edge_weight_units (str): name of edge weight units
e.g. "Å" or "eV"
:return (StructureGraph):
"""
if edge_weight_name and (edge_weight_units is None):
raise ValueError(
"Please specify units associated "
"with your edge weights. Can be "
"empty string if arbitrary or "
"dimensionless."
)
# construct graph with one node per site
# graph attributes don't change behavior of graph,
# they're just for book-keeping
graph = nx.MultiDiGraph(
edge_weight_name=edge_weight_name,
edge_weight_units=edge_weight_units,
name=name,
)
graph.add_nodes_from(range(len(structure)))
graph_data = json_graph.adjacency_data(graph)
return cls(structure, graph_data=graph_data)
@staticmethod
def with_edges(structure, edges):
"""
Constructor for MoleculeGraph, using pre-existing or pre-defined edges
with optional edge parameters.
:param molecule: Molecule object
:param edges: dict representing the bonds of the functional
group (format: {(from_index, to_index, from_image, to_image): props},
where props is a dictionary of properties, including weight.
Props should be None if no additional properties are to be
specified.
:return: sg, a StructureGraph
"""
sg = StructureGraph.with_empty_graph(structure, name="bonds", edge_weight_name="weight", edge_weight_units="")
for edge, props in edges.items():
try:
from_index = edge[0]
to_index = edge[1]
from_image = edge[2]
to_image = edge[3]
except TypeError:
raise ValueError("Edges must be given as (from_index, to_index," " from_image, to_image) tuples")
if props is not None:
if "weight" in props.keys():
weight = props["weight"]
del props["weight"]
else:
weight = None
if len(props.items()) == 0:
props = None
else:
weight = None
nodes = sg.graph.nodes
if not (from_index in nodes and to_index in nodes):
raise ValueError(
"Edges cannot be added if nodes are not" " present in the graph. Please check your" " indices."
)
sg.add_edge(
from_index,
to_index,
from_jimage=from_image,
to_jimage=to_image,
weight=weight,
edge_properties=props,
)
sg.set_node_attributes()
return sg
@staticmethod
def with_local_env_strategy(structure, strategy, weights=False):
"""
Constructor for StructureGraph, using a strategy
from :Class: `pymatgen.analysis.local_env`.
:param structure: Structure object
:param strategy: an instance of a
:Class: `pymatgen.analysis.local_env.NearNeighbors` object
:param weights: if True, use weights from local_env class
(consult relevant class for their meaning)
:return:
"""
if not strategy.structures_allowed:
raise ValueError(
"Chosen strategy is not designed for use with structures! " "Please choose another strategy."
)
sg = StructureGraph.with_empty_graph(structure, name="bonds")
for n, neighbors in enumerate(strategy.get_all_nn_info(structure)):
for neighbor in neighbors:
# local_env will always try to add two edges
# for any one bond, one from site u to site v
# and another form site v to site u: this is
# harmless, so warn_duplicates=False
sg.add_edge(
from_index=n,
from_jimage=(0, 0, 0),
to_index=neighbor["site_index"],
to_jimage=neighbor["image"],
weight=neighbor["weight"] if weights else None,
warn_duplicates=False,
)
return sg
@property
def name(self):
"""
:return: Name of graph
"""
return self.graph.graph["name"]
@property
def edge_weight_name(self):
"""
:return: Name of the edge weight property of graph
"""
return self.graph.graph["edge_weight_name"]
@property
def edge_weight_unit(self):
"""
:return: Units of the edge weight property of graph
"""
return self.graph.graph["edge_weight_units"]
def add_edge(
self,
from_index,
to_index,
from_jimage=(0, 0, 0),
to_jimage=None,
weight=None,
warn_duplicates=True,
edge_properties=None,
):
"""
Add edge to graph.
Since physically a 'bond' (or other connection
between sites) doesn't have a direction, from_index,
from_jimage can be swapped with to_index, to_jimage.
However, images will always always be shifted so that
from_index < to_index and from_jimage becomes (0, 0, 0).
:param from_index: index of site connecting from
:param to_index: index of site connecting to
:param from_jimage (tuple of ints): lattice vector of periodic
image, e.g. (1, 0, 0) for periodic image in +x direction
:param to_jimage (tuple of ints): lattice vector of image
:param weight (float): e.g. bond length
:param warn_duplicates (bool): if True, will warn if
trying to add duplicate edges (duplicate edges will not
be added in either case)
:param edge_properties (dict): any other information to
store on graph edges, similar to Structure's site_properties
:return:
"""
# this is not necessary for the class to work, but
# just makes it neater
if to_index < from_index:
to_index, from_index = from_index, to_index
to_jimage, from_jimage = from_jimage, to_jimage
# constrain all from_jimages to be (0, 0, 0),
# initial version of this class worked even if
# from_jimage != (0, 0, 0), but making this
# assumption simplifies logic later
if not np.array_equal(from_jimage, (0, 0, 0)):
shift = from_jimage
from_jimage = np.subtract(from_jimage, shift)
to_jimage = np.subtract(to_jimage, shift)
# automatic detection of to_jimage if user doesn't specify
# will try and detect all equivalent images and add multiple
# edges if appropriate
if to_jimage is None:
# assume we want the closest site
warnings.warn("Please specify to_jimage to be unambiguous, " "trying to automatically detect.")
dist, to_jimage = self.structure[from_index].distance_and_image(self.structure[to_index])
if dist == 0:
# this will happen when from_index == to_index,
# typically in primitive single-atom lattices
images = [1, 0, 0], [0, 1, 0], [0, 0, 1]
dists = []
for image in images:
dists.append(
self.structure[from_index].distance_and_image(self.structure[from_index], jimage=image)[0]
)
dist = min(dists)
equiv_sites = self.structure.get_neighbors_in_shell(
self.structure[from_index].coords, dist, dist * 0.01, include_index=True
)
for nnsite in equiv_sites:
to_jimage = np.subtract(nnsite.frac_coords, self.structure[from_index].frac_coords)
to_jimage = np.round(to_jimage).astype(int)
self.add_edge(
from_index=from_index,
from_jimage=(0, 0, 0),
to_jimage=to_jimage,
to_index=nnsite.index,
)
return
# sanitize types
from_jimage, to_jimage = (
tuple(map(int, from_jimage)),
tuple(map(int, to_jimage)),
)
from_index, to_index = int(from_index), int(to_index)
# check we're not trying to add a duplicate edge
# there should only ever be at most one edge
# between a given (site, jimage) pair and another
# (site, jimage) pair
existing_edge_data = self.graph.get_edge_data(from_index, to_index)
if existing_edge_data:
for key, d in existing_edge_data.items():
if d["to_jimage"] == to_jimage:
if warn_duplicates:
warnings.warn(
"Trying to add an edge that already exists from "
"site {} to site {} in {}.".format(from_index, to_index, to_jimage)
)
return
# generic container for additional edge properties,
# similar to site properties
edge_properties = edge_properties or {}
if weight:
self.graph.add_edge(from_index, to_index, to_jimage=to_jimage, weight=weight, **edge_properties)
else:
self.graph.add_edge(from_index, to_index, to_jimage=to_jimage, **edge_properties)
def insert_node(
self,
i,
species,
coords,
coords_are_cartesian=False,
validate_proximity=False,
site_properties=None,
edges=None,
):
"""
A wrapper around Molecule.insert(), which also incorporates the new
site into the MoleculeGraph.
:param i: Index at which to insert the new site
:param species: Species for the new site
:param coords: 3x1 array representing coordinates of the new site
:param coords_are_cartesian: Whether coordinates are cartesian.
Defaults to False.
:param validate_proximity: For Molecule.insert(); if True (default
False), distance will be checked to ensure that site can be safely
added.
:param site_properties: Site properties for Molecule
:param edges: List of dicts representing edges to be added to the
MoleculeGraph. These edges must include the index of the new site i,
and all indices used for these edges should reflect the
MoleculeGraph AFTER the insertion, NOT before. Each dict should at
least have a "to_index" and "from_index" key, and can also have a
"weight" and a "properties" key.
:return:
"""
self.structure.insert(
i,
species,
coords,
coords_are_cartesian=coords_are_cartesian,
validate_proximity=validate_proximity,
properties=site_properties,
)
mapping = {}
for j in range(len(self.structure) - 1):
if j < i:
mapping[j] = j
else:
mapping[j] = j + 1
nx.relabel_nodes(self.graph, mapping, copy=False)
self.graph.add_node(i)
self.set_node_attributes()
if edges is not None:
for edge in edges:
try:
self.add_edge(
edge["from_index"],
edge["to_index"],
from_jimage=(0, 0, 0),
to_jimage=edge["to_jimage"],
weight=edge.get("weight", None),
edge_properties=edge.get("properties", None),
)
except KeyError:
raise RuntimeError("Some edges are invalid.")
def set_node_attributes(self):
"""
Gives each node a "specie" and a "coords" attribute, updated with the
current species and coordinates.
:return:
"""
species = {}
coords = {}
properties = {}
for node in self.graph.nodes():
species[node] = self.structure[node].specie.symbol
coords[node] = self.structure[node].coords
properties[node] = self.structure[node].properties
nx.set_node_attributes(self.graph, species, "specie")
nx.set_node_attributes(self.graph, coords, "coords")
nx.set_node_attributes(self.graph, properties, "properties")
def alter_edge(
self,
from_index,
to_index,
to_jimage=None,
new_weight=None,
new_edge_properties=None,
):
"""
Alters either the weight or the edge_properties of
an edge in the StructureGraph.
:param from_index: int
:param to_index: int
:param to_jimage: tuple
:param new_weight: alter_edge does not require
that weight be altered. As such, by default, this
is None. If weight is to be changed, it should be a
float.
:param new_edge_properties: alter_edge does not require
that edge_properties be altered. As such, by default,
this is None. If any edge properties are to be changed,
it should be a dictionary of edge properties to be changed.
:return:
"""
existing_edges = self.graph.get_edge_data(from_index, to_index)
# ensure that edge exists before attempting to change it
if not existing_edges:
raise ValueError(
"Edge between {} and {} cannot be altered;\
no edge exists between those sites.".format(
from_index, to_index
)
)
if to_jimage is None:
edge_index = 0
else:
for i, properties in existing_edges.items():
if properties["to_jimage"] == to_jimage:
edge_index = i
if new_weight is not None:
self.graph[from_index][to_index][edge_index]["weight"] = new_weight
if new_edge_properties is not None:
for prop in list(new_edge_properties.keys()):
self.graph[from_index][to_index][edge_index][prop] = new_edge_properties[prop]
def break_edge(self, from_index, to_index, to_jimage=None, allow_reverse=False):
"""
Remove an edge from the StructureGraph. If no image is given, this method will fail.
:param from_index: int
:param to_index: int
:param to_jimage: tuple
:param allow_reverse: If allow_reverse is True, then break_edge will
attempt to break both (from_index, to_index) and, failing that,
will attempt to break (to_index, from_index).
:return:
"""
# ensure that edge exists before attempting to remove it
existing_edges = self.graph.get_edge_data(from_index, to_index)
existing_reverse = None
if to_jimage is None:
raise ValueError("Image must be supplied, to avoid ambiguity.")
if existing_edges:
for i, properties in existing_edges.items():
if properties["to_jimage"] == to_jimage:
edge_index = i
self.graph.remove_edge(from_index, to_index, edge_index)
else:
if allow_reverse:
existing_reverse = self.graph.get_edge_data(to_index, from_index)
if existing_reverse:
for i, properties in existing_reverse.items():
if properties["to_jimage"] == to_jimage:
edge_index = i
self.graph.remove_edge(to_index, from_index, edge_index)
else:
raise ValueError(
"Edge cannot be broken between {} and {};\
no edge exists between those sites.".format(
from_index, to_index
)
)
def remove_nodes(self, indices):
"""
A wrapper for Molecule.remove_sites().
:param indices: list of indices in the current Molecule (and graph) to
be removed.
:return:
"""
self.structure.remove_sites(indices)
self.graph.remove_nodes_from(indices)
mapping = {}
for correct, current in enumerate(sorted(self.graph.nodes)):
mapping[current] = correct
nx.relabel_nodes(self.graph, mapping, copy=False)
self.set_node_attributes()
def substitute_group(
self,
index,
func_grp,
strategy,
bond_order=1,
graph_dict=None,
strategy_params=None,
):
"""
Builds off of Structure.substitute to replace an atom in self.structure
with a functional group. This method also amends self.graph to
incorporate the new functional group.
NOTE: Care must be taken to ensure that the functional group that is
substituted will not place atoms to close to each other, or violate the
dimensions of the Lattice.
:param index: Index of atom to substitute.
:param func_grp: Substituent molecule. There are two options:
1. Providing an actual Molecule as the input. The first atom
must be a DummySpecies X, indicating the position of
nearest neighbor. The second atom must be the next
nearest atom. For example, for a methyl group
substitution, func_grp should be X-CH3, where X is the
first site and C is the second site. What the code will
do is to remove the index site, and connect the nearest
neighbor to the C atom in CH3. The X-C bond indicates the
directionality to connect the atoms.
2. A string name. The molecule will be obtained from the
relevant template in func_groups.json.
:param strategy: Class from pymatgen.analysis.local_env.
:param bond_order: A specified bond order to calculate the bond
length between the attached functional group and the nearest
neighbor site. Defaults to 1.
:param graph_dict: Dictionary representing the bonds of the functional
group (format: {(u, v): props}, where props is a dictionary of
properties, including weight. If None, then the algorithm
will attempt to automatically determine bonds using one of
a list of strategies defined in pymatgen.analysis.local_env.
:param strategy_params: dictionary of keyword arguments for strategy.
If None, default parameters will be used.
:return:
"""
def map_indices(grp):
grp_map = {}
# Get indices now occupied by functional group
# Subtracting 1 because the dummy atom X should not count
atoms = len(grp) - 1
offset = len(self.structure) - atoms
for i in range(atoms):
grp_map[i] = i + offset
return grp_map
if isinstance(func_grp, Molecule):
func_grp = copy.deepcopy(func_grp)
else:
try:
func_grp = copy.deepcopy(FunctionalGroups[func_grp])
except Exception:
raise RuntimeError("Can't find functional group in list. " "Provide explicit coordinate instead")
self.structure.substitute(index, func_grp, bond_order=bond_order)
mapping = map_indices(func_grp)
# Remove dummy atom "X"
func_grp.remove_species("X")
if graph_dict is not None:
for (u, v) in graph_dict.keys():
edge_props = graph_dict[(u, v)]
if "to_jimage" in edge_props.keys():
to_jimage = edge_props["to_jimage"]
del edge_props["to_jimage"]
else:
# By default, assume that all edges should stay remain
# inside the initial image
to_jimage = (0, 0, 0)
if "weight" in edge_props.keys():
weight = edge_props["weight"]
del edge_props["weight"]
self.add_edge(
mapping[u],
mapping[v],
to_jimage=to_jimage,
weight=weight,
edge_properties=edge_props,
)
else:
if strategy_params is None:
strategy_params = {}
strat = strategy(**strategy_params)
for site in mapping.values():
neighbors = strat.get_nn_info(self.structure, site)
for neighbor in neighbors:
self.add_edge(
from_index=site,
from_jimage=(0, 0, 0),
to_index=neighbor["site_index"],
to_jimage=neighbor["image"],
weight=neighbor["weight"],
warn_duplicates=False,
)
def get_connected_sites(self, n, jimage=(0, 0, 0)):
"""
Returns a named tuple of neighbors of site n:
periodic_site, jimage, index, weight.
Index is the index of the corresponding site
in the original structure, weight can be
None if not defined.
:param n: index of Site in Structure
:param jimage: lattice vector of site
:return: list of ConnectedSite tuples,
sorted by closest first
"""
connected_sites = set()
connected_site_images = set()
out_edges = [(u, v, d, "out") for u, v, d in self.graph.out_edges(n, data=True)]
in_edges = [(u, v, d, "in") for u, v, d in self.graph.in_edges(n, data=True)]
for u, v, d, dir in out_edges + in_edges:
to_jimage = d["to_jimage"]
if dir == "in":
u, v = v, u
to_jimage = np.multiply(-1, to_jimage)
to_jimage = tuple(map(int, np.add(to_jimage, jimage)))
site_d = self.structure[v].as_dict()
site_d["abc"] = np.add(site_d["abc"], to_jimage).tolist()
site = PeriodicSite.from_dict(site_d)
# from_site if jimage arg != (0, 0, 0)
relative_jimage = np.subtract(to_jimage, jimage)
dist = self.structure[u].distance(self.structure[v], jimage=relative_jimage)
weight = d.get("weight", None)
if (v, to_jimage) not in connected_site_images:
connected_site = ConnectedSite(site=site, jimage=to_jimage, index=v, weight=weight, dist=dist)
connected_sites.add(connected_site)
connected_site_images.add((v, to_jimage))
# return list sorted by closest sites first
connected_sites = list(connected_sites)
connected_sites.sort(key=lambda x: x.dist)
return connected_sites
def get_coordination_of_site(self, n):
"""
Returns the number of neighbors of site n.
In graph terms, simply returns degree
of node corresponding to site n.
:param n: index of site
:return (int):
"""
number_of_self_loops = sum([1 for n, v in self.graph.edges(n) if n == v])
return self.graph.degree(n) - number_of_self_loops
def draw_graph_to_file(
self,
filename="graph",
diff=None,
hide_unconnected_nodes=False,
hide_image_edges=True,
edge_colors=False,
node_labels=False,
weight_labels=False,
image_labels=False,
color_scheme="VESTA",
keep_dot=False,
algo="fdp",
):
"""
Draws graph using GraphViz.
The networkx graph object itself can also be drawn
with networkx's in-built graph drawing methods, but
note that this might give misleading results for
multigraphs (edges are super-imposed on each other).
If visualization is difficult to interpret,
`hide_image_edges` can help, especially in larger
graphs.
:param filename: filename to output, will detect filetype
from extension (any graphviz filetype supported, such as
pdf or png)
:param diff (StructureGraph): an additional graph to
compare with, will color edges red that do not exist in diff
and edges green that are in diff graph but not in the
reference graph
:param hide_unconnected_nodes: if True, hide unconnected
nodes
:param hide_image_edges: if True, do not draw edges that
go through periodic boundaries
:param edge_colors (bool): if True, use node colors to
color edges
:param node_labels (bool): if True, label nodes with
species and site index
:param weight_labels (bool): if True, label edges with
weights
:param image_labels (bool): if True, label edges with
their periodic images (usually only used for debugging,
edges to periodic images always appear as dashed lines)
:param color_scheme (str): "VESTA" or "JMOL"
:param keep_dot (bool): keep GraphViz .dot file for later
visualization
:param algo: any graphviz algo, "neato" (for simple graphs)
or "fdp" (for more crowded graphs) usually give good outputs
:return:
"""
if not which(algo):
raise RuntimeError("StructureGraph graph drawing requires " "GraphViz binaries to be in the path.")
# Developer note: NetworkX also has methods for drawing
# graphs using matplotlib, these also work here. However,
# a dedicated tool like GraphViz allows for much easier
# control over graph appearance and also correctly displays
# mutli-graphs (matplotlib can superimpose multiple edges).
g = self.graph.copy()
g.graph = {"nodesep": 10.0, "dpi": 300, "overlap": "false"}
# add display options for nodes
for n in g.nodes():
# get label by species name
label = "{}({})".format(str(self.structure[n].specie), n) if node_labels else ""
# use standard color scheme for nodes
c = EL_COLORS[color_scheme].get(str(self.structure[n].specie.symbol), [0, 0, 0])
# get contrasting font color
# magic numbers account for perceived luminescence
# https://stackoverflow.com/questions/1855884/determine-font-color-based-on-background-color
fontcolor = "#000000" if 1 - (c[0] * 0.299 + c[1] * 0.587 + c[2] * 0.114) / 255 < 0.5 else "#ffffff"
# convert color to hex string
color = "#{:02x}{:02x}{:02x}".format(c[0], c[1], c[2])
g.add_node(
n,
fillcolor=color,
fontcolor=fontcolor,
label=label,
fontname="Helvetica-bold",
style="filled",
shape="circle",
)
edges_to_delete = []
# add display options for edges
for u, v, k, d in g.edges(keys=True, data=True):
# retrieve from/to images, set as origin if not defined
to_image = d["to_jimage"]
# set edge style
d["style"] = "solid"
if to_image != (0, 0, 0):
d["style"] = "dashed"
if hide_image_edges:
edges_to_delete.append((u, v, k))
# don't show edge directions
d["arrowhead"] = "none"
# only add labels for images that are not the origin
if image_labels:
d["headlabel"] = "" if to_image == (0, 0, 0) else "to {}".format((to_image))
d["arrowhead"] = "normal" if d["headlabel"] else "none"
# optionally color edges using node colors
color_u = g.nodes[u]["fillcolor"]
color_v = g.nodes[v]["fillcolor"]
d["color_uv"] = "{};0.5:{};0.5".format(color_u, color_v) if edge_colors else "#000000"
# optionally add weights to graph
if weight_labels:
units = g.graph.get("edge_weight_units", "")
if d.get("weight"):
d["label"] = "{:.2f} {}".format(d["weight"], units)
# update edge with our new style attributes
g.edges[u, v, k].update(d)
# optionally remove periodic image edges,
# these can be confusing due to periodic boundaries
if hide_image_edges:
for edge_to_delete in edges_to_delete:
g.remove_edge(*edge_to_delete)
# optionally hide unconnected nodes,
# these can appear when removing periodic edges
if hide_unconnected_nodes:
g = g.subgraph([n for n in g.degree() if g.degree()[n] != 0])
# optionally highlight differences with another graph
if diff:
diff = self.diff(diff, strict=True)
green_edges = []
red_edges = []
for u, v, k, d in g.edges(keys=True, data=True):
if (u, v, d["to_jimage"]) in diff["self"]:
# edge has been deleted
red_edges.append((u, v, k))
elif (u, v, d["to_jimage"]) in diff["other"]:
# edge has been added
green_edges.append((u, v, k))
for u, v, k in green_edges:
g.edges[u, v, k].update({"color_uv": "#00ff00"})
for u, v, k in red_edges:
g.edges[u, v, k].update({"color_uv": "#ff0000"})
basename, extension = os.path.splitext(filename)
extension = extension[1:]
write_dot(g, basename + ".dot")
with open(filename, "w") as f:
args = [algo, "-T", extension, basename + ".dot"]
rs = subprocess.Popen(args, stdout=f, stdin=subprocess.PIPE, close_fds=True)
rs.communicate()
if rs.returncode != 0:
raise RuntimeError("{} exited with return code {}.".format(algo, rs.returncode))
if not keep_dot:
os.remove(basename + ".dot")
@property
def types_and_weights_of_connections(self):
"""
Extract a dictionary summarizing the types and weights
of edges in the graph.
:return: A dictionary with keys specifying the
species involved in a connection in alphabetical order
(e.g. string 'Fe-O') and values which are a list of
weights for those connections (e.g. bond lengths).
"""
def get_label(u, v):
u_label = self.structure[u].species_string
v_label = self.structure[v].species_string
return "-".join(sorted((u_label, v_label)))
types = defaultdict(list)
for u, v, d in self.graph.edges(data=True):
label = get_label(u, v)
types[label].append(d["weight"])
return dict(types)
@property
def weight_statistics(self):
"""
Extract a statistical summary of edge weights present in
the graph.
:return: A dict with an 'all_weights' list, 'minimum',
'maximum', 'median', 'mean', 'std_dev'
"""
all_weights = [d.get("weight", None) for u, v, d in self.graph.edges(data=True)]
stats = describe(all_weights, nan_policy="omit")
return {
"all_weights": all_weights,
"min": stats.minmax[0],
"max": stats.minmax[1],
"mean": stats.mean,
"variance": stats.variance,
}
def types_of_coordination_environments(self, anonymous=False):
"""
Extract information on the different co-ordination environments
present in the graph.
:param anonymous: if anonymous, will replace specie names
with A, B, C, etc.
:return: a list of co-ordination environments,
e.g. ['Mo-S(6)', 'S-Mo(3)']
"""
motifs = set()
for idx, site in enumerate(self.structure):
centre_sp = site.species_string
connected_sites = self.get_connected_sites(idx)
connected_species = [connected_site.site.species_string for connected_site in connected_sites]
labels = []
for sp in set(connected_species):
count = connected_species.count(sp)
labels.append((count, sp))
labels = sorted(labels, reverse=True)
if anonymous:
mapping = {centre_sp: "A"}
available_letters = [chr(66 + i) for i in range(25)]
for label in labels:
sp = label[1]
if sp not in mapping:
mapping[sp] = available_letters.pop(0)
centre_sp = "A"
labels = [(label[0], mapping[label[1]]) for label in labels]
labels = ["{}({})".format(label[1], label[0]) for label in labels]
motif = "{}-{}".format(centre_sp, ",".join(labels))
motifs.add(motif)
return sorted(list(motifs))
def as_dict(self):
"""
As in :Class: `pymatgen.core.Structure` except
with using `to_dict_of_dicts` from NetworkX
to store graph information.
"""
d = {
"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"structure": self.structure.as_dict(),
"graphs": json_graph.adjacency_data(self.graph),
}
return d
@classmethod
def from_dict(cls, d):
"""
As in :Class: `pymatgen.core.Structure` except
restoring graphs using `from_dict_of_dicts`
from NetworkX to restore graph information.
"""
s = Structure.from_dict(d["structure"])
return cls(s, d["graphs"])
def __mul__(self, scaling_matrix):
"""
Replicates the graph, creating a supercell,
intelligently joining together
edges that lie on periodic boundaries.
In principle, any operations on the expanded
graph could also be done on the original
graph, but a larger graph can be easier to
visualize and reason about.
:param scaling_matrix: same as Structure.__mul__
:return:
"""
# Developer note: a different approach was also trialed, using
# a simple Graph (instead of MultiDiGraph), with node indices
# representing both site index and periodic image. Here, the
# number of nodes != number of sites in the Structure. This
# approach has many benefits, but made it more difficult to
# keep the graph in sync with its corresponding Structure.
# Broadly, it would be easier to multiply the Structure
# *before* generating the StructureGraph, but this isn't
# possible when generating the graph using critic2 from
# charge density.
# Multiplication works by looking for the expected position
# of an image node, and seeing if that node exists in the
# supercell. If it does, the edge is updated. This is more
# computationally expensive than just keeping track of the
# which new lattice images present, but should hopefully be
# easier to extend to a general 3x3 scaling matrix.
# code adapted from Structure.__mul__
scale_matrix = np.array(scaling_matrix, np.int16)
if scale_matrix.shape != (3, 3):
scale_matrix = np.array(scale_matrix * np.eye(3), np.int16)
else:
# TODO: test __mul__ with full 3x3 scaling matrices
raise NotImplementedError("Not tested with 3x3 scaling matrices yet.")
new_lattice = Lattice(np.dot(scale_matrix, self.structure.lattice.matrix))
f_lat = lattice_points_in_supercell(scale_matrix)
c_lat = new_lattice.get_cartesian_coords(f_lat)
new_sites = []
new_graphs = []
for v in c_lat:
# create a map of nodes from original graph to its image
mapping = {n: n + len(new_sites) for n in range(len(self.structure))}
for idx, site in enumerate(self.structure):
s = PeriodicSite(
site.species,
site.coords + v,
new_lattice,
properties=site.properties,
coords_are_cartesian=True,
to_unit_cell=False,
)
new_sites.append(s)
new_graphs.append(nx.relabel_nodes(self.graph, mapping, copy=True))
new_structure = Structure.from_sites(new_sites)
# merge all graphs into one big graph
new_g = nx.MultiDiGraph()
for new_graph in new_graphs:
new_g = nx.union(new_g, new_graph)
edges_to_remove = [] # tuple of (u, v, k)
edges_to_add = [] # tuple of (u, v, attr_dict)
# list of new edges inside supercell
# for duplicate checking
edges_inside_supercell = [{u, v} for u, v, d in new_g.edges(data=True) if d["to_jimage"] == (0, 0, 0)]
new_periodic_images = []
orig_lattice = self.structure.lattice
# use k-d tree to match given position to an
# existing Site in Structure
kd_tree = KDTree(new_structure.cart_coords)
# tolerance in Å for sites to be considered equal
# this could probably be a lot smaller
tol = 0.05
for u, v, k, d in new_g.edges(keys=True, data=True):
to_jimage = d["to_jimage"] # for node v
# reduce unnecessary checking
if to_jimage != (0, 0, 0):
# get index in original site
n_u = u % len(self.structure)
n_v = v % len(self.structure)
# get fractional co-ordinates of where atoms defined
# by edge are expected to be, relative to original
# lattice (keeping original lattice has
# significant benefits)
v_image_frac = np.add(self.structure[n_v].frac_coords, to_jimage)
u_frac = self.structure[n_u].frac_coords
# using the position of node u as a reference,
# get relative Cartesian co-ordinates of where
# atoms defined by edge are expected to be
v_image_cart = orig_lattice.get_cartesian_coords(v_image_frac)
u_cart = orig_lattice.get_cartesian_coords(u_frac)
v_rel = np.subtract(v_image_cart, u_cart)
# now retrieve position of node v in
# new supercell, and get asgolute Cartesian
# co-ordinates of where atoms defined by edge
# are expected to be
v_expec = new_structure[u].coords + v_rel
# now search in new structure for these atoms
# query returns (distance, index)
v_present = kd_tree.query(v_expec)
v_present = v_present[1] if v_present[0] <= tol else None
# check if image sites now present in supercell
# and if so, delete old edge that went through
# periodic boundary
if v_present is not None:
new_u = u
new_v = v_present
new_d = d.copy()
# node now inside supercell
new_d["to_jimage"] = (0, 0, 0)
edges_to_remove.append((u, v, k))
# make sure we don't try to add duplicate edges
# will remove two edges for everyone one we add
if {new_u, new_v} not in edges_inside_supercell:
# normalize direction
if new_v < new_u:
new_u, new_v = new_v, new_u
edges_inside_supercell.append({new_u, new_v})
edges_to_add.append((new_u, new_v, new_d))
else:
# want to find new_v such that we have
# full periodic boundary conditions
# so that nodes on one side of supercell
# are connected to nodes on opposite side
v_expec_frac = new_structure.lattice.get_fractional_coords(v_expec)
# find new to_jimage
# use np.around to fix issues with finite precision leading to incorrect image
v_expec_image = np.around(v_expec_frac, decimals=3)
v_expec_image = v_expec_image - v_expec_image % 1
v_expec_frac = np.subtract(v_expec_frac, v_expec_image)
v_expec = new_structure.lattice.get_cartesian_coords(v_expec_frac)
v_present = kd_tree.query(v_expec)
v_present = v_present[1] if v_present[0] <= tol else None
if v_present is not None:
new_u = u
new_v = v_present
new_d = d.copy()
new_to_jimage = tuple(map(int, v_expec_image))
# normalize direction
if new_v < new_u:
new_u, new_v = new_v, new_u
new_to_jimage = tuple(np.multiply(-1, d["to_jimage"]).astype(int))
new_d["to_jimage"] = new_to_jimage
edges_to_remove.append((u, v, k))
if (new_u, new_v, new_to_jimage) not in new_periodic_images:
edges_to_add.append((new_u, new_v, new_d))
new_periodic_images.append((new_u, new_v, new_to_jimage))
logger.debug("Removing {} edges, adding {} new edges.".format(len(edges_to_remove), len(edges_to_add)))
# add/delete marked edges
for edges_to_remove in edges_to_remove:
new_g.remove_edge(*edges_to_remove)
for (u, v, d) in edges_to_add:
new_g.add_edge(u, v, **d)
# return new instance of StructureGraph with supercell
d = {
"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"structure": new_structure.as_dict(),
"graphs": json_graph.adjacency_data(new_g),
}
sg = StructureGraph.from_dict(d)
return sg
def __rmul__(self, other):
return self.__mul__(other)
@classmethod
def _edges_to_string(cls, g):
header = "from to to_image "
header_line = "---- ---- ------------"
edge_weight_name = g.graph["edge_weight_name"]
if edge_weight_name:
print_weights = ["weight"]
edge_label = g.graph["edge_weight_name"]
edge_weight_units = g.graph["edge_weight_units"]
if edge_weight_units:
edge_label += " ({})".format(edge_weight_units)
header += " {}".format(edge_label)
header_line += " {}".format("-" * max([18, len(edge_label)]))
else:
print_weights = False
s = header + "\n" + header_line + "\n"
edges = list(g.edges(data=True))
# sort edges for consistent ordering
edges.sort(key=itemgetter(0, 1))
if print_weights:
for u, v, data in edges:
s += "{:4} {:4} {:12} {:.3e}\n".format(
u, v, str(data.get("to_jimage", (0, 0, 0))), data.get("weight", 0)
)
else:
for u, v, data in edges:
s += "{:4} {:4} {:12}\n".format(u, v, str(data.get("to_jimage", (0, 0, 0))))
return s
def __str__(self):
s = "Structure Graph"
s += "\nStructure: \n{}".format(self.structure.__str__())
s += "\nGraph: {}\n".format(self.name)
s += self._edges_to_string(self.graph)
return s
def __repr__(self):
s = "Structure Graph"
s += "\nStructure: \n{}".format(self.structure.__repr__())
s += "\nGraph: {}\n".format(self.name)
s += self._edges_to_string(self.graph)
return s
def __len__(self):
"""
:return: length of Structure / number of nodes in graph
"""
return len(self.structure)
def sort(self, key=None, reverse=False):
"""
Same as Structure.sort(), also remaps nodes in graph.
:param key:
:param reverse:
:return:
"""
old_structure = self.structure.copy()
# sort Structure
self.structure._sites = sorted(self.structure._sites, key=key, reverse=reverse)
# apply Structure ordering to graph
mapping = {idx: self.structure.index(site) for idx, site in enumerate(old_structure)}
self.graph = nx.relabel_nodes(self.graph, mapping, copy=True)
# normalize directions of edges
edges_to_remove = []
edges_to_add = []
for u, v, k, d in self.graph.edges(keys=True, data=True):
if v < u:
new_v, new_u, new_d = u, v, d.copy()
new_d["to_jimage"] = tuple(np.multiply(-1, d["to_jimage"]).astype(int))
edges_to_remove.append((u, v, k))
edges_to_add.append((new_u, new_v, new_d))
# add/delete marked edges
for edges_to_remove in edges_to_remove:
self.graph.remove_edge(*edges_to_remove)
for (u, v, d) in edges_to_add:
self.graph.add_edge(u, v, **d)
def __copy__(self):
return StructureGraph.from_dict(self.as_dict())
def __eq__(self, other):
"""
Two StructureGraphs are equal if they have equal Structures,
and have the same edges between Sites. Edge weights can be
different and StructureGraphs can still be considered equal.
:param other: StructureGraph
:return (bool):
"""
# sort for consistent node indices
# PeriodicSite should have a proper __hash__() value,
# using its frac_coords as a convenient key
mapping = {tuple(site.frac_coords): self.structure.index(site) for site in other.structure}
other_sorted = other.__copy__()
other_sorted.sort(key=lambda site: mapping[tuple(site.frac_coords)])
edges = {(u, v, d["to_jimage"]) for u, v, d in self.graph.edges(keys=False, data=True)}
edges_other = {(u, v, d["to_jimage"]) for u, v, d in other_sorted.graph.edges(keys=False, data=True)}
return (edges == edges_other) and (self.structure == other_sorted.structure)
def diff(self, other, strict=True):
"""
Compares two StructureGraphs. Returns dict with
keys 'self', 'other', 'both' with edges that are
present in only one StructureGraph ('self' and
'other'), and edges that are present in both.
The Jaccard distance is a simple measure of the
dissimilarity between two StructureGraphs (ignoring
edge weights), and is defined by 1 - (size of the
intersection / size of the union) of the sets of
edges. This is returned with key 'dist'.
Important note: all node indices are in terms
of the StructureGraph this method is called
from, not the 'other' StructureGraph: there
is no guarantee the node indices will be the
same if the underlying Structures are ordered
differently.
:param other: StructureGraph
:param strict: if False, will compare bonds
from different Structures, with node indices
replaced by Species strings, will not count
number of occurrences of bonds
:return:
"""
if self.structure != other.structure and strict:
return ValueError("Meaningless to compare StructureGraphs if " "corresponding Structures are different.")
if strict:
# sort for consistent node indices
# PeriodicSite should have a proper __hash__() value,
# using its frac_coords as a convenient key
mapping = {tuple(site.frac_coords): self.structure.index(site) for site in other.structure}
other_sorted = other.__copy__()
other_sorted.sort(key=lambda site: mapping[tuple(site.frac_coords)])
edges = {(u, v, d["to_jimage"]) for u, v, d in self.graph.edges(keys=False, data=True)}
edges_other = {(u, v, d["to_jimage"]) for u, v, d in other_sorted.graph.edges(keys=False, data=True)}
else:
edges = {
(str(self.structure[u].specie), str(self.structure[v].specie))
for u, v, d in self.graph.edges(keys=False, data=True)
}
edges_other = {
(str(other.structure[u].specie), str(other.structure[v].specie))
for u, v, d in other.graph.edges(keys=False, data=True)
}
if len(edges) == 0 and len(edges_other) == 0:
jaccard_dist = 0 # by definition
else:
jaccard_dist = 1 - len(edges.intersection(edges_other)) / len(edges.union(edges_other))
return {
"self": edges - edges_other,
"other": edges_other - edges,
"both": edges.intersection(edges_other),
"dist": jaccard_dist,
}
def get_subgraphs_as_molecules(self, use_weights=False):
"""
Retrieve subgraphs as molecules, useful for extracting
molecules from periodic crystals.
Will only return unique molecules, not any duplicates
present in the crystal (a duplicate defined as an
isomorphic subgraph).
:param use_weights (bool): If True, only treat subgraphs
as isomorphic if edges have the same weights. Typically,
this means molecules will need to have the same bond
lengths to be defined as duplicates, otherwise bond
lengths can differ. This is a fairly robust approach,
but will treat e.g. enantiomers as being duplicates.
:return: list of unique Molecules in Structure
"""
# creating a supercell is an easy way to extract
# molecules (and not, e.g., layers of a 2D crystal)
# without adding extra logic
if getattr(self, "_supercell_sg", None) is None:
self._supercell_sg = supercell_sg = self * (3, 3, 3)
# make undirected to find connected subgraphs
supercell_sg.graph = nx.Graph(supercell_sg.graph)
# find subgraphs
all_subgraphs = [supercell_sg.graph.subgraph(c) for c in nx.connected_components(supercell_sg.graph)]
# discount subgraphs that lie across *supercell* boundaries
# these will subgraphs representing crystals
molecule_subgraphs = []
for subgraph in all_subgraphs:
intersects_boundary = any(d["to_jimage"] != (0, 0, 0) for u, v, d in subgraph.edges(data=True))
if not intersects_boundary:
molecule_subgraphs.append(nx.MultiDiGraph(subgraph))
# add specie names to graph to be able to test for isomorphism
for subgraph in molecule_subgraphs:
for n in subgraph:
subgraph.add_node(n, specie=str(supercell_sg.structure[n].specie))
# now define how we test for isomorphism
def node_match(n1, n2):
return n1["specie"] == n2["specie"]
def edge_match(e1, e2):
if use_weights:
return e1["weight"] == e2["weight"]
return True
# prune duplicate subgraphs
unique_subgraphs = []
for subgraph in molecule_subgraphs:
already_present = [
nx.is_isomorphic(subgraph, g, node_match=node_match, edge_match=edge_match) for g in unique_subgraphs
]
if not any(already_present):
unique_subgraphs.append(subgraph)
# get Molecule objects for each subgraph
molecules = []
for subgraph in unique_subgraphs:
coords = [supercell_sg.structure[n].coords for n in subgraph.nodes()]
species = [supercell_sg.structure[n].specie for n in subgraph.nodes()]
molecule = Molecule(species, coords)
# shift so origin is at center of mass
molecule = molecule.get_centered_molecule()
molecules.append(molecule)
return molecules
class MolGraphSplitError(Exception):
"""
Raised when a molecule graph is failed to split into two disconnected
subgraphs
"""
pass
class MoleculeGraph(MSONable):
"""
This is a class for annotating a Molecule with
bond information, stored in the form of a graph. A "bond" does
not necessarily have to be a chemical bond, but can store any
kind of information that connects two Sites.
"""
def __init__(self, molecule, graph_data=None):
"""
If constructing this class manually, use the `with_empty_graph`
method or `with_local_env_strategy` method (using an algorithm
provided by the `local_env` module, such as O'Keeffe).
This class that contains connection information:
relationships between sites represented by a Graph structure,
and an associated structure object.
This class uses the NetworkX package to store and operate
on the graph itself, but contains a lot of helper methods
to make associating a graph with a given molecule easier.
Use cases for this include storing bonding information,
NMR J-couplings, Heisenberg exchange parameters, etc.
:param molecule: Molecule object
:param graph_data: dict containing graph information in
dict format (not intended to be constructed manually,
see as_dict method for format)
"""
if isinstance(molecule, MoleculeGraph):
# just make a copy from input
graph_data = molecule.as_dict()["graphs"]
self.molecule = molecule
self.graph = nx.readwrite.json_graph.adjacency_graph(graph_data)
# tidy up edge attr dicts, reading to/from json duplicates
# information
for u, v, k, d in self.graph.edges(keys=True, data=True):
if "id" in d:
del d["id"]
if "key" in d:
del d["key"]
# ensure images are tuples (conversion to lists happens
# when serializing back from json), it's important images
# are hashable/immutable
if "to_jimage" in d:
d["to_jimage"] = tuple(d["to_jimage"])
if "from_jimage" in d:
d["from_jimage"] = tuple(d["from_jimage"])
self.set_node_attributes()
@classmethod
def with_empty_graph(cls, molecule, name="bonds", edge_weight_name=None, edge_weight_units=None):
"""
Constructor for MoleculeGraph, returns a MoleculeGraph
object with an empty graph (no edges, only nodes defined
that correspond to Sites in Molecule).
:param molecule (Molecule):
:param name (str): name of graph, e.g. "bonds"
:param edge_weight_name (str): name of edge weights,
e.g. "bond_length" or "exchange_constant"
:param edge_weight_units (str): name of edge weight units
e.g. "Å" or "eV"
:return (MoleculeGraph):
"""
if edge_weight_name and (edge_weight_units is None):
raise ValueError(
"Please specify units associated "
"with your edge weights. Can be "
"empty string if arbitrary or "
"dimensionless."
)
# construct graph with one node per site
# graph attributes don't change behavior of graph,
# they're just for book-keeping
graph = nx.MultiDiGraph(
edge_weight_name=edge_weight_name,
edge_weight_units=edge_weight_units,
name=name,
)
graph.add_nodes_from(range(len(molecule)))
graph_data = json_graph.adjacency_data(graph)
return cls(molecule, graph_data=graph_data)
@staticmethod
def with_edges(molecule, edges):
"""
Constructor for MoleculeGraph, using pre-existing or pre-defined edges
with optional edge parameters.
:param molecule: Molecule object
:param edges: dict representing the bonds of the functional
group (format: {(u, v): props}, where props is a dictionary of
properties, including weight. Props should be None if no
additional properties are to be specified.
:return: mg, a MoleculeGraph
"""
mg = MoleculeGraph.with_empty_graph(molecule, name="bonds", edge_weight_name="weight", edge_weight_units="")
for edge, props in edges.items():
try:
from_index = edge[0]
to_index = edge[1]
except TypeError:
raise ValueError("Edges must be given as (from_index, to_index)" "tuples")
if props is not None:
if "weight" in props.keys():
weight = props["weight"]
del props["weight"]
else:
weight = None
if len(props.items()) == 0:
props = None
else:
weight = None
nodes = mg.graph.nodes
if not (from_index in nodes and to_index in nodes):
raise ValueError(
"Edges cannot be added if nodes are not" " present in the graph. Please check your" " indices."
)
mg.add_edge(from_index, to_index, weight=weight, edge_properties=props)
mg.set_node_attributes()
return mg
@staticmethod
def with_local_env_strategy(molecule, strategy):
"""
Constructor for MoleculeGraph, using a strategy
from :Class: `pymatgen.analysis.local_env`.
:param molecule: Molecule object
:param strategy: an instance of a
:Class: `pymatgen.analysis.local_env.NearNeighbors` object
:return: mg, a MoleculeGraph
"""
if not strategy.molecules_allowed:
raise ValueError(
"Chosen strategy is not designed for use with molecules! " "Please choose another strategy."
)
extend_structure = strategy.extend_structure_molecules
mg = MoleculeGraph.with_empty_graph(molecule, name="bonds", edge_weight_name="weight", edge_weight_units="")
# NearNeighbor classes only (generally) work with structures
# molecules have to be boxed first
coords = molecule.cart_coords
if extend_structure:
a = max(coords[:, 0]) - min(coords[:, 0]) + 100
b = max(coords[:, 1]) - min(coords[:, 1]) + 100
c = max(coords[:, 2]) - min(coords[:, 2]) + 100
structure = molecule.get_boxed_structure(a, b, c, no_cross=True, reorder=False)
else:
structure = None
for n in range(len(molecule)):
if structure is None:
neighbors = strategy.get_nn_info(molecule, n)
else:
neighbors = strategy.get_nn_info(structure, n)
for neighbor in neighbors:
# all bonds in molecules should not cross
# (artificial) periodic boundaries
if not np.array_equal(neighbor["image"], [0, 0, 0]):
continue
if n > neighbor["site_index"]:
from_index = neighbor["site_index"]
to_index = n
else:
from_index = n
to_index = neighbor["site_index"]
mg.add_edge(
from_index=from_index,
to_index=to_index,
weight=neighbor["weight"],
warn_duplicates=False,
)
duplicates = []
for edge in mg.graph.edges:
if edge[2] != 0:
duplicates.append(edge)
for duplicate in duplicates:
mg.graph.remove_edge(duplicate[0], duplicate[1], key=duplicate[2])
mg.set_node_attributes()
return mg
@property
def name(self):
"""
:return: Name of graph
"""
return self.graph.graph["name"]
@property
def edge_weight_name(self):
"""
:return: Name of the edge weight property of graph
"""
return self.graph.graph["edge_weight_name"]
@property
def edge_weight_unit(self):
"""
:return: Units of the edge weight property of graph
"""
return self.graph.graph["edge_weight_units"]
def add_edge(
self,
from_index,
to_index,
weight=None,
warn_duplicates=True,
edge_properties=None,
):
"""
Add edge to graph.
Since physically a 'bond' (or other connection
between sites) doesn't have a direction, from_index,
from_jimage can be swapped with to_index, to_jimage.
However, images will always always be shifted so that
from_index < to_index and from_jimage becomes (0, 0, 0).
:param from_index: index of site connecting from
:param to_index: index of site connecting to
:param weight (float): e.g. bond length
:param warn_duplicates (bool): if True, will warn if
trying to add duplicate edges (duplicate edges will not
be added in either case)
:param edge_properties (dict): any other information to
store on graph edges, similar to Structure's site_properties
:return:
"""
# this is not necessary for the class to work, but
# just makes it neater
if to_index < from_index:
to_index, from_index = from_index, to_index
# sanitize types
from_index, to_index = int(from_index), int(to_index)
# check we're not trying to add a duplicate edge
# there should only ever be at most one edge
# between two sites
existing_edge_data = self.graph.get_edge_data(from_index, to_index)
if existing_edge_data and warn_duplicates:
warnings.warn(
"Trying to add an edge that already exists from " "site {} to site {}.".format(from_index, to_index)
)
return
# generic container for additional edge properties,
# similar to site properties
edge_properties = edge_properties or {}
if weight:
self.graph.add_edge(from_index, to_index, weight=weight, **edge_properties)
else:
self.graph.add_edge(from_index, to_index, **edge_properties)
def insert_node(
self,
i,
species,
coords,
validate_proximity=False,
site_properties=None,
edges=None,
):
"""
A wrapper around Molecule.insert(), which also incorporates the new
site into the MoleculeGraph.
:param i: Index at which to insert the new site
:param species: Species for the new site
:param coords: 3x1 array representing coordinates of the new site
:param validate_proximity: For Molecule.insert(); if True (default
False), distance will be checked to ensure that site can be safely
added.
:param site_properties: Site properties for Molecule
:param edges: List of dicts representing edges to be added to the
MoleculeGraph. These edges must include the index of the new site i,
and all indices used for these edges should reflect the
MoleculeGraph AFTER the insertion, NOT before. Each dict should at
least have a "to_index" and "from_index" key, and can also have a
"weight" and a "properties" key.
:return:
"""
self.molecule.insert(
i,
species,
coords,
validate_proximity=validate_proximity,
properties=site_properties,
)
mapping = {}
for j in range(len(self.molecule) - 1):
if j < i:
mapping[j] = j
else:
mapping[j] = j + 1
nx.relabel_nodes(self.graph, mapping, copy=False)
self.graph.add_node(i)
self.set_node_attributes()
if edges is not None:
for edge in edges:
try:
self.add_edge(
edge["from_index"],
edge["to_index"],
weight=edge.get("weight", None),
edge_properties=edge.get("properties", None),
)
except KeyError:
raise RuntimeError("Some edges are invalid.")
def set_node_attributes(self):
"""
Replicates molecule site properties (specie, coords, etc.) in the
MoleculeGraph.
:return:
"""
species = {}
coords = {}
properties = {}
for node in self.graph.nodes():
species[node] = self.molecule[node].specie.symbol
coords[node] = self.molecule[node].coords
properties[node] = self.molecule[node].properties
nx.set_node_attributes(self.graph, species, "specie")
nx.set_node_attributes(self.graph, coords, "coords")
nx.set_node_attributes(self.graph, properties, "properties")
def alter_edge(self, from_index, to_index, new_weight=None, new_edge_properties=None):
"""
Alters either the weight or the edge_properties of
an edge in the MoleculeGraph.
:param from_index: int
:param to_index: int
:param new_weight: alter_edge does not require
that weight be altered. As such, by default, this
is None. If weight is to be changed, it should be a
float.
:param new_edge_properties: alter_edge does not require
that edge_properties be altered. As such, by default,
this is None. If any edge properties are to be changed,
it should be a dictionary of edge properties to be changed.
:return:
"""
existing_edge = self.graph.get_edge_data(from_index, to_index)
# ensure that edge exists before attempting to change it
if not existing_edge:
raise ValueError(
"Edge between {} and {} cannot be altered;\
no edge exists between those sites.".format(
from_index, to_index
)
)
# Third index should always be 0 because there should only be one edge between any two nodes
if new_weight is not None:
self.graph[from_index][to_index][0]["weight"] = new_weight
if new_edge_properties is not None:
for prop in list(new_edge_properties.keys()):
self.graph[from_index][to_index][0][prop] = new_edge_properties[prop]
def break_edge(self, from_index, to_index, allow_reverse=False):
"""
Remove an edge from the MoleculeGraph
:param from_index: int
:param to_index: int
:param allow_reverse: If allow_reverse is True, then break_edge will
attempt to break both (from_index, to_index) and, failing that,
will attempt to break (to_index, from_index).
:return:
"""
# ensure that edge exists before attempting to remove it
existing_edge = self.graph.get_edge_data(from_index, to_index)
existing_reverse = None
if existing_edge:
self.graph.remove_edge(from_index, to_index)
else:
if allow_reverse:
existing_reverse = self.graph.get_edge_data(to_index, from_index)
if existing_reverse:
self.graph.remove_edge(to_index, from_index)
else:
raise ValueError(
"Edge cannot be broken between {} and {};\
no edge exists between those sites.".format(
from_index, to_index
)
)
def remove_nodes(self, indices):
"""
A wrapper for Molecule.remove_sites().
:param indices: list of indices in the current Molecule (and graph) to
be removed.
:return:
"""
self.molecule.remove_sites(indices)
self.graph.remove_nodes_from(indices)
mapping = {}
for correct, current in enumerate(sorted(self.graph.nodes)):
mapping[current] = correct
nx.relabel_nodes(self.graph, mapping, copy=False)
self.set_node_attributes()
def get_disconnected_fragments(self):
"""
Determine if the MoleculeGraph is connected. If it is not, separate the
MoleculeGraph into different MoleculeGraphs, where each resulting
MoleculeGraph is a disconnected subgraph of the original.
Currently, this function naively assigns the charge
of the total molecule to a single submolecule. A
later effort will be to actually accurately assign
charge.
NOTE: This function does not modify the original
MoleculeGraph. It creates a copy, modifies that, and
returns two or more new MoleculeGraph objects.
:return: list of MoleculeGraphs
"""
if nx.is_weakly_connected(self.graph):
return [copy.deepcopy(self)]
original = copy.deepcopy(self)
sub_mols = list()
# Had to use nx.weakly_connected_components because of deprecation
# of nx.weakly_connected_component_subgraphs
subgraphs = [original.graph.subgraph(c) for c in nx.weakly_connected_components(original.graph)]
for subg in subgraphs:
nodes = sorted(list(subg.nodes))
# Molecule indices are essentially list-based, so node indices
# must be remapped, incrementing from 0
mapping = {}
for i, n in enumerate(nodes):
mapping[n] = i
# just give charge to whatever subgraph has node with index 0
# TODO: actually figure out how to distribute charge
if 0 in nodes:
charge = self.molecule.charge
else:
charge = 0
# relabel nodes in graph to match mapping
new_graph = nx.relabel_nodes(subg, mapping)
species = nx.get_node_attributes(new_graph, "specie")
coords = nx.get_node_attributes(new_graph, "coords")
raw_props = nx.get_node_attributes(new_graph, "properties")
properties = {}
for prop_set in raw_props.values():
for prop in prop_set.keys():
if prop in properties:
properties[prop].append(prop_set[prop])
else:
properties[prop] = [prop_set[prop]]
# Site properties must be present for all atoms in the molecule
# in order to be used for Molecule instantiation
for k, v in properties.items():
if len(v) != len(species):
del properties[k]
new_mol = Molecule(species, coords, charge=charge, site_properties=properties)
graph_data = json_graph.adjacency_data(new_graph)
# create new MoleculeGraph
sub_mols.append(MoleculeGraph(new_mol, graph_data=graph_data))
return sub_mols
def split_molecule_subgraphs(self, bonds, allow_reverse=False, alterations=None):
"""
Split MoleculeGraph into two or more MoleculeGraphs by
breaking a set of bonds. This function uses
MoleculeGraph.break_edge repeatedly to create
disjoint graphs (two or more separate molecules).
This function does not only alter the graph
information, but also changes the underlying
Molecules.
If the bonds parameter does not include sufficient
bonds to separate two molecule fragments, then this
function will fail.
Currently, this function naively assigns the charge
of the total molecule to a single submolecule. A
later effort will be to actually accurately assign
charge.
NOTE: This function does not modify the original
MoleculeGraph. It creates a copy, modifies that, and
returns two or more new MoleculeGraph objects.
:param bonds: list of tuples (from_index, to_index)
representing bonds to be broken to split the MoleculeGraph.
:param alterations: a dict {(from_index, to_index): alt},
where alt is a dictionary including weight and/or edge
properties to be changed following the split.
:param allow_reverse: If allow_reverse is True, then break_edge will
attempt to break both (from_index, to_index) and, failing that,
will attempt to break (to_index, from_index).
:return: list of MoleculeGraphs
"""
self.set_node_attributes()
original = copy.deepcopy(self)
for bond in bonds:
original.break_edge(bond[0], bond[1], allow_reverse=allow_reverse)
if nx.is_weakly_connected(original.graph):
raise MolGraphSplitError(
"Cannot split molecule; \
MoleculeGraph is still connected."
)
# alter any bonds before partition, to avoid remapping
if alterations is not None:
for (u, v) in alterations.keys():
if "weight" in alterations[(u, v)]:
weight = alterations[(u, v)]["weight"]
del alterations[(u, v)]["weight"]
edge_properties = alterations[(u, v)] if len(alterations[(u, v)]) != 0 else None
original.alter_edge(u, v, new_weight=weight, new_edge_properties=edge_properties)
else:
original.alter_edge(u, v, new_edge_properties=alterations[(u, v)])
return original.get_disconnected_fragments()
def build_unique_fragments(self):
"""
Find all possible fragment combinations of the MoleculeGraphs (in other
words, all connected induced subgraphs)
:return:
"""
self.set_node_attributes()
graph = self.graph.to_undirected()
# find all possible fragments, aka connected induced subgraphs
frag_dict = {}
for ii in range(1, len(self.molecule)):
for combination in combinations(graph.nodes, ii):
mycomp = []
for idx in combination:
mycomp.append(str(self.molecule[idx].specie))
mycomp = "".join(sorted(mycomp))
subgraph = nx.subgraph(graph, combination)
if nx.is_connected(subgraph):
mykey = mycomp + str(len(subgraph.edges()))
if mykey not in frag_dict:
frag_dict[mykey] = [copy.deepcopy(subgraph)]
else:
frag_dict[mykey].append(copy.deepcopy(subgraph))
# narrow to all unique fragments using graph isomorphism
unique_frag_dict = {}
for key in frag_dict:
unique_frags = []
for frag in frag_dict[key]:
found = False
for f in unique_frags:
if _isomorphic(frag, f):
found = True
break
if not found:
unique_frags.append(frag)
unique_frag_dict[key] = copy.deepcopy(unique_frags)
# convert back to molecule graphs
unique_mol_graph_dict = {}
for key in unique_frag_dict:
unique_mol_graph_list = []
for fragment in unique_frag_dict[key]:
mapping = {e: i for i, e in enumerate(sorted(fragment.nodes))}
remapped = nx.relabel_nodes(fragment, mapping)
species = nx.get_node_attributes(remapped, "specie")
coords = nx.get_node_attributes(remapped, "coords")
edges = {}
for from_index, to_index, key in remapped.edges:
edge_props = fragment.get_edge_data(from_index, to_index, key=key)
edges[(from_index, to_index)] = edge_props
unique_mol_graph_list.append(
self.with_edges(
Molecule(species=species, coords=coords, charge=self.molecule.charge),
edges,
)
)
frag_key = (
str(unique_mol_graph_list[0].molecule.composition.alphabetical_formula)
+ " E"
+ str(len(unique_mol_graph_list[0].graph.edges()))
)
unique_mol_graph_dict[frag_key] = copy.deepcopy(unique_mol_graph_list)
return unique_mol_graph_dict
def substitute_group(
self,
index,
func_grp,
strategy,
bond_order=1,
graph_dict=None,
strategy_params=None,
):
"""
Builds off of Molecule.substitute to replace an atom in self.molecule
with a functional group. This method also amends self.graph to
incorporate the new functional group.
NOTE: using a MoleculeGraph will generally produce a different graph
compared with using a Molecule or str (when not using graph_dict).
:param index: Index of atom to substitute.
:param func_grp: Substituent molecule. There are three options:
1. Providing an actual molecule as the input. The first atom
must be a DummySpecies X, indicating the position of
nearest neighbor. The second atom must be the next
nearest atom. For example, for a methyl group
substitution, func_grp should be X-CH3, where X is the
first site and C is the second site. What the code will
do is to remove the index site, and connect the nearest
neighbor to the C atom in CH3. The X-C bond indicates the
directionality to connect the atoms.
2. A string name. The molecule will be obtained from the
relevant template in func_groups.json.
3. A MoleculeGraph object.
:param strategy: Class from pymatgen.analysis.local_env.
:param bond_order: A specified bond order to calculate the bond
length between the attached functional group and the nearest
neighbor site. Defaults to 1.
:param graph_dict: Dictionary representing the bonds of the functional
group (format: {(u, v): props}, where props is a dictionary of
properties, including weight. If None, then the algorithm
will attempt to automatically determine bonds using one of
a list of strategies defined in pymatgen.analysis.local_env.
:param strategy_params: dictionary of keyword arguments for strategy.
If None, default parameters will be used.
:return:
"""
def map_indices(grp):
grp_map = {}
# Get indices now occupied by functional group
# Subtracting 1 because the dummy atom X should not count
atoms = len(grp) - 1
offset = len(self.molecule) - atoms
for i in range(atoms):
grp_map[i] = i + offset
return grp_map
# Work is simplified if a graph is already in place
if isinstance(func_grp, MoleculeGraph):
self.molecule.substitute(index, func_grp.molecule, bond_order=bond_order)
mapping = map_indices(func_grp.molecule)
for (u, v) in list(func_grp.graph.edges()):
edge_props = func_grp.graph.get_edge_data(u, v)[0]
weight = None
if "weight" in edge_props.keys():
weight = edge_props["weight"]
del edge_props["weight"]
self.add_edge(mapping[u], mapping[v], weight=weight, edge_properties=edge_props)
else:
if isinstance(func_grp, Molecule):
func_grp = copy.deepcopy(func_grp)
else:
try:
func_grp = copy.deepcopy(FunctionalGroups[func_grp])
except Exception:
raise RuntimeError("Can't find functional group in list. " "Provide explicit coordinate instead")
self.molecule.substitute(index, func_grp, bond_order=bond_order)
mapping = map_indices(func_grp)
# Remove dummy atom "X"
func_grp.remove_species("X")
if graph_dict is not None:
for (u, v) in graph_dict.keys():
edge_props = graph_dict[(u, v)]
if "weight" in edge_props.keys():
weight = edge_props["weight"]
del edge_props["weight"]
self.add_edge(
mapping[u],
mapping[v],
weight=weight,
edge_properties=edge_props,
)
else:
if strategy_params is None:
strategy_params = {}
strat = strategy(**strategy_params)
graph = self.with_local_env_strategy(func_grp, strat)
for (u, v) in list(graph.graph.edges()):
edge_props = graph.graph.get_edge_data(u, v)[0]
weight = None
if "weight" in edge_props.keys():
weight = edge_props["weight"]
del edge_props["weight"]
if 0 not in list(graph.graph.nodes()):
# If graph indices have different indexing
u, v = (u - 1), (v - 1)
self.add_edge(
mapping[u],
mapping[v],
weight=weight,
edge_properties=edge_props,
)
def replace_group(
self,
index,
func_grp,
strategy,
bond_order=1,
graph_dict=None,
strategy_params=None,
):
"""
Builds off of Molecule.substitute and MoleculeGraph.substitute_group
to replace a functional group in self.molecule with a functional group.
This method also amends self.graph to incorporate the new functional
group.
TODO: Figure out how to replace into a ring structure.
:param index: Index of atom to substitute.
:param func_grp: Substituent molecule. There are three options:
1. Providing an actual molecule as the input. The first atom
must be a DummySpecies X, indicating the position of
nearest neighbor. The second atom must be the next
nearest atom. For example, for a methyl group
substitution, func_grp should be X-CH3, where X is the
first site and C is the second site. What the code will
do is to remove the index site, and connect the nearest
neighbor to the C atom in CH3. The X-C bond indicates the
directionality to connect the atoms.
2. A string name. The molecule will be obtained from the
relevant template in func_groups.json.
3. A MoleculeGraph object.
:param strategy: Class from pymatgen.analysis.local_env.
:param bond_order: A specified bond order to calculate the bond
length between the attached functional group and the nearest
neighbor site. Defaults to 1.
:param graph_dict: Dictionary representing the bonds of the functional
group (format: {(u, v): props}, where props is a dictionary of
properties, including weight. If None, then the algorithm
will attempt to automatically determine bonds using one of
a list of strategies defined in pymatgen.analysis.local_env.
:param strategy_params: dictionary of keyword arguments for strategy.
If None, default parameters will be used.
:return:
"""
self.set_node_attributes()
neighbors = self.get_connected_sites(index)
# If the atom at index is terminal
if len(neighbors) == 1:
self.substitute_group(
index,
func_grp,
strategy,
bond_order=bond_order,
graph_dict=graph_dict,
strategy_params=strategy_params,
)
else:
rings = self.find_rings(including=[index])
if len(rings) != 0:
raise RuntimeError(
"Currently functional group replacement" "cannot occur at an atom within a ring" "structure."
)
to_remove = set()
sizes = dict()
disconnected = self.graph.to_undirected()
disconnected.remove_node(index)
for neighbor in neighbors:
sizes[neighbor[2]] = len(nx.descendants(disconnected, neighbor[2]))
keep = max(sizes, key=lambda x: sizes[x])
for i in sizes.keys():
if i != keep:
to_remove.add(i)
self.remove_nodes(list(to_remove))
self.substitute_group(
index,
func_grp,
strategy,
bond_order=bond_order,
graph_dict=graph_dict,
strategy_params=strategy_params,
)
def find_rings(self, including=None):
"""
Find ring structures in the MoleculeGraph.
:param including: list of site indices. If
including is not None, then find_rings will
only return those rings including the specified
sites. By default, this parameter is None, and
all rings will be returned.
:return: dict {index:cycle}. Each
entry will be a ring (cycle, in graph theory terms) including the index
found in the Molecule. If there is no cycle including an index, the
value will be an empty list.
"""
# Copies self.graph such that all edges (u, v) matched by edges (v, u)
undirected = self.graph.to_undirected()
directed = undirected.to_directed()
cycles_nodes = []
cycles_edges = []
# Remove all two-edge cycles
all_cycles = [c for c in nx.simple_cycles(directed) if len(c) > 2]
# Using to_directed() will mean that each cycle always appears twice
# So, we must also remove duplicates
unique_sorted = []
unique_cycles = []
for cycle in all_cycles:
if sorted(cycle) not in unique_sorted:
unique_sorted.append(sorted(cycle))
unique_cycles.append(cycle)
if including is None:
cycles_nodes = unique_cycles
else:
for i in including:
for cycle in unique_cycles:
if i in cycle and cycle not in cycles_nodes:
cycles_nodes.append(cycle)
for cycle in cycles_nodes:
edges = []
for i, e in enumerate(cycle):
edges.append((cycle[i - 1], e))
cycles_edges.append(edges)
return cycles_edges
def get_connected_sites(self, n):
"""
Returns a named tuple of neighbors of site n:
periodic_site, jimage, index, weight.
Index is the index of the corresponding site
in the original structure, weight can be
None if not defined.
:param n: index of Site in Molecule
:param jimage: lattice vector of site
:return: list of ConnectedSite tuples,
sorted by closest first
"""
connected_sites = set()
out_edges = list(self.graph.out_edges(n, data=True))
in_edges = list(self.graph.in_edges(n, data=True))
for u, v, d in out_edges + in_edges:
weight = d.get("weight", None)
if v == n:
site = self.molecule[u]
dist = self.molecule[v].distance(self.molecule[u])
connected_site = ConnectedSite(site=site, jimage=(0, 0, 0), index=u, weight=weight, dist=dist)
else:
site = self.molecule[v]
dist = self.molecule[u].distance(self.molecule[v])
connected_site = ConnectedSite(site=site, jimage=(0, 0, 0), index=v, weight=weight, dist=dist)
connected_sites.add(connected_site)
# return list sorted by closest sites first
connected_sites = list(connected_sites)
connected_sites.sort(key=lambda x: x.dist)
return connected_sites
def get_coordination_of_site(self, n):
"""
Returns the number of neighbors of site n.
In graph terms, simply returns degree
of node corresponding to site n.
:param n: index of site
:return (int):
"""
number_of_self_loops = sum([1 for n, v in self.graph.edges(n) if n == v])
return self.graph.degree(n) - number_of_self_loops
def draw_graph_to_file(
self,
filename="graph",
diff=None,
hide_unconnected_nodes=False,
hide_image_edges=True,
edge_colors=False,
node_labels=False,
weight_labels=False,
image_labels=False,
color_scheme="VESTA",
keep_dot=False,
algo="fdp",
):
"""
Draws graph using GraphViz.
The networkx graph object itself can also be drawn
with networkx's in-built graph drawing methods, but
note that this might give misleading results for
multigraphs (edges are super-imposed on each other).
If visualization is difficult to interpret,
`hide_image_edges` can help, especially in larger
graphs.
:param filename: filename to output, will detect filetype
from extension (any graphviz filetype supported, such as
pdf or png)
:param diff (StructureGraph): an additional graph to
compare with, will color edges red that do not exist in diff
and edges green that are in diff graph but not in the
reference graph
:param hide_unconnected_nodes: if True, hide unconnected
nodes
:param hide_image_edges: if True, do not draw edges that
go through periodic boundaries
:param edge_colors (bool): if True, use node colors to
color edges
:param node_labels (bool): if True, label nodes with
species and site index
:param weight_labels (bool): if True, label edges with
weights
:param image_labels (bool): if True, label edges with
their periodic images (usually only used for debugging,
edges to periodic images always appear as dashed lines)
:param color_scheme (str): "VESTA" or "JMOL"
:param keep_dot (bool): keep GraphViz .dot file for later
visualization
:param algo: any graphviz algo, "neato" (for simple graphs)
or "fdp" (for more crowded graphs) usually give good outputs
:return:
"""
if not which(algo):
raise RuntimeError("StructureGraph graph drawing requires " "GraphViz binaries to be in the path.")
# Developer note: NetworkX also has methods for drawing
# graphs using matplotlib, these also work here. However,
# a dedicated tool like GraphViz allows for much easier
# control over graph appearance and also correctly displays
# mutli-graphs (matplotlib can superimpose multiple edges).
g = self.graph.copy()
g.graph = {"nodesep": 10.0, "dpi": 300, "overlap": "false"}
# add display options for nodes
for n in g.nodes():
# get label by species name
label = "{}({})".format(str(self.molecule[n].specie), n) if node_labels else ""
# use standard color scheme for nodes
c = EL_COLORS[color_scheme].get(str(self.molecule[n].specie.symbol), [0, 0, 0])
# get contrasting font color
# magic numbers account for perceived luminescence
# https://stackoverflow.com/questions/1855884/determine-font-color-based-on-background-color
fontcolor = "#000000" if 1 - (c[0] * 0.299 + c[1] * 0.587 + c[2] * 0.114) / 255 < 0.5 else "#ffffff"
# convert color to hex string
color = "#{:02x}{:02x}{:02x}".format(c[0], c[1], c[2])
g.add_node(
n,
fillcolor=color,
fontcolor=fontcolor,
label=label,
fontname="Helvetica-bold",
style="filled",
shape="circle",
)
edges_to_delete = []
# add display options for edges
for u, v, k, d in g.edges(keys=True, data=True):
# retrieve from/to images, set as origin if not defined
if "to_image" in d:
to_image = d["to_jimage"]
else:
to_image = (0, 0, 0)
# set edge style
d["style"] = "solid"
if to_image != (0, 0, 0):
d["style"] = "dashed"
if hide_image_edges:
edges_to_delete.append((u, v, k))
# don't show edge directions
d["arrowhead"] = "none"
# only add labels for images that are not the origin
if image_labels:
d["headlabel"] = "" if to_image == (0, 0, 0) else "to {}".format((to_image))
d["arrowhead"] = "normal" if d["headlabel"] else "none"
# optionally color edges using node colors
color_u = g.node[u]["fillcolor"]
color_v = g.node[v]["fillcolor"]
d["color_uv"] = "{};0.5:{};0.5".format(color_u, color_v) if edge_colors else "#000000"
# optionally add weights to graph
if weight_labels:
units = g.graph.get("edge_weight_units", "")
if d.get("weight"):
d["label"] = "{:.2f} {}".format(d["weight"], units)
# update edge with our new style attributes
g.edges[u, v, k].update(d)
# optionally remove periodic image edges,
# these can be confusing due to periodic boundaries
if hide_image_edges:
for edge_to_delete in edges_to_delete:
g.remove_edge(*edge_to_delete)
# optionally hide unconnected nodes,
# these can appear when removing periodic edges
if hide_unconnected_nodes:
g = g.subgraph([n for n in g.degree() if g.degree()[n] != 0])
# optionally highlight differences with another graph
if diff:
diff = self.diff(diff, strict=True)
green_edges = []
red_edges = []
for u, v, k, d in g.edges(keys=True, data=True):
if (u, v, d["to_jimage"]) in diff["self"]:
# edge has been deleted
red_edges.append((u, v, k))
elif (u, v, d["to_jimage"]) in diff["other"]:
# edge has been added
green_edges.append((u, v, k))
for u, v, k in green_edges:
g.edges[u, v, k].update({"color_uv": "#00ff00"})
for u, v, k in red_edges:
g.edges[u, v, k].update({"color_uv": "#ff0000"})
basename, extension = os.path.splitext(filename)
extension = extension[1:]
write_dot(g, basename + ".dot")
with open(filename, "w") as f:
args = [algo, "-T", extension, basename + ".dot"]
rs = subprocess.Popen(args, stdout=f, stdin=subprocess.PIPE, close_fds=True)
rs.communicate()
if rs.returncode != 0:
raise RuntimeError("{} exited with return code {}.".format(algo, rs.returncode))
if not keep_dot:
os.remove(basename + ".dot")
def as_dict(self):
"""
As in :Class: `pymatgen.core.Molecule` except
with using `to_dict_of_dicts` from NetworkX
to store graph information.
"""
d = {
"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"molecule": self.molecule.as_dict(),
"graphs": json_graph.adjacency_data(self.graph),
}
return d
@classmethod
def from_dict(cls, d):
"""
As in :Class: `pymatgen.core.Molecule` except
restoring graphs using `from_dict_of_dicts`
from NetworkX to restore graph information.
"""
m = Molecule.from_dict(d["molecule"])
return cls(m, d["graphs"])
@classmethod
def _edges_to_string(cls, g):
header = "from to to_image "
header_line = "---- ---- ------------"
edge_weight_name = g.graph["edge_weight_name"]
if edge_weight_name:
print_weights = ["weight"]
edge_label = g.graph["edge_weight_name"]
edge_weight_units = g.graph["edge_weight_units"]
if edge_weight_units:
edge_label += " ({})".format(edge_weight_units)
header += " {}".format(edge_label)
header_line += " {}".format("-" * max([18, len(edge_label)]))
else:
print_weights = False
s = header + "\n" + header_line + "\n"
edges = list(g.edges(data=True))
# sort edges for consistent ordering
edges.sort(key=itemgetter(0, 1))
if print_weights:
for u, v, data in edges:
s += "{:4} {:4} {:12} {:.3e}\n".format(
u, v, str(data.get("to_jimage", (0, 0, 0))), data.get("weight", 0)
)
else:
for u, v, data in edges:
s += "{:4} {:4} {:12}\n".format(u, v, str(data.get("to_jimage", (0, 0, 0))))
return s
def __str__(self):
s = "Molecule Graph"
s += "\nMolecule: \n{}".format(self.molecule.__str__())
s += "\nGraph: {}\n".format(self.name)
s += self._edges_to_string(self.graph)
return s
def __repr__(self):
s = "Molecule Graph"
s += "\nMolecule: \n{}".format(self.molecule.__repr__())
s += "\nGraph: {}\n".format(self.name)
s += self._edges_to_string(self.graph)
return s
def __len__(self):
"""
:return: length of Molecule / number of nodes in graph
"""
return len(self.molecule)
def sort(self, key=None, reverse=False):
"""
Same as Molecule.sort(), also remaps nodes in graph.
:param key:
:param reverse:
:return:
"""
old_molecule = self.molecule.copy()
# sort Molecule
self.molecule._sites = sorted(self.molecule._sites, key=key, reverse=reverse)
# apply Molecule ordering to graph
mapping = {idx: self.molecule.index(site) for idx, site in enumerate(old_molecule)}
self.graph = nx.relabel_nodes(self.graph, mapping, copy=True)
# normalize directions of edges
edges_to_remove = []
edges_to_add = []
for u, v, k, d in self.graph.edges(keys=True, data=True):
if v < u:
new_v, new_u, new_d = u, v, d.copy()
new_d["to_jimage"] = (0, 0, 0)
edges_to_remove.append((u, v, k))
edges_to_add.append((new_u, new_v, new_d))
# add/delete marked edges
for edges_to_remove in edges_to_remove:
self.graph.remove_edge(*edges_to_remove)
for (u, v, d) in edges_to_add:
self.graph.add_edge(u, v, **d)
def __copy__(self):
return MoleculeGraph.from_dict(self.as_dict())
def __eq__(self, other):
"""
Two MoleculeGraphs are equal if they have equal Molecules,
and have the same edges between Sites. Edge weights can be
different and MoleculeGraphs can still be considered equal.
:param other: MoleculeGraph
:return (bool):
"""
# sort for consistent node indices
# PeriodicSite should have a proper __hash__() value,
# using its frac_coords as a convenient key
try:
mapping = {tuple(site.coords): self.molecule.index(site) for site in other.molecule}
except ValueError:
return False
other_sorted = other.__copy__()
other_sorted.sort(key=lambda site: mapping[tuple(site.coords)])
edges = {(u, v) for u, v, d in self.graph.edges(keys=False, data=True)}
edges_other = {(u, v) for u, v, d in other_sorted.graph.edges(keys=False, data=True)}
return (edges == edges_other) and (self.molecule == other_sorted.molecule)
def isomorphic_to(self, other):
"""
Checks if the graphs of two MoleculeGraphs are isomorphic to one
another. In order to prevent problems with misdirected edges, both
graphs are converted into undirected nx.Graph objects.
:param other: MoleculeGraph object to be compared.
:return: bool
"""
if len(self.molecule) != len(other.molecule):
return False
if self.molecule.composition.alphabetical_formula != other.molecule.composition.alphabetical_formula:
return False
if len(self.graph.edges()) != len(other.graph.edges()):
return False
return _isomorphic(self.graph, other.graph)
def diff(self, other, strict=True):
"""
Compares two MoleculeGraphs. Returns dict with
keys 'self', 'other', 'both' with edges that are
present in only one MoleculeGraph ('self' and
'other'), and edges that are present in both.
The Jaccard distance is a simple measure of the
dissimilarity between two MoleculeGraphs (ignoring
edge weights), and is defined by 1 - (size of the
intersection / size of the union) of the sets of
edges. This is returned with key 'dist'.
Important note: all node indices are in terms
of the MoleculeGraph this method is called
from, not the 'other' MoleculeGraph: there
is no guarantee the node indices will be the
same if the underlying Molecules are ordered
differently.
:param other: MoleculeGraph
:param strict: if False, will compare bonds
from different Molecules, with node indices
replaced by Species strings, will not count
number of occurrences of bonds
:return:
"""
if self.molecule != other.molecule and strict:
return ValueError("Meaningless to compare MoleculeGraphs if " "corresponding Molecules are different.")
if strict:
# sort for consistent node indices
# PeriodicSite should have a proper __hash__() value,
# using its frac_coords as a convenient key
mapping = {tuple(site.frac_coords): self.molecule.index(site) for site in other.molecule}
other_sorted = other.__copy__()
other_sorted.sort(key=lambda site: mapping[tuple(site.frac_coords)])
edges = {(u, v, d.get("to_jimage", (0, 0, 0))) for u, v, d in self.graph.edges(keys=False, data=True)}
edges_other = {
(u, v, d.get("to_jimage", (0, 0, 0))) for u, v, d in other_sorted.graph.edges(keys=False, data=True)
}
else:
edges = {
(str(self.molecule[u].specie), str(self.molecule[v].specie))
for u, v, d in self.graph.edges(keys=False, data=True)
}
edges_other = {
(str(other.structure[u].specie), str(other.structure[v].specie))
for u, v, d in other.graph.edges(keys=False, data=True)
}
if len(edges) == 0 and len(edges_other) == 0:
jaccard_dist = 0 # by definition
else:
jaccard_dist = 1 - len(edges.intersection(edges_other)) / len(edges.union(edges_other))
return {
"self": edges - edges_other,
"other": edges_other - edges,
"both": edges.intersection(edges_other),
"dist": jaccard_dist,
}
| [((1010, 1037), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1027, 1037), False, 'import logging\n'), ((1284, 1348), 'collections.namedtuple', 'namedtuple', (['"""ConnectedSite"""', '"""site, jimage, index, weight, dist"""'], {}), "('ConnectedSite', 'site, jimage, index, weight, dist')\n", (1294, 1348), False, 'from collections import defaultdict, namedtuple\n'), ((1725, 1739), 'igraph.Graph', 'igraph.Graph', ([], {}), '()\n', (1737, 1739), False, 'import igraph\n'), ((3045, 3090), 'networkx.algorithms.isomorphism.categorical_node_match', 'iso.categorical_node_match', (['"""specie"""', '"""ERROR"""'], {}), "('specie', 'ERROR')\n", (3071, 3090), True, 'import networkx.algorithms.isomorphism as iso\n'), ((4817, 4868), 'networkx.readwrite.json_graph.adjacency_graph', 'nx.readwrite.json_graph.adjacency_graph', (['graph_data'], {}), '(graph_data)\n', (4856, 4868), True, 'import networkx as nx\n'), ((6606, 6709), 'networkx.MultiDiGraph', 'nx.MultiDiGraph', ([], {'edge_weight_name': 'edge_weight_name', 'edge_weight_units': 'edge_weight_units', 'name': 'name'}), '(edge_weight_name=edge_weight_name, edge_weight_units=\n edge_weight_units, name=name)\n', (6621, 6709), True, 'import networkx as nx\n'), ((6826, 6858), 'networkx.readwrite.json_graph.adjacency_data', 'json_graph.adjacency_data', (['graph'], {}), '(graph)\n', (6851, 6858), False, 'from networkx.readwrite import json_graph\n'), ((17359, 17408), 'networkx.relabel_nodes', 'nx.relabel_nodes', (['self.graph', 'mapping'], {'copy': '(False)'}), '(self.graph, mapping, copy=False)\n', (17375, 17408), True, 'import networkx as nx\n'), ((18521, 18574), 'networkx.set_node_attributes', 'nx.set_node_attributes', (['self.graph', 'species', '"""specie"""'], {}), "(self.graph, species, 'specie')\n", (18543, 18574), True, 'import networkx as nx\n'), ((18583, 18635), 'networkx.set_node_attributes', 'nx.set_node_attributes', (['self.graph', 'coords', '"""coords"""'], {}), "(self.graph, coords, 'coords')\n", (18605, 18635), True, 'import networkx as nx\n'), ((18644, 18704), 'networkx.set_node_attributes', 'nx.set_node_attributes', (['self.graph', 'properties', '"""properties"""'], {}), "(self.graph, properties, 'properties')\n", (18666, 18704), True, 'import networkx as nx\n'), ((22699, 22748), 'networkx.relabel_nodes', 'nx.relabel_nodes', (['self.graph', 'mapping'], {'copy': '(False)'}), '(self.graph, mapping, copy=False)\n', (22715, 22748), True, 'import networkx as nx\n'), ((36121, 36152), 'networkx.drawing.nx_agraph.write_dot', 'write_dot', (['g', "(basename + '.dot')"], {}), "(g, basename + '.dot')\n", (36130, 36152), False, 'from networkx.drawing.nx_agraph import write_dot\n'), ((37217, 37234), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (37228, 37234), False, 'from collections import defaultdict, namedtuple\n'), ((37773, 37813), 'scipy.stats.describe', 'describe', (['all_weights'], {'nan_policy': '"""omit"""'}), "(all_weights, nan_policy='omit')\n", (37781, 37813), False, 'from scipy.stats import describe\n'), ((40237, 40272), 'pymatgen.core.Structure.from_dict', 'Structure.from_dict', (["d['structure']"], {}), "(d['structure'])\n", (40256, 40272), False, 'from pymatgen.core import Lattice, Molecule, PeriodicSite, Structure\n'), ((41874, 41908), 'numpy.array', 'np.array', (['scaling_matrix', 'np.int16'], {}), '(scaling_matrix, np.int16)\n', (41882, 41908), True, 'import numpy as np\n'), ((42283, 42324), 'pymatgen.util.coord.lattice_points_in_supercell', 'lattice_points_in_supercell', (['scale_matrix'], {}), '(scale_matrix)\n', (42310, 42324), False, 'from pymatgen.util.coord import lattice_points_in_supercell\n'), ((43097, 43128), 'pymatgen.core.Structure.from_sites', 'Structure.from_sites', (['new_sites'], {}), '(new_sites)\n', (43117, 43128), False, 'from pymatgen.core import Lattice, Molecule, PeriodicSite, Structure\n'), ((43192, 43209), 'networkx.MultiDiGraph', 'nx.MultiDiGraph', ([], {}), '()\n', (43207, 43209), True, 'import networkx as nx\n'), ((43781, 43814), 'scipy.spatial.KDTree', 'KDTree', (['new_structure.cart_coords'], {}), '(new_structure.cart_coords)\n', (43787, 43814), False, 'from scipy.spatial import KDTree\n'), ((51358, 51406), 'networkx.relabel_nodes', 'nx.relabel_nodes', (['self.graph', 'mapping'], {'copy': '(True)'}), '(self.graph, mapping, copy=True)\n', (51374, 51406), True, 'import networkx as nx\n'), ((57024, 57052), 'networkx.Graph', 'nx.Graph', (['supercell_sg.graph'], {}), '(supercell_sg.graph)\n', (57032, 57052), True, 'import networkx as nx\n'), ((60599, 60650), 'networkx.readwrite.json_graph.adjacency_graph', 'nx.readwrite.json_graph.adjacency_graph', (['graph_data'], {}), '(graph_data)\n', (60638, 60650), True, 'import networkx as nx\n'), ((62417, 62520), 'networkx.MultiDiGraph', 'nx.MultiDiGraph', ([], {'edge_weight_name': 'edge_weight_name', 'edge_weight_units': 'edge_weight_units', 'name': 'name'}), '(edge_weight_name=edge_weight_name, edge_weight_units=\n edge_weight_units, name=name)\n', (62432, 62520), True, 'import networkx as nx\n'), ((62636, 62668), 'networkx.readwrite.json_graph.adjacency_data', 'json_graph.adjacency_data', (['graph'], {}), '(graph)\n', (62661, 62668), False, 'from networkx.readwrite import json_graph\n'), ((70973, 71022), 'networkx.relabel_nodes', 'nx.relabel_nodes', (['self.graph', 'mapping'], {'copy': '(False)'}), '(self.graph, mapping, copy=False)\n', (70989, 71022), True, 'import networkx as nx\n'), ((72010, 72063), 'networkx.set_node_attributes', 'nx.set_node_attributes', (['self.graph', 'species', '"""specie"""'], {}), "(self.graph, species, 'specie')\n", (72032, 72063), True, 'import networkx as nx\n'), ((72072, 72124), 'networkx.set_node_attributes', 'nx.set_node_attributes', (['self.graph', 'coords', '"""coords"""'], {}), "(self.graph, coords, 'coords')\n", (72094, 72124), True, 'import networkx as nx\n'), ((72133, 72193), 'networkx.set_node_attributes', 'nx.set_node_attributes', (['self.graph', 'properties', '"""properties"""'], {}), "(self.graph, properties, 'properties')\n", (72155, 72193), True, 'import networkx as nx\n'), ((75401, 75450), 'networkx.relabel_nodes', 'nx.relabel_nodes', (['self.graph', 'mapping'], {'copy': '(False)'}), '(self.graph, mapping, copy=False)\n', (75417, 75450), True, 'import networkx as nx\n'), ((76190, 76224), 'networkx.is_weakly_connected', 'nx.is_weakly_connected', (['self.graph'], {}), '(self.graph)\n', (76212, 76224), True, 'import networkx as nx\n'), ((76287, 76306), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (76300, 76306), False, 'import copy\n'), ((79919, 79938), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (79932, 79938), False, 'import copy\n'), ((80058, 80096), 'networkx.is_weakly_connected', 'nx.is_weakly_connected', (['original.graph'], {}), '(original.graph)\n', (80080, 80096), True, 'import networkx as nx\n'), ((102890, 102921), 'networkx.drawing.nx_agraph.write_dot', 'write_dot', (['g', "(basename + '.dot')"], {}), "(g, basename + '.dot')\n", (102899, 102921), False, 'from networkx.drawing.nx_agraph import write_dot\n'), ((104021, 104054), 'pymatgen.core.Molecule.from_dict', 'Molecule.from_dict', (["d['molecule']"], {}), "(d['molecule'])\n", (104039, 104054), False, 'from pymatgen.core import Lattice, Molecule, PeriodicSite, Structure\n'), ((106419, 106467), 'networkx.relabel_nodes', 'nx.relabel_nodes', (['self.graph', 'mapping'], {'copy': '(True)'}), '(self.graph, mapping, copy=True)\n', (106435, 106467), True, 'import networkx as nx\n'), ((12594, 12632), 'numpy.array_equal', 'np.array_equal', (['from_jimage', '(0, 0, 0)'], {}), '(from_jimage, (0, 0, 0))\n', (12608, 12632), True, 'import numpy as np\n'), ((12692, 12723), 'numpy.subtract', 'np.subtract', (['from_jimage', 'shift'], {}), '(from_jimage, shift)\n', (12703, 12723), True, 'import numpy as np\n'), ((12748, 12777), 'numpy.subtract', 'np.subtract', (['to_jimage', 'shift'], {}), '(to_jimage, shift)\n', (12759, 12777), True, 'import numpy as np\n'), ((13034, 13136), 'warnings.warn', 'warnings.warn', (['"""Please specify to_jimage to be unambiguous, trying to automatically detect."""'], {}), "(\n 'Please specify to_jimage to be unambiguous, trying to automatically detect.'\n )\n", (13047, 13136), False, 'import warnings\n'), ((25442, 25465), 'copy.deepcopy', 'copy.deepcopy', (['func_grp'], {}), '(func_grp)\n', (25455, 25465), False, 'import copy\n'), ((28540, 28570), 'pymatgen.core.PeriodicSite.from_dict', 'PeriodicSite.from_dict', (['site_d'], {}), '(site_d)\n', (28562, 28570), False, 'from pymatgen.core import Lattice, Molecule, PeriodicSite, Structure\n'), ((28653, 28683), 'numpy.subtract', 'np.subtract', (['to_jimage', 'jimage'], {}), '(to_jimage, jimage)\n', (28664, 28683), True, 'import numpy as np\n'), ((31822, 31833), 'monty.os.path.which', 'which', (['algo'], {}), '(algo)\n', (31827, 31833), False, 'from monty.os.path import which\n'), ((36273, 36344), 'subprocess.Popen', 'subprocess.Popen', (['args'], {'stdout': 'f', 'stdin': 'subprocess.PIPE', 'close_fds': '(True)'}), '(args, stdout=f, stdin=subprocess.PIPE, close_fds=True)\n', (36289, 36344), False, 'import subprocess\n'), ((39930, 39967), 'networkx.readwrite.json_graph.adjacency_data', 'json_graph.adjacency_data', (['self.graph'], {}), '(self.graph)\n', (39955, 39967), False, 'from networkx.readwrite import json_graph\n'), ((42213, 42264), 'numpy.dot', 'np.dot', (['scale_matrix', 'self.structure.lattice.matrix'], {}), '(scale_matrix, self.structure.lattice.matrix)\n', (42219, 42264), True, 'import numpy as np\n'), ((43267, 43293), 'networkx.union', 'nx.union', (['new_g', 'new_graph'], {}), '(new_g, new_graph)\n', (43275, 43293), True, 'import networkx as nx\n'), ((48840, 48872), 'networkx.readwrite.json_graph.adjacency_data', 'json_graph.adjacency_data', (['new_g'], {}), '(new_g)\n', (48865, 48872), False, 'from networkx.readwrite import json_graph\n'), ((58768, 58793), 'pymatgen.core.Molecule', 'Molecule', (['species', 'coords'], {}), '(species, coords)\n', (58776, 58793), False, 'from pymatgen.core import Lattice, Molecule, PeriodicSite, Structure\n'), ((77208, 77239), 'networkx.relabel_nodes', 'nx.relabel_nodes', (['subg', 'mapping'], {}), '(subg, mapping)\n', (77224, 77239), True, 'import networkx as nx\n'), ((77263, 77306), 'networkx.get_node_attributes', 'nx.get_node_attributes', (['new_graph', '"""specie"""'], {}), "(new_graph, 'specie')\n", (77285, 77306), True, 'import networkx as nx\n'), ((77328, 77371), 'networkx.get_node_attributes', 'nx.get_node_attributes', (['new_graph', '"""coords"""'], {}), "(new_graph, 'coords')\n", (77350, 77371), True, 'import networkx as nx\n'), ((77396, 77443), 'networkx.get_node_attributes', 'nx.get_node_attributes', (['new_graph', '"""properties"""'], {}), "(new_graph, 'properties')\n", (77418, 77443), True, 'import networkx as nx\n'), ((78045, 78113), 'pymatgen.core.Molecule', 'Molecule', (['species', 'coords'], {'charge': 'charge', 'site_properties': 'properties'}), '(species, coords, charge=charge, site_properties=properties)\n', (78053, 78113), False, 'from pymatgen.core import Lattice, Molecule, PeriodicSite, Structure\n'), ((78139, 78175), 'networkx.readwrite.json_graph.adjacency_data', 'json_graph.adjacency_data', (['new_graph'], {}), '(new_graph)\n', (78164, 78175), False, 'from networkx.readwrite import json_graph\n'), ((81398, 81427), 'itertools.combinations', 'combinations', (['graph.nodes', 'ii'], {}), '(graph.nodes, ii)\n', (81410, 81427), False, 'from itertools import combinations\n'), ((82485, 82512), 'copy.deepcopy', 'copy.deepcopy', (['unique_frags'], {}), '(unique_frags)\n', (82498, 82512), False, 'import copy\n'), ((83757, 83793), 'copy.deepcopy', 'copy.deepcopy', (['unique_mol_graph_list'], {}), '(unique_mol_graph_list)\n', (83770, 83793), False, 'import copy\n'), ((98504, 98515), 'monty.os.path.which', 'which', (['algo'], {}), '(algo)\n', (98509, 98515), False, 'from monty.os.path import which\n'), ((103042, 103113), 'subprocess.Popen', 'subprocess.Popen', (['args'], {'stdout': 'f', 'stdin': 'subprocess.PIPE', 'close_fds': '(True)'}), '(args, stdout=f, stdin=subprocess.PIPE, close_fds=True)\n', (103058, 103113), False, 'import subprocess\n'), ((103715, 103752), 'networkx.readwrite.json_graph.adjacency_data', 'json_graph.adjacency_data', (['self.graph'], {}), '(self.graph)\n', (103740, 103752), False, 'from networkx.readwrite import json_graph\n'), ((13945, 14016), 'numpy.subtract', 'np.subtract', (['nnsite.frac_coords', 'self.structure[from_index].frac_coords'], {}), '(nnsite.frac_coords, self.structure[from_index].frac_coords)\n', (13956, 14016), True, 'import numpy as np\n'), ((25524, 25565), 'copy.deepcopy', 'copy.deepcopy', (['FunctionalGroups[func_grp]'], {}), '(FunctionalGroups[func_grp])\n', (25537, 25565), False, 'import copy\n'), ((28307, 28333), 'numpy.multiply', 'np.multiply', (['(-1)', 'to_jimage'], {}), '(-1, to_jimage)\n', (28318, 28333), True, 'import numpy as np\n'), ((42683, 42819), 'pymatgen.core.PeriodicSite', 'PeriodicSite', (['site.species', '(site.coords + v)', 'new_lattice'], {'properties': 'site.properties', 'coords_are_cartesian': '(True)', 'to_unit_cell': '(False)'}), '(site.species, site.coords + v, new_lattice, properties=site.\n properties, coords_are_cartesian=True, to_unit_cell=False)\n', (42695, 42819), False, 'from pymatgen.core import Lattice, Molecule, PeriodicSite, Structure\n'), ((43022, 43070), 'networkx.relabel_nodes', 'nx.relabel_nodes', (['self.graph', 'mapping'], {'copy': '(True)'}), '(self.graph, mapping, copy=True)\n', (43038, 43070), True, 'import networkx as nx\n'), ((44540, 44590), 'numpy.add', 'np.add', (['self.structure[n_v].frac_coords', 'to_jimage'], {}), '(self.structure[n_v].frac_coords, to_jimage)\n', (44546, 44590), True, 'import numpy as np\n'), ((45004, 45037), 'numpy.subtract', 'np.subtract', (['v_image_cart', 'u_cart'], {}), '(v_image_cart, u_cart)\n', (45015, 45037), True, 'import numpy as np\n'), ((49823, 49839), 'operator.itemgetter', 'itemgetter', (['(0)', '(1)'], {}), '(0, 1)\n', (49833, 49839), False, 'from operator import itemgetter\n'), ((57144, 57187), 'networkx.connected_components', 'nx.connected_components', (['supercell_sg.graph'], {}), '(supercell_sg.graph)\n', (57167, 57187), True, 'import networkx as nx\n'), ((58256, 58331), 'networkx.is_isomorphic', 'nx.is_isomorphic', (['subgraph', 'g'], {'node_match': 'node_match', 'edge_match': 'edge_match'}), '(subgraph, g, node_match=node_match, edge_match=edge_match)\n', (58272, 58331), True, 'import networkx as nx\n'), ((76246, 76265), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (76259, 76265), False, 'import copy\n'), ((76519, 76565), 'networkx.weakly_connected_components', 'nx.weakly_connected_components', (['original.graph'], {}), '(original.graph)\n', (76549, 76565), True, 'import networkx as nx\n'), ((81639, 81670), 'networkx.subgraph', 'nx.subgraph', (['graph', 'combination'], {}), '(graph, combination)\n', (81650, 81670), True, 'import networkx as nx\n'), ((81690, 81715), 'networkx.is_connected', 'nx.is_connected', (['subgraph'], {}), '(subgraph)\n', (81705, 81715), True, 'import networkx as nx\n'), ((82824, 82859), 'networkx.relabel_nodes', 'nx.relabel_nodes', (['fragment', 'mapping'], {}), '(fragment, mapping)\n', (82840, 82859), True, 'import networkx as nx\n'), ((82887, 82929), 'networkx.get_node_attributes', 'nx.get_node_attributes', (['remapped', '"""specie"""'], {}), "(remapped, 'specie')\n", (82909, 82929), True, 'import networkx as nx\n'), ((82955, 82997), 'networkx.get_node_attributes', 'nx.get_node_attributes', (['remapped', '"""coords"""'], {}), "(remapped, 'coords')\n", (82977, 82997), True, 'import networkx as nx\n'), ((87183, 87206), 'copy.deepcopy', 'copy.deepcopy', (['func_grp'], {}), '(func_grp)\n', (87196, 87206), False, 'import copy\n'), ((93648, 93674), 'networkx.simple_cycles', 'nx.simple_cycles', (['directed'], {}), '(directed)\n', (93664, 93674), True, 'import networkx as nx\n'), ((104901, 104917), 'operator.itemgetter', 'itemgetter', (['(0)', '(1)'], {}), '(0, 1)\n', (104911, 104917), False, 'from operator import itemgetter\n'), ((28374, 28399), 'numpy.add', 'np.add', (['to_jimage', 'jimage'], {}), '(to_jimage, jimage)\n', (28380, 28399), True, 'import numpy as np\n'), ((28479, 28511), 'numpy.add', 'np.add', (["site_d['abc']", 'to_jimage'], {}), "(site_d['abc'], to_jimage)\n", (28485, 28511), True, 'import numpy as np\n'), ((42001, 42010), 'numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (42007, 42010), True, 'import numpy as np\n'), ((47031, 47066), 'numpy.around', 'np.around', (['v_expec_frac'], {'decimals': '(3)'}), '(v_expec_frac, decimals=3)\n', (47040, 47066), True, 'import numpy as np\n'), ((47173, 47213), 'numpy.subtract', 'np.subtract', (['v_expec_frac', 'v_expec_image'], {}), '(v_expec_frac, v_expec_image)\n', (47184, 47213), True, 'import numpy as np\n'), ((57572, 57597), 'networkx.MultiDiGraph', 'nx.MultiDiGraph', (['subgraph'], {}), '(subgraph)\n', (57587, 57597), True, 'import networkx as nx\n'), ((66051, 66095), 'numpy.array_equal', 'np.array_equal', (["neighbor['image']", '[0, 0, 0]'], {}), "(neighbor['image'], [0, 0, 0])\n", (66065, 66095), True, 'import numpy as np\n'), ((87277, 87318), 'copy.deepcopy', 'copy.deepcopy', (['FunctionalGroups[func_grp]'], {}), '(FunctionalGroups[func_grp])\n', (87290, 87318), False, 'import copy\n'), ((92221, 92262), 'networkx.descendants', 'nx.descendants', (['disconnected', 'neighbor[2]'], {}), '(disconnected, neighbor[2])\n', (92235, 92262), True, 'import networkx as nx\n'), ((14045, 14064), 'numpy.round', 'np.round', (['to_jimage'], {}), '(to_jimage)\n', (14053, 14064), True, 'import numpy as np\n'), ((83351, 83420), 'pymatgen.core.Molecule', 'Molecule', ([], {'species': 'species', 'coords': 'coords', 'charge': 'self.molecule.charge'}), '(species=species, coords=coords, charge=self.molecule.charge)\n', (83359, 83420), False, 'from pymatgen.core import Lattice, Molecule, PeriodicSite, Structure\n'), ((51687, 51718), 'numpy.multiply', 'np.multiply', (['(-1)', "d['to_jimage']"], {}), "(-1, d['to_jimage'])\n", (51698, 51718), True, 'import numpy as np\n'), ((81872, 81895), 'copy.deepcopy', 'copy.deepcopy', (['subgraph'], {}), '(subgraph)\n', (81885, 81895), False, 'import copy\n'), ((81971, 81994), 'copy.deepcopy', 'copy.deepcopy', (['subgraph'], {}), '(subgraph)\n', (81984, 81994), False, 'import copy\n'), ((47865, 47896), 'numpy.multiply', 'np.multiply', (['(-1)', "d['to_jimage']"], {}), "(-1, d['to_jimage'])\n", (47876, 47896), True, 'import numpy as np\n')] |
akashdhruv/maple | maple/backend/singularity/__init__.py | 11e562f51b18b2251ea507c629a1981b031d2f35 | from . import image
from . import container
from . import system
| [] |
Ahmed-skb/blogyfy | articles/views.py | 2cfa3d9503f1846ccd89c2bf1934293eb97ad44a | from django.shortcuts import render, redirect
from django.http import HttpResponse
from .models import Article
from django.contrib.auth.decorators import login_required
from . import forms
def Articles(request):
articles = Article.objects.all().order_by('date')
return render(request, 'articles/article_list.html', {'articles': articles})
def article_detail(request, slug):
# return HttpResponse(slug)
article = Article.objects.get(slug=slug)
return render(request, 'articles/article_details.html', {'article': article})
@login_required(login_url="/accounts/login")
def article_create(request):
if request.method == 'POST':
form = forms.CreateArticle(request.POST, request.FILES)
if form.is_valid():
#save article to DB
instance = form.save(commit=False)
instance.author = request.user
instance.save( )
return redirect ('articles:list')
else:
form = forms.CreateArticle()
return render(request, 'articles/article_create.html', {'form':form})
| [((548, 591), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login"""'}), "(login_url='/accounts/login')\n", (562, 591), False, 'from django.contrib.auth.decorators import login_required\n'), ((278, 347), 'django.shortcuts.render', 'render', (['request', '"""articles/article_list.html"""', "{'articles': articles}"], {}), "(request, 'articles/article_list.html', {'articles': articles})\n", (284, 347), False, 'from django.shortcuts import render, redirect\n'), ((474, 544), 'django.shortcuts.render', 'render', (['request', '"""articles/article_details.html"""', "{'article': article}"], {}), "(request, 'articles/article_details.html', {'article': article})\n", (480, 544), False, 'from django.shortcuts import render, redirect\n'), ((1001, 1064), 'django.shortcuts.render', 'render', (['request', '"""articles/article_create.html"""', "{'form': form}"], {}), "(request, 'articles/article_create.html', {'form': form})\n", (1007, 1064), False, 'from django.shortcuts import render, redirect\n'), ((916, 941), 'django.shortcuts.redirect', 'redirect', (['"""articles:list"""'], {}), "('articles:list')\n", (924, 941), False, 'from django.shortcuts import render, redirect\n')] |
russell/sifter | sifter/grammar/grammar.py | 03e85349fd2329439ae3f7eb3c1f484ba2ebf807 | # Parser based on RFC 5228, especially the grammar as defined in section 8. All
# references are to sections in RFC 5228 unless stated otherwise.
import ply.yacc
import sifter.grammar
from sifter.grammar.lexer import tokens
import sifter.handler
import logging
__all__ = ('parser',)
def parser(**kwargs):
return ply.yacc.yacc(**kwargs)
def p_commands_list(p):
"""commands : commands command"""
p[0] = p[1]
# section 3.2: REQUIRE command must come before any other commands
if p[2].RULE_IDENTIFIER == 'REQUIRE':
if any(command.RULE_IDENTIFIER != 'REQUIRE'
for command in p[0].commands):
log = logging.getLogger("sifter")
log.error(("REQUIRE command on line %d must come before any "
"other non-REQUIRE commands" % p.lineno(2)))
raise SyntaxError
# section 3.1: ELSIF and ELSE must follow IF or another ELSIF
elif p[2].RULE_IDENTIFIER in ('ELSIF', 'ELSE'):
if p[0].commands[-1].RULE_IDENTIFIER not in ('IF', 'ELSIF'):
log = logging.getLogger("sifter")
log.error(("ELSIF/ELSE command on line %d must follow an IF/ELSIF "
"command" % p.lineno(2)))
raise SyntaxError
p[0].commands.append(p[2])
def p_commands_empty(p):
"""commands : """
p[0] = sifter.grammar.CommandList()
def p_command(p):
"""command : IDENTIFIER arguments ';'
| IDENTIFIER arguments block"""
#print("COMMAND:", p[1], p[2], p[3])
tests = p[2].get('tests')
block = None
if p[3] != ';': block = p[3]
handler = sifter.handler.get('command', p[1])
if handler is None:
log = logging.getLogger("sifter")
log.error(("No handler registered for command '%s' on line %d" %
(p[1], p.lineno(1))))
raise SyntaxError
p[0] = handler(arguments=p[2]['args'], tests=tests, block=block)
def p_command_error(p):
"""command : IDENTIFIER error ';'
| IDENTIFIER error block"""
log = logging.getLogger("sifter")
log.error(("Syntax error in command definition after %s on line %d" %
(p[1], p.lineno(1))))
raise SyntaxError
def p_block(p):
"""block : '{' commands '}' """
# section 3.2: REQUIRE command must come before any other commands,
# which means it can't be in the block of another command
if any(command.RULE_IDENTIFIER == 'REQUIRE'
for command in p[2].commands):
log = logging.getLogger("sifter")
log.error(("REQUIRE command not allowed inside of a block (line %d)" %
(p.lineno(2))))
raise SyntaxError
p[0] = p[2]
def p_block_error(p):
"""block : '{' error '}'"""
log = logging.getLogger("sifter")
log.error(("Syntax error in command block that starts on line %d" %
(p.lineno(1),)))
raise SyntaxError
def p_arguments(p):
"""arguments : argumentlist
| argumentlist test
| argumentlist '(' testlist ')'"""
p[0] = { 'args' : p[1], }
if len(p) > 2:
if p[2] == '(':
p[0]['tests'] = p[3]
else:
p[0]['tests'] = [ p[2] ]
def p_testlist_error(p):
"""arguments : argumentlist '(' error ')'"""
log = logging.getLogger("sifter")
log.error(("Syntax error in test list that starts on line %d" % p.lineno(2)))
raise SyntaxError
def p_argumentlist_list(p):
"""argumentlist : argumentlist argument"""
p[0] = p[1]
p[0].append(p[2])
def p_argumentlist_empty(p):
"""argumentlist : """
p[0] = []
def p_test(p):
"""test : IDENTIFIER arguments"""
#print("TEST:", p[1], p[2])
tests = p[2].get('tests')
handler = sifter.handler.get('test', p[1])
if handler is None:
log = logging.getLogger("sifter")
log.error(("No handler registered for test '%s' on line %d" %
(p[1], p.lineno(1))))
raise SyntaxError
p[0] = handler(arguments=p[2]['args'], tests=tests)
def p_testlist_list(p):
"""testlist : test ',' testlist"""
p[0] = p[3]
p[0].insert(0, p[1])
def p_testlist_single(p):
"""testlist : test"""
p[0] = [ p[1] ]
def p_argument_stringlist(p):
"""argument : '[' stringlist ']'"""
p[0] = p[2]
def p_argument_string(p):
"""argument : string"""
# for simplicity, we treat all single strings as a string list
p[0] = [ p[1] ]
def p_argument_number(p):
"""argument : NUMBER"""
p[0] = p[1]
def p_argument_tag(p):
"""argument : TAG"""
p[0] = sifter.grammar.Tag(p[1])
def p_stringlist_error(p):
"""argument : '[' error ']'"""
log = logging.getLogger("sifter")
log.error(("Syntax error in string list that starts on line %d" %
p.lineno(1)))
raise SyntaxError
def p_stringlist_list(p):
"""stringlist : string ',' stringlist"""
p[0] = p[3]
p[0].insert(0, p[1])
def p_stringlist_single(p):
"""stringlist : string"""
p[0] = [ p[1] ]
def p_string(p):
"""string : QUOTED_STRING"""
p[0] = sifter.grammar.String(p[1])
| [((2021, 2048), 'logging.getLogger', 'logging.getLogger', (['"""sifter"""'], {}), "('sifter')\n", (2038, 2048), False, 'import logging\n'), ((2708, 2735), 'logging.getLogger', 'logging.getLogger', (['"""sifter"""'], {}), "('sifter')\n", (2725, 2735), False, 'import logging\n'), ((3239, 3266), 'logging.getLogger', 'logging.getLogger', (['"""sifter"""'], {}), "('sifter')\n", (3256, 3266), False, 'import logging\n'), ((4610, 4637), 'logging.getLogger', 'logging.getLogger', (['"""sifter"""'], {}), "('sifter')\n", (4627, 4637), False, 'import logging\n'), ((1675, 1702), 'logging.getLogger', 'logging.getLogger', (['"""sifter"""'], {}), "('sifter')\n", (1692, 1702), False, 'import logging\n'), ((2466, 2493), 'logging.getLogger', 'logging.getLogger', (['"""sifter"""'], {}), "('sifter')\n", (2483, 2493), False, 'import logging\n'), ((3756, 3783), 'logging.getLogger', 'logging.getLogger', (['"""sifter"""'], {}), "('sifter')\n", (3773, 3783), False, 'import logging\n'), ((655, 682), 'logging.getLogger', 'logging.getLogger', (['"""sifter"""'], {}), "('sifter')\n", (672, 682), False, 'import logging\n'), ((1056, 1083), 'logging.getLogger', 'logging.getLogger', (['"""sifter"""'], {}), "('sifter')\n", (1073, 1083), False, 'import logging\n')] |
dropofwill/author-attr-experiments | multidoc_mnb.py | a90e2743591358a6253f3b3664f5e398517f84bc | from sklearn import datasets
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.cross_validation import train_test_split
from sklearn.cross_validation import cross_val_score
from sklearn.cross_validation import ShuffleSplit
from sklearn.cross_validation import Bootstrap
from sklearn.naive_bayes import MultinomialNB
from sklearn.grid_search import GridSearchCV
from scipy.stats import sem
from pprint import pprint
import numpy as np
import pylab as pl
import string
import matplotlib.pyplot as plt
# Calculates the mean of the scores with the standard deviation
def mean_sem(scores):
return ("Mean score: {0:.3f} (+/-{1:.3f})").format(np.mean(scores), sem(scores))
def test_docs(dir):
# Load documents
docs = datasets.load_files(container_path="../../sklearn_data/"+dir)
X, y = docs.data, docs.target
baseline = 1/float(len(list(np.unique(y))))
# Select Features via Bag of Words approach without stop words
#X = CountVectorizer(charset_error='ignore', stop_words='english', strip_accents='unicode', ).fit_transform(X)
X = TfidfVectorizer(charset_error='ignore', stop_words='english', analyzer='char', ngram_range=(2,4), strip_accents='unicode', sublinear_tf=True, max_df=0.5).fit_transform(X)
n_samples, n_features = X.shape
# sklearn's grid search
parameters = { 'alpha': np.logspace(-100,0,10)}
bv = Bootstrap(n_samples, n_iter=10, test_size=0.3, random_state=42)
mnb_gv = GridSearchCV(MultinomialNB(), parameters, cv=bv,)
#scores = cross_val_score(mnb_gv, X, y, cv=bv)
mnb_gv.fit(X, y)
mnb_gv_best_params = mnb_gv.best_params_.values()[0]
print mnb_gv.best_score_
print mnb_gv_best_params
# CV with Bootstrap
mnb = MultinomialNB(alpha=mnb_gv_best_params)
boot_scores = cross_val_score(mnb, X, y, cv=bv)
print mean_sem(boot_scores)
improvement = (mnb_gv.best_score_ - baseline) / baseline
rand_baseline.append(baseline)
test_results.append([mnb_gv.best_score_])
com_results.append(improvement)
sem_results.append(sem(boot_scores))
def graph(base_list, results_list, com_list, arange):
N=arange
base=np.array(base_list)
res=np.array(results_list)
com = np.array(com_list)
ind = np.arange(N) # the x locations for the groups
width = 0.3 # the width of the bars: can also be len(x) sequence
#fig, ax = plt.sublots()
p1 = plt.bar(ind, base, width, color='r')
p2 = plt.bar(ind+0.3, res, width, color='y')
p3 = plt.bar(ind+0.6, com, width, color='b')
plt.rcParams['figure.figsize'] = 10, 7.5
plt.rcParams['axes.grid'] = True
plt.gray()
plt.ylabel('Accuracy')
plt.title('AAAC Problem Accuracy')
plt.yticks(np.arange(0,3,30))
plt.xticks(np.arange(0,13,13))
#plt.set_xticks(('A','B','C','D','E','F','G','H','I','J','K','L','M'))
plt.legend( (p1[0], p2[0], p3[0]), ('Baseline', 'Algorithm', 'Improvement'))
plt.show()
rand_baseline = list()
test_results = list()
sem_results = list()
com_results = list()
#test_docs("problemA")
for i in string.uppercase[:13]:
test_docs("problem"+i)
#graph(rand_baseline,test_results,com_results,13)
import os
import time as tm
sub_dir = "Results/"
location = "multiDoc" + tm.strftime("%Y%m%d-%H%M%S") + ".txt"
with open(os.path.join(sub_dir, location), 'w') as myFile:
myFile.write(str(rand_baseline))
myFile.write("\n")
myFile.write(str(test_results))
myFile.write("\n")
myFile.write(str(sem_results))
myFile.write("\n")
myFile.write(str(com_results))
# CV with ShuffleSpit
'''
cv = ShuffleSplit(n_samples, n_iter=100, test_size=0.2, random_state=0)
test_scores = cross_val_score(mnb, X, y, cv=cv)
print np.mean(test_scores)
'''
# Single run through
'''
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
print X_train.shape
print y_train.shape
print X_test.shape
print y_test.shape
mnb = MultinomialNB().fit(X_train, y_train)
print mnb.score(X_test, y_test)
''' | [] |
yamins81/tabular | tabular/__init__.py | 1caf091c8c395960a9ad7078f95158b533cc52dd | import io
import fast
import spreadsheet
import tab
import utils
import web
from io import *
from fast import *
from spreadsheet import *
from tab import *
from utils import *
from web import *
__all__ = []
__all__.extend(io.__all__)
__all__.extend(fast.__all__)
__all__.extend(spreadsheet.__all__)
__all__.extend(tab.__all__)
__all__.extend(utils.__all__)
__all__.extend(web.__all__) | [] |
KSchopmeyer/smipyping | smipyping/_targetstable.py | 9c60b3489f02592bd9099b8719ca23ae43a9eaa5 | # (C) Copyright 2017 Inova Development Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Define the base of targets (i.e. systems to be tested)
TargetID = Column(Integer(11), primary_key=True)
IPAddress = Column(String(15), nullable=False)
CompanyID = Column(Integer(11), ForeignKey("Companies.CompanyID"))
Namespace = Column(String(30), nullable=False)
SMIVersion = Column(String(15), nullable=False)
Product = Column(String(30), nullable=False)
Principal = Column(String(30), nullable=False)
Credential = Column(String(30), nullable=False)
CimomVersion = Column(String(30), nullable=False)
InteropNamespace = Column(String(30), nullable=False)
Notify = Column(Enum('Enabled', 'Disabled'), default='Disabled')
NotifyUsers = Column(String(12), nullable=False)
ScanEnabled = Column(Enum('Enabled', 'Disabled'), default='Enabled')
Protocol = Column(String(10), default='http')
Port = Column(String(10), nullable=False)
"""
# TODO change ip_address to hostname where host name is name : port
from __future__ import print_function, absolute_import
import os
import csv
import re
from collections import OrderedDict
from textwrap import wrap
import six
from mysql.connector import Error as mysqlerror
from ._dbtablebase import DBTableBase
from ._mysqldbmixin import MySQLDBMixin
from ._common import get_url_str
from ._logging import AUDIT_LOGGER_NAME, get_logger
from ._companiestable import CompaniesTable
__all__ = ['TargetsTable']
class TargetsTable(DBTableBase):
"""
Class representing the targets db table.
This base contains information on the targets, host systems, etc. in the
environment.
The factory method should be used to construct a new TargetsTable object
since that creates the correct object for the defined database type.
"""
table_name = 'Targets'
key_field = 'TargetID'
# Fields that are required to create new records
required_fields = [
'IPAddress', 'CompanyID', 'Namespace',
'SMIVersion', 'Product', 'Principal', 'Credential',
'CimomVersion', 'InteropNamespace', 'Notify', 'NotifyUsers',
'ScanEnabled', 'Protocol', 'Port']
# All fields in each record.
fields = [key_field] + required_fields
join_fields = ['CompanyName']
all_fields = fields + join_fields
hints = {
'IPAddress': "Host name or ip address",
'CompanyID': "DB id of company",
'Namespace': "User namespace",
'SMIVersion': "SMI version",
'Product': "Product name",
'Principal': "User Name to access target",
'Credential': "User password to access target",
'CimomVersion': "Version of CIMOM",
'InteropNamespace': "Interop Namespace name",
'Notify': "'Enabled' if users to be notified of issues, else "
"'Disabled'",
'NotifyUsers': "List of UserIDs to notify",
'ScanEnabled': "Enabled if this target to be scanned",
'Protocol': '"http" or "https"',
'Port': "Integer defining WBEM server port."}
# # Defines each record for the data base and outputs.
# # The Name is the database name for the property
# # The value tuple is display name and max width for the record
table_format_dict = OrderedDict([
('TargetID', ('ID', 2, int)),
('CompanyName', ('CompanyName', 12, str)),
('Namespace', ('Namespace', 12, str)),
('SMIVersion', ('SMIVersion', 12, str)),
('Product', ('Product', 15, str)),
('Principal', ('Principal', 12, str)),
('Credential', ('Credential', 12, str)),
('CimomVersion', ('CimomVersion', 15, str)),
('IPAddress', ('IPAddress', 12, str)),
('InteropNamespace', ('Interop', 8, str)),
('Notify', ('Notify', 12, str)),
('NotifyUsers', ('NotifyUsers', 12, str)),
('Protocol', ('Prot', 5, str)),
('Port', ('Port', 4, int)),
('ScanEnabled', ('Enabled', 6, str)),
]) # noqa: E123
def __init__(self, db_dict, db_type, verbose, output_format):
"""Initialize the abstract Targets instance.
This controls all other
target bases. This defines the common definition of all targets bases
including field names, and common methods.
Parameters:
db_dict (:term: `dictionary')
Dictionary containing all of the parameters to open the database
defined by the db_dict attribute.
db_type (:term: `string`)
String defining one of the allowed database types for the
target database.
verbose (:class:`py:bool`)
Boolean. If true detailed info is displayed on the processing
of the TargetData class
output_format (:term:`string`)
String defining one of the legal report output formats. If not
provided, the default is a simple report format.
"""
super(TargetsTable, self).__init__(db_dict, db_type, verbose)
self.output_format = output_format
# def __str__(self):
# # # TODO this and __repr__ do not really match.
# # """String info on targetdata. TODO. Put more info here"""
# # return ('type=%s db=%s, len=%s' % (self.db_type, self.get_dbdict(),
# # # len(self.data_dict)))
# def __repr__(self):
# # """Rep of target data"""
# # return ('Targetdata db_type %s, rep count=%s' %
# # # (self.db_type, len(self.data_dict)))
def test_fieldnames(self, fields):
"""Test a list of field names. This test generates an exception,
KeyError if a field in fields is not in the table
"""
for field in fields:
self.table_format_dict[field] # pylint: disable=pointless-statement
def get_dbdict(self):
"""Get string for the db_dict"""
return '%s' % self.db_dict
@classmethod
def factory(cls, db_dict, db_type, verbose, output_format='simple'):
"""Factory method to select subclass based on database type (db_type).
Currently the types sql and csv are supported.
Returns instance object of the defined provider type.
"""
inst = None
if verbose:
print('targetdata factory datafile %s dbtype %s verbose %s'
% (db_dict, db_type, verbose))
if db_type == ('csv'):
inst = CsvTargetsTable(db_dict, db_type, verbose,
output_format=output_format)
elif db_type == ('mysql'):
inst = MySQLTargetsTable(db_dict, db_type, verbose,
output_format=output_format)
else:
ValueError('Invalid targets factory db_type %s' % db_type)
if verbose:
print('Resulting targets factory inst %r' % inst)
return inst
def get_field_list(self):
"""Return a list of the base table field names in the order defined."""
return list(self.table_format_dict)
def get_format_dict(self, name):
"""Return tuple of display name and length for name."""
return self.table_format_dict[name]
def get_enabled_targetids(self):
"""Get list of target ids that are marked enabled."""
return [x for x in self.data_dict if not self.disabled_target_id(x)]
def get_disabled_targetids(self):
"""Get list of target ids that are marked disabled"""
return [x for x in self.data_dict
if self.disabled_target_id(x)]
# TODO we have multiple of these. See get dict_for_host,get_hostid_list
def get_targets_host(self, host_data):
"""
If an record for `host_data` exists return that record,
otherwise return None.
There may be multiple ipaddress, port entries for a
single ipaddress, port in the database
Parameters:
host_id(tuple of hostname or ipaddress and port)
Returns list of targetdata keys
"""
# TODO clean up for PY 3
return_list = []
for key, value in self.data_dict.items():
port = value["Port"]
# TODO port from database is a string. Should be int internal.
if value["IPAddress"] == host_data[0] and int(port) == host_data[1]:
return_list.append(key)
return return_list
def get_target(self, targetid):
"""
Get the target data for the parameter target_id.
This is alternate to using [id] directly. It does an additonal check
for correct type for target_id
Returns:
target as dictionary
Exceptions:
KeyError if target not in targets dictionary
"""
if not isinstance(targetid, six.integer_types):
targetid = int(targetid)
return self.data_dict[targetid]
def filter_targets(self, ip_filter=None, company_name_filter=None):
"""
Filter for match of ip_filter and companyname filter if they exist
and return list of any targets that match.
The filters are regex strings.
"""
rtn = OrderedDict()
for key, value in self.data_dict.items():
if ip_filter and re.match(ip_filter, value['IPAddress']):
rtn[key] = value
if company_name_filter and \
re.match(value['CompanyName'], company_name_filter):
rtn[key] = value
return rtn
def build_url(self, targetid):
"""Get the string representing the url for targetid. Gets the
Protocol, IPaddress and port and uses the common get_url_str to
create a string. Port info is included only if it is not the
WBEM CIM-XML standard definitions.
"""
target = self[targetid]
return get_url_str(target['Protocol'], target['IPAddress'],
target['Port'])
def get_hostid_list(self, ip_filter=None, company_name_filter=None):
"""
Get all WBEM Server ipaddresses in the targets base.
Returns list of IP addresses:port entries.
TODO: Does not include port right now.
"""
output_list = []
# TODO clean up for python 3
for _id, value in self.data_dict.items():
if self.verbose:
print('get_hostid_list value %s' % (value,))
output_list.append(value['IPAddress'])
return output_list
def tbl_hdr(self, record_list):
"""Return a list of all the column headers from the record_list."""
hdr = []
for name in record_list:
value = self.get_format_dict(name)
hdr.append(value[0])
return hdr
def get_notifyusers(self, targetid):
"""
Get list of entries in the notify users field and split into python
list and return the list of integers representing the userids.
This list stored in db as string of integers separated by commas.
Returns None if there is no data in NotifyUsers.
"""
notify_users = self[targetid]['NotifyUsers']
if notify_users:
notify_users_list = notify_users.split(',')
notify_users_list = [int(userid) for userid in notify_users_list]
return notify_users_list
return None
def format_record(self, record_id, fields, fold=False):
"""Return the fields defined in field_list for the record_id in
display format.
String fields will be folded if their width is greater than the
specification in the format_dictionary and fold=True
"""
# TODO can we make this a std cvt function.
target = self.get_target(record_id)
line = []
for field_name in fields:
field_value = target[field_name]
fmt_value = self.get_format_dict(field_name)
max_width = fmt_value[1]
field_type = fmt_value[2]
if isinstance(field_type, six.string_types) and field_value:
if max_width < len(field_value):
line.append('\n'.join(wrap(field_value, max_width)))
else:
line.append('%s' % field_value)
else:
line.append('%s' % field_value)
return line
def disabled_target(self, target_record): # pylint: disable=no-self-use
"""
If target_record disabled, return true, else return false.
"""
val = target_record['ScanEnabled'].lower()
if val == 'enabled':
return False
if val == 'disabled':
return True
ValueError('ScanEnabled field must contain "Enabled" or "Disabled'
' string. %s is invalid.' % val)
def disabled_target_id(self, targetid):
"""
Return True if target recorded for this target_id marked
disabled. Otherwise return True
Parameters:
target_id(:term:`integer`)
Valid target Id for the Target_Tableue .
Returns: (:class:`py:bool`)
True if this target id disabled
Exceptions:
KeyError if target_id not in database
"""
return(self.disabled_target(self.data_dict[targetid]))
def get_output_width(self, col_list):
"""
Get the width of a table from the column names in the list
"""
total_width = 0
for name in col_list:
value = self.get_format_dict(name)
total_width += value[1]
return total_width
def get_unique_creds(self):
"""
Get the set of Credentials and Principal that represents the
unique combination of both. The result could be used to test with
all Principals/Credentials knows in the db.
Return list of targetIDs that represent unique sets of Principal and
Credential
"""
creds = {k: '%s%s' % (v['Principal'], v['Credential'])
for k, v in self.data_dict.items()}
ucreds = dict([[v, k] for k, v in creds.items()])
unique_keys = dict([[v, k] for k, v in ucreds.items()])
unique_creds = [(self.data_dict[k]['Principal'],
self.data_dict[k]['Credential']) for k in unique_keys]
return unique_creds
class SQLTargetsTable(TargetsTable):
"""
Subclass of Targets data for all SQL databases. Subclasses of this class
support specialized sql databases.
"""
def __init__(self, db_dict, dbtype, verbose, output_format):
"""Pass through to SQL"""
if verbose:
print('SQL Database type %s verbose=%s' % (db_dict, verbose))
super(SQLTargetsTable, self).__init__(db_dict, dbtype, verbose,
output_format)
self.connection = None
class MySQLTargetsTable(SQLTargetsTable, MySQLDBMixin):
"""
This subclass of TargetsTable process targets infromation from an sql
database.
Generate the targetstable from the sql database targets table and
the companies table, by mapping the data to the dictionary defined
for targets
"""
# TODO filename is config file name, not actual file name.
def __init__(self, db_dict, dbtype, verbose, output_format):
"""Read the input file into a dictionary."""
super(MySQLTargetsTable, self).__init__(db_dict, dbtype, verbose,
output_format)
self.connectdb(db_dict, verbose)
self._load_table()
self._load_joins()
def _load_joins(self):
"""
Load the tables that would normally be joins. In this case it is the
companies table. Move the companyName into the targets table
TODO we should not be doing this in this manner but with a
join.
"""
# Get companies table and insert into targets table:
# TODO in smipyping name is db_dict. Elsewhere it is db_info
companies_tbl = CompaniesTable.factory(self.db_dict,
self.db_type,
self.verbose)
try:
# set the companyname into the targets table
for target_key in self.data_dict:
target = self.data_dict[target_key]
if target['CompanyID'] in companies_tbl:
company = companies_tbl[target['CompanyID']]
target['CompanyName'] = company['CompanyName']
else:
target['CompanyName'] = "TableError CompanyID %s" % \
target['CompanyID']
except Exception as ex:
raise ValueError('Error: putting Company Name in table %r error %s'
% (self.db_dict, ex))
def update_fields(self, targetid, changes):
"""
Update the database record defined by targetid with the dictionary
of items defined by changes where each item is an entry in the
target record. Update does NOT test if the new value is the same
as the original value.
"""
cursor = self.connection.cursor()
# dynamically build the update sql based on the changes dictionary
set_names = "SET "
values = []
comma = False
for key, value in changes.items():
if comma:
set_names = set_names + ", "
else:
comma = True
set_names = set_names + "{0} = %s".format(key)
values.append(value)
values.append(targetid)
sql = "Update Targets " + set_names
# append targetid component
sql = sql + " WHERE TargetID=%s"
# Record the original data for the audit log.
original_data = {}
target_record = self.get_target(targetid)
for change in changes:
original_data[change] = target_record[change]
try:
cursor.execute(sql, tuple(values))
self.connection.commit()
audit_logger = get_logger(AUDIT_LOGGER_NAME)
audit_logger.info('TargetsTable TargetID: %s, update fields: %s, '
'original fields: %s',
targetid, changes, original_data)
except Exception as ex:
self.connection.rollback()
audit_logger = get_logger(AUDIT_LOGGER_NAME)
audit_logger.error('TargetsTable TargetID: %s failed SQL update. '
'SQL: %s Changes: %s Exception: %s',
targetid, sql, changes, ex)
raise ex
finally:
self._load_table()
self._load_joins()
cursor.close()
def activate(self, targetid, activate_flag):
"""
Activate or deactivate the table entry defined by the
targetid parameter to the value defined by the activate_flag
Parameters:
targetid (:term:`py:integer`):
The database key property for this table
activate_flag (:class:`py:bool`):
Next state that will be set into the database for this target.
Since the db field is an enum it actually sete Active or Inactive
strings into the field
"""
cursor = self.connection.cursor()
enabled_kw = 'Enabled' if activate_flag else 'Disabled'
sql = 'UPDATE Targets SET ScanEnabled = %s WHERE TargetID = %s'
try:
cursor.execute(sql, (enabled_kw, targetid)) # noqa F841
self.connection.commit()
audit_logger = get_logger(AUDIT_LOGGER_NAME)
audit_logger.info('TargetTable TargetId %s,set scanEnabled to %s',
targetid, enabled_kw)
except mysqlerror as ex:
audit_logger = get_logger(AUDIT_LOGGER_NAME)
audit_logger.error('TargetTable userid %s failed SQL change '
'ScanEnabled. SQL=%s '
'Change to %s exception %s: %s',
targetid, sql, enabled_kw, ex.__class__.__name__,
ex)
self.connection.rollback()
raise ex
finally:
self._load_table()
self._load_joins()
def delete(self, targetid):
"""
Delete the target in the targets table defined by the targetid
"""
cursor = self.connection.cursor()
sql = "DELETE FROM Targets WHERE TargetID=%s"
try:
# pylint: disable=unused-variable
mydata = cursor.execute(sql, (targetid,)) # noqa F841
self.connection.commit()
audit_logger = get_logger(AUDIT_LOGGER_NAME)
audit_logger.info('TargetTable TargetId %s Deleted', targetid)
except mysqlerror as ex:
audit_logger = get_logger(AUDIT_LOGGER_NAME)
audit_logger.error('TargetTable targetid %s failed SQL DELETE. '
'SQL=%s exception %s: %s',
targetid, sql, ex.__class__.__name__, ex)
self.connection.rollback()
raise ex
finally:
self._load_table()
self._load_joins()
self.connection.close()
def insert(self, fields):
"""
Write a new record to the database containing the fields defined in
the input.
Parameters:
field_data ()
Dictionary of fields to be inserted into the table. There is
one entry in the dictionary for each field to be inserted.
Exceptions:
"""
cursor = self.connection.cursor()
placeholders = ', '.join(['%s'] * len(fields))
columns = ', '.join(fields.keys())
sql = "INSERT INTO %s ( %s ) VALUES ( %s )" % (self.table_name,
columns,
placeholders)
try:
cursor.execute(sql, fields.values())
self.connection.commit()
new_targetid = cursor.lastrowid
audit_logger = get_logger(AUDIT_LOGGER_NAME)
audit_logger.info('TargetsTable TargetId %s added. %s',
new_targetid, fields)
except mysqlerror as ex:
audit_logger = get_logger(AUDIT_LOGGER_NAME)
audit_logger.error('TargetTable INSERT failed SQL update. SQL=%s. '
'data=%s. Exception %s: %s', sql, fields,
ex.__class__.__name__, ex)
self.connection.rollback()
raise ex
finally:
self._load_table()
self._load_joins()
self.connection.close()
class CsvTargetsTable(TargetsTable):
"""Comma Separated Values form of the Target base."""
def __init__(self, db_dict, dbtype, verbose, output_format):
"""Read the input file into a dictionary."""
super(CsvTargetsTable, self).__init__(db_dict, dbtype, verbose,
output_format)
fn = db_dict['targetsfilename']
self.filename = fn
# If the filename is not a full directory, the data file must be
# either in the local directory or the same directory as the
# config file defined by the db_dict entry directory
if os.path.isabs(fn):
if not os.path.isfile(fn):
ValueError('CSV file %s does not exist ' % fn)
else:
self.filename = fn
else:
if os.path.isfile(fn):
self.filename = fn
else:
full_fn = os.path.join(db_dict['directory'], fn)
if not os.path.isfile(full_fn):
ValueError('CSV file %s does not exist '
'in local directory or config directory %s' %
(fn, db_dict['directory']))
else:
self.filename = full_fn
with open(self.filename) as input_file:
reader = csv.DictReader(input_file)
# create dictionary (id = key) with dictionary for
# each set of entries
result = {}
for row in reader:
key = int(row['TargetID'])
if key in result:
# duplicate row handling
print('ERROR. Duplicate Id in table: %s\nrow=%s' %
(key, row))
raise ValueError('Input Error. duplicate Id')
else:
result[key] = row
self.data_dict = result
def write_updated_record(self, record_id):
"""Backup the existing file and write the new one.
with cvs it writes the whole file back
"""
backfile = '%s.bak' % self.filename
# TODO does this cover directories/clean up for possible exceptions.
if os.path.isfile(backfile):
os.remove(backfile)
os.rename(self.filename, backfile)
self.write_file(self.filename)
def write_file(self, file_name):
"""Write the current Target base to the named file."""
with open(file_name, 'wb') as f:
writer = csv.DictWriter(f, fieldnames=self.get_field_list())
writer.writeheader()
for key, value in sorted(self.data_dict.items()):
writer.writerow(value)
| [((3808, 4429), 'collections.OrderedDict', 'OrderedDict', (["[('TargetID', ('ID', 2, int)), ('CompanyName', ('CompanyName', 12, str)), (\n 'Namespace', ('Namespace', 12, str)), ('SMIVersion', ('SMIVersion', 12,\n str)), ('Product', ('Product', 15, str)), ('Principal', ('Principal', \n 12, str)), ('Credential', ('Credential', 12, str)), ('CimomVersion', (\n 'CimomVersion', 15, str)), ('IPAddress', ('IPAddress', 12, str)), (\n 'InteropNamespace', ('Interop', 8, str)), ('Notify', ('Notify', 12, str\n )), ('NotifyUsers', ('NotifyUsers', 12, str)), ('Protocol', ('Prot', 5,\n str)), ('Port', ('Port', 4, int)), ('ScanEnabled', ('Enabled', 6, str))]"], {}), "([('TargetID', ('ID', 2, int)), ('CompanyName', ('CompanyName', \n 12, str)), ('Namespace', ('Namespace', 12, str)), ('SMIVersion', (\n 'SMIVersion', 12, str)), ('Product', ('Product', 15, str)), (\n 'Principal', ('Principal', 12, str)), ('Credential', ('Credential', 12,\n str)), ('CimomVersion', ('CimomVersion', 15, str)), ('IPAddress', (\n 'IPAddress', 12, str)), ('InteropNamespace', ('Interop', 8, str)), (\n 'Notify', ('Notify', 12, str)), ('NotifyUsers', ('NotifyUsers', 12, str\n )), ('Protocol', ('Prot', 5, str)), ('Port', ('Port', 4, int)), (\n 'ScanEnabled', ('Enabled', 6, str))])\n", (3819, 4429), False, 'from collections import OrderedDict\n'), ((9728, 9741), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (9739, 9741), False, 'from collections import OrderedDict\n'), ((24106, 24123), 'os.path.isabs', 'os.path.isabs', (['fn'], {}), '(fn)\n', (24119, 24123), False, 'import os\n'), ((25695, 25719), 'os.path.isfile', 'os.path.isfile', (['backfile'], {}), '(backfile)\n', (25709, 25719), False, 'import os\n'), ((25761, 25795), 'os.rename', 'os.rename', (['self.filename', 'backfile'], {}), '(self.filename, backfile)\n', (25770, 25795), False, 'import os\n'), ((24309, 24327), 'os.path.isfile', 'os.path.isfile', (['fn'], {}), '(fn)\n', (24323, 24327), False, 'import os\n'), ((24828, 24854), 'csv.DictReader', 'csv.DictReader', (['input_file'], {}), '(input_file)\n', (24842, 24854), False, 'import csv\n'), ((25733, 25752), 'os.remove', 'os.remove', (['backfile'], {}), '(backfile)\n', (25742, 25752), False, 'import os\n'), ((9821, 9860), 're.match', 're.match', (['ip_filter', "value['IPAddress']"], {}), "(ip_filter, value['IPAddress'])\n", (9829, 9860), False, 'import re\n'), ((9956, 10007), 're.match', 're.match', (["value['CompanyName']", 'company_name_filter'], {}), "(value['CompanyName'], company_name_filter)\n", (9964, 10007), False, 'import re\n'), ((24144, 24162), 'os.path.isfile', 'os.path.isfile', (['fn'], {}), '(fn)\n', (24158, 24162), False, 'import os\n'), ((24408, 24446), 'os.path.join', 'os.path.join', (["db_dict['directory']", 'fn'], {}), "(db_dict['directory'], fn)\n", (24420, 24446), False, 'import os\n'), ((24470, 24493), 'os.path.isfile', 'os.path.isfile', (['full_fn'], {}), '(full_fn)\n', (24484, 24493), False, 'import os\n'), ((12722, 12750), 'textwrap.wrap', 'wrap', (['field_value', 'max_width'], {}), '(field_value, max_width)\n', (12726, 12750), False, 'from textwrap import wrap\n')] |
jeikabu/lumberyard | dev/Code/Framework/AzFramework/CodeGen/AzEBusInline.py | 07228c605ce16cbf5aaa209a94a3cb9d6c1a4115 | #
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
import os
from az_code_gen.base import *
from AzReflectionCpp import format_cpp_annotations
class AZEBusInline_Driver(TemplateDriver):
def apply_transformations(self, json_object):
format_cpp_annotations(json_object)
def render_templates(self, input_file, **template_kwargs):
input_file_name, input_file_ext = os.path.splitext(input_file)
self.render_template_to_file(
"AzEBusInline.tpl", template_kwargs, '{}.generated.inline'.format(input_file_name))
# Factory function - called from launcher
def create_drivers(env):
return [AZEBusInline_Driver(env)]
| [((719, 754), 'AzReflectionCpp.format_cpp_annotations', 'format_cpp_annotations', (['json_object'], {}), '(json_object)\n', (741, 754), False, 'from AzReflectionCpp import format_cpp_annotations\n'), ((861, 889), 'os.path.splitext', 'os.path.splitext', (['input_file'], {}), '(input_file)\n', (877, 889), False, 'import os\n')] |
QUANTAXISER/QUANTAXIS | QUANTAXIS/QASU/crawl_eastmoney.py | 6ebd727b2900e8910fa45814bf45eeffca395250 | import os
from QUANTAXIS.QASetting import QALocalize
#from QUANTAXIS_CRAWLY.run_selenium_alone import (read_east_money_page_zjlx_to_sqllite, open_chrome_driver, close_chrome_dirver)
from QUANTAXIS_CRAWLY.run_selenium_alone import *
import urllib
import pandas as pd
import time
from QUANTAXIS.QAUtil import (DATABASE)
def QA_request_eastmoney_zjlx( param_stock_code_list ):
# 改用
strUrl = "http://data.eastmoney.com/zjlx/{}.html".format(param_stock_code_list[0])
# 延时
time.sleep(1.223)
response = urllib.request.urlopen(strUrl)
content = response.read()
# 🛠todo 改用 re 正则表达式做匹配
strings = content.decode("utf-8", "ignore")
string_lines = strings.split("\r\n")
#for aline in string_lines:
# aline = aline.strip()
# if '_stockCode' in aline:
# _stockCode = aline[len('var _stockCode = '):]
# _stockCode = _stockCode.strip("\"\"\,")
# if '_stockMarke' in aline:
# _stockMarke = aline[len('_stockMarke = '):]
# _stockMarke = _stockMarke.strip("\"\"\,")
# # 60XXXX ,
#_stockMarke = 1
# 00XXXX ,
# _stockMarke = 2
# 30XXXX ,
# _stockMarke = 2
# if '_stockName' in aline:
# _stockName = aline[len('_stockName = '):]
# _stockName = _stockName.strip("\"\"\,")
# if '_market' in aline:
# _market = aline[len('_market = '):]
# _market = _market.strip("\"\"\,")
# break
#_market= 'hsa'
# print(_stockCode)
# print(_stockMarke)
# print(_stockName)
# print(_market)
values = []
for aline in string_lines:
aline = aline.strip()
if 'EM_CapitalFlowInterface' in aline:
# print(aline)
# print('------------------')
aline = aline.strip()
if aline.startswith('var strUrl = '):
if 'var strUrl = ' in aline:
aline = aline[len('var strUrl = '):]
values = aline.split('+')
# print(values)
break
# print('------------------')
print(values)
for iStockCode in range(len(param_stock_code_list)):
requestStr = ""
strCode = param_stock_code_list[iStockCode]
if strCode[0:2] == '60':
_stockMarke = '1'
elif strCode[0:2] == '00' or strCode[0:2] == '30':
_stockMarke = '2'
else:
print(strCode + " 暂不支持, 60, 00, 30 开头的股票代码")
return
for iItem in values:
if '_stockCode' in iItem:
requestStr = requestStr + param_stock_code_list[iStockCode]
elif '_stockMarke' in iItem:
requestStr = requestStr + _stockMarke
else:
if 'http://ff.eastmoney.com/' in iItem:
requestStr = 'http://ff.eastmoney.com/'
else:
iItem = iItem.strip(' "')
iItem = iItem.rstrip(' "')
requestStr = requestStr + iItem
# print(requestStr)
# 延时
time.sleep(1.456)
response = urllib.request.urlopen(requestStr)
content2 = response.read()
# print(content2)
strings = content2.decode("utf-8", "ignore")
# print(strings)
list_data_zjlx = []
if 'var aff_data=({data:[["' in strings:
leftChars = strings[len('var aff_data=({data:[["'):]
# print(leftChars)
dataArrays = leftChars.split(',')
# print(dataArrays)
for aItemIndex in range(0, len(dataArrays), 13):
'''
日期
收盘价
涨跌幅
主力净流入 净额 净占比
超大单净流入 净额 净占比
大单净流入 净额 净占比
中单净流入 净额 净占比
小单净流入 净额 净占比
'''
dict_row = {}
dict_row['stock_code'] = param_stock_code_list[iStockCode]
# 日期
# print(aItemIndex)
data01 = dataArrays[aItemIndex]
data01 = data01.strip('"')
# print('日期',data01)
dict_row['date'] = data01
# 主力净流入 净额
data02 = dataArrays[aItemIndex + 1]
data02 = data02.strip('"')
# print('主力净流入 净额',data02)
dict_row['zljll_je_wy'] = data02
# 主力净流入 净占比
data03 = dataArrays[aItemIndex + 2]
data03 = data03.strip('"')
# print('主力净流入 净占比',data03)
# date01 = aItemData.strip('[\'\'')
dict_row['zljll_jzb_bfb'] = data03
# 超大单净流入 净额
data04 = dataArrays[aItemIndex + 3]
data04 = data04.strip('"')
# print('超大单净流入 净额',data04)
dict_row['cddjll_je_wy'] = data04
# 超大单净流入 净占比
data05 = dataArrays[aItemIndex + 4]
data05 = data05.strip('"')
# print('超大单净流入 净占比',data05)
dict_row['cddjll_je_jzb'] = data05
# 大单净流入 净额
data06 = dataArrays[aItemIndex + 5]
data06 = data06.strip('"')
# print('大单净流入 净额',data06)
dict_row['ddjll_je_wy'] = data06
# 大单净流入 净占比
data07 = dataArrays[aItemIndex + 6]
data07 = data07.strip('"')
# print('大单净流入 净占比',data07)
dict_row['ddjll_je_jzb'] = data07
# 中单净流入 净额
data08 = dataArrays[aItemIndex + 7]
data08 = data08.strip('"')
# print('中单净流入 净额',data08)
dict_row['zdjll_je_wy'] = data08
# 中单净流入 净占比
data09 = dataArrays[aItemIndex + 8]
data09 = data09.strip('"')
# print('中单净流入 净占比',data09)
dict_row['zdjll_je_jzb'] = data09
# 小单净流入 净额
data10 = dataArrays[aItemIndex + 9]
data10 = data10.strip('"')
# print('小单净流入 净额',data10)
dict_row['xdjll_je_wy'] = data10
# 小单净流入 净占比
data11 = dataArrays[aItemIndex + 10]
data11 = data11.strip('"')
# print('小单净流入 净占比',data11)
dict_row['xdjll_je_jzb'] = data11
# 收盘价
data12 = dataArrays[aItemIndex + 11]
data12 = data12.strip('"')
# print('收盘价',data12)
dict_row['close_price'] = data12
# 涨跌幅
data13 = dataArrays[aItemIndex + 12]
data13 = data13.strip('"')
data13 = data13.strip('"]]})')
# print('涨跌幅',data13)
dict_row['change_price'] = data13
# 读取一条记录成功
# print("成功读取一条记录")
# print(dict_row)
list_data_zjlx.append(dict_row)
# print(list_data_zjlx)
df = pd.DataFrame(list_data_zjlx)
# print(df)
client = DATABASE
coll_stock_zjlx = client.eastmoney_stock_zjlx
# coll_stock_zjlx.insert_many(QA_util_to_json_from_pandas(df))
for i in range(len(list_data_zjlx)):
aRec = list_data_zjlx[i]
# 🛠todo 当天结束后,获取当天的资金流相,当天的资金流向是瞬时间点的
ret = coll_stock_zjlx.find_one(aRec)
if ret == None:
coll_stock_zjlx.insert_one(aRec)
print("🤑 插入新的记录 ", aRec)
else:
print("😵 记录已经存在 ", ret)
'''
作为测试用例来获取, 对比 reqeust 方式的获取数据是否一致
'''
def QA_read_eastmoney_zjlx_web_page_to_sqllite(stockCodeList = None):
# todo 🛠 check stockCode 是否存在有效合法
# todo 🛠 QALocalize 从QALocalize 目录中读取 固定位置存放驱动文件
print("📨当前工作路径文件位置 : ",os.getcwd())
path_check = os.getcwd()+"/QUANTAXIS_WEBDRIVER"
if os.path.exists(path_check) == False:
print("😵 确认当前路径是否包含selenium_driver目录 😰 ")
return
else:
print(os.getcwd()+"/QUANTAXIS_WEBDRIVER"," 目录存在 😁")
print("")
# path_for_save_data = QALocalize.download_path + "/eastmoney_stock_zjlx"
# isExists = os.path.exists(path_for_save_data)
# if isExists == False:
# os.mkdir(path_for_save_data)
# isExists = os.path.exists(path_for_save_data)
# if isExists == True:
# print(path_for_save_data,"目录不存在! 成功建立目录 😢")
# else:
# print(path_for_save_data,"目录不存在! 失败建立目录 🤮, 可能没有权限 🈲")
# return
# else:
# print(path_for_save_data,"目录存在!准备读取数据 😋")
browser = open_chrome_driver()
for indexCode in range(len(stockCodeList)):
#full_path_name = path_for_save_data + "/" + stockCodeList[indexCode] + "_zjlx.sqlite.db"
read_east_money_page_zjlx_to_sqllite(stockCodeList[indexCode], browser)
pass
close_chrome_dirver(browser)
#创建目录
#启动线程读取网页,写入数据库
#等待完成 | [((488, 505), 'time.sleep', 'time.sleep', (['(1.223)'], {}), '(1.223)\n', (498, 505), False, 'import time\n'), ((522, 552), 'urllib.request.urlopen', 'urllib.request.urlopen', (['strUrl'], {}), '(strUrl)\n', (544, 552), False, 'import urllib\n'), ((3133, 3150), 'time.sleep', 'time.sleep', (['(1.456)'], {}), '(1.456)\n', (3143, 3150), False, 'import time\n'), ((3171, 3205), 'urllib.request.urlopen', 'urllib.request.urlopen', (['requestStr'], {}), '(requestStr)\n', (3193, 3205), False, 'import urllib\n'), ((7139, 7167), 'pandas.DataFrame', 'pd.DataFrame', (['list_data_zjlx'], {}), '(list_data_zjlx)\n', (7151, 7167), True, 'import pandas as pd\n'), ((7951, 7962), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (7960, 7962), False, 'import os\n'), ((7969, 7980), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (7978, 7980), False, 'import os\n'), ((8011, 8037), 'os.path.exists', 'os.path.exists', (['path_check'], {}), '(path_check)\n', (8025, 8037), False, 'import os\n'), ((8137, 8148), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (8146, 8148), False, 'import os\n')] |
javicacheiro/salt-git-synchronizer-proxy | wsgi.py | c93de5c0b26afe2b9ec72156497894df7f15d692 | #!/usr/bin/env python
import logging
import sys
from app import app as application
def setup_flask_logging():
# Log to stdout
handler = logging.StreamHandler(sys.stdout)
# Log to a file
#handler = logging.FileHandler('./application.log')
handler.setLevel(logging.INFO)
handler.setFormatter(logging.Formatter(
'%(asctime)s [%(funcName)s] %(levelname)s: %(message)s '
))
application.logger.addHandler(handler)
# Set default log level for the general logger
# each handler can then restrict the messages logged
application.logger.setLevel(logging.INFO)
setup_flask_logging()
if __name__ == '__main__':
application.run()
| [((552, 593), 'app.app.logger.setLevel', 'application.logger.setLevel', (['logging.INFO'], {}), '(logging.INFO)\n', (579, 593), True, 'from app import app as application\n'), ((146, 179), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (167, 179), False, 'import logging\n'), ((411, 449), 'app.app.logger.addHandler', 'application.logger.addHandler', (['handler'], {}), '(handler)\n', (440, 449), True, 'from app import app as application\n'), ((649, 666), 'app.app.run', 'application.run', ([], {}), '()\n', (664, 666), True, 'from app import app as application\n'), ((316, 391), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s [%(funcName)s] %(levelname)s: %(message)s """'], {}), "('%(asctime)s [%(funcName)s] %(levelname)s: %(message)s ')\n", (333, 391), False, 'import logging\n')] |
PythonixCoders/PyWeek29 | game/base/enemy.py | 5c7492466481dec40619272a3da7fa4b9a72c1d6 | #!/usr/bin/env python
from game.base.being import Being
class Enemy(Being):
def __init__(self, app, scene, **kwargs):
super().__init__(app, scene, **kwargs)
self.friendly = False
| [] |
Hawk94/coin_tracker | main/rates/migrations/0002_auto_20170625_1510.py | 082909e17308a8dd460225c1b035751d12a27106 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-25 15:10
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('rates', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='rate',
old_name='euro_rate',
new_name='eur_rate',
),
migrations.RenameField(
model_name='rate',
old_name='pound_rates',
new_name='gbp_rate',
),
]
| [((279, 368), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""rate"""', 'old_name': '"""euro_rate"""', 'new_name': '"""eur_rate"""'}), "(model_name='rate', old_name='euro_rate', new_name=\n 'eur_rate')\n", (301, 368), False, 'from django.db import migrations\n'), ((420, 511), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""rate"""', 'old_name': '"""pound_rates"""', 'new_name': '"""gbp_rate"""'}), "(model_name='rate', old_name='pound_rates', new_name=\n 'gbp_rate')\n", (442, 511), False, 'from django.db import migrations\n')] |
dojeda/quetzal-openapi-client | setup.py | d9d4dc99bb425a3f89dcbb80d5096f554bc42fff | # coding: utf-8
"""
Quetzal API
Quetzal: an API to manage data files and their associated metadata.
OpenAPI spec version: 0.5.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from setuptools import setup, find_packages # noqa: H301
NAME = "quetzal-openapi-client"
VERSION = "0.5.0"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
REQUIRES = ["urllib3 >= 1.15", "six >= 1.10", "certifi", "python-dateutil"]
setup(
name=NAME,
version=VERSION,
description="Quetzal API auto-generated client",
author='David Ojeda',
author_email="[email protected]",
url="https://github.com/quet.zal/quetzal-openapi-client",
project_urls={
"Documentation": "https://quetzal-openapi-client.readthedocs.io",
"Code": "https://github.com/quetz-al/quetzal-openapi-client",
"Issue tracker": "https://github.com/quetz-al/quetzal-openapi-client/issues",
},
license="BSD-3-Clause",
keywords=["OpenAPI", "OpenAPI-Generator", "Quetzal API"],
install_requires=REQUIRES,
packages=find_packages(exclude=['test', 'docs']),
namespace_packages=['quetzal'],
include_package_data=True,
long_description="""\
quetzal-openapi-client
======================
This is an auto-generated package using
[openapi-generator](https://github.com/OpenAPITools/openapi-generator)
from an OpenAPI specification of the Quetzal API.
An improvement layer on this client exists in the quetzal-client package.
Quetzal is an API to manage data files and their associated metadata.
See more at [quetz.al](https://quetz.al) and its
[readthedocs documentation](https://quetzal-api.readthedocs.io).
""",
long_description_content_type='text/markdown',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Database :: Front-Ends',
'Topic :: Internet :: WWW/HTTP',
'Topic :: System :: Archiving',
],
)
| [((1168, 1207), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['test', 'docs']"}), "(exclude=['test', 'docs'])\n", (1181, 1207), False, 'from setuptools import setup, find_packages\n')] |
jonyg80/youtube-dl | youtube_dl/extractor/turner.py | ef3a87fb77891329de1d3dbebfee53bf50645261 | # coding: utf-8
from __future__ import unicode_literals
import re
from .adobepass import AdobePassIE
from ..compat import compat_str
from ..utils import (
fix_xml_ampersands,
xpath_text,
int_or_none,
determine_ext,
float_or_none,
parse_duration,
xpath_attr,
update_url_query,
ExtractorError,
strip_or_none,
url_or_none,
)
class TurnerBaseIE(AdobePassIE):
_AKAMAI_SPE_TOKEN_CACHE = {}
def _extract_timestamp(self, video_data):
return int_or_none(xpath_attr(video_data, 'dateCreated', 'uts'))
def _add_akamai_spe_token(self, tokenizer_src, video_url, content_id, ap_data, custom_tokenizer_query=None):
secure_path = self._search_regex(r'https?://[^/]+(.+/)', video_url, 'secure path') + '*'
token = self._AKAMAI_SPE_TOKEN_CACHE.get(secure_path)
if not token:
query = {
'path': secure_path,
}
if custom_tokenizer_query:
query.update(custom_tokenizer_query)
else:
query['videoId'] = content_id
if ap_data.get('auth_required'):
query['accessToken'] = self._extract_mvpd_auth(ap_data['url'], content_id, ap_data['site_name'], ap_data['site_name'])
auth = self._download_xml(
tokenizer_src, content_id, query=query)
error_msg = xpath_text(auth, 'error/msg')
if error_msg:
raise ExtractorError(error_msg, expected=True)
token = xpath_text(auth, 'token')
if not token:
return video_url
self._AKAMAI_SPE_TOKEN_CACHE[secure_path] = token
return video_url + '?hdnea=' + token
def _extract_cvp_info(self, data_src, video_id, path_data={}, ap_data={}, fatal=False):
video_data = self._download_xml(
data_src, video_id,
transform_source=lambda s: fix_xml_ampersands(s).strip(),
fatal=fatal)
if not video_data:
return {}
video_id = video_data.attrib['id']
title = xpath_text(video_data, 'headline', fatal=True)
content_id = xpath_text(video_data, 'contentId') or video_id
# rtmp_src = xpath_text(video_data, 'akamai/src')
# if rtmp_src:
# split_rtmp_src = rtmp_src.split(',')
# if len(split_rtmp_src) == 2:
# rtmp_src = split_rtmp_src[1]
# aifp = xpath_text(video_data, 'akamai/aifp', default='')
urls = []
formats = []
thumbnails = []
subtitles = {}
rex = re.compile(
r'(?P<width>[0-9]+)x(?P<height>[0-9]+)(?:_(?P<bitrate>[0-9]+))?')
# Possible formats locations: files/file, files/groupFiles/files
# and maybe others
for video_file in video_data.findall('.//file'):
video_url = url_or_none(video_file.text.strip())
if not video_url:
continue
ext = determine_ext(video_url)
if video_url.startswith('/mp4:protected/'):
continue
# TODO Correct extraction for these files
# protected_path_data = path_data.get('protected')
# if not protected_path_data or not rtmp_src:
# continue
# protected_path = self._search_regex(
# r'/mp4:(.+)\.[a-z0-9]', video_url, 'secure path')
# auth = self._download_webpage(
# protected_path_data['tokenizer_src'], query={
# 'path': protected_path,
# 'videoId': content_id,
# 'aifp': aifp,
# })
# token = xpath_text(auth, 'token')
# if not token:
# continue
# video_url = rtmp_src + video_url + '?' + token
elif video_url.startswith('/secure/'):
secure_path_data = path_data.get('secure')
if not secure_path_data:
continue
video_url = self._add_akamai_spe_token(
secure_path_data['tokenizer_src'],
secure_path_data['media_src'] + video_url,
content_id, ap_data)
elif not re.match('https?://', video_url):
base_path_data = path_data.get(ext, path_data.get('default', {}))
media_src = base_path_data.get('media_src')
if not media_src:
continue
video_url = media_src + video_url
if video_url in urls:
continue
urls.append(video_url)
format_id = video_file.get('bitrate')
if ext in ('scc', 'srt', 'vtt'):
subtitles.setdefault('en', []).append({
'ext': ext,
'url': video_url,
})
elif ext == 'png':
thumbnails.append({
'id': format_id,
'url': video_url,
})
elif ext == 'smil':
formats.extend(self._extract_smil_formats(
video_url, video_id, fatal=False))
elif re.match(r'https?://[^/]+\.akamaihd\.net/[iz]/', video_url):
formats.extend(self._extract_akamai_formats(
video_url, video_id, {
'hds': path_data.get('f4m', {}).get('host'),
# nba.cdn.turner.com, ht.cdn.turner.com, ht2.cdn.turner.com
# ht3.cdn.turner.com, i.cdn.turner.com, s.cdn.turner.com
# ssl.cdn.turner.com
'http': 'pmd.cdn.turner.com',
}))
elif ext == 'm3u8':
m3u8_formats = self._extract_m3u8_formats(
video_url, video_id, 'mp4',
m3u8_id=format_id or 'hls', fatal=False)
if '/secure/' in video_url and '?hdnea=' in video_url:
for f in m3u8_formats:
f['_seekable'] = False
formats.extend(m3u8_formats)
elif ext == 'f4m':
formats.extend(self._extract_f4m_formats(
update_url_query(video_url, {'hdcore': '3.7.0'}),
video_id, f4m_id=format_id or 'hds', fatal=False))
else:
f = {
'format_id': format_id,
'url': video_url,
'ext': ext,
}
mobj = rex.search(video_url)
if mobj:
f.update({
'width': int(mobj.group('width')),
'height': int(mobj.group('height')),
'tbr': int_or_none(mobj.group('bitrate')),
})
elif isinstance(format_id, compat_str):
if format_id.isdigit():
f['tbr'] = int(format_id)
else:
mobj = re.match(r'ios_(audio|[0-9]+)$', format_id)
if mobj:
if mobj.group(1) == 'audio':
f.update({
'vcodec': 'none',
'ext': 'm4a',
})
else:
f['tbr'] = int(mobj.group(1))
formats.append(f)
self._sort_formats(formats)
for source in video_data.findall('closedCaptions/source'):
for track in source.findall('track'):
track_url = url_or_none(track.get('url'))
if not track_url or track_url.endswith('/big'):
continue
lang = track.get('lang') or track.get('label') or 'en'
subtitles.setdefault(lang, []).append({
'url': track_url,
'ext': {
'scc': 'scc',
'webvtt': 'vtt',
'smptett': 'tt',
}.get(source.get('format'))
})
thumbnails.extend({
'id': image.get('cut') or image.get('name'),
'url': image.text,
'width': int_or_none(image.get('width')),
'height': int_or_none(image.get('height')),
} for image in video_data.findall('images/image'))
is_live = xpath_text(video_data, 'isLive') == 'true'
return {
'id': video_id,
'title': self._live_title(title) if is_live else title,
'formats': formats,
'subtitles': subtitles,
'thumbnails': thumbnails,
'thumbnail': xpath_text(video_data, 'poster'),
'description': strip_or_none(xpath_text(video_data, 'description')),
'duration': parse_duration(xpath_text(video_data, 'length') or xpath_text(video_data, 'trt')),
'timestamp': self._extract_timestamp(video_data),
'upload_date': xpath_attr(video_data, 'metas', 'version'),
'series': xpath_text(video_data, 'showTitle'),
'season_number': int_or_none(xpath_text(video_data, 'seasonNumber')),
'episode_number': int_or_none(xpath_text(video_data, 'episodeNumber')),
'is_live': is_live,
}
def _extract_ngtv_info(self, media_id, tokenizer_query, ap_data=None):
streams_data = self._download_json(
'http://medium.ngtv.io/media/%s/tv' % media_id,
media_id)['media']['tv']
duration = None
chapters = []
formats = []
for supported_type in ('unprotected', 'bulkaes'):
stream_data = streams_data.get(supported_type, {})
m3u8_url = stream_data.get('secureUrl') or stream_data.get('url')
if not m3u8_url:
continue
if stream_data.get('playlistProtection') == 'spe':
m3u8_url = self._add_akamai_spe_token(
'http://token.ngtv.io/token/token_spe',
m3u8_url, media_id, ap_data or {}, tokenizer_query)
formats.extend(self._extract_m3u8_formats(
m3u8_url, media_id, 'mp4', m3u8_id='hls', fatal=False))
duration = float_or_none(stream_data.get('totalRuntime'))
if not chapters:
for chapter in stream_data.get('contentSegments', []):
start_time = float_or_none(chapter.get('start'))
chapter_duration = float_or_none(chapter.get('duration'))
if start_time is None or chapter_duration is None:
continue
chapters.append({
'start_time': start_time,
'end_time': start_time + chapter_duration,
})
self._sort_formats(formats)
return {
'formats': formats,
'chapters': chapters,
'duration': duration,
}
| [((2585, 2660), 're.compile', 're.compile', (['"""(?P<width>[0-9]+)x(?P<height>[0-9]+)(?:_(?P<bitrate>[0-9]+))?"""'], {}), "('(?P<width>[0-9]+)x(?P<height>[0-9]+)(?:_(?P<bitrate>[0-9]+))?')\n", (2595, 2660), False, 'import re\n'), ((4294, 4326), 're.match', 're.match', (['"""https?://"""', 'video_url'], {}), "('https?://', video_url)\n", (4302, 4326), False, 'import re\n'), ((5241, 5301), 're.match', 're.match', (['"""https?://[^/]+\\\\.akamaihd\\\\.net/[iz]/"""', 'video_url'], {}), "('https?://[^/]+\\\\.akamaihd\\\\.net/[iz]/', video_url)\n", (5249, 5301), False, 'import re\n'), ((7089, 7131), 're.match', 're.match', (['"""ios_(audio|[0-9]+)$"""', 'format_id'], {}), "('ios_(audio|[0-9]+)$', format_id)\n", (7097, 7131), False, 'import re\n')] |
robk-dev/algo-trading | ml/sandbox/00-data.py | aa8d76ee739431ab24407fe094e0753c588dc8c6 | from alpha_vantage.timeseries import TimeSeries
from pprint import pprint
import json
import argparse
def save_dataset(symbol='MSFT', time_window='daily_adj'):
credentials = json.load(open('creds.json', 'r'))
api_key = credentials['av_api_key']
print(symbol, time_window)
ts = TimeSeries(key=api_key, output_format='pandas')
if time_window == 'intraday':
data, meta_data = ts.get_intraday(
symbol=symbol, interval='1min', outputsize='full')
elif time_window == 'daily':
data, meta_data = ts.get_daily(symbol, outputsize='full')
elif time_window == 'daily_adj':
data, meta_data = ts.get_daily_adjusted(symbol, outputsize='full')
pprint(data.head(10))
data.to_csv(f'./{symbol}_{time_window}.csv')
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('symbol', type=str, help="the stock symbol you want to download")
parser.add_argument('time_window', type=str, choices=[
'intraday', 'daily', 'daily_adj'], help="the time period you want to download the stock history for")
namespace = parser.parse_args()
save_dataset(**vars(namespace))
| [((295, 342), 'alpha_vantage.timeseries.TimeSeries', 'TimeSeries', ([], {'key': 'api_key', 'output_format': '"""pandas"""'}), "(key=api_key, output_format='pandas')\n", (305, 342), False, 'from alpha_vantage.timeseries import TimeSeries\n'), ((813, 838), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (836, 838), False, 'import argparse\n')] |
al3pht/cloud-custodian | tests/zpill.py | ce6613d1b716f336384c5e308eee300389e6bf50 | # Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import fnmatch
from io import StringIO
import json
import os
import shutil
import zipfile
import re
from datetime import datetime, timedelta, tzinfo
from distutils.util import strtobool
import boto3
import placebo
from botocore.response import StreamingBody
from placebo import pill
from c7n.testing import CustodianTestCore
from .constants import ACCOUNT_ID
# Custodian Test Account. This is used only for testing.
# Access is available for community project maintainers.
###########################################################################
# BEGIN PLACEBO MONKEY PATCH
#
# Placebo is effectively abandoned upstream, since mitch went back to work at AWS, irony...
# These monkeypatch patches represent fixes on trunk of that repo that have not been released
# into an extant version, we carry them here. We can drop this when this issue is resolved
#
# https://github.com/garnaat/placebo/issues/63
#
# License - Apache 2.0
# Copyright (c) 2015 Mitch Garnaat
class UTC(tzinfo):
"""UTC"""
def utcoffset(self, dt):
return timedelta(0)
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return timedelta(0)
utc = UTC()
def deserialize(obj):
"""Convert JSON dicts back into objects."""
# Be careful of shallow copy here
target = dict(obj)
class_name = None
if "__class__" in target:
class_name = target.pop("__class__")
if "__module__" in obj:
obj.pop("__module__")
# Use getattr(module, class_name) for custom types if needed
if class_name == "datetime":
return datetime(tzinfo=utc, **target)
if class_name == "StreamingBody":
return StringIO(target["body"])
# Return unrecognized structures as-is
return obj
def serialize(obj):
"""Convert objects into JSON structures."""
# Record class and module information for deserialization
result = {"__class__": obj.__class__.__name__}
try:
result["__module__"] = obj.__module__
except AttributeError:
pass
# Convert objects to dictionary representation based on type
if isinstance(obj, datetime):
result["year"] = obj.year
result["month"] = obj.month
result["day"] = obj.day
result["hour"] = obj.hour
result["minute"] = obj.minute
result["second"] = obj.second
result["microsecond"] = obj.microsecond
return result
if isinstance(obj, StreamingBody):
result["body"] = obj.read()
obj._raw_stream = StringIO(result["body"])
obj._amount_read = 0
return result
if isinstance(obj, bytes):
return obj.decode('utf8')
# Raise a TypeError if the object isn't recognized
raise TypeError("Type not serializable")
pill.FakeHttpResponse.raw = None
placebo.pill.serialize = serialize
placebo.pill.deserialize = deserialize
# END PLACEBO MONKEY
##########################################################################
class BluePill(pill.Pill):
def playback(self):
super(BluePill, self).playback()
self._avail = self.get_available()
def get_available(self):
return {
os.path.join(self.data_path, n)
for n in fnmatch.filter(os.listdir(self.data_path), "*.json")
}
def get_next_file_path(self, service, operation):
fn, format = super(BluePill, self).get_next_file_path(service, operation)
# couple of double use cases
if fn in self._avail:
self._avail.remove(fn)
else:
print("\ndouble use %s\n" % fn)
return (fn, format)
def stop(self):
result = super(BluePill, self).stop()
if self._avail:
print("Unused json files \n %s" % ("\n".join(sorted(self._avail))))
return result
class ZippedPill(pill.Pill):
def __init__(self, path, prefix=None, debug=False):
super(ZippedPill, self).__init__(prefix, debug)
self.path = path
self._used = set()
self.archive = None
def playback(self):
self.archive = zipfile.ZipFile(self.path, "r")
self._files = set(self.archive.namelist())
return super(ZippedPill, self).playback()
def record(self):
self.archive = zipfile.ZipFile(self.path, "a", zipfile.ZIP_DEFLATED)
self._files = set()
files = {n for n in self.archive.namelist() if n.startswith(self.prefix)}
if not files:
return super(ZippedPill, self).record()
# We can't update files in a zip, so copy
self.archive.close()
os.rename(self.path, "%s.tmp" % self.path)
src = zipfile.ZipFile("%s.tmp" % self.path, "r")
self.archive = zipfile.ZipFile(self.path, "w", zipfile.ZIP_DEFLATED)
for n in src.namelist():
if n in files:
continue
self.archive.writestr(n, src.read(n))
os.remove("%s.tmp" % self.path)
return super(ZippedPill, self).record()
def stop(self):
super(ZippedPill, self).stop()
if self.archive:
self.archive.close()
def save_response(self, service, operation, response_data, http_response=200):
filepath = self.get_new_file_path(service, operation)
pill.LOG.debug("save_response: path=%s", filepath)
json_data = {"status_code": http_response, "data": response_data}
self.archive.writestr(
filepath,
json.dumps(json_data, indent=4, default=pill.serialize),
zipfile.ZIP_DEFLATED,
)
self._files.add(filepath)
def load_response(self, service, operation):
response_file = self.get_next_file_path(service, operation)
self._used.add(response_file)
pill.LOG.debug("load_responses: %s", response_file)
response_data = json.loads(
self.archive.read(response_file), object_hook=pill.deserialize
)
return (
pill.FakeHttpResponse(response_data["status_code"]), response_data["data"]
)
def get_new_file_path(self, service, operation):
base_name = "{0}.{1}".format(service, operation)
if self.prefix:
base_name = "{0}.{1}".format(self.prefix, base_name)
pill.LOG.debug("get_new_file_path: %s", base_name)
index = 0
glob_pattern = os.path.join(self._data_path, base_name + "*")
for file_path in fnmatch.filter(self._files, glob_pattern):
file_name = os.path.basename(file_path)
m = self.filename_re.match(file_name)
if m:
i = int(m.group("index"))
if i > index:
index = i
index += 1
return os.path.join(self._data_path, "{0}_{1}.json".format(base_name, index))
def get_next_file_path(self, service, operation):
base_name = "{0}.{1}".format(service, operation)
if self.prefix:
base_name = "{0}.{1}".format(self.prefix, base_name)
pill.LOG.debug("get_next_file_path: %s", base_name)
next_file = None
while next_file is None:
index = self._index.setdefault(base_name, 1)
fn = os.path.join(self._data_path, base_name + "_{0}.json".format(index))
fn = fn.replace('\\', '/')
if fn in self._files:
next_file = fn
self._index[base_name] += 1
self._files.add(fn)
elif index != 1:
self._index[base_name] = 1
else:
# we are looking for the first index and it's not here
raise IOError("response file ({0}) not found".format(fn))
return fn
def attach(session, data_path, prefix=None, debug=False):
pill = ZippedPill(data_path, prefix=prefix, debug=debug)
pill.attach(session, prefix)
return pill
class RedPill(pill.Pill):
def datetime_converter(self, obj):
if isinstance(obj, datetime):
return obj.isoformat()
def save_response(self, service, operation, response_data,
http_response=200):
"""
Override to sanitize response metadata and account_ids
"""
# aws sso setups involve a short lived credential transfer
if service == "portal.sso":
return
if 'ResponseMetadata' in response_data:
response_data['ResponseMetadata'] = {}
response_data = json.dumps(response_data, default=serialize)
response_data = re.sub(r"\b\d{12}\b", ACCOUNT_ID, response_data) # noqa
response_data = json.loads(response_data, object_hook=deserialize)
super(RedPill, self).save_response(service, operation, response_data,
http_response)
class PillTest(CustodianTestCore):
archive_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "placebo_data.zip"
)
placebo_dir = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "data", "placebo"
)
output_dir = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "data", "output"
)
recording = False
def cleanUp(self):
self.pill = None
def record_flight_data(self, test_case, zdata=False, augment=False, region=None):
self.recording = True
test_dir = os.path.join(self.placebo_dir, test_case)
if not (zdata or augment):
if os.path.exists(test_dir):
shutil.rmtree(test_dir)
os.makedirs(test_dir)
session = boto3.Session(region_name=region)
default_region = session.region_name
if not zdata:
pill = RedPill()
pill.attach(session, test_dir)
else:
pill = attach(session, self.archive_path, test_case, debug=True)
pill.record()
self.pill = pill
self.addCleanup(pill.stop)
self.addCleanup(self.cleanUp)
class FakeFactory:
def __call__(fake, region=None, assume=None):
new_session = None
# slightly experimental for test recording, using
# cross account assumes, note this will record sts
# assume role api calls creds into test data, they will
# go stale, but its best to modify before commiting.
# Disabled by default.
if 0 and (assume is not False and fake.assume_role):
client = session.client('sts')
creds = client.assume_role(
RoleArn=fake.assume_role,
RoleSessionName='CustodianTest')['Credentials']
new_session = boto3.Session(
aws_access_key_id=creds['AccessKeyId'],
aws_secret_access_key=creds['SecretAccessKey'],
aws_session_token=creds['SessionToken'],
region_name=region or fake.region or default_region)
elif region and region != default_region:
new_session = boto3.Session(region_name=region)
if new_session:
assert not zdata
new_pill = placebo.attach(new_session, test_dir, debug=True)
new_pill.record()
self.addCleanup(new_pill.stop)
return new_session
return session
return FakeFactory()
def replay_flight_data(self, test_case, zdata=False, region=None):
"""
The `region` argument is to allow functional tests to override the
default region. It is unused when replaying stored data.
"""
if strtobool(os.environ.get('C7N_FUNCTIONAL', 'no')):
self.recording = True
return lambda region=region, assume=None: boto3.Session(region_name=region)
if not zdata:
test_dir = os.path.join(self.placebo_dir, test_case)
if not os.path.exists(test_dir):
raise RuntimeError("Invalid Test Dir for flight data %s" % test_dir)
session = boto3.Session(region_name=region)
if not zdata:
pill = placebo.attach(session, test_dir)
# pill = BluePill()
# pill.attach(session, test_dir)
else:
pill = attach(session, self.archive_path, test_case, False)
pill.playback()
self.addCleanup(pill.stop)
self.addCleanup(self.cleanUp)
return lambda region=None, assume=None: session
| [((7861, 7889), 'placebo.pill.attach', 'pill.attach', (['session', 'prefix'], {}), '(session, prefix)\n', (7872, 7889), False, 'from placebo import pill\n'), ((1130, 1142), 'datetime.timedelta', 'timedelta', (['(0)'], {}), '(0)\n', (1139, 1142), False, 'from datetime import datetime, timedelta, tzinfo\n'), ((1230, 1242), 'datetime.timedelta', 'timedelta', (['(0)'], {}), '(0)\n', (1239, 1242), False, 'from datetime import datetime, timedelta, tzinfo\n'), ((1658, 1688), 'datetime.datetime', 'datetime', ([], {'tzinfo': 'utc'}), '(tzinfo=utc, **target)\n', (1666, 1688), False, 'from datetime import datetime, timedelta, tzinfo\n'), ((1742, 1766), 'io.StringIO', 'StringIO', (["target['body']"], {}), "(target['body'])\n", (1750, 1766), False, 'from io import StringIO\n'), ((2586, 2610), 'io.StringIO', 'StringIO', (["result['body']"], {}), "(result['body'])\n", (2594, 2610), False, 'from io import StringIO\n'), ((4137, 4168), 'zipfile.ZipFile', 'zipfile.ZipFile', (['self.path', '"""r"""'], {}), "(self.path, 'r')\n", (4152, 4168), False, 'import zipfile\n'), ((4316, 4369), 'zipfile.ZipFile', 'zipfile.ZipFile', (['self.path', '"""a"""', 'zipfile.ZIP_DEFLATED'], {}), "(self.path, 'a', zipfile.ZIP_DEFLATED)\n", (4331, 4369), False, 'import zipfile\n'), ((4644, 4686), 'os.rename', 'os.rename', (['self.path', "('%s.tmp' % self.path)"], {}), "(self.path, '%s.tmp' % self.path)\n", (4653, 4686), False, 'import os\n'), ((4701, 4743), 'zipfile.ZipFile', 'zipfile.ZipFile', (["('%s.tmp' % self.path)", '"""r"""'], {}), "('%s.tmp' % self.path, 'r')\n", (4716, 4743), False, 'import zipfile\n'), ((4768, 4821), 'zipfile.ZipFile', 'zipfile.ZipFile', (['self.path', '"""w"""', 'zipfile.ZIP_DEFLATED'], {}), "(self.path, 'w', zipfile.ZIP_DEFLATED)\n", (4783, 4821), False, 'import zipfile\n'), ((4966, 4997), 'os.remove', 'os.remove', (["('%s.tmp' % self.path)"], {}), "('%s.tmp' % self.path)\n", (4975, 4997), False, 'import os\n'), ((5318, 5368), 'placebo.pill.LOG.debug', 'pill.LOG.debug', (['"""save_response: path=%s"""', 'filepath'], {}), "('save_response: path=%s', filepath)\n", (5332, 5368), False, 'from placebo import pill\n'), ((5807, 5858), 'placebo.pill.LOG.debug', 'pill.LOG.debug', (['"""load_responses: %s"""', 'response_file'], {}), "('load_responses: %s', response_file)\n", (5821, 5858), False, 'from placebo import pill\n'), ((6302, 6352), 'placebo.pill.LOG.debug', 'pill.LOG.debug', (['"""get_new_file_path: %s"""', 'base_name'], {}), "('get_new_file_path: %s', base_name)\n", (6316, 6352), False, 'from placebo import pill\n'), ((6394, 6440), 'os.path.join', 'os.path.join', (['self._data_path', "(base_name + '*')"], {}), "(self._data_path, base_name + '*')\n", (6406, 6440), False, 'import os\n'), ((6467, 6508), 'fnmatch.filter', 'fnmatch.filter', (['self._files', 'glob_pattern'], {}), '(self._files, glob_pattern)\n', (6481, 6508), False, 'import fnmatch\n'), ((7046, 7097), 'placebo.pill.LOG.debug', 'pill.LOG.debug', (['"""get_next_file_path: %s"""', 'base_name'], {}), "('get_next_file_path: %s', base_name)\n", (7060, 7097), False, 'from placebo import pill\n'), ((8485, 8529), 'json.dumps', 'json.dumps', (['response_data'], {'default': 'serialize'}), '(response_data, default=serialize)\n', (8495, 8529), False, 'import json\n'), ((8554, 8604), 're.sub', 're.sub', (['"""\\\\b\\\\d{12}\\\\b"""', 'ACCOUNT_ID', 'response_data'], {}), "('\\\\b\\\\d{12}\\\\b', ACCOUNT_ID, response_data)\n", (8560, 8604), False, 'import re\n'), ((8635, 8685), 'json.loads', 'json.loads', (['response_data'], {'object_hook': 'deserialize'}), '(response_data, object_hook=deserialize)\n', (8645, 8685), False, 'import json\n'), ((9372, 9413), 'os.path.join', 'os.path.join', (['self.placebo_dir', 'test_case'], {}), '(self.placebo_dir, test_case)\n', (9384, 9413), False, 'import os\n'), ((9583, 9616), 'boto3.Session', 'boto3.Session', ([], {'region_name': 'region'}), '(region_name=region)\n', (9596, 9616), False, 'import boto3\n'), ((9856, 9869), 'placebo.pill.record', 'pill.record', ([], {}), '()\n', (9867, 9869), False, 'from placebo import pill\n'), ((12144, 12177), 'boto3.Session', 'boto3.Session', ([], {'region_name': 'region'}), '(region_name=region)\n', (12157, 12177), False, 'import boto3\n'), ((12425, 12440), 'placebo.pill.playback', 'pill.playback', ([], {}), '()\n', (12438, 12440), False, 'from placebo import pill\n'), ((3231, 3262), 'os.path.join', 'os.path.join', (['self.data_path', 'n'], {}), '(self.data_path, n)\n', (3243, 3262), False, 'import os\n'), ((5508, 5563), 'json.dumps', 'json.dumps', (['json_data'], {'indent': '(4)', 'default': 'pill.serialize'}), '(json_data, indent=4, default=pill.serialize)\n', (5518, 5563), False, 'import json\n'), ((6009, 6060), 'placebo.pill.FakeHttpResponse', 'pill.FakeHttpResponse', (["response_data['status_code']"], {}), "(response_data['status_code'])\n", (6030, 6060), False, 'from placebo import pill\n'), ((6534, 6561), 'os.path.basename', 'os.path.basename', (['file_path'], {}), '(file_path)\n', (6550, 6561), False, 'import os\n'), ((8895, 8920), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (8910, 8920), False, 'import os\n'), ((9005, 9030), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (9020, 9030), False, 'import os\n'), ((9113, 9138), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (9128, 9138), False, 'import os\n'), ((9464, 9488), 'os.path.exists', 'os.path.exists', (['test_dir'], {}), '(test_dir)\n', (9478, 9488), False, 'import os\n'), ((9542, 9563), 'os.makedirs', 'os.makedirs', (['test_dir'], {}), '(test_dir)\n', (9553, 9563), False, 'import os\n'), ((9725, 9755), 'placebo.pill.attach', 'pill.attach', (['session', 'test_dir'], {}), '(session, test_dir)\n', (9736, 9755), False, 'from placebo import pill\n'), ((11744, 11782), 'os.environ.get', 'os.environ.get', (['"""C7N_FUNCTIONAL"""', '"""no"""'], {}), "('C7N_FUNCTIONAL', 'no')\n", (11758, 11782), False, 'import os\n'), ((11953, 11994), 'os.path.join', 'os.path.join', (['self.placebo_dir', 'test_case'], {}), '(self.placebo_dir, test_case)\n', (11965, 11994), False, 'import os\n'), ((12219, 12252), 'placebo.attach', 'placebo.attach', (['session', 'test_dir'], {}), '(session, test_dir)\n', (12233, 12252), False, 'import placebo\n'), ((9506, 9529), 'shutil.rmtree', 'shutil.rmtree', (['test_dir'], {}), '(test_dir)\n', (9519, 9529), False, 'import shutil\n'), ((11873, 11906), 'boto3.Session', 'boto3.Session', ([], {'region_name': 'region'}), '(region_name=region)\n', (11886, 11906), False, 'import boto3\n'), ((12014, 12038), 'os.path.exists', 'os.path.exists', (['test_dir'], {}), '(test_dir)\n', (12028, 12038), False, 'import os\n'), ((3299, 3325), 'os.listdir', 'os.listdir', (['self.data_path'], {}), '(self.data_path)\n', (3309, 3325), False, 'import os\n'), ((10727, 10931), 'boto3.Session', 'boto3.Session', ([], {'aws_access_key_id': "creds['AccessKeyId']", 'aws_secret_access_key': "creds['SecretAccessKey']", 'aws_session_token': "creds['SessionToken']", 'region_name': '(region or fake.region or default_region)'}), "(aws_access_key_id=creds['AccessKeyId'], aws_secret_access_key\n =creds['SecretAccessKey'], aws_session_token=creds['SessionToken'],\n region_name=region or fake.region or default_region)\n", (10740, 10931), False, 'import boto3\n'), ((11247, 11296), 'placebo.attach', 'placebo.attach', (['new_session', 'test_dir'], {'debug': '(True)'}), '(new_session, test_dir, debug=True)\n', (11261, 11296), False, 'import placebo\n'), ((11112, 11145), 'boto3.Session', 'boto3.Session', ([], {'region_name': 'region'}), '(region_name=region)\n', (11125, 11145), False, 'import boto3\n')] |
metaMMA/Flexget | flexget/tests/test_next_series_seasons.py | a38986422461d7935ead1e2b4ed4c88bcd0a90f5 | from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import pytest
from flexget.entry import Entry
# TODO Add more standard tests
class TestNextSeriesSeasonSeasonsPack(object):
_config = """
templates:
global:
parsing:
series: internal
anchors:
_nss_backfill: &nss_backfill
next_series_seasons:
backfill: yes
_nss_from_start: &nss_from_start
next_series_seasons:
from_start: yes
_nss_backfill_from_start: &nss_backfill_from_start
next_series_seasons:
backfill: yes
from_start: yes
_series_ep_pack: &series_ep_pack
identified_by: ep
tracking: backfill
season_packs:
threshold: 1000
reject_eps: yes
_series_ep_tracking_pack: &series_ep_tracking_pack
identified_by: ep
tracking: backfill
season_packs:
threshold: 1000
reject_eps: yes
_series_ep_tracking_begin_s02e01: &series_ep_tracking_pack_begin_s02e01
identified_by: ep
tracking: backfill
begin: s02e01
season_packs:
threshold: 1000
reject_eps: yes
_series_ep_tracking_begin_s04e01: &series_ep_tracking_pack_begin_s04e01
identified_by: ep
tracking: backfill
begin: s04e01
season_packs:
threshold: 1000
reject_eps: yes
tasks:
inject_series:
series:
settings:
test_series:
season_packs: always
test_series:
- Test Series 1
- Test Series 2
- Test Series 3
- Test Series 4
- Test Series 5
- Test Series 6
- Test Series 7
- Test Series 8
- Test Series 9
- Test Series 10
- Test Series 11
- Test Series 12
- Test Series 13
- Test Series 14
- Test Series 15
- Test Series 16
- Test Series 17
- Test Series 18
- Test Series 19
- Test Series 20
- Test Series 21
- Test Series 22
- Test Series 23
- Test Series 24
- Test Series 25
- Test Series 50
- Test Series 100
test_next_series_seasons_season_pack:
next_series_seasons: yes
series:
- Test Series 1:
<<: *series_ep_pack
max_reruns: 0
test_next_series_seasons_season_pack_backfill:
<<: *nss_backfill
series:
- Test Series 2:
<<: *series_ep_tracking_pack
max_reruns: 0
test_next_series_seasons_season_pack_backfill_and_begin:
<<: *nss_backfill
series:
- Test Series 3:
<<: *series_ep_tracking_pack_begin_s02e01
max_reruns: 0
test_next_series_seasons_season_pack_from_start:
<<: *nss_from_start
series:
- Test Series 4:
<<: *series_ep_pack
max_reruns: 0
test_next_series_seasons_season_pack_from_start_backfill:
<<: *nss_backfill_from_start
series:
- Test Series 5:
<<: *series_ep_tracking_pack
max_reruns: 0
test_next_series_seasons_season_pack_from_start_backfill_and_begin:
<<: *nss_backfill_from_start
series:
- Test Series 6:
<<: *series_ep_tracking_pack_begin_s02e01
max_reruns: 0
test_next_series_seasons_season_pack_and_ep:
next_series_seasons: yes
series:
- Test Series 7:
<<: *series_ep_pack
max_reruns: 0
test_next_series_seasons_season_pack_and_ep_backfill:
<<: *nss_backfill
series:
- Test Series 8:
<<: *series_ep_tracking_pack
max_reruns: 0
test_next_series_seasons_season_pack_and_ep_backfill_and_begin:
<<: *nss_backfill
series:
- Test Series 9:
<<: *series_ep_tracking_pack_begin_s02e01
max_reruns: 0
test_next_series_seasons_season_pack_and_ep_from_start:
<<: *nss_from_start
series:
- Test Series 10:
<<: *series_ep_pack
max_reruns: 0
test_next_series_seasons_season_pack_and_ep_from_start_backfill:
<<: *nss_backfill_from_start
series:
- Test Series 11:
<<: *series_ep_tracking_pack
max_reruns: 0
test_next_series_seasons_season_pack_and_ep_from_start_backfill_and_begin:
<<: *nss_backfill_from_start
series:
- Test Series 12:
<<: *series_ep_tracking_pack_begin_s02e01
max_reruns: 0
test_next_series_seasons_season_pack_gap:
next_series_seasons: yes
series:
- Test Series 13:
<<: *series_ep_pack
max_reruns: 0
test_next_series_seasons_season_pack_gap_backfill:
<<: *nss_backfill
series:
- Test Series 14:
<<: *series_ep_tracking_pack
max_reruns: 0
test_next_series_seasons_season_pack_gap_backfill_and_begin:
<<: *nss_backfill
series:
- Test Series 15:
<<: *series_ep_tracking_pack_begin_s04e01
max_reruns: 0
test_next_series_seasons_season_pack_gap_from_start:
<<: *nss_from_start
series:
- Test Series 16:
<<: *series_ep_pack
max_reruns: 0
test_next_series_seasons_season_pack_gap_from_start_backfill:
<<: *nss_backfill_from_start
series:
- Test Series 17:
<<: *series_ep_tracking_pack
max_reruns: 0
test_next_series_seasons_season_pack_gap_from_start_backfill_and_begin:
<<: *nss_backfill_from_start
series:
- Test Series 18:
<<: *series_ep_tracking_pack_begin_s04e01
max_reruns: 0
test_next_series_seasons_season_pack_and_ep_gap:
next_series_seasons: yes
series:
- Test Series 19:
<<: *series_ep_pack
max_reruns: 0
test_next_series_seasons_season_pack_and_ep_gap_backfill:
<<: *nss_backfill
series:
- Test Series 20:
<<: *series_ep_tracking_pack
max_reruns: 0
test_next_series_seasons_season_pack_and_ep_gap_backfill_and_begin:
<<: *nss_backfill
series:
- Test Series 21:
<<: *series_ep_tracking_pack_begin_s04e01
max_reruns: 0
test_next_series_seasons_season_pack_and_ep_gap_from_start:
<<: *nss_from_start
series:
- Test Series 22:
<<: *series_ep_pack
max_reruns: 0
test_next_series_seasons_season_pack_and_ep_gap_from_start_backfill:
<<: *nss_backfill_from_start
series:
- Test Series 23:
<<: *series_ep_tracking_pack
max_reruns: 0
test_next_series_seasons_season_pack_and_ep_gap_from_start_backfill_and_begin:
<<: *nss_backfill_from_start
series:
- Test Series 24:
<<: *series_ep_tracking_pack_begin_s04e01
max_reruns: 0
test_next_series_seasons_season_pack_begin_completed:
next_series_seasons: yes
series:
- Test Series 50:
identified_by: ep
begin: S02E01
season_packs:
threshold: 1000
reject_eps: yes
max_reruns: 0
test_next_series_seasons_season_pack_from_start_multirun:
next_series_seasons:
from_start: yes
series:
- Test Series 100:
<<: *series_ep_pack
max_reruns: 0
"""
@pytest.fixture()
def config(self):
"""Season packs aren't supported by guessit yet."""
return self._config
def inject_series(self, execute_task, release_name):
execute_task(
'inject_series',
options={'inject': [Entry(title=release_name, url='')], 'disable_tracking': True},
)
@pytest.mark.parametrize(
"task_name,inject,result_find",
[
('test_next_series_seasons_season_pack', ['Test Series 1 S02'], ['Test Series 1 S03']),
(
'test_next_series_seasons_season_pack_backfill',
['Test Series 2 S02'],
['Test Series 2 S01', 'Test Series 2 S03'],
),
(
'test_next_series_seasons_season_pack_backfill_and_begin',
['Test Series 3 S02'],
['Test Series 3 S03'],
),
(
'test_next_series_seasons_season_pack_from_start',
['Test Series 4 S02'],
['Test Series 4 S03'],
),
(
'test_next_series_seasons_season_pack_from_start_backfill',
['Test Series 5 S02'],
['Test Series 5 S03', 'Test Series 5 S01'],
),
(
'test_next_series_seasons_season_pack_from_start_backfill_and_begin',
['Test Series 6 S02'],
['Test Series 6 S03'],
),
(
'test_next_series_seasons_season_pack_and_ep',
['Test Series 7 S02', 'Test Series 7 S03E01'],
['Test Series 7 S03'],
),
(
'test_next_series_seasons_season_pack_and_ep_backfill',
['Test Series 8 S02', 'Test Series 8 S03E01'],
['Test Series 8 S01', 'Test Series 8 S03'],
),
(
'test_next_series_seasons_season_pack_and_ep_backfill_and_begin',
['Test Series 9 S02', 'Test Series 9 S03E01'],
['Test Series 9 S03'],
),
(
'test_next_series_seasons_season_pack_and_ep_from_start',
['Test Series 10 S02', 'Test Series 10 S03E01'],
['Test Series 10 S03'],
),
(
'test_next_series_seasons_season_pack_and_ep_from_start_backfill',
['Test Series 11 S02', 'Test Series 11 S03E01'],
['Test Series 11 S03', 'Test Series 11 S01'],
),
(
'test_next_series_seasons_season_pack_and_ep_from_start_backfill_and_begin',
['Test Series 12 S02', 'Test Series 12 S03E01'],
['Test Series 12 S03'],
),
(
'test_next_series_seasons_season_pack_gap',
['Test Series 13 S02', 'Test Series 13 S06'],
['Test Series 13 S07'],
),
(
'test_next_series_seasons_season_pack_gap_backfill',
['Test Series 14 S02', 'Test Series 14 S06'],
[
'Test Series 14 S07',
'Test Series 14 S05',
'Test Series 14 S04',
'Test Series 14 S03',
'Test Series 14 S01',
],
),
(
'test_next_series_seasons_season_pack_gap_backfill_and_begin',
['Test Series 15 S02', 'Test Series 15 S06'],
['Test Series 15 S07', 'Test Series 15 S05', 'Test Series 15 S04'],
),
(
'test_next_series_seasons_season_pack_gap_from_start',
['Test Series 16 S02', 'Test Series 16 S06'],
['Test Series 16 S07'],
),
(
'test_next_series_seasons_season_pack_gap_from_start_backfill',
['Test Series 17 S02', 'Test Series 17 S06'],
[
'Test Series 17 S07',
'Test Series 17 S05',
'Test Series 17 S04',
'Test Series 17 S03',
'Test Series 17 S01',
],
),
(
'test_next_series_seasons_season_pack_gap_from_start_backfill_and_begin',
['Test Series 18 S02', 'Test Series 18 S06'],
['Test Series 18 S07', 'Test Series 18 S05', 'Test Series 18 S04'],
),
(
'test_next_series_seasons_season_pack_and_ep_gap',
['Test Series 19 S02', 'Test Series 19 S06', 'Test Series 19 S07E01'],
['Test Series 19 S07'],
),
(
'test_next_series_seasons_season_pack_and_ep_gap_backfill',
['Test Series 20 S02', 'Test Series 20 S06', 'Test Series 20 S07E01'],
[
'Test Series 20 S07',
'Test Series 20 S05',
'Test Series 20 S04',
'Test Series 20 S03',
'Test Series 20 S01',
],
),
(
'test_next_series_seasons_season_pack_and_ep_gap_backfill_and_begin',
['Test Series 21 S02', 'Test Series 21 S06', 'Test Series 21 S07E01'],
['Test Series 21 S07', 'Test Series 21 S05', 'Test Series 21 S04'],
),
(
'test_next_series_seasons_season_pack_and_ep_gap_from_start',
['Test Series 22 S02', 'Test Series 22 S03E01', 'Test Series 22 S06'],
['Test Series 22 S07'],
),
(
'test_next_series_seasons_season_pack_and_ep_gap_from_start_backfill',
['Test Series 23 S02', 'Test Series 23 S03E01', 'Test Series 23 S06'],
[
'Test Series 23 S07',
'Test Series 23 S05',
'Test Series 23 S04',
'Test Series 23 S03',
'Test Series 23 S01',
],
),
(
'test_next_series_seasons_season_pack_and_ep_gap_from_start_backfill_and_begin',
['Test Series 24 S02', 'Test Series 24 S03E01', 'Test Series 24 S06'],
['Test Series 24 S07', 'Test Series 24 S05', 'Test Series 24 S04'],
),
(
'test_next_series_seasons_season_pack_begin_completed',
['Test Series 50 S02'],
['Test Series 50 S03'],
),
],
)
def test_next_series_seasons(self, execute_task, task_name, inject, result_find):
for entity_id in inject:
self.inject_series(execute_task, entity_id)
task = execute_task(task_name)
for result_title in result_find:
assert task.find_entry(title=result_title)
assert len(task.all_entries) == len(result_find)
# Tests which require multiple tasks to be executed in order
# Each run_parameter is a tuple of lists: [task name, list of series ID(s) to inject, list of result(s) to find]
@pytest.mark.parametrize(
"run_parameters",
[
(
[
'test_next_series_seasons_season_pack_from_start_multirun',
[],
['Test Series 100 S01'],
],
[
'test_next_series_seasons_season_pack_from_start_multirun',
[],
['Test Series 100 S02'],
],
)
],
)
def test_next_series_seasons_multirun(self, execute_task, run_parameters):
for this_test in run_parameters:
for entity_id in this_test[1]:
self.inject_series(execute_task, entity_id)
task = execute_task(this_test[0])
for result_title in this_test[2]:
assert task.find_entry(title=result_title)
assert len(task.all_entries) == len(this_test[2])
| [((8759, 8775), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (8773, 8775), False, 'import pytest\n'), ((9106, 13381), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""task_name,inject,result_find"""', "[('test_next_series_seasons_season_pack', ['Test Series 1 S02'], [\n 'Test Series 1 S03']), ('test_next_series_seasons_season_pack_backfill',\n ['Test Series 2 S02'], ['Test Series 2 S01', 'Test Series 2 S03']), (\n 'test_next_series_seasons_season_pack_backfill_and_begin', [\n 'Test Series 3 S02'], ['Test Series 3 S03']), (\n 'test_next_series_seasons_season_pack_from_start', ['Test Series 4 S02'\n ], ['Test Series 4 S03']), (\n 'test_next_series_seasons_season_pack_from_start_backfill', [\n 'Test Series 5 S02'], ['Test Series 5 S03', 'Test Series 5 S01']), (\n 'test_next_series_seasons_season_pack_from_start_backfill_and_begin', [\n 'Test Series 6 S02'], ['Test Series 6 S03']), (\n 'test_next_series_seasons_season_pack_and_ep', ['Test Series 7 S02',\n 'Test Series 7 S03E01'], ['Test Series 7 S03']), (\n 'test_next_series_seasons_season_pack_and_ep_backfill', [\n 'Test Series 8 S02', 'Test Series 8 S03E01'], ['Test Series 8 S01',\n 'Test Series 8 S03']), (\n 'test_next_series_seasons_season_pack_and_ep_backfill_and_begin', [\n 'Test Series 9 S02', 'Test Series 9 S03E01'], ['Test Series 9 S03']), (\n 'test_next_series_seasons_season_pack_and_ep_from_start', [\n 'Test Series 10 S02', 'Test Series 10 S03E01'], ['Test Series 10 S03']),\n ('test_next_series_seasons_season_pack_and_ep_from_start_backfill', [\n 'Test Series 11 S02', 'Test Series 11 S03E01'], ['Test Series 11 S03',\n 'Test Series 11 S01']), (\n 'test_next_series_seasons_season_pack_and_ep_from_start_backfill_and_begin'\n , ['Test Series 12 S02', 'Test Series 12 S03E01'], [\n 'Test Series 12 S03']), ('test_next_series_seasons_season_pack_gap', [\n 'Test Series 13 S02', 'Test Series 13 S06'], ['Test Series 13 S07']), (\n 'test_next_series_seasons_season_pack_gap_backfill', [\n 'Test Series 14 S02', 'Test Series 14 S06'], ['Test Series 14 S07',\n 'Test Series 14 S05', 'Test Series 14 S04', 'Test Series 14 S03',\n 'Test Series 14 S01']), (\n 'test_next_series_seasons_season_pack_gap_backfill_and_begin', [\n 'Test Series 15 S02', 'Test Series 15 S06'], ['Test Series 15 S07',\n 'Test Series 15 S05', 'Test Series 15 S04']), (\n 'test_next_series_seasons_season_pack_gap_from_start', [\n 'Test Series 16 S02', 'Test Series 16 S06'], ['Test Series 16 S07']), (\n 'test_next_series_seasons_season_pack_gap_from_start_backfill', [\n 'Test Series 17 S02', 'Test Series 17 S06'], ['Test Series 17 S07',\n 'Test Series 17 S05', 'Test Series 17 S04', 'Test Series 17 S03',\n 'Test Series 17 S01']), (\n 'test_next_series_seasons_season_pack_gap_from_start_backfill_and_begin',\n ['Test Series 18 S02', 'Test Series 18 S06'], ['Test Series 18 S07',\n 'Test Series 18 S05', 'Test Series 18 S04']), (\n 'test_next_series_seasons_season_pack_and_ep_gap', [\n 'Test Series 19 S02', 'Test Series 19 S06', 'Test Series 19 S07E01'], [\n 'Test Series 19 S07']), (\n 'test_next_series_seasons_season_pack_and_ep_gap_backfill', [\n 'Test Series 20 S02', 'Test Series 20 S06', 'Test Series 20 S07E01'], [\n 'Test Series 20 S07', 'Test Series 20 S05', 'Test Series 20 S04',\n 'Test Series 20 S03', 'Test Series 20 S01']), (\n 'test_next_series_seasons_season_pack_and_ep_gap_backfill_and_begin', [\n 'Test Series 21 S02', 'Test Series 21 S06', 'Test Series 21 S07E01'], [\n 'Test Series 21 S07', 'Test Series 21 S05', 'Test Series 21 S04']), (\n 'test_next_series_seasons_season_pack_and_ep_gap_from_start', [\n 'Test Series 22 S02', 'Test Series 22 S03E01', 'Test Series 22 S06'], [\n 'Test Series 22 S07']), (\n 'test_next_series_seasons_season_pack_and_ep_gap_from_start_backfill',\n ['Test Series 23 S02', 'Test Series 23 S03E01', 'Test Series 23 S06'],\n ['Test Series 23 S07', 'Test Series 23 S05', 'Test Series 23 S04',\n 'Test Series 23 S03', 'Test Series 23 S01']), (\n 'test_next_series_seasons_season_pack_and_ep_gap_from_start_backfill_and_begin'\n , ['Test Series 24 S02', 'Test Series 24 S03E01', 'Test Series 24 S06'],\n ['Test Series 24 S07', 'Test Series 24 S05', 'Test Series 24 S04']), (\n 'test_next_series_seasons_season_pack_begin_completed', [\n 'Test Series 50 S02'], ['Test Series 50 S03'])]"], {}), "('task_name,inject,result_find', [(\n 'test_next_series_seasons_season_pack', ['Test Series 1 S02'], [\n 'Test Series 1 S03']), ('test_next_series_seasons_season_pack_backfill',\n ['Test Series 2 S02'], ['Test Series 2 S01', 'Test Series 2 S03']), (\n 'test_next_series_seasons_season_pack_backfill_and_begin', [\n 'Test Series 3 S02'], ['Test Series 3 S03']), (\n 'test_next_series_seasons_season_pack_from_start', ['Test Series 4 S02'\n ], ['Test Series 4 S03']), (\n 'test_next_series_seasons_season_pack_from_start_backfill', [\n 'Test Series 5 S02'], ['Test Series 5 S03', 'Test Series 5 S01']), (\n 'test_next_series_seasons_season_pack_from_start_backfill_and_begin', [\n 'Test Series 6 S02'], ['Test Series 6 S03']), (\n 'test_next_series_seasons_season_pack_and_ep', ['Test Series 7 S02',\n 'Test Series 7 S03E01'], ['Test Series 7 S03']), (\n 'test_next_series_seasons_season_pack_and_ep_backfill', [\n 'Test Series 8 S02', 'Test Series 8 S03E01'], ['Test Series 8 S01',\n 'Test Series 8 S03']), (\n 'test_next_series_seasons_season_pack_and_ep_backfill_and_begin', [\n 'Test Series 9 S02', 'Test Series 9 S03E01'], ['Test Series 9 S03']), (\n 'test_next_series_seasons_season_pack_and_ep_from_start', [\n 'Test Series 10 S02', 'Test Series 10 S03E01'], ['Test Series 10 S03']),\n ('test_next_series_seasons_season_pack_and_ep_from_start_backfill', [\n 'Test Series 11 S02', 'Test Series 11 S03E01'], ['Test Series 11 S03',\n 'Test Series 11 S01']), (\n 'test_next_series_seasons_season_pack_and_ep_from_start_backfill_and_begin'\n , ['Test Series 12 S02', 'Test Series 12 S03E01'], [\n 'Test Series 12 S03']), ('test_next_series_seasons_season_pack_gap', [\n 'Test Series 13 S02', 'Test Series 13 S06'], ['Test Series 13 S07']), (\n 'test_next_series_seasons_season_pack_gap_backfill', [\n 'Test Series 14 S02', 'Test Series 14 S06'], ['Test Series 14 S07',\n 'Test Series 14 S05', 'Test Series 14 S04', 'Test Series 14 S03',\n 'Test Series 14 S01']), (\n 'test_next_series_seasons_season_pack_gap_backfill_and_begin', [\n 'Test Series 15 S02', 'Test Series 15 S06'], ['Test Series 15 S07',\n 'Test Series 15 S05', 'Test Series 15 S04']), (\n 'test_next_series_seasons_season_pack_gap_from_start', [\n 'Test Series 16 S02', 'Test Series 16 S06'], ['Test Series 16 S07']), (\n 'test_next_series_seasons_season_pack_gap_from_start_backfill', [\n 'Test Series 17 S02', 'Test Series 17 S06'], ['Test Series 17 S07',\n 'Test Series 17 S05', 'Test Series 17 S04', 'Test Series 17 S03',\n 'Test Series 17 S01']), (\n 'test_next_series_seasons_season_pack_gap_from_start_backfill_and_begin',\n ['Test Series 18 S02', 'Test Series 18 S06'], ['Test Series 18 S07',\n 'Test Series 18 S05', 'Test Series 18 S04']), (\n 'test_next_series_seasons_season_pack_and_ep_gap', [\n 'Test Series 19 S02', 'Test Series 19 S06', 'Test Series 19 S07E01'], [\n 'Test Series 19 S07']), (\n 'test_next_series_seasons_season_pack_and_ep_gap_backfill', [\n 'Test Series 20 S02', 'Test Series 20 S06', 'Test Series 20 S07E01'], [\n 'Test Series 20 S07', 'Test Series 20 S05', 'Test Series 20 S04',\n 'Test Series 20 S03', 'Test Series 20 S01']), (\n 'test_next_series_seasons_season_pack_and_ep_gap_backfill_and_begin', [\n 'Test Series 21 S02', 'Test Series 21 S06', 'Test Series 21 S07E01'], [\n 'Test Series 21 S07', 'Test Series 21 S05', 'Test Series 21 S04']), (\n 'test_next_series_seasons_season_pack_and_ep_gap_from_start', [\n 'Test Series 22 S02', 'Test Series 22 S03E01', 'Test Series 22 S06'], [\n 'Test Series 22 S07']), (\n 'test_next_series_seasons_season_pack_and_ep_gap_from_start_backfill',\n ['Test Series 23 S02', 'Test Series 23 S03E01', 'Test Series 23 S06'],\n ['Test Series 23 S07', 'Test Series 23 S05', 'Test Series 23 S04',\n 'Test Series 23 S03', 'Test Series 23 S01']), (\n 'test_next_series_seasons_season_pack_and_ep_gap_from_start_backfill_and_begin'\n , ['Test Series 24 S02', 'Test Series 24 S03E01', 'Test Series 24 S06'],\n ['Test Series 24 S07', 'Test Series 24 S05', 'Test Series 24 S04']), (\n 'test_next_series_seasons_season_pack_begin_completed', [\n 'Test Series 50 S02'], ['Test Series 50 S03'])])\n", (9129, 13381), False, 'import pytest\n'), ((15951, 16198), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""run_parameters"""', "[(['test_next_series_seasons_season_pack_from_start_multirun', [], [\n 'Test Series 100 S01']], [\n 'test_next_series_seasons_season_pack_from_start_multirun', [], [\n 'Test Series 100 S02']])]"], {}), "('run_parameters', [([\n 'test_next_series_seasons_season_pack_from_start_multirun', [], [\n 'Test Series 100 S01']], [\n 'test_next_series_seasons_season_pack_from_start_multirun', [], [\n 'Test Series 100 S02']])])\n", (15974, 16198), False, 'import pytest\n'), ((9027, 9060), 'flexget.entry.Entry', 'Entry', ([], {'title': 'release_name', 'url': '""""""'}), "(title=release_name, url='')\n", (9032, 9060), False, 'from flexget.entry import Entry\n')] |
hpatel1567/pymatgen | pymatgen/analysis/wulff.py | 8304b25464206c74305214e45935df90bab95500 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module define a WulffShape class to generate the Wulff shape from
a lattice, a list of indices and their corresponding surface energies,
and the total area and volume of the wulff shape,the weighted surface energy,
the anisotropy and shape_factor can also be calculated.
In support of plotting from a given view in terms of miller index.
The lattice is from the conventional unit cell, and (hkil) for hexagonal
lattices.
If you use this code extensively, consider citing the following:
Tran, R.; Xu, Z.; Radhakrishnan, B.; Winston, D.; Persson, K. A.; Ong, S. P.
(2016). Surface energies of elemental crystals. Scientific Data.
"""
from pymatgen.core.structure import Structure
from pymatgen.util.coord import get_angle
import numpy as np
import scipy as sp
from scipy.spatial import ConvexHull
import logging
import warnings
__author__ = 'Zihan Xu, Richard Tran, Shyue Ping Ong'
__copyright__ = 'Copyright 2013, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Zihan Xu'
__email__ = '[email protected]'
__date__ = 'May 5 2016'
logger = logging.getLogger(__name__)
def hkl_tuple_to_str(hkl):
"""
Prepare for display on plots
"(hkl)" for surfaces
Agrs:
hkl: in the form of [h, k, l] or (h, k, l)
"""
str_format = '($'
for x in hkl:
if x < 0:
str_format += '\\overline{' + str(-x) + '}'
else:
str_format += str(x)
str_format += '$)'
return str_format
def get_tri_area(pts):
"""
Given a list of coords for 3 points,
Compute the area of this triangle.
Args:
pts: [a, b, c] three points
"""
a, b, c = pts[0], pts[1], pts[2]
v1 = np.array(b) - np.array(a)
v2 = np.array(c) - np.array(a)
area_tri = abs(sp.linalg.norm(sp.cross(v1, v2)) / 2)
return area_tri
class WulffFacet:
"""
Helper container for each Wulff plane.
"""
def __init__(self, normal, e_surf, normal_pt, dual_pt, index, m_ind_orig,
miller):
"""
:param normal:
:param e_surf:
:param normal_pt:
:param dual_pt:
:param index:
:param m_ind_orig:
:param miller:
"""
self.normal = normal
self.e_surf = e_surf
self.normal_pt = normal_pt
self.dual_pt = dual_pt
self.index = index
self.m_ind_orig = m_ind_orig
self.miller = miller
self.points = []
self.outer_lines = []
class WulffShape:
"""
Generate Wulff Shape from list of miller index and surface energies,
with given conventional unit cell.
surface energy (Jm^2) is the length of normal.
Wulff shape is the convex hull.
Based on:
http://scipy.github.io/devdocs/generated/scipy.spatial.ConvexHull.html
Process:
1. get wulff simplices
2. label with color
3. get wulff_area and other properties
.. attribute:: debug (bool)
.. attribute:: alpha
transparency
.. attribute:: color_set
.. attribute:: grid_off (bool)
.. attribute:: axis_off (bool)
.. attribute:: show_area
.. attribute:: off_color
color of facets off wulff
.. attribute:: structure
Structure object, input conventional unit cell (with H ) from lattice
.. attribute:: miller_list
list of input miller index, for hcp in the form of hkil
.. attribute:: hkl_list
modify hkill to hkl, in the same order with input_miller
.. attribute:: e_surf_list
list of input surface energies, in the same order with input_miller
.. attribute:: lattice
Lattice object, the input lattice for the conventional unit cell
.. attribute:: facets
[WulffFacet] for all facets considering symm
.. attribute:: dual_cv_simp
simplices from the dual convex hull (dual_pt)
.. attribute:: wulff_pt_list
.. attribute:: wulff_cv_simp
simplices from the convex hull of wulff_pt_list
.. attribute:: on_wulff
list for all input_miller, True is on wulff.
.. attribute:: color_area
list for all input_miller, total area on wulff, off_wulff = 0.
.. attribute:: miller_area
($hkl$): area for all input_miller
"""
def __init__(self, lattice, miller_list, e_surf_list, symprec=1e-5):
"""
Args:
lattice: Lattice object of the conventional unit cell
miller_list ([(hkl), ...]: list of hkl or hkil for hcp
e_surf_list ([float]): list of corresponding surface energies
symprec (float): for recp_operation, default is 1e-5.
"""
if any([se < 0 for se in e_surf_list]):
warnings.warn("Unphysical (negative) surface energy detected.")
self.color_ind = list(range(len(miller_list)))
self.input_miller_fig = [hkl_tuple_to_str(x) for x in miller_list]
# store input data
self.structure = Structure(lattice, ["H"], [[0, 0, 0]])
self.miller_list = tuple([tuple(x) for x in miller_list])
self.hkl_list = tuple([(x[0], x[1], x[-1]) for x in miller_list])
self.e_surf_list = tuple(e_surf_list)
self.lattice = lattice
self.symprec = symprec
# 2. get all the data for wulff construction
# get all the surface normal from get_all_miller_e()
self.facets = self._get_all_miller_e()
logger.debug(len(self.facets))
# 3. consider the dual condition
dual_pts = [x.dual_pt for x in self.facets]
dual_convex = ConvexHull(dual_pts)
dual_cv_simp = dual_convex.simplices
# simplices (ndarray of ints, shape (nfacet, ndim))
# list of [i, j, k] , ndim = 3
# i, j, k: ind for normal_e_m
# recalculate the dual of dual, get the wulff shape.
# conner <-> surface
# get cross point from the simplices of the dual convex hull
wulff_pt_list = [self._get_cross_pt_dual_simp(dual_simp)
for dual_simp in dual_cv_simp]
wulff_convex = ConvexHull(wulff_pt_list)
wulff_cv_simp = wulff_convex.simplices
logger.debug(", ".join([str(len(x)) for x in wulff_cv_simp]))
# store simplices and convex
self.dual_cv_simp = dual_cv_simp
self.wulff_pt_list = wulff_pt_list
self.wulff_cv_simp = wulff_cv_simp
self.wulff_convex = wulff_convex
self.on_wulff, self.color_area = self._get_simpx_plane()
miller_area = []
for m, in_mill_fig in enumerate(self.input_miller_fig):
miller_area.append(
in_mill_fig + ' : ' + str(round(self.color_area[m], 4)))
self.miller_area = miller_area
def _get_all_miller_e(self):
"""
from self:
get miller_list(unique_miller), e_surf_list and symmetry
operations(symmops) according to lattice
apply symmops to get all the miller index, then get normal,
get all the facets functions for wulff shape calculation:
|normal| = 1, e_surf is plane's distance to (0, 0, 0),
normal[0]x + normal[1]y + normal[2]z = e_surf
return:
[WulffFacet]
"""
all_hkl = []
color_ind = self.color_ind
planes = []
recp = self.structure.lattice.reciprocal_lattice_crystallographic
recp_symmops = self.lattice.get_recp_symmetry_operation(self.symprec)
for i, (hkl, energy) in enumerate(zip(self.hkl_list,
self.e_surf_list)):
for op in recp_symmops:
miller = tuple([int(x) for x in op.operate(hkl)])
if miller not in all_hkl:
all_hkl.append(miller)
normal = recp.get_cartesian_coords(miller)
normal /= sp.linalg.norm(normal)
normal_pt = [x * energy for x in normal]
dual_pt = [x / energy for x in normal]
color_plane = color_ind[divmod(i, len(color_ind))[1]]
planes.append(WulffFacet(normal, energy, normal_pt,
dual_pt, color_plane, i, hkl))
# sort by e_surf
planes.sort(key=lambda x: x.e_surf)
return planes
def _get_cross_pt_dual_simp(self, dual_simp):
"""
|normal| = 1, e_surf is plane's distance to (0, 0, 0),
plane function:
normal[0]x + normal[1]y + normal[2]z = e_surf
from self:
normal_e_m to get the plane functions
dual_simp: (i, j, k) simplices from the dual convex hull
i, j, k: plane index(same order in normal_e_m)
"""
matrix_surfs = [self.facets[dual_simp[i]].normal for i in range(3)]
matrix_e = [self.facets[dual_simp[i]].e_surf for i in range(3)]
cross_pt = sp.dot(sp.linalg.inv(matrix_surfs), matrix_e)
return cross_pt
def _get_simpx_plane(self):
"""
Locate the plane for simpx of on wulff_cv, by comparing the center of
the simpx triangle with the plane functions.
"""
on_wulff = [False] * len(self.miller_list)
surface_area = [0.0] * len(self.miller_list)
for simpx in self.wulff_cv_simp:
pts = [self.wulff_pt_list[simpx[i]] for i in range(3)]
center = np.sum(pts, 0) / 3.0
# check whether the center of the simplices is on one plane
for plane in self.facets:
abs_diff = abs(np.dot(plane.normal, center) - plane.e_surf)
if abs_diff < 1e-5:
on_wulff[plane.index] = True
surface_area[plane.index] += get_tri_area(pts)
plane.points.append(pts)
plane.outer_lines.append([simpx[0], simpx[1]])
plane.outer_lines.append([simpx[1], simpx[2]])
plane.outer_lines.append([simpx[0], simpx[2]])
# already find the plane, move to the next simplices
break
for plane in self.facets:
plane.outer_lines.sort()
plane.outer_lines = [line for line in plane.outer_lines
if plane.outer_lines.count(line) != 2]
return on_wulff, surface_area
def _get_colors(self, color_set, alpha, off_color, custom_colors={}):
"""
assign colors according to the surface energies of on_wulff facets.
return:
(color_list, color_proxy, color_proxy_on_wulff, miller_on_wulff,
e_surf_on_wulff_list)
"""
import matplotlib as mpl
import matplotlib.pyplot as plt
color_list = [off_color] * len(self.hkl_list)
color_proxy_on_wulff = []
miller_on_wulff = []
e_surf_on_wulff = [(i, e_surf)
for i, e_surf in enumerate(self.e_surf_list)
if self.on_wulff[i]]
c_map = plt.get_cmap(color_set)
e_surf_on_wulff.sort(key=lambda x: x[1], reverse=False)
e_surf_on_wulff_list = [x[1] for x in e_surf_on_wulff]
if len(e_surf_on_wulff) > 1:
cnorm = mpl.colors.Normalize(vmin=min(e_surf_on_wulff_list),
vmax=max(e_surf_on_wulff_list))
else:
# if there is only one hkl on wulff, choose the color of the median
cnorm = mpl.colors.Normalize(vmin=min(e_surf_on_wulff_list) - 0.1,
vmax=max(e_surf_on_wulff_list) + 0.1)
scalar_map = mpl.cm.ScalarMappable(norm=cnorm, cmap=c_map)
for i, e_surf in e_surf_on_wulff:
color_list[i] = scalar_map.to_rgba(e_surf, alpha=alpha)
if tuple(self.miller_list[i]) in custom_colors.keys():
color_list[i] = custom_colors[tuple(self.miller_list[i])]
color_proxy_on_wulff.append(
plt.Rectangle((2, 2), 1, 1, fc=color_list[i], alpha=alpha))
miller_on_wulff.append(self.input_miller_fig[i])
scalar_map.set_array([x[1] for x in e_surf_on_wulff])
color_proxy = [plt.Rectangle((2, 2), 1, 1, fc=x, alpha=alpha)
for x in color_list]
return color_list, color_proxy, color_proxy_on_wulff, miller_on_wulff, e_surf_on_wulff_list
def show(self, *args, **kwargs):
r"""
Show the Wulff plot.
Args:
*args: Passed to get_plot.
**kwargs: Passed to get_plot.
"""
self.get_plot(*args, **kwargs).show()
def get_line_in_facet(self, facet):
"""
Returns the sorted pts in a facet used to draw a line
"""
lines = list(facet.outer_lines)
pt = []
prev = None
while len(lines) > 0:
if prev is None:
l = lines.pop(0)
else:
for i, l in enumerate(lines):
if prev in l:
l = lines.pop(i)
if l[1] == prev:
l.reverse()
break
# make sure the lines are connected one by one.
# find the way covering all pts and facets
pt.append(self.wulff_pt_list[l[0]].tolist())
pt.append(self.wulff_pt_list[l[1]].tolist())
prev = l[1]
return pt
def get_plot(self, color_set='PuBu', grid_off=True, axis_off=True,
show_area=False, alpha=1, off_color='red', direction=None,
bar_pos=(0.75, 0.15, 0.05, 0.65), bar_on=False, units_in_JPERM2=True,
legend_on=True, aspect_ratio=(8, 8), custom_colors={}):
"""
Get the Wulff shape plot.
Args:
color_set: default is 'PuBu'
grid_off (bool): default is True
axis_off (bool): default is Ture
show_area (bool): default is False
alpha (float): chosen from 0 to 1 (float), default is 1
off_color: Default color for facets not present on the Wulff shape.
direction: default is (1, 1, 1)
bar_pos: default is [0.75, 0.15, 0.05, 0.65]
bar_on (bool): default is False
legend_on (bool): default is True
aspect_ratio: default is (8, 8)
custom_colors ({(h,k,l}: [r,g,b,alpha}): Customize color of each
facet with a dictionary. The key is the corresponding Miller
index and value is the color. Undefined facets will use default
color site. Note: If you decide to set your own colors, it
probably won't make any sense to have the color bar on.
Return:
(matplotlib.pyplot)
"""
import matplotlib as mpl
import matplotlib.pyplot as plt
import mpl_toolkits.mplot3d as mpl3
color_list, color_proxy, color_proxy_on_wulff, miller_on_wulff, e_surf_on_wulff = self._get_colors(
color_set, alpha, off_color, custom_colors=custom_colors)
if not direction:
# If direction is not specified, use the miller indices of
# maximum area.
direction = max(self.area_fraction_dict.items(),
key=lambda x: x[1])[0]
fig = plt.figure()
fig.set_size_inches(aspect_ratio[0], aspect_ratio[1])
azim, elev = self._get_azimuth_elev([direction[0], direction[1],
direction[-1]])
wulff_pt_list = self.wulff_pt_list
ax = mpl3.Axes3D(fig, azim=azim, elev=elev)
for plane in self.facets:
# check whether [pts] is empty
if len(plane.points) < 1:
# empty, plane is not on_wulff.
continue
# assign the color for on_wulff facets according to its
# index and the color_list for on_wulff
plane_color = color_list[plane.index]
pt = self.get_line_in_facet(plane)
# plot from the sorted pts from [simpx]
tri = mpl3.art3d.Poly3DCollection([pt])
tri.set_color(plane_color)
tri.set_edgecolor("#808080")
ax.add_collection3d(tri)
# set ranges of x, y, z
# find the largest distance between on_wulff pts and the origin,
# to ensure complete and consistent display for all directions
r_range = max([np.linalg.norm(x) for x in wulff_pt_list])
ax.set_xlim([-r_range * 1.1, r_range * 1.1])
ax.set_ylim([-r_range * 1.1, r_range * 1.1])
ax.set_zlim([-r_range * 1.1, r_range * 1.1])
# add legend
if legend_on:
color_proxy = color_proxy
if show_area:
ax.legend(color_proxy, self.miller_area, loc='upper left',
bbox_to_anchor=(0, 1), fancybox=True, shadow=False)
else:
ax.legend(color_proxy_on_wulff, miller_on_wulff,
loc='upper center',
bbox_to_anchor=(0.5, 1), ncol=3, fancybox=True,
shadow=False)
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
# Add colorbar
if bar_on:
cmap = plt.get_cmap(color_set)
cmap.set_over('0.25')
cmap.set_under('0.75')
bounds = [round(e, 2) for e in e_surf_on_wulff]
bounds.append(1.2 * bounds[-1])
norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
# display surface energies
ax1 = fig.add_axes(bar_pos)
cbar = mpl.colorbar.ColorbarBase(
ax1, cmap=cmap, norm=norm, boundaries=[0] + bounds + [10],
extend='both', ticks=bounds[:-1], spacing='proportional',
orientation='vertical')
units = "$J/m^2$" if units_in_JPERM2 else r"$eV/\AA^2$"
cbar.set_label('Surface Energies (%s)' % (units), fontsize=100)
if grid_off:
ax.grid('off')
if axis_off:
ax.axis('off')
return plt
def _get_azimuth_elev(self, miller_index):
"""
Args:
miller_index: viewing direction
Returns:
azim, elev for plotting
"""
if miller_index == (0, 0, 1) or miller_index == (0, 0, 0, 1):
return 0, 90
else:
cart = self.lattice.get_cartesian_coords(miller_index)
azim = get_angle([cart[0], cart[1], 0], (1, 0, 0))
v = [cart[0], cart[1], 0]
elev = get_angle(cart, v)
return azim, elev
@property
def volume(self):
"""
Volume of the Wulff shape
"""
return self.wulff_convex.volume
@property
def miller_area_dict(self):
"""
Returns {hkl: area_hkl on wulff}
"""
return dict(zip(self.miller_list, self.color_area))
@property
def miller_energy_dict(self):
"""
Returns {hkl: surface energy_hkl}
"""
return dict(zip(self.miller_list, self.e_surf_list))
@property
def surface_area(self):
"""
Total surface area of Wulff shape.
"""
return sum(self.miller_area_dict.values())
@property
def weighted_surface_energy(self):
"""
Returns:
sum(surface_energy_hkl * area_hkl)/ sum(area_hkl)
"""
return self.total_surface_energy / self.surface_area
@property
def area_fraction_dict(self):
"""
Returns:
(dict): {hkl: area_hkl/total area on wulff}
"""
return {hkl: self.miller_area_dict[hkl] / self.surface_area
for hkl in self.miller_area_dict.keys()}
@property
def anisotropy(self):
"""
Returns:
(float) Coefficient of Variation from weighted surface energy
The ideal sphere is 0.
"""
square_diff_energy = 0
weighted_energy = self.weighted_surface_energy
area_frac_dict = self.area_fraction_dict
miller_energy_dict = self.miller_energy_dict
for hkl in miller_energy_dict.keys():
square_diff_energy += (miller_energy_dict[hkl] - weighted_energy) \
** 2 * area_frac_dict[hkl]
return np.sqrt(square_diff_energy) / weighted_energy
@property
def shape_factor(self):
"""
This is useful for determining the critical nucleus size.
A large shape factor indicates great anisotropy.
See Ballufi, R. W., Allen, S. M. & Carter, W. C. Kinetics
of Materials. (John Wiley & Sons, 2005), p.461
Returns:
(float) Shape factor.
"""
return self.surface_area / (self.volume ** (2 / 3))
@property
def effective_radius(self):
"""
Radius of the Wulffshape when the
Wulffshape is approximated as a sphere.
Returns:
(float) radius.
"""
return ((3 / 4) * (self.volume / np.pi)) ** (1 / 3)
@property
def total_surface_energy(self):
"""
Total surface energy of the Wulff shape.
Returns:
(float) sum(surface_energy_hkl * area_hkl)
"""
tot_surface_energy = 0
for hkl in self.miller_energy_dict.keys():
tot_surface_energy += self.miller_energy_dict[hkl] * \
self.miller_area_dict[hkl]
return tot_surface_energy
@property
def tot_corner_sites(self):
"""
Returns the number of vertices in the convex hull.
Useful for identifying catalytically active sites.
"""
return len(self.wulff_convex.vertices)
@property
def tot_edges(self):
"""
Returns the number of edges in the convex hull.
Useful for identifying catalytically active sites.
"""
all_edges = []
for facet in self.facets:
edges = []
pt = self.get_line_in_facet(facet)
lines = []
for i, p in enumerate(pt):
if i == len(pt) / 2:
break
lines.append(tuple(sorted(tuple([tuple(pt[i * 2]), tuple(pt[i * 2 + 1])]))))
for i, p in enumerate(lines):
if p not in all_edges:
edges.append(p)
all_edges.extend(edges)
return len(all_edges)
| [((1180, 1207), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1197, 1207), False, 'import logging\n'), ((1792, 1803), 'numpy.array', 'np.array', (['b'], {}), '(b)\n', (1800, 1803), True, 'import numpy as np\n'), ((1806, 1817), 'numpy.array', 'np.array', (['a'], {}), '(a)\n', (1814, 1817), True, 'import numpy as np\n'), ((1827, 1838), 'numpy.array', 'np.array', (['c'], {}), '(c)\n', (1835, 1838), True, 'import numpy as np\n'), ((1841, 1852), 'numpy.array', 'np.array', (['a'], {}), '(a)\n', (1849, 1852), True, 'import numpy as np\n'), ((5051, 5089), 'pymatgen.core.structure.Structure', 'Structure', (['lattice', "['H']", '[[0, 0, 0]]'], {}), "(lattice, ['H'], [[0, 0, 0]])\n", (5060, 5089), False, 'from pymatgen.core.structure import Structure\n'), ((5655, 5675), 'scipy.spatial.ConvexHull', 'ConvexHull', (['dual_pts'], {}), '(dual_pts)\n', (5665, 5675), False, 'from scipy.spatial import ConvexHull\n'), ((6162, 6187), 'scipy.spatial.ConvexHull', 'ConvexHull', (['wulff_pt_list'], {}), '(wulff_pt_list)\n', (6172, 6187), False, 'from scipy.spatial import ConvexHull\n'), ((11107, 11130), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['color_set'], {}), '(color_set)\n', (11119, 11130), True, 'import matplotlib.pyplot as plt\n'), ((11714, 11759), 'matplotlib.cm.ScalarMappable', 'mpl.cm.ScalarMappable', ([], {'norm': 'cnorm', 'cmap': 'c_map'}), '(norm=cnorm, cmap=c_map)\n', (11735, 11759), True, 'import matplotlib as mpl\n'), ((15439, 15451), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (15449, 15451), True, 'import matplotlib.pyplot as plt\n'), ((15706, 15744), 'mpl_toolkits.mplot3d.Axes3D', 'mpl3.Axes3D', (['fig'], {'azim': 'azim', 'elev': 'elev'}), '(fig, azim=azim, elev=elev)\n', (15717, 15744), True, 'import mpl_toolkits.mplot3d as mpl3\n'), ((4803, 4866), 'warnings.warn', 'warnings.warn', (['"""Unphysical (negative) surface energy detected."""'], {}), "('Unphysical (negative) surface energy detected.')\n", (4816, 4866), False, 'import warnings\n'), ((9000, 9027), 'scipy.linalg.inv', 'sp.linalg.inv', (['matrix_surfs'], {}), '(matrix_surfs)\n', (9013, 9027), True, 'import scipy as sp\n'), ((12275, 12321), 'matplotlib.pyplot.Rectangle', 'plt.Rectangle', (['(2, 2)', '(1)', '(1)'], {'fc': 'x', 'alpha': 'alpha'}), '((2, 2), 1, 1, fc=x, alpha=alpha)\n', (12288, 12321), True, 'import matplotlib.pyplot as plt\n'), ((16221, 16254), 'mpl_toolkits.mplot3d.art3d.Poly3DCollection', 'mpl3.art3d.Poly3DCollection', (['[pt]'], {}), '([pt])\n', (16248, 16254), True, 'import mpl_toolkits.mplot3d as mpl3\n'), ((17420, 17443), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['color_set'], {}), '(color_set)\n', (17432, 17443), True, 'import matplotlib.pyplot as plt\n'), ((17636, 17675), 'matplotlib.colors.BoundaryNorm', 'mpl.colors.BoundaryNorm', (['bounds', 'cmap.N'], {}), '(bounds, cmap.N)\n', (17659, 17675), True, 'import matplotlib as mpl\n'), ((17774, 17948), 'matplotlib.colorbar.ColorbarBase', 'mpl.colorbar.ColorbarBase', (['ax1'], {'cmap': 'cmap', 'norm': 'norm', 'boundaries': '([0] + bounds + [10])', 'extend': '"""both"""', 'ticks': 'bounds[:-1]', 'spacing': '"""proportional"""', 'orientation': '"""vertical"""'}), "(ax1, cmap=cmap, norm=norm, boundaries=[0] +\n bounds + [10], extend='both', ticks=bounds[:-1], spacing='proportional',\n orientation='vertical')\n", (17799, 17948), True, 'import matplotlib as mpl\n'), ((18629, 18672), 'pymatgen.util.coord.get_angle', 'get_angle', (['[cart[0], cart[1], 0]', '(1, 0, 0)'], {}), '([cart[0], cart[1], 0], (1, 0, 0))\n', (18638, 18672), False, 'from pymatgen.util.coord import get_angle\n'), ((18730, 18748), 'pymatgen.util.coord.get_angle', 'get_angle', (['cart', 'v'], {}), '(cart, v)\n', (18739, 18748), False, 'from pymatgen.util.coord import get_angle\n'), ((20494, 20521), 'numpy.sqrt', 'np.sqrt', (['square_diff_energy'], {}), '(square_diff_energy)\n', (20501, 20521), True, 'import numpy as np\n'), ((1887, 1903), 'scipy.cross', 'sp.cross', (['v1', 'v2'], {}), '(v1, v2)\n', (1895, 1903), True, 'import scipy as sp\n'), ((9484, 9498), 'numpy.sum', 'np.sum', (['pts', '(0)'], {}), '(pts, 0)\n', (9490, 9498), True, 'import numpy as np\n'), ((12069, 12127), 'matplotlib.pyplot.Rectangle', 'plt.Rectangle', (['(2, 2)', '(1)', '(1)'], {'fc': 'color_list[i]', 'alpha': 'alpha'}), '((2, 2), 1, 1, fc=color_list[i], alpha=alpha)\n', (12082, 12127), True, 'import matplotlib.pyplot as plt\n'), ((16572, 16589), 'numpy.linalg.norm', 'np.linalg.norm', (['x'], {}), '(x)\n', (16586, 16589), True, 'import numpy as np\n'), ((7947, 7969), 'scipy.linalg.norm', 'sp.linalg.norm', (['normal'], {}), '(normal)\n', (7961, 7969), True, 'import scipy as sp\n'), ((9646, 9674), 'numpy.dot', 'np.dot', (['plane.normal', 'center'], {}), '(plane.normal, center)\n', (9652, 9674), True, 'import numpy as np\n')] |
stanojevic/ccgtools | ccg/supertagger/any2int.py | d87521d66fcd1b3110fbecc6b78b15a60e5095a3 |
class Any2Int:
def __init__(self, min_count: int, include_UNK: bool, include_PAD: bool):
self.min_count = min_count
self.include_UNK = include_UNK
self.include_PAD = include_PAD
self.frozen = False
self.UNK_i = -1
self.UNK_s = "<UNK>"
self.PAD_i = -2
self.PAD_s = "<PAD>"
self.voc_size = 0
self._s2i = dict()
self._i2s = []
self.frequency = dict()
def iter_item(self):
return enumerate(self._i2s)
def get_s2i(self, s, default: int):
assert self.frozen
i = self._s2i.get(s, -1)
if i >= 0:
return i
elif self.include_UNK:
return self.UNK_i
else:
return default
def __getitem__(self, s):
return self.s2i(s)
def s2i(self, s):
i = self.get_s2i(s, -1)
if i >= 0:
return i
else:
raise Exception(f"out of vocabulary entry {s}")
def contains(self, s):
return self.get_s2i(s, -1) != -1
def i2s(self, i):
assert self.frozen
if 0 <= i < self.voc_size:
return self._i2s[i]
else:
raise Exception(f"not entry at position {i} for a vocabulary of size {self.voc_size}")
def add_to_counts(self, s):
assert not self.frozen
self.frequency[s] = self.frequency.get(s, 0)+1
def freeze(self):
assert not self.frozen
if self.include_UNK:
self.UNK_i = len(self._i2s)
self._i2s.append(self.UNK_s)
if self.include_PAD:
self.PAD_i = len(self._i2s)
self._i2s.append(self.PAD_s)
for s, count in sorted(self.frequency.items(), key=lambda x: -x[1]):
if count >= self.min_count:
self._i2s.append(s)
for i, s in enumerate(self._i2s):
self._s2i[s] = i
self.voc_size = len(self._i2s)
self.frozen = True
def __reduce__(self):
return Any2Int, (2, self.include_UNK, self.include_PAD), (self.min_count, self.include_UNK, self.frozen,
self.UNK_i, self.UNK_s, self.PAD_i, self.PAD_s,
self.voc_size, self._s2i, self._i2s, self.frequency)
def __setstate__(self, state):
self.min_count = state[0]
self.include_UNK = state[1]
self.frozen = state[2]
self.UNK_i = state[3]
self.UNK_s = state[4]
self.PAD_i = state[5]
self.PAD_s = state[6]
self.voc_size = state[7]
self._s2i = state[8]
self._i2s = state[9]
self.frequency = state[10]
| [] |
sosprz/nettemp | app/sensor.py | 334b3124263267c931bd7dc5c1bd8eb70614b4ef | from app import app
from flask import Flask, request, jsonify, g
import sqlite3
import os
import json
from random import randint
from flask_jwt_extended import jwt_required
import datetime
from flask_mysqldb import MySQL
mysql = MySQL()
def get_db(rom):
db = getattr(g, '_database', None)
if db is None:
db = g._database = sqlite3.connect(rom)
return db
@app.teardown_appcontext
def close_connection(exception):
db = getattr(g, '_database', None)
if db is not None:
db.close()
def check_value(value, type, rom):
adj=''
tm=''
value=float(value)
m = mysql.connection.cursor()
sql = "SELECT adj, tmp FROM sensors WHERE rom=%s"
m.execute(sql, [rom])
sensor=m.fetchall()
for adj, tmp in sensor:
tmp=float(tmp)
adj=float(adj)
msg=[]
sql = "SELECT min, max, value1, value2, value3 FROM types WHERE type=%s"
m.execute(sql, [type])
list=m.fetchall()
msg.append("IN VALUE: %f" % value)
msg.append(list)
m.close()
if adj:
value=float(value)+(adj)
msg.append("ADJ: %d" % value)
for min, max, v1, v2, v3 in list:
if (value>=float(min)) and (value<=float(max)):
if(value==v1) or (value==v2) or (value==v3):
msg.append("filter 2 back to previous %f" % tmp)
value=tmp
else:
value=float(value)
else:
msg.append("filter 1 back to previous %f" % tmp)
value=tmp
msg.append("VALUE OUT: %f" % value)
print(msg)
return value
def new_db(rom):
rom = rom+'.sql'
conn = sqlite3.connect(app.romdir+rom)
c = conn.cursor()
sql = "SELECT count() FROM sqlite_master WHERE type='table' AND name='def'"
c.execute(sql)
if c.fetchone()[0]==1:
print ("Database %s exists" %rom)
return True
else:
with app.app_context():
db = get_db(app.romdir+rom)
with app.open_resource('schema/sensors_db_schema.sql', mode='r') as f:
db.cursor().executescript(f.read())
db.commit()
print ("Database %s created" %rom)
return False
def insert_db(rom,value):
rom = rom+'.sql'
conn = sqlite3.connect(app.romdir+rom)
c = conn.cursor()
sql = "SELECT count() FROM sqlite_master WHERE type='table' AND name='def'"
c.execute(sql)
if c.fetchone()[0]==1:
data = [value]
sql = "INSERT OR IGNORE INTO def (value) VALUES (?)"
c.execute(sql, data)
conn.commit()
conn.close()
print ("[ nettemp ][ sensor ] Database %s insert ok" %rom)
return True
else:
print ("[ nettemp ][ sensor ] Database %s not exist" %rom)
return False
def update_sensor_tmp(rom,value):
m = mysql.connection.cursor()
rom1 = [rom]
sql="SELECT count(*) FROM sensors WHERE rom=%s"
m.execute(sql, rom1)
coun=m.fetchone()
if coun[0]==1:
if int(datetime.datetime.now().strftime("%M"))%5==0:
tmp_5ago=value
sql = "UPDATE sensors SET tmp=%s, tmp_5ago=%s, nodata='', time=CURRENT_TIMESTAMP() WHERE rom=%s"
data = [value,tmp_5ago,rom]
else:
sql = "UPDATE sensors SET tmp=%s, nodata='', time=CURRENT_TIMESTAMP() WHERE rom=%s"
data = [value,rom]
m.execute(sql, data)
# stat min max
data = [value, value, rom]
sql = "UPDATE sensors SET stat_min=%s, stat_min_time=CURRENT_TIMESTAMP() WHERE (stat_min>%s OR stat_min is null OR stat_min='0.0') AND rom=%s"
m.execute(sql, data)
sql = "UPDATE sensors SET stat_max=%s, stat_max_time=CURRENT_TIMESTAMP() WHERE (stat_max<%s OR stat_max is null OR stat_max='0.0') AND rom=%s"
m.execute(sql, data)
m.connection.commit()
m.close()
print ("[ nettemp ][ sensor ] Sensor %s updated" %rom)
return True
else:
print ("[ nettemp ][ sensor ] Sensor %s not exist" %rom)
return False
def delete_db(rom):
rom=rom+'.sql'
if os.path.isfile(app.romdir+rom):
os.remove(rom)
print ("[ nettemp ][ sensor ] Database %s deleted" %rom)
return True
else:
print ("[ nettemp ][ sensor ] Database %s not exist" %rom)
return False
def delete_sensor(id,rom):
data = [id, rom]
m = mysql.connection.cursor()
sql="DELETE FROM sensors WHERE id=? AND rom=%s"
m.execute(sql, data)
m.connection.commit()
m.close()
delete_db(rom)
print ("[ nettemp ][ sensor ] Sensor %s removed ok" %rom)
def create_sensor(rom, data, data2, map_settings):
m = mysql.connection.cursor()
rom1 = [rom]
sql = "SELECT count(*) FROM sensors WHERE rom=%s"
m.execute(sql, rom1)
coun = m.fetchone()
if coun[0]==0:
sql = "INSERT INTO sensors (rom,type,device,ip,gpio,i2c,usb,name) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"
m.execute(sql, data)
sql2 = "UPDATE sensors SET alarm='off', adj='0', charts='on', status='on', ch_group=%s, tmp_min='0', tmp_max='0', minmax='off', stat_min='0', stat_max='0', tmp_5ago='0', fiveago='on', map_id=%s, nodata_time='5', email_delay='10' WHERE rom=%s"
m.execute(sql2, data2)
map = "INSERT INTO maps (type, pos_x, pos_y, map_on, map_id, display_name) VALUES (%s, %s, %s, %s, %s, %s)"
m.execute(map, map_settings)
m.connection.commit()
m.close()
print ("[ nettemp ][ sensor ] Sensor %s added ok" %rom)
else:
print ("[ nettemp ][ sensor ] Sensor %s already exist" %rom)
return None
def sensor():
data = request.get_json()
for j in data:
rom = None
if 'rom' in j:
rom=j['rom']
type = None
if 'type' in j:
type=j['type']
device = None
if 'device' in j:
device=j['device']
ip = None
if 'ip' in j:
ip = j['ip']
gpio = None
if 'gpio' in j:
gpio=j['gpio']
i2c = None
if 'i2c' in j:
i2c=j['i2c']
usb = None
if 'usb' in j:
usb=j['usb']
name = randint(1000,9000)
if 'name' in j:
name=j['name']
if not j['name']:
name = randint(1000,9000)
tmp = None
if 'tmp' in j:
tmp=j['tmp']
value = None
if 'value' in j:
value=j['value']
group = type
if 'group' in j:
group=j['group']
map_id = randint(1000,9000)
map_y = randint(50,600)
map_x = randint(50,600)
data = [rom, type, device, ip, gpio, i2c, usb, name]
data2 = [group, map_id, rom]
map_settings = [type, map_y, map_x, 'on', map_id, 'on']
value=check_value(value, type, rom)
if insert_db(rom, value) == False:
new_db(rom)
insert_db(rom,value)
if update_sensor_tmp(rom,value) == False:
create_sensor(rom,data,data2,map_settings)
update_sensor_tmp(rom,value)
@app.route('/sensor', methods=['POST'])
@jwt_required
def url_sensor():
sensor()
return '', 200
@app.route('/local', methods=['POST'])
def url_localhost():
if request.remote_addr == '127.0.0.1':
sensor()
return 'Local'
else:
return '', 404
| [((229, 236), 'flask_mysqldb.MySQL', 'MySQL', ([], {}), '()\n', (234, 236), False, 'from flask_mysqldb import MySQL\n'), ((6543, 6581), 'app.app.route', 'app.route', (['"""/sensor"""'], {'methods': "['POST']"}), "('/sensor', methods=['POST'])\n", (6552, 6581), False, 'from app import app\n'), ((6645, 6682), 'app.app.route', 'app.route', (['"""/local"""'], {'methods': "['POST']"}), "('/local', methods=['POST'])\n", (6654, 6682), False, 'from app import app\n'), ((1503, 1536), 'sqlite3.connect', 'sqlite3.connect', (['(app.romdir + rom)'], {}), '(app.romdir + rom)\n', (1518, 1536), False, 'import sqlite3\n'), ((2051, 2084), 'sqlite3.connect', 'sqlite3.connect', (['(app.romdir + rom)'], {}), '(app.romdir + rom)\n', (2066, 2084), False, 'import sqlite3\n'), ((3721, 3753), 'os.path.isfile', 'os.path.isfile', (['(app.romdir + rom)'], {}), '(app.romdir + rom)\n', (3735, 3753), False, 'import os\n'), ((5183, 5201), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (5199, 5201), False, 'from flask import Flask, request, jsonify, g\n'), ((340, 360), 'sqlite3.connect', 'sqlite3.connect', (['rom'], {}), '(rom)\n', (355, 360), False, 'import sqlite3\n'), ((3757, 3771), 'os.remove', 'os.remove', (['rom'], {}), '(rom)\n', (3766, 3771), False, 'import os\n'), ((5685, 5704), 'random.randint', 'randint', (['(1000)', '(9000)'], {}), '(1000, 9000)\n', (5692, 5704), False, 'from random import randint\n'), ((6034, 6053), 'random.randint', 'randint', (['(1000)', '(9000)'], {}), '(1000, 9000)\n', (6041, 6053), False, 'from random import randint\n'), ((6067, 6083), 'random.randint', 'randint', (['(50)', '(600)'], {}), '(50, 600)\n', (6074, 6083), False, 'from random import randint\n'), ((6097, 6113), 'random.randint', 'randint', (['(50)', '(600)'], {}), '(50, 600)\n', (6104, 6113), False, 'from random import randint\n'), ((1746, 1763), 'app.app.app_context', 'app.app_context', ([], {}), '()\n', (1761, 1763), False, 'from app import app\n'), ((1810, 1869), 'app.app.open_resource', 'app.open_resource', (['"""schema/sensors_db_schema.sql"""'], {'mode': '"""r"""'}), "('schema/sensors_db_schema.sql', mode='r')\n", (1827, 1869), False, 'from app import app\n'), ((5793, 5812), 'random.randint', 'randint', (['(1000)', '(9000)'], {}), '(1000, 9000)\n', (5800, 5812), False, 'from random import randint\n'), ((2729, 2752), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2750, 2752), False, 'import datetime\n')] |
Subsets and Splits