# Source-file dump: each file below is preceded by a "repo_name | repo_path | commit" header comment.
# he-actlab/polymath | polymath/srdfg/base.py | commit 9b7937d0ddf7452f6cc74ee90d05f8c6acef737e
from polymath import UNSET_SHAPE, DEFAULT_SHAPES
import builtins
import operator
from collections import OrderedDict, deque
from collections.abc import Mapping, Sequence
import functools
from numbers import Integral, Rational, Real
import contextlib
import traceback
import uuid
import numpy as np
import importlib
from .graph import Graph
from .domain import Domain
from .util import _noop_callback, _flatten_iterable, node_hash, \
_is_node_type_instance, is_iterable
class Node(object):
"""
Base class for nodes.
Parameters
----------
args : tuple
Positional arguments passed to the `_evaluate` method.
name : str or None
Name of the node or `None` to use a random, unique identifier.
shape : tuple or None
Shape of the output for a node. This can be a tuple of integers or parameter node names.
graph : Node or None
Parent graph of this node. If graph is `None`, this is the top-level graph.
op_name : str
Operation name which describes the node functionality.
value : Any or None
If a node has a default value to use for execution, it can be set using `value`.
kwargs : dict
Keyword arguments passed to the `_evaluate` method.
"""
_graph_stack = deque([None])
_eval_stack = []
stack_size = 5
evaluated_nodes = 0
def __init__(self, *args,
name=None,
shape=None,
graph=None,
dependencies=None,
op_name=None,
value=None,
**kwargs):
self.nodes = Graph()
self.value = value
self.dependencies = []
self._args = []
        self._predecessors = []
        self._successors = []
self.args = args
if "name" in kwargs:
kwargs.pop("name")
self.added_attrs = []
        # TODO: Change this to an underscore-prefixed private variable
self.kwargs = kwargs
self.graph = graph
self._shape = OrderedDict()
self.shape = shape or tuple([])
# Get a list of all dependencies relevant to this node
self.dependencies = [] if dependencies is None else dependencies
if self.graph:
self.dependencies.extend(self.graph.dependencies)
# Choose a name for the node and add the node to the graph
self._name = None
self.name = name or uuid.uuid4().hex
self._op_name = None
self.op_name = op_name
# Get the stack context so we can report where the node was defined
self._stack = traceback.extract_stack(limit=1)
@property
def graph(self):
"""
polymath.srdfg.graph.Graph : Parent graph of this node. If graph is `None`, this is the top-level graph.
"""
return self._graph
    def preds(self):
        return self._predecessors
    def succs(self):
        return self._successors
def add_predecessor(self, pred):
if isinstance(pred, Node):
self._predecessors.append(pred.gname)
else:
self._predecessors.append(pred)
    def add_successor(self, succ):
        if isinstance(succ, Node):
            self._successors.append(succ.gname)
        else:
            self._successors.append(succ)
def set_edges(self):
for e in self.args:
self.add_predecessor(e)
if isinstance(e, Node):
e.add_successor(self)
@property
def domain(self):
return Domain(tuple([]))
@property
def args(self):
"""
tuple : Positional arguments which are used for executing this node.
"""
return tuple(self._args)
@property
def argnames(self):
return [a.name if isinstance(a, Node) else a for a in self.args]
@property
def shape(self):
"""
tuple : Shape of the output for a node. This can be a tuple of integers or parameter node names.
"""
return self._shape
@property
def var(self):
return self
@property
def name(self):
"""str : Unique name of the node"""
return self._name
@property
def op_name(self):
"""
str : Operation name which describes the node functionality.
"""
return self._op_name
@op_name.setter
def op_name(self, op_name):
if op_name:
self._op_name = op_name
elif self.__class__.__name__ == "Node":
self._op_name = self.name
else:
self._op_name = self.__class__.__name__
@name.setter
def name(self, name):
self.set_name(name)
@args.setter
def args(self, args):
new_args = []
for arg in args:
if isinstance(arg, Node):
if self.__class__.__name__ == "Node":
self.nodes[arg.name] = self.graph[arg.name]
new_args.append(arg)
self._args = tuple(new_args)
@shape.setter
def shape(self, shape):
self.set_shape(shape, init=True)
@graph.setter
def graph(self, graph):
self._graph = Node.get_active_graph(graph)
@property
def gname(self):
scope_names = [self.name]
cgraph = self.graph
while cgraph:
scope_names.append(cgraph.name)
cgraph = cgraph.graph
return "/".join(list(reversed(scope_names)))
def __enter__(self):
Node._graph_stack.append(self)
return self
def __exit__(self, *args):
assert self == Node._graph_stack.pop()
def __repr__(self):
return "<node '%s'>" % self.name
def add_attribute(self, key, value):
self.added_attrs.append(key)
self.kwargs[key] = value
def is_shape_finalized(self):
if self.shape == UNSET_SHAPE:
return False
for s in self.shape:
if not isinstance(s, Integral):
return False
return True
def set_shape(self, shape=None, init=False):
if isinstance(shape, float):
            self._shape = tuple([int(shape)])
elif isinstance(shape, Integral):
self._shape = tuple([shape])
elif isinstance(shape, Node):
self._shape = tuple([shape])
elif not shape or len(shape) == 0:
# TODO: Change in order to enable "is shape finalized" to work
self._shape = UNSET_SHAPE
else:
shapes = []
for dim in shape:
if isinstance(dim, (Node, Integral)):
shapes.append(dim)
elif isinstance(dim, float):
shapes.append(int(dim))
else:
raise TypeError(f"Shape value must be placeholder or integer value for {self.name}\n"
f"\tDim: {dim}"
f"\n\t{self.kwargs} ")
self._shape = tuple(shapes)
@staticmethod
def get_active_graph(graph=None):
"""
Obtain the currently active graph instance by returning the explicitly given graph or using
the default graph.
Parameters
----------
graph : Node or None
Graph to return or `None` to use the default graph.
Raises
------
ValueError
If no `Graph` instance can be obtained.
"""
graph = graph or Node._graph_stack[-1]
return graph
def instantiate_node(self, node): # pylint:disable=W0621
"""
Instantiate nodes by retrieving the node object associated with the node name.
Parameters
----------
node : Node or str
            Node instance or name of a node.
Returns
-------
instantiated_node : Node
Node instance.
Raises
------
ValueError
            If `node` is not a `Node` instance or a node name.
        RuntimeError
            If `node` is a `Node` instance but does not belong to this graph.
"""
if isinstance(node, str):
return self.nodes[node]
if isinstance(node, Node):
if node.name not in self.nodes and (node.graph != self):
raise RuntimeError(f"node '{node}' does not belong to {self} graph, instead belongs to"
f" {node.graph}")
return node
raise ValueError(f"'{node}' is not an `Node` instance or node name")
def instantiate_graph(self, context, **kwargs):
"""
Instantiate a graph by replacing all node names with node instances.
.. note::
This function modifies the context in place. Use :code:`context=context.copy()` to avoid
the context being modified.
Parameters
----------
context : dict[Node or str, object]
Context whose keys are node instances or names.
kwargs : dict[str, object]
Additional context information keyed by variable name.
Returns
-------
normalized_context : dict[Node, object]
Normalized context whose keys are node instances.
Raises
------
ValueError
If the context specifies more than one value for any node.
ValueError
If `context` is not a mapping.
"""
if context is None:
context = {}
elif not isinstance(context, Mapping):
raise ValueError("`context` must be a mapping.")
nodes = list(context)
# Add the keyword arguments
for node in nodes: # pylint:disable=W0621
value = context.pop(node)
node = self.instantiate_node(node)
if node in context:
raise ValueError(f"duplicate unequal value for node '{node}'")
context[node] = value
if node.op_name in ["placeholder", "state", "input", "output", "temp"] and not node.is_shape_finalized():
context[node] = node.evaluate(context)
for name, value in kwargs.items():
node = self.nodes[name]
if node in context:
raise ValueError(f"duplicate value for node '{node}'")
context[node] = value
if node.op_name in ["placeholder", "state", "input", "output", "temp"] and not node.is_shape_finalized():
context[node] = node.evaluate(context)
return context
def run(self, fetches, context=None, *, callback=None, **kwargs):
"""
Evaluate one or more nodes given a dictionary of node names with their values.
.. note::
This function modifies the context in place. Use :code:`context=context.copy()` to avoid
the context being modified.
Parameters
----------
fetches : list[str or Node] or str or Node
One or more `Node` instances or names to evaluate.
context : dict or None
Context in which to evaluate the nodes.
callback : callable or None
            Callback to be evaluated when a node is evaluated.
kwargs : dict
Additional context information keyed by variable name.
Returns
-------
values : Node or tuple[object]
Output of the nodes given the context.
Raises
------
ValueError
            If `fetches` is not a `Node` instance, node name, or a sequence thereof.
"""
if isinstance(fetches, (str, Node)):
fetches = [fetches]
single = True
elif isinstance(fetches, Sequence):
single = False
else:
raise ValueError("`fetches` must be an `Node` instance, node name, or a "
"sequence thereof.")
fetches = [self.instantiate_node(node) for node in fetches]
context = self.instantiate_graph(context, **kwargs)
for c in context:
if c in fetches and c.op_name in ["output", "state", "temp"]:
write_name = "/".join([f"{i}{c.write_count-1}" for i in c.name.split("/")]) if c.write_count > 0 else c.name
fetches[fetches.index(c)] = c.graph.nodes[write_name]
values = [fetch.evaluate_node(fetch, context, callback=callback) for fetch in fetches]
return values[0] if single else tuple(values)
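    # Editor's note: the usage sketch below is an illustration added for clarity and is not
    # part of the original module; the node and placeholder names are hypothetical.
    #
    #     result = graph.run("out", {"x": 3})             # single fetch -> single value
    #     a, b = graph.run(["out", "loss"], {"x": 3})     # sequence fetch -> tuple of values
    #
    # `run` mutates the supplied context; pass `context=context.copy()` to keep the original.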
def __getstate__(self):
return self.__dict__
def __setstate__(self, data):
self.__dict__.update(data)
def set_name(self, name):
"""
Set the name of the node and update the graph.
Parameters
----------
value : str
Unique name of the node.
Returns
-------
self : Node
This node.
Raises
------
ValueError
            If a node with `value` already exists in the associated graph.
KeyError
If the current name of the node cannot be found in the associated graph.
"""
name = name or uuid.uuid4().hex
        # TODO: Need a way to check if the existing node is not equal to the current node as well
if self.graph and name in self.graph.nodes:
raise ValueError(f"duplicate name '{name}' in {self.graph.name}:\n\t"
f"Existing: {self.graph.nodes[name].args}\n\t"
f"New: {self.args}")
if self.graph:
graph = self.graph
if self._name and self._name in graph.nodes:
graph.update_graph_key(self._name, name)
else:
graph.nodes[name] = self
self._name = name
return self
def evaluate_dependencies(self, context, callback=None):
"""
Evaluate the dependencies of this node and discard the values.
Parameters
----------
context : dict
Normalised context in which to evaluate the node.
callback : callable or None
            Callback to be evaluated when a node is evaluated.
"""
for node in self.dependencies:
node.evaluate(context, callback)
def evaluate(self, context, callback=None):
"""
Evaluate the node given a context.
Parameters
----------
context : dict
Normalised context in which to evaluate the node.
callback : callable or None
            Callback to be evaluated when a node is evaluated.
Returns
-------
value : object
Output of the node given the context.
"""
# Evaluate all explicit dependencies first
self.evaluate_dependencies(context, callback)
if self in context:
return context[self]
# Evaluate the parents
partial = functools.partial(self.evaluate_node, context=context, callback=callback)
args = [partial(arg) for arg in self.args]
kwargs = {key: partial(value) for key, value in self.kwargs.items() if key not in self.added_attrs}
# Evaluate the node
callback = callback or _noop_callback
with callback(self, context):
if self.__class__.__name__ == "Node":
context[self] = self.value = self._evaluate(*args, context=context, **kwargs)
else:
context[self] = self.value = self._evaluate(*args, **kwargs)
return self.value
def _evaluate(self, *args, context=None, **kwargs):
"""
Inheriting nodes should implement this function to evaluate the node.
"""
return self(*args, context, **kwargs)
@classmethod
def evaluate_node(cls, node, context, **kwargs):
"""
        Evaluate a node or constant given a context.
"""
Node.evaluated_nodes += 1
try:
if isinstance(node, Node):
Node._eval_stack.append(node.name)
return node.evaluate(context, **kwargs)
partial = functools.partial(cls.evaluate_node, context=context, **kwargs)
if isinstance(node, tuple):
return tuple(partial(element) for element in node)
if isinstance(node, list):
return [partial(element) for element in node]
if isinstance(node, dict):
return {partial(key): partial(value) for key, value in node.items()}
if isinstance(node, slice):
return slice(*[partial(getattr(node, attr))
for attr in ['start', 'stop', 'step']])
return node
except Exception as ex: # pragma: no cover
messages = []
interactive = False
if isinstance(node, Node) or not is_iterable(node):
node = [node]
for n in node:
stack = []
if isinstance(n, Node):
for frame in reversed(n._stack): # pylint: disable=protected-access
# Do not capture any internal stack traces
fname = frame.filename
if 'polymath' in fname:
continue
# Stop tracing at the last interactive cell
if interactive and not fname.startswith('<'):
break # pragma: no cover
interactive = fname.startswith('<')
stack.append(frame)
stack = "".join(traceback.format_list(reversed(stack)))
message = "Failed to evaluate node `%s` defined at:\n\n%s" % (n, stack)
messages.append(message)
raise ex from EvaluationError("".join(messages))
@classmethod
def init_from_args(cls, *args,
name=None,
shape=None,
graph=None,
dependencies=None,
op_name=None,
value=None,
**kwargs):
if len(args) == 0:
n = cls(name=name,
shape=shape,
graph=graph,
op_name=op_name,
dependencies=dependencies,
value=value,
**kwargs)
else:
n = cls(*args,
name=name,
shape=shape,
graph=graph,
op_name=op_name,
dependencies=dependencies,
value=value,
**kwargs)
return n
def __bool__(self):
return True
def __hash__(self):
return id(self)
def func_hash(self):
"""
This returns the functional hash of a particular node. The default hash returns an object id, whereas this function
returns a hash of all attributes and subgraphs of a node.
"""
return node_hash(self)
def find_node(self, name):
g = self.graph
while g is not None and name not in g.nodes:
g = g.graph
        if g is not None and name in g.nodes:
return g.nodes[name]
raise RuntimeError(f"Cannot find {name} in graph nodes. Graph: {self.graph}")
def __len__(self):
        # TODO: Update this to check for a finalized shape
        if self.shape == UNSET_SHAPE:
            raise TypeError(f'`shape` must be specified explicitly for node {self}')
return self.shape[0]
def __iter__(self):
num = len(self)
for i in range(num):
yield self[i]
def __eq__(self, other):
return hash(self) == hash(other)
def __getattr__(self, name):
return getattr_(self, name, graph=self.graph)
def __getitem__(self, key):
if self.__class__.__name__ != "Node":
if isinstance(key, (slice, Integral)):
return getitem(self, key, graph=self.graph)
else:
if isinstance(key, (list)):
return var_index(self, key, graph=self)
elif isinstance(key, tuple):
return var_index(self, list(key), graph=self)
else:
return var_index(self, [key], graph=self)
else:
return self.nodes[key]
def __add__(self, other):
return add(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__radd__(self)
def __radd__(self, other):
return add(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__add__(self)
def __sub__(self, other):
return sub(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rsub__(self)
def __rsub__(self, other):
return sub(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__sub__(self)
def __pow__(self, other):
return pow_(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rpow__(self)
def __rpow__(self, other):
return pow_(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rpow__(self)
def __matmul__(self, other):
return matmul(self, other, graph=self.graph)
def __rmatmul__(self, other):
return matmul(other, self, graph=self.graph)
def __mul__(self, other):
return mul(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rmul__(self)
def __rmul__(self, other):
return mul(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__mul__(self)
def __truediv__(self, other):
return truediv(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__truediv__(self)
def __rtruediv__(self, other):
return truediv(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rtruediv__(self)
def __floordiv__(self, other):
return floordiv(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rfloordiv__(self)
def __rfloordiv__(self, other):
return floordiv(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__floordiv__(self)
def __mod__(self, other):
return mod(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rmod__(self)
def __rmod__(self, other):
return mod(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__mod__(self)
def __lshift__(self, other):
return lshift(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rlshift__(self)
def __rlshift__(self, other):
return lshift(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__lshift__(self)
def __rshift__(self, other):
return rshift(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rrshift__(self)
def __rrshift__(self, other):
return rshift(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rshift__(self)
def __and__(self, other):
return and_(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rand__(self)
def __rand__(self, other):
return and_(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__and__(self)
def __or__(self, other):
return or_(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__ror__(self)
def __ror__(self, other):
return or_(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__or__(self)
def __xor__(self, other):
return xor(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rxor__(self)
def __rxor__(self, other):
return xor(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__xor__(self)
def __lt__(self, other):
return lt(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__gt__(self)
def __le__(self, other):
return le(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__ge__(self)
def __ne__(self, other):
return ne(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__ne__(self)
def __gt__(self, other):
return gt(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__lt__(self)
def __ge__(self, other):
return ge(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__le__(self)
def __invert__(self):
return inv(self, graph=self.graph)
def __neg__(self):
return neg(self, graph=self.graph)
def __abs__(self):
return abs_(self, graph=self.graph)
def __pos__(self):
return pos(self, graph=self.graph)
def __reversed__(self):
return reversed_(self, graph=self.graph)
def update_graph_key(self, old_key, new_key):
n = list(map(lambda k: (new_key, self.nodes[k]) if k == old_key else (k, self.nodes[k]), self.nodes.keys()))
self.nodes = Graph(n)
def insert_node(self, node, idx):
node_list = list(self.nodes.items())
node_list.insert(idx, (node.name, node))
self.nodes = Graph(node_list)
def __call__(self, *args, **kwargs):
return self.run(*args, **kwargs)
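# Editor's note: the helper below is an illustrative sketch added for clarity and is not part
# of the original module. It assumes the usage pattern implied by `__enter__`/`__exit__` above:
# a `Node` used as a context manager becomes the active graph, so nodes created inside the
# block are registered under it. All names are hypothetical.
def _example_build_graph():
    with Node(name="main") as graph:
        x = Node(name="x", value=2)
        y = Node(name="y", value=3)
        total = x + y  # builds an `add` func_op owned by `graph`
    return graph, total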
class EvaluationError(RuntimeError):
"""
    Failed to evaluate a node.
"""
class var_index(Node): # pylint: disable=C0103,W0223
"""
Node representing values of a variable corresponding to input index values.
Parameters
----------
var : Node
The multi-dimensional variable used for indexing into.
idx : tuple
Tuple of either integer values or index/index_op nodes.
"""
def __init__(self, var, idx, name=None, **kwargs): # pylint: disable=W0235
if "domain" in kwargs:
domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) else kwargs.pop("domain")
else:
domain = Domain(idx)
super(var_index, self).__init__(var, idx, name=name, domain=domain, **kwargs)
@property
def domain(self):
return self.kwargs["domain"]
@property
def var(self):
var, index_list = self.args
return var
def set_name(self, name):
"""
        Set the name for a variable index, making sure to replicate the new name with
        a unique string which corresponds to the variable/index combination.
Parameters
----------
value : str
Unique name of the node.
Returns
-------
self : Node
This node.
Raises
------
ValueError
            If a node with `value` already exists in the associated graph.
KeyError
If the current name of the node cannot be found in the associated graph.
"""
        # TODO: Need a way to check if the existing node is not equal to the current node as well
if self.graph and name in self.graph.nodes:
raise ValueError(f"duplicate name '{name}' in {self.graph.name}:"
f"Existing: {self.graph.nodes[name].args}\n"
f"New: {self.args}")
if self.graph:
graph = self.graph
if self._name is not None and self._name in graph.nodes:
graph.update_graph_key(self._name, name)
else:
graph.nodes[name] = self
self._name = name
return self
def __getitem__(self, key):
if self.is_shape_finalized() and len(self.nodes) >= np.prod(self.shape):
if isinstance(key, Integral):
key = tuple([key])
idx = np.ravel_multi_index(key, dims=self.shape, order='C')
ret = self.nodes.item_by_index(idx)
return ret
else:
if isinstance(key, (list)):
ret = var_index(self.var, tuple(key), graph=self)
elif isinstance(key, tuple):
ret = var_index(self.var, key, graph=self)
else:
ret = var_index(self.var, tuple([key]), graph=self)
return ret
def is_scalar(self, val=None):
if val is not None and (not isinstance(val, np.ndarray) or (len(val.shape) == 1 and val.shape[0] == 1)):
if self.var.shape != DEFAULT_SHAPES[0] and (len(self.var.shape) == 1 and not isinstance(self.var.shape[0],Node)):
raise ValueError(f"Invalid shape var for var index {self} with variable shape {self.var.shape}")
return True
else:
return self.var.shape == DEFAULT_SHAPES[0]
def _evaluate(self, var, indices, **kwargs):
if self.is_scalar(var):
out_shape = (1,)
indices = (0,)
single = True
else:
out_shape = self.domain.shape_from_indices(indices)
indices = self.domain.compute_pairs()
single = False
if isinstance(var, (Integral, Real, str)):
var = np.asarray([var])
elif not isinstance(var, (np.ndarray, list)):
raise TypeError(f"Variable {var} with type {type(var)} is not a list or numpy array, and cannot be sliced for {self.name}")
elif isinstance(var, list):
var = np.asarray(var)
if len(var.shape) != len(out_shape) and np.prod(var.shape) == np.prod(out_shape):
if len(out_shape) > len(var.shape):
for i in range(len(out_shape)):
if out_shape[i] == 1:
var = np.expand_dims(var, axis=i)
else:
var = np.squeeze(var)
if len(var.shape) != len(out_shape) and np.prod(var.shape) != np.prod(out_shape):
raise ValueError(f"Index list does not match {var.shape} in {self.var.name} - {self.var.op_name}"
f"dimensions for slice {self.args[0].name} with {out_shape}.\n"
f"Domain: {self.domain}\n"
f"Eval Stack: {Node._eval_stack}")
if not single and not all([(idx_val - 1) >= indices[-1][idx] for idx, idx_val in enumerate(var.shape)]):
raise ValueError(f"var_index {self.name} has indices which are greater than the variable shape:\n"
f"\tArgs: {self.args}\n"
f"\tVar shape: {var.shape}\n"
f"\tNode shape: {self.var.shape}\n"
f"\tIndex Upper bounds: {indices[-1]}")
indices = list(map(lambda x: x.tolist() if isinstance(x, np.ndarray) else x, indices))
res = var[indices] if single else np.asarray([var[idx] for idx in indices]).reshape(out_shape)
if out_shape == (1,) and len(indices) == 1:
res = res[0]
self.domain.set_computed(out_shape, indices)
return res
def __add__(self, other):
return slice_op(operator.add, self, other, graph=self.graph)
def __radd__(self, other):
return slice_op(operator.add, other, self, graph=self.graph)
def __sub__(self, other):
return slice_op(operator.sub, self, other, graph=self.graph)
def __rsub__(self, other):
return slice_op(operator.sub, other, self, graph=self.graph)
def __pow__(self, other):
return slice_op(builtins.pow, self, other, graph=self.graph)
def __rpow__(self, other):
return slice_op(builtins.pow, other, self, graph=self.graph)
def __mul__(self, other):
return slice_op(operator.mul, self, other, graph=self.graph)
def __rmul__(self, other):
return slice_op(operator.mul, other, self, graph=self.graph)
def __truediv__(self, other):
return slice_op(operator.truediv, self, other, graph=self.graph)
def __rtruediv__(self, other):
return slice_op(operator.truediv, other, self, graph=self.graph)
def __floordiv__(self, other):
return slice_op(operator.floordiv, self, other, graph=self.graph)
def __rfloordiv__(self, other):
return slice_op(operator.floordiv, other, self, graph=self.graph)
def __mod__(self, other):
return slice_op(operator.mod, self, other, graph=self.graph)
def __rmod__(self, other):
return slice_op(operator.mod, other, self, graph=self.graph)
def __lshift__(self, other):
return slice_op(operator.lshift, self, other, graph=self.graph)
def __rlshift__(self, other):
return slice_op(operator.lshift, other, self, graph=self.graph)
def __rshift__(self, other):
return slice_op(operator.rshift, self, other, graph=self.graph)
def __rrshift__(self, other):
return slice_op(operator.rshift, other, self, graph=self.graph)
def __and__(self, other):
return slice_op(operator.and_, self, other, graph=self.graph)
def __rand__(self, other):
return slice_op(operator.and_, other, self, graph=self.graph)
def __or__(self, other):
return slice_op(operator.or_, self, other, graph=self.graph)
def __ror__(self, other):
return slice_op(operator.or_, other, self, graph=self.graph)
def __xor__(self, other):
return slice_op(operator.xor, self, other, graph=self.graph)
def __rxor__(self, other):
return slice_op(operator.xor, other, self, graph=self.graph)
def __lt__(self, other):
return slice_op(operator.lt, self, other, graph=self.graph)
def __le__(self, other):
        return slice_op(operator.le, self, other, graph=self.graph)
def __ne__(self, other):
return slice_op(operator.ne, self, other, graph=self.graph)
def __gt__(self, other):
return slice_op(operator.gt, self, other, graph=self.graph)
def __ge__(self, other):
return slice_op(operator.ge, self, other, graph=self.graph)
def __repr__(self):
return "<var_index name=%s, index=%s>" % (self.name, self.args)
class slice_op(Node):
"""
Node representing multi-dimensional operations performed on a node.
Parameters
----------
    target : callable
        The operation applied across the domains of the node arguments.
    args : tuple
        Node or constant operands for the operation.
"""
def __init__(self, target, *args, **kwargs):
if "domain" in kwargs:
domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) else kwargs.pop("domain")
else:
all_args = _flatten_iterable(args)
slice1_var, slice1_idx, slice2_var, slice2_idx = self.get_index_nodes(all_args[0], all_args[1])
domain = slice1_idx.combine_set_domains(slice2_idx)
if "op_name" in kwargs:
kwargs.pop("op_name")
target_name = f"{target.__module__}.{target.__name__}"
super(slice_op, self).__init__(*args, target=target_name, domain=domain, op_name=f"slice_{target.__name__}", **kwargs)
self.target = target
@property
def domain(self):
return self.kwargs["domain"]
def __getitem__(self, key):
if isinstance(key, (tuple, list, np.ndarray)) and len(key) == 0:
return self
elif self.is_shape_finalized() and len(self.nodes) > 0:
if isinstance(key, (int, Node)):
key = tuple([key])
if len(key) != len(self.shape):
raise KeyError(f"Invalid key shape for {self.name}:\n"
f"Shape: {self.shape}\n"
f"Key: {key}")
name = f"{self.name}{key}"
if name not in self.nodes.keys():
raise KeyError(f"{name} not in {self.name} keys:\n"
f"Node keys: {list(self.nodes.keys())}")
ret = self.nodes[name]
return ret
else:
name = []
if isinstance(key, Node):
name.append(key.name)
elif hasattr(key, "__len__") and not isinstance(key, str):
for k in key:
if isinstance(k, Node):
name.append(k.name)
else:
name.append(str(k))
else:
name.append(key)
name = self.var.name + "[" + "][".join(name) + "]"
if name in self.graph.nodes:
return self.graph.nodes[name]
elif isinstance(key, (list)):
return var_index(self, key, name=name, graph=self.graph)
elif isinstance(key, tuple):
return var_index(self, list(key), name=name, graph=self.graph)
else:
return var_index(self, [key], name=name, graph=self.graph)
def set_shape(self, shape=None, init=False):
s = []
assert isinstance(shape, (tuple, list))
        if all([isinstance(sv, Integral) for sv in shape]) and len(self.domain) == np.prod(shape) and len(shape) > 0:
self._shape = shape if isinstance(shape, tuple) else tuple(shape)
else:
for idx, d in enumerate(self.domain.dom_set):
if shape and isinstance(shape[idx], (func_op, Integral)):
s.append(shape[idx])
elif shape and isinstance(shape[idx], float):
s.append(int(shape[idx]))
elif isinstance(d, float):
s.append(int(d))
elif isinstance(d, var_index):
s.append(d.domain)
else:
s.append(d)
self._shape = tuple(s)
def is_scalar(self, val):
return not isinstance(val, np.ndarray) or (len(val.shape) == 1 and val.shape[0] == 1)
def _evaluate(self, op1, op2, context=None, **kwargs):
if self.is_scalar(op1) or self.is_scalar(op2):
value = self.target(op1, op2)
else:
arg0_dom = self.args[0].domain
arg1_dom = self.args[1].domain
op1_idx = self.domain.map_sub_domain(arg0_dom) if isinstance(self.args[0], Node) else tuple([])
op2_idx = self.domain.map_sub_domain(arg1_dom) if isinstance(self.args[1], Node) else tuple([])
op1 = np.asarray(list(map(lambda x: op1[x], op1_idx))).reshape(self.domain.computed_shape)
op2 = np.asarray(list(map(lambda x: op2[x], op2_idx))).reshape(self.domain.computed_shape)
value = self.target(op1, op2)
return value
def get_index_nodes(self, slice1_var=None, slice2_var=None):
if slice1_var is None and slice2_var is None:
slice1_var, slice2_var = self.args
if isinstance(slice1_var, (slice_op, var_index)) or _is_node_type_instance(slice1_var, "GroupNode"):
slice1_idx = slice1_var.domain
elif _is_node_type_instance(slice1_var, "index"):
slice1_idx = slice1_var.domain
else:
slice1_idx = Domain(tuple([]))
if isinstance(slice2_var, (slice_op, var_index)) or _is_node_type_instance(slice2_var, "GroupNode"):
slice2_idx = slice2_var.domain
elif _is_node_type_instance(slice2_var, "index"):
slice2_idx = slice2_var.domain
else:
slice2_idx = Domain(tuple([]))
return slice1_var, slice1_idx, slice2_var, slice2_idx
def __add__(self, other):
return slice_op(operator.add, self, other, graph=self.graph)
def __radd__(self, other):
return slice_op(operator.add, other, self, graph=self.graph)
def __sub__(self, other):
return slice_op(operator.sub, self, other, graph=self.graph)
def __rsub__(self, other):
return slice_op(operator.sub, other, self, graph=self.graph)
def __pow__(self, other):
return slice_op(builtins.pow, self, other, graph=self.graph)
def __rpow__(self, other):
return slice_op(builtins.pow, other, self, graph=self.graph)
def __mul__(self, other):
return slice_op(operator.mul, self, other, graph=self.graph)
def __rmul__(self, other):
return slice_op(operator.mul, other, self, graph=self.graph)
def __truediv__(self, other):
return slice_op(operator.truediv, self, other, graph=self.graph)
def __rtruediv__(self, other):
return slice_op(operator.truediv, other, self, graph=self.graph)
def __floordiv__(self, other):
return slice_op(operator.floordiv, self, other, graph=self.graph)
def __rfloordiv__(self, other):
return slice_op(operator.floordiv, other, self, graph=self.graph)
def __mod__(self, other):
return slice_op(operator.mod, self, other, graph=self.graph)
def __rmod__(self, other):
return slice_op(operator.mod, other, self, graph=self.graph)
def __lshift__(self, other):
return slice_op(operator.lshift, self, other, graph=self.graph)
def __rlshift__(self, other):
return slice_op(operator.lshift, other, self, graph=self.graph)
def __rshift__(self, other):
return slice_op(operator.rshift, self, other, graph=self.graph)
def __rrshift__(self, other):
return slice_op(operator.rshift, other, self, graph=self.graph)
def __and__(self, other):
return slice_op(operator.and_, self, other, graph=self.graph)
def __rand__(self, other):
return slice_op(operator.and_, other, self, graph=self.graph)
def __or__(self, other):
return slice_op(operator.or_, self, other, graph=self.graph)
def __ror__(self, other):
return slice_op(operator.or_, other, self, graph=self.graph)
def __xor__(self, other):
return slice_op(operator.xor, self, other, graph=self.graph)
def __rxor__(self, other):
return slice_op(operator.xor, other, self, graph=self.graph)
def __lt__(self, other):
return slice_op(operator.lt, self, other, graph=self.graph)
def __le__(self, other):
        return slice_op(operator.le, self, other, graph=self.graph)
def __ne__(self, other):
return slice_op(operator.ne, self, other, graph=self.graph)
def __gt__(self, other):
return slice_op(operator.gt, self, other, graph=self.graph)
def __ge__(self, other):
return slice_op(operator.ge, self, other, graph=self.graph)
def __repr__(self):
return "<slice_%s '%s'>" % (self.target.__name__, self.name)
class func_op(Node): # pylint: disable=C0103,R0903
"""
Node wrapper for stateless functions.
Parameters
----------
target : callable
function to evaluate the node
args : tuple
positional arguments passed to the target
kwargs : dict
        keyword arguments passed to the target
"""
def __init__(self, target, *args, **kwargs):
kwargs["op_name"] = kwargs["op_name"] if "op_name" in kwargs \
else f"{target.__name__}"
if "domain" in kwargs:
domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) else kwargs.pop("domain")
elif len(args) == 2:
all_args = _flatten_iterable(args)
slice1_var, slice1_idx, slice2_var, slice2_idx = self.get_index_nodes(all_args[0], all_args[1])
domain = slice1_idx.combine_set_domains(slice2_idx)
else:
domain = Domain(tuple([]))
self._target = None
super(func_op, self).__init__(*args, target=f"{target.__module__}.{target.__name__}", domain=domain, **kwargs)
self.target = target
self.added_attrs += ["domain", "target"]
@property
def target(self):
return self._target
@target.setter
def target(self, fnc):
self._target = fnc
self.op_name = f"{fnc.__name__}"
self.kwargs["target"] = f"{fnc.__module__}.{fnc.__name__}"
def __getitem__(self, key):
return self
@property
def domain(self):
return self.kwargs["domain"]
def get_index_nodes(self, slice1_var=None, slice2_var=None):
if slice1_var is None and slice2_var is None:
slice1_var, slice2_var = self.args
if isinstance(slice1_var, (slice_op, var_index)) or _is_node_type_instance(slice1_var, "GroupNode"):
slice1_idx = slice1_var.domain
else:
slice1_idx = Domain(tuple([]))
if isinstance(slice2_var, (slice_op, var_index)) or _is_node_type_instance(slice2_var, "GroupNode"):
slice2_idx = slice2_var.domain
else:
slice2_idx = Domain(tuple([]))
return slice1_var, slice1_idx, slice2_var, slice2_idx
def _evaluate(self, *args, **kwargs):
for aa in list(kwargs.keys()):
if aa in self.added_attrs:
kwargs.pop(aa)
return self.target(*args, **kwargs)
def __call__(self, *args, **kwargs):
return call(self, *args, **kwargs)
def __repr__(self):
return "<func_op '%s' target=%s args=<%d items>>" % \
(self.name, self.kwargs["target"], len(self.args))
def nodeop(target=None, **kwargs):
"""
Decorator for creating nodes from functions.
"""
# This is called when the decorator is used with arguments
if target is None:
return functools.partial(nodeop, **kwargs)
# This is called when the decorator is used without arguments
@functools.wraps(target)
def _wrapper(*args, **kwargs_inner):
return func_op(target, *args, **kwargs_inner, **kwargs)
return _wrapper
@nodeop
def call(func, *args, **kwargs):
"""
Call `func` with positional arguments `args` and keyword arguments `kwargs`.
Parameters
----------
func : callable
Function to call when the node is executed.
args : list
Sequence of positional arguments passed to `func`.
kwargs : dict
Mapping of keyword arguments passed to `func`.
"""
return func(*args, **kwargs)
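# Editor's note: an illustrative sketch of the `nodeop` decorator, added for clarity; it is not
# part of the original module and the function name is hypothetical. Decorating a plain function
# makes each call return a lazily evaluated `func_op` node instead of running immediately.
@nodeop
def _scaled_sum(a, b, factor=1):
    # Executed only when the resulting node is evaluated, not when the node is constructed.
    return (a + b) * factor
# node = _scaled_sum(2, 3, factor=10)   # builds a func_op node
# node.evaluate({})                     # expected to yield 50 under these assumptions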
@contextlib.contextmanager
def control_dependencies(dependencies, graph=None):
"""
Ensure that all `dependencies` are executed before any nodes in this scope.
Parameters
----------
dependencies : list
        Sequence of nodes to be evaluated before evaluating any nodes defined in this
scope.
"""
# Add dependencies to the graph
graph = Node.get_active_graph(graph)
graph.dependencies.extend(dependencies)
yield
# Remove dependencies from the graph
del graph.dependencies[-len(dependencies):]
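# Editor's note: an illustrative (hypothetical) sketch of `control_dependencies`, added for
# clarity and not part of the original module. Nodes created inside the scope inherit the
# listed dependencies, so those are evaluated first:
#
#     with graph:
#         with control_dependencies([setup_node]):
#             result = compute_a + compute_b  # both also depend on setup_node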
#pylint: disable=C0103
abs_ = nodeop(builtins.abs)
dict_ = nodeop(builtins.dict)
help_ = nodeop(builtins.help)
min_ = nodeop(builtins.min)
setattr_ = nodeop(builtins.setattr)
all_ = nodeop(builtins.all)
dir_ = nodeop(builtins.dir)
hex_ = nodeop(builtins.hex)
next_ = nodeop(builtins.next)
slice_ = nodeop(builtins.slice)
any_ = nodeop(builtins.any)
divmod_ = nodeop(builtins.divmod)
id_ = nodeop(builtins.id)
object_ = nodeop(builtins.object)
sorted_ = nodeop(builtins.sorted)
ascii_ = nodeop(builtins.ascii)
enumerate_ = nodeop(builtins.enumerate)
input_ = nodeop(builtins.input)
oct_ = nodeop(builtins.oct)
staticmethod_ = nodeop(builtins.staticmethod)
bin_ = nodeop(builtins.bin)
eval_ = nodeop(builtins.eval)
int_ = nodeop(builtins.int)
open_ = nodeop(builtins.open)
str_ = nodeop(builtins.str)
bool_ = nodeop(builtins.bool)
exec_ = nodeop(builtins.exec)
isinstance_ = nodeop(builtins.isinstance)
ord_ = nodeop(builtins.ord)
sum_ = nodeop(builtins.sum)
bytearray_ = nodeop(builtins.bytearray)
filter_ = nodeop(builtins.filter)
issubclass_ = nodeop(builtins.issubclass)
pow_ = nodeop(builtins.pow)
super_ = nodeop(builtins.super)
bytes_ = nodeop(builtins.bytes)
float_ = nodeop(builtins.float)
iter_ = nodeop(builtins.iter)
print_ = nodeop(builtins.print)
tuple_ = nodeop(builtins.tuple)
callable_ = nodeop(builtins.callable)
format_ = nodeop(builtins.format)
len_ = nodeop(builtins.len)
property_ = nodeop(builtins.property)
type_ = nodeop(builtins.type)
chr_ = nodeop(builtins.chr)
frozenset_ = nodeop(builtins.frozenset)
list_ = nodeop(builtins.list)
range_ = nodeop(builtins.range)
vars_ = nodeop(builtins.vars)
classmethod_ = nodeop(builtins.classmethod)
getattr_ = nodeop(builtins.getattr)
locals_ = nodeop(builtins.locals)
repr_ = nodeop(builtins.repr)
zip_ = nodeop(builtins.zip)
compile_ = nodeop(builtins.compile)
globals_ = nodeop(builtins.globals)
map_ = nodeop(builtins.map)
reversed_ = nodeop(builtins.reversed)
complex_ = nodeop(builtins.complex)
hasattr_ = nodeop(builtins.hasattr)
max_ = nodeop(builtins.max)
round_ = nodeop(builtins.round)
delattr_ = nodeop(builtins.delattr)
hash_ = nodeop(builtins.hash)
memoryview_ = nodeop(builtins.memoryview)
set_ = nodeop(builtins.set)
add = nodeop(operator.add)
and_ = nodeop(operator.and_)
attrgetter = nodeop(operator.attrgetter)
concat = nodeop(operator.concat)
contains = nodeop(operator.contains)
countOf = nodeop(operator.countOf)
delitem = nodeop(operator.delitem)
eq = nodeop(operator.eq)
floordiv = nodeop(operator.floordiv)
ge = nodeop(operator.ge)
getitem = nodeop(operator.getitem)
gt = nodeop(operator.gt)
index = nodeop(operator.index)
indexOf = nodeop(operator.indexOf)
inv = nodeop(operator.inv)
invert = nodeop(operator.invert)
ior = nodeop(operator.ior)
ipow = nodeop(operator.ipow)
irshift = nodeop(operator.irshift)
is_ = nodeop(operator.is_)
is_not = nodeop(operator.is_not)
itemgetter = nodeop(operator.itemgetter)
le = nodeop(operator.le)
length_hint = nodeop(operator.length_hint)
lshift = nodeop(operator.lshift)
lt = nodeop(operator.lt)
matmul = nodeop(operator.matmul)
methodcaller = nodeop(operator.methodcaller)
mod = nodeop(operator.mod)
mul = nodeop(operator.mul)
ne = nodeop(operator.ne)
neg = nodeop(operator.neg)
not_ = nodeop(operator.not_)
or_ = nodeop(operator.or_)
pos = nodeop(operator.pos)
rshift = nodeop(operator.rshift)
setitem = nodeop(operator.setitem)
sub = nodeop(operator.sub)
truediv = nodeop(operator.truediv)
truth = nodeop(operator.truth)
xor = nodeop(operator.xor)
import_ = nodeop(importlib.import_module)
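# Editor's note: a closing illustrative sketch added for clarity, not part of the original
# module. The wrapped builtins and operators above are deferred versions of their targets:
# calling them returns a `func_op` node, and the underlying function runs only on evaluation.
# The helper name and the expected values assume the behavior described in `func_op._evaluate`.
def _example_deferred_ops():
    total = add(2, 3)      # func_op wrapping operator.add
    biggest = max_(4, 7)   # func_op wrapping builtins.max
    return total.evaluate({}), biggest.evaluate({})  # expected (5, 7)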
# rngallen/beyond_basics | actors/models.py | commit 2cfb7d97699a733251e68357a70eada3d0278680
from django.db import models
from django.utils.translation import ugettext_lazy as _
# Create your models here.
class Actor(models.Model):
name = models.CharField(_("name"), max_length=200)
    # If is_star is True, the actor is directed to Hollywood; otherwise to commercial work.
    is_star = models.BooleanField(_("is star"), default=False)
def __str__(self):
return self.name
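    # Editor's note: an illustrative (hypothetical) usage sketch added for clarity, not part of
    # the original file. From a Django shell, creating and filtering actors might look like:
    #
    #     Actor.objects.create(name="Jane Doe", is_star=True)
    #     Actor.objects.filter(is_star=False)   # actors routed to commercial work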
# cwlalyy/mongo-c-driver | docs/buildscripts/docs.py | commit d771be13bc8f7d8b84d233de6fdc725d9bb337cc
"""Build the C client docs.
"""
from __future__ import with_statement
import os
import shutil
import socket
import subprocess
import time
import urllib2
def clean_dir(dir):
try:
shutil.rmtree(dir)
except:
pass
os.makedirs(dir)
def gen_api(dir):
clean_dir(dir)
clean_dir("docs/source/doxygen")
with open(os.devnull, 'w') as null:
subprocess.call(["doxygen", "doxygenConfig"], stdout=null, stderr=null)
os.rename("docs/source/doxygen/html", dir)
def gen_sphinx(dir):
clean_dir(dir)
os.chdir("docs/source/sphinx")
with open(os.devnull, 'w') as null:
subprocess.call(["make", "html"], stdout=null, stderr=null)
os.chdir("../../../")
if os.path.isdir("docs/source/sphinx/build/html"):
os.rename("docs/source/sphinx/build/html", dir)
def version():
"""Get the driver version from doxygenConfig.
"""
with open("doxygenConfig") as f:
for line in f.readlines():
if line.startswith("PROJECT_NUMBER"):
return line.split("=")[1].strip()
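# Editor's note: explanatory comment added for clarity, not part of the original script.
# `version()` scans doxygenConfig for a line such as `PROJECT_NUMBER = 0.98.2` (the value here
# is hypothetical) and returns the right-hand side with surrounding whitespace stripped.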
def main():
print("Generating Sphinx docs in docs/html")
gen_sphinx("docs/html")
print("Generating Doxygen docs in docs/html/api")
gen_api("docs/html/api")
if __name__ == "__main__":
main()
# defgsus/thegame | tilegame/render/rs.py | commit 38a627d9108f1418b94b08831fd640dd87fbba83
import glm
import math
from lib.opengl import RenderSettings
class GameProjection:
def __init__(self, rs: "GameRenderSettings"):
self.rs = rs
self.scale = 10.
self.rotation_deg = 0.
self.location = glm.vec3(0)
self._stack = []
def projection_matrix_4(self) -> glm.mat4:
scale = 1.
ratio = self.rs.render_width / self.rs.render_height
m = glm.ortho(-scale * ratio, scale * ratio, -scale, scale, -10, 10)
return m
def transformation_matrix_4(self) -> glm.mat4:
m = glm.rotate(
glm.mat4(1), -self.rotation_deg / 180 * glm.pi(), glm.vec3(0, 0, 1)
)
m = m * glm.scale(glm.mat4(), glm.vec3(2. / self.scale))
m = m * glm.translate(glm.mat4(), glm.vec3(-self.location.x, -self.location.y, 0))
return m
def transformation_matrix(self) -> glm.mat3:
m = rotation_matrix_2d(self.rotation_deg)
m *= self.scale * .5
m[2][0] = self.location.x
m[2][1] = self.location.y
return m
def push(self):
self._stack.append({
"scale": self.scale,
"rotation": self.rotation_deg,
"location": self.location.__copy__(),
})
def pop(self):
s = self._stack.pop(-1)
self.scale = s["scale"]
self.rotation_deg = s["rotation"]
self.location = s["location"]
def __enter__(self):
self.push()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.pop()
def rotation_matrix_2d(degree: float) -> glm.mat3:
a = degree / 180. * math.pi
sa = math.sin(a)
ca = math.cos(a)
return glm.mat3(
ca, sa, 0,
-sa, ca, 0,
0, 0, 1
)
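# Editor's note: explanatory comment added for clarity, not part of the original module.
# For example, rotation_matrix_2d(90.0) has sin = 1 and cos ~ 0, giving columns
# (0, 1, 0), (-1, 0, 0), (0, 0, 1): a 90-degree counter-clockwise rotation in the
# xy-plane (glm.mat3 is filled column by column).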
class GameRenderSettings(RenderSettings):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.projection = GameProjection(self)
# stungkit/pytorch | tools/stats/export_slow_tests.py | commit 0f05e398705bf15406bce79f7ee57d3935ad2abd
#!/usr/bin/env python3
import argparse
import json
import os
import statistics
from collections import defaultdict
from tools.stats.s3_stat_parser import (
get_previous_reports_for_branch,
Report,
Version2Report,
)
from typing import cast, DefaultDict, Dict, List, Any
from urllib.request import urlopen
SLOW_TESTS_FILE = ".pytorch-slow-tests.json"
SLOW_TEST_CASE_THRESHOLD_SEC = 60.0
RELATIVE_DIFFERENCE_THRESHOLD = 0.1
IGNORED_JOBS = ["asan", "periodic"]
def get_test_case_times() -> Dict[str, float]:
reports: List[Report] = get_previous_reports_for_branch("origin/viable/strict", "")
# an entry will be like ("test_doc_examples (__main__.TestTypeHints)" -> [values]))
test_names_to_times: DefaultDict[str, List[float]] = defaultdict(list)
for report in reports:
if report.get("format_version", 1) != 2: # type: ignore[misc]
raise RuntimeError("S3 format currently handled is version 2 only")
v2report = cast(Version2Report, report)
if any(job_name in str(report["build_job"]) for job_name in IGNORED_JOBS):
continue
for test_file in v2report["files"].values():
for suitename, test_suite in test_file["suites"].items():
for casename, test_case in test_suite["cases"].items():
# The below attaches a __main__ as that matches the format of test.__class__ in
# common_utils.py (where this data will be used), and also matches what the output
# of a running test would look like.
name = f"{casename} (__main__.{suitename})"
succeeded: bool = test_case["status"] is None
if succeeded:
test_names_to_times[name].append(test_case["seconds"])
return {
test_case: statistics.mean(times)
for test_case, times in test_names_to_times.items()
}
def filter_slow_tests(test_cases_dict: Dict[str, float]) -> Dict[str, float]:
return {
test_case: time
for test_case, time in test_cases_dict.items()
if time >= SLOW_TEST_CASE_THRESHOLD_SEC
}
def get_test_infra_slow_tests() -> Dict[str, float]:
url = "https://raw.githubusercontent.com/pytorch/test-infra/generated-stats/stats/slow-tests.json"
contents = urlopen(url, timeout=1).read().decode("utf-8")
return cast(Dict[str, float], json.loads(contents))
def too_similar(
calculated_times: Dict[str, float], other_times: Dict[str, float], threshold: float
) -> bool:
# check that their keys are the same
if calculated_times.keys() != other_times.keys():
return False
for test_case, test_time in calculated_times.items():
other_test_time = other_times[test_case]
relative_difference = abs(
(other_test_time - test_time) / max(other_test_time, test_time)
)
if relative_difference > threshold:
return False
return True
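# Editor's note: an illustrative worked example added for clarity, not part of the original file.
# too_similar({"test_a (__main__.Suite)": 60.0}, {"test_a (__main__.Suite)": 63.0}, 0.1)
# compares |63.0 - 60.0| / 63.0 ~= 0.048, which is below the 10% threshold, so it returns True;
# differing key sets or a larger relative gap would return False.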
def export_slow_tests(options: Any) -> None:
filename = options.filename
if os.path.exists(filename):
print(f"Overwriting existent file: {filename}")
with open(filename, "w+") as file:
slow_test_times: Dict[str, float] = filter_slow_tests(get_test_case_times())
if options.ignore_small_diffs:
test_infra_slow_tests_dict = get_test_infra_slow_tests()
if too_similar(
slow_test_times, test_infra_slow_tests_dict, options.ignore_small_diffs
):
slow_test_times = test_infra_slow_tests_dict
json.dump(
slow_test_times, file, indent=" ", separators=(",", ": "), sort_keys=True
)
file.write("\n")
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(
description="Export a JSON of slow test cases in PyTorch unit test suite"
)
parser.add_argument(
"-f",
"--filename",
nargs="?",
type=str,
default=SLOW_TESTS_FILE,
const=SLOW_TESTS_FILE,
help="Specify a file path to dump slow test times from previous S3 stats. Default file path: .pytorch-slow-tests.json",
)
parser.add_argument(
"--ignore-small-diffs",
nargs="?",
type=float,
const=RELATIVE_DIFFERENCE_THRESHOLD,
help="Compares generated results with stats/slow-tests.json in pytorch/test-infra. If the relative differences "
"between test times for each test are smaller than the threshold and the set of test cases have not "
"changed, we will export the stats already in stats/slow-tests.json. Else, we will export the calculated "
"results. The default threshold is 10%.",
)
return parser.parse_args()
def main() -> None:
options = parse_args()
export_slow_tests(options)
if __name__ == "__main__":
main()
michaeltashman/Horizon | ml/rl/evaluation/weighted_sequential_doubly_robust_estimator.py | ee310b34adeb807bbae379a6e1703d0f725f26a9 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import itertools
import logging
import numpy as np
import scipy as sp
import torch
from ml.rl.evaluation.cpe import CpeEstimate
from ml.rl.evaluation.evaluation_data_page import EvaluationDataPage
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class WeightedSequentialDoublyRobustEstimator:
NUM_SUBSETS_FOR_CB_ESTIMATES = 25
CONFIDENCE_INTERVAL = 0.9
NUM_BOOTSTRAP_SAMPLES = 50
BOOTSTRAP_SAMPLE_PCT = 0.5
def __init__(self, gamma):
self.gamma = gamma
def estimate(
self,
edp: EvaluationDataPage,
num_j_steps,
whether_self_normalize_importance_weights,
) -> CpeEstimate:
# For details, visit https://arxiv.org/pdf/1604.00923.pdf Section 5, 7, 8
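        # Added explanatory comments (not in the original source): each j-step
        # return below importance-samples the first j rewards and falls back to
        # the direct-method (model) estimate from step j+1 onwards; the final
        # estimate blends all j-step returns with weights chosen by minimizing
        # an estimated MSE (a bias term derived from the confidence bounds plus
        # the covariance of the per-trajectory returns).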
(
actions,
rewards,
logged_propensities,
target_propensities,
estimated_q_values,
) = WeightedSequentialDoublyRobustEstimator.transform_to_equal_length_trajectories(
edp.mdp_id,
edp.action_mask.cpu().numpy(),
edp.logged_rewards.cpu().numpy().flatten(),
edp.logged_propensities.cpu().numpy().flatten(),
edp.model_propensities.cpu().numpy(),
edp.model_values.cpu().numpy(),
)
num_trajectories = actions.shape[0]
trajectory_length = actions.shape[1]
j_steps = [float("inf")]
if num_j_steps > 1:
j_steps.append(-1)
if num_j_steps > 2:
interval = trajectory_length // (num_j_steps - 1)
j_steps.extend([i * interval for i in range(1, num_j_steps - 1)])
target_propensity_for_logged_action = np.sum(
np.multiply(target_propensities, actions), axis=2
)
estimated_q_values_for_logged_action = np.sum(
np.multiply(estimated_q_values, actions), axis=2
)
estimated_state_values = np.sum(
np.multiply(target_propensities, estimated_q_values), axis=2
)
importance_weights = target_propensity_for_logged_action / logged_propensities
importance_weights = np.cumprod(importance_weights, axis=1)
importance_weights = WeightedSequentialDoublyRobustEstimator.normalize_importance_weights(
importance_weights, whether_self_normalize_importance_weights
)
importance_weights_one_earlier = (
np.ones([num_trajectories, 1]) * 1.0 / num_trajectories
)
importance_weights_one_earlier = np.hstack(
[importance_weights_one_earlier, importance_weights[:, :-1]]
)
discounts = np.logspace(
start=0, stop=trajectory_length - 1, num=trajectory_length, base=self.gamma
)
j_step_return_trajectories = []
for j_step in j_steps:
j_step_return_trajectories.append(
WeightedSequentialDoublyRobustEstimator.calculate_step_return(
rewards,
discounts,
importance_weights,
importance_weights_one_earlier,
estimated_state_values,
estimated_q_values_for_logged_action,
j_step,
)
)
j_step_return_trajectories = np.array(j_step_return_trajectories)
j_step_returns = np.sum(j_step_return_trajectories, axis=1)
if len(j_step_returns) == 1:
weighted_doubly_robust = j_step_returns[0]
weighted_doubly_robust_std_error = 0.0
else:
# break trajectories into several subsets to estimate confidence bounds
infinite_step_returns = []
num_subsets = int(
min(
num_trajectories / 2,
WeightedSequentialDoublyRobustEstimator.NUM_SUBSETS_FOR_CB_ESTIMATES,
)
)
interval = num_trajectories / num_subsets
for i in range(num_subsets):
trajectory_subset = np.arange(
int(i * interval), int((i + 1) * interval)
)
importance_weights = (
target_propensity_for_logged_action[trajectory_subset]
/ logged_propensities[trajectory_subset]
)
importance_weights = np.cumprod(importance_weights, axis=1)
importance_weights = WeightedSequentialDoublyRobustEstimator.normalize_importance_weights(
importance_weights, whether_self_normalize_importance_weights
)
importance_weights_one_earlier = (
np.ones([len(trajectory_subset), 1]) * 1.0 / len(trajectory_subset)
)
importance_weights_one_earlier = np.hstack(
[importance_weights_one_earlier, importance_weights[:, :-1]]
)
infinite_step_return = np.sum(
WeightedSequentialDoublyRobustEstimator.calculate_step_return(
rewards[trajectory_subset],
discounts,
importance_weights,
importance_weights_one_earlier,
estimated_state_values[trajectory_subset],
estimated_q_values_for_logged_action[trajectory_subset],
float("inf"),
)
)
infinite_step_returns.append(infinite_step_return)
# Compute weighted_doubly_robust mean point estimate using all data
weighted_doubly_robust = self.compute_weighted_doubly_robust_point_estimate(
j_steps,
num_j_steps,
j_step_returns,
infinite_step_returns,
j_step_return_trajectories,
)
# Use bootstrapping to compute weighted_doubly_robust standard error
bootstrapped_means = []
sample_size = int(
WeightedSequentialDoublyRobustEstimator.BOOTSTRAP_SAMPLE_PCT
* num_subsets
)
for _ in range(
WeightedSequentialDoublyRobustEstimator.NUM_BOOTSTRAP_SAMPLES
):
random_idxs = np.random.choice(num_j_steps, sample_size, replace=False)
random_idxs.sort()
wdr_estimate = self.compute_weighted_doubly_robust_point_estimate(
j_steps=[j_steps[i] for i in random_idxs],
num_j_steps=sample_size,
j_step_returns=j_step_returns[random_idxs],
infinite_step_returns=infinite_step_returns,
j_step_return_trajectories=j_step_return_trajectories[random_idxs],
)
bootstrapped_means.append(wdr_estimate)
weighted_doubly_robust_std_error = np.std(bootstrapped_means)
episode_values = np.sum(np.multiply(rewards, discounts), axis=1)
denominator = np.nanmean(episode_values)
if abs(denominator) < 1e-6:
return CpeEstimate(
raw=0.0, normalized=0.0, raw_std_error=0.0, normalized_std_error=0.0
)
return CpeEstimate(
raw=weighted_doubly_robust,
normalized=weighted_doubly_robust / denominator,
raw_std_error=weighted_doubly_robust_std_error,
normalized_std_error=weighted_doubly_robust_std_error / denominator,
)
def compute_weighted_doubly_robust_point_estimate(
self,
j_steps,
num_j_steps,
j_step_returns,
infinite_step_returns,
j_step_return_trajectories,
):
low_bound, high_bound = WeightedSequentialDoublyRobustEstimator.confidence_bounds(
infinite_step_returns,
WeightedSequentialDoublyRobustEstimator.CONFIDENCE_INTERVAL,
)
# decompose error into bias + variance
j_step_bias = np.zeros([num_j_steps])
where_lower = np.where(j_step_returns < low_bound)[0]
j_step_bias[where_lower] = low_bound - j_step_returns[where_lower]
where_higher = np.where(j_step_returns > high_bound)[0]
j_step_bias[where_higher] = j_step_returns[where_higher] - high_bound
covariance = np.cov(j_step_return_trajectories)
error = covariance + j_step_bias.T * j_step_bias
# minimize mse error
constraint = {"type": "eq", "fun": lambda x: np.sum(x) - 1.0}
x = np.zeros([len(j_steps)])
res = sp.optimize.minimize(
mse_loss,
x,
args=error,
constraints=constraint,
bounds=[(0, 1) for _ in range(x.shape[0])],
)
x = np.array(res.x)
return float(np.dot(x, j_step_returns))
@staticmethod
def transform_to_equal_length_trajectories(
mdp_ids,
actions,
rewards,
logged_propensities,
target_propensities,
estimated_q_values,
):
"""
Take in samples (action, rewards, propensities, etc.) and output lists
        of equal-length trajectories (episodes) according to terminals.
        As the raw trajectories are of various lengths, the shorter ones are
        padded with zeros (or ones) at the end.
"""
num_actions = len(target_propensities[0])
terminals = np.zeros(mdp_ids.shape[0])
for x in range(0, mdp_ids.shape[0]):
if x + 1 == mdp_ids.shape[0] or mdp_ids[x, 0] != mdp_ids[x + 1, 0]:
terminals[x] = 1
trajectories = []
episode_start = 0
episode_ends = np.nonzero(terminals)[0]
if len(terminals) - 1 not in episode_ends:
episode_ends = np.append(episode_ends, len(terminals) - 1)
for episode_end in episode_ends:
trajectories.append(np.arange(episode_start, episode_end + 1))
episode_start = episode_end + 1
action_trajectories = []
reward_trajectories = []
logged_propensity_trajectories = []
target_propensity_trajectories = []
Q_value_trajectories = []
for trajectory in trajectories:
action_trajectories.append(actions[trajectory])
reward_trajectories.append(rewards[trajectory])
logged_propensity_trajectories.append(logged_propensities[trajectory])
target_propensity_trajectories.append(target_propensities[trajectory])
Q_value_trajectories.append(estimated_q_values[trajectory])
def to_equal_length(x, fill_value):
x_equal_length = np.array(
list(itertools.zip_longest(*x, fillvalue=fill_value))
).swapaxes(0, 1)
return x_equal_length
action_trajectories = to_equal_length(
action_trajectories, np.zeros([num_actions])
)
reward_trajectories = to_equal_length(reward_trajectories, 0)
logged_propensity_trajectories = to_equal_length(
logged_propensity_trajectories, 1
)
target_propensity_trajectories = to_equal_length(
target_propensity_trajectories, np.zeros([num_actions])
)
Q_value_trajectories = to_equal_length(
Q_value_trajectories, np.zeros([num_actions])
)
return (
action_trajectories,
reward_trajectories,
logged_propensity_trajectories,
target_propensity_trajectories,
Q_value_trajectories,
)
@staticmethod
def normalize_importance_weights(
importance_weights, whether_self_normalize_importance_weights
):
if whether_self_normalize_importance_weights:
sum_importance_weights = np.sum(importance_weights, axis=0)
where_zeros = np.where(sum_importance_weights == 0.0)[0]
sum_importance_weights[where_zeros] = len(importance_weights)
importance_weights[:, where_zeros] = 1.0
importance_weights /= sum_importance_weights
return importance_weights
else:
importance_weights /= importance_weights.shape[0]
return importance_weights
@staticmethod
def calculate_step_return(
rewards,
discounts,
importance_weights,
importance_weights_one_earlier,
estimated_state_values,
estimated_q_values,
j_step,
):
trajectory_length = len(rewards[0])
num_trajectories = len(rewards)
j_step = int(min(j_step, trajectory_length - 1))
weighted_discounts = np.multiply(discounts, importance_weights)
weighted_discounts_one_earlier = np.multiply(
discounts, importance_weights_one_earlier
)
importance_sampled_cumulative_reward = np.sum(
np.multiply(weighted_discounts[:, : j_step + 1], rewards[:, : j_step + 1]),
axis=1,
)
if j_step < trajectory_length - 1:
direct_method_value = (
weighted_discounts_one_earlier[:, j_step + 1]
* estimated_state_values[:, j_step + 1]
)
else:
direct_method_value = np.zeros([num_trajectories])
control_variate = np.sum(
np.multiply(
weighted_discounts[:, : j_step + 1], estimated_q_values[:, : j_step + 1]
)
- np.multiply(
weighted_discounts_one_earlier[:, : j_step + 1],
estimated_state_values[:, : j_step + 1],
),
axis=1,
)
j_step_return = (
importance_sampled_cumulative_reward + direct_method_value - control_variate
)
return j_step_return
@staticmethod
def confidence_bounds(x, confidence):
n = len(x)
m, se = np.mean(x), sp.stats.sem(x)
h = se * sp.stats.t._ppf((1 + confidence) / 2.0, n - 1)
return m - h, m + h
def mse_loss(x, error):
return np.dot(np.dot(x, error), x.T)
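# Hedged usage sketch (added; not part of the original module): a minimal check
# of the confidence_bounds helper on synthetic per-subset returns. The numbers
# are arbitrary and serve only as an illustration; scipy.stats is imported
# explicitly because "import scipy as sp" alone does not load the submodule.
if __name__ == "__main__":
    import scipy.stats  # ensure sp.stats is available for confidence_bounds

    fake_subset_returns = np.random.normal(loc=1.0, scale=0.1, size=25)
    low, high = WeightedSequentialDoublyRobustEstimator.confidence_bounds(
        fake_subset_returns,
        WeightedSequentialDoublyRobustEstimator.CONFIDENCE_INTERVAL,
    )
    print(f"90% confidence interval for the mean return: [{low:.3f}, {high:.3f}]")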
| [((305, 332), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (322, 332), False, 'import logging\n'), ((2219, 2257), 'numpy.cumprod', 'np.cumprod', (['importance_weights'], {'axis': '(1)'}), '(importance_weights, axis=1)\n', (2229, 2257), True, 'import numpy as np\n'), ((2604, 2675), 'numpy.hstack', 'np.hstack', (['[importance_weights_one_earlier, importance_weights[:, :-1]]'], {}), '([importance_weights_one_earlier, importance_weights[:, :-1]])\n', (2613, 2675), True, 'import numpy as np\n'), ((2719, 2811), 'numpy.logspace', 'np.logspace', ([], {'start': '(0)', 'stop': '(trajectory_length - 1)', 'num': 'trajectory_length', 'base': 'self.gamma'}), '(start=0, stop=trajectory_length - 1, num=trajectory_length,\n base=self.gamma)\n', (2730, 2811), True, 'import numpy as np\n'), ((3379, 3415), 'numpy.array', 'np.array', (['j_step_return_trajectories'], {}), '(j_step_return_trajectories)\n', (3387, 3415), True, 'import numpy as np\n'), ((3442, 3484), 'numpy.sum', 'np.sum', (['j_step_return_trajectories'], {'axis': '(1)'}), '(j_step_return_trajectories, axis=1)\n', (3448, 3484), True, 'import numpy as np\n'), ((7126, 7152), 'numpy.nanmean', 'np.nanmean', (['episode_values'], {}), '(episode_values)\n', (7136, 7152), True, 'import numpy as np\n'), ((7336, 7549), 'ml.rl.evaluation.cpe.CpeEstimate', 'CpeEstimate', ([], {'raw': 'weighted_doubly_robust', 'normalized': '(weighted_doubly_robust / denominator)', 'raw_std_error': 'weighted_doubly_robust_std_error', 'normalized_std_error': '(weighted_doubly_robust_std_error / denominator)'}), '(raw=weighted_doubly_robust, normalized=weighted_doubly_robust /\n denominator, raw_std_error=weighted_doubly_robust_std_error,\n normalized_std_error=weighted_doubly_robust_std_error / denominator)\n', (7347, 7549), False, 'from ml.rl.evaluation.cpe import CpeEstimate\n'), ((8085, 8108), 'numpy.zeros', 'np.zeros', (['[num_j_steps]'], {}), '([num_j_steps])\n', (8093, 8108), True, 'import numpy as np\n'), ((8410, 8444), 'numpy.cov', 'np.cov', (['j_step_return_trajectories'], {}), '(j_step_return_trajectories)\n', (8416, 8444), True, 'import numpy as np\n'), ((8851, 8866), 'numpy.array', 'np.array', (['res.x'], {}), '(res.x)\n', (8859, 8866), True, 'import numpy as np\n'), ((9494, 9520), 'numpy.zeros', 'np.zeros', (['mdp_ids.shape[0]'], {}), '(mdp_ids.shape[0])\n', (9502, 9520), True, 'import numpy as np\n'), ((12708, 12750), 'numpy.multiply', 'np.multiply', (['discounts', 'importance_weights'], {}), '(discounts, importance_weights)\n', (12719, 12750), True, 'import numpy as np\n'), ((12792, 12846), 'numpy.multiply', 'np.multiply', (['discounts', 'importance_weights_one_earlier'], {}), '(discounts, importance_weights_one_earlier)\n', (12803, 12846), True, 'import numpy as np\n'), ((14105, 14121), 'numpy.dot', 'np.dot', (['x', 'error'], {}), '(x, error)\n', (14111, 14121), True, 'import numpy as np\n'), ((1792, 1833), 'numpy.multiply', 'np.multiply', (['target_propensities', 'actions'], {}), '(target_propensities, actions)\n', (1803, 1833), True, 'import numpy as np\n'), ((1919, 1959), 'numpy.multiply', 'np.multiply', (['estimated_q_values', 'actions'], {}), '(estimated_q_values, actions)\n', (1930, 1959), True, 'import numpy as np\n'), ((2031, 2083), 'numpy.multiply', 'np.multiply', (['target_propensities', 'estimated_q_values'], {}), '(target_propensities, estimated_q_values)\n', (2042, 2083), True, 'import numpy as np\n'), ((7003, 7029), 'numpy.std', 'np.std', (['bootstrapped_means'], {}), '(bootstrapped_means)\n', (7009, 7029), True, 
'import numpy as np\n'), ((7063, 7094), 'numpy.multiply', 'np.multiply', (['rewards', 'discounts'], {}), '(rewards, discounts)\n', (7074, 7094), True, 'import numpy as np\n'), ((7208, 7293), 'ml.rl.evaluation.cpe.CpeEstimate', 'CpeEstimate', ([], {'raw': '(0.0)', 'normalized': '(0.0)', 'raw_std_error': '(0.0)', 'normalized_std_error': '(0.0)'}), '(raw=0.0, normalized=0.0, raw_std_error=0.0,\n normalized_std_error=0.0)\n', (7219, 7293), False, 'from ml.rl.evaluation.cpe import CpeEstimate\n'), ((8131, 8167), 'numpy.where', 'np.where', (['(j_step_returns < low_bound)'], {}), '(j_step_returns < low_bound)\n', (8139, 8167), True, 'import numpy as np\n'), ((8269, 8306), 'numpy.where', 'np.where', (['(j_step_returns > high_bound)'], {}), '(j_step_returns > high_bound)\n', (8277, 8306), True, 'import numpy as np\n'), ((8888, 8913), 'numpy.dot', 'np.dot', (['x', 'j_step_returns'], {}), '(x, j_step_returns)\n', (8894, 8913), True, 'import numpy as np\n'), ((9755, 9776), 'numpy.nonzero', 'np.nonzero', (['terminals'], {}), '(terminals)\n', (9765, 9776), True, 'import numpy as np\n'), ((10949, 10972), 'numpy.zeros', 'np.zeros', (['[num_actions]'], {}), '([num_actions])\n', (10957, 10972), True, 'import numpy as np\n'), ((11269, 11292), 'numpy.zeros', 'np.zeros', (['[num_actions]'], {}), '([num_actions])\n', (11277, 11292), True, 'import numpy as np\n'), ((11385, 11408), 'numpy.zeros', 'np.zeros', (['[num_actions]'], {}), '([num_actions])\n', (11393, 11408), True, 'import numpy as np\n'), ((11860, 11894), 'numpy.sum', 'np.sum', (['importance_weights'], {'axis': '(0)'}), '(importance_weights, axis=0)\n', (11866, 11894), True, 'import numpy as np\n'), ((12937, 13009), 'numpy.multiply', 'np.multiply', (['weighted_discounts[:, :j_step + 1]', 'rewards[:, :j_step + 1]'], {}), '(weighted_discounts[:, :j_step + 1], rewards[:, :j_step + 1])\n', (12948, 13009), True, 'import numpy as np\n'), ((13303, 13331), 'numpy.zeros', 'np.zeros', (['[num_trajectories]'], {}), '([num_trajectories])\n', (13311, 13331), True, 'import numpy as np\n'), ((13941, 13951), 'numpy.mean', 'np.mean', (['x'], {}), '(x)\n', (13948, 13951), True, 'import numpy as np\n'), ((13953, 13968), 'scipy.stats.sem', 'sp.stats.sem', (['x'], {}), '(x)\n', (13965, 13968), True, 'import scipy as sp\n'), ((13986, 14032), 'scipy.stats.t._ppf', 'sp.stats.t._ppf', (['((1 + confidence) / 2.0)', '(n - 1)'], {}), '((1 + confidence) / 2.0, n - 1)\n', (14001, 14032), True, 'import scipy as sp\n'), ((2497, 2527), 'numpy.ones', 'np.ones', (['[num_trajectories, 1]'], {}), '([num_trajectories, 1])\n', (2504, 2527), True, 'import numpy as np\n'), ((4435, 4473), 'numpy.cumprod', 'np.cumprod', (['importance_weights'], {'axis': '(1)'}), '(importance_weights, axis=1)\n', (4445, 4473), True, 'import numpy as np\n'), ((4887, 4958), 'numpy.hstack', 'np.hstack', (['[importance_weights_one_earlier, importance_weights[:, :-1]]'], {}), '([importance_weights_one_earlier, importance_weights[:, :-1]])\n', (4896, 4958), True, 'import numpy as np\n'), ((6381, 6438), 'numpy.random.choice', 'np.random.choice', (['num_j_steps', 'sample_size'], {'replace': '(False)'}), '(num_j_steps, sample_size, replace=False)\n', (6397, 6438), True, 'import numpy as np\n'), ((9976, 10017), 'numpy.arange', 'np.arange', (['episode_start', '(episode_end + 1)'], {}), '(episode_start, episode_end + 1)\n', (9985, 10017), True, 'import numpy as np\n'), ((11921, 11960), 'numpy.where', 'np.where', (['(sum_importance_weights == 0.0)'], {}), '(sum_importance_weights == 0.0)\n', (11929, 11960), True, 'import 
numpy as np\n'), ((13379, 13467), 'numpy.multiply', 'np.multiply', (['weighted_discounts[:, :j_step + 1]', 'estimated_q_values[:, :j_step + 1]'], {}), '(weighted_discounts[:, :j_step + 1], estimated_q_values[:, :\n j_step + 1])\n', (13390, 13467), True, 'import numpy as np\n'), ((13509, 13612), 'numpy.multiply', 'np.multiply', (['weighted_discounts_one_earlier[:, :j_step + 1]', 'estimated_state_values[:, :j_step + 1]'], {}), '(weighted_discounts_one_earlier[:, :j_step + 1],\n estimated_state_values[:, :j_step + 1])\n', (13520, 13612), True, 'import numpy as np\n'), ((8585, 8594), 'numpy.sum', 'np.sum', (['x'], {}), '(x)\n', (8591, 8594), True, 'import numpy as np\n'), ((10756, 10803), 'itertools.zip_longest', 'itertools.zip_longest', (['*x'], {'fillvalue': 'fill_value'}), '(*x, fillvalue=fill_value)\n', (10777, 10803), False, 'import itertools\n')] |
HeRuivio/-Algorithm | LeetCode/2019-08-03-384-Shuffle-an-Array.py | 1fbe6256630758fda3af68f469471ee246730afc | # -*- coding: utf-8 -*-
# @Author: 何睿
# @Create Date: 2019-08-03 10:48:30
# @Last Modified by: 何睿
# @Last Modified time: 2019-08-03 10:53:15
import copy
import random
from typing import List
class Solution:
def __init__(self, nums: List[int]):
self.shuffle_ = nums
self.original = copy.copy(nums)
def reset(self) -> List[int]:
"""
Resets the array to its original configuration and return it.
"""
return self.original
def shuffle(self) -> List[int]:
"""
Returns a random shuffling of the array.
"""
random.shuffle(self.shuffle_)
return self.shuffle_
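# Hedged usage sketch (added; not part of the original solution): exercises the
# LeetCode 384 interface with an arbitrary input list.
if __name__ == "__main__":
    solution = Solution([1, 2, 3])
    print(solution.shuffle())  # a random permutation, e.g. [3, 1, 2]
    print(solution.reset())   # the original order: [1, 2, 3]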
| [((327, 342), 'copy.copy', 'copy.copy', (['nums'], {}), '(nums)\n', (336, 342), False, 'import copy\n'), ((619, 648), 'random.shuffle', 'random.shuffle', (['self.shuffle_'], {}), '(self.shuffle_)\n', (633, 648), False, 'import random\n')] |
shakedna1/wspc_rep | src/wspc/feature_selection.py | f4492af8cec25a3f7b00687c08d30754a1c0c91f | import numpy as np
import sklearn
import pandas as pd
import scipy.spatial.distance as ssd
from scipy.cluster import hierarchy
from scipy.stats import chi2_contingency
from sklearn.base import BaseEstimator
from sklearn.ensemble import RandomForestClassifier
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_selection import SelectKBest, SelectorMixin
from sklearn.pipeline import Pipeline
class SelectHierarchicalClustering(SelectorMixin, BaseEstimator):
"""
A transformer that clusters the features in X according to dist_matrix, and selects a feature from each cluster with
the highest chi2 score of X[feature] versus y
"""
def __init__(self, dist_matrix=None, threshold=1):
self.dist_matrix = dist_matrix
self.threshold = threshold
def _phi_coef(self, x, y):
"""
Calculates phi coefficient between features
Parameters
----------
x - feature x column
y - feature y column
Returns
----------
phi coefficient value
"""
confusion_matrix = pd.crosstab(x, y)
chi2 = chi2_contingency(confusion_matrix)[0]
n = confusion_matrix.sum().sum()
corr = np.sqrt(chi2 / n)
return corr
def _calc_dist_matrix(self, X):
"""
        Calculate the distance matrix between each pair of features in X; each value is 1 - phi_correlation
"""
X_df = pd.DataFrame.sparse.from_spmatrix(X)
X_corr_mat = X_df.corr(method=self._phi_coef)
feature_corr_dist_matrix = 1 - X_corr_mat
feature_corr_dist_matrix_condensed = ssd.squareform(feature_corr_dist_matrix)
self.dist_matrix = feature_corr_dist_matrix_condensed
def _corr_linkage(self, method='average'):
linkage = hierarchy.linkage(self.dist_matrix, method=method)
return linkage
def _hierarchical_clustering(self, linkage):
"""
Perform hierarchical clustering
Parameters
----------
linkage - linkage dendogram created by hierarchy.linkage(self.distance_matrix, method=method)
Returns
----------
a list of lists, each list represents a cluster and contains the indexes of features belonging
to the cluster
"""
# array of len(X) - array[i] is the cluster number to which sample i belongs
cluster_ids = hierarchy.fcluster(linkage, self.threshold, criterion='distance')
cluster_id_to_feature_idx = {}
for idx, cluster_id in enumerate(cluster_ids):
cluster_id_to_feature_idx.setdefault(cluster_id, []).append(idx)
return list(cluster_id_to_feature_idx.values())
def fit(self, X, y):
"""
Clusters the features (X columns) using self.dist_matrix and self.threshold, and selects a feature from each
cluster with the highest chi2 score versus y.
The attribute self.n_features_ represents the number of features selected (=number of clusters)
The attribute self.selected_features_ is a list of indexes that correspond to the selected features
"""
if not self.dist_matrix:
self._calc_dist_matrix(X)
linkage = self._corr_linkage()
clusters = self._hierarchical_clustering(linkage)
chi2_vals, __ = sklearn.feature_selection.chi2(X, y)
chi2_vals = pd.Series(chi2_vals)
# fitted attributes
self.n_features_ = X.shape[1]
self.selected_features_ = [chi2_vals[cluster].idxmax() for cluster in clusters]
self.clusters_ = clusters
print(f'threshold={self.threshold:.2f}, selected_features={len(self.selected_features_)}')
return self
def _get_support_mask(self):
"""
Get the boolean mask indicating which features are selected
Returns
----------
mask - boolean array of shape [# input features]
An element is True iff its corresponding feature is selected for
retention.
"""
# Checks if the estimator is fitted by verifying the presence of fitted attributes (ending with a trailing
# underscore) and otherwise raises a NotFittedError with the given message.
sklearn.utils.validation.check_is_fitted(self)
mask = np.zeros((self.n_features_, ), dtype=bool)
mask[self.selected_features_] = 1
return mask
def get_fs_pipeline(k, threshold, random_state=0):
"""
Creates feature selection pipeline
Parameters
----------
    k - the k parameter for the SelectKBest feature selection step
    threshold - clustering threshold for the hierarchical clustering
    random_state - random state for the RandomForestClassifier. Default value: 0
Returns
----------
pipeline - feature selection pipeline
"""
pipeline = Pipeline(steps=[('vectorize', CountVectorizer(lowercase=False, binary=True)),
('k_best', SelectKBest(score_func=sklearn.feature_selection.chi2, k=k)),
('cluster', SelectHierarchicalClustering(threshold=threshold)),
('rf', RandomForestClassifier(random_state=random_state))])
return pipeline
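# Hedged usage sketch (added; not part of the original module): fits the feature
# selection pipeline on a tiny, made-up corpus of space-separated feature tokens.
# The data, k and threshold values below are arbitrary illustrations only.
if __name__ == "__main__":
    X_toy = pd.Series(["T1 T2 T3", "T1 T3 T4", "T2 T4 T5", "T1 T2 T5"])
    y_toy = pd.Series([0, 1, 0, 1])
    toy_pipeline = get_fs_pipeline(k=3, threshold=0.9)
    toy_pipeline.fit(X_toy, y_toy)
    print(toy_pipeline.predict(X_toy))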
| [((1112, 1129), 'pandas.crosstab', 'pd.crosstab', (['x', 'y'], {}), '(x, y)\n', (1123, 1129), True, 'import pandas as pd\n'), ((1239, 1256), 'numpy.sqrt', 'np.sqrt', (['(chi2 / n)'], {}), '(chi2 / n)\n', (1246, 1256), True, 'import numpy as np\n'), ((1453, 1489), 'pandas.DataFrame.sparse.from_spmatrix', 'pd.DataFrame.sparse.from_spmatrix', (['X'], {}), '(X)\n', (1486, 1489), True, 'import pandas as pd\n'), ((1641, 1681), 'scipy.spatial.distance.squareform', 'ssd.squareform', (['feature_corr_dist_matrix'], {}), '(feature_corr_dist_matrix)\n', (1655, 1681), True, 'import scipy.spatial.distance as ssd\n'), ((1812, 1862), 'scipy.cluster.hierarchy.linkage', 'hierarchy.linkage', (['self.dist_matrix'], {'method': 'method'}), '(self.dist_matrix, method=method)\n', (1829, 1862), False, 'from scipy.cluster import hierarchy\n'), ((2422, 2487), 'scipy.cluster.hierarchy.fcluster', 'hierarchy.fcluster', (['linkage', 'self.threshold'], {'criterion': '"""distance"""'}), "(linkage, self.threshold, criterion='distance')\n", (2440, 2487), False, 'from scipy.cluster import hierarchy\n'), ((3345, 3381), 'sklearn.feature_selection.chi2', 'sklearn.feature_selection.chi2', (['X', 'y'], {}), '(X, y)\n', (3375, 3381), False, 'import sklearn\n'), ((3402, 3422), 'pandas.Series', 'pd.Series', (['chi2_vals'], {}), '(chi2_vals)\n', (3411, 3422), True, 'import pandas as pd\n'), ((4260, 4306), 'sklearn.utils.validation.check_is_fitted', 'sklearn.utils.validation.check_is_fitted', (['self'], {}), '(self)\n', (4300, 4306), False, 'import sklearn\n'), ((4323, 4364), 'numpy.zeros', 'np.zeros', (['(self.n_features_,)'], {'dtype': 'bool'}), '((self.n_features_,), dtype=bool)\n', (4331, 4364), True, 'import numpy as np\n'), ((1145, 1179), 'scipy.stats.chi2_contingency', 'chi2_contingency', (['confusion_matrix'], {}), '(confusion_matrix)\n', (1161, 1179), False, 'from scipy.stats import chi2_contingency\n'), ((4896, 4941), 'sklearn.feature_extraction.text.CountVectorizer', 'CountVectorizer', ([], {'lowercase': '(False)', 'binary': '(True)'}), '(lowercase=False, binary=True)\n', (4911, 4941), False, 'from sklearn.feature_extraction.text import CountVectorizer\n'), ((4986, 5045), 'sklearn.feature_selection.SelectKBest', 'SelectKBest', ([], {'score_func': 'sklearn.feature_selection.chi2', 'k': 'k'}), '(score_func=sklearn.feature_selection.chi2, k=k)\n', (4997, 5045), False, 'from sklearn.feature_selection import SelectKBest, SelectorMixin\n'), ((5181, 5230), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {'random_state': 'random_state'}), '(random_state=random_state)\n', (5203, 5230), False, 'from sklearn.ensemble import RandomForestClassifier\n')] |
fsj-digital/pages | Python3/PS_scraping_selenium.py | 8360f27e67974ed2b4f39eb64377f39c0189a224 | from bs4 import BeautifulSoup
import requests
import re
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.touch_actions import TouchActions
URL = 'https://shopping.thinkwithgoogle.com'
EXAMPLES = ["Demonstrate unexpected use-case",
"Demonstrate google search",
"Demonstrate search on thinkwithgoogle",
"Demonstrate search on WebDriverWait",
"Demonstrate search on thinkwithgoogle search result",
"Download and extract additional data",
"Demonstrate maximizing screen",
"Demonstrate mouse actions for Chrome",
"Demonstrate navigation"]
def run(input, URL):
if(input == 0):
content = requests.get(URL)
soup = BeautifulSoup(content.text,'html.parser')
print(soup.prettify()) # Print row with HTML formatting
elif(input == 1):
driver = webdriver.Safari()
driver.get("https://www.google.com")
search = driver.find_element_by_name("q")
search.send_keys("Selenium") # Google Search "Selenium"
search.submit()
elif(input == 2):
browser = webdriver.Safari()
browser.get(URL)
time.sleep(5)
search = browser.find_elements_by_id('subjectInput')[1]
search.send_keys('Google Pixel 3') # Google Search "Google Pixel 3"
time.sleep(5)
search.send_keys(Keys.RETURN)
elif(input == 3):
browser = webdriver.Safari()
browser.maximize_window() # Required for the input tag visibility
browser.get('https://trends.google.com/trends/')
try: # proceed if element is found within 3 seconds otherwise raise TimeoutException
element = WebDriverWait(browser, 3).until(EC.presence_of_element_located((By.ID, 'input-254')))
except TimeoutException:
print("Loading took too much time!")
search = browser.find_elements(By.ID,'input-254')[0]
search.send_keys('Google Pixel 3')
elif(input == 4):
browser = webdriver.Safari()
browser.get(URL) # with visibility search
time.sleep(2)
search = returnVisibleElement(browser.find_elements_by_id('subjectInput'))
search.send_keys('Google Pixel 3')
time.sleep(2)
search.send_keys(Keys.ENTER)
elif(input == 5):
browser = webdriver.Safari()
browser.maximize_window() # Required for the button visibility
browser.get(URL) # with visibility search
time.sleep(2)
search = returnVisibleElement(browser.find_elements_by_id('subjectInput'))
search.send_keys('Google Pixel 3')
time.sleep(2)
search.send_keys(Keys.ENTER)
time.sleep(2)
        # these elements carry two CSS classes, so a CSS selector is needed
        # (find_element_by_class_name rejects compound class names)
        browser.find_element_by_css_selector('.si-button-data.download-all').click()
        data = browser.find_element_by_css_selector('.content.content-breakpoint-gt-md')
dataList = data.find_elements_by_tag_name('li')
for item in dataList:
text = item.text
print(text)
elif(input == 6):
browser = webdriver.Safari()
browser.maximize_window() # Required for the button visibility
browser.get(URL) # with visibility search
time.sleep(2)
element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath("//i[@class='material-icons'][contains(./text(),'help')]"))
elif(input == 7):
browser = webdriver.Chrome()
browser.maximize_window() # Required for the button visibility
browser.get(URL) # with visibility search
time.sleep(2)
element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath("//i[@class='material-icons'][contains(./text(),'help')]"))
## ActionChains are not supported in safari but will work on other browser
## https://github.com/seleniumhq/selenium-google-code-issue-archive/issues/4136
ActionChains(browser).click(element_to_hover_over).perform()
TouchActions(browser).long_press(element_to_hover_over).perform()
elif(input == 8):
browser = webdriver.Safari()
browser.maximize_window() # Required for the button visibility
browser.get(URL) # with visibility search
time.sleep(2)
search = returnVisibleElement(browser.find_elements_by_id('subjectInput'))
search.send_keys('Google Pixel 3')
time.sleep(2)
search.send_keys(Keys.ENTER)
time.sleep(2)
        data = browser.find_element_by_css_selector('.content.content-breakpoint-gt-md')
dataList = data.find_elements_by_tag_name('li')
for item in dataList:
text = item.text
print(text)
browser.back()
print('\n' * 5) # For convenient visual
def returnVisibleElement(listOfInputElements):
for element in listOfInputElements:
if element.is_displayed():
return element
def printSelection():
print('Press:')
for i in range(0, len(EXAMPLES)):
print('',i,'to',EXAMPLES[i], sep = ' ')
if __name__ == '__main__':
while(True):
printSelection()
choice = input('Enter choice: ')
try:
choice = int(choice)
except ValueError:
print('Invalid input, stop program')
break
if(choice not in range(0,9)):
print('Invalid input, stop program')
break
run(int(choice), URL)
| [((1107, 1124), 'requests.get', 'requests.get', (['URL'], {}), '(URL)\n', (1119, 1124), False, 'import requests\n'), ((1140, 1182), 'bs4.BeautifulSoup', 'BeautifulSoup', (['content.text', '"""html.parser"""'], {}), "(content.text, 'html.parser')\n", (1153, 1182), False, 'from bs4 import BeautifulSoup\n'), ((1296, 1314), 'selenium.webdriver.Safari', 'webdriver.Safari', ([], {}), '()\n', (1312, 1314), False, 'from selenium import webdriver\n'), ((1563, 1581), 'selenium.webdriver.Safari', 'webdriver.Safari', ([], {}), '()\n', (1579, 1581), False, 'from selenium import webdriver\n'), ((1628, 1641), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1638, 1641), False, 'import time\n'), ((1796, 1809), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1806, 1809), False, 'import time\n'), ((1888, 1906), 'selenium.webdriver.Safari', 'webdriver.Safari', ([], {}), '()\n', (1904, 1906), False, 'from selenium import webdriver\n'), ((2493, 2511), 'selenium.webdriver.Safari', 'webdriver.Safari', ([], {}), '()\n', (2509, 2511), False, 'from selenium import webdriver\n'), ((2589, 2602), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (2599, 2602), False, 'import time\n'), ((2737, 2750), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (2747, 2750), False, 'import time\n'), ((2213, 2265), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'EC.presence_of_element_located', (["(By.ID, 'input-254')"], {}), "((By.ID, 'input-254'))\n", (2243, 2265), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((2828, 2846), 'selenium.webdriver.Safari', 'webdriver.Safari', ([], {}), '()\n', (2844, 2846), False, 'from selenium import webdriver\n'), ((3001, 3014), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3011, 3014), False, 'import time\n'), ((3149, 3162), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3159, 3162), False, 'import time\n'), ((3208, 3221), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3218, 3221), False, 'import time\n'), ((2181, 2206), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['browser', '(3)'], {}), '(browser, 3)\n', (2194, 2206), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((3569, 3587), 'selenium.webdriver.Safari', 'webdriver.Safari', ([], {}), '()\n', (3585, 3587), False, 'from selenium import webdriver\n'), ((3742, 3755), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3752, 3755), False, 'import time\n'), ((3940, 3958), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (3956, 3958), False, 'from selenium import webdriver\n'), ((4113, 4126), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (4123, 4126), False, 'import time\n'), ((4619, 4637), 'selenium.webdriver.Safari', 'webdriver.Safari', ([], {}), '()\n', (4635, 4637), False, 'from selenium import webdriver\n'), ((4792, 4805), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (4802, 4805), False, 'import time\n'), ((4940, 4953), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (4950, 4953), False, 'import time\n'), ((4999, 5012), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (5009, 5012), False, 'import time\n'), ((4444, 4465), 'selenium.webdriver.common.action_chains.ActionChains', 'ActionChains', (['browser'], {}), '(browser)\n', (4456, 4465), False, 'from selenium.webdriver.common.action_chains import ActionChains\n'), ((4513, 4534), 'selenium.webdriver.common.touch_actions.TouchActions', 'TouchActions', (['browser'], {}), '(browser)\n', (4525, 4534), False, 'from 
selenium.webdriver.common.touch_actions import TouchActions\n')] |
STRATOLOGIC/SpacePyLibrary | AppTest/testTCPserver.py | 89fc3873c6d787ad4e391f6080d9dd3218ffc4a2 | #!/usr/bin/env python3
#******************************************************************************
# (C) 2018, Stefan Korner, Austria *
# *
# The Space Python Library is free software; you can redistribute it and/or *
# modify it under under the terms of the MIT License as published by the *
# Massachusetts Institute of Technology. *
# *
# The Space Python Library is distributed in the hope that it will be useful, *
# but WITHOUT ANY WARRANTY; without even the implied warranty of *
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the MIT License *
# for more details. *
#******************************************************************************
# Unit Tests *
#******************************************************************************
import sys
from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR
import UTIL.SYS, UTIL.TASK, UTIL.TCP
#############
# constants #
#############
LINEBUFFERLEN = 256
###########
# classes #
###########
# =============================================================================
class TCPserver(UTIL.TCP.SingleClientServer):
"""Subclass of UTIL.TCP.SingleClientServer"""
# ---------------------------------------------------------------------------
def __init__(self, portNr):
"""Initialise attributes only"""
modelTask = UTIL.TASK.s_processingTask
UTIL.TCP.SingleClientServer.__init__(self, modelTask, portNr)
self.tcpLineBuffer = ""
# ---------------------------------------------------------------------------
def receiveCallback(self, socket, stateMask):
"""Callback when a client has send data"""
LOG("*** receiveCallback ***")
# read the next set of byte from the data socket
data = self.recv(LINEBUFFERLEN)
    if data is None:
# client is automatically disconnected
return
tcpLineBuffer = self.tcpLineBuffer
tcpLineBuffer += data.decode("ascii")
LOG("tcpLineBuffer: " + tcpLineBuffer)
# handle the input: extract the lines from the line buffer
lines = tcpLineBuffer.split("\n")
# the last line has to be handled in a special way and can not be
# processed directly
lastLine = lines[-1]
lines = lines[:-1]
if lastLine == "":
# read of the data was complete (incl. "\n")
pass
else:
      # last line was cut off and the rest should come with the next read
self.tcpLineBuffer = lastLine
for line in lines:
      # remove a terminating "\r" for clients like telnet
      # (endswith also copes with empty lines)
      if line.endswith("\r"):
        line = line[:-1]
# terminate the client connection if exit has been entered (case insensitive)
upperLine = line.upper()
if (upperLine == "X") or (upperLine == "EXIT"):
LOG("Exit requested")
# send the OK response back to the client
retString = "OK\n"
self.send(retString.encode())
# terminate the client connection
        self.disconnectClient()
return
if (upperLine == "Q") or (upperLine == "QUIT"):
LOG("Quit requested")
# send the OK response back to the client
retString = "OK\n"
self.send(retString.encode())
# terminate the client connection
        self.disconnectClient()
        sys.exit(0)
      # delegate the input
      pstatus = self.processLine(line)
if pstatus == 0:
LOG("OK")
# send the OK response back to the TECO
retString = "OK\n";
self.send(retString.encode())
else:
LOG_ERROR(str(pstatus))
        # send the Error response back to the client:
retString = "Error: execution failed (see log)!\n"
self.send(retString.encode())
# ---------------------------------------------------------------------------
def processLine(self, line):
"""Callback when a client has send a data line"""
LOG("line = " + line)
return 0
#############
# functions #
#############
# -----------------------------------------------------------------------------
def initConfiguration():
"""initialise the system configuration"""
UTIL.SYS.s_configuration.setDefaults([
["HOST", "127.0.0.1"],
["SERVER_PORT", "1234"]])
# -----------------------------------------------------------------------------
def createServer():
"""create the TCP server"""
server = TCPserver(portNr=int(UTIL.SYS.s_configuration.SERVER_PORT))
if not server.openConnectPort(UTIL.SYS.s_configuration.HOST):
sys.exit(-1)
# activate zyclic idle function
idleFunction()
# -----------------------------------------------------------------------------
def idleFunction():
UTIL.TASK.s_processingTask.createTimeHandler(1000, idleFunction)
LOG("--- idle ---")
########
# main #
########
if __name__ == "__main__":
# initialise the system configuration
initConfiguration()
# initialise the console handler
consoleHandler = UTIL.TASK.ConsoleHandler()
# initialise the model
modelTask = UTIL.TASK.ProcessingTask(isParent=True)
# register the console handler
modelTask.registerConsoleHandler(consoleHandler)
# create the TCP server
LOG("Open the TCP server")
createServer()
# start the tasks
LOG("start modelTask...")
modelTask.start()
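# Hedged usage note (added; not part of the original file): with the defaults
# above the server listens on 127.0.0.1:1234, so a quick manual test is
# possible with any line-based TCP client, e.g.:
#   telnet 127.0.0.1 1234
# Every received line is passed to TCPserver.processLine(); sending "exit"
# (or "x") closes the client connection, "quit" (or "q") stops the server.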
| [((5010, 5029), 'UTIL.SYS.LOG', 'LOG', (['"""--- idle ---"""'], {}), "('--- idle ---')\n", (5013, 5029), False, 'from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR\n'), ((5419, 5445), 'UTIL.SYS.LOG', 'LOG', (['"""Open the TCP server"""'], {}), "('Open the TCP server')\n", (5422, 5445), False, 'from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR\n'), ((5485, 5510), 'UTIL.SYS.LOG', 'LOG', (['"""start modelTask..."""'], {}), "('start modelTask...')\n", (5488, 5510), False, 'from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR\n'), ((1994, 2024), 'UTIL.SYS.LOG', 'LOG', (['"""*** receiveCallback ***"""'], {}), "('*** receiveCallback ***')\n", (1997, 2024), False, 'from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR\n'), ((2278, 2316), 'UTIL.SYS.LOG', 'LOG', (["('tcpLineBuffer: ' + tcpLineBuffer)"], {}), "('tcpLineBuffer: ' + tcpLineBuffer)\n", (2281, 2316), False, 'from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR\n'), ((4183, 4204), 'UTIL.SYS.LOG', 'LOG', (["('line = ' + line)"], {}), "('line = ' + line)\n", (4186, 4204), False, 'from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR\n'), ((4777, 4789), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (4785, 4789), False, 'import sys\n'), ((3077, 3098), 'UTIL.SYS.LOG', 'LOG', (['"""Exit requested"""'], {}), "('Exit requested')\n", (3080, 3098), False, 'from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR\n'), ((3366, 3387), 'UTIL.SYS.LOG', 'LOG', (['"""Quit requested"""'], {}), "('Quit requested')\n", (3369, 3387), False, 'from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR\n'), ((3586, 3597), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3594, 3597), False, 'import sys\n'), ((3696, 3705), 'UTIL.SYS.LOG', 'LOG', (['"""OK"""'], {}), "('OK')\n", (3699, 3705), False, 'from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR\n')] |
yoavcaspi/pre-commit | tests/clientlib_test.py | 77947f212e7b88a479dbe6feebc60a9f773e8c13 | from __future__ import unicode_literals
import logging
import cfgv
import pytest
import pre_commit.constants as C
from pre_commit.clientlib import check_type_tag
from pre_commit.clientlib import CONFIG_HOOK_DICT
from pre_commit.clientlib import CONFIG_REPO_DICT
from pre_commit.clientlib import CONFIG_SCHEMA
from pre_commit.clientlib import DEFAULT_LANGUAGE_VERSION
from pre_commit.clientlib import MANIFEST_SCHEMA
from pre_commit.clientlib import MigrateShaToRev
from pre_commit.clientlib import validate_config_main
from pre_commit.clientlib import validate_manifest_main
from testing.fixtures import sample_local_config
def is_valid_according_to_schema(obj, obj_schema):
try:
cfgv.validate(obj, obj_schema)
return True
except cfgv.ValidationError:
return False
@pytest.mark.parametrize('value', ('definitely-not-a-tag', 'fiel'))
def test_check_type_tag_failures(value):
with pytest.raises(cfgv.ValidationError):
check_type_tag(value)
@pytest.mark.parametrize(
('config_obj', 'expected'), (
(
{
'repos': [{
'repo': '[email protected]:pre-commit/pre-commit-hooks',
'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
'hooks': [{'id': 'pyflakes', 'files': '\\.py$'}],
}],
},
True,
),
(
{
'repos': [{
'repo': '[email protected]:pre-commit/pre-commit-hooks',
'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
'hooks': [
{
'id': 'pyflakes',
'files': '\\.py$',
'args': ['foo', 'bar', 'baz'],
},
],
}],
},
True,
),
(
{
'repos': [{
'repo': '[email protected]:pre-commit/pre-commit-hooks',
'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
'hooks': [
{
'id': 'pyflakes',
'files': '\\.py$',
# Exclude pattern must be a string
'exclude': 0,
'args': ['foo', 'bar', 'baz'],
},
],
}],
},
False,
),
),
)
def test_config_valid(config_obj, expected):
ret = is_valid_according_to_schema(config_obj, CONFIG_SCHEMA)
assert ret is expected
def test_local_hooks_with_rev_fails():
config_obj = {'repos': [dict(sample_local_config(), rev='foo')]}
with pytest.raises(cfgv.ValidationError):
cfgv.validate(config_obj, CONFIG_SCHEMA)
def test_config_with_local_hooks_definition_passes():
config_obj = {'repos': [sample_local_config()]}
cfgv.validate(config_obj, CONFIG_SCHEMA)
def test_config_schema_does_not_contain_defaults():
"""Due to the way our merging works, if this schema has any defaults they
will clobber potentially useful values in the backing manifest. #227
"""
for item in CONFIG_HOOK_DICT.items:
assert not isinstance(item, cfgv.Optional)
def test_validate_manifest_main_ok():
assert not validate_manifest_main(('.pre-commit-hooks.yaml',))
def test_validate_config_main_ok():
assert not validate_config_main(('.pre-commit-config.yaml',))
def test_validate_config_old_list_format_ok(tmpdir):
f = tmpdir.join('cfg.yaml')
f.write('- {repo: meta, hooks: [{id: identity}]}')
assert not validate_config_main((f.strpath,))
def test_validate_warn_on_unknown_keys_at_repo_level(tmpdir, caplog):
f = tmpdir.join('cfg.yaml')
f.write(
'- repo: https://gitlab.com/pycqa/flake8\n'
' rev: 3.7.7\n'
' hooks:\n'
' - id: flake8\n'
' args: [--some-args]\n',
)
ret_val = validate_config_main((f.strpath,))
assert not ret_val
assert caplog.record_tuples == [
(
'pre_commit',
logging.WARNING,
'Unexpected config key(s): args',
),
]
def test_validate_warn_on_unknown_keys_at_top_level(tmpdir, caplog):
f = tmpdir.join('cfg.yaml')
f.write(
'repos:\n'
'- repo: https://gitlab.com/pycqa/flake8\n'
' rev: 3.7.7\n'
' hooks:\n'
' - id: flake8\n'
'foo:\n'
' id: 1.0.0\n',
)
ret_val = validate_config_main((f.strpath,))
assert not ret_val
assert caplog.record_tuples == [
(
'pre_commit',
logging.WARNING,
'Unexpected config key(s): foo',
),
]
@pytest.mark.parametrize('fn', (validate_config_main, validate_manifest_main))
def test_mains_not_ok(tmpdir, fn):
not_yaml = tmpdir.join('f.notyaml')
not_yaml.write('{')
not_schema = tmpdir.join('notconfig.yaml')
not_schema.write('{}')
assert fn(('does-not-exist',))
assert fn((not_yaml.strpath,))
assert fn((not_schema.strpath,))
@pytest.mark.parametrize(
('manifest_obj', 'expected'),
(
(
[{
'id': 'a',
'name': 'b',
'entry': 'c',
'language': 'python',
'files': r'\.py$',
}],
True,
),
(
[{
'id': 'a',
'name': 'b',
'entry': 'c',
'language': 'python',
'language_version': 'python3.4',
'files': r'\.py$',
}],
True,
),
(
# A regression in 0.13.5: always_run and files are permissible
[{
'id': 'a',
'name': 'b',
'entry': 'c',
'language': 'python',
'files': '',
'always_run': True,
}],
True,
),
),
)
def test_valid_manifests(manifest_obj, expected):
ret = is_valid_according_to_schema(manifest_obj, MANIFEST_SCHEMA)
assert ret is expected
@pytest.mark.parametrize(
'dct',
(
{'repo': 'local'}, {'repo': 'meta'},
{'repo': 'wat', 'sha': 'wat'}, {'repo': 'wat', 'rev': 'wat'},
),
)
def test_migrate_sha_to_rev_ok(dct):
MigrateShaToRev().check(dct)
def test_migrate_sha_to_rev_dont_specify_both():
with pytest.raises(cfgv.ValidationError) as excinfo:
MigrateShaToRev().check({'repo': 'a', 'sha': 'b', 'rev': 'c'})
msg, = excinfo.value.args
assert msg == 'Cannot specify both sha and rev'
@pytest.mark.parametrize(
'dct',
(
{'repo': 'a'},
{'repo': 'meta', 'sha': 'a'}, {'repo': 'meta', 'rev': 'a'},
),
)
def test_migrate_sha_to_rev_conditional_check_failures(dct):
with pytest.raises(cfgv.ValidationError):
MigrateShaToRev().check(dct)
def test_migrate_to_sha_apply_default():
dct = {'repo': 'a', 'sha': 'b'}
MigrateShaToRev().apply_default(dct)
assert dct == {'repo': 'a', 'rev': 'b'}
def test_migrate_to_sha_ok():
dct = {'repo': 'a', 'rev': 'b'}
MigrateShaToRev().apply_default(dct)
assert dct == {'repo': 'a', 'rev': 'b'}
@pytest.mark.parametrize(
'config_repo',
(
# i-dont-exist isn't a valid hook
{'repo': 'meta', 'hooks': [{'id': 'i-dont-exist'}]},
# invalid to set a language for a meta hook
{'repo': 'meta', 'hooks': [{'id': 'identity', 'language': 'python'}]},
# name override must be string
{'repo': 'meta', 'hooks': [{'id': 'identity', 'name': False}]},
),
)
def test_meta_hook_invalid(config_repo):
with pytest.raises(cfgv.ValidationError):
cfgv.validate(config_repo, CONFIG_REPO_DICT)
@pytest.mark.parametrize(
'mapping',
(
# invalid language key
{'pony': '1.0'},
# not a string for version
{'python': 3},
),
)
def test_default_language_version_invalid(mapping):
with pytest.raises(cfgv.ValidationError):
cfgv.validate(mapping, DEFAULT_LANGUAGE_VERSION)
def test_minimum_pre_commit_version_failing():
with pytest.raises(cfgv.ValidationError) as excinfo:
cfg = {'repos': [], 'minimum_pre_commit_version': '999'}
cfgv.validate(cfg, CONFIG_SCHEMA)
assert str(excinfo.value) == (
'\n'
'==> At Config()\n'
'==> At key: minimum_pre_commit_version\n'
'=====> pre-commit version 999 is required but version {} is '
'installed. Perhaps run `pip install --upgrade pre-commit`.'.format(
C.VERSION,
)
)
def test_minimum_pre_commit_version_passing():
cfg = {'repos': [], 'minimum_pre_commit_version': '0'}
cfgv.validate(cfg, CONFIG_SCHEMA)
@pytest.mark.parametrize('schema', (CONFIG_SCHEMA, CONFIG_REPO_DICT))
def test_warn_additional(schema):
allowed_keys = {item.key for item in schema.items if hasattr(item, 'key')}
warn_additional, = [
x for x in schema.items if isinstance(x, cfgv.WarnAdditionalKeys)
]
assert allowed_keys == set(warn_additional.keys)
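# Note (added; not part of the original test module): these tests are written
# for pytest and are normally collected from the repository root, e.g.
#   pytest tests/clientlib_test.py -q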
| [((805, 871), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""value"""', "('definitely-not-a-tag', 'fiel')"], {}), "('value', ('definitely-not-a-tag', 'fiel'))\n", (828, 871), False, 'import pytest\n'), ((992, 1697), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('config_obj', 'expected')", "(({'repos': [{'repo': '[email protected]:pre-commit/pre-commit-hooks', 'rev':\n 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', 'hooks': [{'id': 'pyflakes',\n 'files': '\\\\.py$'}]}]}, True), ({'repos': [{'repo':\n '[email protected]:pre-commit/pre-commit-hooks', 'rev':\n 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', 'hooks': [{'id': 'pyflakes',\n 'files': '\\\\.py$', 'args': ['foo', 'bar', 'baz']}]}]}, True), ({'repos':\n [{'repo': '[email protected]:pre-commit/pre-commit-hooks', 'rev':\n 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', 'hooks': [{'id': 'pyflakes',\n 'files': '\\\\.py$', 'exclude': 0, 'args': ['foo', 'bar', 'baz']}]}]}, False)\n )"], {}), "(('config_obj', 'expected'), (({'repos': [{'repo':\n '[email protected]:pre-commit/pre-commit-hooks', 'rev':\n 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', 'hooks': [{'id': 'pyflakes',\n 'files': '\\\\.py$'}]}]}, True), ({'repos': [{'repo':\n '[email protected]:pre-commit/pre-commit-hooks', 'rev':\n 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', 'hooks': [{'id': 'pyflakes',\n 'files': '\\\\.py$', 'args': ['foo', 'bar', 'baz']}]}]}, True), ({'repos':\n [{'repo': '[email protected]:pre-commit/pre-commit-hooks', 'rev':\n 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', 'hooks': [{'id': 'pyflakes',\n 'files': '\\\\.py$', 'exclude': 0, 'args': ['foo', 'bar', 'baz']}]}]}, \n False)))\n", (1015, 1697), False, 'import pytest\n'), ((4832, 4909), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""fn"""', '(validate_config_main, validate_manifest_main)'], {}), "('fn', (validate_config_main, validate_manifest_main))\n", (4855, 4909), False, 'import pytest\n'), ((5194, 5590), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('manifest_obj', 'expected')", "(([{'id': 'a', 'name': 'b', 'entry': 'c', 'language': 'python', 'files':\n '\\\\.py$'}], True), ([{'id': 'a', 'name': 'b', 'entry': 'c', 'language':\n 'python', 'language_version': 'python3.4', 'files': '\\\\.py$'}], True),\n ([{'id': 'a', 'name': 'b', 'entry': 'c', 'language': 'python', 'files':\n '', 'always_run': True}], True))"], {}), "(('manifest_obj', 'expected'), (([{'id': 'a', 'name':\n 'b', 'entry': 'c', 'language': 'python', 'files': '\\\\.py$'}], True), ([\n {'id': 'a', 'name': 'b', 'entry': 'c', 'language': 'python',\n 'language_version': 'python3.4', 'files': '\\\\.py$'}], True), ([{'id':\n 'a', 'name': 'b', 'entry': 'c', 'language': 'python', 'files': '',\n 'always_run': True}], True)))\n", (5217, 5590), False, 'import pytest\n'), ((6259, 6395), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""dct"""', "({'repo': 'local'}, {'repo': 'meta'}, {'repo': 'wat', 'sha': 'wat'}, {\n 'repo': 'wat', 'rev': 'wat'})"], {}), "('dct', ({'repo': 'local'}, {'repo': 'meta'}, {\n 'repo': 'wat', 'sha': 'wat'}, {'repo': 'wat', 'rev': 'wat'}))\n", (6282, 6395), False, 'import pytest\n'), ((6759, 6870), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""dct"""', "({'repo': 'a'}, {'repo': 'meta', 'sha': 'a'}, {'repo': 'meta', 'rev': 'a'})"], {}), "('dct', ({'repo': 'a'}, {'repo': 'meta', 'sha': 'a'},\n {'repo': 'meta', 'rev': 'a'}))\n", (6782, 6870), False, 'import pytest\n'), ((7365, 7605), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""config_repo"""', "({'repo': 
'meta', 'hooks': [{'id': 'i-dont-exist'}]}, {'repo': 'meta',\n 'hooks': [{'id': 'identity', 'language': 'python'}]}, {'repo': 'meta',\n 'hooks': [{'id': 'identity', 'name': False}]})"], {}), "('config_repo', ({'repo': 'meta', 'hooks': [{'id':\n 'i-dont-exist'}]}, {'repo': 'meta', 'hooks': [{'id': 'identity',\n 'language': 'python'}]}, {'repo': 'meta', 'hooks': [{'id': 'identity',\n 'name': False}]}))\n", (7388, 7605), False, 'import pytest\n'), ((7912, 7980), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""mapping"""', "({'pony': '1.0'}, {'python': 3})"], {}), "('mapping', ({'pony': '1.0'}, {'python': 3}))\n", (7935, 7980), False, 'import pytest\n'), ((8913, 8981), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""schema"""', '(CONFIG_SCHEMA, CONFIG_REPO_DICT)'], {}), "('schema', (CONFIG_SCHEMA, CONFIG_REPO_DICT))\n", (8936, 8981), False, 'import pytest\n'), ((2992, 3032), 'cfgv.validate', 'cfgv.validate', (['config_obj', 'CONFIG_SCHEMA'], {}), '(config_obj, CONFIG_SCHEMA)\n', (3005, 3032), False, 'import cfgv\n'), ((4050, 4084), 'pre_commit.clientlib.validate_config_main', 'validate_config_main', (['(f.strpath,)'], {}), '((f.strpath,))\n', (4070, 4084), False, 'from pre_commit.clientlib import validate_config_main\n'), ((4607, 4641), 'pre_commit.clientlib.validate_config_main', 'validate_config_main', (['(f.strpath,)'], {}), '((f.strpath,))\n', (4627, 4641), False, 'from pre_commit.clientlib import validate_config_main\n'), ((8876, 8909), 'cfgv.validate', 'cfgv.validate', (['cfg', 'CONFIG_SCHEMA'], {}), '(cfg, CONFIG_SCHEMA)\n', (8889, 8909), False, 'import cfgv\n'), ((697, 727), 'cfgv.validate', 'cfgv.validate', (['obj', 'obj_schema'], {}), '(obj, obj_schema)\n', (710, 727), False, 'import cfgv\n'), ((922, 957), 'pytest.raises', 'pytest.raises', (['cfgv.ValidationError'], {}), '(cfgv.ValidationError)\n', (935, 957), False, 'import pytest\n'), ((967, 988), 'pre_commit.clientlib.check_type_tag', 'check_type_tag', (['value'], {}), '(value)\n', (981, 988), False, 'from pre_commit.clientlib import check_type_tag\n'), ((2794, 2829), 'pytest.raises', 'pytest.raises', (['cfgv.ValidationError'], {}), '(cfgv.ValidationError)\n', (2807, 2829), False, 'import pytest\n'), ((2839, 2879), 'cfgv.validate', 'cfgv.validate', (['config_obj', 'CONFIG_SCHEMA'], {}), '(config_obj, CONFIG_SCHEMA)\n', (2852, 2879), False, 'import cfgv\n'), ((3392, 3443), 'pre_commit.clientlib.validate_manifest_main', 'validate_manifest_main', (["('.pre-commit-hooks.yaml',)"], {}), "(('.pre-commit-hooks.yaml',))\n", (3414, 3443), False, 'from pre_commit.clientlib import validate_manifest_main\n'), ((3497, 3547), 'pre_commit.clientlib.validate_config_main', 'validate_config_main', (["('.pre-commit-config.yaml',)"], {}), "(('.pre-commit-config.yaml',))\n", (3517, 3547), False, 'from pre_commit.clientlib import validate_config_main\n'), ((3706, 3740), 'pre_commit.clientlib.validate_config_main', 'validate_config_main', (['(f.strpath,)'], {}), '((f.strpath,))\n', (3726, 3740), False, 'from pre_commit.clientlib import validate_config_main\n'), ((6555, 6590), 'pytest.raises', 'pytest.raises', (['cfgv.ValidationError'], {}), '(cfgv.ValidationError)\n', (6568, 6590), False, 'import pytest\n'), ((6971, 7006), 'pytest.raises', 'pytest.raises', (['cfgv.ValidationError'], {}), '(cfgv.ValidationError)\n', (6984, 7006), False, 'import pytest\n'), ((7819, 7854), 'pytest.raises', 'pytest.raises', (['cfgv.ValidationError'], {}), '(cfgv.ValidationError)\n', (7832, 7854), False, 'import pytest\n'), ((7864, 7908), 
'cfgv.validate', 'cfgv.validate', (['config_repo', 'CONFIG_REPO_DICT'], {}), '(config_repo, CONFIG_REPO_DICT)\n', (7877, 7908), False, 'import cfgv\n'), ((8142, 8177), 'pytest.raises', 'pytest.raises', (['cfgv.ValidationError'], {}), '(cfgv.ValidationError)\n', (8155, 8177), False, 'import pytest\n'), ((8187, 8235), 'cfgv.validate', 'cfgv.validate', (['mapping', 'DEFAULT_LANGUAGE_VERSION'], {}), '(mapping, DEFAULT_LANGUAGE_VERSION)\n', (8200, 8235), False, 'import cfgv\n'), ((8294, 8329), 'pytest.raises', 'pytest.raises', (['cfgv.ValidationError'], {}), '(cfgv.ValidationError)\n', (8307, 8329), False, 'import pytest\n'), ((8415, 8448), 'cfgv.validate', 'cfgv.validate', (['cfg', 'CONFIG_SCHEMA'], {}), '(cfg, CONFIG_SCHEMA)\n', (8428, 8448), False, 'import cfgv\n'), ((2964, 2985), 'testing.fixtures.sample_local_config', 'sample_local_config', ([], {}), '()\n', (2983, 2985), False, 'from testing.fixtures import sample_local_config\n'), ((6466, 6483), 'pre_commit.clientlib.MigrateShaToRev', 'MigrateShaToRev', ([], {}), '()\n', (6481, 6483), False, 'from pre_commit.clientlib import MigrateShaToRev\n'), ((7128, 7145), 'pre_commit.clientlib.MigrateShaToRev', 'MigrateShaToRev', ([], {}), '()\n', (7143, 7145), False, 'from pre_commit.clientlib import MigrateShaToRev\n'), ((7281, 7298), 'pre_commit.clientlib.MigrateShaToRev', 'MigrateShaToRev', ([], {}), '()\n', (7296, 7298), False, 'from pre_commit.clientlib import MigrateShaToRev\n'), ((2749, 2770), 'testing.fixtures.sample_local_config', 'sample_local_config', ([], {}), '()\n', (2768, 2770), False, 'from testing.fixtures import sample_local_config\n'), ((6611, 6628), 'pre_commit.clientlib.MigrateShaToRev', 'MigrateShaToRev', ([], {}), '()\n', (6626, 6628), False, 'from pre_commit.clientlib import MigrateShaToRev\n'), ((7016, 7033), 'pre_commit.clientlib.MigrateShaToRev', 'MigrateShaToRev', ([], {}), '()\n', (7031, 7033), False, 'from pre_commit.clientlib import MigrateShaToRev\n')] |
fetus-hina/IkaLog | ikalog/ui/options.py | bd476da541fcc296f792d4db76a6b9174c4777ad | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# IkaLog
# ======
# Copyright (C) 2015 Takeshi HASEGAWA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import gettext
import wx
import wx.lib.scrolledpanel
import ikalog.outputs
from ikalog.ui.events import *
from ikalog.ui.panel import *
from ikalog.ui import VideoCapture
from ikalog.utils import *
_ = Localization.gettext_translation('IkaUI', fallback=True).gettext
class OptionsGUI(object):
def __init__(self, ikalog_gui):
self.ikalog_gui = ikalog_gui
self.frame = None
self._init_frame()
def _init_frame(self):
if self.frame:
return
self.frame = wx.Frame(
self.ikalog_gui.frame, wx.ID_ANY, _("Options"), size=(640, 500))
self.notebook = wx.Notebook(self.frame, wx.ID_ANY)
# Apply button
button_apply = wx.Button(self.frame, wx.ID_ANY, _(u'Apply'))
# Use a bold font.
apply_font = button_apply.GetFont()
apply_font.SetWeight(wx.FONTWEIGHT_BOLD)
button_apply.SetFont(apply_font)
button_cancel = wx.Button(self.frame, wx.ID_ANY, _(u'Cancel'))
button_load_default = wx.Button(
self.frame, wx.ID_ANY, _(u'Load default'))
buttons_sizer = wx.BoxSizer(wx.HORIZONTAL)
buttons_sizer.Add(button_apply)
buttons_sizer.Add(button_cancel)
buttons_sizer.Add(button_load_default)
top_sizer = wx.BoxSizer(wx.VERTICAL)
top_sizer.Add(self.notebook)
top_sizer.Add(buttons_sizer)
self.frame.SetSizer(top_sizer)
# Set event handlers for buttons.
button_apply.Bind(wx.EVT_BUTTON, self.on_button_apply)
button_cancel.Bind(wx.EVT_BUTTON, self.on_button_cancel)
button_load_default.Bind(wx.EVT_BUTTON, self.on_button_load_default)
outputs = [self.ikalog_gui.capture] + self.ikalog_gui.outputs
self._init_outputs(outputs)
# self.capture.panel is a part of self.frame. This Bind propagates
# capture's source change to the preview.
self.ikalog_gui.capture.panel.Bind(
EVT_INPUT_INITIALIZED, self.ikalog_gui.on_input_initialized)
# Refresh UI of each plugin.
self.ikalog_gui.engine.call_plugins(
'on_config_load_from_context', debug=True)
def show(self):
if not self.frame:
self._init_frame()
self.frame.Show()
self.frame.Raise()
def on_button_apply(self, event):
self.ikalog_gui.on_options_apply(event)
def on_button_cancel(self, event):
self.ikalog_gui.on_options_cancel(event)
def on_button_load_default(self, event):
self.ikalog_gui.on_options_load_default(event)
def _init_outputs(self, outputs):
output_dict = {}
for output in outputs:
output_dict[output.__class__] = output
# Keys for outputs in the main page.
keys = [
ikalog.ui.VideoCapture,
ikalog.outputs.OBS,
ikalog.outputs.StatInk,
ikalog.outputs.Twitter
]
# Keys for outputs combined into the misc tab.
misc_keys = [
ikalog.outputs.CSV,
ikalog.outputs.JSON,
ikalog.outputs.Screenshot,
ikalog.outputs.Boyomi,
ikalog.outputs.Slack,
ikalog.outputs.WebSocketServer,
]
for key in output_dict.keys():
if key in misc_keys:
continue
if key not in keys:
keys.append(key)
# Main tabs
index = 0
for key in keys:
output = output_dict.get(key)
if not output:
continue
output.on_option_tab_create(self.notebook)
self.notebook.InsertPage(index, output.panel, output.panel_name)
index += 1
# Misc tab
self.misc_panel = wx.lib.scrolledpanel.ScrolledPanel(
self.notebook, wx.ID_ANY, size=(640, 360))
self.misc_panel_sizer = wx.BoxSizer(wx.VERTICAL)
default_font = self.misc_panel.GetFont()
title_font = wx.Font(default_font.GetPointSize(),
wx.FONTFAMILY_DEFAULT,
wx.FONTSTYLE_NORMAL,
wx.FONTWEIGHT_BOLD)
for key in misc_keys:
output = output_dict.get(key)
if not output:
continue
output.on_option_tab_create(self.misc_panel)
title = wx.StaticText(self.misc_panel, wx.ID_ANY, output.panel_name)
title.SetFont(title_font)
self.misc_panel_sizer.Add(title)
self.misc_panel_sizer.Add(
output.panel, flag=wx.EXPAND | wx.ALL, border=10)
self.misc_panel_sizer.Add((-1, 25))
self.misc_panel.SetSizer(self.misc_panel_sizer)
self.misc_panel.SetupScrolling()
self.notebook.InsertPage(index, self.misc_panel, _('Misc.'))
| [((1293, 1327), 'wx.Notebook', 'wx.Notebook', (['self.frame', 'wx.ID_ANY'], {}), '(self.frame, wx.ID_ANY)\n', (1304, 1327), False, 'import wx\n'), ((1776, 1802), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (1787, 1802), False, 'import wx\n'), ((1952, 1976), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (1963, 1976), False, 'import wx\n'), ((4420, 4497), 'wx.lib.scrolledpanel.ScrolledPanel', 'wx.lib.scrolledpanel.ScrolledPanel', (['self.notebook', 'wx.ID_ANY'], {'size': '(640, 360)'}), '(self.notebook, wx.ID_ANY, size=(640, 360))\n', (4454, 4497), False, 'import wx\n'), ((4543, 4567), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (4554, 4567), False, 'import wx\n'), ((5029, 5089), 'wx.StaticText', 'wx.StaticText', (['self.misc_panel', 'wx.ID_ANY', 'output.panel_name'], {}), '(self.misc_panel, wx.ID_ANY, output.panel_name)\n', (5042, 5089), False, 'import wx\n')] |
CyberTKR/Simple-LINELIB | setup.py | 8596afb6b201b13675a0ed6314b3151f6bbf208b | from setuptools import setup, find_packages
with open("README.md", 'r',encoding="utf-8") as f:
long_description = f.read()
setup(
name='LineBot',
version='0.1.0',
description='Simple-LINELIB',
long_description=long_description,
author='Tolg KR',
author_email='[email protected]',
url='https://github.com/CyberTKR/Simple-LINELIB',
packages=find_packages(include=['CyberTK', 'CyberTK.*']),
install_requires=[
'httpx==0.19.0',
'requests',
'thrift',
'CyberTKAPI'
],
extras_require={'httpx': ['http2']}
)
| [((379, 426), 'setuptools.find_packages', 'find_packages', ([], {'include': "['CyberTK', 'CyberTK.*']"}), "(include=['CyberTK', 'CyberTK.*'])\n", (392, 426), False, 'from setuptools import setup, find_packages\n')] |
multi-service-fabric/element-manager | lib/SeparateDriver/CgwshDeviceDriverSetParameterECDB.py | e550d1b5ec9419f1fb3eb6e058ce46b57c92ee2f | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright(c) 2019 Nippon Telegraph and Telephone Corporation
# Filename: CgwshDeviceDriverSetParameterECDB.py
'''
Parameter module for Cgwsh driver configuration
'''
import GlobalModule
from EmCommonLog import decorater_log
from DriverSetParameterECDB import DriverSetParameterECDB
class CgwshDeviceDriverSetParameterECDB(DriverSetParameterECDB):
'''
Parameter class for Cgwsh driver configuration
'''
@decorater_log
def __init__(self,
device_name=None,
ec_message=None,
db_info=None):
'''
Constructor
'''
super(CgwshDeviceDriverSetParameterECDB, self).__init__(device_name,
ec_message,
db_info)
self.ec_message = self.ec_message["device"]
@decorater_log
def get_service_info(self):
'''
Service information is acquired.
'''
pass
@decorater_log
def get_management_info(self):
'''
Management information is acquired.
'''
get_info = {}
get_info["device_name"] = self.ec_message.get("name")
GlobalModule.EM_LOGGER.debug("get management_info = %s" % (get_info,))
return get_info
@decorater_log
def get_static_route_info(self):
'''
Static route information is acquired.
acquired dict:
{
static_route:[{
ip_address:str,
subnet_mask:str,
gateway_address:str
}]
}
'''
get_info = {}
tmp_list = []
routes = self.ec_message.get("serviceInfo", {}).get("staticRoute", ())
for route in routes:
tmp_item = {}
tmp_item["ip_address"] = route.get("ipAddress")
tmp_item["subnet_mask"] = route.get("subnetMask")
tmp_item["gateway_address"] = route.get("gatewayIpAddress")
tmp_list.append(tmp_item)
get_info["static_route"] = tmp_list
GlobalModule.EM_LOGGER.debug("get static_route = %s" % (get_info,))
return get_info
@decorater_log
def get_tunnel_if_info(self):
'''
Tunnel interface information is acquired.
acquired dict:
{
tunnel_if:[{
vrf_name:str,
if_name:str,
uni_if_name:str,
uni_vlan_id:str,
tunnel_source:str,
}]
}
'''
get_info = {}
tmp_list = []
tunnel_uni = self.ec_message.get("serviceInfo", {}).get("uni", ())
tunnel_officeInfo = self.ec_message.get(
"serviceInfo", {}).get("officeInfo", ())
vrf_name = tunnel_uni.get("vrfName")
uni_if_name = tunnel_uni.get("ifName")
uni_vlan_id = tunnel_uni.get("vlanId")
for tunnel in tunnel_officeInfo:
tmp_item = {}
tmp_item["vrf_name"] = vrf_name
tmp_item["if_name"] = tunnel.get("tunnelIfName")
tmp_item["uni_if_name"] = uni_if_name
tmp_item["uni_vlan_id"] = uni_vlan_id
tmp_item["tunnel_source"] = tunnel.get(
"tunnelSrcIpAddress")
tmp_list.append(tmp_item)
get_info["tunnel_if"] = tmp_list
GlobalModule.EM_LOGGER.debug("get tunnel_if = %s" % (get_info,))
return get_info
@decorater_log
def get_pppoe_info(self):
'''
PPPoE information is acquired.
acquired dict:
{
pppoe:[{
username:str,
password:str,
tenant:str,
pp_no:str
}]
}
'''
get_info = {}
tmp_list = []
ppp_infos = self.ec_message.get("serviceInfo", {}).get("pppInfo", ())
for ppp_info in ppp_infos:
tmp_item = {}
tmp_item["username"] = ppp_info.get("connectionId")
tmp_item["password"] = ppp_info.get("connectionPassword")
tmp_item["tenant"] = ppp_info.get("corporationId")
tmp_item["pp_no"] = ppp_info.get("ppId")
tmp_list.append(tmp_item)
get_info["pppoe"] = tmp_list
GlobalModule.EM_LOGGER.debug("get pppoe = %s" % (get_info,))
return get_info
| [((1321, 1391), 'GlobalModule.EM_LOGGER.debug', 'GlobalModule.EM_LOGGER.debug', (["('get management_info = %s' % (get_info,))"], {}), "('get management_info = %s' % (get_info,))\n", (1349, 1391), False, 'import GlobalModule\n'), ((2218, 2285), 'GlobalModule.EM_LOGGER.debug', 'GlobalModule.EM_LOGGER.debug', (["('get static_route = %s' % (get_info,))"], {}), "('get static_route = %s' % (get_info,))\n", (2246, 2285), False, 'import GlobalModule\n'), ((3526, 3590), 'GlobalModule.EM_LOGGER.debug', 'GlobalModule.EM_LOGGER.debug', (["('get tunnel_if = %s' % (get_info,))"], {}), "('get tunnel_if = %s' % (get_info,))\n", (3554, 3590), False, 'import GlobalModule\n'), ((4465, 4525), 'GlobalModule.EM_LOGGER.debug', 'GlobalModule.EM_LOGGER.debug', (["('get pppoe = %s' % (get_info,))"], {}), "('get pppoe = %s' % (get_info,))\n", (4493, 4525), False, 'import GlobalModule\n')] |
Bhaskers-Blu-Org1/wc-devops-utilities | scripts/common_lib/build_lib.py | d8131261cb3d67ce872b541c5e2d8ff22fcbf614 | #!/usr/bin/env python3.6
import os
import subprocess
import json
import argparse
import zipfile
import shutil
import requests
import datetime
import re
import operator
import unicodedata
# global list of error messages to keep track of all error msgs
errorMessages = []
"""
Collection of Common Functions used by Build Scripts
A collection of common functions shared by each individual build script.
"""
def get(url, usr, pwd):
"""
HTTP/HTTPS GET requests using external Python module requests
@param url the url of the REST call
@param usr the functional username for the docker registry
@param pwd the password for the docker registry functional user
@return a JSON response
"""
headers = {
'Accept': 'application/vnd.docker.distribution.manifest.v1+json',
}
# TEMP: Remove the suppressed verification once the docker cert location
# is figured out and we specify it in REQUESTS_CA_BUNDLE
return requests.get(url, auth=(usr, pwd), headers=headers, verify=False)
def get_latest_tag(registry_path, usr, pwd):
"""
Retrieve the latest version of an image based on its tags: vX-YYYYMMDD-HHmm.
The latest, by definition, is defined to be the one with the highest version
number (vX) and the latest timestamp (YYYYMMDD-HHmm).
@param registry_path docker registry path
@param usr the functional username for the docker registry
@param pwd the password for the docker registry functional user
@return the latest image tag
"""
tag_list_url = registry_path + '/tags/list'
request = get(tag_list_url, usr, pwd)
tag_list = json.loads(request.text)
for tag in tag_list['tags']:
if '-' not in tag:
continue
str_version, str_dash, str_timestamp = tag.partition('-')
tag_format="%Y%m%d-%H%M"
try:
dt_timestamp = datetime.datetime.strptime(str_timestamp, tag_format)
except ValueError:
continue
try:
latest_version
latest_timestamp
latest_tag
except NameError:
latest_version = str_version
latest_timestamp = dt_timestamp
latest_tag = tag
else:
if latest_version > str_version:
continue
elif latest_version < str_version:
latest_version = str_version
latest_timestamp = dt_timestamp
latest_tag = tag
else:
if latest_timestamp < dt_timestamp:
latest_timestamp = dt_timestamp
latest_tag = tag
return latest_tag
def unzip(zip_file, to_dir):
"""
Generic unzip function for extracting zip files
@param zip_file the zip file to be extracted
@param to_dir the destination directory to extract the zip file to
"""
with zipfile.ZipFile(zip_file, "r") as zip_ref:
zip_ref.extractall(to_dir)
zip_ref.close()
def create_dockerfile(dockerfile_parent_dir, docker_url, image_namespace, image_name, image_tag_latest):
"""
Creates a dockerfile using the correct docker registry URL associated
with the datacenter this script is being run on
:param str dockerfile_parent_dir: path to the parent directory for the Dockerfile
:param str docker_url: the docker registry VIP accessible from the mesos slaves
:param str image_namespace: the name of the image
:param str image_name: the name of the image
:param str image_tag_latest: the latest version tag of the base image
:returns: None
"""
# Form the path for the Dockerfile based on the parent of the caller script
dockerfile_path = os.path.join(dockerfile_parent_dir, "Dockerfile")
# Create the Dockerfile
dockerfile = open(dockerfile_path, "w+")
# Format the FROM command
dockerfile_from_cmd = "FROM " + docker_url + image_namespace + "/" + image_name + ":" + image_tag_latest
# Write the FROM command string to the Dockerfile
dockerfile.write(dockerfile_from_cmd)
# Close the open file instance
dockerfile.close()
def set_docker_client_timeout():
"""
Sets the DOCKER_CLIENT_TIMEOUT environment variable to 300
"""
os.environ['DOCKER_CLIENT_TIMEOUT'] = '300'
print("The timeout set for docker client: " + os.environ['DOCKER_CLIENT_TIMEOUT'] + " seconds")
# ======================= verify bundle Structure ===============================================
def openJSONfile(jsonFile):
"""
Function to open a JSON file
@param jsonFile path to the JSON file
@return the loaded JSON file
"""
try:
with open(jsonFile) as json_data_file:
data = json.load(json_data_file)
except:
addToErrorMessages("The specified JSON file is not valid: " + jsonFile)
raise
return data
def directoryToJSON(directory):
"""
Function to convert objects in a given directory into JSON form.
The parent object is always a dict, it may contain children if type=directory.
A directory is composed of a list and may contain files and/or directories.
@param directory directory to convert
@return JSON representation of a directory
"""
d = {'name': os.path.basename(directory)} # the parent object is dict
if os.path.isdir(directory):
d['type'] = "directory"
# directory may have children
# the children in a directory is a list composed of more files/directories
d['children'] = [directoryToJSON(os.path.join(directory,x)) for x in os.listdir(directory)]
else:
d['type'] = "file"
return d
def verifyBundleStructure(expected, actual, currentPath):
"""
Function to verify if an uploaded bundle follows IBM defined structure
@param expected the JSON representation of the IBM defined structure
@param actual the JSON representation of the actual structure of the uploaded bundle
@param currentPath the path currently being checked (used to build paths recursively for error msg)
@return True if structure of the uploaded bundle follows IBM defined structure. False otherwise.
"""
isMatched = True
if type(expected) is dict:
if matches(expected,actual): # a matching file or directory was found
if expected['type'] == 'directory':
currentPath = currentPath + actual['name'] + "/"
if expected['children'] == "_any":
isMatched = isMatched & True # if the contents of the directory can be anything then do no further checking
else:
isMatched = isMatched & verifyBundleStructure(expected['children'], actual['children'], currentPath) # do further checking
else: # a matching file or directory was not found
if expected['fail-if-not-found'] == "yes":
logBundleStructureErrorMessage(expected, currentPath)
return False
if type(expected) is list:
for k in range(0,len(expected)):
isMatched = isMatched & verifyActualContainsExpectedElement(actual, expected[k], currentPath, isMatched)
return isMatched
def logBundleStructureErrorMessage(expected, currentPath):
"""
    Function to add error messages to the global array.
@param expected the expected element
@param currentPath the current path we are on that has the missing file or directory
"""
addToErrorMessages("A "+ expected['type'] +" is missing from the path: \"" + currentPath + "\"")
addToErrorMessages(expected['error-message-if-fails'])
return
def matches(expectedElement, actualElement):
"""
Function to check if files/directories match. They must have the same name and must both be the same type.
@param expectedElement the expected element. May be defined by regular expression
@param actualElement the actual element
"""
ret = False
if re.fullmatch(expectedElement['name'], actualElement['name']) is not None and expectedElement['type'] == actualElement['type']:
ret = True
return ret
def verifyActualContainsExpectedElement(actual, expectedElement, currentPath, isMatched):
"""
Function to verify if an actual list of objects contains an expected element. Helper method to verifyBundleStructure.
@param actual list of the actual files and directories in the bundle
@param expectedElement the expected element to find in the bundle
@param currentPath the path currently being checked (used to build paths recursively for error msg)
@param isMatched (only used for recursive calls)
@return True if the list of actual objects contain the expected element
"""
# if actual is a dict then verify it and its children
if type(actual) is dict:
isMatched = isMatched & verifyBundleStructure(expectedElement,actual, currentPath)
# if actual is a list then find out if they match anywhere, if so get the matched position
elif type(actual) is list:
matchedPosition = -1
for i in range(0, len(actual)):
if matches(expectedElement,actual[i]):
matchedPosition = i
break
if matchedPosition != -1: # if they match then verify their children too
isMatched = isMatched & verifyBundleStructure(expectedElement, actual[matchedPosition] , currentPath)
else : # if they don't match then log the error msg and return false
if expectedElement['fail-if-not-found'] == "yes": # log error msg and return false if needed
isMatched = False
logBundleStructureErrorMessage(expectedElement, currentPath)
return isMatched
def addToErrorMessages(errorMessage):
"""
Function to add error messages to the global list of errorMessages
@param errorMessage the error message to add
"""
print(errorMessage)
    global errorMessages
errorMessages.extend([errorMessage])
return
def unzipRecursively(zipFileName, directoryToUnzipTo):
"""
Function to unzip a ZIP file recursively
@param zipFileName the zip file to be extracted
@param directoryToUnzipTo the destination directory to extract the zip file to
"""
# update
if zipFileName.endswith(".zip"): #check if it's a .zip
unzip(zipFileName,directoryToUnzipTo)
os.remove(zipFileName)
for x in os.listdir(directoryToUnzipTo):
subdirectory = os.path.join(directoryToUnzipTo, os.path.splitext(x)[0])
subfile = os.path.join(directoryToUnzipTo, x )
unzipRecursively(subfile, subdirectory)
return
def zipFileIsGood(filePath):
"""
Function to test if a ZIP file is good or bad
@param filePath the zip file to be tested
@return True if the ZIP file is good. False otherwise.
"""
ret = True
try:
the_zip_file = zipfile.ZipFile(filePath)
badFile = the_zip_file.testzip()
if badFile is not None:
ret = False
else:
ret = True
except:
ret = False
return ret
def verifyZipFile(zipDirectory, nameOfBundle):
"""
Function to verify if an uploaded bundle is:
1) a valid zip file
2) follows IBM defined structure
@param zipDirectory where the bundle ZIP is located
@param nameOfBundle name of the bundle ZIP file
"""
print ('Validating bundle structure...')
bundleIsGood = True
bundleZip = os.path.join(zipDirectory, nameOfBundle)
if zipFileIsGood(bundleZip):
try:
# copy bundle into new working directory -----------------------------------------------------------
directoryToUnzipTo = os.path.join(zipDirectory, "temp")
if not os.path.exists(directoryToUnzipTo):
os.makedirs(directoryToUnzipTo)
shutil.copy(bundleZip, os.path.join(directoryToUnzipTo, nameOfBundle))
# unzip the bundle ----------------------------------------------------------------------------------
unzipRecursively(os.path.join(directoryToUnzipTo, nameOfBundle), os.path.join(directoryToUnzipTo, os.path.splitext(nameOfBundle)[0]))
# verify structure of bundle ------------------------------------------------------------------------
# check package stucture
expectedPackageStructure = openJSONfile(os.path.join(zipDirectory, "bundle-definition.json"))
actualBundleStructure = directoryToJSON(directoryToUnzipTo) # convert the unzipped directory to JSON file
bundleIsGood = verifyBundleStructure(expectedPackageStructure, actualBundleStructure, "")
if not bundleIsGood:
addToErrorMessages("The uploaded bundle does not meet predefined structure. Could not proceed with deployment.")
# clean up unzipped stuff and package structure Json -------------------------------------------------
shutil.rmtree(directoryToUnzipTo)
except:
addToErrorMessages("Exception occurred while verifying bundle structure. Could not proceed with deployment.")
bundleIsGood = False
else:
bundleIsGood = False
addToErrorMessages("The uploaded bundle could not be unzipped. Could not proceed with deployment.")
# out put report value , join all the messages together
print ("report=[" + ". ".join(str(x) for x in errorMessages) + "]")
return bundleIsGood
| [((962, 1027), 'requests.get', 'requests.get', (['url'], {'auth': '(usr, pwd)', 'headers': 'headers', 'verify': '(False)'}), '(url, auth=(usr, pwd), headers=headers, verify=False)\n', (974, 1027), False, 'import requests\n'), ((1631, 1655), 'json.loads', 'json.loads', (['request.text'], {}), '(request.text)\n', (1641, 1655), False, 'import json\n'), ((3702, 3751), 'os.path.join', 'os.path.join', (['dockerfile_parent_dir', '"""Dockerfile"""'], {}), "(dockerfile_parent_dir, 'Dockerfile')\n", (3714, 3751), False, 'import os\n'), ((5346, 5370), 'os.path.isdir', 'os.path.isdir', (['directory'], {}), '(directory)\n', (5359, 5370), False, 'import os\n'), ((11689, 11729), 'os.path.join', 'os.path.join', (['zipDirectory', 'nameOfBundle'], {}), '(zipDirectory, nameOfBundle)\n', (11701, 11729), False, 'import os\n'), ((2882, 2912), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zip_file', '"""r"""'], {}), "(zip_file, 'r')\n", (2897, 2912), False, 'import zipfile\n'), ((5282, 5309), 'os.path.basename', 'os.path.basename', (['directory'], {}), '(directory)\n', (5298, 5309), False, 'import os\n'), ((10535, 10557), 'os.remove', 'os.remove', (['zipFileName'], {}), '(zipFileName)\n', (10544, 10557), False, 'import os\n'), ((10575, 10605), 'os.listdir', 'os.listdir', (['directoryToUnzipTo'], {}), '(directoryToUnzipTo)\n', (10585, 10605), False, 'import os\n'), ((11085, 11110), 'zipfile.ZipFile', 'zipfile.ZipFile', (['filePath'], {}), '(filePath)\n', (11100, 11110), False, 'import zipfile\n'), ((1879, 1932), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['str_timestamp', 'tag_format'], {}), '(str_timestamp, tag_format)\n', (1905, 1932), False, 'import datetime\n'), ((4731, 4756), 'json.load', 'json.load', (['json_data_file'], {}), '(json_data_file)\n', (4740, 4756), False, 'import json\n'), ((8055, 8115), 're.fullmatch', 're.fullmatch', (["expectedElement['name']", "actualElement['name']"], {}), "(expectedElement['name'], actualElement['name'])\n", (8067, 8115), False, 'import re\n'), ((10713, 10748), 'os.path.join', 'os.path.join', (['directoryToUnzipTo', 'x'], {}), '(directoryToUnzipTo, x)\n', (10725, 10748), False, 'import os\n'), ((11923, 11957), 'os.path.join', 'os.path.join', (['zipDirectory', '"""temp"""'], {}), "(zipDirectory, 'temp')\n", (11935, 11957), False, 'import os\n'), ((13244, 13277), 'shutil.rmtree', 'shutil.rmtree', (['directoryToUnzipTo'], {}), '(directoryToUnzipTo)\n', (13257, 13277), False, 'import shutil\n'), ((5576, 5602), 'os.path.join', 'os.path.join', (['directory', 'x'], {}), '(directory, x)\n', (5588, 5602), False, 'import os\n'), ((5612, 5633), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (5622, 5633), False, 'import os\n'), ((11982, 12016), 'os.path.exists', 'os.path.exists', (['directoryToUnzipTo'], {}), '(directoryToUnzipTo)\n', (11996, 12016), False, 'import os\n'), ((12034, 12065), 'os.makedirs', 'os.makedirs', (['directoryToUnzipTo'], {}), '(directoryToUnzipTo)\n', (12045, 12065), False, 'import os\n'), ((12114, 12160), 'os.path.join', 'os.path.join', (['directoryToUnzipTo', 'nameOfBundle'], {}), '(directoryToUnzipTo, nameOfBundle)\n', (12126, 12160), False, 'import os\n'), ((12318, 12364), 'os.path.join', 'os.path.join', (['directoryToUnzipTo', 'nameOfBundle'], {}), '(directoryToUnzipTo, nameOfBundle)\n', (12330, 12364), False, 'import os\n'), ((12651, 12703), 'os.path.join', 'os.path.join', (['zipDirectory', '"""bundle-definition.json"""'], {}), "(zipDirectory, 'bundle-definition.json')\n", (12663, 12703), False, 'import os\n'), ((10667, 
10686), 'os.path.splitext', 'os.path.splitext', (['x'], {}), '(x)\n', (10683, 10686), False, 'import os\n'), ((12399, 12429), 'os.path.splitext', 'os.path.splitext', (['nameOfBundle'], {}), '(nameOfBundle)\n', (12415, 12429), False, 'import os\n')] |
ivalab/GraspKpNet | src/static_grasp_kt.py | d4b6186d74ac82a745d778892742d52a204bd1cf | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import _init_paths
import os
import json
import cv2
import cv2.aruco as aruco
import numpy as np
import sys
import rospy
from std_msgs.msg import Bool
from std_msgs.msg import Float64MultiArray
from sensor_msgs.msg import Image, CameraInfo
from cv_bridge import CvBridge, CvBridgeError
import message_filters
import torch
from external.nms import soft_nms
from opts import opts
from logger import Logger
from utils.utils import AverageMeter
from datasets.dataset_factory import dataset_factory
from detectors.detector_factory import detector_factory
# transformation from the robot base to aruco tag
M_BL = np.array([[1., 0., 0., 0.30000],
[0., 1., 0., 0.32000],
[0., 0., 1., -0.0450],
[0., 0., 0., 1.00000]])
# default transformation from the camera to aruco tag
default_M_CL = np.array([[-0.07134498, -0.99639369, 0.0459293, -0.13825178],
[-0.8045912, 0.03027403, -0.59305689, 0.08434352],
[ 0.58952768, -0.07926594, -0.8038495, 0.66103522],
[ 0., 0., 0., 1. ]]
)
# camera intrinsic matrix of Realsense D435
cameraMatrix = np.array([[607.47165, 0.0, 325.90064],
[0.0, 606.30420, 240.91934],
[0.0, 0.0, 1.0]])
# distortion of Realsense D435
distCoeffs = np.array([0.08847, -0.04283, 0.00134, -0.00102, 0.0])
# initialize GKNet Detector
opt = opts().parse()
Dataset = dataset_factory[opt.dataset]
opt = opts().update_dataset_info_and_set_heads(opt, Dataset)
print(opt)
Detector = detector_factory[opt.task]
detector = Detector(opt)
# Publisher of perception result
pub_res = rospy.Publisher('/result', Float64MultiArray, queue_size=10)
def get_M_CL_info(gray, image_init, visualize=False):
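    # Estimate M_CL, the transform from the camera to the ArUco tag, from the
    # grayscale frame; fall back to default_M_CL when no tag is detected, and
    # also return the detected tag corners (or None).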
# parameters
markerLength_CL = 0.093
aruco_dict_CL = aruco.Dictionary_get(aruco.DICT_ARUCO_ORIGINAL)
# aruco_dict_CL = aruco.Dictionary_get(aruco.DICT_6X6_250)
parameters = aruco.DetectorParameters_create()
corners_CL, ids_CL, rejectedImgPoints = aruco.detectMarkers(gray, aruco_dict_CL, parameters=parameters)
# for the first frame, it may contain nothing
if ids_CL is None:
return default_M_CL, None
rvec_CL, tvec_CL, _objPoints_CL = aruco.estimatePoseSingleMarkers(corners_CL[0], markerLength_CL,
cameraMatrix, distCoeffs)
dst_CL, jacobian_CL = cv2.Rodrigues(rvec_CL)
M_CL = np.zeros((4, 4))
M_CL[:3, :3] = dst_CL
M_CL[:3, 3] = tvec_CL
M_CL[3, :] = np.array([0, 0, 0, 1])
if visualize:
# print('aruco is located at mean position (%d, %d)' %(mean_x ,mean_y))
aruco.drawAxis(image_init, cameraMatrix, distCoeffs, rvec_CL, tvec_CL, markerLength_CL)
return M_CL, corners_CL[0][0, :, :]
def aruco_tag_remove(rgb_image, corners):
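    # Paint over the ArUco tag region (plus a margin) with a nearby background
    # color so the tag does not trigger spurious detections downstream.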
img_out = rgb_image.copy()
# find the top-left and right-bottom corners
min = sys.maxsize
max = -sys.maxsize
tl_pxl = None
br_pxl = None
for corner in corners:
if corner[0] + corner[1] < min:
min = corner[0] + corner[1]
tl_pxl = [int(corner[0]), int(corner[1])]
if corner[0] + corner[1] > max:
max = corner[0] + corner[1]
br_pxl = [int(corner[0]), int(corner[1])]
# get the replacement pixel value
rep_color = img_out[tl_pxl[0] - 10, tl_pxl[1] - 10, :]
for h in range(tl_pxl[1] - 45, br_pxl[1] + 46):
for w in range(tl_pxl[0] - 45, br_pxl[0] + 46):
img_out[h, w, :] = rep_color
return img_out
def project(pixel, depth_image, M_CL, M_BL, cameraMatrix):
'''
project 2d pixel on the image to 3d by depth info
:param pixel: x, y
:param M_CL: trans from camera to aruco tag
:param cameraMatrix: camera intrinsic matrix
:param depth_image: depth image
    :param M_BL: transformation from the robot base to the aruco tag
:return:
q_B: 3d coordinate of pixel with respect to base frame
'''
depth = depth_image[pixel[1], pixel[0]]
    # if the depth of the detected pixel is 0, check the depth of its
    # neighbors counter-clockwise
nei_range = 1
while depth == 0:
for delta_x in range(-nei_range, nei_range + 1):
for delta_y in range(-nei_range, nei_range + 1):
nei = [pixel[0] + delta_x, pixel[1] + delta_y]
depth = depth_image[nei[1], nei[0]]
if depth != 0:
break
if depth != 0:
break
nei_range += 1
pxl = np.linalg.inv(cameraMatrix).dot(
np.array([pixel[0] * depth, pixel[1] * depth, depth]))
q_C = np.array([pxl[0], pxl[1], pxl[2], 1])
q_L = np.linalg.inv(M_CL).dot(q_C)
q_B = M_BL.dot(q_L)
return q_B
def pre_process(rgb_img, depth_img):
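    # Build the network input: overwrite the blue channel of the BGR image with
    # the depth map, then resize to the 256x256 input resolution.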
inp_image = rgb_img
inp_image[:, :, 0] = depth_img
inp_image = cv2.resize(inp_image, (256, 256))
return inp_image
def kinect_rgbd_callback(rgb_data, depth_data):
"""
Save raw RGB and depth input from Kinect V1
:param rgb_data: RGB image
:param depth_data: raw depth image
:return: None
"""
try:
cv_rgb = cv_bridge.imgmsg_to_cv2(rgb_data, "bgr8")
cv_depth = cv_bridge.imgmsg_to_cv2(depth_data, "32FC1")
cv_rgb_arr = np.array(cv_rgb, dtype=np.uint8)
cv_depth_arr = np.array(cv_depth, dtype=np.float32)
# cv_depth_arr = np.nan_to_num(cv_depth_arr)
cv2.imshow("Depth", cv_depth)
cv2.imshow("RGB", cv_rgb)
img = cv_rgb_arr.copy()
depth_raw = cv_depth_arr.copy()
gray = img.astype(np.uint8)
depth = (depth_raw * 1000).astype(np.uint8)
# get the current transformation from the camera to aruco tag
M_CL, corners = get_M_CL_info(gray, img, False)
# remove aruco tag from input image to avoid mis-detection
if corners is not None:
img_wo_at = aruco_tag_remove(img, corners)
# replace blue channel with the depth channel
inp_image = pre_process(img_wo_at, depth)
# pass the image into the network
ret = detector.run(inp_image[:, :, :])
ret = ret["results"]
loc_ori = KpsToGrasppose(ret, img, depth_raw, M_CL, M_BL, cameraMatrix)
pub_res.publish(loc_ori)
except CvBridgeError as e:
print(e)
def isWithinRange(pxl, w, h):
x, y = pxl[:]
return w/12. <= x <= 11*w/12 and h/12. <= y <= 11*h/12
def KpsToGrasppose(net_output, rgb_img, depth_map, M_CL, M_BL, cameraMatrix, visualize=True):
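    # Turn keypoint detections into a grasp pose: keep the most confident
    # left/right keypoint pair inside the workspace, project the keypoints and
    # their center into the robot base frame, and return the 3D center,
    # gripper orientation and opening width.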
kps_pr = []
for category_id, preds in net_output.items():
if len(preds) == 0:
continue
for pred in preds:
kps = pred[:4]
score = pred[-1]
kps_pr.append([kps[0], kps[1], kps[2], kps[3], score])
# no detection
if len(kps_pr) == 0:
return [0, 0, 0, 0]
# sort by the confidence score
kps_pr = sorted(kps_pr, key=lambda x: x[-1], reverse=True)
# select the top 1 grasp prediction within the workspace
res = None
for kp_pr in kps_pr:
f_w, f_h = 640. / 512., 480. / 512.
kp_lm = (int(kp_pr[0] * f_w), int(kp_pr[1] * f_h))
kp_rm = (int(kp_pr[2] * f_w), int(kp_pr[3] * f_h))
if isWithinRange(kp_lm, 640, 480) and isWithinRange(kp_rm, 640, 480):
res = kp_pr
break
if res is None:
return [0, 0, 0, 0]
f_w, f_h = 640./512., 480./512.
kp_lm = (int(res[0]*f_w), int(res[1]*f_h))
kp_rm = (int(res[2]*f_w), int(res[3]*f_h))
center = (int((kp_lm[0]+kp_rm[0])/2), int((kp_lm[1]+kp_rm[1])/2))
kp_lm_3d = project(kp_lm, depth_map, M_CL, M_BL, cameraMatrix)
kp_rm_3d = project(kp_rm, depth_map, M_CL, M_BL, cameraMatrix)
center_3d = project(center, depth_map, M_CL, M_BL, cameraMatrix)
orientation = np.arctan2(kp_rm_3d[1] - kp_lm_3d[1], kp_rm_3d[0] - kp_lm_3d[0])
# motor 7 is clockwise
if orientation > np.pi / 2:
orientation = np.pi - orientation
elif orientation < -np.pi / 2:
orientation = -np.pi - orientation
else:
orientation = -orientation
# compute the open width
dist = np.linalg.norm(kp_lm_3d[:2] - kp_rm_3d[:2])
# draw arrow for left-middle and right-middle key-points
lm_ep = (int(kp_lm[0] + (kp_rm[0] - kp_lm[0]) / 5.), int(kp_lm[1] + (kp_rm[1] - kp_lm[1]) / 5.))
rm_ep = (int(kp_rm[0] + (kp_lm[0] - kp_rm[0]) / 5.), int(kp_rm[1] + (kp_lm[1] - kp_rm[1]) / 5.))
rgb_img = cv2.arrowedLine(rgb_img, kp_lm, lm_ep, (0, 0, 0), 2)
rgb_img = cv2.arrowedLine(rgb_img, kp_rm, rm_ep, (0, 0, 0), 2)
# draw left-middle, right-middle and center key-points
rgb_img = cv2.circle(rgb_img, (int(kp_lm[0]), int(kp_lm[1])), 2, (0, 0, 255), 2)
rgb_img = cv2.circle(rgb_img, (int(kp_rm[0]), int(kp_rm[1])), 2, (0, 0, 255), 2)
rgb_img = cv2.circle(rgb_img, (int(center[0]), int(center[1])), 2, (0, 0, 255), 2)
if visualize:
cv2.namedWindow('visual', cv2.WINDOW_AUTOSIZE)
cv2.imshow('visual', rgb_img)
return [center_3d[0], center_3d[1], center_3d[2], orientation, dist]
if __name__ == '__main__':
# initialize ros node
rospy.init_node("Static_grasping")
# Bridge to convert ROS Image type to OpenCV Image type
cv_bridge = CvBridge()
cv2.WITH_QT = False
# Get camera calibration parameters
cam_param = rospy.wait_for_message('/camera/rgb/camera_info', CameraInfo, timeout=None)
# Subscribe to rgb and depth channel
image_sub = message_filters.Subscriber("/camera/rgb/image_rect_color", Image)
depth_sub = message_filters.Subscriber("/camera/depth_registered/image", Image)
ts = message_filters.ApproximateTimeSynchronizer([image_sub, depth_sub], 1, 0.1)
ts.registerCallback(kinect_rgbd_callback)
rospy.spin() | [((721, 828), 'numpy.array', 'np.array', (['[[1.0, 0.0, 0.0, 0.3], [0.0, 1.0, 0.0, 0.32], [0.0, 0.0, 1.0, -0.045], [0.0,\n 0.0, 0.0, 1.0]]'], {}), '([[1.0, 0.0, 0.0, 0.3], [0.0, 1.0, 0.0, 0.32], [0.0, 0.0, 1.0, -\n 0.045], [0.0, 0.0, 0.0, 1.0]])\n', (729, 828), True, 'import numpy as np\n'), ((949, 1145), 'numpy.array', 'np.array', (['[[-0.07134498, -0.99639369, 0.0459293, -0.13825178], [-0.8045912, \n 0.03027403, -0.59305689, 0.08434352], [0.58952768, -0.07926594, -\n 0.8038495, 0.66103522], [0.0, 0.0, 0.0, 1.0]]'], {}), '([[-0.07134498, -0.99639369, 0.0459293, -0.13825178], [-0.8045912, \n 0.03027403, -0.59305689, 0.08434352], [0.58952768, -0.07926594, -\n 0.8038495, 0.66103522], [0.0, 0.0, 0.0, 1.0]])\n', (957, 1145), True, 'import numpy as np\n'), ((1336, 1425), 'numpy.array', 'np.array', (['[[607.47165, 0.0, 325.90064], [0.0, 606.3042, 240.91934], [0.0, 0.0, 1.0]]'], {}), '([[607.47165, 0.0, 325.90064], [0.0, 606.3042, 240.91934], [0.0, \n 0.0, 1.0]])\n', (1344, 1425), True, 'import numpy as np\n'), ((1518, 1571), 'numpy.array', 'np.array', (['[0.08847, -0.04283, 0.00134, -0.00102, 0.0]'], {}), '([0.08847, -0.04283, 0.00134, -0.00102, 0.0])\n', (1526, 1571), True, 'import numpy as np\n'), ((1840, 1900), 'rospy.Publisher', 'rospy.Publisher', (['"""/result"""', 'Float64MultiArray'], {'queue_size': '(10)'}), "('/result', Float64MultiArray, queue_size=10)\n", (1855, 1900), False, 'import rospy\n'), ((2021, 2068), 'cv2.aruco.Dictionary_get', 'aruco.Dictionary_get', (['aruco.DICT_ARUCO_ORIGINAL'], {}), '(aruco.DICT_ARUCO_ORIGINAL)\n', (2041, 2068), True, 'import cv2.aruco as aruco\n'), ((2149, 2182), 'cv2.aruco.DetectorParameters_create', 'aruco.DetectorParameters_create', ([], {}), '()\n', (2180, 2182), True, 'import cv2.aruco as aruco\n'), ((2228, 2291), 'cv2.aruco.detectMarkers', 'aruco.detectMarkers', (['gray', 'aruco_dict_CL'], {'parameters': 'parameters'}), '(gray, aruco_dict_CL, parameters=parameters)\n', (2247, 2291), True, 'import cv2.aruco as aruco\n'), ((2439, 2532), 'cv2.aruco.estimatePoseSingleMarkers', 'aruco.estimatePoseSingleMarkers', (['corners_CL[0]', 'markerLength_CL', 'cameraMatrix', 'distCoeffs'], {}), '(corners_CL[0], markerLength_CL,\n cameraMatrix, distCoeffs)\n', (2470, 2532), True, 'import cv2.aruco as aruco\n'), ((2625, 2647), 'cv2.Rodrigues', 'cv2.Rodrigues', (['rvec_CL'], {}), '(rvec_CL)\n', (2638, 2647), False, 'import cv2\n'), ((2659, 2675), 'numpy.zeros', 'np.zeros', (['(4, 4)'], {}), '((4, 4))\n', (2667, 2675), True, 'import numpy as np\n'), ((2745, 2767), 'numpy.array', 'np.array', (['[0, 0, 0, 1]'], {}), '([0, 0, 0, 1])\n', (2753, 2767), True, 'import numpy as np\n'), ((4877, 4914), 'numpy.array', 'np.array', (['[pxl[0], pxl[1], pxl[2], 1]'], {}), '([pxl[0], pxl[1], pxl[2], 1])\n', (4885, 4914), True, 'import numpy as np\n'), ((5108, 5141), 'cv2.resize', 'cv2.resize', (['inp_image', '(256, 256)'], {}), '(inp_image, (256, 256))\n', (5118, 5141), False, 'import cv2\n'), ((8068, 8132), 'numpy.arctan2', 'np.arctan2', (['(kp_rm_3d[1] - kp_lm_3d[1])', '(kp_rm_3d[0] - kp_lm_3d[0])'], {}), '(kp_rm_3d[1] - kp_lm_3d[1], kp_rm_3d[0] - kp_lm_3d[0])\n', (8078, 8132), True, 'import numpy as np\n'), ((8398, 8441), 'numpy.linalg.norm', 'np.linalg.norm', (['(kp_lm_3d[:2] - kp_rm_3d[:2])'], {}), '(kp_lm_3d[:2] - kp_rm_3d[:2])\n', (8412, 8441), True, 'import numpy as np\n'), ((8720, 8772), 'cv2.arrowedLine', 'cv2.arrowedLine', (['rgb_img', 'kp_lm', 'lm_ep', '(0, 0, 0)', '(2)'], {}), '(rgb_img, kp_lm, lm_ep, (0, 0, 0), 2)\n', (8735, 8772), False, 'import 
cv2\n'), ((8787, 8839), 'cv2.arrowedLine', 'cv2.arrowedLine', (['rgb_img', 'kp_rm', 'rm_ep', '(0, 0, 0)', '(2)'], {}), '(rgb_img, kp_rm, rm_ep, (0, 0, 0), 2)\n', (8802, 8839), False, 'import cv2\n'), ((9400, 9434), 'rospy.init_node', 'rospy.init_node', (['"""Static_grasping"""'], {}), "('Static_grasping')\n", (9415, 9434), False, 'import rospy\n'), ((9512, 9522), 'cv_bridge.CvBridge', 'CvBridge', ([], {}), '()\n', (9520, 9522), False, 'from cv_bridge import CvBridge, CvBridgeError\n'), ((9603, 9678), 'rospy.wait_for_message', 'rospy.wait_for_message', (['"""/camera/rgb/camera_info"""', 'CameraInfo'], {'timeout': 'None'}), "('/camera/rgb/camera_info', CameraInfo, timeout=None)\n", (9625, 9678), False, 'import rospy\n'), ((9737, 9802), 'message_filters.Subscriber', 'message_filters.Subscriber', (['"""/camera/rgb/image_rect_color"""', 'Image'], {}), "('/camera/rgb/image_rect_color', Image)\n", (9763, 9802), False, 'import message_filters\n'), ((9819, 9886), 'message_filters.Subscriber', 'message_filters.Subscriber', (['"""/camera/depth_registered/image"""', 'Image'], {}), "('/camera/depth_registered/image', Image)\n", (9845, 9886), False, 'import message_filters\n'), ((9896, 9971), 'message_filters.ApproximateTimeSynchronizer', 'message_filters.ApproximateTimeSynchronizer', (['[image_sub, depth_sub]', '(1)', '(0.1)'], {}), '([image_sub, depth_sub], 1, 0.1)\n', (9939, 9971), False, 'import message_filters\n'), ((10023, 10035), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (10033, 10035), False, 'import rospy\n'), ((1607, 1613), 'opts.opts', 'opts', ([], {}), '()\n', (1611, 1613), False, 'from opts import opts\n'), ((1667, 1673), 'opts.opts', 'opts', ([], {}), '()\n', (1671, 1673), False, 'from opts import opts\n'), ((2875, 2966), 'cv2.aruco.drawAxis', 'aruco.drawAxis', (['image_init', 'cameraMatrix', 'distCoeffs', 'rvec_CL', 'tvec_CL', 'markerLength_CL'], {}), '(image_init, cameraMatrix, distCoeffs, rvec_CL, tvec_CL,\n markerLength_CL)\n', (2889, 2966), True, 'import cv2.aruco as aruco\n'), ((4812, 4865), 'numpy.array', 'np.array', (['[pixel[0] * depth, pixel[1] * depth, depth]'], {}), '([pixel[0] * depth, pixel[1] * depth, depth])\n', (4820, 4865), True, 'import numpy as np\n'), ((5519, 5551), 'numpy.array', 'np.array', (['cv_rgb'], {'dtype': 'np.uint8'}), '(cv_rgb, dtype=np.uint8)\n', (5527, 5551), True, 'import numpy as np\n'), ((5575, 5611), 'numpy.array', 'np.array', (['cv_depth'], {'dtype': 'np.float32'}), '(cv_depth, dtype=np.float32)\n', (5583, 5611), True, 'import numpy as np\n'), ((5674, 5703), 'cv2.imshow', 'cv2.imshow', (['"""Depth"""', 'cv_depth'], {}), "('Depth', cv_depth)\n", (5684, 5703), False, 'import cv2\n'), ((5712, 5737), 'cv2.imshow', 'cv2.imshow', (['"""RGB"""', 'cv_rgb'], {}), "('RGB', cv_rgb)\n", (5722, 5737), False, 'import cv2\n'), ((9183, 9229), 'cv2.namedWindow', 'cv2.namedWindow', (['"""visual"""', 'cv2.WINDOW_AUTOSIZE'], {}), "('visual', cv2.WINDOW_AUTOSIZE)\n", (9198, 9229), False, 'import cv2\n'), ((9238, 9267), 'cv2.imshow', 'cv2.imshow', (['"""visual"""', 'rgb_img'], {}), "('visual', rgb_img)\n", (9248, 9267), False, 'import cv2\n'), ((4771, 4798), 'numpy.linalg.inv', 'np.linalg.inv', (['cameraMatrix'], {}), '(cameraMatrix)\n', (4784, 4798), True, 'import numpy as np\n'), ((4925, 4944), 'numpy.linalg.inv', 'np.linalg.inv', (['M_CL'], {}), '(M_CL)\n', (4938, 4944), True, 'import numpy as np\n')] |
dovietchinh/multi-task-classification | source/utils/augmentations.py | 23a70300a7a800bc982f87902b6aa1faaf91b489 | import numpy as np
import cv2
import random
def preprocess(img,img_size,padding=True):
"""[summary]
Args:
img (np.ndarray): images
img_size (int,list,tuple): target size. eg: 224 , (224,224) or [224,224]
        padding (bool): pad img to a square before resizing, to prevent image distortion. Defaults to True.
Returns:
images (np.ndarray): images in target size
"""
if padding:
height,width,_ = img.shape
delta = height - width
if delta > 0:
img = np.pad(img,[[0,0],[delta//2,delta//2],[0,0]], mode='constant',constant_values =255)
else:
img = np.pad(img,[[-delta//2,-delta//2],[0,0],[0,0]], mode='constant',constant_values =255)
if isinstance(img_size,int):
img_size = (img_size,img_size)
return cv2.resize(img,img_size)
class RandAugment:
def __init__(self, augment_params):
self.num_layers = augment_params['num_layers']
self.AUGMENT_FUNCTION = {
'fliplr' : RandAugment.augment_fliplr if augment_params.get('fliplr') else None,
'augment_hsv' : RandAugment.augment_hsv if augment_params.get('augment_hsv') else None,
'hist_equalize' : RandAugment.hist_equalize if augment_params.get('hist_equalize') else None,
'solarize' : RandAugment.solarize if augment_params.get('solarize') else None,
'posterize': RandAugment.posterize if augment_params.get('posterize') else None,
'adjust_brightness': RandAugment.adjust_brightness if augment_params.get('adjust_brightness') else None,
'invert' : RandAugment.invert if augment_params.get('invert') else None,
'contrast': RandAugment.contrast if augment_params.get('contrast') else None,
'shearX' : RandAugment.shear_x if augment_params.get('shearX') else None,
'shearY' : RandAugment.shear_y if augment_params.get('shearY') else None,
'translateX' : RandAugment.translate_x if augment_params.get('translateX') else None,
'translateY' : RandAugment.translate_y if augment_params.get('translateY') else None,
'sharpness' : RandAugment.sharpness if augment_params.get('sharpness') else None,
'cutout' : RandAugment.cutout if augment_params.get('cutout') else None,
'rotate' : RandAugment.rotate if augment_params.get('rotate') else None,
'cut_25_left' : RandAugment.cut_25_left if augment_params.get('cut_25_left') else None,
'cut_25_right': RandAugment.cut_25_right if augment_params.get('cut_25_right') else None,
'cut_25_above': RandAugment.cut_25_above if augment_params.get('cut_25_above') else None,
'cut_25_under': RandAugment.cut_25_under if augment_params.get('cut_25_under') else None,
# 'random_crop':random_crop
}
self.ARGS_LIMIT = {
'fliplr' : augment_params.get('fliplr'),
'augment_hsv': augment_params.get('augment_hsv'),
'hist_equalize' : augment_params.get('hist_equalize'),
'solarize' : augment_params.get('solarize'),
'posterize': augment_params.get('posterize'),
'adjust_brightness': augment_params.get('adjust_brightness'),
'invert' : augment_params.get('invert'),
'contrast': augment_params.get('contrast'),
'shearX' : augment_params.get('shearX'),
'shearY' : augment_params.get('shearY'),
'translateX' : augment_params.get('translateX'),
'translateY' : augment_params.get('translateY'),
'sharpness' : augment_params.get('sharpness'),
'cutout' : augment_params.get('cutout'),
'rotate' : augment_params.get('rotate'),
'cut_25_left' : augment_params.get('cut_25_left'),
'cut_25_right': augment_params.get('cut_25_right'),
'cut_25_above': augment_params.get('cut_25_above'),
'cut_25_under': augment_params.get('cut_25_under')
# 'random_crop':random_crop
}
self.policy = list(k for k,v in self.AUGMENT_FUNCTION.items() if v)
# print(self.policy)
def mixup(img1,img2,factor):
img = img1.astype('float')* factor + img2.astype('float') * (1-factor)
img = np.clip(img, 0,255)
img = img.astype('uint8')
return img
def augment_fliplr(img,level):
if random.random() < level:
return np.fliplr(img)
return img
def augment_hsv(im, level=None, hgain=0.015, sgain=0.7, vgain=0.4):
im = im.copy()
# HSV color-space augmentation
if hgain or sgain or vgain:
r = np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain] + 1 # random gains
hue, sat, val = cv2.split(cv2.cvtColor(im, cv2.COLOR_BGR2HSV))
dtype = im.dtype # uint8
x = np.arange(0, 256, dtype=r.dtype)
lut_hue = ((x * r[0]) % 180).astype(dtype)
lut_sat = np.clip(x * r[1], 0, 255).astype(dtype)
lut_val = np.clip(x * r[2], 0, 255).astype(dtype)
im_hsv = cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val)))
cv2.cvtColor(im_hsv, cv2.COLOR_HSV2BGR, dst=im) # no return needed
return im_hsv
def hist_equalize(im, level=None,clahe=True, bgr=True):
im = im.copy()
# Equalize histogram on BGR image 'im' with im.shape(n,m,3) and range 0-255
yuv = cv2.cvtColor(im, cv2.COLOR_BGR2YUV if bgr else cv2.COLOR_RGB2YUV)
if clahe:
c = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))
yuv[:, :, 0] = c.apply(yuv[:, :, 0])
else:
yuv[:, :, 0] = cv2.equalizeHist(yuv[:, :, 0]) # equalize Y channel histogram
return cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR if bgr else cv2.COLOR_YUV2RGB) # convert YUV image to RGB
def solarize(image, level=128):
threshold = level
image = image.copy()
# For each pixel in the image, select the pixel
# if the value is less than the threshold.
# Otherwise, subtract 255 from the pixel.
return np.where(image <= threshold, image, 255 - image)
def posterize(img, level=3):
bits = level
shift = 8 - bits
# img = img >> shift
img = np.left_shift(img,shift)
img = np.right_shift(img,shift)
return img.astype('uint8')
def adjust_brightness(img,level=0.5):
factor = level
degenerate = np.zeros(img.shape,dtype='uint8')
img = RandAugment.mixup(img,degenerate,factor)
return img
def invert(img,level=None):
return 255-img
def contrast(img,factor=0.5):
degenerate = cv2.cvtColor(cv2.cvtColor(img, cv2.COLOR_BGR2GRAY), cv2.COLOR_GRAY2BGR)
return RandAugment.mixup(img,degenerate,factor)
def shear_x(img,level=0.4,mode='reflect'):
M = np.array([[1, level, 0],
[0, 1 , 0],
[0, 0 , 1]],dtype='float')
height,width,_ = img.shape
option_mode ={
'reflect' : cv2.BORDER_REPLICATE,
'constant' : cv2.BORDER_CONSTANT
}
mode = option_mode[mode]
sheared_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode)
return sheared_img
def shear_y(img,level=0.4,mode='reflect'):
M = np.array([[1, 0 , 0],
[level, 1 , 0],
[0, 0 , 1]],dtype='float')
height,width,_ = img.shape
option_mode ={
'reflect' : cv2.BORDER_REPLICATE,
'constant' : cv2.BORDER_CONSTANT
}
mode = option_mode[mode]
sheared_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode)
return sheared_img
def translate_x(img,level,mode='reflect'):
height,width,_ = img.shape
option_mode ={
'reflect' : cv2.BORDER_REPLICATE,
'constant' : cv2.BORDER_CONSTANT
}
mode = option_mode[mode]
translate_pixel = int(width * level)
M = np.array([[1, 0 , translate_pixel],
[level, 1 , 0],
[0, 0 , 1]],dtype='float')
translate_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode)
return translate_img
def translate_y(img,level,mode='reflect'):
height,width,_ = img.shape
option_mode ={
'reflect' : cv2.BORDER_REPLICATE,
'constant' : cv2.BORDER_CONSTANT
}
mode = option_mode[mode]
translate_pixel = int(width * level)
M = np.array([[1, 0 , 0],
[level, 1 , translate_pixel],
[0, 0 , 1]],dtype='float')
translate_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode)
return translate_img
# def sharpness(img,):
# kernel = np.array(
# [[1, 1, 1],
# [1, 5, 1],
# [1, 1, 1]], dtype=tf.float32,
# shape=[3, 3, 1, 1]) / 13.
# cv2.
def cutout(img,level,**kwargs):
img = img.copy()
height,width ,_ = img.shape
padding_size = int(height*level),int(width*level)
value = kwargs.get('value')
cordinate_h = np.random.randint(0,height-padding_size[0])
cordinate_w = np.random.randint(0,width-padding_size[1])
img[cordinate_h:cordinate_h+padding_size[0],cordinate_w:cordinate_w+padding_size[1],:] = 255
return img
def rotate(image, level=45, center = None, scale = 1.0):
angle=level
(h, w) = image.shape[:2]
if center is None:
center = (w / 2, h / 2)
# Perform the rotation
M = cv2.getRotationMatrix2D(center, angle, scale)
rotated = cv2.warpAffine(image, M, (w, h),borderMode=cv2.BORDER_REPLICATE)
return rotated
def cut_25_under(img,level=0.25):
ratio = level
height,width,_ = img.shape
new_height = int((1-ratio)*height)
img_ = img[:new_height,:,:]
height,width,_ = img_.shape
if height > width :
img2 = np.pad(img_,[[0,0],[(height-width)//2,(height-width)//2],[0,0]],mode='constant',constant_values=255)
else:
img2 = np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255)
img2 = cv2.resize(img2,(224,224))
return img2
def cut_25_above(img,level=0.25):
ratio = level
height,width,_ = img.shape
new_height = int(ratio*height)
img_ = img[new_height:,:,:]
height,width,_ = img_.shape
if height > width :
img2 = np.pad(img_,[[0,0],[(height-width)//2,(height-width)//2],[0,0]],mode='constant',constant_values=255)
else:
img2 = np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255)
img2 = cv2.resize(img2,(224,224))
return img2
def cut_25_right(img,level=0.25):
ratio = level
height,width,_ = img.shape
new_width = int((1-ratio)*width)
img_ = img[:,:new_width,:]
height,width,_ = img_.shape
if height > width :
img2 = np.pad(img_,[[0,0],[(height-width)//2,(height-width)//2],[0,0]],mode='constant',constant_values=255)
else:
img2 = np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255)
img2 = cv2.resize(img2,(224,224))
return img2
def cut_25_left(img,level=0.25):
ratio = level
height,width,_ = img.shape
new_width = int(ratio*width)
img_ = img[:,new_width:,:]
height,width,_ = img_.shape
if height > width :
img2 = np.pad(img_,[[0,0],[(height-width)//2,(height-width)//2],[0,0]],mode='constant',constant_values=255)
else:
img2 = np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255)
img2 = cv2.resize(img2,(224,224))
return img2
def __call__(self,img):
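        # Sample `num_layers` augmentations from the enabled policy, rescale each
        # random level into that augmentation's configured [min, max] range, and
        # apply them to the image in sequence.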
augmenters = random.choices(self.policy, k=self.num_layers)
for augmenter in augmenters:
level = random.random()
# try:
min_arg,max_arg = self.ARGS_LIMIT[augmenter]
level = min_arg + (max_arg - min_arg) * level
img = self.AUGMENT_FUNCTION[augmenter](img,level=level)
# except:
# print(augmenter)
return img
def augmentation_test():
img_org = cv2.imread('test.jpg')
import yaml
augment_params = yaml.safe_load(open('config/default/train_config.yaml')).get('augment_params')
augmenter = RandAugment(augment_params=augment_params)#(num_layers=1)
for _ in range(10000):
img_aug = augmenter(img_org)
img_pad = preprocess(img_aug,224)
# cv2.imshow('a',img_org)
# cv2.imshow('b',img_aug)
# cv2.imshow('c',img_pad)
# if cv2.waitKey(0)==ord('q'):
# exit()
if __name__ =='__main__':
augmentation_test() | [((822, 847), 'cv2.resize', 'cv2.resize', (['img', 'img_size'], {}), '(img, img_size)\n', (832, 847), False, 'import cv2\n'), ((12747, 12769), 'cv2.imread', 'cv2.imread', (['"""test.jpg"""'], {}), "('test.jpg')\n", (12757, 12769), False, 'import cv2\n'), ((4304, 4324), 'numpy.clip', 'np.clip', (['img', '(0)', '(255)'], {}), '(img, 0, 255)\n', (4311, 4324), True, 'import numpy as np\n'), ((5491, 5556), 'cv2.cvtColor', 'cv2.cvtColor', (['im', '(cv2.COLOR_BGR2YUV if bgr else cv2.COLOR_RGB2YUV)'], {}), '(im, cv2.COLOR_BGR2YUV if bgr else cv2.COLOR_RGB2YUV)\n', (5503, 5556), False, 'import cv2\n'), ((5811, 5877), 'cv2.cvtColor', 'cv2.cvtColor', (['yuv', '(cv2.COLOR_YUV2BGR if bgr else cv2.COLOR_YUV2RGB)'], {}), '(yuv, cv2.COLOR_YUV2BGR if bgr else cv2.COLOR_YUV2RGB)\n', (5823, 5877), False, 'import cv2\n'), ((6170, 6218), 'numpy.where', 'np.where', (['(image <= threshold)', 'image', '(255 - image)'], {}), '(image <= threshold, image, 255 - image)\n', (6178, 6218), True, 'import numpy as np\n'), ((6342, 6367), 'numpy.left_shift', 'np.left_shift', (['img', 'shift'], {}), '(img, shift)\n', (6355, 6367), True, 'import numpy as np\n'), ((6381, 6407), 'numpy.right_shift', 'np.right_shift', (['img', 'shift'], {}), '(img, shift)\n', (6395, 6407), True, 'import numpy as np\n'), ((6529, 6563), 'numpy.zeros', 'np.zeros', (['img.shape'], {'dtype': '"""uint8"""'}), "(img.shape, dtype='uint8')\n", (6537, 6563), True, 'import numpy as np\n'), ((6938, 7000), 'numpy.array', 'np.array', (['[[1, level, 0], [0, 1, 0], [0, 0, 1]]'], {'dtype': '"""float"""'}), "([[1, level, 0], [0, 1, 0], [0, 0, 1]], dtype='float')\n", (6946, 7000), True, 'import numpy as np\n'), ((7263, 7324), 'cv2.warpPerspective', 'cv2.warpPerspective', (['img', 'M', '(width, height)'], {'borderMode': 'mode'}), '(img, M, (width, height), borderMode=mode)\n', (7282, 7324), False, 'import cv2\n'), ((7412, 7474), 'numpy.array', 'np.array', (['[[1, 0, 0], [level, 1, 0], [0, 0, 1]]'], {'dtype': '"""float"""'}), "([[1, 0, 0], [level, 1, 0], [0, 0, 1]], dtype='float')\n", (7420, 7474), True, 'import numpy as np\n'), ((7749, 7810), 'cv2.warpPerspective', 'cv2.warpPerspective', (['img', 'M', '(width, height)'], {'borderMode': 'mode'}), '(img, M, (width, height), borderMode=mode)\n', (7768, 7810), False, 'import cv2\n'), ((8137, 8213), 'numpy.array', 'np.array', (['[[1, 0, translate_pixel], [level, 1, 0], [0, 0, 1]]'], {'dtype': '"""float"""'}), "([[1, 0, translate_pixel], [level, 1, 0], [0, 0, 1]], dtype='float')\n", (8145, 8213), True, 'import numpy as np\n'), ((8297, 8358), 'cv2.warpPerspective', 'cv2.warpPerspective', (['img', 'M', '(width, height)'], {'borderMode': 'mode'}), '(img, M, (width, height), borderMode=mode)\n', (8316, 8358), False, 'import cv2\n'), ((8687, 8763), 'numpy.array', 'np.array', (['[[1, 0, 0], [level, 1, translate_pixel], [0, 0, 1]]'], {'dtype': '"""float"""'}), "([[1, 0, 0], [level, 1, translate_pixel], [0, 0, 1]], dtype='float')\n", (8695, 8763), True, 'import numpy as np\n'), ((8847, 8908), 'cv2.warpPerspective', 'cv2.warpPerspective', (['img', 'M', '(width, height)'], {'borderMode': 'mode'}), '(img, M, (width, height), borderMode=mode)\n', (8866, 8908), False, 'import cv2\n'), ((9349, 9395), 'numpy.random.randint', 'np.random.randint', (['(0)', '(height - padding_size[0])'], {}), '(0, height - padding_size[0])\n', (9366, 9395), True, 'import numpy as np\n'), ((9415, 9460), 'numpy.random.randint', 'np.random.randint', (['(0)', '(width - padding_size[1])'], {}), '(0, width - padding_size[1])\n', (9432, 
9460), True, 'import numpy as np\n'), ((9802, 9847), 'cv2.getRotationMatrix2D', 'cv2.getRotationMatrix2D', (['center', 'angle', 'scale'], {}), '(center, angle, scale)\n', (9825, 9847), False, 'import cv2\n'), ((9866, 9931), 'cv2.warpAffine', 'cv2.warpAffine', (['image', 'M', '(w, h)'], {'borderMode': 'cv2.BORDER_REPLICATE'}), '(image, M, (w, h), borderMode=cv2.BORDER_REPLICATE)\n', (9880, 9931), False, 'import cv2\n'), ((10489, 10517), 'cv2.resize', 'cv2.resize', (['img2', '(224, 224)'], {}), '(img2, (224, 224))\n', (10499, 10517), False, 'import cv2\n'), ((11066, 11094), 'cv2.resize', 'cv2.resize', (['img2', '(224, 224)'], {}), '(img2, (224, 224))\n', (11076, 11094), False, 'import cv2\n'), ((11639, 11667), 'cv2.resize', 'cv2.resize', (['img2', '(224, 224)'], {}), '(img2, (224, 224))\n', (11649, 11667), False, 'import cv2\n'), ((12207, 12235), 'cv2.resize', 'cv2.resize', (['img2', '(224, 224)'], {}), '(img2, (224, 224))\n', (12217, 12235), False, 'import cv2\n'), ((12307, 12353), 'random.choices', 'random.choices', (['self.policy'], {'k': 'self.num_layers'}), '(self.policy, k=self.num_layers)\n', (12321, 12353), False, 'import random\n'), ((537, 634), 'numpy.pad', 'np.pad', (['img', '[[0, 0], [delta // 2, delta // 2], [0, 0]]'], {'mode': '"""constant"""', 'constant_values': '(255)'}), "(img, [[0, 0], [delta // 2, delta // 2], [0, 0]], mode='constant',\n constant_values=255)\n", (543, 634), True, 'import numpy as np\n'), ((653, 752), 'numpy.pad', 'np.pad', (['img', '[[-delta // 2, -delta // 2], [0, 0], [0, 0]]'], {'mode': '"""constant"""', 'constant_values': '(255)'}), "(img, [[-delta // 2, -delta // 2], [0, 0], [0, 0]], mode='constant',\n constant_values=255)\n", (659, 752), True, 'import numpy as np\n'), ((4424, 4439), 'random.random', 'random.random', ([], {}), '()\n', (4437, 4439), False, 'import random\n'), ((4468, 4482), 'numpy.fliplr', 'np.fliplr', (['img'], {}), '(img)\n', (4477, 4482), True, 'import numpy as np\n'), ((4892, 4924), 'numpy.arange', 'np.arange', (['(0)', '(256)'], {'dtype': 'r.dtype'}), '(0, 256, dtype=r.dtype)\n', (4901, 4924), True, 'import numpy as np\n'), ((5219, 5266), 'cv2.cvtColor', 'cv2.cvtColor', (['im_hsv', 'cv2.COLOR_HSV2BGR'], {'dst': 'im'}), '(im_hsv, cv2.COLOR_HSV2BGR, dst=im)\n', (5231, 5266), False, 'import cv2\n'), ((5591, 5642), 'cv2.createCLAHE', 'cv2.createCLAHE', ([], {'clipLimit': '(2.0)', 'tileGridSize': '(8, 8)'}), '(clipLimit=2.0, tileGridSize=(8, 8))\n', (5606, 5642), False, 'import cv2\n'), ((5733, 5765), 'cv2.equalizeHist', 'cv2.equalizeHist', (['yuv[:, :, (0)]'], {}), '(yuv[:, :, (0)])\n', (5749, 5765), False, 'import cv2\n'), ((6763, 6800), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2GRAY'], {}), '(img, cv2.COLOR_BGR2GRAY)\n', (6775, 6800), False, 'import cv2\n'), ((10231, 10352), 'numpy.pad', 'np.pad', (['img_', '[[0, 0], [(height - width) // 2, (height - width) // 2], [0, 0]]'], {'mode': '"""constant"""', 'constant_values': '(255)'}), "(img_, [[0, 0], [(height - width) // 2, (height - width) // 2], [0, 0\n ]], mode='constant', constant_values=255)\n", (10237, 10352), True, 'import numpy as np\n'), ((10373, 10494), 'numpy.pad', 'np.pad', (['img_', '[[(width - height) // 2, (width - height) // 2], [0, 0], [0, 0]]'], {'mode': '"""constant"""', 'constant_values': '(255)'}), "(img_, [[(width - height) // 2, (width - height) // 2], [0, 0], [0, 0\n ]], mode='constant', constant_values=255)\n", (10379, 10494), True, 'import numpy as np\n'), ((10808, 10929), 'numpy.pad', 'np.pad', (['img_', '[[0, 0], [(height - width) // 2, (height 
- width) // 2], [0, 0]]'], {'mode': '"""constant"""', 'constant_values': '(255)'}), "(img_, [[0, 0], [(height - width) // 2, (height - width) // 2], [0, 0\n ]], mode='constant', constant_values=255)\n", (10814, 10929), True, 'import numpy as np\n'), ((10950, 11071), 'numpy.pad', 'np.pad', (['img_', '[[(width - height) // 2, (width - height) // 2], [0, 0], [0, 0]]'], {'mode': '"""constant"""', 'constant_values': '(255)'}), "(img_, [[(width - height) // 2, (width - height) // 2], [0, 0], [0, 0\n ]], mode='constant', constant_values=255)\n", (10956, 11071), True, 'import numpy as np\n'), ((11377, 11498), 'numpy.pad', 'np.pad', (['img_', '[[0, 0], [(height - width) // 2, (height - width) // 2], [0, 0]]'], {'mode': '"""constant"""', 'constant_values': '(255)'}), "(img_, [[0, 0], [(height - width) // 2, (height - width) // 2], [0, 0\n ]], mode='constant', constant_values=255)\n", (11383, 11498), True, 'import numpy as np\n'), ((11519, 11640), 'numpy.pad', 'np.pad', (['img_', '[[(width - height) // 2, (width - height) // 2], [0, 0], [0, 0]]'], {'mode': '"""constant"""', 'constant_values': '(255)'}), "(img_, [[(width - height) // 2, (width - height) // 2], [0, 0], [0, 0\n ]], mode='constant', constant_values=255)\n", (11525, 11640), True, 'import numpy as np\n'), ((11945, 12066), 'numpy.pad', 'np.pad', (['img_', '[[0, 0], [(height - width) // 2, (height - width) // 2], [0, 0]]'], {'mode': '"""constant"""', 'constant_values': '(255)'}), "(img_, [[0, 0], [(height - width) // 2, (height - width) // 2], [0, 0\n ]], mode='constant', constant_values=255)\n", (11951, 12066), True, 'import numpy as np\n'), ((12087, 12208), 'numpy.pad', 'np.pad', (['img_', '[[(width - height) // 2, (width - height) // 2], [0, 0], [0, 0]]'], {'mode': '"""constant"""', 'constant_values': '(255)'}), "(img_, [[(width - height) // 2, (width - height) // 2], [0, 0], [0, 0\n ]], mode='constant', constant_values=255)\n", (12093, 12208), True, 'import numpy as np\n'), ((12411, 12426), 'random.random', 'random.random', ([], {}), '()\n', (12424, 12426), False, 'import random\n'), ((4800, 4835), 'cv2.cvtColor', 'cv2.cvtColor', (['im', 'cv2.COLOR_BGR2HSV'], {}), '(im, cv2.COLOR_BGR2HSV)\n', (4812, 4835), False, 'import cv2\n'), ((4690, 4717), 'numpy.random.uniform', 'np.random.uniform', (['(-1)', '(1)', '(3)'], {}), '(-1, 1, 3)\n', (4707, 4717), True, 'import numpy as np\n'), ((5002, 5027), 'numpy.clip', 'np.clip', (['(x * r[1])', '(0)', '(255)'], {}), '(x * r[1], 0, 255)\n', (5009, 5027), True, 'import numpy as np\n'), ((5064, 5089), 'numpy.clip', 'np.clip', (['(x * r[2])', '(0)', '(255)'], {}), '(x * r[2], 0, 255)\n', (5071, 5089), True, 'import numpy as np\n'), ((5137, 5158), 'cv2.LUT', 'cv2.LUT', (['hue', 'lut_hue'], {}), '(hue, lut_hue)\n', (5144, 5158), False, 'import cv2\n'), ((5160, 5181), 'cv2.LUT', 'cv2.LUT', (['sat', 'lut_sat'], {}), '(sat, lut_sat)\n', (5167, 5181), False, 'import cv2\n'), ((5183, 5204), 'cv2.LUT', 'cv2.LUT', (['val', 'lut_val'], {}), '(val, lut_val)\n', (5190, 5204), False, 'import cv2\n')] |
James992927108/uEFI_Edk2_Practice | BaseTools/Source/Python/GenFds/CapsuleData.py | 2cac7618dfee10bfa5104a2e167c85425fde0100 | ## @file
# generate capsule
#
# Copyright (c) 2007-2017, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##
# Import Modules
#
import Ffs
from GenFdsGlobalVariable import GenFdsGlobalVariable
import StringIO
from struct import pack
import os
from Common.Misc import SaveFileOnChange
import uuid
## base class for capsule data
#
#
class CapsuleData:
## The constructor
#
# @param self The object pointer
def __init__(self):
pass
## generate capsule data
#
# @param self The object pointer
def GenCapsuleSubItem(self):
pass
## FFS class for capsule data
#
#
class CapsuleFfs (CapsuleData):
## The constructor
#
# @param self The object pointer
#
def __init__(self) :
self.Ffs = None
self.FvName = None
## generate FFS capsule data
#
# @param self The object pointer
# @retval string Generated file name
#
def GenCapsuleSubItem(self):
FfsFile = self.Ffs.GenFfs()
return FfsFile
## FV class for capsule data
#
#
class CapsuleFv (CapsuleData):
## The constructor
#
# @param self The object pointer
#
def __init__(self) :
self.Ffs = None
self.FvName = None
self.CapsuleName = None
## generate FV capsule data
#
# @param self The object pointer
# @retval string Generated file name
#
def GenCapsuleSubItem(self):
if self.FvName.find('.fv') == -1:
if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper())
FdBuffer = StringIO.StringIO('')
FvObj.CapsuleName = self.CapsuleName
FvFile = FvObj.AddToBuffer(FdBuffer)
FvObj.CapsuleName = None
FdBuffer.close()
return FvFile
else:
FvFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName)
return FvFile
## FD class for capsule data
#
#
class CapsuleFd (CapsuleData):
## The constructor
#
# @param self The object pointer
#
def __init__(self) :
self.Ffs = None
self.FdName = None
self.CapsuleName = None
## generate FD capsule data
#
# @param self The object pointer
# @retval string Generated file name
#
def GenCapsuleSubItem(self):
if self.FdName.find('.fd') == -1:
if self.FdName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper())
FdFile = FdObj.GenFd()
return FdFile
else:
FdFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName)
return FdFile
## AnyFile class for capsule data
#
#
class CapsuleAnyFile (CapsuleData):
## The constructor
#
# @param self The object pointer
#
def __init__(self) :
self.Ffs = None
self.FileName = None
## generate AnyFile capsule data
#
# @param self The object pointer
# @retval string Generated file name
#
def GenCapsuleSubItem(self):
return self.FileName
## Afile class for capsule data
#
#
class CapsuleAfile (CapsuleData):
## The constructor
#
# @param self The object pointer
#
def __init__(self) :
self.Ffs = None
self.FileName = None
## generate Afile capsule data
#
# @param self The object pointer
# @retval string Generated file name
#
def GenCapsuleSubItem(self):
return self.FileName
class CapsulePayload(CapsuleData):
'''Generate payload file, the header is defined below:
#pragma pack(1)
typedef struct {
UINT32 Version;
EFI_GUID UpdateImageTypeId;
UINT8 UpdateImageIndex;
UINT8 reserved_bytes[3];
UINT32 UpdateImageSize;
UINT32 UpdateVendorCodeSize;
UINT64 UpdateHardwareInstance; //Introduced in v2
} EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER;
'''
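    # Size sketch of the v2 header above: Version (4) + UpdateImageTypeId (16) +
    # UpdateImageIndex (1) + reserved_bytes (3) + UpdateImageSize (4) +
    # UpdateVendorCodeSize (4) + UpdateHardwareInstance (8) = 40 bytes, which is
    # exactly what the '=ILHHBBBBBBBBBBBBIIQ' format used in GenCapsuleSubItem packs:
    #
    #     import struct
    #     assert struct.calcsize('=ILHHBBBBBBBBBBBBIIQ') == 40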
def __init__(self):
self.UiName = None
self.Version = None
self.ImageTypeId = None
self.ImageIndex = None
self.HardwareInstance = None
self.ImageFile = []
self.VendorCodeFile = []
self.Certificate_Guid = None
self.MonotonicCount = None
self.Existed = False
self.Buffer = None
def GenCapsuleSubItem(self, AuthData=[]):
if not self.Version:
self.Version = '0x00000002'
if not self.ImageIndex:
self.ImageIndex = '0x1'
if not self.HardwareInstance:
self.HardwareInstance = '0x0'
ImageFileSize = os.path.getsize(self.ImageFile)
if AuthData:
            # ImageFileSize needs to include the full authentication info size,
            # from the first byte of MonotonicCount to the last byte of the certificate.
            # The extra 32 bytes cover MonotonicCount (8), dwLength (4), wRevision (2),
            # wCertificateType (2) and the CertType GUID (16).
ImageFileSize += 32
VendorFileSize = 0
if self.VendorCodeFile:
VendorFileSize = os.path.getsize(self.VendorCodeFile)
#
# Fill structure
#
Guid = self.ImageTypeId.split('-')
Buffer = pack('=ILHHBBBBBBBBBBBBIIQ',
int(self.Version,16),
int(Guid[0], 16),
int(Guid[1], 16),
int(Guid[2], 16),
int(Guid[3][-4:-2], 16),
int(Guid[3][-2:], 16),
int(Guid[4][-12:-10], 16),
int(Guid[4][-10:-8], 16),
int(Guid[4][-8:-6], 16),
int(Guid[4][-6:-4], 16),
int(Guid[4][-4:-2], 16),
int(Guid[4][-2:], 16),
int(self.ImageIndex, 16),
0,
0,
0,
ImageFileSize,
VendorFileSize,
int(self.HardwareInstance, 16)
)
if AuthData:
Buffer += pack('QIHH', AuthData[0], AuthData[1], AuthData[2], AuthData[3])
Buffer += uuid.UUID(AuthData[4]).get_bytes_le()
#
# Append file content to the structure
#
ImageFile = open(self.ImageFile, 'rb')
Buffer += ImageFile.read()
ImageFile.close()
if self.VendorCodeFile:
VendorFile = open(self.VendorCodeFile, 'rb')
Buffer += VendorFile.read()
VendorFile.close()
self.Existed = True
return Buffer
| [((5358, 5389), 'os.path.getsize', 'os.path.getsize', (['self.ImageFile'], {}), '(self.ImageFile)\n', (5373, 5389), False, 'import os\n'), ((2439, 2494), 'GenFdsGlobalVariable.GenFdsGlobalVariable.ReplaceWorkspaceMacro', 'GenFdsGlobalVariable.ReplaceWorkspaceMacro', (['self.FvName'], {}), '(self.FvName)\n', (2481, 2494), False, 'from GenFdsGlobalVariable import GenFdsGlobalVariable\n'), ((3282, 3337), 'GenFdsGlobalVariable.GenFdsGlobalVariable.ReplaceWorkspaceMacro', 'GenFdsGlobalVariable.ReplaceWorkspaceMacro', (['self.FdName'], {}), '(self.FdName)\n', (3324, 3337), False, 'from GenFdsGlobalVariable import GenFdsGlobalVariable\n'), ((5774, 5810), 'os.path.getsize', 'os.path.getsize', (['self.VendorCodeFile'], {}), '(self.VendorCodeFile)\n', (5789, 5810), False, 'import os\n'), ((6829, 6893), 'struct.pack', 'pack', (['"""QIHH"""', 'AuthData[0]', 'AuthData[1]', 'AuthData[2]', 'AuthData[3]'], {}), "('QIHH', AuthData[0], AuthData[1], AuthData[2], AuthData[3])\n", (6833, 6893), False, 'from struct import pack\n'), ((1996, 2048), 'GenFdsGlobalVariable.GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys', 'GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys', ([], {}), '()\n', (2046, 2048), False, 'from GenFdsGlobalVariable import GenFdsGlobalVariable\n'), ((2172, 2193), 'StringIO.StringIO', 'StringIO.StringIO', (['""""""'], {}), "('')\n", (2189, 2193), False, 'import StringIO\n'), ((3029, 3081), 'GenFdsGlobalVariable.GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys', 'GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys', ([], {}), '()\n', (3079, 3081), False, 'from GenFdsGlobalVariable import GenFdsGlobalVariable\n'), ((6916, 6938), 'uuid.UUID', 'uuid.UUID', (['AuthData[4]'], {}), '(AuthData[4])\n', (6925, 6938), False, 'import uuid\n')] |
davidmallasen/Hello_MPI | CalculatingPi/pi_linear_plot.py | 8a5b5694ffc1515d2bb2dee45355f92f1b68fbed | import matplotlib.pyplot as plt
import numpy as np
# Read data
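# Each line of pi_linear.txt is expected to hold "<num_processes>, <seconds>",
# for example (hypothetical values):
#   2, 1.93
#   4, 0.98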
size = []
time = []
with open("pi_linear.txt") as file:
for line in file.readlines():
x, y = line.split(',')
size.append(int(x.strip()))
time.append(float(y.strip()))
# Plot data
fig, ax = plt.subplots()
ax.plot(size, time)
ax.set(xlabel='Num. processes', ylabel='Time (s)',
title='Pi linear')
#ax.grid()
fig.savefig("pi_linear.png")
plt.show()
| [((288, 302), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (300, 302), True, 'import matplotlib.pyplot as plt\n'), ((442, 452), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (450, 452), True, 'import matplotlib.pyplot as plt\n')] |
real-digital/esque-wire | esque_wire/protocol/structs/api/elect_preferred_leaders_response.py | eb02c49f38b89ad5e5d25aad15fb4ad795e52807 | from typing import ClassVar, List, Optional
from ...constants import ApiKey, ErrorCode
from ..base import ResponseData
class PartitionResult:
partition_id: int
error_code: ErrorCode
error_message: Optional[str]
def __init__(self, partition_id: int, error_code: ErrorCode, error_message: Optional[str]):
"""
:param partition_id: The partition id
:type partition_id: int
:param error_code: The result error, or zero if there was no error.
:type error_code: ErrorCode
:param error_message: The result message, or null if there was no error.
:type error_message: Optional[str]
"""
self.partition_id = partition_id
self.error_code = error_code
self.error_message = error_message
class ReplicaElectionResult:
topic: str
partition_result: List[PartitionResult]
def __init__(self, topic: str, partition_result: List[PartitionResult]):
"""
:param topic: The topic name
:type topic: str
:param partition_result: The results for each partition
:type partition_result: List[PartitionResult]
"""
self.topic = topic
self.partition_result = partition_result
class ElectPreferredLeadersResponseData(ResponseData):
throttle_time_ms: int
replica_election_results: List[ReplicaElectionResult]
api_key: ClassVar[ApiKey] = ApiKey.ELECT_PREFERRED_LEADERS
def __init__(self, throttle_time_ms: int, replica_election_results: List[ReplicaElectionResult]):
"""
:param throttle_time_ms: The duration in milliseconds for which the request was throttled due to a quota
violation, or zero if the request did not violate any quota.
:type throttle_time_ms: int
:param replica_election_results: The election results, or an empty array if the requester did not have
permission and the request asks for all partitions.
:type replica_election_results: List[ReplicaElectionResult]
"""
self.throttle_time_ms = throttle_time_ms
self.replica_election_results = replica_election_results
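# A hypothetical way to assemble a response from already-parsed fields (the
# error_code value would come from the ErrorCode constants imported above):
#   partition = PartitionResult(partition_id=0, error_code=no_error, error_message=None)
#   topic_result = ReplicaElectionResult(topic="my-topic", partition_result=[partition])
#   response = ElectPreferredLeadersResponseData(throttle_time_ms=0,
#                                                replica_election_results=[topic_result])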
| [] |
arthurlogilab/py_zipkin | tests/stack_test.py | 8e733506c399967ea74c56b99a9a421e1bb1736a | import mock
import pytest
import py_zipkin.storage
@pytest.fixture(autouse=True, scope="module")
def create_zipkin_attrs():
# The following tests all expect _thread_local.zipkin_attrs to exist: if it
# doesn't, mock.patch will fail.
py_zipkin.storage.ThreadLocalStack().get()
def test_get_zipkin_attrs_returns_none_if_no_zipkin_attrs():
tracer = py_zipkin.storage.get_default_tracer()
with mock.patch.object(tracer._context_stack, "_storage", []):
assert not py_zipkin.storage.ThreadLocalStack().get()
assert not py_zipkin.storage.ThreadLocalStack().get()
def test_get_zipkin_attrs_with_context_returns_none_if_no_zipkin_attrs():
with mock.patch.object(py_zipkin.storage.log, "warning", autospec=True) as log:
assert not py_zipkin.storage.Stack([]).get()
assert log.call_count == 1
def test_storage_stack_still_works_if_you_dont_pass_in_storage():
# Let's make sure this still works if we don't pass in a custom storage.
assert not py_zipkin.storage.Stack().get()
def test_get_zipkin_attrs_returns_the_last_of_the_list():
tracer = py_zipkin.storage.get_default_tracer()
with mock.patch.object(tracer._context_stack, "_storage", ["foo"]):
assert "foo" == py_zipkin.storage.ThreadLocalStack().get()
def test_get_zipkin_attrs_with_context_returns_the_last_of_the_list():
assert "foo" == py_zipkin.storage.Stack(["bar", "foo"]).get()
def test_pop_zipkin_attrs_does_nothing_if_no_requests():
tracer = py_zipkin.storage.get_default_tracer()
with mock.patch.object(tracer._context_stack, "_storage", []):
assert not py_zipkin.storage.ThreadLocalStack().pop()
def test_pop_zipkin_attrs_with_context_does_nothing_if_no_requests():
assert not py_zipkin.storage.Stack([]).pop()
def test_pop_zipkin_attrs_removes_the_last_zipkin_attrs():
tracer = py_zipkin.storage.get_default_tracer()
with mock.patch.object(tracer._context_stack, "_storage", ["foo", "bar"]):
assert "bar" == py_zipkin.storage.ThreadLocalStack().pop()
assert "foo" == py_zipkin.storage.ThreadLocalStack().get()
def test_pop_zipkin_attrs_with_context_removes_the_last_zipkin_attrs():
context_stack = py_zipkin.storage.Stack(["foo", "bar"])
assert "bar" == context_stack.pop()
assert "foo" == context_stack.get()
def test_push_zipkin_attrs_adds_new_zipkin_attrs_to_list():
tracer = py_zipkin.storage.get_default_tracer()
with mock.patch.object(tracer._context_stack, "_storage", ["foo"]):
assert "foo" == py_zipkin.storage.ThreadLocalStack().get()
py_zipkin.storage.ThreadLocalStack().push("bar")
assert "bar" == py_zipkin.storage.ThreadLocalStack().get()
def test_push_zipkin_attrs_with_context_adds_new_zipkin_attrs_to_list():
stack = py_zipkin.storage.Stack(["foo"])
assert "foo" == stack.get()
stack.push("bar")
assert "bar" == stack.get()
def test_stack_copy():
stack = py_zipkin.storage.Stack()
stack.push("a")
stack.push("b")
the_copy = stack.copy()
the_copy.push("c")
stack.push("d")
assert ["a", "b", "c"] == the_copy._storage
assert ["a", "b", "d"] == stack._storage
| [((55, 99), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)', 'scope': '"""module"""'}), "(autouse=True, scope='module')\n", (69, 99), False, 'import pytest\n'), ((415, 471), 'mock.patch.object', 'mock.patch.object', (['tracer._context_stack', '"""_storage"""', '[]'], {}), "(tracer._context_stack, '_storage', [])\n", (432, 471), False, 'import mock\n'), ((682, 748), 'mock.patch.object', 'mock.patch.object', (['py_zipkin.storage.log', '"""warning"""'], {'autospec': '(True)'}), "(py_zipkin.storage.log, 'warning', autospec=True)\n", (699, 748), False, 'import mock\n'), ((1158, 1219), 'mock.patch.object', 'mock.patch.object', (['tracer._context_stack', '"""_storage"""', "['foo']"], {}), "(tracer._context_stack, '_storage', ['foo'])\n", (1175, 1219), False, 'import mock\n'), ((1547, 1603), 'mock.patch.object', 'mock.patch.object', (['tracer._context_stack', '"""_storage"""', '[]'], {}), "(tracer._context_stack, '_storage', [])\n", (1564, 1603), False, 'import mock\n'), ((1910, 1978), 'mock.patch.object', 'mock.patch.object', (['tracer._context_stack', '"""_storage"""', "['foo', 'bar']"], {}), "(tracer._context_stack, '_storage', ['foo', 'bar'])\n", (1927, 1978), False, 'import mock\n'), ((2451, 2512), 'mock.patch.object', 'mock.patch.object', (['tracer._context_stack', '"""_storage"""', "['foo']"], {}), "(tracer._context_stack, '_storage', ['foo'])\n", (2468, 2512), False, 'import mock\n')] |
cp4cds/cp4cds-wps-template | myapp/processes/plotter.py | ed170fcee72146dc07c64f76ec71cc289672fd32 |
from pywps import Process, LiteralInput, ComplexInput, ComplexOutput
from pywps import Format
import logging
LOGGER = logging.getLogger('PYWPS')
import matplotlib
# no X11 server ... must be run first
# https://github.com/matplotlib/matplotlib/issues/3466/
matplotlib.use('Agg')
import matplotlib.pylab as plt
import cartopy.crs as ccrs
from netCDF4 import Dataset
AIR_DS = 'https://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/ncep.reanalysis.derived/surface/air.mon.ltm.nc'
def simple_plot(resource, variable=None, output=None):
output = output or 'plot.png'
ds = Dataset(resource)
values = ds.variables[variable]
fig = plt.figure(figsize=(20, 10))
ax = plt.axes(projection=ccrs.PlateCarree())
plt.contourf(values[0, :, :])
ax.stock_img()
ax.coastlines()
plt.colorbar()
fig.savefig(output)
plt.close()
return output
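# A hypothetical standalone use of the helper above, assuming a local copy of the
# NCEP file referenced by AIR_DS has been downloaded next to this script:
#   simple_plot('air.mon.ltm.nc', variable='air', output='air_plot.png')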
class SimplePlot(Process):
def __init__(self):
inputs = [
ComplexInput('dataset', 'Dataset', supported_formats=[Format('application/x-netcdf')],
default=AIR_DS,
abstract='Example: {0}'.format(AIR_DS)),
LiteralInput('variable', 'Variable', data_type='string',
default='air',
abstract='Please enter the variable name to be plotted, example: air'),
]
outputs = [
ComplexOutput('output', 'Simple Plot', supported_formats=[Format('image/png')],
as_reference=True),
]
super(SimplePlot, self).__init__(
self._handler,
identifier='simple_plot',
title='Simple Plot',
abstract='Returns a nice and simple plot.',
version='1.0',
inputs=inputs,
outputs=outputs,
store_supported=True,
status_supported=True
)
def _handler(self, request, response):
variable = request.inputs['variable'][0].data
output = simple_plot(
resource=request.inputs['dataset'][0].file,
variable=variable)
LOGGER.info("produced output: %s", output)
response.outputs['output'].file = output
response.update_status("simple_plot done", 100)
return response
| [((120, 146), 'logging.getLogger', 'logging.getLogger', (['"""PYWPS"""'], {}), "('PYWPS')\n", (137, 146), False, 'import logging\n'), ((260, 281), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (274, 281), False, 'import matplotlib\n'), ((586, 603), 'netCDF4.Dataset', 'Dataset', (['resource'], {}), '(resource)\n', (593, 603), False, 'from netCDF4 import Dataset\n'), ((650, 678), 'matplotlib.pylab.figure', 'plt.figure', ([], {'figsize': '(20, 10)'}), '(figsize=(20, 10))\n', (660, 678), True, 'import matplotlib.pylab as plt\n'), ((732, 763), 'matplotlib.pylab.contourf', 'plt.contourf', (['values[(0), :, :]'], {}), '(values[(0), :, :])\n', (744, 763), True, 'import matplotlib.pylab as plt\n'), ((805, 819), 'matplotlib.pylab.colorbar', 'plt.colorbar', ([], {}), '()\n', (817, 819), True, 'import matplotlib.pylab as plt\n'), ((848, 859), 'matplotlib.pylab.close', 'plt.close', ([], {}), '()\n', (857, 859), True, 'import matplotlib.pylab as plt\n'), ((708, 726), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (724, 726), True, 'import cartopy.crs as ccrs\n'), ((1168, 1314), 'pywps.LiteralInput', 'LiteralInput', (['"""variable"""', '"""Variable"""'], {'data_type': '"""string"""', 'default': '"""air"""', 'abstract': '"""Please enter the variable name to be plotted, example: air"""'}), "('variable', 'Variable', data_type='string', default='air',\n abstract='Please enter the variable name to be plotted, example: air')\n", (1180, 1314), False, 'from pywps import Process, LiteralInput, ComplexInput, ComplexOutput\n'), ((1016, 1046), 'pywps.Format', 'Format', (['"""application/x-netcdf"""'], {}), "('application/x-netcdf')\n", (1022, 1046), False, 'from pywps import Format\n'), ((1462, 1481), 'pywps.Format', 'Format', (['"""image/png"""'], {}), "('image/png')\n", (1468, 1481), False, 'from pywps import Format\n')] |
zorgulle/json_schema_checker | json_schema_checker/composed/__init__.py | 20cac68f899528619e5059f0e1fbee0a0f7219d6 | from .composed import List
from .composed import IntList | [] |
jmigual/socialQuiz | backend/social_quiz.py | 3d9d0980961619b555732899121d8ce6366fa96f | # -*- coding: utf-8 -*-
import json
import os.path
import random
import re
from flask import Flask, send_from_directory
from flask import request, abort
from flaskrun.flaskrun import flask_run
import datab.social_database as db
app = Flask(__name__)
# Regular expression to only accept certain files
fileChecker = re.compile(r"(.*\.js|.*\.html|.*\.png|.*\.css|.*\.map)$")
numberOfAnswers = 4
random.seed(7)
def root_dir(): # pragma: no cover
return os.path.abspath(os.path.dirname(__file__))
@app.route('/')
def root():
return index("index2.html")
@app.route('/<path:filename>')
def index(filename):
if fileChecker.match(filename):
return send_from_directory(os.path.join(root_dir(), 'static'), filename)
abort(403)
@app.route('/register')
def register():
# To obtain the mail
email = request.args.get('email')
print(email)
if email is None:
return json.dumps({})
id_user = db.register_or_get_email(email)
return json.dumps({"id": id_user})
@app.route('/join_room')
def join_room():
room_id = request.args.get('room_id')
email = request.args.get('email')
user_id = db.register_or_get_email(email)
db.exec_query("REPLACE INTO room_members (room_id, user_id) VALUES (%s,%s)", [room_id, user_id])
return json.dumps({"id": user_id})
@app.route('/answered_room')
def answered_room():
room_id = request.args.get('room_id')
user_id = request.args.get('user_id')
values = db.exec_query("SELECT a.id "
"FROM answer a INNER JOIN question q "
"WHERE a.question_id = q.id AND q.room_id = %s AND a.user_id= %s",
[room_id, user_id])
return json.dumps({"answered": len(values) > 0})
@app.route('/get_user_id')
def get_user_id():
email = request.args.get('email')
id_user = db.register_or_get_email(email)
return json.dumps({"id": id_user})
@app.route('/create_room')
def create_room():
user_id = request.args.get('user_id')
room_id = db.exec_query("INSERT INTO room (creator) VALUES (%s)", [user_id])
return json.dumps({"id": room_id})
@app.route('/get_rooms')
def get_rooms():
user_id = request.args.get('user_id')
values = db.exec_query("SELECT r.id, r.status FROM room r WHERE r.creator=%s", [user_id])
response = []
for val in values:
response.append({"id": val[0], "status": val[1]})
return json.dumps(response)
@app.route('/fill_room', methods=['POST'])
def fill_room():
json_data = request.get_json()
if json_data is None:
return json.dumps({"error": "no JSON found"})
else:
room_id = json_data["room_id"]
questions = json_data["question"]
for q in questions:
db.exec_query("INSERT INTO question (room_id, question) VALUES (%s, %s)", [room_id, q])
return json.dumps({"info": "Data received"})
@app.route('/open_room')
def open_room():
room_id = request.args.get('room_id')
print(room_id)
db.exec_query("UPDATE room r SET r.status='started' WHERE r.id = %s", [room_id])
return json.dumps({"info": "The room has been opened successfully", "status": "started"})
@app.route('/close_room')
def close_room():
room_id = request.args.get('room_id')
db.exec_query("UPDATE room r SET r.status='closed' WHERE r.id = %s", [room_id])
return json.dumps({"info": "The room has been closed successfully", "status": "closed"})
@app.route('/finish_room')
def finish_room():
room_id = request.args.get('room_id')
db.exec_query("UPDATE room r SET r.status='finished' WHERE r.id = %s", [room_id])
    # Rank users by how many quiz questions they answered correctly in this room
values = db.exec_query("SELECT u.email , COUNT(qq.id) "
"FROM quiz_question qq "
"INNER JOIN users u ON (qq.asked_user_id = u.id) "
"INNER JOIN room_members rm ON (u.id = rm.user_id) "
"WHERE qq.correct_answer_id = qq.answered_id AND rm.room_id = %s "
"GROUP BY u.email "
"ORDER BY COUNT(qq.id) DESC",
[room_id])
ranking = []
for row in values:
ranking.append({"email": row[0], "correct": row[1]})
return json.dumps({"ranking": ranking})
@app.route('/room_status')
def status_room():
room_id = request.args.get('room_id')
# SELECT status FROM Room WHERE id = 1
values = db.exec_query("SELECT status FROM room WHERE id = %s", [room_id])
return json.dumps({
"status": values[0][0]
})
@app.route('/get_room_questions')
def get_room_question():
room_id = request.args.get('room_id')
values = db.exec_query("SELECT id, question FROM question WHERE room_id = %s", [room_id])
response = []
for val in values:
response.append({"id": val[0], "text": val[1]})
return json.dumps({"questions": response})
@app.route('/post_room_answers', methods=['POST'])
def post_room_answers():
json_data = request.get_json()
if json_data is None:
return json.dumps({"error": "no JSON found"}), 404
user_id = json_data["user_id"]
values = []
for a in json_data["answers"]:
values.append((a["id"], user_id, a["text"]))
print(values[len(values) - 1])
db.exec_many_query("INSERT INTO answer (question_id, user_id, answer) VALUES(%s,%s,%s)", values)
return json.dumps({"info": "Data received"})
@app.route('/get_quiz_question')
def get_question():
room_id = int(request.args.get('room_id'))
user_id = int(request.args.get('user_id'))
possible_questions = db.get_non_answered_questions(room_id, user_id)
possible_users_to_ask = db.get_non_answered_people(room_id, user_id)
question_id = []
asked_about_id = []
if len(possible_questions) > 0:
question_id = random.sample(possible_questions, 1)
else:
possible_questions = db.get_all_questions(room_id)
if len(possible_questions) > 0:
question_id = random.sample(possible_questions, 1)
if len(possible_users_to_ask) > 0:
asked_about_id = random.sample(possible_users_to_ask, 1)
else:
possible_users_to_ask = db.get_all_different_people(room_id, user_id)
if len(possible_questions) > 0:
asked_about_id = random.sample(possible_users_to_ask, 1)
if len(question_id) > 0 and 0 < len(asked_about_id):
quiz_question_id = db.insert_quiz_question(user_id, asked_about_id[0], question_id[0])
other_users = db.get_all_different_people(room_id, asked_about_id[0])
random.shuffle(other_users)
answers = []
(answer_id, text_id) = db.get_answer(question_id[0], asked_about_id[0])
db.exec_query("UPDATE quiz_question SET correct_answer_id=%s WHERE id = %s", [answer_id, quiz_question_id])
answers.append((answer_id, text_id))
if min(numberOfAnswers - 1, len(other_users)) > 0:
for i in range(min(numberOfAnswers - 1, len(other_users))):
(answer_id, text_id) = db.get_answer(question_id[0], other_users[i])
answers.append((answer_id, text_id))
        # if this shuffle were removed, the first answer would always be the correct one
random.shuffle(answers)
answer_json = []
for (answer_id, text_id) in answers:
answer_json.append({"id": answer_id, "text": text_id})
print(quiz_question_id)
# SELECT 'question' FROM 'Question' WHERE 'id' = 3
value = db.exec_query("SELECT id "
"FROM quiz_question "
"WHERE asked_user_id = %s AND about_user_id = %s AND question_id = %s",
[user_id, asked_about_id[0], question_id[0]])
quiz_question_id = value[0][0]
value = db.exec_query("SELECT q.question "
"FROM question q "
"WHERE q.id = %s",
[question_id[0]])
question_text = value[0][0]
value = db.exec_query("SELECT u.email "
"FROM users u "
"WHERE u.id=%s",
[asked_about_id[0]])
user_name = value[0][0]
question_text = "What did %s answer to '%s' ?" % (user_name, question_text)
return json.dumps({
"id": quiz_question_id,
"question": question_text,
"answers": answer_json
})
else:
return json.dumps({"error": "Not available questions for this user in this room"})
@app.route('/post_quiz_answer')
def post_answer():
quiz_question_id = request.args.get('quiz_question_id')
quiz_answer_id = request.args.get('quiz_answer_id')
db.exec_query("UPDATE quiz_question SET answered_id = %s WHERE id = %s", [quiz_answer_id, quiz_question_id])
value = db.exec_query("SELECT qq.answered_id, qq.correct_answer_id, qq.question_id "
"FROM quiz_question qq "
"WHERE qq.id = %s", [quiz_question_id])
answered_id = value[0][0]
correct_answer_id = value[0][1]
question_id = value[0][2]
value = db.exec_query("SELECT a.answer FROM answer a WHERE a.id = %s ", [correct_answer_id])
if len(value) > 0:
text = value[0][0]
else:
text = "something when wrong"
if value is None:
return json.dumps({"error": "Internal server error"})
return json.dumps({
"correct": answered_id == correct_answer_id,
"question": question_id,
"correct_answer": {"id": correct_answer_id, "text": text}
})
if __name__ == '__main__':
flask_run(app)
| [((238, 253), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (243, 253), False, 'from flask import Flask, send_from_directory\n'), ((319, 380), 're.compile', 're.compile', (['"""(.*\\\\.js|.*\\\\.html|.*\\\\.png|.*\\\\.css|.*\\\\.map)$"""'], {}), "('(.*\\\\.js|.*\\\\.html|.*\\\\.png|.*\\\\.css|.*\\\\.map)$')\n", (329, 380), False, 'import re\n'), ((398, 412), 'random.seed', 'random.seed', (['(7)'], {}), '(7)\n', (409, 412), False, 'import random\n'), ((742, 752), 'flask.abort', 'abort', (['(403)'], {}), '(403)\n', (747, 752), False, 'from flask import request, abort\n'), ((832, 857), 'flask.request.args.get', 'request.args.get', (['"""email"""'], {}), "('email')\n", (848, 857), False, 'from flask import request, abort\n'), ((942, 973), 'datab.social_database.register_or_get_email', 'db.register_or_get_email', (['email'], {}), '(email)\n', (966, 973), True, 'import datab.social_database as db\n'), ((985, 1012), 'json.dumps', 'json.dumps', (["{'id': id_user}"], {}), "({'id': id_user})\n", (995, 1012), False, 'import json\n'), ((1071, 1098), 'flask.request.args.get', 'request.args.get', (['"""room_id"""'], {}), "('room_id')\n", (1087, 1098), False, 'from flask import request, abort\n'), ((1111, 1136), 'flask.request.args.get', 'request.args.get', (['"""email"""'], {}), "('email')\n", (1127, 1136), False, 'from flask import request, abort\n'), ((1151, 1182), 'datab.social_database.register_or_get_email', 'db.register_or_get_email', (['email'], {}), '(email)\n', (1175, 1182), True, 'import datab.social_database as db\n'), ((1187, 1287), 'datab.social_database.exec_query', 'db.exec_query', (['"""REPLACE INTO room_members (room_id, user_id) VALUES (%s,%s)"""', '[room_id, user_id]'], {}), "('REPLACE INTO room_members (room_id, user_id) VALUES (%s,%s)',\n [room_id, user_id])\n", (1200, 1287), True, 'import datab.social_database as db\n'), ((1295, 1322), 'json.dumps', 'json.dumps', (["{'id': user_id}"], {}), "({'id': user_id})\n", (1305, 1322), False, 'import json\n'), ((1389, 1416), 'flask.request.args.get', 'request.args.get', (['"""room_id"""'], {}), "('room_id')\n", (1405, 1416), False, 'from flask import request, abort\n'), ((1431, 1458), 'flask.request.args.get', 'request.args.get', (['"""user_id"""'], {}), "('user_id')\n", (1447, 1458), False, 'from flask import request, abort\n'), ((1472, 1630), 'datab.social_database.exec_query', 'db.exec_query', (['"""SELECT a.id FROM answer a INNER JOIN question q WHERE a.question_id = q.id AND q.room_id = %s AND a.user_id= %s"""', '[room_id, user_id]'], {}), "(\n 'SELECT a.id FROM answer a INNER JOIN question q WHERE a.question_id = q.id AND q.room_id = %s AND a.user_id= %s'\n , [room_id, user_id])\n", (1485, 1630), True, 'import datab.social_database as db\n'), ((1821, 1846), 'flask.request.args.get', 'request.args.get', (['"""email"""'], {}), "('email')\n", (1837, 1846), False, 'from flask import request, abort\n'), ((1861, 1892), 'datab.social_database.register_or_get_email', 'db.register_or_get_email', (['email'], {}), '(email)\n', (1885, 1892), True, 'import datab.social_database as db\n'), ((1904, 1931), 'json.dumps', 'json.dumps', (["{'id': id_user}"], {}), "({'id': id_user})\n", (1914, 1931), False, 'import json\n'), ((1994, 2021), 'flask.request.args.get', 'request.args.get', (['"""user_id"""'], {}), "('user_id')\n", (2010, 2021), False, 'from flask import request, abort\n'), ((2036, 2102), 'datab.social_database.exec_query', 'db.exec_query', (['"""INSERT INTO room (creator) VALUES (%s)"""', '[user_id]'], {}), "('INSERT INTO room 
(creator) VALUES (%s)', [user_id])\n", (2049, 2102), True, 'import datab.social_database as db\n'), ((2114, 2141), 'json.dumps', 'json.dumps', (["{'id': room_id}"], {}), "({'id': room_id})\n", (2124, 2141), False, 'import json\n'), ((2200, 2227), 'flask.request.args.get', 'request.args.get', (['"""user_id"""'], {}), "('user_id')\n", (2216, 2227), False, 'from flask import request, abort\n'), ((2241, 2326), 'datab.social_database.exec_query', 'db.exec_query', (['"""SELECT r.id, r.status FROM room r WHERE r.creator=%s"""', '[user_id]'], {}), "('SELECT r.id, r.status FROM room r WHERE r.creator=%s', [user_id]\n )\n", (2254, 2326), True, 'import datab.social_database as db\n'), ((2432, 2452), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (2442, 2452), False, 'import json\n'), ((2531, 2549), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (2547, 2549), False, 'from flask import request, abort\n'), ((2961, 2988), 'flask.request.args.get', 'request.args.get', (['"""room_id"""'], {}), "('room_id')\n", (2977, 2988), False, 'from flask import request, abort\n'), ((3012, 3097), 'datab.social_database.exec_query', 'db.exec_query', (['"""UPDATE room r SET r.status=\'started\' WHERE r.id = %s"""', '[room_id]'], {}), '("UPDATE room r SET r.status=\'started\' WHERE r.id = %s", [room_id]\n )\n', (3025, 3097), True, 'import datab.social_database as db\n'), ((3104, 3190), 'json.dumps', 'json.dumps', (["{'info': 'The room has been opened successfully', 'status': 'started'}"], {}), "({'info': 'The room has been opened successfully', 'status':\n 'started'})\n", (3114, 3190), False, 'import json\n'), ((3247, 3274), 'flask.request.args.get', 'request.args.get', (['"""room_id"""'], {}), "('room_id')\n", (3263, 3274), False, 'from flask import request, abort\n'), ((3279, 3364), 'datab.social_database.exec_query', 'db.exec_query', (['"""UPDATE room r SET r.status=\'closed\' WHERE r.id = %s"""', '[room_id]'], {}), '("UPDATE room r SET r.status=\'closed\' WHERE r.id = %s", [room_id]\n )\n', (3292, 3364), True, 'import datab.social_database as db\n'), ((3371, 3456), 'json.dumps', 'json.dumps', (["{'info': 'The room has been closed successfully', 'status': 'closed'}"], {}), "({'info': 'The room has been closed successfully', 'status':\n 'closed'})\n", (3381, 3456), False, 'import json\n'), ((3515, 3542), 'flask.request.args.get', 'request.args.get', (['"""room_id"""'], {}), "('room_id')\n", (3531, 3542), False, 'from flask import request, abort\n'), ((3547, 3634), 'datab.social_database.exec_query', 'db.exec_query', (['"""UPDATE room r SET r.status=\'finished\' WHERE r.id = %s"""', '[room_id]'], {}), '("UPDATE room r SET r.status=\'finished\' WHERE r.id = %s", [\n room_id])\n', (3560, 3634), True, 'import datab.social_database as db\n'), ((3718, 4013), 'datab.social_database.exec_query', 'db.exec_query', (['"""SELECT u.email , COUNT(qq.id) FROM quiz_question qq INNER JOIN users u ON (qq.asked_user_id = u.id) INNER JOIN room_members rm ON (u.id = rm.user_id) WHERE qq.correct_answer_id = qq.answered_id AND rm.room_id = %s GROUP BY u.email ORDER BY COUNT(qq.id) DESC"""', '[room_id]'], {}), "(\n 'SELECT u.email , COUNT(qq.id) FROM quiz_question qq INNER JOIN users u ON (qq.asked_user_id = u.id) INNER JOIN room_members rm ON (u.id = rm.user_id) WHERE qq.correct_answer_id = qq.answered_id AND rm.room_id = %s GROUP BY u.email ORDER BY COUNT(qq.id) DESC'\n , [room_id])\n", (3731, 4013), True, 'import datab.social_database as db\n'), ((4324, 4356), 'json.dumps', 'json.dumps', (["{'ranking': 
ranking}"], {}), "({'ranking': ranking})\n", (4334, 4356), False, 'import json\n'), ((4419, 4446), 'flask.request.args.get', 'request.args.get', (['"""room_id"""'], {}), "('room_id')\n", (4435, 4446), False, 'from flask import request, abort\n'), ((4503, 4568), 'datab.social_database.exec_query', 'db.exec_query', (['"""SELECT status FROM room WHERE id = %s"""', '[room_id]'], {}), "('SELECT status FROM room WHERE id = %s', [room_id])\n", (4516, 4568), True, 'import datab.social_database as db\n'), ((4580, 4616), 'json.dumps', 'json.dumps', (["{'status': values[0][0]}"], {}), "({'status': values[0][0]})\n", (4590, 4616), False, 'import json\n'), ((4706, 4733), 'flask.request.args.get', 'request.args.get', (['"""room_id"""'], {}), "('room_id')\n", (4722, 4733), False, 'from flask import request, abort\n'), ((4747, 4832), 'datab.social_database.exec_query', 'db.exec_query', (['"""SELECT id, question FROM question WHERE room_id = %s"""', '[room_id]'], {}), "('SELECT id, question FROM question WHERE room_id = %s', [room_id]\n )\n", (4760, 4832), True, 'import datab.social_database as db\n'), ((4938, 4973), 'json.dumps', 'json.dumps', (["{'questions': response}"], {}), "({'questions': response})\n", (4948, 4973), False, 'import json\n'), ((5068, 5086), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (5084, 5086), False, 'from flask import request, abort\n'), ((5354, 5459), 'datab.social_database.exec_many_query', 'db.exec_many_query', (['"""INSERT INTO answer (question_id, user_id, answer) VALUES(%s,%s,%s)"""', 'values'], {}), "(\n 'INSERT INTO answer (question_id, user_id, answer) VALUES(%s,%s,%s)',\n values)\n", (5372, 5459), True, 'import datab.social_database as db\n'), ((5462, 5499), 'json.dumps', 'json.dumps', (["{'info': 'Data received'}"], {}), "({'info': 'Data received'})\n", (5472, 5499), False, 'import json\n'), ((5675, 5722), 'datab.social_database.get_non_answered_questions', 'db.get_non_answered_questions', (['room_id', 'user_id'], {}), '(room_id, user_id)\n', (5704, 5722), True, 'import datab.social_database as db\n'), ((5751, 5795), 'datab.social_database.get_non_answered_people', 'db.get_non_answered_people', (['room_id', 'user_id'], {}), '(room_id, user_id)\n', (5777, 5795), True, 'import datab.social_database as db\n'), ((8731, 8767), 'flask.request.args.get', 'request.args.get', (['"""quiz_question_id"""'], {}), "('quiz_question_id')\n", (8747, 8767), False, 'from flask import request, abort\n'), ((8789, 8823), 'flask.request.args.get', 'request.args.get', (['"""quiz_answer_id"""'], {}), "('quiz_answer_id')\n", (8805, 8823), False, 'from flask import request, abort\n'), ((8829, 8942), 'datab.social_database.exec_query', 'db.exec_query', (['"""UPDATE quiz_question SET answered_id = %s WHERE id = %s"""', '[quiz_answer_id, quiz_question_id]'], {}), "('UPDATE quiz_question SET answered_id = %s WHERE id = %s', [\n quiz_answer_id, quiz_question_id])\n", (8842, 8942), True, 'import datab.social_database as db\n'), ((8951, 9097), 'datab.social_database.exec_query', 'db.exec_query', (['"""SELECT qq.answered_id, qq.correct_answer_id, qq.question_id FROM quiz_question qq WHERE qq.id = %s"""', '[quiz_question_id]'], {}), "(\n 'SELECT qq.answered_id, qq.correct_answer_id, qq.question_id FROM quiz_question qq WHERE qq.id = %s'\n , [quiz_question_id])\n", (8964, 9097), True, 'import datab.social_database as db\n'), ((9256, 9345), 'datab.social_database.exec_query', 'db.exec_query', (['"""SELECT a.answer FROM answer a WHERE a.id = %s """', '[correct_answer_id]'], {}), 
"('SELECT a.answer FROM answer a WHERE a.id = %s ', [\n correct_answer_id])\n", (9269, 9345), True, 'import datab.social_database as db\n'), ((9536, 9681), 'json.dumps', 'json.dumps', (["{'correct': answered_id == correct_answer_id, 'question': question_id,\n 'correct_answer': {'id': correct_answer_id, 'text': text}}"], {}), "({'correct': answered_id == correct_answer_id, 'question':\n question_id, 'correct_answer': {'id': correct_answer_id, 'text': text}})\n", (9546, 9681), False, 'import json\n'), ((9752, 9766), 'flaskrun.flaskrun.flask_run', 'flask_run', (['app'], {}), '(app)\n', (9761, 9766), False, 'from flaskrun.flaskrun import flask_run\n'), ((912, 926), 'json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (922, 926), False, 'import json\n'), ((2591, 2629), 'json.dumps', 'json.dumps', (["{'error': 'no JSON found'}"], {}), "({'error': 'no JSON found'})\n", (2601, 2629), False, 'import json\n'), ((2865, 2902), 'json.dumps', 'json.dumps', (["{'info': 'Data received'}"], {}), "({'info': 'Data received'})\n", (2875, 2902), False, 'import json\n'), ((5573, 5600), 'flask.request.args.get', 'request.args.get', (['"""room_id"""'], {}), "('room_id')\n", (5589, 5600), False, 'from flask import request, abort\n'), ((5620, 5647), 'flask.request.args.get', 'request.args.get', (['"""user_id"""'], {}), "('user_id')\n", (5636, 5647), False, 'from flask import request, abort\n'), ((5901, 5937), 'random.sample', 'random.sample', (['possible_questions', '(1)'], {}), '(possible_questions, 1)\n', (5914, 5937), False, 'import random\n'), ((5977, 6006), 'datab.social_database.get_all_questions', 'db.get_all_questions', (['room_id'], {}), '(room_id)\n', (5997, 6006), True, 'import datab.social_database as db\n'), ((6174, 6213), 'random.sample', 'random.sample', (['possible_users_to_ask', '(1)'], {}), '(possible_users_to_ask, 1)\n', (6187, 6213), False, 'import random\n'), ((6256, 6301), 'datab.social_database.get_all_different_people', 'db.get_all_different_people', (['room_id', 'user_id'], {}), '(room_id, user_id)\n', (6283, 6301), True, 'import datab.social_database as db\n'), ((6496, 6563), 'datab.social_database.insert_quiz_question', 'db.insert_quiz_question', (['user_id', 'asked_about_id[0]', 'question_id[0]'], {}), '(user_id, asked_about_id[0], question_id[0])\n', (6519, 6563), True, 'import datab.social_database as db\n'), ((6587, 6642), 'datab.social_database.get_all_different_people', 'db.get_all_different_people', (['room_id', 'asked_about_id[0]'], {}), '(room_id, asked_about_id[0])\n', (6614, 6642), True, 'import datab.social_database as db\n'), ((6651, 6678), 'random.shuffle', 'random.shuffle', (['other_users'], {}), '(other_users)\n', (6665, 6678), False, 'import random\n'), ((6732, 6780), 'datab.social_database.get_answer', 'db.get_answer', (['question_id[0]', 'asked_about_id[0]'], {}), '(question_id[0], asked_about_id[0])\n', (6745, 6780), True, 'import datab.social_database as db\n'), ((6790, 6901), 'datab.social_database.exec_query', 'db.exec_query', (['"""UPDATE quiz_question SET correct_answer_id=%s WHERE id = %s"""', '[answer_id, quiz_question_id]'], {}), "('UPDATE quiz_question SET correct_answer_id=%s WHERE id = %s',\n [answer_id, quiz_question_id])\n", (6803, 6901), True, 'import datab.social_database as db\n'), ((7286, 7309), 'random.shuffle', 'random.shuffle', (['answers'], {}), '(answers)\n', (7300, 7309), False, 'import random\n'), ((7556, 7726), 'datab.social_database.exec_query', 'db.exec_query', (['"""SELECT id FROM quiz_question WHERE asked_user_id = %s AND about_user_id = %s 
AND question_id = %s"""', '[user_id, asked_about_id[0], question_id[0]]'], {}), "(\n 'SELECT id FROM quiz_question WHERE asked_user_id = %s AND about_user_id = %s AND question_id = %s'\n , [user_id, asked_about_id[0], question_id[0]])\n", (7569, 7726), True, 'import datab.social_database as db\n'), ((7869, 7958), 'datab.social_database.exec_query', 'db.exec_query', (['"""SELECT q.question FROM question q WHERE q.id = %s"""', '[question_id[0]]'], {}), "('SELECT q.question FROM question q WHERE q.id = %s', [\n question_id[0]])\n", (7882, 7958), True, 'import datab.social_database as db\n'), ((8104, 8183), 'datab.social_database.exec_query', 'db.exec_query', (['"""SELECT u.email FROM users u WHERE u.id=%s"""', '[asked_about_id[0]]'], {}), "('SELECT u.email FROM users u WHERE u.id=%s', [asked_about_id[0]])\n", (8117, 8183), True, 'import datab.social_database as db\n'), ((8413, 8504), 'json.dumps', 'json.dumps', (["{'id': quiz_question_id, 'question': question_text, 'answers': answer_json}"], {}), "({'id': quiz_question_id, 'question': question_text, 'answers':\n answer_json})\n", (8423, 8504), False, 'import json\n'), ((8579, 8654), 'json.dumps', 'json.dumps', (["{'error': 'Not available questions for this user in this room'}"], {}), "({'error': 'Not available questions for this user in this room'})\n", (8589, 8654), False, 'import json\n'), ((9478, 9524), 'json.dumps', 'json.dumps', (["{'error': 'Internal server error'}"], {}), "({'error': 'Internal server error'})\n", (9488, 9524), False, 'import json\n'), ((2761, 2853), 'datab.social_database.exec_query', 'db.exec_query', (['"""INSERT INTO question (room_id, question) VALUES (%s, %s)"""', '[room_id, q]'], {}), "('INSERT INTO question (room_id, question) VALUES (%s, %s)', [\n room_id, q])\n", (2774, 2853), True, 'import datab.social_database as db\n'), ((5128, 5166), 'json.dumps', 'json.dumps', (["{'error': 'no JSON found'}"], {}), "({'error': 'no JSON found'})\n", (5138, 5166), False, 'import json\n'), ((6073, 6109), 'random.sample', 'random.sample', (['possible_questions', '(1)'], {}), '(possible_questions, 1)\n', (6086, 6109), False, 'import random\n'), ((6371, 6410), 'random.sample', 'random.sample', (['possible_users_to_ask', '(1)'], {}), '(possible_users_to_ask, 1)\n', (6384, 6410), False, 'import random\n'), ((7114, 7159), 'datab.social_database.get_answer', 'db.get_answer', (['question_id[0]', 'other_users[i]'], {}), '(question_id[0], other_users[i])\n', (7127, 7159), True, 'import datab.social_database as db\n')] |
aiven/astacus | astacus/node/snapshotter.py | 2d64e1f33e01d50a41127f41d9da3d1ab0ce0387 | """
Copyright (c) 2020 Aiven Ltd
See LICENSE for details
"""
from astacus.common import magic, utils
from astacus.common.ipc import SnapshotFile, SnapshotHash, SnapshotState
from astacus.common.progress import increase_worth_reporting, Progress
from pathlib import Path
from typing import Optional
import base64
import hashlib
import logging
import os
import threading
logger = logging.getLogger(__name__)
_hash = hashlib.blake2s
def hash_hexdigest_readable(f, *, read_buffer=1_000_000):
h = _hash()
while True:
data = f.read(read_buffer)
if not data:
break
h.update(data)
return h.hexdigest()
class Snapshotter:
"""Snapshotter keeps track of files on disk, and their hashes.
The hash on disk MAY change, which may require subsequent
    incremental snapshot and/or ignoring the files which have changed.
    The output to the outside is just the root object's hash, as well as a
    list of other hashes which correspond to the files referred to within
    the file list contained in the root object.
Note that any call to public API MUST be made with
    snapshotter.lock held. This is because Snapshotter is a process-wide
    utility that is shared across operations, possibly used from
    multiple threads, and the single-operation-only mode of operation
    is not exactly flawless (being able to start a new operation while an
    old one is still running is an intentional feature, but the new
    operation should eventually replace the old one). The lock itself
    might not need to be built into Snapshotter, but having it there
    enables asserting its state during public API calls.
"""
def __init__(self, *, src, dst, globs, parallel):
        assert globs  # the model default is empty; either the plugin or the configuration must supply them
self.src = Path(src)
self.dst = Path(dst)
self.globs = globs
self.relative_path_to_snapshotfile = {}
self.hexdigest_to_snapshotfiles = {}
self.parallel = parallel
self.lock = threading.Lock()
def _list_files(self, basepath: Path):
result_files = set()
for glob in self.globs:
for path in basepath.glob(glob):
if not path.is_file() or path.is_symlink():
continue
relpath = path.relative_to(basepath)
for parent in relpath.parents:
if parent.name == magic.ASTACUS_TMPDIR:
break
else:
result_files.add(relpath)
return sorted(result_files)
def _list_dirs_and_files(self, basepath: Path):
files = self._list_files(basepath)
dirs = {p.parent for p in files}
return sorted(dirs), files
def _add_snapshotfile(self, snapshotfile: SnapshotFile):
old_snapshotfile = self.relative_path_to_snapshotfile.get(snapshotfile.relative_path, None)
if old_snapshotfile:
self._remove_snapshotfile(old_snapshotfile)
self.relative_path_to_snapshotfile[snapshotfile.relative_path] = snapshotfile
if snapshotfile.hexdigest:
self.hexdigest_to_snapshotfiles.setdefault(snapshotfile.hexdigest, []).append(snapshotfile)
def _remove_snapshotfile(self, snapshotfile: SnapshotFile):
assert self.relative_path_to_snapshotfile[snapshotfile.relative_path] == snapshotfile
del self.relative_path_to_snapshotfile[snapshotfile.relative_path]
if snapshotfile.hexdigest:
self.hexdigest_to_snapshotfiles[snapshotfile.hexdigest].remove(snapshotfile)
def _snapshotfile_from_path(self, relative_path):
src_path = self.src / relative_path
st = src_path.stat()
return SnapshotFile(relative_path=relative_path, mtime_ns=st.st_mtime_ns, file_size=st.st_size)
def _get_snapshot_hash_list(self, relative_paths):
same = 0
lost = 0
for relative_path in relative_paths:
old_snapshotfile = self.relative_path_to_snapshotfile.get(relative_path)
try:
snapshotfile = self._snapshotfile_from_path(relative_path)
except FileNotFoundError:
lost += 1
if increase_worth_reporting(lost):
logger.debug("#%d. lost - %s disappeared before stat, ignoring", lost, self.src / relative_path)
continue
if old_snapshotfile:
snapshotfile.hexdigest = old_snapshotfile.hexdigest
snapshotfile.content_b64 = old_snapshotfile.content_b64
if old_snapshotfile == snapshotfile:
same += 1
if increase_worth_reporting(same):
logger.debug("#%d. same - %r in %s is same", same, old_snapshotfile, relative_path)
continue
yield snapshotfile
def get_snapshot_hashes(self):
assert self.lock.locked()
return [
SnapshotHash(hexdigest=dig, size=sf[0].file_size) for dig, sf in self.hexdigest_to_snapshotfiles.items() if sf
]
def get_snapshot_state(self):
assert self.lock.locked()
return SnapshotState(root_globs=self.globs, files=sorted(self.relative_path_to_snapshotfile.values()))
def _snapshot_create_missing_directories(self, *, src_dirs, dst_dirs):
changes = 0
for i, relative_dir in enumerate(set(src_dirs).difference(dst_dirs), 1):
dst_path = self.dst / relative_dir
dst_path.mkdir(parents=True, exist_ok=True)
if increase_worth_reporting(i):
logger.debug("#%d. new directory: %r", i, relative_dir)
changes += 1
return changes
def _snapshot_remove_extra_files(self, *, src_files, dst_files):
changes = 0
for i, relative_path in enumerate(set(dst_files).difference(src_files), 1):
dst_path = self.dst / relative_path
snapshotfile = self.relative_path_to_snapshotfile.get(relative_path)
if snapshotfile:
self._remove_snapshotfile(snapshotfile)
dst_path.unlink()
if increase_worth_reporting(i):
logger.debug("#%d. extra file: %r", i, relative_path)
changes += 1
return changes
def _snapshot_add_missing_files(self, *, src_files, dst_files):
existing = 0
disappeared = 0
changes = 0
for i, relative_path in enumerate(set(src_files).difference(dst_files), 1):
src_path = self.src / relative_path
dst_path = self.dst / relative_path
try:
os.link(src=src_path, dst=dst_path, follow_symlinks=False)
except FileExistsError:
# This happens only if snapshot is started twice at
# same time. While it is technically speaking upstream
# error, we rather handle it here than leave
# exceptions not handled.
existing += 1
if increase_worth_reporting(existing):
logger.debug("#%d. %s already existed, ignoring", existing, src_path)
continue
except FileNotFoundError:
disappeared += 1
if increase_worth_reporting(disappeared):
logger.debug("#%d. %s disappeared before linking, ignoring", disappeared, src_path)
continue
if increase_worth_reporting(i - disappeared):
logger.debug("#%d. new file: %r", i - disappeared, relative_path)
changes += 1
return changes
def snapshot(self, *, progress: Optional[Progress] = None):
assert self.lock.locked()
if progress is None:
progress = Progress()
src_dirs, src_files = self._list_dirs_and_files(self.src)
progress.start(1)
if self.src == self.dst:
# The src=dst mode should be used if and only if it is
# known that files will not disappear between snapshot and
# upload steps (e.g. Astacus controls the lifecycle of the
# files within). In that case, there is little point in
# making extra symlinks and we can just use the src
# directory contents as-is.
dst_dirs, dst_files = src_dirs, src_files
else:
progress.add_total(3)
dst_dirs, dst_files = self._list_dirs_and_files(self.dst)
# Create missing directories
changes = self._snapshot_create_missing_directories(src_dirs=src_dirs, dst_dirs=dst_dirs)
progress.add_success()
# Remove extra files
changes += self._snapshot_remove_extra_files(src_files=src_files, dst_files=dst_files)
progress.add_success()
# Add missing files
changes += self._snapshot_add_missing_files(src_files=src_files, dst_files=dst_files)
progress.add_success()
# We COULD also remove extra directories, but it is not
# probably really worth it and due to ignored files it
# actually might not even work.
# Then, create/update corresponding snapshotfile objects (old
# ones were already removed)
dst_dirs, dst_files = self._list_dirs_and_files(self.dst)
snapshotfiles = list(self._get_snapshot_hash_list(dst_files))
progress.add_total(len(snapshotfiles))
def _cb(snapshotfile):
# src may or may not be present; dst is present as it is in snapshot
with snapshotfile.open_for_reading(self.dst) as f:
if snapshotfile.file_size <= magic.EMBEDDED_FILE_SIZE:
snapshotfile.content_b64 = base64.b64encode(f.read()).decode()
else:
snapshotfile.hexdigest = hash_hexdigest_readable(f)
return snapshotfile
def _result_cb(*, map_in, map_out):
self._add_snapshotfile(map_out)
progress.add_success()
return True
changes += len(snapshotfiles)
utils.parallel_map_to(iterable=snapshotfiles, fun=_cb, result_callback=_result_cb, n=self.parallel)
# We initially started with 1 extra
progress.add_success()
return changes
| [((383, 410), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (400, 410), False, 'import logging\n'), ((1794, 1803), 'pathlib.Path', 'Path', (['src'], {}), '(src)\n', (1798, 1803), False, 'from pathlib import Path\n'), ((1823, 1832), 'pathlib.Path', 'Path', (['dst'], {}), '(dst)\n', (1827, 1832), False, 'from pathlib import Path\n'), ((2006, 2022), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (2020, 2022), False, 'import threading\n'), ((3701, 3793), 'astacus.common.ipc.SnapshotFile', 'SnapshotFile', ([], {'relative_path': 'relative_path', 'mtime_ns': 'st.st_mtime_ns', 'file_size': 'st.st_size'}), '(relative_path=relative_path, mtime_ns=st.st_mtime_ns,\n file_size=st.st_size)\n', (3713, 3793), False, 'from astacus.common.ipc import SnapshotFile, SnapshotHash, SnapshotState\n'), ((10082, 10186), 'astacus.common.utils.parallel_map_to', 'utils.parallel_map_to', ([], {'iterable': 'snapshotfiles', 'fun': '_cb', 'result_callback': '_result_cb', 'n': 'self.parallel'}), '(iterable=snapshotfiles, fun=_cb, result_callback=\n _result_cb, n=self.parallel)\n', (10103, 10186), False, 'from astacus.common import magic, utils\n'), ((4937, 4986), 'astacus.common.ipc.SnapshotHash', 'SnapshotHash', ([], {'hexdigest': 'dig', 'size': 'sf[0].file_size'}), '(hexdigest=dig, size=sf[0].file_size)\n', (4949, 4986), False, 'from astacus.common.ipc import SnapshotFile, SnapshotHash, SnapshotState\n'), ((5533, 5560), 'astacus.common.progress.increase_worth_reporting', 'increase_worth_reporting', (['i'], {}), '(i)\n', (5557, 5560), False, 'from astacus.common.progress import increase_worth_reporting, Progress\n'), ((6115, 6142), 'astacus.common.progress.increase_worth_reporting', 'increase_worth_reporting', (['i'], {}), '(i)\n', (6139, 6142), False, 'from astacus.common.progress import increase_worth_reporting, Progress\n'), ((7419, 7460), 'astacus.common.progress.increase_worth_reporting', 'increase_worth_reporting', (['(i - disappeared)'], {}), '(i - disappeared)\n', (7443, 7460), False, 'from astacus.common.progress import increase_worth_reporting, Progress\n'), ((7744, 7754), 'astacus.common.progress.Progress', 'Progress', ([], {}), '()\n', (7752, 7754), False, 'from astacus.common.progress import increase_worth_reporting, Progress\n'), ((6609, 6667), 'os.link', 'os.link', ([], {'src': 'src_path', 'dst': 'dst_path', 'follow_symlinks': '(False)'}), '(src=src_path, dst=dst_path, follow_symlinks=False)\n', (6616, 6667), False, 'import os\n'), ((4185, 4215), 'astacus.common.progress.increase_worth_reporting', 'increase_worth_reporting', (['lost'], {}), '(lost)\n', (4209, 4215), False, 'from astacus.common.progress import increase_worth_reporting, Progress\n'), ((4638, 4668), 'astacus.common.progress.increase_worth_reporting', 'increase_worth_reporting', (['same'], {}), '(same)\n', (4662, 4668), False, 'from astacus.common.progress import increase_worth_reporting, Progress\n'), ((6995, 7029), 'astacus.common.progress.increase_worth_reporting', 'increase_worth_reporting', (['existing'], {}), '(existing)\n', (7019, 7029), False, 'from astacus.common.progress import increase_worth_reporting, Progress\n'), ((7236, 7273), 'astacus.common.progress.increase_worth_reporting', 'increase_worth_reporting', (['disappeared'], {}), '(disappeared)\n', (7260, 7273), False, 'from astacus.common.progress import increase_worth_reporting, Progress\n')] |
richiware/colcon-gradle | colcon_gradle/task/gradle/build.py | 00b121def8c15abd1dca310d0ea4e1f34f98f4d1 | # Copyright 2018 Esteve Fernandez
# Licensed under the Apache License, Version 2.0
from distutils import dir_util
import glob
import os
from pathlib import Path
import shutil
from colcon_core.environment import create_environment_scripts
from colcon_core.logging import colcon_logger
from colcon_core.plugin_system import satisfies_version
from colcon_core.shell import create_environment_hook
from colcon_core.shell import get_command_environment
from colcon_core.task import run
from colcon_core.task import TaskExtensionPoint
from colcon_gradle.task.gradle import get_wrapper_executable
from colcon_gradle.task.gradle import GRADLE_EXECUTABLE
from colcon_gradle.task.gradle import has_wrapper_executable
logger = colcon_logger.getChild(__name__)
class GradleBuildTask(TaskExtensionPoint):
"""Build gradle packages."""
def __init__(self): # noqa: D107
super().__init__()
satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0')
def _build_file_tree(self, start_path):
out_dirnames = set()
out_filenames = set()
for dirname, dirnames, filenames in os.walk(start_path):
for subdirname in dirnames:
out_dirnames.add(
os.path.relpath(
os.path.join(dirname, subdirname), start=start_path))
for filename in filenames:
out_filenames.add(
os.path.relpath(
os.path.join(dirname, filename), start=start_path))
return (out_dirnames, out_filenames)
def add_arguments(self, *, parser): # noqa: D102
parser.add_argument(
'--gradle-args',
nargs='*', metavar='*', type=str.lstrip,
help='Pass arguments to Gradle projects. '
'Arguments matching other options must be prefixed by a space,\n'
'e.g. --gradle-args " --help"')
parser.add_argument(
'--gradle-task',
help='Run a specific task instead of the default task')
async def build( # noqa: D102
self, *, additional_hooks=None, skip_hook_creation=False
):
pkg = self.context.pkg
args = self.context.args
logger.info(
"Building Gradle package in '{args.path}'".format_map(locals()))
if additional_hooks is None:
additional_hooks = []
# add jars and classes to CLASSPATH with wildcards
# https://docs.oracle.com/javase/8/docs/technotes/tools/windows/classpath.html#A1100762
additional_hooks += create_environment_hook(
'classpath_jars', Path(args.install_base), pkg.name,
'CLASSPATH', os.path.join('share', pkg.name, 'java', '*'),
mode='prepend')
additional_hooks += create_environment_hook(
'classpath_classes', Path(args.install_base), pkg.name,
'CLASSPATH', os.path.join('share', pkg.name, 'java'),
mode='prepend')
try:
env = await get_command_environment(
'build', args.build_base, self.context.dependencies)
except RuntimeError as e:
logger.error(str(e))
return 1
rc = await self._build(args, env)
if rc and rc.returncode:
return rc.returncode
rc = await self._install(args, env)
if rc and rc.returncode:
return rc.returncode
if not skip_hook_creation:
create_environment_scripts(
pkg, args, additional_hooks=additional_hooks)
async def _build(self, args, env):
self.progress('build')
# remove anything on the destination tree but not in the source tree
src_package_src_dir = os.path.join(args.path, 'src')
dst_package_src_dir = os.path.join(args.build_base, 'src')
src_dirnames, src_filenames = self._build_file_tree(
src_package_src_dir)
dst_dirnames, dst_filenames = self._build_file_tree(
dst_package_src_dir)
prune_dirnames = dst_dirnames - src_dirnames
prune_filenames = dst_filenames - src_filenames
for prune_filename in prune_filenames:
os.remove(os.path.join(dst_package_src_dir, prune_filename))
for prune_dirname in prune_dirnames:
            if os.path.exists(os.path.join(dst_package_src_dir, prune_dirname)):
shutil.rmtree(os.path.join(dst_package_src_dir, prune_dirname))
# copy files from the source directory to the build one to avoid
# polluting the latter during the build process
dir_util.copy_tree(args.path, args.build_base, update=1)
# Gradle Executable
if has_wrapper_executable(args):
cmd = [str(get_wrapper_executable(args).absolute())]
elif GRADLE_EXECUTABLE is not None:
cmd = [GRADLE_EXECUTABLE]
else:
raise RuntimeError(
"Could not find 'gradle' or 'wrapper' executable")
# Gradle Task (by default 'assemble')
if args.gradle_task:
cmd += [args.gradle_task]
else:
cmd += ['assemble']
# Gradle Arguments
cmd += (args.gradle_args or [])
cmd += ['--stacktrace']
# Add install_base to environment in GRADLE_INSTALL_PREFIX
env['GRADLE_INSTALL_PREFIX'] = args.install_base
# invoke build step
return await run(
self.context, cmd, cwd=args.build_base, env=env)
async def _install(self, args, env):
self.progress('install')
pkg = self.context.pkg
# remove anything on the destination tree but not in the build tree
bld_package_jar_dir = os.path.join(args.build_base, 'build', 'libs')
dst_package_jar_dir = os.path.join(
args.install_base, 'share', pkg.name, 'java')
os.makedirs(dst_package_jar_dir, exist_ok=True)
bld_dirnames, bld_filenames = self._build_file_tree(
bld_package_jar_dir)
dst_dirnames, dst_filenames = self._build_file_tree(
dst_package_jar_dir)
prune_dirnames = dst_dirnames - bld_dirnames
prune_filenames = dst_filenames - bld_filenames
for prune_filename in prune_filenames:
os.remove(os.path.join(dst_package_jar_dir, prune_filename))
for prune_dirname in prune_dirnames:
            if os.path.exists(os.path.join(dst_package_jar_dir, prune_dirname)):
shutil.rmtree(
os.path.join(dst_package_jar_dir, prune_dirname))
for jar in glob.glob(os.path.join(bld_package_jar_dir, '*.jar')):
jar_filename = os.path.basename(jar)
shutil.copy2(jar, os.path.join(dst_package_jar_dir, jar_filename))
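# --- Illustrative sketch (not part of colcon-gradle): the pruning step in _build/_install
# above amounts to "walk both trees, diff the relative paths, delete whatever only exists
# in the destination". A minimal standalone version of that pattern, using throwaway
# temporary directories, could look like this:
if __name__ == '__main__':
    import os
    import shutil
    import tempfile
    def _relative_tree(root):
        dirnames_out, filenames_out = set(), set()
        for dirname, dirnames, filenames in os.walk(root):
            for d in dirnames:
                dirnames_out.add(os.path.relpath(os.path.join(dirname, d), start=root))
            for f in filenames:
                filenames_out.add(os.path.relpath(os.path.join(dirname, f), start=root))
        return dirnames_out, filenames_out
    src = tempfile.mkdtemp()
    dst = tempfile.mkdtemp()
    open(os.path.join(src, 'keep.txt'), 'w').close()
    open(os.path.join(dst, 'keep.txt'), 'w').close()
    open(os.path.join(dst, 'stale.txt'), 'w').close()
    _, src_files = _relative_tree(src)
    _, dst_files = _relative_tree(dst)
    for stale in dst_files - src_files:
        os.remove(os.path.join(dst, stale))
    print(sorted(os.listdir(dst)))  # only 'keep.txt' is left
    shutil.rmtree(src)
    shutil.rmtree(dst)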
| [((720, 752), 'colcon_core.logging.colcon_logger.getChild', 'colcon_logger.getChild', (['__name__'], {}), '(__name__)\n', (742, 752), False, 'from colcon_core.logging import colcon_logger\n'), ((905, 974), 'colcon_core.plugin_system.satisfies_version', 'satisfies_version', (['TaskExtensionPoint.EXTENSION_POINT_VERSION', '"""^1.0"""'], {}), "(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0')\n", (922, 974), False, 'from colcon_core.plugin_system import satisfies_version\n'), ((1123, 1142), 'os.walk', 'os.walk', (['start_path'], {}), '(start_path)\n', (1130, 1142), False, 'import os\n'), ((3723, 3753), 'os.path.join', 'os.path.join', (['args.path', '"""src"""'], {}), "(args.path, 'src')\n", (3735, 3753), False, 'import os\n'), ((3784, 3820), 'os.path.join', 'os.path.join', (['args.build_base', '"""src"""'], {}), "(args.build_base, 'src')\n", (3796, 3820), False, 'import os\n'), ((4550, 4606), 'distutils.dir_util.copy_tree', 'dir_util.copy_tree', (['args.path', 'args.build_base'], {'update': '(1)'}), '(args.path, args.build_base, update=1)\n', (4568, 4606), False, 'from distutils import dir_util\n'), ((4647, 4675), 'colcon_gradle.task.gradle.has_wrapper_executable', 'has_wrapper_executable', (['args'], {}), '(args)\n', (4669, 4675), False, 'from colcon_gradle.task.gradle import has_wrapper_executable\n'), ((5651, 5697), 'os.path.join', 'os.path.join', (['args.build_base', '"""build"""', '"""libs"""'], {}), "(args.build_base, 'build', 'libs')\n", (5663, 5697), False, 'import os\n'), ((5728, 5786), 'os.path.join', 'os.path.join', (['args.install_base', '"""share"""', 'pkg.name', '"""java"""'], {}), "(args.install_base, 'share', pkg.name, 'java')\n", (5740, 5786), False, 'import os\n'), ((5808, 5855), 'os.makedirs', 'os.makedirs', (['dst_package_jar_dir'], {'exist_ok': '(True)'}), '(dst_package_jar_dir, exist_ok=True)\n', (5819, 5855), False, 'import os\n'), ((2617, 2640), 'pathlib.Path', 'Path', (['args.install_base'], {}), '(args.install_base)\n', (2621, 2640), False, 'from pathlib import Path\n'), ((2677, 2721), 'os.path.join', 'os.path.join', (['"""share"""', 'pkg.name', '"""java"""', '"""*"""'], {}), "('share', pkg.name, 'java', '*')\n", (2689, 2721), False, 'import os\n'), ((2837, 2860), 'pathlib.Path', 'Path', (['args.install_base'], {}), '(args.install_base)\n', (2841, 2860), False, 'from pathlib import Path\n'), ((2897, 2936), 'os.path.join', 'os.path.join', (['"""share"""', 'pkg.name', '"""java"""'], {}), "('share', pkg.name, 'java')\n", (2909, 2936), False, 'import os\n'), ((3454, 3526), 'colcon_core.environment.create_environment_scripts', 'create_environment_scripts', (['pkg', 'args'], {'additional_hooks': 'additional_hooks'}), '(pkg, args, additional_hooks=additional_hooks)\n', (3480, 3526), False, 'from colcon_core.environment import create_environment_scripts\n'), ((4301, 4330), 'os.path.exists', 'os.path.exists', (['prune_dirname'], {}), '(prune_dirname)\n', (4315, 4330), False, 'import os\n'), ((5372, 5424), 'colcon_core.task.run', 'run', (['self.context', 'cmd'], {'cwd': 'args.build_base', 'env': 'env'}), '(self.context, cmd, cwd=args.build_base, env=env)\n', (5375, 5424), False, 'from colcon_core.task import run\n'), ((6336, 6365), 'os.path.exists', 'os.path.exists', (['prune_dirname'], {}), '(prune_dirname)\n', (6350, 6365), False, 'import os\n'), ((6498, 6540), 'os.path.join', 'os.path.join', (['bld_package_jar_dir', '"""*.jar"""'], {}), "(bld_package_jar_dir, '*.jar')\n", (6510, 6540), False, 'import os\n'), ((6570, 6591), 'os.path.basename', 'os.path.basename', 
(['jar'], {}), '(jar)\n', (6586, 6591), False, 'import os\n'), ((3004, 3080), 'colcon_core.shell.get_command_environment', 'get_command_environment', (['"""build"""', 'args.build_base', 'self.context.dependencies'], {}), "('build', args.build_base, self.context.dependencies)\n", (3027, 3080), False, 'from colcon_core.shell import get_command_environment\n'), ((4190, 4239), 'os.path.join', 'os.path.join', (['dst_package_src_dir', 'prune_filename'], {}), '(dst_package_src_dir, prune_filename)\n', (4202, 4239), False, 'import os\n'), ((6225, 6274), 'os.path.join', 'os.path.join', (['dst_package_jar_dir', 'prune_filename'], {}), '(dst_package_jar_dir, prune_filename)\n', (6237, 6274), False, 'import os\n'), ((6622, 6669), 'os.path.join', 'os.path.join', (['dst_package_jar_dir', 'jar_filename'], {}), '(dst_package_jar_dir, jar_filename)\n', (6634, 6669), False, 'import os\n'), ((4362, 4410), 'os.path.join', 'os.path.join', (['dst_package_src_dir', 'prune_dirname'], {}), '(dst_package_src_dir, prune_dirname)\n', (4374, 4410), False, 'import os\n'), ((6418, 6466), 'os.path.join', 'os.path.join', (['dst_package_jar_dir', 'prune_dirname'], {}), '(dst_package_jar_dir, prune_dirname)\n', (6430, 6466), False, 'import os\n'), ((1279, 1312), 'os.path.join', 'os.path.join', (['dirname', 'subdirname'], {}), '(dirname, subdirname)\n', (1291, 1312), False, 'import os\n'), ((1469, 1500), 'os.path.join', 'os.path.join', (['dirname', 'filename'], {}), '(dirname, filename)\n', (1481, 1500), False, 'import os\n'), ((4700, 4728), 'colcon_gradle.task.gradle.get_wrapper_executable', 'get_wrapper_executable', (['args'], {}), '(args)\n', (4722, 4728), False, 'from colcon_gradle.task.gradle import get_wrapper_executable\n')] |
eladc-git/model_optimization | model_compression_toolkit/keras/quantizer/gradient_ptq/utils.py | 46d1c893ca23e61d8ef7597184ad2ba6e2ae6e7a | # Copyright 2021 Sony Semiconductors Israel, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
from model_compression_toolkit.common.constants import MIN_THRESHOLD, THRESHOLD
def ste_ceil(x: tf.Tensor) -> tf.Tensor:
"""
Return the ceil values of a tensor.
"""
error = tf.stop_gradient(tf.math.ceil(x) - x)
return error + x
def ste_round(x: tf.Tensor) -> tf.Tensor:
"""
Return the rounded values of a tensor.
"""
error = tf.stop_gradient(tf.math.round(x) - x)
return error + x
def log2(x: tf.Tensor) -> tf.Tensor:
"""
Compute log2 of a tensor.
"""
return tf.math.log(x) / tf.math.log(2.0)
def power_of_two_max(max_tensor: tf.Tensor) -> tf.Tensor:
"""
Compute the power of two threshold for a tensor.
"""
return tf.math.pow(2.0, ste_ceil(log2(tf.maximum(max_tensor, MIN_THRESHOLD))))
def calculate_delta(max_tensor: tf.Tensor,
num_bits: int,
signed: bool) -> tf.Tensor:
"""
Compute the step size for the quantization.
"""
return max_tensor / (2 ** (num_bits - int(signed)))
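# Worked example (illustrative numbers): for a signed 8-bit quantizer with threshold 6.0,
# calculate_delta returns 6.0 / 2 ** (8 - 1) = 0.046875, so the representable grid spans
# [-128 * 0.046875, 127 * 0.046875] = [-6.0, 5.953125].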
def adjustable_steps(x: tf.Variable, t: float) -> tf.Tensor:
"""
    A function to gradually quantize a float variable to an integer taking values in [-1, 0, 1]
Args:
x: input float variable
t: temperature to control quantization
Returns:
semi-quantized variable
"""
return tf.sigmoid(tf.add(x, 1) / t) + tf.sigmoid(tf.add(x, -1) / t) - 1
def ste_clip(x: [tf.Tensor, tf.Variable], max_val=1, min_val=None) -> tf.Tensor:
"""
clip a variable between fixed values such that min_val<=output<=max_val
Args:
x: input variable
max_val: maximum value for clipping
min_val: minimum value for clipping (defaults to -max_val)
Returns:
clipped variable
"""
min_val = -max_val if min_val is None else min_val
return tf.stop_gradient(tf.math.minimum(tf.math.maximum(x, min_val), max_val) - x) + x
def symmetric_quantizer(input_tensor: tf.Tensor,
max_tensor: tf.Tensor,
num_bits: int,
signed: bool,
power_of_two: bool) -> tf.Tensor:
"""
Quantize a tensor symmetrically.
Args:
input_tensor: Tensor to quantize.
max_tensor: Tensor with max values to compute the threshold.
num_bits: Num of bits to use.
signed: Signedness of the quantization range.
power_of_two: Whether the threshold should be constrained or not.
Returns:
A quantized tensor.
"""
if power_of_two:
max_tensor = power_of_two_max(max_tensor)
delta = calculate_delta(max_tensor, num_bits, signed)
tensor_q = ste_round(input_tensor / delta)
min_int = -int(signed) * (2 ** (num_bits - int(signed)))
max_int = (2 ** (num_bits - int(signed))) - 1
return delta * tf.math.minimum(tf.math.maximum(tensor_q, min_int), max_int)
def symmetric_constrained_quantizer(input_tensor: tf.Tensor,
auxvar_tensor: tf.Variable,
max_tensor: tf.Tensor,
num_bits: int,
signed: bool,
power_of_two: bool,
max_lsbs_change: int = 1) -> tf.Tensor:
"""
Quantize a tensor symmetrically with maximum LSBs shift.
Args:
        input_tensor: Tensor to quantize. Values of this tensor are not changed during gptq.
        auxvar_tensor: Tensor that manifests the bit shift of the weight due to gptq
max_tensor: Tensor with max values to compute the threshold.
num_bits: Num of bits to use.
signed: Signedness of the quantization range.
power_of_two: Whether the threshold should be constrained or not.
max_lsbs_change: maximum number of LSBs that the auxvar is allowed to change
Returns:
A quantized tensor.
"""
if power_of_two:
max_tensor = power_of_two_max(max_tensor)
delta = calculate_delta(max_tensor, num_bits, signed)
tensor_q = ste_round(tf.stop_gradient(tf.round(input_tensor / delta)) + ste_clip(auxvar_tensor, max_val=max_lsbs_change))
min_int = -int(signed) * (2 ** (num_bits - int(signed)))
max_int = (2 ** (num_bits - int(signed))) - 1
return delta * ste_clip(tensor_q, max_val=max_int, min_val=min_int)
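# --- Illustrative usage sketch (not part of the toolkit's public API; the tensor values
# below are made up):
if __name__ == '__main__':
    x = tf.constant([-1.3, -0.2, 0.0, 0.4, 1.7])
    threshold = tf.reduce_max(tf.abs(x))
    x_q = symmetric_quantizer(x, threshold, num_bits=8, signed=True, power_of_two=True)
    # power_of_two=True rounds the 1.7 threshold up to 2.0, so the step size is
    # 2.0 / 2 ** 7 = 0.015625 and every input is snapped onto that signed 8-bit grid.
    print(x_q.numpy())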
| [((1244, 1258), 'tensorflow.math.log', 'tf.math.log', (['x'], {}), '(x)\n', (1255, 1258), True, 'import tensorflow as tf\n'), ((1261, 1277), 'tensorflow.math.log', 'tf.math.log', (['(2.0)'], {}), '(2.0)\n', (1272, 1277), True, 'import tensorflow as tf\n'), ((931, 946), 'tensorflow.math.ceil', 'tf.math.ceil', (['x'], {}), '(x)\n', (943, 946), True, 'import tensorflow as tf\n'), ((1105, 1121), 'tensorflow.math.round', 'tf.math.round', (['x'], {}), '(x)\n', (1118, 1121), True, 'import tensorflow as tf\n'), ((3564, 3598), 'tensorflow.math.maximum', 'tf.math.maximum', (['tensor_q', 'min_int'], {}), '(tensor_q, min_int)\n', (3579, 3598), True, 'import tensorflow as tf\n'), ((1449, 1486), 'tensorflow.maximum', 'tf.maximum', (['max_tensor', 'MIN_THRESHOLD'], {}), '(max_tensor, MIN_THRESHOLD)\n', (1459, 1486), True, 'import tensorflow as tf\n'), ((4825, 4855), 'tensorflow.round', 'tf.round', (['(input_tensor / delta)'], {}), '(input_tensor / delta)\n', (4833, 4855), True, 'import tensorflow as tf\n'), ((2064, 2076), 'tensorflow.add', 'tf.add', (['x', '(1)'], {}), '(x, 1)\n', (2070, 2076), True, 'import tensorflow as tf\n'), ((2095, 2108), 'tensorflow.add', 'tf.add', (['x', '(-1)'], {}), '(x, -1)\n', (2101, 2108), True, 'import tensorflow as tf\n'), ((2579, 2606), 'tensorflow.math.maximum', 'tf.math.maximum', (['x', 'min_val'], {}), '(x, min_val)\n', (2594, 2606), True, 'import tensorflow as tf\n')] |
btashton/pygments | pygments/lexers/tnt.py | ceaad0372055ed0064121020fea032fdda429779 | # -*- coding: utf-8 -*-
"""
pygments.lexers.tnt
~~~~~~~~~~~~~~~~~~~
Lexer for Typographic Number Theory.
:copyright: Copyright 2019-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer
from pygments.token import Text, Comment, Operator, Keyword, Name, Number, \
Punctuation, Error
__all__ = ['TNTLexer']
class TNTLexer(Lexer):
"""
Lexer for Typographic Number Theory, as described in the book
Gödel, Escher, Bach, by Douglas R. Hofstadter,
or as summarized here:
https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt
.. versionadded:: 2.7
"""
name = 'Typographic Number Theory'
aliases = ['tnt']
filenames = ['*.tnt']
cur = []
LOGIC = set('⊃→]&∧^|∨Vv')
OPERATORS = set('+.⋅*')
VARIABLES = set('abcde')
PRIMES = set("'′")
NEGATORS = set('~!')
QUANTIFIERS = set('AE∀∃')
NUMBERS = set('0123456789')
WHITESPACE = set('\t \v\n')
RULES = re.compile('''(?xi)
joining | separation | double-tilde | fantasy\\ rule
| carry[- ]over(?:\\ of)?(?:\\ line)?\\ ([0-9]+) | detachment
| contrapositive | De\\ Morgan | switcheroo
| specification | generalization | interchange
| existence | symmetry | transitivity
| add\\ S | drop\\ S | induction
| axiom\\ ([1-5]) | premise | push | pop
''')
LINENOS = re.compile(r'(?:[0-9]+)(?:(?:, ?|,? and )(?:[0-9]+))*')
COMMENT = re.compile(r'\[[^\n\]]+\]')
def whitespace(self, start, text, required=False):
"""Tokenize whitespace."""
end = start
try:
while text[end] in self.WHITESPACE:
end += 1
except IndexError:
end = len(text)
if required:
assert end != start
if end != start:
self.cur.append((start, Text, text[start:end]))
return end
def variable(self, start, text):
"""Tokenize a variable."""
assert text[start] in self.VARIABLES
end = start+1
while text[end] in self.PRIMES:
end += 1
self.cur.append((start, Name.Variable, text[start:end]))
return end
def term(self, start, text):
"""Tokenize a term."""
if text[start] == 'S': # S...S(...) or S...0
end = start+1
while text[end] == 'S':
end += 1
self.cur.append((start, Number.Integer, text[start:end]))
return self.term(end, text)
if text[start] == '0': # the singleton 0
self.cur.append((start, Number.Integer, text[start]))
return start+1
if text[start] in self.VARIABLES: # a''...
return self.variable(start, text)
if text[start] == '(': # (...+...)
self.cur.append((start, Punctuation, text[start]))
start = self.term(start+1, text)
assert text[start] in self.OPERATORS
self.cur.append((start, Operator, text[start]))
start = self.term(start+1, text)
assert text[start] == ')'
self.cur.append((start, Punctuation, text[start]))
return start+1
raise AssertionError # no matches
def formula(self, start, text):
"""Tokenize a formula."""
if text[start] in '[]': # fantasy push or pop
self.cur.append((start, Keyword, text[start]))
return start+1
if text[start] in self.NEGATORS: # ~<...>
end = start+1
while text[end] in self.NEGATORS:
end += 1
self.cur.append((start, Operator, text[start:end]))
return self.formula(end, text)
if text[start] in self.QUANTIFIERS: # Aa:<...>
self.cur.append((start, Keyword.Declaration, text[start]))
start = self.variable(start+1, text)
assert text[start] == ':'
self.cur.append((start, Punctuation, text[start]))
return self.formula(start+1, text)
if text[start] == '<': # <...&...>
self.cur.append((start, Punctuation, text[start]))
start = self.formula(start+1, text)
assert text[start] in self.LOGIC
self.cur.append((start, Operator, text[start]))
start = self.formula(start+1, text)
assert text[start] == '>'
self.cur.append((start, Punctuation, text[start]))
return start+1
# ...=...
start = self.term(start, text)
assert text[start] == '='
self.cur.append((start, Operator, text[start]))
start = self.term(start+1, text)
return start
def rule(self, start, text):
"""Tokenize a rule."""
match = self.RULES.match(text, start)
assert match is not None
groups = sorted(match.regs[1:]) # exclude whole match
for group in groups:
if group[0] >= 0: # this group matched
self.cur.append((start, Keyword, text[start:group[0]]))
self.cur.append((group[0], Number.Integer,
text[group[0]:group[1]]))
if group[1] != match.end():
self.cur.append((group[1], Keyword,
text[group[1]:match.end()]))
break
else:
self.cur.append((start, Keyword, text[start:match.end()]))
return match.end()
def lineno(self, start, text):
"""Tokenize a line marker."""
end = start
while text[end] not in self.NUMBERS:
end += 1
self.cur.append((start, Punctuation, text[start]))
self.cur.append((start+1, Text, text[start+1:end]))
start = end
match = self.LINENOS.match(text, start)
assert match is not None
assert text[match.end()] == ')'
self.cur.append((match.start(), Number.Integer, match.group(0)))
self.cur.append((match.end(), Punctuation, text[match.end()]))
return match.end() + 1
def error_till_line_end(self, start, text):
"""Mark everything from ``start`` to the end of the line as Error."""
end = start
try:
while text[end] != '\n': # there's whitespace in rules
end += 1
except IndexError:
end = len(text)
if end != start:
self.cur.append((start, Error, text[start:end]))
end = self.whitespace(end, text)
return end
def get_tokens_unprocessed(self, text):
"""Returns a list of TNT tokens."""
self.cur = []
start = end = self.whitespace(0, text)
while start <= end < len(text):
# try line number
while text[end] in self.NUMBERS:
end += 1
if end != start: # actual number present
self.cur.append((start, Number.Integer, text[start:end]))
# whitespace is required after a line number
orig = len(self.cur)
try:
start = end = self.whitespace(end, text, True)
except AssertionError:
del self.cur[orig:]
start = end = self.error_till_line_end(end, text)
continue
# at this point it could be a comment
match = self.COMMENT.match(text, start)
if match is not None:
self.cur.append((start, Comment, text[start:match.end()]))
start = end = match.end()
# anything after the closing bracket is invalid
start = end = self.error_till_line_end(start, text)
# do not attempt to process the rest
continue
del match
# one formula, possibly containing subformulae
orig = len(self.cur)
try:
start = end = self.formula(start, text)
except AssertionError: # not well-formed
del self.cur[orig:]
while text[end] not in self.WHITESPACE:
end += 1
self.cur.append((start, Error, text[start:end]))
start = end
# skip whitespace after formula
orig = len(self.cur)
try:
start = end = self.whitespace(end, text, True)
except AssertionError:
del self.cur[orig:]
start = end = self.error_till_line_end(start, text)
continue
# rule proving this formula a theorem
orig = len(self.cur)
try:
start = end = self.rule(start, text)
except AssertionError:
del self.cur[orig:]
start = end = self.error_till_line_end(start, text)
continue
# skip whitespace after rule
start = end = self.whitespace(end, text)
# line marker
            if start < len(text) and text[start] == '(':
orig = len(self.cur)
try:
start = end = self.lineno(start, text)
except AssertionError:
del self.cur[orig:]
start = end = self.error_till_line_end(start, text)
continue
start = end = self.whitespace(start, text)
return self.cur
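# --- Illustrative usage sketch (not part of the lexer): the sample line below is axiom 1
# of TNT, "Aa:~Sa=0", followed by a justification and a line marker.
if __name__ == '__main__':
    for token, value in TNTLexer().get_tokens('0 Aa:~Sa=0 axiom 1 (1)\n'):
        print(token, repr(value))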
| [((1048, 1460), 're.compile', 're.compile', (['"""(?xi)\n joining | separation | double-tilde | fantasy\\\\ rule\n | carry[- ]over(?:\\\\ of)?(?:\\\\ line)?\\\\ ([0-9]+) | detachment\n | contrapositive | De\\\\ Morgan | switcheroo\n | specification | generalization | interchange\n | existence | symmetry | transitivity\n | add\\\\ S | drop\\\\ S | induction\n | axiom\\\\ ([1-5]) | premise | push | pop\n """'], {}), '(\n """(?xi)\n joining | separation | double-tilde | fantasy\\\\ rule\n | carry[- ]over(?:\\\\ of)?(?:\\\\ line)?\\\\ ([0-9]+) | detachment\n | contrapositive | De\\\\ Morgan | switcheroo\n | specification | generalization | interchange\n | existence | symmetry | transitivity\n | add\\\\ S | drop\\\\ S | induction\n | axiom\\\\ ([1-5]) | premise | push | pop\n """\n )\n', (1058, 1460), False, 'import re\n'), ((1465, 1519), 're.compile', 're.compile', (['"""(?:[0-9]+)(?:(?:, ?|,? and )(?:[0-9]+))*"""'], {}), "('(?:[0-9]+)(?:(?:, ?|,? and )(?:[0-9]+))*')\n", (1475, 1519), False, 'import re\n'), ((1535, 1565), 're.compile', 're.compile', (['"""\\\\[[^\\\\n\\\\]]+\\\\]"""'], {}), "('\\\\[[^\\\\n\\\\]]+\\\\]')\n", (1545, 1565), False, 'import re\n')] |
cheradenine/Django-CRM | contacts/urls.py | 692572ced050d314c1f880af8b4000c97cbf7440 | from django.urls import path
from contacts.views import (
ContactsListView, CreateContactView, ContactDetailView,
UpdateContactView, RemoveContactView,
GetContactsView, AddCommentView, UpdateCommentView,
DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView)
app_name = 'contacts'
urlpatterns = [
path('list/', ContactsListView.as_view(), name='list'),
path('create/', CreateContactView.as_view(), name='add_contact'),
path('<int:pk>/view/', ContactDetailView.as_view(), name="view_contact"),
path('<int:pk>/edit/', UpdateContactView.as_view(), name="edit_contact"),
path('<int:pk>/delete/',
RemoveContactView.as_view(),
name="remove_contact"),
path('get/list/', GetContactsView.as_view(), name="get_contacts"),
path('comment/add/', AddCommentView.as_view(), name="add_comment"),
path('comment/edit/', UpdateCommentView.as_view(), name="edit_comment"),
path('comment/remove/',
DeleteCommentView.as_view(),
name="remove_comment"),
path('attachment/add/',
AddAttachmentsView.as_view(),
name="add_attachment"),
path('attachment/remove/', DeleteAttachmentsView.as_view(),
name="remove_attachment"),
]
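# --- Illustrative sketch (not part of this module): with these patterns registered under
# the 'contacts' namespace, other code can resolve URLs by name instead of hard-coding
# paths; the exact prefix depends on how the project-level urls.py includes this file.
#
#     from django.urls import reverse
#     reverse('contacts:list')                     # e.g. '/contacts/list/'
#     reverse('contacts:edit_contact', args=[42])  # e.g. '/contacts/42/edit/'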
| [((341, 367), 'contacts.views.ContactsListView.as_view', 'ContactsListView.as_view', ([], {}), '()\n', (365, 367), False, 'from contacts.views import ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView\n'), ((403, 430), 'contacts.views.CreateContactView.as_view', 'CreateContactView.as_view', ([], {}), '()\n', (428, 430), False, 'from contacts.views import ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView\n'), ((480, 507), 'contacts.views.ContactDetailView.as_view', 'ContactDetailView.as_view', ([], {}), '()\n', (505, 507), False, 'from contacts.views import ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView\n'), ((558, 585), 'contacts.views.UpdateContactView.as_view', 'UpdateContactView.as_view', ([], {}), '()\n', (583, 585), False, 'from contacts.views import ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView\n'), ((647, 674), 'contacts.views.RemoveContactView.as_view', 'RemoveContactView.as_view', ([], {}), '()\n', (672, 674), False, 'from contacts.views import ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView\n'), ((732, 757), 'contacts.views.GetContactsView.as_view', 'GetContactsView.as_view', ([], {}), '()\n', (755, 757), False, 'from contacts.views import ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView\n'), ((807, 831), 'contacts.views.AddCommentView.as_view', 'AddCommentView.as_view', ([], {}), '()\n', (829, 831), False, 'from contacts.views import ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView\n'), ((880, 907), 'contacts.views.UpdateCommentView.as_view', 'UpdateCommentView.as_view', ([], {}), '()\n', (905, 907), False, 'from contacts.views import ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView\n'), ((968, 995), 'contacts.views.DeleteCommentView.as_view', 'DeleteCommentView.as_view', ([], {}), '()\n', (993, 995), False, 'from contacts.views import ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView\n'), ((1068, 1096), 'contacts.views.AddAttachmentsView.as_view', 'AddAttachmentsView.as_view', ([], {}), '()\n', (1094, 1096), False, 'from contacts.views import ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, 
UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView\n'), ((1162, 1193), 'contacts.views.DeleteAttachmentsView.as_view', 'DeleteAttachmentsView.as_view', ([], {}), '()\n', (1191, 1193), False, 'from contacts.views import ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView\n')] |
marpie/PythonForWindows | windows/winobject/network.py | b253bc5873e7d97087ed22f2753b51fc6880ec18 | import windows
import ctypes
import socket
import struct
from windows import winproxy
import windows.generated_def as gdef
from windows.com import interfaces as cominterfaces
from windows.generated_def.winstructs import *
from windows.generated_def.windef import *
class TCP4Connection(MIB_TCPROW_OWNER_PID):
"""A TCP4 socket (connected or listening)"""
@property
def established(self):
"""``True`` if connection is established else it's a listening socket"""
return self.dwState == MIB_TCP_STATE_ESTAB
@property
def remote_port(self):
""":type: :class:`int`"""
if not self.established:
return None
return socket.ntohs(self.dwRemotePort)
@property
def local_port(self):
""":type: :class:`int`"""
return socket.ntohs(self.dwLocalPort)
@property
def local_addr(self):
"""Local address IP (x.x.x.x)
:type: :class:`str`"""
return socket.inet_ntoa(struct.pack("<I", self.dwLocalAddr))
@property
def remote_addr(self):
"""remote address IP (x.x.x.x)
:type: :class:`str`"""
if not self.established:
return None
return socket.inet_ntoa(struct.pack("<I", self.dwRemoteAddr))
@property
def remote_proto(self):
"""Identification of the protocol associated with the remote port.
Equals ``remote_port`` if no protocol is associated with it.
:type: :class:`str` or :class:`int`
"""
try:
return socket.getservbyport(self.remote_port, 'tcp')
except socket.error:
return self.remote_port
@property
def remote_host(self):
"""Identification of the remote hostname.
Equals ``remote_addr`` if the resolution fails
:type: :class:`str` or :class:`int`
"""
try:
return socket.gethostbyaddr(self.remote_addr)
except socket.error:
return self.remote_addr
def close(self):
"""Close the connection <require elevated process>"""
closing = MIB_TCPROW()
closing.dwState = MIB_TCP_STATE_DELETE_TCB
closing.dwLocalAddr = self.dwLocalAddr
closing.dwLocalPort = self.dwLocalPort
closing.dwRemoteAddr = self.dwRemoteAddr
closing.dwRemotePort = self.dwRemotePort
return winproxy.SetTcpEntry(ctypes.byref(closing))
def __repr__(self):
if not self.established:
return "<TCP IPV4 Listening socket on {0}:{1}>".format(self.local_addr, self.local_port)
return "<TCP IPV4 Connection {s.local_addr}:{s.local_port} -> {s.remote_addr}:{s.remote_port}>".format(s=self)
class TCP6Connection(MIB_TCP6ROW_OWNER_PID):
"""A TCP6 socket (connected or listening)"""
@staticmethod
def _str_ipv6_addr(addr):
return ":".join(c.encode('hex') for c in addr)
@property
def established(self):
"""``True`` if connection is established else it's a listening socket"""
return self.dwState == MIB_TCP_STATE_ESTAB
@property
def remote_port(self):
""":type: :class:`int`"""
if not self.established:
return None
return socket.ntohs(self.dwRemotePort)
@property
def local_port(self):
""":type: :class:`int`"""
return socket.ntohs(self.dwLocalPort)
@property
def local_addr(self):
"""Local address IP
:type: :class:`str`"""
return self._str_ipv6_addr(self.ucLocalAddr)
@property
def remote_addr(self):
"""remote address IP
:type: :class:`str`"""
if not self.established:
return None
return self._str_ipv6_addr(self.ucRemoteAddr)
@property
def remote_proto(self):
"""Equals to ``self.remote_port`` for Ipv6"""
return self.remote_port
@property
def remote_host(self):
"""Equals to ``self.remote_addr`` for Ipv6"""
return self.remote_addr
def close(self):
raise NotImplementedError("Closing IPV6 connection non implemented")
def __repr__(self):
if not self.established:
return "<TCP IPV6 Listening socket on {0}:{1}>".format(self.local_addr, self.local_port)
return "<TCP IPV6 Connection {0}:{1} -> {2}:{3}>".format(self.local_addr, self.local_port, self.remote_addr, self.remote_port)
def get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer):
x = windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer(buffer)
nb_entry = x.dwNumEntries
class _GENERATED_MIB_TCPTABLE_OWNER_PID(ctypes.Structure):
_fields_ = [
("dwNumEntries", DWORD),
("table", TCP4Connection * nb_entry),
]
return _GENERATED_MIB_TCPTABLE_OWNER_PID.from_buffer(buffer)
def get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer):
x = windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer)
nb_entry = x.dwNumEntries
# Struct _MIB_TCP6TABLE_OWNER_PID definitions
class _GENERATED_MIB_TCP6TABLE_OWNER_PID(Structure):
_fields_ = [
("dwNumEntries", DWORD),
("table", TCP6Connection * nb_entry),
]
return _GENERATED_MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer)
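# --- Illustrative sketch (not part of PythonForWindows): both helpers above handle the
# Win32 "DWORD count followed by a variable-length array" layout by generating a ctypes
# Structure whose array length is read out of the buffer first. A self-contained toy
# version of the same pattern, with made-up entry values:
if __name__ == "__main__":
    raw = bytearray(struct.pack("<III", 2, 10, 20))  # dwNumEntries=2, then two DWORD entries
    nb_entry = ctypes.c_uint32.from_buffer(raw).value
    class _GENERATED_TOY_TABLE(ctypes.Structure):
        _fields_ = [
            ("dwNumEntries", ctypes.c_uint32),
            ("table", ctypes.c_uint32 * nb_entry),
        ]
    toy = _GENERATED_TOY_TABLE.from_buffer(raw)
    print("%d entries: %r" % (toy.dwNumEntries, list(toy.table)))  # 2 entries: [10, 20]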
class Firewall(cominterfaces.INetFwPolicy2):
"""The windows firewall"""
@property
def rules(self):
"""The rules of the firewall
:type: [:class:`FirewallRule`] -- A list of rule
"""
ifw_rules = cominterfaces.INetFwRules()
self.get_Rules(ifw_rules)
nb_rules = gdef.LONG()
ifw_rules.get_Count(nb_rules)
unknw = cominterfaces.IUnknown()
ifw_rules.get__NewEnum(unknw)
pVariant = cominterfaces.IEnumVARIANT()
unknw.QueryInterface(pVariant.IID, pVariant)
count = gdef.ULONG()
var = windows.com.ImprovedVariant()
rules = []
for i in range(nb_rules.value):
pVariant.Next(1, var, count)
if not count.value:
break
rule = FirewallRule()
idisp = var.asdispatch
idisp.QueryInterface(rule.IID, rule)
rules.append(rule)
return rules
@property
def current_profile_types(self):
"""Mask of the profiles currently enabled
:type: :class:`long`
"""
cpt = gdef.LONG()
self.get_CurrentProfileTypes(cpt)
return cpt.value
@property
def enabled(self):
"""A maping of the active firewall profiles
{
``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_DOMAIN(0x1L)``: ``True`` or ``False``,
``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PRIVATE(0x2L)``: ``True`` or ``False``,
``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PUBLIC(0x4L)``: ``True`` or ``False``,
}
:type: :class:`dict`
"""
profiles = [gdef.NET_FW_PROFILE2_DOMAIN, gdef.NET_FW_PROFILE2_PRIVATE, gdef.NET_FW_PROFILE2_PUBLIC]
return {prof: self.enabled_for_profile_type(prof) for prof in profiles}
def enabled_for_profile_type(self, profile_type):
enabled = gdef.VARIANT_BOOL()
self.get_FirewallEnabled(profile_type, enabled)
return enabled.value
class FirewallRule(cominterfaces.INetFwRule):
"""A rule of the firewall"""
@property
def name(self):
"""Name of the rule
:type: :class:`unicode`
"""
name = gdef.BSTR()
self.get_Name(name)
return name.value
@property
def description(self):
"""Description of the rule
:type: :class:`unicode`
"""
description = gdef.BSTR()
self.get_Description(description)
return description.value
@property
def application_name(self):
"""Name of the application to which apply the rule
:type: :class:`unicode`
"""
applicationname = gdef.BSTR()
self.get_ApplicationName(applicationname)
return applicationname.value
@property
def service_name(self):
"""Name of the service to which apply the rule
:type: :class:`unicode`
"""
servicename = gdef.BSTR()
self.get_ServiceName(servicename)
return servicename.value
@property
def protocol(self):
"""Protocol to which apply the rule
:type: :class:`long`
"""
protocol = gdef.LONG()
self.get_Protocol(protocol)
return protocol.value
@property
def local_address(self):
"""Local address of the rule
:type: :class:`unicode`
"""
local_address = gdef.BSTR()
self.get_LocalAddresses(local_address)
return local_address.value
@property
def remote_address(self):
"""Remote address of the rule
:type: :class:`unicode`
"""
remote_address = gdef.BSTR()
self.get_RemoteAddresses(remote_address)
return remote_address.value
@property
def direction(self):
"""Direction of the rule, values might be:
* ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_IN(0x1L)``
* ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_OUT(0x2L)``
subclass of :class:`long`
"""
direction = gdef.NET_FW_RULE_DIRECTION()
self.get_Direction(direction)
return direction.value
@property
def interface_types(self):
"""Types of interface of the rule
:type: :class:`unicode`
"""
interface_type = gdef.BSTR()
self.get_InterfaceTypes(interface_type)
return interface_type.value
@property
def local_port(self):
"""Local port of the rule
:type: :class:`unicode`
"""
local_port = gdef.BSTR()
self.get_LocalPorts(local_port)
return local_port.value
@property
def remote_port(self):
"""Remote port of the rule
:type: :class:`unicode`
"""
remote_port = gdef.BSTR()
self.get_RemotePorts(remote_port)
return remote_port.value
@property
def action(self):
"""Action of the rule, values might be:
* ``NET_FW_ACTION_.NET_FW_ACTION_BLOCK(0x0L)``
* ``NET_FW_ACTION_.NET_FW_ACTION_ALLOW(0x1L)``
subclass of :class:`long`
"""
action = gdef.NET_FW_ACTION()
self.get_Action(action)
return action.value
@property
def enabled(self):
"""``True`` if rule is enabled"""
enabled = gdef.VARIANT_BOOL()
self.get_Enabled(enabled)
return enabled.value
@property
def grouping(self):
"""Grouping of the rule
:type: :class:`unicode`
"""
grouping = gdef.BSTR()
self.get_RemotePorts(grouping)
return grouping.value
@property
def icmp_type_and_code(self):
icmp_type_and_code = gdef.BSTR()
self.get_RemotePorts(icmp_type_and_code)
return icmp_type_and_code.value
def __repr__(self):
return u'<{0} "{1}">'.format(type(self).__name__, self.name).encode("ascii", errors='backslashreplace')
class Network(object):
NetFwPolicy2 = windows.com.IID.from_string("E2B3C97F-6AE1-41AC-817A-F6F92166D7DD")
@property
def firewall(self):
"""The firewall of the system
:type: :class:`Firewall`
"""
windows.com.init()
firewall = Firewall()
windows.com.create_instance(self.NetFwPolicy2, firewall)
return firewall
@staticmethod
def _get_tcp_ipv4_sockets():
size = ctypes.c_uint(0)
try:
winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET)
except winproxy.IphlpapiError:
pass # Allow us to set size to the needed value
buffer = (ctypes.c_char * size.value)()
winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET)
t = get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer)
return list(t.table)
@staticmethod
def _get_tcp_ipv6_sockets():
size = ctypes.c_uint(0)
try:
winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET6)
except winproxy.IphlpapiError:
pass # Allow us to set size to the needed value
buffer = (ctypes.c_char * size.value)()
winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET6)
t = get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer)
return list(t.table)
ipv4 = property(lambda self: self._get_tcp_ipv4_sockets())
"""List of TCP IPv4 socket (connection and listening)
:type: [:class:`TCP4Connection`]"""
ipv6 = property(lambda self: self._get_tcp_ipv6_sockets())
"""List of TCP IPv6 socket (connection and listening)
:type: [:class:`TCP6Connection`]
"""
| [((4444, 4519), 'windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer', 'windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer', (['buffer'], {}), '(buffer)\n', (4511, 4519), False, 'import windows\n'), ((4877, 4953), 'windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer', 'windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer', (['buffer'], {}), '(buffer)\n', (4945, 4953), False, 'import windows\n'), ((11219, 11286), 'windows.com.IID.from_string', 'windows.com.IID.from_string', (['"""E2B3C97F-6AE1-41AC-817A-F6F92166D7DD"""'], {}), "('E2B3C97F-6AE1-41AC-817A-F6F92166D7DD')\n", (11246, 11286), False, 'import windows\n'), ((682, 713), 'socket.ntohs', 'socket.ntohs', (['self.dwRemotePort'], {}), '(self.dwRemotePort)\n', (694, 713), False, 'import socket\n'), ((804, 834), 'socket.ntohs', 'socket.ntohs', (['self.dwLocalPort'], {}), '(self.dwLocalPort)\n', (816, 834), False, 'import socket\n'), ((3211, 3242), 'socket.ntohs', 'socket.ntohs', (['self.dwRemotePort'], {}), '(self.dwRemotePort)\n', (3223, 3242), False, 'import socket\n'), ((3333, 3363), 'socket.ntohs', 'socket.ntohs', (['self.dwLocalPort'], {}), '(self.dwLocalPort)\n', (3345, 3363), False, 'import socket\n'), ((5532, 5559), 'windows.com.interfaces.INetFwRules', 'cominterfaces.INetFwRules', ([], {}), '()\n', (5557, 5559), True, 'from windows.com import interfaces as cominterfaces\n'), ((5614, 5625), 'windows.generated_def.LONG', 'gdef.LONG', ([], {}), '()\n', (5623, 5625), True, 'import windows.generated_def as gdef\n'), ((5681, 5705), 'windows.com.interfaces.IUnknown', 'cominterfaces.IUnknown', ([], {}), '()\n', (5703, 5705), True, 'from windows.com import interfaces as cominterfaces\n'), ((5764, 5792), 'windows.com.interfaces.IEnumVARIANT', 'cominterfaces.IEnumVARIANT', ([], {}), '()\n', (5790, 5792), True, 'from windows.com import interfaces as cominterfaces\n'), ((5863, 5875), 'windows.generated_def.ULONG', 'gdef.ULONG', ([], {}), '()\n', (5873, 5875), True, 'import windows.generated_def as gdef\n'), ((5890, 5919), 'windows.com.ImprovedVariant', 'windows.com.ImprovedVariant', ([], {}), '()\n', (5917, 5919), False, 'import windows\n'), ((6403, 6414), 'windows.generated_def.LONG', 'gdef.LONG', ([], {}), '()\n', (6412, 6414), True, 'import windows.generated_def as gdef\n'), ((7164, 7183), 'windows.generated_def.VARIANT_BOOL', 'gdef.VARIANT_BOOL', ([], {}), '()\n', (7181, 7183), True, 'import windows.generated_def as gdef\n'), ((7473, 7484), 'windows.generated_def.BSTR', 'gdef.BSTR', ([], {}), '()\n', (7482, 7484), True, 'import windows.generated_def as gdef\n'), ((7683, 7694), 'windows.generated_def.BSTR', 'gdef.BSTR', ([], {}), '()\n', (7692, 7694), True, 'import windows.generated_def as gdef\n'), ((7947, 7958), 'windows.generated_def.BSTR', 'gdef.BSTR', ([], {}), '()\n', (7956, 7958), True, 'import windows.generated_def as gdef\n'), ((8211, 8222), 'windows.generated_def.BSTR', 'gdef.BSTR', ([], {}), '()\n', (8220, 8222), True, 'import windows.generated_def as gdef\n'), ((8442, 8453), 'windows.generated_def.LONG', 'gdef.LONG', ([], {}), '()\n', (8451, 8453), True, 'import windows.generated_def as gdef\n'), ((8670, 8681), 'windows.generated_def.BSTR', 'gdef.BSTR', ([], {}), '()\n', (8679, 8681), True, 'import windows.generated_def as gdef\n'), ((8917, 8928), 'windows.generated_def.BSTR', 'gdef.BSTR', ([], {}), '()\n', (8926, 8928), True, 'import windows.generated_def as gdef\n'), ((9306, 9334), 'windows.generated_def.NET_FW_RULE_DIRECTION', 'gdef.NET_FW_RULE_DIRECTION', 
([], {}), '()\n', (9332, 9334), True, 'import windows.generated_def as gdef\n'), ((9562, 9573), 'windows.generated_def.BSTR', 'gdef.BSTR', ([], {}), '()\n', (9571, 9573), True, 'import windows.generated_def as gdef\n'), ((9799, 9810), 'windows.generated_def.BSTR', 'gdef.BSTR', ([], {}), '()\n', (9808, 9810), True, 'import windows.generated_def as gdef\n'), ((10027, 10038), 'windows.generated_def.BSTR', 'gdef.BSTR', ([], {}), '()\n', (10036, 10038), True, 'import windows.generated_def as gdef\n'), ((10382, 10402), 'windows.generated_def.NET_FW_ACTION', 'gdef.NET_FW_ACTION', ([], {}), '()\n', (10400, 10402), True, 'import windows.generated_def as gdef\n'), ((10561, 10580), 'windows.generated_def.VARIANT_BOOL', 'gdef.VARIANT_BOOL', ([], {}), '()\n', (10578, 10580), True, 'import windows.generated_def as gdef\n'), ((10779, 10790), 'windows.generated_def.BSTR', 'gdef.BSTR', ([], {}), '()\n', (10788, 10790), True, 'import windows.generated_def as gdef\n'), ((10938, 10949), 'windows.generated_def.BSTR', 'gdef.BSTR', ([], {}), '()\n', (10947, 10949), True, 'import windows.generated_def as gdef\n'), ((11418, 11436), 'windows.com.init', 'windows.com.init', ([], {}), '()\n', (11434, 11436), False, 'import windows\n'), ((11475, 11531), 'windows.com.create_instance', 'windows.com.create_instance', (['self.NetFwPolicy2', 'firewall'], {}), '(self.NetFwPolicy2, firewall)\n', (11502, 11531), False, 'import windows\n'), ((11623, 11639), 'ctypes.c_uint', 'ctypes.c_uint', (['(0)'], {}), '(0)\n', (11636, 11639), False, 'import ctypes\n'), ((12116, 12132), 'ctypes.c_uint', 'ctypes.c_uint', (['(0)'], {}), '(0)\n', (12129, 12132), False, 'import ctypes\n'), ((978, 1013), 'struct.pack', 'struct.pack', (['"""<I"""', 'self.dwLocalAddr'], {}), "('<I', self.dwLocalAddr)\n", (989, 1013), False, 'import struct\n'), ((1217, 1253), 'struct.pack', 'struct.pack', (['"""<I"""', 'self.dwRemoteAddr'], {}), "('<I', self.dwRemoteAddr)\n", (1228, 1253), False, 'import struct\n'), ((1537, 1582), 'socket.getservbyport', 'socket.getservbyport', (['self.remote_port', '"""tcp"""'], {}), "(self.remote_port, 'tcp')\n", (1557, 1582), False, 'import socket\n'), ((1891, 1929), 'socket.gethostbyaddr', 'socket.gethostbyaddr', (['self.remote_addr'], {}), '(self.remote_addr)\n', (1911, 1929), False, 'import socket\n'), ((2389, 2410), 'ctypes.byref', 'ctypes.byref', (['closing'], {}), '(closing)\n', (2401, 2410), False, 'import ctypes\n'), ((11927, 11945), 'ctypes.byref', 'ctypes.byref', (['size'], {}), '(size)\n', (11939, 11945), False, 'import ctypes\n'), ((12421, 12439), 'ctypes.byref', 'ctypes.byref', (['size'], {}), '(size)\n', (12433, 12439), False, 'import ctypes\n'), ((11700, 11718), 'ctypes.byref', 'ctypes.byref', (['size'], {}), '(size)\n', (11712, 11718), False, 'import ctypes\n'), ((12193, 12211), 'ctypes.byref', 'ctypes.byref', (['size'], {}), '(size)\n', (12205, 12211), False, 'import ctypes\n')] |
Bhare8972/LOFAR-LIM | LIM_scripts/func_curry.py | 89f25be8c02cb8980c2e237da3eaac279d40a06a | #!/usr/bin/env python3
# Coded by Massimiliano Tomassoli, 2012.
#
# - Thanks to b49P23TIvg for suggesting that I should use a set operation
# instead of repeated membership tests.
# - Thanks to Ian Kelly for pointing out that
# - "minArgs = None" is better than "minArgs = -1",
# - "if args" is better than "if len(args)", and
# - I should use "isdisjoint".
#
def genCur(func, unique = True, minArgs = None):
""" Generates a 'curried' version of a function. """
def g(*myArgs, **myKwArgs):
def f(*args, **kwArgs):
if args or kwArgs: # some more args!
# Allocates data to assign to the next 'f'.
newArgs = myArgs + args
newKwArgs = dict.copy(myKwArgs)
# If unique is True, we don't want repeated keyword arguments.
if unique and not kwArgs.keys().isdisjoint(newKwArgs):
raise ValueError("Repeated kw arg while unique = True")
# Adds/updates keyword arguments.
newKwArgs.update(kwArgs)
# Checks whether it's time to evaluate func.
if minArgs is not None and minArgs <= len(newArgs) + len(newKwArgs):
return func(*newArgs, **newKwArgs) # time to evaluate func
else:
return g(*newArgs, **newKwArgs) # returns a new 'f'
else: # the evaluation was forced
return func(*myArgs, **myKwArgs)
return f
return g
def cur(f, minArgs = None):
return genCur(f, True, minArgs)
def curr(f, minArgs = None):
return genCur(f, False, minArgs)
if __name__ == "__main__":
# Simple Function.
def func(a, b, c, d, e, f, g = 100):
print(a, b, c, d, e, f, g)
# NOTE: '<====' means "this line prints to the screen".
# Example 1.
f = cur(func) # f is a "curried" version of func
c1 = f(1)
c2 = c1(2, d = 4) # Note that c is still unbound
c3 = c2(3)(f = 6)(e = 5) # now c = 3
c3() # () forces the evaluation <====
# it prints "1 2 3 4 5 6 100"
c4 = c2(30)(f = 60)(e = 50) # now c = 30
c4() # () forces the evaluation <====
# it prints "1 2 30 4 50 60 100"
print("\n------\n")
# Example 2.
f = curr(func) # f is a "curried" version of func
# curr = cur with possibly repeated
# keyword args
c1 = f(1, 2)(3, 4)
c2 = c1(e = 5)(f = 6)(e = 10)() # ops... we repeated 'e' because we <====
# changed our mind about it!
# again, () forces the evaluation
# it prints "1 2 3 4 10 6 100"
print("\n------\n")
# Example 3.
f = cur(func, 6) # forces the evaluation after 6 arguments
c1 = f(1, 2, 3) # num args = 3
c2 = c1(4, f = 6) # num args = 5
c3 = c2(5) # num args = 6 ==> evalution <====
# it prints "1 2 3 4 5 6 100"
c4 = c2(5, g = -1) # num args = 7 ==> evaluation <====
# we can specify more than 6 arguments, but
# 6 are enough to force the evaluation
# it prints "1 2 3 4 5 6 -1"
print("\n------\n")
# Example 4.
def printTree(func, level = None):
if level is None:
printTree(cur(func), 0)
elif level == 6:
func(g = '')() # or just func('')()
else:
printTree(func(0), level + 1)
printTree(func(1), level + 1)
printTree(func)
print("\n------\n")
def f2(*args):
print(", ".join(["%3d"%(x) for x in args]))
def stress(f, n):
if n: stress(f(n), n - 1)
else: f() # enough is enough
stress(cur(f2), 100) | [] |
Ewpratten/chat | src/messages.py | 4cc8461e442b6530b7874f234b1a2261f3db8456 | greeting = """
--------------- BEGIN SESSION ---------------
You have connected to a chat server. Welcome!
:: About
Chat is a small piece of server software
written by Evan Pratten to allow people to
   talk to each other from any computer as long
as it has an internet connection. (Even an
arduino!). Check out the project at:
https://github.com/Ewpratten/chat
:: Disclaimer
While chatting, keep in mind that, if there
is a rule or regulation about privacy, this
server does not follow it. All data is sent
to and from this server over a raw TCP socket
and data is temporarily stored in plaintext
while the server handles message broadcasting
   Now that that's out of the way, happy chatting!
---------------------------------------------
""" | [] |
khayam-hafezi/CRNN-keras-persian | Prediction.py | 3f99838e5b3b0e0ca79899e25b0648940b7fdfac | import cv2
import itertools, os, time
import numpy as np
from Model import get_Model
from parameter import letters
import argparse
from keras import backend as K
K.set_learning_phase(0)
Region = {"A": "서울 ", "B": "경기 ", "C": "인천 ", "D": "강원 ", "E": "충남 ", "F": "대전 ",
"G": "충북 ", "H": "부산 ", "I": "울산 ", "J": "대구 ", "K": "경북 ", "L": "경남 ",
"M": "전남 ", "N": "광주 ", "O": "전북 ", "P": "제주 "}
Hangul = {"dk": "아", "dj": "어", "dh": "오", "dn": "우", "qk": "바", "qj": "버", "qh": "보", "qn": "부",
"ek": "다", "ej": "더", "eh": "도", "en": "두", "rk": "가", "rj": "거", "rh": "고", "rn": "구",
"wk": "자", "wj": "저", "wh": "조", "wn": "주", "ak": "마", "aj": "머", "ah": "모", "an": "무",
"sk": "나", "sj": "너", "sh": "노", "sn": "누", "fk": "라", "fj": "러", "fh": "로", "fn": "루",
"tk": "사", "tj": "서", "th": "소", "tn": "수", "gj": "허"}
def decode_label(out):
# out : (1, 32, 42)
out_best = list(np.argmax(out[0, 2:], axis=1)) # get max index -> len = 32
out_best = [k for k, g in itertools.groupby(out_best)] # remove overlap value
outstr = ''
for i in out_best:
if i < len(letters):
outstr += letters[i]
return outstr
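# Illustrative trace of the greedy CTC decoding above (made-up indices): if the per-step
# argmax over out[0, 2:] were [5, 5, blank, 5, 7, 7] (where blank is the extra index,
# typically len(letters), that CTC reserves), itertools.groupby first collapses the runs
# to [5, blank, 5, 7]; the blank fails the `i < len(letters)` check and is dropped, and
# the surviving indices are mapped through `letters` to build the decoded string.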
def label_to_hangul(label): # eng -> hangul
region = label[0]
two_num = label[1:3]
hangul = label[3:5]
four_num = label[5:]
try:
region = Region[region] if region != 'Z' else ''
except:
pass
try:
hangul = Hangul[hangul]
except:
pass
return region + two_num + hangul + four_num
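# Illustrative example (using the mapping tables above): label_to_hangul("B01dk1234")
# splits the label into region 'B' -> '경기 ', two digits '01', syllable key 'dk' -> '아',
# and the trailing digits, giving '경기 01아1234'; a leading 'Z' leaves the region empty.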
parser = argparse.ArgumentParser()
parser.add_argument("-w", "--weight", help="weight file directory",
type=str, default="models/weights.best.hdf5")
parser.add_argument("-t", "--test_img", help="Test image directory",
type=str, default="./DB/test/")
args = parser.parse_args()
# Get CRNN model
model = get_Model(training=False)
try:
model.load_weights(args.weight)
print("...Previous weight data...")
except:
raise Exception("No weight file!")
test_dir =args.test_img
test_imgs = os.listdir(args.test_img)
total = 0
acc = 0
letter_total = 0
letter_acc = 0
start = time.time()
for test_img in test_imgs:
img = cv2.imread(test_dir + test_img, cv2.IMREAD_GRAYSCALE)
img_pred = img.astype(np.float32)
img_pred = cv2.resize(img_pred, (128, 64))
img_pred = (img_pred / 255.0) * 2.0 - 1.0
img_pred = img_pred.T
img_pred = np.expand_dims(img_pred, axis=-1)
img_pred = np.expand_dims(img_pred, axis=0)
net_out_value = model.predict(img_pred)
pred_texts = decode_label(net_out_value)
for i in range(min(len(pred_texts), len(test_img[0:-4]))):
if pred_texts[i] == test_img[i]:
letter_acc += 1
letter_total += max(len(pred_texts), len(test_img[0:-4]))
predOk = "True"
if pred_texts == test_img[0:-4]:
acc += 1
else:
predOk = "False"
total += 1
# print('Predicted: %s / True: %s / net_out_value: %s / ' % (label_to_hangul(pred_texts), label_to_hangul(test_img[0:-4])))
print('Predicted: %s / True: %s / predOk: %s ' % (pred_texts, test_img[0:-4], predOk ))
# cv2.rectangle(img, (0,0), (150, 30), (0,0,0), -1)
# cv2.putText(img, pred_texts, (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255),2)
#cv2.imshow("q", img)
#if cv2.waitKey(0) == 27:
# break
#cv2.destroyAllWindows()
end = time.time()
total_time = (end - start)
print("Time : ",total_time / total)
print("ACC : ", acc / total)
print("letter ACC : ", letter_acc / letter_total)
| [((162, 185), 'keras.backend.set_learning_phase', 'K.set_learning_phase', (['(0)'], {}), '(0)\n', (182, 185), True, 'from keras import backend as K\n'), ((1555, 1580), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1578, 1580), False, 'import argparse\n'), ((1889, 1914), 'Model.get_Model', 'get_Model', ([], {'training': '(False)'}), '(training=False)\n', (1898, 1914), False, 'from Model import get_Model\n'), ((2082, 2107), 'os.listdir', 'os.listdir', (['args.test_img'], {}), '(args.test_img)\n', (2092, 2107), False, 'import itertools, os, time\n'), ((2166, 2177), 'time.time', 'time.time', ([], {}), '()\n', (2175, 2177), False, 'import itertools, os, time\n'), ((3418, 3429), 'time.time', 'time.time', ([], {}), '()\n', (3427, 3429), False, 'import itertools, os, time\n'), ((2215, 2268), 'cv2.imread', 'cv2.imread', (['(test_dir + test_img)', 'cv2.IMREAD_GRAYSCALE'], {}), '(test_dir + test_img, cv2.IMREAD_GRAYSCALE)\n', (2225, 2268), False, 'import cv2\n'), ((2323, 2354), 'cv2.resize', 'cv2.resize', (['img_pred', '(128, 64)'], {}), '(img_pred, (128, 64))\n', (2333, 2354), False, 'import cv2\n'), ((2442, 2475), 'numpy.expand_dims', 'np.expand_dims', (['img_pred'], {'axis': '(-1)'}), '(img_pred, axis=-1)\n', (2456, 2475), True, 'import numpy as np\n'), ((2491, 2523), 'numpy.expand_dims', 'np.expand_dims', (['img_pred'], {'axis': '(0)'}), '(img_pred, axis=0)\n', (2505, 2523), True, 'import numpy as np\n'), ((934, 965), 'numpy.argmax', 'np.argmax', (['out[(0), 2:]'], {'axis': '(1)'}), '(out[(0), 2:], axis=1)\n', (943, 965), True, 'import numpy as np\n'), ((1024, 1051), 'itertools.groupby', 'itertools.groupby', (['out_best'], {}), '(out_best)\n', (1041, 1051), False, 'import itertools, os, time\n')] |
OswinC/torcharrow | torcharrow/_interop.py | 45a57c45afeffee488c51e3387179292b3504a6c | # Copyright (c) Facebook, Inc. and its affiliates.
from typing import List, Optional, cast
# Skipping analyzing 'numpy': found module but no type hints or library stubs
import numpy as np # type: ignore
import numpy.ma as ma # type: ignore
# Skipping analyzing 'pandas': found module but no type hints or library stubs
import pandas as pd # type: ignore
import pyarrow as pa # type: ignore
import torcharrow.dtypes as dt
from torcharrow import Scope
def from_arrow_table(
table,
dtype: Optional[dt.DType] = None,
columns: Optional[List[str]] = None,
scope=None,
device="",
):
""" "
Convert arrow table to a torcharrow dataframe.
"""
scope = scope or Scope.default
device = device or scope.device
assert isinstance(table, pa.Table)
if dtype is not None:
assert dt.is_struct(dtype)
dtype = cast(dt.Struct, dtype)
res = {}
for f in dtype.fields:
chunked_array = table.column(f.name)
pydata = chunked_array.to_pylist()
res[f.name] = scope.Column(pydata, f.dtype)
return scope.DataFrame(res, device=device)
else:
res = {}
table = table.select(columns) if columns is not None else table
for n in table.column_names:
chunked_array = table.column(n)
pydata = chunked_array.to_pylist()
res[n] = scope.Column(
pydata,
dtype=_arrowtype_to_dtype(
table.schema.field(n).type, table.column(n).null_count > 0
),
)
return scope.DataFrame(res, device=device)
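# Illustrative sketch (not part of the original source): calling
# `from_arrow_table` on a small pyarrow table built in place; the column names
# and values are placeholders.
#
#   import pyarrow as pa
#   t = pa.table({"a": [1, 2, 3], "b": [0.1, None, 0.3]})
#   df = from_arrow_table(t)                   # dtypes inferred from the schema
#   df_a = from_arrow_table(t, columns=["a"])  # keep only column "a"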
def from_pandas_dataframe(
df,
dtype: Optional[dt.DType] = None,
columns: Optional[List[str]] = None,
scope=None,
device="",
):
"""
Convert pandas dataframe to torcharrow dataframe (drops indices).
Parameters
----------
df : Pandas dataframe
dtype : dtype, default None
Data type to force, if None will automatically infer.
columns : array-like
List of column names to extract from df.
scope : Scope or None
Scope to use, or None for default scope.
device : str or ""
Device to use, or default if blank.
Examples
--------
>>> import pandas as pd
>>> import torcharrow as ta
>>> pdf = pd.DataFrame({'a': [0, 1, 2, 3],'b': [0.1, 0.2, None, 0.3]})
>>> gdf = ta.from_pandas_dataframe(pdf)
>>> gdf
index a b
------- --- ---
0 0 0.1
1 1 0.2
2 2
3 3 0.3
dtype: Struct([Field('a', int64), Field('b', Float64(nullable=True))]), count: 4, null_count: 0
"""
scope = scope or Scope.default
device = device or scope.device
if dtype is not None:
assert dt.is_struct(dtype)
dtype = cast(dt.Struct, dtype)
res = {}
for f in dtype.fields:
            # this shows that Column should also construct Dataframes!
res[f.name] = from_pandas_series(
pd.Series(df[f.name]), f.dtype, scope=scope
)
return scope.Frame(res, dtype=dtype, device=device)
else:
res = {}
for n in df.columns:
if columns is None or n in columns:
res[n] = from_pandas_series(pd.Series(df[n]), scope=scope)
return scope.Frame(res, device=device)
def from_arrow_array(array, dtype=None, scope=None, device=""):
""" "
Convert arrow array to a torcharrow column.
"""
scope = scope or Scope.default
device = device or scope.device
assert isinstance(array, pa.Array)
pydata = _arrow_scalar_to_py(array)
if dtype is not None:
assert not dt.is_struct(dtype)
return scope.Column(pydata, dtype, device=device)
else:
return scope.Column(
pydata,
dtype=_arrowtype_to_dtype(array.type, array.null_count > 0),
device=device,
)
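# Illustrative sketch (not part of the original source): converting a small
# pyarrow array built in place; the values are placeholders.
#
#   import pyarrow as pa
#   col = from_arrow_array(pa.array([1, 2, None]))  # dtype inferred, nullable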
def from_pandas_series(series, dtype=None, scope=None, device=""):
""" "
Convert pandas series array to a torcharrow column (drops indices).
"""
scope = scope or Scope.default
device = device or scope.device
return from_numpy(series.to_numpy(), dtype, scope, device)
def from_numpy(array, dtype, scope=None, device=""):
"""
Convert 1dim numpy array to a torcharrow column (zero copy).
"""
scope = scope or Scope.default
device = device or scope.device
if isinstance(array, ma.core.MaskedArray) and array.ndim == 1:
return _from_numpy_ma(array.data, array.mask, dtype, scope, device)
elif isinstance(array, np.ndarray) and array.ndim == 1:
return _from_numpy_nd(array, dtype, scope, device)
else:
raise TypeError(f"cannot convert numpy array of type {array.dtype}")
def _is_not_str(s):
return not isinstance(s, str)
def _from_numpy_ma(data, mask, dtype, scope=None, device=""):
# adopt types
if dtype is None:
dtype = dt.typeof_np_dtype(data.dtype).with_null()
else:
assert dt.is_primitive_type(dtype)
assert dtype == dt.typeof_np_dtype(data.dtype).with_null()
# TODO if not, adopt the type or?
# Something like ma.array
# np.array([np.nan, np.nan, 3.]).astype(np.int64),
# mask = np.isnan([np.nan, np.nan, 3.]))
# create column, only zero copy supported
if dt.is_boolean_or_numerical(dtype):
assert not np.all(np.isnan(ma.array(data, mask).compressed()))
return scope._FullColumn(data, dtype=dtype, mask=mask)
elif dt.is_string(dtype) or dtype == "object":
assert np.all(np.vectorize(_is_not_str)(ma.array(data, mask).compressed()))
return scope._FullColumn(data, dtype=dtype, mask=mask)
else:
raise TypeError(f"cannot convert masked numpy array of type {data.dtype}")
def _from_numpy_nd(data, dtype, scope=None, device=""):
# adopt types
if dtype is None:
dtype = dt.typeof_np_dtype(data.dtype)
if dtype is None:
dtype = dt.string
else:
assert dt.is_primitive(dtype)
        # TODO Check why the following assert isn't the case
# assert dtype == dt.typeof_np_dtype(data.dtype)
# create column, only zero copy supported
if dt.is_boolean_or_numerical(dtype):
mask = np.isnan(data)
return scope._FullColumn(data, dtype=dtype, mask=mask)
elif dt.is_string(dtype):
mask = np.vectorize(_is_not_str)(data)
if np.any(mask):
dtype = dtype.with_null()
return scope._FullColumn(data, dtype=dtype, mask=mask)
else:
raise TypeError("can not convert numpy array of type {data.dtype,}")
# def _column_without_nan(series, dtype):
# if dtype is None or is_floating(dtype):
# for i in series:
# if isinstance(i, float) and np.isnan(i):
# yield None
# else:
# yield i
# else:
# for i in series:
# yield i
def _arrow_scalar_to_py(array):
for i in array:
yield i.as_py()
def _pandatype_to_dtype(t, nullable):
return dt.typeof_nptype(t, nullable)
def _arrowtype_to_dtype(t, nullable):
if pa.types.is_boolean(t):
return dt.Boolean(nullable)
if pa.types.is_int8(t):
return dt.Int8(nullable)
if pa.types.is_int16(t):
return dt.Int16(nullable)
if pa.types.is_int32(t):
return dt.Int32(nullable)
if pa.types.is_int64(t):
return dt.Int64(nullable)
if pa.types.is_float32(t):
return dt.Float32(nullable)
if pa.types.is_float64(t):
return dt.Float64(nullable)
if pa.types.is_list(t):
        # torcharrow list dtype (typing.List is not callable like this); element
        # nullability is assumed to follow the outer nullability
        return dt.List(_arrowtype_to_dtype(t.value_type, nullable), nullable=nullable)
if pa.types.is_struct(t):
return _pandatype_to_dtype(t.to_pandas_dtype(), True)
if pa.types.is_null(t):
return dt.Void()
if pa.types.is_string(t):
return dt.String(nullable)
if pa.types.is_map(t):
return dt.Map(t.item_type, t.key_type, nullable)
raise NotImplementedError("unsupported case")
| [((5363, 5396), 'torcharrow.dtypes.is_boolean_or_numerical', 'dt.is_boolean_or_numerical', (['dtype'], {}), '(dtype)\n', (5389, 5396), True, 'import torcharrow.dtypes as dt\n'), ((6246, 6279), 'torcharrow.dtypes.is_boolean_or_numerical', 'dt.is_boolean_or_numerical', (['dtype'], {}), '(dtype)\n', (6272, 6279), True, 'import torcharrow.dtypes as dt\n'), ((7101, 7130), 'torcharrow.dtypes.typeof_nptype', 'dt.typeof_nptype', (['t', 'nullable'], {}), '(t, nullable)\n', (7117, 7130), True, 'import torcharrow.dtypes as dt\n'), ((7178, 7200), 'pyarrow.types.is_boolean', 'pa.types.is_boolean', (['t'], {}), '(t)\n', (7197, 7200), True, 'import pyarrow as pa\n'), ((7245, 7264), 'pyarrow.types.is_int8', 'pa.types.is_int8', (['t'], {}), '(t)\n', (7261, 7264), True, 'import pyarrow as pa\n'), ((7306, 7326), 'pyarrow.types.is_int16', 'pa.types.is_int16', (['t'], {}), '(t)\n', (7323, 7326), True, 'import pyarrow as pa\n'), ((7369, 7389), 'pyarrow.types.is_int32', 'pa.types.is_int32', (['t'], {}), '(t)\n', (7386, 7389), True, 'import pyarrow as pa\n'), ((7432, 7452), 'pyarrow.types.is_int64', 'pa.types.is_int64', (['t'], {}), '(t)\n', (7449, 7452), True, 'import pyarrow as pa\n'), ((7495, 7517), 'pyarrow.types.is_float32', 'pa.types.is_float32', (['t'], {}), '(t)\n', (7514, 7517), True, 'import pyarrow as pa\n'), ((7562, 7584), 'pyarrow.types.is_float64', 'pa.types.is_float64', (['t'], {}), '(t)\n', (7581, 7584), True, 'import pyarrow as pa\n'), ((7629, 7648), 'pyarrow.types.is_list', 'pa.types.is_list', (['t'], {}), '(t)\n', (7645, 7648), True, 'import pyarrow as pa\n'), ((7701, 7722), 'pyarrow.types.is_struct', 'pa.types.is_struct', (['t'], {}), '(t)\n', (7719, 7722), True, 'import pyarrow as pa\n'), ((7793, 7812), 'pyarrow.types.is_null', 'pa.types.is_null', (['t'], {}), '(t)\n', (7809, 7812), True, 'import pyarrow as pa\n'), ((7846, 7867), 'pyarrow.types.is_string', 'pa.types.is_string', (['t'], {}), '(t)\n', (7864, 7867), True, 'import pyarrow as pa\n'), ((7911, 7929), 'pyarrow.types.is_map', 'pa.types.is_map', (['t'], {}), '(t)\n', (7926, 7929), True, 'import pyarrow as pa\n'), ((824, 843), 'torcharrow.dtypes.is_struct', 'dt.is_struct', (['dtype'], {}), '(dtype)\n', (836, 843), True, 'import torcharrow.dtypes as dt\n'), ((860, 882), 'typing.cast', 'cast', (['dt.Struct', 'dtype'], {}), '(dt.Struct, dtype)\n', (864, 882), False, 'from typing import List, Optional, cast\n'), ((2776, 2795), 'torcharrow.dtypes.is_struct', 'dt.is_struct', (['dtype'], {}), '(dtype)\n', (2788, 2795), True, 'import torcharrow.dtypes as dt\n'), ((2812, 2834), 'typing.cast', 'cast', (['dt.Struct', 'dtype'], {}), '(dt.Struct, dtype)\n', (2816, 2834), False, 'from typing import List, Optional, cast\n'), ((5028, 5055), 'torcharrow.dtypes.is_primitive_type', 'dt.is_primitive_type', (['dtype'], {}), '(dtype)\n', (5048, 5055), True, 'import torcharrow.dtypes as dt\n'), ((5938, 5968), 'torcharrow.dtypes.typeof_np_dtype', 'dt.typeof_np_dtype', (['data.dtype'], {}), '(data.dtype)\n', (5956, 5968), True, 'import torcharrow.dtypes as dt\n'), ((6050, 6072), 'torcharrow.dtypes.is_primitive', 'dt.is_primitive', (['dtype'], {}), '(dtype)\n', (6065, 6072), True, 'import torcharrow.dtypes as dt\n'), ((6296, 6310), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (6304, 6310), True, 'import numpy as np\n'), ((6383, 6402), 'torcharrow.dtypes.is_string', 'dt.is_string', (['dtype'], {}), '(dtype)\n', (6395, 6402), True, 'import torcharrow.dtypes as dt\n'), ((7217, 7237), 'torcharrow.dtypes.Boolean', 'dt.Boolean', (['nullable'], {}), 
'(nullable)\n', (7227, 7237), True, 'import torcharrow.dtypes as dt\n'), ((7281, 7298), 'torcharrow.dtypes.Int8', 'dt.Int8', (['nullable'], {}), '(nullable)\n', (7288, 7298), True, 'import torcharrow.dtypes as dt\n'), ((7343, 7361), 'torcharrow.dtypes.Int16', 'dt.Int16', (['nullable'], {}), '(nullable)\n', (7351, 7361), True, 'import torcharrow.dtypes as dt\n'), ((7406, 7424), 'torcharrow.dtypes.Int32', 'dt.Int32', (['nullable'], {}), '(nullable)\n', (7414, 7424), True, 'import torcharrow.dtypes as dt\n'), ((7469, 7487), 'torcharrow.dtypes.Int64', 'dt.Int64', (['nullable'], {}), '(nullable)\n', (7477, 7487), True, 'import torcharrow.dtypes as dt\n'), ((7534, 7554), 'torcharrow.dtypes.Float32', 'dt.Float32', (['nullable'], {}), '(nullable)\n', (7544, 7554), True, 'import torcharrow.dtypes as dt\n'), ((7601, 7621), 'torcharrow.dtypes.Float64', 'dt.Float64', (['nullable'], {}), '(nullable)\n', (7611, 7621), True, 'import torcharrow.dtypes as dt\n'), ((7665, 7693), 'typing.List', 'List', (['t.value_type', 'nullable'], {}), '(t.value_type, nullable)\n', (7669, 7693), False, 'from typing import List, Optional, cast\n'), ((7829, 7838), 'torcharrow.dtypes.Void', 'dt.Void', ([], {}), '()\n', (7836, 7838), True, 'import torcharrow.dtypes as dt\n'), ((7884, 7903), 'torcharrow.dtypes.String', 'dt.String', (['nullable'], {}), '(nullable)\n', (7893, 7903), True, 'import torcharrow.dtypes as dt\n'), ((7946, 7987), 'torcharrow.dtypes.Map', 'dt.Map', (['t.item_type', 't.key_type', 'nullable'], {}), '(t.item_type, t.key_type, nullable)\n', (7952, 7987), True, 'import torcharrow.dtypes as dt\n'), ((3686, 3705), 'torcharrow.dtypes.is_struct', 'dt.is_struct', (['dtype'], {}), '(dtype)\n', (3698, 3705), True, 'import torcharrow.dtypes as dt\n'), ((5542, 5561), 'torcharrow.dtypes.is_string', 'dt.is_string', (['dtype'], {}), '(dtype)\n', (5554, 5561), True, 'import torcharrow.dtypes as dt\n'), ((6462, 6474), 'numpy.any', 'np.any', (['mask'], {}), '(mask)\n', (6468, 6474), True, 'import numpy as np\n'), ((3015, 3036), 'pandas.Series', 'pd.Series', (['df[f.name]'], {}), '(df[f.name])\n', (3024, 3036), True, 'import pandas as pd\n'), ((4960, 4990), 'torcharrow.dtypes.typeof_np_dtype', 'dt.typeof_np_dtype', (['data.dtype'], {}), '(data.dtype)\n', (4978, 4990), True, 'import torcharrow.dtypes as dt\n'), ((6419, 6444), 'numpy.vectorize', 'np.vectorize', (['_is_not_str'], {}), '(_is_not_str)\n', (6431, 6444), True, 'import numpy as np\n'), ((3281, 3297), 'pandas.Series', 'pd.Series', (['df[n]'], {}), '(df[n])\n', (3290, 3297), True, 'import pandas as pd\n'), ((5080, 5110), 'torcharrow.dtypes.typeof_np_dtype', 'dt.typeof_np_dtype', (['data.dtype'], {}), '(data.dtype)\n', (5098, 5110), True, 'import torcharrow.dtypes as dt\n'), ((5606, 5631), 'numpy.vectorize', 'np.vectorize', (['_is_not_str'], {}), '(_is_not_str)\n', (5618, 5631), True, 'import numpy as np\n'), ((5433, 5453), 'numpy.ma.array', 'ma.array', (['data', 'mask'], {}), '(data, mask)\n', (5441, 5453), True, 'import numpy.ma as ma\n'), ((5632, 5652), 'numpy.ma.array', 'ma.array', (['data', 'mask'], {}), '(data, mask)\n', (5640, 5652), True, 'import numpy.ma as ma\n')] |
jdavidagudelo/tensorflow-models | research/gan/image_compression/eval.py | 6f019beec73b01861363bf717706e27f4210b979 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Evaluates a TFGAN trained compression model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
import tensorflow as tf
from research.gan.image_compression import data_provider
from research.gan.image_compression import networks
from research.gan.image_compression import summaries
FLAGS = tf.app.flags.FLAGS
flags = tf.app.flags
flags.DEFINE_string('master', '', 'Name of the TensorFlow master to use.')
flags.DEFINE_string('checkpoint_dir', '/tmp/compression/',
'Directory where the model was written to.')
flags.DEFINE_string('eval_dir', '/tmp/compression/',
'Directory where the results are saved to.')
flags.DEFINE_integer('max_number_of_evaluations', None,
'Number of times to run evaluation. If `None`, run '
'forever.')
flags.DEFINE_string('dataset_dir', 'testdata', 'Location of data.')
# Compression-specific flags.
flags.DEFINE_integer('batch_size', 32, 'The number of images in each batch.')
flags.DEFINE_integer('patch_size', 32, 'The size of the patches to train on.')
flags.DEFINE_integer('bits_per_patch', 1230,
'The number of bits to produce per patch.')
flags.DEFINE_integer('model_depth', 64,
'Number of filters for compression model')
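# Example invocation (illustrative; the directories shown are just the flag
# defaults and the exact entry point depends on how the repo is laid out):
#
#   python eval.py --checkpoint_dir=/tmp/compression/ \
#       --eval_dir=/tmp/compression/ --max_number_of_evaluations=1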
def main(_, run_eval_loop=True):
with tf.name_scope('inputs'):
images = data_provider.provide_data(
'validation', FLAGS.batch_size, dataset_dir=FLAGS.dataset_dir,
patch_size=FLAGS.patch_size)
# In order for variables to load, use the same variable scope as in the
# train job.
with tf.variable_scope('generator'):
reconstructions, _, prebinary = networks.compression_model(
images,
num_bits=FLAGS.bits_per_patch,
depth=FLAGS.model_depth,
is_training=False)
summaries.add_reconstruction_summaries(images, reconstructions, prebinary)
# Visualize losses.
pixel_loss_per_example = tf.reduce_mean(
tf.abs(images - reconstructions), axis=[1, 2, 3])
pixel_loss = tf.reduce_mean(pixel_loss_per_example)
tf.summary.histogram('pixel_l1_loss_hist', pixel_loss_per_example)
tf.summary.scalar('pixel_l1_loss', pixel_loss)
# Create ops to write images to disk.
uint8_images = data_provider.float_image_to_uint8(images)
uint8_reconstructions = data_provider.float_image_to_uint8(reconstructions)
uint8_reshaped = summaries.stack_images(uint8_images, uint8_reconstructions)
image_write_ops = tf.write_file(
'%s/%s' % (FLAGS.eval_dir, 'compression.png'),
tf.image.encode_png(uint8_reshaped[0]))
# For unit testing, use `run_eval_loop=False`.
if not run_eval_loop: return
tf.contrib.training.evaluate_repeatedly(
FLAGS.checkpoint_dir,
master=FLAGS.master,
hooks=[tf.contrib.training.SummaryAtEndHook(FLAGS.eval_dir),
tf.contrib.training.StopAfterNEvalsHook(1)],
eval_ops=image_write_ops,
max_number_of_evaluations=FLAGS.max_number_of_evaluations)
if __name__ == '__main__':
    app.run(main)
| [((2633, 2707), 'research.gan.image_compression.summaries.add_reconstruction_summaries', 'summaries.add_reconstruction_summaries', (['images', 'reconstructions', 'prebinary'], {}), '(images, reconstructions, prebinary)\n', (2671, 2707), False, 'from research.gan.image_compression import summaries\n'), ((2853, 2891), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['pixel_loss_per_example'], {}), '(pixel_loss_per_example)\n', (2867, 2891), True, 'import tensorflow as tf\n'), ((2896, 2962), 'tensorflow.summary.histogram', 'tf.summary.histogram', (['"""pixel_l1_loss_hist"""', 'pixel_loss_per_example'], {}), "('pixel_l1_loss_hist', pixel_loss_per_example)\n", (2916, 2962), True, 'import tensorflow as tf\n'), ((2967, 3013), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""pixel_l1_loss"""', 'pixel_loss'], {}), "('pixel_l1_loss', pixel_loss)\n", (2984, 3013), True, 'import tensorflow as tf\n'), ((3076, 3118), 'research.gan.image_compression.data_provider.float_image_to_uint8', 'data_provider.float_image_to_uint8', (['images'], {}), '(images)\n', (3110, 3118), False, 'from research.gan.image_compression import data_provider\n'), ((3147, 3198), 'research.gan.image_compression.data_provider.float_image_to_uint8', 'data_provider.float_image_to_uint8', (['reconstructions'], {}), '(reconstructions)\n', (3181, 3198), False, 'from research.gan.image_compression import data_provider\n'), ((3220, 3279), 'research.gan.image_compression.summaries.stack_images', 'summaries.stack_images', (['uint8_images', 'uint8_reconstructions'], {}), '(uint8_images, uint8_reconstructions)\n', (3242, 3279), False, 'from research.gan.image_compression import summaries\n'), ((3872, 3882), 'absl.app.run', 'app.run', (['_'], {}), '(_)\n', (3879, 3882), False, 'from absl import app\n'), ((2109, 2132), 'tensorflow.name_scope', 'tf.name_scope', (['"""inputs"""'], {}), "('inputs')\n", (2122, 2132), True, 'import tensorflow as tf\n'), ((2151, 2274), 'research.gan.image_compression.data_provider.provide_data', 'data_provider.provide_data', (['"""validation"""', 'FLAGS.batch_size'], {'dataset_dir': 'FLAGS.dataset_dir', 'patch_size': 'FLAGS.patch_size'}), "('validation', FLAGS.batch_size, dataset_dir=\n FLAGS.dataset_dir, patch_size=FLAGS.patch_size)\n", (2177, 2274), False, 'from research.gan.image_compression import data_provider\n'), ((2398, 2428), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""generator"""'], {}), "('generator')\n", (2415, 2428), True, 'import tensorflow as tf\n'), ((2470, 2584), 'research.gan.image_compression.networks.compression_model', 'networks.compression_model', (['images'], {'num_bits': 'FLAGS.bits_per_patch', 'depth': 'FLAGS.model_depth', 'is_training': '(False)'}), '(images, num_bits=FLAGS.bits_per_patch, depth=\n FLAGS.model_depth, is_training=False)\n', (2496, 2584), False, 'from research.gan.image_compression import networks\n'), ((2786, 2818), 'tensorflow.abs', 'tf.abs', (['(images - reconstructions)'], {}), '(images - reconstructions)\n', (2792, 2818), True, 'import tensorflow as tf\n'), ((3380, 3418), 'tensorflow.image.encode_png', 'tf.image.encode_png', (['uint8_reshaped[0]'], {}), '(uint8_reshaped[0])\n', (3399, 3418), True, 'import tensorflow as tf\n'), ((3624, 3676), 'tensorflow.contrib.training.SummaryAtEndHook', 'tf.contrib.training.SummaryAtEndHook', (['FLAGS.eval_dir'], {}), '(FLAGS.eval_dir)\n', (3660, 3676), True, 'import tensorflow as tf\n'), ((3693, 3735), 'tensorflow.contrib.training.StopAfterNEvalsHook', 'tf.contrib.training.StopAfterNEvalsHook', (['(1)'], {}), '(1)\n', 
(3732, 3735), True, 'import tensorflow as tf\n')] |
CheyenneNS/metrics | source/dump_query_results.py | cfeeac6d01d99679897a998b193d630ada169c61 | #!/usr/local/bin/python
import os
import mysql.connector as mysql
metrics_mysql_password = os.environ['METRICS_MYSQL_PWD']
sql_host = os.environ['SQL_HOST']
metrics = os.environ['QUERY_ON']
def dump_query_results():
"""
This is a simple SQL table dump of a given query so we can supply users with custom tables.
Note that the SQL query itself and column headers portion need to be changed if you want to change
the query/results. Otherwise it is good to go.
It can be called simply with the bin shell script.
Read the README at the top level for an example.
"""
#connect to mysql
db_connection = mysql.connect(
host = sql_host,#"mysql1", #"localhost",
user = "metrics", #"root",
passwd = metrics_mysql_password,
database = "metrics" #"datacamp"
)
cursor = db_connection.cursor()
query = "use "+metrics
cursor.execute(query)
#CHANGE QUERY HERE
query = "select username, display_name, email, orcid, kb_internal_user, institution, country, signup_date, last_signin_date from user_info order by signup_date"
#CHANGE COLUMN HEADERS HERE TO MATCH QUERY HEADERS
print("username\tdisplay_name\temail\torcid\tkb_internal_user\tinstitution\tcountry\tsignup_date\tlast_signin_date")
cursor.execute(query)
    for row_values in cursor:
        # print tab-separated values, leaving NULL columns empty
        print("\t".join("" if value is None else str(value) for value in row_values))
return 1
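# Usage sketch (illustrative): the environment variables read at the top of
# this file must be set before running, e.g.
#
#   export METRICS_MYSQL_PWD=...     # password for the 'metrics' MySQL user
#   export SQL_HOST=localhost        # hostname of the MySQL server
#   export QUERY_ON=metrics          # database to run the query against
#   python dump_query_results.py > user_info_dump.tsv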
dump_query_results()
| [((643, 742), 'mysql.connector.connect', 'mysql.connect', ([], {'host': 'sql_host', 'user': '"""metrics"""', 'passwd': 'metrics_mysql_password', 'database': '"""metrics"""'}), "(host=sql_host, user='metrics', passwd=metrics_mysql_password,\n database='metrics')\n", (656, 742), True, 'import mysql.connector as mysql\n')] |
UAEKondaya1/expressvpn_leak_testing | desktop_local_tests/windows/test_windows_packet_capture_disrupt_force_public_dns_servers.py | 9e4cee899ac04f7820ac351fa55efdc0c01370ba | from desktop_local_tests.local_packet_capture_test_case_with_disrupter import LocalPacketCaptureTestCaseWithDisrupter
from desktop_local_tests.windows.windows_dns_force_public_dns_servers_disrupter import WindowsDNSForcePublicDNSServersDisrupter
class TestWindowsPacketCaptureDisruptForcePublicDNSServers(LocalPacketCaptureTestCaseWithDisrupter):
# TODO: Make the packet capture here DNS specific?
def __init__(self, devices, parameters):
super().__init__(WindowsDNSForcePublicDNSServersDisrupter, devices, parameters)
| [] |
geojeff/kivy | kivy/loader.py | 25ab20e5b0e87269531abe1f8cc76bf270bcc755 | '''
Asynchronous data loader
========================
This is the Asynchronous Loader. You can use it to load an image
and use it, even if the data is not yet available. You must specify a default
loading image to use such a loader::
from kivy import *
image = Loader.image('mysprite.png')
You can also load an image from a url::
image = Loader.image('http://mysite.com/test.png')
If you want to change the default loading image, you can do::
Loader.loading_image = Image('another_loading.png')
Tweaking the asynchronous loader
--------------------------------
.. versionadded:: 1.6.0
You can now tweak the loader to get a better user experience or more
performance, depending on the images you're going to load. Take a look at the
parameters:
- :data:`Loader.num_workers` - define the number of threads to start for
loading images
- :data:`Loader.max_upload_per_frame` - define the maximum number of image
  uploads to the GPU per frame.
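For example, to load more images in parallel and upload more of them to the
GPU each frame (both attributes are described in more detail below)::
    Loader.num_workers = 4
    Loader.max_upload_per_frame = 10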
'''
__all__ = ('Loader', 'LoaderBase', 'ProxyImage')
from kivy import kivy_data_dir
from kivy.logger import Logger
from kivy.clock import Clock
from kivy.cache import Cache
from kivy.core.image import ImageLoader, Image
from kivy.compat import PY2
from collections import deque
from time import sleep
from os.path import join
from os import write, close, unlink, environ
import threading
# Register a cache for loader
Cache.register('kv.loader', limit=500, timeout=60)
class ProxyImage(Image):
'''Image returned by the Loader.image() function.
:Properties:
        `loaded`: bool, defaults to False
It can be True if the image is already cached
:Events:
`on_load`
Fired when the image is loaded and changed
'''
__events__ = ('on_load', )
def __init__(self, arg, **kwargs):
kwargs.setdefault('loaded', False)
super(ProxyImage, self).__init__(arg, **kwargs)
self.loaded = kwargs.get('loaded')
def on_load(self):
pass
class LoaderBase(object):
'''Common base for Loader and specific implementation.
By default, Loader will be the best available loader implementation.
    The _update() function is called every 1/25 s, or each frame if we have
less than 25 FPS.
'''
def __init__(self):
self._loading_image = None
self._error_image = None
self._num_workers = 2
self._max_upload_per_frame = 2
self._paused = False
self._resume_cond = threading.Condition()
self._q_load = deque()
self._q_done = deque()
self._client = []
self._running = False
self._start_wanted = False
self._trigger_update = Clock.create_trigger(self._update)
def __del__(self):
try:
Clock.unschedule(self._update)
except Exception:
pass
def _set_num_workers(self, num):
if num < 2:
raise Exception('Must have at least 2 workers')
self._num_workers = num
def _get_num_workers(self):
return self._num_workers
num_workers = property(_get_num_workers, _set_num_workers)
    '''Number of workers to use while loading (used only if the loader
    implementation supports it). This setting impacts the loader only at the
    beginning. Once the loader is started, the setting has no impact::
from kivy.loader import Loader
Loader.num_workers = 4
    The default value is 2 to give a smooth user experience. You could
    increase the number of workers, then all the images will be loaded faster,
    but the user will not be able to use the application while loading.
    Prior to 1.6.0, the default number was 20, and loading many full-hd images
    completely blocked the application.
.. versionadded:: 1.6.0
'''
def _set_max_upload_per_frame(self, num):
if num is not None and num < 1:
            raise Exception('Must have at least 1 image processed per frame')
self._max_upload_per_frame = num
def _get_max_upload_per_frame(self):
return self._max_upload_per_frame
max_upload_per_frame = property(_get_max_upload_per_frame,
_set_max_upload_per_frame)
    '''Number of images to upload per frame. By default, we'll upload only 2
    images to the GPU per frame. If you are uploading many tiny images, you can
    easily increase this parameter to 10, or more.
    If you are loading multiple Full-HD images, the upload time can be
    considerable and can stall the application during the upload. If you want a
    smooth experience, keep the default.
    As a matter of fact, a Full-HD RGB image will take ~6MB in memory, so it
    will take time. If you have activated mipmap=True too, then the GPU must
    calculate the mipmaps of these big images in real time as well. Then it can
    be smart to reduce :data:`max_upload_per_frame` to 1 or 2. If you want to
    get rid of that (or reduce it a lot), take a look at the DDS format.
.. versionadded:: 1.6.0
'''
def _get_loading_image(self):
if not self._loading_image:
loading_png_fn = join(kivy_data_dir, 'images', 'image-loading.gif')
self._loading_image = ImageLoader.load(filename=loading_png_fn)
return self._loading_image
def _set_loading_image(self, image):
if isinstance(image, basestring):
self._loading_image = ImageLoader.load(filename=image)
else:
self._loading_image = image
loading_image = property(_get_loading_image, _set_loading_image)
'''Image used for loading.
You can change it by doing::
Loader.loading_image = 'loading.png'
.. versionchanged:: 1.6.0
Not readonly anymore.
'''
def _get_error_image(self):
if not self._error_image:
error_png_fn = join(
'atlas://data/images/defaulttheme/image-missing')
self._error_image = ImageLoader.load(filename=error_png_fn)
return self._error_image
def _set_error_image(self, image):
if isinstance(image, basestring):
self._error_image = ImageLoader.load(filename=image)
else:
self._error_image = image
error_image = property(_get_error_image, _set_error_image)
'''Image used for error.
You can change it by doing::
Loader.error_image = 'error.png'
.. versionchanged:: 1.6.0
Not readonly anymore.
'''
def start(self):
'''Start the loader thread/process'''
self._running = True
def run(self, *largs):
'''Main loop for the loader.'''
pass
def stop(self):
'''Stop the loader thread/process'''
self._running = False
def pause(self):
'''Pause the loader, can be useful during interactions
.. versionadded:: 1.6.0
'''
self._paused = True
def resume(self):
'''Resume the loader, after a :meth:`pause`.
.. versionadded:: 1.6.0
'''
self._paused = False
self._resume_cond.acquire()
self._resume_cond.notify_all()
self._resume_cond.release()
def _wait_for_resume(self):
while self._running and self._paused:
self._resume_cond.acquire()
self._resume_cond.wait(0.25)
self._resume_cond.release()
def _load(self, kwargs):
'''(internal) Loading function, called by the thread.
Will call _load_local() if the file is local,
        or _load_urllib() if the file is on the Internet
'''
while len(self._q_done) >= (
self.max_upload_per_frame * self._num_workers):
sleep(0.1)
self._wait_for_resume()
filename = kwargs['filename']
load_callback = kwargs['load_callback']
post_callback = kwargs['post_callback']
try:
proto = filename.split(':', 1)[0]
except:
#if blank filename then return
return
if load_callback is not None:
data = load_callback(filename)
elif proto in ('http', 'https', 'ftp', 'smb'):
data = self._load_urllib(filename, kwargs['kwargs'])
else:
data = self._load_local(filename, kwargs['kwargs'])
if post_callback:
data = post_callback(data)
self._q_done.appendleft((filename, data))
self._trigger_update()
def _load_local(self, filename, kwargs):
'''(internal) Loading a local file'''
        # With recent changes to CoreImage, we must keep the data; otherwise,
        # we might be unable to recreate the texture afterwards.
return ImageLoader.load(filename, keep_data=True, **kwargs)
def _load_urllib(self, filename, kwargs):
'''(internal) Loading a network file. First download it, save it to a
temporary file, and pass it to _load_local()'''
if PY2:
import urllib2 as urllib_request
else:
import urllib.request as urllib_request
proto = filename.split(':', 1)[0]
if proto == 'smb':
try:
# note: it's important to load SMBHandler every time
                # otherwise the data is occasionally not loaded
from smb.SMBHandler import SMBHandler
except ImportError:
Logger.warning(
'Loader: can not load PySMB: make sure it is installed')
return
import tempfile
data = fd = _out_osfd = None
try:
_out_filename = ''
suffix = '.%s' % (filename.split('.')[-1])
_out_osfd, _out_filename = tempfile.mkstemp(
prefix='kivyloader', suffix=suffix)
if proto == 'smb':
# read from samba shares
fd = urllib_request.build_opener(SMBHandler).open(filename)
else:
# read from internet
fd = urllib_request.urlopen(filename)
idata = fd.read()
fd.close()
fd = None
# write to local filename
write(_out_osfd, idata)
close(_out_osfd)
_out_osfd = None
# load data
data = self._load_local(_out_filename, kwargs)
# FIXME create a clean API for that
for imdata in data._data:
imdata.source = filename
except Exception:
Logger.exception('Failed to load image <%s>' % filename)
# close file when remote file not found or download error
try:
close(_out_osfd)
except OSError:
pass
return self.error_image
finally:
if fd:
fd.close()
if _out_osfd:
close(_out_osfd)
if _out_filename != '':
unlink(_out_filename)
return data
def _update(self, *largs):
'''(internal) Check if a data is loaded, and pass to the client'''
# want to start it ?
if self._start_wanted:
if not self._running:
self.start()
self._start_wanted = False
# in pause mode, don't unqueue anything.
if self._paused:
self._trigger_update()
return
for x in range(self.max_upload_per_frame):
try:
filename, data = self._q_done.pop()
except IndexError:
return
# create the image
image = data # ProxyImage(data)
if not image.nocache:
Cache.append('kv.loader', filename, image)
# update client
for c_filename, client in self._client[:]:
if filename != c_filename:
continue
# got one client to update
client.image = image
client.loaded = True
client.dispatch('on_load')
self._client.remove((c_filename, client))
self._trigger_update()
def image(self, filename, load_callback=None, post_callback=None, **kwargs):
        '''Load an image using the Loader. A ProxyImage is returned with a
loading image. You can use it as follows::
from kivy.app import App
from kivy.uix.image import Image
from kivy.loader import Loader
class TestApp(App):
def _image_loaded(self, proxyImage):
if proxyImage.image.texture:
self.image.texture = proxyImage.image.texture
def build(self):
proxyImage = Loader.image("myPic.jpg")
proxyImage.bind(on_load=self._image_loaded)
self.image = Image()
return self.image
TestApp().run()
In order to cancel all background loading, call *Loader.stop()*.
'''
data = Cache.get('kv.loader', filename)
if data not in (None, False):
# found image, if data is not here, need to reload.
return ProxyImage(data,
loading_image=self.loading_image,
loaded=True, **kwargs)
client = ProxyImage(self.loading_image,
loading_image=self.loading_image, **kwargs)
self._client.append((filename, client))
if data is None:
# if data is None, this is really the first time
self._q_load.appendleft({
'filename': filename,
'load_callback': load_callback,
'post_callback': post_callback,
'kwargs': kwargs})
if not kwargs.get('nocache', False):
Cache.append('kv.loader', filename, False)
self._start_wanted = True
self._trigger_update()
else:
# already queued for loading
pass
return client
#
# Loader implementation
#
if 'KIVY_DOC' in environ:
Loader = None
else:
#
# Try to use pygame as our first choice for loader
#
from kivy.compat import queue
from threading import Thread
class _Worker(Thread):
'''Thread executing tasks from a given tasks queue
'''
def __init__(self, pool, tasks):
Thread.__init__(self)
self.tasks = tasks
self.daemon = True
self.pool = pool
self.start()
def run(self):
while self.pool.running:
func, args, kargs = self.tasks.get()
try:
func(*args, **kargs)
except Exception as e:
print(e)
self.tasks.task_done()
class _ThreadPool(object):
'''Pool of threads consuming tasks from a queue
'''
def __init__(self, num_threads):
super(_ThreadPool, self).__init__()
self.running = True
self.tasks = queue.Queue()
for _ in range(num_threads):
_Worker(self, self.tasks)
def add_task(self, func, *args, **kargs):
'''Add a task to the queue
'''
self.tasks.put((func, args, kargs))
def stop(self):
self.running = False
self.tasks.join()
class LoaderThreadPool(LoaderBase):
def __init__(self):
super(LoaderThreadPool, self).__init__()
self.pool = None
def start(self):
super(LoaderThreadPool, self).start()
self.pool = _ThreadPool(self._num_workers)
Clock.schedule_interval(self.run, 0)
def stop(self):
super(LoaderThreadPool, self).stop()
Clock.unschedule(self.run)
self.pool.stop()
def run(self, *largs):
while self._running:
try:
parameters = self._q_load.pop()
except:
return
self.pool.add_task(self._load, parameters)
Loader = LoaderThreadPool()
Logger.info('Loader: using a thread pool of {} workers'.format(
Loader.num_workers))
| [((1379, 1429), 'kivy.cache.Cache.register', 'Cache.register', (['"""kv.loader"""'], {'limit': '(500)', 'timeout': '(60)'}), "('kv.loader', limit=500, timeout=60)\n", (1393, 1429), False, 'from kivy.cache import Cache\n'), ((2461, 2482), 'threading.Condition', 'threading.Condition', ([], {}), '()\n', (2480, 2482), False, 'import threading\n'), ((2507, 2514), 'collections.deque', 'deque', ([], {}), '()\n', (2512, 2514), False, 'from collections import deque\n'), ((2538, 2545), 'collections.deque', 'deque', ([], {}), '()\n', (2543, 2545), False, 'from collections import deque\n'), ((2668, 2702), 'kivy.clock.Clock.create_trigger', 'Clock.create_trigger', (['self._update'], {}), '(self._update)\n', (2688, 2702), False, 'from kivy.clock import Clock\n'), ((8614, 8666), 'kivy.core.image.ImageLoader.load', 'ImageLoader.load', (['filename'], {'keep_data': '(True)'}), '(filename, keep_data=True, **kwargs)\n', (8630, 8666), False, 'from kivy.core.image import ImageLoader, Image\n'), ((12932, 12964), 'kivy.cache.Cache.get', 'Cache.get', (['"""kv.loader"""', 'filename'], {}), "('kv.loader', filename)\n", (12941, 12964), False, 'from kivy.cache import Cache\n'), ((2752, 2782), 'kivy.clock.Clock.unschedule', 'Clock.unschedule', (['self._update'], {}), '(self._update)\n', (2768, 2782), False, 'from kivy.clock import Clock\n'), ((5090, 5140), 'os.path.join', 'join', (['kivy_data_dir', '"""images"""', '"""image-loading.gif"""'], {}), "(kivy_data_dir, 'images', 'image-loading.gif')\n", (5094, 5140), False, 'from os.path import join\n'), ((5175, 5216), 'kivy.core.image.ImageLoader.load', 'ImageLoader.load', ([], {'filename': 'loading_png_fn'}), '(filename=loading_png_fn)\n', (5191, 5216), False, 'from kivy.core.image import ImageLoader, Image\n'), ((5370, 5402), 'kivy.core.image.ImageLoader.load', 'ImageLoader.load', ([], {'filename': 'image'}), '(filename=image)\n', (5386, 5402), False, 'from kivy.core.image import ImageLoader, Image\n'), ((5800, 5854), 'os.path.join', 'join', (['"""atlas://data/images/defaulttheme/image-missing"""'], {}), "('atlas://data/images/defaulttheme/image-missing')\n", (5804, 5854), False, 'from os.path import join\n'), ((5904, 5943), 'kivy.core.image.ImageLoader.load', 'ImageLoader.load', ([], {'filename': 'error_png_fn'}), '(filename=error_png_fn)\n', (5920, 5943), False, 'from kivy.core.image import ImageLoader, Image\n'), ((6091, 6123), 'kivy.core.image.ImageLoader.load', 'ImageLoader.load', ([], {'filename': 'image'}), '(filename=image)\n', (6107, 6123), False, 'from kivy.core.image import ImageLoader, Image\n'), ((7627, 7637), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (7632, 7637), False, 'from time import sleep\n'), ((9610, 9662), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'prefix': '"""kivyloader"""', 'suffix': 'suffix'}), "(prefix='kivyloader', suffix=suffix)\n", (9626, 9662), False, 'import tempfile\n'), ((10068, 10091), 'os.write', 'write', (['_out_osfd', 'idata'], {}), '(_out_osfd, idata)\n', (10073, 10091), False, 'from os import write, close, unlink, environ\n'), ((10104, 10120), 'os.close', 'close', (['_out_osfd'], {}), '(_out_osfd)\n', (10109, 10120), False, 'from os import write, close, unlink, environ\n'), ((14301, 14322), 'threading.Thread.__init__', 'Thread.__init__', (['self'], {}), '(self)\n', (14316, 14322), False, 'from threading import Thread\n'), ((14968, 14981), 'kivy.compat.queue.Queue', 'queue.Queue', ([], {}), '()\n', (14979, 14981), False, 'from kivy.compat import queue\n'), ((15601, 15637), 'kivy.clock.Clock.schedule_interval', 
'Clock.schedule_interval', (['self.run', '(0)'], {}), '(self.run, 0)\n', (15624, 15637), False, 'from kivy.clock import Clock\n'), ((15724, 15750), 'kivy.clock.Clock.unschedule', 'Clock.unschedule', (['self.run'], {}), '(self.run)\n', (15740, 15750), False, 'from kivy.clock import Clock\n'), ((9909, 9941), 'urllib.request.urlopen', 'urllib_request.urlopen', (['filename'], {}), '(filename)\n', (9931, 9941), True, 'import urllib.request as urllib_request\n'), ((10400, 10456), 'kivy.logger.Logger.exception', 'Logger.exception', (["('Failed to load image <%s>' % filename)"], {}), "('Failed to load image <%s>' % filename)\n", (10416, 10456), False, 'from kivy.logger import Logger\n'), ((10767, 10783), 'os.close', 'close', (['_out_osfd'], {}), '(_out_osfd)\n', (10772, 10783), False, 'from os import write, close, unlink, environ\n'), ((10836, 10857), 'os.unlink', 'unlink', (['_out_filename'], {}), '(_out_filename)\n', (10842, 10857), False, 'from os import write, close, unlink, environ\n'), ((11579, 11621), 'kivy.cache.Cache.append', 'Cache.append', (['"""kv.loader"""', 'filename', 'image'], {}), "('kv.loader', filename, image)\n", (11591, 11621), False, 'from kivy.cache import Cache\n'), ((13720, 13762), 'kivy.cache.Cache.append', 'Cache.append', (['"""kv.loader"""', 'filename', '(False)'], {}), "('kv.loader', filename, False)\n", (13732, 13762), False, 'from kivy.cache import Cache\n'), ((9295, 9366), 'kivy.logger.Logger.warning', 'Logger.warning', (['"""Loader: can not load PySMB: make sure it is installed"""'], {}), "('Loader: can not load PySMB: make sure it is installed')\n", (9309, 9366), False, 'from kivy.logger import Logger\n'), ((10560, 10576), 'os.close', 'close', (['_out_osfd'], {}), '(_out_osfd)\n', (10565, 10576), False, 'from os import write, close, unlink, environ\n'), ((9778, 9817), 'urllib.request.build_opener', 'urllib_request.build_opener', (['SMBHandler'], {}), '(SMBHandler)\n', (9805, 9817), True, 'import urllib.request as urllib_request\n')] |
Pythobit/Python-tutorial | Season 01 - Intro to Python/Episode 13 - Join.py | b0743eaa9c237c3578131ead1b3f2c295f11b7ee | # 13. Join
# it allows a list to be printed a bit more nicely
friends = ['Pythobit','boy','Pythoman']
print(f'My friends are {friends}.') # Output - My friends are ['Pythobit', 'boy', 'Pythoman'].
# So, the Output needs to be a bit clearer.
friends = ['Pythobit','boy','Pythoman']
friend = ', '.join(friends)
print(f'My friends are {friend}') # Output - My friends are Pythobit, boy, Pythoman
# Here a comma and a space (', ') are used as the separator, but you can use anything.
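# For example, using ' and ' as the separator instead:
friends = ['Pythobit','boy','Pythoman']
friend = ' and '.join(friends)
print(f'My friends are {friend}.') # Output - My friends are Pythobit and boy and Pythoman.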
| [] |
randomizax/polygon-label | buildsettings.py | 5091bd54aee5166d418b240f34d7a5c336685c06 | # settings file for builds.
# if you want to have custom builds, copy this file to "localbuildsettings.py" and make changes there.
# possible fields:
# resourceBaseUrl - optional - the URL base for external resources (all resources embedded in standard IITC)
# distUrlBase - optional - the base URL to use for update checks
# buildMobile - optional - if set, mobile builds are built with 'ant'. requires the Android SDK and appropriate mobile/local.properties file configured
# preBuild - optional - an array of strings to run as commands, via os.system, before building the scripts
# postBuild - optional - an array of string to run as commands, via os.system, after all builds are complete
buildSettings = {
# local: use this build if you're not modifying external resources
# no external resources allowed - they're not needed any more
'randomizax': {
'resourceUrlBase': None,
'distUrlBase': 'https://randomizax.github.io/polygon-label',
},
# local8000: if you need to modify external resources, this build will load them from
# the web server at http://0.0.0.0:8000/dist
# (This shouldn't be required any more - all resources are embedded. but, it remains just in case some new feature
# needs external resources)
'local8000': {
'resourceUrlBase': 'http://0.0.0.0:8000/dist',
'distUrlBase': None,
},
# mobile: default entry that also builds the mobile .apk
# you will need to have the android-sdk installed, and the file mobile/local.properties created as required
'mobile': {
'resourceUrlBase': None,
'distUrlBase': None,
'buildMobile': 'debug',
},
# if you want to publish your own fork of the project, and host it on your own web site
# create a localbuildsettings.py file containing something similar to this
# note: Firefox+Greasemonkey require the distUrlBase to be "https" - they won't check for updates on regular "http" URLs
#'example': {
# 'resourceBaseUrl': 'http://www.example.com/iitc/dist',
# 'distUrlBase': 'https://secure.example.com/iitc/dist',
#},
}
# defaultBuild - the name of the default build to use if none is specified on the build.py command line
# (in here as an example - it only works in localbuildsettings.py)
#defaultBuild = 'local'
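# Example (illustrative only) of a localbuildsettings.py for a personal fork;
# the fork name and URL below are placeholders:
#
# buildSettings = {
#     'myfork': {
#         'resourceUrlBase': None,
#         'distUrlBase': 'https://example.github.io/polygon-label',
#     },
# }
# defaultBuild = 'myfork'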
| [] |
riverma/osaka | osaka/storage/sftp.py | f9ed386936500303c629d7213d91215085bcf346 | from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import int
from future import standard_library
standard_library.install_aliases()
import os
import os.path
import stat
import urllib.parse
import paramiko
import traceback
import osaka.utils
"""
A backend used to handle sftp using paramiko
@author starchmd
"""
class SFTP(object):
"""
SFTP handling for Osaka
"""
def __init__(self, params={}):
"""
Constructor
"""
self.keyfile = params["keyfile"] if "keyfile" in params else None
def connect(self, host=None, port=None, user=None, password=None, secure=False):
"""
Connect to this storage medium. All data is parsed out of the url and may be None
scheme:
@param host - may be None, host to connect to
implementor must handle defaulting
@param port - may be None, port to connect to
implementor must handle a None port
@param user - may be None, user to connect as
implementor must handle a None user
@param password - may be None, password to connect with
implementor must handle a None password
"""
self.client = paramiko.client.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.client.connect(
host,
port=22 if port is None else int(port),
username=user,
password=password,
key_filename=self.keyfile,
timeout=15,
)
self.sftp = self.client.open_sftp()
@classmethod
def getSchemes(clazz):
"""
Returns a list of schemes this handler handles
Note: handling the scheme of another handler produces unknown results
@returns list of handled schemes
"""
return ["sftp"]
def put(self, path, url):
"""
Put the given path to the given url
@param path - local path of file/folder to put
@param url - url to put file/folder to
"""
rpath = urllib.parse.urlparse(url).path.lstrip("/")
print("\n\n\n\nUploading:", path)
if not os.path.isdir(path):
print("As file")
try:
self.sftp.mkdir(os.path.dirname(rpath))
except IOError:
pass
dest = rpath
try:
if stat.S_ISDIR(self.sftp.stat(rpath).st_mode) != 0:
dest = os.path.join(rpath, os.path.basename(path))
except:
pass
return self.upload(path, dest)
print("As Dir")
try:
self.sftp.mkdir(rpath)
except IOError:
pass
for dirpath, dirname, filenames in os.walk(path):
extra = os.path.relpath(dirpath, os.path.dirname(path))
try:
self.sftp.mkdir(os.path.join(rpath, extra))
except IOError:
pass
for filename in filenames:
self.upload(
os.path.join(dirpath, filename),
os.path.join(rpath, extra, filename),
)
def upload(self, path, rpath):
"""
Uploads a file to remote path
@param path - path to upload
@param rpath - remote path to upload to
"""
self.sftp.put(path, rpath)
return True
def get(self, url, path):
"""
Get the url (file/folder) to local path
@param url - url to get file/folder from
@param path - path to place fetched files
"""
rpath = urllib.parse.urlparse(url).path
try:
self.sftp.get(rpath, path)
except Exception as e:
osaka.utils.LOGGER.warning(
"Encountered exception: {}\n{}".format(e, traceback.format_exc())
)
raise osaka.utils.OsakaFileNotFound("File {} doesn't exist.".format(url))
def rm(self, url):
"""
Remove the item
@param url - url to remove
"""
rpath = urllib.parse.urlparse(url).path
self.sftp.remove(rpath)
def close(self):
"""
Close this connection
"""
self.client.close()
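# Usage sketch (illustrative only -- Osaka normally constructs and drives this
# handler itself; the host, credentials and paths below are placeholders):
#
#   handler = SFTP(params={"keyfile": "/home/user/.ssh/id_rsa"})
#   handler.connect(host="example.com", port=22, user="user", password=None)
#   handler.put("/tmp/data.txt", "sftp://example.com/uploads/data.txt")
#   handler.get("sftp://example.com/uploads/data.txt", "/tmp/copy.txt")
#   handler.close()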
| [((213, 247), 'future.standard_library.install_aliases', 'standard_library.install_aliases', ([], {}), '()\n', (245, 247), False, 'from future import standard_library\n'), ((1354, 1381), 'paramiko.client.SSHClient', 'paramiko.client.SSHClient', ([], {}), '()\n', (1379, 1381), False, 'import paramiko\n'), ((2909, 2922), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (2916, 2922), False, 'import os\n'), ((1430, 1454), 'paramiko.AutoAddPolicy', 'paramiko.AutoAddPolicy', ([], {}), '()\n', (1452, 1454), False, 'import paramiko\n'), ((2315, 2334), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (2328, 2334), False, 'import os\n'), ((2969, 2990), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (2984, 2990), False, 'import os\n'), ((1544, 1553), 'builtins.int', 'int', (['port'], {}), '(port)\n', (1547, 1553), False, 'from builtins import int\n'), ((2414, 2436), 'os.path.dirname', 'os.path.dirname', (['rpath'], {}), '(rpath)\n', (2429, 2436), False, 'import os\n'), ((3041, 3067), 'os.path.join', 'os.path.join', (['rpath', 'extra'], {}), '(rpath, extra)\n', (3053, 3067), False, 'import os\n'), ((3206, 3237), 'os.path.join', 'os.path.join', (['dirpath', 'filename'], {}), '(dirpath, filename)\n', (3218, 3237), False, 'import os\n'), ((3259, 3295), 'os.path.join', 'os.path.join', (['rpath', 'extra', 'filename'], {}), '(rpath, extra, filename)\n', (3271, 3295), False, 'import os\n'), ((2645, 2667), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (2661, 2667), False, 'import os\n'), ((3984, 4006), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (4004, 4006), False, 'import traceback\n')] |
heavenlysf/thesis | thesis/pettingzoo/butterfly/cooperative_pong/cake_paddle.py | 646553c45860f337c91a48ab7f666a174784472f | import os
os.environ["PYGAME_HIDE_SUPPORT_PROMPT"] = "hide"
import pygame
RENDER_RATIO = 2
class CakePaddle(pygame.sprite.Sprite):
def __init__(self, speed=12):
# surf is the right-most (largest) tier of the cake
self.surf = pygame.Surface((30 // RENDER_RATIO, 120 // RENDER_RATIO))
self.rect = self.surf.get_rect()
self.surf2 = pygame.Surface((30 // RENDER_RATIO, 80 // RENDER_RATIO))
self.rect2 = self.surf2.get_rect()
self.surf3 = pygame.Surface((30 // RENDER_RATIO, 40 // RENDER_RATIO))
self.rect3 = self.surf3.get_rect()
self.surf4 = pygame.Surface((30 // RENDER_RATIO, 10 // RENDER_RATIO))
self.rect4 = self.surf4.get_rect()
self.speed = speed
def reset(self):
# self.rect is set from envs class
self.rect2.midright = self.rect.midleft
self.rect3.midright = self.rect2.midleft
self.rect4.midright = self.rect3.midleft
def draw(self, screen):
pygame.draw.rect(screen, (255, 255, 255), self.rect)
pygame.draw.rect(screen, (255, 255, 255), self.rect2)
pygame.draw.rect(screen, (255, 255, 255), self.rect3)
pygame.draw.rect(screen, (255, 255, 255), self.rect4)
def update(self, area, action):
# action: 1 - up, 2 - down
movepos = [0, 0]
if action == 1:
movepos[1] = movepos[1] - self.speed
elif action == 2:
movepos[1] = movepos[1] + self.speed
newpos = self.rect.move(movepos)
if area.contains(newpos):
self.rect = newpos
# move other rects too
self.rect2 = self.rect2.move(movepos)
self.rect3 = self.rect3.move(movepos)
self.rect4 = self.rect4.move(movepos)
def process_collision(self, b_rect, dx, dy, b_speed, paddle_type):
"""
Parameters
----------
b_rect : Ball rect
dx, dy : Ball speed along single axis
b_speed : Ball speed
ignore paddle type
Returns
-------
is_collision: 1 if ball collides with paddle
b_rect: new ball rect
b_speed: new ball speed
"""
if self.rect4.colliderect(b_rect):
is_collision = True
if dx > 0:
b_rect.right = self.rect4.left
b_speed[0] = -b_speed[0]
# top or bottom edge
elif dy > 0:
b_rect.bottom = self.rect4.top
b_speed[1] = -b_speed[1]
elif dy < 0:
b_rect.top = self.rect4.bottom
b_speed[1] = -b_speed[1]
return is_collision, b_rect, b_speed
elif self.rect3.colliderect(b_rect):
is_collision = True
if dx > 0:
b_rect.right = self.rect3.left
b_speed[0] = -b_speed[0]
# top or bottom edge
elif dy > 0:
b_rect.bottom = self.rect3.top
b_speed[1] = -b_speed[1]
elif dy < 0:
b_rect.top = self.rect3.bottom
b_speed[1] = -b_speed[1]
return is_collision, b_rect, b_speed
elif self.rect2.colliderect(b_rect):
is_collision = True
if dx > 0:
b_rect.right = self.rect2.left
b_speed[0] = -b_speed[0]
# top or bottom edge
elif dy > 0:
b_rect.bottom = self.rect2.top
b_speed[1] = -b_speed[1]
elif dy < 0:
b_rect.top = self.rect2.bottom
b_speed[1] = -b_speed[1]
return is_collision, b_rect, b_speed
elif self.rect.colliderect(b_rect):
is_collision = True
if dx > 0:
b_rect.right = self.rect.left
b_speed[0] = -b_speed[0]
# top or bottom edge
elif dy > 0:
b_rect.bottom = self.rect.top
b_speed[1] = -b_speed[1]
elif dy < 0:
b_rect.top = self.rect.bottom
b_speed[1] = -b_speed[1]
return is_collision, b_rect, b_speed
return False, b_rect, b_speed
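# Usage sketch (illustrative; the playing-area size and initial placement are
# assumptions -- in the real environment the paddle's rect is positioned by the
# env before reset() is called):
#
#   area = pygame.Rect(0, 0, 960 // RENDER_RATIO, 560 // RENDER_RATIO)
#   paddle = CakePaddle(speed=12)
#   paddle.rect.midright = area.midright
#   paddle.reset()
#   paddle.update(area, action=1)  # 1 - up, 2 - down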
| [((249, 306), 'pygame.Surface', 'pygame.Surface', (['(30 // RENDER_RATIO, 120 // RENDER_RATIO)'], {}), '((30 // RENDER_RATIO, 120 // RENDER_RATIO))\n', (263, 306), False, 'import pygame\n'), ((369, 425), 'pygame.Surface', 'pygame.Surface', (['(30 // RENDER_RATIO, 80 // RENDER_RATIO)'], {}), '((30 // RENDER_RATIO, 80 // RENDER_RATIO))\n', (383, 425), False, 'import pygame\n'), ((490, 546), 'pygame.Surface', 'pygame.Surface', (['(30 // RENDER_RATIO, 40 // RENDER_RATIO)'], {}), '((30 // RENDER_RATIO, 40 // RENDER_RATIO))\n', (504, 546), False, 'import pygame\n'), ((611, 667), 'pygame.Surface', 'pygame.Surface', (['(30 // RENDER_RATIO, 10 // RENDER_RATIO)'], {}), '((30 // RENDER_RATIO, 10 // RENDER_RATIO))\n', (625, 667), False, 'import pygame\n'), ((987, 1039), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(255, 255, 255)', 'self.rect'], {}), '(screen, (255, 255, 255), self.rect)\n', (1003, 1039), False, 'import pygame\n'), ((1048, 1101), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(255, 255, 255)', 'self.rect2'], {}), '(screen, (255, 255, 255), self.rect2)\n', (1064, 1101), False, 'import pygame\n'), ((1110, 1163), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(255, 255, 255)', 'self.rect3'], {}), '(screen, (255, 255, 255), self.rect3)\n', (1126, 1163), False, 'import pygame\n'), ((1172, 1225), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(255, 255, 255)', 'self.rect4'], {}), '(screen, (255, 255, 255), self.rect4)\n', (1188, 1225), False, 'import pygame\n')] |
aidkilda/understanding-drl-navigation | src/internal_representation_analysis/decoder/StateDataset.py | 0d637c2390a935ec1182d4f2d5165644d98d6404 | import random
from internal_representation_analysis.network import ActorCriticFFNetwork
from internal_representation_analysis.scene_loader import THORDiscreteEnvironment as Environment
from internal_representation_analysis.constants import MINI_BATCH_SIZE
class StateDataset(object):
def __init__(self, states):
self.all_states = states
self.train_set = None
self.validation_set = None
self.test_set = None
def __eq__(self, other):
return self.all_states == other.all_states
def split_datasets(self, seed, all_targets=False, test_target_eq_obs=False):
all_states = self.all_states[:]
random.seed(seed)
random.shuffle(all_states)
if test_target_eq_obs:
for s in all_states:
s.embedding = s.target_eq_obs
if not all_targets:
self.train_set = all_states[0:int(0.6 * len(all_states))]
self.validation_set = all_states[int(0.6 * len(all_states)):int(
0.8 * len(all_states))]
self.test_set = all_states[int(0.8 * len(all_states)):]
else:
unique_state_ids = list(set([s.state_id for s in all_states]))
random.shuffle(unique_state_ids)
train_ids = set(unique_state_ids[0:int(0.6 * len(unique_state_ids))])
val_ids = set(unique_state_ids[int(0.6 * len(unique_state_ids)):int(
0.8 * len(unique_state_ids))])
test_ids = set(unique_state_ids[int(0.8 * len(unique_state_ids)):])
self.train_set = [s for s in all_states if s.state_id in train_ids]
self.validation_set = [s for s in all_states if s.state_id in val_ids]
self.test_set = [s for s in all_states if s.state_id in test_ids]
def shuffle_train_set(self):
random.shuffle(self.train_set)
def get_train_mini_batch(self, start_index):
return self.train_set[start_index:start_index + MINI_BATCH_SIZE]
def filter_by_indexes(self, indexList):
self.all_states = [self.all_states[i] for i in indexList]
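# Usage sketch (illustrative; `states` would normally come from the scene
# loader / activation-extraction pipeline rather than be built by hand):
#
#   dataset = StateDataset(states)
#   dataset.split_datasets(seed=42, all_targets=True)
#   dataset.shuffle_train_set()
#   mini_batch = dataset.get_train_mini_batch(start_index=0)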
| [((659, 676), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (670, 676), False, 'import random\n'), ((685, 711), 'random.shuffle', 'random.shuffle', (['all_states'], {}), '(all_states)\n', (699, 711), False, 'import random\n'), ((1815, 1845), 'random.shuffle', 'random.shuffle', (['self.train_set'], {}), '(self.train_set)\n', (1829, 1845), False, 'import random\n'), ((1208, 1240), 'random.shuffle', 'random.shuffle', (['unique_state_ids'], {}), '(unique_state_ids)\n', (1222, 1240), False, 'import random\n')] |
ferrine/PerceptualSimilarity | test_dataset_model.py | 2ff66e86b12dbfbc337991def71b09e3b86d4b12 | import numpy as np
from models import dist_model as dm
from data import data_loader as dl
import argparse
from IPython import embed
parser = argparse.ArgumentParser()
parser.add_argument("--dataset_mode", type=str, default="2afc", help="[2afc,jnd]")
parser.add_argument(
"--datasets",
type=str,
nargs="+",
default=[
"val/traditional",
"val/cnn",
"val/superres",
"val/deblur",
"val/color",
"val/frameinterp",
],
help="datasets to test - for jnd mode: [val/traditional],[val/cnn]; for 2afc mode: [train/traditional],[train/cnn],[train/mix],[val/traditional],[val/cnn],[val/color],[val/deblur],[val/frameinterp],[val/superres]",
)
parser.add_argument(
"--model",
type=str,
default="net-lin",
help="distance model type [net-lin] for linearly calibrated net, [net] for off-the-shelf network, [l2] for euclidean distance, [ssim] for Structured Similarity Image Metric",
)
parser.add_argument(
"--net",
type=str,
default="alex",
help="[squeeze], [alex], or [vgg] for network architectures",
)
parser.add_argument(
"--colorspace",
type=str,
default="Lab",
help="[Lab] or [RGB] for colorspace to use for l2, ssim model types",
)
parser.add_argument(
"--batch_size", type=int, default=50, help="batch size to test image patches in"
)
parser.add_argument("--use_gpu", action="store_true", help="turn on flag to use GPU")
parser.add_argument(
"--model_path",
type=str,
default=None,
help="location of model, will default to ./weights/v[version]/[net_name].pth",
)
parser.add_argument(
"--from_scratch", action="store_true", help="model was initialized from scratch"
)
parser.add_argument(
"--train_trunk", action="store_true", help="model trunk was trained/tuned"
)
parser.add_argument(
"--version",
type=str,
default="0.1",
help="v0.1 is latest, v0.0 was original release",
)
opt = parser.parse_args()
if opt.model in ["l2", "ssim"]:
opt.batch_size = 1
# initialize model
model = dm.DistModel()
# model.initialize(model=opt.model,net=opt.net,colorspace=opt.colorspace,model_path=opt.model_path,use_gpu=opt.use_gpu)
model.initialize(
model=opt.model,
net=opt.net,
colorspace=opt.colorspace,
model_path=opt.model_path,
use_gpu=opt.use_gpu,
pnet_rand=opt.from_scratch,
pnet_tune=opt.train_trunk,
version=opt.version,
)
if opt.model in ["net-lin", "net"]:
print("Testing model [%s]-[%s]" % (opt.model, opt.net))
elif opt.model in ["l2", "ssim"]:
print("Testing model [%s]-[%s]" % (opt.model, opt.colorspace))
# embed()
# initialize data loader
for dataset in opt.datasets:
data_loader = dl.CreateDataLoader(
dataset, dataset_mode=opt.dataset_mode, batch_size=opt.batch_size
)
# evaluate model on data
if opt.dataset_mode == "2afc":
(score, results_verbose) = dm.score_2afc_dataset(data_loader, model.forward)
elif opt.dataset_mode == "jnd":
(score, results_verbose) = dm.score_jnd_dataset(data_loader, model.forward)
# print results
print(" Dataset [%s]: %.2f" % (dataset, 100.0 * score))
| [((142, 167), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (165, 167), False, 'import argparse\n'), ((2041, 2055), 'models.dist_model.DistModel', 'dm.DistModel', ([], {}), '()\n', (2053, 2055), True, 'from models import dist_model as dm\n'), ((2690, 2781), 'data.data_loader.CreateDataLoader', 'dl.CreateDataLoader', (['dataset'], {'dataset_mode': 'opt.dataset_mode', 'batch_size': 'opt.batch_size'}), '(dataset, dataset_mode=opt.dataset_mode, batch_size=opt.\n batch_size)\n', (2709, 2781), True, 'from data import data_loader as dl\n'), ((2891, 2940), 'models.dist_model.score_2afc_dataset', 'dm.score_2afc_dataset', (['data_loader', 'model.forward'], {}), '(data_loader, model.forward)\n', (2912, 2940), True, 'from models import dist_model as dm\n'), ((3012, 3060), 'models.dist_model.score_jnd_dataset', 'dm.score_jnd_dataset', (['data_loader', 'model.forward'], {}), '(data_loader, model.forward)\n', (3032, 3060), True, 'from models import dist_model as dm\n')] |
StrangeTcy/pathnet-pytorch | plotter.py | 58c8088b992ad2f36b843186d93edc872d547c7b | import argparse
import os
import pickle
import numpy as np
import matplotlib.pyplot as plt
plt.style.use('ggplot')
parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
parser.add_argument('--mnist', action='store_true', default=False,
help='open mnist result')
args = parser.parse_args()
def subplot(subplot, data_first, data_second, title):
plt.subplot(subplot)
if args.mnist:
x = np.arange(0,100)
else:
x = np.arange(0,500)
y_first = np.mean(data_first, axis=0)
y_second = np.mean(data_second, axis=0)
y_first_err = np.std(data_first, axis=0) / 2.
y_second_err = np.std(data_second, axis=0) / 2.
plt.fill_between(x, y_first - y_first_err, y_first + y_first_err, color='m', alpha=0.3)
plt.fill_between(x, y_second - y_second_err, y_second + y_second_err, color='c', alpha=0.3)
plt.plot(x, y_first, color='r', label='Task A')
plt.plot(x, y_second, color='g', label='Task B (transfer learning)')
plt.legend(bbox_to_anchor=(0.8, 0.3), loc=2, ncol=1, fontsize=15)
axes = plt.gca()
if args.mnist:
axes.set_xlim([0, 100])
axes.set_ylim([0, 1.2])
else:
axes.set_xlim([0, 500])
axes.set_ylim([0, 0.6])
plt.title(title, fontsize=20, y = 0.9)
plt.ylabel('Accuracy',fontsize=15)
plt.xlabel('Generations',fontsize=15)
plt.grid(True)
try:
if args.mnist:
f = open(os.path.join('./result/result_mnist.pickle'))
result = pickle.load(f)
f.close()
pathnet_first = []
pathnet_second = []
for res in result:
pathnet_first.append(res[2])
pathnet_second.append(res[3])
subplot('111', pathnet_first, pathnet_second,'MNIST')
plt.show()
else:
f = open(os.path.join('./result/result_cifar_svhn.pickle'))
result = pickle.load(f)
f.close()
cifar_first = []
cifar_second = []
svhn_first = []
svhn_second = []
for res in result:
if res[0] == 'pathnet_cifar_first':
cifar_first.append(res[2])
svhn_second.append(res[3])
else:
svhn_first.append(res[2])
cifar_second.append(res[3])
subplot('211', cifar_first, cifar_second,'CIFAR-10')
subplot('212', svhn_first, svhn_second,'cSVHN')
plt.show()
except IOError:
print("Result file does not exist")
| [((91, 114), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""ggplot"""'], {}), "('ggplot')\n", (104, 114), True, 'import matplotlib.pyplot as plt\n'), ((125, 185), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""PyTorch MNIST Example"""'}), "(description='PyTorch MNIST Example')\n", (148, 185), False, 'import argparse\n'), ((386, 406), 'matplotlib.pyplot.subplot', 'plt.subplot', (['subplot'], {}), '(subplot)\n', (397, 406), True, 'import matplotlib.pyplot as plt\n'), ((508, 535), 'numpy.mean', 'np.mean', (['data_first'], {'axis': '(0)'}), '(data_first, axis=0)\n', (515, 535), True, 'import numpy as np\n'), ((551, 579), 'numpy.mean', 'np.mean', (['data_second'], {'axis': '(0)'}), '(data_second, axis=0)\n', (558, 579), True, 'import numpy as np\n'), ((692, 783), 'matplotlib.pyplot.fill_between', 'plt.fill_between', (['x', '(y_first - y_first_err)', '(y_first + y_first_err)'], {'color': '"""m"""', 'alpha': '(0.3)'}), "(x, y_first - y_first_err, y_first + y_first_err, color='m',\n alpha=0.3)\n", (708, 783), True, 'import matplotlib.pyplot as plt\n'), ((784, 880), 'matplotlib.pyplot.fill_between', 'plt.fill_between', (['x', '(y_second - y_second_err)', '(y_second + y_second_err)'], {'color': '"""c"""', 'alpha': '(0.3)'}), "(x, y_second - y_second_err, y_second + y_second_err, color\n ='c', alpha=0.3)\n", (800, 880), True, 'import matplotlib.pyplot as plt\n'), ((880, 927), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y_first'], {'color': '"""r"""', 'label': '"""Task A"""'}), "(x, y_first, color='r', label='Task A')\n", (888, 927), True, 'import matplotlib.pyplot as plt\n'), ((932, 1000), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y_second'], {'color': '"""g"""', 'label': '"""Task B (transfer learning)"""'}), "(x, y_second, color='g', label='Task B (transfer learning)')\n", (940, 1000), True, 'import matplotlib.pyplot as plt\n'), ((1005, 1070), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'bbox_to_anchor': '(0.8, 0.3)', 'loc': '(2)', 'ncol': '(1)', 'fontsize': '(15)'}), '(bbox_to_anchor=(0.8, 0.3), loc=2, ncol=1, fontsize=15)\n', (1015, 1070), True, 'import matplotlib.pyplot as plt\n'), ((1082, 1091), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1089, 1091), True, 'import matplotlib.pyplot as plt\n'), ((1254, 1290), 'matplotlib.pyplot.title', 'plt.title', (['title'], {'fontsize': '(20)', 'y': '(0.9)'}), '(title, fontsize=20, y=0.9)\n', (1263, 1290), True, 'import matplotlib.pyplot as plt\n'), ((1297, 1332), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Accuracy"""'], {'fontsize': '(15)'}), "('Accuracy', fontsize=15)\n", (1307, 1332), True, 'import matplotlib.pyplot as plt\n'), ((1336, 1374), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Generations"""'], {'fontsize': '(15)'}), "('Generations', fontsize=15)\n", (1346, 1374), True, 'import matplotlib.pyplot as plt\n'), ((1378, 1392), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (1386, 1392), True, 'import matplotlib.pyplot as plt\n'), ((2393, 2403), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2401, 2403), True, 'import matplotlib.pyplot as plt\n'), ((438, 455), 'numpy.arange', 'np.arange', (['(0)', '(100)'], {}), '(0, 100)\n', (447, 455), True, 'import numpy as np\n'), ((477, 494), 'numpy.arange', 'np.arange', (['(0)', '(500)'], {}), '(0, 500)\n', (486, 494), True, 'import numpy as np\n'), ((598, 624), 'numpy.std', 'np.std', (['data_first'], {'axis': '(0)'}), '(data_first, axis=0)\n', (604, 624), True, 'import numpy as np\n'), ((649, 676), 'numpy.std', 
'np.std', (['data_second'], {'axis': '(0)'}), '(data_second, axis=0)\n', (655, 676), True, 'import numpy as np\n'), ((1500, 1514), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1511, 1514), False, 'import pickle\n'), ((1770, 1780), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1778, 1780), True, 'import matplotlib.pyplot as plt\n'), ((1878, 1892), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1889, 1892), False, 'import pickle\n'), ((1437, 1481), 'os.path.join', 'os.path.join', (['"""./result/result_mnist.pickle"""'], {}), "('./result/result_mnist.pickle')\n", (1449, 1481), False, 'import os\n'), ((1810, 1859), 'os.path.join', 'os.path.join', (['"""./result/result_cifar_svhn.pickle"""'], {}), "('./result/result_cifar_svhn.pickle')\n", (1822, 1859), False, 'import os\n')] |
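The subplot helper above reduces several runs to a mean curve with a band of half a standard deviation on each side; a small NumPy-only check of that reduction with toy numbers:
import numpy as np
runs = np.array([[0.1, 0.2, 0.3],
                 [0.3, 0.4, 0.5]])            # 2 runs x 3 generations
mean = np.mean(runs, axis=0)                    # [0.2, 0.3, 0.4]
half_err = np.std(runs, axis=0) / 2.             # [0.05, 0.05, 0.05]
lower, upper = mean - half_err, mean + half_err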
mingruimingrui/kindler | kindler/solver/optimizer.py | 8a9c2278b607a167b0ce827b218e54949a1120e7 | import torch
def make_sgd_optimizer(
model,
base_lr=0.001,
bias_lr_factor=2.0,
momentum=0.9,
weight_decay=0.0005,
weight_decay_bias=0.0,
):
params = []
for key, value in model.named_parameters():
if not value.requires_grad:
continue
param_lr = base_lr
param_weight_decay = weight_decay
if "bias" in key:
param_lr = base_lr * bias_lr_factor
param_weight_decay = weight_decay_bias
params.append({
'params': [value],
'lr': param_lr,
'weight_decay': param_weight_decay
})
optimizer = torch.optim.SGD(params, base_lr, momentum=momentum)
return optimizer
| [((642, 693), 'torch.optim.SGD', 'torch.optim.SGD', (['params', 'base_lr'], {'momentum': 'momentum'}), '(params, base_lr, momentum=momentum)\n', (657, 693), False, 'import torch\n')] |
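A hedged usage sketch of make_sgd_optimizer; the single linear layer is only illustrative. Bias parameters end up with twice the base learning rate and their own weight decay:
import torch
model = torch.nn.Linear(10, 2)                       # illustrative model
optimizer = make_sgd_optimizer(model, base_lr=0.01)
for group in optimizer.param_groups:
    print(group['lr'], group['weight_decay'])        # 0.01/0.0005 for the weight, 0.02/0.0 for the bias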
sctiwari/EZFF_ASE | platypus/tests/test_operators.py | 94710d4cf778ff2db5e6df0cd6d10d92e1b98afe | # Copyright 2015-2018 David Hadka
#
# This file is part of Platypus, a Python module for designing and using
# evolutionary algorithms (EAs) and multiobjective evolutionary algorithms
# (MOEAs).
#
# Platypus is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Platypus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Platypus. If not, see <http://www.gnu.org/licenses/>.
import unittest
from mock import patch
from ..core import Problem, Solution
from ..types import Permutation
from ..operators import Swap
class TestSwap(unittest.TestCase):
def test_swap10(self):
problem = Problem(1, 0)
problem.types[0] = Permutation(range(10))
solution = Solution(problem)
solution.variables[0] = list(range(10))
with patch('random.randrange', side_effect=[2, 4]):
result = Swap(1.0).mutate(solution)
self.assertEqual(result.variables[0][2], 4)
self.assertEqual(result.variables[0][4], 2)
self.assertEqual(solution.variables[0][2], 2)
self.assertEqual(solution.variables[0][4], 4)
def test_swap2a(self):
problem = Problem(1, 0)
problem.types[0] = Permutation(range(2))
solution = Solution(problem)
solution.variables[0] = list(range(2))
with patch('random.randrange', side_effect=[0, 1]):
result = Swap(1.0).mutate(solution)
self.assertEqual(result.variables[0][0], 1)
self.assertEqual(result.variables[0][1], 0)
def test_swap2b(self):
problem = Problem(1, 0)
problem.types[0] = Permutation(range(2))
solution = Solution(problem)
solution.variables[0] = list(range(2))
with patch('random.randrange', side_effect=[1, 1, 0]):
result = Swap(1.0).mutate(solution)
self.assertEqual(result.variables[0][0], 1)
self.assertEqual(result.variables[0][1], 0)
def test_swap1(self):
problem = Problem(1, 0)
problem.types[0] = Permutation(range(1))
solution = Solution(problem)
solution.variables[0] = list(range(1))
with patch('random.randrange', side_effect=[0, 0]):
result = Swap(1.0).mutate(solution)
self.assertEqual(result.variables[0][0], 0) | [((1222, 1267), 'mock.patch', 'patch', (['"""random.randrange"""'], {'side_effect': '[2, 4]'}), "('random.randrange', side_effect=[2, 4])\n", (1227, 1267), False, 'from mock import patch\n'), ((1770, 1815), 'mock.patch', 'patch', (['"""random.randrange"""'], {'side_effect': '[0, 1]'}), "('random.randrange', side_effect=[0, 1])\n", (1775, 1815), False, 'from mock import patch\n'), ((2202, 2250), 'mock.patch', 'patch', (['"""random.randrange"""'], {'side_effect': '[1, 1, 0]'}), "('random.randrange', side_effect=[1, 1, 0])\n", (2207, 2250), False, 'from mock import patch\n'), ((2644, 2689), 'mock.patch', 'patch', (['"""random.randrange"""'], {'side_effect': '[0, 0]'}), "('random.randrange', side_effect=[0, 0])\n", (2649, 2689), False, 'from mock import patch\n')] |
ClovisChen/LearningCNN | fusion_net/bilinear_sampler.py | cd9102a3d71f602024558d818039f5b759c92fa5 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import tensorflow as tf
def bilinear_sampler_1d_h(input_images, x_offset, wrap_mode='border', name='bilinear_sampler', **kwargs):
    '''
    1-D bilinear sampling along x: x_offset is a map of horizontal offsets for the input.
    The _repeat helper appends a dimension to a 1-D tensor, copies the values along it, and
    flattens back to 1-D, e.g. [1,2,3] --> [1,1,2,2,3,3].
    '''
def _repeat(x, n_repeats):
with tf.variable_scope('_repeat'):
rep = tf.tile(tf.expand_dims(x, 1), [1, n_repeats])
return tf.reshape(rep, [-1])
    def _interpolate(im, x, y):  # interpolation helper
with tf.variable_scope('_interpolate'):
# handle both texture border types
_edge_size = 0
            # if the wrap mode is 'border', the edge size is 1: pad a row of zeros on both sides of the h and w dimensions
if _wrap_mode == 'border':
_edge_size = 1
im = tf.pad(im, [[0, 0], [1, 1], [1, 1], [0, 0]], mode='CONSTANT')
x = x + _edge_size
y = y + _edge_size
elif _wrap_mode == 'edge':
_edge_size = 0
else:
return None
            # clip the offset x to [0, width - 1 + 2 * edge_size] (it must not exceed the padded width)
x = tf.clip_by_value(x, 0.0, _width_f - 1 + 2 * _edge_size)
            # floor x and y, then add 1 to the floored x to get its ceiling
x0_f = tf.floor(x)
y0_f = tf.floor(y)
x1_f = x0_f + 1
            # cast the floored x, y to integers; the ceiled x may not exceed the padded width minus 1
x0 = tf.cast(x0_f, tf.int32)
y0 = tf.cast(y0_f, tf.int32)
x1 = tf.cast(tf.minimum(x1_f, _width_f - 1 + 2 * _edge_size), tf.int32)
            # the second (width) dimension has the padded width
dim2 = (_width + 2 * _edge_size)
            # the first (per-image) dimension has the padded resolution, i.e. padded width * padded height
dim1 = (_width + 2 * _edge_size) * (_height + 2 * _edge_size)
            # build the base indices: start from [0, 1, ..., batch-1], multiply by dim1 to get
            # [0, dim1, 2*dim1, ..., batch*dim1], then repeat each entry height*width times:
            # [0, ..., 0, dim1, ..., dim1, 2*dim1, ..., 2*dim1, ...]
            # this gives a per-image base index for every pixel in the batch
base = _repeat(tf.range(_num_batch) * dim1, _height * _width)
            # multiply the y offset by dim2 (the padded width) to fold the row index into the base
            # y0 is [0, ..., 0, 1, ..., 1, ..., h + 2*e, ..., h + 2*e]; after multiplying by dim2 it becomes
            # [0, ..., 0, w + 2*e, ..., w + 2*e, ..., (h + 2*e) * (w + 2*e), ...]
            # adding base then yields indices that account for both batch and row
base_y0 = base + y0 * dim2
            # adding the floored and ceiled x indices gives the left and right neighbours of the sample point
idx_l = base_y0 + x0
idx_r = base_y0 + x1
            # reshape the image to [batch * h * w, channels]
im_flat = tf.reshape(im, tf.stack([-1, _num_channels]))
            # gather with the left/right indices to pick out the left and right pixels
pix_l = tf.gather(im_flat, idx_l)
pix_r = tf.gather(im_flat, idx_r)
            # compute the interpolation weights x1 - x and x - x0
weight_l = tf.expand_dims(x1_f - x, 1)
weight_r = tf.expand_dims(x - x0_f, 1)
            # linear interpolation of the two pixel values
return weight_l * pix_l + weight_r * pix_r
    # after get_disp produces a disparity map, this interpolation is called to build the warped image
def _transform(input_images, x_offset):
'''
        The transform function first calls meshgrid to build index grids for the X and Y axes.
        Example:
        with _width = 3, linspace(0.0, _width_f - 1.0, _width) gives [0., 1., 2.]; height works the same way.
>>> x = tf.linspace(0.0, 2.0, 3)
>>> sess.run(x)
array([0., 1., 2. ], dtype = float32)
>>> x = tf.linspace(0.0, 2.0, 3)
>>> y = tf.linspace(0.0, 4.0, 5)
>>> x_t, y_t = tf.meshgrid(x, y)
>>> sess.run(x_t)
array([0., 1., 2.],
[0., 1., 2.],
[0., 1., 2.],
[0., 1., 2.],
[0., 1., 2.]], dtype=float32)
>>> sess.run(y_t)
array([0., 0., 0.],
[1., 1., 1.],
[2., 2., 2.],
[3., 3., 3.],
[4., 4., 4.]], dtype=float32)
>>> x_t_flat = tf.reshape(x_t, (1, -1))
>>> y_t_flat = tf.reshape(y_t, (1, -1))
>>> sess.run(x_t_flat)
array([[0., 1., 2., 0., 1., 2., 0., 1., 2., 0., 1., 2., 0., 1., 2.]], dtype=float32)
>>> sess.run(y_t_flat)
array([[0., 0., 0., 1., 1., 1., 2., 2., 2., 3., 3., 3., 4., 4., 4.]], dtype=float32)
>>> x_t_flat = tf.tile(x_t_flat, tf.stack([2,1]))
>>> sess.run(x_t_flat)
arraay([[0., 1., 2., 0., 1., 2., 0., 1., 2., 0., 1., 2., 0., 1., 2.], [0., 1., 2., 0., 1., 2., 0., 1., 2., 0., 1., 2., 0., 1., 2.]], dtype=float32)
>>> x_t_flat = tf.reshape(x_t_flat, (1, -1))
>>> sess.run(x_t_flat)
array([[0., 1., 2., 0., 1., 2., 0., 1., 2., 0., 1., 2., 0., 1., 2., 0., 1., 2., 0., 1., 2., 0., 1., 2., 0., 1., 2., 0., 1., 2.]], dtype=float32)
'''
with tf.variable_scope('transform'):
# grid of (x_t, y_t, 1), eq (1) in ref [1]
x_t, y_t = tf.meshgrid(tf.linspace(0.0, _width_f - 1.0, _width),
tf.linspace(0.0 , _height_f - 1.0 , _height))
x_t_flat = tf.reshape(x_t, (1, -1))
y_t_flat = tf.reshape(y_t, (1, -1))
x_t_flat = tf.tile(x_t_flat, tf.stack([_num_batch, 1]))
y_t_flat = tf.tile(y_t_flat, tf.stack([_num_batch, 1]))
x_t_flat = tf.reshape(x_t_flat, [-1])
y_t_flat = tf.reshape(y_t_flat, [-1])
x_t_flat = x_t_flat + tf.reshape(x_offset, [-1]) * _width_f
input_transformed = _interpolate(input_images, x_t_flat, y_t_flat)
output = tf.reshape(
input_transformed, tf.stack([_num_batch, _height, _width, _num_channels]))
return output
with tf.variable_scope(name):
'''
[num_batch, height, width, num_channels]
'''
_num_batch = tf.shape(input_images)[0]
_height = tf.shape(input_images)[1]
_width = tf.shape(input_images)[2]
_num_channels = tf.shape(input_images)[3]
_height_f = tf.cast(_height, tf.float32)
_width_f = tf.cast(_width, tf.float32)
_wrap_mode = wrap_mode
output = _transform(input_images, x_offset)
return output
| [((5737, 5760), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (5754, 5760), True, 'import tensorflow as tf\n'), ((6056, 6084), 'tensorflow.cast', 'tf.cast', (['_height', 'tf.float32'], {}), '(_height, tf.float32)\n', (6063, 6084), True, 'import tensorflow as tf\n'), ((6105, 6132), 'tensorflow.cast', 'tf.cast', (['_width', 'tf.float32'], {}), '(_width, tf.float32)\n', (6112, 6132), True, 'import tensorflow as tf\n'), ((470, 498), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""_repeat"""'], {}), "('_repeat')\n", (487, 498), True, 'import tensorflow as tf\n'), ((583, 604), 'tensorflow.reshape', 'tf.reshape', (['rep', '[-1]'], {}), '(rep, [-1])\n', (593, 604), True, 'import tensorflow as tf\n'), ((657, 690), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""_interpolate"""'], {}), "('_interpolate')\n", (674, 690), True, 'import tensorflow as tf\n'), ((1250, 1305), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['x', '(0.0)', '(_width_f - 1 + 2 * _edge_size)'], {}), '(x, 0.0, _width_f - 1 + 2 * _edge_size)\n', (1266, 1305), True, 'import tensorflow as tf\n'), ((1359, 1370), 'tensorflow.floor', 'tf.floor', (['x'], {}), '(x)\n', (1367, 1370), True, 'import tensorflow as tf\n'), ((1390, 1401), 'tensorflow.floor', 'tf.floor', (['y'], {}), '(y)\n', (1398, 1401), True, 'import tensorflow as tf\n'), ((1526, 1549), 'tensorflow.cast', 'tf.cast', (['x0_f', 'tf.int32'], {}), '(x0_f, tf.int32)\n', (1533, 1549), True, 'import tensorflow as tf\n'), ((1567, 1590), 'tensorflow.cast', 'tf.cast', (['y0_f', 'tf.int32'], {}), '(y0_f, tf.int32)\n', (1574, 1590), True, 'import tensorflow as tf\n'), ((2885, 2910), 'tensorflow.gather', 'tf.gather', (['im_flat', 'idx_l'], {}), '(im_flat, idx_l)\n', (2894, 2910), True, 'import tensorflow as tf\n'), ((2931, 2956), 'tensorflow.gather', 'tf.gather', (['im_flat', 'idx_r'], {}), '(im_flat, idx_r)\n', (2940, 2956), True, 'import tensorflow as tf\n'), ((3014, 3041), 'tensorflow.expand_dims', 'tf.expand_dims', (['(x1_f - x)', '(1)'], {}), '(x1_f - x, 1)\n', (3028, 3041), True, 'import tensorflow as tf\n'), ((3065, 3092), 'tensorflow.expand_dims', 'tf.expand_dims', (['(x - x0_f)', '(1)'], {}), '(x - x0_f, 1)\n', (3079, 3092), True, 'import tensorflow as tf\n'), ((4838, 4868), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""transform"""'], {}), "('transform')\n", (4855, 4868), True, 'import tensorflow as tf\n'), ((5110, 5134), 'tensorflow.reshape', 'tf.reshape', (['x_t', '(1, -1)'], {}), '(x_t, (1, -1))\n', (5120, 5134), True, 'import tensorflow as tf\n'), ((5158, 5182), 'tensorflow.reshape', 'tf.reshape', (['y_t', '(1, -1)'], {}), '(y_t, (1, -1))\n', (5168, 5182), True, 'import tensorflow as tf\n'), ((5344, 5370), 'tensorflow.reshape', 'tf.reshape', (['x_t_flat', '[-1]'], {}), '(x_t_flat, [-1])\n', (5354, 5370), True, 'import tensorflow as tf\n'), ((5394, 5420), 'tensorflow.reshape', 'tf.reshape', (['y_t_flat', '[-1]'], {}), '(y_t_flat, [-1])\n', (5404, 5420), True, 'import tensorflow as tf\n'), ((5859, 5881), 'tensorflow.shape', 'tf.shape', (['input_images'], {}), '(input_images)\n', (5867, 5881), True, 'import tensorflow as tf\n'), ((5909, 5931), 'tensorflow.shape', 'tf.shape', (['input_images'], {}), '(input_images)\n', (5917, 5931), True, 'import tensorflow as tf\n'), ((5959, 5981), 'tensorflow.shape', 'tf.shape', (['input_images'], {}), '(input_images)\n', (5967, 5981), True, 'import tensorflow as tf\n'), ((6009, 6031), 'tensorflow.shape', 'tf.shape', (['input_images'], {}), '(input_images)\n', (6017, 6031), True, 
'import tensorflow as tf\n'), ((526, 546), 'tensorflow.expand_dims', 'tf.expand_dims', (['x', '(1)'], {}), '(x, 1)\n', (540, 546), True, 'import tensorflow as tf\n'), ((909, 970), 'tensorflow.pad', 'tf.pad', (['im', '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {'mode': '"""CONSTANT"""'}), "(im, [[0, 0], [1, 1], [1, 1], [0, 0]], mode='CONSTANT')\n", (915, 970), True, 'import tensorflow as tf\n'), ((1616, 1663), 'tensorflow.minimum', 'tf.minimum', (['x1_f', '(_width_f - 1 + 2 * _edge_size)'], {}), '(x1_f, _width_f - 1 + 2 * _edge_size)\n', (1626, 1663), True, 'import tensorflow as tf\n'), ((2782, 2811), 'tensorflow.stack', 'tf.stack', (['[-1, _num_channels]'], {}), '([-1, _num_channels])\n', (2790, 2811), True, 'import tensorflow as tf\n'), ((4960, 5000), 'tensorflow.linspace', 'tf.linspace', (['(0.0)', '(_width_f - 1.0)', '_width'], {}), '(0.0, _width_f - 1.0, _width)\n', (4971, 5000), True, 'import tensorflow as tf\n'), ((5040, 5082), 'tensorflow.linspace', 'tf.linspace', (['(0.0)', '(_height_f - 1.0)', '_height'], {}), '(0.0, _height_f - 1.0, _height)\n', (5051, 5082), True, 'import tensorflow as tf\n'), ((5225, 5250), 'tensorflow.stack', 'tf.stack', (['[_num_batch, 1]'], {}), '([_num_batch, 1])\n', (5233, 5250), True, 'import tensorflow as tf\n'), ((5293, 5318), 'tensorflow.stack', 'tf.stack', (['[_num_batch, 1]'], {}), '([_num_batch, 1])\n', (5301, 5318), True, 'import tensorflow as tf\n'), ((5643, 5697), 'tensorflow.stack', 'tf.stack', (['[_num_batch, _height, _width, _num_channels]'], {}), '([_num_batch, _height, _width, _num_channels])\n', (5651, 5697), True, 'import tensorflow as tf\n'), ((2180, 2200), 'tensorflow.range', 'tf.range', (['_num_batch'], {}), '(_num_batch)\n', (2188, 2200), True, 'import tensorflow as tf\n'), ((5456, 5482), 'tensorflow.reshape', 'tf.reshape', (['x_offset', '[-1]'], {}), '(x_offset, [-1])\n', (5466, 5482), True, 'import tensorflow as tf\n')] |
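At its core, _interpolate above does plain 1-D linear interpolation between the left and right neighbouring pixels; a minimal NumPy sketch of that weighting with made-up values:
import numpy as np
row = np.array([10., 20., 30., 40.])             # one image row
x = 1.25                                       # sampling position after the offset is added
x0 = int(np.floor(x)); x1 = x0 + 1
w_l, w_r = x1 - x, x - x0                      # 0.75 and 0.25
value = w_l * row[x0] + w_r * row[x1]          # 0.75*20 + 0.25*30 = 22.5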
adamamiller/iptf16hvw-1 | Supernovae.py | d674114e94b5b20398d2e4208b55eb8e2394dce9 | #import relevant libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from astropy.io import ascii
import json
from IPython.display import display, Image
from specutils import Spectrum1D
from astropy import units
from scipy.optimize import curve_fit
from scipy.interpolate import interp1d
import scipy.integrate as integrate
from astropy.time import Time
from Supernovae import *
#speed of light (km/s)
c = 3e5
#Define class to hold releveant information for spectra data
class Spectra:
#Initialization function
def __init__(self, Spectra, epoch, z , MJD_max):
'''
Spectra (string) - path to JSON formatted spectra file
epoch (float) - MJD date
z (float) - redshift of corresponding SN
MJD_max (float) - date of B band maximum brightness for SN in MJD
'''
#correct flux for redshift, change wavelength to SN restframe, Normalize flux and store in Spectra
self.data= Unpack_Spectra(Spectra, z)
#store epoch of obseravation
self.epoch = float(epoch)
#store phase of observation
self.phase = float(epoch) - float(MJD_max)
class Lightcurve():
def __init__(self, times, fluxes, error, band):
self.band = band
self.data = pd.DataFrame(list(zip(times, fluxes, error)), columns = ['times', 'flux', 'err'])
#Create Supernovae class to store Spectral objects
class Supernovae(object):
#Initialization function
def __init__(self, name, redshift, maximum):
'''
name (str) - String of SN name
redshift (float) - redshift of SN
maximum (float) - date of B band maximum in MJD
'''
#Store name of SN
self.name = name
#Store redshift of SN
self.redshift = redshift
#Store date of B band maximum brightness
self.maximum = maximum
#initiate empty list to hold Spectra objects
self.spectra = []
self.lightcurves = []
#define function to return spectra closest to given phase
def find_spectra(self, phase1):
'''
Args:
phase1 (float )- phase of interest
Returns:
Spectra object - Spectra object with phase closest to phase1
'''
index = np.argmin([ abs(x.phase - phase1) for x in self.spectra])
return self.spectra[index]
#define function to store new spectra
def store_spectra(self, spectra_object):
'''
Args:
spectra_object (Spectra) - Spectra object to store
'''
#Make sure there are no duplicates and that spectra are sorted by date
if spectra_object in self.spectra:
self.spectra.sort(key= lambda x: x.phase)
print('already exists')
elif spectra_object.epoch in [x.epoch for x in self.spectra]:
self.spectra.sort(key= lambda x: x.phase)
pass
else:
self.spectra.append(spectra_object)
self.spectra.sort(key= lambda x: x.phase)
#define function to store lightcurve
def store_lightcurve(self, lightcurve_object):
if lightcurve_object in self.lightcurves:
print('already exists')
else:
self.lightcurves.append(lightcurve_object)
#define function that converts wavlengths to restframe and corrects flux for redshift, and normalizes flux
def Unpack_Spectra(Spectra, z, normalization = [5000,6000]):
'''
Args:
Spectra - one epoch of spectral data in JSON format from OSN
z (float) - redshift of SN
        normalization (list) - 2 item list containing boundaries of region used for normalization
Returns:
Pandas DataFrame - 2 column dataframe: wavelength and flux
Flux is corrected for redshift and normalized
Wavelength is converted to SN restframe
'''
#Extract Wavelengths
wavelengths = [float(x[0]) for x in Spectra]
#Extract Fluxes
fluxes = [float(x[1]) for x in Spectra]
#correct fluxes for redshift
fluxes = [correct_flux(flux, z) for flux in fluxes]
#Extract fluxes in normalization range
rel_flux_range = [x for x in Spectra if (float(x[0])>normalization[0]) & (float(x[0])<normalization[1])]
    #Make sure rel_flux_range isn't empty
if len(rel_flux_range) == 0:
#print('No wavelengths in normalization region, not including spectra')
return None
#Calculate average flux in this range
flux_sum = 0
for x in rel_flux_range:
flux_sum += float(x[1])
average_flux = flux_sum / float(len(rel_flux_range))
#Normalize flux
fluxes = [float(flux) / average_flux for flux in fluxes]
#convert wavelength to restframe
wavelengths = [wavelength / float(1 + z) for wavelength in wavelengths]
#store in pandas dataframe
df = pd.DataFrame()
df['Flux'] = fluxes
df['Wavelength'] = wavelengths
return df
def correct_flux(flux_obs, z):
'''
Args:
flux_obs (int) - observed flux
z (int) - redshift
Returns:
int - redshift corrected flux
'''
flux_emit = (z * flux_obs) + flux_obs
return flux_emit
#Define function to get relevant spectra from OSN JSON data file
def create_SN_object(JSON, MJD_max, z):
'''
Function to create Supernovae object for given JSON data file from OSN
Args:
JSON (str) - path to OSN JSON file of interest
        MJD_max (float) - date of B band maximum in MJD
        z (float) - redshift of the SN
Returns:
Supernovae - Supernovae object with spectra list filled
'''
supernovae = Supernovae(str(JSON[0:-5]), z, MJD_max)
#Load OSN json data
file = open('../Data/OSN_data/' + str(JSON))
json_data = json.load(file)
spectra_data = json_data[JSON[0:-5]]['spectra']
spectra_data = np.array(spectra_data)
for i in range(len(spectra_data)):
spectra = Spectra(spectra_data[i]['data'], float(spectra_data[i]['time']) / (1+z), z, MJD_max)
if spectra.data is None:
continue
else:
supernovae.store_spectra(spectra)
return supernovae
#Define function to convert calendar date to MJD
def convert_date_toMJD(date):
'''
Args:
date (str) - string of calendar date (e.g. '2002-8-17')
Returns:
float - MJD value of given calendar date
'''
t = Time(date)
t.format = 'mjd'
return t.value
#Define function to calculate absorption velocities
def calc_abs_velc(restframe, dopplershifted):
'''
Args:
restframe (float) - restframe wavelength of absorption
dopplershifted (float) - dopplershifted wavelength of absorption
Returns:
float - corresponding absorption velocity
'''
velocity = ((restframe - dopplershifted) / np.float(restframe))* c
return velocity
| [((5056, 5070), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (5068, 5070), True, 'import pandas as pd\n'), ((5980, 5995), 'json.load', 'json.load', (['file'], {}), '(file)\n', (5989, 5995), False, 'import json\n'), ((6067, 6089), 'numpy.array', 'np.array', (['spectra_data'], {}), '(spectra_data)\n', (6075, 6089), True, 'import numpy as np\n'), ((6623, 6633), 'astropy.time.Time', 'Time', (['date'], {}), '(date)\n', (6627, 6633), False, 'from astropy.time import Time\n'), ((7052, 7071), 'numpy.float', 'np.float', (['restframe'], {}), '(restframe)\n', (7060, 7071), True, 'import numpy as np\n')] |
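A hedged end-to-end sketch of the classes above; the file name, date and redshift are placeholder values, and the JSON file is assumed to sit under ../Data/OSN_data/ as create_SN_object expects:
mjd_max = convert_date_toMJD('2011-09-10')
sn = create_SN_object('SN2011fe.json', mjd_max, z=0.0008)   # placeholder file and values
near_max = sn.find_spectra(phase1=0.0)                      # spectrum closest to B-band maximum
print(near_max.phase, near_max.data.head())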
iraqis1/irqis | userbot/plugins/alive.py | d95303c48b5f15dbe814454a48d847e838793713 | """Check if userbot alive. If you change these, you become the gayest gay such that even the gay world will disown you."""
import asyncio
from telethon import events
from telethon.tl.types import ChannelParticipantsAdmins
from platform import uname
from userbot import ALIVE_NAME
from userbot.utils import admin_cmd
DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else "No name set yet nibba, check pinned in @XtraTgBot"
@command(outgoing=True, pattern="^.lk$")
async def amireallyalive(alive):
""" For .alive command, check if the bot is running. """
await alive.edit("اهلا بك في سورس التليثيون العراقي\n"
"➖➖➖➖➖➖➖➖➖\n"
"استخدم امر .alive اذا اعتقدت ان البوت توقف!\n"
"➖➖➖➖➖➖➖➖➖\n"
"اشترك في قناة السورس لانها تطرح ملفات وشروحات مفيده\n"
"➖➖➖➖➖➖➖➖➖\n"
"يمكنك مراسلتنا لاي خلل حاصل\n"
"➖➖➖➖➖➖➖➖➖\n"
"لتنصيب السورس راسلني احد مطورين السورس\n"
"➖➖➖➖➖➖➖➖➖\n"
"مطورين السورس : \n"
"➖➖➖➖➖➖➖➖➖\n"
"احمد || @HHMHHH \n"
"➖➖➖➖➖➖➖➖➖\n"
"حسن || @VHHHHH \n"
"➖➖➖➖➖➖➖➖➖\n"
"حارث || @cCcYo \n"
"➖➖➖➖➖➖➖➖➖\n"
"قناة السورس الرسميه : @cqccqq\n"
"➖➖➖➖➖➖➖➖➖\n"
"اوامر السورس هي :\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.ytv` + رابط فيديو من اي موقع للتحميل\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.purge` تحذف رسائل بالرد\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.song` + اسم اغنيه \n"
"➖➖➖➖➖➖➖➖➖\n"
"`.spam`+ كلمه + عدد \n"
"➖➖➖➖➖➖➖➖➖\n"
"`.smoon` لعرض ٤ اسطر اقمار \n"
"➖➖➖➖➖➖➖➖➖\n"
"`.moon` لعرض سطر واحد اقمار \n"
"➖➖➖➖➖➖➖➖➖\n"
"`.solarsystem` كواكب تتحرك \n"
"➖➖➖➖➖➖➖➖➖\n"
"`.snake` افعى تتحرك\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.clock` ساعات سطر واحد \n"
"➖➖➖➖➖➖➖➖➖\n"
"`.gmute` كتم\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.ungmute` الغاء كتم \n"
"➖➖➖➖➖➖➖➖➖\n"
"`.kick` طرد \n"
"➖➖➖➖➖➖➖➖➖\n"
"`.ban` حظر \n"
"➖➖➖➖➖➖➖➖➖\n"
"`.tss` + رمز اللغه\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.rnupload` رد ع الملف وسم ملف لتغير اسم الملف\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.send` + اسم النلف يدز الملف\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.unload` + اسم الملف للغاء التثببت\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.scha` يطلع يكتب حتى لو مدز رساله\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.get_bot` معرفه عدد البوتات الموجوده\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.iffuci` كتابه كود الملف\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.savefilter` اضف رد\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.clearfilter` حذف رد \n"
"➖➖➖➖➖➖➖➖➖\n"
"`.purge` حذف كل الرسائل\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.figlet` كتابه نصوص شخوط\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.savewelcome` + ترحيب لوضع ترحيب\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.clearwelcome` لحذف الترحيب \n"
"➖➖➖➖➖➖➖➖➖\n"
"`.whois` + ايدي شخص\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.fuk` فاكيو\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.get_id` ايدي اي شخص دزه بمحادثته\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.admins` المشرفين الي بالكروب\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.pin` تثبيت رساله بالكروب\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.mmf` اسم انكلش رد ع الصوره\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.bye` مغادره من المجموعه\n"
"➖➖➖➖➖➖➖➖➖\n"
"`.decide` يدز صورتين متحركات\n"
"➖➖➖➖➖➖➖➖➖\n"
"يوجد الكثير من الاوامر لكن ثبتنا الاساسيات\n") | [] |
qiutongxue/ArknightsAutoHelper | connector/ADBConnector.py | 6b97b289e9ea4e5e3f39561ef8c2217657f6ff60 | import os
import logging.config
from random import randint
import zlib
import struct
import socket
import time
from PIL import Image
import config
# from config import ADB_ROOT, ADB_HOST, SCREEN_SHOOT_SAVE_PATH, ShellColor, CONFIG_PATH,enable_adb_host_auto_detect, ADB_SERVER
from .ADBClientSession import ADBClientSession
from util.socketutil import recvall
from . import revconn
# from numpy import average, dot, linalg
logger = logging.getLogger(__name__)
def _screencap_to_image(cap):
w, h, pixels = cap
return Image.frombytes('RGBA', (w, h), pixels)
def _ensure_pil_image(imgorfile):
if isinstance(imgorfile, Image.Image):
return imgorfile
return Image.open(imgorfile)
def check_adb_alive():
try:
sess = ADBClientSession(config.ADB_SERVER)
version = int(sess.service('host:version').read_response().decode(), 16)
logger.debug('ADB server version %d', version)
return True
except ConnectionRefusedError:
return False
except RuntimeError:
return False
def ensure_adb_alive():
if check_adb_alive():
return
logger.info('尝试启动 adb server')
import subprocess
adbbin = config.get('device/adb_binary', None)
if adbbin is None:
adb_binaries = ['adb', os.path.join(config.ADB_ROOT, 'adb')]
else:
adb_binaries = [adbbin]
for adbbin in adb_binaries:
try:
logger.debug('trying %r', adbbin)
subprocess.run([adbbin, 'start-server'], check=True)
return True
except FileNotFoundError:
pass
except subprocess.CalledProcessError:
pass
raise OSError("can't start adb server")
class ADBConnector:
def __init__(self, adb_serial=None):
# os.chdir(ADB_ROOT)
self.ADB_ROOT = config.ADB_ROOT
self.adb_serial = adb_serial
self.host_session_factory = lambda: ADBClientSession(config.ADB_SERVER)
self.rch = None
if self.adb_serial is None:
self.adb_serial = self.__adb_device_name_detector()
self.device_session_factory = lambda: self.host_session_factory().device(self.adb_serial)
self.cache_screenshot = config.get('device/cache_screenshot', True)
self.last_screenshot_timestamp = 0
self.last_screenshot_duration = 0
self.last_screenshot = None
if config.get('device/try_emulator_enhanced_mode', True):
loopbacks = self._detect_loopbacks()
if len(loopbacks):
logger.debug('possible loopback addresses: %s', repr(loopbacks))
self.rch = revconn.ReverseConnectionHost()
self.rch.start()
if self._test_reverse_connection(loopbacks):
logger.info('正在使用模拟器优化模式')
self.screencap = self._reverse_connection_screencap
else:
self.rch.stop()
else:
self.loopback = None
def __del__(self):
if self.rch and self.rch.is_alive():
self.rch.stop()
def __adb_device_name_detector(self):
devices = [x for x in self.host_session_factory().devices() if x[1] != 'offline']
if len(devices) == 0:
auto_connect = config.get('device/adb_auto_connect', None)
if auto_connect is not None:
logger.info('没有已连接设备,尝试连接 %s', auto_connect)
try:
self.host_session_factory().disconnect(auto_connect)
except:
pass
self.host_session_factory().connect(auto_connect)
else:
raise RuntimeError('找不到可用设备')
devices = [x for x in self.host_session_factory().devices() if x[1] != 'offline']
always_use_device = config.get('device/adb_always_use_device', None)
if always_use_device is not None:
if always_use_device not in (x[0] for x in devices):
raise RuntimeError('设备 %s 未连接' % always_use_device)
return always_use_device
if len(devices) == 1:
device_name = devices[0][0]
elif len(devices) > 1:
logger.info("检测到多台设备")
num = 0
while True:
try:
num = int(input("请输入序号选择设备: "))
if not 0 <= num < len(devices):
raise ValueError()
break
except ValueError:
logger.error("输入不合法,请重新输入")
device_name = devices[num][0]
else:
raise RuntimeError('找不到可用设备')
logger.info("确认设备名称:" + device_name)
return device_name
def run_device_cmd(self, cmd, DEBUG_LEVEL=2):
output = self.device_session_factory().exec(cmd)
logger.debug("command: %s", cmd)
logger.debug("output: %s", repr(output))
return output
def get_sub_screen(self, image, screen_range):
return image.crop(
(
screen_range[0][0],
screen_range[0][1],
screen_range[0][0] + screen_range[1][0],
screen_range[0][1] + screen_range[1][1]
)
)
def _detect_loopbacks(self):
board = self.device_session_factory().exec('getprop ro.product.board')
if b'goldfish' in board:
return ['10.0.2.2']
modules = self.device_session_factory().exec('grep -o vboxguest /proc/modules')
if b'vboxguest' in modules:
arp = self.device_session_factory().exec('cat /proc/net/arp')
return [x[:x.find(b' ')].decode() for x in arp.splitlines()[1:]]
return []
def _test_reverse_connection(self, loopbacks):
for addr in loopbacks:
logger.debug('testing loopback address %s', addr)
future = self.rch.register_cookie()
with future:
cmd = 'echo -n %sOKAY | nc -w 1 %s %d' % (future.cookie.decode(), addr, self.rch.port)
logger.debug(cmd)
control_sock = self.device_session_factory().exec_stream(cmd)
with control_sock:
conn = future.get(2)
if conn is not None:
data = recvall(conn)
conn.close()
if data == b'OKAY':
self.loopback = addr
logger.debug('found loopback address %s', addr)
return True
return False
def screencap_png(self):
"""returns PNG bytes"""
s = self.device_session_factory().exec_stream('screencap -p')
data = recvall(s, 4194304)
return data
def screencap(self):
"""returns (width, height, pixels)
pixels in RGBA/RGBX format"""
s = self.device_session_factory().exec_stream('screencap|gzip -1')
data = recvall(s, 4194304)
s.close()
data = zlib.decompress(data, zlib.MAX_WBITS | 16, 8388608)
w, h, f = struct.unpack_from('III', data, 0)
assert (f == 1)
return (w, h, data[12:])
def _reverse_connection_screencap(self):
"""returns (width, height, pixels)
pixels in RGBA/RGBX format"""
future = self.rch.register_cookie()
with future:
control_sock = self.device_session_factory().exec_stream('(echo -n %s; screencap) | nc %s %d' % (future.cookie.decode(), self.loopback, self.rch.port))
with control_sock:
with future.get() as conn:
data = recvall(conn, 8388608, True)
w, h, f = struct.unpack_from('III', data, 0)
assert (f == 1)
return (w, h, data[12:].tobytes())
def screenshot(self, cached=True):
t0 = time.monotonic()
if cached and self.cache_screenshot:
if self.last_screenshot is not None and t0 - self.last_screenshot_timestamp < self.last_screenshot_duration:
return self.last_screenshot
rawcap = self.screencap()
img = _screencap_to_image(rawcap)
t1 = time.monotonic()
self.last_screenshot_timestamp = t1
self.last_screenshot_duration = t1 - t0
self.last_screenshot = img
return img
def touch_swipe2(self, origin, movement, duration=None):
# sleep(1)
x1, y1, x2, y2 = origin[0], origin[1], origin[0] + movement[0], origin[1] + movement[1]
logger.debug("滑动初始坐标:({},{}); 移动距离dX:{}, dy:{}".format(*origin, *movement))
command = "input swipe {} {} {} {} ".format(x1, y1, x2, y2)
if duration is not None:
command += str(int(duration))
self.run_device_cmd(command)
def touch_tap(self, XY=None, offsets=None):
# sleep(10)
# sleep(0.5)
if offsets is not None:
final_X = XY[0] + randint(-offsets[0], offsets[0])
final_Y = XY[1] + randint(-offsets[1], offsets[1])
else:
final_X = XY[0] + randint(-1, 1)
final_Y = XY[1] + randint(-1, 1)
        # if you run into problems, print these coordinates and share the log in the group
logger.debug("点击坐标:({},{})".format(final_X, final_Y))
command = "input tap {} {}".format(final_X,
final_Y)
self.run_device_cmd(command)
| [((529, 568), 'PIL.Image.frombytes', 'Image.frombytes', (['"""RGBA"""', '(w, h)', 'pixels'], {}), "('RGBA', (w, h), pixels)\n", (544, 568), False, 'from PIL import Image\n'), ((684, 705), 'PIL.Image.open', 'Image.open', (['imgorfile'], {}), '(imgorfile)\n', (694, 705), False, 'from PIL import Image\n'), ((1186, 1223), 'config.get', 'config.get', (['"""device/adb_binary"""', 'None'], {}), "('device/adb_binary', None)\n", (1196, 1223), False, 'import config\n'), ((2199, 2242), 'config.get', 'config.get', (['"""device/cache_screenshot"""', '(True)'], {}), "('device/cache_screenshot', True)\n", (2209, 2242), False, 'import config\n'), ((2376, 2429), 'config.get', 'config.get', (['"""device/try_emulator_enhanced_mode"""', '(True)'], {}), "('device/try_emulator_enhanced_mode', True)\n", (2386, 2429), False, 'import config\n'), ((3796, 3844), 'config.get', 'config.get', (['"""device/adb_always_use_device"""', 'None'], {}), "('device/adb_always_use_device', None)\n", (3806, 3844), False, 'import config\n'), ((6688, 6707), 'util.socketutil.recvall', 'recvall', (['s', '(4194304)'], {}), '(s, 4194304)\n', (6695, 6707), False, 'from util.socketutil import recvall\n'), ((6925, 6944), 'util.socketutil.recvall', 'recvall', (['s', '(4194304)'], {}), '(s, 4194304)\n', (6932, 6944), False, 'from util.socketutil import recvall\n'), ((6978, 7029), 'zlib.decompress', 'zlib.decompress', (['data', '(zlib.MAX_WBITS | 16)', '(8388608)'], {}), '(data, zlib.MAX_WBITS | 16, 8388608)\n', (6993, 7029), False, 'import zlib\n'), ((7048, 7082), 'struct.unpack_from', 'struct.unpack_from', (['"""III"""', 'data', '(0)'], {}), "('III', data, 0)\n", (7066, 7082), False, 'import struct\n'), ((7644, 7678), 'struct.unpack_from', 'struct.unpack_from', (['"""III"""', 'data', '(0)'], {}), "('III', data, 0)\n", (7662, 7678), False, 'import struct\n'), ((7799, 7815), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (7813, 7815), False, 'import time\n'), ((8115, 8131), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (8129, 8131), False, 'import time\n'), ((1278, 1314), 'os.path.join', 'os.path.join', (['config.ADB_ROOT', '"""adb"""'], {}), "(config.ADB_ROOT, 'adb')\n", (1290, 1314), False, 'import os\n'), ((1461, 1513), 'subprocess.run', 'subprocess.run', (["[adbbin, 'start-server']"], {'check': '(True)'}), "([adbbin, 'start-server'], check=True)\n", (1475, 1513), False, 'import subprocess\n'), ((3257, 3300), 'config.get', 'config.get', (['"""device/adb_auto_connect"""', 'None'], {}), "('device/adb_auto_connect', None)\n", (3267, 3300), False, 'import config\n'), ((8872, 8904), 'random.randint', 'randint', (['(-offsets[0])', 'offsets[0]'], {}), '(-offsets[0], offsets[0])\n', (8879, 8904), False, 'from random import randint\n'), ((8935, 8967), 'random.randint', 'randint', (['(-offsets[1])', 'offsets[1]'], {}), '(-offsets[1], offsets[1])\n', (8942, 8967), False, 'from random import randint\n'), ((9012, 9026), 'random.randint', 'randint', (['(-1)', '(1)'], {}), '(-1, 1)\n', (9019, 9026), False, 'from random import randint\n'), ((9057, 9071), 'random.randint', 'randint', (['(-1)', '(1)'], {}), '(-1, 1)\n', (9064, 9071), False, 'from random import randint\n'), ((7597, 7625), 'util.socketutil.recvall', 'recvall', (['conn', '(8388608)', '(True)'], {}), '(conn, 8388608, True)\n', (7604, 7625), False, 'from util.socketutil import recvall\n'), ((6260, 6273), 'util.socketutil.recvall', 'recvall', (['conn'], {}), '(conn)\n', (6267, 6273), False, 'from util.socketutil import recvall\n')] |
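A short usage sketch of ADBConnector; the serial number is illustrative, and a reachable adb server plus the project's config module are assumed:
ensure_adb_alive()
conn = ADBConnector(adb_serial='127.0.0.1:5555')     # or None to auto-detect a device
img = conn.screenshot()                              # PIL.Image, briefly cached
conn.touch_tap((640, 360), offsets=(5, 5))
conn.touch_swipe2((640, 360), (0, -200), duration=300)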
douglassimonsen/redshift_upload | redshift_upload/base_utilities.py | e549c770538f022c0b90a983ca056f3e9c16c643 | import inspect
import os
from pathlib import Path
class change_directory:
"""
A class for changing the working directory using a "with" statement.
It takes the directory to change to as an argument. If no directory is given,
it takes the directory of the file from which this function was called.
"""
def __init__(self, directory: str = None) -> None:
self.old_dir = os.getcwd()
if directory is None:
self.new_dir = Path(inspect.getabsfile(inspect.stack()[1][0])).parent # type: ignore
else:
self.new_dir = directory
def __enter__(self, *_) -> None:
os.chdir(self.new_dir)
def __exit__(self, *_) -> None:
os.chdir(self.old_dir)
| [((415, 426), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (424, 426), False, 'import os\n'), ((659, 681), 'os.chdir', 'os.chdir', (['self.new_dir'], {}), '(self.new_dir)\n', (667, 681), False, 'import os\n'), ((730, 752), 'os.chdir', 'os.chdir', (['self.old_dir'], {}), '(self.old_dir)\n', (738, 752), False, 'import os\n'), ((510, 525), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (523, 525), False, 'import inspect\n')] |
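A minimal sketch of the change_directory helper above (the target path is made up); the previous working directory is restored when the with-block exits:
import os
with change_directory('/tmp'):    # change_directory() with no argument uses the caller's file location
    print(os.getcwd())          # /tmp
print(os.getcwd())              # back to the original directory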
Gloriel621/MgallManager | main.py | 7d5c02ab6bdc2f6c6922d4a7e021faef33d868bb | import sys
from PyQt5.QtWidgets import QApplication
from gui import MgallManager
def main():
app = QApplication(sys.argv)
ex = MgallManager()
app.aboutToQuit.connect(ex.ExitHandler)
sys.exit(app.exec_())
if __name__ == "__main__":
main()
| [((106, 128), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (118, 128), False, 'from PyQt5.QtWidgets import QApplication\n'), ((138, 152), 'gui.MgallManager', 'MgallManager', ([], {}), '()\n', (150, 152), False, 'from gui import MgallManager\n')] |
dh256/adventofcode | utils/src/adventofcode/utils/Point3D.py | 428eec13f4cbf153333a0e359bcff23070ef6d27 | class Point3D:
def __init__(self,x,y,z):
self.x = x
self.y = y
self.z = z
'''
    Returns the Manhattan (L1) distance between two 3D points
'''
def distance(self, value):
return abs(self.x - value.x) + abs(self.y - value.y) + abs(self.z - value.z)
def __eq__(self, value):
return self.x == value.x and self.y == value.y and self.z == value.z
def __hash__(self):
return hash((self.x,self.y,self.z))
def __repr__(self):
return f'({self.x},{self.y},{self.z})'
def __add__(self,value):
return Point3D(self.x + value.x, self.y + value.y, self.z + value.z) | [] |
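A small worked example for Point3D; distance() is the Manhattan (L1) metric, so the two points below are 3 + 2 + 4 = 9 apart:
a = Point3D(1, 2, 3)
b = Point3D(4, 0, -1)
print(a.distance(b))    # 9
print(a + b)            # (5,2,2)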
javierrcc522/starcraft2_api_machineLear | pysc2/lib/actions.py | 5833ba1344ab5445c4f09fafc33e6058070ebe6c | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define the static list of types and actions for SC2."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numbers
import six
from pysc2.lib import point
from s2clientprotocol import spatial_pb2 as sc_spatial
from s2clientprotocol import ui_pb2 as sc_ui
def no_op(action):
del action
def move_camera(action, minimap):
"""Move the camera."""
minimap.assign_to(action.action_feature_layer.camera_move.center_minimap)
def select_point(action, select_point_act, screen):
"""Select a unit at a point."""
select = action.action_feature_layer.unit_selection_point
screen.assign_to(select.selection_screen_coord)
select.type = select_point_act
def select_rect(action, select_add, screen, screen2):
"""Select units within a rectangle."""
select = action.action_feature_layer.unit_selection_rect
out_rect = select.selection_screen_coord.add()
screen_rect = point.Rect(screen, screen2)
screen_rect.tl.assign_to(out_rect.p0)
screen_rect.br.assign_to(out_rect.p1)
select.selection_add = bool(select_add)
def select_idle_worker(action, select_worker):
"""Select an idle worker."""
action.action_ui.select_idle_worker.type = select_worker
def select_army(action, select_add):
"""Select the entire army."""
action.action_ui.select_army.selection_add = select_add
def select_warp_gates(action, select_add):
"""Select all warp gates."""
action.action_ui.select_warp_gates.selection_add = select_add
def select_larva(action):
"""Select all larva."""
action.action_ui.select_larva.SetInParent() # Adds the empty proto field.
def select_unit(action, select_unit_act, select_unit_id):
"""Select a specific unit from the multi-unit selection."""
select = action.action_ui.multi_panel
select.type = select_unit_act
select.unit_index = select_unit_id
def control_group(action, control_group_act, control_group_id):
"""Act on a control group, selecting, setting, etc."""
select = action.action_ui.control_group
select.action = control_group_act
select.control_group_index = control_group_id
def unload(action, unload_id):
"""Unload a unit from a transport/bunker/nydus/etc."""
action.action_ui.cargo_panel.unit_index = unload_id
def build_queue(action, build_queue_id):
"""Cancel a unit in the build queue."""
action.action_ui.production_panel.unit_index = build_queue_id
def cmd_quick(action, ability_id, queued):
"""Do a quick command like 'Stop' or 'Stim'."""
action_cmd = action.action_feature_layer.unit_command
action_cmd.ability_id = ability_id
action_cmd.queue_command = queued
def cmd_screen(action, ability_id, queued, screen):
"""Do a command that needs a point on the screen."""
action_cmd = action.action_feature_layer.unit_command
action_cmd.ability_id = ability_id
action_cmd.queue_command = queued
screen.assign_to(action_cmd.target_screen_coord)
def cmd_minimap(action, ability_id, queued, minimap):
"""Do a command that needs a point on the minimap."""
action_cmd = action.action_feature_layer.unit_command
action_cmd.ability_id = ability_id
action_cmd.queue_command = queued
minimap.assign_to(action_cmd.target_minimap_coord)
def autocast(action, ability_id):
"""Toggle autocast."""
action.action_ui.toggle_autocast.ability_id = ability_id
class ArgumentType(collections.namedtuple(
"ArgumentType", ["id", "name", "sizes", "fn"])):
"""Represents a single argument type.
Attributes:
id: The argument id. This is unique.
name: The name of the argument, also unique.
sizes: The max+1 of each of the dimensions this argument takes.
fn: The function to convert the list of integers into something more
meaningful to be set in the protos to send to the game.
"""
__slots__ = ()
def __str__(self):
return "%s/%s %s" % (self.id, self.name, list(self.sizes))
@classmethod
def enum(cls, options):
"""Create an ArgumentType where you choose one of a set of known values."""
return cls(-1, "<none>", (len(options),), lambda a: options[a[0]])
@classmethod
def scalar(cls, value):
"""Create an ArgumentType with a single scalar in range(value)."""
return cls(-1, "<none>", (value,), lambda a: a[0])
@classmethod
def point(cls): # No range because it's unknown at this time.
"""Create an ArgumentType that is represented by a point.Point."""
return cls(-1, "<none>", (0, 0), lambda a: point.Point(*a).floor())
@classmethod
def spec(cls, id_, name, sizes):
"""Create an ArgumentType to be used in ValidActions."""
return cls(id_, name, sizes, None)
class Arguments(collections.namedtuple("Arguments", [
"screen", "minimap", "screen2", "queued", "control_group_act",
"control_group_id", "select_point_act", "select_add", "select_unit_act",
"select_unit_id", "select_worker", "build_queue_id", "unload_id"])):
"""The full list of argument types.
Take a look at TYPES and FUNCTION_TYPES for more details.
Attributes:
screen: A point on the screen.
minimap: A point on the minimap.
screen2: The second point for a rectangle. This is needed so that no
function takes the same type twice.
queued: Whether the action should be done now or later.
control_group_act: What to do with the control group.
control_group_id: Which control group to do it with.
select_point_act: What to do with the unit at the point.
select_add: Whether to add the unit to the selection or replace it.
select_unit_act: What to do when selecting a unit by id.
select_unit_id: Which unit to select by id.
select_worker: What to do when selecting a worker.
build_queue_id: Which build queue index to target.
unload_id: Which unit to target in a transport/nydus/command center.
"""
  __slots__ = ()
@classmethod
def types(cls, **kwargs):
"""Create an Arguments of the possible Types."""
named = {name: type_._replace(id=Arguments._fields.index(name), name=name)
for name, type_ in six.iteritems(kwargs)}
return cls(**named)
# The list of known types.
TYPES = Arguments.types(
screen=ArgumentType.point(),
minimap=ArgumentType.point(),
screen2=ArgumentType.point(),
queued=ArgumentType.enum([False, True]), # (now vs add to queue)
control_group_act=ArgumentType.enum([
sc_ui.ActionControlGroup.Recall,
sc_ui.ActionControlGroup.Set,
sc_ui.ActionControlGroup.Append,
sc_ui.ActionControlGroup.SetAndSteal,
sc_ui.ActionControlGroup.AppendAndSteal,
]),
control_group_id=ArgumentType.scalar(10),
select_point_act=ArgumentType.enum([
sc_spatial.ActionSpatialUnitSelectionPoint.Select,
sc_spatial.ActionSpatialUnitSelectionPoint.Toggle,
sc_spatial.ActionSpatialUnitSelectionPoint.AllType,
sc_spatial.ActionSpatialUnitSelectionPoint.AddAllType,
]),
select_add=ArgumentType.enum([False, True]), # (select vs select_add)
select_unit_act=ArgumentType.enum([
sc_ui.ActionMultiPanel.SingleSelect,
sc_ui.ActionMultiPanel.DeselectUnit,
sc_ui.ActionMultiPanel.SelectAllOfType,
sc_ui.ActionMultiPanel.DeselectAllOfType,
]),
select_unit_id=ArgumentType.scalar(500), # Depends on current selection.
select_worker=ArgumentType.enum([
sc_ui.ActionSelectIdleWorker.Set,
sc_ui.ActionSelectIdleWorker.Add,
sc_ui.ActionSelectIdleWorker.All,
sc_ui.ActionSelectIdleWorker.AddAll,
]),
build_queue_id=ArgumentType.scalar(10), # Depends on current build queue.
unload_id=ArgumentType.scalar(500), # Depends on the current loaded units.
)
# Which argument types do each function need?
FUNCTION_TYPES = {
no_op: [],
move_camera: [TYPES.minimap],
select_point: [TYPES.select_point_act, TYPES.screen],
select_rect: [TYPES.select_add, TYPES.screen, TYPES.screen2],
select_unit: [TYPES.select_unit_act, TYPES.select_unit_id],
control_group: [TYPES.control_group_act, TYPES.control_group_id],
select_idle_worker: [TYPES.select_worker],
select_army: [TYPES.select_add],
select_warp_gates: [TYPES.select_add],
select_larva: [],
unload: [TYPES.unload_id],
build_queue: [TYPES.build_queue_id],
cmd_quick: [TYPES.queued],
cmd_screen: [TYPES.queued, TYPES.screen],
cmd_minimap: [TYPES.queued, TYPES.minimap],
autocast: [],
}
# Which ones need an ability?
ABILITY_FUNCTIONS = {cmd_quick, cmd_screen, cmd_minimap, autocast}
# Which ones require a point?
POINT_REQUIRED_FUNCS = {
False: {cmd_quick, autocast},
True: {cmd_screen, cmd_minimap, autocast}}
always = lambda _: True
class Function(collections.namedtuple(
"Function", ["id", "name", "ability_id", "general_id", "function_type",
"args", "avail_fn"])):
"""Represents a function action.
Attributes:
id: The function id, which is what the agent will use.
name: The name of the function. Should be unique.
ability_id: The ability id to pass to sc2.
general_id: 0 for normal abilities, and the ability_id of another ability if
it can be represented by a more general action.
function_type: One of the functions in FUNCTION_TYPES for how to construct
the sc2 action proto out of python types.
args: A list of the types of args passed to function_type.
avail_fn: For non-abilities, this function returns whether the function is
valid.
"""
__slots__ = ()
@classmethod
def ui_func(cls, id_, name, function_type, avail_fn=always):
"""Define a function representing a ui action."""
return cls(id_, name, 0, 0, function_type, FUNCTION_TYPES[function_type],
avail_fn)
@classmethod
def ability(cls, id_, name, function_type, ability_id, general_id=0):
"""Define a function represented as a game ability."""
assert function_type in ABILITY_FUNCTIONS
return cls(id_, name, ability_id, general_id, function_type,
FUNCTION_TYPES[function_type], None)
@classmethod
def spec(cls, id_, name, args):
"""Create a Function to be used in ValidActions."""
return cls(id_, name, None, None, None, args, None)
def __hash__(self): # So it can go in a set().
return self.id
def __str__(self):
return self.str()
def str(self, space=False):
"""String version. Set space=True to line them all up nicely."""
return "%s/%s (%s)" % (str(self.id).rjust(space and 4),
self.name.ljust(space and 50),
"; ".join(str(a) for a in self.args))
class Functions(object):
"""Represents the full set of functions.
Can't use namedtuple since python3 has a limit of 255 function arguments, so
build something similar.
"""
def __init__(self, functions):
self._func_list = functions
self._func_dict = {f.name: f for f in functions}
if len(self._func_dict) != len(self._func_list):
raise ValueError("Function names must be unique.")
def __getattr__(self, name):
return self._func_dict[name]
def __getitem__(self, key):
if isinstance(key, numbers.Number):
return self._func_list[key]
return self._func_dict[key]
def __iter__(self):
return iter(self._func_list)
def __len__(self):
return len(self._func_list)
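# Illustrative example (not part of the original module): the FUNCTIONS container
# defined below supports lookup by attribute, by name and by numeric id:
#
#   FUNCTIONS.move_camera        # attribute access
#   FUNCTIONS["move_camera"]     # name lookup via __getitem__
#   FUNCTIONS[1]                 # positional/id lookup (ids match list positions)
#   len(FUNCTIONS)               # total number of defined functions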
# pylint: disable=line-too-long
FUNCTIONS = Functions([
Function.ui_func(0, "no_op", no_op),
Function.ui_func(1, "move_camera", move_camera),
Function.ui_func(2, "select_point", select_point),
Function.ui_func(3, "select_rect", select_rect),
Function.ui_func(4, "select_control_group", control_group),
Function.ui_func(5, "select_unit", select_unit,
lambda obs: obs.ui_data.HasField("multi")),
Function.ui_func(6, "select_idle_worker", select_idle_worker,
lambda obs: obs.player_common.idle_worker_count > 0),
Function.ui_func(7, "select_army", select_army,
lambda obs: obs.player_common.army_count > 0),
Function.ui_func(8, "select_warp_gates", select_warp_gates,
lambda obs: obs.player_common.warp_gate_count > 0),
Function.ui_func(9, "select_larva", select_larva,
lambda obs: obs.player_common.larva_count > 0),
Function.ui_func(10, "unload", unload,
lambda obs: obs.ui_data.HasField("cargo")),
Function.ui_func(11, "build_queue", build_queue,
lambda obs: obs.ui_data.HasField("production")),
# Everything below here is generated with gen_actions.py
Function.ability(12, "Attack_screen", cmd_screen, 3674),
Function.ability(13, "Attack_minimap", cmd_minimap, 3674),
Function.ability(14, "Attack_Attack_screen", cmd_screen, 23, 3674),
Function.ability(15, "Attack_Attack_minimap", cmd_minimap, 23, 3674),
Function.ability(16, "Attack_AttackBuilding_screen", cmd_screen, 2048, 3674),
Function.ability(17, "Attack_AttackBuilding_minimap", cmd_minimap, 2048, 3674),
Function.ability(18, "Attack_Redirect_screen", cmd_screen, 1682, 3674),
Function.ability(19, "Scan_Move_screen", cmd_screen, 19, 3674),
Function.ability(20, "Scan_Move_minimap", cmd_minimap, 19, 3674),
Function.ability(21, "Behavior_BuildingAttackOff_quick", cmd_quick, 2082),
Function.ability(22, "Behavior_BuildingAttackOn_quick", cmd_quick, 2081),
Function.ability(23, "Behavior_CloakOff_quick", cmd_quick, 3677),
Function.ability(24, "Behavior_CloakOff_Banshee_quick", cmd_quick, 393, 3677),
Function.ability(25, "Behavior_CloakOff_Ghost_quick", cmd_quick, 383, 3677),
Function.ability(26, "Behavior_CloakOn_quick", cmd_quick, 3676),
Function.ability(27, "Behavior_CloakOn_Banshee_quick", cmd_quick, 392, 3676),
Function.ability(28, "Behavior_CloakOn_Ghost_quick", cmd_quick, 382, 3676),
Function.ability(29, "Behavior_GenerateCreepOff_quick", cmd_quick, 1693),
Function.ability(30, "Behavior_GenerateCreepOn_quick", cmd_quick, 1692),
Function.ability(31, "Behavior_HoldFireOff_quick", cmd_quick, 3689),
Function.ability(32, "Behavior_HoldFireOff_Ghost_quick", cmd_quick, 38, 3689),
Function.ability(33, "Behavior_HoldFireOff_Lurker_quick", cmd_quick, 2552, 3689),
Function.ability(34, "Behavior_HoldFireOn_quick", cmd_quick, 3688),
Function.ability(35, "Behavior_HoldFireOn_Ghost_quick", cmd_quick, 36, 3688),
Function.ability(36, "Behavior_HoldFireOn_Lurker_quick", cmd_quick, 2550, 3688),
Function.ability(37, "Behavior_PulsarBeamOff_quick", cmd_quick, 2376),
Function.ability(38, "Behavior_PulsarBeamOn_quick", cmd_quick, 2375),
Function.ability(39, "Build_Armory_screen", cmd_screen, 331),
Function.ability(40, "Build_Assimilator_screen", cmd_screen, 882),
Function.ability(41, "Build_BanelingNest_screen", cmd_screen, 1162),
Function.ability(42, "Build_Barracks_screen", cmd_screen, 321),
Function.ability(43, "Build_Bunker_screen", cmd_screen, 324),
Function.ability(44, "Build_CommandCenter_screen", cmd_screen, 318),
Function.ability(45, "Build_CreepTumor_screen", cmd_screen, 3691),
Function.ability(46, "Build_CreepTumor_Queen_screen", cmd_screen, 1694, 3691),
Function.ability(47, "Build_CreepTumor_Tumor_screen", cmd_screen, 1733, 3691),
Function.ability(48, "Build_CyberneticsCore_screen", cmd_screen, 894),
Function.ability(49, "Build_DarkShrine_screen", cmd_screen, 891),
Function.ability(50, "Build_EngineeringBay_screen", cmd_screen, 322),
Function.ability(51, "Build_EvolutionChamber_screen", cmd_screen, 1156),
Function.ability(52, "Build_Extractor_screen", cmd_screen, 1154),
Function.ability(53, "Build_Factory_screen", cmd_screen, 328),
Function.ability(54, "Build_FleetBeacon_screen", cmd_screen, 885),
Function.ability(55, "Build_Forge_screen", cmd_screen, 884),
Function.ability(56, "Build_FusionCore_screen", cmd_screen, 333),
Function.ability(57, "Build_Gateway_screen", cmd_screen, 883),
Function.ability(58, "Build_GhostAcademy_screen", cmd_screen, 327),
Function.ability(59, "Build_Hatchery_screen", cmd_screen, 1152),
Function.ability(60, "Build_HydraliskDen_screen", cmd_screen, 1157),
Function.ability(61, "Build_InfestationPit_screen", cmd_screen, 1160),
Function.ability(62, "Build_Interceptors_quick", cmd_quick, 1042),
Function.ability(63, "Build_Interceptors_autocast", autocast, 1042),
Function.ability(64, "Build_MissileTurret_screen", cmd_screen, 323),
Function.ability(65, "Build_Nexus_screen", cmd_screen, 880),
Function.ability(66, "Build_Nuke_quick", cmd_quick, 710),
Function.ability(67, "Build_NydusNetwork_screen", cmd_screen, 1161),
Function.ability(68, "Build_NydusWorm_screen", cmd_screen, 1768),
Function.ability(69, "Build_PhotonCannon_screen", cmd_screen, 887),
Function.ability(70, "Build_Pylon_screen", cmd_screen, 881),
Function.ability(71, "Build_Reactor_quick", cmd_quick, 3683),
Function.ability(72, "Build_Reactor_screen", cmd_screen, 3683),
Function.ability(73, "Build_Reactor_Barracks_quick", cmd_quick, 422, 3683),
Function.ability(74, "Build_Reactor_Barracks_screen", cmd_screen, 422, 3683),
Function.ability(75, "Build_Reactor_Factory_quick", cmd_quick, 455, 3683),
Function.ability(76, "Build_Reactor_Factory_screen", cmd_screen, 455, 3683),
Function.ability(77, "Build_Reactor_Starport_quick", cmd_quick, 488, 3683),
Function.ability(78, "Build_Reactor_Starport_screen", cmd_screen, 488, 3683),
Function.ability(79, "Build_Refinery_screen", cmd_screen, 320),
Function.ability(80, "Build_RoachWarren_screen", cmd_screen, 1165),
Function.ability(81, "Build_RoboticsBay_screen", cmd_screen, 892),
Function.ability(82, "Build_RoboticsFacility_screen", cmd_screen, 893),
Function.ability(83, "Build_SensorTower_screen", cmd_screen, 326),
Function.ability(84, "Build_SpawningPool_screen", cmd_screen, 1155),
Function.ability(85, "Build_SpineCrawler_screen", cmd_screen, 1166),
Function.ability(86, "Build_Spire_screen", cmd_screen, 1158),
Function.ability(87, "Build_SporeCrawler_screen", cmd_screen, 1167),
Function.ability(88, "Build_Stargate_screen", cmd_screen, 889),
Function.ability(89, "Build_Starport_screen", cmd_screen, 329),
Function.ability(90, "Build_StasisTrap_screen", cmd_screen, 2505),
Function.ability(91, "Build_SupplyDepot_screen", cmd_screen, 319),
Function.ability(92, "Build_TechLab_quick", cmd_quick, 3682),
Function.ability(93, "Build_TechLab_screen", cmd_screen, 3682),
Function.ability(94, "Build_TechLab_Barracks_quick", cmd_quick, 421, 3682),
Function.ability(95, "Build_TechLab_Barracks_screen", cmd_screen, 421, 3682),
Function.ability(96, "Build_TechLab_Factory_quick", cmd_quick, 454, 3682),
Function.ability(97, "Build_TechLab_Factory_screen", cmd_screen, 454, 3682),
Function.ability(98, "Build_TechLab_Starport_quick", cmd_quick, 487, 3682),
Function.ability(99, "Build_TechLab_Starport_screen", cmd_screen, 487, 3682),
Function.ability(100, "Build_TemplarArchive_screen", cmd_screen, 890),
Function.ability(101, "Build_TwilightCouncil_screen", cmd_screen, 886),
Function.ability(102, "Build_UltraliskCavern_screen", cmd_screen, 1159),
Function.ability(103, "BurrowDown_quick", cmd_quick, 3661),
Function.ability(104, "BurrowDown_Baneling_quick", cmd_quick, 1374, 3661),
Function.ability(105, "BurrowDown_Drone_quick", cmd_quick, 1378, 3661),
Function.ability(106, "BurrowDown_Hydralisk_quick", cmd_quick, 1382, 3661),
Function.ability(107, "BurrowDown_Infestor_quick", cmd_quick, 1444, 3661),
Function.ability(108, "BurrowDown_InfestorTerran_quick", cmd_quick, 1394, 3661),
Function.ability(109, "BurrowDown_Lurker_quick", cmd_quick, 2108, 3661),
Function.ability(110, "BurrowDown_Queen_quick", cmd_quick, 1433, 3661),
Function.ability(111, "BurrowDown_Ravager_quick", cmd_quick, 2340, 3661),
Function.ability(112, "BurrowDown_Roach_quick", cmd_quick, 1386, 3661),
Function.ability(113, "BurrowDown_SwarmHost_quick", cmd_quick, 2014, 3661),
Function.ability(114, "BurrowDown_Ultralisk_quick", cmd_quick, 1512, 3661),
Function.ability(115, "BurrowDown_WidowMine_quick", cmd_quick, 2095, 3661),
Function.ability(116, "BurrowDown_Zergling_quick", cmd_quick, 1390, 3661),
Function.ability(117, "BurrowUp_quick", cmd_quick, 3662),
Function.ability(118, "BurrowUp_autocast", autocast, 3662),
Function.ability(119, "BurrowUp_Baneling_quick", cmd_quick, 1376, 3662),
Function.ability(120, "BurrowUp_Baneling_autocast", autocast, 1376, 3662),
Function.ability(121, "BurrowUp_Drone_quick", cmd_quick, 1380, 3662),
Function.ability(122, "BurrowUp_Hydralisk_quick", cmd_quick, 1384, 3662),
Function.ability(123, "BurrowUp_Hydralisk_autocast", autocast, 1384, 3662),
Function.ability(124, "BurrowUp_Infestor_quick", cmd_quick, 1446, 3662),
Function.ability(125, "BurrowUp_InfestorTerran_quick", cmd_quick, 1396, 3662),
Function.ability(126, "BurrowUp_InfestorTerran_autocast", autocast, 1396, 3662),
Function.ability(127, "BurrowUp_Lurker_quick", cmd_quick, 2110, 3662),
Function.ability(128, "BurrowUp_Queen_quick", cmd_quick, 1435, 3662),
Function.ability(129, "BurrowUp_Queen_autocast", autocast, 1435, 3662),
Function.ability(130, "BurrowUp_Ravager_quick", cmd_quick, 2342, 3662),
Function.ability(131, "BurrowUp_Ravager_autocast", autocast, 2342, 3662),
Function.ability(132, "BurrowUp_Roach_quick", cmd_quick, 1388, 3662),
Function.ability(133, "BurrowUp_Roach_autocast", autocast, 1388, 3662),
Function.ability(134, "BurrowUp_SwarmHost_quick", cmd_quick, 2016, 3662),
Function.ability(135, "BurrowUp_Ultralisk_quick", cmd_quick, 1514, 3662),
Function.ability(136, "BurrowUp_Ultralisk_autocast", autocast, 1514, 3662),
Function.ability(137, "BurrowUp_WidowMine_quick", cmd_quick, 2097, 3662),
Function.ability(138, "BurrowUp_Zergling_quick", cmd_quick, 1392, 3662),
Function.ability(139, "BurrowUp_Zergling_autocast", autocast, 1392, 3662),
Function.ability(140, "Cancel_quick", cmd_quick, 3659),
Function.ability(141, "Cancel_AdeptPhaseShift_quick", cmd_quick, 2594, 3659),
Function.ability(142, "Cancel_AdeptShadePhaseShift_quick", cmd_quick, 2596, 3659),
Function.ability(143, "Cancel_BarracksAddOn_quick", cmd_quick, 451, 3659),
Function.ability(144, "Cancel_BuildInProgress_quick", cmd_quick, 314, 3659),
Function.ability(145, "Cancel_CreepTumor_quick", cmd_quick, 1763, 3659),
Function.ability(146, "Cancel_FactoryAddOn_quick", cmd_quick, 484, 3659),
Function.ability(147, "Cancel_GravitonBeam_quick", cmd_quick, 174, 3659),
Function.ability(148, "Cancel_LockOn_quick", cmd_quick, 2354, 3659),
Function.ability(149, "Cancel_MorphBroodlord_quick", cmd_quick, 1373, 3659),
Function.ability(150, "Cancel_MorphGreaterSpire_quick", cmd_quick, 1221, 3659),
Function.ability(151, "Cancel_MorphHive_quick", cmd_quick, 1219, 3659),
Function.ability(152, "Cancel_MorphLair_quick", cmd_quick, 1217, 3659),
Function.ability(153, "Cancel_MorphLurker_quick", cmd_quick, 2333, 3659),
Function.ability(154, "Cancel_MorphLurkerDen_quick", cmd_quick, 2113, 3659),
Function.ability(155, "Cancel_MorphMothership_quick", cmd_quick, 1848, 3659),
Function.ability(156, "Cancel_MorphOrbital_quick", cmd_quick, 1517, 3659),
Function.ability(157, "Cancel_MorphOverlordTransport_quick", cmd_quick, 2709, 3659),
Function.ability(158, "Cancel_MorphOverseer_quick", cmd_quick, 1449, 3659),
Function.ability(159, "Cancel_MorphPlanetaryFortress_quick", cmd_quick, 1451, 3659),
Function.ability(160, "Cancel_MorphRavager_quick", cmd_quick, 2331, 3659),
Function.ability(161, "Cancel_MorphThorExplosiveMode_quick", cmd_quick, 2365, 3659),
Function.ability(162, "Cancel_NeuralParasite_quick", cmd_quick, 250, 3659),
Function.ability(163, "Cancel_Nuke_quick", cmd_quick, 1623, 3659),
Function.ability(164, "Cancel_SpineCrawlerRoot_quick", cmd_quick, 1730, 3659),
Function.ability(165, "Cancel_SporeCrawlerRoot_quick", cmd_quick, 1732, 3659),
Function.ability(166, "Cancel_StarportAddOn_quick", cmd_quick, 517, 3659),
Function.ability(167, "Cancel_StasisTrap_quick", cmd_quick, 2535, 3659),
Function.ability(168, "Cancel_Last_quick", cmd_quick, 3671),
Function.ability(169, "Cancel_HangarQueue5_quick", cmd_quick, 1038, 3671),
Function.ability(170, "Cancel_Queue1_quick", cmd_quick, 304, 3671),
Function.ability(171, "Cancel_Queue5_quick", cmd_quick, 306, 3671),
Function.ability(172, "Cancel_QueueAddOn_quick", cmd_quick, 312, 3671),
Function.ability(173, "Cancel_QueueCancelToSelection_quick", cmd_quick, 308, 3671),
Function.ability(174, "Cancel_QueuePasive_quick", cmd_quick, 1831, 3671),
Function.ability(175, "Cancel_QueuePassiveCancelToSelection_quick", cmd_quick, 1833, 3671),
Function.ability(176, "Effect_Abduct_screen", cmd_screen, 2067),
Function.ability(177, "Effect_AdeptPhaseShift_screen", cmd_screen, 2544),
Function.ability(178, "Effect_AutoTurret_screen", cmd_screen, 1764),
Function.ability(179, "Effect_BlindingCloud_screen", cmd_screen, 2063),
Function.ability(180, "Effect_Blink_screen", cmd_screen, 3687),
Function.ability(181, "Effect_Blink_Stalker_screen", cmd_screen, 1442, 3687),
Function.ability(182, "Effect_ShadowStride_screen", cmd_screen, 2700, 3687),
Function.ability(183, "Effect_CalldownMULE_screen", cmd_screen, 171),
Function.ability(184, "Effect_CausticSpray_screen", cmd_screen, 2324),
Function.ability(185, "Effect_Charge_screen", cmd_screen, 1819),
Function.ability(186, "Effect_Charge_autocast", autocast, 1819),
Function.ability(187, "Effect_ChronoBoost_screen", cmd_screen, 261),
Function.ability(188, "Effect_Contaminate_screen", cmd_screen, 1825),
Function.ability(189, "Effect_CorrosiveBile_screen", cmd_screen, 2338),
Function.ability(190, "Effect_EMP_screen", cmd_screen, 1628),
Function.ability(191, "Effect_Explode_quick", cmd_quick, 42),
Function.ability(192, "Effect_Feedback_screen", cmd_screen, 140),
Function.ability(193, "Effect_ForceField_screen", cmd_screen, 1526),
Function.ability(194, "Effect_FungalGrowth_screen", cmd_screen, 74),
Function.ability(195, "Effect_GhostSnipe_screen", cmd_screen, 2714),
Function.ability(196, "Effect_GravitonBeam_screen", cmd_screen, 173),
Function.ability(197, "Effect_GuardianShield_quick", cmd_quick, 76),
Function.ability(198, "Effect_Heal_screen", cmd_screen, 386),
Function.ability(199, "Effect_Heal_autocast", autocast, 386),
Function.ability(200, "Effect_HunterSeekerMissile_screen", cmd_screen, 169),
Function.ability(201, "Effect_ImmortalBarrier_quick", cmd_quick, 2328),
Function.ability(202, "Effect_ImmortalBarrier_autocast", autocast, 2328),
Function.ability(203, "Effect_InfestedTerrans_screen", cmd_screen, 247),
Function.ability(204, "Effect_InjectLarva_screen", cmd_screen, 251),
Function.ability(205, "Effect_KD8Charge_screen", cmd_screen, 2588),
Function.ability(206, "Effect_LockOn_screen", cmd_screen, 2350),
Function.ability(207, "Effect_LocustSwoop_screen", cmd_screen, 2387),
Function.ability(208, "Effect_MassRecall_screen", cmd_screen, 3686),
Function.ability(209, "Effect_MassRecall_Mothership_screen", cmd_screen, 2368, 3686),
Function.ability(210, "Effect_MassRecall_MothershipCore_screen", cmd_screen, 1974, 3686),
Function.ability(211, "Effect_MedivacIgniteAfterburners_quick", cmd_quick, 2116),
Function.ability(212, "Effect_NeuralParasite_screen", cmd_screen, 249),
Function.ability(213, "Effect_NukeCalldown_screen", cmd_screen, 1622),
Function.ability(214, "Effect_OracleRevelation_screen", cmd_screen, 2146),
Function.ability(215, "Effect_ParasiticBomb_screen", cmd_screen, 2542),
Function.ability(216, "Effect_PhotonOvercharge_screen", cmd_screen, 2162),
Function.ability(217, "Effect_PointDefenseDrone_screen", cmd_screen, 144),
Function.ability(218, "Effect_PsiStorm_screen", cmd_screen, 1036),
Function.ability(219, "Effect_PurificationNova_screen", cmd_screen, 2346),
Function.ability(220, "Effect_Repair_screen", cmd_screen, 3685),
Function.ability(221, "Effect_Repair_autocast", autocast, 3685),
Function.ability(222, "Effect_Repair_Mule_screen", cmd_screen, 78, 3685),
Function.ability(223, "Effect_Repair_Mule_autocast", autocast, 78, 3685),
Function.ability(224, "Effect_Repair_SCV_screen", cmd_screen, 316, 3685),
Function.ability(225, "Effect_Repair_SCV_autocast", autocast, 316, 3685),
Function.ability(226, "Effect_Salvage_quick", cmd_quick, 32),
Function.ability(227, "Effect_Scan_screen", cmd_screen, 399),
Function.ability(228, "Effect_SpawnChangeling_quick", cmd_quick, 181),
Function.ability(229, "Effect_SpawnLocusts_screen", cmd_screen, 2704),
Function.ability(230, "Effect_Spray_screen", cmd_screen, 3684),
Function.ability(231, "Effect_Spray_Protoss_screen", cmd_screen, 30, 3684),
Function.ability(232, "Effect_Spray_Terran_screen", cmd_screen, 26, 3684),
Function.ability(233, "Effect_Spray_Zerg_screen", cmd_screen, 28, 3684),
Function.ability(234, "Effect_Stim_quick", cmd_quick, 3675),
Function.ability(235, "Effect_Stim_Marauder_quick", cmd_quick, 253, 3675),
Function.ability(236, "Effect_Stim_Marauder_Redirect_quick", cmd_quick, 1684, 3675),
Function.ability(237, "Effect_Stim_Marine_quick", cmd_quick, 380, 3675),
Function.ability(238, "Effect_Stim_Marine_Redirect_quick", cmd_quick, 1683, 3675),
Function.ability(239, "Effect_SupplyDrop_screen", cmd_screen, 255),
Function.ability(240, "Effect_TacticalJump_screen", cmd_screen, 2358),
Function.ability(241, "Effect_TimeWarp_screen", cmd_screen, 2244),
Function.ability(242, "Effect_Transfusion_screen", cmd_screen, 1664),
Function.ability(243, "Effect_ViperConsume_screen", cmd_screen, 2073),
Function.ability(244, "Effect_VoidRayPrismaticAlignment_quick", cmd_quick, 2393),
Function.ability(245, "Effect_WidowMineAttack_screen", cmd_screen, 2099),
Function.ability(246, "Effect_WidowMineAttack_autocast", autocast, 2099),
Function.ability(247, "Effect_YamatoGun_screen", cmd_screen, 401),
Function.ability(248, "Hallucination_Adept_quick", cmd_quick, 2391),
Function.ability(249, "Hallucination_Archon_quick", cmd_quick, 146),
Function.ability(250, "Hallucination_Colossus_quick", cmd_quick, 148),
Function.ability(251, "Hallucination_Disruptor_quick", cmd_quick, 2389),
Function.ability(252, "Hallucination_HighTemplar_quick", cmd_quick, 150),
Function.ability(253, "Hallucination_Immortal_quick", cmd_quick, 152),
Function.ability(254, "Hallucination_Oracle_quick", cmd_quick, 2114),
Function.ability(255, "Hallucination_Phoenix_quick", cmd_quick, 154),
Function.ability(256, "Hallucination_Probe_quick", cmd_quick, 156),
Function.ability(257, "Hallucination_Stalker_quick", cmd_quick, 158),
Function.ability(258, "Hallucination_VoidRay_quick", cmd_quick, 160),
Function.ability(259, "Hallucination_WarpPrism_quick", cmd_quick, 162),
Function.ability(260, "Hallucination_Zealot_quick", cmd_quick, 164),
Function.ability(261, "Halt_quick", cmd_quick, 3660),
Function.ability(262, "Halt_Building_quick", cmd_quick, 315, 3660),
Function.ability(263, "Halt_TerranBuild_quick", cmd_quick, 348, 3660),
Function.ability(264, "Harvest_Gather_screen", cmd_screen, 3666),
Function.ability(265, "Harvest_Gather_Drone_screen", cmd_screen, 1183, 3666),
Function.ability(266, "Harvest_Gather_Mule_screen", cmd_screen, 166, 3666),
Function.ability(267, "Harvest_Gather_Probe_screen", cmd_screen, 298, 3666),
Function.ability(268, "Harvest_Gather_SCV_screen", cmd_screen, 295, 3666),
Function.ability(269, "Harvest_Return_quick", cmd_quick, 3667),
Function.ability(270, "Harvest_Return_Drone_quick", cmd_quick, 1184, 3667),
Function.ability(271, "Harvest_Return_Mule_quick", cmd_quick, 167, 3667),
Function.ability(272, "Harvest_Return_Probe_quick", cmd_quick, 299, 3667),
Function.ability(273, "Harvest_Return_SCV_quick", cmd_quick, 296, 3667),
Function.ability(274, "HoldPosition_quick", cmd_quick, 18),
Function.ability(275, "Land_screen", cmd_screen, 3678),
Function.ability(276, "Land_Barracks_screen", cmd_screen, 554, 3678),
Function.ability(277, "Land_CommandCenter_screen", cmd_screen, 419, 3678),
Function.ability(278, "Land_Factory_screen", cmd_screen, 520, 3678),
Function.ability(279, "Land_OrbitalCommand_screen", cmd_screen, 1524, 3678),
Function.ability(280, "Land_Starport_screen", cmd_screen, 522, 3678),
Function.ability(281, "Lift_quick", cmd_quick, 3679),
Function.ability(282, "Lift_Barracks_quick", cmd_quick, 452, 3679),
Function.ability(283, "Lift_CommandCenter_quick", cmd_quick, 417, 3679),
Function.ability(284, "Lift_Factory_quick", cmd_quick, 485, 3679),
Function.ability(285, "Lift_OrbitalCommand_quick", cmd_quick, 1522, 3679),
Function.ability(286, "Lift_Starport_quick", cmd_quick, 518, 3679),
Function.ability(287, "Load_screen", cmd_screen, 3668),
Function.ability(288, "Load_Bunker_screen", cmd_screen, 407, 3668),
Function.ability(289, "Load_Medivac_screen", cmd_screen, 394, 3668),
Function.ability(290, "Load_NydusNetwork_screen", cmd_screen, 1437, 3668),
Function.ability(291, "Load_NydusWorm_screen", cmd_screen, 2370, 3668),
Function.ability(292, "Load_Overlord_screen", cmd_screen, 1406, 3668),
Function.ability(293, "Load_WarpPrism_screen", cmd_screen, 911, 3668),
Function.ability(294, "LoadAll_quick", cmd_quick, 3663),
Function.ability(295, "LoadAll_CommandCenter_quick", cmd_quick, 416, 3663),
Function.ability(296, "Morph_Archon_quick", cmd_quick, 1766),
Function.ability(297, "Morph_BroodLord_quick", cmd_quick, 1372),
Function.ability(298, "Morph_Gateway_quick", cmd_quick, 1520),
Function.ability(299, "Morph_GreaterSpire_quick", cmd_quick, 1220),
Function.ability(300, "Morph_Hellbat_quick", cmd_quick, 1998),
Function.ability(301, "Morph_Hellion_quick", cmd_quick, 1978),
Function.ability(302, "Morph_Hive_quick", cmd_quick, 1218),
Function.ability(303, "Morph_Lair_quick", cmd_quick, 1216),
Function.ability(304, "Morph_LiberatorAAMode_quick", cmd_quick, 2560),
Function.ability(305, "Morph_LiberatorAGMode_screen", cmd_screen, 2558),
Function.ability(306, "Morph_Lurker_quick", cmd_quick, 2332),
Function.ability(307, "Morph_LurkerDen_quick", cmd_quick, 2112),
Function.ability(308, "Morph_Mothership_quick", cmd_quick, 1847),
Function.ability(309, "Morph_OrbitalCommand_quick", cmd_quick, 1516),
Function.ability(310, "Morph_OverlordTransport_quick", cmd_quick, 2708),
Function.ability(311, "Morph_Overseer_quick", cmd_quick, 1448),
Function.ability(312, "Morph_PlanetaryFortress_quick", cmd_quick, 1450),
Function.ability(313, "Morph_Ravager_quick", cmd_quick, 2330),
Function.ability(314, "Morph_Root_screen", cmd_screen, 3680),
Function.ability(315, "Morph_SpineCrawlerRoot_screen", cmd_screen, 1729, 3680),
Function.ability(316, "Morph_SporeCrawlerRoot_screen", cmd_screen, 1731, 3680),
Function.ability(317, "Morph_SiegeMode_quick", cmd_quick, 388),
Function.ability(318, "Morph_SupplyDepot_Lower_quick", cmd_quick, 556),
Function.ability(319, "Morph_SupplyDepot_Raise_quick", cmd_quick, 558),
Function.ability(320, "Morph_ThorExplosiveMode_quick", cmd_quick, 2364),
Function.ability(321, "Morph_ThorHighImpactMode_quick", cmd_quick, 2362),
Function.ability(322, "Morph_Unsiege_quick", cmd_quick, 390),
Function.ability(323, "Morph_Uproot_quick", cmd_quick, 3681),
Function.ability(324, "Morph_SpineCrawlerUproot_quick", cmd_quick, 1725, 3681),
Function.ability(325, "Morph_SporeCrawlerUproot_quick", cmd_quick, 1727, 3681),
Function.ability(326, "Morph_VikingAssaultMode_quick", cmd_quick, 403),
Function.ability(327, "Morph_VikingFighterMode_quick", cmd_quick, 405),
Function.ability(328, "Morph_WarpGate_quick", cmd_quick, 1518),
Function.ability(329, "Morph_WarpPrismPhasingMode_quick", cmd_quick, 1528),
Function.ability(330, "Morph_WarpPrismTransportMode_quick", cmd_quick, 1530),
Function.ability(331, "Move_screen", cmd_screen, 16),
Function.ability(332, "Move_minimap", cmd_minimap, 16),
Function.ability(333, "Patrol_screen", cmd_screen, 17),
Function.ability(334, "Patrol_minimap", cmd_minimap, 17),
Function.ability(335, "Rally_Units_screen", cmd_screen, 3673),
Function.ability(336, "Rally_Units_minimap", cmd_minimap, 3673),
Function.ability(337, "Rally_Building_screen", cmd_screen, 195, 3673),
Function.ability(338, "Rally_Building_minimap", cmd_minimap, 195, 3673),
Function.ability(339, "Rally_Hatchery_Units_screen", cmd_screen, 212, 3673),
Function.ability(340, "Rally_Hatchery_Units_minimap", cmd_minimap, 212, 3673),
Function.ability(341, "Rally_Morphing_Unit_screen", cmd_screen, 199, 3673),
Function.ability(342, "Rally_Morphing_Unit_minimap", cmd_minimap, 199, 3673),
Function.ability(343, "Rally_Workers_screen", cmd_screen, 3690),
Function.ability(344, "Rally_Workers_minimap", cmd_minimap, 3690),
Function.ability(345, "Rally_CommandCenter_screen", cmd_screen, 203, 3690),
Function.ability(346, "Rally_CommandCenter_minimap", cmd_minimap, 203, 3690),
Function.ability(347, "Rally_Hatchery_Workers_screen", cmd_screen, 211, 3690),
Function.ability(348, "Rally_Hatchery_Workers_minimap", cmd_minimap, 211, 3690),
Function.ability(349, "Rally_Nexus_screen", cmd_screen, 207, 3690),
Function.ability(350, "Rally_Nexus_minimap", cmd_minimap, 207, 3690),
Function.ability(351, "Research_AdeptResonatingGlaives_quick", cmd_quick, 1594),
Function.ability(352, "Research_AdvancedBallistics_quick", cmd_quick, 805),
Function.ability(353, "Research_BansheeCloakingField_quick", cmd_quick, 790),
Function.ability(354, "Research_BansheeHyperflightRotors_quick", cmd_quick, 799),
Function.ability(355, "Research_BattlecruiserWeaponRefit_quick", cmd_quick, 1532),
Function.ability(356, "Research_Blink_quick", cmd_quick, 1593),
Function.ability(357, "Research_Burrow_quick", cmd_quick, 1225),
Function.ability(358, "Research_CentrifugalHooks_quick", cmd_quick, 1482),
Function.ability(359, "Research_Charge_quick", cmd_quick, 1592),
Function.ability(360, "Research_ChitinousPlating_quick", cmd_quick, 265),
Function.ability(361, "Research_CombatShield_quick", cmd_quick, 731),
Function.ability(362, "Research_ConcussiveShells_quick", cmd_quick, 732),
Function.ability(363, "Research_DrillingClaws_quick", cmd_quick, 764),
Function.ability(364, "Research_ExtendedThermalLance_quick", cmd_quick, 1097),
Function.ability(365, "Research_GlialRegeneration_quick", cmd_quick, 216),
Function.ability(366, "Research_GraviticBooster_quick", cmd_quick, 1093),
Function.ability(367, "Research_GraviticDrive_quick", cmd_quick, 1094),
Function.ability(368, "Research_GroovedSpines_quick", cmd_quick, 1282),
Function.ability(369, "Research_HiSecAutoTracking_quick", cmd_quick, 650),
Function.ability(370, "Research_HighCapacityFuelTanks_quick", cmd_quick, 804),
Function.ability(371, "Research_InfernalPreigniter_quick", cmd_quick, 761),
Function.ability(372, "Research_InterceptorGravitonCatapult_quick", cmd_quick, 44),
Function.ability(373, "Research_MagFieldLaunchers_quick", cmd_quick, 766),
Function.ability(374, "Research_MuscularAugments_quick", cmd_quick, 1283),
Function.ability(375, "Research_NeosteelFrame_quick", cmd_quick, 655),
Function.ability(376, "Research_NeuralParasite_quick", cmd_quick, 1455),
Function.ability(377, "Research_PathogenGlands_quick", cmd_quick, 1454),
Function.ability(378, "Research_PersonalCloaking_quick", cmd_quick, 820),
Function.ability(379, "Research_PhoenixAnionPulseCrystals_quick", cmd_quick, 46),
Function.ability(380, "Research_PneumatizedCarapace_quick", cmd_quick, 1223),
Function.ability(381, "Research_ProtossAirArmor_quick", cmd_quick, 3692),
Function.ability(382, "Research_ProtossAirArmorLevel1_quick", cmd_quick, 1565, 3692),
Function.ability(383, "Research_ProtossAirArmorLevel2_quick", cmd_quick, 1566, 3692),
Function.ability(384, "Research_ProtossAirArmorLevel3_quick", cmd_quick, 1567, 3692),
Function.ability(385, "Research_ProtossAirWeapons_quick", cmd_quick, 3693),
Function.ability(386, "Research_ProtossAirWeaponsLevel1_quick", cmd_quick, 1562, 3693),
Function.ability(387, "Research_ProtossAirWeaponsLevel2_quick", cmd_quick, 1563, 3693),
Function.ability(388, "Research_ProtossAirWeaponsLevel3_quick", cmd_quick, 1564, 3693),
Function.ability(389, "Research_ProtossGroundArmor_quick", cmd_quick, 3694),
Function.ability(390, "Research_ProtossGroundArmorLevel1_quick", cmd_quick, 1065, 3694),
Function.ability(391, "Research_ProtossGroundArmorLevel2_quick", cmd_quick, 1066, 3694),
Function.ability(392, "Research_ProtossGroundArmorLevel3_quick", cmd_quick, 1067, 3694),
Function.ability(393, "Research_ProtossGroundWeapons_quick", cmd_quick, 3695),
Function.ability(394, "Research_ProtossGroundWeaponsLevel1_quick", cmd_quick, 1062, 3695),
Function.ability(395, "Research_ProtossGroundWeaponsLevel2_quick", cmd_quick, 1063, 3695),
Function.ability(396, "Research_ProtossGroundWeaponsLevel3_quick", cmd_quick, 1064, 3695),
Function.ability(397, "Research_ProtossShields_quick", cmd_quick, 3696),
Function.ability(398, "Research_ProtossShieldsLevel1_quick", cmd_quick, 1068, 3696),
Function.ability(399, "Research_ProtossShieldsLevel2_quick", cmd_quick, 1069, 3696),
Function.ability(400, "Research_ProtossShieldsLevel3_quick", cmd_quick, 1070, 3696),
Function.ability(401, "Research_PsiStorm_quick", cmd_quick, 1126),
Function.ability(402, "Research_RavenCorvidReactor_quick", cmd_quick, 793),
Function.ability(403, "Research_RavenRecalibratedExplosives_quick", cmd_quick, 803),
Function.ability(404, "Research_ShadowStrike_quick", cmd_quick, 2720),
Function.ability(405, "Research_Stimpack_quick", cmd_quick, 730),
Function.ability(406, "Research_TerranInfantryArmor_quick", cmd_quick, 3697),
Function.ability(407, "Research_TerranInfantryArmorLevel1_quick", cmd_quick, 656, 3697),
Function.ability(408, "Research_TerranInfantryArmorLevel2_quick", cmd_quick, 657, 3697),
Function.ability(409, "Research_TerranInfantryArmorLevel3_quick", cmd_quick, 658, 3697),
Function.ability(410, "Research_TerranInfantryWeapons_quick", cmd_quick, 3698),
Function.ability(411, "Research_TerranInfantryWeaponsLevel1_quick", cmd_quick, 652, 3698),
Function.ability(412, "Research_TerranInfantryWeaponsLevel2_quick", cmd_quick, 653, 3698),
Function.ability(413, "Research_TerranInfantryWeaponsLevel3_quick", cmd_quick, 654, 3698),
Function.ability(414, "Research_TerranShipWeapons_quick", cmd_quick, 3699),
Function.ability(415, "Research_TerranShipWeaponsLevel1_quick", cmd_quick, 861, 3699),
Function.ability(416, "Research_TerranShipWeaponsLevel2_quick", cmd_quick, 862, 3699),
Function.ability(417, "Research_TerranShipWeaponsLevel3_quick", cmd_quick, 863, 3699),
Function.ability(418, "Research_TerranStructureArmorUpgrade_quick", cmd_quick, 651),
Function.ability(419, "Research_TerranVehicleAndShipPlating_quick", cmd_quick, 3700),
Function.ability(420, "Research_TerranVehicleAndShipPlatingLevel1_quick", cmd_quick, 864, 3700),
Function.ability(421, "Research_TerranVehicleAndShipPlatingLevel2_quick", cmd_quick, 865, 3700),
Function.ability(422, "Research_TerranVehicleAndShipPlatingLevel3_quick", cmd_quick, 866, 3700),
Function.ability(423, "Research_TerranVehicleWeapons_quick", cmd_quick, 3701),
Function.ability(424, "Research_TerranVehicleWeaponsLevel1_quick", cmd_quick, 855, 3701),
Function.ability(425, "Research_TerranVehicleWeaponsLevel2_quick", cmd_quick, 856, 3701),
Function.ability(426, "Research_TerranVehicleWeaponsLevel3_quick", cmd_quick, 857, 3701),
Function.ability(427, "Research_TunnelingClaws_quick", cmd_quick, 217),
Function.ability(428, "Research_WarpGate_quick", cmd_quick, 1568),
Function.ability(429, "Research_ZergFlyerArmor_quick", cmd_quick, 3702),
Function.ability(430, "Research_ZergFlyerArmorLevel1_quick", cmd_quick, 1315, 3702),
Function.ability(431, "Research_ZergFlyerArmorLevel2_quick", cmd_quick, 1316, 3702),
Function.ability(432, "Research_ZergFlyerArmorLevel3_quick", cmd_quick, 1317, 3702),
Function.ability(433, "Research_ZergFlyerAttack_quick", cmd_quick, 3703),
Function.ability(434, "Research_ZergFlyerAttackLevel1_quick", cmd_quick, 1312, 3703),
Function.ability(435, "Research_ZergFlyerAttackLevel2_quick", cmd_quick, 1313, 3703),
Function.ability(436, "Research_ZergFlyerAttackLevel3_quick", cmd_quick, 1314, 3703),
Function.ability(437, "Research_ZergGroundArmor_quick", cmd_quick, 3704),
Function.ability(438, "Research_ZergGroundArmorLevel1_quick", cmd_quick, 1189, 3704),
Function.ability(439, "Research_ZergGroundArmorLevel2_quick", cmd_quick, 1190, 3704),
Function.ability(440, "Research_ZergGroundArmorLevel3_quick", cmd_quick, 1191, 3704),
Function.ability(441, "Research_ZergMeleeWeapons_quick", cmd_quick, 3705),
Function.ability(442, "Research_ZergMeleeWeaponsLevel1_quick", cmd_quick, 1186, 3705),
Function.ability(443, "Research_ZergMeleeWeaponsLevel2_quick", cmd_quick, 1187, 3705),
Function.ability(444, "Research_ZergMeleeWeaponsLevel3_quick", cmd_quick, 1188, 3705),
Function.ability(445, "Research_ZergMissileWeapons_quick", cmd_quick, 3706),
Function.ability(446, "Research_ZergMissileWeaponsLevel1_quick", cmd_quick, 1192, 3706),
Function.ability(447, "Research_ZergMissileWeaponsLevel2_quick", cmd_quick, 1193, 3706),
Function.ability(448, "Research_ZergMissileWeaponsLevel3_quick", cmd_quick, 1194, 3706),
Function.ability(449, "Research_ZerglingAdrenalGlands_quick", cmd_quick, 1252),
Function.ability(450, "Research_ZerglingMetabolicBoost_quick", cmd_quick, 1253),
Function.ability(451, "Smart_screen", cmd_screen, 1),
Function.ability(452, "Smart_minimap", cmd_minimap, 1),
Function.ability(453, "Stop_quick", cmd_quick, 3665),
Function.ability(454, "Stop_Building_quick", cmd_quick, 2057, 3665),
Function.ability(455, "Stop_Redirect_quick", cmd_quick, 1691, 3665),
Function.ability(456, "Stop_Stop_quick", cmd_quick, 4, 3665),
Function.ability(457, "Train_Adept_quick", cmd_quick, 922),
Function.ability(458, "Train_Baneling_quick", cmd_quick, 80),
Function.ability(459, "Train_Banshee_quick", cmd_quick, 621),
Function.ability(460, "Train_Battlecruiser_quick", cmd_quick, 623),
Function.ability(461, "Train_Carrier_quick", cmd_quick, 948),
Function.ability(462, "Train_Colossus_quick", cmd_quick, 978),
Function.ability(463, "Train_Corruptor_quick", cmd_quick, 1353),
Function.ability(464, "Train_Cyclone_quick", cmd_quick, 597),
Function.ability(465, "Train_DarkTemplar_quick", cmd_quick, 920),
Function.ability(466, "Train_Disruptor_quick", cmd_quick, 994),
Function.ability(467, "Train_Drone_quick", cmd_quick, 1342),
Function.ability(468, "Train_Ghost_quick", cmd_quick, 562),
Function.ability(469, "Train_Hellbat_quick", cmd_quick, 596),
Function.ability(470, "Train_Hellion_quick", cmd_quick, 595),
Function.ability(471, "Train_HighTemplar_quick", cmd_quick, 919),
Function.ability(472, "Train_Hydralisk_quick", cmd_quick, 1345),
Function.ability(473, "Train_Immortal_quick", cmd_quick, 979),
Function.ability(474, "Train_Infestor_quick", cmd_quick, 1352),
Function.ability(475, "Train_Liberator_quick", cmd_quick, 626),
Function.ability(476, "Train_Marauder_quick", cmd_quick, 563),
Function.ability(477, "Train_Marine_quick", cmd_quick, 560),
Function.ability(478, "Train_Medivac_quick", cmd_quick, 620),
Function.ability(479, "Train_MothershipCore_quick", cmd_quick, 1853),
Function.ability(480, "Train_Mutalisk_quick", cmd_quick, 1346),
Function.ability(481, "Train_Observer_quick", cmd_quick, 977),
Function.ability(482, "Train_Oracle_quick", cmd_quick, 954),
Function.ability(483, "Train_Overlord_quick", cmd_quick, 1344),
Function.ability(484, "Train_Phoenix_quick", cmd_quick, 946),
Function.ability(485, "Train_Probe_quick", cmd_quick, 1006),
Function.ability(486, "Train_Queen_quick", cmd_quick, 1632),
Function.ability(487, "Train_Raven_quick", cmd_quick, 622),
Function.ability(488, "Train_Reaper_quick", cmd_quick, 561),
Function.ability(489, "Train_Roach_quick", cmd_quick, 1351),
Function.ability(490, "Train_SCV_quick", cmd_quick, 524),
Function.ability(491, "Train_Sentry_quick", cmd_quick, 921),
Function.ability(492, "Train_SiegeTank_quick", cmd_quick, 591),
Function.ability(493, "Train_Stalker_quick", cmd_quick, 917),
Function.ability(494, "Train_SwarmHost_quick", cmd_quick, 1356),
Function.ability(495, "Train_Tempest_quick", cmd_quick, 955),
Function.ability(496, "Train_Thor_quick", cmd_quick, 594),
Function.ability(497, "Train_Ultralisk_quick", cmd_quick, 1348),
Function.ability(498, "Train_VikingFighter_quick", cmd_quick, 624),
Function.ability(499, "Train_Viper_quick", cmd_quick, 1354),
Function.ability(500, "Train_VoidRay_quick", cmd_quick, 950),
Function.ability(501, "Train_WarpPrism_quick", cmd_quick, 976),
Function.ability(502, "Train_WidowMine_quick", cmd_quick, 614),
Function.ability(503, "Train_Zealot_quick", cmd_quick, 916),
Function.ability(504, "Train_Zergling_quick", cmd_quick, 1343),
Function.ability(505, "TrainWarp_Adept_screen", cmd_screen, 1419),
Function.ability(506, "TrainWarp_DarkTemplar_screen", cmd_screen, 1417),
Function.ability(507, "TrainWarp_HighTemplar_screen", cmd_screen, 1416),
Function.ability(508, "TrainWarp_Sentry_screen", cmd_screen, 1418),
Function.ability(509, "TrainWarp_Stalker_screen", cmd_screen, 1414),
Function.ability(510, "TrainWarp_Zealot_screen", cmd_screen, 1413),
Function.ability(511, "UnloadAll_quick", cmd_quick, 3664),
Function.ability(512, "UnloadAll_Bunker_quick", cmd_quick, 408, 3664),
Function.ability(513, "UnloadAll_CommandCenter_quick", cmd_quick, 413, 3664),
Function.ability(514, "UnloadAll_NydasNetwork_quick", cmd_quick, 1438, 3664),
Function.ability(515, "UnloadAll_NydusWorm_quick", cmd_quick, 2371, 3664),
Function.ability(516, "UnloadAllAt_screen", cmd_screen, 3669),
Function.ability(517, "UnloadAllAt_minimap", cmd_minimap, 3669),
Function.ability(518, "UnloadAllAt_Medivac_screen", cmd_screen, 396, 3669),
Function.ability(519, "UnloadAllAt_Medivac_minimap", cmd_minimap, 396, 3669),
Function.ability(520, "UnloadAllAt_Overlord_screen", cmd_screen, 1408, 3669),
Function.ability(521, "UnloadAllAt_Overlord_minimap", cmd_minimap, 1408, 3669),
Function.ability(522, "UnloadAllAt_WarpPrism_screen", cmd_screen, 913, 3669),
Function.ability(523, "UnloadAllAt_WarpPrism_minimap", cmd_minimap, 913, 3669),
])
# pylint: enable=line-too-long
# Some indexes to support features.py and action conversion.
ABILITY_IDS = collections.defaultdict(set) # {ability_id: {funcs}}
for func in FUNCTIONS:
if func.ability_id >= 0:
ABILITY_IDS[func.ability_id].add(func)
ABILITY_IDS = {k: frozenset(v) for k, v in six.iteritems(ABILITY_IDS)}
FUNCTIONS_AVAILABLE = {f.id: f for f in FUNCTIONS if f.avail_fn}
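# Illustrative example (not part of the original module): ABILITY_IDS maps an sc2
# ability id to every Function that can issue it, and FUNCTIONS_AVAILABLE keeps
# just the ui functions (the ones carrying an avail_fn):
#
#   ABILITY_IDS[3674]                         # frozenset({Attack_screen, Attack_minimap})
#   sorted(f.id for f in ABILITY_IDS[3674])   # [12, 13]
#   0 in FUNCTIONS_AVAILABLE                  # True -> no_op is gated by an avail_fn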
class FunctionCall(collections.namedtuple(
"FunctionCall", ["function", "arguments"])):
"""Represents a function call action.
Attributes:
function: Store the function id, eg 2 for select_point.
arguments: The list of arguments for that function, each being a list of
ints. For select_point this could be: [[0], [23, 38]].
"""
__slots__ = ()
@classmethod
def all_arguments(cls, function, arguments):
"""Helper function for creating `FunctionCall`s with `Arguments`.
Args:
function: The value to store for the action function.
arguments: The values to store for the arguments of the action. Can either
be an `Arguments` object, a `dict`, or an iterable. If a `dict` or an
iterable is provided, the values will be unpacked into an `Arguments`
object.
Returns:
A new `FunctionCall` instance.
"""
if isinstance(arguments, dict):
arguments = Arguments(**arguments)
elif not isinstance(arguments, Arguments):
arguments = Arguments(*arguments)
return cls(function, arguments)
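# Illustrative example (not part of the original module): an agent action is the
# function id plus one list of ints per argument, e.g. select_point with the
# default "select" act at screen coordinates (23, 38):
#
#   FunctionCall(FUNCTIONS.select_point.id, [[0], [23, 38]])
#
# all_arguments additionally accepts a dict or an iterable and packs it into an
# Arguments namedtuple before building the FunctionCall.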
class ValidActions(collections.namedtuple(
"ValidActions", ["types", "functions"])):
"""The set of types and functions that are valid for an agent to use.
Attributes:
types: A namedtuple of the types that the functions require. Unlike TYPES
above, this includes the sizes for screen and minimap.
functions: A namedtuple of all the functions.
"""
__slots__ = ()
| [((3975, 4044), 'collections.namedtuple', 'collections.namedtuple', (['"""ArgumentType"""', "['id', 'name', 'sizes', 'fn']"], {}), "('ArgumentType', ['id', 'name', 'sizes', 'fn'])\n", (3997, 4044), False, 'import collections\n'), ((5264, 5515), 'collections.namedtuple', 'collections.namedtuple', (['"""Arguments"""', "['screen', 'minimap', 'screen2', 'queued', 'control_group_act',\n 'control_group_id', 'select_point_act', 'select_add', 'select_unit_act',\n 'select_unit_id', 'select_worker', 'build_queue_id', 'unload_id']"], {}), "('Arguments', ['screen', 'minimap', 'screen2',\n 'queued', 'control_group_act', 'control_group_id', 'select_point_act',\n 'select_add', 'select_unit_act', 'select_unit_id', 'select_worker',\n 'build_queue_id', 'unload_id'])\n", (5286, 5515), False, 'import collections\n'), ((9310, 9429), 'collections.namedtuple', 'collections.namedtuple', (['"""Function"""', "['id', 'name', 'ability_id', 'general_id', 'function_type', 'args', 'avail_fn']"], {}), "('Function', ['id', 'name', 'ability_id',\n 'general_id', 'function_type', 'args', 'avail_fn'])\n", (9332, 9429), False, 'import collections\n'), ((52246, 52274), 'collections.defaultdict', 'collections.defaultdict', (['set'], {}), '(set)\n', (52269, 52274), False, 'import collections\n'), ((52550, 52615), 'collections.namedtuple', 'collections.namedtuple', (['"""FunctionCall"""', "['function', 'arguments']"], {}), "('FunctionCall', ['function', 'arguments'])\n", (52572, 52615), False, 'import collections\n'), ((53638, 53700), 'collections.namedtuple', 'collections.namedtuple', (['"""ValidActions"""', "['types', 'functions']"], {}), "('ValidActions', ['types', 'functions'])\n", (53660, 53700), False, 'import collections\n'), ((1565, 1592), 'pysc2.lib.point.Rect', 'point.Rect', (['screen', 'screen2'], {}), '(screen, screen2)\n', (1575, 1592), False, 'from pysc2.lib import point\n'), ((52436, 52462), 'six.iteritems', 'six.iteritems', (['ABILITY_IDS'], {}), '(ABILITY_IDS)\n', (52449, 52462), False, 'import six\n'), ((6654, 6675), 'six.iteritems', 'six.iteritems', (['kwargs'], {}), '(kwargs)\n', (6667, 6675), False, 'import six\n'), ((5070, 5085), 'pysc2.lib.point.Point', 'point.Point', (['*a'], {}), '(*a)\n', (5081, 5085), False, 'from pysc2.lib import point\n')] |
ashishpatel26/pywick | pywick/meters/aucmeter.py | 1afffd1c21c2b188836d3599e802146182757bb5 | import numbers
from . import meter
import numpy as np
import torch
class AUCMeter(meter.Meter):
"""
The AUCMeter measures the area under the receiver-operating characteristic
(ROC) curve for binary classification problems. The area under the curve (AUC)
can be interpreted as the probability that, given a randomly selected positive
example and a randomly selected negative example, the positive example is
assigned a higher score by the classification model than the negative example.
The AUCMeter is designed to operate on one-dimensional Tensors `output`
and `target`, where (1) the `output` contains model output scores that ought to
be higher when the model is more convinced that the example should be positively
labeled, and smaller when the model believes the example should be negatively
    labeled (for instance, the output of a sigmoid function); and (2) the `target`
contains only values 0 (for negative examples) and 1 (for positive examples).
"""
def __init__(self):
super(AUCMeter, self).__init__()
self.reset()
def reset(self):
self.scores = torch.DoubleTensor(torch.DoubleStorage()).numpy()
self.targets = torch.LongTensor(torch.LongStorage()).numpy()
def add(self, output, target):
if torch.is_tensor(output):
output = output.cpu().squeeze().numpy()
if torch.is_tensor(target):
target = target.cpu().squeeze().numpy()
elif isinstance(target, numbers.Number):
target = np.asarray([target])
assert np.ndim(output) == 1, \
'wrong output size (1D expected)'
assert np.ndim(target) == 1, \
'wrong target size (1D expected)'
assert output.shape[0] == target.shape[0], \
'number of outputs and targets does not match'
assert np.all(np.add(np.equal(target, 1), np.equal(target, 0))), \
'targets should be binary (0, 1)'
self.scores = np.append(self.scores, output)
self.targets = np.append(self.targets, target)
def value(self):
        # case when the number of elements added is 0
if self.scores.shape[0] == 0:
return 0.5
# sorting the arrays
scores, sortind = torch.sort(torch.from_numpy(self.scores), dim=0, descending=True)
scores = scores.numpy()
sortind = sortind.numpy()
# creating the roc curve
tpr = np.zeros(shape=(scores.size + 1), dtype=np.float64)
fpr = np.zeros(shape=(scores.size + 1), dtype=np.float64)
for i in range(1, scores.size + 1):
if self.targets[sortind[i - 1]] == 1:
tpr[i] = tpr[i - 1] + 1
fpr[i] = fpr[i - 1]
else:
tpr[i] = tpr[i - 1]
fpr[i] = fpr[i - 1] + 1
tpr /= (self.targets.sum() * 1.0)
fpr /= ((self.targets - 1.0).sum() * -1.0)
# calculating area under curve using trapezoidal rule
n = tpr.shape[0]
h = fpr[1:n] - fpr[0:n - 1]
sum_h = np.zeros(fpr.shape)
sum_h[0:n - 1] = h
sum_h[1:n] += h
area = (sum_h * tpr).sum() / 2.0
return (area, tpr, fpr)
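if __name__ == "__main__":
    # Minimal usage sketch (illustrative, not part of the original module): two
    # positive and two negative examples that the scores separate perfectly
    # should give an AUC of 1.0.
    meter = AUCMeter()
    meter.add(torch.tensor([0.9, 0.8, 0.3, 0.1]), torch.tensor([1, 1, 0, 0]))
    area, tpr, fpr = meter.value()
    print("AUC: %.3f" % area)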
| [((1309, 1332), 'torch.is_tensor', 'torch.is_tensor', (['output'], {}), '(output)\n', (1324, 1332), False, 'import torch\n'), ((1397, 1420), 'torch.is_tensor', 'torch.is_tensor', (['target'], {}), '(target)\n', (1412, 1420), False, 'import torch\n'), ((1991, 2021), 'numpy.append', 'np.append', (['self.scores', 'output'], {}), '(self.scores, output)\n', (2000, 2021), True, 'import numpy as np\n'), ((2045, 2076), 'numpy.append', 'np.append', (['self.targets', 'target'], {}), '(self.targets, target)\n', (2054, 2076), True, 'import numpy as np\n'), ((2447, 2496), 'numpy.zeros', 'np.zeros', ([], {'shape': '(scores.size + 1)', 'dtype': 'np.float64'}), '(shape=scores.size + 1, dtype=np.float64)\n', (2455, 2496), True, 'import numpy as np\n'), ((2513, 2562), 'numpy.zeros', 'np.zeros', ([], {'shape': '(scores.size + 1)', 'dtype': 'np.float64'}), '(shape=scores.size + 1, dtype=np.float64)\n', (2521, 2562), True, 'import numpy as np\n'), ((3064, 3083), 'numpy.zeros', 'np.zeros', (['fpr.shape'], {}), '(fpr.shape)\n', (3072, 3083), True, 'import numpy as np\n'), ((1580, 1595), 'numpy.ndim', 'np.ndim', (['output'], {}), '(output)\n', (1587, 1595), True, 'import numpy as np\n'), ((1665, 1680), 'numpy.ndim', 'np.ndim', (['target'], {}), '(target)\n', (1672, 1680), True, 'import numpy as np\n'), ((2278, 2307), 'torch.from_numpy', 'torch.from_numpy', (['self.scores'], {}), '(self.scores)\n', (2294, 2307), False, 'import torch\n'), ((1544, 1564), 'numpy.asarray', 'np.asarray', (['[target]'], {}), '([target])\n', (1554, 1564), True, 'import numpy as np\n'), ((1876, 1895), 'numpy.equal', 'np.equal', (['target', '(1)'], {}), '(target, 1)\n', (1884, 1895), True, 'import numpy as np\n'), ((1897, 1916), 'numpy.equal', 'np.equal', (['target', '(0)'], {}), '(target, 0)\n', (1905, 1916), True, 'import numpy as np\n'), ((1162, 1183), 'torch.DoubleStorage', 'torch.DoubleStorage', ([], {}), '()\n', (1181, 1183), False, 'import torch\n'), ((1233, 1252), 'torch.LongStorage', 'torch.LongStorage', ([], {}), '()\n', (1250, 1252), False, 'import torch\n')] |
liangleslie/core | homeassistant/components/zha/core/channels/lighting.py | cc807b4d597daaaadc92df4a93c6e30da4f570c6 | """Lighting channels module for Zigbee Home Automation."""
from __future__ import annotations
from contextlib import suppress
from zigpy.zcl.clusters import lighting
from .. import registries
from ..const import REPORT_CONFIG_DEFAULT
from .base import ClientChannel, ZigbeeChannel
@registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Ballast.cluster_id)
class Ballast(ZigbeeChannel):
"""Ballast channel."""
@registries.CLIENT_CHANNELS_REGISTRY.register(lighting.Color.cluster_id)
class ColorClientChannel(ClientChannel):
"""Color client channel."""
@registries.BINDABLE_CLUSTERS.register(lighting.Color.cluster_id)
@registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Color.cluster_id)
class ColorChannel(ZigbeeChannel):
"""Color channel."""
CAPABILITIES_COLOR_XY = 0x08
CAPABILITIES_COLOR_TEMP = 0x10
UNSUPPORTED_ATTRIBUTE = 0x86
REPORT_CONFIG = (
{"attr": "current_x", "config": REPORT_CONFIG_DEFAULT},
{"attr": "current_y", "config": REPORT_CONFIG_DEFAULT},
{"attr": "color_temperature", "config": REPORT_CONFIG_DEFAULT},
)
MAX_MIREDS: int = 500
MIN_MIREDS: int = 153
ZCL_INIT_ATTRS = {
"color_mode": False,
"color_temp_physical_min": True,
"color_temp_physical_max": True,
"color_capabilities": True,
"color_loop_active": False,
}
@property
def color_capabilities(self) -> int:
"""Return color capabilities of the light."""
with suppress(KeyError):
return self.cluster["color_capabilities"]
if self.cluster.get("color_temperature") is not None:
return self.CAPABILITIES_COLOR_XY | self.CAPABILITIES_COLOR_TEMP
return self.CAPABILITIES_COLOR_XY
@property
def color_mode(self) -> int | None:
"""Return cached value of the color_mode attribute."""
return self.cluster.get("color_mode")
@property
def color_loop_active(self) -> int | None:
"""Return cached value of the color_loop_active attribute."""
return self.cluster.get("color_loop_active")
@property
def color_temperature(self) -> int | None:
"""Return cached value of color temperature."""
return self.cluster.get("color_temperature")
@property
def current_x(self) -> int | None:
"""Return cached value of the current_x attribute."""
return self.cluster.get("current_x")
@property
def current_y(self) -> int | None:
"""Return cached value of the current_y attribute."""
return self.cluster.get("current_y")
@property
def min_mireds(self) -> int:
"""Return the coldest color_temp that this channel supports."""
return self.cluster.get("color_temp_physical_min", self.MIN_MIREDS)
@property
def max_mireds(self) -> int:
"""Return the warmest color_temp that this channel supports."""
return self.cluster.get("color_temp_physical_max", self.MAX_MIREDS)
| [((1482, 1500), 'contextlib.suppress', 'suppress', (['KeyError'], {}), '(KeyError)\n', (1490, 1500), False, 'from contextlib import suppress\n')] |
PiRAT4/py-pf | pf/queue.py | 7ffdd0a283d4a36fc4c473433d5f79a84eeb5d31 | """Classes to represent Packet Filter's queueing schedulers and statistics."""
import pf._struct
from pf._base import PFObject
from pf.constants import *
from pf._utils import rate2str
__all__ = ["ServiceCurve",
"FlowQueue",
"PFQueue",
"PFQueueStats"]
class ServiceCurve(PFObject):
""" """
_struct_type = pf._struct.pf_queue_scspec
def __init__(self, bandwidth, burst=0, time=0):
""" """
if isinstance(bandwidth, pf._struct.pf_queue_scspec):
self._from_struct(bandwidth)
else:
self.bandwidth = bandwidth
self.burst = burst
self.time = time
def _from_struct(self, sc):
""" """
self.bandwidth = self._get_bandwidth(sc.m2)
self.burst = self._get_bandwidth(sc.m1)
self.time = sc.d
def _to_struct(self):
""" """
sc = pf._struct.pf_queue_scspec()
if (isinstance(self.bandwidth, basestring) and
self.bandwidth.endswith("%")):
sc.m2.percent = int(self.bandwidth[:-1])
else:
sc.m2.absolute = self.bandwidth
if (isinstance(self.burst, basestring) and
self.burst.endswith("%")):
sc.m1.percent = int(self.burst[:-1])
else:
sc.m1.absolute = self.burst
sc.d = self.time
return sc
def _get_bandwidth(self, bw):
""" """
return "{}%".format(bw.percent) if bw.percent else bw.absolute
def _str_bandwidth(self, bw):
""" """
return bw if isinstance(bw, basestring) else rate2str(bw)
def _to_string(self):
""" """
s = self._str_bandwidth(self.bandwidth)
if self.time:
s += " burst {}".format(self._str_bandwidth(self.burst))
s += " for {.time}ms".format(self)
return s
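# Illustrative examples (not part of the original module): bandwidth and burst
# accept either an absolute rate in bits per second or a percentage string, and
# _to_struct()/_from_struct() round-trip both forms:
#
#   ServiceCurve(10000000)                        # absolute 10Mb/s curve
#   ServiceCurve("50%", burst="100%", time=200)   # prints "50% burst 100% for 200ms"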
class FlowQueue(PFObject):
""" """
_struct_type = pf._struct.pf_queue_fqspec
def __init__(self, flows, quantum=0, target=0, interval=0):
""" """
if isinstance(flows, pf._struct.pf_queue_fqspec):
self._from_struct(flows)
else:
self.flows = flows
self.quantum = quantum
self.target = target * 1000000
self.interval = interval * 1000000
def _from_struct(self, fq):
""" """
self.flows = fq.flows
self.quantum = fq.quantum
self.target = fq.target
self.interval = fq.interval
def _to_struct(self):
""" """
fq = pf._struct.pf_queue_fqspec()
fq.flows = self.flows
fq.quantum = self.quantum
fq.target = self.target
fq.interval = self.interval
return fq
def _to_string(self):
""" """
s = "flows {.flows}".format(self)
if self.quantum:
s += " quantum {.quantum}".format(self)
if self.interval:
s += " interval {}ms".format(self.interval / 1000000)
if self.target:
s += " target {}ms".format(self.target / 1000000)
return s
class PFQueue(PFObject):
""" """
_struct_type = pf._struct.pf_queuespec
def __init__(self, queue=None, **kw):
""" """
if isinstance(queue, basestring):
queue = pf._struct.pf_queuespec(qname=queue, qlimit=DEFAULT_QLIMIT)
elif queue is None:
queue = pf._struct.pf_queuespec()
super(PFQueue, self).__init__(queue, **kw)
self.stats = PFQueueStats()
def _from_struct(self, q):
""" """
self.qname = q.qname
self.parent = q.parent
self.ifname = q.ifname
self.flags = q.flags
self.qlimit = q.qlimit
self.qid = q.qid
self.parent_qid = q.parent_qid
self.realtime = ServiceCurve(q.realtime)
self.linkshare = ServiceCurve(q.linkshare)
self.upperlimit = ServiceCurve(q.upperlimit)
self.flowqueue = FlowQueue(q.flowqueue)
def _to_struct(self):
""" """
q = pf._struct.pf_queuespec()
q.qname = self.qname
q.parent = self.parent
q.ifname = self.ifname
q.flags = self.flags
q.qlimit = self.qlimit
q.qid = self.qid
q.parent_qid = self.parent_qid
q.realtime = self.realtime._to_struct()
q.linkshare = self.linkshare._to_struct()
q.upperlimit = self.upperlimit._to_struct()
q.flowqueue = self.flowqueue._to_struct()
return q
def _to_string(self):
""" """
s = "queue {.qname}".format(self)
if self.parent and not self.parent.startswith("_"):
s += " parent {.parent}".format(self)
elif self.ifname:
s += " on {.ifname}".format(self)
if self.flags & PFQS_FLOWQUEUE:
s += " {.flowqueue}".format(self)
if self.linkshare.bandwidth or self.linkshare.burst:
s += " bandwidth {}".format(self.linkshare)
if self.realtime.bandwidth:
s += ", min {}".format(self.realtime)
if self.upperlimit.bandwidth:
s += ", max {}".format(self.upperlimit)
if self.flags & PFQS_DEFAULT:
s += " default"
if self.qlimit:
s += " qlimit {.qlimit}".format(self)
return s
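# Illustrative example (not part of the original module): _to_string() renders a
# queue roughly as it would appear in pf.conf; a root queue on em0 with a
# bandwidth cap and an explicit qlimit might print as:
#
#   queue std on em0 bandwidth 10M qlimit 100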
class PFQueueStats(PFObject):
""" """
_struct_type = pf._struct.hfsc_class_stats
def __init__(self, stats=None):
""" """
if stats is None:
stats = pf._struct.hfsc_class_stats()
super(PFQueueStats, self).__init__(stats)
def _from_struct(self, s):
""" """
self.qlength = s.qlength
self.qlimit = s.qlimit
self.packets = (s.xmit_cnt.packets, s.drop_cnt.packets)
self.bytes = (s.xmit_cnt.bytes, s.drop_cnt.bytes)
def _to_string(self):
""" """
s = " [ pkts: {0.packets[0]:10} bytes: {0.bytes[0]:10} " + \
"dropped pkts: {0.packets[1]:6} bytes: {0.bytes[1]:6} ]\n" + \
" [ qlength: {0.qlength:3}/{0.qlimit:3} ]"
return s.format(self)
| [((1594, 1606), 'pf._utils.rate2str', 'rate2str', (['bw'], {}), '(bw)\n', (1602, 1606), False, 'from pf._utils import rate2str\n')] |
pulumi-bot/pulumi-azure-native | sdk/python/pulumi_azure_native/labservices/v20181015/__init__.py | f7b9490b5211544318e455e5cceafe47b628e12c | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# Export this package's modules as members:
from ._enums import *
from .environment import *
from .environment_setting import *
from .gallery_image import *
from .get_environment import *
from .get_environment_setting import *
from .get_gallery_image import *
from .get_global_user_environment import *
from .get_global_user_operation_batch_status import *
from .get_global_user_operation_status import *
from .get_global_user_personal_preferences import *
from .get_lab import *
from .get_lab_account import *
from .get_lab_account_regional_availability import *
from .get_user import *
from .lab import *
from .lab_account import *
from .list_global_user_environments import *
from .list_global_user_labs import *
from .user import *
from ._inputs import *
from . import outputs
def _register_module():
import pulumi
from ... import _utilities
class Module(pulumi.runtime.ResourceModule):
_version = _utilities.get_semver_version()
def version(self):
return Module._version
def construct(self, name: str, typ: str, urn: str) -> pulumi.Resource:
if typ == "azure-native:labservices/v20181015:Environment":
return Environment(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-native:labservices/v20181015:EnvironmentSetting":
return EnvironmentSetting(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-native:labservices/v20181015:GalleryImage":
return GalleryImage(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-native:labservices/v20181015:Lab":
return Lab(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-native:labservices/v20181015:LabAccount":
return LabAccount(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-native:labservices/v20181015:User":
return User(name, pulumi.ResourceOptions(urn=urn))
else:
raise Exception(f"unknown resource type {typ}")
_module_instance = Module()
pulumi.runtime.register_resource_module("azure-native", "labservices/v20181015", _module_instance)
_register_module()
| [((2259, 2361), 'pulumi.runtime.register_resource_module', 'pulumi.runtime.register_resource_module', (['"""azure-native"""', '"""labservices/v20181015"""', '_module_instance'], {}), "('azure-native',\n 'labservices/v20181015', _module_instance)\n", (2298, 2361), False, 'import pulumi\n'), ((1382, 1413), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'urn': 'urn'}), '(urn=urn)\n', (1404, 1413), False, 'import pulumi\n'), ((1544, 1575), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'urn': 'urn'}), '(urn=urn)\n', (1566, 1575), False, 'import pulumi\n'), ((1694, 1725), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'urn': 'urn'}), '(urn=urn)\n', (1716, 1725), False, 'import pulumi\n'), ((1826, 1857), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'urn': 'urn'}), '(urn=urn)\n', (1848, 1857), False, 'import pulumi\n'), ((1972, 2003), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'urn': 'urn'}), '(urn=urn)\n', (1994, 2003), False, 'import pulumi\n'), ((2106, 2137), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'urn': 'urn'}), '(urn=urn)\n', (2128, 2137), False, 'import pulumi\n')] |
ealogar/servicedirectory | servicedirectory/src/sd-api/users/tests/tests_serializers.py | fb4f4bfa8b499b93c03af589ef2f34c08a830b17 | '''
(c) Copyright 2013 Telefonica, I+D. Printed in Spain (Europe). All Rights
Reserved.
The copyright to the software program(s) is property of Telefonica I+D.
The program(s) may be used and or copied only with the express written
consent of Telefonica I+D or in accordance with the terms and conditions
stipulated in the agreement/contract under which the program(s) have
been supplied.
'''
from unittest import TestCase
from mock import MagicMock, patch
from commons.json_schema_validator.schema_reader import SchemaField
from commons.json_schema_validator.schema_reader import SchemaReader
from users.serializers import UserCollectionSerializer
class UserSerializerTests(TestCase):
def setUp(self):
super(UserSerializerTests, self).setUp()
mock_schema_instance = MagicMock(name='mock_schema_instance')
mock_schema_instance.return_value = [
SchemaField(name='username', field_type='string', required=True),
SchemaField(name='password', field_type='string', required=True),
SchemaField(name='is_admin', field_type='boolean', required=True, default=False)
]
mock_get_schema_fields = MagicMock(name='mock_get_schema')
mock_get_schema_fields.return_value = mock_schema_instance
# mock schema instance
schema_reader = SchemaReader()
self.patcher_validate = patch.object(schema_reader, 'validate_object') # @UndefinedVariable
self.patcher_schema = patch.object(schema_reader, # @UndefinedVariable
'get_schema_fields', mock_schema_instance)
self.patcher_schema.start()
self.patcher_validate.start()
def tearDown(self):
self.patcher_schema.stop()
self.patcher_validate.stop()
def test_deserialize_user_should_work(self):
        # We need to do the import here in order for the generic patches to work
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass'})
self.assertEquals(True, serializer.is_valid(), "Serialization invalid")
def test_deserialize_user_invalid_is_admin_should_work(self):
        # We need to do the import here in order for the generic patches to work
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'is_admin': 'si'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
def test_deserialize_user_empty_user_should_give_error_invalid(self):
        # We need to do the import here in order for the generic patches to work
serializer = UserCollectionSerializer(data={'username': '', 'password': 'pass'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"invalid",
serializer.errors['username'][0],
'Invalid error message')
def test_deserialize_user_null_user_should_give_required_error(self):
        # We need to do the import here in order for the generic patches to work
serializer = UserCollectionSerializer(data={'password': 'pass'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"required",
serializer.errors['username'][0],
'Invalid error message')
def test_deserialize_user_large_user_ne_should_give_invalid_error(self):
        # We need to do the import here in order for the generic patches to work
serializer = UserCollectionSerializer(data={'username': 'a' * 600, 'password': 'pass'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"invalid",
serializer.errors['username'][0],
'Invalid error message')
def test_deserialize_user_with_invalid_origins_should_give_error(self):
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'origins': ["????"]})
self.assertEquals(False, serializer.is_valid())
self.assertEquals(u"invalid",
serializer.errors['origins'][0],
'Invalid error message')
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'origins': [" tugo"]})
self.assertEquals(False, serializer.is_valid())
self.assertEquals(u"invalid",
serializer.errors['origins'][0],
'Invalid error message')
def test_deserialize_user_with_invalid_classes_should_give_error(self):
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'classes': ["????"]})
self.assertEquals(False, serializer.is_valid())
self.assertEquals(u"invalid",
serializer.errors['classes'][0],
'Invalid error message')
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'classes': [" sms"]})
self.assertEquals(False, serializer.is_valid())
self.assertEquals(u"invalid",
serializer.errors['classes'][0],
'Invalid error message')
def test_deserialize_user_invalid_username_should_give_error(self):
        # We need to do the import here in order for the generic patches to work
serializer = UserCollectionSerializer(data={'username': 'User.user', 'password': 'pass'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"invalid",
serializer.errors['username'][0],
'Invalid error message')
def test_deserialize_user_invalid_is_admin_should_give_error(self):
        # We need to do the import here in order for the generic patches to work
serializer = UserCollectionSerializer(data={'username': 'usera', 'password': 'pass', 'is_admin': 0})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"invalid",
serializer.errors['is_admin'][0],
'Invalid error message')
| [((790, 828), 'mock.MagicMock', 'MagicMock', ([], {'name': '"""mock_schema_instance"""'}), "(name='mock_schema_instance')\n", (799, 828), False, 'from mock import MagicMock, patch\n'), ((1156, 1189), 'mock.MagicMock', 'MagicMock', ([], {'name': '"""mock_get_schema"""'}), "(name='mock_get_schema')\n", (1165, 1189), False, 'from mock import MagicMock, patch\n'), ((1312, 1326), 'commons.json_schema_validator.schema_reader.SchemaReader', 'SchemaReader', ([], {}), '()\n', (1324, 1326), False, 'from commons.json_schema_validator.schema_reader import SchemaReader\n'), ((1359, 1405), 'mock.patch.object', 'patch.object', (['schema_reader', '"""validate_object"""'], {}), "(schema_reader, 'validate_object')\n", (1371, 1405), False, 'from mock import MagicMock, patch\n'), ((1458, 1528), 'mock.patch.object', 'patch.object', (['schema_reader', '"""get_schema_fields"""', 'mock_schema_instance'], {}), "(schema_reader, 'get_schema_fields', mock_schema_instance)\n", (1470, 1528), False, 'from mock import MagicMock, patch\n'), ((1899, 1970), 'users.serializers.UserCollectionSerializer', 'UserCollectionSerializer', ([], {'data': "{'username': 'user', 'password': 'pass'}"}), "(data={'username': 'user', 'password': 'pass'})\n", (1923, 1970), False, 'from users.serializers import UserCollectionSerializer\n'), ((2205, 2298), 'users.serializers.UserCollectionSerializer', 'UserCollectionSerializer', ([], {'data': "{'username': 'user', 'password': 'pass', 'is_admin': 'si'}"}), "(data={'username': 'user', 'password': 'pass',\n 'is_admin': 'si'})\n", (2229, 2298), False, 'from users.serializers import UserCollectionSerializer\n'), ((2538, 2605), 'users.serializers.UserCollectionSerializer', 'UserCollectionSerializer', ([], {'data': "{'username': '', 'password': 'pass'}"}), "(data={'username': '', 'password': 'pass'})\n", (2562, 2605), False, 'from users.serializers import UserCollectionSerializer\n'), ((2998, 3049), 'users.serializers.UserCollectionSerializer', 'UserCollectionSerializer', ([], {'data': "{'password': 'pass'}"}), "(data={'password': 'pass'})\n", (3022, 3049), False, 'from users.serializers import UserCollectionSerializer\n'), ((3446, 3520), 'users.serializers.UserCollectionSerializer', 'UserCollectionSerializer', ([], {'data': "{'username': 'a' * 600, 'password': 'pass'}"}), "(data={'username': 'a' * 600, 'password': 'pass'})\n", (3470, 3520), False, 'from users.serializers import UserCollectionSerializer\n'), ((3849, 3945), 'users.serializers.UserCollectionSerializer', 'UserCollectionSerializer', ([], {'data': "{'username': 'user', 'password': 'pass', 'origins': ['????']}"}), "(data={'username': 'user', 'password': 'pass',\n 'origins': ['????']})\n", (3873, 3945), False, 'from users.serializers import UserCollectionSerializer\n'), ((4167, 4264), 'users.serializers.UserCollectionSerializer', 'UserCollectionSerializer', ([], {'data': "{'username': 'user', 'password': 'pass', 'origins': [' tugo']}"}), "(data={'username': 'user', 'password': 'pass',\n 'origins': [' tugo']})\n", (4191, 4264), False, 'from users.serializers import UserCollectionSerializer\n'), ((4563, 4659), 'users.serializers.UserCollectionSerializer', 'UserCollectionSerializer', ([], {'data': "{'username': 'user', 'password': 'pass', 'classes': ['????']}"}), "(data={'username': 'user', 'password': 'pass',\n 'classes': ['????']})\n", (4587, 4659), False, 'from users.serializers import UserCollectionSerializer\n'), ((4881, 4977), 'users.serializers.UserCollectionSerializer', 'UserCollectionSerializer', ([], {'data': "{'username': 'user', 
'password': 'pass', 'classes': [' sms']}"}), "(data={'username': 'user', 'password': 'pass',\n 'classes': [' sms']})\n", (4905, 4977), False, 'from users.serializers import UserCollectionSerializer\n'), ((5338, 5414), 'users.serializers.UserCollectionSerializer', 'UserCollectionSerializer', ([], {'data': "{'username': 'User.user', 'password': 'pass'}"}), "(data={'username': 'User.user', 'password': 'pass'})\n", (5362, 5414), False, 'from users.serializers import UserCollectionSerializer\n'), ((5805, 5896), 'users.serializers.UserCollectionSerializer', 'UserCollectionSerializer', ([], {'data': "{'username': 'usera', 'password': 'pass', 'is_admin': 0}"}), "(data={'username': 'usera', 'password': 'pass',\n 'is_admin': 0})\n", (5829, 5896), False, 'from users.serializers import UserCollectionSerializer\n'), ((883, 947), 'commons.json_schema_validator.schema_reader.SchemaField', 'SchemaField', ([], {'name': '"""username"""', 'field_type': '"""string"""', 'required': '(True)'}), "(name='username', field_type='string', required=True)\n", (894, 947), False, 'from commons.json_schema_validator.schema_reader import SchemaField\n'), ((957, 1021), 'commons.json_schema_validator.schema_reader.SchemaField', 'SchemaField', ([], {'name': '"""password"""', 'field_type': '"""string"""', 'required': '(True)'}), "(name='password', field_type='string', required=True)\n", (968, 1021), False, 'from commons.json_schema_validator.schema_reader import SchemaField\n'), ((1031, 1116), 'commons.json_schema_validator.schema_reader.SchemaField', 'SchemaField', ([], {'name': '"""is_admin"""', 'field_type': '"""boolean"""', 'required': '(True)', 'default': '(False)'}), "(name='is_admin', field_type='boolean', required=True, default=False\n )\n", (1042, 1116), False, 'from commons.json_schema_validator.schema_reader import SchemaField\n')] |
abs0lut3pwn4g3/RootersCTF2019-challenges | Web/notifyXAPI/app/src/users/views.py | 397a6fad0b03e55541df06e5103172ae850cd4e5 | ''' User views '''
from datetime import timedelta
from flask import request, jsonify, make_response, redirect, json, render_template
from flask_jwt_extended import (create_access_token, jwt_required)
from flask_restful import Resource
from flask_login import login_user, current_user
from sqlalchemy.exc import IntegrityError, InvalidRequestError
from src import db, api
from .models import User
from .schemas import UserSchema
class UserLoginResource(Resource):
model = User
schema = UserSchema
def get(self):
return make_response(render_template('login.html'))
def post(self):
if request.json:
data = request.json
user = self.model.query.filter(self.model.email == data['email']).first()
if user and self.model.check_password(user, data['password']):
expires = timedelta(days=365)
user = UserSchema(only=('id', 'email', 'is_admin')).dump(user).data
return make_response(
jsonify({'id': user,
'authentication_token': create_access_token(identity=user['id'], expires_delta=expires)}), 200)
else:
return make_response(jsonify({"error": {"code": 400, "msg": "No such user/wrong password."}}), 400)
else:
data = request.form
user = self.model.query.filter(self.model.email == data['email']).first()
if user and self.model.check_password(user, data['password']) and login_user(user):
return make_response(redirect('/admin/', 302))
else:
return make_response(redirect('/api/v1/login', 403))
class UserRegisterResource(Resource):
model = User
schema = UserSchema
def post(self):
data = request.json
if not data:
return make_response(jsonify({'error': 'No data'}), 400)
user = User.query.filter(User.email == data['email']).first()
if user:
return make_response(jsonify({'error': 'User already exists'}), 403)
user, errors = self.schema().load(data)
if errors:
return make_response(jsonify(errors), 400)
try:
user.set_password(data['password'])
db.session.add(user)
db.session.commit()
except (IntegrityError, InvalidRequestError) as e:
print(e)
db.session.rollback()
return make_response(jsonify(error={'code': 400 }), 400)
expires = timedelta(days=365)
return make_response(
jsonify(created_user={'id': user.id,
'user': self.schema(only=('id', 'email', 'is_admin')).dump(user).data,
'authentication_token': create_access_token(identity=user.id,
expires_delta=expires)}), 200)
api.add_resource(UserLoginResource, '/login/', endpoint='login')
api.add_resource(UserRegisterResource, '/register/', endpoint='register') | [((2986, 3050), 'src.api.add_resource', 'api.add_resource', (['UserLoginResource', '"""/login/"""'], {'endpoint': '"""login"""'}), "(UserLoginResource, '/login/', endpoint='login')\n", (3002, 3050), False, 'from src import db, api\n'), ((3051, 3124), 'src.api.add_resource', 'api.add_resource', (['UserRegisterResource', '"""/register/"""'], {'endpoint': '"""register"""'}), "(UserRegisterResource, '/register/', endpoint='register')\n", (3067, 3124), False, 'from src import db, api\n'), ((2513, 2532), 'datetime.timedelta', 'timedelta', ([], {'days': '(365)'}), '(days=365)\n', (2522, 2532), False, 'from datetime import timedelta\n'), ((557, 586), 'flask.render_template', 'render_template', (['"""login.html"""'], {}), "('login.html')\n", (572, 586), False, 'from flask import request, jsonify, make_response, redirect, json, render_template\n'), ((2258, 2278), 'src.db.session.add', 'db.session.add', (['user'], {}), '(user)\n', (2272, 2278), False, 'from src import db, api\n'), ((2291, 2310), 'src.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2308, 2310), False, 'from src import db, api\n'), ((854, 873), 'datetime.timedelta', 'timedelta', ([], {'days': '(365)'}), '(days=365)\n', (863, 873), False, 'from datetime import timedelta\n'), ((1507, 1523), 'flask_login.login_user', 'login_user', (['user'], {}), '(user)\n', (1517, 1523), False, 'from flask_login import login_user, current_user\n'), ((1859, 1888), 'flask.jsonify', 'jsonify', (["{'error': 'No data'}"], {}), "({'error': 'No data'})\n", (1866, 1888), False, 'from flask import request, jsonify, make_response, redirect, json, render_template\n'), ((2015, 2056), 'flask.jsonify', 'jsonify', (["{'error': 'User already exists'}"], {}), "({'error': 'User already exists'})\n", (2022, 2056), False, 'from flask import request, jsonify, make_response, redirect, json, render_template\n'), ((2163, 2178), 'flask.jsonify', 'jsonify', (['errors'], {}), '(errors)\n', (2170, 2178), False, 'from flask import request, jsonify, make_response, redirect, json, render_template\n'), ((2403, 2424), 'src.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (2422, 2424), False, 'from src import db, api\n'), ((1217, 1289), 'flask.jsonify', 'jsonify', (["{'error': {'code': 400, 'msg': 'No such user/wrong password.'}}"], {}), "({'error': {'code': 400, 'msg': 'No such user/wrong password.'}})\n", (1224, 1289), False, 'from flask import request, jsonify, make_response, redirect, json, render_template\n'), ((1562, 1586), 'flask.redirect', 'redirect', (['"""/admin/"""', '(302)'], {}), "('/admin/', 302)\n", (1570, 1586), False, 'from flask import request, jsonify, make_response, redirect, json, render_template\n'), ((1643, 1673), 'flask.redirect', 'redirect', (['"""/api/v1/login"""', '(403)'], {}), "('/api/v1/login', 403)\n", (1651, 1673), False, 'from flask import request, jsonify, make_response, redirect, json, render_template\n'), ((2458, 2486), 'flask.jsonify', 'jsonify', ([], {'error': "{'code': 400}"}), "(error={'code': 400})\n", (2465, 2486), False, 'from flask import request, jsonify, make_response, redirect, json, render_template\n'), ((2755, 2815), 'flask_jwt_extended.create_access_token', 'create_access_token', ([], {'identity': 'user.id', 'expires_delta': 'expires'}), '(identity=user.id, expires_delta=expires)\n', (2774, 2815), False, 'from flask_jwt_extended import create_access_token, jwt_required\n'), ((1090, 1153), 'flask_jwt_extended.create_access_token', 
'create_access_token', ([], {'identity': "user['id']", 'expires_delta': 'expires'}), "(identity=user['id'], expires_delta=expires)\n", (1109, 1153), False, 'from flask_jwt_extended import create_access_token, jwt_required\n')] |
wesokes/django-query-builder | querybuilder/tests/window_tests.py | 3cc53d33ee0a4ada515635e4be631167a774b457 | from querybuilder.fields import (
RankField, RowNumberField, DenseRankField, PercentRankField, CumeDistField, NTileField, LagField,
LeadField, FirstValueField, LastValueField, NthValueField, NumStdDevField
)
from querybuilder.query import QueryWindow, Query
from querybuilder.tests.models import Order
from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str
class QueryWindowTest(QueryTestCase):
def test_query_window(self):
query_window = QueryWindow()
query_str = query_window.get_sql()
expected_query = 'OVER ()'
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_query_window_partition(self):
query_window = QueryWindow().partition_by('field_one')
query_str = query_window.get_sql()
expected_query = 'OVER (PARTITION BY field_one)'
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_query_window_order(self):
query_window = QueryWindow().order_by('field_one')
query_str = query_window.get_sql()
expected_query = 'OVER (ORDER BY field_one ASC)'
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_query_window_partition_order(self):
query_window = QueryWindow().partition_by(
'field_one'
).order_by(
'field_one'
)
query_str = query_window.get_sql()
expected_query = 'OVER (PARTITION BY field_one ORDER BY field_one ASC)'
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_query_window_partition_order_many(self):
query_window = QueryWindow().partition_by(
'field_one'
).partition_by(
'field_two'
).order_by(
'field_one'
).order_by(
'-field_two'
)
query_str = query_window.get_sql()
expected_query = 'OVER (PARTITION BY field_one, field_two ORDER BY field_one ASC, field_two DESC)'
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
class WindowFunctionTest(QueryTestCase):
def test_rank_no_over(self):
query = Query().from_table(
table=Order,
fields=[
RankField()
]
)
query_str = query.get_sql()
expected_query = 'SELECT RANK() AS "rank" FROM querybuilder_tests_order'
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_rank_over(self):
query = Query().from_table(
table=Order,
fields=[
RankField(
over=QueryWindow()
)
]
)
query_str = query.get_sql()
expected_query = 'SELECT RANK() OVER () AS "rank" FROM querybuilder_tests_order'
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_rank_over_order(self):
query = Query().from_table(
table=Order,
fields=[
'id',
RankField(
over=QueryWindow().order_by(
'id'
)
)
]
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.id, RANK() OVER (ORDER BY id ASC) AS "rank" FROM querybuilder_tests_order'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_rank_over_partition(self):
query = Query().from_table(
table=Order,
fields=[
'id',
RankField(
over=QueryWindow().partition_by(
'account_id'
)
)
]
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.id, RANK() OVER (PARTITION BY account_id) AS "rank" FROM '
'querybuilder_tests_order'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_row_number(self):
query = Query().from_table(
table=Order,
fields=[
'*',
RowNumberField(
over=QueryWindow().order_by(
'-margin'
)
)
]
).order_by(
'row_number'
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.*, '
'ROW_NUMBER() OVER (ORDER BY margin DESC) AS "row_number" '
'FROM querybuilder_tests_order '
'ORDER BY row_number '
'ASC'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_rank(self):
query = Query().from_table(
table=Order,
fields=[
'id',
RankField(
over=QueryWindow().partition_by(
'account_id'
).order_by(
'id'
)
)
]
).order_by(
'-rank'
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.id, '
'RANK() OVER (PARTITION BY account_id ORDER BY id ASC) AS "rank" '
'FROM querybuilder_tests_order '
'ORDER BY rank '
'DESC'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_dense_rank(self):
query = Query().from_table(
table=Order,
fields=[
'*',
DenseRankField(
over=QueryWindow().order_by(
'-margin'
)
)
]
).order_by(
'dense_rank'
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.*, '
'DENSE_RANK() OVER (ORDER BY margin DESC) AS "dense_rank" '
'FROM querybuilder_tests_order '
'ORDER BY dense_rank '
'ASC'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_rank_percent(self):
query = Query().from_table(
table=Order,
fields=[
'*',
PercentRankField(
over=QueryWindow().order_by(
'-margin'
)
)
]
).order_by(
'percent_rank'
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.*, '
'PERCENT_RANK() OVER (ORDER BY margin DESC) AS "percent_rank" '
'FROM querybuilder_tests_order '
'ORDER BY percent_rank '
'ASC'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_cume_dist(self):
query = Query().from_table(
table=Order,
fields=[
'*',
CumeDistField(
over=QueryWindow().order_by(
'-margin'
)
)
]
).order_by(
'cume_dist'
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.*, '
'CUME_DIST() OVER (ORDER BY margin DESC) AS "cume_dist" '
'FROM querybuilder_tests_order '
'ORDER BY cume_dist '
'ASC'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_ntile(self):
query = Query().from_table(
table=Order,
fields=[
'*',
NTileField(
num_buckets=2,
over=QueryWindow().order_by(
'-margin'
)
)
]
).order_by(
'ntile'
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.*, '
'NTILE(2) OVER (ORDER BY margin DESC) AS "ntile" '
'FROM querybuilder_tests_order '
'ORDER BY ntile '
'ASC'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_lag(self):
query = Query().from_table(
table=Order,
fields=[
'*',
LagField(
'margin',
over=QueryWindow().order_by(
'-margin'
)
)
]
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.*, '
'LAG(querybuilder_tests_order.margin, 1) OVER (ORDER BY margin DESC) AS "margin_lag" '
'FROM querybuilder_tests_order'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_lag_default(self):
query = Query().from_table(
table=Order,
fields=[
'*',
LagField(
'margin',
default=0,
over=QueryWindow().order_by(
'-margin'
)
)
]
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.*, '
'LAG(querybuilder_tests_order.margin, 1, \'0\') OVER (ORDER BY margin DESC) AS "margin_lag" '
'FROM querybuilder_tests_order'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_lead(self):
query = Query().from_table(
table=Order,
fields=[
'*',
LeadField(
'margin',
over=QueryWindow().order_by(
'-margin'
)
)
]
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.*, '
'LEAD(querybuilder_tests_order.margin, 1) OVER (ORDER BY margin DESC) AS "margin_lead" '
'FROM querybuilder_tests_order'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_first_value(self):
query = Query().from_table(
table=Order,
fields=[
'*',
FirstValueField(
'margin',
over=QueryWindow().order_by(
'-margin'
)
)
]
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.*, '
'FIRST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin DESC) AS "margin_first_value" '
'FROM querybuilder_tests_order'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_last_value(self):
query = Query().from_table(
table=Order,
fields=[
'*',
LastValueField(
'margin',
over=QueryWindow().order_by(
'margin'
)
)
]
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.*, '
'LAST_VALUE(querybuilder_tests_order.margin) OVER (ORDER BY margin ASC) AS "margin_last_value" '
'FROM querybuilder_tests_order'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_nth_value(self):
query = Query().from_table(
table=Order,
fields=[
'*',
NthValueField(
'margin',
n=2,
over=QueryWindow().order_by(
'-margin'
)
)
]
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.*, '
'NTH_VALUE(querybuilder_tests_order.margin, 2) OVER (ORDER BY margin DESC) AS "margin_nth_value" '
'FROM querybuilder_tests_order'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
def test_num_stddev(self):
query = Query().from_table(
table=Order,
fields=[
'*',
NumStdDevField(
'margin',
over=QueryWindow()
)
]
).order_by(
'-margin_num_stddev'
)
query_str = query.get_sql()
expected_query = (
'SELECT querybuilder_tests_order.*, '
'(CASE WHEN (STDDEV(querybuilder_tests_order.margin) OVER ()) <> 0 '
'THEN ((querybuilder_tests_order.margin - ('
'AVG(querybuilder_tests_order.margin) OVER ())) / (STDDEV(querybuilder_tests_order.margin) OVER ())) '
'ELSE 0 '
'END) '
'AS "margin_num_stddev" '
'FROM querybuilder_tests_order '
'ORDER BY margin_num_stddev '
'DESC'
)
self.assertEqual(query_str, expected_query, get_comparison_str(query_str, expected_query))
| [((483, 496), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (494, 496), False, 'from querybuilder.query import QueryWindow, Query\n'), ((627, 672), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (645, 672), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((933, 978), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (951, 978), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((1231, 1276), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (1249, 1276), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((1632, 1677), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (1650, 1677), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((2158, 2203), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (2176, 2203), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((2584, 2629), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (2602, 2629), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((3029, 3074), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (3047, 3074), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((3631, 3676), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (3649, 3676), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((4272, 4317), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (4290, 4317), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((5023, 5068), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (5041, 5068), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((5830, 5875), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (5848, 5875), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((6581, 6626), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (6599, 6626), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((7344, 7389), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (7362, 7389), False, 'from 
querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((8089, 8134), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (8107, 8134), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((8847, 8892), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (8865, 8892), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((9543, 9588), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (9561, 9588), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((10285, 10330), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (10303, 10330), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((10985, 11030), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (11003, 11030), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((11709, 11754), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (11727, 11754), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((12427, 12472), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (12445, 12472), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((13171, 13216), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (13189, 13216), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((14164, 14209), 'querybuilder.tests.query_tests.get_comparison_str', 'get_comparison_str', (['query_str', 'expected_query'], {}), '(query_str, expected_query)\n', (14182, 14209), False, 'from querybuilder.tests.query_tests import QueryTestCase, get_comparison_str\n'), ((741, 754), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (752, 754), False, 'from querybuilder.query import QueryWindow, Query\n'), ((1043, 1056), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (1054, 1056), False, 'from querybuilder.query import QueryWindow, Query\n'), ((2297, 2304), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (2302, 2304), False, 'from querybuilder.query import QueryWindow, Query\n'), ((2678, 2685), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (2683, 2685), False, 'from querybuilder.query import QueryWindow, Query\n'), ((3129, 3136), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (3134, 3136), False, 'from querybuilder.query import QueryWindow, Query\n'), ((3735, 3742), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (3740, 3742), False, 'from querybuilder.query import QueryWindow, Query\n'), ((8935, 8942), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (8940, 8942), False, 'from querybuilder.query import QueryWindow, Query\n'), ((9639, 
9646), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (9644, 9646), False, 'from querybuilder.query import QueryWindow, Query\n'), ((10374, 10381), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (10379, 10381), False, 'from querybuilder.query import QueryWindow, Query\n'), ((11081, 11088), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (11086, 11088), False, 'from querybuilder.query import QueryWindow, Query\n'), ((11804, 11811), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (11809, 11811), False, 'from querybuilder.query import QueryWindow, Query\n'), ((12521, 12528), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (12526, 12528), False, 'from querybuilder.query import QueryWindow, Query\n'), ((2379, 2390), 'querybuilder.fields.RankField', 'RankField', ([], {}), '()\n', (2388, 2390), False, 'from querybuilder.fields import RankField, RowNumberField, DenseRankField, PercentRankField, CumeDistField, NTileField, LagField, LeadField, FirstValueField, LastValueField, NthValueField, NumStdDevField\n'), ((1351, 1364), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (1362, 1364), False, 'from querybuilder.query import QueryWindow, Query\n'), ((4367, 4374), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (4372, 4374), False, 'from querybuilder.query import QueryWindow, Query\n'), ((5112, 5119), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (5117, 5119), False, 'from querybuilder.query import QueryWindow, Query\n'), ((5925, 5932), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (5930, 5932), False, 'from querybuilder.query import QueryWindow, Query\n'), ((6678, 6685), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (6683, 6685), False, 'from querybuilder.query import QueryWindow, Query\n'), ((7438, 7445), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (7443, 7445), False, 'from querybuilder.query import QueryWindow, Query\n'), ((8179, 8186), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (8184, 8186), False, 'from querybuilder.query import QueryWindow, Query\n'), ((13266, 13273), 'querybuilder.query.Query', 'Query', ([], {}), '()\n', (13271, 13273), False, 'from querybuilder.query import QueryWindow, Query\n'), ((2796, 2809), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (2807, 2809), False, 'from querybuilder.query import QueryWindow, Query\n'), ((3269, 3282), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (3280, 3282), False, 'from querybuilder.query import QueryWindow, Query\n'), ((3875, 3888), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (3886, 3888), False, 'from querybuilder.query import QueryWindow, Query\n'), ((9103, 9116), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (9114, 9116), False, 'from querybuilder.query import QueryWindow, Query\n'), ((9838, 9851), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (9849, 9851), False, 'from querybuilder.query import QueryWindow, Query\n'), ((10543, 10556), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (10554, 10556), False, 'from querybuilder.query import QueryWindow, Query\n'), ((11256, 11269), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (11267, 11269), False, 'from querybuilder.query import QueryWindow, Query\n'), ((11978, 11991), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (11989, 11991), False, 'from querybuilder.query import QueryWindow, Query\n'), ((12719, 12732), 
'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (12730, 12732), False, 'from querybuilder.query import QueryWindow, Query\n'), ((13440, 13453), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (13451, 13453), False, 'from querybuilder.query import QueryWindow, Query\n'), ((1757, 1770), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (1768, 1770), False, 'from querybuilder.query import QueryWindow, Query\n'), ((4511, 4524), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (4522, 4524), False, 'from querybuilder.query import QueryWindow, Query\n'), ((6069, 6082), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (6080, 6082), False, 'from querybuilder.query import QueryWindow, Query\n'), ((6824, 6837), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (6835, 6837), False, 'from querybuilder.query import QueryWindow, Query\n'), ((7581, 7594), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (7592, 7594), False, 'from querybuilder.query import QueryWindow, Query\n'), ((8354, 8367), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (8365, 8367), False, 'from querybuilder.query import QueryWindow, Query\n'), ((5252, 5265), 'querybuilder.query.QueryWindow', 'QueryWindow', ([], {}), '()\n', (5263, 5265), False, 'from querybuilder.query import QueryWindow, Query\n')] |
wbprice/ojimoji | emoji/coffee.py | 7b1a8b5ed0062d1d52e151e7412e1131e3de7924 | import numpy
h = .25
s = 1
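# 16x16 binary bitmap describing the icon pattern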
bitmap = numpy.array([
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0],
[0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0],
[0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0],
[0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0],
[0,0,1,1,1,0,1,0,1,1,1,0,1,0,0,0],
[0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,0],
[0,0,1,1,1,0,1,0,1,1,1,0,0,0,0,0],
[0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0],
[0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]])
| [((37, 899), 'numpy.array', 'numpy.array', (['[[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0], [0, 0, 1, 1, 1, 1,\n 1, 1, 1, 1, 1, 1, 1, 1, 0, 0], [0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, \n 1, 0, 0], [0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0], [0, 0, 1, 1,\n 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0], [0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, \n 1, 0, 0, 0, 0], [0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0], [0, 0,\n 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 1, 1, 1, 1, 1, \n 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0]]'], {}), '([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0], [0, 0,\n 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0], [0, 0, 1, 1, 1, 0, 1, 0, 1, \n 1, 1, 0, 0, 1, 0, 0], [0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0],\n [0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0], [0, 0, 1, 1, 0, 1, 0,\n 1, 0, 1, 1, 1, 0, 0, 0, 0], [0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, \n 0, 0], [0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 1,\n 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0]])\n', (48, 899), False, 'import numpy\n')] |
gkfthddk/keras | dumpcode/npzbdt.py | 46d96c65d69c39df298800336bbb4d867a2561fb | import numpy as np
from sklearn.model_selection import RandomizedSearchCV, GridSearchCV
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import StratifiedKFold
from sklearn.model_selection import KFold
import scipy.stats as sts
import xgboost as xgb
from xiter import *
import pandas as pd
import argparse
from datetime import datetime
def timer(start_time=None):
if not start_time:
start_time = datetime.now()
return start_time
elif start_time:
thour, temp_sec = divmod((datetime.now() - start_time).total_seconds(), 3600)
tmin, tsec = divmod(temp_sec, 60)
print('\n Time taken: %i hours %i minutes and %s seconds.' % (thour, tmin, round(tsec, 2)))
parser=argparse.ArgumentParser()
parser.add_argument("--end",type=float,default=100000.,help='end ratio')
parser.add_argument("--save",type=str,default="test_",help='save name')
parser.add_argument("--network",type=str,default="rnn",help='network name on symbols/')
parser.add_argument("--right",type=str,default="/scratch/yjdata/gluon100_img",help='which train sample (qq,gg,zq,zg)')
parser.add_argument("--pt",type=int,default=200,help='pt range pt~pt*1.1')
parser.add_argument("--ptmin",type=float,default=0.,help='pt range pt~pt*1.1')
parser.add_argument("--ptmax",type=float,default=2.,help='pt range pt~pt*1.1')
parser.add_argument("--epochs",type=int,default=10,help='num epochs')
parser.add_argument("--batch_size",type=int,default=100000,help='batch_size')
parser.add_argument("--loss",type=str,default="categorical_crossentropy",help='network name on symbols/')
parser.add_argument("--gpu",type=int,default=0,help='gpu number')
parser.add_argument("--isz",type=int,default=0,help='0 or z or not')
parser.add_argument("--eta",type=float,default=0.,help='end ratio')
parser.add_argument("--etabin",type=float,default=1,help='end ratio')
parser.add_argument("--unscale",type=int,default=0,help='end ratio')
args=parser.parse_args()
import os
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]=str(args.gpu)
batch_size=args.batch_size
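# hyper-parameter distributions sampled by the randomized search below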
params = {
'max_depth': sts.randint(1,6),
'learning_rate': sts.uniform(0.0010,0.500),
'n_estimators': sts.randint(10,101)
}
model=xgb.XGBClassifier(objective='binary:logistic',tree_method="gpu_hist")
if(args.isz==1):
if(args.etabin==1):
loaded=np.load("zqmixed{}pteta.npz".format(args.pt))
print("zqmixed{}pteta.npz".format(args.pt))
else:
loaded=np.load("zqmixed{}pt.npz".format(args.pt))
print("zqmixed{}pt.npz".format(args.pt))
elif(args.isz==-1):
if(args.etabin==1):
loaded=np.load("qqmixed{}pteta.npz".format(args.pt))
print("qqmixed{}pteta.npz".format(args.pt))
else:
loaded=np.load("qqmixed{}pt.npz".format(args.pt))
print("qqmixed{}pt.npz".format(args.pt))
elif(args.isz==0):
if(args.etabin==1):
if(args.unscale==1):
loaded=np.load("unscalemixed{}pteta.npz".format(args.pt))
else:
loaded=np.load("mixed{}pteta.npz".format(args.pt))
print("etabin 1")
else:
if(args.unscale==1):
loaded=np.load("unscalemixed{}pt.npz".format(args.pt))
else:
loaded=np.load("mixed{}pt.npz".format(args.pt))
print("etabin 2.4")
data=loaded["bdtset"][:,:5]
label=loaded["label"]
line=int(30000)
endline=int(40000)
if(len(label)<40000):
line=int(len(label)*3./4.)
endline=len(label)
X=data[0:line]
vx=data[line:endline]
Y=label[0:line]
vy=label[line:endline]
Y=np.array(Y)[:,0]
folds = 3
param_comb = 100
skf = KFold(n_splits=folds, shuffle = True, random_state = 173)
#skf = StratifiedKFold(n_splits=folds, shuffle = True, random_state = 1001)
random_search = RandomizedSearchCV(model, param_distributions=params, n_iter=param_comb, scoring='log_loss', n_jobs=6, cv=skf.split(X,Y), verbose=3, random_state=173 )
# Here we go
start_time = timer(None) # timing starts from this point for "start_time" variable
random_search.fit(X, Y)
timer(start_time)
#print(random_search.predict(X[:10]))
#print('\n All results:')
#print(random_search.cv_results_)
#print('\n Best estimator:')
#print(random_search.best_estimator_)
print('\n Best normalized gini score for %d-fold search with %d parameter combinations:' % (folds, param_comb))
print(random_search.best_score_ * 2 - 1)
#print('\n Best hyperparameters:')
#print(random_search.best_params_)
results = pd.DataFrame(random_search.cv_results_)
results.to_csv('xgb/{}-{}.csv'.format(args.save,args.pt), index=False)
#random_search.best_estimator_.save_model("bdt-{}.dat".format(args.pt))
| [((727, 752), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (750, 752), False, 'import argparse\n'), ((2254, 2324), 'xgboost.XGBClassifier', 'xgb.XGBClassifier', ([], {'objective': '"""binary:logistic"""', 'tree_method': '"""gpu_hist"""'}), "(objective='binary:logistic', tree_method='gpu_hist')\n", (2271, 2324), True, 'import xgboost as xgb\n'), ((3518, 3571), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': 'folds', 'shuffle': '(True)', 'random_state': '(173)'}), '(n_splits=folds, shuffle=True, random_state=173)\n', (3523, 3571), False, 'from sklearn.model_selection import KFold\n'), ((4361, 4400), 'pandas.DataFrame', 'pd.DataFrame', (['random_search.cv_results_'], {}), '(random_search.cv_results_)\n', (4373, 4400), True, 'import pandas as pd\n'), ((2124, 2141), 'scipy.stats.randint', 'sts.randint', (['(1)', '(6)'], {}), '(1, 6)\n', (2135, 2141), True, 'import scipy.stats as sts\n'), ((2167, 2190), 'scipy.stats.uniform', 'sts.uniform', (['(0.001)', '(0.5)'], {}), '(0.001, 0.5)\n', (2178, 2190), True, 'import scipy.stats as sts\n'), ((2218, 2238), 'scipy.stats.randint', 'sts.randint', (['(10)', '(101)'], {}), '(10, 101)\n', (2229, 2238), True, 'import scipy.stats as sts\n'), ((3467, 3478), 'numpy.array', 'np.array', (['Y'], {}), '(Y)\n', (3475, 3478), True, 'import numpy as np\n'), ((430, 444), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (442, 444), False, 'from datetime import datetime\n'), ((526, 540), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (538, 540), False, 'from datetime import datetime\n')] |
watchsea/RL-Stock | rlenv/StockTradingEnv0.py | 53bd13a1bd1760e082c6db2ad9b010adbc3a767b | import random
import json
import gym
from gym import spaces
import pandas as pd
import numpy as np
MAX_ACCOUNT_BALANCE = 2147483647
MAX_NUM_SHARES = 2147483647
MAX_SHARE_PRICE = 5000
MAX_VOLUME = 1000e8
MAX_AMOUNT = 3e10
MAX_OPEN_POSITIONS = 5
MAX_STEPS = 20000
MAX_DAY_CHANGE = 1
INITIAL_ACCOUNT_BALANCE = 10000
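# number of past time steps included in each observation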
DATA_HIS_PERIOD = 5
# position constant
FLAT = 0 # no position
LONG = 1 # buy position
SHORT = 2 # sell position
# action constant
HOLD = 0
BUY = 1
SELL = 2
class StockTradingEnv(gym.Env):
"""A stock trading environment for OpenAI gym"""
metadata = {'render.modes': ['human']}
def __init__(self, df,show_trade=True):
super(StockTradingEnv, self).__init__()
# show the trade info
self.show_trade = show_trade
self.actions=["FLAT","LONG","SHORT"]
        self.fee = 0.0005 # brokerage commission
self.df = df
self.closeprices = self.df['close'].values
self.reward_range = (0, MAX_ACCOUNT_BALANCE)
# Actions of the format Buy x%, Sell x%, Hold, etc.
self.action_space = spaces.Discrete(len(self.actions))
# self.action_space = spaces.Box(
# low=np.array([0, 0]), high=np.array([3, 1]), dtype=np.float16)
# Prices contains the OHCL values for the last five prices
self.observation_space = spaces.Box(
low=0, high=1, shape=(DATA_HIS_PERIOD+1,6), dtype=np.float16)
self.history = []
def _next_observation(self):
obs = np.array([
self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'open'].values / MAX_SHARE_PRICE,
self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'high'].values / MAX_SHARE_PRICE,
self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'low'].values / MAX_SHARE_PRICE,
self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'close'].values / MAX_SHARE_PRICE,
self.df.loc[self.current_step-DATA_HIS_PERIOD:self.current_step, 'volume'].values / MAX_NUM_SHARES,
])
# Append additional data and scale each value to between 0-1
obs = np.append(obs,[[self.balance / MAX_ACCOUNT_BALANCE,
self.max_net_worth / MAX_ACCOUNT_BALANCE,
self.shares_held / MAX_NUM_SHARES,
self.cost_basis / MAX_SHARE_PRICE,
self.total_shares_sold / MAX_NUM_SHARES,
self.total_sales_value / (MAX_NUM_SHARES * MAX_SHARE_PRICE)]],axis=0)
return obs
def _take_action(self, action):
# Set the current price to a random price within the time step
# current_price = random.uniform(
# self.df.loc[self.current_step, "open"], self.df.loc[self.current_step, "close"])
# Set the current price to the last close price
self.close_price = self.df.loc[self.current_step,"close"]
        amount = 0.5 # the old version had this variable, so it is kept here
# action comes from the agent
# 1 buy, 2 sell, 0 hold
# single position can be opened per trade
# valid action sequence would be
# LONG : buy - hold - hold - sell
# SHORT : sell - hold - hold - buy
# invalid action sequence is just considered hold
# (e.g.) "buy - buy" would be considred "buy - hold"
self.action = HOLD #hold
if action == BUY: #buy
if self.position == FLAT: # if previous position was flat
self.position = LONG #update position to long
self.action = BUY # record action as buy
self.entry_price = self.close_price
# Buy amount % of balance in shares
total_possible = int(self.balance / self.close_price)
shares_bought = int(total_possible * amount)//100 *100
self.krw_balance = shares_bought * self.entry_price # buy balance
commission = round(self.fee * self.krw_balance,2) # commission fee
self.shares_held = shares_bought
self.balance -= self.krw_balance-commission
#self.cost_basis = (prev_cost + additional_cost) / (self.shares_held + shares_bought)
elif self.position == SHORT: # if previous position was short
self.position = FLAT # update position to flat
self.action = BUY # record action as buy
self.exit_price = self.close_price
self.reward += ((self.entry_price - self.exit_price) / self.exit_price + 1) * (
1 - self.fee) ** 2 - 1 # calculate reward
#self.krw_balance = self.krw_balance * (1.0 + self.reward) # evaluate cumulative return in krw-won
                self.balance += round(self.krw_balance * (1.0 + self.reward),2) # calculate the total balance
self.n_short += 1 # record number of short
self.total_shares_sold += self.shares_held
self.total_sales_value += self.shares_held * self.close_price
self.entry_price = 0 # clear entry price
                self.shares_held = 0 # clear the shares held
elif action == SELL:
if self.position == FLAT:
self.position = SHORT
self.action = SELL
self.entry_price = self.close_price
# Sell amount % of shares held
total_possible = int(self.balance / self.close_price)
self.shares_held = int(total_possible * amount)//100 *100
self.krw_balance = self.shares_held * self.entry_price # buy balance
commission = round(self.fee * self.krw_balance,2) # commission fee
self.balance -= self.krw_balance-commission
elif self.position == LONG:
self.position = FLAT
self.action = SELL
self.exit_price = self.close_price
self.reward += ((self.exit_price - self.entry_price) / self.entry_price + 1) * (1 - self.fee) ** 2 - 1
#self.krw_balance = self.krw_balance * (1.0 + self.reward)
self.balance += round(self.krw_balance*(1.0+self.reward),2)
self.n_long += 1
self.total_shares_buy += self.shares_held
self.total_buys_value += self.shares_held * self.close_price
self.shares_held = 0
self.entry_price = 0
# [coin + krw_won] total value evaluated in krw won
if (self.position == LONG):
temp_reward = ((self.close_price - self.entry_price) / self.entry_price + 1) * (
1 - self.fee) ** 2 - 1
new_portfolio = self.krw_balance * (1.0 + temp_reward)
elif (self.position == SHORT):
temp_reward = ((self.entry_price - self.close_price) / self.close_price + 1) * (
1 - self.fee) ** 2 - 1
new_portfolio = self.krw_balance * (1.0 + temp_reward)
else:
temp_reward = 0
new_portfolio = 0
self.net_worth = self.balance + new_portfolio
if self.net_worth > self.max_net_worth:
self.max_net_worth = self.net_worth
if self.shares_held == 0:
self.cost_basis = 0
self.portfolio = round(new_portfolio,2)
def step(self, action):
# Execute one time step within the environment
self._take_action(action)
done = False
self.current_step += 1
delay_modifier = (self.current_step / MAX_STEPS)
# profits
#reward = self.net_worth - INITIAL_ACCOUNT_BALANCE
#reward = 1 if reward > 0 else -100
if self.net_worth <= 0:
done = True
if self.current_step > len(self.df.loc[:, 'open'].values) - 1:
self.current_step = DATA_HIS_PERIOD # loop training
# when loop training, then clear the history
self.action = HOLD
self.position = FLAT
self.balance = INITIAL_ACCOUNT_BALANCE
self.net_worth = INITIAL_ACCOUNT_BALANCE
self.max_net_worth = INITIAL_ACCOUNT_BALANCE
self.krw_balance = 0
self.reward = 0
self.portfolio = 0
self.shares_held = 0
self.cost_basis = 0
self.total_shares_buy = 0
self.total_buys_value = 0
self.total_shares_sold = 0
self.total_sales_value = 0
self.n_long = 0
self.n_short = 0
self.history=[]
# done = True
if (self.show_trade and self.current_step % 1 == 0):
print("Tick: {0}/ Portfolio (krw-won): {1}, balance: {2}".format(self.current_step, self.portfolio,self.net_worth))
print("Long: {0}/ Short: {1}".format(self.n_long, self.n_short))
# save the history data
self.history.append([
self.action,
self.position,
self.current_step,
self.close_price,
self.krw_balance,
self.balance,
self.max_net_worth,
self.shares_held,
self.portfolio,
self.total_shares_buy,
self.total_buys_value,
self.total_shares_sold,
self.total_sales_value])
#self.history.append((self.action, self.current_step, self.closingPrice, self.portfolio, self.reward))
obs = self._next_observation()
if (self.current_step > (self.df.shape[0]) - 1):
self.done = True
self.reward = self.get_profit() # return reward at end of the game
return obs, self.net_worth, done, {'portfolio': np.array([self.portfolio]),
"history": self.history,
"n_trades": {'long': self.n_long, 'short': self.n_short}}
#return obs, reward, done, {}
def get_profit(self):
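        # mark-to-market profit of the currently open position (0 when flat)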
if(self.position == LONG):
            profit = ((self.close_price - self.entry_price)/self.entry_price + 1)*(1-self.fee)**2 - 1
        elif(self.position == SHORT):
            profit = ((self.entry_price - self.close_price)/self.close_price + 1)*(1-self.fee)**2 - 1
else:
profit = 0
return profit
def reset(self, new_df=None):
# Reset the state of the environment to an initial state
self.action = HOLD
self.position = FLAT
self.balance = INITIAL_ACCOUNT_BALANCE
self.net_worth = INITIAL_ACCOUNT_BALANCE
self.max_net_worth = INITIAL_ACCOUNT_BALANCE
self.krw_balance = 0
        self.reward = 0
        self.portfolio = 0
        self.shares_held = 0
        self.cost_basis = 0
        self.total_shares_buy = 0
        self.total_buys_value = 0
        self.total_shares_sold = 0
        self.total_sales_value = 0
        self.n_long = 0
        self.n_short = 0
        self.history = []
# pass test dataset to environment
if new_df:
self.df = new_df
# Set the current step to a random point within the data frame
# self.current_step = random.randint(
# 0, len(self.df.loc[:, 'open'].values) - 6)
# the observation include the given period history data
self.current_step = DATA_HIS_PERIOD #random.randint(DATA_HIS_PERIOD,len(self.df.loc[:,'open'].values)-1)
# for i in range(DATA_HIS_PERIOD):
# self.history.append([0.0,0.0,0.0,0.0,0.0,0.0])
return self._next_observation()
def render(self, mode='human', close=False):
# Render the environment to the screen
profit = self.net_worth - INITIAL_ACCOUNT_BALANCE
print('-'*30)
print(f'Step: {self.current_step}')
print(f'Balance: {self.balance}')
print(f'Shares held: {self.shares_held} (Total sold: {self.total_shares_sold})')
print(f'Avg cost for held shares: {self.cost_basis} (Total sales value: {self.total_sales_value})')
print(f'Net worth: {self.net_worth} (Max net worth: {self.max_net_worth})')
print(f'Profit: {profit}')
return profit
| [((1340, 1415), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(0)', 'high': '(1)', 'shape': '(DATA_HIS_PERIOD + 1, 6)', 'dtype': 'np.float16'}), '(low=0, high=1, shape=(DATA_HIS_PERIOD + 1, 6), dtype=np.float16)\n', (1350, 1415), False, 'from gym import spaces\n'), ((1501, 2058), 'numpy.array', 'np.array', (["[self.df.loc[self.current_step - DATA_HIS_PERIOD:self.current_step, ('open'\n )].values / MAX_SHARE_PRICE, self.df.loc[self.current_step -\n DATA_HIS_PERIOD:self.current_step, ('high')].values / MAX_SHARE_PRICE, \n self.df.loc[self.current_step - DATA_HIS_PERIOD:self.current_step, (\n 'low')].values / MAX_SHARE_PRICE, self.df.loc[self.current_step -\n DATA_HIS_PERIOD:self.current_step, ('close')].values / MAX_SHARE_PRICE,\n self.df.loc[self.current_step - DATA_HIS_PERIOD:self.current_step, (\n 'volume')].values / MAX_NUM_SHARES]"], {}), "([self.df.loc[self.current_step - DATA_HIS_PERIOD:self.current_step,\n ('open')].values / MAX_SHARE_PRICE, self.df.loc[self.current_step -\n DATA_HIS_PERIOD:self.current_step, ('high')].values / MAX_SHARE_PRICE, \n self.df.loc[self.current_step - DATA_HIS_PERIOD:self.current_step, (\n 'low')].values / MAX_SHARE_PRICE, self.df.loc[self.current_step -\n DATA_HIS_PERIOD:self.current_step, ('close')].values / MAX_SHARE_PRICE,\n self.df.loc[self.current_step - DATA_HIS_PERIOD:self.current_step, (\n 'volume')].values / MAX_NUM_SHARES])\n", (1509, 2058), True, 'import numpy as np\n'), ((2166, 2455), 'numpy.append', 'np.append', (['obs', '[[self.balance / MAX_ACCOUNT_BALANCE, self.max_net_worth /\n MAX_ACCOUNT_BALANCE, self.shares_held / MAX_NUM_SHARES, self.cost_basis /\n MAX_SHARE_PRICE, self.total_shares_sold / MAX_NUM_SHARES, self.\n total_sales_value / (MAX_NUM_SHARES * MAX_SHARE_PRICE)]]'], {'axis': '(0)'}), '(obs, [[self.balance / MAX_ACCOUNT_BALANCE, self.max_net_worth /\n MAX_ACCOUNT_BALANCE, self.shares_held / MAX_NUM_SHARES, self.cost_basis /\n MAX_SHARE_PRICE, self.total_shares_sold / MAX_NUM_SHARES, self.\n total_sales_value / (MAX_NUM_SHARES * MAX_SHARE_PRICE)]], axis=0)\n', (2175, 2455), True, 'import numpy as np\n'), ((9930, 9956), 'numpy.array', 'np.array', (['[self.portfolio]'], {}), '([self.portfolio])\n', (9938, 9956), True, 'import numpy as np\n')] |
mrfleap/us-population-heatmap | processing/manager.py | e3f1c5d8294716ff491c7b8b40adb77929f9aeee | import json
import os
import pathlib
import time
from tqdm import tqdm
from aggregator import aggregate
from download import DOWNLOAD_PATH, download_files, unzip_files
from tqdm.contrib.concurrent import process_map
def main():
start = time.time()
# print("Downloading files...")
# download_files()
# print("Unzipping shapefiles...")
# unzip_files()
state_ids = []
for file in os.listdir(DOWNLOAD_PATH):
file_path = os.path.join(DOWNLOAD_PATH, file)
if os.path.isfile(file_path) and pathlib.Path(file_path).suffix == ".txt":
state_ids.append(file[file.index("BG") + 2 : file.index(".")])
# print("Computing population JSON heatmaps...")
# compute_json_heatmaps(state_ids)
print("Aggregating JSON files into one...")
    aggregate_json_files(state_ids)
end = time.time()
print(f"Done in {(end - start):0.2f}s")
def compute_json_heatmaps(state_ids):
data_files = []
for state_id in state_ids:
data_files.append(
(
state_id,
os.path.join(DOWNLOAD_PATH, f"CenPop2020_Mean_BG{state_id}.txt"),
os.path.join(DOWNLOAD_PATH, f"tl_2020_{state_id}_bg", f"tl_2020_{state_id}_bg.shp"),
)
)
process_map(create_json_for_state, data_files, max_workers=4)
def aggregate_json_files(state_ids):
with open("public/data/pop.json", "w") as f:
f.write("""{"type": "FeatureCollection", "features": [""")
# state_ids = state_ids[:2]
features = []
for state_id in tqdm(state_ids):
geojson = None
with open(os.path.join(DOWNLOAD_PATH, f"{state_id}.json")) as f:
geojson = json.load(f)
with open("public/data/pop.json", "a") as f:
f.write(json.dumps(geojson["features"])[1:-1] + ("," if state_id != state_ids[-1] else ""))
with open("public/data/pop.json", "a") as f:
f.write("]}")
def create_json_for_state(args):
return aggregate(*args, hide_output=True)
if __name__ == "__main__":
main()
| [((243, 254), 'time.time', 'time.time', ([], {}), '()\n', (252, 254), False, 'import time\n'), ((410, 435), 'os.listdir', 'os.listdir', (['DOWNLOAD_PATH'], {}), '(DOWNLOAD_PATH)\n', (420, 435), False, 'import os\n'), ((839, 850), 'time.time', 'time.time', ([], {}), '()\n', (848, 850), False, 'import time\n'), ((1265, 1326), 'tqdm.contrib.concurrent.process_map', 'process_map', (['create_json_for_state', 'data_files'], {'max_workers': '(4)'}), '(create_json_for_state, data_files, max_workers=4)\n', (1276, 1326), False, 'from tqdm.contrib.concurrent import process_map\n'), ((1554, 1569), 'tqdm.tqdm', 'tqdm', (['state_ids'], {}), '(state_ids)\n', (1558, 1569), False, 'from tqdm import tqdm\n'), ((1978, 2012), 'aggregator.aggregate', 'aggregate', (['*args'], {'hide_output': '(True)'}), '(*args, hide_output=True)\n', (1987, 2012), False, 'from aggregator import aggregate\n'), ((457, 490), 'os.path.join', 'os.path.join', (['DOWNLOAD_PATH', 'file'], {}), '(DOWNLOAD_PATH, file)\n', (469, 490), False, 'import os\n'), ((503, 528), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (517, 528), False, 'import os\n'), ((1689, 1701), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1698, 1701), False, 'import json\n'), ((1069, 1133), 'os.path.join', 'os.path.join', (['DOWNLOAD_PATH', 'f"""CenPop2020_Mean_BG{state_id}.txt"""'], {}), "(DOWNLOAD_PATH, f'CenPop2020_Mean_BG{state_id}.txt')\n", (1081, 1133), False, 'import os\n'), ((1151, 1238), 'os.path.join', 'os.path.join', (['DOWNLOAD_PATH', 'f"""tl_2020_{state_id}_bg"""', 'f"""tl_2020_{state_id}_bg.shp"""'], {}), "(DOWNLOAD_PATH, f'tl_2020_{state_id}_bg',\n f'tl_2020_{state_id}_bg.shp')\n", (1163, 1238), False, 'import os\n'), ((1612, 1659), 'os.path.join', 'os.path.join', (['DOWNLOAD_PATH', 'f"""{state_id}.json"""'], {}), "(DOWNLOAD_PATH, f'{state_id}.json')\n", (1624, 1659), False, 'import os\n'), ((533, 556), 'pathlib.Path', 'pathlib.Path', (['file_path'], {}), '(file_path)\n', (545, 556), False, 'import pathlib\n'), ((1776, 1807), 'json.dumps', 'json.dumps', (["geojson['features']"], {}), "(geojson['features'])\n", (1786, 1807), False, 'import json\n')] |
kshoji6011/vehicleai | gen_data.py | 135de71cce65f4a61b42c49493ed356f2d512d6c | from PIL import Image
import os, glob
import numpy as np
from sklearn import model_selection
classes = ["car", "bycycle", "motorcycle", "pedestrian"]
num_class = len(classes)
image_size = 50
# Load the images
X = []
Y = []
for index, classlabel in enumerate(classes):
photos_dir = "./" + classlabel
files = glob.glob(photos_dir + "/*.jpg")
for i, file in enumerate(files):
if i >=237: break
image = Image.open(file)
image = image.convert("RGB")
image = image.resize((image_size, image_size))
data = np.asarray(image) / 255
X.append(data)
Y.append(index)
X = np.array(X)
Y = np.array(Y)
X_train, X_test, y_train, y_test = model_selection.train_test_split(X, Y)
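# train_test_split with default arguments shuffles the data and holds out 25% as the test set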
xy = (X_train, X_test, y_train, y_test)
np.save("./vehicle.npy", xy) | [((623, 634), 'numpy.array', 'np.array', (['X'], {}), '(X)\n', (631, 634), True, 'import numpy as np\n'), ((639, 650), 'numpy.array', 'np.array', (['Y'], {}), '(Y)\n', (647, 650), True, 'import numpy as np\n'), ((687, 725), 'sklearn.model_selection.train_test_split', 'model_selection.train_test_split', (['X', 'Y'], {}), '(X, Y)\n', (719, 725), False, 'from sklearn import model_selection\n'), ((766, 794), 'numpy.save', 'np.save', (['"""./vehicle.npy"""', 'xy'], {}), "('./vehicle.npy', xy)\n", (773, 794), True, 'import numpy as np\n'), ((311, 343), 'glob.glob', 'glob.glob', (["(photos_dir + '/*.jpg')"], {}), "(photos_dir + '/*.jpg')\n", (320, 343), False, 'import os, glob\n'), ((423, 439), 'PIL.Image.open', 'Image.open', (['file'], {}), '(file)\n', (433, 439), False, 'from PIL import Image\n'), ((547, 564), 'numpy.asarray', 'np.asarray', (['image'], {}), '(image)\n', (557, 564), True, 'import numpy as np\n')] |
immortel32/Sword_Sorcery_Story_Generator | app/main.py | 7978dfc335813362b2d94c455b970f58421123c8 | from services import waypoint_scenarios, quest_scenarios
from services.build_campaign import Campaign
from log_setup import log
if __name__ == "__main__":
number_waypoint_scenario = waypoint_scenarios.get_number_of_waypoint_scenarios()
log.info(f"We have {number_waypoint_scenario} waypoint available")
number_quests_available = quest_scenarios.get_number_of_quest_scenarios()
log.info(f"We have {number_quests_available} quests available")
random_waypoint_scenario = waypoint_scenarios.get_random_scenario(10)
random_quest = quest_scenarios.get_random_scenario(1)
campaign = Campaign()
campaign.build_campaign(
waypoint_list=random_waypoint_scenario, quest_list=random_quest
)
| [((187, 240), 'services.waypoint_scenarios.get_number_of_waypoint_scenarios', 'waypoint_scenarios.get_number_of_waypoint_scenarios', ([], {}), '()\n', (238, 240), False, 'from services import waypoint_scenarios, quest_scenarios\n'), ((245, 311), 'log_setup.log.info', 'log.info', (['f"""We have {number_waypoint_scenario} waypoint available"""'], {}), "(f'We have {number_waypoint_scenario} waypoint available')\n", (253, 311), False, 'from log_setup import log\n'), ((342, 389), 'services.quest_scenarios.get_number_of_quest_scenarios', 'quest_scenarios.get_number_of_quest_scenarios', ([], {}), '()\n', (387, 389), False, 'from services import waypoint_scenarios, quest_scenarios\n'), ((394, 457), 'log_setup.log.info', 'log.info', (['f"""We have {number_quests_available} quests available"""'], {}), "(f'We have {number_quests_available} quests available')\n", (402, 457), False, 'from log_setup import log\n'), ((490, 532), 'services.waypoint_scenarios.get_random_scenario', 'waypoint_scenarios.get_random_scenario', (['(10)'], {}), '(10)\n', (528, 532), False, 'from services import waypoint_scenarios, quest_scenarios\n'), ((552, 590), 'services.quest_scenarios.get_random_scenario', 'quest_scenarios.get_random_scenario', (['(1)'], {}), '(1)\n', (587, 590), False, 'from services import waypoint_scenarios, quest_scenarios\n'), ((606, 616), 'services.build_campaign.Campaign', 'Campaign', ([], {}), '()\n', (614, 616), False, 'from services.build_campaign import Campaign\n')] |
mbargull/conda-build | tests/test-recipes/metadata/ignore_some_prefix_files/run_test.py | ebc56f48196774301863fecbe98a32a7ded6eb7e | import os
pkgs = os.path.join(os.environ["ROOT"], "pkgs")
info_dir = os.path.join(pkgs, "conda-build-test-ignore-some-prefix-files-1.0-0", "info")
has_prefix_file = os.path.join(info_dir, "has_prefix")
print(info_dir)
assert os.path.isfile(has_prefix_file)
with open(has_prefix_file) as f:
assert "test2" not in f.read()
| [((18, 58), 'os.path.join', 'os.path.join', (["os.environ['ROOT']", '"""pkgs"""'], {}), "(os.environ['ROOT'], 'pkgs')\n", (30, 58), False, 'import os\n'), ((70, 147), 'os.path.join', 'os.path.join', (['pkgs', '"""conda-build-test-ignore-some-prefix-files-1.0-0"""', '"""info"""'], {}), "(pkgs, 'conda-build-test-ignore-some-prefix-files-1.0-0', 'info')\n", (82, 147), False, 'import os\n'), ((166, 202), 'os.path.join', 'os.path.join', (['info_dir', '"""has_prefix"""'], {}), "(info_dir, 'has_prefix')\n", (178, 202), False, 'import os\n'), ((226, 257), 'os.path.isfile', 'os.path.isfile', (['has_prefix_file'], {}), '(has_prefix_file)\n', (240, 257), False, 'import os\n')] |
Lukeming-tsinghua/Interpretable-NN-for-IBD-diagnosis | distill.py | 5fb0fae774e010cdd6b63ff487a4528f0397647d | import os
from collections import namedtuple
import torch
import torch.nn as nn
import torch.nn.functional as F
from sklearn.metrics import classification_report
from torch.optim import Adam
from tqdm import tqdm
from data import DataIteratorDistill
from loss import FocalLoss
from model import CNN
from torchtext import data, vocab
from args import get_args, print_args
from config import ConfigBinaryClassification
from config import ConfigBinaryClassificationDistill
from config import ConfigTripleClassification
if __name__ == "__main__":
args = get_args()
print_args(args)
if args.class_num == 2:
cfg = ConfigBinaryClassificationDistill()
elif args.class_num == 3:
cfg = ConfigTripleClassification()
else:
raise ValueError("wrong class num")
device = torch.device("cuda:%d" % args.cuda)
Data = DataIteratorDistill(config=cfg, train_batchsize=args.batch_size)
model = torch.load("checkpoints/CNN-29", map_location=device)
optimizer = Adam(model.parameters(), lr=args.lr)
criterion = FocalLoss(classes=args.class_num, device=device).to(device)
criterion_kv = nn.KLDivLoss().to(device)
alpha = 0.2
T = 2
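    # Distillation hyper-parameters: `alpha` blends the hard-label focal loss with
    # the soft-label KL term, and `T` is the softmax temperature; the KL loss below
    # is scaled by T * T so its gradients stay comparable to the hard-label loss
    # (standard Hinton-style knowledge distillation).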
for epoch in range(args.epoch_num):
print(epoch)
for sample in Data.train_iter:
model.train()
optimizer.zero_grad()
output = model(sample.text.permute(1, 0).to(device))
loss_f = criterion(output, sample.label.to(device))
output = F.log_softmax(output/T, 1)
score = torch.cat((sample.pred0.unsqueeze(1).to(device),
sample.pred1.unsqueeze(1).to(device)), dim=1)
score = F.softmax(score/T,1)
loss_kv = criterion_kv(output, score.to(device)) * T * T
loss = alpha * loss_f + (1 - alpha) * loss_kv
#print(loss_f.item(), loss_kv.item())
loss.backward()
optimizer.step()
with torch.no_grad():
model.eval()
preds = []
labels = []
for sample in Data.valid_iter:
output = model(sample.text.permute(1, 0).to(device))
p = output.argmax(1).cpu().tolist()
l = sample.label.tolist()
preds += p
labels += l
report = classification_report(preds, labels)
print(report)
torch.save(model, os.path.join(args.save_dir, args.save_config + str(epoch)))
| [((558, 568), 'args.get_args', 'get_args', ([], {}), '()\n', (566, 568), False, 'from args import get_args, print_args\n'), ((573, 589), 'args.print_args', 'print_args', (['args'], {}), '(args)\n', (583, 589), False, 'from args import get_args, print_args\n'), ((810, 845), 'torch.device', 'torch.device', (["('cuda:%d' % args.cuda)"], {}), "('cuda:%d' % args.cuda)\n", (822, 845), False, 'import torch\n'), ((857, 921), 'data.DataIteratorDistill', 'DataIteratorDistill', ([], {'config': 'cfg', 'train_batchsize': 'args.batch_size'}), '(config=cfg, train_batchsize=args.batch_size)\n', (876, 921), False, 'from data import DataIteratorDistill\n'), ((934, 987), 'torch.load', 'torch.load', (['"""checkpoints/CNN-29"""'], {'map_location': 'device'}), "('checkpoints/CNN-29', map_location=device)\n", (944, 987), False, 'import torch\n'), ((633, 668), 'config.ConfigBinaryClassificationDistill', 'ConfigBinaryClassificationDistill', ([], {}), '()\n', (666, 668), False, 'from config import ConfigBinaryClassificationDistill\n'), ((713, 741), 'config.ConfigTripleClassification', 'ConfigTripleClassification', ([], {}), '()\n', (739, 741), False, 'from config import ConfigTripleClassification\n'), ((1059, 1107), 'loss.FocalLoss', 'FocalLoss', ([], {'classes': 'args.class_num', 'device': 'device'}), '(classes=args.class_num, device=device)\n', (1068, 1107), False, 'from loss import FocalLoss\n'), ((1138, 1152), 'torch.nn.KLDivLoss', 'nn.KLDivLoss', ([], {}), '()\n', (1150, 1152), True, 'import torch.nn as nn\n'), ((1501, 1529), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['(output / T)', '(1)'], {}), '(output / T, 1)\n', (1514, 1529), True, 'import torch.nn.functional as F\n'), ((1680, 1703), 'torch.nn.functional.softmax', 'F.softmax', (['(score / T)', '(1)'], {}), '(score / T, 1)\n', (1689, 1703), True, 'import torch.nn.functional as F\n'), ((1949, 1964), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1962, 1964), False, 'import torch\n'), ((2320, 2356), 'sklearn.metrics.classification_report', 'classification_report', (['preds', 'labels'], {}), '(preds, labels)\n', (2341, 2356), False, 'from sklearn.metrics import classification_report\n')] |
janbrohl/SimpleTAL | tests/TALTests/HTMLTests/TALAttributesTestCases.py | f5a3ddd9a74cf9af7356bb431513e3534717802d | #!/usr/bin/python
# -*- coding: iso-8859-1 -*-
# Copyright (c) 2016, Jan Brohl <[email protected]>
# All rights reserved.
# See LICENSE.txt
# Copyright (c) 2004 Colin Stewart (http://www.owlfish.com/)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# If you make any bug fixes or feature enhancements please let me know!
"""
Unit test cases.
"""
from __future__ import unicode_literals
import unittest
import os
import io
import logging
import logging.config
from simpletal import simpleTAL, simpleTALES
if (os.path.exists("logging.ini")):
logging.config.fileConfig("logging.ini")
else:
logging.basicConfig()
class TALAttributesTestCases(unittest.TestCase):
def setUp(self):
self.context = simpleTALES.Context()
self.context.addGlobal('test', 'testing')
self.context.addGlobal('link', 'www.owlfish.com')
self.context.addGlobal('needsQuoting', """Does "this" work?""")
self.context.addGlobal('number', 5)
self.context.addGlobal('uniQuote', 'Does "this" work?')
self.context.addGlobal('anotherdefault', {
'inhere': simpleTALES.DEFAULTVALUE
})
def _runTest_(self, txt, result, errMsg="Error"):
template = simpleTAL.compileHTMLTemplate(txt)
file = io.StringIO()
template.expand(self.context, file)
realResult = file.getvalue()
self.assertEqual(
realResult, result,
"%s - \npassed in: %s \ngot back %s \nexpected %s\n\nTemplate: %s"
% (errMsg, txt, realResult, result, template))
def testAddingAnAttribute(self):
self._runTest_(
'<html tal:attributes="link link" href="owlfish.com">Hello</html>',
'<html link="www.owlfish.com" href="owlfish.com">Hello</html>',
"Addition of attribute 'link' failed.")
def testRemovingAnAttribute(self):
self._runTest_(
'<html class="test" tal:attributes="href nothing" href="owlfish.com">Hello</html>',
'<html class="test">Hello</html>',
"Removal of attribute 'href' failed.")
def testDefaultAttribute(self):
self._runTest_(
'<html class="test" tal:attributes="href default" href="owlfish.com">Hello</html>',
'<html class="test" href="owlfish.com">Hello</html>',
"Defaulting of attribute 'href' failed.")
def testAnotherDefaultAttribute(self):
self._runTest_(
'<html class="test" tal:attributes="href anotherdefault/inhere" href="owlfish.com">Hello</html>',
'<html class="test" href="owlfish.com">Hello</html>',
"Defaulting of attribute 'href' failed.")
def testMultipleAttributes(self):
self._runTest_(
'<html old="still here" class="test" tal:attributes="href default;class nothing;new test" href="owlfish.com">Hello</html>',
'<html new="testing" old="still here" href="owlfish.com">Hello</html>',
"Setting multiple attributes at once failed.")
def testMultipleAttributesSpace(self):
self._runTest_(
'<html old="still here" class="test" tal:attributes="href default ; class string:Hello there; new test" href="owlfish.com">Hello</html>',
'<html class="Hello there" new="testing" old="still here" href="owlfish.com">Hello</html>',
"Setting multiple attributes at once, with spaces between semi-colons, failed."
)
def testMultipleAttributesEscaped(self):
self._runTest_(
'<html old="still " here" class="test" tal:attributes="href default ; class string: Semi-colon;;test;new test " href="owlfish.com">Hello</html>',
'''<html class="Semi-colon;test" new="testing" old='still " here' href="owlfish.com">Hello</html>''',
"Setting multiple attributes at once, with spaces between semi-colons, failed."
)
def testAttributeEscaping(self):
self._runTest_(
'<html existingAtt=""Testing"" tal:attributes="href needsQuoting">Hello</html>',
"""<html href='Does "this" work?' existingatt='"Testing"'>Hello</html>""",
"Escaping of new attributes failed.")
def testNumberAttributeEscaping(self):
self._runTest_(
'<html existingAtt=""Testing"" tal:attributes="href number">Hello</html>',
"""<html href="5" existingatt='"Testing"'>Hello</html>""",
"Escaping of new attributes failed.")
    def testUnicodeAttributeEscaping(self):
self._runTest_(
'<html existingAtt=""Testing"" tal:attributes="href uniQuote">Hello</html>',
"""<html href='Does "this" work?' existingatt='"Testing"'>Hello</html>""",
"Escaping of new attributes failed.")
def testOriginalAttributes(self):
self._runTest_(
'<html existingAtt=""Testing"" tal:attributes="newAtt attrs/existingatt" tal:content="attrs/existingatt">Hello</html>',
"""<html newAtt='"Testing"' existingatt='"Testing"'>"Testing"</html>""",
"Accessing existing attributes failed.")
def testMultipleOriginalAttributes(self):
self._runTest_(
'<html one="Value One" two="Value two" three="Value three" tal:attributes="four attrs/three" tal:content="attrs/one">Hello</html>',
"""<html four="Value three" one="Value One" two="Value two" three="Value three">Value One</html>""",
"Accessing multiple existing attributes failed.")
def testAmpersandEscapeInAttributes(self):
self._runTest_(
'<html existingAtt="&Testing&" tal:attributes="newAtt attrs/existingatt" tal:content="attrs/existingatt">Hello</html>',
"""<html newAtt="&Testing&" existingatt="&Testing&">&Testing&</html>""",
"Accessing existing attributes failed.")
#~ def testAttributeCase (self):
#~ self._runTest_ ('<html HREF="Testing" tal:attributes="HREF test">Hello</html>'
#~ ,"""<html href="testing">Hello</html>"""
#~ ,"HTML Attributes not treated as case insensitive.")
if __name__ == '__main__':
unittest.main()
| [((1961, 1990), 'os.path.exists', 'os.path.exists', (['"""logging.ini"""'], {}), "('logging.ini')\n", (1975, 1990), False, 'import os\n'), ((1997, 2037), 'logging.config.fileConfig', 'logging.config.fileConfig', (['"""logging.ini"""'], {}), "('logging.ini')\n", (2022, 2037), False, 'import logging\n'), ((2048, 2069), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (2067, 2069), False, 'import logging\n'), ((7581, 7596), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7594, 7596), False, 'import unittest\n'), ((2165, 2186), 'simpletal.simpleTALES.Context', 'simpleTALES.Context', ([], {}), '()\n', (2184, 2186), False, 'from simpletal import simpleTAL, simpleTALES\n'), ((2658, 2692), 'simpletal.simpleTAL.compileHTMLTemplate', 'simpleTAL.compileHTMLTemplate', (['txt'], {}), '(txt)\n', (2687, 2692), False, 'from simpletal import simpleTAL, simpleTALES\n'), ((2708, 2721), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (2719, 2721), False, 'import io\n')] |
EBI-Metagenomics/iseq-prof | iseq_prof/fasta.py | ca41a0f3aa1e70e59648bdc08b36da1ec76220ad | from pathlib import Path
from typing import List
from fasta_reader import FASTAItem, FASTAWriter, read_fasta
__all__ = ["downsample"]
def downsample(infile: Path, outfile: Path, size: int, random):
targets: List[FASTAItem] = list(read_fasta(infile))
if size > len(targets):
raise ValueError("Size is greater than the number of targets.")
targets = random.choice(targets, size, replace=False).tolist()
with FASTAWriter(outfile) as writer:
for target in targets:
writer.write_item(target.defline, target.sequence)
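# Usage sketch (illustrative names): `random` is expected to expose a NumPy-style
# `choice(seq, size, replace=False)`, e.g.
#   downsample(Path("in.fasta"), Path("out.fasta"), 100, numpy.random.default_rng())
# keeps 100 records drawn without replacement.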
| [((238, 256), 'fasta_reader.read_fasta', 'read_fasta', (['infile'], {}), '(infile)\n', (248, 256), False, 'from fasta_reader import FASTAItem, FASTAWriter, read_fasta\n'), ((436, 456), 'fasta_reader.FASTAWriter', 'FASTAWriter', (['outfile'], {}), '(outfile)\n', (447, 456), False, 'from fasta_reader import FASTAItem, FASTAWriter, read_fasta\n')] |
DhruvSrikanth/TSLA-React | src/data_settings.py | 2ce4edb6b21ec1a301047124cfda5bb30deb3a90 | # API keys
# YF_API_KEY = "YRVHVLiFAt3ANYZf00BXr2LHNfZcgKzdWVmsZ9Xi" # yahoo finance api key
TICKER = "TSLA"
INTERVAL = "1m"
PERIOD = "1d"
LOOK_BACK = 30 # hard limit to not reach rate limit of 100 per day | [] |
phoenix-xhuang/ymir | ymir/backend/src/ymir_controller/controller/utils/invoker_mapping.py | 537d3ac389c4a365ce4daef431c95b42ddcd5b1b | from controller.invoker import (
invoker_cmd_branch_checkout,
invoker_cmd_branch_commit,
invoker_cmd_branch_create,
invoker_cmd_branch_delete,
invoker_cmd_branch_list,
invoker_cmd_evaluate,
invoker_cmd_filter,
invoker_cmd_gpu_info,
invoker_cmd_inference,
invoker_cmd_init,
invoker_cmd_label_add,
invoker_cmd_label_get,
invoker_cmd_log,
invoker_cmd_merge,
invoker_cmd_pull_image,
invoker_cmd_repo_check,
invoker_cmd_repo_clear,
invoker_cmd_sampling,
invoker_cmd_terminate,
invoker_cmd_user_create,
invoker_task_factory,
)
from proto import backend_pb2
RequestTypeToInvoker = {
backend_pb2.CMD_BRANCH_CHECKOUT: invoker_cmd_branch_checkout.BranchCheckoutInvoker,
backend_pb2.CMD_BRANCH_CREATE: invoker_cmd_branch_create.BranchCreateInvoker,
backend_pb2.CMD_BRANCH_DEL: invoker_cmd_branch_delete.BranchDeleteInvoker,
backend_pb2.CMD_BRANCH_LIST: invoker_cmd_branch_list.BranchListInvoker,
backend_pb2.CMD_COMMIT: invoker_cmd_branch_commit.BranchCommitInvoker,
backend_pb2.CMD_EVALUATE: invoker_cmd_evaluate.EvaluateInvoker,
backend_pb2.CMD_FILTER: invoker_cmd_filter.FilterBranchInvoker,
backend_pb2.CMD_GPU_INFO_GET: invoker_cmd_gpu_info.GPUInfoInvoker,
backend_pb2.CMD_INFERENCE: invoker_cmd_inference.InferenceCMDInvoker,
backend_pb2.CMD_INIT: invoker_cmd_init.InitInvoker,
backend_pb2.CMD_LABEL_ADD: invoker_cmd_label_add.LabelAddInvoker,
backend_pb2.CMD_LABEL_GET: invoker_cmd_label_get.LabelGetInvoker,
backend_pb2.CMD_LOG: invoker_cmd_log.LogInvoker,
backend_pb2.CMD_MERGE: invoker_cmd_merge.MergeInvoker,
backend_pb2.CMD_PULL_IMAGE: invoker_cmd_pull_image.ImageHandler,
backend_pb2.CMD_TERMINATE: invoker_cmd_terminate.CMDTerminateInvoker,
backend_pb2.CMD_REPO_CHECK: invoker_cmd_repo_check.RepoCheckInvoker,
backend_pb2.CMD_REPO_CLEAR: invoker_cmd_repo_clear.RepoClearInvoker,
backend_pb2.REPO_CREATE: invoker_cmd_init.InitInvoker,
backend_pb2.TASK_CREATE: invoker_task_factory.CreateTaskInvokerFactory,
backend_pb2.USER_CREATE: invoker_cmd_user_create.UserCreateInvoker,
backend_pb2.CMD_SAMPLING: invoker_cmd_sampling.SamplingInvoker,
}
| [] |
asuol/worky | tests/utils/date_utils.py | 362257e77486af05941cc977055c01e49b09a2dd | from datetime import datetime, timedelta
due_date_format = '%Y-%m-%d'
datepicker_date_format = '%m%d%Y'
def current_date():
return datetime.utcnow().strftime(due_date_format)
def datepicker_current_date():
return datetime.utcnow().strftime(datepicker_date_format)
def _date_from_today(days_to_add):
return datetime.utcnow() + timedelta(days=days_to_add)
def date_from_today(days_to_add):
return _date_from_today(days_to_add).strftime(due_date_format)
def datepicker_date_from_today(days_to_add):
return _date_from_today(days_to_add).strftime(datepicker_date_format)
def datepicker_to_due_date_format(datepicker_date):
return datetime.strptime(datepicker_date,
datepicker_date_format).strftime(due_date_format)
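# Example: datepicker_to_due_date_format("04052024") returns "2024-04-05".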
| [((326, 343), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (341, 343), False, 'from datetime import datetime, timedelta\n'), ((346, 373), 'datetime.timedelta', 'timedelta', ([], {'days': 'days_to_add'}), '(days=days_to_add)\n', (355, 373), False, 'from datetime import datetime, timedelta\n'), ((139, 156), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (154, 156), False, 'from datetime import datetime, timedelta\n'), ((227, 244), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (242, 244), False, 'from datetime import datetime, timedelta\n'), ((663, 721), 'datetime.datetime.strptime', 'datetime.strptime', (['datepicker_date', 'datepicker_date_format'], {}), '(datepicker_date, datepicker_date_format)\n', (680, 721), False, 'from datetime import datetime, timedelta\n')] |
luk-f-a/numba | numba/roc/tests/hsapy/test_gufuncbuilding.py | 3a682bd827e416335e3574bc7b10f0ec69adb701 | import numpy as np
from numba.roc.vectorizers import HsaGUFuncVectorize
from numba.roc.dispatch import HSAGenerializedUFunc
from numba import guvectorize
import unittest
def ufunc_add_core(a, b, c):
for i in range(c.size):
c[i] = a[i] + b[i]
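# The "(x),(x)->(x)" layout used below declares one shared core dimension: the
# guvectorize machinery broadcasts any leading loop dimensions and invokes this
# kernel once per 1-D slice, e.g. inputs shaped (5, 2) trigger five calls on
# slices of length 2.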
class TestGUFuncBuilding(unittest.TestCase):
def test_gufunc_building(self):
ufbldr = HsaGUFuncVectorize(ufunc_add_core, "(x),(x)->(x)")
ufbldr.add("(float32[:], float32[:], float32[:])")
ufbldr.add("(intp[:], intp[:], intp[:])")
ufunc = ufbldr.build_ufunc()
self.assertIsInstance(ufunc, HSAGenerializedUFunc)
# Test integer version
A = np.arange(100, dtype=np.intp)
B = np.arange(100, dtype=np.intp) + 1
expected = A + B
got = ufunc(A, B)
np.testing.assert_equal(expected, got)
self.assertEqual(expected.dtype, got.dtype)
self.assertEqual(np.dtype(np.intp), got.dtype)
# Test integer version with 2D inputs
A = A.reshape(50, 2)
B = B.reshape(50, 2)
expected = A + B
got = ufunc(A, B)
np.testing.assert_equal(expected, got)
self.assertEqual(expected.dtype, got.dtype)
self.assertEqual(np.dtype(np.intp), got.dtype)
# Test integer version with 3D inputs
A = A.reshape(5, 10, 2)
B = B.reshape(5, 10, 2)
expected = A + B
got = ufunc(A, B)
np.testing.assert_equal(expected, got)
self.assertEqual(expected.dtype, got.dtype)
self.assertEqual(np.dtype(np.intp), got.dtype)
# Test real version
A = np.arange(100, dtype=np.float32)
B = np.arange(100, dtype=np.float32) + 1
expected = A + B
got = ufunc(A, B)
np.testing.assert_allclose(expected, got)
self.assertEqual(expected.dtype, got.dtype)
self.assertEqual(np.dtype(np.float32), got.dtype)
# Test real version with 2D inputs
A = A.reshape(50, 2)
B = B.reshape(50, 2)
expected = A + B
got = ufunc(A, B)
np.testing.assert_allclose(expected, got)
self.assertEqual(expected.dtype, got.dtype)
self.assertEqual(np.dtype(np.float32), got.dtype)
def test_gufunc_building_scalar_output(self):
def sum_row(inp, out):
tmp = 0.
for i in range(inp.shape[0]):
tmp += inp[i]
out[0] = tmp
ufbldr = HsaGUFuncVectorize(sum_row, "(n)->()")
ufbldr.add("void(int32[:], int32[:])")
ufunc = ufbldr.build_ufunc()
inp = np.arange(300, dtype=np.int32).reshape(100, 3)
out = ufunc(inp)
for i in range(inp.shape[0]):
np.testing.assert_equal(inp[i].sum(), out[i])
def test_gufunc_scalar_input_saxpy(self):
def axpy(a, x, y, out):
for i in range(out.shape[0]):
out[i] = a * x[i] + y[i]
ufbldr = HsaGUFuncVectorize(axpy, '(),(t),(t)->(t)')
ufbldr.add("void(float32, float32[:], float32[:], float32[:])")
saxpy = ufbldr.build_ufunc()
A = np.float32(2)
X = np.arange(10, dtype=np.float32).reshape(5, 2)
Y = np.arange(10, dtype=np.float32).reshape(5, 2)
out = saxpy(A, X, Y)
for j in range(5):
for i in range(2):
exp = A * X[j, i] + Y[j, i]
self.assertTrue(exp == out[j, i])
X = np.arange(10, dtype=np.float32)
Y = np.arange(10, dtype=np.float32)
out = saxpy(A, X, Y)
for j in range(10):
exp = A * X[j] + Y[j]
self.assertTrue(exp == out[j], (exp, out[j]))
A = np.arange(5, dtype=np.float32)
X = np.arange(10, dtype=np.float32).reshape(5, 2)
Y = np.arange(10, dtype=np.float32).reshape(5, 2)
out = saxpy(A, X, Y)
for j in range(5):
for i in range(2):
exp = A[j] * X[j, i] + Y[j, i]
self.assertTrue(exp == out[j, i], (exp, out[j, i]))
class TestGUFuncDecor(unittest.TestCase):
def test_gufunc_decorator(self):
@guvectorize(["void(float32, float32[:], float32[:], float32[:])"],
'(),(t),(t)->(t)', target='roc')
def saxpy(a, x, y, out):
for i in range(out.shape[0]):
out[i] = a * x[i] + y[i]
A = np.float32(2)
X = np.arange(10, dtype=np.float32).reshape(5, 2)
Y = np.arange(10, dtype=np.float32).reshape(5, 2)
out = saxpy(A, X, Y)
for j in range(5):
for i in range(2):
exp = A * X[j, i] + Y[j, i]
self.assertTrue(exp == out[j, i])
X = np.arange(10, dtype=np.float32)
Y = np.arange(10, dtype=np.float32)
out = saxpy(A, X, Y)
for j in range(10):
exp = A * X[j] + Y[j]
self.assertTrue(exp == out[j], (exp, out[j]))
A = np.arange(5, dtype=np.float32)
X = np.arange(10, dtype=np.float32).reshape(5, 2)
Y = np.arange(10, dtype=np.float32).reshape(5, 2)
out = saxpy(A, X, Y)
for j in range(5):
for i in range(2):
exp = A[j] * X[j, i] + Y[j, i]
self.assertTrue(exp == out[j, i], (exp, out[j, i]))
if __name__ == '__main__':
unittest.main()
| [((5288, 5303), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5301, 5303), False, 'import unittest\n'), ((357, 407), 'numba.roc.vectorizers.HsaGUFuncVectorize', 'HsaGUFuncVectorize', (['ufunc_add_core', '"""(x),(x)->(x)"""'], {}), "(ufunc_add_core, '(x),(x)->(x)')\n", (375, 407), False, 'from numba.roc.vectorizers import HsaGUFuncVectorize\n'), ((657, 686), 'numpy.arange', 'np.arange', (['(100)'], {'dtype': 'np.intp'}), '(100, dtype=np.intp)\n', (666, 686), True, 'import numpy as np\n'), ((793, 831), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['expected', 'got'], {}), '(expected, got)\n', (816, 831), True, 'import numpy as np\n'), ((1104, 1142), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['expected', 'got'], {}), '(expected, got)\n', (1127, 1142), True, 'import numpy as np\n'), ((1421, 1459), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['expected', 'got'], {}), '(expected, got)\n', (1444, 1459), True, 'import numpy as np\n'), ((1608, 1640), 'numpy.arange', 'np.arange', (['(100)'], {'dtype': 'np.float32'}), '(100, dtype=np.float32)\n', (1617, 1640), True, 'import numpy as np\n'), ((1750, 1791), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['expected', 'got'], {}), '(expected, got)\n', (1776, 1791), True, 'import numpy as np\n'), ((2064, 2105), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['expected', 'got'], {}), '(expected, got)\n', (2090, 2105), True, 'import numpy as np\n'), ((2434, 2472), 'numba.roc.vectorizers.HsaGUFuncVectorize', 'HsaGUFuncVectorize', (['sum_row', '"""(n)->()"""'], {}), "(sum_row, '(n)->()')\n", (2452, 2472), False, 'from numba.roc.vectorizers import HsaGUFuncVectorize\n'), ((2921, 2964), 'numba.roc.vectorizers.HsaGUFuncVectorize', 'HsaGUFuncVectorize', (['axpy', '"""(),(t),(t)->(t)"""'], {}), "(axpy, '(),(t),(t)->(t)')\n", (2939, 2964), False, 'from numba.roc.vectorizers import HsaGUFuncVectorize\n'), ((3087, 3100), 'numpy.float32', 'np.float32', (['(2)'], {}), '(2)\n', (3097, 3100), True, 'import numpy as np\n'), ((3412, 3443), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': 'np.float32'}), '(10, dtype=np.float32)\n', (3421, 3443), True, 'import numpy as np\n'), ((3456, 3487), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': 'np.float32'}), '(10, dtype=np.float32)\n', (3465, 3487), True, 'import numpy as np\n'), ((3651, 3681), 'numpy.arange', 'np.arange', (['(5)'], {'dtype': 'np.float32'}), '(5, dtype=np.float32)\n', (3660, 3681), True, 'import numpy as np\n'), ((4091, 4194), 'numba.guvectorize', 'guvectorize', (["['void(float32, float32[:], float32[:], float32[:])']", '"""(),(t),(t)->(t)"""'], {'target': '"""roc"""'}), "(['void(float32, float32[:], float32[:], float32[:])'],\n '(),(t),(t)->(t)', target='roc')\n", (4102, 4194), False, 'from numba import guvectorize\n'), ((4341, 4354), 'numpy.float32', 'np.float32', (['(2)'], {}), '(2)\n', (4351, 4354), True, 'import numpy as np\n'), ((4666, 4697), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': 'np.float32'}), '(10, dtype=np.float32)\n', (4675, 4697), True, 'import numpy as np\n'), ((4710, 4741), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': 'np.float32'}), '(10, dtype=np.float32)\n', (4719, 4741), True, 'import numpy as np\n'), ((4905, 4935), 'numpy.arange', 'np.arange', (['(5)'], {'dtype': 'np.float32'}), '(5, dtype=np.float32)\n', (4914, 4935), True, 'import numpy as np\n'), ((699, 728), 'numpy.arange', 'np.arange', (['(100)'], {'dtype': 'np.intp'}), '(100, dtype=np.intp)\n', (708, 728), True, 'import numpy as 
np\n'), ((909, 926), 'numpy.dtype', 'np.dtype', (['np.intp'], {}), '(np.intp)\n', (917, 926), True, 'import numpy as np\n'), ((1220, 1237), 'numpy.dtype', 'np.dtype', (['np.intp'], {}), '(np.intp)\n', (1228, 1237), True, 'import numpy as np\n'), ((1537, 1554), 'numpy.dtype', 'np.dtype', (['np.intp'], {}), '(np.intp)\n', (1545, 1554), True, 'import numpy as np\n'), ((1653, 1685), 'numpy.arange', 'np.arange', (['(100)'], {'dtype': 'np.float32'}), '(100, dtype=np.float32)\n', (1662, 1685), True, 'import numpy as np\n'), ((1869, 1889), 'numpy.dtype', 'np.dtype', (['np.float32'], {}), '(np.float32)\n', (1877, 1889), True, 'import numpy as np\n'), ((2183, 2203), 'numpy.dtype', 'np.dtype', (['np.float32'], {}), '(np.float32)\n', (2191, 2203), True, 'import numpy as np\n'), ((2572, 2602), 'numpy.arange', 'np.arange', (['(300)'], {'dtype': 'np.int32'}), '(300, dtype=np.int32)\n', (2581, 2602), True, 'import numpy as np\n'), ((3113, 3144), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': 'np.float32'}), '(10, dtype=np.float32)\n', (3122, 3144), True, 'import numpy as np\n'), ((3171, 3202), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': 'np.float32'}), '(10, dtype=np.float32)\n', (3180, 3202), True, 'import numpy as np\n'), ((3694, 3725), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': 'np.float32'}), '(10, dtype=np.float32)\n', (3703, 3725), True, 'import numpy as np\n'), ((3752, 3783), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': 'np.float32'}), '(10, dtype=np.float32)\n', (3761, 3783), True, 'import numpy as np\n'), ((4367, 4398), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': 'np.float32'}), '(10, dtype=np.float32)\n', (4376, 4398), True, 'import numpy as np\n'), ((4425, 4456), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': 'np.float32'}), '(10, dtype=np.float32)\n', (4434, 4456), True, 'import numpy as np\n'), ((4948, 4979), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': 'np.float32'}), '(10, dtype=np.float32)\n', (4957, 4979), True, 'import numpy as np\n'), ((5006, 5037), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': 'np.float32'}), '(10, dtype=np.float32)\n', (5015, 5037), True, 'import numpy as np\n')] |
SRM-IST-KTR/ossmosis | server/form/mongo.py | 06e375dfdd67f91286ffbcb13e04b6543585d8ad | import os
from pymongo import MongoClient
from dotenv import load_dotenv
def database_entry(data):
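    # Connection settings come from a local .env file via python-dotenv:
    # MONGODB_AUTH_URI holds the connection string and MONGODB_DB the database name.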
try:
load_dotenv()
mongo_string = os.getenv('MONGODB_AUTH_URI')
client = MongoClient(mongo_string)
database = client[os.getenv('MONGODB_DB')]
col = database['users']
col.insert_one(data)
return True
except Exception as e:
print(e)
return False
if __name__ == "__main__":
pass
| [((118, 131), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (129, 131), False, 'from dotenv import load_dotenv\n'), ((155, 184), 'os.getenv', 'os.getenv', (['"""MONGODB_AUTH_URI"""'], {}), "('MONGODB_AUTH_URI')\n", (164, 184), False, 'import os\n'), ((202, 227), 'pymongo.MongoClient', 'MongoClient', (['mongo_string'], {}), '(mongo_string)\n', (213, 227), False, 'from pymongo import MongoClient\n'), ((254, 277), 'os.getenv', 'os.getenv', (['"""MONGODB_DB"""'], {}), "('MONGODB_DB')\n", (263, 277), False, 'import os\n')] |
giserh/book-python | control-flow/solution/file_hosts.py | ebd4e70cea1dd56986aa8efbae3629ba3f1ba087 | FILE = r'../src/etc-hosts.txt'
hostnames = []
content = []
try:
with open(FILE, encoding='utf-8') as file:
content = file.readlines()
except FileNotFoundError:
print('File does not exist')
except PermissionError:
print('Permission denied')
for line in content:
if line.startswith('#'):
continue
if line.isspace():
continue
line = line.strip().split()
ip = line[0]
hosts = line[1:]
for record in hostnames:
if record['ip'] == ip:
record['hostnames'].update(hosts)
break
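    # for-else: the else branch runs only when the loop finishes without `break`,
    # i.e. when no existing record already holds this IP address.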
else:
hostnames.append({
'hostnames': set(hosts),
'protocol': 'IPv4' if '.' in ip else 'IPv6',
'ip': ip,
})
print(hostnames)
| [] |
eaedk/testing-zindi-package | zindi/docs/utils/n_subimissions_per_day.py | 5aef7375a629b328fa8ecf9c4559e2897611a1e9 | def n_subimissions_per_day( url, headers ):
"""Get the number of submissions we can make per day for the selected challenge.
Parameters
----------
    url : str
        The URL of the selected challenge on the Zindi platform.
    headers : dict
The headers of the request.
Returns
-------
    n_sub : int
        The number of submissions we can make per day; 0 means the
        information could not be retrieved.
""" | [] |
ssavinash1/Algorithm_stanford | algo/test/test_maximum_cut.py | f2588b6bcac2b0858e78b819e6e8402109e80ee2 | # -*- coding: utf-8 -*-
import unittest
from src.graph import Graph
from src.maximum_cut import maximum_cut, maximum_cut_for_bipartite_graph
class MaximumCut(unittest.TestCase):
def test_maximum_cut_for_bipartite_graphs(self):
""" Given the following bipartite graph.
(a)-----(b)
\
\----(c)
(d)-----(e)
/
(f)----/
\
\----(g)
"""
g = Graph.build(edges=[('a', 'b'), ('a', 'c'),
('d', 'e'), ('f', 'e'), ('f', 'g')],
directed=False)
(left, right) = maximum_cut_for_bipartite_graph(g)
self.assertIn(len(left), [3,4], 'either 3 or 4')
        self.assertIn(len(right), [3,4], 'either 3 or 4')
self.assertEqual(7, len(left)+len(right), 'no vertex counted twice')
def test_maximum_cut_for_larger_bipartite_graphs(self):
""" A sligthly larger graph:
(a) (c)
| \ /|
| x |
| / \ |
(b) (d)
| \ /|
| x |
| / \ |
(e) (f)
"""
g = Graph.build(edges=[('a', 'b'), ('a', 'd'), ('c', 'b'), ('c', 'd'),
('b', 'e'), ('b', 'f'), ('d', 'e'), ('d', 'f')],
directed=False)
(left, right) = maximum_cut_for_bipartite_graph(g)
self.assertIn(set(left), [set(['a', 'c', 'e', 'f']), set(['b', 'd'])])
self.assertIn(set(right), [set(['a', 'c', 'e', 'f']), set(['b', 'd'])])
self.assertNotEqual(left, right, 'not the same subsets')
def test_maximum_cut(self):
""" Given a graph:
(u)----(v)
| \ / |
| \/ |
| /\ |
| / \ |
(w)---(x)
"""
g = Graph.build(edges=[
('u', 'v'), ('u', 'w'), ('u', 'x'), ('v', 'x'),('w', 'x')],
directed=False)
(left, right) = maximum_cut(g)
expected = [{'u', 'v'}, {'w', 'x'}, {'x', 'u'}, {'w', 'v'}]
self.assertNotEqual(left, right, 'no common vertices between cuts')
self.assertIn(set(left), expected, 'should correctly split the graph')
self.assertIn(set(right), expected, 'should correctly split the graph')
def test_weighted_maximum_cut(self):
""" Given the following weighted graph.
(u)-3-(v)
| \ / |
| 5\/1 4
2 /\ |
| / \ |
(w)-6-(x)
"""
g = Graph.build(edges=[
('u', 'v', 3), ('u', 'w', 2), ('u', 'x', 5),
('v', 'x', 4),('w', 'x', 6)],
directed=False)
(left, right) = maximum_cut(g)
self.assertEqual(2, len(left), 'left should contain 2 vertices')
self.assertEqual(2, len(right), 'right should contain 2 vertices')
| [((480, 579), 'src.graph.Graph.build', 'Graph.build', ([], {'edges': "[('a', 'b'), ('a', 'c'), ('d', 'e'), ('f', 'e'), ('f', 'g')]", 'directed': '(False)'}), "(edges=[('a', 'b'), ('a', 'c'), ('d', 'e'), ('f', 'e'), ('f',\n 'g')], directed=False)\n", (491, 579), False, 'from src.graph import Graph\n'), ((655, 689), 'src.maximum_cut.maximum_cut_for_bipartite_graph', 'maximum_cut_for_bipartite_graph', (['g'], {}), '(g)\n', (686, 689), False, 'from src.maximum_cut import maximum_cut, maximum_cut_for_bipartite_graph\n'), ((1161, 1296), 'src.graph.Graph.build', 'Graph.build', ([], {'edges': "[('a', 'b'), ('a', 'd'), ('c', 'b'), ('c', 'd'), ('b', 'e'), ('b', 'f'), (\n 'd', 'e'), ('d', 'f')]", 'directed': '(False)'}), "(edges=[('a', 'b'), ('a', 'd'), ('c', 'b'), ('c', 'd'), ('b',\n 'e'), ('b', 'f'), ('d', 'e'), ('d', 'f')], directed=False)\n", (1172, 1296), False, 'from src.graph import Graph\n'), ((1372, 1406), 'src.maximum_cut.maximum_cut_for_bipartite_graph', 'maximum_cut_for_bipartite_graph', (['g'], {}), '(g)\n', (1403, 1406), False, 'from src.maximum_cut import maximum_cut, maximum_cut_for_bipartite_graph\n'), ((1825, 1924), 'src.graph.Graph.build', 'Graph.build', ([], {'edges': "[('u', 'v'), ('u', 'w'), ('u', 'x'), ('v', 'x'), ('w', 'x')]", 'directed': '(False)'}), "(edges=[('u', 'v'), ('u', 'w'), ('u', 'x'), ('v', 'x'), ('w',\n 'x')], directed=False)\n", (1836, 1924), False, 'from src.graph import Graph\n'), ((1969, 1983), 'src.maximum_cut.maximum_cut', 'maximum_cut', (['g'], {}), '(g)\n', (1980, 1983), False, 'from src.maximum_cut import maximum_cut, maximum_cut_for_bipartite_graph\n'), ((2533, 2648), 'src.graph.Graph.build', 'Graph.build', ([], {'edges': "[('u', 'v', 3), ('u', 'w', 2), ('u', 'x', 5), ('v', 'x', 4), ('w', 'x', 6)]", 'directed': '(False)'}), "(edges=[('u', 'v', 3), ('u', 'w', 2), ('u', 'x', 5), ('v', 'x', \n 4), ('w', 'x', 6)], directed=False)\n", (2544, 2648), False, 'from src.graph import Graph\n'), ((2712, 2726), 'src.maximum_cut.maximum_cut', 'maximum_cut', (['g'], {}), '(g)\n', (2723, 2726), False, 'from src.maximum_cut import maximum_cut, maximum_cut_for_bipartite_graph\n')] |
kemonats/avs_commons | gdb/print-avs-rbtree.py | ecce4edf5376d132e3686af227c9adf22ce1090e | # -*- coding: utf-8 -*-
#
# Copyright 2021 AVSystem <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# installation: append "source PATH_TO_THIS_SCRIPT" to ~/.gdbinit
import gdb
class PrintAvsRbtreeBase(gdb.Command):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.intptr_type = gdb.lookup_type('unsigned long long')
self.int_type = gdb.lookup_type('int')
self.output_format = '%%s 0x%%0%dx = %%s' % (self.intptr_type.sizeof * 2,)
# TODO
self.color_offset = -32
self.parent_offset = -24
self.left_offset = -16
self.right_offset = -8
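        # These negative byte offsets assume the intrusive AVS_RBTREE node header
        # (color, parent, left, right) is stored immediately before the element
        # address that AVS_RBTREE_NODE pointers refer to, with 8-byte fields on a
        # 64-bit target; adjust them if the library layout differs.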
def _print_tree(self, ptr, path='', depth=0, visited_addrs=set()):
left_ptr_value = ptr.cast(self.intptr_type) + self.left_offset
left_ptr = left_ptr_value.cast(ptr.type.pointer()).dereference()
right_ptr_value = ptr.cast(self.intptr_type) + self.right_offset
right_ptr = right_ptr_value.cast(ptr.type.pointer()).dereference()
prefix = ''.join(' |' if x == 'L' else ' ' for x in path)
if path:
if path[-1] == 'L':
prefix += '- '
elif path[-1] == 'R':
prefix = prefix[:-1] + "'- "
print(prefix + self.output_format % (path[-1] if path else ' ', int(ptr), str(ptr.dereference())))
if int(left_ptr) in visited_addrs or int(right_ptr) in visited_addrs:
print('circular tree detected, stopping')
return
visited_addrs.add(left_ptr)
visited_addrs.add(right_ptr)
if int(left_ptr) != 0:
self._print_tree(left_ptr, path + 'L', depth+1, visited_addrs)
if int(right_ptr) != 0:
self._print_tree(right_ptr, path + 'R', depth+1, visited_addrs)
class PrintAvsRbtreeSubtree(PrintAvsRbtreeBase):
def __init__(self):
super().__init__('print-avs-rbtree-subtree',
gdb.COMMAND_DATA,
gdb.COMPLETE_EXPRESSION)
def invoke(self, argv_str, _from_tty):
args = gdb.string_to_argv(argv_str)
if len(args) != 1:
print('usage: print-avs-rbtree-subtree expr\n'
                  ' expr - an expression that evaluates to a valid AVS_RBTREE_NODE pointer\n')
return
expr = args[0]
val = gdb.parse_and_eval(expr)
if val is None:
print('cannot evaluate expression: ' + expr)
return
if val == 0:
print('(null)')
else:
self._print_tree(val)
class PrintAvsRbtree(PrintAvsRbtreeBase):
def __init__(self):
super().__init__('print-avs-rbtree',
gdb.COMMAND_DATA,
gdb.COMPLETE_EXPRESSION)
def invoke(self, argv_str, _from_tty):
args = gdb.string_to_argv(argv_str)
if len(args) != 1:
print('usage: print-avs-rbtree expr\n'
                  ' expr - an expression that evaluates to a valid AVS_RBTREE pointer\n')
return
expr = args[0]
val = gdb.parse_and_eval('*(' + expr + ')')
if val is None:
print('cannot evaluate expression: ' + expr)
return
if val == 0:
print('(null)')
else:
self._print_tree(val)
class PrintAvsRbtreeNode(PrintAvsRbtreeBase):
def __init__(self):
super().__init__('print-avs-rbtree-node',
gdb.COMMAND_DATA,
gdb.COMPLETE_EXPRESSION)
def invoke(self, argv_str, _from_tty):
args = gdb.string_to_argv(argv_str)
if len(args) not in (1, 2):
            print('usage: print-avs-rbtree-node expr [with_magic]\n'
                  ' expr - an expression that evaluates to a valid AVS_RBTREE_NODE pointer\n'
' with_magic - if present, "magic" fields are displayed\n')
return
expr = args[0]
with_magic = len(args) > 1
ptr = gdb.parse_and_eval(expr)
if ptr is None:
print('cannot evaluate expression: ' + expr)
return
if ptr == 0:
print('(null)')
else:
intptr_ptr = ptr.cast(self.intptr_type)
if with_magic:
print((intptr_ptr + self.rb_magic_offset))
print((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer()))
print('rb magic: %s' % ((intptr_ptr + self.rb_magic_offset).cast(self.int_type.pointer()).dereference()))
print('tree magic: %s' % ((intptr_ptr + self.tree_magic_offset).cast(self.int_type.pointer()).dereference()))
print('color: %s' % ((intptr_ptr + self.color_offset ).cast(self.int_type.pointer()).dereference()))
print('parent: 0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr + self.parent_offset).cast(ptr.type.pointer()).dereference()))
print('left: 0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr + self.left_offset ).cast(ptr.type.pointer()).dereference()))
print('right: 0x%%0%dx' % (self.intptr_type.sizeof * 2) % ((intptr_ptr + self.right_offset ).cast(ptr.type.pointer()).dereference()))
PrintAvsRbtreeSubtree()
PrintAvsRbtree()
PrintAvsRbtreeNode()
| [((852, 889), 'gdb.lookup_type', 'gdb.lookup_type', (['"""unsigned long long"""'], {}), "('unsigned long long')\n", (867, 889), False, 'import gdb\n'), ((914, 936), 'gdb.lookup_type', 'gdb.lookup_type', (['"""int"""'], {}), "('int')\n", (929, 936), False, 'import gdb\n'), ((2583, 2611), 'gdb.string_to_argv', 'gdb.string_to_argv', (['argv_str'], {}), '(argv_str)\n', (2601, 2611), False, 'import gdb\n'), ((2853, 2877), 'gdb.parse_and_eval', 'gdb.parse_and_eval', (['expr'], {}), '(expr)\n', (2871, 2877), False, 'import gdb\n'), ((3340, 3368), 'gdb.string_to_argv', 'gdb.string_to_argv', (['argv_str'], {}), '(argv_str)\n', (3358, 3368), False, 'import gdb\n'), ((3597, 3634), 'gdb.parse_and_eval', 'gdb.parse_and_eval', (["('*(' + expr + ')')"], {}), "('*(' + expr + ')')\n", (3615, 3634), False, 'import gdb\n'), ((4106, 4134), 'gdb.string_to_argv', 'gdb.string_to_argv', (['argv_str'], {}), '(argv_str)\n', (4124, 4134), False, 'import gdb\n'), ((4503, 4527), 'gdb.parse_and_eval', 'gdb.parse_and_eval', (['expr'], {}), '(expr)\n', (4521, 4527), False, 'import gdb\n')] |
sampx/mongodb-practice | hour17/PythonGroup.py | 0698b21b7da57693ba4146384c8ad65530b0066b | from pymongo import MongoClient
def displayGroup(results):
for result in results:
print (result)
def firstIsALastIsVowel(collection):
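    # group() arguments: `key` names the fields to group on, `cond` filters the
    # documents, `initial` seeds each group's accumulator, and `reduce` is a
    # JavaScript function executed once per matching document.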
key = {'first' : True, "last" : True}
cond = {'first' : 'a', 'last' :
{'$in' : ["a","e","i","o","u"]}}
initial = {'count' : 0}
reduce = "function (obj, prev) { prev.count++; }"
results = collection.group(key, cond, initial, reduce)
print ("\n\n'A' words grouped by first and last" + \
" letter that end with a vowel:")
displayGroup(results)
def firstLetterTotals(collection):
key = {'first' : True}
cond = {}
initial = {'vowels' : 0, 'cons' : 0}
reduce = "function (obj, prev) { " + \
"prev.vowels += obj.stats.vowels; " + \
"prev.cons += obj.stats.consonants; " + \
"}"
finalize = "function (obj) { " + \
"obj.total = obj.vowels + obj.cons; " + \
"}"
results = collection.group(key, cond, initial, reduce, finalize)
print ("\n\nWords grouped by first letter " + \
"with totals:")
displayGroup(results)
if __name__=="__main__":
mongo = MongoClient('mongodb://localhost:27017/')
db = mongo['words']
collection = db['word_stats']
firstIsALastIsVowel(collection)
firstLetterTotals(collection) | [((1199, 1240), 'pymongo.MongoClient', 'MongoClient', (['"""mongodb://localhost:27017/"""'], {}), "('mongodb://localhost:27017/')\n", (1210, 1240), False, 'from pymongo import MongoClient\n')] |
pection/packnet-sfm | packnet_sfm/models/model_utils.py | d5673567b649e6bfda292c894cacdeb06aa80913 | # Copyright 2020 Toyota Research Institute. All rights reserved.
from packnet_sfm.utils.image import flip_lr, interpolate_scales
from packnet_sfm.utils.misc import filter_dict
from packnet_sfm.utils.types import is_tensor, is_list, is_numpy
def flip(tensor, flip_fn):
"""
Flip tensors or list of tensors based on a function
Parameters
----------
tensor : torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]]
Tensor to be flipped
flip_fn : Function
Flip function
Returns
-------
tensor : torch.Tensor or list[torch.Tensor] or list[list[torch.Tensor]]
Flipped tensor or list of tensors
"""
if not is_list(tensor):
return flip_fn(tensor)
else:
if not is_list(tensor[0]):
return [flip_fn(val) for val in tensor]
else:
return [[flip_fn(v) for v in val] for val in tensor]
def merge_outputs(*outputs):
"""
Merges model outputs for logging
Parameters
----------
outputs : tuple of dict
Outputs to be merged
Returns
-------
output : dict
Dictionary with a "metrics" key containing a dictionary with various metrics and
all other keys that are not "loss" (it is handled differently).
"""
ignore = ['loss'] # Keys to ignore
combine = ['metrics'] # Keys to combine
merge = {key: {} for key in combine}
for output in outputs:
# Iterate over all keys
for key, val in output.items():
# Combine these keys
if key in combine:
for sub_key, sub_val in output[key].items():
assert sub_key not in merge[key].keys(), \
'Combining duplicated key {} to {}'.format(sub_key, key)
merge[key][sub_key] = sub_val
# Ignore these keys
elif key not in ignore:
assert key not in merge.keys(), \
'Adding duplicated key {}'.format(key)
merge[key] = val
return merge
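# Example of the merge behaviour documented above (hypothetical values):
#   merge_outputs({'metrics': {'abs_rel': 0.1}, 'depth': 1}, {'metrics': {'rmse': 4.0}})
#   -> {'metrics': {'abs_rel': 0.1, 'rmse': 4.0}, 'depth': 1}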
def stack_batch(batch):
"""
Stack multi-camera batches (B,N,C,H,W becomes BN,C,H,W)
Parameters
----------
batch : dict
Batch
Returns
-------
batch : dict
Stacked batch
"""
# If there is multi-camera information
if len(batch['rgb'].shape) == 5:
assert batch['rgb'].shape[0] == 1, 'Only batch size 1 is supported for multi-cameras'
# Loop over all keys
for key in batch.keys():
# If list, stack every item
if is_list(batch[key]):
if is_tensor(batch[key][0]) or is_numpy(batch[key][0]):
batch[key] = [sample[0] for sample in batch[key]]
# Else, stack single item
else:
batch[key] = batch[key][0]
return batch
def flip_batch_input(batch):
"""
Flip batch input information (copies data first)
Parameters
----------
batch : dict
Batch information
Returns
-------
batch : dict
Flipped batch
"""
# Flip tensors
for key in filter_dict(batch, [
'rgb', 'rgb_context',
'input_depth', 'input_depth_context',
]):
batch[key] = flip(batch[key], flip_lr)
# Flip intrinsics
for key in filter_dict(batch, [
'intrinsics'
]):
batch[key] = batch[key].clone()
batch[key][:, 0, 2] = batch['rgb'].shape[3] - batch[key][:, 0, 2]
# Return flipped batch
return batch
def flip_output(output):
"""
Flip output information
Parameters
----------
output : dict
Dictionary of model outputs (e.g. with keys like 'inv_depths' and 'uncertainty')
Returns
-------
output : dict
Flipped output
"""
# Flip tensors
for key in filter_dict(output, [
'uncertainty', 'logits_semantic', 'ord_probability',
'inv_depths', 'inv_depths_context', 'inv_depths1', 'inv_depths2',
'pred_depth', 'pred_depth_context', 'pred_depth1', 'pred_depth2',
'pred_inv_depth', 'pred_inv_depth_context', 'pred_inv_depth1', 'pred_inv_depth2',
]):
output[key] = flip(output[key], flip_lr)
return output
def upsample_output(output, mode='nearest', align_corners=None):
"""
Upsample multi-scale outputs to full resolution.
Parameters
----------
output : dict
Dictionary of model outputs (e.g. with keys like 'inv_depths' and 'uncertainty')
mode : str
Which interpolation mode is used
align_corners: bool or None
Whether corners will be aligned during interpolation
Returns
-------
output : dict
Upsampled output
"""
for key in filter_dict(output, [
'inv_depths', 'uncertainty'
]):
output[key] = interpolate_scales(
output[key], mode=mode, align_corners=align_corners)
for key in filter_dict(output, [
'inv_depths_context'
]):
output[key] = [interpolate_scales(
val, mode=mode, align_corners=align_corners) for val in output[key]]
return output
| [((3118, 3203), 'packnet_sfm.utils.misc.filter_dict', 'filter_dict', (['batch', "['rgb', 'rgb_context', 'input_depth', 'input_depth_context']"], {}), "(batch, ['rgb', 'rgb_context', 'input_depth', 'input_depth_context']\n )\n", (3129, 3203), False, 'from packnet_sfm.utils.misc import filter_dict\n'), ((3307, 3341), 'packnet_sfm.utils.misc.filter_dict', 'filter_dict', (['batch', "['intrinsics']"], {}), "(batch, ['intrinsics'])\n", (3318, 3341), False, 'from packnet_sfm.utils.misc import filter_dict\n'), ((3824, 4128), 'packnet_sfm.utils.misc.filter_dict', 'filter_dict', (['output', "['uncertainty', 'logits_semantic', 'ord_probability', 'inv_depths',\n 'inv_depths_context', 'inv_depths1', 'inv_depths2', 'pred_depth',\n 'pred_depth_context', 'pred_depth1', 'pred_depth2', 'pred_inv_depth',\n 'pred_inv_depth_context', 'pred_inv_depth1', 'pred_inv_depth2']"], {}), "(output, ['uncertainty', 'logits_semantic', 'ord_probability',\n 'inv_depths', 'inv_depths_context', 'inv_depths1', 'inv_depths2',\n 'pred_depth', 'pred_depth_context', 'pred_depth1', 'pred_depth2',\n 'pred_inv_depth', 'pred_inv_depth_context', 'pred_inv_depth1',\n 'pred_inv_depth2'])\n", (3835, 4128), False, 'from packnet_sfm.utils.misc import filter_dict\n'), ((4726, 4776), 'packnet_sfm.utils.misc.filter_dict', 'filter_dict', (['output', "['inv_depths', 'uncertainty']"], {}), "(output, ['inv_depths', 'uncertainty'])\n", (4737, 4776), False, 'from packnet_sfm.utils.misc import filter_dict\n'), ((4914, 4957), 'packnet_sfm.utils.misc.filter_dict', 'filter_dict', (['output', "['inv_depths_context']"], {}), "(output, ['inv_depths_context'])\n", (4925, 4957), False, 'from packnet_sfm.utils.misc import filter_dict\n'), ((679, 694), 'packnet_sfm.utils.types.is_list', 'is_list', (['tensor'], {}), '(tensor)\n', (686, 694), False, 'from packnet_sfm.utils.types import is_tensor, is_list, is_numpy\n'), ((4814, 4885), 'packnet_sfm.utils.image.interpolate_scales', 'interpolate_scales', (['output[key]'], {'mode': 'mode', 'align_corners': 'align_corners'}), '(output[key], mode=mode, align_corners=align_corners)\n', (4832, 4885), False, 'from packnet_sfm.utils.image import flip_lr, interpolate_scales\n'), ((752, 770), 'packnet_sfm.utils.types.is_list', 'is_list', (['tensor[0]'], {}), '(tensor[0])\n', (759, 770), False, 'from packnet_sfm.utils.types import is_tensor, is_list, is_numpy\n'), ((2567, 2586), 'packnet_sfm.utils.types.is_list', 'is_list', (['batch[key]'], {}), '(batch[key])\n', (2574, 2586), False, 'from packnet_sfm.utils.types import is_tensor, is_list, is_numpy\n'), ((4996, 5059), 'packnet_sfm.utils.image.interpolate_scales', 'interpolate_scales', (['val'], {'mode': 'mode', 'align_corners': 'align_corners'}), '(val, mode=mode, align_corners=align_corners)\n', (5014, 5059), False, 'from packnet_sfm.utils.image import flip_lr, interpolate_scales\n'), ((2607, 2631), 'packnet_sfm.utils.types.is_tensor', 'is_tensor', (['batch[key][0]'], {}), '(batch[key][0])\n', (2616, 2631), False, 'from packnet_sfm.utils.types import is_tensor, is_list, is_numpy\n'), ((2635, 2658), 'packnet_sfm.utils.types.is_numpy', 'is_numpy', (['batch[key][0]'], {}), '(batch[key][0])\n', (2643, 2658), False, 'from packnet_sfm.utils.types import is_tensor, is_list, is_numpy\n')] |
dgerod/more-dmps | utils/stg/min_jerk_traj.py | 4dc886a138f289532b2672537f91ff857448ad27 | '''
Created on 25.07.2012
@author: karl
'''
def trajectory(start, goal, duration, delta_t):
traj = []
    # initial values
t, td, tdd = start, 0, 0
for i in range(int(2 * duration / delta_t)):
try:
t, td, tdd = _min_jerk_step(t, td, tdd, goal, duration - i * delta_t, delta_t)
        except Exception:
break
traj.append([t, td, tdd])
return traj
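# Editor's sketch (illustrative values, not part of the original file): build a
# minimum-jerk profile from 0.0 to 1.0 over 1 s, sampled every 10 ms; each entry
# of the returned list is [position, velocity, acceleration].
#
#   traj = trajectory(0.0, 1.0, 1.0, 0.01)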
def _min_jerk_step(x, xd, xdd, goal, tau, dt):
#function [x,xd,xdd] = min_jerk_step(x,xd,xdd,goal,tau, dt) computes
# the update of x,xd,xdd for the next time step dt given that we are
# currently at x,xd,xdd, and that we have tau until we want to reach
# the goal
# ported from matlab dmp toolbox
if tau < dt:
        raise Exception("time left (tau) is smaller than current time (dt) - end of traj reached!")
dist = goal - x
a1 = 0
a0 = xdd * tau ** 2
v1 = 0
v0 = xd * tau
t1 = dt
t2 = dt ** 2
t3 = dt ** 3
t4 = dt ** 4
t5 = dt ** 5
c1 = (6.*dist + (a1 - a0) / 2. - 3.*(v0 + v1)) / tau ** 5
c2 = (-15.*dist + (3.*a0 - 2.*a1) / 2. + 8.*v0 + 7.*v1) / tau ** 4
c3 = (10.*dist + (a1 - 3.*a0) / 2. - 6.*v0 - 4.*v1) / tau ** 3
c4 = xdd / 2.
c5 = xd
c6 = x
x = c1 * t5 + c2 * t4 + c3 * t3 + c4 * t2 + c5 * t1 + c6
xd = 5.*c1 * t4 + 4 * c2 * t3 + 3 * c3 * t2 + 2 * c4 * t1 + c5
xdd = 20.*c1 * t3 + 12.*c2 * t2 + 6.*c3 * t1 + 2.*c4
return (x, xd, xdd)
| [] |
karanrampal/triplet-loss | tests/__init__.py | b62008dedbf8640ccf0dc359b5aadd5e8b0ab134 | # Make this directory a Python package
| [] |
jessicalettes/orthoexon | orthoexon/tests/test_util.py | 463ad1908364c602cf75dbddb0b16a42f4100a36 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_orthoexon
----------------------------------
Tests for `orthoexon` module.
"""
import os
import pytest
@pytest.fixture
def exon_id_with_quotes():
return "'ENSE00001229068.1'"
@pytest.fixture
def exon_id():
return "ENSE00001229068.1"
def test_separate_with_quotes(exon_id_with_quotes):
from orthoexon.util import separate
test = separate(exon_id_with_quotes)
true = "ENSE00001229068"
assert test == true
def test_separate(exon_id):
from orthoexon.util import separate
test = separate(exon_id)
true = "ENSE00001229068"
assert test == true
@pytest.fixture
def location():
return "chr20:10256140-10256211:+:0"
def test_splitstart(location):
from orthoexon.util import splitstart
test = splitstart(location)
true = '10256140'
assert test == true
def test_splitend(location):
from orthoexon.util import splitend
test = splitend(location)
true = '10256211'
assert test == true
@pytest.fixture
def human_gtf_filename(table_folder):
return os.path.join(table_folder, 'humanrbfox2andfmr1andsnap25.gtf')
@pytest.fixture
def human_gtf_database(table_folder):
return os.path.join(table_folder, 'humanrbfox2andfmr1andsnap25.gtf.db')
@pytest.fixture
def human_fasta(table_folder):
return os.path.join(table_folder, 'GRCm38.p3.genome.fa')
def test_translate(exon_id, human_fasta, human_gtf_database):
from orthoexon.util import translate
from orthoexon.util import separate
for index, species1gene in enumerate(human_gtf_database.features_of_type('gene')):
species1gffutilsgeneid = str(species1gene['gene_id'])
species1geneid = separate(species1gffutilsgeneid)
for exon in human_gtf_database.children(species1geneid,
featuretype='CDS',
order_by='start'):
if exon_id == exon:
test = translate(exon, human_fasta)
break
break
true = 'MAEDADMRNELEEMQRRADQLADE'
assert test == true
# def test_getsequence(exon, human_gtf_database):
# from orthoexon.util import getsequence
#
# test = getsequence(exon, human_gtf_database)
# true = 'ATGGCCGAAGACGCAGACATGCGCAATGAGCTGGAGGAGATGCAGCGAAGGGCTGACCAGTT' \
# 'GGCTGATGAG'
#
# assert test == true
# def test_make_sequence_array(finalsequencedf):
# from orthoexon.util import make_sequence_array
#
# test = make_sequence_array(finalsequencedf)
# true = ......
#
# assert test == true | [((406, 435), 'orthoexon.util.separate', 'separate', (['exon_id_with_quotes'], {}), '(exon_id_with_quotes)\n', (414, 435), False, 'from orthoexon.util import separate\n'), ((573, 590), 'orthoexon.util.separate', 'separate', (['exon_id'], {}), '(exon_id)\n', (581, 590), False, 'from orthoexon.util import separate\n'), ((806, 826), 'orthoexon.util.splitstart', 'splitstart', (['location'], {}), '(location)\n', (816, 826), False, 'from orthoexon.util import splitstart\n'), ((957, 975), 'orthoexon.util.splitend', 'splitend', (['location'], {}), '(location)\n', (965, 975), False, 'from orthoexon.util import splitend\n'), ((1090, 1151), 'os.path.join', 'os.path.join', (['table_folder', '"""humanrbfox2andfmr1andsnap25.gtf"""'], {}), "(table_folder, 'humanrbfox2andfmr1andsnap25.gtf')\n", (1102, 1151), False, 'import os\n'), ((1218, 1282), 'os.path.join', 'os.path.join', (['table_folder', '"""humanrbfox2andfmr1andsnap25.gtf.db"""'], {}), "(table_folder, 'humanrbfox2andfmr1andsnap25.gtf.db')\n", (1230, 1282), False, 'import os\n'), ((1342, 1391), 'os.path.join', 'os.path.join', (['table_folder', '"""GRCm38.p3.genome.fa"""'], {}), "(table_folder, 'GRCm38.p3.genome.fa')\n", (1354, 1391), False, 'import os\n'), ((1710, 1742), 'orthoexon.util.separate', 'separate', (['species1gffutilsgeneid'], {}), '(species1gffutilsgeneid)\n', (1718, 1742), False, 'from orthoexon.util import separate\n'), ((1997, 2025), 'orthoexon.util.translate', 'translate', (['exon', 'human_fasta'], {}), '(exon, human_fasta)\n', (2006, 2025), False, 'from orthoexon.util import translate\n')] |
yeyupiaoling/Kersa-Speaker-Recognition | predict_recognition.py | 7ccf42c006f42ff6074ad3937e44a0dfa68c6d33 | import argparse
import os
import shutil
import time
import numpy as np
from utils import model, utils
from utils.record import RecordAudio
parser = argparse.ArgumentParser()
parser.add_argument('--audio_db', default='audio_db/', type=str, help='Path to the enrolled-audio database')
parser.add_argument('--threshold', default=0.7, type=float, help='Similarity threshold for deciding whether two utterances are from the same speaker')
parser.add_argument('--model_path', default=r'models/resnet34-56.h5', type=str, help='Path to the model file')
args = parser.parse_args()
person_feature = []
person_name = []
# Build the model
network_eval = model.vggvox_resnet2d_icassp(input_dim=(257, None, 1), mode='eval')
# Load the pre-trained weights
network_eval.load_weights(os.path.join(args.model_path), by_name=True)
print('==> successfully loading model {}.'.format(args.model_path))
# Predict the voiceprint (speaker embedding) for an audio file
def predict(path):
specs = utils.load_data(path, mode='eval')
specs = np.expand_dims(np.expand_dims(specs, 0), -1)
feature = network_eval.predict(specs)[0]
return feature
# Load the enrolled-audio database used for recognition
def load_audio_db(audio_db_path):
start = time.time()
audios = os.listdir(audio_db_path)
for audio in audios:
path = os.path.join(audio_db_path, audio)
name = audio[:-4]
feature = predict(path)
person_name.append(name)
person_feature.append(feature)
print("Loaded %s audio." % name)
end = time.time()
    print('Finished loading the audio database, took %fms' % (round((end - start) * 1000)))
# Recognize a speaker from an audio file
def recognition(path):
name = ''
pro = 0
feature = predict(path)
for i, person_f in enumerate(person_feature):
        # Compute the similarity to each enrolled speaker
dist = np.dot(feature, person_f.T)
if dist > pro:
pro = dist
name = person_name[i]
return name, pro
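# Editor's note: the score above is a plain dot product between embeddings; it is a
# cosine similarity only if the model outputs L2-normalized features, which this
# script assumes but does not verify.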
# Register (enroll) a new speaker
def register(path, user_name):
save_path = os.path.join(args.audio_db, user_name + os.path.basename(path)[-4:])
shutil.move(path, save_path)
feature = predict(save_path)
person_name.append(user_name)
person_feature.append(feature)
if __name__ == '__main__':
load_audio_db(args.audio_db)
record_audio = RecordAudio()
while True:
        select_fun = int(input("Select a function: 0 to enroll audio into the voiceprint database, 1 to run speaker recognition: "))
if select_fun == 0:
audio_path = record_audio.record()
            name = input("Enter the name of the speaker for this audio: ")
if name == '': continue
register(audio_path, name)
elif select_fun == 1:
audio_path = record_audio.record()
name, p = recognition(audio_path)
if p > args.threshold:
print("识别说话的为:%s,相似度为:%f" % (name, p))
else:
print("音频库没有该用户的语音")
else:
            print('Please select a valid option')
| [((151, 176), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (174, 176), False, 'import argparse\n'), ((564, 631), 'utils.model.vggvox_resnet2d_icassp', 'model.vggvox_resnet2d_icassp', ([], {'input_dim': '(257, None, 1)', 'mode': '"""eval"""'}), "(input_dim=(257, None, 1), mode='eval')\n", (592, 631), False, 'from utils import model, utils\n'), ((669, 698), 'os.path.join', 'os.path.join', (['args.model_path'], {}), '(args.model_path)\n', (681, 698), False, 'import os\n'), ((826, 860), 'utils.utils.load_data', 'utils.load_data', (['path'], {'mode': '"""eval"""'}), "(path, mode='eval')\n", (841, 860), False, 'from utils import model, utils\n'), ((1042, 1053), 'time.time', 'time.time', ([], {}), '()\n', (1051, 1053), False, 'import time\n'), ((1067, 1092), 'os.listdir', 'os.listdir', (['audio_db_path'], {}), '(audio_db_path)\n', (1077, 1092), False, 'import os\n'), ((1349, 1360), 'time.time', 'time.time', ([], {}), '()\n', (1358, 1360), False, 'import time\n'), ((1849, 1877), 'shutil.move', 'shutil.move', (['path', 'save_path'], {}), '(path, save_path)\n', (1860, 1877), False, 'import shutil\n'), ((2061, 2074), 'utils.record.RecordAudio', 'RecordAudio', ([], {}), '()\n', (2072, 2074), False, 'from utils.record import RecordAudio\n'), ((888, 912), 'numpy.expand_dims', 'np.expand_dims', (['specs', '(0)'], {}), '(specs, 0)\n', (902, 912), True, 'import numpy as np\n'), ((1133, 1167), 'os.path.join', 'os.path.join', (['audio_db_path', 'audio'], {}), '(audio_db_path, audio)\n', (1145, 1167), False, 'import os\n'), ((1591, 1618), 'numpy.dot', 'np.dot', (['feature', 'person_f.T'], {}), '(feature, person_f.T)\n', (1597, 1618), True, 'import numpy as np\n'), ((1816, 1838), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (1832, 1838), False, 'import os\n')] |
vconrado/datacube-explorer | cubedash/_product.py | ccb9a9a42e5dd16e2b0325a1f881b080bb2806e6 | import logging
from datetime import timedelta
from flask import Blueprint, Response, abort, redirect, url_for
from cubedash import _model, _utils, _utils as utils
_LOG = logging.getLogger(__name__)
bp = Blueprint("product", __name__)
@bp.route("/about.csv")
def legacy_about_csv():
return redirect(".storage_csv")
@bp.route("/audit/storage.csv")
def storage_csv():
"""Get the product storage table as a CSV"""
product_locations = _model.STORE.products_location_samples_all()
return utils.as_csv(
filename_prefix="product-information",
headers=(
"name",
"count",
"locations",
"license",
"definition",
"summary_time",
"metadata_type",
),
rows=(
(
product.name,
summary.dataset_count,
[
location.common_prefix
for location in (product_locations.get(product.name) or [])
],
_utils.product_license(product),
url_for("product.raw_product_doc", name=product.name, _external=True),
summary.last_refresh_time,
product.metadata_type.name,
)
for product, summary in _model.get_products_with_summaries()
),
)
@bp.route("/products.txt")
def product_list_text():
# This is useful for bash scripts when we want to loop products :)
return Response(
"\n".join(t.name for t in _model.STORE.all_dataset_types()),
content_type="text/plain",
)
@bp.route("/metadata-types.txt")
def metadata_type_list_text():
# This is useful for bash scripts when we want to loop them :)
return Response(
"\n".join(t.name for t in _model.STORE.all_metadata_types()),
content_type="text/plain",
)
@bp.route("/audit/storage")
def storage_page():
product_locations = _model.STORE.products_location_samples_all()
return utils.render(
"storage.html",
product_summary_and_location=[
(product, summary, (product_locations.get(product.name) or []))
for product, summary in _model.get_products_with_summaries()
],
)
@bp.route("/product")
def product_redirect():
"""
If people remove the name from a "/product/<name>" url, take them somewhere useful
"""
return redirect(url_for(".products_page"))
@bp.route("/products")
def products_page():
return utils.render(
"products.html",
)
@bp.route("/metadata-types")
def metadata_types_page():
return utils.render(
"metadata-types.html",
)
@bp.route("/product/<name>.odc-product.yaml")
def legacy_raw_product_doc(name):
return redirect(url_for(".raw_product_doc", name=name))
@bp.route("/products/<name>.odc-product.yaml")
def raw_product_doc(name):
product = _model.STORE.index.products.get_by_name(name)
if not product:
abort(404, f"Unknown product {name!r}")
ordered_metadata = utils.prepare_document_formatting(
product.definition, "Product", include_source_url=True
)
return utils.as_yaml(ordered_metadata)
@bp.route("/metadata-type/<name>")
def legacy_metadata_type_page(name):
return redirect(url_for(".metadata_type_page", name=name))
@bp.route("/metadata-types/<name>")
def metadata_type_page(name):
metadata_type = _model.STORE.index.metadata_types.get_by_name(name)
if not metadata_type:
abort(404, f"Unknown metadata type {name!r}")
ordered_metadata = utils.prepare_document_formatting(metadata_type.definition)
products_using_it = sorted(
(
p
for p in _model.STORE.index.products.get_all()
if p.metadata_type.name == name
),
key=lambda p: p.name,
)
return utils.render(
"metadata-type.html",
metadata_type=metadata_type,
metadata_doc=ordered_metadata,
products_using_it=products_using_it,
)
@bp.route("/metadata-type/<name>.odc-type.yaml")
def legacy_metadata_type_doc(name):
return redirect(url_for(".raw_metadata_type_doc", name=name))
@bp.route("/metadata-types/<name>.odc-type.yaml")
def raw_metadata_type_doc(name):
metadata_type = _model.STORE.index.metadata_types.get_by_name(name)
if not metadata_type:
abort(404, f"Unknown metadata type {name!r}")
ordered_metadata = utils.prepare_document_formatting(
metadata_type.definition, "Metadata Type", include_source_url=True
)
return utils.as_yaml(ordered_metadata)
@bp.route("/products.odc-product.yaml")
def raw_all_products_doc():
resp = utils.as_yaml(
*(
utils.prepare_document_formatting(
product.definition,
f"Product {product.name}",
include_source_url=url_for(
".raw_product_doc", name=product.name, _external=True
),
)
for product in _model.STORE.all_dataset_types()
)
)
# Add Explorer ID to the download filename if they have one.
utils.suggest_download_filename(
resp,
prefix="products",
suffix=".odc-product.yaml",
)
return resp
@bp.route("/metadata-types.odc-type.yaml")
def raw_all_metadata_types_doc():
resp = utils.as_yaml(
*(
utils.prepare_document_formatting(
type_.definition,
f"Metadata Type {type_.name}",
include_source_url=url_for(
".raw_metadata_type_doc", name=type_.name, _external=True
),
)
for type_ in _model.STORE.all_metadata_types()
),
)
# Add Explorer ID to the download filename if they have one.
utils.suggest_download_filename(
resp,
prefix="metadata-types",
suffix=".odc-type.yaml",
)
return resp
def _iso8601_duration(tdelta: timedelta):
"""
Format a timedelta as an iso8601 duration
>>> _iso8601_duration(timedelta(seconds=0))
'PT0S'
>>> _iso8601_duration(timedelta(seconds=1))
'PT1S'
>>> _iso8601_duration(timedelta(seconds=23423))
'PT6H30M23S'
>>> _iso8601_duration(timedelta(seconds=4564564556))
'P52830DT14H35M56S'
"""
all_secs = tdelta.total_seconds()
secs = int(all_secs % 60)
h_m_s = (
int(all_secs // 3600 % 24),
int(all_secs // 60 % 60),
secs if secs % 1 != 0 else int(secs),
)
parts = ["P"]
days = int(all_secs // 86400)
if days:
parts.append(f"{days}D")
if any(h_m_s):
parts.append("T")
if all_secs:
for val, name in zip(h_m_s, ["H", "M", "S"]):
if val:
parts.append(f"{val}{name}")
else:
parts.append("T0S")
return "".join(parts)
| [((173, 200), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (190, 200), False, 'import logging\n'), ((206, 236), 'flask.Blueprint', 'Blueprint', (['"""product"""', '__name__'], {}), "('product', __name__)\n", (215, 236), False, 'from flask import Blueprint, Response, abort, redirect, url_for\n'), ((298, 322), 'flask.redirect', 'redirect', (['""".storage_csv"""'], {}), "('.storage_csv')\n", (306, 322), False, 'from flask import Blueprint, Response, abort, redirect, url_for\n'), ((450, 494), 'cubedash._model.STORE.products_location_samples_all', '_model.STORE.products_location_samples_all', ([], {}), '()\n', (492, 494), False, 'from cubedash import _model, _utils, _utils as utils\n'), ((1948, 1992), 'cubedash._model.STORE.products_location_samples_all', '_model.STORE.products_location_samples_all', ([], {}), '()\n', (1990, 1992), False, 'from cubedash import _model, _utils, _utils as utils\n'), ((2503, 2532), 'cubedash._utils.render', 'utils.render', (['"""products.html"""'], {}), "('products.html')\n", (2515, 2532), True, 'from cubedash import _model, _utils, _utils as utils\n'), ((2617, 2652), 'cubedash._utils.render', 'utils.render', (['"""metadata-types.html"""'], {}), "('metadata-types.html')\n", (2629, 2652), True, 'from cubedash import _model, _utils, _utils as utils\n'), ((2900, 2945), 'cubedash._model.STORE.index.products.get_by_name', '_model.STORE.index.products.get_by_name', (['name'], {}), '(name)\n', (2939, 2945), False, 'from cubedash import _model, _utils, _utils as utils\n'), ((3038, 3131), 'cubedash._utils.prepare_document_formatting', 'utils.prepare_document_formatting', (['product.definition', '"""Product"""'], {'include_source_url': '(True)'}), "(product.definition, 'Product',\n include_source_url=True)\n", (3071, 3131), True, 'from cubedash import _model, _utils, _utils as utils\n'), ((3153, 3184), 'cubedash._utils.as_yaml', 'utils.as_yaml', (['ordered_metadata'], {}), '(ordered_metadata)\n', (3166, 3184), True, 'from cubedash import _model, _utils, _utils as utils\n'), ((3410, 3461), 'cubedash._model.STORE.index.metadata_types.get_by_name', '_model.STORE.index.metadata_types.get_by_name', (['name'], {}), '(name)\n', (3455, 3461), False, 'from cubedash import _model, _utils, _utils as utils\n'), ((3565, 3624), 'cubedash._utils.prepare_document_formatting', 'utils.prepare_document_formatting', (['metadata_type.definition'], {}), '(metadata_type.definition)\n', (3598, 3624), True, 'from cubedash import _model, _utils, _utils as utils\n'), ((3843, 3978), 'cubedash._utils.render', 'utils.render', (['"""metadata-type.html"""'], {'metadata_type': 'metadata_type', 'metadata_doc': 'ordered_metadata', 'products_using_it': 'products_using_it'}), "('metadata-type.html', metadata_type=metadata_type,\n metadata_doc=ordered_metadata, products_using_it=products_using_it)\n", (3855, 3978), True, 'from cubedash import _model, _utils, _utils as utils\n'), ((4272, 4323), 'cubedash._model.STORE.index.metadata_types.get_by_name', '_model.STORE.index.metadata_types.get_by_name', (['name'], {}), '(name)\n', (4317, 4323), False, 'from cubedash import _model, _utils, _utils as utils\n'), ((4427, 4532), 'cubedash._utils.prepare_document_formatting', 'utils.prepare_document_formatting', (['metadata_type.definition', '"""Metadata Type"""'], {'include_source_url': '(True)'}), "(metadata_type.definition, 'Metadata Type',\n include_source_url=True)\n", (4460, 4532), True, 'from cubedash import _model, _utils, _utils as utils\n'), ((4554, 4585), 
'cubedash._utils.as_yaml', 'utils.as_yaml', (['ordered_metadata'], {}), '(ordered_metadata)\n', (4567, 4585), True, 'from cubedash import _model, _utils, _utils as utils\n'), ((5115, 5204), 'cubedash._utils.suggest_download_filename', 'utils.suggest_download_filename', (['resp'], {'prefix': '"""products"""', 'suffix': '""".odc-product.yaml"""'}), "(resp, prefix='products', suffix=\n '.odc-product.yaml')\n", (5146, 5204), True, 'from cubedash import _model, _utils, _utils as utils\n'), ((5792, 5884), 'cubedash._utils.suggest_download_filename', 'utils.suggest_download_filename', (['resp'], {'prefix': '"""metadata-types"""', 'suffix': '""".odc-type.yaml"""'}), "(resp, prefix='metadata-types', suffix=\n '.odc-type.yaml')\n", (5823, 5884), True, 'from cubedash import _model, _utils, _utils as utils\n'), ((2419, 2444), 'flask.url_for', 'url_for', (['""".products_page"""'], {}), "('.products_page')\n", (2426, 2444), False, 'from flask import Blueprint, Response, abort, redirect, url_for\n'), ((2770, 2808), 'flask.url_for', 'url_for', (['""".raw_product_doc"""'], {'name': 'name'}), "('.raw_product_doc', name=name)\n", (2777, 2808), False, 'from flask import Blueprint, Response, abort, redirect, url_for\n'), ((2974, 3013), 'flask.abort', 'abort', (['(404)', 'f"""Unknown product {name!r}"""'], {}), "(404, f'Unknown product {name!r}')\n", (2979, 3013), False, 'from flask import Blueprint, Response, abort, redirect, url_for\n'), ((3279, 3320), 'flask.url_for', 'url_for', (['""".metadata_type_page"""'], {'name': 'name'}), "('.metadata_type_page', name=name)\n", (3286, 3320), False, 'from flask import Blueprint, Response, abort, redirect, url_for\n'), ((3496, 3541), 'flask.abort', 'abort', (['(404)', 'f"""Unknown metadata type {name!r}"""'], {}), "(404, f'Unknown metadata type {name!r}')\n", (3501, 3541), False, 'from flask import Blueprint, Response, abort, redirect, url_for\n'), ((4121, 4165), 'flask.url_for', 'url_for', (['""".raw_metadata_type_doc"""'], {'name': 'name'}), "('.raw_metadata_type_doc', name=name)\n", (4128, 4165), False, 'from flask import Blueprint, Response, abort, redirect, url_for\n'), ((4358, 4403), 'flask.abort', 'abort', (['(404)', 'f"""Unknown metadata type {name!r}"""'], {}), "(404, f'Unknown metadata type {name!r}')\n", (4363, 4403), False, 'from flask import Blueprint, Response, abort, redirect, url_for\n'), ((3703, 3740), 'cubedash._model.STORE.index.products.get_all', '_model.STORE.index.products.get_all', ([], {}), '()\n', (3738, 3740), False, 'from cubedash import _model, _utils, _utils as utils\n'), ((1042, 1073), 'cubedash._utils.product_license', '_utils.product_license', (['product'], {}), '(product)\n', (1064, 1073), False, 'from cubedash import _model, _utils, _utils as utils\n'), ((1091, 1160), 'flask.url_for', 'url_for', (['"""product.raw_product_doc"""'], {'name': 'product.name', '_external': '(True)'}), "('product.raw_product_doc', name=product.name, _external=True)\n", (1098, 1160), False, 'from flask import Blueprint, Response, abort, redirect, url_for\n'), ((1299, 1335), 'cubedash._model.get_products_with_summaries', '_model.get_products_with_summaries', ([], {}), '()\n', (1333, 1335), False, 'from cubedash import _model, _utils, _utils as utils\n'), ((1533, 1565), 'cubedash._model.STORE.all_dataset_types', '_model.STORE.all_dataset_types', ([], {}), '()\n', (1563, 1565), False, 'from cubedash import _model, _utils, _utils as utils\n'), ((1797, 1830), 'cubedash._model.STORE.all_metadata_types', '_model.STORE.all_metadata_types', ([], {}), '()\n', (1828, 
1830), False, 'from cubedash import _model, _utils, _utils as utils\n'), ((2194, 2230), 'cubedash._model.get_products_with_summaries', '_model.get_products_with_summaries', ([], {}), '()\n', (2228, 2230), False, 'from cubedash import _model, _utils, _utils as utils\n'), ((4997, 5029), 'cubedash._model.STORE.all_dataset_types', '_model.STORE.all_dataset_types', ([], {}), '()\n', (5027, 5029), False, 'from cubedash import _model, _utils, _utils as utils\n'), ((5672, 5705), 'cubedash._model.STORE.all_metadata_types', '_model.STORE.all_metadata_types', ([], {}), '()\n', (5703, 5705), False, 'from cubedash import _model, _utils, _utils as utils\n'), ((4854, 4916), 'flask.url_for', 'url_for', (['""".raw_product_doc"""'], {'name': 'product.name', '_external': '(True)'}), "('.raw_product_doc', name=product.name, _external=True)\n", (4861, 4916), False, 'from flask import Blueprint, Response, abort, redirect, url_for\n'), ((5527, 5593), 'flask.url_for', 'url_for', (['""".raw_metadata_type_doc"""'], {'name': 'type_.name', '_external': '(True)'}), "('.raw_metadata_type_doc', name=type_.name, _external=True)\n", (5534, 5593), False, 'from flask import Blueprint, Response, abort, redirect, url_for\n')] |
ozbenh/litex-boards | litex_boards/platforms/sipeed_tang_nano.py | f18b10d1edb4e162a77972e2e9c5bad54ca00788 | #
# This file is part of LiteX-Boards.
#
# Copyright (c) 2021 Florent Kermarrec <[email protected]>
# SPDX-License-Identifier: BSD-2-Clause
# Board diagram/pinout:
# https://user-images.githubusercontent.com/1450143/133655492-532d5e9a-0635-4889-85c9-68683d06cae0.png
# http://dl.sipeed.com/TANG/Nano/HDK/Tang-NANO-2704(Schematic).pdf
from migen import *
from litex.build.generic_platform import *
from litex.build.gowin.platform import GowinPlatform
from litex.build.openfpgaloader import OpenFPGALoader
# IOs ----------------------------------------------------------------------------------------------
_io = [
# Clk / Rst
("clk24", 0, Pins("35"), IOStandard("LVCMOS33")),
# Leds
("user_led", 0, Pins("16"), IOStandard("LVCMOS33")),
("user_led", 1, Pins("17"), IOStandard("LVCMOS33")),
("user_led", 2, Pins("18"), IOStandard("LVCMOS33")),
# Buttons.
("user_btn", 0, Pins("15"), IOStandard("LVCMOS33")),
("user_btn", 0, Pins("14"), IOStandard("LVCMOS33")),
# Serial
("serial", 0,
Subsignal("tx", Pins("8")),
Subsignal("rx", Pins("9")),
IOStandard("LVCMOS33")
),
]
# Connectors ---------------------------------------------------------------------------------------
_connectors = []
# Platform -----------------------------------------------------------------------------------------
class Platform(GowinPlatform):
default_clk_name = "clk24"
default_clk_period = 1e9/24e6
def __init__(self):
GowinPlatform.__init__(self, "GW1N-LV1QN48C6/I5", _io, _connectors, toolchain="gowin", devicename="GW1N-1")
self.toolchain.options["use_done_as_gpio"] = 1
def create_programmer(self):
return OpenFPGALoader("tangnano")
def do_finalize(self, fragment):
GowinPlatform.do_finalize(self, fragment)
self.add_period_constraint(self.lookup_request("clk24", loose=True), 1e9/24e6)
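# Editor's sketch (assuming the usual LiteX flow, not part of the original file):
#
#   platform = Platform()
#   led = platform.request("user_led", 0)   # one of the LED pins declared above
#   # platform.build(top) would then run the Gowin toolchain on a Migen/LiteX top module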
| [((1509, 1620), 'litex.build.gowin.platform.GowinPlatform.__init__', 'GowinPlatform.__init__', (['self', '"""GW1N-LV1QN48C6/I5"""', '_io', '_connectors'], {'toolchain': '"""gowin"""', 'devicename': '"""GW1N-1"""'}), "(self, 'GW1N-LV1QN48C6/I5', _io, _connectors,\n toolchain='gowin', devicename='GW1N-1')\n", (1531, 1620), False, 'from litex.build.gowin.platform import GowinPlatform\n'), ((1721, 1747), 'litex.build.openfpgaloader.OpenFPGALoader', 'OpenFPGALoader', (['"""tangnano"""'], {}), "('tangnano')\n", (1735, 1747), False, 'from litex.build.openfpgaloader import OpenFPGALoader\n'), ((1794, 1835), 'litex.build.gowin.platform.GowinPlatform.do_finalize', 'GowinPlatform.do_finalize', (['self', 'fragment'], {}), '(self, fragment)\n', (1819, 1835), False, 'from litex.build.gowin.platform import GowinPlatform\n')] |
anon-6994/nm-metarl | nm_cavia/rl/metalearner.py | 45c8798c2139d8c200cc7a398331c1b98a0dccec | import torch
from torch.distributions.kl import kl_divergence
from torch.nn.utils.convert_parameters import (vector_to_parameters,
parameters_to_vector)
from rl_utils.optimization import conjugate_gradient
from rl_utils.torch_utils import (weighted_mean, detach_distribution, weighted_normalize)
class MetaLearner(object):
"""Meta-learner
The meta-learner is responsible for sampling the trajectories/episodes
(before and after the one-step adaptation), compute the inner loss, compute
the updated parameters based on the inner-loss, and perform the meta-update.
[1] Chelsea Finn, Pieter Abbeel, Sergey Levine, "Model-Agnostic
Meta-Learning for Fast Adaptation of Deep Networks", 2017
(https://arxiv.org/abs/1703.03400)
[2] Richard Sutton, Andrew Barto, "Reinforcement learning: An introduction",
2018 (http://incompleteideas.net/book/the-book-2nd.html)
[3] John Schulman, Philipp Moritz, Sergey Levine, Michael Jordan,
Pieter Abbeel, "High-Dimensional Continuous Control Using Generalized
Advantage Estimation", 2016 (https://arxiv.org/abs/1506.02438)
[4] John Schulman, Sergey Levine, Philipp Moritz, Michael I. Jordan,
Pieter Abbeel, "Trust Region Policy Optimization", 2015
(https://arxiv.org/abs/1502.05477)
"""
def __init__(self, sampler, policy, baseline, gamma=0.95,
fast_lr=0.5, tau=1.0, device='cpu'):
self.sampler = sampler
self.policy = policy
self.baseline = baseline
self.gamma = gamma
self.fast_lr = fast_lr
self.tau = tau
self.to(device)
def inner_loss(self, episodes, params=None):
"""Compute the inner loss for the one-step gradient update. The inner
loss is REINFORCE with baseline [2], computed on advantages estimated
with Generalized Advantage Estimation (GAE, [3]).
"""
values = self.baseline(episodes)
advantages = episodes.gae(values, tau=self.tau)
advantages = weighted_normalize(advantages, weights=episodes.mask)
pi = self.policy(episodes.observations, params=params)
log_probs = pi.log_prob(episodes.actions)
if log_probs.dim() > 2:
log_probs = torch.sum(log_probs, dim=2)
loss = -weighted_mean(log_probs * advantages, dim=0, weights=episodes.mask)
return loss
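    # Editor's note: the inner loss above is REINFORCE with a baseline,
    #   L(theta) = - mean_t [ log pi_theta(a_t | s_t) * A_t ],
    # where A_t are the GAE advantages normalized over the batch, so minimizing L
    # follows the policy-gradient direction.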
def adapt(self, episodes, first_order=False, params=None, lr=None):
"""Adapt the parameters of the policy network to a new task, from
sampled trajectories `episodes`, with a one-step gradient update [1].
"""
if lr is None:
lr = self.fast_lr
# Fit the baseline to the training episodes
self.baseline.fit(episodes)
# Get the loss on the training episodes
loss = self.inner_loss(episodes, params=params)
# Get the new parameters after a one-step gradient update
params = self.policy.update_params(loss, step_size=lr, first_order=first_order, params=params)
return params, loss
def sample(self, tasks, first_order=False):
"""Sample trajectories (before and after the update of the parameters)
for all the tasks `tasks`.
"""
episodes = []
losses = []
for task in tasks:
self.sampler.reset_task(task)
self.policy.reset_context()
train_episodes = self.sampler.sample(self.policy, gamma=self.gamma)
# inner loop (for CAVIA, this only updates the context parameters)
params, loss = self.adapt(train_episodes, first_order=first_order)
# rollouts after inner loop update
valid_episodes = self.sampler.sample(self.policy, params=params, gamma=self.gamma)
episodes.append((train_episodes, valid_episodes))
losses.append(loss.item())
return episodes, losses
def test(self, tasks, num_steps, batch_size, halve_lr):
"""Sample trajectories (before and after the update of the parameters)
        for all the tasks `tasks`.
"""
episodes_per_task = []
for task in tasks:
# reset context params (for cavia) and task
self.policy.reset_context()
self.sampler.reset_task(task)
# start with blank params
params = None
# gather some initial experience and log performance
test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size)
# initialise list which will log all rollouts for the current task
curr_episodes = [test_episodes]
for i in range(1, num_steps + 1):
# lower learning rate after first update (for MAML, as described in their paper)
if i == 1 and halve_lr:
lr = self.fast_lr / 2
else:
lr = self.fast_lr
# inner-loop update
params, loss = self.adapt(test_episodes, first_order=True, params=params, lr=lr)
# get new rollouts
test_episodes = self.sampler.sample(self.policy, gamma=self.gamma, params=params, batch_size=batch_size)
curr_episodes.append(test_episodes)
episodes_per_task.append(curr_episodes)
self.policy.reset_context()
return episodes_per_task
def kl_divergence(self, episodes, old_pis=None):
kls = []
if old_pis is None:
old_pis = [None] * len(episodes)
for (train_episodes, valid_episodes), old_pi in zip(episodes, old_pis):
# this is the inner-loop update
self.policy.reset_context()
params, _ = self.adapt(train_episodes)
pi = self.policy(valid_episodes.observations, params=params)
if old_pi is None:
old_pi = detach_distribution(pi)
mask = valid_episodes.mask
if valid_episodes.actions.dim() > 2:
mask = mask.unsqueeze(2)
kl = weighted_mean(kl_divergence(pi, old_pi), dim=0, weights=mask)
kls.append(kl)
return torch.mean(torch.stack(kls, dim=0))
def hessian_vector_product(self, episodes, damping=1e-2):
"""Hessian-vector product, based on the Perlmutter method."""
def _product(vector):
kl = self.kl_divergence(episodes)
grads = torch.autograd.grad(kl, self.policy.parameters(), create_graph=True)
flat_grad_kl = parameters_to_vector(grads)
grad_kl_v = torch.dot(flat_grad_kl, vector)
grad2s = torch.autograd.grad(grad_kl_v, self.policy.parameters())
flat_grad2_kl = parameters_to_vector(grad2s)
return flat_grad2_kl + damping * vector
return _product
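    # Editor's note: `_product` uses the Pearlmutter trick -- the Hessian-vector
    # product of the KL divergence D(theta) is computed without ever forming H:
    #   H v = grad_theta( grad_theta D(theta) . v ),
    # with `damping * v` added to keep the conjugate-gradient solve well conditioned.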
def surrogate_loss(self, episodes, old_pis=None):
losses, kls, pis = [], [], []
if old_pis is None:
old_pis = [None] * len(episodes)
for (train_episodes, valid_episodes), old_pi in zip(episodes, old_pis):
# do inner-loop update
self.policy.reset_context()
params, _ = self.adapt(train_episodes)
with torch.set_grad_enabled(old_pi is None):
# get action values after inner-loop update
pi = self.policy(valid_episodes.observations, params=params)
pis.append(detach_distribution(pi))
if old_pi is None:
old_pi = detach_distribution(pi)
values = self.baseline(valid_episodes)
advantages = valid_episodes.gae(values, tau=self.tau)
advantages = weighted_normalize(advantages, weights=valid_episodes.mask)
log_ratio = (pi.log_prob(valid_episodes.actions)
- old_pi.log_prob(valid_episodes.actions))
if log_ratio.dim() > 2:
log_ratio = torch.sum(log_ratio, dim=2)
ratio = torch.exp(log_ratio)
loss = -weighted_mean(ratio * advantages, dim=0, weights=valid_episodes.mask)
losses.append(loss)
mask = valid_episodes.mask
if valid_episodes.actions.dim() > 2:
mask = mask.unsqueeze(2)
kl = weighted_mean(kl_divergence(pi, old_pi), dim=0, weights=mask)
kls.append(kl)
return torch.mean(torch.stack(losses, dim=0)), torch.mean(torch.stack(kls, dim=0)), pis
def step(self, episodes, max_kl=1e-3, cg_iters=10, cg_damping=1e-2,
ls_max_steps=10, ls_backtrack_ratio=0.5):
"""Meta-optimization step (ie. update of the initial parameters), based
on Trust Region Policy Optimization (TRPO, [4]).
"""
old_loss, _, old_pis = self.surrogate_loss(episodes)
# this part will take higher order gradients through the inner loop:
grads = torch.autograd.grad(old_loss, self.policy.parameters())
grads = parameters_to_vector(grads)
# Compute the step direction with Conjugate Gradient
hessian_vector_product = self.hessian_vector_product(episodes, damping=cg_damping)
stepdir = conjugate_gradient(hessian_vector_product, grads, cg_iters=cg_iters)
# Compute the Lagrange multiplier
shs = 0.5 * torch.dot(stepdir, hessian_vector_product(stepdir))
lagrange_multiplier = torch.sqrt(shs / max_kl)
step = stepdir / lagrange_multiplier
# Save the old parameters
old_params = parameters_to_vector(self.policy.parameters())
print()
# Line search
step_size = 1.0
for _ in range(ls_max_steps):
vector_to_parameters(old_params - step_size * step, self.policy.parameters())
loss, kl, _ = self.surrogate_loss(episodes, old_pis=old_pis)
improve = loss - old_loss
if (improve.item() < 0.0) and (kl.item() < max_kl):
break
step_size *= ls_backtrack_ratio
else:
print('no update?')
vector_to_parameters(old_params, self.policy.parameters())
print('improve:', improve.item())
print('kl:', kl.item())
print('step_size:', step_size)
return loss
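    # Editor's note: in `step`, conjugate gradient returns s ~= H^{-1} g; with
    # shs = 0.5 * s^T H s, dividing by sqrt(shs / max_kl) scales the step so the
    # quadratic KL model hits max_kl exactly, and the backtracking line search then
    # shrinks it until the surrogate loss improves while KL stays below max_kl.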
def to(self, device, **kwargs):
self.policy.to(device, **kwargs)
self.baseline.to(device, **kwargs)
self.device = device
| [((2079, 2132), 'rl_utils.torch_utils.weighted_normalize', 'weighted_normalize', (['advantages'], {'weights': 'episodes.mask'}), '(advantages, weights=episodes.mask)\n', (2097, 2132), False, 'from rl_utils.torch_utils import weighted_mean, detach_distribution, weighted_normalize\n'), ((9105, 9132), 'torch.nn.utils.convert_parameters.parameters_to_vector', 'parameters_to_vector', (['grads'], {}), '(grads)\n', (9125, 9132), False, 'from torch.nn.utils.convert_parameters import vector_to_parameters, parameters_to_vector\n'), ((9304, 9372), 'rl_utils.optimization.conjugate_gradient', 'conjugate_gradient', (['hessian_vector_product', 'grads'], {'cg_iters': 'cg_iters'}), '(hessian_vector_product, grads, cg_iters=cg_iters)\n', (9322, 9372), False, 'from rl_utils.optimization import conjugate_gradient\n'), ((9518, 9542), 'torch.sqrt', 'torch.sqrt', (['(shs / max_kl)'], {}), '(shs / max_kl)\n', (9528, 9542), False, 'import torch\n'), ((2303, 2330), 'torch.sum', 'torch.sum', (['log_probs'], {'dim': '(2)'}), '(log_probs, dim=2)\n', (2312, 2330), False, 'import torch\n'), ((2348, 2415), 'rl_utils.torch_utils.weighted_mean', 'weighted_mean', (['(log_probs * advantages)'], {'dim': '(0)', 'weights': 'episodes.mask'}), '(log_probs * advantages, dim=0, weights=episodes.mask)\n', (2361, 2415), False, 'from rl_utils.torch_utils import weighted_mean, detach_distribution, weighted_normalize\n'), ((6259, 6282), 'torch.stack', 'torch.stack', (['kls'], {'dim': '(0)'}), '(kls, dim=0)\n', (6270, 6282), False, 'import torch\n'), ((6610, 6637), 'torch.nn.utils.convert_parameters.parameters_to_vector', 'parameters_to_vector', (['grads'], {}), '(grads)\n', (6630, 6637), False, 'from torch.nn.utils.convert_parameters import vector_to_parameters, parameters_to_vector\n'), ((6663, 6694), 'torch.dot', 'torch.dot', (['flat_grad_kl', 'vector'], {}), '(flat_grad_kl, vector)\n', (6672, 6694), False, 'import torch\n'), ((6801, 6829), 'torch.nn.utils.convert_parameters.parameters_to_vector', 'parameters_to_vector', (['grad2s'], {}), '(grad2s)\n', (6821, 6829), False, 'from torch.nn.utils.convert_parameters import vector_to_parameters, parameters_to_vector\n'), ((5972, 5995), 'rl_utils.torch_utils.detach_distribution', 'detach_distribution', (['pi'], {}), '(pi)\n', (5991, 5995), False, 'from rl_utils.torch_utils import weighted_mean, detach_distribution, weighted_normalize\n'), ((6157, 6182), 'torch.distributions.kl.kl_divergence', 'kl_divergence', (['pi', 'old_pi'], {}), '(pi, old_pi)\n', (6170, 6182), False, 'from torch.distributions.kl import kl_divergence\n'), ((7300, 7338), 'torch.set_grad_enabled', 'torch.set_grad_enabled', (['(old_pi is None)'], {}), '(old_pi is None)\n', (7322, 7338), False, 'import torch\n'), ((7774, 7833), 'rl_utils.torch_utils.weighted_normalize', 'weighted_normalize', (['advantages'], {'weights': 'valid_episodes.mask'}), '(advantages, weights=valid_episodes.mask)\n', (7792, 7833), False, 'from rl_utils.torch_utils import weighted_mean, detach_distribution, weighted_normalize\n'), ((8096, 8116), 'torch.exp', 'torch.exp', (['log_ratio'], {}), '(log_ratio)\n', (8105, 8116), False, 'import torch\n'), ((8531, 8557), 'torch.stack', 'torch.stack', (['losses'], {'dim': '(0)'}), '(losses, dim=0)\n', (8542, 8557), False, 'import torch\n'), ((8571, 8594), 'torch.stack', 'torch.stack', (['kls'], {'dim': '(0)'}), '(kls, dim=0)\n', (8582, 8594), False, 'import torch\n'), ((7505, 7528), 'rl_utils.torch_utils.detach_distribution', 'detach_distribution', (['pi'], {}), '(pi)\n', (7524, 7528), False, 'from 
rl_utils.torch_utils import weighted_mean, detach_distribution, weighted_normalize\n'), ((7595, 7618), 'rl_utils.torch_utils.detach_distribution', 'detach_distribution', (['pi'], {}), '(pi)\n', (7614, 7618), False, 'from rl_utils.torch_utils import weighted_mean, detach_distribution, weighted_normalize\n'), ((8044, 8071), 'torch.sum', 'torch.sum', (['log_ratio'], {'dim': '(2)'}), '(log_ratio, dim=2)\n', (8053, 8071), False, 'import torch\n'), ((8142, 8211), 'rl_utils.torch_utils.weighted_mean', 'weighted_mean', (['(ratio * advantages)'], {'dim': '(0)', 'weights': 'valid_episodes.mask'}), '(ratio * advantages, dim=0, weights=valid_episodes.mask)\n', (8155, 8211), False, 'from rl_utils.torch_utils import weighted_mean, detach_distribution, weighted_normalize\n'), ((8425, 8450), 'torch.distributions.kl.kl_divergence', 'kl_divergence', (['pi', 'old_pi'], {}), '(pi, old_pi)\n', (8438, 8450), False, 'from torch.distributions.kl import kl_divergence\n')] |
hramezani/django-request | request/management/commands/purgerequests.py | 4b9c7b22f26338d2c93110477aa44041b1c5ddb4 | from datetime import timedelta
from dateutil.relativedelta import relativedelta
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from ...models import Request
DURATION_OPTIONS = {
'hours': lambda amount: timezone.now() - timedelta(hours=amount),
'days': lambda amount: timezone.now() - timedelta(days=amount),
'weeks': lambda amount: timezone.now() - timedelta(weeks=amount),
'months': lambda amount: timezone.now() + relativedelta(months=-amount),
'years': lambda amount: timezone.now() + relativedelta(years=-amount),
}
try:
# to keep backward Python 2 compatibility
input = raw_input
except NameError:
pass
class Command(BaseCommand):
help = 'Purge old requests.'
def add_arguments(self, parser):
parser.add_argument(
'amount',
type=int,
)
parser.add_argument('duration')
parser.add_argument(
'--noinput',
action='store_false',
dest='interactive',
default=True,
help='Tells Django to NOT prompt the user for input of any kind.'
)
def handle(self, *args, **options):
amount = options['amount']
duration = options['duration']
# Check we have the correct values
if duration[-1] != 's': # If its not plural, make it plural
duration_plural = '{0}s'.format(duration)
else:
duration_plural = duration
if duration_plural not in DURATION_OPTIONS:
raise CommandError('Amount must be {0}'.format(', '.join(DURATION_OPTIONS)))
qs = Request.objects.filter(time__lte=DURATION_OPTIONS[duration_plural](amount))
count = qs.count()
if count == 0:
print('There are no requests to delete.')
return
if options.get('interactive'):
confirm = input('''
    You have requested a purge of old requests.
This will IRREVERSIBLY DESTROY any
requests created before {0} {1} ago.
That is a total of {2} requests.
Are you sure you want to do this?
Type 'yes' to continue, or 'no' to cancel:'''.format(amount, duration, count))
else:
confirm = 'yes'
if confirm == 'yes':
qs.delete()
else:
print('Purge cancelled')
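# Editor's sketch (assuming the app is installed in a Django project): typical
# invocations of this management command.
#
#   python manage.py purgerequests 2 weeks
#   python manage.py purgerequests 6 months --noinput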
| [((262, 276), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (274, 276), False, 'from django.utils import timezone\n'), ((279, 302), 'datetime.timedelta', 'timedelta', ([], {'hours': 'amount'}), '(hours=amount)\n', (288, 302), False, 'from datetime import timedelta\n'), ((331, 345), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (343, 345), False, 'from django.utils import timezone\n'), ((348, 370), 'datetime.timedelta', 'timedelta', ([], {'days': 'amount'}), '(days=amount)\n', (357, 370), False, 'from datetime import timedelta\n'), ((400, 414), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (412, 414), False, 'from django.utils import timezone\n'), ((417, 440), 'datetime.timedelta', 'timedelta', ([], {'weeks': 'amount'}), '(weeks=amount)\n', (426, 440), False, 'from datetime import timedelta\n'), ((471, 485), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (483, 485), False, 'from django.utils import timezone\n'), ((488, 517), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(-amount)'}), '(months=-amount)\n', (501, 517), False, 'from dateutil.relativedelta import relativedelta\n'), ((547, 561), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (559, 561), False, 'from django.utils import timezone\n'), ((564, 592), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'years': '(-amount)'}), '(years=-amount)\n', (577, 592), False, 'from dateutil.relativedelta import relativedelta\n')] |
kimtaehong/PythonForWindows | tests/test_crypto.py | d04eed1754e2e23474213b89580d68e1b73c3fe4 | import pytest
import windows.crypto
import windows.generated_def as gdef
import windows.crypto.generation
from .pfwtest import *
pytestmark = pytest.mark.usefixtures('check_for_gc_garbage')
TEST_CERT = b"""
MIIBwTCCASqgAwIBAgIQG46Uyws+67ZBOfPJCbFrRjANBgkqhkiG9w0BAQsFADAfMR0wGwYDVQQD
ExRQeXRob25Gb3JXaW5kb3dzVGVzdDAeFw0xNzA0MTIxNDM5MjNaFw0xODA0MTIyMDM5MjNaMB8x
HTAbBgNVBAMTFFB5dGhvbkZvcldpbmRvd3NUZXN0MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB
gQCRHwC/sRfXh5pc4poc85aidrudbPdya+0OeonQlf1JQ1ekf7KSfADV5FLkSQu2BzgBK9DIWTGX
XknBJIzZF03UZsVg5D67V2mnSClXucc0cGFcK4pDDt0tHeabA2GPinVe7Z6qDT4ZxPR8lKaXDdV2
Pg2hTdcGSpqaltHxph7G/QIDAQABMA0GCSqGSIb3DQEBCwUAA4GBACcQFdOlVjYICOIyAXowQaEN
qcLpN1iWoL9UijNhTY37+U5+ycFT8QksT3Xmh9lEIqXMh121uViy2P/3p+Ek31AN9bB+BhWIM6PQ
gy+ApYDdSwTtWFARSrMqk7rRHUveYEfMw72yaOWDxCzcopEuADKrrYEute4CzZuXF9PbbgK6"""
## Cert info:
# Name: PythonForWindowsTest
# Serial: '1b 8e 94 cb 0b 3e eb b6 41 39 f3 c9 09 b1 6b 46'
TEST_PFX_PASSWORD = "TestPassword"
TEST_PFX = b"""
MIIGMwIBAzCCBe8GCSqGSIb3DQEHAaCCBeAEggXcMIIF2DCCA7AGCSqGSIb3DQEHAaCCA6EEggOd
MIIDmTCCA5UGCyqGSIb3DQEMCgECoIICtjCCArIwHAYKKoZIhvcNAQwBAzAOBAhoE8r3qUJeTQIC
B9AEggKQT7jm7ppgH64scyJ3cFW50BurqpMPtxgYyYCCtjdmHMlLPbUoujXOZVYi3seAEERE51BS
TXUi5ydHpY8cZ104nU4iEuJBAc+TZ7NQSTkjLKwAY1r1jrIikkQEmewLVlWQnj9dvCwD3lNkGXG8
zJdWusta5Lw1Hz5ftsRXvN9UAvH8gxYviVRVmkZA33rI/BiyPZCulu2EBC0MeDBQHLLONup2xVGy
+YgU4Uf7khJIftWCgdrkyJIaMuB7vGUl014ZBV+XWaox+bS71qFQXUP2WnyTeeBVIaTJtggk+80X
fStWwvvzl02LTwGV3kJqWbazPlJkevfRQ7DNh1xa42eO57YEcEl3sR00anFWbL3J/I0bHb5XWY/e
8DYuMgIlat5gub8CTO2IViu6TexXFMXLxZdWAYvJ8ivc/q7mA/JcDJQlNnGof2Z6jY8ykWYloL/R
XMn2LeGqrql/guyRQcDrZu0LGX4sDG0aP9dbjk5fQpXSif1RUY4/T3HYeL0+1zu86ZKwVIIX5YfT
MLheIUGaXy/UJk361vAFKJBERGv1uufnqBxH0r1bRoytOaZr1niEA04u+VJa0DXOZzKBwxNhQRom
x4ffrsP2VnoJX+wnfYhPOjkiPiHyhswheG0VITTkqD+2uF54M5X2LLdzQuJpu0MZ5HOAHck/ZEpa
xV7h+kNse4p7y17b12H6tJNtVoJOlqP0Ujugc7vh4h8ZaPkSqVSV1nEvHzXx0c7gf038jv1+8WlN
4EgHp09FKU7sbSgcPY9jltElgaAr6J8a+rDGtk+055UeUYxM43U8naBiEOL77LP9FA0y8hKLKlJz
0GBCp4bJrLuZJenXHVb1Zme2EXO0jnQ9nB9OEyI3NpYTbZQxgcswEwYJKoZIhvcNAQkVMQYEBAEA
AAAwRwYJKoZIhvcNAQkUMToeOABQAHkAdABoAG8AbgBGAG8AcgBXAGkAbgBkAG8AdwBzAFQATQBQ
AEMAbwBuAHQAYQBpAG4AZQByMGsGCSsGAQQBgjcRATFeHlwATQBpAGMAcgBvAHMAbwBmAHQAIABF
AG4AaABhAG4AYwBlAGQAIABDAHIAeQBwAHQAbwBnAHIAYQBwAGgAaQBjACAAUAByAG8AdgBpAGQA
ZQByACAAdgAxAC4AMDCCAiAGCSqGSIb3DQEHAaCCAhEEggINMIICCTCCAgUGCyqGSIb3DQEMCgED
oIIB3TCCAdkGCiqGSIb3DQEJFgGgggHJBIIBxTCCAcEwggEqoAMCAQICEBuOlMsLPuu2QTnzyQmx
a0YwDQYJKoZIhvcNAQELBQAwHzEdMBsGA1UEAxMUUHl0aG9uRm9yV2luZG93c1Rlc3QwHhcNMTcw
NDEyMTQzOTIzWhcNMTgwNDEyMjAzOTIzWjAfMR0wGwYDVQQDExRQeXRob25Gb3JXaW5kb3dzVGVz
dDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAkR8Av7EX14eaXOKaHPOWona7nWz3cmvtDnqJ
0JX9SUNXpH+yknwA1eRS5EkLtgc4ASvQyFkxl15JwSSM2RdN1GbFYOQ+u1dpp0gpV7nHNHBhXCuK
Qw7dLR3mmwNhj4p1Xu2eqg0+GcT0fJSmlw3Vdj4NoU3XBkqampbR8aYexv0CAwEAATANBgkqhkiG
9w0BAQsFAAOBgQAnEBXTpVY2CAjiMgF6MEGhDanC6TdYlqC/VIozYU2N+/lOfsnBU/EJLE915ofZ
RCKlzIddtblYstj/96fhJN9QDfWwfgYViDOj0IMvgKWA3UsE7VhQEUqzKpO60R1L3mBHzMO9smjl
g8Qs3KKRLgAyq62BLrXuAs2blxfT224CujEVMBMGCSqGSIb3DQEJFTEGBAQBAAAAMDswHzAHBgUr
DgMCGgQU70h/rEXLQOberGvgJenggoWU5poEFCfdE1wNK1M38Yp3+qfjEqNIJGCPAgIH0A==
"""
@pytest.fixture()
def rawcert():
return b64decode(TEST_CERT)
@pytest.fixture()
def rawpfx():
return b64decode(TEST_PFX)
PFW_TEST_TMP_KEY_CONTAINER = "PythonForWindowsTMPContainerTest"
RANDOM_CERTIF_NAME = b"PythonForWindowsGeneratedRandomCertifTest"
RANDOM_PFX_PASSWORD = "PythonForWindowsGeneratedRandomPFXPassword"
@pytest.fixture()
def randomkeypair(keysize=1024):
r"""Generate a cert / pfx. Based on samples\crypto\encryption_demo.py"""
cert_store = windows.crypto.CertificateStore.new_in_memory()
# Create a TMP context that will hold our newly generated key-pair
with windows.crypto.CryptContext(PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, 0, retrycreate=True) as ctx:
key = gdef.HCRYPTKEY()
keysize_flags = keysize << 16
# Generate a key-pair that is exportable
windows.winproxy.CryptGenKey(ctx, gdef.AT_KEYEXCHANGE, gdef.CRYPT_EXPORTABLE | keysize_flags, key)
# It does NOT destroy the key-pair from the container,
# It only release the key handle
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa379918(v=vs.85).aspx
windows.winproxy.CryptDestroyKey(key)
    # Description of the key-container that will be used to generate the certificate
KeyProvInfo = gdef.CRYPT_KEY_PROV_INFO()
KeyProvInfo.pwszContainerName = PFW_TEST_TMP_KEY_CONTAINER
KeyProvInfo.pwszProvName = None
KeyProvInfo.dwProvType = gdef.PROV_RSA_FULL
KeyProvInfo.dwFlags = 0
KeyProvInfo.cProvParam = 0
KeyProvInfo.rgProvParam = None
#KeyProvInfo.dwKeySpec = AT_SIGNATURE
KeyProvInfo.dwKeySpec = gdef.AT_KEYEXCHANGE
crypt_algo = gdef.CRYPT_ALGORITHM_IDENTIFIER()
crypt_algo.pszObjId = gdef.szOID_RSA_SHA256RSA.encode("ascii") # do something else (bytes in generated ctypes ?)
    # There is no .format on bytes objects, so build the subject string by concatenation
certif_name = b"".join((b"CN=", RANDOM_CERTIF_NAME))
    # Generate a self-signed certificate based on the given key-container and signature algorithm
certif = windows.crypto.generation.generate_selfsigned_certificate(certif_name, key_info=KeyProvInfo, signature_algo=crypt_algo)
# Add the newly created certificate to our TMP cert-store
cert_store.add_certificate(certif)
# Generate a pfx from the TMP cert-store
pfx = windows.crypto.generation.generate_pfx(cert_store, RANDOM_PFX_PASSWORD)
yield certif, pfx
# Destroy the TMP key container
prov = gdef.HCRYPTPROV()
windows.winproxy.CryptAcquireContextW(prov, PFW_TEST_TMP_KEY_CONTAINER, None, gdef.PROV_RSA_FULL, gdef.CRYPT_DELETEKEYSET)
def test_certificate(rawcert):
cert = windows.crypto.Certificate.from_buffer(rawcert)
assert cert.serial == '1b 8e 94 cb 0b 3e eb b6 41 39 f3 c9 09 b1 6b 46'
assert cert.name == b'PythonForWindowsTest'
assert cert.issuer == b'PythonForWindowsTest'
assert cert.thumbprint == 'EF 0C A8 C9 F9 E0 96 AF 74 18 56 8B C1 C9 57 27 A0 89 29 6A'
assert cert.encoded == rawcert
assert cert.version == 2
assert cert == cert
assert cert is cert.duplicate()
cert.chains # TODO: craft a certificate with a chain for test purpose
cert.store.certs
cert.properties
def test_pfx(rawcert, rawpfx):
pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD)
orig_cert = windows.crypto.Certificate.from_buffer(rawcert)
certs = pfx.certs
assert len(certs) == 1
    # Test cert comparison
assert certs[0] == orig_cert
def test_open_pfx_bad_password(rawpfx):
with pytest.raises(WindowsError) as ar:
pfx = windows.crypto.import_pfx(rawpfx, "BadPassword")
def test_encrypt_decrypt(rawcert, rawpfx):
message_to_encrypt = b"Testing message \xff\x01"
cert = windows.crypto.Certificate.from_buffer(rawcert)
# encrypt should accept a cert or iterable of cert
res = windows.crypto.encrypt(cert, message_to_encrypt)
res2 = windows.crypto.encrypt([cert, cert], message_to_encrypt)
del cert
assert message_to_encrypt not in res
# Open pfx and decrypt
pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD)
decrypt = windows.crypto.decrypt(pfx, res)
decrypt2 = windows.crypto.decrypt(pfx, res2)
assert message_to_encrypt == decrypt
assert decrypt == decrypt2
def test_randomkeypair(randomkeypair):
randcert, randrawpfx = randomkeypair
assert randcert.name == RANDOM_CERTIF_NAME
randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD) # Check password is good too
def test_encrypt_decrypt_multiple_receivers(rawcert, rawpfx, randomkeypair):
message_to_encrypt = b"\xff\x00 Testing message \xff\x01"
# Receiver 1: random key pair
randcert, randrawpfx = randomkeypair
randpfx = windows.crypto.import_pfx(randrawpfx, RANDOM_PFX_PASSWORD)
# Receiver 1: PFW-test-keypair
pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD)
cert = windows.crypto.Certificate.from_buffer(rawcert)
assert cert.name != randcert.name
assert cert.encoded != randcert.encoded
# Encrypt the message with 2 differents certificates
encrypted = windows.crypto.encrypt([cert, randcert], message_to_encrypt)
# Decrypt with each PFX and check the result is valid/the same
decrypted = windows.crypto.decrypt(pfx, encrypted)
decrypted2 = windows.crypto.decrypt(randpfx, encrypted)
assert decrypted == decrypted2 == message_to_encrypt
def test_crypt_obj():
path = r"C:\windows\system32\kernel32.dll"
x = windows.crypto.CryptObject(path)
x.crypt_msg.certs
x.crypt_msg.signers
x.signers_and_certs
# TODO: Need some better ideas
def test_certificate_from_store():
return windows.crypto.CertificateStore.from_system_store("Root")
def test_sign_verify(rawcert, rawpfx):
message_to_sign = b"Testing message \xff\x01"
# Load PFX (priv+pub key) & certif (pubkey only)
pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD)
cert = windows.crypto.Certificate.from_buffer(rawcert)
signed_blob = windows.crypto.sign(pfx.certs[0], message_to_sign)
assert message_to_sign in signed_blob
decoded_blob = windows.crypto.verify_signature(cert, signed_blob)
assert decoded_blob == message_to_sign
def test_sign_verify_fail(rawcert, rawpfx):
message_to_sign = b"Testing message \xff\x01"
# Load PFX (priv+pub key) & certif (pubkey only)
pfx = windows.crypto.import_pfx(rawpfx, TEST_PFX_PASSWORD)
cert = windows.crypto.Certificate.from_buffer(rawcert)
signed_blob = windows.crypto.sign(pfx.certs[0], message_to_sign)
assert message_to_sign in signed_blob
# Tamper the signed mesasge content
signed_blob = signed_blob.replace(b"message", b"massage")
with pytest.raises(windows.winproxy.WinproxyError) as excinfo:
decoded_blob = windows.crypto.verify_signature(cert, signed_blob)
assert excinfo.value.winerror == gdef.STATUS_INVALID_SIGNATURE
# str(windows.crypto.encrypt(TEST_CERT, "Hello crypto")).encode("base64")
# Target serial == TEST_CERT.Serial == 1b 8e 94 cb 0b 3e eb b6 41 39 f3 c9 09 b1 6b 46
TEST_CRYPTMSG = b"""MIIBJAYJKoZIhvcNAQcDoIIBFTCCARECAQAxgc0wgcoCAQAwMzAfMR0wGwYDVQQDExRQeXRob25G
b3JXaW5kb3dzVGVzdAIQG46Uyws+67ZBOfPJCbFrRjANBgkqhkiG9w0BAQcwAASBgA1fwFY8w4Bb
fOMer94JhazbJxaUnV305QzF27w4GwNQ2UIpl9KWJoJJaF7azU3nVhP33agAxlxmr9fP48B6DeE1
pbu1jX9tEWlTJC6O0TmKcRPjblEaU6VJXXlpKlKZCmwCUuHR9VtcXGnxEU1Hy7FmHM96lvDRmYQT
Y0MnRJLyMDwGCSqGSIb3DQEHATAdBglghkgBZQMEASoEEEdEGEzKBrDO/zC8z6q6HLaAEGbjGCay
s6u32YhUxQ4/QhI="""
def test_cryptmsg_from_data():
rawdata = b64decode(TEST_CRYPTMSG)
cryptmsg = windows.crypto.CryptMessage.from_buffer(rawdata)
rawtarget = b"\x1b\x8e\x94\xcb\x0b>\xeb\xb6A9\xf3\xc9\t\xb1kF"
assert cryptmsg.get_recipient_data(0).SerialNumber.data[::-1] == rawtarget
| [((145, 192), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""check_for_gc_garbage"""'], {}), "('check_for_gc_garbage')\n", (168, 192), False, 'import pytest\n'), ((3143, 3159), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (3157, 3159), False, 'import pytest\n'), ((3210, 3226), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (3224, 3226), False, 'import pytest\n'), ((3473, 3489), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (3487, 3489), False, 'import pytest\n'), ((4424, 4450), 'windows.generated_def.CRYPT_KEY_PROV_INFO', 'gdef.CRYPT_KEY_PROV_INFO', ([], {}), '()\n', (4448, 4450), True, 'import windows.generated_def as gdef\n'), ((4800, 4833), 'windows.generated_def.CRYPT_ALGORITHM_IDENTIFIER', 'gdef.CRYPT_ALGORITHM_IDENTIFIER', ([], {}), '()\n', (4831, 4833), True, 'import windows.generated_def as gdef\n'), ((4860, 4900), 'windows.generated_def.szOID_RSA_SHA256RSA.encode', 'gdef.szOID_RSA_SHA256RSA.encode', (['"""ascii"""'], {}), "('ascii')\n", (4891, 4900), True, 'import windows.generated_def as gdef\n'), ((5605, 5622), 'windows.generated_def.HCRYPTPROV', 'gdef.HCRYPTPROV', ([], {}), '()\n', (5620, 5622), True, 'import windows.generated_def as gdef\n'), ((3870, 3886), 'windows.generated_def.HCRYPTKEY', 'gdef.HCRYPTKEY', ([], {}), '()\n', (3884, 3886), True, 'import windows.generated_def as gdef\n'), ((6668, 6695), 'pytest.raises', 'pytest.raises', (['WindowsError'], {}), '(WindowsError)\n', (6681, 6695), False, 'import pytest\n'), ((9857, 9902), 'pytest.raises', 'pytest.raises', (['windows.winproxy.WinproxyError'], {}), '(windows.winproxy.WinproxyError)\n', (9870, 9902), False, 'import pytest\n')] |
andreatramacere/cdci_data_analysis | cdci_data_analysis/analysis/plot_tools.py | 8ae34a7252d6baf011a3b99fbe4f6e624b63d7df | from __future__ import absolute_import, division, print_function
from builtins import (bytes, str, open, super, range,
                       zip, round, input, int, pow, object, map)
__author__ = "Andrea Tramacere"
import numpy as np
from astropy import wcs
from bokeh.layouts import row, widgetbox,gridplot
from bokeh.models import CustomJS, Slider,HoverTool,ColorBar,LinearColorMapper,LabelSet,ColumnDataSource
from bokeh.embed import components
from bokeh.plotting import figure
from bokeh.palettes import Plasma256
class Image(object):
def __init__(self,data,header):
self.data=data
self.header=header
def change_image_contrast(self, attr, old, new):
# print attr,old,new
self.fig_im.glyph.color_mapper.update(low=self.graph_min_slider.value, high=self.graph_max_slider.value)
def get_html_draw(self,w=None,h=None, catalog=None, plot=False, vmin=None, vmax=None):
#import plotly
#import plotly.graph_objs as go
#from plotly.graph_objs import Layout
# print('vmin,vmax',vmin,vmax)
msk = ~np.isnan(self.data)
if vmin is None:
vmin = self.data[msk].min()
if vmax is None:
vmax = self.data[msk].max()
min_s = self.data.min()
max_s = self.data.max()
r = self.data.shape[0] * 2
c = self.data.shape[1] * 2
fig = figure(plot_width=w, plot_height=h, x_range=(0, c * 0.5), y_range=(0, r * 0.5),
tools=['pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair'])
w = wcs.WCS(self.header)
color_mapper = LinearColorMapper(low=min_s, high=max_s, palette=Plasma256)
fig_im = fig.image(image=[self.data], x=[0], y=[0], dw=[c * 0.5], dh=[r * 0.5],
color_mapper=color_mapper)
hover = HoverTool(tooltips=[("x", "$x"), ("y", "$y"), ("value", "@image")],
renderers=[fig_im])
fig.add_tools(hover)
#fig, (ax) = plt.subplots(1, 1, figsize=(4, 3), subplot_kw={'projection': WCS(self.header)})
#im = ax.imshow(self.data,
# origin='lower',
# zorder=1,
# interpolation='none',
# aspect='equal',
# cmap=plt.get_cmap('jet'),
# vmin=vmin,
# vmax=vmax)
if catalog is not None:
lon = catalog.ra
lat = catalog.dec
if len(lat) > 0.:
pixcrd = w.wcs_world2pix(np.column_stack((lon, lat)), 0)
msk = ~np.isnan(pixcrd[:, 0])
#ax.plot(pixcrd[:, 0][msk], pixcrd[:, 1][msk], 'o', mfc='none')
source = ColumnDataSource(data=dict(lon=pixcrd[:, 0][msk]+0.5,
lat=pixcrd[:, 1][msk]+0.5,
names=catalog.name[msk]))
#for ID, (x, y) in enumerate(pixcrd):
# if msk[ID]:
# # print ('xy',(pixcrd[:, 0][ID], pixcrd[:, 1][ID]))
# ax.annotate('%s' % catalog.name[ID], xy=(x, y), color='white')
#print(pixcrd[:][msk])
fig.scatter(x='lon', y='lat', marker='circle', size=15,
line_color="white", fill_color=None, alpha=1.0, source=source)
labels = LabelSet(x='lon', y='lat', text='names', level='glyph',
x_offset=5, y_offset=5, render_mode='canvas', source=source, text_color='white')
fig.add_layout(labels)
#print'cat', catalog[msk]
color_bar = ColorBar(color_mapper=color_mapper,
label_standoff=12, border_line_color=None, location=(0, 0))
JS_code_slider = """
var vmin = low_slider.value;
var vmax = high_slider.value;
fig_im.glyph.color_mapper.high = vmax;
fig_im.glyph.color_mapper.low = vmin;
"""
callback = CustomJS(args=dict(fig_im=fig_im), code=JS_code_slider)
self.graph_min_slider = Slider(title="Sig. Min", start=min_s, end=max_s, step=1, value=min_s, callback=callback)
self.graph_max_slider = Slider(title="Sig. Max", start=min_s, end=max_s, step=1, value=max_s * 0.8,
callback=callback)
self.graph_min_slider.on_change('value', self.change_image_contrast)
self.graph_max_slider.on_change('value', self.change_image_contrast)
callback.args["low_slider"] = self.graph_min_slider
callback.args["high_slider"] = self.graph_max_slider
#ax.set_xlabel('RA')
#ax.set_ylabel('DEC')
#ax.grid(True, color='white')
#fig.colorbar(im, ax=ax)
#plugins.connect(fig, plugins.MousePosition(fontsize=14))
#if plot == True:
# print('plot', plot)
# mpld3.show()
fig.add_layout(color_bar, 'right')
layout = row(
fig, widgetbox(self.graph_min_slider, self.graph_max_slider),
)
#curdoc().add_root(layout)
#output_file("slider.html", title="slider.py example")
#from bokeh.io import show
#show(layout)
script, div = components(layout)
html_dict = {}
html_dict['script'] = script
html_dict['div'] = div
return html_dict
class ScatterPlot(object):
def __init__(self,w,h,x_label=None,y_label=None,x_range=None,y_range=None,title=None,y_axis_type='linear',x_axis_type='linear'):
hover = HoverTool(tooltips=[("x", "$x"), ("y", "$y")])
self.fig = figure(title=title, width=w, height=h,x_range=x_range,y_range=y_range,
y_axis_type=y_axis_type,
x_axis_type=x_axis_type,
tools=[hover, 'pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair']
)
if x_label is not None:
self.fig.xaxis.axis_label = x_label
if y_label is not None:
self.fig.yaxis.axis_label = y_label
def add_errorbar(self, x, y, xerr=None, yerr=None, color='red',
point_kwargs={}, error_kwargs={}):
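        # Error bars are drawn manually as multi_line segments around each point:
        # one horizontal segment per x error and one vertical segment per y error.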
self.fig.circle(x, y, color=color, **point_kwargs)
if xerr is not None:
x_err_x = []
x_err_y = []
for px, py, err in zip(x, y, xerr):
x_err_x.append((px - err, px + err))
x_err_y.append((py, py))
self.fig.multi_line(x_err_x, x_err_y, color=color, **error_kwargs)
if yerr is not None:
y_err_x = []
y_err_y = []
for px, py, err in zip(x, y, yerr):
y_err_x.append((px, px))
y_err_y.append((py - err, py + err))
self.fig.multi_line(y_err_x, y_err_y, color=color, **error_kwargs)
def add_step_line(self,x,y,legend=None):
#print('a')
self.fig.step(x,y,name=legend, mode="center")
#print('b')
def add_line(self,x,y,legend=None,color=None):
self.fig.line(x,y,legend=legend,line_color=color)
def get_html_draw(self):
layout = row(
self.fig
)
#curdoc().add_root(layout)
#show(layout)
script, div = components(layout)
#print ('script',script)
#print ('div',div)
html_dict = {}
html_dict['script'] = script
html_dict['div'] = div
return html_dict
class GridPlot(object):
def __init__(self,f1,f2,w=None,h=None):
self.f1=f1
self.f2=f2
def get_html_draw(self,w=None,h=None):
#l = layout([self.f1.fig],[self.f2.fig])
grid = gridplot([self.f1.fig,self.f2.fig],ncols=1,plot_width=w, plot_height=h)
#curdoc().add_root(grid)
#show(grid)
#output_file("test.html")
script, div = components(grid)
html_dict={}
html_dict['script']=script
html_dict['div'] = div
return html_dict
| [((1406, 1557), 'bokeh.plotting.figure', 'figure', ([], {'plot_width': 'w', 'plot_height': 'h', 'x_range': '(0, c * 0.5)', 'y_range': '(0, r * 0.5)', 'tools': "['pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair']"}), "(plot_width=w, plot_height=h, x_range=(0, c * 0.5), y_range=(0, r * \n 0.5), tools=['pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair'])\n", (1412, 1557), False, 'from bokeh.plotting import figure\n'), ((1587, 1607), 'astropy.wcs.WCS', 'wcs.WCS', (['self.header'], {}), '(self.header)\n', (1594, 1607), False, 'from astropy import wcs\n'), ((1631, 1690), 'bokeh.models.LinearColorMapper', 'LinearColorMapper', ([], {'low': 'min_s', 'high': 'max_s', 'palette': 'Plasma256'}), '(low=min_s, high=max_s, palette=Plasma256)\n', (1648, 1690), False, 'from bokeh.models import CustomJS, Slider, HoverTool, ColorBar, LinearColorMapper, LabelSet, ColumnDataSource\n'), ((1851, 1942), 'bokeh.models.HoverTool', 'HoverTool', ([], {'tooltips': "[('x', '$x'), ('y', '$y'), ('value', '@image')]", 'renderers': '[fig_im]'}), "(tooltips=[('x', '$x'), ('y', '$y'), ('value', '@image')],\n renderers=[fig_im])\n", (1860, 1942), False, 'from bokeh.models import CustomJS, Slider, HoverTool, ColorBar, LinearColorMapper, LabelSet, ColumnDataSource\n'), ((3728, 3828), 'bokeh.models.ColorBar', 'ColorBar', ([], {'color_mapper': 'color_mapper', 'label_standoff': '(12)', 'border_line_color': 'None', 'location': '(0, 0)'}), '(color_mapper=color_mapper, label_standoff=12, border_line_color=\n None, location=(0, 0))\n', (3736, 3828), False, 'from bokeh.models import CustomJS, Slider, HoverTool, ColorBar, LinearColorMapper, LabelSet, ColumnDataSource\n'), ((4224, 4316), 'bokeh.models.Slider', 'Slider', ([], {'title': '"""Sig. Min"""', 'start': 'min_s', 'end': 'max_s', 'step': '(1)', 'value': 'min_s', 'callback': 'callback'}), "(title='Sig. Min', start=min_s, end=max_s, step=1, value=min_s,\n callback=callback)\n", (4230, 4316), False, 'from bokeh.models import CustomJS, Slider, HoverTool, ColorBar, LinearColorMapper, LabelSet, ColumnDataSource\n'), ((4345, 4443), 'bokeh.models.Slider', 'Slider', ([], {'title': '"""Sig. Max"""', 'start': 'min_s', 'end': 'max_s', 'step': '(1)', 'value': '(max_s * 0.8)', 'callback': 'callback'}), "(title='Sig. 
Max', start=min_s, end=max_s, step=1, value=max_s * 0.8,\n callback=callback)\n", (4351, 4443), False, 'from bokeh.models import CustomJS, Slider, HoverTool, ColorBar, LinearColorMapper, LabelSet, ColumnDataSource\n'), ((5368, 5386), 'bokeh.embed.components', 'components', (['layout'], {}), '(layout)\n', (5378, 5386), False, 'from bokeh.embed import components\n'), ((5685, 5731), 'bokeh.models.HoverTool', 'HoverTool', ([], {'tooltips': "[('x', '$x'), ('y', '$y')]"}), "(tooltips=[('x', '$x'), ('y', '$y')])\n", (5694, 5731), False, 'from bokeh.models import CustomJS, Slider, HoverTool, ColorBar, LinearColorMapper, LabelSet, ColumnDataSource\n'), ((5752, 5956), 'bokeh.plotting.figure', 'figure', ([], {'title': 'title', 'width': 'w', 'height': 'h', 'x_range': 'x_range', 'y_range': 'y_range', 'y_axis_type': 'y_axis_type', 'x_axis_type': 'x_axis_type', 'tools': "[hover, 'pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair']"}), "(title=title, width=w, height=h, x_range=x_range, y_range=y_range,\n y_axis_type=y_axis_type, x_axis_type=x_axis_type, tools=[hover,\n 'pan,box_zoom,box_select,wheel_zoom,reset,save,crosshair'])\n", (5758, 5956), False, 'from bokeh.plotting import figure\n'), ((7289, 7302), 'bokeh.layouts.row', 'row', (['self.fig'], {}), '(self.fig)\n', (7292, 7302), False, 'from bokeh.layouts import row, widgetbox, gridplot\n'), ((7407, 7425), 'bokeh.embed.components', 'components', (['layout'], {}), '(layout)\n', (7417, 7425), False, 'from bokeh.embed import components\n'), ((7824, 7898), 'bokeh.layouts.gridplot', 'gridplot', (['[self.f1.fig, self.f2.fig]'], {'ncols': '(1)', 'plot_width': 'w', 'plot_height': 'h'}), '([self.f1.fig, self.f2.fig], ncols=1, plot_width=w, plot_height=h)\n', (7832, 7898), False, 'from bokeh.layouts import row, widgetbox, gridplot\n'), ((8005, 8021), 'bokeh.embed.components', 'components', (['grid'], {}), '(grid)\n', (8015, 8021), False, 'from bokeh.embed import components\n'), ((1103, 1122), 'numpy.isnan', 'np.isnan', (['self.data'], {}), '(self.data)\n', (1111, 1122), True, 'import numpy as np\n'), ((5120, 5175), 'bokeh.layouts.widgetbox', 'widgetbox', (['self.graph_min_slider', 'self.graph_max_slider'], {}), '(self.graph_min_slider, self.graph_max_slider)\n', (5129, 5175), False, 'from bokeh.layouts import row, widgetbox, gridplot\n'), ((6496, 6511), 'builtins.zip', 'zip', (['x', 'y', 'xerr'], {}), '(x, y, xerr)\n', (6499, 6511), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((6797, 6812), 'builtins.zip', 'zip', (['x', 'y', 'yerr'], {}), '(x, y, yerr)\n', (6800, 6812), False, 'from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object, map, zip\n'), ((3453, 3593), 'bokeh.models.LabelSet', 'LabelSet', ([], {'x': '"""lon"""', 'y': '"""lat"""', 'text': '"""names"""', 'level': '"""glyph"""', 'x_offset': '(5)', 'y_offset': '(5)', 'render_mode': '"""canvas"""', 'source': 'source', 'text_color': '"""white"""'}), "(x='lon', y='lat', text='names', level='glyph', x_offset=5,\n y_offset=5, render_mode='canvas', source=source, text_color='white')\n", (3461, 3593), False, 'from bokeh.models import CustomJS, Slider, HoverTool, ColorBar, LinearColorMapper, LabelSet, ColumnDataSource\n'), ((2578, 2605), 'numpy.column_stack', 'np.column_stack', (['(lon, lat)'], {}), '((lon, lat))\n', (2593, 2605), True, 'import numpy as np\n'), ((2634, 2658), 'numpy.isnan', 'np.isnan', (['pixcrd[:, (0)]'], {}), '(pixcrd[:, (0)])\n', (2642, 2658), True, 'import numpy as np\n')] |
RebelCodeBase/testaid | test/unit/test_testaid_unit_pathlist.py | 998c827b826fe4374ecf0a234fef61a975e2fcd7 | from pathlib import Path
from testaid.pathlist import PathList
def test_testaid_unit_pathlist_roles_blacklist(testvars_roles_blacklist):
assert testvars_roles_blacklist is not None
def test_testaid_unit_pathlist_roles_whitelist(testvars_roles_whitelist):
assert testvars_roles_whitelist is not None
def test_testaid_unit_pathlist_get(tmp_path):
msd = tmp_path / 'molecule_scenario_directory'
dir1 = msd / 'dir1'
dir1.mkdir(parents=True)
dir2 = tmp_path / 'dir2'
dir2.mkdir()
file1 = dir1 / 'file1.yml'
file1.touch()
file2 = dir1 / 'file2.yml'
file2.touch()
file3 = dir2 / 'file3.yml'
file3.touch()
my_pathlist = [Path(file3), Path(file1), Path(file2)]
my_pathstring = 'dir1:../dir2/file3.yml'
pathlist = PathList(my_pathstring, msd)
assert pathlist.get() == my_pathlist
| [((774, 802), 'testaid.pathlist.PathList', 'PathList', (['my_pathstring', 'msd'], {}), '(my_pathstring, msd)\n', (782, 802), False, 'from testaid.pathlist import PathList\n'), ((675, 686), 'pathlib.Path', 'Path', (['file3'], {}), '(file3)\n', (679, 686), False, 'from pathlib import Path\n'), ((688, 699), 'pathlib.Path', 'Path', (['file1'], {}), '(file1)\n', (692, 699), False, 'from pathlib import Path\n'), ((701, 712), 'pathlib.Path', 'Path', (['file2'], {}), '(file2)\n', (705, 712), False, 'from pathlib import Path\n')] |
vkpro-forks/python-zhmcclient | tests/unit/zhmcclient/test_hba.py | eab2dca37cb417d03411450dabf72805214b5ca0 | # Copyright 2016-2017 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for _hba module.
"""
from __future__ import absolute_import, print_function
import pytest
import re
import copy
from zhmcclient import Client, Hba, HTTPError, NotFound
from zhmcclient_mock import FakedSession
from tests.common.utils import assert_resources
# Object IDs and names of our faked HBAs:
HBA1_OID = 'hba 1-oid'
HBA1_NAME = 'hba 1'
HBA2_OID = 'hba 2-oid'
HBA2_NAME = 'hba 2'
# URIs and Object IDs of elements referenced in HBA properties:
FCP1_OID = 'fake-fcp1-oid'
PORT11_OID = 'fake-port11-oid'
PORT11_URI = '/api/adapters/{}/storage-ports/{}'.format(FCP1_OID, PORT11_OID)
class TestHba(object):
"""All tests for Hba and HbaManager classes."""
def setup_method(self):
"""
Set up a faked session, and add a faked CPC in DPM mode with one
partition that has no HBAs.
Add one FCP adapter and port.
"""
self.session = FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8')
self.client = Client(self.session)
# Add a CPC in DPM mode
self.faked_cpc = self.session.hmc.cpcs.add({
'element-id': 'fake-cpc1-oid',
# element-uri is set up automatically
'parent': None,
'class': 'cpc',
'name': 'fake-cpc1-name',
'description': 'CPC #1 (DPM mode)',
'status': 'active',
'dpm-enabled': True,
'is-ensemble-member': False,
'iml-mode': 'dpm',
})
self.cpc = self.client.cpcs.find(name='fake-cpc1-name')
# Add a partition to the CPC
self.faked_partition = self.faked_cpc.partitions.add({
'element-id': 'fake-part1-oid',
# element-uri will be automatically set
'parent': self.faked_cpc.uri,
'class': 'partition',
'name': 'fake-part1-name',
'description': 'Partition #1',
'status': 'active',
'initial-memory': 1024,
'maximum-memory': 2048,
})
self.partition = self.cpc.partitions.find(name='fake-part1-name')
# Add an FCP adapter and port to the CPC
self.faked_fcp1 = self.faked_cpc.adapters.add({
'object-id': FCP1_OID,
'parent': self.faked_cpc.uri,
'class': 'adapter',
'name': 'fcp1',
'description': 'FCP #1',
'status': 'active',
'type': 'fcp',
'adapter-id': '123',
'detected-card-type': '10gbe-roce-express',
'card-location': '1234-5678-J.01',
'port-count': 1,
'network-port-uris': [],
'state': 'online',
'configured-capacity': 80,
'used-capacity': 0,
'allowed-capacity': 80,
'maximum-total-capacity': 80,
'physical-channel-status': 'operating',
})
self.faked_port11 = self.faked_fcp1.ports.add({
'element-id': PORT11_OID,
'parent': self.faked_fcp1.uri,
'class': 'storage-port',
'index': 1,
'name': 'fake-port11-name',
'description': 'FCP #1 Port #1',
})
assert PORT11_URI == self.faked_port11.uri
def add_hba1(self):
"""Add a faked HBA 1 to the faked partition."""
faked_hba = self.faked_partition.hbas.add({
'element-id': HBA1_OID,
# element-uri will be automatically set
'parent': self.faked_partition.uri,
'class': 'hba',
'name': HBA1_NAME,
'description': 'HBA ' + HBA1_NAME,
'adapter-port-uri': PORT11_URI,
'wwpn': 'AABBCCDDEEFF0011',
'device-number': '1111',
})
return faked_hba
def add_hba2(self):
"""Add a faked HBA 2 to the faked partition."""
faked_hba = self.faked_partition.hbas.add({
'element-id': HBA2_OID,
# element-uri will be automatically set
'parent': self.faked_partition.uri,
'class': 'hba',
'name': HBA2_NAME,
'description': 'HBA ' + HBA2_NAME,
'adapter-port-uri': PORT11_URI,
'wwpn': 'AABBCCDDEEFF0012',
'device-number': '1112',
})
return faked_hba
def test_hbamanager_initial_attrs(self):
"""Test initial attributes of HbaManager."""
hba_mgr = self.partition.hbas
# Verify all public properties of the manager object
assert hba_mgr.resource_class == Hba
assert hba_mgr.session == self.session
assert hba_mgr.parent == self.partition
assert hba_mgr.partition == self.partition
# TODO: Test for HbaManager.__repr__()
@pytest.mark.parametrize(
"full_properties_kwargs, prop_names", [
(dict(),
['element-uri']),
(dict(full_properties=False),
['element-uri']),
(dict(full_properties=True),
None),
]
)
def test_hbamanager_list_full_properties(
self, full_properties_kwargs, prop_names):
"""Test HbaManager.list() with full_properties."""
# Add two faked HBAs
faked_hba1 = self.add_hba1()
faked_hba2 = self.add_hba2()
exp_faked_hbas = [faked_hba1, faked_hba2]
hba_mgr = self.partition.hbas
# Execute the code to be tested
hbas = hba_mgr.list(**full_properties_kwargs)
assert_resources(hbas, exp_faked_hbas, prop_names)
@pytest.mark.parametrize(
"filter_args, exp_oids", [
({'element-id': HBA1_OID},
[HBA1_OID]),
({'element-id': HBA2_OID},
[HBA2_OID]),
({'element-id': [HBA1_OID, HBA2_OID]},
[HBA1_OID, HBA2_OID]),
({'element-id': [HBA1_OID, HBA1_OID]},
[HBA1_OID]),
({'element-id': HBA1_OID + 'foo'},
[]),
({'element-id': [HBA1_OID, HBA2_OID + 'foo']},
[HBA1_OID]),
({'element-id': [HBA2_OID + 'foo', HBA1_OID]},
[HBA1_OID]),
({'name': HBA1_NAME},
[HBA1_OID]),
({'name': HBA2_NAME},
[HBA2_OID]),
({'name': [HBA1_NAME, HBA2_NAME]},
[HBA1_OID, HBA2_OID]),
({'name': HBA1_NAME + 'foo'},
[]),
({'name': [HBA1_NAME, HBA2_NAME + 'foo']},
[HBA1_OID]),
({'name': [HBA2_NAME + 'foo', HBA1_NAME]},
[HBA1_OID]),
({'name': [HBA1_NAME, HBA1_NAME]},
[HBA1_OID]),
({'name': '.*hba 1'},
[HBA1_OID]),
({'name': 'hba 1.*'},
[HBA1_OID]),
({'name': 'hba .'},
[HBA1_OID, HBA2_OID]),
({'name': '.ba 1'},
[HBA1_OID]),
({'name': '.+'},
[HBA1_OID, HBA2_OID]),
({'name': 'hba 1.+'},
[]),
({'name': '.+hba 1'},
[]),
({'name': HBA1_NAME,
'element-id': HBA1_OID},
[HBA1_OID]),
({'name': HBA1_NAME,
'element-id': HBA1_OID + 'foo'},
[]),
({'name': HBA1_NAME + 'foo',
'element-id': HBA1_OID},
[]),
({'name': HBA1_NAME + 'foo',
'element-id': HBA1_OID + 'foo'},
[]),
]
)
def test_hbamanager_list_filter_args(self, filter_args, exp_oids):
"""Test HbaManager.list() with filter_args."""
# Add two faked HBAs
self.add_hba1()
self.add_hba2()
hba_mgr = self.partition.hbas
# Execute the code to be tested
hbas = hba_mgr.list(filter_args=filter_args)
assert len(hbas) == len(exp_oids)
if exp_oids:
oids = [hba.properties['element-id'] for hba in hbas]
assert set(oids) == set(exp_oids)
@pytest.mark.parametrize(
"initial_partition_status, exp_status_exc", [
('stopped', None),
('terminated', None),
('starting', HTTPError({'http-status': 409, 'reason': 1})),
('active', None),
('stopping', HTTPError({'http-status': 409, 'reason': 1})),
('degraded', None),
('reservation-error', None),
('paused', None),
]
)
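    # Transitional partition states ('starting', 'stopping') are expected to be
    # rejected with HTTP 409, reason 1; all other states should allow the creation.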
@pytest.mark.parametrize(
"input_props, exp_prop_names, exp_prop_exc", [
({},
None,
HTTPError({'http-status': 400, 'reason': 5})),
({'name': 'fake-hba-x'},
None,
HTTPError({'http-status': 400, 'reason': 5})),
({'adapter-port-uri': PORT11_URI},
None,
HTTPError({'http-status': 400, 'reason': 5})),
({'name': 'fake-hba-x',
'adapter-port-uri': PORT11_URI},
['element-uri', 'name', 'adapter-port-uri'],
None),
]
)
def test_hbamanager_create(
self, input_props, exp_prop_names, exp_prop_exc,
initial_partition_status, exp_status_exc):
"""Test HbaManager.create()."""
# Set the status of the faked partition
self.faked_partition.properties['status'] = initial_partition_status
hba_mgr = self.partition.hbas
if exp_status_exc:
exp_exc = exp_status_exc
elif exp_prop_exc:
exp_exc = exp_prop_exc
else:
exp_exc = None
if exp_exc:
with pytest.raises(exp_exc.__class__) as exc_info:
# Execute the code to be tested
hba = hba_mgr.create(properties=input_props)
exc = exc_info.value
if isinstance(exp_exc, HTTPError):
assert exc.http_status == exp_exc.http_status
assert exc.reason == exp_exc.reason
else:
# Execute the code to be tested.
# Note: the Hba object returned by Hba.create() has
# the input properties plus 'element-uri' plus 'element-id'.
hba = hba_mgr.create(properties=input_props)
# Check the resource for consistency within itself
assert isinstance(hba, Hba)
hba_name = hba.name
exp_hba_name = hba.properties['name']
assert hba_name == exp_hba_name
hba_uri = hba.uri
exp_hba_uri = hba.properties['element-uri']
assert hba_uri == exp_hba_uri
# Check the properties against the expected names and values
for prop_name in exp_prop_names:
assert prop_name in hba.properties
if prop_name in input_props:
value = hba.properties[prop_name]
exp_value = input_props[prop_name]
assert value == exp_value
def test_hba_repr(self):
"""Test Hba.__repr__()."""
# Add a faked hba
faked_hba = self.add_hba1()
hba_mgr = self.partition.hbas
hba = hba_mgr.find(name=faked_hba.name)
# Execute the code to be tested
repr_str = repr(hba)
repr_str = repr_str.replace('\n', '\\n')
# We check just the begin of the string:
assert re.match(r'^{classname}\s+at\s+0x{id:08x}\s+\(\\n.*'.
format(classname=hba.__class__.__name__,
id=id(hba)),
repr_str)
@pytest.mark.parametrize(
"initial_partition_status, exp_exc", [
('stopped', None),
('terminated', None),
('starting', HTTPError({'http-status': 409, 'reason': 1})),
('active', None),
('stopping', HTTPError({'http-status': 409, 'reason': 1})),
('degraded', None),
('reservation-error', None),
('paused', None),
]
)
def test_hba_delete(self, initial_partition_status, exp_exc):
"""Test Hba.delete()."""
# Add a faked HBA to be tested and another one
faked_hba = self.add_hba1()
self.add_hba2()
# Set the status of the faked partition
self.faked_partition.properties['status'] = initial_partition_status
hba_mgr = self.partition.hbas
hba = hba_mgr.find(name=faked_hba.name)
if exp_exc:
with pytest.raises(exp_exc.__class__) as exc_info:
# Execute the code to be tested
hba.delete()
exc = exc_info.value
if isinstance(exp_exc, HTTPError):
assert exc.http_status == exp_exc.http_status
assert exc.reason == exp_exc.reason
# Check that the HBA still exists
hba_mgr.find(name=faked_hba.name)
else:
# Execute the code to be tested.
hba.delete()
# Check that the HBA no longer exists
with pytest.raises(NotFound) as exc_info:
hba_mgr.find(name=faked_hba.name)
def test_hba_delete_create_same_name(self):
"""Test Hba.delete() followed by Hba.create() with same name."""
# Add a faked HBA to be tested and another one
faked_hba = self.add_hba1()
hba_name = faked_hba.name
self.add_hba2()
# Construct the input properties for a third HBA with same name
part3_props = copy.deepcopy(faked_hba.properties)
part3_props['description'] = 'Third HBA'
# Set the status of the faked partition
self.faked_partition.properties['status'] = 'stopped' # deletable
hba_mgr = self.partition.hbas
hba = hba_mgr.find(name=hba_name)
# Execute the deletion code to be tested.
hba.delete()
# Check that the HBA no longer exists
with pytest.raises(NotFound):
hba_mgr.find(name=hba_name)
# Execute the creation code to be tested.
hba_mgr.create(part3_props)
# Check that the HBA exists again under that name
hba3 = hba_mgr.find(name=hba_name)
description = hba3.get_property('description')
assert description == 'Third HBA'
@pytest.mark.parametrize(
"input_props", [
{},
{'description': 'New HBA description'},
{'device-number': 'FEDC',
'description': 'New HBA description'},
]
)
def test_hba_update_properties(self, input_props):
"""Test Hba.update_properties()."""
# Add a faked HBA
faked_hba = self.add_hba1()
# Set the status of the faked partition
self.faked_partition.properties['status'] = 'stopped' # updatable
hba_mgr = self.partition.hbas
hba = hba_mgr.find(name=faked_hba.name)
hba.pull_full_properties()
saved_properties = copy.deepcopy(hba.properties)
# Execute the code to be tested
hba.update_properties(properties=input_props)
# Verify that the resource object already reflects the property
# updates.
for prop_name in saved_properties:
if prop_name in input_props:
exp_prop_value = input_props[prop_name]
else:
exp_prop_value = saved_properties[prop_name]
assert prop_name in hba.properties
prop_value = hba.properties[prop_name]
assert prop_value == exp_prop_value
# Refresh the resource object and verify that the resource object
# still reflects the property updates.
hba.pull_full_properties()
for prop_name in saved_properties:
if prop_name in input_props:
exp_prop_value = input_props[prop_name]
else:
exp_prop_value = saved_properties[prop_name]
assert prop_name in hba.properties
prop_value = hba.properties[prop_name]
assert prop_value == exp_prop_value
def test_hba_update_name(self):
"""Test Hba.update_properties() with 'name' property."""
# Add a faked HBA
faked_hba = self.add_hba1()
hba_name = faked_hba.name
# Set the status of the faked partition
self.faked_partition.properties['status'] = 'stopped' # updatable
hba_mgr = self.partition.hbas
hba = hba_mgr.find(name=hba_name)
new_hba_name = "new-" + hba_name
# Execute the code to be tested
hba.update_properties(properties={'name': new_hba_name})
# Verify that the resource is no longer found by its old name, using
# list() (this does not use the name-to-URI cache).
hbas_list = hba_mgr.list(
filter_args=dict(name=hba_name))
assert len(hbas_list) == 0
# Verify that the resource is no longer found by its old name, using
# find() (this uses the name-to-URI cache).
with pytest.raises(NotFound):
hba_mgr.find(name=hba_name)
# Verify that the resource object already reflects the update, even
# though it has not been refreshed yet.
assert hba.properties['name'] == new_hba_name
# Refresh the resource object and verify that it still reflects the
# update.
hba.pull_full_properties()
assert hba.properties['name'] == new_hba_name
# Verify that the resource can be found by its new name, using find()
new_hba_find = hba_mgr.find(name=new_hba_name)
assert new_hba_find.properties['name'] == new_hba_name
# Verify that the resource can be found by its new name, using list()
new_hbas_list = hba_mgr.list(
filter_args=dict(name=new_hba_name))
assert len(new_hbas_list) == 1
new_hba_list = new_hbas_list[0]
assert new_hba_list.properties['name'] == new_hba_name
@pytest.mark.parametrize(
"initial_partition_status, exp_exc", [
('stopped', None),
('terminated', None),
('starting', HTTPError({'http-status': 409, 'reason': 1})),
('active', None),
('stopping', HTTPError({'http-status': 409, 'reason': 1})),
('degraded', None),
('reservation-error', None),
('paused', None),
]
)
def test_hba_reassign_port(self, initial_partition_status, exp_exc):
"""Test Hba.reassign_port()."""
# Add a faked HBA to be tested.
# Its port points to a faked URI.
faked_hba = self.add_hba1()
# Add a faked FCP with one port that the HBA will be reassigned to
faked_adapter = self.faked_cpc.adapters.add({
'object-id': 'fake-fcp1-oid',
# object-uri is auto-set based upon object-id
'parent': self.faked_cpc.uri,
'class': 'adapter',
'name': 'fake-fcp1',
'description': 'FCP #1',
'status': 'active',
'type': 'fcp',
# adapter-family is auto-set based upon type
'adapter-id': '123',
'detected-card-type': 'ficon-express-16s',
'card-location': '1234-5678-J.01',
'port-count': 1,
'storage-port-uris': [],
'state': 'online',
'configured-capacity': 80,
'used-capacity': 0,
'allowed-capacity': 80,
'maximum-total-capacity': 80,
'channel-path-id': '1B',
'physical-channel-status': 'operating',
})
adapter = self.cpc.adapters.find(name='fake-fcp1')
faked_adapter.ports.add({
'element-id': 'fake-port1-oid',
# element-uri is auto-set based upon object-id
'parent': faked_adapter.uri,
'class': 'storage-port',
'name': 'fake-port1',
'description': 'FCP #1 Port 1',
'index': 0,
'fabric-id': None,
})
port = adapter.ports.find(name='fake-port1')
# Set the status of the faked partition
self.faked_partition.properties['status'] = initial_partition_status
# The HBA object we will perform the test on
hba = self.partition.hbas.find(name=faked_hba.name)
# Save the HBA properties for later comparison
hba.pull_full_properties()
saved_properties = copy.deepcopy(hba.properties)
if exp_exc:
with pytest.raises(exp_exc.__class__) as exc_info:
# Execute the code to be tested
hba.reassign_port(port)
exc = exc_info.value
if isinstance(exp_exc, HTTPError):
assert exc.http_status == exp_exc.http_status
assert exc.reason == exp_exc.reason
# Check that the port of the HBA is unchanged ...
prop_name = 'adapter-port-uri'
# ... in the resource object:
assert hba.properties[prop_name] == saved_properties[prop_name]
# ... and again when refreshed from the mock state:
hba.pull_full_properties()
assert hba.properties[prop_name] == saved_properties[prop_name]
else:
# Execute the code to be tested.
hba.reassign_port(port)
# Check that the port of the HBA has been set ...
# ... in the resource object:
prop_name = 'adapter-port-uri'
assert hba.properties[prop_name] == port.uri
# ... and again when refreshed from the mock state:
hba.pull_full_properties()
assert hba.properties[prop_name] == port.uri
| [((6104, 7390), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""filter_args, exp_oids"""', "[({'element-id': HBA1_OID}, [HBA1_OID]), ({'element-id': HBA2_OID}, [\n HBA2_OID]), ({'element-id': [HBA1_OID, HBA2_OID]}, [HBA1_OID, HBA2_OID]\n ), ({'element-id': [HBA1_OID, HBA1_OID]}, [HBA1_OID]), ({'element-id': \n HBA1_OID + 'foo'}, []), ({'element-id': [HBA1_OID, HBA2_OID + 'foo']},\n [HBA1_OID]), ({'element-id': [HBA2_OID + 'foo', HBA1_OID]}, [HBA1_OID]),\n ({'name': HBA1_NAME}, [HBA1_OID]), ({'name': HBA2_NAME}, [HBA2_OID]), (\n {'name': [HBA1_NAME, HBA2_NAME]}, [HBA1_OID, HBA2_OID]), ({'name': \n HBA1_NAME + 'foo'}, []), ({'name': [HBA1_NAME, HBA2_NAME + 'foo']}, [\n HBA1_OID]), ({'name': [HBA2_NAME + 'foo', HBA1_NAME]}, [HBA1_OID]), ({\n 'name': [HBA1_NAME, HBA1_NAME]}, [HBA1_OID]), ({'name': '.*hba 1'}, [\n HBA1_OID]), ({'name': 'hba 1.*'}, [HBA1_OID]), ({'name': 'hba .'}, [\n HBA1_OID, HBA2_OID]), ({'name': '.ba 1'}, [HBA1_OID]), ({'name': '.+'},\n [HBA1_OID, HBA2_OID]), ({'name': 'hba 1.+'}, []), ({'name': '.+hba 1'},\n []), ({'name': HBA1_NAME, 'element-id': HBA1_OID}, [HBA1_OID]), ({\n 'name': HBA1_NAME, 'element-id': HBA1_OID + 'foo'}, []), ({'name': \n HBA1_NAME + 'foo', 'element-id': HBA1_OID}, []), ({'name': HBA1_NAME +\n 'foo', 'element-id': HBA1_OID + 'foo'}, [])]"], {}), "('filter_args, exp_oids', [({'element-id': HBA1_OID},\n [HBA1_OID]), ({'element-id': HBA2_OID}, [HBA2_OID]), ({'element-id': [\n HBA1_OID, HBA2_OID]}, [HBA1_OID, HBA2_OID]), ({'element-id': [HBA1_OID,\n HBA1_OID]}, [HBA1_OID]), ({'element-id': HBA1_OID + 'foo'}, []), ({\n 'element-id': [HBA1_OID, HBA2_OID + 'foo']}, [HBA1_OID]), ({\n 'element-id': [HBA2_OID + 'foo', HBA1_OID]}, [HBA1_OID]), ({'name':\n HBA1_NAME}, [HBA1_OID]), ({'name': HBA2_NAME}, [HBA2_OID]), ({'name': [\n HBA1_NAME, HBA2_NAME]}, [HBA1_OID, HBA2_OID]), ({'name': HBA1_NAME +\n 'foo'}, []), ({'name': [HBA1_NAME, HBA2_NAME + 'foo']}, [HBA1_OID]), ({\n 'name': [HBA2_NAME + 'foo', HBA1_NAME]}, [HBA1_OID]), ({'name': [\n HBA1_NAME, HBA1_NAME]}, [HBA1_OID]), ({'name': '.*hba 1'}, [HBA1_OID]),\n ({'name': 'hba 1.*'}, [HBA1_OID]), ({'name': 'hba .'}, [HBA1_OID,\n HBA2_OID]), ({'name': '.ba 1'}, [HBA1_OID]), ({'name': '.+'}, [HBA1_OID,\n HBA2_OID]), ({'name': 'hba 1.+'}, []), ({'name': '.+hba 1'}, []), ({\n 'name': HBA1_NAME, 'element-id': HBA1_OID}, [HBA1_OID]), ({'name':\n HBA1_NAME, 'element-id': HBA1_OID + 'foo'}, []), ({'name': HBA1_NAME +\n 'foo', 'element-id': HBA1_OID}, []), ({'name': HBA1_NAME + 'foo',\n 'element-id': HBA1_OID + 'foo'}, [])])\n", (6127, 7390), False, 'import pytest\n'), ((14771, 14928), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""input_props"""', "[{}, {'description': 'New HBA description'}, {'device-number': 'FEDC',\n 'description': 'New HBA description'}]"], {}), "('input_props', [{}, {'description':\n 'New HBA description'}, {'device-number': 'FEDC', 'description':\n 'New HBA description'}])\n", (14794, 14928), False, 'import pytest\n'), ((1506, 1560), 'zhmcclient_mock.FakedSession', 'FakedSession', (['"""fake-host"""', '"""fake-hmc"""', '"""2.13.1"""', '"""1.8"""'], {}), "('fake-host', 'fake-hmc', '2.13.1', '1.8')\n", (1518, 1560), False, 'from zhmcclient_mock import FakedSession\n'), ((1583, 1603), 'zhmcclient.Client', 'Client', (['self.session'], {}), '(self.session)\n', (1589, 1603), False, 'from zhmcclient import Client, Hba, HTTPError, NotFound\n'), ((6047, 6097), 'tests.common.utils.assert_resources', 'assert_resources', (['hbas', 'exp_faked_hbas', 'prop_names'], {}), '(hbas, 
exp_faked_hbas, prop_names)\n', (6063, 6097), False, 'from tests.common.utils import assert_resources\n'), ((13992, 14027), 'copy.deepcopy', 'copy.deepcopy', (['faked_hba.properties'], {}), '(faked_hba.properties)\n', (14005, 14027), False, 'import copy\n'), ((15431, 15460), 'copy.deepcopy', 'copy.deepcopy', (['hba.properties'], {}), '(hba.properties)\n', (15444, 15460), False, 'import copy\n'), ((20883, 20912), 'copy.deepcopy', 'copy.deepcopy', (['hba.properties'], {}), '(hba.properties)\n', (20896, 20912), False, 'import copy\n'), ((14414, 14437), 'pytest.raises', 'pytest.raises', (['NotFound'], {}), '(NotFound)\n', (14427, 14437), False, 'import pytest\n'), ((17482, 17505), 'pytest.raises', 'pytest.raises', (['NotFound'], {}), '(NotFound)\n', (17495, 17505), False, 'import pytest\n'), ((10138, 10170), 'pytest.raises', 'pytest.raises', (['exp_exc.__class__'], {}), '(exp_exc.__class__)\n', (10151, 10170), False, 'import pytest\n'), ((8710, 8754), 'zhmcclient.HTTPError', 'HTTPError', (["{'http-status': 409, 'reason': 1}"], {}), "({'http-status': 409, 'reason': 1})\n", (8719, 8754), False, 'from zhmcclient import Client, Hba, HTTPError, NotFound\n'), ((8812, 8856), 'zhmcclient.HTTPError', 'HTTPError', (["{'http-status': 409, 'reason': 1}"], {}), "({'http-status': 409, 'reason': 1})\n", (8821, 8856), False, 'from zhmcclient import Client, Hba, HTTPError, NotFound\n'), ((9112, 9156), 'zhmcclient.HTTPError', 'HTTPError', (["{'http-status': 400, 'reason': 5}"], {}), "({'http-status': 400, 'reason': 5})\n", (9121, 9156), False, 'from zhmcclient import Client, Hba, HTTPError, NotFound\n'), ((9228, 9272), 'zhmcclient.HTTPError', 'HTTPError', (["{'http-status': 400, 'reason': 5}"], {}), "({'http-status': 400, 'reason': 5})\n", (9237, 9272), False, 'from zhmcclient import Client, Hba, HTTPError, NotFound\n'), ((9354, 9398), 'zhmcclient.HTTPError', 'HTTPError', (["{'http-status': 400, 'reason': 5}"], {}), "({'http-status': 400, 'reason': 5})\n", (9363, 9398), False, 'from zhmcclient import Client, Hba, HTTPError, NotFound\n'), ((12972, 13004), 'pytest.raises', 'pytest.raises', (['exp_exc.__class__'], {}), '(exp_exc.__class__)\n', (12985, 13004), False, 'import pytest\n'), ((13538, 13561), 'pytest.raises', 'pytest.raises', (['NotFound'], {}), '(NotFound)\n', (13551, 13561), False, 'import pytest\n'), ((12236, 12280), 'zhmcclient.HTTPError', 'HTTPError', (["{'http-status': 409, 'reason': 1}"], {}), "({'http-status': 409, 'reason': 1})\n", (12245, 12280), False, 'from zhmcclient import Client, Hba, HTTPError, NotFound\n'), ((12338, 12382), 'zhmcclient.HTTPError', 'HTTPError', (["{'http-status': 409, 'reason': 1}"], {}), "({'http-status': 409, 'reason': 1})\n", (12347, 12382), False, 'from zhmcclient import Client, Hba, HTTPError, NotFound\n'), ((20952, 20984), 'pytest.raises', 'pytest.raises', (['exp_exc.__class__'], {}), '(exp_exc.__class__)\n', (20965, 20984), False, 'import pytest\n'), ((18583, 18627), 'zhmcclient.HTTPError', 'HTTPError', (["{'http-status': 409, 'reason': 1}"], {}), "({'http-status': 409, 'reason': 1})\n", (18592, 18627), False, 'from zhmcclient import Client, Hba, HTTPError, NotFound\n'), ((18685, 18729), 'zhmcclient.HTTPError', 'HTTPError', (["{'http-status': 409, 'reason': 1}"], {}), "({'http-status': 409, 'reason': 1})\n", (18694, 18729), False, 'from zhmcclient import Client, Hba, HTTPError, NotFound\n')] |
medismailben/llvm-project | compiler-rt/test/asan/TestCases/Windows/lit.local.cfg.py | e334a839032fe500c3bba22bf976ab7af13ce1c1 | def getRoot(config):
if not config.parent:
return config
return getRoot(config.parent)
root = getRoot(config)
# We only run a small set of tests on Windows for now.
# Override the parent directory's "unsupported" decision until we can handle
# all of its tests.
if root.host_os in ['Windows']:
config.unsupported = False
else:
config.unsupported = True
| [] |
oskomorokhov/cpython | Lib/test/libregrtest/utils.py | c0e11a3ceb9427e09db4224f394c7789bf6deec5 | import math
import os.path
import sys
import textwrap
from test import support
def format_duration(seconds):
ms = math.ceil(seconds * 1e3)
seconds, ms = divmod(ms, 1000)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
parts = []
if hours:
parts.append('%s hour' % hours)
if minutes:
parts.append('%s min' % minutes)
if seconds:
if parts:
# 2 min 1 sec
parts.append('%s sec' % seconds)
else:
# 1.0 sec
parts.append('%.1f sec' % (seconds + ms / 1000))
if not parts:
return '%s ms' % ms
parts = parts[:2]
return ' '.join(parts)
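# Illustrative examples: format_duration(0.0005) -> '1 ms',
# format_duration(3.5) -> '3.5 sec', format_duration(3725.1) -> '1 hour 2 min'
# (only the two most significant parts are kept).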
def removepy(names):
if not names:
return
for idx, name in enumerate(names):
basename, ext = os.path.splitext(name)
if ext == '.py':
names[idx] = basename
def count(n, word):
if n == 1:
return "%d %s" % (n, word)
else:
return "%d %ss" % (n, word)
def printlist(x, width=70, indent=4, file=None):
"""Print the elements of iterable x to stdout.
Optional arg width (default 70) is the maximum line length.
Optional arg indent (default 4) is the number of blanks with which to
begin each line.
"""
blanks = ' ' * indent
# Print the sorted list: 'x' may be a '--random' list or a set()
print(textwrap.fill(' '.join(str(elt) for elt in sorted(x)), width,
initial_indent=blanks, subsequent_indent=blanks),
file=file)
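# Illustrative example: printlist({'b', 'a', 'c'}, width=20) prints "    a b c"
# (elements sorted, wrapped to 20 columns, indented by four spaces).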
def print_warning(msg):
support.print_warning(msg)
orig_unraisablehook = None
def regrtest_unraisable_hook(unraisable):
global orig_unraisablehook
support.environment_altered = True
print_warning("Unraisable exception")
old_stderr = sys.stderr
try:
sys.stderr = sys.__stderr__
orig_unraisablehook(unraisable)
finally:
sys.stderr = old_stderr
def setup_unraisable_hook():
global orig_unraisablehook
orig_unraisablehook = sys.unraisablehook
sys.unraisablehook = regrtest_unraisable_hook
def clear_caches():
# Clear the warnings registry, so they can be displayed again
for mod in sys.modules.values():
if hasattr(mod, '__warningregistry__'):
del mod.__warningregistry__
# Flush standard output, so that buffered data is sent to the OS and
# associated Python objects are reclaimed.
for stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__):
if stream is not None:
stream.flush()
# Clear assorted module caches.
# Don't worry about resetting the cache if the module is not loaded
try:
distutils_dir_util = sys.modules['distutils.dir_util']
except KeyError:
pass
else:
distutils_dir_util._path_created.clear()
try:
re = sys.modules['re']
except KeyError:
pass
else:
re.purge()
try:
_strptime = sys.modules['_strptime']
except KeyError:
pass
else:
_strptime._regex_cache.clear()
try:
urllib_parse = sys.modules['urllib.parse']
except KeyError:
pass
else:
urllib_parse.clear_cache()
try:
urllib_request = sys.modules['urllib.request']
except KeyError:
pass
else:
urllib_request.urlcleanup()
try:
linecache = sys.modules['linecache']
except KeyError:
pass
else:
linecache.clearcache()
try:
mimetypes = sys.modules['mimetypes']
except KeyError:
pass
else:
mimetypes._default_mime_types()
try:
filecmp = sys.modules['filecmp']
except KeyError:
pass
else:
filecmp._cache.clear()
try:
struct = sys.modules['struct']
except KeyError:
pass
else:
struct._clearcache()
try:
doctest = sys.modules['doctest']
except KeyError:
pass
else:
doctest.master = None
try:
ctypes = sys.modules['ctypes']
except KeyError:
pass
else:
ctypes._reset_cache()
try:
typing = sys.modules['typing']
except KeyError:
pass
else:
for f in typing._cleanups:
f()
support.gc_collect()
| [((120, 147), 'math.ceil', 'math.ceil', (['(seconds * 1000.0)'], {}), '(seconds * 1000.0)\n', (129, 147), False, 'import math\n'), ((1571, 1597), 'test.support.print_warning', 'support.print_warning', (['msg'], {}), '(msg)\n', (1592, 1597), False, 'from test import support\n'), ((2201, 2221), 'sys.modules.values', 'sys.modules.values', ([], {}), '()\n', (2219, 2221), False, 'import sys\n'), ((4281, 4301), 'test.support.gc_collect', 'support.gc_collect', ([], {}), '()\n', (4299, 4301), False, 'from test import support\n')] |
Syrkadian/efetch | efetch_server/plugins/fa_sqlite/fa_sqlite_ajax.py | 120ac963507d54998beecfd8b8cd85ad123e6e54 | """
AJAX for SQLite Viewer plugin
"""
from yapsy.IPlugin import IPlugin
from flask import Response, jsonify
import json
import logging
import sqlite3
class FaSqliteAjax(IPlugin):
def __init__(self):
self.display_name = 'SQLite Ajax'
self.popularity = 0
self.cache = True
self.fast = False
self.action = False
IPlugin.__init__(self)
def activate(self):
IPlugin.activate(self)
return
def deactivate(self):
IPlugin.deactivate(self)
return
def check(self, evidence, path_on_disk):
"""Checks if the file is compatible with this plugin"""
return True
def mimetype(self, mimetype):
"""Returns the mimetype of this plugins get command"""
return "application/json"
def get(self, evidence, helper, path_on_disk, request):
"""Returns the result of this plugin to be displayed in a browser"""
method = helper.get_request_value(request, 'method', raise_key_error=True)
if method == "base":
return self.base_tree(path_on_disk)
elif method == "children":
return self.get_children(request, helper, path_on_disk)
elif method == "values":
return self.values(request, helper, path_on_disk)
logging.error('Unknown method "' + method + '" provided')
raise ValueError('Method "' + method + '" is not valid')
def base_tree(self, path_on_disk):
connection = sqlite3.connect(path_on_disk)
cursor = connection.cursor()
base_tree = []
cursor.execute("SELECT * FROM sqlite_master WHERE type='table';")
cursor.fetchone()
# Master Table
base_tree.append({'title': u'Master Table (1)',
'key': u'master',
'folder': True,
'lazy': True
})
# Tables
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
tables = cursor.fetchall()
base_tree.append({'title': u'Tables (' + unicode(len(tables)) + u')',
'key': u'table',
'folder': True,
'lazy': True
})
# Views
cursor.execute("SELECT name FROM sqlite_master WHERE type='view';")
views = cursor.fetchall()
base_tree.append({'title': u'Views (' + unicode(len(views)) + u')',
'key': u'view',
'folder': True,
'lazy': True
})
# Indexes
cursor.execute("SELECT name FROM sqlite_master WHERE type='index';")
indexes = cursor.fetchall()
base_tree.append({'title': u'Indexes (' + unicode(len(indexes)) + u')',
'key': u'index',
'folder': True,
'lazy': True
})
# Triggers
cursor.execute("SELECT name FROM sqlite_master WHERE type='trigger';")
triggers = cursor.fetchall()
base_tree.append({'title': u'Triggers (' + unicode(len(triggers)) + u')',
'key': u'trigger',
'folder': True,
'lazy': True
})
connection.close()
# TODO REPLACE WITH DICTIONARY AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask
return Response(json.dumps(base_tree), mimetype='application/json')
def get_children(self, request, helper, path_on_disk):
key = unicode(helper.get_request_value(request, 'key'))
children = []
if key == u'master':
children.append({'title': u'Master Table (1)',
'key': u'sqlite_master',
'folder': False,
'lazy': False
})
else:
for child in self.get_tables(key, path_on_disk):
children.append({'title': child,
'key': child,
'folder': False,
'lazy': False
})
# TODO REPLACE WITH DICTIONARY AND JSONIFY, SEE: http://stackoverflow.com/questions/12435297/how-do-i-jsonify-a-list-in-flask
return Response(json.dumps(children), mimetype='application/json')
def get_tables(self, key, path_on_disk):
connection = sqlite3.connect(path_on_disk)
cursor = connection.cursor()
tables = []
table_list = cursor.execute("SELECT name FROM sqlite_master WHERE type='" + key + "';")
for table in table_list:
tables.append(unicode(table[0]))
connection.close()
return tables
def values(self, request, helper, path_on_disk):
key = unicode(helper.get_request_value(request, 'key'))
connection = sqlite3.connect(path_on_disk)
cursor = connection.cursor()
cursor.execute("pragma table_info('" + key + "')")
rows = cursor.fetchall()
table = [ u'<table id="sqlitet01" class="display">', u' <thead><tr>' ]
for row in rows:
table.append(u' <th>' + unicode(row[1]) + u'</th>')
table.append(u' </tr> </thead>')
cursor.execute('SELECT * FROM ' + key)
rows = cursor.fetchall()
for row in rows:
table.append(u' <tr>')
for item in row:
try:
table.append(u' <td>' + unicode(item) + u'</td>')
except:
table.append(u' <td>' + unicode(type(item)) + u'</td>')
table.append(u' </tr>')
table.append(u'</table>')
connection.close()
return jsonify({'table': '\n'.join(table)}) | [((364, 386), 'yapsy.IPlugin.IPlugin.__init__', 'IPlugin.__init__', (['self'], {}), '(self)\n', (380, 386), False, 'from yapsy.IPlugin import IPlugin\n'), ((420, 442), 'yapsy.IPlugin.IPlugin.activate', 'IPlugin.activate', (['self'], {}), '(self)\n', (436, 442), False, 'from yapsy.IPlugin import IPlugin\n'), ((493, 517), 'yapsy.IPlugin.IPlugin.deactivate', 'IPlugin.deactivate', (['self'], {}), '(self)\n', (511, 517), False, 'from yapsy.IPlugin import IPlugin\n'), ((1301, 1358), 'logging.error', 'logging.error', (['(\'Unknown method "\' + method + \'" provided\')'], {}), '(\'Unknown method "\' + method + \'" provided\')\n', (1314, 1358), False, 'import logging\n'), ((1485, 1514), 'sqlite3.connect', 'sqlite3.connect', (['path_on_disk'], {}), '(path_on_disk)\n', (1500, 1514), False, 'import sqlite3\n'), ((4591, 4620), 'sqlite3.connect', 'sqlite3.connect', (['path_on_disk'], {}), '(path_on_disk)\n', (4606, 4620), False, 'import sqlite3\n'), ((5043, 5072), 'sqlite3.connect', 'sqlite3.connect', (['path_on_disk'], {}), '(path_on_disk)\n', (5058, 5072), False, 'import sqlite3\n'), ((3550, 3571), 'json.dumps', 'json.dumps', (['base_tree'], {}), '(base_tree)\n', (3560, 3571), False, 'import json\n'), ((4473, 4493), 'json.dumps', 'json.dumps', (['children'], {}), '(children)\n', (4483, 4493), False, 'import json\n')] |
sslppractice/propython | raspagem/random/lista_cidades.py | fa470c3bf0dcfbb26037146d77c7491596cabb26 | import requests, json
url = 'http://educacao.dadosabertosbr.com/api/cidades/ce'
cidades = requests.get(url).content
cidades = cidades.decode('utf-8')
cidades = json.loads(cidades)
for cidade in cidades:
codigo, nome = cidade.split(':')
print(nome)
| [((161, 180), 'json.loads', 'json.loads', (['cidades'], {}), '(cidades)\n', (171, 180), False, 'import requests, json\n'), ((91, 108), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (103, 108), False, 'import requests, json\n')] |
startupgrind/mezzanine | mezzanine/__init__.py | 23d24a07c69bf8f02d60148b0b8da6c76bc5061e |
__version__ = "4.3.1.post1"
| [] |